GAVideoLiveSource::~GAVideoLiveSource() {
	// Per-instance teardown: clear this channel's slot in the global
	// live-source table so it can be re-created later.
	vLiveSource[this->channelId] = NULL;
	--referenceCount;
	if(referenceCount == 0) {
		// Last live source is gone: release everything shared across
		// instances (encoder registration, packet-queue callback, and
		// the live555 event trigger for this channel).
		encoder_unregister_client(this);
		remove_startcode = 0;
		m = NULL;
		encoder_pktqueue_unregister_callback(this->channelId, signalNewVideoFrameData);
		// Reclaim our 'event trigger'
		envir().taskScheduler().deleteEventTrigger(eventTriggerId[this->channelId]);
		eventTriggerId[this->channelId] = 0;
	}
}
void* rtspserver(void *arg) { #ifdef WIN32 SOCKET s = *((SOCKET*) arg); int sinlen = sizeof(struct sockaddr_in); #else int s = *((int*) arg); socklen_t sinlen = sizeof(struct sockaddr_in); #endif const char *p; char buf[8192]; char cmd[32], url[1024], protocol[32]; int rlen; struct sockaddr_in sin; RTSPContext ctx; RTSPMessageHeader header1, *header = &header1; int thread_ret; // image info int iwidth = video_source_width(0); int iheight = video_source_height(0); // rtspconf = rtspconf_global(); sinlen = sizeof(sin); getpeername(s, (struct sockaddr*) &sin, &sinlen); // bzero(&ctx, sizeof(ctx)); if(per_client_init(&ctx) < 0) { ga_error("server initialization failed.\n"); return NULL; } ctx.state = SERVER_STATE_IDLE; // XXX: hasVideo is used to sync audio/video // This value is increased by 1 for each captured frame until it is gerater than zero // when this value is greater than zero, audio encoding then starts ... //ctx.hasVideo = -(rtspconf->video_fps>>1); // for slow encoders? ctx.hasVideo = 0; // with 'zerolatency' pthread_mutex_init(&ctx.rtsp_writer_mutex, NULL); #if 0 ctx.audioparam.channels = rtspconf->audio_channels; ctx.audioparam.samplerate = rtspconf->audio_samplerate; if(rtspconf->audio_device_format == AV_SAMPLE_FMT_S16) { #ifdef WIN32 #else ctx.audioparam.format = SND_PCM_FORMAT_S16_LE; #endif ctx.audioparam.bits_per_sample = 16; } // ga_error("INFO: image: %dx%d; audio: %d ch 16-bit pcm @ %dHz\n", iwidth, iheight, ctx.audioparam.channels, ctx.audioparam.samplerate); #endif // #if 0 #ifdef WIN32 if(ga_wasapi_init(&ctx.audioparam) < 0) { ga_error("cannot init wasapi.\n"); return NULL; } #else if((ctx.audioparam.handle = ga_alsa_init(&ctx.audioparam.sndlog)) == NULL) { ga_error("cannot init alsa.\n"); return NULL; } if(ga_alsa_set_param(&ctx.audioparam) < 0) { ga_error("cannot set alsa parameter\n"); return NULL; } #endif #endif // ga_error("[tid %ld] client connected from %s:%d\n", ga_gettid(), inet_ntoa(sin.sin_addr), htons(sin.sin_port)); // ctx.fd = 
s; // do { fd_set rfds; FD_ZERO(&rfds); FD_SET(ctx.fd, &rfds); if(select(ctx.fd+1, &rfds, NULL, NULL, NULL) <=0) { ga_error("select() failed: %s\n", strerror(errno)); goto quit; } // read commands if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) { goto quit; } // Interleaved binary data? if(buf[0] == '$') { handle_rtcp(&ctx, buf, rlen); continue; } // REQUEST line ga_error("%s", buf); p = buf; get_word(cmd, sizeof(cmd), &p); get_word(url, sizeof(url), &p); get_word(protocol, sizeof(protocol), &p); // check protocol if(strcmp(protocol, "RTSP/1.0") != 0) { rtsp_reply_error(&ctx, RTSP_STATUS_VERSION); goto quit; } // read headers bzero(header, sizeof(*header)); do { int myseq = -1; char mysession[sizeof(header->session_id)] = ""; if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) goto quit; if(buf[0]=='\n' || (buf[0]=='\r' && buf[1]=='\n')) break; #if 0 ga_error("HEADER: %s", buf); #endif // Special handling to CSeq & Session header // ff_rtsp_parse_line cannot handle CSeq & Session properly on Windows // any more? 
if(strncasecmp("CSeq: ", buf, 6) == 0) { myseq = strtol(buf+6, NULL, 10); } if(strncasecmp("Session: ", buf, 9) == 0) { strcpy(mysession, buf+9); } // ff_rtsp_parse_line(header, buf, NULL, NULL); // if(myseq > 0 && header->seq <= 0) { ga_error("WARNING: CSeq fixes applied (%d->%d).\n", header->seq, myseq); header->seq = myseq; } if(mysession[0] != '\0' && header->session_id[0]=='\0') { unsigned i; for(i = 0; i < sizeof(header->session_id)-1; i++) { if(mysession[i] == '\0' || isspace(mysession[i]) || mysession[i] == ';') break; header->session_id[i] = mysession[i]; } header->session_id[i+1] = '\0'; ga_error("WARNING: Session fixes applied (%s)\n", header->session_id); } } while(1); // special handle to session_id if(header->session_id != NULL) { char *p = header->session_id; while(*p != '\0') { if(*p == '\r' || *p == '\n') { *p = '\0'; break; } p++; } } // handle commands ctx.seq = header->seq; if (!strcmp(cmd, "DESCRIBE")) rtsp_cmd_describe(&ctx, url); else if (!strcmp(cmd, "OPTIONS")) rtsp_cmd_options(&ctx, url); else if (!strcmp(cmd, "SETUP")) rtsp_cmd_setup(&ctx, url, header); else if (!strcmp(cmd, "PLAY")) rtsp_cmd_play(&ctx, url, header); else if (!strcmp(cmd, "PAUSE")) rtsp_cmd_pause(&ctx, url, header); else if (!strcmp(cmd, "TEARDOWN")) rtsp_cmd_teardown(&ctx, url, header); else rtsp_reply_error(&ctx, RTSP_STATUS_METHOD); if(ctx.state == SERVER_STATE_TEARDOWN) { break; } } while(1); quit: ctx.state = SERVER_STATE_TEARDOWN; // close(ctx.fd); #ifdef SHARE_ENCODER encoder_unregister_client(&ctx); #else ga_error("connection closed, checking for worker threads...\n"); #if 0 // if(ctx.vthreadId != 0) { video_source_notify_one(ctx.vthreadId); } #endif pthread_join(ctx.vthread, (void**) &thread_ret); #ifdef ENABLE_AUDIO pthread_join(ctx.athread, (void**) &thread_ret); #endif /* ENABLE_AUDIO */ #endif /* SHARE_ENCODER */ // per_client_deinit(&ctx); //ga_error("RTSP client thread terminated (%d/%d clients left).\n", // video_source_client_count(), 
audio_source_client_count()); ga_error("RTSP client thread terminated.\n"); // return NULL; }
/*
 * Detach a client context from the shared encoder.
 * Thin wrapper normalizing encoder_unregister_client()'s result:
 * returns 0 on success, -1 on failure.
 */
int
live_server_unregister_client(void *ccontext) {
	return (encoder_unregister_client(ccontext) < 0) ? -1 : 0;
}