Code example #1
void *
asource_threadproc(void *arg) {
	int r;
	unsigned char *fbuffer;
	//
	if(asource_init(NULL) < 0) {
		exit(-1);
	}
	if((fbuffer = (unsigned char*) malloc(audioparam.chunk_bytes)) == NULL) {
		ga_error("Audio source: malloc failed (%d bytes) - %s\n",
			audioparam.chunk_bytes, strerror(errno));
		exit(-1);
	}
	//
	ga_error("Audio source thread started: tid=%ld\n", ga_gettid());
	//
	while(true) {
#ifdef WIN32
		r = ga_wasapi_read(&audioparam, fbuffer, audioparam.chunk_size);
		if(r < 0) {
			ga_error("Audio source: WASAPI read failed.\n");
			break;
		}
#else
		r = snd_pcm_readi(audioparam.handle, fbuffer, audioparam.chunk_size);
		if(r == -EAGAIN) {
			snd_pcm_wait(audioparam.handle, 1000);
			continue;
		} else if(r < 0) {
			ga_error("Audio source: ALSA read failed - %s\n",
				snd_strerror(r));
			break;
		}
#endif
		audio_source_buffer_fill(fbuffer, r);
	}
	//
	ga_error("audio capture thread terminated.\n");
	//
	return NULL;
}
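The capture loop above is intended to run on its own thread. A minimal launch sketch, assuming POSIX threads; the wrapper name asource_start and the thread variable are hypothetical, and only asource_threadproc comes from the example:

#include <pthread.h>

static pthread_t asource_tid;

int
asource_start(void) {
	// spawn the capture loop shown above; note that it calls exit(-1) itself
	// if audio-source initialization or the chunk-buffer allocation fails
	if(pthread_create(&asource_tid, NULL, asource_threadproc, NULL) != 0)
		return -1;
	return 0;
}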
Code example #2
File: rtspserver.cpp  Project: 3009420/gaminganywhere
void*
rtspserver(void *arg) {
#ifdef WIN32
	SOCKET s = *((SOCKET*) arg);
	int sinlen = sizeof(struct sockaddr_in);
#else
	int s = *((int*) arg);
	socklen_t sinlen = sizeof(struct sockaddr_in);
#endif
	const char *p;
	char buf[8192];
	char cmd[32], url[1024], protocol[32];
	int rlen;
	struct sockaddr_in sin;
	RTSPContext ctx;
	RTSPMessageHeader header1, *header = &header1;
	//int thread_ret;
	// image info
	//int iwidth = video_source_maxwidth(0);
	//int iheight = video_source_maxheight(0);
	//
	rtspconf = rtspconf_global();
	sinlen = sizeof(sin);
	getpeername(s, (struct sockaddr*) &sin, &sinlen);
	//
	bzero(&ctx, sizeof(ctx));
	if(per_client_init(&ctx) < 0) {
		ga_error("server initialization failed.\n");
		return NULL;
	}
	bcopy(&sin, &ctx.client, sizeof(ctx.client));
	ctx.state = SERVER_STATE_IDLE;
	// XXX: hasVideo is used to sync audio/video.
	// It is increased by 1 for each captured frame until it is greater than zero;
	// once it is greater than zero, audio encoding then starts ...
	//ctx.hasVideo = -(rtspconf->video_fps>>1);	// for slow encoders?
	ctx.hasVideo = 0;	// with 'zerolatency'
	pthread_mutex_init(&ctx.rtsp_writer_mutex, NULL);
	//
	ga_error("[tid %ld] client connected from %s:%d\n",
		ga_gettid(),
		inet_ntoa(sin.sin_addr), htons(sin.sin_port));
	//
	ctx.fd = s;
	//
	do {
		int i, fdmax, active;
		fd_set rfds;
		struct timeval to;
		FD_ZERO(&rfds);
		FD_SET(ctx.fd, &rfds);
		fdmax = ctx.fd;
#ifdef HOLE_PUNCHING
		for(i = 0; i < 2*ctx.streamCount; i++) {
			FD_SET(ctx.rtpSocket[i], &rfds);
			if(ctx.rtpSocket[i] > fdmax)
				fdmax = ctx.rtpSocket[i];
		}
#endif
		to.tv_sec = 0;
		to.tv_usec = 500000;
		if((active = select(fdmax+1, &rfds, NULL, NULL, &to)) < 0) {
			ga_error("select() failed: %s\n", strerror(errno));
			goto quit;
		}
		if(active == 0) {
			// try again!
			continue;
		}
#ifdef HOLE_PUNCHING
		for(i = 0; i < 2*ctx.streamCount; i++) {
			struct sockaddr_in xsin;
#ifdef WIN32
			int xsinlen = sizeof(xsin);
#else
			socklen_t xsinlen = sizeof(xsin);
#endif
			if(FD_ISSET(ctx.rtpSocket[i], &rfds) == 0)
				continue;
			recvfrom(ctx.rtpSocket[i], buf, sizeof(buf), 0,
				(struct sockaddr*) &xsin, &xsinlen);
			if(ctx.rtpPortChecked[i] != 0)
				continue;
			// XXX: port should not flip-flop, so check only once
			if(xsin.sin_addr.s_addr != ctx.client.sin_addr.s_addr) {
				ga_error("RTP: client address mismatched? %u.%u.%u.%u != %u.%u.%u.%u\n",
					NIPQUAD(ctx.client.sin_addr.s_addr),
					NIPQUAD(xsin.sin_addr.s_addr));
				continue;
			}
			if(xsin.sin_port != ctx.rtpPeerPort[i]) {
				ga_error("RTP: client port reconfigured: %u -> %u\n",
					(unsigned int) ntohs(ctx.rtpPeerPort[i]),
					(unsigned int) ntohs(xsin.sin_port));
				ctx.rtpPeerPort[i] = xsin.sin_port;
			} else {
				ga_error("RTP: client is not under an NAT, port %d confirmed\n",
					(int) ntohs(ctx.rtpPeerPort[i]));
			}
			ctx.rtpPortChecked[i] = 1;
		}
		// is RTSP connection?
		if(FD_ISSET(ctx.fd, &rfds) == 0)
			continue;
#endif
		// read commands
		if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) {
			goto quit;
		}
		// Interleaved binary data?
		if(buf[0] == '$') {
			handle_rtcp(&ctx, buf, rlen);
			continue;
		}
		// REQUEST line
		ga_error("%s", buf);
		p = buf;
		get_word(cmd, sizeof(cmd), &p);
		get_word(url, sizeof(url), &p);
		get_word(protocol, sizeof(protocol), &p);
		// check protocol
		if(strcmp(protocol, "RTSP/1.0") != 0) {
			rtsp_reply_error(&ctx, RTSP_STATUS_VERSION);
			goto quit;
		}
		// read headers
		bzero(header, sizeof(*header));
		do {
			int myseq = -1;
			char mysession[sizeof(header->session_id)] = "";
			if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0)
				goto quit;
			if(buf[0]=='\n' || (buf[0]=='\r' && buf[1]=='\n'))
				break;
#if 0
			ga_error("HEADER: %s", buf);
#endif
			// Special handling for the CSeq & Session headers:
			// ff_rtsp_parse_line cannot handle CSeq & Session properly on Windows
			// (still the case?)
			if(strncasecmp("CSeq: ", buf, 6) == 0) {
				myseq = strtol(buf+6, NULL, 10);
			}
			if(strncasecmp("Session: ", buf, 9) == 0) {
				strcpy(mysession, buf+9);
			}
			//
			ff_rtsp_parse_line(header, buf, NULL, NULL);
			//
			if(myseq > 0 && header->seq <= 0) {
				ga_error("WARNING: CSeq fixes applied (%d->%d).\n",
					header->seq, myseq);
				header->seq = myseq;
			}
			if(mysession[0] != '\0' && header->session_id[0]=='\0') {
				unsigned i;
				for(i = 0; i < sizeof(header->session_id)-1; i++) {
					if(mysession[i] == '\0'
					|| isspace(mysession[i])
					|| mysession[i] == ';')
						break;
					header->session_id[i] = mysession[i];
				}
				header->session_id[i+1] = '\0';
				ga_error("WARNING: Session fixes applied (%s)\n",
					header->session_id);
			}
		} while(1);
		// special handling for session_id
		if(header->session_id != NULL) {
			char *p = header->session_id;
			while(*p != '\0') {
				if(*p == '\r' || *p == '\n') {
					*p = '\0';
					break;
				}
				p++;
			}
		}
		// handle commands
		ctx.seq = header->seq;
		if (!strcmp(cmd, "DESCRIBE"))
			rtsp_cmd_describe(&ctx, url);
		else if (!strcmp(cmd, "OPTIONS"))
			rtsp_cmd_options(&ctx, url);
		else if (!strcmp(cmd, "SETUP"))
			rtsp_cmd_setup(&ctx, url, header);
		else if (!strcmp(cmd, "PLAY"))
			rtsp_cmd_play(&ctx, url, header);
		else if (!strcmp(cmd, "PAUSE"))
			rtsp_cmd_pause(&ctx, url, header);
		else if (!strcmp(cmd, "TEARDOWN"))
			rtsp_cmd_teardown(&ctx, url, header, 1);
		else
			rtsp_reply_error(&ctx, RTSP_STATUS_METHOD);
		if(ctx.state == SERVER_STATE_TEARDOWN) {
			break;
		}
	} while(1);
quit:
	ctx.state = SERVER_STATE_TEARDOWN;
	//
	close(ctx.fd);
	// 2014-05-20: support only share-encoder model
	ff_server_unregister_client(&ctx);
	//
	per_client_deinit(&ctx);
	//ga_error("RTSP client thread terminated (%d/%d clients left).\n",
	//	video_source_client_count(), audio_source_client_count());
	ga_error("RTSP client thread terminated.\n");
	//
	return NULL;
}
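For context, rtspserver() reads its argument as a pointer to an already-accepted TCP socket. A per-client accept-loop sketch under that assumption; everything here except rtspserver itself is hypothetical:

#include <pthread.h>
#include <stdlib.h>
#include <sys/socket.h>
#include <unistd.h>

void
serve_rtsp_clients(int listenfd) {
	while(1) {
		pthread_t tid;
		// heap copy: a stack variable could be reused before the thread reads it
		int *connfd = (int*) malloc(sizeof(int));
		if(connfd == NULL)
			break;
		if((*connfd = accept(listenfd, NULL, NULL)) < 0) {
			free(connfd);
			continue;
		}
		if(pthread_create(&tid, NULL, rtspserver, connfd) != 0) {
			close(*connfd);
			free(connfd);
			continue;
		}
		pthread_detach(tid);
		// note: connfd is not freed here because rtspserver only dereferences
		// it at startup; a small per-client allocation is leaked in this sketch
	}
}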
Code example #3
File: rtspserver.cpp  Project: wiplug/gaminganywhere
void*
rtspserver(void *arg) {
#ifdef WIN32
	SOCKET s = *((SOCKET*) arg);
	int sinlen = sizeof(struct sockaddr_in);
#else
	int s = *((int*) arg);
	socklen_t sinlen = sizeof(struct sockaddr_in);
#endif
	const char *p;
	char buf[8192];
	char cmd[32], url[1024], protocol[32];
	int rlen;
	struct sockaddr_in sin;
	RTSPContext ctx;
	RTSPMessageHeader header1, *header = &header1;
	int thread_ret;
	// image info
	int iwidth = video_source_width(0);
	int iheight = video_source_height(0);
	//
	rtspconf = rtspconf_global();
	sinlen = sizeof(sin);
	getpeername(s, (struct sockaddr*) &sin, &sinlen);
	//
	bzero(&ctx, sizeof(ctx));
	if(per_client_init(&ctx) < 0) {
		ga_error("server initialization failed.\n");
		return NULL;
	}
	ctx.state = SERVER_STATE_IDLE;
	// XXX: hasVideo is used to sync audio/video.
	// It is increased by 1 for each captured frame until it is greater than zero;
	// once it is greater than zero, audio encoding then starts ...
	//ctx.hasVideo = -(rtspconf->video_fps>>1);	// for slow encoders?
	ctx.hasVideo = 0;	// with 'zerolatency'
	pthread_mutex_init(&ctx.rtsp_writer_mutex, NULL);
#if 0
	ctx.audioparam.channels = rtspconf->audio_channels;
	ctx.audioparam.samplerate = rtspconf->audio_samplerate;
	if(rtspconf->audio_device_format == AV_SAMPLE_FMT_S16) {
#ifdef WIN32
#else
		ctx.audioparam.format = SND_PCM_FORMAT_S16_LE;
#endif
		ctx.audioparam.bits_per_sample = 16;
	}
	//
	ga_error("INFO: image: %dx%d; audio: %d ch 16-bit pcm @ %dHz\n",
			iwidth, iheight,
			ctx.audioparam.channels,
			ctx.audioparam.samplerate);
#endif
	//
#if 0
#ifdef WIN32
	if(ga_wasapi_init(&ctx.audioparam) < 0) {
		ga_error("cannot init wasapi.\n");
		return NULL;
	}
#else
	if((ctx.audioparam.handle = ga_alsa_init(&ctx.audioparam.sndlog)) == NULL) {
		ga_error("cannot init alsa.\n");
		return NULL;
	}
	if(ga_alsa_set_param(&ctx.audioparam) < 0) {
		ga_error("cannot set alsa parameter\n");
		return NULL;
	}
#endif
#endif
	//
	ga_error("[tid %ld] client connected from %s:%d\n",
		ga_gettid(),
		inet_ntoa(sin.sin_addr), htons(sin.sin_port));
	//
	ctx.fd = s;
	//
	do {
		fd_set rfds;
		FD_ZERO(&rfds);
		FD_SET(ctx.fd, &rfds);
		if(select(ctx.fd+1, &rfds, NULL, NULL, NULL) <=0) {
			ga_error("select() failed: %s\n", strerror(errno));
			goto quit;
		}
		// read commands
		if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) {
			goto quit;
		}
		// Interleaved binary data?
		if(buf[0] == '$') {
			handle_rtcp(&ctx, buf, rlen);
			continue;
		}
		// REQUEST line
		ga_error("%s", buf);
		p = buf;
		get_word(cmd, sizeof(cmd), &p);
		get_word(url, sizeof(url), &p);
		get_word(protocol, sizeof(protocol), &p);
		// check protocol
		if(strcmp(protocol, "RTSP/1.0") != 0) {
			rtsp_reply_error(&ctx, RTSP_STATUS_VERSION);
			goto quit;
		}
		// read headers
		bzero(header, sizeof(*header));
		do {
			int myseq = -1;
			char mysession[sizeof(header->session_id)] = "";
			if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0)
				goto quit;
			if(buf[0]=='\n' || (buf[0]=='\r' && buf[1]=='\n'))
				break;
#if 0
			ga_error("HEADER: %s", buf);
#endif
			// Special handling for the CSeq & Session headers:
			// ff_rtsp_parse_line cannot handle CSeq & Session properly on Windows
			// (still the case?)
			if(strncasecmp("CSeq: ", buf, 6) == 0) {
				myseq = strtol(buf+6, NULL, 10);
			}
			if(strncasecmp("Session: ", buf, 9) == 0) {
				strcpy(mysession, buf+9);
			}
			//
			ff_rtsp_parse_line(header, buf, NULL, NULL);
			//
			if(myseq > 0 && header->seq <= 0) {
				ga_error("WARNING: CSeq fixes applied (%d->%d).\n",
					header->seq, myseq);
				header->seq = myseq;
			}
			if(mysession[0] != '\0' && header->session_id[0]=='\0') {
				unsigned i;
				for(i = 0; i < sizeof(header->session_id)-1; i++) {
					if(mysession[i] == '\0'
					|| isspace(mysession[i])
					|| mysession[i] == ';')
						break;
					header->session_id[i] = mysession[i];
				}
				header->session_id[i+1] = '\0';
				ga_error("WARNING: Session fixes applied (%s)\n",
					header->session_id);
			}
		} while(1);
		// special handling for session_id
		if(header->session_id != NULL) {
			char *p = header->session_id;
			while(*p != '\0') {
				if(*p == '\r' || *p == '\n') {
					*p = '\0';
					break;
				}
				p++;
			}
		}
		// handle commands
		ctx.seq = header->seq;
		if (!strcmp(cmd, "DESCRIBE"))
			rtsp_cmd_describe(&ctx, url);
		else if (!strcmp(cmd, "OPTIONS"))
			rtsp_cmd_options(&ctx, url);
		else if (!strcmp(cmd, "SETUP"))
			rtsp_cmd_setup(&ctx, url, header);
		else if (!strcmp(cmd, "PLAY"))
			rtsp_cmd_play(&ctx, url, header);
		else if (!strcmp(cmd, "PAUSE"))
			rtsp_cmd_pause(&ctx, url, header);
		else if (!strcmp(cmd, "TEARDOWN"))
			rtsp_cmd_teardown(&ctx, url, header);
		else
			rtsp_reply_error(&ctx, RTSP_STATUS_METHOD);
		if(ctx.state == SERVER_STATE_TEARDOWN) {
			break;
		}
	} while(1);
quit:
	ctx.state = SERVER_STATE_TEARDOWN;
	//
	close(ctx.fd);
#ifdef	SHARE_ENCODER
	encoder_unregister_client(&ctx);
#else
	ga_error("connection closed, checking for worker threads...\n");
#if 0
	//
	if(ctx.vthreadId != 0) {
		video_source_notify_one(ctx.vthreadId);
	}
#endif
	pthread_join(ctx.vthread, (void**) &thread_ret);
#ifdef	ENABLE_AUDIO
	pthread_join(ctx.athread, (void**) &thread_ret);
#endif	/* ENABLE_AUDIO */
#endif	/* SHARE_ENCODER */
	//
	per_client_deinit(&ctx);
	//ga_error("RTSP client thread terminated (%d/%d clients left).\n",
	//	video_source_client_count(), audio_source_client_count());
	ga_error("RTSP client thread terminated.\n");
	//
	return NULL;
}
Code example #4
void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipe
	// image info
	int iid;
	int iwidth;
	int iheight;
	int rtp_id;
	struct pooldata *data = NULL;
	struct vsource_frame *frame = NULL;
	pipeline *pipe = (pipeline*) arg;
	AVCodecContext *encoder = NULL;
	//
	AVFrame *pic_in = NULL;
	unsigned char *pic_in_buf = NULL;
	int pic_in_size;
	unsigned char *nalbuf = NULL, *nalbuf_a = NULL;
	int nalbuf_size = 0, nalign = 0;
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int resolution[2];
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: NULL pipeline specified.\n");
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	// init variables
	iid = ((struct vsource_config*) pipe->get_privdata())->id;
	iwidth = video_source_maxwidth(iid);
	iheight = video_source_maxheight(iid);
	rtp_id = ((struct vsource_config*) pipe->get_privdata())->rtp_id;
	//
	outputW = iwidth;	// by default, the same as max resolution
	outputH = iheight;
	if(ga_conf_readints("output-resolution", resolution, 2) == 2) {
		outputW = resolution[0];
		outputH = resolution[1];
	}
	//
	ga_error("video encoder: image source from '%s' (%dx%d) via channel %d, resolution=%dx%d.\n",
		pipe->name(), iwidth, iheight, rtp_id, outputW, outputH);
	//
	encoder = ga_avcodec_vencoder_init(
			NULL,
			rtspconf->video_encoder_codec,
			outputW, outputH,
			rtspconf->video_fps,
			rtspconf->vso);
	if(encoder == NULL) {
		ga_error("video encoder: cannot initialized the encoder.\n");
		goto video_quit;
	}
	//
	nalbuf_size = 100000 + 12 * outputW * outputH;
	if(ga_malloc(nalbuf_size, (void**) &nalbuf, &nalign) < 0) {
		ga_error("video encoder: buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	nalbuf_a = nalbuf + nalign;
	//
	if((pic_in = avcodec_alloc_frame()) == NULL) {
		ga_error("video encoder: picture allocation failed, terminated.\n");
		goto video_quit;
	}
	pic_in_size = avpicture_get_size(PIX_FMT_YUV420P, outputW, outputH);
	if((pic_in_buf = (unsigned char*) av_malloc(pic_in_size)) == NULL) {
		ga_error("video encoder: picture buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	avpicture_fill((AVPicture*) pic_in, pic_in_buf,
			PIX_FMT_YUV420P, outputW, outputH);
	//ga_error("video encoder: linesize = %d|%d|%d\n", pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
	// start encoding
	ga_error("video encoding started: tid=%ld %dx%d@%dfps, nalbuf_size=%d, pic_in_size=%d.\n",
		ga_gettid(),
		iwidth, iheight, rtspconf->video_fps,
		nalbuf_size, pic_in_size);
	//
	pipe->client_register(ga_gettid(), &cond);
	//
	while(encoder_running() > 0) {
		AVPacket pkt;
		int got_packet = 0;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (struct vsource_frame*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// XXX: assume always YUV420P
		if(pic_in->linesize[0] == frame->linesize[0]
		&& pic_in->linesize[1] == frame->linesize[1]
		&& pic_in->linesize[2] == frame->linesize[2]) {
			bcopy(frame->imgbuf, pic_in_buf, pic_in_size);
		} else {
			ga_error("video encoder: YUV mode failed - mismatched linesize(s) (src:%d,%d,%d; dst:%d,%d,%d)\n",
				frame->linesize[0], frame->linesize[1], frame->linesize[2],
				pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
			pipe->release_data(data);
			goto video_quit;
		}
		pipe->release_data(data);
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// encode
		pic_in->pts = pts;
		av_init_packet(&pkt);
		pkt.data = nalbuf_a;
		pkt.size = nalbuf_size;
		if(avcodec_encode_video2(encoder, &pkt, pic_in, &got_packet) < 0) {
			ga_error("video encoder: encode failed, terminated.\n");
			goto video_quit;
		}
		if(got_packet) {
			if(pkt.pts == (int64_t) AV_NOPTS_VALUE) {
				pkt.pts = pts;
			}
			pkt.stream_index = 0;
			// send the packet
			if(encoder_send_packet_all("video-encoder",
				rtp_id/*rtspconf->video_id*/, &pkt,
				pkt.pts) < 0) {
				goto video_quit;
			}
			// free unused side-data
			if(pkt.side_data_elems > 0) {
				int i;
				for (i = 0; i < pkt.side_data_elems; i++)
					av_free(pkt.side_data[i].data);
				av_freep(&pkt.side_data);
				pkt.side_data_elems = 0;
			}
			//
			if(video_written == 0) {
				video_written = 1;
				ga_error("first video frame written (pts=%lld)\n", pts);
			}
		}
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	if(pic_in_buf)	av_free(pic_in_buf);
	if(pic_in)	av_free(pic_in);
	if(nalbuf)	free(nalbuf);
	if(encoder)	ga_avcodec_close(encoder);
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
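One detail from the loop above worth isolating: the deadline handed to pipe->timedwait() is an absolute struct timespec built from gettimeofday() (fixed here at one second ahead). A standalone sketch of that conversion; the helper name is hypothetical:

#include <sys/time.h>
#include <time.h>

// Build an absolute deadline 'ms' milliseconds from now, in the form expected
// by pthread_cond_timedwait-style waits (the encoder loop uses 1000 ms).
static void
deadline_from_now(struct timespec *to, int ms) {
	struct timeval tv;
	gettimeofday(&tv, NULL);
	to->tv_sec  = tv.tv_sec + ms / 1000;
	to->tv_nsec = (tv.tv_usec + (ms % 1000) * 1000L) * 1000L;
	if(to->tv_nsec >= 1000000000L) {	// carry the overflow into seconds
		to->tv_sec  += 1;
		to->tv_nsec -= 1000000000L;
	}
}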
Code example #5
File: encoder-vpu.cpp  Project: Ljinod/gaminganywhere
/// TODO
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int cid;
	pooldata_t *data = NULL;
	vsource_frame_t *frame = NULL;
	char *pipename = (char*) arg;
	pipeline *pipe = pipeline::lookup(pipename);
	struct RTSPConf *rtspconf = NULL;
	//
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int outputW, outputH;
	//
	struct timeval pkttv;
#ifdef PRINT_LATENCY
	struct timeval ptv;
#endif
	//
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: invalid pipeline specified (%s).\n", pipename);
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	cid = ((vsource_t*) pipe->get_privdata())->channel;
	outputW = video_source_out_width(cid);
	outputH = video_source_out_height(cid);
	//
	// start encoding
	ga_error("video encoding started: tid=%ld.\n", ga_gettid());
	pipe->client_register(ga_gettid(), &cond);
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		//
		AVPacket pkt;
		unsigned char *enc;
		int encsize;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (vsource_frame_t*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// encode!
		gettimeofday(&pkttv, NULL);
		enc = vpu_encoder_encode(&vpu[cid], frame->imgbuf, vpu[cid].vpu_framesize, &encsize);
		//
		pipe->release_data(data);
		//
		if(enc == NULL) {
			ga_error("encoder-vpu: encode failed.\n");
			goto video_quit;
		}
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// send packet
#ifdef SAVEFILE
		if(fout != NULL)
			fwrite(enc, sizeof(char), encsize, fout);
#endif
		av_init_packet(&pkt);	// reset fields; pkt.pts would otherwise be uninitialized below
		pkt.data = enc;
		pkt.size = encsize;
		pkt.pts = pts;
		if(encoder_send_packet_all("video-encoder", cid, &pkt, pkt.pts, &pkttv) < 0) {
			goto video_quit;
		}
		if(video_written == 0) {
			video_written = 1;
			ga_error("first video frame written (pts=%lld)\n", pts);
		}
#ifdef PRINT_LATENCY		/* print out latency */
		gettimeofday(&ptv, NULL);
		ga_aggregated_print(0x0001, 601, tvdiff_us(&ptv, &frame->timestamp));
#endif
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
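Both encoder thread procedures share the same pts bookkeeping: the first frame's imgpts is pinned to encoder_pts_sync(), later frames are offset from that base, and a frame that would not advance the timestamp is bumped by one. A condensed sketch of that mapping; sync_base stands in for encoder_pts_sync() and all names are illustrative:

// Condensed pts-mapping sketch (illustrative names only).
static long long base_pts = -1LL, pts_sync = 0LL, last_pts = -1LL;

static long long
map_pts(long long imgpts, long long sync_base) {
	long long newpts;
	if(base_pts == -1LL) {		// first frame: latch the base and sync point
		base_pts = imgpts;
		pts_sync = sync_base;
		newpts = pts_sync;
	} else {
		newpts = pts_sync + imgpts - base_pts;
	}
	// pts must be monotonically increasing
	last_pts = (newpts > last_pts) ? newpts : last_pts + 1;
	return last_pts;
}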