Exemplo n.º 1
0
/*
 * Initialize one VPU hardware encoder per video source channel.
 *
 * arg: printf-style pipeline-name format string (e.g. "video-%d");
 *      the channel id is substituted for each source.
 * Returns 0 on success (or if already initialized), -1 on failure.
 * On failure all partially-initialized state is torn down via
 * vencoder_deinit().
 */
static int
vencoder_init(void *arg) {
	int iid;
	char *pipefmt = (char*) arg;
	struct RTSPConf *rtspconf = rtspconf_global();
	//
	if(rtspconf == NULL) {
		ga_error("video encoder: no configuration found\n");
		return -1;
	}
	// idempotent: a second call is a no-op
	if(vencoder_initialized != 0)
		return 0;
	//
	for(iid = 0; iid < video_source_channels(); iid++) {
		char pipename[64];
		int outputW, outputH;
		pipeline *pipe;
		// SPS/PPS caches are filled lazily later
		_sps[iid] = _pps[iid] = NULL;
		_spslen[iid] = _ppslen[iid] = 0;
		snprintf(pipename, sizeof(pipename), pipefmt, iid);
		outputW = video_source_out_width(iid);
		outputH = video_source_out_height(iid);
		if((pipe = pipeline::lookup(pipename)) == NULL) {
			ga_error("video encoder: pipe %s is not found\n", pipename);
			goto init_failed;
		}
		// fixed: dropped spurious trailing 'iid' argument (format has
		// only four conversion specifiers)
		ga_error("video encoder: video source #%d from '%s' (%dx%d).\n",
			iid, pipe->name(), outputW, outputH);
		// bitrate is configured in bits/s; VPU API takes kbit/s
		if(vpu_encoder_init(&vpu[iid], outputW, outputH, rtspconf->video_fps, 1,
				ga_conf_mapreadint("video-specific", "b") / 1000,
				ga_conf_mapreadint("video-specific", "g")) < 0)
			goto init_failed;
	}
	vencoder_initialized = 1;
	ga_error("video encoder: initialized (%d channels).\n", iid);
	return 0;
init_failed:
	vencoder_deinit(NULL);
	return -1;
}
Exemplo n.º 2
0
/*
 * Create and register one RTP output stream (video or audio) for a client.
 *
 * ctx      - per-client RTSP context; on success owns the new AVFormatContext,
 *            AVStream, and encoder at index 'streamid'.
 * sin      - client address, used only for the RTP URL in the
 *            non-HOLE_PUNCHING UDP path.
 * streamid - slot index into ctx arrays.
 * codecid  - must match the configured video or audio encoder codec.
 * Returns 0 on success, -1 on failure.
 *
 * Fixed: failure paths after avformat_alloc_context() previously leaked
 * fmtctx (and any opened I/O context); they now funnel through 'cleanup'.
 */
static int
rtp_new_av_stream(RTSPContext *ctx, struct sockaddr_in *sin, int streamid, enum AVCodecID codecid) {
	AVOutputFormat *fmt = NULL;
	AVFormatContext *fmtctx = NULL;
	AVStream *stream = NULL;
	AVCodecContext *encoder = NULL;
	uint8_t *dummybuf = NULL;
	// NOTE(review): '>' suggests arrays are sized VIDEO_SOURCE_CHANNEL_MAX+1;
	// confirm against the RTSPContext declaration.
	if(streamid > VIDEO_SOURCE_CHANNEL_MAX) {
		ga_error("invalid stream index (%d > %d)\n",
			streamid, VIDEO_SOURCE_CHANNEL_MAX);
		return -1;
	}
	if(codecid != rtspconf->video_encoder_codec->id
	&& codecid != rtspconf->audio_encoder_codec->id) {
		ga_error("invalid codec (%d)\n", codecid);
		return -1;
	}
	if(ctx->fmtctx[streamid] != NULL) {
		ga_error("duplicated setup to an existing stream (%d)\n",
			streamid);
		return -1;
	}
	if((fmt = av_guess_format("rtp", NULL, NULL)) == NULL) {
		ga_error("RTP not supported.\n");
		return -1;
	}
	if((fmtctx = avformat_alloc_context()) == NULL) {
		ga_error("create avformat context failed.\n");
		return -1;
	}
	fmtctx->oformat = fmt;
	// clamp the muxer packet size to the configured MTU
	if(ctx->mtu > 0) {
		if(fmtctx->packet_size > 0) {
			fmtctx->packet_size =
				ctx->mtu < fmtctx->packet_size ? ctx->mtu : fmtctx->packet_size;
		} else {
			fmtctx->packet_size = ctx->mtu;
		}
		ga_error("RTP: packet size set to %d (configured: %d)\n",
			fmtctx->packet_size, ctx->mtu);
	}
#ifdef HOLE_PUNCHING
	// packets are always muxed into a dynamic buffer and sent manually
	if(ffio_open_dyn_packet_buf(&fmtctx->pb, ctx->mtu) < 0) {
		ga_error("cannot open dynamic packet buffer\n");
		goto cleanup;
	}
	ga_error("RTP: Dynamic buffer opened, max_packet_size=%d.\n",
		(int) fmtctx->pb->max_packet_size);
	if(ctx->lower_transport[streamid] == RTSP_LOWER_TRANSPORT_UDP) {
		if(rtp_open_ports(ctx, streamid) < 0) {
			ga_error("RTP: open ports failed - %s\n", strerror(errno));
			goto cleanup;
		}
	}
#else
	if(ctx->lower_transport[streamid] == RTSP_LOWER_TRANSPORT_UDP) {
		snprintf(fmtctx->filename, sizeof(fmtctx->filename),
			"rtp://%s:%d", inet_ntoa(sin->sin_addr), ntohs(sin->sin_port));
		if(avio_open(&fmtctx->pb, fmtctx->filename, AVIO_FLAG_WRITE) < 0) {
			ga_error("cannot open URL: %s\n", fmtctx->filename);
			goto cleanup;
		}
		ga_error("RTP/UDP: URL opened [%d]: %s, max_packet_size=%d\n",
			streamid, fmtctx->filename, fmtctx->pb->max_packet_size);
	} else if(ctx->lower_transport[streamid] == RTSP_LOWER_TRANSPORT_TCP) {
		// XXX: should we use avio_open_dyn_buf(&fmtctx->pb)?
		if(ffio_open_dyn_packet_buf(&fmtctx->pb, ctx->mtu) < 0) {
			ga_error("cannot open dynamic packet buffer\n");
			goto cleanup;
		}
		ga_error("RTP/TCP: Dynamic buffer opened, max_packet_size=%d.\n",
			(int) fmtctx->pb->max_packet_size);
	}
#endif
	// fixed: previously dereferenced unconditionally, which is a NULL
	// deref when neither transport branch opened an I/O context
	if(fmtctx->pb == NULL) {
		ga_error("no I/O context opened for stream (%d)\n", streamid);
		goto cleanup;
	}
	fmtctx->pb->seekable = 0;
	//
	if((stream = ga_avformat_new_stream(fmtctx, 0,
			codecid == rtspconf->video_encoder_codec->id ?
				rtspconf->video_encoder_codec : rtspconf->audio_encoder_codec)) == NULL) {
		ga_error("Cannot create new stream (%d)\n", codecid);
		goto cleanup;
	}
	//
	if(codecid == rtspconf->video_encoder_codec->id) {
		encoder = ga_avcodec_vencoder_init(
				stream->codec,
				rtspconf->video_encoder_codec,
				video_source_out_width(streamid),
				video_source_out_height(streamid),
				rtspconf->video_fps,
				rtspconf->vso);
	} else if(codecid == rtspconf->audio_encoder_codec->id) {
		encoder = ga_avcodec_aencoder_init(
				stream->codec,
				rtspconf->audio_encoder_codec,
				rtspconf->audio_bitrate,
				rtspconf->audio_samplerate,
				rtspconf->audio_channels,
				rtspconf->audio_codec_format,
				rtspconf->audio_codec_channel_layout);
	}
	if(encoder == NULL) {
		ga_error("Cannot init encoder\n");
		goto cleanup;
	}
	// ownership transfers to ctx from here on; later failures must not
	// free fmtctx (per-client teardown owns it now)
	ctx->encoder[streamid] = encoder;
	ctx->stream[streamid] = stream;
	ctx->fmtctx[streamid] = fmtctx;
	// write header
	if(avformat_write_header(ctx->fmtctx[streamid], NULL) < 0) {
		ga_error("Cannot write stream id %d.\n", streamid);
		return -1;
	}
#ifdef HOLE_PUNCHING
	// header bytes are discarded; they are re-sent via RTSP/SDP instead
	avio_close_dyn_buf(ctx->fmtctx[streamid]->pb, &dummybuf);
	av_free(dummybuf);
#else
	if(ctx->lower_transport[streamid] == RTSP_LOWER_TRANSPORT_TCP) {
		/*int rlen;
		rlen =*/ avio_close_dyn_buf(ctx->fmtctx[streamid]->pb, &dummybuf);
		av_free(dummybuf);
	}
#endif
	return 0;
cleanup:
	// release everything allocated before ownership moved into ctx;
	// streams attached to fmtctx are freed by avformat_free_context()
	if(fmtctx != NULL) {
		if(fmtctx->pb != NULL) {
#ifdef HOLE_PUNCHING
			avio_close_dyn_buf(fmtctx->pb, &dummybuf);
			av_free(dummybuf);
#else
			if(ctx->lower_transport[streamid] == RTSP_LOWER_TRANSPORT_TCP) {
				avio_close_dyn_buf(fmtctx->pb, &dummybuf);
				av_free(dummybuf);
			} else {
				avio_close(fmtctx->pb);
			}
#endif
		}
		avformat_free_context(fmtctx);
	}
	return -1;
}
Exemplo n.º 3
0
/*
 * Set up the per-client SDP generation context: an RTP format context,
 * one video stream/encoder per source channel, and (optionally) one
 * audio stream/encoder. Also resolves the client's MTU setting.
 * Returns 0 on success, -1 on failure.
 */
static int
per_client_init(RTSPContext *ctx) {
	AVOutputFormat *rtpfmt;
	int ch;
	// the RTP muxer must be available for SDP generation
	rtpfmt = av_guess_format("rtp", NULL, NULL);
	if(rtpfmt == NULL) {
		ga_error("RTP not supported.\n");
		return -1;
	}
	ctx->sdp_fmtctx = avformat_alloc_context();
	if(ctx->sdp_fmtctx == NULL) {
		ga_error("create avformat context failed.\n");
		return -1;
	}
	ctx->sdp_fmtctx->oformat = rtpfmt;
	// one video stream + encoder per source channel
	for(ch = 0; ch < video_source_channels(); ch++) {
		ctx->sdp_vstream[ch] = ga_avformat_new_stream(
			ctx->sdp_fmtctx,
			ch, rtspconf->video_encoder_codec);
		if(ctx->sdp_vstream[ch] == NULL) {
			ga_error("cannot create new video stream (%d:%d)\n",
				ch, rtspconf->video_encoder_codec->id);
			return -1;
		}
		ctx->sdp_vencoder[ch] = ga_avcodec_vencoder_init(
			ctx->sdp_vstream[ch]->codec,
			rtspconf->video_encoder_codec,
			video_source_out_width(ch), video_source_out_height(ch),
			rtspconf->video_fps,
			rtspconf->vso);
		if(ctx->sdp_vencoder[ch] == NULL) {
			ga_error("cannot init video encoder\n");
			return -1;
		}
	}
#ifdef ENABLE_AUDIO
	// the audio stream index follows all video channels
	ctx->sdp_astream = ga_avformat_new_stream(
			ctx->sdp_fmtctx,
			video_source_channels(),
			rtspconf->audio_encoder_codec);
	if(ctx->sdp_astream == NULL) {
		ga_error("cannot create new audio stream (%d)\n",
			rtspconf->audio_encoder_codec->id);
		return -1;
	}
	ctx->sdp_aencoder = ga_avcodec_aencoder_init(
			ctx->sdp_astream->codec,
			rtspconf->audio_encoder_codec,
			rtspconf->audio_bitrate,
			rtspconf->audio_samplerate,
			rtspconf->audio_channels,
			rtspconf->audio_codec_format,
			rtspconf->audio_codec_channel_layout);
	if(ctx->sdp_aencoder == NULL) {
		ga_error("cannot init audio encoder\n");
		return -1;
	}
#endif
	// fall back to the TCP maximum when no packet size is configured
	ctx->mtu = ga_conf_readint("packet-size");
	if(ctx->mtu <= 0)
		ctx->mtu = RTSP_TCP_MAX_PACKET_SIZE;
	return 0;
}
Exemplo n.º 4
0
/// TODO
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int cid;
	pooldata_t *data = NULL;
	vsource_frame_t *frame = NULL;
	char *pipename = (char*) arg;
	pipeline *pipe = pipeline::lookup(pipename);
	struct RTSPConf *rtspconf = NULL;
	//
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int outputW, outputH;
	//
	struct timeval pkttv;
#ifdef PRINT_LATENCY
	struct timeval ptv;
#endif
	//
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: invalid pipeline specified (%s).\n", pipename);
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	cid = ((vsource_t*) pipe->get_privdata())->channel;
	outputW = video_source_out_width(cid);
	outputH = video_source_out_height(cid);
	//
	// start encoding
	ga_error("video encoding started: tid=%ld.\n", ga_gettid());
	pipe->client_register(ga_gettid(), &cond);
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		//
		AVPacket pkt;
		unsigned char *enc;
		int encsize;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (vsource_frame_t*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// encode!
		gettimeofday(&pkttv, NULL);
		enc = vpu_encoder_encode(&vpu[cid], frame->imgbuf, vpu[cid].vpu_framesize, &encsize);
		//
		pipe->release_data(data);
		//
		if(enc == NULL) {
			ga_error("encoder-vpu: encode failed.\n");
			goto video_quit;
		}
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// send packet
#ifdef SAVEFILE
		if(fout != NULL)
			fwrite(enc, sizeof(char), encsize, fout);
#endif
		pkt.data = enc;
		pkt.size = encsize;
		if(encoder_send_packet_all("video-encoder", cid, &pkt, pkt.pts, &pkttv) < 0) {
			goto video_quit;
		}
		if(video_written == 0) {
			video_written = 1;
			ga_error("first video frame written (pts=%lld)\n", pts);
		}
#ifdef PRINT_LATENCY		/* print out latency */
		gettimeofday(&ptv, NULL);
		ga_aggregated_print(0x0001, 601, tvdiff_us(&ptv, &frame->timestamp));
#endif
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
Exemplo n.º 5
0
/*
 * Initialize one libavcodec video encoder per source channel, plus
 * (with STANDALONE_SDP) a meta-encoder with GLOBAL_HEADER set, used
 * only to produce extradata for SDP generation.
 *
 * arg: printf-style pipeline-name format string (channel id substituted).
 * Returns 0 on success (or if already initialized), -1 on failure;
 * failures tear down partial state via vencoder_deinit().
 */
static int
vencoder_init(void *arg) {
	int iid;
	char *pipefmt = (char*) arg;
	struct RTSPConf *rtspconf = rtspconf_global();
	//
	if(rtspconf == NULL) {
		ga_error("video encoder: no configuration found\n");
		return -1;
	}
	// idempotent: a second call is a no-op
	if(vencoder_initialized != 0)
		return 0;
	//
	for(iid = 0; iid < video_source_channels(); iid++) {
		char pipename[64];
		int outputW, outputH;
		pipeline *pipe;
		// SPS/PPS caches are filled lazily later
		_sps[iid] = _pps[iid] = NULL;
		_spslen[iid] = _ppslen[iid] = 0;
		snprintf(pipename, sizeof(pipename), pipefmt, iid);
		outputW = video_source_out_width(iid);
		outputH = video_source_out_height(iid);
		if((pipe = pipeline::lookup(pipename)) == NULL) {
			ga_error("video encoder: pipe %s is not found\n", pipename);
			goto init_failed;
		}
		// fixed: dropped spurious trailing 'iid' argument (format has
		// only four conversion specifiers)
		ga_error("video encoder: video source #%d from '%s' (%dx%d).\n",
			iid, pipe->name(), outputW, outputH);
		vencoder[iid] = ga_avcodec_vencoder_init(NULL,
				rtspconf->video_encoder_codec,
				outputW, outputH,
				rtspconf->video_fps, rtspconf->vso);
		if(vencoder[iid] == NULL)
			goto init_failed;
#ifdef STANDALONE_SDP
		// encoders for SDP generation
		// NOTE(review): 'avc' is declared outside this function; in the
		// 'default' branch it keeps its previous value (possibly from an
		// earlier iteration) when stored below — confirm intended.
		switch(rtspconf->video_encoder_codec->id) {
		case AV_CODEC_ID_H264:
		case AV_CODEC_ID_H265:
		case AV_CODEC_ID_CAVS:
		case AV_CODEC_ID_MPEG4:
			// need ctx with CODEC_FLAG_GLOBAL_HEADER flag
			avc = avcodec_alloc_context3(rtspconf->video_encoder_codec);
			if(avc == NULL)
				goto init_failed;
			avc->flags |= CODEC_FLAG_GLOBAL_HEADER;
			avc = ga_avcodec_vencoder_init(avc,
				rtspconf->video_encoder_codec,
				outputW, outputH,
				rtspconf->video_fps, rtspconf->vso);
			if(avc == NULL)
				goto init_failed;
			ga_error("video encoder: meta-encoder #%d created.\n", iid);
			break;
		default:
			// do nothing
			break;
		}
		vencoder_sdp[iid] = avc;
#endif
	}
	vencoder_initialized = 1;
	ga_error("video encoder: initialized.\n");
	return 0;
init_failed:
	vencoder_deinit(NULL);
	return -1;
}
Exemplo n.º 6
0
/*
 * Encoder thread body (libavcodec variant): pull YUV420P frames from
 * the source pipeline named by 'arg', copy them into a reusable
 * AVFrame, encode with avcodec_encode_video2(), and broadcast each
 * packet to all connected clients.
 * Returns NULL when the encoder is stopped or on error.
 */
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int iid, outputW, outputH;
	pooldata_t *data = NULL;
	vsource_frame_t *frame = NULL;
	char *pipename = (char*) arg;
	pipeline *pipe = pipeline::lookup(pipename);
	AVCodecContext *encoder = NULL;
	//
	AVFrame *pic_in = NULL;
	unsigned char *pic_in_buf = NULL;
	int pic_in_size;
	unsigned char *nalbuf = NULL, *nalbuf_a = NULL;
	int nalbuf_size = 0, nalign = 0;
	// basePts: first source pts seen; ptsSync: wall-clock-aligned base
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: invalid pipeline specified (%s).\n", pipename);
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	// init variables
	iid = ((vsource_t*) pipe->get_privdata())->channel;
	encoder = vencoder[iid];
	//
	outputW = video_source_out_width(iid);
	outputH = video_source_out_height(iid);
	// NAL output buffer, aligned via ga_malloc
	nalbuf_size = 100000+12 * outputW * outputH;
	if(ga_malloc(nalbuf_size, (void**) &nalbuf, &nalign) < 0) {
		ga_error("video encoder: buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	nalbuf_a = nalbuf + nalign;
	//
	if((pic_in = av_frame_alloc()) == NULL) {
		ga_error("video encoder: picture allocation failed, terminated.\n");
		goto video_quit;
	}
	pic_in_size = avpicture_get_size(PIX_FMT_YUV420P, outputW, outputH);
	if((pic_in_buf = (unsigned char*) av_malloc(pic_in_size)) == NULL) {
		ga_error("video encoder: picture buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	avpicture_fill((AVPicture*) pic_in, pic_in_buf,
			PIX_FMT_YUV420P, outputW, outputH);
	//ga_error("video encoder: linesize = %d|%d|%d\n", pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
	// start encoding
	ga_error("video encoding started: tid=%ld %dx%d@%dfps, nalbuf_size=%d, pic_in_size=%d.\n",
		ga_gettid(),
		outputW, outputH, rtspconf->video_fps,
		nalbuf_size, pic_in_size);
	//
	pipe->client_register(ga_gettid(), &cond);
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		AVPacket pkt;
		int got_packet = 0;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			// wait at most one second for a frame, then retry
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("video encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("video encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (vsource_frame_t*) data->ptr;
		// handle pts: anchor the first frame, then offset subsequent ones
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// XXX: assume always YUV420P
		if(pic_in->linesize[0] == frame->linesize[0]
		&& pic_in->linesize[1] == frame->linesize[1]
		&& pic_in->linesize[2] == frame->linesize[2]) {
			// bcopy is deprecated (POSIX); memcpy with swapped arg order
			memcpy(pic_in_buf, frame->imgbuf, pic_in_size);
		} else {
			ga_error("video encoder: YUV mode failed - mismatched linesize(s) (src:%d,%d,%d; dst:%d,%d,%d)\n",
				frame->linesize[0], frame->linesize[1], frame->linesize[2],
				pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
			pipe->release_data(data);
			goto video_quit;
		}
		pipe->release_data(data);
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// encode
		pic_in->pts = pts;
		av_init_packet(&pkt);
		pkt.data = nalbuf_a;
		pkt.size = nalbuf_size;
		if(avcodec_encode_video2(encoder, &pkt, pic_in, &got_packet) < 0) {
			ga_error("video encoder: encode failed, terminated.\n");
			goto video_quit;
		}
		if(got_packet) {
			if(pkt.pts == (int64_t) AV_NOPTS_VALUE) {
				pkt.pts = pts;
			}
			pkt.stream_index = 0;
			// send the packet
			if(encoder_send_packet_all("video-encoder",
				iid/*rtspconf->video_id*/, &pkt,
				pkt.pts, NULL) < 0) {
				goto video_quit;
			}
			// free unused side-data
			if(pkt.side_data_elems > 0) {
				int i;
				for (i = 0; i < pkt.side_data_elems; i++)
					av_free(pkt.side_data[i].data);
				av_freep(&pkt.side_data);
				pkt.side_data_elems = 0;
			}
			//
			if(video_written == 0) {
				video_written = 1;
				ga_error("first video frame written (pts=%lld)\n", pts);
			}
		}
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	if(pic_in_buf)	av_free(pic_in_buf);
	// fixed: frames from av_frame_alloc() must be released with
	// av_frame_free(), not av_free(), to release internal references
	if(pic_in)	av_frame_free(&pic_in);
	if(nalbuf)	free(nalbuf);
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}