Example #1
static int WriteVideoFrame(AVFormatContext *oc, OutputStream *ost, BYTE* src_img)
{
	int ret = 0;
	AVCodecContext *c = NULL;
	AVFrame *frame = NULL;
	int got_packet = 0;

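	/* note: AVStream.codec and avcodec_encode_video2() are the older FFmpeg encoding API;
	   current releases use codecpar plus avcodec_send_frame()/avcodec_receive_packet() */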
	c = ost->st->codec;

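	/* GetVideoFrame() is a helper defined elsewhere; presumably it converts src_img into
	   an AVFrame in the encoder's pixel format and stamps its pts */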
	frame = GetVideoFrame(ost, src_img);

	if (oc->oformat->flags & AVFMT_RAWPICTURE) {
		/* a hack to avoid data copy with some raw video muxers */
		AVPacket pkt;
		av_init_packet(&pkt);

		if (!frame)
			return 1;

		pkt.flags        |= AV_PKT_FLAG_KEY;
		pkt.stream_index  = ost->st->index;
		pkt.data          = (uint8_t *)frame;
		pkt.size          = sizeof(AVPicture);

		pkt.pts = pkt.dts = frame->pts;
		av_packet_rescale_ts(&pkt, c->time_base, ost->st->time_base);

		ret = av_interleaved_write_frame(oc, &pkt);
	} else {
		AVPacket pkt = { 0 };
		av_init_packet(&pkt);

		// encode the video frame
		ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
		if (ret < 0) {
			fprintf(stderr, "Error encoding video frame: %s\n", MakeErrorString(ret));
			return 0;
		}

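		/* the encoder may buffer input frames, so got_packet is not set on every call */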
		if (got_packet) {
			ret = WriteFrame(oc, &c->time_base, ost->st, &pkt);
		} else {
			ret = 0;
		}
	}

	if (ret < 0) {
		fprintf(stderr, "Error while writing video frame: %s\n", MakeErrorString(ret));
		return 0;
	}

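	/* 0 = more data to come, 1 = neither a frame nor a packet is pending, i.e. encoding has finished */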
	return (frame || got_packet) ? 0 : 1;
}
Example #2
int main(int argc, char **argv)
{
	int ret = -1;
	uint8_t *data = NULL;
	unsigned long size = 0;
	int times = 0;
	int width = 0;
	int height = 0;
	char *dest = NULL;
	unsigned long dest_size = 0;
	long long pts = 0;
	long long dts = 0;
	AUDIOPACKET ap[30] = {0};
	int ap_len = 0;
	int i = 0;
	CAPTURECONFIG captureConfig;
	PCAPTURECONFIG pCaptureConfig = &captureConfig;
	ENCODECONFIG encodeConfig;
	PENCODECONFIG pEncodeConfig = &encodeConfig;
	PENCODER pEncoder = NULL;
	PCAPTURE pCapture = NULL;
	DWORD start_time, end_time;
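
	/* capture settings: 5 fps video plus 16-bit stereo PCM audio at 48 kHz */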
	pCaptureConfig->fps = 5;
	pCaptureConfig->channels = 2;
	pCaptureConfig->bits_per_sample = 16;
	pCaptureConfig->samples_per_sec = 48000;
	pCaptureConfig->avg_bytes_per_sec = 48000;

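	/* encoder settings: 1366x768 @ 5 fps at ~400 kbit/s, audio matching the capture format,
	   with recording to a file enabled */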
	pEncodeConfig->fps = 5;
	pEncodeConfig->width = 1366;
	pEncodeConfig->height = 768;
	pEncodeConfig->bit_rate = 400000;
	pEncodeConfig->channels = 2;
	pEncodeConfig->bits_per_sample = 16;
	pEncodeConfig->sample_rate = 48000;
	pEncodeConfig->avg_bytes_per_sec = 48000;
	pEncodeConfig->record = 1;

	strcpy(pEncodeConfig->record_file, "D:\\desktop_live.mp4");

	InitLog(LOG_DEBUG, OUT_FILE);

	pCapture = InitCapture(pCaptureConfig);
	if (NULL == pCapture)
	{
		printf("init capture failed\n");
		return -1;
	}

	pEncoder = InitEncoder(pEncodeConfig);
	if (NULL == pEncoder)
	{
		printf("init encoder failed\n");
		return -1;
	}

	ret = StartCapture(pCapture);
	if (SECCESS != ret)
	{
		printf("start capture failed\n");
		return -1;
	}

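	/* capture/encode loop: run for 10 seconds (timeGetTime() returns milliseconds) */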
	start_time = end_time = timeGetTime();
	while(10*1000 > (end_time - start_time))
	{
		if (SECCESS == GetVideoFrame(pCapture, &data, &size, &width, &height))
		{
			ret = EncodeVideo(pEncoder, data, width, height, &dest, &dest_size, &pts, &dts);
			if (ret == SECCESS)
			{
				free(dest);
			}

			times++;
			printf("video data size = %d\n", size);
			free(data);
		}

		if (SECCESS == GetAudioFrame(pCapture, &data, &size))
		{
			ap_len = 0;
			ret = EncodeAudio(pEncoder, data, size, ap, &ap_len);
			if (ret == SECCESS)
			{
				for (i=0; i<ap_len; i++)
				{
					free(ap[i].data);
				}
			}

			printf("audio data size = %d\n", size);
			free(data);
		}

		end_time = timeGetTime();
	}

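	/* stop capturing, flush any frames still buffered in the encoder, then release everything */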
	StopCapture(pCapture);
	FreeCapture(pCapture);

	FflushEncoder(pEncoder);
	FreeEncoder(pEncoder);

	FreeLog();
	_CrtDumpMemoryLeaks();
	return 0;
}