Example #1
int dc_audio_encoder_read(AudioOutputFile * p_aout, AudioInputData * p_aind) {

	int ret;
	AudioDataNode * p_adn;

	/* Lock the next filled slot of the input circular buffer; fails once the end of the buffer is reached */
	ret = dc_consumer_lock(&p_aout->acon, &p_aind->p_cb);

	if (ret < 0) {
#ifdef DEBUG
		printf("Audio encoder reached the end of the buffer!\n");
#endif

		return -2;
	}

	/* Release the previously consumed slot, then grab the current audio node */
	dc_consumer_unlock_previous(&p_aout->acon, &p_aind->p_cb);

	p_adn = (AudioDataNode *) dc_consumer_consume(&p_aout->acon, &p_aind->p_cb);

	/* Write the audio samples to the FIFO */
//	av_fifo_generic_write(p_aout->p_fifo, p_adn->p_aframe->data[0],
//			p_adn->p_aframe->linesize[0], NULL);
	av_fifo_generic_write(p_aout->p_fifo, p_adn->p_abuf, p_adn->i_abuf_size,
			NULL);

	dc_consumer_advance(&p_aout->acon);

	return 0;
}
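The variant above pushes the raw samples into an AVFifoBuffer; the encoder side then pulls fixed-size chunks back out of that FIFO. A minimal sketch of that draining step using only the public libavutil FIFO API (the helper name and the frame_bytes parameter are illustrative, not part of DashCast):

#include <stdint.h>
#include <libavutil/fifo.h>

/* Hypothetical helper: pull one encoder frame's worth of samples out of the
 * FIFO that dc_audio_encoder_read() filled. frame_bytes is the number of
 * bytes one audio frame consumes; the name is illustrative. */
static int drain_audio_fifo(AVFifoBuffer *fifo, uint8_t *frame_buf, int frame_bytes)
{
	/* Not enough buffered data yet: tell the caller to read more input first. */
	if (av_fifo_size(fifo) < frame_bytes)
		return 0;

	av_fifo_generic_read(fifo, frame_buf, frame_bytes, NULL);
	return frame_bytes;
}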
Example #2
int dc_audio_encoder_read(AudioOutputFile *audio_output_file, AudioInputData *audio_input_data)
{
	int ret;
	AudioDataNode *audio_data_node;

	ret = dc_consumer_lock(&audio_output_file->consumer, &audio_input_data->circular_buf);
	if (ret < 0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Audio encoder reached the end of the buffer!\n"));
		return -2;
	}

	dc_consumer_unlock_previous(&audio_output_file->consumer, &audio_input_data->circular_buf);

	audio_data_node = (AudioDataNode *) dc_consumer_consume(&audio_output_file->consumer, &audio_input_data->circular_buf);
#ifndef GPAC_USE_LIBAV
	audio_output_file->aframe->channels = audio_output_file->codec_ctx->channels;
#endif
#ifndef LIBAV_FRAME_OLD
	audio_output_file->aframe->channel_layout = audio_output_file->codec_ctx->channel_layout;
	audio_output_file->aframe->sample_rate = audio_output_file->codec_ctx->sample_rate;
#endif
	audio_output_file->aframe->format = audio_output_file->codec_ctx->sample_fmt;

	/* Write the audio samples to the FIFO */
	av_fifo_generic_write(audio_output_file->fifo, audio_data_node->abuf, audio_data_node->abuf_size, NULL);

	dc_consumer_advance(&audio_output_file->consumer);

	return 0;
}
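This newer variant also copies the channel, sample-rate and sample-format description from the codec context onto audio_output_file->aframe before the samples are encoded. A minimal sketch of allocating such an audio frame up front, assuming a build where AVFrame still carries channel_layout and sample_rate (the path where LIBAV_FRAME_OLD is not defined) and where av_frame_get_buffer() is available; the helper name is illustrative:

#include <libavutil/frame.h>
#include <libavutil/channel_layout.h>

/* Allocate an audio AVFrame and describe it the same way the example above
 * does from the codec context. Illustrative sketch, not DashCast code. */
static AVFrame *alloc_audio_frame(int nb_samples, enum AVSampleFormat fmt,
                                  uint64_t channel_layout, int sample_rate)
{
	AVFrame *frame = av_frame_alloc();
	if (!frame)
		return NULL;

	frame->nb_samples     = nb_samples;
	frame->format         = fmt;
	frame->channel_layout = channel_layout;
	frame->sample_rate    = sample_rate;

	/* Allocate the data planes matching the description above. */
	if (av_frame_get_buffer(frame, 0) < 0) {
		av_frame_free(&frame);
		return NULL;
	}
	return frame;
}

A caller would typically pass codec_ctx->frame_size, codec_ctx->sample_fmt, codec_ctx->channel_layout and codec_ctx->sample_rate, matching the fields copied in the example above.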
Example #3
int dc_video_encoder_encode(VideoOutputFile * p_voutf, VideoScaledData * p_vsd) {

	//AVPacket pkt;
	VideoDataNode * p_vn;
	int ret;
	//int i_out_size;

//	AVStream * p_video_stream = p_voutf->p_fmt->streams[p_voutf->i_vstream_idx];
//	AVCodecContext * p_video_codec_ctx = p_video_stream->codec;
	AVCodecContext * p_video_codec_ctx = p_voutf->p_codec_ctx;


	ret = dc_consumer_lock(&p_voutf->vcon, &p_vsd->p_cb);

	if (ret < 0) {
#ifdef DEBUG
		printf("Video encoder reached the end of the buffer!\n");
#endif

		return -2;
	}

	dc_consumer_unlock_previous(&p_voutf->vcon, &p_vsd->p_cb);

	p_vn = (VideoDataNode *) dc_consumer_consume(&p_voutf->vcon, &p_vsd->p_cb);

	/*
	 * Set PTS (method 1)
	 */
	p_vn->p_vframe->pts = p_video_codec_ctx->frame_number;

	/*
	 * Set PTS (method 2)
	 * int64_t now = av_gettime();
	 * p_vn->p_vframe->pts = av_rescale_q(now, AV_TIME_BASE_Q,
	 *		p_video_codec_ctx->time_base);
	 */

	/* Encoding video */
	p_voutf->i_encoded_frame_size = avcodec_encode_video(p_video_codec_ctx,
			p_voutf->p_vbuf, p_voutf->i_vbuf_size, p_vn->p_vframe);

	dc_consumer_advance(&p_voutf->vcon);

	if (p_voutf->i_encoded_frame_size < 0) {
		fprintf(stderr, "Error occurred while encoding video frame.\n");
		return -1;
	}
	/* if zero size, it means the image was buffered */
//	if (i_out_size > 0) {
//
//		av_init_packet(&pkt);
//
//		if (p_video_codec_ctx->coded_frame->pts != AV_NOPTS_VALUE) {
//			pkt.pts = av_rescale_q(p_video_codec_ctx->coded_frame->pts,
//					p_video_codec_ctx->time_base, p_video_stream->time_base);
//		}
//
//
//		if (p_video_codec_ctx->coded_frame->key_frame)
//			pkt.flags |= AV_PKT_FLAG_KEY;
//
//		pkt.stream_index = p_video_stream->index;
//		pkt.data = p_voutf->p_vbuf;
//		pkt.size = i_out_size;
//
//		// write the compressed frame in the media file
//		if (av_interleaved_write_frame(p_voutf->p_fmt, &pkt)
//				!= 0) {
//			fprintf(stderr, "Writing frame is not successful\n");
//			return -1;
//		}
//
//		av_free_packet(&pkt);
//
//	}

	return p_voutf->i_encoded_frame_size;
}
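The comment block labelled "method 2" hints at deriving the PTS from the wall clock instead of the encoder's frame counter. A small sketch of that conversion using plain libavutil (the helper name is illustrative):

#include <libavutil/avutil.h>
#include <libavutil/time.h>
#include <libavutil/mathematics.h>

/* "Method 2" from the comment above: rescale the current wall-clock time
 * (microseconds, AV_TIME_BASE_Q) into the encoder's time base. */
static int64_t wallclock_pts(AVRational time_base)
{
	int64_t now = av_gettime();
	return av_rescale_q(now, AV_TIME_BASE_Q, time_base);
}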
Example #4
int dc_video_encoder_encode(VideoOutputFile *video_output_file, VideoScaledData *video_scaled_data)
{
	//AVPacket pkt;
	VideoDataNode *video_data_node;
	int ret;
	//int out_size;

//	AVStream *video_stream = video_output_file->av_fmt_ctx->streams[video_output_file->vstream_idx];
//	AVCodecContext *video_codec_ctx = video_stream->codec;
	AVCodecContext *video_codec_ctx = video_output_file->codec_ctx;

	//FIXME: deadlock when pressing 'q' with BigBuckBunny_640x360.m4v
	ret = dc_consumer_lock(&video_output_file->consumer, &video_scaled_data->circular_buf);
	if (ret < 0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Video encoder reached the end of the buffer!\n"));
		return -2;
	}

	if (video_scaled_data->circular_buf.size > 1)
		dc_consumer_unlock_previous(&video_output_file->consumer, &video_scaled_data->circular_buf);

	video_data_node = (VideoDataNode*)dc_consumer_consume(&video_output_file->consumer, &video_scaled_data->circular_buf);

	/*
	 * Set PTS (method 1)
	 */
	if (!video_output_file->use_source_timing) {
		video_data_node->vframe->pts = video_codec_ctx->frame_number;
	}

	/* Encoding video */
	{
		int got_packet = 0;
		AVPacket pkt;
		av_init_packet(&pkt);
		pkt.data = video_output_file->vbuf;
		pkt.size = video_output_file->vbuf_size;
		pkt.pts = pkt.dts = video_data_node->vframe->pkt_dts = video_data_node->vframe->pkt_pts = video_data_node->vframe->pts;
#ifdef GPAC_USE_LIBAV
		video_output_file->encoded_frame_size = avcodec_encode_video(video_codec_ctx, video_output_file->vbuf, video_output_file->vbuf_size, video_data_node->vframe);
		got_packet = video_output_file->encoded_frame_size>=0 ? 1 : 0;
#else
		video_output_file->encoded_frame_size = avcodec_encode_video2(video_codec_ctx, &pkt, video_data_node->vframe, &got_packet);
		//this is not true with libav !
		if (video_output_file->encoded_frame_size >= 0)
			video_output_file->encoded_frame_size = pkt.size;
#endif
		if (video_output_file->encoded_frame_size >= 0) {
			if (got_packet) {
				video_codec_ctx->coded_frame->pts = video_codec_ctx->coded_frame->pkt_pts = pkt.pts;
				video_codec_ctx->coded_frame->pkt_dts = pkt.dts;
				video_codec_ctx->coded_frame->key_frame = (pkt.flags & AV_PKT_FLAG_KEY) ? 1 : 0;
			}
		}
	}

	dc_consumer_advance(&video_output_file->consumer);

	if (video_scaled_data->circular_buf.size == 1)
		dc_consumer_unlock_previous(&video_output_file->consumer, &video_scaled_data->circular_buf);

	if (video_output_file->encoded_frame_size < 0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Error occurred while encoding video frame.\n"));
		return -1;
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DashCast] Video %s Frame TS "LLU" encoded at UTC "LLU" ms\n", video_output_file->rep_id, /*video_data_node->source_number, */video_data_node->vframe->pts, gf_net_get_utc() ));

	/* if zero size, it means the image was buffered */
//	if (out_size > 0) {
//
//		av_init_packet(&pkt);
//		pkt.data = NULL;
//		pkt.size = 0;
//
//		if (video_codec_ctx->coded_frame->pts != AV_NOPTS_VALUE) {
//			pkt.pts = av_rescale_q(video_codec_ctx->coded_frame->pts,
//					video_codec_ctx->time_base, video_stream->time_base);
//		}
//
//
//		if (video_codec_ctx->coded_frame->key_frame)
//			pkt.flags |= AV_PKT_FLAG_KEY;
//
//		pkt.stream_index = video_stream->index;
//		pkt.data = video_output_file->vbuf;
//		pkt.size = out_size;
//
//		// write the compressed frame in the media file
//		if (av_interleaved_write_frame(video_output_file->av_fmt_ctx, &pkt)
//				!= 0) {
//			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Writing frame is not successful\n"));
//			return -1;
//		}
//
//		av_free_packet(&pkt);
//
//	}

	return video_output_file->encoded_frame_size;
}
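The commented-out block above sketches the muxing step that would follow a successful encode. Rebuilt as a standalone helper it would look roughly like this; it keeps the old coded_frame / av_free_packet() API that this code targets, and all parameter names (fmt_ctx, stream, buf, ...) are illustrative:

#include <libavformat/avformat.h>

/* Wrap the encoder output in an AVPacket and hand it to the container.
 * Sketch only; mirrors the commented-out code above, not DashCast's
 * actual muxer path. */
static int write_encoded_frame(AVFormatContext *fmt_ctx, AVStream *stream,
                               AVCodecContext *codec_ctx, uint8_t *buf, int size)
{
	AVPacket pkt;
	av_init_packet(&pkt);

	/* Rescale the encoder PTS (codec time base) into the stream time base. */
	if (codec_ctx->coded_frame->pts != AV_NOPTS_VALUE)
		pkt.pts = av_rescale_q(codec_ctx->coded_frame->pts,
		                       codec_ctx->time_base, stream->time_base);

	if (codec_ctx->coded_frame->key_frame)
		pkt.flags |= AV_PKT_FLAG_KEY;

	pkt.stream_index = stream->index;
	pkt.data = buf;
	pkt.size = size;

	/* Interleave and write the compressed frame into the media file. */
	if (av_interleaved_write_frame(fmt_ctx, &pkt) != 0)
		return -1;

	av_free_packet(&pkt);
	return 0;
}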
Example #5
int dc_video_scaler_scale(VideoInputData *video_input_data, VideoScaledData *video_scaled_data)
{
	int ret, index, src_height;
	VideoDataNode *video_data_node;
	VideoScaledDataNode *video_scaled_data_node;
	AVFrame *src_vframe;

	//step 1: try to lock output slot. If none available, return ....
	if (video_input_data->circular_buf.size > 1)
		dc_consumer_unlock_previous(&video_scaled_data->consumer, &video_input_data->circular_buf);

	ret = dc_producer_lock(&video_scaled_data->producer, &video_scaled_data->circular_buf);
	//not ready
	if (ret<0) {
		return -1;
	}
	dc_producer_unlock_previous(&video_scaled_data->producer, &video_scaled_data->circular_buf);

	//step 2: lock input
	ret = dc_consumer_lock(&video_scaled_data->consumer, &video_input_data->circular_buf);
	if (ret < 0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Video scaler reached the end of the input buffer!\n"));
		return -2;
	}

	//step 3 - grab source and dest images
	video_data_node = (VideoDataNode*)dc_consumer_consume(&video_scaled_data->consumer, &video_input_data->circular_buf);
	video_scaled_data_node = (VideoScaledDataNode*)dc_producer_produce(&video_scaled_data->producer, &video_scaled_data->circular_buf);
	index = video_data_node->source_number;

	video_scaled_data->frame_duration = video_input_data->frame_duration;

	//crop if necessary
	if (video_input_data->vprop[index].crop_x || video_input_data->vprop[index].crop_y) {
#if 0
		av_frame_copy_props(video_scaled_data_node->cropped_frame, video_data_node->vframe);
		video_scaled_data_node->cropped_frame->width  = video_input_data->vprop[index].width  - video_input_data->vprop[index].crop_x;
		video_scaled_data_node->cropped_frame->height = video_input_data->vprop[index].height - video_input_data->vprop[index].crop_y;
#endif
		if (av_picture_crop((AVPicture*)video_scaled_data_node->cropped_frame, (AVPicture*)video_data_node->vframe, PIX_FMT_YUV420P, video_input_data->vprop[index].crop_y, video_input_data->vprop[index].crop_x) < 0) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Video scaler: error while cropping picture.\n"));
			return -1;
		}
		src_vframe = video_scaled_data_node->cropped_frame;
		src_height = video_input_data->vprop[index].height - video_input_data->vprop[index].crop_y;
	} else {
		assert(!video_scaled_data_node->cropped_frame);
		src_vframe = video_data_node->vframe;
		src_height = video_input_data->vprop[index].height;
	}


	//rescale the cropped frame
	ret = sws_scale(video_scaled_data->vsprop[index].sws_ctx,
	                (const uint8_t * const *)src_vframe->data, src_vframe->linesize, 0, src_height,
	                video_scaled_data_node->v_frame->data, video_scaled_data_node->v_frame->linesize);

	if (!ret) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Video scaler: error while resizing picture.\n"));
		return -1;
	}
	video_scaled_data_node->v_frame->pts = video_data_node->vframe->pts;

	if (video_data_node->nb_raw_frames_ref) {
		if (video_data_node->nb_raw_frames_ref==1) {
#ifndef GPAC_USE_LIBAV
			av_frame_unref(video_data_node->vframe);
#endif
			av_free_packet(&video_data_node->raw_packet);
		}
		video_data_node->nb_raw_frames_ref--;
	}

	dc_consumer_advance(&video_scaled_data->consumer);
	dc_producer_advance(&video_scaled_data->producer, &video_scaled_data->circular_buf);

	if (video_input_data->circular_buf.size == 1)
		dc_consumer_unlock_previous(&video_scaled_data->consumer, &video_input_data->circular_buf);
	return 0;
}
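The scaler above assumes vsprop[index].sws_ctx was already created for the source and destination geometry. A minimal setup sketch with libswscale, assuming an FFmpeg where the pixel format enum is spelled AVPixelFormat (older builds, like the PIX_FMT_YUV420P use above, spell it PixelFormat); dimensions and formats here are illustrative:

#include <libswscale/swscale.h>

/* Create the per-source scaling context used by sws_scale() in the
 * example above. Sketch only, not DashCast's initialization code. */
static struct SwsContext *create_scaler(int src_w, int src_h, enum AVPixelFormat src_fmt,
                                        int dst_w, int dst_h, enum AVPixelFormat dst_fmt)
{
	/* SWS_BICUBIC is a common quality/speed trade-off for downscaling. */
	return sws_getContext(src_w, src_h, src_fmt,
	                      dst_w, dst_h, dst_fmt,
	                      SWS_BICUBIC, NULL, NULL, NULL);
}

When the stream ends, sws_freeContext() releases the context.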