Example #1
File: AudioEncoder.cpp  Project: sysms/ssr
bool AudioEncoder::EncodeFrame(AVFrameWrapper* frame) {

	if(frame != NULL) {
#if SSR_USE_AVFRAME_NB_SAMPLES
		assert((unsigned int) frame->GetFrame()->nb_samples == GetFrameSize());
#endif
#if SSR_USE_AVFRAME_CHANNELS
		assert(frame->GetFrame()->channels == GetCodecContext()->channels);
#endif
#if SSR_USE_AVFRAME_SAMPLE_RATE
		assert(frame->GetFrame()->sample_rate == GetCodecContext()->sample_rate);
#endif
#if SSR_USE_AVFRAME_FORMAT
		assert(frame->GetFrame()->format == GetCodecContext()->sample_fmt);
#endif
	}

#if SSR_USE_AVCODEC_SEND_RECEIVE

	// send a frame
	AVFrame *avframe = (frame == NULL)? NULL : frame->Release();
	try {
		if(avcodec_send_frame(GetCodecContext(), avframe) < 0) {
			Logger::LogError("[AudioEncoder::EncodeFrame] " + Logger::tr("Error: Sending of audio frame failed!"));
			throw LibavException();
		}
	} catch(...) {
		av_frame_free(&avframe);
		throw;
	}
	av_frame_free(&avframe);

	// try to receive a packet
	for( ; ; ) {
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());
		int res = avcodec_receive_packet(GetCodecContext(), packet->GetPacket());
		if(res == 0) { // we have a packet, send the packet to the muxer
			GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
			IncrementPacketCounter();
		} else if(res == AVERROR(EAGAIN)) { // we have no packet
			return true;
		} else if(res == AVERROR_EOF) { // this is the end of the stream
			return false;
		} else {
			Logger::LogError("[AudioEncoder::EncodeFrame] " + Logger::tr("Error: Receiving of audio packet failed!"));
			throw LibavException();
		}
	}

#elif SSR_USE_AVCODEC_ENCODE_AUDIO2

	// allocate a packet
	std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());

	// encode the frame
	int got_packet;
	if(avcodec_encode_audio2(GetCodecContext(), packet->GetPacket(), (frame == NULL)? NULL : frame->GetFrame(), &got_packet) < 0) {
		Logger::LogError("[AudioEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of audio frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(got_packet) {

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#else

	// encode the frame
	short *data = (frame == NULL)? NULL : (short*) frame->GetFrame()->data[0];
	int bytes_encoded = avcodec_encode_audio(GetCodecContext(), m_temp_buffer.data(), m_temp_buffer.size(), data);
	if(bytes_encoded < 0) {
		Logger::LogError("[AudioEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of audio frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(bytes_encoded > 0) {

		// allocate a packet
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper(bytes_encoded));

		// copy the data
		memcpy(packet->GetPacket()->data, m_temp_buffer.data(), bytes_encoded);

		// set the timestamp
		// note: pts will be rescaled and stream_index will be set by Muxer
		if(GetCodecContext()->coded_frame != NULL && GetCodecContext()->coded_frame->pts != (int64_t) AV_NOPTS_VALUE)
			packet->GetPacket()->pts = GetCodecContext()->coded_frame->pts;

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#endif

}
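
The SSR_USE_AVCODEC_SEND_RECEIVE branch above wraps FFmpeg's send/receive encoding protocol: the frame is submitted with avcodec_send_frame(), then packets are drained with avcodec_receive_packet() until the encoder reports EAGAIN (it wants more input) or AVERROR_EOF (the flush is finished). The following is a minimal sketch of that same protocol written directly against libavcodec, assuming an already-opened AVCodecContext; the sink callback is a hypothetical stand-in for Muxer::AddPacket and is not part of the original code.

extern "C" {
#include <libavcodec/avcodec.h>
}
#include <functional>
#include <stdexcept>

// Encode one frame (or start a flush with frame == nullptr) and drain all
// packets the encoder produces. Returns true while the encoder wants more
// input, false once the stream has been fully flushed.
static bool EncodeOne(AVCodecContext *enc, AVFrame *frame,
                      const std::function<void(AVPacket*)> &sink) {
	if(avcodec_send_frame(enc, frame) < 0)
		throw std::runtime_error("avcodec_send_frame failed");
	for( ; ; ) {
		AVPacket *pkt = av_packet_alloc();
		int res = avcodec_receive_packet(enc, pkt);
		if(res == 0) {
			sink(pkt); // hand the packet off, e.g. to a muxer
			av_packet_free(&pkt);
		} else if(res == AVERROR(EAGAIN)) {
			av_packet_free(&pkt);
			return true; // encoder wants more input
		} else if(res == AVERROR_EOF) {
			av_packet_free(&pkt);
			return false; // end of stream, flush finished
		} else {
			av_packet_free(&pkt);
			throw std::runtime_error("avcodec_receive_packet failed");
		}
	}
}
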
Example #2
bool VideoEncoder::EncodeFrame(AVFrameWrapper* frame) {

	if(frame != NULL) {
#if SSR_USE_AVFRAME_WIDTH_HEIGHT
		assert(frame->GetFrame()->width == GetCodecContext()->width);
		assert(frame->GetFrame()->height == GetCodecContext()->height);
#endif
#if SSR_USE_AVFRAME_FORMAT
		assert(frame->GetFrame()->format == GetCodecContext()->pix_fmt);
#endif
#if SSR_USE_AVFRAME_SAR
		assert(frame->GetFrame()->sample_aspect_ratio.num == GetCodecContext()->sample_aspect_ratio.num);
		assert(frame->GetFrame()->sample_aspect_ratio.den == GetCodecContext()->sample_aspect_ratio.den);
#endif
	}

#if SSR_USE_AVCODEC_SEND_RECEIVE

	// send a frame
	AVFrame *avframe = (frame == NULL)? NULL : frame->Release();
	try {
		if(avcodec_send_frame(GetCodecContext(), avframe) < 0) {
			Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Sending of video frame failed!"));
			throw LibavException();
		}
	} catch(...) {
		av_frame_free(&avframe);
		throw;
	}
	av_frame_free(&avframe);

	// try to receive a packet
	for( ; ; ) {
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());
		int res = avcodec_receive_packet(GetCodecContext(), packet->GetPacket());
		if(res == 0) { // we have a packet, send the packet to the muxer
			GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
			IncrementPacketCounter();
		} else if(res == AVERROR(EAGAIN)) { // we have no packet
			return true;
		} else if(res == AVERROR_EOF) { // this is the end of the stream
			return false;
		} else {
			Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Receiving of video packet failed!"));
			throw LibavException();
		}
	}

#elif SSR_USE_AVCODEC_ENCODE_VIDEO2

	// allocate a packet
	std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());

	// encode the frame
	int got_packet;
	if(avcodec_encode_video2(GetCodecContext(), packet->GetPacket(), (frame == NULL)? NULL : frame->GetFrame(), &got_packet) < 0) {
		Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of video frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(got_packet) {

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#else

	// encode the frame
	int bytes_encoded = avcodec_encode_video(GetCodecContext(), m_temp_buffer.data(), m_temp_buffer.size(), (frame == NULL)? NULL : frame->GetFrame());
	if(bytes_encoded < 0) {
		Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of video frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(bytes_encoded > 0) {

		// allocate a packet
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper(bytes_encoded));

		// copy the data
		memcpy(packet->GetPacket()->data, m_temp_buffer.data(), bytes_encoded);

		// set the timestamp
		// note: pts will be rescaled and stream_index will be set by Muxer
		if(GetCodecContext()->coded_frame != NULL && GetCodecContext()->coded_frame->pts != (int64_t) AV_NOPTS_VALUE)
			packet->GetPacket()->pts = GetCodecContext()->coded_frame->pts;

		// set the keyframe flag
		if(GetCodecContext()->coded_frame->key_frame)
			packet->GetPacket()->flags |= AV_PKT_FLAG_KEY;

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#endif

}
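
Both encoders share the same return-value contract: true means the encoder accepted the input and can take more frames, false means that no packet was produced (with the older avcodec_encode_*2 / avcodec_encode_* paths) or that the stream has been fully flushed (with the send/receive path), so at the end of a recording the caller presumably keeps passing NULL until EncodeFrame() returns false. Below is a hedged, self-contained sketch of that end-of-stream flush written directly against libavcodec, assuming an already-opened AVCodecContext; the sink callback again stands in for a muxer and is not part of the original code.

extern "C" {
#include <libavcodec/avcodec.h>
}
#include <functional>
#include <stdexcept>

// Flush the encoder at end of stream: sending a null frame puts it into
// drain mode, and the remaining buffered packets keep arriving until
// avcodec_receive_packet() reports AVERROR_EOF.
static void FlushEncoder(AVCodecContext *enc,
                         const std::function<void(AVPacket*)> &sink) {
	if(avcodec_send_frame(enc, nullptr) < 0) // enter drain mode
		throw std::runtime_error("avcodec_send_frame (flush) failed");
	for( ; ; ) {
		AVPacket *pkt = av_packet_alloc();
		int res = avcodec_receive_packet(enc, pkt);
		if(res == 0) {
			sink(pkt); // last buffered packets
			av_packet_free(&pkt);
		} else if(res == AVERROR_EOF) {
			av_packet_free(&pkt);
			return; // encoder fully drained
		} else {
			av_packet_free(&pkt);
			throw std::runtime_error("avcodec_receive_packet (flush) failed");
		}
	}
}
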