Exemplo n.º 1
0
int encore(void *handle, int enc_opt, void *param1, void *param2)

{

    Encoder *pEnc = (Encoder *) handle;



    switch (enc_opt)

    {

    case ENC_OPT_INIT:

	return CreateEncoder((ENC_PARAM *) param1);



    case ENC_OPT_RELEASE:

	return FreeEncoder(pEnc);



    case ENC_OPT_ENCODE:

	return EncodeFrame(pEnc, 
		(ENC_FRAME *) param1, 
		(ENC_RESULT *) param2, 
		RC_MODE); 


	
   	case ENC_OPT_ENCODE_VBR:

	return EncodeFrame(pEnc, (ENC_FRAME *) param1, 
		(ENC_RESULT *) param2, 
		VBR_MODE);



	case ENC_OPT_ENCODE_EXT:

	return EncodeFrame(pEnc, (ENC_FRAME *) param1, (ENC_RESULT *) param2, EXT_MODE);


    default:

	return ENC_FAIL;

    }

}
Exemplo n.º 2
0
// Stop encoding: flush the encoder, finalize the container, and release
// every owned FFmpeg resource.
//
// BUG FIX: the original nested ALL cleanup inside `if (result >= 0)`, so a
// failed final flush leaked the codec context, the frame, the frame buffer,
// and the format context. Cleanup now runs unconditionally; only the file
// trailer (which requires a successful drain) stays behind the check.
void MP4Encoder::EncodeStop() {
    // Mark the transform as finished so no further frames are submitted.
    this->transform = false;

    // Drain the encoder by feeding a NULL frame.
    int result = EncodeFrame(pCodecCtx, NULL, &avPacket);
    if (result >= 0) {
        // Finalize the container only when the drain succeeded.
        av_write_trailer(pFormatCtx);
    }

    // Release resources on every path (success or failure).
    if (pCodecCtx != NULL) {
        avcodec_close(pCodecCtx);
        avcodec_free_context(&pCodecCtx);
        pCodecCtx = NULL;
    }
    if (pFrame != NULL) {
        av_free(pFrame);
        pFrame = NULL;
    }
    if (pFrameBuffer != NULL) {
        av_free(pFrameBuffer);
        pFrameBuffer = NULL;
    }
    if (pFormatCtx != NULL) {
        avio_close(pFormatCtx->pb);
        avformat_free_context(pFormatCtx);
        pFormatCtx = NULL;
    }
}
Exemplo n.º 3
0
// Convert one packed RGB frame to the encoder's planar format, optionally
// dump the YUV planes to a debug file, and hand the picture to x264.
//
// lSampleTime - capture timestamp forwarded to EncodeFrame.
// pData       - source pixel buffer (assumes 3 bytes/pixel, 4-byte-aligned
//               rows — see srcStride below; TODO confirm exact source format).
// nSize       - size of pData in bytes (not validated here).
//
// Returns the result of EncodeFrame.
bool CH264Encoder::Encode( DWORD lSampleTime, unsigned char *pData, int nSize )
{
    // PTS advances by one frame duration (in fps_den units) per call.
    ++m_nFrameNum;
    m_pic.i_pts = (int64_t)m_nFrameNum * m_x264_param.i_fps_den;

    unsigned char *pDataT = pData;
    int nSrcWidth = m_uiWidth;

    // Single packed source plane; stride is row bytes (width*3) rounded up
    // to a multiple of 4 (bitmap-style row alignment).
    uint8_t *src[3] = {pDataT, NULL, NULL};
    int srcStride[4] = {nSrcWidth * 3 + (((nSrcWidth * 3 % 4) != 0) ? (4 - nSrcWidth * 3 % 4) : 0), 0, 0, 0};
    // Destination strides for the three planes of a 4:2:0 picture
    // (full-width luma, half-width chroma).
    int dstStride[4] = {m_x264_param.i_width, m_x264_param.i_width / 2, m_x264_param.i_width / 2, 0};
    assert(m_pSwsContext);
    // Scale/convert directly into the x264 picture's planes.
    sws_scale(m_pSwsContext, src, srcStride, 0, m_uiHeight, m_pic.img.plane, dstStride);

    // Optional raw-YUV debug dump of the converted planes.
    // NOTE(review): fwrite return values are assigned but never checked —
    // write failures to the debug file are silently ignored.
    if (pYUVFile)
    {
        int iYSize = m_pic.img.i_stride[0] * m_uiHeight;
        int iRet = fwrite(m_pic.img.plane[0], sizeof(uint8_t), iYSize, pYUVFile);
        int iVSize = m_pic.img.i_stride[1] * m_uiHeight / 2;
        iRet = fwrite(m_pic.img.plane[1], sizeof(uint8_t), iVSize, pYUVFile);
        int iUSize = m_pic.img.i_stride[2] * m_uiHeight / 2;
        iRet = fwrite(m_pic.img.plane[2], sizeof(uint8_t), iUSize, pYUVFile);
    }

    // Submit the prepared picture to the actual encode step.
    bool bRet = EncodeFrame(lSampleTime, m_px264, &m_pic);
    return bRet;
}
Exemplo n.º 4
0
// Encode the text payload of a media frame, then forward the frame's
// timestamp to the sender.
void CTextEncoder::ProcessTextFrame (CMediaFrame *pFrame)
{
  EncodeFrame((const char *)pFrame->GetData());
  SendFrame(pFrame->GetTimestamp());
}
Exemplo n.º 5
0
// Copy the frame metadata from |info| into |dst| and encode |pic| into
// dst->bitstream. Returns 1 on success, 0 if encoding fails.
static int SetFrame(const WebPConfig* const config,
                    const WebPMuxFrameInfo* const info, WebPPicture* const pic,
                    WebPMuxFrameInfo* const dst) {
  *dst = *info;
  return EncodeFrame(config, pic, &dst->bitstream) ? 1 : 0;
}
Exemplo n.º 6
0
// 编码
bool CVideoEncodeVt::Encode(AVFrame* apFrame, AVPacket* apPacket)
{
    if(NULL == apFrame || NULL == apPacket)
    {
        return false;
    }
    
    //编码
    int ret = EncodeFrame(apFrame, m_EncodeSession);
    if (ret < 0)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "EncodeFrame failed!");
        //assert(false);
        return false;
    }
    
    if(m_iFirstPts == AV_NOPTS_VALUE)
    {
        m_iFirstPts = apFrame->pts;
    }
    else if(m_iDtsDelta == AV_NOPTS_VALUE && m_bHasBFrames)
    {
        m_iDtsDelta = apFrame->pts - m_iFirstPts;
    }
    
    //取得编码后的数据
    CMSampleBufferRef sampeBuffer;
    ret = PopBuff(&sampeBuffer);
    if( ret < 0)
    {
        return false;
    }
    
    //拷贝为avpacket
    ret = CopySampleBufferToAVPakcet(sampeBuffer, apPacket);
    CFRelease(sampeBuffer);
    sampeBuffer = NULL;
    
    if(ret < 0)
    {
        return false;
    }
    
    return true;
}
Exemplo n.º 7
0
// Convert an NV21 camera buffer to I420 (with rotation), wire the planes
// into pFrame, stamp a pts, and submit it to the encoder.
void MP4Encoder::EncodeBuffer(unsigned char *nv21Buffer) {

    // Lay out the three I420 planes inside the single pFrameBuffer
    // allocation: Y (w*h), then U (w*h/4), then V (w*h/4).
    uint8_t *i420_y = pFrameBuffer;
    uint8_t *i420_u = pFrameBuffer + width * height;
    uint8_t *i420_v = pFrameBuffer + width * height * 5 / 4;

    // NV21 -> I420 with a 270-degree rotation.
    // NOTE(review): destination strides are passed as `height`/`height/2`
    // rather than `width` — presumably because the kRotate270 output swaps
    // dimensions; confirm against the libyuv ConvertToI420 contract.
    libyuv::ConvertToI420(nv21Buffer, width * height, i420_y, height, i420_u, height / 2, i420_v,
                          height / 2, 0, 0, width, height, width, height, libyuv::kRotate270,
                          libyuv::FOURCC_NV21);

    pFrame->data[0] = i420_y;
    pFrame->data[1] = i420_u;
    pFrame->data[2] = i420_v;

    // Monotonically increasing AVFrame pts (one tick per submitted frame).
    pFrame->pts = index++;

    // Hand the prepared frame to the encoder.
    EncodeFrame(pCodecCtx, pFrame, &avPacket);
}
// Flush the encoder, finalize the output file, and free all FFmpeg
// resources owned by this writer. Always returns false (no frames appended).
bool VideoWriterUnit::PostProcess(list<FrameSetPtr>* append) {
  // Drain buffered frames from encoders that delay output (e.g. B-frames).
  if (format_context_->streams[video_stream_->index]->codec->codec->capabilities &
      CODEC_CAP_DELAY) {
    while (true) {
      int got_frame;
      if (EncodeFrame(nullptr, &got_frame) < 0) {
        break;
      }
      if (!got_frame) {
        break;
      }
    }
  }

  // Close file.
  av_write_trailer(format_context_);

  // BUG FIX: the original tested `!output_format_->flags & AVFMT_NOFILE`;
  // `!` binds tighter than `&`, so the expression was `(0 or 1) & AVFMT_NOFILE`
  // and the avio handle was effectively never closed. Parenthesize the mask.
  if (!(output_format_->flags & AVFMT_NOFILE)) {
    avio_close(format_context_->pb);
  }

  // Free resources.
  avcodec_close(codec_context_);
  av_free(frame_encode_->data[0]);
  av_free(frame_encode_);

  av_free(frame_bgr_->data[0]);
  av_free(frame_bgr_);

  // BUG FIX: the original freed `&format_context_->streams` (the array
  // pointer) on every iteration — a double free when nb_streams > 1 — and
  // never freed the AVStream structs. Free each stream's codec context and
  // the stream itself, once each.
  for (uint i = 0; i < format_context_->nb_streams; ++i) {
    av_freep(&format_context_->streams[i]->codec);
    av_freep(&format_context_->streams[i]);
  }

  av_free(format_context_);

  return false;
}
// Encode a single video frame: stage its BGR pixels into frame_bgr_,
// convert to the codec's pixel format, and submit to the encoder. The
// input frame set is passed through to the output list unchanged.
void VideoWriterUnit::ProcessFrame(FrameSetPtr input, list<FrameSetPtr>* output) {
  const VideoFrame* frame = input->at(video_stream_idx_)->AsPtr<VideoFrame>();

  // Copy the frame row by row into frame_bgr_, zero-padding each
  // destination row out to the full line size first.
  const uint8_t* src_row = frame->data();
  uint8_t* dst_row = frame_bgr_->data[0];
  for (int row = 0; row < frame_height_; ++row) {
    memset(dst_row, 0, LineSize());
    memcpy(dst_row, src_row, 3 * frame_width_);
    src_row += frame->width_step();
    dst_row += frame_bgr_->linesize[0];
  }

  // Convert the staged BGR picture into the codec's target format.
  sws_scale(sws_context_, frame_bgr_->data, frame_bgr_->linesize, 0, frame_height_,
            frame_encode_->data, frame_encode_->linesize);

  int got_frame;
  EncodeFrame(frame_encode_, &got_frame);

  ++frame_num_;
  output->push_back(input);
}
Exemplo n.º 10
0
// Obtain the codec extradata (e.g. SPS/PPS) by creating a temporary
// VideoToolbox session, encoding one blank YUV420P frame through it, and
// draining the result.
//
// aPushStreamParam - stream parameters (dimensions, bitrate, etc.).
// aeCodecType      - CoreMedia codec type for the session.
// aProfileLevel    - requested profile/level string.
// aPixelBufferInfo - pixel buffer attributes for the session.
//
// Returns 0 on success, negative on failure. Uses a do/while(0) block so
// every early failure `break`s to the shared cleanup below.
int CVideoEncodeVt::GetExtraData(const STRUCT_PUSH_STREAM_PARAM& aPushStreamParam, CMVideoCodecType aeCodecType,
                                 CFStringRef aProfileLevel, CFDictionaryRef aPixelBufferInfo)
{
    int ret = 0;
    AVFrame* pFrame = NULL;
    VTCompressionSessionRef session = NULL;
    CMSampleBufferRef sampleBuf = NULL;
    
    do
    {
        // Create a throwaway encoder session just for extradata extraction.
        ret = CreateEncoder(aPushStreamParam, aeCodecType, aProfileLevel, aPixelBufferInfo, &session);
        if(ret < 0)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CreateEncoder failed!");
            assert(false);
            break;
        }
        
        // Allocate an AVFrame, back it with a buffer, and set its parameters.
        pFrame = av_frame_alloc();
        if(NULL == pFrame)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "av_frame_alloc failed!");
            assert(false);
            break;
        }
        
        int iWidth = aPushStreamParam.iVideoPushStreamWidth;
        int iHeight = aPushStreamParam.iVideoPushStreamHeight;
        int iYSize = iWidth * iHeight;
        int iUVSize = (iWidth / 2) * (iHeight / 2);
        
        // One contiguous buffer holding Y + U + V planes.
        pFrame->buf[0] = av_buffer_alloc(iYSize + iUVSize * 2);
        if(NULL == pFrame->buf[0])
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "av_buffer_alloc failed!");
            assert(false);
            break;
        }
        pFrame->data[0] = pFrame->buf[0]->data;
        pFrame->data[1] = pFrame->buf[0]->data + iYSize;
        pFrame->data[2] = pFrame->buf[0]->data + iYSize + iUVSize;
        // Black frame: luma 0, chroma 128 (neutral).
        memset(pFrame->data[0], 0, iYSize);
        memset(pFrame->data[1], 128, iUVSize);
        memset(pFrame->data[2], 128, iUVSize);
        pFrame->linesize[0] = iWidth;
        pFrame->linesize[1] = (iWidth + 1) / 2;
        pFrame->linesize[2] = (iWidth + 1) / 2;
        pFrame->format = AV_PIX_FMT_YUV420P;
        pFrame->width = iWidth;
        pFrame->height = iHeight;
        pFrame->pts = 0;

        // Encode the blank frame through the temporary session.
        ret = EncodeFrame(pFrame, session);
        if(ret < 0)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "EncodeFrame failed!");
            assert(false);
            break;
        }
        
        // Block until the session has finished encoding all pending frames.
        ret = VTCompressionSessionCompleteFrames(session, kCMTimeIndefinite);
        if(0 != ret)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "VTCompressionSessionCompleteFrames failed!");
            break;
        }
        
        // Pop the encoded output (its arrival presumably populates the
        // extradata members checked in the assert below).
        ret = PopBuff(&sampleBuf);
        if(ret < 0)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "PopBuff failed!");
            assert(false);
            break;
        }
        
    }while(0);
    
    // Shared cleanup for all paths (success and every break above).
    if(NULL != sampleBuf)
    {
        CFRelease(sampleBuf);
        sampleBuf = NULL;
    }

    if(NULL != pFrame)
    {
        av_frame_unref(pFrame);
        av_frame_free(&pFrame);
    }
    
    
    if(NULL != session)
    {
        CFRelease(session);
        session = NULL;
    }
    
    // On success the extradata members must have been filled in.
    assert(0 == ret && m_pExtraData && m_iExtraDataSize > 0);
    
    return ret;
}
Exemplo n.º 11
0
// Encoder worker thread: drains the shared frame queue, encodes each frame,
// flushes the encoder on shutdown, and always ends the muxer stream —
// even after an exception — so the muxer cannot deadlock waiting for us.
void BaseEncoder::EncoderThread() {

	try {

		Logger::LogInfo("[BaseEncoder::EncoderThread] " + Logger::tr("Encoder thread started."));

		// normal encoding
		while(!m_should_stop) {

			// Pop one frame from the shared queue; the lock is scoped so it
			// is released before the (potentially slow) encode below.
			std::unique_ptr<AVFrameWrapper> frame;
			{
				SharedLock lock(&m_shared_data);
				if(!lock->m_frame_queue.empty()) {
					frame = std::move(lock->m_frame_queue.front());
					lock->m_frame_queue.pop_front();
				}
			}
			// Queue empty: either we're winding down, or wait 20ms and retry.
			if(frame == NULL) {
				if(m_should_finish) {
					break;
				}
				usleep(20000);
				continue;
			}

			// Encode the frame; count a packet if one was produced.
			if(EncodeFrame(frame->GetFrame())) {
				SharedLock lock(&m_shared_data);
				++lock->m_total_packets;
			}

		}

		// Flush: codecs with CODEC_CAP_DELAY buffer frames internally, so
		// keep feeding NULL until no more packets come out.
		if(!m_should_stop && (m_stream->codec->codec->capabilities & CODEC_CAP_DELAY)) {
			Logger::LogInfo("[BaseEncoder::EncoderThread] " + Logger::tr("Flushing encoder ..."));
			while(!m_should_stop) {
				if(EncodeFrame(NULL)) {
					SharedLock lock(&m_shared_data);
					++lock->m_total_packets;
				} else {
					break;
				}
			}
		}

		// tell the others that we're done
		m_is_done = true;

		Logger::LogInfo("[BaseEncoder::EncoderThread] " + Logger::tr("Encoder thread stopped."));

	} catch(const std::exception& e) {
		m_error_occurred = true;
		Logger::LogError("[BaseEncoder::EncoderThread] " + Logger::tr("Exception '%1' in encoder thread.").arg(e.what()));
	} catch(...) {
		m_error_occurred = true;
		Logger::LogError("[BaseEncoder::EncoderThread] " + Logger::tr("Unknown exception in encoder thread."));
	}

	// always end the stream, even if there was an error, otherwise the muxer will wait forever
	m_muxer->EndStream(m_stream->index);

}