예제 #1
0
// SoftAVC: software AVC/H.264 OMX decoder component.
// Starts with a 320x240 default frame; mPictureSize is the byte size of one
// YUV420 picture (w * h * 3 / 2).  Ports are created and the decoder is
// initialised eagerly; CHECK_EQ aborts if initDecoder() fails.
// NOTE: member-initialiser order must match the declaration order in the
// header (mPictureSize reads mWidth/mHeight).
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mWidth(320),
      mHeight(240),
      mPictureSize(mWidth * mHeight * 3 / 2),  // bytes in one YUV420 frame
      mCropLeft(0),
      mCropTop(0),
      mCropWidth(mWidth),
      mCropHeight(mHeight),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mOutputPortSettingsChange(NONE),
      mSignalledError(false) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
// SoftMPEG4: software MPEG-4 / H.263 OMX decoder component.
// The codec mode is derived from the OMX coding type; the base class is set
// up with a 352x288 (CIF) default frame.  Input buffers are 8 KiB and the
// advertised MIME type follows the selected mode.
SoftMPEG4::SoftMPEG4(
        const char *name,
        const char *componentRole,
        OMX_VIDEO_CODINGTYPE codingType,
        const CodecProfileLevel *profileLevels,
        size_t numProfileLevels,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, componentRole, codingType, profileLevels, numProfileLevels,
            352 /* width */, 288 /* height */, callbacks, appData, component),
      mMode(codingType == OMX_VIDEO_CodingH263 ? MODE_H263 : MODE_MPEG4),
      mHandle(new tagvideoDecControls),
      mInputBufferCount(0),
      mSignalledError(false),
      mInitialized(false),
      mFramesConfigured(false),
      mNumSamplesOutput(0),
      mPvTime(0) {
    initPorts(
            kNumInputBuffers,
            8192 /* inputBufferSize */,
            kNumOutputBuffers,
            (mMode == MODE_MPEG4)
            ? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263);
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #3
0
// VFW decompress-query handler: report whether lpbiInput can be decoded
// (and, when lpbiOutput is given, whether it can be decoded into that
// output format).  Returns ICERR_OK on success, ICERR_ERROR for a NULL
// input, ICERR_UNSUPPORTED for an unknown codec or failed decoder creation,
// ICERR_BADFORMAT on a dimension/colorspace mismatch, and
// VFW_E_RUNTIME_ERROR when the decoder backend fails to initialise.
STDMETHODIMP_(LRESULT) Tffvfw::decQuery(BITMAPINFO *lpbiInput,BITMAPINFO *lpbiOutput)
{
    if (initDec()) {
        if (lpbiInput==NULL) {
            return ICERR_ERROR;
        }
        CodecID codecId;
        // Temporary decoder instance used only to probe format support;
        // autoptr releases it when this scope exits.
        autoptr<TvideoCodecDec> dec=initDecoder(lpbiInput,&codecId);
        if (codecId==CODEC_ID_NONE) {
            return ICERR_UNSUPPORTED;
        }
        if (lpbiOutput!=NULL) {
            if (!dec) {
                return ICERR_UNSUPPORTED;
            }
            dec->forceOutputColorspace(&lpbiInput->bmiHeader,&autoforcedilace,autoforcedcolorspaces);
            const BITMAPINFOHEADER *outhdr=&lpbiOutput->bmiHeader;
            char_t pomS[60];
            DPRINTF(_l("Tffvfw::decQuery: %s"),fourcc2str(hdr2fourcc(outhdr,NULL),pomS,60));
            // Width must match exactly; height is compared via abs() because
            // DIB height sign only encodes top-down vs bottom-up orientation.
            if (lpbiInput->bmiHeader.biWidth!=outhdr->biWidth || abs(lpbiInput->bmiHeader.biHeight)!=abs(outhdr->biHeight) || getBMPcolorspace(outhdr,autoforcedcolorspaces.decGetForcedCsp(decVFW))==FF_CSP_NULL) {
                return ICERR_BADFORMAT;
            }
        }
        return ICERR_OK;
    } else {
        return VFW_E_RUNTIME_ERROR;
    }
}
예제 #4
0
// -----------------------------------------------------------------------------
// Default constructor: put every member into a well-defined idle state and
// create the underlying mpeg-2 decoder object.
// -----------------------------------------------------------------------------
TTMpeg2Decoder::TTMpeg2Decoder()
{
  // Owned/borrowed pointers: nothing is attached yet.
  mpeg2Stream     = NULL;
  mpeg2Decoder    = NULL;
  decoderBuffer   = NULL;
  streamBuffer    = NULL;
  sliceData       = NULL;
  videoIndexList  = NULL;
  videoHeaderList = NULL;
  t_frame_info    = NULL;

  // Scalar state.
  mpeg2FileName      = " ";
  decoderBufferSize  = 0;
  streamBufferSize   = 0;
  currentStreamFrame = 0;
  currentFrameIndex  = 0;
  currentStreamPos   = 0;
  iSkipFrames        = 1;

  // Status flags.
  mpeg2StreamOK    = false;
  isDecoder        = false;
  streamEndReached = false;
  isIndexSeek      = false;

  // initialize mpeg-2 decoder object
  initDecoder();
}
예제 #5
0
// SoftAVC (ittiam-based): software AVC OMX decoder component.
// NOTE(review): the `name` parameter is not forwarded — the base class is
// given the file-scope `componentName`/`codingType` constants instead;
// confirm this matches the registered component name.
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mCodecCtx(NULL),
      mMemRecords(NULL),
      mFlushOutBuffer(NULL),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mIvColorFormat(IV_YUV_420P),
      mNewWidth(mWidth),
      mNewHeight(mHeight),
      mNewLevel(0),
      mChangingResolution(false),
      mSignalledError(false) {
    initPorts(
            kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);

    // Start the decode-time profiling clock.
    GETTIME(&mTimeStart, NULL);

    // If input dump is enabled, then open create an empty file
    GENERATE_FILE_NAMES();
    CREATE_DUMP_FILE(mInFile);

    CHECK_EQ(initDecoder(mWidth, mHeight), (status_t)OK);
}
// SoftFFmpegVideo: ffmpeg-backed multi-codec OMX video decoder.
// The codec mode defaults to H.264 and is overridden by matching the OMX
// component name; an unrecognised name aborts via CHECK (so this component
// must only be instantiated under one of the names below).
SoftFFmpegVideo::SoftFFmpegVideo(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mMode(MODE_H264),
      mCtx(NULL),
      mImgConvertCtx(NULL),
      mExtradataReady(false),
      mIgnoreExtradata(false),
      mSignalledError(false),
      mWidth(320),
      mHeight(240),
      mStride(320),
      mOutputPortSettingsChange(NONE) {
    // Map the component name to the ffmpeg codec mode.
    if (!strcmp(name, "OMX.ffmpeg.mpeg4.decoder")) {
        mMode = MODE_MPEG4;
    } else if (!strcmp(name, "OMX.ffmpeg.mpeg2v.decoder")) {
        mMode = MODE_MPEG2;
    } else if (!strcmp(name, "OMX.ffmpeg.h263.decoder")) {
        mMode = MODE_H263;
    } else if (!strcmp(name, "OMX.ffmpeg.vc1.decoder")) {
        mMode = MODE_VC1;
    } else {
        CHECK(!strcmp(name, "OMX.ffmpeg.h264.decoder"));
        //mIgnoreExtradata = true;
    }

    LOGV("SoftFFmpegVideo component: %s", name);

    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
// SoftMPEG4 (SimpleSoftOMXComponent variant): software MPEG-4 / H.263 OMX
// decoder.  The mode defaults to MPEG-4 and is switched to H.263 by
// component name; any other name aborts via CHECK.  Default frame is CIF
// (352x288) with an inclusive crop rectangle covering the whole frame.
SoftMPEG4::SoftMPEG4(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mMode(MODE_MPEG4),
      mHandle(new tagvideoDecControls),
      mInputBufferCount(0),
      mWidth(352),
      mHeight(288),
      mCropLeft(0),
      mCropTop(0),
      mCropRight(mWidth - 1),   // inclusive right edge
      mCropBottom(mHeight - 1), // inclusive bottom edge
      mSignalledError(false),
      mInitialized(false),
      mFramesConfigured(false),
      mNumSamplesOutput(0),
      mOutputPortSettingsChange(NONE) {
    if (!strcmp(name, "OMX.google.h263.decoder")) {
        mMode = MODE_H263;
    } else {
        CHECK(!strcmp(name, "OMX.google.mpeg4.decoder"));
    }

    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
// SPRDMP3Decoder: Spreadtrum MP3 OMX decoder backed by a vendor shared
// library; the mMP3_ARM_DEC_* members are function pointers resolved by
// openDecoder().  Defaults to stereo 44.1 kHz.
// NOTE(review): unlike sibling components, initDecoder()'s result is not
// checked here — confirm it cannot fail or signals errors elsewhere.
SPRDMP3Decoder::SPRDMP3Decoder(
    const char *name,
    const OMX_CALLBACKTYPE *callbacks,
    OMX_PTR appData,
    OMX_COMPONENTTYPE **component)
    : SprdSimpleOMXComponent(name, callbacks, appData, component),
      mNumChannels(2),
      mSamplingRate(44100),
      mBitRate(0),
      mNextMdBegin(0),
      mPreFilledLen(0),
      mMaxFrameBuf(NULL),
      mLastInTimeUs(0),
      mAnchorTimeUs(0),
      mNumFramesOutput(0),
      mEOSFlag(false),
      mSignalledError(false),
      mLibHandle(NULL),
      mOutputPortSettingsChange(NONE),
      mMP3_ARM_DEC_Construct(NULL),
      mMP3_ARM_DEC_Deconstruct(NULL),
      mMP3_ARM_DEC_InitDecoder(NULL),
      mMP3_ARM_DEC_DecodeFrame(NULL) {
    bool ret = false;
    // Loading the vendor codec library must succeed; CHECK_EQ aborts if not.
    ret = openDecoder("libomx_mp3dec_sprd.so");
    CHECK_EQ(ret, true);
    initPorts();
    initDecoder();
}
예제 #9
0
File: XzDecoder.cpp — Project: gt1/libmaus2
// Construct an XZ decoder reading compressed bytes from rin.
// Ainbuf/Aoutbuf are the compressed-input and decoded-output buffers; the
// window pointers pa/pc/pe all start at Aoutbuf.end() so the buffer appears
// exhausted and the first read triggers a refill (presumably in the read
// path — confirm).  The liblzma stream state exists only when built with
// LZMA support.
libmaus2::lz::XzDecoder::XzDecoder(std::istream & rin, size_t const inbufsize, size_t const outbufsize)
: in(rin), Ainbuf(inbufsize), Aoutbuf(outbufsize), pa(Aoutbuf.end()), pc(Aoutbuf.end()), pe(Aoutbuf.end())
	#if defined(LIBMAUS2_HAVE_LZMA)
	, lstr(LZMA_STREAM_INIT), lastret(LZMA_STREAM_END)
	#endif
{
	initDecoder();
}
예제 #10
0
/*
 * SoftRaw: OMX pass-through "decoder" for raw PCM audio; defaults to
 * stereo at 44.1 kHz.  mNumSamplesOutput and mAnchorTimeUs are initialised
 * (in initDecoder, presumably) and used to compute output timestamps.
 *
 * ActionsCode(author:jinsongxue, change_code)
 */
SoftRaw::SoftRaw(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mSignalledError(false),      
      mChannelCount(2),
      mSampleRate(44100) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #11
0
// Tear the codec instance down and build a fresh one.
// Returns OK on success; on failure the partially-created state is cleaned
// up again and NO_MEMORY is reported to the caller.
status_t SoftMPEG2::reInitDecoder() {
    deInitDecoder();

    const status_t err = initDecoder();
    if (err != OK) {
        ALOGE("Create failure");
        // Undo whatever initDecoder() managed to allocate before failing.
        deInitDecoder();
        return NO_MEMORY;
    }
    return OK;
}
예제 #12
0
// Recreate the decoder for the given picture dimensions.
// Returns OK on success; on failure the partially-created state is cleaned
// up again and NO_MEMORY is reported to the caller.
status_t SoftAVC::reInitDecoder(uint32_t width, uint32_t height) {
    deInitDecoder();

    const status_t err = initDecoder(width, height);
    if (err != OK) {
        ALOGE("Create failure");
        // Undo whatever initDecoder() managed to allocate before failing.
        deInitDecoder();
        return NO_MEMORY;
    }
    return OK;
}
예제 #13
0
File: codecMTF.cpp — Project: Hexta/JAA
// Decode a move-to-front (MTF) encoded data block in place: every encoded
// byte is mapped back through the MTF table and the result is written back
// into inData.
// NOTE(review): buffer is reserve()d to decodedDataSize but resize()d to
// encodedDataSize — for MTF these should be equal; confirm the intent.
void Codec_MTF::decode_MTF(DataBlock* inData) {
    initDecoder(inData);   // presumably sets data/encodedDataSize from inData

    buffer.reserve(decodedDataSize);
    buffer.resize(encodedDataSize);

    init_mtf(256);  // reset the MTF table over all 256 byte values

    for (unsigned int i = 0; i < encodedDataSize; ++i)
        buffer[i] = get_mtf_c(data[i]);

    inData->setBlock(buffer.data());
}
// STE_SoftMP3_Dec: ST-Ericsson MP3 OMX decoder backed by a vendor shared
// library loaded at construction time; the mp3_* members are function
// pointers resolved via dlsym.  Defaults to stereo 44.1 kHz.
// NOTE(review): when dlopen fails we return early WITHOUT calling
// initPorts()/initDecoder(), leaving the component half-constructed; the
// dlsym() results are also never NULL-checked — confirm both are handled
// by callers.
STE_SoftMP3_Dec::STE_SoftMP3_Dec(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mDecoderBuf(NULL),
      mAnchorTimeUs(0),
      mNumFramesOutput(0),
      mNumChannels(2),
      mSamplingRate(44100),
      mSignalledError(false),
      mFirstTime(1),
#ifdef PARTIAL_FRAME_HANDLING
      bEOSReceived(false),
      pAudioModuleItf(new AUDIO_MODULE_INTERFACE_T),
#else
      mEnable_silenceinsertion(false),
      mPrevTimeStamp(0LL),
      mNewTimeStamp(0LL),
      mExpectedTimeStamp(0LL),
      mDeltaTimeStamp(0LL),
      mInitialTimeStamp(0LL),
      mFramesTobeInserted(0),
      mFrameDuration(0),
      moutputFrameSize(0),
      mfirst_frame(0),
#endif
      mp3_itf(new CODEC_INTERFACE_T),
      mDLHandle(NULL),
      bSeekDone(false),
      mOutputPortSettingsChange(NONE) {

    mDLHandle = dlopen(STE_MP3_DECODER_LIBRARY, RTLD_NOW);

    if (mDLHandle == NULL) {
        ALOGE("Could not open the libstagefright_ste_mp3decode library.\n");
        return;
    }

    // Resolve the decoder entry points from the vendor library.
    mp3_decode_init_malloc = (mp3_decode_init_malloc_t)dlsym(mDLHandle, "mp3_decode_init_malloc");
    mp3_close              = (mp3_close_t)dlsym(mDLHandle, "mp3_close");
    mp3_reset              = (mp3_reset_t)dlsym(mDLHandle, "mp3_reset");
    mp3_decode_frame       = (mp3_decode_frame_t)dlsym(mDLHandle, "mp3_decode_frame");
    mp3_2_5_parse_header   = (mp3_2_5_parse_header_t)dlsym(mDLHandle, "mp3_2_5_parse_header");

    initPorts();
    initDecoder();
    ALOGI("Successfully allocated ST-Ericsson '%s' decoder through the " \
         "SoftOMXPlugin interface", this->name());
}
// SoftVorbis: software Vorbis OMX audio decoder.
// mNumFramesLeftOnPage starts at -1 (no Ogg page parsed yet, presumably);
// decoder state (mState/mVi) is allocated later.  CHECK_EQ aborts if
// initDecoder() fails.
SoftVorbis::SoftVorbis(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mInputBufferCount(0),
      mState(NULL),
      mVi(NULL),
      mAnchorTimeUs(0),
      mNumFramesOutput(0),
      mNumFramesLeftOnPage(-1),
      mOutputPortSettingsChange(NONE) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #16
0
// SoftMP3: software MP3 OMX decoder built on the PV MP3 decoder
// (tPVMP3DecoderExternal config).  Defaults to stereo 44.1 kHz.
// NOTE(review): initDecoder()'s result is not checked here, unlike sibling
// components that wrap it in CHECK_EQ — confirm it cannot fail.
SoftMP3::SoftMP3(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mConfig(new tPVMP3DecoderExternal),
      mDecoderBuf(NULL),
      mAnchorTimeUs(0),
      mNumFramesOutput(0),
      mNumChannels(2),
      mSamplingRate(44100),
      mSignalledError(false),
      mOutputPortSettingsChange(NONE) {
    initPorts();
    initDecoder();
}
예제 #17
0
// SoftOpus: software Opus OMX audio decoder.  Decoder state, header info,
// codec delay and seek pre-roll are filled in later from the stream; the
// construction only wires up ports and the decoder, aborting via CHECK_EQ
// on failure.
SoftOpus::SoftOpus(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mInputBufferCount(0),
      mDecoder(NULL),
      mHeader(NULL),
      mCodecDelay(0),
      mSeekPreRoll(0),
      mAnchorTimeUs(0),
      mNumFramesOutput(0),
      mHaveEOS(false),
      mOutputPortSettingsChange(NONE) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #18
0
// Attach a new encoded buffer to this decoder.  The buffer layout is:
//   [numBytes : int][startPos : int][payload ...]
// A ByteReader is (re)pointed at the payload, sized in bits
// (*numBytesPtr * 8), and decoder state is reset via initDecoder().
// NOTE(review): `throw new CodingException(...)` throws a *pointer*
// (Java-style) — callers must catch CodingException* and delete it, or it
// leaks; changing it to throw by value would break existing catch sites.
void NSDecoder::setBuffer(byte* buffer_){ 
	Decoder::setBuffer(buffer_);
	// `buffer` is the base-class member, presumably set by
	// Decoder::setBuffer above — confirm.
	if (buffer==NULL) throw new CodingException("NSDecoder: Error, buffer passed in is NULL");
	numBytesPtr=(unsigned int*) buffer_;
	startPosPtr=(unsigned int*) (buffer_+sizeof(int));
	//Log::writeToLog("NSDecoder", 1, "setBuffer(): buffer has numBytes=", *numBytesPtr);
	//Log::writeToLog("NSDecoder", 1, "setBuffer(): buffer has startPos=", *startPosPtr);
	currLoc=0;
	if (reader==NULL) {
		reader=new ByteReader(buffer_+2*sizeof(int), *numBytesPtr*8);
	}
	else {
		reader->setBuffer(buffer_+2*sizeof(int), *numBytesPtr*8);
	}
	
	valsInBuffer=false;
	initDecoder();
}
예제 #19
0
File: Game.cpp — Project: abbychau/BEATMAX
// One-shot game startup: reset state flags and bring up the GUI, OpenGL,
// audio (AL) and decoder subsystems in order.  Any init helper signals
// failure by throwing a C-string message, which is logged and recorded in
// errorLoadingLibraries rather than propagated.
// NOTE(review): argc/argv are accepted but unused here; `screen` is NULLed
// then dereferenced for resizeWindow — presumably initGUI() assigns it.
void Game::initialise (int argc, char *argv[]) {
   // Initialise libraries
   screen = NULL;
   errorLoadingLibraries = false;
   running = false;
   paused = false;
   initTime = SDL_GetTicks ();

   try {
      initGUI ();
      initGL ();
      resizeWindow (screen->w, screen->h);
      initAL ();
      initDecoder ();

   } catch (const char *message) {
      errorLoadingLibraries = true;
      std::cerr << message << std::endl;
   }
}
예제 #20
0
// SoftHEVC: software HEVC OMX decoder (ittiam-based).  The base class is
// configured with the file-scope componentName/codingType constants and a
// 320x240 default frame; CHECK_EQ aborts construction if initDecoder()
// fails.
SoftHEVC::SoftHEVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mMemRecords(NULL),
      mFlushOutBuffer(NULL),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mIvColorFormat(IV_YUV_420P),
      mNewWidth(mWidth),
      mNewHeight(mHeight),
      mChangingResolution(false) {
    initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers,
            CODEC_MIME_TYPE);
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #21
0
// Construct a decoder for a buffered video stream.
//   bufSize     - size in bytes of the internal stream buffer (must be > 1)
//   fps         - nominal frame rate of the stream
//   pixelFormat - desired OpenCV pixel format for decoded frames
// On any failure (bad bufSize or initDecoder() returning false) the object
// is left with initialized == false; callers must check that flag.
VideoStreamDecoder::VideoStreamDecoder(int bufSize, int fps, PixelFormat pixelFormat) {
	this->bufSize = bufSize;
	this->fps = fps;
	this->openCVPixelFormat = pixelFormat;
	
	initialized = false;

	openCVFrame = NULL;
	// Fix: buffer was left uninitialized when taking the early return below,
	// leaving an indeterminate pointer for the destructor / later code.
	buffer = NULL;

	if (bufSize <= 1) {
		fprintf(stderr, "buffer size is too small\n");
		return;
	}
	
	buffer = new unsigned char[bufSize];

	if (initDecoder())
		initialized = true;

}
예제 #22
0
// -----------------------------------------------------------------------------
// Constructor with file name: initialise all members to their idle values,
// adopt the caller-supplied index/header lists (enabling index-based seeking
// when both are present), open the stream at [lOffset, lOffset+lSize) and
// allocate the stream buffer, then create the mpeg-2 decoder object.
// -----------------------------------------------------------------------------
TTMpeg2Decoder::TTMpeg2Decoder( QString cFName, 
				TTVideoIndexList* viIndex, 
				TTVideoHeaderList* viHeader,
				long lOffset, long lSize )
{
  //int iCount = 0;

  mpeg2Stream        = NULL;
  mpeg2FileName      = " ";
  mpeg2Decoder       = NULL;
  decoderBuffer      = NULL;
  decoderBufferSize  = 0;
  streamBuffer       = NULL;
  streamBufferSize   = 0;
  mpeg2StreamOK      = false;
  isDecoder          = false;
  streamEndReached   = false;
  currentStreamFrame = 0;
  currentFrameIndex  = 0;
  currentStreamPos   = 0;
  iSkipFrames        = 1;
  sliceData          = NULL;
  isIndexSeek        = false;
  videoIndexList     = viIndex;   // not owned; supplied by the caller
  videoHeaderList    = viHeader;  // not owned; supplied by the caller
  t_frame_info       = NULL;

  // index seek available only when both index and header lists are assigned
  if ( ttAssigned(videoIndexList) && ttAssigned(videoHeaderList) )
    isIndexSeek = true;

  // open the mpeg-2 stream; on success allocate the read buffer
  if ( openMPEG2File( cFName, lOffset, lSize ) )
  {
    streamBufferSize = initialStreamBufferSize;
    streamBuffer = new uint8_t[streamBufferSize];
  }

  // initialize mpeg-2 decoder objekt
  initDecoder();
}
예제 #23
0
// VFW decompress-begin handler: (re)create the decoder for the negotiated
// input format and prepare it for decompression.
// Returns ICERR_OK on success, ICERR_UNSUPPORTED when the decoder cannot be
// created or rejects the format, VFW_E_RUNTIME_ERROR when the VFW backend
// is missing.
STDMETHODIMP_(LRESULT) Tffvfw::decBegin(BITMAPINFO *lpbiInput,BITMAPINFO *lpbiOutput)
{
    if (!decVFW) {
        return VFW_E_RUNTIME_ERROR;
    }
    if (dec) {
        delete dec;
    }
    dec=initDecoder(lpbiInput,NULL);
    if (!dec) {
        // Fix: initDecoder can fail (decQuery guards the same condition);
        // without this check dec->beginDecompress below dereferences NULL.
        return ICERR_UNSUPPORTED;
    }
    FOURCC infcc=lpbiInput->bmiHeader.biCompression;
    fixMPEGinAVI(infcc);  // remap MPEG-in-AVI fourcc quirks
    CMediaType mt;
    bih2mediatype(lpbiInput->bmiHeader,&mt);
    TffPictBase p(lpbiInput->bmiHeader.biWidth,lpbiInput->bmiHeader.biHeight);
    LRESULT res=dec->beginDecompress(p,infcc,mt,0);
    if (!res) {
        return ICERR_UNSUPPORTED;
    }
    decVFW->dbgInit();
    return ICERR_OK;
}
예제 #24
0
File: SoftRaw.cpp — Project: Cosmos786/ML
// SoftRaw (Wi-Fi Display variant): OMX raw-audio pass-through with optional
// Wifidisplay timing support.  When WIFI_DISPLAY_ENABLE_RAW is set, the
// Wifidisplay_get_Time symbol is looked up in libmediaplayerservice.
// NOTE(review): dlclose() is called while mStreamSource still holds a
// pointer obtained from dlsym() on that handle — if nothing else keeps the
// library loaded, later calls through mStreamSource dangle; confirm.
SoftRaw::SoftRaw(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mSignalledError(false),
      mChannelCount(2),
      mSampleRate(44100) ,
       wifidisplay_flag(0),
      wifidisplay_addr(0){


	
#if WIFI_DISPLAY_ENABLE_RAW
	widi_handle = NULL;
	mStreamSource = NULL;
	omx_rs_txt = NULL;
	widi_handle = dlopen("libmediaplayerservice.so", RTLD_LAZY | RTLD_LOCAL);
	if(widi_handle == NULL)
	{
		ALOGE("libmediaplayerservice can't be loaded");
	}
	else
		mStreamSource = (int (*)(void* ptr,int64_t *start_time,int64_t *audio_start_time))::dlsym(widi_handle, "Wifidisplay_get_Time");
	if(mStreamSource==NULL)
	{
		ALOGE("StreamingSource don't exit");
	}
	if(widi_handle!=NULL)
		dlclose(widi_handle);
	start_time = 0;
	start_timeUs = 0;
	wifi_start_time = 0;
	last_timeUs = 0;
	last_adujst_time = 0;
#endif
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #25
0
// SoftAVC (on2/hantro-based): software AVC OMX decoder.  The base class is
// configured for the "video_decoder.avc" role with a 320x240 default frame;
// input buffers are 8 KiB.  CHECK_EQ aborts construction if initDecoder()
// fails.
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, "video_decoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mSignalledError(false) {
    initPorts(
            kNumInputBuffers, 8192 /* inputBufferSize */,
            kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);

    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #26
0
// SoftVPX: software VP8/VP9 OMX decoder (libvpx-based).  The mode is
// derived from the OMX coding type; the input buffer size is derived from a
// worst-case 2048x2048 YUV420 output divided by a per-codec minimum
// compression ratio borrowed from AVC/HEVC (VPx specifies none).
SoftVPX::SoftVPX(
        const char *name,
        const char *componentRole,
        OMX_VIDEO_CODINGTYPE codingType,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(
            name, componentRole, codingType,
            NULL /* profileLevels */, 0 /* numProfileLevels */,
            320 /* width */, 240 /* height */, callbacks, appData, component),
      mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
      mCtx(NULL),
      mImg(NULL) {
    // arbitrary from avc/hevc as vpx does not specify a min compression ratio
    const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
    const char *mime = mMode == MODE_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9;
    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;  // worst-case YUV420 frame
    initPorts(
            kNumBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* inputBufferSize */,
            kNumBuffers, mime, kMinCompressionRatio);
    CHECK_EQ(initDecoder(), (status_t)OK);
}
// SoftAMR: software AMR-NB / AMR-WB OMX audio decoder.  The mode defaults
// to narrow-band and is switched to wide-band by component name; any other
// name aborts via CHECK.
SoftAMR::SoftAMR(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mMode(MODE_NARROW),
      mState(NULL),
      mDecoderBuf(NULL),
      mDecoderCookie(NULL),
      mInputBufferCount(0),
      mAnchorTimeUs(0),
      mNumSamplesOutput(0),
      mSignalledError(false),
      mOutputPortSettingsChange(NONE) {
    if (!strcmp(name, "OMX.google.amrwb.decoder")) {
        mMode = MODE_WIDE;
    } else {
        CHECK(!strcmp(name, "OMX.google.amrnb.decoder"));
    }

    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #28
0
File: SoftHEVC.cpp — Project: Khaon/av
// SoftHEVC: software HEVC OMX decoder (ittiam-based).  The input buffer
// size is the larger of the codec's minimum (INPUT_BUF_SIZE) and a
// worst-case 2048x2048 YUV420 frame divided by the Level-4+ minimum
// compression ratio of 4.
SoftHEVC::SoftHEVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SoftVideoDecoderOMXComponent(name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mMemRecords(NULL),
      mFlushOutBuffer(NULL),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mIvColorFormat(IV_YUV_420P),
      mNewWidth(mWidth),
      mNewHeight(mHeight),
      mChangingResolution(false) {
    const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
    const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
    // INPUT_BUF_SIZE is given by HEVC codec as minimum input size
    initPorts(
            kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
            kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
    CHECK_EQ(initDecoder(), (status_t)OK);
}
예제 #29
0
// One-shot initialisation hook; simply forwards to initDecoder().
status_t SoftHEVC::init() {
    return initDecoder();
}
예제 #30
0
// Main work loop: invoked whenever a buffer is queued on either port.
// Drains the output queue, feeding input buffers to the ittiam decoder via
// ivdec_api_function and emitting decoded frames, while handling EOS,
// flush, allocation failures and dynamic resolution changes.  portIndex is
// unused because both port queues are examined on every call.
void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
    UNUSED(portIndex);

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    // Lazily (re)create the codec context on first use.
    if (NULL == mCodecCtx) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }
    if (outputBufferWidth() != mStride) {
        /* Set the run-time (dynamic) parameters */
        mStride = outputBufferWidth();
        setParams(mStride);
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    while (!outQueue.empty()) {
        BufferInfo *inInfo;
        OMX_BUFFERHEADERTYPE *inHeader;

        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;
        size_t timeStampIx;

        inInfo = NULL;
        inHeader = NULL;

        // While flushing, the decoder is drained without consuming input.
        if (!mIsInFlush) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
            mReceivedEOS = true;
            if (inHeader->nFilledLen == 0) {
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;
                setFlushMode();
            }
        }

        /* Get a free slot in timestamp array to hold input timestamp */
        {
            size_t i;
            timeStampIx = 0;
            for (i = 0; i < MAX_TIME_STAMPS; i++) {
                if (!mTimeStampsValid[i]) {
                    timeStampIx = i;
                    break;
                }
            }
            if (inHeader != NULL) {
                mTimeStampsValid[timeStampIx] = true;
                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
            }
        }

        {
            ivd_video_decode_ip_t s_dec_ip;
            ivd_video_decode_op_t s_dec_op;
            WORD32 timeDelay, timeTaken;
            size_t sizeY, sizeUV;

            if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            GETTIME(&mTimeStart, NULL);
            /* Compute time elapsed between end of previous decode()
             * to start of current decode() */
            TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);

            IV_API_CALL_STATUS_T status;
            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);

            bool unsupportedResolution =
                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));

            /* Check for unsupported dimensions */
            if (unsupportedResolution) {
                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
                mSignalledError = true;
                return;
            }

            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
            if (allocationFailed) {
                ALOGE("Allocation failure in decoder");
                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
                mSignalledError = true;
                return;
            }

            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));

            getVUIParams();

            GETTIME(&mTimeEnd, NULL);
            /* Compute time taken for decode() */
            TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);

            ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
                   s_dec_op.u4_num_bytes_consumed);
            // Once a frame has been decoded, a flush is required before
            // teardown/resolution change.
            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
                mFlushNeeded = true;
            }

            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
                /* If the input did not contain picture data, then ignore
                 * the associated timestamp */
                mTimeStampsValid[timeStampIx] = false;
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && !s_dec_op.u4_output_present) {
                mChangingResolution = false;
                resetDecoder();
                resetPlugin();
                mStride = outputBufferWidth();
                setParams(mStride);
                continue;
            }

            if (resChanged) {
                mChangingResolution = true;
                if (mFlushNeeded) {
                    setFlushMode();
                }
                continue;
            }

            // Combine the resolution change and coloraspects change in one PortSettingChange event
            // if necessary.
            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
                uint32_t width = s_dec_op.u4_pic_wd;
                uint32_t height = s_dec_op.u4_pic_ht;
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, width, height);

                if (portWillReset) {
                    resetDecoder();
                    resetPlugin();
                    return;
                }
            } else if (mUpdateColorAspects) {
                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
                    kDescribeColorAspectsIndex, NULL);
                mUpdateColorAspects = false;
                return;
            }

            if (s_dec_op.u4_output_present) {
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;

                outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
                mTimeStampsValid[s_dec_op.u4_ts] = false;

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on input port and there is no output
                 * from the codec, then signal EOS on output port */
                if (mReceivedEOS) {
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();
                }
            }
        }

        /* If input EOS is seen and decoder is not in flush mode,
         * set the decoder in flush mode.
         * There can be a case where EOS is sent along with last picture data
         * In that case, only after decoding that input data, decoder has to be
         * put in flush. This case is handled here  */

        if (mReceivedEOS && !mIsInFlush) {
            setFlushMode();
        }

        // TODO: Handle more than one picture data
        if (inHeader != NULL) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
        }
    }
}