// The real constructor of the proxy, to be able to have error management
// (the C++ constructor cannot return an OMX error code).
// Creates the 2 video ports, plugs the processing component, and sets the
// single supported role "video_decoder.avc".
// Returns OMX_ErrorNone on success, or the first error raised by the ENS/VFM calls.
OMX_ERRORTYPE H264Dec_Proxy::construct()
{
    OstTraceInt0(TRACE_API,"Enter H264Dec_Proxy::construct");
    // Note ostTrcFiltInst0 can not be used at that time: ENS has not alloc memory yet

    // 2 ports: VPB+0 = input bitstream, VPB+1 = output frames
    RETURN_OMX_ERROR_IF_ERROR(VFM_Component::construct(2));
    RETURN_OMX_ERROR_IF_ERROR(processingComponent.setPortCount(2));

    // Set the processing component. To be done after VFM_Component::construct
    // as ENS construct() reset it
    setProcessingComponent(&processingComponent);
	OstTraceInt0(TRACE_FLOW,"H264Dec_Proxy::construct():Create Input port");
    // 1 buffer... but this number is recomputed in the port depending the image size
    RETURN_OMX_ERROR_IF_ERROR(createPort(VPB+0, OMX_DirInput, OMX_BufferSupplyInput, 1, 0, OMX_PortDomainVideo));
	OstTraceInt0(TRACE_FLOW,"H264Dec_Proxy::construct():Create Output port");
    // 1 buffer... but this number is recomputed in the port depending the image size
    RETURN_OMX_ERROR_IF_ERROR(createPort(VPB+1, OMX_DirOutput, OMX_BufferSupplyOutput, 1, 0, OMX_PortDomainVideo));

    // set the default value of the ports
    ((H264Dec_Port *)getPort(VPB+0))->setDefault();
    ((H264Dec_Port *)getPort(VPB+1))->setDefault();

    // set the role of the component: video_decoder.avc
    // (exactly one role is expected -- checked by the assert below)
    h264dec_assert((1 == getRoleNB()), __LINE__, OMX_TRUE);
    ENS_String<20> role = "video_decoder.avc";
    role.put((OMX_STRING)mRoles[0]);
    setActiveRole(mRoles[0]);
    OstTraceInt0(TRACE_API,"Exit H264Dec_Proxy::construct");
    return OMX_ErrorNone;
}
/**
 \if INCLUDE_IN_HTML_ONLY
 \fn void GenerateFixedPointVersion(void)
 \brief The function calculate Hardware output to be programmed from contrast applied matrix
 \details: The function convert floating values into fixed point format
 \param void
 \return void
 \callgraph
 \callergraph
 \ingroup RgbToYuvCoder
 \endif
*/
void
GenerateFixedPointVersion(void)
{
    float_t fpScaled;
    uint8_t u8_TargetCount,
    u8_SourceCount = 0;

#if ENABLE_YUVCODER_TRACES
    OstTraceInt0(TRACE_DEBUG, "<yuvcoder>  >> GenerateFixedPointVersion\n");
#endif

    // copy correct parts of 6 item array into 9 element array
    for (u8_TargetCount = 0; u8_TargetCount < 9; u8_TargetCount++)
    {
#if ENABLE_YUVCODER_TRACES
        OstTraceInt2(TRACE_DEBUG, "<yuvcoder>  f_OutputMatrixView[%u] = %f\n", u8_SourceCount, f_OutputMatrixView[u8_SourceCount]);
#endif
        // f_OutputMatrixView has only 6 elements so output matrix column 3 is invalid
        if (!(2 == u8_TargetCount || 5 == u8_TargetCount || 8 == u8_TargetCount))
        {
            // round and convert to int16 every array element
            fpScaled = F_MATRIX_SCALER * f_OutputMatrixView[u8_SourceCount++];
            ptrs16_RgbToYuvMatrix[u8_TargetCount] = GetRounded(fpScaled);
#if ENABLE_YUVCODER_TRACES
            OstTraceInt3(TRACE_DEBUG, "<yuvcoder>  fpScaled = %f, ptrs16_RgbToYuvMatrix[%u] = %+d\n", fpScaled, u8_TargetCount, ptrs16_RgbToYuvMatrix[u8_TargetCount]);
#endif
        }
    }

#if ENABLE_YUVCODER_TRACES
    OstTraceInt0(TRACE_DEBUG, "<yuvcoder>  << GenerateFixedPointVersion\n");
#endif
    return;
}
// Constructor: initializes the MPEG4 decoder parameters and configurations to
// their default values (Simple Profile / Level 0, all optional tools disabled,
// crop unknown until the first frame is parsed).
// component: owning VFM component, forwarded to the VFM_ParamAndConfig base.
MPEG4Dec_ParamAndConfig::MPEG4Dec_ParamAndConfig(VFM_Component *component):VFM_ParamAndConfig(component)
{
    OstTraceInt0(TRACE_API, "=> MPEG4Dec_ParamAndConfig::MPEG4Dec_ParamAndConfig() constructor");

    // initialization of attributes of VFM_ParamAndConfig
    setProfileLevel(0, 0);

    // OMX_VIDEO_PARAM_MPEG4TYPE defaults
    mMpeg4Param.nSize = sizeof(OMX_VIDEO_PARAM_MPEG4TYPE);
    //mMpeg4Param.nVersion = (OMX_VERSIONTYPE)0;
    mMpeg4Param.nPortIndex = 0;
    mMpeg4Param.nSliceHeaderSpacing = 0;
    mMpeg4Param.bSVH = OMX_FALSE;
    mMpeg4Param.bGov = OMX_FALSE;
    mMpeg4Param.nPFrames = 0;
    mMpeg4Param.nBFrames = 0;
    mMpeg4Param.nIDCVLCThreshold = 0;
    mMpeg4Param.bACPred = OMX_FALSE;
    mMpeg4Param.nMaxPacketSize = 0;
    mMpeg4Param.nTimeIncRes = 0;
    mMpeg4Param.eProfile = OMX_VIDEO_MPEG4ProfileSimple;
    mMpeg4Param.eLevel = OMX_VIDEO_MPEG4Level0;
    mMpeg4Param.nAllowedPictureTypes = 0;
    mMpeg4Param.nHeaderExtension = 0;
    mMpeg4Param.bReversibleVLC = OMX_FALSE;

    // OMX_VIDEO_PARAM_PROFILELEVELTYPE defaults
    mMpeg4ProfileAndLevel.nSize = sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE);
    //mMpeg4ProfileAndLevel.nVersion = (OMX_VERSIONTYPE)0;
    mMpeg4ProfileAndLevel.nPortIndex = 0;
    mMpeg4ProfileAndLevel.nProfileIndex = 0;
    mMpeg4ProfileAndLevel.eLevel = 0;
    mMpeg4ProfileAndLevel.eProfile = 0;

    // crop is unknown until set from the stream (see setFormatInPortDefinition)
    CropWidth = 0;
    CropHeight = 0;

    OstTraceInt0(TRACE_API, "<= MPEG4Dec_ParamAndConfig::MPEG4Dec_ParamAndConfig() constructor");
}
// Automatically called by the VFM when the parameters of the port are set.
// It returns the minimum size (in bytes) of the buffers of this port:
// - input:  256, or the SoC-specific input buffer size when one is configured
// - output: one YUV420 frame, with 16 extra lines for some interleaved streams
OMX_U32 MPEG2Dec_Port::getBufferSize() const
{
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::getBufferSize");
    OMX_U32 default_return = 256;

    switch (getDirection())
        {
        case OMX_DirInput:
	         //+CR369244
	         {
	             // query the SoC capability only once (it was read twice before)
	             const OMX_U32 soc_input_size = VFM_SocCapabilityMgt::getMPEG4DecInputBuffSize();
	             if (soc_input_size != 0)
	             {
	                 default_return = soc_input_size;
	             }
	         }
            break;

        case OMX_DirOutput:
            default_return = (getFrameWidth()*(getFrameHeight() + 16)*3)/2; // Need 16 more pixel in height for some interleaved streams
            break;
        default:
            DBC_ASSERT(0==1);
        }
	#ifdef PACKET_VIDEO_SUPPORT
            // honour the size suggested by the PV framework when one was given
            if(mSuggestedBufferSize != 0) {
                default_return = mSuggestedBufferSize;
            }
	#endif

    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::getBufferSize");
    return default_return;
}
/**
  \if INCLUDE_IN_HTML_ONLY
  \fn void STAT8_GLACE_AWB_ISR( void )
  \brief    Top level ISR for the STAT8 glace/AWB statistics interrupt
  \return void
  \callgraph
  \callergraph
  \ingroup  InterruptHandler
  \endif
*/
INTERRUPT void
STAT8_GLACE_AWB_ISR(void)
{
#ifdef PROFILER_USING_XTI
    OstTraceInt0(TRACE_USER8, "start_xti_profiler_STAT8_GLACE_AWB_ISR");
#endif

#ifdef DEBUG_PROFILER
    CpuCycleProfiler_ts *ptr_CPUProfilerData = NULL;
    ptr_CPUProfilerData = &g_Profile_AWB_Statistics;
    START_PROFILER();
#endif
    OstTraceInt0(TRACE_DEBUG,"<INT> GLACE");
    g_Interrupts_Count.u16_INT16_STAT8_Glace_AWB++;

    Glace_ISR();

    // Clear the interrupt output of the ITM
    ITM_Clear_Interrupt_STAT8_GLACE_AWB();
#ifdef DEBUG_PROFILER
    Profiler_Update(ptr_CPUProfilerData);
#endif

#ifdef PROFILER_USING_XTI
#ifdef DEBUG_PROFILER
    // cycle count is only available when the CPU profiler is compiled in;
    // previously this trace dereferenced ptr_CPUProfilerData even when
    // DEBUG_PROFILER was undefined, which broke the XTI-only build
    OstTraceInt1(TRACE_USER8, "stop_xti_profiler_STAT8_GLACE_AWB_ISR : %d ",ptr_CPUProfilerData->u32_CurrentCycles);
#else
    OstTraceInt0(TRACE_USER8, "stop_xti_profiler_STAT8_GLACE_AWB_ISR");
#endif
#endif

    return;
}
/**
  \if INCLUDE_IN_HTML_ONLY
  \fn void STAT0_256_bins_histogram_AEC_ISR( void )
  \brief    Top level ISR for the STAT0 256-bin histogram (AEC) interrupt
  \return void
  \callgraph
  \callergraph
  \ingroup  InterruptHandler
  \endif
*/
INTERRUPT void
STAT0_256_bins_histogram_AEC_ISR(void)
{
#ifdef DEBUG_PROFILER
    CpuCycleProfiler_ts *ptr_CPUProfilerData = NULL;
#endif

#ifdef PROFILER_USING_XTI
    OstTraceInt0(TRACE_USER8, "start_xti_profiler_STAT0_256_bins_histogram_AEC_ISR");
#endif

#ifdef DEBUG_PROFILER
    ptr_CPUProfilerData = &g_Profile_AEC_Statistics;
    START_PROFILER();
#endif
    OstTraceInt0(TRACE_DEBUG,"<INT> HISTO");
    Histogram_ISR();
    g_Interrupts_Count.u16_INT08_STAT0_256_bins_histogram++;

    // Clear the interrupt output of the ITM
    ITM_Clear_Interrupt_STAT0_256_Bin_Histogram_AEC();
#ifdef DEBUG_PROFILER
    Profiler_Update(ptr_CPUProfilerData);
#endif

#ifdef PROFILER_USING_XTI
#ifdef DEBUG_PROFILER
    // cycle count is only available when the CPU profiler is compiled in;
    // previously this trace dereferenced ptr_CPUProfilerData even when
    // DEBUG_PROFILER was undefined, which broke the XTI-only build
    OstTraceInt1(TRACE_USER8, "stop_xti_profiler_STAT0_256_bins_histogram_AEC_ISR : %d ",ptr_CPUProfilerData->u32_CurrentCycles);
#else
    OstTraceInt0(TRACE_USER8, "stop_xti_profiler_STAT0_256_bins_histogram_AEC_ISR");
#endif
#endif
    return;
}
// MPEG2Dec_Port::checkIndexParamVideoPortFormat() is automatically called by
// the VFM when setting the video parameter of the port. It validates the
// format requested by the user against the OMX specification and against the
// capabilities of the component (in terms of Nomadik spec).
OMX_ERRORTYPE MPEG2Dec_Port::checkIndexParamVideoPortFormat(OMX_VIDEO_PARAM_PORTFORMATTYPE *pt)
{
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::checkIndexParamVideoPortFormat");

    // the request must target this very port, and the port must be one of ours
    DBC_ASSERT(mParamPortDefinition.nPortIndex==pt->nPortIndex);
    DBC_ASSERT(pt->nPortIndex==VPB+0 || pt->nPortIndex==VPB+1);

    if (pt->nPortIndex==VPB+0)
    {
        // input port: MPEG2 compressed data only, color format must stay unused
        RETURN_XXX_IF_WRONG_OST(pt->eCompressionFormat==OMX_VIDEO_CodingMPEG2, OMX_ErrorBadParameter);
        RETURN_XXX_IF_WRONG_OST(pt->eColorFormat==OMX_COLOR_FormatUnused, OMX_ErrorBadParameter);
        // pt->nIndex requires no check
    }
    else
    {
        // output port: raw frames in one of the two supported YUV420 layouts
        RETURN_XXX_IF_WRONG_OST(pt->eCompressionFormat==OMX_VIDEO_CodingUnused, OMX_ErrorBadParameter);
        RETURN_XXX_IF_WRONG_OST(pt->eColorFormat==(OMX_COLOR_FORMATTYPE)OMX_COLOR_FormatYUV420PackedSemiPlanar
                                || pt->eColorFormat==OMX_COLOR_FormatYUV420Planar, OMX_ErrorBadParameter);
        // pt->nIndex requires no check
    }

    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::checkIndexParamVideoPortFormat");
    return OMX_ErrorNone;
}
// NMF service callback: reacts to panic and shutdown notifications from the
// NMF framework for the SVA core.
// contextHandler: opaque user context (unused here)
// serviceType:    which service event occurred (panic / shutdown / ...)
// serviceData:    event payload (panic source, core id, panic info, ...)
void H264Dec_Proxy::NmfPanicCallback(void *contextHandler,t_nmf_service_type serviceType, t_nmf_service_data *serviceData)
{
  OstTraceInt1(TRACE_WARNING, "Enter H264Dec_Proxy::NmfPanicCallback, user_data=%d",serviceData->panic.info.mpc.panicInfo1);

  switch (serviceType) {
  case NMF_SERVICE_PANIC:
    // only MPC panics of the SVA core are handled here; host-side panics and
    // panics of other cores are ignored
    if (serviceData->panic.panicSource==HOST_EE || serviceData->panic.info.mpc.coreid!=((t_nmf_core_id)SVA_CORE_ID)) {
        return;
    }
    OstTraceInt0(TRACE_WARNING, "H264Dec_Proxy::NmfPanicCallback: calls errorRecoveryKillMpc");
    processingComponent.errorRecoveryKillMpc(mpc_domain_id);
    break;
  case NMF_SERVICE_SHUTDOWN:
    // ignore shutdown notifications for cores other than SVA
    if (serviceData->shutdown.coreid != ((t_nmf_core_id)SVA_CORE_ID)) {
        return;
    }
    // restart the MPC only while the component is still active
    // (not in, and not transitioning to, the Loaded state)
    if ((getCompFsm()->getOMXState() != OMX_StateLoaded) && !processingComponent.isGoToLoaded()) {

        OstTraceInt0(TRACE_WARNING, "H264Dec_Proxy::NmfPanicCallback: calls errorRecoveryRestartMpc");
        processingComponent.errorRecoveryRestartMpc(mpc_domain_id);
    } else {
        OstTraceInt0(TRACE_WARNING, "H264Dec_Proxy::NmfPanicCallback: NMF_SERVICE_SHUTDOWN, but we are in loaded mode");
    }
    break;
  default:
    OstTraceInt1(TRACE_ERROR, "H264Dec_Proxy::NmfPanicCallback: unknown service %d", serviceType);
  }
}
/**
 \if INCLUDE_IN_HTML_ONLY
 \fn void SetSignalRangesAndStockMatrix (uint8_t u8_PipeNo, uint8_t e_Transform_Type)
 \brief Update signal range based on output required from pipe
 \details   Excursion and midpoint of the output parameters depend on the selected
            YUV/RGB transform flavour. Selects the matching output signal range and
            the matching stock RGB-to-YUV matrix (REC601 by default).
 \param u8_PipeNo: Pipe No in ISP
 \param e_Transform_Type: Output format requested in the PIPE
 \return void
 \callgraph
 \callergraph
 \ingroup RgbToYuvCoder
 \endif
*/
void
SetSignalRangesAndStockMatrix(
uint8_t u8_PipeNo,
uint8_t e_Transform_Type)
{
#if ENABLE_YUVCODER_TRACES
        uint32_t count = 0;
        OstTraceInt2(TRACE_DEBUG, "<yuvcoder>  >> SetSignalRangesAndStockMatrix >> u8_PipeNo = %u, e_Transform_Type = %u\n", u8_PipeNo, e_Transform_Type);
#endif

    // By default keep standard as REC601 for all the cases
    ptr_YuvStockMatrix = (float *) f_RgbToYuvStockMatrix_REC601;

    switch (e_Transform_Type)
    {
        case Transform_e_YCbCr_JFIF:
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &YuvJfifOutputSignalRange;
            break;

        case Transform_e_YCbCr_Rec601:
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &YuvRec601OutputSignalRange;
            break;

        case Transform_e_YCbCr_Rec709_FULL_RANGE:
            OstTraceInt0(TRACE_DEBUG, "Transform_e_YCbCr_Rec709_FULL_RANGE \n");
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &YuvRec709OutputSignalRangeFull;
            ptr_YuvStockMatrix = (float *) f_RgbToYuvStockMatrix_REC709;
            break;

        case Transform_e_YCbCr_Rec709:
            OstTraceInt0(TRACE_DEBUG, "Transform_e_YCbCr_Rec709 \n");
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &YuvRec709OutputSignalRange;
            ptr_YuvStockMatrix = (float *) f_RgbToYuvStockMatrix_REC709;
            break;

        case Transform_e_YCbCr_Custom:
            OstTraceInt0(TRACE_DEBUG, "Transform_e_YCbCr_Custom \n");
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &g_CE_CustomTransformOutputSignalRange[u8_PipeNo];
            ptr_YuvStockMatrix = (float *)&g_CustomStockMatrix[u8_PipeNo].f_StockMatrix[0];
            break;

        case Transform_e_Rgb_Standard:
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &RgbOutputSignalRange;
            break;

        default:
            // unknown transform: fall back to the per-pipe custom signal range
            ptr_PipeRgbToYUVSignalRange = ( RgbToYuvOutputSignalRange_ts * ) &g_CE_CustomTransformOutputSignalRange[u8_PipeNo];
            break;
    }

#if ENABLE_YUVCODER_TRACES
        for(count=0; count<9; count++)
        {
            OstTraceInt4(TRACE_DEBUG, "ptr_YuvStockMatrix[%u] = %f, f_RgbToYuvStockMatrix_REC709[%u] = %f \n", count, ptr_YuvStockMatrix[count], count, f_RgbToYuvStockMatrix_REC709[count]);
        }
        OstTraceInt4(TRACE_DEBUG, "Signal range: %u %u %u %u\n", g_CE_CustomTransformOutputSignalRange[u8_PipeNo].u16_LumaExcursion, g_CE_CustomTransformOutputSignalRange[u8_PipeNo].u16_LumaMidpointTimes2, g_CE_CustomTransformOutputSignalRange[u8_PipeNo].u16_ChromaExcursion, g_CE_CustomTransformOutputSignalRange[u8_PipeNo].u16_ChromaMidpointTimes2);
        OstTraceInt0(TRACE_DEBUG, "<yuvcoder>  << SetSignalRangesAndStockMatrix \n");
#endif
    return;
}
// Port constructor: fills the OMX port definition with safe initial values.
// Most fields are identical for both directions; only the formats and the
// buffer alignment depend on the direction (the previous version duplicated
// every common assignment in both branches).
// commonPortData: direction/index/etc. forwarded to the VFM_Port base
// enscomp:        owning ENS component
MPEG2Dec_Port::MPEG2Dec_Port(const EnsCommonPortData& commonPortData, ENS_Component &enscomp) :
            VFM_Port(commonPortData, enscomp)
{
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port_constructor");

    // fields common to the input and the output port
    mParamPortDefinition.format.video.nStride = 0;
    mParamPortDefinition.format.video.pNativeRender = 0;
    mParamPortDefinition.format.video.pNativeWindow = 0;
    mParamPortDefinition.format.video.xFramerate = 0;
    mParamPortDefinition.format.video.nSliceHeight = 0;
    mParamPortDefinition.format.video.nFrameHeight = 0;
    mParamPortDefinition.format.video.nFrameWidth = 0;
    mParamPortDefinition.format.video.nBitrate = 0;
    mParamPortDefinition.format.video.bFlagErrorConcealment = OMX_FALSE;
    mParamPortDefinition.format.video.cMIMEType = 0;
    mParamPortDefinition.nBufferSize = 256; //any non-zero value
    mParamPortDefinition.bBuffersContiguous = OMX_TRUE;

    if (commonPortData.mDirection == OMX_DirInput) {            // input port
        mParamPortDefinition.format.video.eColorFormat = OMX_COLOR_FormatUnused;
        mParamPortDefinition.format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG2;
        mParamPortDefinition.nBufferAlignment = 16;
    } else {                                                    // output port
        mParamPortDefinition.format.video.eColorFormat = (OMX_COLOR_FORMATTYPE)OMX_SYMBIAN_COLOR_FormatYUV420MBPackedSemiPlanar;
        mParamPortDefinition.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
        mParamPortDefinition.nBufferAlignment = 256;
    }

#ifdef PACKET_VIDEO_SUPPORT
    mSuggestedBufferSize = 0;
#endif

    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port_constructor");
}
// Constructor: Nothing to be done as error management cannot be performed.
// The real constructor is in construct()
// NOTE(review): &mParam is handed to the VFM_Component base before mParam is
// initialized in the init list; this is only safe if the base stores the
// address without using the object -- confirm against VFM_Component.
H264Dec_Proxy::H264Dec_Proxy():
    VFM_Component(&mParam, OMX_TRUE),
    mParam(this),
    processingComponent(*this)
{
	OstTraceInt0(TRACE_API,"Enter H264Dec_Proxy::constructor");
    // +CR332521 CHANGE START FOR
#ifdef _CACHE_OPT_
    // presumably (cached-input, uncached-output) flags -- verify against
    // the setiOMXCacheProperties() declaration
    mParam.setiOMXCacheProperties(OMX_TRUE, OMX_FALSE);
#endif
    // -CR332521 CHANGE END OF
	OstTraceInt0(TRACE_API,"Exit H264Dec_Proxy::constructor");
}
// Factory method registered with the OMX core: creates an H264 decoder
// component instance through the VFM factory.
// ppENSComponent: out parameter receiving the created component.
// Returns the result of the VFM factory call.
EXPORT_C
OMX_ERRORTYPE H264DecFactoryMethod(ENS_Component ** ppENSComponent)
{
    OstTraceInt0(TRACE_API,"Enter H264DecFactoryMethod");
    // No "Exit" trace: the function returns directly from the factory calls
    // below (the former "Exit" trace fired before any work was done, which
    // made the logs misleading).
#ifdef ENS_VERSION
#if ENS_VERSION == ENS_VERSION_VIDEO
    return VFM_Component::VFMFactoryMethod((ENS_Component_Video **)ppENSComponent, H264Dec_Proxy::newProxy);
#endif
#endif

    return VFM_Component::VFMFactoryMethod((ENS_Component **)ppENSComponent, H264Dec_Proxy::newProxy);
}
// Tears down everything codecInstantiate() created: the two port interfaces,
// the "setParam" client binding, the wrapper callbacks
// ("iChangePortSettingsSignal", "iCommandAck") and, in the HVA build, the
// "setMemoryParam" binding. Stops at (and returns) OMX_ErrorUndefined on the
// first unbind that fails; OMX_ErrorNone when the full teardown succeeded.
OMX_ERRORTYPE JPEGEnc_ArmNmfProcessingComponent::codecDeInstantiate()
{
	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : In codecDeInstantiate line no %d> \n",__LINE__);
	t_nmf_error error;

    // undo the port plumbing first (mirror of the create order)
    RETURN_OMX_ERROR_IF_ERROR(destroyPortInterface((VFM_Port *)mENSComponent.getPort(0), "emptythisbuffer", "inputport"));
    RETURN_OMX_ERROR_IF_ERROR(destroyPortInterface((VFM_Port *)mENSComponent.getPort(1), "fillthisbuffer", "outputport"));
	error = mCodec->unbindFromUser("setParam");
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while un binding setParam \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}

	//EnsWrapper_unbindToUser(mENSComponent.getOMXHandle(),mCodec, "iChangePortSettingsSignal");
	error = EnsWrapper_unbindToUser(OMXHandle,mCodec, "iChangePortSettingsSignal");
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while un binding iChangePortSettingsSignal \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}
	//+ER 354962
	error = EnsWrapper_unbindToUser(OMXHandle,mCodec, "iCommandAck");
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while un binding iCommandAck \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}
	//-ER 354962
#ifdef HVA_JPEGENC
	// the setMemoryParam interface only exists in the HVA (hardware) flavour
	if (pProxyComponent->isHVABased == OMX_TRUE)
	{
		error = mCodec->unbindFromUser("setMemoryParam");
		if (error != NMF_OK)
		{
			OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while un binding setMemoryParam \n");
			jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
			return OMX_ErrorUndefined ;
		}
	}
#endif

	// for setting the parameters again incase another YUV is sent for encoding with same parameters
	pProxyComponent->mSendParamToARMNMF.set();
	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : codecDeInstantiate DONE <line no %d> ",__LINE__);
    return OMX_ErrorNone;
}
// Wires the codec NMF component to the proxy: creates the two port
// interfaces, binds the "setParam" client interface and registers the
// wrapper callbacks ("iChangePortSettingsSignal", "iCommandAck" and, in the
// HVA build, "setMemoryParam"). Returns OMX_ErrorUndefined on the first
// bind that fails; OMX_ErrorNone when the full setup succeeded.
OMX_ERRORTYPE JPEGEnc_ArmNmfProcessingComponent::codecInstantiate()
{
	//OMX_HANDLETYPE OMXHandle = mENSComponent.getOMXHandle();
    t_nmf_error error ;
    // cache the OMX handle: it is reused by codecDeInstantiate()
    OMXHandle = mENSComponent.getOMXHandle();

	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : In codecInstantiate line no %d> \n",__LINE__);

    RETURN_OMX_ERROR_IF_ERROR(createPortInterface((VFM_Port *)mENSComponent.getPort(0), "emptythisbuffer", "inputport"));
    RETURN_OMX_ERROR_IF_ERROR(createPortInterface((VFM_Port *)mENSComponent.getPort(1), "fillthisbuffer", "outputport"));

	// fifo size 8 -- matches the other client bindings of this codec
	error = mCodec->bindFromUser("setParam",8,&setparamitf) ;
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while binding setParam \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}
	// callbacks from the codec back to this object (fifo size 4)
	error = EnsWrapper_bindToUser(OMXHandle,mCodec,"iChangePortSettingsSignal",(jpegenc_arm_nmf_api_portSettingsDescriptor*)this,4);
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while binding iChangePortSettingsSignal \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}
	//+ER 354962
	error = EnsWrapper_bindToUser(OMXHandle,mCodec,"iCommandAck",(jpegenc_arm_nmf_api_cmd_ackDescriptor*)this,4);
	if (error != NMF_OK)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while binding iCommandAck \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		return OMX_ErrorUndefined ;
	}
	//-ER 354962
#ifdef HVA_JPEGENC
	// the setMemoryParam interface only exists in the HVA (hardware) flavour
	if (pProxyComponent->isHVABased == OMX_TRUE)
	{
		error = mCodec->bindFromUser("setMemoryParam",8,&setMemoryitf) ;
		if (error != NMF_OK)
		{
			OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error while binding setMemoryParam \n");
			jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
			return OMX_ErrorUndefined;
		}
	}
#endif
	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : codecInstantiate DONE line no %d> \n",__LINE__);
    return OMX_ErrorNone;
}
// Set the default value of the port, following the OMX IL 1.1.1 spec.
// Used by the proxy's construct() when a new instance of the proxy creates
// its ports.
void MPEG2Dec_Port::setDefault()
{
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::setDefault");
    DBC_ASSERT(mParamPortDefinition.eDomain==OMX_PortDomainVideo);
    //MPEG2Dec_Proxy *comp = getProxy();

    // defaults shared by both ports
    mParamPortDefinition.format.video.cMIMEType = (char *)"video/MPEG2";
    mParamPortDefinition.format.video.pNativeRender = 0;
    mParamPortDefinition.format.video.nFrameHeight = 16;      // from OMX spec 1.1.1
    mParamPortDefinition.format.video.nFrameWidth = 16;       // from OMX spec 1.1.1
    mParamPortDefinition.format.video.bFlagErrorConcealment = OMX_FALSE;
    mParamPortDefinition.format.video.pNativeWindow = 0;

    if (mParamPortDefinition.nPortIndex == 0)
    {
        // input port: MPEG2 compressed bitstream
        DBC_ASSERT(mParamPortDefinition.eDir==OMX_DirInput);
        mParamPortDefinition.format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG2;
        mParamPortDefinition.format.video.eColorFormat = OMX_COLOR_FormatUnused;
        mParamPortDefinition.format.video.nBitrate = 64000;        // from OMX spec 1.1.1
        mParamPortDefinition.format.video.xFramerate = 15;         // from OMX spec 1.1.1
        mParamPortDefinition.format.video.nStride = 0;             // unused for compressed data
        mParamPortDefinition.format.video.nSliceHeight = 1;        // unused for compressed data
        //comp->mParam.setProfileLevel(OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelLL);  // from OMX spec 1.1.1
    }
    else if (mParamPortDefinition.nPortIndex == 1)
    {
        // output port: planar YUV420 frames
        DBC_ASSERT(mParamPortDefinition.eDir==OMX_DirOutput);
        mParamPortDefinition.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
        mParamPortDefinition.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
        mParamPortDefinition.format.video.nBitrate = 0;
        mParamPortDefinition.format.video.xFramerate = 0;
        mParamPortDefinition.format.video.nStride = (mParamPortDefinition.format.video.nFrameWidth * 3) / 2;        // corresponds to a raw in OMX_COLOR_FormatYUV420Planar
        mParamPortDefinition.format.video.nSliceHeight = 1;        // a single raw in the buffer is required. FIXME
    }
    else
    {
        DBC_ASSERT(0==1);
    }

    // mParamPortDefinition.nBufferCountActual updated by the component when buffers are allocated
    // mParamPortDefinition.nBufferCountMin set at the creation of the port (check constructor of the proxy)
    // mParamPortDefinition.bEnabled / bPopulated are handled by the component
    mParamPortDefinition.bBuffersContiguous = OMX_TRUE;
    mParamPortDefinition.nBufferAlignment = 0x100;
    mParamPortDefinition.nBufferSize = getBufferSize();
    mParamPortDefinition.nBufferCountMin = getBufferCountMin();
    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::setDefault");
}
// Called when the port definition is set. Latches the original frame size as
// the crop, rounds the dimensions up to a multiple of 16, then delegates to
// the base class.
// NOTE(review): portDef is received const, but its format.video sub-struct is
// mutated through pt_video (const-ness cast away) -- the rounded dimensions
// are written back into the caller's structure. This looks intentional
// (ER 426137 area) but should be confirmed before touching it.
OMX_ERRORTYPE MPEG2Dec_Port::setFormatInPortDefinition(const OMX_PARAM_PORTDEFINITIONTYPE& portDef)
{
    OstTraceInt0(TRACE_API, "=> <PG> MPEG2Dec_Port::setFormatInPortDefinition()");
    MPEG2Dec_Proxy *pComp = (MPEG2Dec_Proxy *)(&getENSComponent());
    MPEG2Dec_ParamAndConfig *pParam = ((MPEG2Dec_ParamAndConfig *)(pComp->getParamAndConfig()));
    OMX_VIDEO_PORTDEFINITIONTYPE *pt_video = (OMX_VIDEO_PORTDEFINITIONTYPE *)(&(portDef.format.video));

    // first call on the input port: remember the un-rounded size as the crop
    if(portDef.nPortIndex == 0 && !(pParam->CropWidth) && !(pParam->CropHeight)) /* Change for ER 426137 */
    {
        pParam->CropWidth = pt_video->nFrameWidth;
        pParam->CropHeight = pt_video->nFrameHeight;
        OstTraceInt2(TRACE_FLOW, "=> <PG> MPEG2Dec_Port::setFormatInPortDefinition() Updated Crop width : %d Crop height : %d ",pParam->CropWidth,pParam->CropHeight);
    }

        //Making it x16
	pt_video->nFrameWidth = ((pt_video->nFrameWidth + 0xF) & (~0xF));
	pt_video->nFrameHeight = ((pt_video->nFrameHeight + 0xF) & (~0xF));

	OstTraceInt3(TRACE_FLOW, "=> Inside NEW nPortIndex %d pt_video->nFrameHeight %d mParamPortDefinition.format.video.nFrameHeight %d",portDef.nPortIndex,pt_video->nFrameHeight,mParamPortDefinition.format.video.nFrameHeight);
	// presumably undoes one spurious extra 16-line rounding step when the
	// stored height is already aligned -- TODO confirm the exact intent
	if ((pt_video->nFrameHeight - mParamPortDefinition.format.video.nFrameHeight) == 16)
	{
		pt_video->nFrameHeight -=16;
	}
	OstTraceInt3(TRACE_FLOW, "=> AFTER Inside NEW nPortIndex %d pt_video->nFrameHeight %d mParamPortDefinition.format.video.nFrameHeight %d",portDef.nPortIndex,pt_video->nFrameHeight,mParamPortDefinition.format.video.nFrameHeight);


	return VFM_Port::setFormatInPortDefinition(portDef);
}
// Creates the JPEG encoder NMF codec: the HVA (hardware) flavour when the
// proxy is configured for it, the software flavour otherwise.
// domainId: unused by this implementation.
// Returns OMX_ErrorUndefined when the codec could not be created.
OMX_ERRORTYPE JPEGEnc_ArmNmfProcessingComponent::codecCreate(OMX_U32 domainId)
{
	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : In codecCreate  <line no %d> ",__LINE__);
	OMX_ERRORTYPE error = OMX_ErrorNone;

#ifdef HVA_JPEGENC
	if (pProxyComponent->isHVABased == OMX_TRUE)
	{
		// hardware (HVA) encoder
		mCodec = jpegenc_arm_nmf_hva_jpegencCreate();
	}
	else
#endif
	{
		// software encoder (also the only option in non-HVA builds:
		// the #ifdef above turns this into an unconditional block)
		mCodec = jpegenc_arm_nmf_jpegenc_swCreate();
	}

	if(!mCodec)
	{
		OstTraceInt0(TRACE_ERROR, "In JPEGEnc_ArmNmfProcessingComponent : Error in codecCreate \n");
		jpegenc_armnmf_assert(OMX_ErrorUndefined, __LINE__, OMX_TRUE);
		error = OMX_ErrorUndefined;
	}
	OstTraceFiltInst1(TRACE_API, "In JPEGEnc_ArmNmfProcessingComponent : codecCreate DONE <line no %d> ",__LINE__);
	return error;
}
// ---- Example #18 ---- (stray aggregator text "Пример #18" / "0" converted to a comment)
/// [AG/PM] Kept for debugging purpose.
/// Returns Flag_e_TRUE when the histogram statistics should be exported,
/// Flag_e_FALSE when they were already exported or no trigger condition holds.
uint8_t
Histogram_AreStatsValid(void)
{
    // Glace export already completed? Nothing more to do.
    if (GlaceHistogramStatus_HistogramDone & g_GlaceHistogramStatsFrozen)
    {
        OstTraceInt0(TRACE_FLOW, "->Histogram_AreStatsValid: Already exported histo statistics");
        return (Flag_e_FALSE);
    }

    // A pending coin toggle (anything except e_coin_ctl) forces an export.
    if (
        (g_SystemSetup.e_Coin_Glace_Histogram_Ctrl_debug != g_SystemConfig_Status.e_Coin_Glace_Histogram_Status)
    ||  (g_HistStats_Status.e_CoinStatus != g_HistStats_Ctrl.e_CoinCtrl_debug)
    )
    {
        OstTraceInt0(TRACE_FLOW, "->Histogram_AreStatsValid: found coin other than e_coin_ctrl");
        return (Flag_e_TRUE);
    }

    // Flash active: both the flash status AND the exposure status must be in.
    if (Flash_IsFlashModeActive())
    {
        if (Flash_IsFlashStatusParamsReceived() && SystemConfig_IsSensorSettingStatusParamsReceived())
        {
            OstTraceInt0(TRACE_FLOW, "->Histogram_AreStatsValid: Flash case");
            return (Flag_e_TRUE);
        }
        return (Flag_e_FALSE);
    }

    // No flash: only the exposure status is needed.
    if (SystemConfig_IsSensorSettingStatusParamsReceived())
    {
        OstTraceInt0(TRACE_FLOW, "->Histogram_AreStatsValid: e_coin_ctrl case");
        return (Flag_e_TRUE);
    }

    return (Flag_e_FALSE);
}
// Constructor of the ARM-NMF processing component: puts every member into a
// known empty state. The real setup happens later during instantiation.
// (Legacy MPC-era fields that used to be zeroed here have long been commented
// out and are dropped from this constructor.)
MPEG4Enc_ArmNmf_ProcessingComponent::MPEG4Enc_ArmNmf_ProcessingComponent(ENS_Component &enscomp): VFM_NmfHost_ProcessingComponent(enscomp)
{
	OstTraceInt0(TRACE_API, "=> MPEG4Enc_ArmNmf_ProcessingComponent::MPEG4Enc_ArmNmf_ProcessingComponent() constructor");

	// no proxy attached yet
	pProxyComponent = (MPEG4Enc_Proxy *)0;

	// parameter buffer descriptor starts out empty
	mParamBufferDesc.nSize = 0;
	mParamBufferDesc.nMpcAddress = 0;
	mParamBufferDesc.nLogicalAddress = 0;
	mParamBufferDesc.nPhysicalAddress = 0;

	// internal buffer descriptor starts out empty
	mInternalBufferDesc.nSize = 0;
	mInternalBufferDesc.nMpcAddress = 0;
	mInternalBufferDesc.nLogicalAddress = 0;
	mInternalBufferDesc.nPhysicalAddress = 0;

	// link-list buffer descriptor starts out empty
	mLinkListBufferDesc.nSize = 0;
	mLinkListBufferDesc.nMpcAddress = 0;
	mLinkListBufferDesc.nLogicalAddress = 0;
	mLinkListBufferDesc.nPhysicalAddress = 0;

	// no memory context allocated yet
	vfm_mem_ctxt = 0;

	OstTraceInt0(TRACE_API, "<= MPEG4Enc_ArmNmf_ProcessingComponent::MPEG4Enc_ArmNmf_ProcessingComponent()");
}
// Reports the (single) video format supported by each port:
//   input  (VPB+0): MPEG2 compressed, color format unused
//   output (VPB+1): uncompressed, YUV420 MB-packed semi-planar
// Returns OMX_ErrorBadPortIndex for any other port index.
OMX_ERRORTYPE MPEG2Dec_Port::getIndexParamVideoPortFormat(OMX_VIDEO_PARAM_PORTFORMATTYPE *portDef){
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::getIndexParamVideoPortFormat");

    // portDef already has the right type; the former cast of portDef to its
    // own type through a local was redundant and has been removed
	RETURN_XXX_IF_WRONG_OST(portDef->nPortIndex==VPB+0 || portDef->nPortIndex==VPB+1, OMX_ErrorBadPortIndex);

	if(portDef->nPortIndex==VPB+1){
		// output port
		portDef->eCompressionFormat = OMX_VIDEO_CodingUnused;
		portDef->eColorFormat = (OMX_COLOR_FORMATTYPE)OMX_SYMBIAN_COLOR_FormatYUV420MBPackedSemiPlanar;
	}
	else{
		// input port
		portDef->eCompressionFormat = OMX_VIDEO_CodingMPEG2;
		portDef->eColorFormat = OMX_COLOR_FormatUnused;
	}
	portDef->nIndex = 0xFF;		  // dummy value: a single format per port
    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::getIndexParamVideoPortFormat");
    return OMX_ErrorNone;
}
// Validates and applies the requested video format on the port:
//   input  (VPB+0) accepts only MPEG2 / FormatUnused
//   output (VPB+1) accepts only Unused / YUV420 MB-packed semi-planar
// On success, stores color format, compression format and framerate in the
// port definition.
OMX_ERRORTYPE MPEG2Dec_Port::setIndexParamVideoPortFormat(OMX_VIDEO_PARAM_PORTFORMATTYPE *portDef){
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::setIndexParamVideoPortFormat");

    // portDef already has the right type; the former cast of portDef to its
    // own type through a local was redundant and has been removed
	RETURN_XXX_IF_WRONG_OST(portDef->nPortIndex==VPB+0 || portDef->nPortIndex==VPB+1, OMX_ErrorBadPortIndex);
	if(portDef->nPortIndex==VPB+1){
		// output port
		RETURN_XXX_IF_WRONG_OST(portDef->eCompressionFormat == OMX_VIDEO_CodingUnused, OMX_ErrorBadParameter);
		RETURN_XXX_IF_WRONG_OST(portDef->eColorFormat== (OMX_COLOR_FORMATTYPE)OMX_SYMBIAN_COLOR_FormatYUV420MBPackedSemiPlanar, OMX_ErrorBadParameter);
	}
	else{
		// input port
		RETURN_XXX_IF_WRONG_OST(portDef->eCompressionFormat == OMX_VIDEO_CodingMPEG2, OMX_ErrorBadParameter);
		RETURN_XXX_IF_WRONG_OST(portDef->eColorFormat == OMX_COLOR_FormatUnused, OMX_ErrorBadParameter);
	}

	mParamPortDefinition.format.video.eColorFormat = portDef->eColorFormat;
	mParamPortDefinition.format.video.eCompressionFormat = portDef->eCompressionFormat;
	mParamPortDefinition.format.video.xFramerate = portDef->xFramerate;
    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::setIndexParamVideoPortFormat");
	return OMX_ErrorNone;
}
// Callback invoked with the characteristics of the decoded stream (profile/
// level, pixel aspect ratio, color primary, frame size, crop). Updates the
// stored parameters and raises OMX_EventPortSettingsChanged events where the
// corresponding VFM extension requires it.
// pFrameInfo: frame information reported by the codec.
EXPORT_C void VFM_NmfHost_ProcessingComponent::VFM_getConfigCB(t_common_frameinfo *pFrameInfo)
{
    VFM_Component *pComponent = (VFM_Component *)(&mENSComponent);
    OMX_U32 nSupportedExtension = pComponent->pParam->getSupportedExtension();
    // this callback is only meaningful on decoders
    vfm_assert_static((pComponent->isDecoder()), __LINE__, OMX_TRUE);

    // Profile and Level: no event is sent!
    OMX_U32 profile, level;
    pComponent->pParam->getProfileLevel(&profile, &level);
    if (profile!=pFrameInfo->eProfile || level!=pFrameInfo->eLevel) {
        pComponent->pParam->setProfileLevel(pFrameInfo->eProfile, pFrameInfo->eLevel);
    }

    // PAR (pixel aspect ratio): event raised on the output port on change
    if (nSupportedExtension & VFM_SUPPORTEDEXTENSION_PAR) {
        OMX_U32 parWidth, parHeight;
        pComponent->pParam->getPAR(&parWidth, &parHeight);
        if (parWidth!=pFrameInfo->nPixelAspectRatioWidth || parHeight!=pFrameInfo->nPixelAspectRatioHeight) {
            pComponent->pParam->setPAR(pFrameInfo->nPixelAspectRatioWidth, pFrameInfo->nPixelAspectRatioHeight);
            eventHandler(OMX_EventPortSettingsChanged, 1, OMX_IndexParamPixelAspectRatio);
        }
    }

    // Color Primary: stored silently (no event)
    if (nSupportedExtension & VFM_SUPPORTEDEXTENSION_PRIMARYCOLOR) {
        OMX_COLORPRIMARYTYPE colorprimary = pComponent->pParam->getColorPrimary();
        if (pFrameInfo->nColorPrimary!=colorprimary) {
            pComponent->pParam->setColorPrimary((OMX_COLORPRIMARYTYPE)pFrameInfo->nColorPrimary);
        }
    }

    // Frame size, with slaving from master port to the slaved port:
    // a size change is applied by re-setting the port definition on the
    // master port (which propagates to its slaves)
    if (nSupportedExtension & VFM_SUPPORTEDEXTENSION_SIZE) {
        if ((pFrameInfo->pic_width != pComponent->getFrameWidth(0)) ||
            (pFrameInfo->pic_height != pComponent->getFrameHeight(0))) {
                OMX_ERRORTYPE omxerror;
                VFM_Port *pMasterPort = ((VFM_Port *)(pComponent->getPort(0)))->getMasterPort();
                OMX_PARAM_PORTDEFINITIONTYPE local_param = *(pMasterPort->getParamPortDefinition());
                local_param.format.video.nFrameWidth = pFrameInfo->pic_width;
                local_param.format.video.nFrameHeight = pFrameInfo->pic_height;
                omxerror = ((ENS_Port *)pMasterPort)->setParameter(OMX_IndexParamPortDefinition, &local_param);
                if ( omxerror != OMX_ErrorNone){
                       OstTraceInt0(TRACE_ERROR,"VFM::VFM_getConfigCB  setParameter returned ERROR when portSettingChanged is detected");
                }
        }
    }

    // Crop: event raised when the cropping rectangle changed
    if (nSupportedExtension & VFM_SUPPORTEDEXTENSION_CROP) {
        if (pComponent->pParam->isChangedCropping_And_Set(pFrameInfo)) {
            eventHandler(OMX_EventPortSettingsChanged, 1, OMX_IndexConfigCommonOutputCrop);
        }
    }
}
// Resolves the physical address backing a logical buffer address.
// bufferAllocInfo: opaque handle to the MMHwBuffer owning the buffer
// aLogicalAddr:    logical address to translate
// aSize:           size of the range being translated
// Returns the physical address (asserts on translation failure).
EXPORT_C
OMX_U32 VFM_NmfMpc_ProcessingComponent::getBufferPhysicalAddress(void * bufferAllocInfo, OMX_U8* aLogicalAddr, OMX_U32 aSize)
{
    OstTraceInt0(TRACE_API,"Entering VFM_NmfMpc_ProcessingComponent getBufferPhysicalAddress\n");

    OMX_U32 physicalAddr;
    OMX_ERRORTYPE omxStatus = ((MMHwBuffer *)bufferAllocInfo)->PhysAddress((OMX_U32)aLogicalAddr, aSize, physicalAddr);
    vfm_assert_static((omxStatus == OMX_ErrorNone), __LINE__, OMX_TRUE);

    return physicalAddr;
}
/**
 \brief   Top-level completion handler for a video pipe.
 \details Runs the per-pipe "video complete" processing, bumps the
          corresponding interrupt counter, and re-enables the pipe's
          colour-engine input datapath gate if it was found disabled.
 \param   pipe_no Pipe index: 0 or 1. Any other value is ignored.
 \return  void
*/
void ProcessVideoCompletePipe(uint8_t pipe_no)
{

#ifdef DEBUG_PROFILER
    START_PROFILER();
#endif

    if(0 == pipe_no)
    {
        VideoComplete_Pipe0();
        g_Interrupts_Count.u16_INT06_VIDEOPIPE0++;

        // Re-enable the CE0 input datapath gate if it is currently disabled
        if(Is_ISP_CE0_IDP_GATE_ISP_CE0_IDP_GATE_ENABLE_mux2to1_enable_DISABLE())
        {
            Set_ISP_CE0_IDP_GATE_ISP_CE0_IDP_GATE_ENABLE(mux2to1_enable_ENABLE, mux2to1_soft_reset_B_0x0);
            OstTraceInt0(TRACE_FLOW, "ENABLING CE0");
        }

        // Guard normalized to #ifdef so it always matches the
        // START_PROFILER() guard above: the original "#if DEBUG_PROFILER"
        // dropped this update (or failed to preprocess) when the macro was
        // defined as 0 or empty while START_PROFILER() still ran.
#ifdef DEBUG_PROFILER
        Profiler_Update(&g_Profile_VID0);
#endif

    }

    else if (1 == pipe_no)
    {
        VideoComplete_Pipe1();
        g_Interrupts_Count.u16_INT07_VIDEOPIPE1++;

        // Re-enable the CE1 input datapath gate if it is currently disabled
        if(Is_ISP_CE1_IDP_GATE_ISP_CE1_IDP_GATE_ENABLE_mux2to1_enable_DISABLE())
        {
            Set_ISP_CE1_IDP_GATE_ISP_CE1_IDP_GATE_ENABLE(mux2to1_enable_ENABLE, mux2to1_soft_reset_B_0x0);
            OstTraceInt0(TRACE_FLOW, "ENABLING CE1");
        }

#ifdef DEBUG_PROFILER
        Profiler_Update(&g_Profile_VID1);
#endif

    }
}
// Example #25
// 0
/**
  Copies the latest R/G/B histogram statistics from the hardware
  statistics buffers into the host-supplied destination buffers.
  Nothing is exported unless all three destination pointers are set.
*/
void
Histogram_ExportStatistics(void)
{
    uint32_t    *ptru32_SrcR, *ptru32_SrcG, *ptru32_SrcB;
    uint32_t    *ptru32_DstR, *ptru32_DstG, *ptru32_DstB;
    uint16_t    u16_Bin,
                u16_NoOfBinsPerChannel;

    OstTraceInt0(TRACE_FLOW, "<HIST>->Histogram_ExportStatistics(void)");

    // Bins per colour channel: 2^(pixel bit depth - configured input shift)
    u16_NoOfBinsPerChannel = (uint16_t) 1 << ((uint16_t) NUM_OF_BITS_PER_PIXEL - g_HistStats_Ctrl.u8_HistPixelInputShift);

    // Destination buffers supplied by the host through the control block
    ptru32_DstR = g_HistStats_Ctrl.ptru32_HistRAddr;
    ptru32_DstG = g_HistStats_Ctrl.ptru32_HistGAddr;
    ptru32_DstB = g_HistStats_Ctrl.ptru32_HistBAddr;

    // Hardware statistics source buffers, one per colour channel
    ptru32_SrcR = ( uint32_t * ) Histogram_GetRStatsAddr();
    ptru32_SrcG = ( uint32_t * ) Histogram_GetGStatsAddr();
    ptru32_SrcB = ( uint32_t * ) Histogram_GetBStatsAddr();

#if HISTOGRAM_DUMP_ENABLE
    OstTraceInt4(TRACE_FLOW, "<HIST> ET: %d AG: %d EL: %d AL: %d ", g_FrameParamStatus.u32_ExposureTime_us, g_FrameParamStatus.u32_AnalogGain_x256, Get_ISP_SMIARX_ISP_SMIARX_COARSE_EXPOSURE_coarse_exposure(), Get_ISP_SMIARX_ISP_SMIARX_ANALOG_GAIN_analog_gain());
#endif

    // Export only when every destination buffer has been provided
    if (ptru32_DstR != 0 && ptru32_DstG != 0 && ptru32_DstB != 0)
    {
        for (u16_Bin = 0; u16_Bin < u16_NoOfBinsPerChannel; u16_Bin++)
        {
            ptru32_DstR[u16_Bin] = ptru32_SrcR[u16_Bin];
            ptru32_DstG[u16_Bin] = ptru32_SrcG[u16_Bin];
            ptru32_DstB[u16_Bin] = ptru32_SrcB[u16_Bin];
#if HISTOGRAM_DUMP_ENABLE
            OstTraceInt3(TRACE_FLOW,"<HIST> %06d\t%06d\t%06d",ptru32_DstR[u16_Bin],ptru32_DstG[u16_Bin],ptru32_DstB[u16_Bin]);
#endif
        }
    }

    OstTraceInt0(TRACE_FLOW, "<HIST><-Histogram_ExportStatistics(void)");
}
// Example #26
// 0
/* Processing entry point: pick up any pending effect reconfiguration,
 * then run the stereo-enhancer processing over the current buffer. */
void METH(processBuffer)(t_sword * inbuf, t_sword * outbuf, t_uint16 size) {

    // A new configuration arrived since the last buffer:
    // apply it once, then clear the pending flag.
    if (1 == mStereoenhancerConfig.mNewConfigAvailable)
    {
        applyEffectConfig(&mStereoenhancerConfig, &mStereoEnhancerData);
        mStereoenhancerConfig.mNewConfigAvailable = 0;
    }

    processFunction(inbuf, outbuf, size);
    OstTraceInt0(TRACE_DEBUG,"StereowidenerNmf::ProcessFunction reached");
}
/**
 * Returns the minimum number of buffers required on this port.
 *
 * Both directions currently report the value stored in the port
 * definition; the switch is kept deliberately because the output
 * count is meant to become level-dependent (see FIXME below).
 */
OMX_U32 MPEG2Dec_Port::getBufferCountMin() const
{
    OstTraceInt0(TRACE_FLOW,"Enter MPEG2Dec_Port::getBufferCountMin");
    // Was a plain int: nBufferCountMin and the return type are OMX_U32,
    // so keep the local unsigned to avoid signed/unsigned conversions.
    OMX_U32 buffer_min = 0;

    switch (getDirection())
        {
        case OMX_DirInput:
            buffer_min = mParamPortDefinition.nBufferCountMin;
            break;
        case OMX_DirOutput:
            // FIXME: use the current level instead of Level4
            buffer_min = mParamPortDefinition.nBufferCountMin;
            break;
        default:
            // A port direction must be either input or output
            DBC_ASSERT(0==1);
        }

    OstTraceInt0(TRACE_FLOW,"Exit MPEG2Dec_Port::getBufferCountMin");
    return buffer_min;
}
/* Records the coded picture dimensions announced by the proxy for use
 * by the decoder on subsequent frames. */
void METH(set_picsize)
		(t_uint16 pic_width,
		 t_uint16 pic_height)
{
    OstTraceInt0(TRACE_API,  "VC1Dec: arm_nmf: decoder: set_picsize()\n");
#if VERBOSE_STANDARD == 1
    NMF_LOG("NMF-ARM decoder: call to set_picsize()\n");
#endif

    // Latch the new dimensions into the decoder state
    picture_height = pic_height;
    picture_width  = pic_width;
}
/**
  \if INCLUDE_IN_HTML_ONLY
  \fn void ITM_ERROR_EOF_ISR(void)
  \brief    Top level ISR for ERROR and EOF
  \return void
  \callgraph
  \callergraph
  \ingroup  InterruptHandler
  \endif
*/
INTERRUPT void
ITM_ERROR_EOF_ISR(void)
{
    OstTraceInt0(TRACE_DEBUG,"<INT> ERROR_EOF");

    // Book-keeping: count the ERROR/EOF interrupts seen so far
    g_Interrupts_Count.u16_INT27_ERROR_EOF++;

    // Invoke the module interrupt service routine
    ErrorEOF_ISR();
}
/**
  \if INCLUDE_IN_HTML_ONLY
  \fn void ClockResetManager_ISR(void)
  \brief    Top level ISR for CRM (Clock Reset Manager)
  \return void
  \callgraph
  \callergraph
  \ingroup  InterruptHandler
  \endif
*/
INTERRUPT void
ClockResetManager_ISR(void)
{
    OstTraceInt0(TRACE_DEBUG,"<INT> CRM");

    // Book-keeping first, matching the ordering used by the other
    // top-level ISRs (see ITM_ERROR_EOF_ISR): count the interrupt
    // before invoking the module handler.
    g_Interrupts_Count.u16_INT23_CRM++;

    // Invoke the module interrupt service routine
    CRM_ISR();

    // NOTE(review): the explicit ITM clear was retired here — presumably
    // the interrupt output is cleared inside CRM_ISR(); confirm before
    // removing this breadcrumb.
//    ITM_Clear_Interrupt_CRM();

    return;
}