Example #1
/**
	Register a new synchronous RPC client.
	@param		initParams (in) RPC client init parameters
	@param		syncInitParams (in) sync-specific init parameters
	@return		Client ID (0 on failure)
**/
RPC_Handle_t RPC_SyncRegisterClient(RPC_InitParams_t *initParams,
				    RPC_SyncInitParams_t *syncInitParams)
{
	RPC_SyncParams_t *internalParam =
	    (RPC_SyncParams_t *) OSHEAP_Alloc(sizeof(RPC_SyncParams_t));
	if (!internalParam) {
		panic("RPC_SyncRegisterClient: OSHEAP_Alloc failed");
		return 0;
	}
	/* **FIXME** MAG - need special CP reset handler in here? Release all
	   semaphores for pending requests?
	 */
	internalParam->clientParams = *initParams;
	internalParam->SyncRpcParams = *initParams;

	internalParam->SyncRpcParams.respCb = RPC_SyncHandleResponse;
	internalParam->SyncRpcParams.ackCb = RPC_SyncHandleAck;
	internalParam->SyncRpcParams.userData = (UInt32)internalParam;
	internalParam->SyncRpcParams.rpcNtfFn = RPC_SyncNotification;

	internalParam->syncInitParams = *syncInitParams;
	pr_info("RPC_SyncRegisterClient: calling RPC_SYS_RegisterClient\n");

	return RPC_SYS_RegisterClient(&internalParam->SyncRpcParams);
}
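The interesting part of this example is the userData word: the freshly allocated RPC_SyncParams_t is packed into SyncRpcParams.userData before registration, so the RPC layer can hand it back to the callbacks. A minimal sketch of how a response callback in this style might recover it follows; the actual signature of RPC_SyncHandleResponse is not part of this listing, so the parameter list below is assumed.

/* Illustrative sketch only -- the real RPC_SyncHandleResponse signature and
   message type are not shown in this listing; both are assumptions here. */
static void Example_HandleResponse(void *msg, UInt32 userData)
{
	/* recover the per-client context registered in RPC_SyncRegisterClient */
	RPC_SyncParams_t *internalParam = (RPC_SyncParams_t *)userData;

	/* ... match msg against the pending synchronous request stored in
	   internalParam and release the semaphore the caller is blocked on ... */
	(void)msg;
	(void)internalParam;
}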
Example #2
// ===================================================================
//
// Function Name: ConfigAudDrv
//
// Description: Configure the driver.
//
// ====================================================================
static Result_t ConfigAudDrv (VOCAPTURE_Drv_t *audDrv, 
							  VOCAPTURE_Configure_t    *config)
{
	//////////////////////////////////////////////////////////////////////////
	// callbackThreshold is not used here.
	// If interruptInterval is set to non-zero through the _SetTransferParameters()
	// API, numBlocks and blockSize will be set according to the user's request;
	// otherwise default values are used.
	// This is not done for AMRWB yet because the DSP interface is asynchronous.
	//////////////////////////////////////////////////////////////////////////

	if (audDrv->interruptInterval == 0)
	{
		// use default
		audDrv->numFramesPerInterrupt = 4;
	}
	else
	{
		audDrv->numFramesPerInterrupt = audDrv->interruptInterval / DSP_RECORD_FRAME_DURATION;
		if (audDrv->numFramesPerInterrupt == 0)
			audDrv->numFramesPerInterrupt = 1;
	}
	
	memcpy (&audDrv->config, config, sizeof(VOCAPTURE_Configure_t));

	OSHEAP_Delete(config);

	switch (audDrv->drvType)
	{
		case VOCAPTURE_TYPE_AMRNB:
			// queue about 1 second of AMR-NB data in total (50 frames)
			audDrv->bufferSize = sizeof(VR_Frame_AMR_t) * audDrv->numFramesPerInterrupt;
			audDrv->bufferNum = 50/audDrv->numFramesPerInterrupt; 
			break;
		case VOCAPTURE_TYPE_PCM:
			// queue about 1 second of linear PCM data in total (50 frames)
			audDrv->bufferSize = LIN_PCM_FRAME_SIZE*sizeof(UInt16)*audDrv->numFramesPerInterrupt; //320 bytes
			if (audDrv->config.speechMode == VP_SPEECH_MODE_LINEAR_PCM_16K)
				audDrv->bufferSize = WB_LIN_PCM_FRAME_SIZE*sizeof(UInt16)*audDrv->numFramesPerInterrupt;
			audDrv->bufferNum = 50/audDrv->numFramesPerInterrupt; 
			break;

		case VOCAPTURE_TYPE_AMRWB:
			audDrv->bufferSize = 0x48;//0x21c; //in  bytes
			audDrv->bufferNum = AUDIO_SIZE_PER_PAGE/audDrv->bufferSize*8;	
			break;

		default:
			Log_DebugPrintf(LOGID_AUDIO, "ConfigAudDrv:: Doesn't support audio driver type drvType = 0x%x\n", audDrv->drvType);
			break;
	}

	audDrv->ringBuffer = (UInt8 *)OSHEAP_Alloc (audDrv->bufferNum*audDrv->bufferSize);
	audDrv->audQueue = AUDQUE_Create (audDrv->ringBuffer, audDrv->bufferNum, audDrv->bufferSize);

	Log_DebugPrintf(LOGID_AUDIO, " : ConfigAudDrv::\n");

	return RESULT_OK;
}
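To make the sizing arithmetic above concrete, here is a small standalone sketch of the interruptInterval-to-frames conversion and the resulting buffer count. It assumes DSP_RECORD_FRAME_DURATION is 20 ms (the usual speech frame duration); the constant's real value is not shown in this listing.

#include <stdio.h>

/* Assumed value; the real DSP_RECORD_FRAME_DURATION is not shown in this listing. */
#define FRAME_DURATION_MS 20u

int main(void)
{
	unsigned intervals[] = { 0u, 10u, 40u, 100u };	/* interruptInterval in ms */
	unsigned i;

	for (i = 0; i < sizeof(intervals) / sizeof(intervals[0]); i++) {
		unsigned framesPerInt;

		if (intervals[i] == 0u) {
			framesPerInt = 4u;		/* default path */
		} else {
			framesPerInt = intervals[i] / FRAME_DURATION_MS;
			if (framesPerInt == 0u)
				framesPerInt = 1u;	/* clamp, as ConfigAudDrv does */
		}

		/* the capture driver queues roughly 1 second of data: 50 frames total */
		printf("interval=%3ums -> framesPerInterrupt=%u, bufferNum=%u\n",
		       intervals[i], framesPerInt, 50u / framesPerInt);
	}
	return 0;
}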
Example #3
/**
	Add a callback context to the current task's tid mapping.
	@param		val (in) context value to be added
	@return		New context handle (the context pointer cast to UInt32)
**/
UInt32 RPC_SyncAddCbkToTid(UInt32 val)
{
	RPC_SyncContext_t *ctx;
	TaskRequestMap_t *taskMap = GetMapForCurrentTask();
	assert(taskMap);

	ctx = OSHEAP_Alloc(sizeof(RPC_SyncContext_t));
	assert(ctx != NULL);
	ctx->sig = 0xBABEFACE;
	ctx->val = val;

	_DBG_(RPC_TRACE
	      ("RPC_SyncAddCbkToTid oldTid=%d newTid=%ld TaskID=%p\r\n",
	       (int)taskMap->tid, (UInt32)ctx, taskMap->task));

	taskMap->tid = (UInt32)ctx;

	return (UInt32)ctx;
}
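Since the context pointer overwrites taskMap->tid, readers of that field need a way to tell a packed context from a plain task id. Below is a minimal sketch of such a check, keyed on the 0xBABEFACE signature written above; the helper name is made up for illustration and the real retrieval/removal routine is not part of this listing.

/* Hypothetical helper, for illustration only. */
static RPC_SyncContext_t *Example_GetCbkContext(TaskRequestMap_t *taskMap)
{
	RPC_SyncContext_t *ctx = (RPC_SyncContext_t *)taskMap->tid;

	/* a real task id will not carry the context signature */
	if (ctx != NULL && ctx->sig == 0xBABEFACE)
		return ctx;

	return NULL;
}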
Example #4
//===================================================================
//
// Function Name: AUDDRV_VoiceRender_SetConfig
//
// Description: Configure the voice render driver; set parameters before
// starting render.
//
//===================================================================
Result_t AUDDRV_VoiceRender_SetConfig(
                        VORENDER_TYPE_t				type,
						VORENDER_PLAYBACK_MODE_t	playbackMode,
						VORENDER_VOICE_MIX_MODE_t   mixMode,
						AUDIO_SAMPLING_RATE_t		samplingRate,
						UInt32						speechMode, // used by AMRNB and AMRWB
						UInt32						dataRateSelection // used by AMRNB and AMRWB     
					)
{
	VORENDER_Drv_t	*audDrv = NULL;
	VORENDER_Configure_t	*config;
	VORENDER_MSG_t	msg;
	
	audDrv = GetDriverByType (type);

	if (audDrv == NULL)
		return RESULT_ERROR;

	config = (VORENDER_Configure_t *)OSHEAP_Alloc(sizeof(VORENDER_Configure_t));
	if (config == NULL)
		return RESULT_ERROR;

	config->playbackMode = playbackMode;
	config->mixMode = mixMode;
	config->samplingRate = samplingRate;
	config->speechMode = speechMode;
	config->dataRateSelection = dataRateSelection;
	
	memset (&msg, 0, sizeof(VORENDER_MSG_t));

	msg.msgID = VORENDER_MSG_CONFIG;
	msg.parm1 = (UInt32)config;

	OSQUEUE_Post(audDrv->msgQueue, (QMsg_t*)&msg, TICKS_FOREVER);
	

	return RESULT_OK;
}
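Note the ownership pattern here: config is heap-allocated so it can outlive this call and travel through the message queue as parm1; the driver task that pops VORENDER_MSG_CONFIG is then responsible for consuming and freeing it (the VORENDER ConfigAudDrv later in this listing does exactly that with OSHEAP_Delete). A hedged sketch of that consumer side follows; the queue-wait call OSQUEUE_Pend and the task-loop shape are assumptions, since only OSQUEUE_Post appears in these examples.

/* Hypothetical consumer loop, for illustration only. OSQUEUE_Pend is an
   assumed counterpart to OSQUEUE_Post; it is not shown in this listing. */
static void Example_RenderTaskLoop(VORENDER_Drv_t *audDrv)
{
	VORENDER_MSG_t msg;

	for (;;) {
		OSQUEUE_Pend(audDrv->msgQueue, (QMsg_t *)&msg, TICKS_FOREVER);

		if (msg.msgID == VORENDER_MSG_CONFIG) {
			/* ConfigAudDrv copies the config and calls OSHEAP_Delete on it */
			ConfigAudDrv(audDrv, (VORENDER_Configure_t *)msg.parm1);
		}
	}
}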
Example #5
// ===================================================================
//
// Function Name: AUDDRV_VoiceCapture_SetConfig
//
// Description: Configure the voice capture driver;
// set parameters before starting capture.
//
// ====================================================================
Result_t AUDDRV_VoiceCapture_SetConfig(
                        VOCAPTURE_TYPE_t      type,
						UInt32				speech_mode,
						UInt8				amr_data_rate,
						VOCAPTURE_RECORD_MODE_t record_mode,
                        AUDIO_SAMPLING_RATE_t    sample_rate,
						Boolean				audio_proc_enable,
						Boolean				vp_dtx_enable)
{
	VOCAPTURE_Drv_t	*audDrv = NULL;
	VOCAPTURE_Configure_t	*config;
	VOCAPTURE_MSG_t	msg;
	
	audDrv = GetDriverByType (type);

	if (audDrv == NULL)
		return RESULT_ERROR;

	config = (VOCAPTURE_Configure_t *)OSHEAP_Alloc(sizeof(VOCAPTURE_Configure_t));
	if (config == NULL)
		return RESULT_ERROR;

	config->speechMode = speech_mode;
	config->dataRate = amr_data_rate;
	config->recordMode = record_mode;
	config->samplingRate = sample_rate;
	config->procEnable = audio_proc_enable;
	config->dtxEnable = vp_dtx_enable;

	memset (&msg, 0, sizeof(VOCAPTURE_MSG_t));
	msg.msgID = VOCAPTURE_MSG_CONFIG;
	msg.parm1 = (UInt32)config;

	OSQUEUE_Post(audDrv->msgQueue, (QMsg_t*)&msg, TICKS_FOREVER);
	

	return RESULT_OK;
}
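For reference, a call to the function above might look like the sketch below: recording AMR-NB with audio processing on and DTX off. The speech-mode and data-rate values are placeholders (their encodings are not shown in this listing), and VOCAPTURE_RECORD_MODE_NORMAL, AUDIO_SAMPLING_RATE_8000, TRUE and FALSE are assumed identifiers.

/* Illustrative call only; placeholder and assumed values are marked below. */
Result_t res = AUDDRV_VoiceCapture_SetConfig(
		VOCAPTURE_TYPE_AMRNB,
		0,				/* speech_mode: placeholder encoding */
		0,				/* amr_data_rate: placeholder encoding */
		VOCAPTURE_RECORD_MODE_NORMAL,	/* assumed enumerator */
		AUDIO_SAMPLING_RATE_8000,	/* assumed enumerator */
		TRUE,				/* audio_proc_enable */
		FALSE);				/* vp_dtx_enable */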
Example #6
void *RPC_SyncAllocFromHeap(UInt32 size)
{
	return OSHEAP_Alloc(size);
}
Example #7
// ==============================================================================
// Function Name: ConfigAudDrv
//
// Description: Configure the voice render driver with the passed-in configuration.
// ================================================================================
static Result_t ConfigAudDrv (VORENDER_Drv_t *audDrv, 
							  VORENDER_Configure_t    *config)
{
	UInt32 ringBufferFrames;

	memcpy (&audDrv->config, config, sizeof(VORENDER_Configure_t));

	OSHEAP_Delete(config);

	//////////////////////////////////////////////////////////////////////////
	// If callbackThreshold and interruptInterval are set to non-zero through the
	// _SetTransferParameters() API, numBlocks and blockSize will be set according
	// to the user's request; otherwise default values are used.
	// This is not done for AMRWB yet because the DSP interface is asynchronous.
	//////////////////////////////////////////////////////////////////////////

	if (audDrv->callbackThreshold == 0 || audDrv->interruptInterval == 0)
	{
		// use default
		audDrv->numFramesPerInterrupt = 4;
		// by default, queue about 1 second of data (50 frames)
		ringBufferFrames = 50;
	}
	else
	{
		audDrv->numFramesPerInterrupt = audDrv->interruptInterval / DSP_RENDER_FRAME_DURATION;
		if (audDrv->numFramesPerInterrupt == 0)
			audDrv->numFramesPerInterrupt = 1;
		ringBufferFrames = audDrv->callbackThreshold / DSP_RENDER_FRAME_DURATION;
		if (ringBufferFrames == 0)
			ringBufferFrames = 1;
	}

	switch (audDrv->drvType)
	{
		case VORENDER_TYPE_AMRNB:
			// one block per interrupt's worth of AMR-NB frames
			audDrv->bufferSize = sizeof(VR_Frame_AMR_t)*audDrv->numFramesPerInterrupt;
			audDrv->bufferNum = ringBufferFrames/audDrv->numFramesPerInterrupt + 1; 

			break;
		case VORENDER_TYPE_PCM_VPU:
			// one block per interrupt's worth of linear PCM frames
			audDrv->bufferSize = (LIN_PCM_FRAME_SIZE*sizeof(UInt16))*audDrv->numFramesPerInterrupt; //320 bytes
			audDrv->bufferNum = ringBufferFrames/audDrv->numFramesPerInterrupt + 1; 

			break;

		case VORENDER_TYPE_PCM_ARM2SP:
		case VORENDER_TYPE_PCM_ARM2SP2:			
			// ARM2SP path: queue linear PCM blocks
			audDrv->bufferSize = (ARM2SP_INPUT_SIZE/4)*audDrv->numFramesPerInterrupt; //1280 bytes, 4 frames
			if (audDrv->config.samplingRate == AUDIO_SAMPLING_RATE_16000)
				audDrv->bufferSize *= 2;
			audDrv->bufferNum = ringBufferFrames/audDrv->numFramesPerInterrupt + 1; // ARM2SP always transfers 4 frames at a time, not configurable
			break;
			
		case VORENDER_TYPE_AMRWB:
			// amount of data copied to shared memory per transfer, on a RIP ISR
			// from the DSP or on new data from OMX
			audDrv->bufferSize = 0x400; // double the DSP threshold size
			audDrv->bufferNum = AUDIO_SIZE_PER_PAGE/audDrv->bufferSize*4;
			break;

		default:
			Log_DebugPrintf(LOGID_AUDIO, "ConfigAudDrv:: Doesn't support audio driver type drvType = 0x%x\n", audDrv->drvType);
			break;
	}

	audDrv->ringBuffer = (UInt8 *)OSHEAP_Alloc (audDrv->bufferNum*audDrv->bufferSize);
	audDrv->audQueue = AUDQUE_Create (audDrv->ringBuffer, audDrv->bufferNum, audDrv->bufferSize);

	Log_DebugPrintf(LOGID_AUDIO, " ConfigAudDrv::audio driver type drvType = 0x%x, bufferSize = 0x%x, bufferNum = 0x%x\n", 
							audDrv->drvType, audDrv->bufferSize, audDrv->bufferNum);

	return RESULT_OK;
}
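As a concrete check of the non-default path above, here is a tiny standalone sketch of the threshold-driven sizing, again assuming a 20 ms DSP_RENDER_FRAME_DURATION (the real value is not shown in this listing).

#include <stdio.h>

#define RENDER_FRAME_MS 20u	/* assumed DSP_RENDER_FRAME_DURATION */

int main(void)
{
	unsigned interruptInterval = 60u;	/* ms, from _SetTransferParameters() */
	unsigned callbackThreshold = 300u;	/* ms */

	unsigned framesPerInt = interruptInterval / RENDER_FRAME_MS;	/* 3 */
	unsigned ringFrames   = callbackThreshold / RENDER_FRAME_MS;	/* 15 */

	/* the AMR-NB / PCM cases keep one spare block beyond the callback threshold */
	printf("framesPerInterrupt=%u, bufferNum=%u\n",
	       framesPerInt, ringFrames / framesPerInt + 1u);	/* prints 3, 6 */
	return 0;
}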
Example #8
//**************************************************
IPC_BufferPool IPC_CreateBufferPoolWithDescriptor
(
	IPC_EndpointId_T		SourceEndpointId,
	IPC_EndpointId_T		DestinationEndpointId,
	IPC_U32					NumberOfBuffers,
	IPC_U32					BufferSize,
	IPC_U32					FlowStartLimit,
	IPC_U32					FlowStopLimit,
	IPC_U32					LocalDescriptorSize
)
{
	IPC_U32				MaxDataSize		= ALIGN4 (BufferSize);
	IPC_BufferPool		Pool;
	IPC_BufferPool_T *	PoolPtr;
	IPC_Endpoint		DestinationEpPtr;
	IPC_SmPtr			Buffer;
	IPC_U32				Id;
	char *				LocalData;

	IPC_TRACE (IPC_Channel_Pool, "IPC_CreateBufferPool",
				"Source %02X, Destination %02X, Buffer Count %d, Buffer Size %d",
				SourceEndpointId, DestinationEndpointId, NumberOfBuffers, BufferSize);

	// Sanity Checks
	if (NumberOfBuffers == 0)
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "Invalid NumberOfBuffers %d", NumberOfBuffers, 0, 0, 0);
		return 0;
	}

	if (!IPC_SmEndpointInfo (SourceEndpointId))
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "Invalid Source Endpoint %d", SourceEndpointId, 0, 0, 0);
		return 0;
	}

	if (0 == (DestinationEpPtr = IPC_SmEndpointInfo (DestinationEndpointId)))
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "Invalid Destination Endpoint %d", DestinationEndpointId, 0, 0, 0);
		return 0;
	}

	if (FlowStartLimit > NumberOfBuffers)
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "Invalid FlowStartLimit %d", FlowStartLimit, 0, 0, 0);
		return 0;
	}

	if (FlowStopLimit >= NumberOfBuffers)
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "Invalid FlowStopLimit %d", FlowStopLimit, 0, 0, 0);
		return 0;
	}

	// Allocate Sm For Pool
	Pool = IPC_SmPoolAlloc (sizeof (IPC_BufferPool_T), DestinationEpPtr->MaxHeaderSize, MaxDataSize, NumberOfBuffers);

	if (!Pool)
	{
		IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "IPC_SmPoolAlloc Failed", 0, 0, 0, 0);
		return 0;
	}

	if (LocalDescriptorSize != 0)
	{
#ifdef UNDER_LINUX
        // Use kmalloc instead of OSHEAP_Alloc on the Linux platform
		LocalData = kmalloc ((LocalDescriptorSize * NumberOfBuffers), GFP_KERNEL);
#else
		LocalData = (char *) OSHEAP_Alloc (LocalDescriptorSize * NumberOfBuffers);
#endif  // UNDER_LINUX

		if (!LocalData)
		{
			IPC_TRACE (IPC_Channel_Error, "IPC_CreateBufferPool", "LocalData allocation failed", 0, 0, 0, 0);
			return 0;
		}
	} else {
		LocalData = 0;
	}

	// Initialise Pool
	PoolPtr	= IPC_PoolPtr(Pool);

	PoolPtr->Cpu					= IPC_SM_CURRENT_CPU;
	PoolPtr->SourceEndpointId		= SourceEndpointId;
	PoolPtr->DestinationEndpointId	= DestinationEndpointId;
	PoolPtr->MaxDataSize			= MaxDataSize;
	PoolPtr->MaxHeaderSize			= DestinationEpPtr->MaxHeaderSize;
	PoolPtr->FlowStartLimit			= FlowStartLimit;
	PoolPtr->FlowStopLimit			= FlowStopLimit;
	PoolPtr->FlowControlState		= IPC_FLOW_START;
	PoolPtr->FlowControlCallPending	= IPC_FALSE;
	PoolPtr->FreeBuffers			= NumberOfBuffers;
	PoolPtr->MaxBuffers				= NumberOfBuffers;
	PoolPtr->LowWaterMark			= NumberOfBuffers;
	PoolPtr->NextPool				= 0;
	PoolPtr->BufferFreeFunction		= NULL;
	PoolPtr->AllocationFailures		= 0;
	PoolPtr->Allocations			= 0;
	PoolPtr->BytesSent				= 0;
	PoolPtr->FlowStopCalls			= 0;
	PoolPtr->FlowStartCalls			= 0;

	PoolPtr->EmptyEvent				= IPC_EVENT_CREATE;

	IPC_QInitialise			(IPC_SmOffset(&PoolPtr->FreeBufferQ), Pool);
	IPC_QInitialise			(IPC_SmOffset(&PoolPtr->AllocatedBufferQ), Pool);



	// Initialise Buffers in pool
	Buffer = Pool + sizeof (IPC_BufferPool_T);

	for (Id = 0; Id < NumberOfBuffers; Id++)
	{
		IPC_BufferToPtr (Buffer)->LocalData = LocalData;

		LocalData += LocalDescriptorSize;

		IPC_QAddBack (Buffer, IPC_POOLFreeQ(Pool));
		Buffer = IPC_BufferInitialise (Pool, Buffer, Id, PoolPtr->MaxHeaderSize, MaxDataSize);

	}

	// For Debug
	{
		IPC_PoolList_T * EpPools = &PoolList [PoolPtr->SourceEndpointId];

		if (EpPools->Count < IPC_POOLLIST_LENGTH)
		{
			EpPools->Pool [EpPools->Count++] = PoolPtr;
		}
	}

	IPC_TRACE (IPC_Channel_Pool, "IPC_CreateBufferPool", "Pool %08X", Pool, 0, 0, 0);

	return Pool;
}
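A usage sketch for the pool creator above: 16 buffers of 512 bytes, with flow control stopping when 2 buffers remain free and restarting at 8, plus a 32-byte local descriptor per buffer. The endpoint IDs are placeholders; real values come from the IPC endpoint definitions, which are not part of this listing.

/* Illustrative only; srcEp and dstEp are placeholder endpoint IDs. */
IPC_BufferPool pool = IPC_CreateBufferPoolWithDescriptor(
		srcEp,		/* SourceEndpointId (placeholder) */
		dstEp,		/* DestinationEndpointId (placeholder) */
		16,		/* NumberOfBuffers */
		512,		/* BufferSize */
		8,		/* FlowStartLimit: must not exceed NumberOfBuffers */
		2,		/* FlowStopLimit: must be below NumberOfBuffers */
		32);		/* LocalDescriptorSize (0 disables local descriptors) */

if (pool == 0) {
	/* invalid parameters or shared-memory pool allocation failure */
}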