Example #1
void BaseVideoFilter::UpdateBufferSize()
{
	uint32_t newBufferSize = GetFrameInfo().Width*GetFrameInfo().Height*GetFrameInfo().BitsPerPixel;
	if(_bufferSize != newBufferSize) {
		_frameLock.Acquire();
		if(_outputBuffer) {
			delete[] _outputBuffer;
		}

		_bufferSize = newBufferSize;
		_outputBuffer = new uint8_t[newBufferSize];
		_frameLock.Release();
	}
}
Example #2
//maks
wxIFMLayoutData::wxIFMLayoutData()
{
	bShowTitleBar = true;
	bMaximized = false;

	GetFrameInfo();
}
Example #3
//++ ------------------------------------------------------------------------------------
// Details:	Form MI partial response by appending more MI value type objects to the
//			tuple type object passed in.
// Type:	Method.
// Args:	vrThread		- (R) LLDB thread object.
//			vnLevel			- (R) Frame level (index).
//			vwrMiValueTuple	- (W) MI value tuple object.
// Return:	MIstatus::success - Function succeeded.
//			MIstatus::failure - Function failed.
// Throws:	None.
//--
bool CMICmnLLDBDebugSessionInfo::MIResponseFormFrameInfo( const lldb::SBThread & vrThread, const MIuint vnLevel, CMICmnMIValueTuple & vwrMiValueTuple )
{
	lldb::SBThread & rThread = const_cast< lldb::SBThread & >( vrThread );
	
	lldb::SBFrame frame = rThread.GetFrameAtIndex( vnLevel );
	lldb::addr_t pc = 0;
	CMIUtilString fnName;
	CMIUtilString fileName;
	CMIUtilString path; 
	MIuint nLine = 0;
	if( !GetFrameInfo( frame, pc, fnName, fileName, path, nLine ) )
		return MIstatus::failure;
	
	CMICmnMIValueList miValueList( true );
	const MIuint vMaskVarTypes = 0x1000;
	if( !MIResponseFormVariableInfo( frame, vMaskVarTypes, miValueList ) )
		return MIstatus::failure;

	// MI print "{level=\"0\",addr=\"0x%08llx\",func=\"%s\",args=[%s],file=\"%s\",fullname=\"%s\",line=\"%d\"}"
	const CMIUtilString strLevel( CMIUtilString::Format( "%d", vnLevel ) );
	const CMICmnMIValueConst miValueConst( strLevel );
	const CMICmnMIValueResult miValueResult( "level", miValueConst );
	CMICmnMIValueTuple miValueTuple( miValueResult );
	if( !MIResponseFormFrameInfo( pc, fnName, miValueList.GetString(), fileName, path, nLine, miValueTuple ) )
		return MIstatus::failure;

	vwrMiValueTuple = miValueTuple;

	return MIstatus::success;
}
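For orientation, a frame tuple assembled this way is emitted on the MI stream roughly as follows; the values are illustrative only, not taken from a real LLDB session:

{level="0",addr="0x0000000100000f50",func="main",args=[{name="argc",value="1"}],file="main.c",fullname="/tmp/main.c",line="12"}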
Example #4
/*************************************************************************
 * Function: PvtTimecodeToOffset()
 *
 *      Given a timecode and a frame rate, this function returns a
 *      position relative to the beginning of a track.
 *
 * Argument Notes:
 *
 * ReturnValue:
 *		Error code (see below).
 *
 * Possible Errors:
 *		Standard errors (see top of file).
 *************************************************************************/
aafErr_t PvtTimecodeToOffset(
	aafInt16 frameRate,  /* IN - Frame Rate */
	aafInt16 hours,      /* IN - Hours value of Timecode */
	aafInt16 minutes,    /* IN - Minutes value of Timecode */
	aafInt16 seconds,    /* IN - Seconds value of Timecode */
	aafInt16 frames,     /* IN - Frames value of Timecode */
	aafDropType_t drop,  /* IN - Drop of non-drop Timecode */
	aafFrameOffset_t	*result) /* OUT - resulting position */

{
	aafUInt32	val;
	frameTbl_t	info;
	
	info = GetFrameInfo(frameRate);
	if(drop)
		{
		val = (hours * info.dropFpHour);
		val += ((minutes / 10) * info.dropFpMin10);
		val += (minutes % 10) * info.dropFpMin;
		}
	else
		{
		val = hours * info.fpHour;
		val += minutes * info.fpMinute;
		}

	val += seconds * frameRate;
	val += frames;
	
	*result = val;

	return(AAFRESULT_SUCCESS);
}
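A rough usage sketch (not from the AAF SDK itself) helps check the drop-frame arithmetic above. It assumes the usual NTSC drop-frame table, i.e. GetFrameInfo(30) filling in dropFpMin = 1798, dropFpMin10 = 17982 and dropFpHour = 107892, and assumes kAAFTcDrop as the drop flag:

	aafFrameOffset_t offset = 0;
	/* Hypothetical caller: timecode 01:02:03;04 at 30 fps drop-frame. */
	if (PvtTimecodeToOffset(30, 1, 2, 3, 4, kAAFTcDrop, &offset) == AAFRESULT_SUCCESS)
	{
		/* With the table above: 1*107892 + 0*17982 + 2*1798 + 3*30 + 4 = 111582 frames. */
	}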
Example #5
//++ ------------------------------------------------------------------------------------
// Details: Form MI partial response by appending more MI value type objects to the
//          tuple type object passed in.
// Type:    Method.
// Args:    vrThread          - (R) LLDB thread object.
//          vnLevel           - (R) Frame level (index).
//          veFrameInfoFormat - (R) Frame information format.
//          vwrMiValueTuple   - (W) MI value tuple object.
// Return:  MIstatus::success - Function succeeded.
//          MIstatus::failure - Function failed.
// Throws:  None.
//--
bool
CMICmnLLDBDebugSessionInfo::MIResponseFormFrameInfo(const lldb::SBThread &vrThread, const MIuint vnLevel,
                                                    const FrameInfoFormat_e veFrameInfoFormat, CMICmnMIValueTuple &vwrMiValueTuple)
{
    lldb::SBThread &rThread = const_cast<lldb::SBThread &>(vrThread);

    lldb::SBFrame frame = rThread.GetFrameAtIndex(vnLevel);
    lldb::addr_t pc = 0;
    CMIUtilString fnName;
    CMIUtilString fileName;
    CMIUtilString path;
    MIuint nLine = 0;
    if (!GetFrameInfo(frame, pc, fnName, fileName, path, nLine))
        return MIstatus::failure;

    // MI print "{level=\"0\",addr=\"0x%016" PRIx64 "\",func=\"%s\",file=\"%s\",fullname=\"%s\",line=\"%d\"}"
    const CMIUtilString strLevel(CMIUtilString::Format("%d", vnLevel));
    const CMICmnMIValueConst miValueConst(strLevel);
    const CMICmnMIValueResult miValueResult("level", miValueConst);
    vwrMiValueTuple.Add(miValueResult);
    const CMIUtilString strAddr(CMIUtilString::Format("0x%016" PRIx64, pc));
    const CMICmnMIValueConst miValueConst2(strAddr);
    const CMICmnMIValueResult miValueResult2("addr", miValueConst2);
    vwrMiValueTuple.Add(miValueResult2);
    const CMICmnMIValueConst miValueConst3(fnName);
    const CMICmnMIValueResult miValueResult3("func", miValueConst3);
    vwrMiValueTuple.Add(miValueResult3);
    if (veFrameInfoFormat != eFrameInfoFormat_NoArguments)
    {
        CMICmnMIValueList miValueList(true);
        const MIuint maskVarTypes = eVariableType_Arguments;
        if (veFrameInfoFormat == eFrameInfoFormat_AllArgumentsInSimpleForm)
        {
            if (!MIResponseFormVariableInfo(frame, maskVarTypes, eVariableInfoFormat_AllValues, miValueList, 0))
                return MIstatus::failure;
        }
        else
            if (!MIResponseFormVariableInfo(frame, maskVarTypes, eVariableInfoFormat_AllValues, miValueList))
                return MIstatus::failure;

        const CMICmnMIValueResult miValueResult4("args", miValueList);
        vwrMiValueTuple.Add(miValueResult4);
    }
    const CMICmnMIValueConst miValueConst5(fileName);
    const CMICmnMIValueResult miValueResult5("file", miValueConst5);
    vwrMiValueTuple.Add(miValueResult5);
    const CMICmnMIValueConst miValueConst6(path);
    const CMICmnMIValueResult miValueResult6("fullname", miValueConst6);
    vwrMiValueTuple.Add(miValueResult6);
    const CMIUtilString strLine(CMIUtilString::Format("%d", nLine));
    const CMICmnMIValueConst miValueConst7(strLine);
    const CMICmnMIValueResult miValueResult7("line", miValueConst7);
    vwrMiValueTuple.Add(miValueResult7);

    return MIstatus::success;
}
Example #6
bool Frame::HasAvailableData() const
{
	long centerId = GetFrameInfo().centerId;

	bool hasPoleRa = (bodfnd_c(centerId, "POLE_RA") != SPICEFALSE);
	bool hasPoleDec = (bodfnd_c(centerId, "POLE_DEC") != SPICEFALSE);
	bool hasPm = (bodfnd_c(centerId, "PM") != SPICEFALSE);
	bool centerHasIAUFrameData = hasPoleRa && hasPoleDec && hasPm;

	bool isIAUFrame = GetSpiceName().substr(0, 3) == "IAU";

	return (isIAUFrame && centerHasIAUFrameData) || HasLimitedCoverage();
}
Example #7
//++ ------------------------------------------------------------------------------------
// Details:	Retrieve the specified thread's frame information.
// Type:	Method.
// Args:	vCmdData		- (R) A command's information.
//			vThreadIdx		- (R) Thread index.
//			vwrThreadFrames	- (W) Frame data.
// Return:	MIstatus::success - Function succeeded.
//			MIstatus::failure - Function failed.
// Throws:	None.
//--
bool CMICmnLLDBDebugSessionInfo::GetThreadFrames( const SMICmdData & vCmdData, const MIuint vThreadIdx, CMICmnMIValueTuple & vwrThreadFrames ) 
{
	lldb::SBThread thread = m_lldbProcess.GetThreadByIndexID( vThreadIdx );
	const uint32_t nFrames = thread.GetNumFrames();
	if( nFrames == 0 )
	{
		vwrThreadFrames = CMICmnMIValueTuple();
		return MIstatus::success;
	}

	CMICmnMIValueTuple miValueTupleAll;
	for( MIuint nLevel = 0; nLevel < nFrames; nLevel++ )
	{
		lldb::SBFrame frame = thread.GetFrameAtIndex( nLevel );
		lldb::addr_t pc = 0;
		CMIUtilString fnName;
		CMIUtilString fileName;
		CMIUtilString path; 
		MIuint nLine = 0;
		if( !GetFrameInfo( frame, pc, fnName, fileName, path, nLine ) )
			return MIstatus::failure;

		// Function args
		CMICmnMIValueList miValueList( true );
		const MIuint vMaskVarTypes = 0x1000;
		if( !MIResponseFormVariableInfo( frame, vMaskVarTypes, miValueList ) )
			return MIstatus::failure;

		const MIchar * pUnknown = "??";
		if( fnName != pUnknown )
		{
			std::replace( fnName.begin(), fnName.end(), ')', ' ' );
			std::replace( fnName.begin(), fnName.end(), '(', ' ' );
			std::replace( fnName.begin(), fnName.end(), '\'', ' ' );
		}

		const CMIUtilString strLevel( CMIUtilString::Format( "%d", nLevel ) );
		const CMICmnMIValueConst miValueConst( strLevel );
		const CMICmnMIValueResult miValueResult( "level", miValueConst );
		miValueTupleAll.Add( miValueResult );
		
		CMICmnMIValueTuple miValueTuple( miValueResult );
		if( !MIResponseFormFrameInfo( pc, fnName, miValueList.GetString(), fileName, path, nLine, miValueTuple ) )
			return MIstatus::failure;
	}

	vwrThreadFrames = miValueTupleAll;

	return MIstatus::success;
}
Example #8
void Frame::Construct(int spiceId, const std::string& name)
{
	if(!ValidateId(spiceId))
		CSpiceUtil::SignalError("No such CSpice frame is defined");

	this->spiceId = spiceId;

	if(name != "")
		this->name = name;
	else
		this->name = GetSpiceName();

	this->centerObject = new SpaceObject(GetFrameInfo().centerId, this->name + "_center");
}
Example #9
static WebPMuxError GetImageInfo(const WebPMuxImage* const wpi,
                                 int* const x_offset, int* const y_offset,
                                 int* const duration,
                                 int* const width, int* const height) {
  const WebPChunk* const frame_chunk = wpi->header_;
  WebPMuxError err;
  assert(wpi != NULL);
  assert(frame_chunk != NULL);

  // Get offsets and duration from ANMF chunk.
  err = GetFrameInfo(frame_chunk, x_offset, y_offset, duration);
  if (err != WEBP_MUX_OK) return err;

  // Get width and height from VP8/VP8L chunk.
  if (width != NULL) *width = wpi->width_;
  if (height != NULL) *height = wpi->height_;
  return WEBP_MUX_OK;
}
Example #10
void BaseVideoFilter::TakeScreenshot(VideoFilterType filterType, string filename, std::stringstream *stream)
{
	uint32_t* pngBuffer;
	FrameInfo frameInfo;
	uint32_t* frameBuffer = nullptr;
	{
		auto lock = _frameLock.AcquireSafe();
		if(_bufferSize == 0 || !GetOutputBuffer()) {
			return;
		}

		frameBuffer = (uint32_t*)new uint8_t[_bufferSize];
		memcpy(frameBuffer, GetOutputBuffer(), _bufferSize);
		frameInfo = GetFrameInfo();
	}

	pngBuffer = frameBuffer;

	uint32_t rotationAngle = EmulationSettings::GetScreenRotation();
	shared_ptr<RotateFilter> rotateFilter;
	if(rotationAngle > 0) {
		rotateFilter.reset(new RotateFilter(rotationAngle));
		pngBuffer = rotateFilter->ApplyFilter(pngBuffer, frameInfo.Width, frameInfo.Height);
		frameInfo = rotateFilter->GetFrameInfo(frameInfo);
	}

	shared_ptr<ScaleFilter> scaleFilter = ScaleFilter::GetScaleFilter(filterType);
	if(scaleFilter) {
		pngBuffer = scaleFilter->ApplyFilter(pngBuffer, frameInfo.Width, frameInfo.Height);
		frameInfo = scaleFilter->GetFrameInfo(frameInfo);
	}

	VideoHud hud;
	hud.DrawHud((uint8_t*)pngBuffer, frameInfo, EmulationSettings::GetOverscanDimensions());

	if(!filename.empty()) {
		PNGHelper::WritePNG(filename, pngBuffer, frameInfo.Width, frameInfo.Height);
	} else {
		PNGHelper::WritePNG(*stream, pngBuffer, frameInfo.Width, frameInfo.Height);
	}

	delete[] frameBuffer;
}
Example #11
void Entity::GetTag( const char *tagname, orientation_t *orient )
{
	refEntity_t ref;

	if( tiki == NULL ) {
		return;
	}

	int num = cgi.Tag_NumForName( tiki, tagname );

	if( num == -1 ) {
		return;
	}

	memset( &ref, 0, sizeof( refEntity_t ) );

	VectorCopy( origin, ref.origin );
	MatrixToEulerAngles( ref.axis, angles );

	ref.reType = RT_MODEL;

	ref.scale = scale;
	ref.hModel = modelhandle;
	ref.hOldModel = modelhandle;
	ref.tiki = tiki;
	ref.entityNumber = 0;
	ref.actionWeight = 1.0f;

	for( int i = 0; i < GetNumFrames(); i++ )
	{
		frameInfo_t *frame = GetFrameInfo( i );

		ref.frameInfo[ i ].index = frame->index;
		ref.frameInfo[ i ].time = frame->time;
		ref.frameInfo[ i ].weight = frame->weight;
	}

	//cgi.TIKI_Orientation( orient, &ref, num );
	*orient = re.TIKI_Orientation( &ref, num );
}
Example #12
/*************************************************************************
 * Private Function: PvtOffsetToTimecode()
 *
 *      Given an offset into a track and a frame rate, this function
 *      calculates a timecode value.
 *
 * Argument Notes:
 *
 * ReturnValue:
 *		Error code (see below).
 *
 * Possible Errors:
 *		Standard errors (see top of file).
 *************************************************************************/
aafErr_t PvtOffsetToTimecode(
	aafFrameOffset_t offset, /* IN - Offset into a track */
	aafInt16 frameRate,      /* IN - Frame rate */
	aafDropType_t drop,     /* IN - Drop or non-drop Timecode */
	aafInt16 *hours,         /* OUT - Hours value of timecode */
	aafInt16 *minutes,       /* OUT - Minutes value of timecode */
	aafInt16 *seconds,       /* OUT - Seconds value of timecode */
	aafInt16 *frames)        /* OUT - Frames value of timecode */
{
  frameTbl_t info;
  aafUInt32		frames_day;
  aafFrameOffset_t min10, min1;
  aafBool frame_dropped;

  info = GetFrameInfo(frameRate);
  frames_day = (drop ? info.dropFpHour: info.fpHour) *24;

  if (offset < 0L)
	 offset += frames_day;
  if (offset >= frames_day)
	 offset -= frames_day;
  if (drop)
	 {
		*hours = (aafInt16)(offset / info.dropFpHour);
		offset = offset % info.dropFpHour;
		min10 = offset / info.dropFpMin10;
		offset = offset % info.dropFpMin10;
		if (offset < info.fpMinute)
		  {
			 frame_dropped = kAAFFalse;
			 min1 = 0;
		  }
		else
		  {
			 frame_dropped = kAAFTrue;
			 offset -= info.fpMinute;
			 min1 = (offset / info.dropFpMin) + 1;
			 offset = offset % info.dropFpMin;
		  }
		
		*minutes = (aafInt16)((min10 * 10) + min1);
		*seconds = (aafInt16)(offset / frameRate);
		*frames = (aafInt16)(offset % frameRate);
		if (frame_dropped)
		  {
			 (*frames) +=2;
			 if (*frames >= frameRate)
				{
				  (*frames) -= frameRate;
				  (*seconds)++;
				  if (*seconds >= 60)
					 {
						(*seconds) -= 60;
						(*minutes)++;
						if (*minutes >= 60)
						  {
							 (*minutes) -= 60;
							 (*hours)++;
						  }
					 }
				}
		  }
	 }
  else
	 {
		*hours = (aafInt16)(offset / info.fpHour);
		offset = offset % info.fpHour;
		*minutes = (aafInt16)(offset / info.fpMinute);
		offset = offset % info.fpMinute;
		*seconds = (aafInt16)(offset / frameRate);
		*frames = (aafInt16)(offset % frameRate);
	 }

  return(AAFRESULT_SUCCESS);
}
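The matching round trip, again a hypothetical sketch under the same assumptions as the PvtTimecodeToOffset example above, should recover the original timecode fields:

	aafInt16 hh = 0, mm = 0, ss = 0, ff = 0;
	/* 111582 frames back to drop-frame timecode at 30 fps. */
	if (PvtOffsetToTimecode(111582, 30, kAAFTcDrop, &hh, &mm, &ss, &ff) == AAFRESULT_SUCCESS)
	{
		/* Expect hh = 1, mm = 2, ss = 3, ff = 4, i.e. 01:02:03;04. */
	}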
Example #13
// Process a single IP-address setting response frame
void  ProcIPSetReturnFrameOne(m_oIPSetFrameThreadStruct* pIPSetFrameThread)
{
	ASSERT(pIPSetFrameThread != NULL);
	unsigned int uiIPInstrument = 0;
	m_oInstrumentStruct* pInstrument = NULL;
	m_oRoutStruct* pRout = NULL;
	unsigned short usCommand = 0;
	char cLAUXRoutOpenSet = 0;
	CString str = _T("");
	string strFrameData = "";
	string strConv = "";
	// Get the instrument's IP address
	EnterCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
	uiIPInstrument = pIPSetFrameThread->m_pIPSetFrame->m_pCommandStructReturn->m_uiSrcIP;
	usCommand = pIPSetFrameThread->m_pIPSetFrame->m_pCommandStructReturn->m_usCommand;
	cLAUXRoutOpenSet = pIPSetFrameThread->m_pIPSetFrame->m_pCommandStructReturn->m_cLAUXRoutOpenSet;
	LeaveCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
	EnterCriticalSection(&pIPSetFrameThread->m_pLineList->m_oSecLineList);
	// The instrument is in the index map
	if (TRUE == IfIndexExistInMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPSetInstrumentMap))
	{
		pInstrument = GetInstrumentFromMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPSetInstrumentMap);
		// Update the instrument's keep-alive time
		UpdateInstrActiveTime(pInstrument);
		// Remove the instrument from the IP-address setting index map
		DeleteInstrumentFromMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPSetInstrumentMap);
		// Add the instrument to the IP-address index map
		pInstrument->m_bIPSetOK = true;
		// Increment the instrument count along this routing direction
		pRout = GetRout(pInstrument->m_uiRoutIP, &pIPSetFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
		if (FALSE == IfIndexExistInMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPInstrumentMap))
		{
			pRout->m_uiInstrumentNum++;
			AddInstrumentToMap(uiIPInstrument, pInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPInstrumentMap);
		}
		// Add to the instrument location index map
		AddLocationToMap(pInstrument->m_iLineIndex, pInstrument->m_iPointIndex, pInstrument, 
			&pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oInstrumentLocationMap);
		if (usCommand == pIPSetFrameThread->m_pThread->m_pConstVar->m_usSendSetCmd)
		{
			str.Format(_T("接收到SN = 0x%x,IP地址 = 0x%x仪器的IP地址设置应答, 仪器软件版本 = 0x%x"), 
				pInstrument->m_uiSN, uiIPInstrument, pInstrument->m_uiVersion);
		}
		else if (usCommand == pIPSetFrameThread->m_pThread->m_pConstVar->m_usSendQueryCmd)
		{
			str.Format(_T("接收到SN = 0x%x,IP地址 = 0x%x仪器的IP地址查询应答, 仪器软件版本 = 0x%x"), 
				pInstrument->m_uiSN, uiIPInstrument, pInstrument->m_uiVersion);
		}
		strConv = (CStringA)str;
		AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
			strConv);
	}
	else
	{
		LeaveCriticalSection(&pIPSetFrameThread->m_pLineList->m_oSecLineList);
		EnterCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
		GetFrameInfo(pIPSetFrameThread->m_pIPSetFrame->m_cpRcvFrameData,
			pIPSetFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, &strFrameData);
		LeaveCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
		AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
			strFrameData, ErrorType, IDS_ERR_IPSETMAP_NOTEXIT);
		EnterCriticalSection(&pIPSetFrameThread->m_pLineList->m_oSecLineList);
	}
	// If the instrument is in the assigned-IP-address index map
	if (TRUE == IfIndexExistInMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPInstrumentMap))
	{
		pInstrument = GetInstrumentFromMap(uiIPInstrument, &pIPSetFrameThread->m_pLineList->m_pInstrumentList->m_oIPInstrumentMap);
		// If the instrument type is LCI or a cross station (LAUX)
		if ((pInstrument->m_iInstrumentType == InstrumentTypeLCI)
			|| (pInstrument->m_iInstrumentType == InstrumentTypeLAUX))
		{
			// Power off cross line A
			if (cLAUXRoutOpenSet & (0x01 << 7))
			{
				AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
					"关闭交叉线A电源");
				pRout = GetRout(pInstrument->m_uiRoutIPDown, &pIPSetFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
				// Remove all instruments beyond this one along the routing direction
				DeleteAllInstrumentAlongRout(pInstrument, pRout, pIPSetFrameThread->m_pLineList, 
					pIPSetFrameThread->m_pThread->m_pLogOutPut);
			}
			// Power off cross line B
			if (cLAUXRoutOpenSet & (0x01 << 6))
			{
				AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
					"关闭交叉线B电源");
				pRout = GetRout(pInstrument->m_uiRoutIPTop, &pIPSetFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
				// Remove all instruments beyond this one along the routing direction
				DeleteAllInstrumentAlongRout(pInstrument, pRout, pIPSetFrameThread->m_pLineList, 
					pIPSetFrameThread->m_pThread->m_pLogOutPut);
			}
			// Power off main line A
			if (cLAUXRoutOpenSet & (0x01 << 5))
			{
				AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
					"关闭大线A电源");
				pRout = GetRout(pInstrument->m_uiRoutIPRight, &pIPSetFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
				// Remove all instruments beyond this one along the routing direction
				DeleteAllInstrumentAlongRout(pInstrument, pRout, pIPSetFrameThread->m_pLineList, 
					pIPSetFrameThread->m_pThread->m_pLogOutPut);
			}
			// Power off main line B
			if (cLAUXRoutOpenSet & (0x01 << 4))
			{
				AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, "ProcIPSetReturnFrameOne", 
					"关闭大线B电源");
				pRout = GetRout(pInstrument->m_uiRoutIPLeft, &pIPSetFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
				// Remove all instruments beyond this one along the routing direction
				DeleteAllInstrumentAlongRout(pInstrument, pRout, pIPSetFrameThread->m_pLineList, 
					pIPSetFrameThread->m_pThread->m_pLogOutPut);
			}
		}
	}
	LeaveCriticalSection(&pIPSetFrameThread->m_pLineList->m_oSecLineList);
}
Example #14
void CMP4Demux::RunL()
    {
    PRINT((_L("MP4Demux::RunL() in") ));

    // If we have demuxed everything up to stream end, there is nothing for
    // us to do
    if ( iStreamEndDemuxed )
        return;
    
    // Don't do anything if we are not demuxing
    if ( !iDemultiplexing )
        {
        SetActive();
        iStatus = KRequestPending;
        return;
        }
    
    // If we don't have a primary channel, we have no open channels and may as
    // well quit
    if ( !iAudioChannel && !iVideoChannel )
        {
        iMonitor->StreamEndReached();
        return;
        }    

    // streaming case:
    // Try to demux as long as we have a free block in the primary output queue
    // and we can find more frames 
    // If we have both video and audio, we'll check the available space only
    // in the primary audio queue, and the video queue will allocate more
    // blocks as needed. This way the audio decoder will get more data as
    // needed, no matter what the video bitrate is.            

    // in file-reading case, GetFrameInfo() checks if there's available space
    // in queues, and this info is contained in variable iGotFrame       

    // send frame(s) if:
    // a frame is available AND
    // there are free blocks in output queue AND
    // we have not demuxed too much during this run so other objects get CPU AND
    // the stream end has not been demuxed    

    iBytesDemuxed = 0;

    // NOTE: only video queue fullness checked for now
    CActiveQueue *queue = iVideoChannel->iTargetQueue;

    GetFrameInfo();

    while ( iGotFrame && ( (iInputQueue && NumFreeBlocks() > 0) || 
          ( (queue->NumDataBlocks() < KMaxBlocksInQueue) && (iBytesDemuxed < KMaxBytesPerRun) ) ) && 
          (!iStreamEndDemuxed) )
        {
        // Read & send frame(s)        
        TInt error = ReadAndSendFrames();
        
        if ( error != KErrNone )
            {
            iMonitor->Error(error);
            return;        
            }
        
        // And to try get info for new frame
        GetFrameInfo();
        }
    
    // If we have demultiplexed everything up to stream end, signal the queues
    // and don't demux any more. If we have no output channels, notify the
    // status monitor.
    if ( iStreamEnd && (!iGotFrame) )
        {
        // report stream end in streaming case
        // in file-reading case, it's reported in GetFrameInfo
        if ( iNumOutputChannels )
            {
            if ( iInputQueue )
                {
                TUint i;
                for ( i = 0; i < iNumOutputChannels; i++ )
                    iOutputChannels[i].iTargetQueue->WriteStreamEnd();
                }
            }
        else
            {
            iMonitor->StreamEndReached();
            }
        iStreamEndDemuxed = ETrue;
        return;
        }
    
    // Re-activate to get signals about new blocks
    SetActive();
    iStatus = KRequestPending;

    PRINT((_L("MP4Demux::RunL() out") ));
    }
Example #15
// Process a single head-packet (first-packet) frame
void ProcHeadFrameOne(m_oHeadFrameThreadStruct* pHeadFrameThread)
{
	ASSERT(pHeadFrameThread != NULL);
	// New-instrument pointer starts out NULL
	m_oInstrumentStruct* pInstrument = NULL;
	m_oRoutStruct* pRout = NULL;
	CString str = _T("");
	string strFrameData = "";
	string strConv = "";
	unsigned int uiSN = 0;
	unsigned int uiTimeHeadFrame = 0;
	unsigned int uiRoutIP = 0;
	unsigned int uiVersion = 0;
	unsigned int uiHeadFrameCount = 0;
	EnterCriticalSection(&pHeadFrameThread->m_oSecHeadFrameThread);
	pHeadFrameThread->m_uiHeadFrameCount++;
	uiHeadFrameCount = pHeadFrameThread->m_uiHeadFrameCount;
	LeaveCriticalSection(&pHeadFrameThread->m_oSecHeadFrameThread);

	EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
	uiSN = pHeadFrameThread->m_pHeadFrame->m_pCommandStruct->m_uiSN;
	uiTimeHeadFrame = pHeadFrameThread->m_pHeadFrame->m_pCommandStruct->m_uiTimeHeadFrame;
	uiRoutIP = pHeadFrameThread->m_pHeadFrame->m_pCommandStruct->m_uiRoutIP;
	uiVersion = pHeadFrameThread->m_pHeadFrame->m_pCommandStruct->m_uiVersion;
	LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
	EnterCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
	// Check whether the instrument's SN is already in the SN index map
	if(FALSE == IfIndexExistInMap(uiSN, &pHeadFrameThread->m_pLineList->m_pInstrumentList->m_oSNInstrumentMap))
	{
		int iDirection = 0;
		// Get a free (new) instrument object
		pInstrument = GetFreeInstrument(pHeadFrameThread->m_pLineList->m_pInstrumentList);
		// Set the new instrument's SN
		pInstrument->m_uiSN = uiSN;
		// Instrument type
		pInstrument->m_iInstrumentType = pInstrument->m_uiSN & 0xff;
		// Set the new instrument's routing IP address
		pInstrument->m_uiRoutIP = uiRoutIP;
		// Set the new instrument's head-packet time
		pInstrument->m_uiTimeHeadFrame = uiTimeHeadFrame;
		// Instrument software version number
		pInstrument->m_uiVersion = uiVersion;
		// Set the new instrument's type:
		// a routing IP of 0 means this is an LCI
		if (pInstrument->m_uiRoutIP == 0)
		{
			pInstrument->m_iInstrumentType = InstrumentTypeLCI;
			pInstrument->m_iRoutDirection = DirectionCenter;
			// Get a free routing object
			iDirection = DirectionCenter;
			SetCrossRout(pInstrument, iDirection, pHeadFrameThread->m_pLineList->m_pRoutList);
		}
		else
		{
			if (TRUE == IfIndexExistInRoutMap(pInstrument->m_uiRoutIP,
				&pHeadFrameThread->m_pLineList->m_pRoutList->m_oRoutMap))
			{
				pRout = GetRout(uiRoutIP, &pHeadFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
				pInstrument->m_iRoutDirection = pRout->m_iRoutDirection;
				// Add the instrument to the route's instrument list
				pRout->m_olsRoutInstrument.push_back(pInstrument);
			}
		}

		if ((pInstrument->m_iInstrumentType == InstrumentTypeLCI)
			|| (pInstrument->m_iInstrumentType == InstrumentTypeLAUX))
		{
			// Set up the cross-station route
			iDirection = DirectionTop;
			SetCrossRout(pInstrument, iDirection, pHeadFrameThread->m_pLineList->m_pRoutList);
			// Set up the cross-station route
			iDirection = DirectionDown;
			SetCrossRout(pInstrument, iDirection, pHeadFrameThread->m_pLineList->m_pRoutList);
			// Set up the cross-station route
			iDirection = DirectionLeft;
			SetCrossRout(pInstrument, iDirection, pHeadFrameThread->m_pLineList->m_pRoutList);
			// Set up the cross-station route
			iDirection = DirectionRight;
			SetCrossRout(pInstrument, iDirection, pHeadFrameThread->m_pLineList->m_pRoutList);
		}
		pInstrument->m_uiBroadCastPort = pHeadFrameThread->m_pThread->m_pConstVar->m_iBroadcastPortStart + pInstrument->m_uiRoutIP;
		// Add the new instrument to the SN index map
		AddInstrumentToMap(pInstrument->m_uiSN, pInstrument, &pHeadFrameThread->m_pLineList->m_pInstrumentList->m_oSNInstrumentMap);
	}

	// The instrument is now in the index map; look it up and get its pointer
	pInstrument = GetInstrumentFromMap(uiSN, &pHeadFrameThread->m_pLineList->m_pInstrumentList->m_oSNInstrumentMap);

	// Check whether the instrument's IP has already been set
	if (pInstrument->m_bIPSetOK == true)
	{
		LeaveCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
		EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		GetFrameInfo(pHeadFrameThread->m_pHeadFrame->m_cpRcvFrameData, 
			pHeadFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, &strFrameData);
		LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		AddMsgToLogOutPutList(pHeadFrameThread->m_pThread->m_pLogOutPut, "ProcHeadFrameOne", 
			strFrameData, ErrorType, IDS_ERR_EXPIREDHEADFRAME);
		return;
	}
	// Check whether the head-packet time has changed
	if (pInstrument->m_uiTimeHeadFrame != uiTimeHeadFrame)
	{
		LeaveCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
		EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		GetFrameInfo(pHeadFrameThread->m_pHeadFrame->m_cpRcvFrameData, 
			pHeadFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, &strFrameData);
		LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		AddMsgToLogOutPutList(pHeadFrameThread->m_pThread->m_pLogOutPut, "ProcHeadFrameOne", 
			strFrameData, ErrorType, IDS_ERR_HEADFRAMETIME);
		EnterCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
		pInstrument->m_uiTimeHeadFrame = uiTimeHeadFrame;
	}
	// Update the instrument's keep-alive time
	UpdateInstrActiveTime(pInstrument);
	if (TRUE == IfIndexExistInRoutMap(pInstrument->m_uiRoutIP, 
		&pHeadFrameThread->m_pLineList->m_pRoutList->m_oRoutMap))
	{
		pRout = GetRout(uiRoutIP, &pHeadFrameThread->m_pLineList->m_pRoutList->m_oRoutMap);
		// Sort instrument positions by head-packet time
		InstrumentLocationSort(pInstrument, pRout);
	}
	else
	{
		LeaveCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
		EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		GetFrameInfo(pHeadFrameThread->m_pHeadFrame->m_cpRcvFrameData, 
			pHeadFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, &strFrameData);
		LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
		AddMsgToLogOutPutList(pHeadFrameThread->m_pThread->m_pLogOutPut, "ProcHeadFrameOne", 
			strFrameData, ErrorType, IDS_ERR_ROUT_NOTEXIT);
		return;
	}
	// If the instrument's position along its routing direction has been stable more than
	// the configured number of times, add it to the IP-address setting queue
	if (pInstrument->m_iHeadFrameStableNum >= pHeadFrameThread->m_pThread->m_pConstVar->m_iHeadFrameStableTimes)
	{
		AddInstrumentToMap(pInstrument->m_uiIP, pInstrument, &pHeadFrameThread->m_pLineList->m_pInstrumentList->m_oIPSetInstrumentMap);
	}
	str.Format(_T("接收到SN = 0x%x的仪器首包帧,首包时刻 = 0x%x,路由IP = 0x%x, 软件版本 = 0x%x,测线号 = %d,测点序号 = %d,首包计数 = %d"), 
		pInstrument->m_uiSN, pInstrument->m_uiTimeHeadFrame, pInstrument->m_uiRoutIP, uiVersion, 
		pInstrument->m_iLineIndex, pInstrument->m_iPointIndex, uiHeadFrameCount);
	LeaveCriticalSection(&pHeadFrameThread->m_pLineList->m_oSecLineList);
	strConv = (CStringA)str;
	AddMsgToLogOutPutList(pHeadFrameThread->m_pThread->m_pLogOutPut, "ProcHeadFrameOne", strConv);
	OutputDebugString(str);
}