Example #1
0
bool CBindInfo::CalcMapsAndCheck()
{
  ClearMaps();

  UInt32 numStreams = 0;

  if (Coders.Size() == 0)
    return false;
  if (Coders.Size() - 1 != Bonds.Size())
    return false;

  FOR_VECTOR(i, Coders)
  {
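    // Record the index of this coder's first stream, then map each of its
    // streams back to the coder index (the two directions of the mapping).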
    Coder_to_Stream.Add(numStreams);
    
    const CCoderStreamsInfo &c = Coders[i];
    
    for (unsigned j = 0; j < c.NumStreams; j++)
      Stream_to_Coder.Add(i);

    numStreams += c.NumStreams;
  }
Example #2
0
void MythUIStateType::CopyFrom(MythUIType *base)
{
    MythUIStateType *st = dynamic_cast<MythUIStateType *>(base);
    if (!st)
    {
        VERBOSE(VB_IMPORTANT, "ERROR, bad parsing");
        return;
    }

    ClearMaps();

    m_ShowEmpty = st->m_ShowEmpty;

    MythUIType::CopyFrom(base);

    QMap<QString, MythUIType *>::iterator i;
    for (i = st->m_ObjectsByName.begin(); i != st->m_ObjectsByName.end(); ++i)
    {
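        // Find the child that MythUIType::CopyFrom() already duplicated and
        // register it under the same name key, keeping its visibility.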
        MythUIType *other = i.data();
        QString key = i.key();

        MythUIType *newtype = GetChild(other->name());
        AddObject(key, newtype);
        newtype->SetVisible(other->IsVisible());
    }

    QMap<int, MythUIType *>::iterator j;
    for (j = st->m_ObjectsByState.begin(); j != st->m_ObjectsByState.end(); ++j)
    {
        MythUIType *other = j.data();
        int key = j.key();

        MythUIType *newtype = GetChild(other->name());
        AddObject((StateType)key, newtype);
        newtype->SetVisible(other->IsVisible());
    }
}
Example #3
0
void wxsItemEditorContent::BeforePreviewChanged()
{
    ClearMaps();
    ClearDragPoints();
    BeforeContentChanged();
}
Example #4
0
pxcStatus UtilCapture::LocateStreams(std::vector<PXCCapture::VideoStream::DataDesc*> &vinputs,std::vector<PXCCapture::AudioStream::DataDesc*> &ainputs) {
    UtilTrace trace(L"UtilCapture::LocateStreams(video)", m_session_service);
    if (vinputs.empty() && ainputs.empty()) return PXC_STATUS_ITEM_UNAVAILABLE;

    PXCCapture::AudioStream::DataDesc ainput;
    if (!ConsolidateAudioRequests(ainputs,&ainput)) return PXC_STATUS_ITEM_UNAVAILABLE;

    int n1=CalculateNumFormats(vinputs);

    pxcStatus sts;
    m_desc_filter.group=PXCSession::IMPL_GROUP_SENSOR;
    m_desc_filter.subgroup=(vinputs.size()>0?PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE:0)|(ainputs.size()>0?PXCSession::IMPL_SUBGROUP_AUDIO_CAPTURE:0);
    for (int module_idx=0;;module_idx++) {
        sts = CreateCapture(module_idx, m_capture.ReleaseRef());
        if (sts == PXC_STATUS_ITEM_UNAVAILABLE) break;
        if (sts < PXC_STATUS_NO_ERROR) continue;

        PXCCapture::DeviceInfo dinfo;
        for (int device_idx=0;;device_idx++) {
            sts=m_capture->QueryDevice(device_idx,&dinfo);
            if (sts<PXC_STATUS_NO_ERROR) break;
            if (m_session_service) m_session_service->TraceParam(L"Locating stream(s) on device ", dinfo.name);
            if (m_name_filter) if (!wcsstr(dinfo.name,m_name_filter)) continue;

            sts=m_capture->CreateDevice(device_idx,m_device.ReleaseRef());
            if (sts<PXC_STATUS_NO_ERROR) continue;

            /* Match image formats */
            ClearMaps(vinputs);

            int n2=0;
            bool am=(ainputs.size()>0)?false:true;
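            // Walk the device's streams: match video profiles until all requested
            // formats are covered (n2>=n1) and, if audio was requested, a matching
            // audio profile has been set (am).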
            for (int stream_idx=0;;stream_idx++) {
                PXCCapture::Device::StreamInfo sinfo;
                sts=m_device->QueryStream(stream_idx, &sinfo);
                if (sts<PXC_STATUS_NO_ERROR) break;
                if (sinfo.cuid==PXCCapture::VideoStream::CUID && n2<n1) {
                    PXCSmartPtr<PXCCapture::VideoStream> vstream;
                    sts=m_device->CreateStream(stream_idx,PXCCapture::VideoStream::CUID,(void**)&vstream);
                    if (sts<PXC_STATUS_NO_ERROR) break;

                    std::list<PXCCapture::VideoStream::ProfileInfo> profiles;
                    ScanProfiles(profiles,sinfo.imageType,vstream);

                    int n3=MatchProfiles(vinputs,sinfo,profiles,(int)m_vstreams.size());
                    if (n3==0) continue;

                    FindBestProfile(vinputs,profiles,(int)m_vstreams.size());
                    sts=vstream->SetProfile(&*profiles.begin());
                    if (sts<PXC_STATUS_NO_ERROR) break;

                    m_vstreams.push_back(vstream.ReleasePtr());
                    n2+=n3;
                }
                if (sinfo.cuid==PXCCapture::AudioStream::CUID && !am) {
                    sts=m_device->CreateStream(stream_idx, PXCCapture::AudioStream::CUID, (void**)m_astream.ReleaseRef());
                    if (sts<PXC_STATUS_NO_ERROR) continue;

                    for (int profile_idx=0;;profile_idx++) {
                        PXCCapture::AudioStream::ProfileInfo pinfo;
                        sts=m_astream->QueryProfile(profile_idx,&pinfo);
                        if (sts<PXC_STATUS_NO_ERROR) break;

                        if (ainput.info.nchannels>0 && ainput.info.nchannels!=pinfo.audioInfo.nchannels) continue;
                        if (ainput.info.sampleRate>0 && ainput.info.sampleRate!=pinfo.audioInfo.sampleRate) continue;
                        if (ainput.info.bufferSize>0 && ainput.info.bufferSize!=pinfo.audioInfo.bufferSize) continue;
                        if (ainput.info.channelMask>0 && ainput.info.channelMask!=pinfo.audioInfo.channelMask) continue;

                        sts=m_astream->SetProfile(&pinfo);
                        if (sts<PXC_STATUS_NO_ERROR) break;

                        for (int i=0;i<(int)ainputs.size();i++) {
                            memcpy_s(&ainputs[i]->info,sizeof(ainputs[i]->info),&pinfo.audioInfo,sizeof(pinfo.audioInfo));
                            ainputs[i]->options=pinfo.audioOptions;
                        }
                        am=true;
                        break;
                    }
                    if (sts<PXC_STATUS_NO_ERROR) m_astream.ReleaseRef();
                }
                if (sts>=PXC_STATUS_NO_ERROR && n2>=n1 && am) break;
            }
            if (sts>=PXC_STATUS_NO_ERROR && n2>=n1 && am) 
                if (RecordProperties(vinputs)) break;

            DeleteStreams();
            m_device.ReleaseRef();
        }
        if (sts>=PXC_STATUS_NO_ERROR)
        {
            if (m_session_service) m_session_service->TraceParam(L"Successfully located streams on device ", dinfo.name);
            // update actual image size
            for (int i = 0; i < (int)vinputs.size() && i < (int)m_maps.size(); i++)
            {
                for (int c = 0; c < PXCCapture::VideoStream::STREAM_LIMIT; c++)
                {
                    PXCCapture::VideoStream::ProfileInfo info;
                    if (m_maps[i][c]<0) break;
                    PXCCapture::VideoStream* vstream = m_vstreams[m_maps[i][c]];
                    if (!vstream) break;
                    if (vstream->QueryProfile(&info) >= PXC_STATUS_NO_ERROR)
                    {
                        vinputs[i]->streams[c].sizeMin.width = info.imageInfo.width;
                        vinputs[i]->streams[c].sizeMin.height = info.imageInfo.height;
                        vinputs[i]->streams[c].sizeMax.width = info.imageInfo.width;
                        vinputs[i]->streams[c].sizeMax.height = info.imageInfo.height;
                    }
                }
            }
            break;
        }
        m_capture.ReleaseRef();
    }
    return sts;
}
Example #5
0
void MythUIStateType::ClearImages()
{
    ClearMaps();
    SetRedraw();
}
Example #6
0
void WrappedID3D11DeviceContext::ReplayLog(LogState readType, uint32_t startEventID, uint32_t endEventID, bool partial)
{
	m_State = readType;

	m_DoStateVerify = true;

	D3D11ChunkType header = (D3D11ChunkType)m_pSerialiser->PushContext(NULL, 1, false);
	RDCASSERT(header == CONTEXT_CAPTURE_HEADER);

	ResourceId id;
	m_pSerialiser->Serialise("context", id);

	WrappedID3D11DeviceContext *context = (WrappedID3D11DeviceContext *)m_pDevice->GetResourceManager()->GetLiveResource(id);
	
	RDCASSERT(WrappedID3D11DeviceContext::IsAlloc(context) && context == this);

	Serialise_BeginCaptureFrame(!partial);

	m_pSerialiser->PopContext(NULL, header);

	m_CurEvents.clear();
	
	if(m_State == EXECUTING)
	{
		FetchAPIEvent ev = GetEvent(startEventID);
		m_CurEventID = ev.eventID;
		m_pSerialiser->SetOffset(ev.fileOffset);
	}
	else if(m_State == READING)
	{
		m_CurEventID = 1;
	}

	if(m_State == EXECUTING)
	{
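		// Clear any outstanding resource maps on this context and on every
		// deferred context before replaying the captured events.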
		ClearMaps();
		for(size_t i=0; i < m_pDevice->GetNumDeferredContexts(); i++)
		{
			WrappedID3D11DeviceContext *defcontext = m_pDevice->GetDeferredContext(i);
			defcontext->ClearMaps();
		}
	}

	m_pDevice->GetResourceManager()->MarkInFrame(true);

	uint64_t startOffset = m_pSerialiser->GetOffset();

	while(1)
	{
		if(m_State == EXECUTING && m_CurEventID > endEventID)
		{
			// we can just break out if we've done all the events desired.
			break;
		}

		uint64_t offset = m_pSerialiser->GetOffset();

		D3D11ChunkType chunktype = (D3D11ChunkType)m_pSerialiser->PushContext(NULL, 1, false);

		ProcessChunk(offset, chunktype, false);
		
		RenderDoc::Inst().SetProgress(FrameEventsRead, float(offset - startOffset)/float(m_pSerialiser->GetSize()));
		
		// for now just abort after capture scope. Really we'd need to support multiple frames
		// but for now this will do.
		if(chunktype == CONTEXT_CAPTURE_FOOTER)
			break;
		
		m_CurEventID++;
	}

	if(m_State == READING)
	{
		m_pDevice->GetFrameRecord().back().drawcallList = m_ParentDrawcall.Bake();
		m_pDevice->GetFrameRecord().back().frameInfo.debugMessages = m_pDevice->GetDebugMessages();

		int initialSkips = 0;

		for(auto it=WrappedID3D11Buffer::m_BufferList.begin(); it != WrappedID3D11Buffer::m_BufferList.end(); ++it)
			m_ResourceUses[it->first];

		for(auto it=WrappedID3D11Texture1D::m_TextureList.begin(); it != WrappedID3D11Texture1D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];
		for(auto it=WrappedID3D11Texture2D::m_TextureList.begin(); it != WrappedID3D11Texture2D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];
		for(auto it=WrappedID3D11Texture3D::m_TextureList.begin(); it != WrappedID3D11Texture3D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];
		
		// it's easier to remove duplicate usages here than check it as we go.
		// this means if textures are bound in multiple places in the same draw
		// we don't have duplicate uses
		for(auto it = m_ResourceUses.begin(); it != m_ResourceUses.end(); ++it)
		{
			vector<EventUsage> &v = it->second;
			std::sort(v.begin(), v.end());
			v.erase( std::unique(v.begin(), v.end()), v.end() );
			
#if 0
			ResourceId resid = m_pDevice->GetResourceManager()->GetOriginalID(it->first);
			
			if(m_pDevice->GetResourceManager()->GetInitialContents(resid).resource == NULL)
				continue;
			
			// code disabled for now as skipping these initial states
			// doesn't seem to produce any measurable improvement in any case
			// I've checked
			RDCDEBUG("Resource %llu", resid);
			if(v.empty())
			{
				RDCDEBUG("Never used!");
				initialSkips++;
			}
			else
			{
				bool written = false;

				for(auto usit = v.begin(); usit != v.end(); ++usit)
				{
					ResourceUsage u = usit->usage;

					if(u == eUsage_SO ||
						(u >= eUsage_VS_RWResource && u <= eUsage_CS_RWResource) ||
						u == eUsage_DepthStencilTarget || u == eUsage_ColourTarget)
					{
						written = true;
						break;
					}
				}

				if(written)
				{
					RDCDEBUG("Written in frame - needs initial state");
				}
				else
				{
					RDCDEBUG("Never written to in the frame");
					initialSkips++;
				}
			}
#endif
		}

		//RDCDEBUG("Can skip %d initial states.", initialSkips);
	}

	m_pDevice->GetResourceManager()->MarkInFrame(false);

	m_State = READING;

	m_DoStateVerify = false;
}
Example #7
0
void WrappedID3D11DeviceContext::ReplayLog(LogState readType, uint32_t startEventID, uint32_t endEventID, bool partial)
{
	m_State = readType;

	m_DoStateVerify = true;

	D3D11ChunkType header = (D3D11ChunkType)m_pSerialiser->PushContext(NULL, 1, false);
	RDCASSERT(header == CONTEXT_CAPTURE_HEADER);

	ResourceId id;
	m_pSerialiser->Serialise("context", id);

	WrappedID3D11DeviceContext *context = (WrappedID3D11DeviceContext *)m_pDevice->GetResourceManager()->GetLiveResource(id);
	
	RDCASSERT(WrappedID3D11DeviceContext::IsAlloc(context) && context == this);

	Serialise_BeginCaptureFrame(!partial);

	m_pSerialiser->PopContext(NULL, header);

	m_CurEvents.clear();
	
	if(m_State == EXECUTING)
	{
		FetchAPIEvent ev = GetEvent(startEventID);
		m_CurEventID = ev.eventID;
		m_pSerialiser->SetOffset(ev.fileOffset);
	}
	else if(m_State == READING)
	{
		m_CurEventID = 1;
	}

	if(m_State == EXECUTING)
	{
		ClearMaps();
		for(size_t i=0; i < m_pDevice->GetNumDeferredContexts(); i++)
		{
			WrappedID3D11DeviceContext *context = m_pDevice->GetDeferredContext(i);
			context->ClearMaps();
		}
	}

	m_pDevice->GetResourceManager()->MarkInFrame(true);

	while(1)
	{
		if(m_State == EXECUTING && m_CurEventID > endEventID)
		{
			// we can just break out if we've done all the events desired.
			break;
		}

		uint64_t offset = m_pSerialiser->GetOffset();

		D3D11ChunkType context = (D3D11ChunkType)m_pSerialiser->PushContext(NULL, 1, false);

		ProcessChunk(offset, context, false);
		
		RenderDoc::Inst().SetProgress(FileInitialRead, float(offset)/float(m_pSerialiser->GetSize()));
		
		// for now just abort after capture scope. Really we'd need to support multiple frames
		// but for now this will do.
		if(context == CONTEXT_CAPTURE_FOOTER)
			break;
		
		m_CurEventID++;
	}

	if(m_State == READING)
	{
		m_pDevice->GetFrameRecord().back().drawcallList = m_ParentDrawcall.Bake();

		m_ParentDrawcall.children.clear();

		int initialSkips = 0;

		for(auto it=WrappedID3D11Buffer::m_BufferList.begin(); it != WrappedID3D11Buffer::m_BufferList.end(); ++it)
			m_ResourceUses[it->first];

		for(auto it=WrappedID3D11Texture1D::m_TextureList.begin(); it != WrappedID3D11Texture1D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];
		for(auto it=WrappedID3D11Texture2D::m_TextureList.begin(); it != WrappedID3D11Texture2D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];
		for(auto it=WrappedID3D11Texture3D::m_TextureList.begin(); it != WrappedID3D11Texture3D::m_TextureList.end(); ++it)
			m_ResourceUses[it->first];

		for(auto it = m_ResourceUses.begin(); it != m_ResourceUses.end(); ++it)
		{
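			// A resource with saved initial contents that is never used, or never
			// written to, during the frame could have its initial state skipped.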
			ResourceId id = m_pDevice->GetResourceManager()->GetOriginalID(it->first);

			if(m_pDevice->GetResourceManager()->GetInitialContents(id) == NULL)
				continue;

			RDCDEBUG("Resource %llu", id);
			if(it->second.empty())
			{
				RDCDEBUG("Never used!");
				initialSkips++;
			}
			else
			{
				bool written = false;

				for(auto usit = it->second.begin(); usit != it->second.end(); ++usit)
				{
					ResourceUsage u = usit->usage;

					if(u == eUsage_SO ||
						u == eUsage_CS_UAV || u == eUsage_PS_UAV ||
						u == eUsage_OM_DSV || u == eUsage_OM_RTV)
					{
						written = true;
						break;
					}
				}

				if(written)
				{
					RDCDEBUG("Written in frame - needs initial state");
				}
				else
				{
					RDCDEBUG("Never written to in the frame");
					initialSkips++;
				}
			}
		}

		RDCDEBUG("Can skip %d initial states.", initialSkips);
	}

	m_pDevice->GetResourceManager()->MarkInFrame(false);

	m_State = READING;

	m_DoStateVerify = false;
}
Example #8
0
void CVolatilityManagement::FinalRelease()
{
	ClearMaps();
	//UnregisterPublisher();
}
Example #9
0
BaseGrid::~BaseGrid() {
	ClearMaps();
	delete context_menu;
}