void setPointerToAudioStart( ImporterLocalRec8H ldataH, const PrAudioSample startAudioPosition, imFileRef SDKfileRef) { csSDK_uint32 totalVideoFramesLu = 0; csSDK_int64 bytesPerFrameLL = 0, videoOffsetLL = 0; PrAudioSample audioOffset = 0; #ifdef PRWIN_ENV csSDK_int32 tempErrorS = 0; LARGE_INTEGER distanceToMoveLI; #else SInt64 distanceToMove; #endif totalVideoFramesLu = (*ldataH)->theFile.numFrames; if ((*ldataH)->theFile.hasVideo && totalVideoFramesLu > 0) { bytesPerFrameLL = (*ldataH)->theFile.width * (*ldataH)->theFile.height * GetPixelFormatSize((*ldataH)->theFile.videoSubtype); videoOffsetLL += totalVideoFramesLu * bytesPerFrameLL; // Take the extra bytes at the end of each frame into account videoOffsetLL += (PLUS_LINE_LENGTH * (totalVideoFramesLu)); } audioOffset = startAudioPosition * AUDIO_SAMPLE_SIZE; #ifdef PRWIN_ENV distanceToMoveLI.QuadPart = sizeof(SDK_File) + videoOffsetLL + audioOffset; tempErrorS = SetFilePointerEx(SDKfileRef, distanceToMoveLI, NULL, FILE_BEGIN); if (tempErrorS == INVALID_SET_FILE_POINTER) { GetLastError (); } #else distanceToMove = sizeof(SDK_File) + videoOffsetLL + audioOffset; FSSetForkPosition ( reinterpret_cast<intptr_t>(SDKfileRef), fsFromStart, distanceToMove); #endif }
// Loads an image file and converts it to a 32bpp pre-multiplied-alpha HBITMAP.
// Returns NULL when the imaging factory is unavailable, the file is missing,
// or decoding fails.
//
// BUGFIX: the original leaked the IImage and IBitmapImage COM objects on every
// call (no Release), never called UnlockBits after LockBits, and dereferenced
// the COM pointers without checking they were actually created.
HBITMAP LoadTransparentImage(wchar_t *fileName)
{
	HBITMAP hResult = NULL;

	if (m_pImageFactory != NULL && IsFileExists(fileName) == TRUE)
	{
		IImage* pImage = NULL;
		IBitmapImage* pBitmapImage = NULL;
		ImageInfo imageInfo;
		BitmapData bitmapData;

		m_pImageFactory->CreateImageFromFile(fileName, &pImage);
		m_pImageFactory->CreateBitmapFromImage(pImage, 0, 0, PixelFormat32bppPARGB,
			InterpolationHintNearestNeighbor, &pBitmapImage);

		if (pImage != NULL && pBitmapImage != NULL)
		{
			pImage->GetImageInfo(&imageInfo);
			RECT rect = {0, 0, imageInfo.Width, imageInfo.Height};

			// Lock the decoded pixels read-only so CreateBitmap can copy from Scan0
			pBitmapImage->LockBits(&rect, ImageLockModeRead, PixelFormat32bppPARGB, &bitmapData);
			hResult = CreateBitmap(imageInfo.Width, imageInfo.Height, 1,
				GetPixelFormatSize(imageInfo.PixelFormat), bitmapData.Scan0);
			pBitmapImage->UnlockBits(&bitmapData);
		}

		// Release COM interfaces (previously leaked on every successful call)
		if (pBitmapImage != NULL)
			pBitmapImage->Release();
		if (pImage != NULL)
			pImage->Release();
	}
	return hResult;
};
// Copies the contents of a render-target texture (id) into a system-memory
// texture (saveTexture) via GetRenderTargetData, so the pixels survive outside
// video memory — presumably so the render target can be restored after a
// Direct3D device reset (TODO confirm against the matching restore path).
// No-op unless this texture is a render target with autosave enabled and the
// render manager has marked it modified since the last save.
void Texture::SaveToSystemMemory()
{
	if (isRenderTarget)
	{
		/* Do not save texture if autosave flag is false */
		if (!renderTargetAutosave)
			return;

		// When the device is lost, GetRenderTargetData cannot succeed, so bail
		// out and keep whatever was saved previously.
		HRESULT hr = RenderManager::Instance()->GetD3DDevice()->TestCooperativeLevel();
		if (hr == D3DERR_DEVICELOST)
		{
			//if (!saveTexture)
			//Logger::FrameworkDebug("Trying to save to system memory rendertarget that was not saved before");
			return;
		}

		// Render manager set this flag when you set sprite as render target.
		if (!renderTargetModified)
			return;

		// Release should be after check that renderTargetModified.
		// (Releasing earlier would drop the last good snapshot even when there
		// is nothing new to save.)
		D3DSafeRelease(saveTexture);

		LPDIRECT3DDEVICE9 device = RenderManager::Instance()->GetD3DDevice();
		D3DSURFACE_DESC desc;
		// Use level 0 of the render target to match format and dimensions.
		id->GetLevelDesc(0, &desc);
		//Logger::FrameworkDebug("Saving render target to system memory: %s size: %d x %d format:%d", relativePathname.c_str(), width, height, desc.Format);

		/*
		HRESULT hr = device->CreateOffscreenPlainSurface(width, height, desc.Format, D3DPOOL_SYSTEMMEM, &saveSurface, NULL);
		DX_VERIFY(hr);
		*/

		// Destination must live in D3DPOOL_SYSTEMMEM for GetRenderTargetData.
		hr = device->CreateTexture(width, height, 1/*means we create texture with 1 mipmap level*/, 0, desc.Format, D3DPOOL_SYSTEMMEM, &saveTexture, 0);
		RENDER_VERIFY(hr);

		LPDIRECT3DSURFACE9 renderTargetMainSurface;
		hr = id->GetSurfaceLevel(0, &renderTargetMainSurface);
		RENDER_VERIFY(hr);

		LPDIRECT3DSURFACE9 saveTextureMainSurface;
		hr = saveTexture->GetSurfaceLevel(0, &saveTextureMainSurface);
		RENDER_VERIFY(hr);

		// GPU -> system-memory copy of the render target's top-level surface.
		hr = device->GetRenderTargetData(renderTargetMainSurface, saveTextureMainSurface);
		RENDER_VERIFY(hr);

		renderTargetModified = false;

#if 0
		// Dead debug code: dumps the saved pixels to FBO\<name>.png (BGRA -> RGBA swizzle).
		// NOTE(review): `image->Create(...)` is called on an uninitialized pointer;
		// if this block is ever re-enabled it must use the commented-out `new Image()`
		// (or a static factory) first.
		//Image * image = new Image();
		Image * image = image->Create(width, height, FORMAT_RGBA8888);
		D3DLOCKED_RECT rect;
		hr = saveTexture->LockRect(0, &rect, 0, 0);
		if (FAILED(hr))
		{
			Logger::Error("[TextureDX9] Could not lock DirectX9 Texture.");
			return;
		}

		int32 pixelSizeInBits = GetPixelFormatSize(format);
		if (format == FORMAT_RGBA8888)
		{
			//int32 pitchInBytes =
			uint8 * destBits = (uint8*)image->GetData();
			uint8 * sourceBits = (uint8*)rect.pBits;
			for (uint32 h = 0; h < height * width; ++h)
			{
				uint32 b = sourceBits[0];
				uint32 g = sourceBits[1];
				uint32 r = sourceBits[2];
				uint32 a = sourceBits[3];
				destBits[0] = (uint8)r; //sourceBits[3];
				destBits[1] = (uint8)g; //sourceBits[0];
				destBits[2] = (uint8)b;//sourceBits[1];
				destBits[3] = (uint8)a;
				destBits += 4;
				sourceBits += 4;
			}
		}
		saveTexture->UnlockRect(0);
		image->Save(Format("FBO\\%s.png", relativePathname.c_str()));
		SafeRelease(image);
#endif

		// Drop the temporary surface references obtained via GetSurfaceLevel.
		D3DSafeRelease(renderTargetMainSurface);
		D3DSafeRelease(saveTextureMainSurface);
	}
}
// Returns malNoError if successful, or comp_CompileAbort if user aborted prMALError RenderAndWriteVideoFrame( const PrTime videoTime, exDoExportRec *exportInfoP) { csSDK_int32 resultS = malNoError; csSDK_uint32 exID = exportInfoP->exporterPluginID; ExportSettings *mySettings = reinterpret_cast<ExportSettings *>(exportInfoP->privateData); csSDK_int32 rowbytes = 0; csSDK_int32 renderedPixelSize = 0; exParamValues width, height, pixelAspectRatio, fieldType, codecSubType; PrPixelFormat renderedPixelFormat; csSDK_uint32 bytesToWriteLu = 0; char *frameBufferP = NULL, *f32BufferP = NULL, *frameNoPaddingP = NULL, *v410Buffer = NULL; SequenceRender_ParamsRec renderParms; PrPixelFormat pixelFormats[] = {PrPixelFormat_BGRA_4444_8u, PrPixelFormat_BGRA_4444_8u}; renderParms.inRequestedPixelFormatArray = pixelFormats; renderParms.inRequestedPixelFormatArrayCount = 1; mySettings->exportParamSuite->GetParamValue(exID, 0, ADBEVideoWidth, &width); renderParms.inWidth = width.value.intValue; mySettings->exportParamSuite->GetParamValue(exID, 0, ADBEVideoHeight, &height); renderParms.inHeight = height.value.intValue; mySettings->exportParamSuite->GetParamValue(exID, 0, ADBEVideoAspect, &pixelAspectRatio); renderParms.inPixelAspectRatioNumerator = pixelAspectRatio.value.ratioValue.numerator; renderParms.inPixelAspectRatioDenominator = pixelAspectRatio.value.ratioValue.denominator; renderParms.inRenderQuality = kPrRenderQuality_High; mySettings->exportParamSuite->GetParamValue(exID, 0, ADBEVideoFieldType, &fieldType); renderParms.inFieldType = fieldType.value.intValue; // By setting this to false, we basically leave deinterlacing up to the host logic // We could set it to true if we wanted to force deinterlacing renderParms.inDeinterlace = kPrFalse; renderParms.inDeinterlaceQuality = kPrRenderQuality_High; mySettings->exportParamSuite->GetParamValue(exID, 0, ADBEVideoCodec, &codecSubType); switch(codecSubType.value.intValue) { case SDK_8_BIT_RGB: renderParms.inCompositeOnBlack = 
kPrFalse; break; case SDK_10_BIT_YUV: renderParms.inCompositeOnBlack = kPrTrue; renderParms.inRequestedPixelFormatArrayCount = 2; pixelFormats[0] = PrPixelFormat_VUYA_4444_32f; break; } SequenceRender_GetFrameReturnRec renderResult; resultS = mySettings->sequenceRenderSuite->RenderVideoFrame(mySettings->videoRenderID, videoTime, &renderParms, kRenderCacheType_None, // [TODO] Try different settings &renderResult); mySettings->ppixSuite->GetPixels( renderResult.outFrame, PrPPixBufferAccess_ReadOnly, &frameBufferP); mySettings->ppixSuite->GetRowBytes(renderResult.outFrame, &rowbytes); mySettings->ppixSuite->GetPixelFormat(renderResult.outFrame, &renderedPixelFormat); renderedPixelSize = GetPixelFormatSize(renderedPixelFormat); // If user hit cancel if (resultS == suiteError_CompilerCompileAbort) { // Just return the abort } else { // If there is extra row padding, trim it off. // Currently, rows are aligned to 128 bytes for optimizations. // So, for example a width of 320 * 4 bytes per pixel = 1280 = 128 * 10 = no padding // But a width of 720 * 4 bytes per pixel = 2880 = 128 * 22.5 = padding if (rowbytes != width.value.intValue * renderedPixelSize) { frameNoPaddingP = new char[width.value.intValue * height.value.intValue * renderedPixelSize]; RemoveRowPadding( frameBufferP, frameNoPaddingP, rowbytes, renderedPixelSize, width.value.intValue, height.value.intValue); rowbytes = width.value.intValue * renderedPixelSize; frameBufferP = frameNoPaddingP; } if (codecSubType.value.intValue == SDK_10_BIT_YUV) { // Check for returned pixel format and convert if necessary if (renderedPixelFormat == PrPixelFormat_BGRA_4444_8u) { f32BufferP = new char[width.value.intValue * height.value.intValue * GetPixelFormatSize(PrPixelFormat_VUYA_4444_32f)]; ConvertFrom8uTo32f( frameBufferP, f32BufferP, width.value.intValue, height.value.intValue); ConvertFromBGRA32fToVUYA32f(f32BufferP, width.value.intValue, height.value.intValue); frameBufferP = f32BufferP; } v410Buffer = new 
char[width.value.intValue * height.value.intValue * GetPixelFormatSize(codecSubType.value.intValue)]; ConvertFrom32fToV410( frameBufferP, v410Buffer, width.value.intValue, height.value.intValue); rowbytes = width.value.intValue * GetPixelFormatSize(codecSubType.value.intValue); frameBufferP = v410Buffer; } // Save the uncompressed frames to disk if (codecSubType.value.intValue != SDK_RLE) { bytesToWriteLu = rowbytes * height.value.intValue; mySettings->exportFileSuite->Write(exportInfoP->fileObject, frameBufferP, bytesToWriteLu); } /* else (codecSubType.value.intValue == SDK_RLE) { // Currently the SDK import cannot read RLE WriteRLE( reinterpret_cast<long*>(frameBufferP), exportInfoP->outputFileRef, (height.value.intValue * width.value.intValue)); } */ // Write divider between frames bytesToWriteLu = PLUS_LINE_LENGTH; mySettings->exportFileSuite->Write(exportInfoP->fileObject, const_cast<char *>(PLUS_LINE), bytesToWriteLu); // Now that buffer is written to disk, we can dispose of memory mySettings->ppixSuite->Dispose(renderResult.outFrame); if (frameNoPaddingP) { delete(frameNoPaddingP); frameNoPaddingP = NULL; } if (f32BufferP) { delete(f32BufferP); f32BufferP = NULL; } if (v410Buffer) { delete(v410Buffer); v410Buffer = NULL; } } return resultS; }