void VDCPUTest() { SYSTEM_INFO si; long lEnableFlags = CPUCheckForExtensions(); GetSystemInfo(&si); if (si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_INTEL) if (si.wProcessorLevel < 4) lEnableFlags &= ~CPUF_SUPPORTS_FPU; // Not strictly true, but very slow anyway // Enable FPU support... CPUEnableExtensions(lEnableFlags); VDFastMemcpyAutodetect(); }
// Returns the cached CPU extension flag set. Detection runs exactly
// once, on the first call, via the function-local static initializer.
int GetCPUFlags() {
	static const int sCachedCPUFlags = CPUCheckForExtensions();

	return sCachedCPUFlags;
}
// Interactive test: loads a user-chosen video file, quantizes each frame
// to a fixed 252-color (6x7x6 RGB levels) palette with ordered dithering,
// and writes the result as an 8-bit palettized AVI.
void VDCreateTestPal8Video(VDGUIHandle h) {
	CPUEnableExtensions(CPUCheckForExtensions());

	try {
		// Ask the user for a source file, offering all registered video
		// input drivers in the file filter.
		tVDInputDrivers inputDrivers;
		std::vector<int> xlat;
		VDGetInputDrivers(inputDrivers, IVDInputDriver::kF_Video);

		const VDStringW filter(VDMakeInputDriverFileFilter(inputDrivers, xlat));

		const VDFileDialogOption opt[]={
			{ VDFileDialogOption::kSelectedFilter },
			0
		};

		int optval[1]={0};

		const VDStringW srcfile(VDGetLoadFileName('pl8s', h, L"Choose source file", filter.c_str(), NULL, opt, optval));
		if (srcfile.empty())
			return;

		// Map the selected filter index back to an input driver; a negative
		// translated index means "autodetect from the file contents."
		// NOTE(review): assumes optval[0] >= 1 -- xlat[-1] would be out of
		// bounds if the dialog reports filter index 0. Confirm.
		IVDInputDriver *pDrv;
		int filtidx = xlat[optval[0] - 1];

		if (filtidx < 0)
			pDrv = VDAutoselectInputDriverForFile(srcfile.c_str(), IVDInputDriver::kF_Video);
		else {
			tVDInputDrivers::iterator itDrv(inputDrivers.begin());
			std::advance(itDrv, filtidx);
			pDrv = *itDrv;
		}

		vdrefptr<InputFile> pIF(pDrv->CreateInputFile(0));
		pIF->Init(srcfile.c_str());

		const VDStringW dstfile(VDGetSaveFileName('pl8d', h, L"Choose destination 8-bit file", L"Audio-video interleaved (*.avi)\0*.avi\0All files\0*.*", L"avi", NULL, NULL));
		if (dstfile.empty())
			return;

		// Pull the first video stream and force decoding to 32-bit XRGB so
		// the palettizer below can assume 4 bytes per source pixel.
		vdrefptr<IVDVideoSource> pVS;
		pIF->GetVideoSource(0, ~pVS);

		IVDStreamSource *pVSS = pVS->asStream();
		const VDPosition frames = pVSS->getLength();

		if (!pVS->setTargetFormat(nsVDPixmap::kPixFormat_XRGB8888))
			throw MyError("Cannot set decompression format to 32-bit.");

		vdautoptr<IVDMediaOutputAVIFile> pOut(VDCreateMediaOutputAVIFile());
		IVDMediaOutputStream *pVSOut = pOut->createVideoStream();

		const VDPixmap& pxsrc = pVS->getTargetFormat();
		// 8-bit DIB rows are padded to a multiple of 4 bytes.
		const uint32 rowbytes = (pxsrc.w+3) & ~3;

		// Stream header: timing copied from the source stream; 'sdiv' is
		// 'vids' stored byte-reversed.
		AVIStreamHeader_fixed hdr;
		hdr.fccType = 'sdiv';
		hdr.fccHandler = 0;
		hdr.dwFlags = 0;
		hdr.wPriority = 0;
		hdr.wLanguage = 0;
		hdr.dwScale = pVSS->getStreamInfo().dwScale;
		hdr.dwRate = pVSS->getStreamInfo().dwRate;
		hdr.dwStart = 0;
		hdr.dwLength = 0;
		hdr.dwInitialFrames = 0;
		hdr.dwSuggestedBufferSize = 0;
		hdr.dwQuality = -1;
		hdr.dwSampleSize = 0;
		hdr.rcFrame.left = 0;
		hdr.rcFrame.top = 0;
		hdr.rcFrame.right = (short)pxsrc.w;
		hdr.rcFrame.bottom = (short)pxsrc.h;
		pVSOut->setStreamInfo(hdr);

		// 8-bit DIB format with a 252-entry palette appended to the header.
		vdstructex<BITMAPINFOHEADER> bih;
		bih.resize(sizeof(BITMAPINFOHEADER) + sizeof(RGBQUAD)*252);
		bih->biSize = sizeof(BITMAPINFOHEADER);
		bih->biWidth = pxsrc.w;
		bih->biHeight = pxsrc.h;
		bih->biPlanes = 1;
		bih->biBitCount = 8;
		bih->biCompression = BI_RGB;
		bih->biSizeImage = rowbytes*pxsrc.h;
		bih->biXPelsPerMeter = 0;
		bih->biYPelsPerMeter = 0;
		bih->biClrUsed = 252;
		bih->biClrImportant = 252;

		// Build a uniform 6 (red) x 7 (green) x 6 (blue) color cube:
		// palette index = r*42 + g*6 + b.
		RGBQUAD *pal = (RGBQUAD *)((char *)bih.data() + sizeof(BITMAPINFOHEADER));
		for(int i=0; i<252; ++i) {
			pal[i].rgbRed = (BYTE)((i/42)*51);
			pal[i].rgbGreen = (BYTE)((((i/6)%7)*85)>>1);
			pal[i].rgbBlue = (BYTE)((i%6)*51);
			pal[i].rgbReserved = 0;
		}

		pVSOut->setFormat(bih.data(), bih.size());

		pOut->init(dstfile.c_str());

		ProgressDialog dlg((HWND)h, "Processing video stream", "Palettizing frames", (long)frames, true);

		vdblock<uint8> outbuf(rowbytes * pxsrc.h);
		const vdpixsize w = pxsrc.w;
		const vdpixsize h = pxsrc.h;	// note: shadows the VDGUIHandle parameter from here on

		try {
			for(uint32 frame=0; frame<frames; ++frame) {
				pVS->getFrame(frame);

				const uint8 *src = (const uint8 *)pxsrc.data;
				ptrdiff_t srcpitch = pxsrc.pitch;
				// Bottom-up DIB: fill the last output row first and walk up.
				uint8 *dst = &outbuf[rowbytes * (pxsrc.h - 1)];

				for(int y=0; y<h; ++y) {
					const uint8 *dr = ditherred[y & 15];
					const uint8 *dg = dithergrn[y & 15];
					const uint8 *db = ditherblu[y & 15];

					for(int x=0; x<w; ++x) {
						// Scale each 0..255 channel to its level count
						// (1286/65536 ~= 5/255 for red/blue, 1543/65536 ~= 6/255
						// for green), add an ordered-dither threshold, truncate.
						// NOTE(review): the red and blue dither rows look swapped
						// here (dr offsets the blue channel, db the red) --
						// confirm whether this is intentional.
						const uint8 b = (uint8)((((src[0] * 1286)>>8) + dr[x&15]) >> 8);
						const uint8 g = (uint8)((((src[1] * 1543)>>8) + dg[x&15]) >> 8);
						const uint8 r = (uint8)((((src[2] * 1286)>>8) + db[x&15]) >> 8);
						src += 4;

						dst[x] = (uint8)(r*42 + g*6 + b);
					}

					vdptrstep(dst, -(ptrdiff_t)rowbytes);
					vdptrstep(src, srcpitch - w*4);
				}

				pVSOut->write(AVIOutputStream::kFlagKeyFrame, outbuf.data(), outbuf.size(), 1);
				dlg.advance(frame);
				dlg.check();
			}
		} catch(const MyUserAbortError&) {
			// User canceled via the progress dialog; fall through and
			// finalize what was written so far.
		}

		pVSOut->flush();
		pOut->finalize();
	} catch(const MyError& e) {
		e.post((HWND)h, g_szError);
	}
}
// Interactive test: builds a two-node video filter graph (MPEG-1 input ->
// resize adapter) driven by a background scheduler thread, and displays
// frames requested along an oscillating timeline in a popup window.
// NOTE(review): reads a hard-coded local media path -- this test only
// works on a machine where that file exists.
void VDTestVideoFilters() {
	CPUEnableExtensions(CPUCheckForExtensions());
	VDFastMemcpyAutodetect();
	VDRegisterVideoDisplayControl();

	// 1024x768 borderless popup hosting the video display control.
	HWND hwndDisp = CreateWindow(VIDEODISPLAYCONTROLCLASS, "Kasumi onee-sama", WS_VISIBLE|WS_POPUP, 0, 0, 1024, 768, NULL, NULL, GetModuleHandle(NULL), NULL);

	IVDVideoDisplay *pDisp = VDGetIVideoDisplay(hwndDisp);

	// Open the source file through the internal MPEG-1 input driver.
	IVDInputDriver *pInputDriver = VDGetInputDriverByName(L"MPEG-1 input driver (internal)");
	InputFile *pFile = pInputDriver->CreateInputFile(0);
	pFile->Init(L"e:\\anime\\Vandread OP - Trust.mpg");

	vdrefptr<IVDVideoSource> pSource;
	pFile->GetVideoSource(0, ~pSource);
	pSource->setDecompressedFormat(32);

//	VBitmap src(pSource->getFrameBuffer(), pSource->getDecompressedFormat());
//	pDisp->SetSourcePersistent(VDAsPixmap(src));

	VDPosition len = pSource->asStream()->getLength();

	vdautoptr<IVDVideoFilterSystem> pfiltsys(VDCreateVideoFilterSystem());

	VDScheduler scheduler;

	// Pumps the filter scheduler on a background thread until Stop() is
	// called; sleeps briefly whenever the scheduler reports no work.
	class SchedulerThread : public VDThread {
	public:
		SchedulerThread(VDScheduler& s) : VDThread("Video filter thread"), mScheduler(s), mbRunning(true) {}
		~SchedulerThread() {
			Stop();
		}

		void ThreadRun() {
			while(mbRunning) {
				if (!mScheduler.Run())
					Sleep(1);	// idle -- no scheduled work pending
			}
		}

		void Stop() {
			mbRunning = false;
			ThreadWait();
		}

	protected:
		VDScheduler& mScheduler;
		VDAtomicInt mbRunning;
	} schthread(scheduler);

	schthread.ThreadStart();

	try {
		pfiltsys->SetScheduler(&scheduler);

		// Graph: source input filter -> resize filter (via adapter).
		IVDVideoFilterInstance *pInputFilter = pfiltsys->CreateFilter(&vpluginDef_input, pSource);
		IVDVideoFilterInstance *pFilter = pfiltsys->CreateFilter(&vpluginDef_adapter, &filterDef_resize);
//		IVDVideoFilterInstance *pFilter = pfiltsys->CreateFilter(&vpluginDef_avsadapter, L"c:\\avsfiltsrc\\debug\\tweak.dll");
		pfiltsys->Connect(pInputFilter, pFilter, 0);
		pFilter->Config((VDGUIHandle)hwndDisp);
		pfiltsys->Prepare();

		// Resize the window to the filter's output frame size.
		const VDPixmap& pxf = pFilter->GetFormat();
		SetWindowPos(hwndDisp, NULL, 0, 0, pxf.w, pxf.h, SWP_NOMOVE|SWP_NOACTIVATE|SWP_NOZORDER);

		pfiltsys->Start();

//		for(VDPosition i=0; i<len; ++i) {
		double t = 0;
		for(;;) {
			if (!pump())
				break;

			// Oscillate the requested frame number over roughly 0..16
			// rather than playing linearly.
			VDPosition i = (VDPosition)(8.0 * (1.0 + sin(t)));

			IVDVideoFrameRequest *pReq = pFilter->RequestFrame(i, NULL, 0);

			// Busy-wait (with 1ms sleeps) for the scheduler thread to
			// finish producing the frame.
			while(!pReq->IsReady())
				Sleep(1);

			VDVideoFilterFrame *pFrame = pReq->GetFrame();

			pDisp->SetSource(true, *pFrame->mpPixmap);

			pReq->Release();
			Sleep(30);
			t += 0.1;
		}

		pfiltsys->Stop();
		pfiltsys->Clear();
	} catch(const MyError& e) {
		e.post(NULL, "shimatta...");
	}
}
int wmain(int argc, wchar_t **argv) { _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_CHECK_ALWAYS_DF | _CRTDBG_LEAK_CHECK_DF); wprintf(L"VirtualDub test harness utility for " BUILD L"\n"); wprintf(L"Copyright (C) 2005-2008 Avery Lee. Licensed under GNU General Public License\n\n"); Tests selectedTests; if (argc <= 1) { help(); exit(0); } else { for(int i=1; i<argc; ++i) { const wchar_t *test = argv[i]; if (!_wcsicmp(test, L"all")) { for(Tests::const_iterator it(g_tests.begin()), itEnd(g_tests.end()); it!=itEnd; ++it) { const TestInfo& ent = *it; if (ent.mbAutoRun) selectedTests.push_back(ent); } break; } for(Tests::const_iterator it(g_tests.begin()), itEnd(g_tests.end()); it!=itEnd; ++it) { const TestInfo& ent = *it; if (!_wcsicmp(VDTextAToW(ent.mpName).c_str(), test)) { selectedTests.push_back(ent); goto next; } } wprintf(L"\nUnknown test: %ls\n", test); help(); exit(5); next: ; } } long exts = CPUCheckForExtensions(); int failedTests = 0; for(;;) { CPUEnableExtensions(exts); wprintf(L"Setting CPU extensions: %08x\n", exts); for(Tests::const_iterator it(selectedTests.begin()), itEnd(selectedTests.end()); it!=itEnd; ++it) { const Tests::value_type& ent = *it; wprintf(L"Running test: %hs\n", ent.mpName); try { ent.mpTestFn(); } catch(const AssertionException& e) { wprintf(L" TEST FAILED: %hs\n", e.gets()); ++failedTests; } } if (!exts) break; exts &= ~(1 << VDFindHighestSetBitFast(exts)); } return failedTests; }
// Interactive test: generates a procedural test pattern, then continuously
// Lanczos3-resamples it into a bouncing rectangle on a 1024x768 display
// window, logging blits/sec once per second until the message pump exits.
void VDTestPixmaps() {
	CPUEnableExtensions(CPUCheckForExtensions());
	VDFastMemcpyAutodetect();
	VDRegisterVideoDisplayControl();

	HWND hwndDisp = CreateWindow(VIDEODISPLAYCONTROLCLASS, "Kasumi onee-sama", WS_VISIBLE|WS_POPUP, 0, 0, 1024, 768, NULL, NULL, GetModuleHandle(NULL), NULL);

	IVDVideoDisplay *pDisp = VDGetIVideoDisplay(hwndDisp);

	const int srcw = 80;
	const int srch = 60;

	VDPixmapBuffer image(srcw, srch, nsVDPixmap::kPixFormat_XRGB8888);

	// Radial sine-ring pattern, tinted by position, with every other
	// pixel (checkerboard) knocked out to black.
	for(int y=0; y<srch; ++y) {
		for(int x=0; x<srcw; ++x) {
			int x2 = x - (srcw>>1);
			int y2 = y - (srch>>1);
			uint32 v = (int)((1.0 + sin((x2*x2 + y2*y2) / 50.0)) * 255.0 / 2.0 + 0.5);
			uint32 r = (255-v)<<16;

			if ((x^y)&1)
				v = r = 0;

			((uint32 *)((char *)image.data + image.pitch * y))[x] = (v*x/srcw) + (((v*y)/srch)<<8) + r;
		}
	}

	VDPixmapBuffer sprite(srcw, srch, nsVDPixmap::kPixFormat_XRGB8888);
	VDPixmapBuffer buffer(1024, 768, nsVDPixmap::kPixFormat_XRGB8888);

	VDPixmapBlt(sprite, image);

	pDisp->SetSourcePersistent(true, buffer);

	// Two bouncing points define opposite corners of the destination
	// rectangle; positions are in 16.16 fixed point.
	bouncer p1(-64, -48, 1024+64, 768+48, 1.0);
	bouncer p2(-64, -48, 1024+64, 768+48, 0.5);

	// High-resolution timer state for the once-per-second blit counter.
	sint64 freq;
	QueryPerformanceFrequency((LARGE_INTEGER *)&freq);
	sint64 start;
	QueryPerformanceCounter((LARGE_INTEGER *)&start);
	int blits = 0;

	double th = 0;

	VDPixmapTextureMipmapChain mipchain(sprite);

	vdautoptr<IVDPixmapResampler> pResampler(VDCreatePixmapResampler());

	while(pump()) {
		int x1 = p1.xposf();
		int y1 = p1.yposf();
		int x2 = p2.xposf();
		int y2 = p2.yposf();

//		VDPixmapBlt(buffer, xp, yp, image, 0, 0, 320, 240);
//		VDPixmapStretchBltNearest(buffer, x1, y1, x2, y2, sprite, -32<<16, -32<<16, (srcw+32)<<16, (srch+32)<<16);
//		VDPixmapStretchBltBilinear(buffer, x1, y1, x2, y2, sprite, 0, 0, srcw<<16, srch<<16);

		// Convert 16.16 fixed point to double and normalize the corners
		// so (fx1,fy1) is top-left and (fx2,fy2) bottom-right.
		double fx1 = x1 / 65536.0;
		double fy1 = y1 / 65536.0;
		double fx2 = x2 / 65536.0;
		double fy2 = y2 / 65536.0;

		if (fx2 < fx1)
			std::swap(fx1, fx2);
		if (fy2 < fy1)
			std::swap(fy1, fy2);

		// Re-init each frame since the destination size changes as the
		// corners move.
		pResampler->Init(fx2-fx1, fy2-fy1, buffer.format, sprite.w, sprite.h, sprite.format, IVDPixmapResampler::kFilterLanczos3, IVDPixmapResampler::kFilterLanczos3, false);
		pResampler->Process(&buffer, fx1, fy1, fx2, fy2, &sprite, 0, 0);

#if 0
		// Disabled: rotating textured-quad path through the triangle blitter.
		float mx[16]={1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1};

		mx[0] = cos(th) / 512.0f;
		mx[1] = sin(th) / 512.0f;
		mx[5] = cos(th) / 384.0f;
		mx[4] = -sin(th) / 384.0f;
		mx[13] = -6.0f / 384.0f;
		mx[15] = 1.0f;

		VDTriBltVertex vx[4]={
			{ -100, -100, 0, 0, 0 },
			{ +100, -100, 0, 0, 60 },
			{ +100, +100, 0, 80, 60 },
			{ -100, +100, 0, 80, 0 },
		};
		const int idx[6]={0,1,2,0,2,3};

		VDPixmap buffer_cropped(VDPixmapOffset(buffer, 160, 120));

		buffer_cropped.w -= 320;
		buffer_cropped.h -= 240;

		VDPixmapTriBlt(buffer_cropped, mipchain.Mips(), mipchain.Levels(), vx, 4, idx, 6, kTriBltFilterTrilinear, 0.0f, mx);

		th += 0.01;
#endif

		pDisp->Update();
		++blits;

		p1.advance();
		p2.advance();

		// Report and reset the blit count once per second.
		sint64 last;
		QueryPerformanceCounter((LARGE_INTEGER *)&last);
		if (last-start >= freq) {
			start += freq;
			VDDEBUG2("%d blits/sec\n", blits);
			blits = 0;
		}
	}
}