// Media Foundation source-reader callback: invoked once per captured sample.
// Copies the sample's pixel data into a newly allocated Image, queues it on
// Frames, trims the queue to MaxFrames, and schedules the next asynchronous
// read. Returns hrStatus/any internal failure HRESULT; a failure stops the
// read loop (no further ReadSample is issued).
HRESULT WinCaptureDevice::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
{
	HRESULT hr = S_OK;
	IMFMediaBuffer *pBuffer = NULL;
	// Propagate the reader's own error state first.
	if (FAILED(hrStatus))
		hr = hrStatus;
	if (SUCCEEDED(hr))
	{
		// pSample can legitimately be NULL (e.g. stream tick / gap) — skip copy then.
		if (pSample)
		{
			hr = pSample->GetBufferByIndex(0, &pBuffer);
			if (SUCCEEDED(hr))
			{
				// RAII lock around the 2D buffer; unlocks in its destructor.
				VideoBufferLock vbuffer(pBuffer);
				BYTE* scan0 = NULL;
				LONG stride = 0;
				hr = vbuffer.LockBuffer(InputDefaultStride, InputHeight, &scan0, &stride);
				if (SUCCEEDED(hr))
				{
					Image* img = new Image();
					if (img->Alloc(ImgFmt::RGB8u, InputWidth, InputHeight))
					{
						// Row-by-row copy because source stride and destination
						// stride may differ (and stride can exceed width*bpp).
						// NOTE(review): assumes the native format is 24-bit RGB
						// (InputWidth * 3 bytes per row) — confirm against the
						// negotiated media type.
						byte* lineIn = scan0;
						byte* lineOut = (byte*) img->Scan0;
						for (int iline = 0; iline < (int) InputHeight; iline++)
						{
							memcpy(lineOut, lineIn, InputWidth * 3);
							lineIn += stride;
							lineOut += img->Stride;
						}
						Frames.Add(img);
						// This method of keeping the frame count down is subject to race conditions, but you've already screwed up here by losing frames,
						// so losing even more is not the end of the world.
						// 'delete' is safe to call on null, so we're OK even if the above mentioned race condition catches us.
						while (Frames.Size() > MaxFrames)
							delete NextFrame();
					}
					else
					{
						// Allocation failed — drop this frame rather than fail the stream.
						delete img;
					}
				}
				// Release our reference while vbuffer is still alive.
				// NOTE(review): presumably VideoBufferLock holds its own AddRef
				// on the buffer (as in the MS sample code) so this is safe —
				// verify, otherwise vbuffer's destructor would touch a released object.
				SafeRelease(&pBuffer);
			}
		}
	}
	// Request the next frame.
	if (SUCCEEDED(hr) && EnableCapture == 1)
		hr = Reader->ReadSample((DWORD) MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
	return hr;
}
// Write an image to an OpenEXR file.
//
// @param image_in        Source pixels (interleaved channels, row pitch = image_in.pitch).
// @param fmt             Pangolin pixel format describing channel count/bit depths.
// @param filename        Destination .exr path.
// @param top_line_first  If false, the image is flipped vertically before writing
//                        (EXR stores top line first).
// @throws std::runtime_error if Pangolin was built without OpenEXR support.
void SaveExr(const Image<unsigned char>& image_in, const pangolin::VideoPixelFormat& fmt, const std::string& filename, bool top_line_first)
{
#ifdef HAVE_OPENEXR
    Image<unsigned char> image;

    if(top_line_first) {
        // Shallow view of the caller's buffer — nothing to free later.
        image = image_in;
    }else{
        // Bottom-up source: allocate a temporary and copy rows in reverse order.
        image.Alloc(image_in.w,image_in.h,image_in.pitch);
        for(size_t y=0; y<image_in.h; ++y) {
            std::memcpy(
                image.ptr + y*image.pitch,
                image_in.ptr + (image_in.h-y-1)*image_in.pitch,
                image.pitch
            );
        }
    }

    Imf::Header header (image.w, image.h);
    SetOpenEXRChannels(header.channels(), fmt);

    Imf::OutputFile file (filename.c_str(), header);
    Imf::FrameBuffer frameBuffer;

    int ch=0;
    size_t ch_bits = 0;
    for(Imf::ChannelList::Iterator it = header.channels().begin(); it != header.channels().end(); ++it)
    {
        // Base pointer: start of this channel within the first (interleaved) pixel.
        // xStride must be the FULL pixel stride (all channels), not this channel's
        // width — using channel_bits[ch]/8 scrambled multi-channel output.
        // For single-channel formats fmt.bpp == channel_bits[0], so behavior there
        // is unchanged.
        frameBuffer.insert(
            it.name(),
            Imf::Slice(
                it.channel().type,
                (char*)image.ptr + ch_bits/8,
                fmt.bpp/8,          // xStride: bytes from one pixel to the next
                image.pitch         // yStride: bytes from one row to the next
            )
        );

        ch_bits += fmt.channel_bits[ch++];
    }

    file.setFrameBuffer(frameBuffer);
    file.writePixels(image.h);

    // Only the flipped path allocated its own storage.
    if(!top_line_first) {
        image.Dealloc();
    }
#else
    throw std::runtime_error("EXR Support not enabled. Please rebuild Pangolin.");
#endif // HAVE_OPENEXR
}
size_t IntelH264Decoder::Decode(const void* buf, size_t bufSize) { if (Session == nullptr) return 0; size_t consumed = 0; if (!IsInitialized) { consumed = Reset(buf, bufSize); if (!IsInitialized) return consumed; } bool ateData = true; while (ateData) { ateData = false; mfxFrameSurface1* surf = nullptr; mfxSyncPoint sync = nullptr; mfxBitstream bs = { 0 }; bs.Data = (uint8_t*) buf + consumed; bs.DataLength = bufSize - consumed; bs.MaxLength = bufSize - consumed; mfxStatus err = MFXVideoDECODE_DecodeFrameAsync(Session, &bs, &WorkSurface, &surf, &sync); consumed += bs.DataOffset; if (bs.DataOffset != 0) ateData = true; if (err == MFX_ERR_NONE && surf != nullptr && surf->Info.FourCC == MFX_FOURCC_NV12) { if (surf->Info.FourCC == MFX_FOURCC_NV12) { Image* img = new Image(); if (!img->Alloc(ImgFmt::RGBA8u, VideoParam.mfx.FrameInfo.Width, VideoParam.mfx.FrameInfo.Height)) SXDIE_OOM(); libyuv::NV12ToARGB(surf->Data.Y, surf->Data.PitchLow, surf->Data.UV, surf->Data.PitchLow, (uint8*) img->Scan0, img->Stride, img->Width, img->Height); img->FixBGRA_to_RGBA(); Frames.push_back(img); } else { Log()->Error("Unexpected FourCC from MFXVideoDECODE_DecodeFrameAsync: %x", surf->Info.FourCC); } } } return consumed; //Reset(); /* mfxVideoParam vpar = { 0 }; vpar.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY | MFX_IOPATTERN_OUT_SYSTEM_MEMORY; vpar.mfx.CodecId = MFX_CODEC_AVC; vpar.vpp.Out.Width = 640; vpar.vpp.Out.Height = 480; vpar.vpp.Out.FourCC = MFX_FOURCC_RGB4; err = MFXVideoDECODE_Init(Session, &vpar); if (err != MFX_ERR_NONE) { return false; } return true; */ }