/// Map a PNM magic string to the corresponding Pangolin pixel format:
/// "P5" (binary greymap) -> GRAY8, "P6" (binary pixmap) -> RGB24.
/// @param strType       the two-character PNM magic read from the header
/// @param num_colours   unused; retained for interface compatibility
/// @throws std::runtime_error for any other magic string
VideoPixelFormat PpmFormat(const std::string& strType, int /*num_colours*/)
{
    if(strType == "P5") {
        return VideoFormatFromString("GRAY8");
    }
    if(strType == "P6") {
        return VideoFormatFromString("RGB24");
    }
    throw std::runtime_error("Unsupported PPM/PGM format");
}
/// Translate a Pleora GenICam pixel-format enum entry into a Pangolin
/// pixel format. Only the packed monochrome formats are supported.
/// @param pfmt  the device's PixelFormat GenICam enum node
/// @throws VideoException for any format without a mapping
VideoPixelFormat PleoraFormat(const PvGenEnum* pfmt)
{
    // GetAscii() yields a C string; copy into std::string for comparison.
    const std::string spfmt = pfmt->ToString().GetAscii();

    // operator== replaces the original's !compare() idiom — identical
    // semantics, but the intent (equality test) is explicit.
    if( spfmt == "Mono8" ) {
        return VideoFormatFromString("GRAY8");
    }else if( spfmt == "Mono10p" ) {
        return VideoFormatFromString("GRAY10");
    }else if( spfmt == "Mono12p" ) {
        return VideoFormatFromString("GRAY12");
    }else{
        throw VideoException("Unknown Pleora pixel format", spfmt);
    }
}
/// Wrap a single-stream source, exposing it as two GRAY8 streams (the
/// deinterlaced fields) packed back-to-back in one frame buffer.
/// @param videoin_  the underlying source; ownership is taken (moved from)
/// @throws VideoException if the input does not have exactly one stream
DeinterlaceVideo::DeinterlaceVideo(std::unique_ptr<VideoInterface> &videoin_)
    : videoin(std::move(videoin_)), buffer(0)
{
    if(videoin->Streams().size() != 1)
        throw VideoException("DeinterlaceVideo input must have exactly one stream");

    const StreamInfo& stmin = videoin->Streams()[0];

    // Both output streams share the input's dimensions. The second stream's
    // "pointer" argument is really a byte offset into the shared frame
    // buffer, encoded as offset-from-null (same convention as PangoVideo).
    StreamInfo stm1(VideoFormatFromString("GRAY8"), stmin.Width(), stmin.Height(), stmin.Width(), 0);
    StreamInfo stm2(VideoFormatFromString("GRAY8"), stmin.Width(), stmin.Height(), stmin.Width(), (unsigned char*)0 + stmin.Width()*stmin.Height());
    streams.push_back(stm1);
    streams.push_back(stm2);

    // Scratch buffer large enough for one interlaced input frame.
    // NOTE(review): raw new[] — confirm the destructor delete[]s this.
    buffer = new unsigned char[videoin->SizeBytes()];
}
/// Video "source" backed by a shared-memory region which another process
/// fills with frame data; a single tightly-packed stream is exposed.
/// @param w,h           image dimensions in pixels
/// @param pix_fmt       pixel format name, e.g. "GRAY8" or "RGB24"
/// @param shared_memory region holding one frame of pixel data
/// @param buffer_full   condition used to coordinate frame hand-off
SharedMemoryVideo::SharedMemoryVideo(size_t w, size_t h, std::string pix_fmt,
        const std::shared_ptr<SharedMemoryBufferInterface>& shared_memory,
        const std::shared_ptr<ConditionVariableInterface>& buffer_full) :
    // NOTE(review): _frame_size reads _fmt below; members initialize in class
    // declaration order, not list order — confirm _fmt is declared before
    // _frame_size in the header, otherwise _frame_size reads garbage.
    _fmt(VideoFormatFromString(pix_fmt)),
    _frame_size(w*h*_fmt.bpp/8),
    _shared_memory(shared_memory),
    _buffer_full(buffer_full)
{
    // Rows are tightly packed: pitch is exactly one row's worth of bytes.
    const size_t pitch = w * _fmt.bpp/8;
    // Offset 0: the stream starts at the beginning of the shared buffer.
    const StreamInfo stream(_fmt, w, h, pitch, 0);
    _streams.push_back(stream);
}
/// Synthetic test source: n identical, tightly-packed streams of w x h
/// pixels in the named format.
/// @param w,h      stream dimensions in pixels
/// @param n        number of identical streams to expose
/// @param pix_fmt  pixel format name, e.g. "RGB24"
TestVideo::TestVideo(size_t w, size_t h, size_t n, std::string pix_fmt)
{
    const VideoPixelFormat pfmt = VideoFormatFromString(pix_fmt);

    // All streams are identical, so build the descriptor and per-stream
    // byte count once (loop-invariant) instead of on every iteration.
    const StreamInfo stream_info(pfmt, w, h, (w*pfmt.bpp)/8, 0);
    const size_t stream_bytes = w*h*(pfmt.bpp)/8;

    size_bytes = 0;
    for(size_t c=0; c < n; ++c) {
        streams.push_back(stream_info);
        size_bytes += stream_bytes;
    }
}
/// Derive the Pangolin pixel format of a decoded PNG from its colour type
/// and bit depth as reported by libpng.
/// @throws std::runtime_error for unsupported colour/depth combinations
VideoPixelFormat PngFormat(png_structp png_ptr, png_infop info_ptr )
{
    const png_byte colour = png_get_color_type(png_ptr, info_ptr);
    const png_byte depth  = png_get_bit_depth(png_ptr, info_ptr);

    if( depth == 8 ) {
        switch(colour) {
        case PNG_COLOR_MASK_COLOR:                        // RGB, no alpha
            return VideoFormatFromString("RGB24");
        case PNG_COLOR_MASK_COLOR | PNG_COLOR_MASK_ALPHA: // RGB + alpha
            return VideoFormatFromString("RGBA");
        case PNG_COLOR_MASK_ALPHA:                        // grey + alpha
            return VideoFormatFromString("Y400A");
        default:                                          // plain greyscale
            return VideoFormatFromString("GRAY8");
        }
    }

    // 16-bit is only supported as plain greyscale (colour type 0).
    if( depth == 16 && colour == 0 ) {
        return VideoFormatFromString("GRAY16LE");
    }

    throw std::runtime_error("Unsupported PNG format");
}
int PangoVideo::FindSource() { for(PacketStreamSourceId src_id=0; src_id < reader.Sources().size(); ++src_id) { const PacketStreamSource& src = reader.Sources()[src_id]; try { if( !src.driver.compare(pango_video_type) ) { // Read sources header size_bytes = 0; device_properties = src.info["device"]; const json::value& json_streams = src.info["streams"]; const size_t num_streams = json_streams.size(); for(size_t i=0; i<num_streams; ++i) { const json::value& json_stream = json_streams[i]; StreamInfo si( VideoFormatFromString( json_stream["encoding"].get<std::string>() ), json_stream["width"].get<int64_t>(), json_stream["height"].get<int64_t>(), json_stream["pitch"].get<int64_t>(), (unsigned char*)0 + json_stream["offset"].get<int64_t>() ); size_bytes += si.SizeBytes(); streams.push_back(si); } return src_id; } }catch(...) { pango_print_info("Unable to parse PacketStream Source. File version incompatible.\n"); } } return -1; }
/// Map an OpenNI2 pixel format onto a Pangolin pixel-format name.
/// All depth/shift formats are exposed as 16-bit little-endian grey.
/// @throws VideoException for formats with no mapping
VideoPixelFormat VideoFormatFromOpenNI2(openni::PixelFormat fmt)
{
    std::string pvfmt;
    switch (fmt) {
    case openni::PIXEL_FORMAT_DEPTH_1_MM:   pvfmt = "GRAY16LE"; break;
    case openni::PIXEL_FORMAT_DEPTH_100_UM: pvfmt = "GRAY16LE"; break;
    case openni::PIXEL_FORMAT_SHIFT_9_2:    pvfmt = "GRAY16LE"; break; // ?
    case openni::PIXEL_FORMAT_SHIFT_9_3:    pvfmt = "GRAY16LE"; break; // ?
    case openni::PIXEL_FORMAT_RGB888:       pvfmt = "RGB24";    break;
    case openni::PIXEL_FORMAT_GRAY8:        pvfmt = "GRAY8";    break;
    case openni::PIXEL_FORMAT_GRAY16:       pvfmt = "GRAY16LE"; break;
    case openni::PIXEL_FORMAT_YUV422:       pvfmt = "YUYV422";  break;
// NOTE(review): this '&&' test excludes hypothetical versions like 3.0/3.1
// where MINOR < 2 — confirm whether a plain version-number comparison was
// intended.
#if ONI_VERSION_MAJOR >= 2 && ONI_VERSION_MINOR >= 2
    // NOTE(review): YUYV mapped to "Y400A" (grey+alpha) looks suspicious —
    // "YUYV422" would be expected for a YUYV source; confirm upstream intent.
    case openni::PIXEL_FORMAT_YUYV:         pvfmt = "Y400A";    break;
#endif
    default:
        throw VideoException("Unknown OpenNI pixel format");
        break;
    }
    return VideoFormatFromString(pvfmt);
}
/// Pixel format of a libjpeg-decoded image.
/// TODO: Actually work this out properly.
/// @param info  the decompress state (currently ignored)
VideoPixelFormat JpgFormat(jpeg_decompress_struct& /*info*/ )
{
    // Decoded output is currently assumed to be 24-bit RGB regardless of
    // the source JPEG's colour space.
    const char* const assumed_format = "RGB24";
    return VideoFormatFromString(assumed_format);
}
size_t FirewireVideo::SizeBytes() const { return (Width() * Height() * VideoFormatFromString(PixFormat()).bpp) / 8; }