void H264UserDataTest::TestZeroRawSEI()
{
    printf("H264UserDataTest::TestZeroRawSEI()\n");
    fflush(stdout);

    XIRef<XMemory> data = new XMemory;
    for(uint8_t i = 1; i < 77; ++i)
        data->Append<uint8_t>(0);

    XRef<SEIPayload> payload = new SEIPayload(data);
    CPPUNIT_ASSERT(payload->GetData().Get() == data.Get());
    CPPUNIT_ASSERT(payload->GetUUID() == XUuid("7e0858c4-38fe-48ea-852d-dace39badb30"));

    H264UserData before(payload);
    const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads();
    CPPUNIT_ASSERT(beforePayloads.size() == 1);
    CPPUNIT_ASSERT(beforePayloads.front() == payload);

    XIRef<XMemory> sei = before.GenSEI();
    H264UserData after(sei->begin(), sei->GetDataSize());

    // Check the round-tripped object ("after", not "before"). Parsing the SEI
    // reconstructs the payload objects, so compare by content rather than by
    // pointer identity.
    const std::vector<XRef<SEIPayload> >& afterPayloads = after.GetPayloads();
    CPPUNIT_ASSERT(afterPayloads.size() == 1);
    CPPUNIT_ASSERT(afterPayloads.front()->GetUUID() == payload->GetUUID());
    CPPUNIT_ASSERT(afterPayloads.front()->GetData()->GetDataSize() == data->GetDataSize());
    CPPUNIT_ASSERT(memcmp(afterPayloads.front()->GetData()->Map(), data->Map(), data->GetDataSize()) == 0);
}
AVDeMuxer::AVDeMuxer( XIRef<XSDK::XMemory> buffer, bool annexBFilter ) :
    _fileName(),
    _memoryIOContext( NULL ),
    _storage( new XMemory ),
    _pos( 0 ),
    _context( NULL ),
    _eof( false ),
    _deMuxPkt(),
    _filterPkt(),
    _streamTypes(),
    _videoStreamIndex( STREAM_TYPE_UNKNOWN ),
    _audioPrimaryStreamIndex( STREAM_TYPE_UNKNOWN ),
    _bsfc( (annexBFilter) ? av_bitstream_filter_init( "h264_mp4toannexb" ) : NULL ),
    _pf( new PacketFactoryDefault() )
{
    if( !Locky::IsRegistered() )
        X_THROW(("Please register AVKit::Locky before using this class."));

    _deMuxPkt.size = 0;
    _deMuxPkt.data = NULL;
    _filterPkt.size = 0;
    _filterPkt.data = NULL;

    size_t bufferSize = buffer->GetDataSize();
    _OpenCustomIOContext( buffer->Map(), bufferSize );

    _OpenStreams();
}
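// A minimal usage sketch (assuming Locky is already registered and "movie.mp4"
// exists): demux a file from memory and walk its video packets. The API calls
// below are the ones used elsewhere in this file.
//
//   XIRef<XMemory> buffer = AVDeMuxer::LoadFile( "movie.mp4" );
//   AVDeMuxer dm( buffer, true ); // true => rewrite H.264 video to Annex B
//   int videoStreamIndex = dm.GetVideoStreamIndex();
//   int streamIndex = 0;
//   while( dm.ReadFrame( streamIndex ) )
//   {
//       if( streamIndex != videoStreamIndex )
//           continue;
//       XIRef<Packet> pkt = dm.Get();
//       // ... consume pkt (pkt->Map(), pkt->GetDataSize(), dm.IsKey()) ...
//   }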
XIRef<XDomParserNode> XUTC::ToXML() const
{
    XIRef<XDomParserNode> node = new XDomParserNode;
    node->SetTagName("XUTC");
    return node;
}

XIRef<XDomParserNode> XLocalTime::ToXML() const
{
    XIRef<XDomParserNode> node = new XDomParserNode;
    node->SetTagName("XLocalTime");
    return node;
}
XIRef<XMemory> H264Encoder::GetExtraData() const
{
    XIRef<XMemory> ed = new XMemory( DEFAULT_EXTRADATA_BUFFER_SIZE );

    memcpy( &ed->Extend( _extraData.GetDataSize() ),
            _extraData.Map(),
            _extraData.GetDataSize() );

    return ed;
}
struct StreamStatistics AVDeMuxer::GetVideoStreamStatistics( const XSDK::XString& fileName )
{
    struct StreamStatistics result;

    XStatistics<Average,uint32_t> avgFrameSize;

    uint32_t indexFirstKey = 0;
    bool foundFirstKey = false;
    bool foundGOPSize = false;
    uint32_t currentIndex = 0;

    AVDeMuxer dm( fileName );

    int videoStreamIndex = dm.GetVideoStreamIndex();

    result.frameRate = (((double)1.0) / dm.GetSecondsBetweenFrames( videoStreamIndex ));

    pair<int,int> tb = dm.GetTimeBase( videoStreamIndex );
    result.timeBaseNum = tb.first;
    result.timeBaseDen = tb.second;

    int streamIndex = 0;
    while( dm.ReadFrame( streamIndex ) )
    {
        if( streamIndex != videoStreamIndex )
            continue;

        if( dm.IsKey() )
        {
            if( !foundFirstKey )
            {
                indexFirstKey = currentIndex;
                foundFirstKey = true;
            }
            else if( !foundGOPSize )
            {
                // GOP size == distance (in frames) between the first two key frames.
                result.gopSize = currentIndex - indexFirstKey;
                foundGOPSize = true;
            }
        }

        XIRef<Packet> pkt = dm.Get();
        avgFrameSize.AddSample( pkt->GetDataSize() );

        currentIndex++;
    }

    uint32_t avgSize = 0;
    avgFrameSize.GetResult( avgSize );

    // average bit rate == average frame size (bytes) * frames per second * 8 bits per byte
    result.averageBitRate = (uint32_t)((avgSize * (1.0 / dm.GetSecondsBetweenFrames(videoStreamIndex))) * 8);
    result.numFrames = currentIndex;

    return result;
}
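// To make the averageBitRate arithmetic above concrete (illustrative numbers
// only): a stream averaging 25,000 bytes per frame at 15 fps works out to
// 25000 * 15 * 8 = 3,000,000 bits per second.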
ExportOverlay::ExportOverlay( const XSDK::XString& msg,
                              bool withTime,
                              OverlayHAlign hAlign,
                              OverlayVAlign vAlign,
                              uint16_t width,
                              uint16_t height,
                              int timeBaseNum,
                              int timeBaseDen ) :
    _msg( msg ),
    _decodedMsg(),
    _withTime( withTime ),
    _hAlign( hAlign ),
    _vAlign( vAlign ),
    _width( width ),
    _height( height ),
    _timeBaseNum( timeBaseNum ),
    _timeBaseDen( timeBaseDen ),
    _timePerFrame( ((double)timeBaseNum / timeBaseDen) ),
    _logoX( (uint16_t)((double)_width * 0.79) ),
    _logoY( (uint16_t)((double)_height * 0.92) ),
    _logoWidth( (uint16_t)((double)_width * 0.2) ),
    _logoHeight( (uint16_t)((double)_height * 0.07) ),
    _wmSurface( NULL )
{
    if( !_msg.empty() )
    {
        // The overlay message arrives base64 encoded; decode it once up front.
        XIRef<XSDK::XMemory> decodedBuf = _msg.FromBase64();
        _decodedMsg = XString( (const char*)decodedBuf->Map(), decodedBuf->GetDataSize() );
    }

    X_LOG_NOTICE("watermark: x=%u, y=%u, w=%u, h=%u", _logoX, _logoY, _logoWidth, _logoHeight);

    _wmSurface = cairo_image_surface_create( CAIRO_FORMAT_ARGB32, _logoWidth, _logoHeight );
    if( !_wmSurface )
        X_THROW(("Unable to allocate cairo surface for watermark: _logoWidth = %u, _logoHeight = %u",
                 _logoWidth, _logoHeight));

    cairo_t* wmCr = cairo_create( _wmSurface );
    if( !wmCr )
        X_THROW(("Unable to allocate cairo handle for watermark."));

    // Scale the SVG rendering relative to a 1408x792 reference canvas.
    cairo_scale( wmCr, (double)_width / 1408, (double)_height / 792 );

    GError* err = NULL;
    RsvgHandle* rsvgHandle = rsvg_handle_new_from_file("multisight-logo-white-outline.svg", &err);
    if( !rsvgHandle )
        X_THROW(("Unable to open ms logo from svg for watermark."));

    if( rsvg_handle_render_cairo( rsvgHandle, wmCr ) != TRUE )
        X_THROW(("svg render failed for watermark."));

    g_object_unref(rsvgHandle);

    cairo_destroy( wmCr );
}
XIRef<XDomParserNode> XSimpleTimeZone::ToXML() const
{
    XIRef<XDomParserNode> node = new XDomParserNode;
    node->SetTagName("XSimpleTimeZone");
    node->AddMetaData("utcOffset", XString::FromInt(_utcOffset));
    node->AddMetaData("dstOffset", XString::FromInt(_dstOffset));
    return node;
}
void JPEGEncoder::WriteJPEGFile( const XSDK::XString& fileName, XIRef<Packet> jpeg )
{
    FILE* outFile = fopen( fileName.c_str(), "wb" );
    if( !outFile )
        X_THROW(("Unable to open output file."));

    fwrite( jpeg->Map(), 1, jpeg->GetDataSize(), outFile );

    fclose( outFile );
}
XIRef<XTimeZone> XUTC::Instance()
{
    XGuard lock(_cInstanceLock);
    static XIRef<XTimeZone> instance;
    if( instance.IsEmpty() )
        instance = new XUTC;
    return instance;
}
void AVMuxer::WriteVideoPacket( XIRef<Packet> input, bool keyFrame )
{
    if( _context->pb == NULL )
        _OpenIO();

    if( _isTS )
    {
        if( _numVideoFramesWritten == 0 )
        {
            if( _fileNum == 0 )
            {
                if( avformat_write_header( _context, NULL ) < 0 )
                    X_THROW(("Unable to write header to container."));
            }

            av_opt_set( _context->priv_data, "mpegts_flags", "resend_headers", 0 );
        }
    }
    else
    {
        if( !_oweTrailer )
        {
            if( avformat_write_header( _context, NULL ) < 0 )
                X_THROW(("Unable to write header to container."));

            _oweTrailer = true;
        }
    }

    AVPacket pkt;
    av_init_packet( &pkt );
    pkt.stream_index = _stream->index;
    pkt.data = input->Map();
    pkt.size = input->GetDataSize();
    pkt.pts = _ts;
    pkt.dts = _ts;

    // Convert a tick of 1 from the codec's time_base (e.g. 1/15) to the
    // container's time_base.
    _ts += av_rescale_q(1, _stream->codec->time_base, _stream->time_base);

    pkt.flags |= (keyFrame) ? AV_PKT_FLAG_KEY : 0;

    if( av_interleaved_write_frame( _context, &pkt ) < 0 )
        X_THROW(("Unable to write video frame."));

    _numVideoFramesWritten++;
}
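// To make the rescale above concrete (illustrative numbers): with a codec
// time_base of 1/15 and the standard MPEG-TS container time_base of 1/90000,
// av_rescale_q(1, {1,15}, {1,90000}) == (1/15) / (1/90000) == 6000, so _ts
// advances 6000 container ticks per frame.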
void AVMuxer::SetExtraData( XIRef<XSDK::XMemory> extraData )
{
    if( !(_context->oformat->flags & AVFMT_GLOBALHEADER) )
        X_LOG_INFO("Extradata not required for %s container.", _fileName.c_str());
    else
    {
        _stream->codec->extradata = (uint8_t*)av_mallocz( extraData->GetDataSize() );
        if( !_stream->codec->extradata )
            X_THROW(("Unable to allocate extradata storage."));

        _stream->codec->extradata_size = extraData->GetDataSize();

        memcpy( _stream->codec->extradata, extraData->Map(), extraData->GetDataSize() );
    }
}
XIRef<XTimeZone> XLocalTime::Instance()
{
    XGuard lock(_cInstanceLock);
    static XIRef<XTimeZone> instance;
    if( instance.IsEmpty() )
    {
        instance = new XLocalTime;
#ifndef WIN32
        tzset();
#endif
    }
    return instance;
}
void AVMuxer::FinalizeBuffer( XIRef<XSDK::XMemory> buffer )
{
    if( _location != OUTPUT_LOCATION_BUFFER )
        X_THROW(("Unable to finalize a non buffer IO object."));

    _FinalizeCommon();

    uint8_t* fileBytes = NULL;
    int fileSize = avio_close_dyn_buf( _context->pb, &fileBytes );
    _context->pb = NULL;

    if( fileBytes == NULL || fileSize == 0 )
        X_THROW(("Unable to finalize empty buffer."));

    buffer->ResizeData( fileSize );
    memcpy( buffer->Map(), fileBytes, fileSize );

    av_freep( &fileBytes );
}
void H264UserDataTest::TestEmptySEI()
{
    printf("H264UserDataTest::TestEmptySEI()\n");
    fflush(stdout);

    XRef<SEIPayload> payload = new SEIPayload(XIRef<XMemory>(new XMemory));
    CPPUNIT_ASSERT(payload->GetData()->empty());
    CPPUNIT_ASSERT(payload->GetUUID() == XUuid("7e0858c4-38fe-48ea-852d-dace39badb30"));

    H264UserData before(payload);
    const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads();
    CPPUNIT_ASSERT(beforePayloads.size() == 1);
    CPPUNIT_ASSERT(beforePayloads.front() == payload);

    XIRef<XMemory> sei = before.GenSEI();
    H264UserData after(sei->begin(), sei->GetDataSize());

    // Check the round-tripped object ("after", not "before"). Parsing
    // reconstructs the payload, so compare by content rather than by pointer.
    const std::vector<XRef<SEIPayload> >& afterPayloads = after.GetPayloads();
    CPPUNIT_ASSERT(afterPayloads.size() == 1);
    CPPUNIT_ASSERT(afterPayloads.front()->GetUUID() == payload->GetUUID());
    CPPUNIT_ASSERT(afterPayloads.front()->GetData()->empty());
}
XIRef<XTimeZone> XSimpleTimeZone::FromXML(XIRef<XDomParserNode> node)
{
    const XString utcOffsetStr = node->GetMetaData("utcOffset");
    const XString dstOffsetStr = node->GetMetaData("dstOffset");

    // Both offsets must be non-empty and consist of an optional leading '-'
    // followed by digits only.
    if(node->GetTagName() != "XSimpleTimeZone" ||
       utcOffsetStr.empty() ||
       (!verifyDigit(utcOffsetStr[0]) && utcOffsetStr[0] != '-') ||
       count_if(utcOffsetStr.begin() + 1, utcOffsetStr.end(), verifyDigit) != (int)utcOffsetStr.size() - 1 ||
       dstOffsetStr.empty() ||
       (!verifyDigit(dstOffsetStr[0]) && dstOffsetStr[0] != '-') ||
       count_if(dstOffsetStr.begin() + 1, dstOffsetStr.end(), verifyDigit) != (int)dstOffsetStr.size() - 1)
    {
        return 0;
    }

    const int utcOffset = utcOffsetStr.ToInt();
    const int dstOffset = dstOffsetStr.ToInt();
    return new XSimpleTimeZone(utcOffset, dstOffset);
}
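// For example, a node of the shape produced by XSimpleTimeZone::ToXML() parses
// back through FromXML(). The offset values below are illustrative (their
// units are whatever XSimpleTimeZone stores internally):
//
//   <XSimpleTimeZone utcOffset="-21600" dstOffset="3600"/>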
void H264UserDataTest::TestMultiplePayloads()
{
    printf("H264UserDataTest::TestMultiplePayloads()\n");
    fflush(stdout);

    XIRef<XMemory> data1 = new XMemory;
    for(uint8_t i = 1; i < 77; ++i)
        data1->Append<uint8_t>(0);

    XIRef<XMemory> data2 = new XMemory;
    for(uint8_t i = 219; i > 59; --i)
        data2->Append<uint8_t>(i);

    XRef<SEIPayload> payload1 = new SEIPayload(data1, XUuid("4ae62deb-d9a5-417e-8981-2379b0a756f6"));
    CPPUNIT_ASSERT(payload1->GetData().Get() == data1.Get());
    CPPUNIT_ASSERT(payload1->GetUUID() == XUuid("4ae62deb-d9a5-417e-8981-2379b0a756f6"));

    XRef<SEIPayload> payload2 = new SEIPayload(data2, XUuid("9fc80724-4208-40f7-8d13-b03bfa4140df"));
    CPPUNIT_ASSERT(payload2->GetData() == data2);
    CPPUNIT_ASSERT(payload2->GetUUID() == XUuid("9fc80724-4208-40f7-8d13-b03bfa4140df"));

    vector<XRef<SEIPayload> > payloads;
    payloads.push_back(payload2);
    payloads.push_back(payload1);
    payloads.push_back(payload1);
    payloads.push_back(payload2);

    H264UserData before(payloads);
    const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads();
    CPPUNIT_ASSERT(beforePayloads == payloads);

    XIRef<XMemory> sei = before.GenSEI();
    H264UserData after(sei->begin(), sei->GetDataSize());

    // Check the round-tripped object ("after", not "before"). Parsing
    // reconstructs the payload objects, so compare each one by content.
    const std::vector<XRef<SEIPayload> >& afterPayloads = after.GetPayloads();
    CPPUNIT_ASSERT(afterPayloads.size() == payloads.size());
    for(size_t i = 0; i < payloads.size(); ++i)
    {
        CPPUNIT_ASSERT(afterPayloads[i]->GetUUID() == payloads[i]->GetUUID());
        XIRef<XMemory> expected = payloads[i]->GetData();
        XIRef<XMemory> actual = afterPayloads[i]->GetData();
        CPPUNIT_ASSERT(actual->GetDataSize() == expected->GetDataSize());
        CPPUNIT_ASSERT(memcmp(actual->Map(), expected->Map(), expected->GetDataSize()) == 0);
    }
}
XIRef<XMemory> AVDeMuxer::LoadFile( const XSDK::XString& fileName )
{
    XIRef<XMemory> buffer = new XMemory;

    struct x_file_info fileInfo;
    if( x_stat( fileName, &fileInfo ) < 0 )
        X_THROW(("Unable to stat specified file."));

    FILE* inFile = fopen( fileName.c_str(), "rb" );
    if( !inFile )
        X_THROW(("Unable to open specified file."));

    uint8_t* d = &buffer->Extend( fileInfo._fileSize );

    int itemsRead = fread( d, 1, fileInfo._fileSize, inFile );

    fclose( inFile );

    if( itemsRead != fileInfo._fileSize )
        X_THROW(("Failed to read all of the data from the file."));

    return buffer;
}
XIRef<XMemory> H264Encoder::EncodeYUV420P( XIRef<XMemory> pic, FrameType type )
{
    XIRef<XMemory> frame = new XMemory( DEFAULT_ENCODE_BUFFER_SIZE + DEFAULT_PADDING );

    frame->Extend( DEFAULT_ENCODE_BUFFER_SIZE );

    size_t outputSize = EncodeYUV420P( pic->Map(), frame->Map(), frame->GetDataSize(), type );

    frame->ResizeData( outputSize );

    return frame;
}
void JPEGEncoder::EncodeYUV420P( XIRef<Packet> input )
{
    AVFrame frame;
    avcodec_get_frame_defaults( &frame );

    _output = _pf->Get( DEFAULT_JPEG_ENCODE_BUFFER_SIZE + DEFAULT_PADDING );

    // Wire the three YUV420P planes up to the frame. Note: for widths divisible
    // by four, (width/4)*height is the same byte count as the more conventional
    // (width/2)*(height/2) chroma plane size.
    uint8_t* pic = input->Map();

    frame.data[0] = pic;
    pic += (_context->width * _context->height);
    frame.data[1] = pic;
    pic += ((_context->width/4) * _context->height);
    frame.data[2] = pic;

    frame.linesize[0] = _context->width;
    frame.linesize[1] = (_context->width/2);
    frame.linesize[2] = (_context->width/2);

    int attempt = 0;
    int gotPacket = 0;
    AVPacket pkt;

    do
    {
        av_init_packet( &pkt );
        pkt.data = _output->Map();
        pkt.size = _output->GetBufferSize();

        if( avcodec_encode_video2( _context, &pkt, &frame, &gotPacket ) < 0 )
            X_THROW(("Error while encoding."));

        attempt++;
    } while( gotPacket == 0 && (attempt < _encodeAttempts) );

    _output->SetDataSize( pkt.size );
}
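// YUV420P layout for reference (used by the function above and the transform
// below): a W x H image is stored as a W*H luma (Y) plane followed by two
// (W/2) x (H/2) chroma (U, V) planes. For 640x480 that is 307200 bytes of Y
// plus 76800 bytes each of U and V, 460800 bytes total (1.5 bytes per pixel).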
void YUV420PToARGB24::Transform( XIRef<Packet> input, size_t width, size_t height )
{
    uint8_t* src = input->Map();

    AVFrame frame;
    frame.data[0] = src;
    src += width * height;
    frame.data[1] = src;
    src += ((width/2) * (height/2));
    frame.data[2] = src;

    frame.linesize[0] = width;
    frame.linesize[1] = (width/2);
    frame.linesize[2] = (width/2);

    size_t dataSize = height * (width*4);

    _rgb24 = _pf->Get( dataSize + DEFAULT_PADDING );
    _rgb24->SetDataSize( dataSize );

    AVPicture pict;
    pict.data[0] = _rgb24->Map();
    pict.linesize[0] = width * 4;

    // Recreate the scaler if the input dimensions changed.
    if( (width != _currentWidth) || (height != _currentHeight) )
        _DestroyScaler();

    if( !_scaler )
        _InitScaler( width, height );

    int ret = sws_scale( _scaler, frame.data, frame.linesize, 0, height, pict.data, pict.linesize );
    if( ret < 0 )
        X_THROW(("sws_scale() failed."));
}
XIRef<Packet> AVDeMuxer::Get()
{
    XIRef<Packet> pkt;

    // If the Annex B bitstream filter is active, video packets come from the
    // filtered packet; everything else comes straight from the demuxer.
    if( _bsfc && (_deMuxPkt.stream_index == _videoStreamIndex) )
    {
        pkt = _pf->Get( (size_t)_filterPkt.size + DEFAULT_PADDING );
        pkt->SetDataSize( _filterPkt.size );
        memcpy( pkt->Map(), _filterPkt.data, _filterPkt.size );
    }
    else
    {
        pkt = _pf->Get( (size_t)_deMuxPkt.size + DEFAULT_PADDING );
        pkt->SetDataSize( _deMuxPkt.size );
        memcpy( pkt->Map(), _deMuxPkt.data, _deMuxPkt.size );
    }

    if( IsKey() )
        pkt->SetKey( true );

    return pkt;
}
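// Background on the filter: MP4/MOV files carry H.264 in AVCC form (each NAL
// unit prefixed with its length, SPS/PPS kept in the container's extradata),
// while consumers of raw streams generally expect Annex B form (00 00 00 01
// start codes, parameter sets in-band). The "h264_mp4toannexb" bitstream
// filter set up in the constructor performs that rewrite.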
XIRef<Packet> ExportOverlay::Process( XIRef<Packet> input, int64_t clockTime )
{
    cairo_surface_t* surface = NULL;
    cairo_t* cr = NULL;

    try
    {
        surface = cairo_image_surface_create( CAIRO_FORMAT_ARGB32, _width, _height );
        cr = cairo_create( surface );

        uint8_t* cairoSrc = cairo_image_surface_get_data( surface );
        int cairoSrcWidth = cairo_image_surface_get_width( surface );
        int cairoSrcHeight = cairo_image_surface_get_height( surface );

        if( cairo_image_surface_get_stride( surface ) != (cairoSrcWidth * 4) )
            X_THROW(("Unexpected cairo stride!"));

        cairo_set_source_rgba( cr, 0.0, 0.0, 0.0, 1.0 );
        cairo_rectangle( cr, 0.0, 0.0, cairoSrcWidth, cairoSrcHeight );
        cairo_fill( cr );

        memcpy( cairoSrc, input->Map(), input->GetDataSize() );

        PangoLayout* layout = pango_cairo_create_layout( cr );
        pango_layout_set_text( layout, _decodedMsg.c_str(), -1 );

        PangoFontDescription* desc = pango_font_description_from_string( "Helvetica 22" );
        pango_layout_set_font_description( layout, desc );
        pango_font_description_free( desc );

        PangoRectangle logicalRect;
        pango_layout_get_pixel_extents( layout, NULL, &logicalRect );

        uint16_t y = (_vAlign==V_ALIGN_TOP) ? 14 : _height - 52;

        uint16_t timeX = 0;
        uint16_t msgX = 0;
        uint16_t bgX = 0;
        uint16_t bgWidth = 0;
        _GetXPositions( timeX, msgX, logicalRect.width, bgX, bgWidth );

        cairo_set_source_rgba( cr, 0.5, 0.5, 0.5, 0.50 );
        cairo_rectangle( cr, bgX, y, bgWidth, 32 );
        cairo_fill( cr );

        cairo_set_source_rgba( cr, 1.0, 1.0, 1.0, 1.0 );

        if( !_decodedMsg.empty() )
            _DrawMessage( cr, layout, msgX, y );

        if( _withTime )
            _DrawTime( cr, timeX, y, clockTime );

        g_object_unref( layout );

        // copy from our watermark surface to our output surface...
        cairo_set_source_surface( cr, _wmSurface, _logoX, _logoY );
        cairo_rectangle( cr, _logoX, _logoY, _logoWidth, _logoHeight );
        cairo_clip( cr );
        cairo_paint_with_alpha( cr, 0.70 );

        // Copy data out of our cairo surface into our output packet...
        size_t outputSize = (cairoSrcWidth * 4) * cairoSrcHeight;
        XIRef<Packet> dest = new Packet( outputSize );
        memcpy( dest->Map(), cairoSrc, outputSize );
        dest->SetDataSize( outputSize );

        cairo_destroy( cr );
        cairo_surface_destroy( surface );

        return dest;
    }
    catch(...)
    {
        if( cr )
            cairo_destroy( cr );
        if( surface )
            cairo_surface_destroy( surface );
        throw;
    }
}
Config::Config() :
    _recorderIP( "127.0.0.1" ),
    _recorderPort( 10013 ),
    _logFilePath( "" ),
    _hasDRIEncoding( false ),
    _hasDRIDecoding( false ),
    _transcodeSleep( 0 ),
    _enableDecodeSkipping( false ),
    _cacheLok(),
    _progressCache(10)
{
    if( XPath::Exists( "config.xml" ) )
    {
        XIRef<XDomParser> domParser = new XDomParser;
        domParser->OpenAndSetDocument( "config.xml" );
        XIRef<XDomParserNode> rootNode = domParser->Parse();

        {
            list<XIRef<XDomParserNode> > searchResults = domParser->SearchForAll( "recorder_ip", rootNode );
            if( !searchResults.empty() )
                _recorderIP = searchResults.front()->GetData();
        }

        {
            list<XIRef<XDomParserNode> > searchResults = domParser->SearchForAll( "recorder_port", rootNode );
            if( !searchResults.empty() )
                _recorderPort = searchResults.front()->GetData().ToInt();
        }

        {
            list<XIRef<XDomParserNode> > searchResults = domParser->SearchForAll( "log_file_path", rootNode );
            if( !searchResults.empty() )
                _logFilePath = searchResults.front()->GetData();
        }

        {
            list<XIRef<XDomParserNode> > searchResults = domParser->SearchForAll( "transcode_sleep", rootNode );
            if( !searchResults.empty() )
                _transcodeSleep = searchResults.front()->GetData().ToInt();
        }

        {
            list<XIRef<XDomParserNode> > searchResults = domParser->SearchForAll( "decode_skipping", rootNode );
            if( !searchResults.empty() )
                _enableDecodeSkipping = searchResults.front()->GetData().ToInt() != 0;
        }
    }

    try
    {
#ifndef WIN32
        _hasDRIEncoding = VAH264Encoder::HasHW( "/dev/dri/card0" );
#endif
    }
    catch(...)
    {
        X_LOG_NOTICE("/dev/dri/card0 device not supported for encoding.");
    }

    try
    {
#ifndef WIN32
        _hasDRIDecoding = VAH264Decoder::HasHW( "/dev/dri/card0" );
#endif
    }
    catch(...)
    {
        X_LOG_NOTICE("/dev/dri/card0 device not supported for decoding.");
    }
}
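// Background for the test below: H.264 NAL units may not contain the raw byte
// sequences 00 00 00, 00 00 01, or 00 00 02 (00 00 01 would be mistaken for a
// start code), so encoders insert an "emulation prevention" byte, rewriting
// 00 00 0X as 00 00 03 0X (and escaping a literal 00 00 03 the same way).
// _Nalify is expected to prepend the 00 00 01 start code and the 0x06 (SEI)
// NAL header and apply that escaping; _Denalify reverses it.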
void H264UserDataTest::TestNalify()
{
    printf("H264UserDataTest::TestNalify()\n");
    fflush(stdout);

    XIRef<XMemory> data = new XMemory;
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(3);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(3);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);
    data->Append<uint8_t>(0);

    const size_t expectedSize = 19;
    const uint8_t expected[expectedSize] = { 0, 0, 1, 0x06,   // start code + SEI NAL header
                                             0, 0, 3, 3,
                                             0, 0, 3, 0,
                                             3,
                                             0, 0, 3,
                                             0, 0, 3 };

    XIRef<XMemory> nalified = H264UserData::_Nalify(data);
    CPPUNIT_ASSERT_EQUAL(nalified->GetDataSize(), expectedSize);
    CPPUNIT_ASSERT(memcmp(nalified->Map(), expected, nalified->GetDataSize()) == 0);

    XIRef<XMemory> denalified = H264UserData::_Denalify(nalified->Map(), nalified->GetDataSize());
    CPPUNIT_ASSERT_EQUAL(data->GetDataSize(), denalified->GetDataSize());
    CPPUNIT_ASSERT(memcmp(data->Map(), denalified->Map(), data->GetDataSize()) == 0);
}
void TranscodeExport::Create( XIRef<XMemory> output )
{
    XString tempFileName = _GetTMPName( _fileName );

    // If there is only 1 export in progress (us) but the temp file exists, it means we were
    // interrupted (either a power issue or a segfault) and we should delete the temporary.
    if( _exportsInProgress == 1 )
    {
        if( XPath::Exists(tempFileName) )
            unlink(tempFileName.c_str());
    }

    if( XPath::Exists(tempFileName) )
        X_THROW(("Export in progress exception: %s", tempFileName.c_str()));

    bool outputToFile = output.IsEmpty();

    H264Decoder decoder( GetFastH264DecoderOptions() );
    XRef<YUV420PToARGB24> yuvToARGB = new YUV420PToARGB24;
    XRef<ARGB24ToYUV420P> argbToYUV = new ARGB24ToYUV420P;
    XRef<H264Transcoder> transcoder;
    XRef<H264Encoder> encoder;
    XRef<AVMuxer> muxer;
    XRef<ExportOverlay> ov;

    bool wroteToContainer = false;
    auto lastProgressTime = steady_clock::now();

    // We are going to count how many decoding or encoding exceptions we get... If it
    // ever exceeds some large threshold, we bail on this export.
    int64_t codingExceptions = 0;

    XString recorderURI;
    while( _recorderURLS.GetNextURL( recorderURI ) )
    {
        auto now = steady_clock::now();

        if( wroteToContainer && duration_cast<seconds>(now-lastProgressTime).count() > 2 )
        {
            _progress( _recorderURLS.PercentComplete() );
            lastProgressTime = now;
        }

        try
        {
            XIRef<XMemory> responseBuffer = FRAME_STORE_CLIENT::FetchMedia( _config->GetRecorderIP(),
                                                                            _config->GetRecorderPort(),
                                                                            recorderURI );

            ResultParser resultParser;
            resultParser.Parse( responseBuffer );

            FRAME_STORE_CLIENT::ResultStatistics stats = resultParser.GetStatistics();

            // If we are not provided with a bit rate or a frame rate, use the source's values.
            if( _bitRate == 0 )
                _bitRate = stats.averageBitRate;
            if( _maxRate == 0 )
                _maxRate = 2 * stats.averageBitRate;
            if( _bufSize == 0 )
                _bufSize = 2 * stats.averageBitRate;
            if( _frameRate == 0.0 )
                _frameRate = stats.frameRate;

            // Fix for ffmpeg's inability to make files with fps < 6.0. Don't believe me? Try these
            // 2 commands and play output in vlc:
            //
            // # generate a test movie of the game of life in life.mp4
            // ffmpeg -f lavfi -i life -frames:v 1000 life.mp4
            // # transcode and drop framerate of life.mp4 to 1 fps. output.mp4 won't play in vlc and
            // # will have a weird pause at the beginning for other players.
            // ffmpeg -i life.mp4 -vf fps=fps=1/1 -vcodec h264 output.mp4
            //
            if( _frameRate < 6.0 )
                _frameRate = 6.0;

            int outputTimeBaseNum = 0;
            int outputTimeBaseDen = 0;
            int inputTimeBaseNum = 0;
            int inputTimeBaseDen = 0;

            AVKit::DToQ( (1/stats.frameRate), inputTimeBaseNum, inputTimeBaseDen );
            AVKit::DToQ( (1/_frameRate), outputTimeBaseNum, outputTimeBaseDen );

            if( transcoder.IsEmpty() )
            {
                transcoder = new H264Transcoder( inputTimeBaseNum, inputTimeBaseDen,
                                                 outputTimeBaseNum, outputTimeBaseDen,
                                                 _speed,
                                                 // if our input is key only, enable decode skipping...
                                                 _recorderURLS.KeyFrameOnly() );
            }

            // Note: algebraically, a / (a / (b * s)) == b * s, so secondsPer reduces to the
            // output time base (seconds per output frame) scaled by _speed.
            double secondsPer = AVKit::QToD(inputTimeBaseNum, inputTimeBaseDen) /
                (AVKit::QToD(inputTimeBaseNum, inputTimeBaseDen) /
                 (AVKit::QToD(outputTimeBaseNum, outputTimeBaseDen) * _speed));

            int traversalNum = 0;
            int traversalDen = 0;
            AVKit::DToQ( secondsPer, traversalNum, traversalDen );

            while( !resultParser.EndOfFile() )
            {
                try
                {
                    if( transcoder->Decode( resultParser, decoder ) )
                    {
                        if( encoder.IsEmpty() )
                            _FinishInit( encoder, muxer, decoder, tempFileName, outputToFile,
                                         traversalNum, traversalDen );

                        if( ov.IsEmpty() )
                            ov = new ExportOverlay( _msg,
                                                    _withTime,
                                                    _hAlign,
                                                    _vAlign,
                                                    decoder.GetOutputWidth(),
                                                    decoder.GetOutputHeight(),
                                                    traversalNum,
                                                    traversalDen );

                        yuvToARGB->Transform( decoder.Get(), decoder.GetOutputWidth(), decoder.GetOutputHeight() );

                        XIRef<Packet> rgb = yuvToARGB->Get();

                        XIRef<Packet> withOverlay = ov->Process( rgb, resultParser.GetFrameTS() );

                        argbToYUV->Transform( withOverlay, decoder.GetOutputWidth(), decoder.GetOutputHeight() );

                        transcoder->EncodeYUV420PAndMux( *encoder, *muxer, argbToYUV->Get() );

                        wroteToContainer = true;
                    }
                }
                catch(XException& ex)
                {
                    X_LOG_NOTICE("Coding exception: %s", ex.what());

                    ++codingExceptions;

                    // If we have had a LOT of decoding or encoding exceptions, just give up.
                    if( codingExceptions > 100000 )
                        throw ex;
                }
            }
        }
        catch( XException& ex )
        {
            X_LOG_NOTICE("Exception thrown while processing export. Continuing: %s", ex.what());
        }
    }

    if( wroteToContainer )
        _progress( 1.0 );
    else
        X_STHROW( HTTP404Exception, ("No video was found during entire export."));

    if( outputToFile )
    {
        muxer->FinalizeFile();
        rename( tempFileName.c_str(), _fileName.c_str() );
    }
    else
        muxer->FinalizeBuffer( output );
}
void AVMuxer::WriteVideoFrame( XIRef<XMemory> frame, bool keyFrame )
{
    WriteVideoFrame( frame->Map(), frame->GetDataSize(), keyFrame );
}
XIRef<XDomParserNode> SoapArgs::_GetNode( const XString& path, XIRef<XDomParserNode> parent )
{
    // Split a dotted path (e.g. "foo.bar.alpha") into its head and the rest.
    const size_t dot = path.find('.');
    XString front = path.substr(0, dot);
    XString rest = dot == string::npos ? "" : path.substr(dot + 1);

    // Look for an existing child whose tag name matches the head...
    const list<XIRef<XDomParserNode> >::const_iterator end = parent->GetChildren().end();
    list<XIRef<XDomParserNode> >::const_iterator found = end;

    for(list<XIRef<XDomParserNode> >::const_iterator iter = parent->GetChildren().begin();
        iter != end;
        ++iter)
    {
        if((*iter)->GetTagName() == front)
        {
            found = iter;
            break;
        }
    }

    // ...and recurse into it if there is more path left.
    if(found != end)
        return rest.empty() ? *found : _GetNode(rest, *found);

    // Otherwise, create the missing child and keep descending.
    XIRef<XDomParserNode> node(new XDomParserNode(XString::Format("<%s>", front.c_str())));
    parent->AppendChild(node);

    if(!rest.empty())
        return _GetNode(rest, node);

    return node;
}
void SoapArgs::AddAttribute( const XString& path, const XString& name, const XString& val )
{
    XIRef<XDomParserNode> node = _GetNode(path, _fakeRoot);
    node->AddMetaData(name, val);
}

void SoapArgs::AddArg( const XString& path, const XString& val )
{
    XIRef<XDomParserNode> node = _GetNode(path, _fakeRoot);
    node->SetData(val);
}
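// A minimal usage sketch (tag names and values are illustrative): dotted paths
// create nested elements on demand via _GetNode, so
//
//   SoapArgs args;
//   args.AddArg( "foo.bar.alpha", "42" );
//   args.AddAttribute( "foo.bar", "type", "xsd:int" );
//
// builds a tree under the fake root roughly equivalent to
//
//   <foo><bar type="xsd:int"><alpha>42</alpha></bar></foo>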