// Attach codec extradata (e.g. H.264 SPS/PPS) to the output stream, but only
// for containers that carry global headers (AVFMT_GLOBALHEADER); stream
// formats like mpegts carry parameter sets in-band and need none.
void AVMuxer::SetExtraData( XIRef<XSDK::XMemory> extraData )
{
    if( !(_context->oformat->flags & AVFMT_GLOBALHEADER) )
        X_LOG_INFO("Extradata not required for %s container.",_fileName.c_str());
    else
    {
        // Free any previously-set extradata so repeated calls don't leak.
        if( _stream->codec->extradata )
            av_free( _stream->codec->extradata );

        size_t extraDataSize = extraData->GetDataSize();

        // FFmpeg requires extradata to be allocated with
        // FF_INPUT_BUFFER_PADDING_SIZE zeroed bytes beyond extradata_size;
        // av_mallocz() zero-fills, satisfying the padding-content requirement.
        _stream->codec->extradata = (uint8_t*)av_mallocz( extraDataSize + FF_INPUT_BUFFER_PADDING_SIZE );
        if( !_stream->codec->extradata )
            X_THROW(("Unable to allocate extradata storage."));

        _stream->codec->extradata_size = extraDataSize;

        memcpy( _stream->codec->extradata, extraData->Map(), extraDataSize );
    }
}
/// @brief Construct a demuxer that reads from an in-memory buffer rather than a file.
/// @param buffer        Backing memory containing the muxed stream; mapped directly
///                      (not copied), so it must outlive this object — TODO confirm
///                      against _OpenCustomIOContext's behavior.
/// @param annexBFilter  When true, installs the "h264_mp4toannexb" bitstream filter
///                      so demuxed H.264 is converted to Annex-B start-code format.
/// @throws if AVKit::Locky (ffmpeg lock manager registration) has not been set up.
AVDeMuxer::AVDeMuxer( XIRef<XSDK::XMemory> buffer, bool annexBFilter ) :
    _fileName(),
    _memoryIOContext( NULL ),
    _storage( new XMemory ),
    _pos( 0 ),
    _context( NULL ),
    _eof( false ),
    _deMuxPkt(),
    _filterPkt(),
    _streamTypes(),
    // NOTE(review): stream indexes are initialized with STREAM_TYPE_UNKNOWN,
    // a stream-type constant — presumably doubles as an "unset" sentinel; verify.
    _videoStreamIndex( STREAM_TYPE_UNKNOWN ),
    _audioPrimaryStreamIndex( STREAM_TYPE_UNKNOWN ),
    _bsfc( (annexBFilter)? av_bitstream_filter_init( "h264_mp4toannexb" ) : NULL ),
    _pf( new PacketFactoryDefault() )
{
    if( !Locky::IsRegistered() )
        X_THROW(("Please register AVKit::Locky before using this class."));

    // Mark both packets empty so the first read/filter pass doesn't touch
    // uninitialized data pointers.
    _deMuxPkt.size = 0;
    _deMuxPkt.data = NULL;
    _filterPkt.size = 0;
    _filterPkt.data = NULL;

    // Wire ffmpeg's IO to the caller's memory, then probe/open the streams.
    size_t bufferSize = buffer->GetDataSize();
    _OpenCustomIOContext( buffer->Map(), bufferSize );
    _OpenStreams();
}
void H264UserDataTest::TestZeroRawSEI() { printf("H264UserDataTest::TestZeroRawSEI()\n"); fflush(stdout); XIRef<XMemory> data = new XMemory; for(uint8_t i = 1; i < 77; ++i) data->Append<uint8_t>(0); XRef<SEIPayload> payload = new SEIPayload(data); CPPUNIT_ASSERT(payload->GetData().Get() == data.Get()); CPPUNIT_ASSERT(payload->GetUUID() == XUuid("7e0858c4-38fe-48ea-852d-dace39badb30")); H264UserData before(payload); const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads(); CPPUNIT_ASSERT(beforePayloads.size() == 1); CPPUNIT_ASSERT(beforePayloads.front() == payload); XIRef<XMemory> sei = before.GenSEI(); H264UserData after(sei->begin(), sei->GetDataSize()); const std::vector<XRef<SEIPayload> >& afterPayloads = before.GetPayloads(); CPPUNIT_ASSERT(afterPayloads.size() == 1); CPPUNIT_ASSERT(afterPayloads.front().Get() == payload.Get()); }
/// @brief Single-pass scan of a file's video stream collecting basic statistics:
///        frame rate, time base, GOP size (distance between first two keyframes),
///        average bit rate, and total frame count.
/// @param fileName Path to the container to demux.
/// @return Populated StreamStatistics. NOTE(review): gopSize is only assigned
///         when a second keyframe is seen — presumably the struct default-initializes
///         it; verify for streams with a single GOP.
struct StreamStatistics AVDeMuxer::GetVideoStreamStatistics( const XSDK::XString& fileName )
{
    struct StreamStatistics result;

    XStatistics<Average,uint32_t> avgFrameSize;

    // Keyframe bookkeeping: GOP size is measured as the frame-index distance
    // between the first and second keyframes.
    uint32_t indexFirstKey = 0;
    bool foundFirstKey = false;
    bool foundGOPSize = false;
    uint32_t currentIndex = 0;

    AVDeMuxer dm( fileName );

    int videoStreamIndex = dm.GetVideoStreamIndex();

    // Frame rate is the reciprocal of the per-frame duration reported by the demuxer.
    result.frameRate = (((double)1.0) / dm.GetSecondsBetweenFrames( videoStreamIndex ));

    pair<int,int> tb = dm.GetTimeBase( videoStreamIndex );
    result.timeBaseNum = tb.first;
    result.timeBaseDen = tb.second;

    // ReadFrame reports (via out-param) which stream the frame belongs to;
    // non-video frames are skipped without counting.
    int streamIndex = 0;
    while( dm.ReadFrame( streamIndex ) )
    {
        if( streamIndex != videoStreamIndex )
            continue;

        if( dm.IsKey() )
        {
            if( !foundFirstKey )
            {
                indexFirstKey = currentIndex;
                foundFirstKey = true;
            }
            else
            {
                // Second keyframe: the index gap is the GOP size. Only measured once.
                if( !foundGOPSize )
                {
                    result.gopSize = currentIndex - indexFirstKey;
                    foundGOPSize = true;
                }
            }
        }

        XIRef<Packet> pkt = dm.Get();
        avgFrameSize.AddSample( pkt->GetDataSize() );

        currentIndex++;
    }

    uint32_t avgSize = 0;
    avgFrameSize.GetResult( avgSize );

    // Average bit rate = mean frame size (bytes) * frames-per-second * 8 bits.
    result.averageBitRate = (uint32_t)((avgSize * (1.0 / dm.GetSecondsBetweenFrames(videoStreamIndex))) * 8);
    result.numFrames = currentIndex;

    return result;
}
/// @brief Builds an overlay renderer for exported video: caches geometry for a
///        corner watermark and pre-renders the logo SVG into a cairo surface.
/// @param msg          Base64-encoded overlay message (decoded here; empty = none).
/// @param withTime     Whether a timestamp is drawn alongside the message.
/// @param hAlign/vAlign  Overlay placement within the frame.
/// @param width/height Output frame dimensions in pixels.
/// @param timeBaseNum/timeBaseDen  Stream time base (seconds per tick = num/den).
/// @throws on cairo/rsvg allocation or render failure.
ExportOverlay::ExportOverlay( const XSDK::XString& msg,
                              bool withTime,
                              OverlayHAlign hAlign,
                              OverlayVAlign vAlign,
                              uint16_t width,
                              uint16_t height,
                              int timeBaseNum,
                              int timeBaseDen ) :
    _msg( msg ),
    _decodedMsg(),
    _withTime( withTime ),
    _hAlign( hAlign ),
    _vAlign( vAlign ),
    _width( width ),
    _height( height ),
    _timeBaseNum( timeBaseNum ),
    _timeBaseDen( timeBaseDen ),
    _timePerFrame( ((double)timeBaseNum / timeBaseDen) ),
    // Watermark box: bottom-right region, proportional to the frame size.
    _logoX( (uint16_t)((double)_width * 0.79) ),
    _logoY( (uint16_t)((double)_height * 0.92) ),
    _logoWidth( (uint16_t)((double)_width * 0.2) ),
    _logoHeight( (uint16_t)((double)_height * 0.07) ),
    _wmSurface( NULL )
{
    if( !_msg.empty() )
    {
        XIRef<XSDK::XMemory> decodedBuf = _msg.FromBase64();
        _decodedMsg = XString( (const char*)decodedBuf->Map(), decodedBuf->GetDataSize() );
    }

    X_LOG_NOTICE("watermark: x=%u, y=%u, w=%u, h=%u", _logoX, _logoY, _logoWidth, _logoHeight);

    _wmSurface = cairo_image_surface_create( CAIRO_FORMAT_ARGB32, _logoWidth, _logoHeight );
    if( !_wmSurface )
        X_THROW(("Unable to allocate cairo surface for watermark: _logoWidth = %u, _logoHeight = %u", _logoWidth, _logoHeight));

    cairo_t* wmCr = cairo_create( _wmSurface );
    if( !wmCr )
        X_THROW(("Unable to allocate cairo handle for watermark."));

    // The SVG is authored against a 1408x792 canvas; scale it to this frame.
    cairo_scale( wmCr, (double)_width / 1408, (double)_height / 792 );

    GError* err = NULL;
    RsvgHandle* rsvgHandle = rsvg_handle_new_from_file("multisight-logo-white-outline.svg", &err);
    if( !rsvgHandle )
    {
        // BUG FIX: release the cairo context and the GError before throwing;
        // previously both leaked on this path.
        if( err )
            g_error_free( err );
        cairo_destroy( wmCr );
        X_THROW(("Unable to open ms logo from svg for watermark."));
    }

    if( rsvg_handle_render_cairo( rsvgHandle, wmCr ) != TRUE )
    {
        // BUG FIX: release the rsvg handle and cairo context before throwing.
        g_object_unref(rsvgHandle);
        cairo_destroy( wmCr );
        X_THROW(("svg render failed for watermark."));
    }

    g_object_unref(rsvgHandle);
    cairo_destroy( wmCr );
}
/// @brief Writes an encoded JPEG packet to disk as a single binary file.
/// @param fileName Destination path (truncated/created).
/// @param jpeg     Encoded JPEG bytes.
/// @throws if the file cannot be opened or the payload is not fully written.
void JPEGEncoder::WriteJPEGFile( const XSDK::XString& fileName, XIRef<Packet> jpeg )
{
    FILE* outFile = fopen( fileName.c_str(), "wb" );
    if( !outFile )
        X_THROW(("Unable to open output file."));

    // BUG FIX: fwrite's return value was previously ignored, so a short write
    // (disk full, IO error) produced a silently truncated file.
    const size_t jpegSize = jpeg->GetDataSize();
    const size_t written = fwrite( jpeg->Map(), 1, jpegSize, outFile );

    // Close before throwing so the handle never leaks.
    fclose( outFile );

    if( written != jpegSize )
        X_THROW(("Short write while writing JPEG output file."));
}
void H264UserDataTest::TestNalify() { printf("H264UserDataTest::TestNalify()\n"); fflush(stdout); XIRef<XMemory> data = new XMemory; data->Append<uint8_t>(0); data->Append<uint8_t>(0); data->Append<uint8_t>(3); data->Append<uint8_t>(0); data->Append<uint8_t>(0); data->Append<uint8_t>(0); data->Append<uint8_t>(3); data->Append<uint8_t>(0); data->Append<uint8_t>(0); data->Append<uint8_t>(0); data->Append<uint8_t>(0); const size_t expectedSize = 19; const uint8_t expected[expectedSize] = { 0, 0, 1, 0x06, 0, 0, 3, 3, 0, 0, 3, 0, 3, 0, 0, 3, 0, 0, 3 }; XIRef<XMemory> nalified = H264UserData::_Nalify(data); CPPUNIT_ASSERT_EQUAL(nalified->GetDataSize(), expectedSize); CPPUNIT_ASSERT(memcmp(nalified->Map(), expected, nalified->GetDataSize()) == 0); XIRef<XMemory> denalified = H264UserData::_Denalify(nalified->Map(), nalified->GetDataSize()); CPPUNIT_ASSERT_EQUAL(data->GetDataSize(), denalified->GetDataSize()); CPPUNIT_ASSERT(memcmp(data->Map(), denalified->Map(), data->GetDataSize()) == 0); }
/// @brief Writes one pre-encoded video packet into the container, lazily opening
///        IO and writing the container header on first use.
/// @param input    Encoded frame bytes (packet data points into this buffer; it
///                 is not copied before av_interleaved_write_frame).
/// @param keyFrame Marks the packet with AV_PKT_FLAG_KEY when true.
/// @throws on header or frame write failure.
void AVMuxer::WriteVideoPacket( XIRef<Packet> input, bool keyFrame )
{
    // Lazy-open the output IO context on the first packet.
    if( _context->pb == NULL )
        _OpenIO();

    if( _isTS )
    {
        // MPEG-TS path: write the header only once for the first file; on every
        // new segment (first frame written), ask the muxer to resend PAT/PMT/SPS
        // headers so each segment is independently decodable.
        if( _numVideoFramesWritten == 0 )
        {
            if( _fileNum == 0 )
            {
                if( avformat_write_header( _context, NULL ) < 0 )
                    X_THROW(("Unable to write header to container."));
            }

            av_opt_set( _context->priv_data, "mpegts_flags", "resend_headers", 0 );
        }
    }
    else
    {
        // Non-TS path: writing a header obligates us to write a trailer later;
        // _oweTrailer tracks that and guards against writing the header twice.
        if( !_oweTrailer )
        {
            if( avformat_write_header( _context, NULL ) < 0 )
                X_THROW(("Unable to write header to container."));
            _oweTrailer = true;
        }
    }

    AVPacket pkt;
    av_init_packet( &pkt );
    pkt.stream_index = _stream->index;
    pkt.data = input->Map();
    pkt.size = input->GetDataSize();

    // No B-frames are assumed here: pts == dts, monotonically increasing.
    pkt.pts = _ts;
    pkt.dts = _ts;

    // convert a tick of 1 from the codecs time_base (e.g. 1/15) to the containers
    // time_base
    _ts += av_rescale_q(1, _stream->codec->time_base, _stream->time_base);

    pkt.flags |= (keyFrame) ? AV_PKT_FLAG_KEY : 0;

    if( av_interleaved_write_frame( _context, &pkt ) < 0 )
        X_THROW(("Unable to write video frame."));

    _numVideoFramesWritten++;
}
/// @brief Convenience overload: encodes a YUV420P picture into a freshly
///        allocated buffer sized for the encoder, then shrinks it to the
///        actual encoded size.
/// @param pic  Raw YUV420P pixels. NOTE(review): assumed to match the encoder's
///             configured dimensions — the raw-pointer overload takes no picture
///             size; confirm.
/// @param type Frame type hint passed through to the encoder.
/// @return Buffer containing exactly the encoded frame bytes.
XIRef<XMemory> H264Encoder::EncodeYUV420P( XIRef<XMemory> pic, FrameType type )
{
    XIRef<XMemory> frame = new XMemory( DEFAULT_ENCODE_BUFFER_SIZE + DEFAULT_PADDING );

    // Grow the buffer so Map()/GetDataSize() expose the full encode region.
    // (Previously the returned pointer was stored in an unused local.)
    frame->Extend( DEFAULT_ENCODE_BUFFER_SIZE );

    size_t outputSize = EncodeYUV420P( pic->Map(), frame->Map(), frame->GetDataSize(), type );

    // Trim the buffer down to the bytes the encoder actually produced.
    frame->ResizeData( outputSize );

    return frame;
}
void H264UserDataTest::TestEmptySEI() { printf("H264UserDataTest::TestEmptySEI()\n"); fflush(stdout); XRef<SEIPayload> payload = new SEIPayload(XIRef<XMemory>(new XMemory)); CPPUNIT_ASSERT(payload->GetData()->empty()); CPPUNIT_ASSERT(payload->GetUUID() == XUuid("7e0858c4-38fe-48ea-852d-dace39badb30")); H264UserData before(payload); const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads(); CPPUNIT_ASSERT(beforePayloads.size() == 1); CPPUNIT_ASSERT(beforePayloads.front() == payload); XIRef<XMemory> sei = before.GenSEI(); H264UserData after(sei->begin(), sei->GetDataSize()); const std::vector<XRef<SEIPayload> >& afterPayloads = before.GetPayloads(); CPPUNIT_ASSERT(afterPayloads.size() == 1); CPPUNIT_ASSERT(afterPayloads.front().Get() == payload.Get()); }
void H264UserDataTest::TestMultiplePayloads() { printf("H264UserDataTest::TestMultiplePayloads()\n"); fflush(stdout); XIRef<XMemory> data1 = new XMemory; for(uint8_t i = 1; i < 77; ++i) data1->Append<uint8_t>(0); XIRef<XMemory> data2 = new XMemory; for(uint8_t i = 219; i > 59; --i) data2->Append<uint8_t>(i); XRef<SEIPayload> payload1 = new SEIPayload(data1, XUuid("4ae62deb-d9a5-417e-8981-2379b0a756f6")); CPPUNIT_ASSERT(payload1->GetData().Get() == data1.Get()); CPPUNIT_ASSERT(payload1->GetUUID() == XUuid("4ae62deb-d9a5-417e-8981-2379b0a756f6")); XRef<SEIPayload> payload2 = new SEIPayload(data2, XUuid("9fc80724-4208-40f7-8d13-b03bfa4140df")); CPPUNIT_ASSERT(payload2->GetData() == data2); CPPUNIT_ASSERT(payload2->GetUUID() == XUuid("9fc80724-4208-40f7-8d13-b03bfa4140df")); vector<XRef<SEIPayload> > payloads; payloads.push_back(payload2); payloads.push_back(payload1); payloads.push_back(payload1); payloads.push_back(payload2); H264UserData before(payloads); const std::vector<XRef<SEIPayload> >& beforePayloads = before.GetPayloads(); CPPUNIT_ASSERT(beforePayloads == payloads); XIRef<XMemory> sei = before.GenSEI(); H264UserData after(sei->begin(), sei->GetDataSize()); const std::vector<XRef<SEIPayload> >& afterPayloads = before.GetPayloads(); CPPUNIT_ASSERT(afterPayloads == payloads); }
void AVMuxer::WriteVideoFrame( XIRef<XMemory> frame, bool keyFrame ) { WriteVideoFrame( frame->Map(), frame->GetDataSize(), keyFrame ); }
/// @brief Composites the overlay (translucent banner, optional message and
///        timestamp, pre-rendered watermark) onto one raw frame.
/// @param input     Frame pixels, assumed 32-bit BGRA/ARGB matching cairo's
///        CAIRO_FORMAT_ARGB32 layout at _width x _height — TODO confirm against
///        the caller's pixel format.
/// @param clockTime Wall-clock value forwarded to _DrawTime when _withTime is set.
/// @return A new Packet containing the composited frame (same dimensions/stride).
/// @throws if the cairo surface stride is not exactly width*4.
XIRef<Packet> ExportOverlay::Process( XIRef<Packet> input, int64_t clockTime )
{
    // Raw cairo handles; cleaned up manually in the catch block below since
    // this code predates any RAII wrappers for them.
    cairo_surface_t* surface = NULL;
    cairo_t* cr = NULL;

    try
    {
        surface = cairo_image_surface_create( CAIRO_FORMAT_ARGB32, _width, _height );
        cr = cairo_create( surface );

        uint8_t* cairoSrc = cairo_image_surface_get_data( surface );
        int cairoSrcWidth = cairo_image_surface_get_width( surface );
        int cairoSrcHeight = cairo_image_surface_get_height( surface );

        // The memcpy in/out below assumes tightly packed rows (no row padding).
        if( cairo_image_surface_get_stride( surface ) != (cairoSrcWidth * 4) )
            X_THROW(("Unexpected cairo stride!"));

        // Prime the surface with opaque black, then overwrite the pixel data
        // with the input frame directly.
        cairo_set_source_rgba( cr, 0.0, 0.0, 0.0, 1.0 );
        cairo_rectangle( cr, 0.0, 0.0, cairoSrcWidth, cairoSrcHeight );
        cairo_fill( cr );

        memcpy( cairoSrc, input->Map(), input->GetDataSize() );

        // Lay out the overlay message text to learn its pixel extents.
        PangoLayout* layout = pango_cairo_create_layout( cr );
        pango_layout_set_text( layout, _decodedMsg.c_str(), -1 );
        PangoFontDescription* desc = pango_font_description_from_string( "Helvetica 22" );
        pango_layout_set_font_description( layout, desc );
        pango_font_description_free( desc );

        PangoRectangle logicalRect;
        pango_layout_get_pixel_extents( layout, NULL, &logicalRect );

        // Banner vertical position: fixed offsets from the top or bottom edge.
        uint16_t y = (_vAlign==V_ALIGN_TOP) ? 14 : _height - 52;

        // Horizontal positions for time, message, and banner background are
        // computed together from the alignment settings and the text width.
        uint16_t timeX = 0;
        uint16_t msgX = 0;
        uint16_t bgX = 0;
        uint16_t bgWidth = 0;
        _GetXPositions( timeX, msgX, logicalRect.width, bgX, bgWidth );

        // Semi-transparent grey banner behind the text.
        cairo_set_source_rgba( cr, 0.5, 0.5, 0.5, 0.50 );
        cairo_rectangle( cr, bgX, y, bgWidth, 32 );
        cairo_fill( cr );

        // Text is drawn in opaque white.
        cairo_set_source_rgba( cr, 1.0, 1.0, 1.0, 1.0 );

        if( !_decodedMsg.empty() )
            _DrawMessage( cr, layout, msgX, y );

        if( _withTime )
            _DrawTime( cr, timeX, y, clockTime );

        g_object_unref( layout );

        // copy from our watermark surface to our output surface...
        cairo_set_source_surface( cr, _wmSurface, _logoX, _logoY );
        cairo_rectangle( cr, _logoX, _logoY, _logoWidth, _logoHeight );
        cairo_clip( cr );
        cairo_paint_with_alpha( cr, 0.70 );

        // Copy data out of our cairo surface into our output packet...
        size_t outputSize = (cairoSrcWidth * 4) * cairoSrcHeight;
        XIRef<Packet> dest = new Packet( outputSize );
        memcpy( dest->Map(), cairoSrc, outputSize );
        dest->SetDataSize( outputSize );

        cairo_destroy( cr );
        cairo_surface_destroy( surface );

        return dest;
    }
    catch(...)
    {
        // Manual cleanup on any failure, then rethrow to the caller.
        if( cr )
            cairo_destroy( cr );
        if( surface )
            cairo_surface_destroy( surface );
        throw;
    }
}