/**
    \fn goToTimeVideo
    \brief Seek video to the given time. Must be an exact time.
*/
bool ADM_Composer::goToTimeVideo(uint64_t startTime)
{
    uint64_t segTime;
    uint32_t seg;
    if(false==_segments.convertLinearTimeToSeg(startTime,&seg,&segTime))
    {
        ADM_warning("Cannot find segment for time %"PRIu64" ms\n",startTime/1000);
        return false;
    }
    // Try to seek...
    _SEGMENT *s=_segments.getSegment(seg);
    _VIDEOS  *v=_segments.getRefVideo(s->_reference);
    if(!s->_reference && !segTime && s->_refStartTimeUs<v->firstFramePts)
    {
        segTime=v->firstFramePts;
        ADM_warning("Clamping start time to %"PRIu64" ms\n",segTime/1000);
    }
    uint64_t to=segTime+s->_refStartTimeUs;
    if(false==seektoTime(s->_reference,to))
    {
        ADM_warning("Cannot seek to beginning of segment %"PRIu32" at %"PRIu64" ms\n",seg,to/1000);
        return false;
    }
    _currentSegment=seg;
    int64_t newTime=(int64_t)v->lastDecodedPts+(int64_t)s->_startTimeUs-(int64_t)s->_refStartTimeUs;
    ADM_info("Seek done, in reference, gone to %"PRIu64" with segment start at %"PRIu64"\n",v->lastDecodedPts,s->_refStartTimeUs);
    SET_CURRENT_PTS(newTime);
    return true;
}
bool AUDMEncoder_DcaEnc::initialize(void)
{
    int chan_config=0;
    switch(wavheader.channels)
    {
        case 1: chan_config=DCAENC_CHANNELS_MONO;break;
        case 2: chan_config=DCAENC_CHANNELS_STEREO;break;
        case 6: chan_config=DCAENC_CHANNELS_3FRONT_2REAR_1OV;break;
        case 5: chan_config=DCAENC_CHANNELS_3FRONT_2REAR;break;
        default:
            ADM_warning("Unsupported channel configuration\n");
            break;
    }
    wavheader.byterate=(config.bitrate*1000)>>3;
    ADM_info("Starting dcaenc with channels=%d, bitrate=%d\n",wavheader.channels,config.bitrate);
    context=dcaenc_create(wavheader.frequency,chan_config,config.bitrate*1000,DCAENC_FLAG_BIGENDIAN);
    if(!context)
    {
        ADM_warning("Cannot create dcaenc context\n");
        return false;
    }
    inputSize=dcaenc_input_size(context);
    outputSize=dcaenc_output_size(context);
    ADM_info("Converting %d samples to %d bytes\n",inputSize,outputSize);
    return true;
}
/**
    \fn switchToSegment
    \brief Switch to the segment given as argument
*/
bool ADM_Composer::switchToSegment(uint32_t s,bool dontdecode)
{
    if(s+1>_segments.getNbSegments())
    {
        ADM_warning("Cannot switch to segment:%"PRIu32"\n",s);
        return false;
    }
    _SEGMENT *seg=_segments.getSegment(s);
    ADM_assert(seg);
    ADM_info("Trying to switch to seg %"PRIu32" with startTime in reference pic=%"PRIu64" ms\n",s,seg->_refStartTimeUs/1000);
    // If the refStartTime is 0, it is the first image,
    // but the first image might not have a PTS of 0.
    _VIDEOS *vid=_segments.getRefVideo(seg->_reference);
    ADM_assert(vid);
    vidHeader *demuxer=vid->_aviheader;
    uint64_t from=seg->_refStartTimeUs;
    uint64_t pts,dts;
    if(!from)
        from=vid->firstFramePts;
    if(false==seektoTime(seg->_reference,from,dontdecode))
    {
        ADM_warning("Cannot seek to beginning of segment %"PRIu32" at %"PRIu64" ms\n",s,from/1000);
        return false;
    }
    _currentSegment=s;
    ADM_info("Switched ok to segment %"PRIu32" (dontdecode=%d)\n",s,dontdecode);
    return true;
}
/**
 * \fn switchToNextAudioSegment
 *
 */
bool ADM_edAudioTrackFromVideo::switchToNextAudioSegment(void)
{
    // Try to switch segment
    if(_audioSeg+1>=parent->_segments.getNbSegments())
        return false;
    ADM_warning("Switching to segment %"PRIu32"\n",_audioSeg+1);
    _audioSeg++;
    _SEGMENT *seg=parent->_segments.getSegment(_audioSeg);
    ADM_audioStreamTrack *trk=getTrackAtVideoNumber(seg->_reference);
    if(!trk)
    {
        ADM_warning("No audio track for segment %"PRIu32"\n",_audioSeg);
        return false;
    }
    //
    ADM_Audiocodec *codec=NULL;
    if(trk->codec)
        codec=trk->codec;
    if(codec)
    {
        codec->resetAfterSeek();
    }
    // Go to beginning of the stream
    if(false==trk->stream->goToTime(seg->_refStartTimeUs))
    {
        ADM_warning("Failed to seek audio to %"PRIu64" ms\n",seg->_refStartTimeUs/1000);
        return false;
    }
    ADM_info("Switched ok to audio segment %"PRIu32", with a ref time=%s\n",_audioSeg,ADM_us2plain(seg->_refStartTimeUs));
    return true;
}
/**
    \fn displayImage
*/
bool libvaRender::displayImage(ADMImage *pic)
{
    // if input is already a VA surface, no need to reupload it...
    if(pic->refType==ADM_HW_LIBVA)
    {
        ADM_vaSurface *img=(ADM_vaSurface *)pic->refDescriptor.refInstance;
        admLibVA::putX11Surface(img,info.systemWindowId,displayWidth,displayHeight);
        lastSurface=img;
    }else
    {
        if(!mySurface[0] || !mySurface[1])
        {
            ADM_warning("[VARender] No surface\n");
            return false;
        }
        ADM_vaSurface *dest=mySurface[toggle];
        toggle^=1;
        if(false==dest->fromAdmImage(pic))
        {
            ADM_warning("[VARender] Failed to upload pic\n");
            return false;
        }
        admLibVA::putX11Surface(dest,info.systemWindowId,displayWidth,displayHeight);
        lastSurface=dest;
    }
    return true;
}
/**
    \fn ADMImage_To_argbSurface
*/
static bool ADMImage_To_argbSurface(ADMImage *pic, IDirect3DSurface9 *surface,ADMColorScalerFull *scaler)
{
    D3DLOCKED_RECT lock;
    if (ADM_FAILED(IDirect3DSurface9_LockRect(surface,&lock, NULL, 0)))
    {
        ADM_warning("D3D Cannot lock surface\n");
        return false;
    }
    // RGB
    uint8_t *src[3];
    uint8_t *dst[3];
    pic->GetReadPlanes(src);
    dst[0]=(uint8_t *)lock.pBits;
    dst[1]=dst[2]=NULL;
    int sourcePitch[3],dstPitch[3];
    pic->GetPitches(sourcePitch);
    dstPitch[0]=lock.Pitch;
    dstPitch[1]=dstPitch[2]=0;
    scaler->convertPlanes(sourcePitch,dstPitch,src,dst);
    if (ADM_FAILED(IDirect3DSurface9_UnlockRect(surface)))
    {
        ADM_warning("D3D Cannot unlock surface\n");
        return false;
    }
    return true;
}
/**
    \fn ADMImage_To_yv12Surface
*/
static bool ADMImage_To_yv12Surface(ADMImage *pic, IDirect3DSurface9 *surface)
{
    D3DLOCKED_RECT lock;
    if (ADM_FAILED(IDirect3DSurface9_LockRect(surface,&lock, NULL, 0)))
    {
        ADM_warning("D3D Cannot lock surface\n");
        return false;
    }
    // copy
    uint8_t *dst=(uint8_t *)lock.pBits;
    int dStride=lock.Pitch;
    int width=pic->GetWidth(PLANAR_Y);
    int height=pic->GetHeight(PLANAR_Y);
    d3dBlit(pic, PLANAR_Y,dst,dStride,width,height);
    dst+=height*dStride;
    d3dBlit(pic, PLANAR_U,dst,dStride>>1,width>>1,height>>1);
    dst+=(height/2)*(dStride/2);
    d3dBlit(pic, PLANAR_V,dst,dStride>>1,width>>1,height>>1);
    if (ADM_FAILED(IDirect3DSurface9_UnlockRect(surface)))
    {
        ADM_warning("D3D Cannot unlock surface\n");
        return false;
    }
    return true;
}
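/*
    d3dBlit() is used above but is not part of this excerpt. The sketch below is an
    assumption about its behaviour, not the actual implementation: a pitch-aware
    plane copy with the same parameter order, using the ADMImage accessors that
    appear elsewhere in this code (GetReadPtr/GetPitch). The function name is
    illustrative only.
*/
#include <cstring> // memcpy
static void d3dBlitSketch(ADMImage *pic, ADM_PLANE plane, uint8_t *dst, int dstStride, int width, int height)
{
    const uint8_t *src=pic->GetReadPtr(plane);  // source plane base pointer
    int srcPitch=pic->GetPitch(plane);          // source stride in bytes
    for(int y=0;y<height;y++)
    {
        memcpy(dst,src,width);                  // copy one row of 8-bit samples
        src+=srcPitch;
        dst+=dstStride;
    }
}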
/**
    \fn displayImage_argb
    \brief manually do the yv12 -> RGB conversion + rescale and the upload to backbuffer
*/
bool dxvaRender::displayImage_argb(ADMImage *pic)
{
    IDirect3DSurface9 *bBuffer;
    // 1- grab the backbuffer, the ARGB conversion goes straight into it
    if( ADM_FAILED(IDirect3DDevice9_GetBackBuffer(d3dDevice, 0, 0, D3DBACKBUFFER_TYPE_MONO, &bBuffer)))
    {
        ADM_warning("D3D Cannot create backBuffer\n");
        return false;
    }
    if(!ADMImage_To_argbSurface(pic,bBuffer,scaler))
    {
        ADM_warning("Image to argb surface failed\n");
        return false;
    }
    IDirect3DDevice9_BeginScene(d3dDevice);
    IDirect3DDevice9_EndScene(d3dDevice);
    if( ADM_FAILED(IDirect3DDevice9_Present(d3dDevice, &targetRect, 0, 0, 0)))
    {
        ADM_warning("D3D Present failed\n");
    }
    return true;
}
/**
    \fn uploadImage
    \brief upload an image to a vdpau surface
*/
bool vdpauVideoFilterDeint::uploadImage(ADMImage *next,VdpVideoSurface surface)
{
    if(!next) // empty image
    {
        ADM_warning("VdpauDeint:No image to upload\n");
        return true;
    }
    if(surface==VDP_INVALID_HANDLE)
    {
        ADM_error("Surface provided is invalid\n");
        return false;
    }
    // Blit our image to surface
    uint32_t pitches[3];
    uint8_t *planes[3];
    next->GetPitches(pitches);
    next->GetReadPlanes(planes);
    aprintf("Putting image in surface %d\n",(int)surface);
    // Put our stuff in input...
#if VDP_DEBUG
    printf("Uploading image to surface %d\n",surfaceIndex%ADM_NB_SURFACES);
#endif
    if(VDP_STATUS_OK!=admVdpau::surfacePutBits(surface, planes,pitches))
    {
        ADM_warning("[Vdpau] video surface : Cannot putbits\n");
        return false;
    }
    return true;
}
bool ADM_libvaEncoder::setup(void)
{
    int width=getWidth();
    int height=getHeight();
    // Allocate VAImage
    for(int i=0;i<VA_ENC_NB_SURFACE;i++)
    {
        vaSurface[i]=new ADM_vaSurface(NULL,width,height);
        if(!vaSurface[i]->image) // bail out if the backing image could not be allocated
        {
            ADM_warning("Cannot allocate surface\n");
            return false;
        }
    }
    context=new ADM_vaEncodingContext();
    if(!context->init(width,height,VA_ENC_NB_SURFACE,vaSurface))
    {
        ADM_warning("Cannot initialize vaEncoder context\n");
        return false;
    }
    encodingBuffer=new ADM_vaEncodingBuffer(context,(width*height*400)/256);
    return true;
}
/**
    \fn encode
*/
bool ADM_libvaEncoder::encode(ADMBitstream *out)
{
    uint32_t fn;
    if(source->getNextFrame(&fn,image)==false)
    {
        ADM_warning("[LIBVA] Cannot get next image\n");
        return false;
    }
    if(!vaSurface[0]->fromAdmImage(image))
    {
        ADM_warning("Cannot upload image to surface\n");
        return false;
    }
    //
    if(!context->encode(vaSurface[0],out,encodingBuffer))
    {
        ADM_warning("Error encoding picture\n");
        return false;
    }
    out->len=plane;
    out->pts=out->dts=image->Pts;
    out->flags=AVI_KEY_FRAME;
    return true;
}
/**
    \fn getPacket
*/
uint8_t ADM_audioStreamConstantChunk::getPacket(uint8_t *buffer,uint32_t *size, uint32_t sizeMax,uint32_t *nbSample,uint64_t *dts)
{
    *size=0;
    *nbSample=0;
    if(sizeMax>=chunkSize)
    {
        uint32_t mSize;
        uint64_t mDts;
        if(!access->getPacket(buffer,&mSize,sizeMax,&mDts))
        {
            ADM_warning("Can't get packet\n");
            return 0;
        }
        ADM_info("Got packet : chunk=%d size=%d dts=%s\n",chunkSize,mSize,ADM_us2plain(mDts));
        if(!*size)
            *dts=mDts;
        *size+=mSize;
        *nbSample+=samplesPerChunk;
        if(mSize!=chunkSize)
        {
            ADM_warning("Expected chunk of size=%d, got %d\n",chunkSize,mSize);
        }
        buffer+=mSize;
        sizeMax-=mSize;
    }
    if(!*size)
        return 0;
    return 1;
}
/**
    \fn init
*/
bool libvaRender::init(GUI_WindowInfo *window, uint32_t w, uint32_t h,renderZoom zoom)
{
    ADM_info("[libva]Xv start\n");
    info=*window;
    if(admLibVA::isOperationnal()==false)
    {
        ADM_warning("[libva] Not operational\n");
        return false;
    }
    for(int i=0;i<2;i++)
    {
        VASurfaceID surface=admLibVA::allocateSurface(w,h);
        if(surface==VA_INVALID)
        {
            ADM_warning("[libva] cannot allocate surface\n");
            return false;
        }
        mySurface[i]=new ADM_vaSurface(NULL,w,h);
        mySurface[i]->surface=surface;
    }
    baseInit(w,h,zoom);
    return true;
}
bool ADM_latm2aac::pushData(int incomingLen,uint8_t *inData,uint64_t dts)
{
    // Lookup sync
    uint8_t *end=inData+incomingLen;
    uint8_t *start=inData;
    xdebug("Pushing data %d bytes\n",incomingLen);
    while(start<end)
    {
        int key=(start[0]<<8)+start[1];
        if((key & 0xffe0)!=0x56e0)
        {
            ADM_warning("Sync lost\n");
            return true;
        }
        uint32_t len=start[2]+((key & 0x1f)<<8);
        start+=3;
        if(start+len>end)
        {
            ADM_warning("Not enough data, need %d, got %d\n",len,(int)(end-start));
            return true;
        }
        xdebug("Found LATM : size %d\n",len);
        demuxLatm(dts,start,len);
        dts=ADM_NO_PTS; // LATM demux
        start+=len;
    }
    xdebug("-- end of this LOAS frame --\n");
    return true;
}
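/*
    For reference, the LOAS/LATM framing checked above: each AudioSyncStream packet
    starts with the 11-bit syncword 0x2B7 followed by a 13-bit audioMuxLengthBytes
    field. A self-contained sketch of that header check, assuming at least 3 bytes
    are available; the function name is illustrative only.
*/
#include <cstdint>
static bool parseLoasHeaderSketch(const uint8_t *p, uint32_t *payloadLen)
{
    uint32_t key=(p[0]<<8)+p[1];
    if((key & 0xffe0)!=0x56e0)          // top 11 bits must be the 0x2B7 syncword
        return false;
    *payloadLen=p[2]+((key & 0x1f)<<8); // remaining 13 bits: payload size in bytes
    return true;
}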
/**
    \fn fillAudio
    \brief Send audio data until targetDts is reached
*/
bool muxerAvi::fillAudio(uint64_t targetDts)
{
    // Now send audio until they all have DTS > lastVideoDts+increment
    for(int audioIndex=0;audioIndex<nbAStreams;audioIndex++)
    {
        ADM_audioStream *a=aStreams[audioIndex];
        uint32_t fq=a->getInfo()->frequency;
        int nb=0;
        audioClock *clk=clocks[audioIndex];
        aviAudioPacket *aPacket=audioPackets+audioIndex;
        if(true==aPacket->eos)
            return true;
        while(1)
        {
            if(false==aPacket->present)
            {
                if(!a->getPacket(aPacket->buffer, &(aPacket->sizeInBytes), AUDIO_BUFFER_SIZE,
                                 &(aPacket->nbSamples), &(aPacket->dts)))
                {
                    ADM_warning("Cannot get audio packet for stream %d\n",audioIndex);
                    aPacket->eos=true;
                    break;
                }
                if(aPacket->dts!=ADM_NO_PTS)
                {
                    aPacket->dts+=audioDelay;
                    aPacket->dts-=firstPacketOffset;
                }
                aprintf("[Audio] Packet size %"PRIu32" sample:%"PRIu32" dts:%"PRIu64" target :%"PRIu64"\n",
                        aPacket->sizeInBytes,aPacket->nbSamples,aPacket->dts,targetDts);
                if(aPacket->dts!=ADM_NO_PTS)
                    if(abs(aPacket->dts-clk->getTimeUs())>32000)
                    {
                        ADM_warning("[AviMuxer] Audio skew!\n");
                        clk->setTimeUs(aPacket->dts);
#warning FIXME add padding
                    }
                aPacket->present=true;
            }
            // We now have a packet stored
            aprintf("Audio packet dts =%s\n",ADM_us2plain(aPacket->dts));
            if(aPacket->dts!=ADM_NO_PTS)
                if(aPacket->dts>targetDts)
                {
                    aprintf("In the future..\n");
                    break; // this one is in the future
                }
            nb=writter.saveAudioFrame(audioIndex,aPacket->sizeInBytes,aPacket->buffer);
            encoding->pushAudioFrame(aPacket->sizeInBytes);
            aprintf("writing audio packet\n");
            clk->advanceBySample(aPacket->nbSamples);
            aPacket->present=false;
            //printf("%u vs %u\n",audioDts/1000,(lastVideoDts+videoIncrement)/1000);
        }
    }
    return true;
}
/**
    \fn removeChunk
    \brief Remove the [from, to] span from the segment list
*/
bool ADM_EditorSegment::removeChunk(uint64_t from, uint64_t to)
{
    uint32_t startSeg,endSeg;
    uint64_t startOffset,endOffset;
    ADM_info("Cutting from %"PRIu64" to %"PRIu64" ms\n",from/1000,to/1000);
    dump();
    if(false==convertLinearTimeToSeg(from,&startSeg,&startOffset))
    {
        ADM_warning("Cannot get starting point (%"PRIu64" ms)\n",from/1000);
        return false;
    }
    if(false==convertLinearTimeToSeg(to,&endSeg,&endOffset))
    {
        ADM_warning("Cannot get end point (%"PRIu64" ms)\n",to/1000);
        return false;
    }
    ADM_info("Start, seg %"PRIu32" Offset :%"PRIu64" ms\n",startSeg,startOffset);
    ADM_info("End  , seg %"PRIu32" Offset :%"PRIu64" ms\n",endSeg,endOffset);
    ListOfSegments tmp=segments;
    if(startSeg==endSeg)
    {
        // Split the seg in two..
        segments.insert(segments.begin()+startSeg+1,*getSegment(startSeg));
        endSeg=startSeg+1;
    }
    _SEGMENT *first=getSegment(startSeg);
    // Span over several segs...
    // 1- shorten the start segment..
    first->_durationUs=startOffset;
    // 3- Shorten last segment
    _SEGMENT *last=getSegment(endSeg);
    last->_refStartTimeUs+=endOffset;
    last->_durationUs-=endOffset;
    // 2- Kill the segments in between
    for(int i=startSeg+1;i<endSeg;i++)
    {
        segments.erase(segments.begin()+startSeg+1);
    }
    updateStartTime();
    removeEmptySegments();
    if(isEmpty())
    {
        GUI_Error_HIG(QT_TRANSLATE_NOOP("adm","Error"),QT_TRANSLATE_NOOP("adm","You cannot remove *all* the video\n"));
        segments=tmp;
        updateStartTime();
        return false;
    }
    undoSegments.push_back(tmp);
    dump();
    return true;
}
/**
    \fn displayImage_surface
    \brief input is already a surface, in yv12 format
*/
bool dxvaRender::displayImage_surface(ADMImage *pic,admDx2Surface *surface)
{
    // this does not work, both surfaces are coming from different devices
    IDirect3DSurface9 *bBuffer;
    POINT point={0,0};
    // 1- upload to myYV12 surface
    if(ADM_FAILED(IDirect3DDevice9_UpdateSurface(d3dDevice,
                surface->surface,  // src
                &panScan,          // src rect
                myYV12Surface,     // dst
                &point             // where to
                )))
    {
        ADM_warning("Copying surface failed, switching to non accelerated path\n");
        if(!pic->hwDownloadFromRef())
        {
            ADM_warning("Failed to download yv12 from dxva\n");
            return false;
        }
        // workaround : use default non bridged path
        if(useYV12)
        {
            return displayImage_yv12(pic);
        }
        return displayImage_argb(pic);
    }
    // upload....
    if( ADM_FAILED(IDirect3DDevice9_GetBackBuffer(d3dDevice, 0, 0, D3DBACKBUFFER_TYPE_MONO, &bBuffer)))
    {
        ADM_warning("D3D Cannot create backBuffer\n");
        return false;
    }
    // data are in the YV12 surface, blit it to the backbuffer;
    // zoom and color conversion happen there
    if (ADM_FAILED(IDirect3DDevice9_StretchRect(d3dDevice, myYV12Surface, NULL, bBuffer, NULL, D3DTEXF_LINEAR)))
    {
        ADM_warning("StretchRect yv12 failed\n");
    }
    IDirect3DDevice9_BeginScene(d3dDevice);
    IDirect3DDevice9_EndScene(d3dDevice);
    if( ADM_FAILED(IDirect3DDevice9_Present(d3dDevice, &targetRect, 0, 0, 0)))
    {
        ADM_warning("D3D Present failed\n");
    }
    return true;
}
/**
    \fn checkCutsAreOnIntra
    \brief In copy mode, if the cuts are not on intra frames we will run into trouble:
            * If we include the skipped reference frames, we get "DTS going back" errors.
            * If we skip them, we get broken video at the cut points due to missing reference frames.
    \return true if everything is ok
*/
bool ADM_Composer::checkCutsAreOnIntra(void)
{
    bool fail=false;
    int nbSeg=_segments.getNbSegments();
    ADMCompressedImage img;
    uint8_t *buffer=new uint8_t[1920*1080*3];
    img.data=buffer;
    ADM_info("Checking cuts start on keyframe..\n");
    for(int i=0; i<nbSeg; i++)
    {
        _SEGMENT *seg=_segments.getSegment(i);
        _VIDEOS *vid=_segments.getRefVideo(seg->_reference);
        vidHeader *demuxer=vid->_aviheader;
        if(false==switchToSegment(i,true))
        {
            fail=true;
            break;
        }
        if(false==demuxer->getFrame(vid->lastSentFrame,&img))
        {
            ADM_info("Cannot get 1st frame of segment %d\n",i);
            fail=true;
            break;
        }
        if(!(img.flags & AVI_KEY_FRAME))
        {
            ADM_warning("Segment %d does not start on a keyframe (%s)\n",i,ADM_us2plain(img.demuxerPts));
            fail=true;
            break;
        }
        // After a seg switch we are at the keyframe before or equal to where we want to go.
        // If the DTS do not match, it means we went back too much.
        // When re-encoding, it's not a problem; it is when copying.
        ADM_info("seg:%d refDTS=%"PRIu64"\n",seg->_reference,seg->_refStartDts);
        ADM_info("seg:%d imgDTS=%"PRIu64"\n",seg->_reference,img.demuxerDts);
        if(!seg->_refStartDts && !seg->_reference)
        {
            ADM_info("Ignoring first seg (unreliable DTS)\n");
        }
        else if(img.demuxerDts!=ADM_NO_PTS && seg->_refStartDts!=ADM_NO_PTS && img.demuxerDts!=seg->_refStartDts)
        {
            ADM_warning("Segment %d does not start on a known DTS (%s)\n",i,ADM_us2plain(img.demuxerPts));
            ADM_warning("expected (%s)\n",ADM_us2plain(seg->_refStartDts));
            fail=true;
            break;
        }
        ADM_info("Segment %d ok\n",i);
    }
    delete [] buffer;
    buffer=NULL;
    if(fail)
        return false;
    return true;
}
/**
    \fn getPacket
    \brief Get the next audio packet and rescale its DTS into editor linear time
*/
uint8_t ADM_edAudioTrackFromVideo::getPacket(uint8_t *dest, uint32_t *len,uint32_t sizeMax, uint32_t *samples,uint64_t *odts)
{
zgain:
    _SEGMENT *seg=parent->_segments.getSegment(_audioSeg);
    ADM_audioStreamTrack *trk=getTrackAtVideoNumber(seg->_reference);
    if(!trk)
        return 0;
    // Read a packet
    bool r=trk->stream->getPacket(dest,len,sizeMax,samples,odts);
    if(r==false)
    {
        ADM_warning("AudioGetPacket failed, audioSegment=%d\n",(int)_audioSeg);
        // if it fails, we have to switch segment
        if(false==switchToNextAudioSegment())
        {
            ADM_warning("..and this is the last segment\n");
            return false;
        }
        goto zgain;
    }
    // Rescale odts
    if(*odts!=ADM_NO_PTS)
    {
        if(*odts<seg->_refStartTimeUs)
        {
            ADM_warning("Audio packet is too early %"PRIu64" ms, this segment starts at %"PRIu64" ms\n",
                        *odts/1000,seg->_refStartTimeUs/1000);
            goto zgain;
        }
#if 0
        ADM_info("Audio DTS:%"PRIu64" ms, ref StartTime :%"PRIu64" Delta:%"PRIu64" duration :%"PRIu64"\n",
                 *odts/1000,seg->_refStartTimeUs/1000,(*odts-seg->_refStartTimeUs)/1000,seg->_durationUs/1000);
#endif
        *odts-=seg->_refStartTimeUs;
        if(*odts>seg->_durationUs)
        {
            if(switchToNextAudioSegment()==false)
            {
                ADM_warning("Audio:Switching to next segment failed\n");
                return false;
            }
            goto zgain;
        }
        *odts+=seg->_startTimeUs;
    }else
    {
        *odts=ADM_NO_PTS;
    }
    //ADM_info("Time : %s\n",ADM_us2plain(*odts));
    //advanceDtsBySample(*samples);
    return true;
}
/**
    \fn getPacket
*/
uint8_t ADM_audioStreamDCA::getPacket(uint8_t *obuffer,uint32_t *osize, uint32_t sizeMax,uint32_t *nbSample,uint64_t *dts)
{
#define ADM_LOOK_AHEAD DTS_HEADER_SIZE // Need 10 bytes...
    uint8_t data[ADM_LOOK_AHEAD];
    uint32_t offset;
    ADM_DCA_INFO info;
    while(1)
    {
        // Do we have sync ?
        if(needBytes(ADM_LOOK_AHEAD)==false)
        {
            ADM_warning("DCA: No sync found in buffer\n");
            return false;
        }
        // Peek
        peek(ADM_LOOK_AHEAD,data);
        // Search start seq
        if(buffer[start]!=0x7F || buffer[start+1]!=0xFE)
        {
            read8();
            continue;
        }
        if(buffer[start+2]!=0x80 || buffer[start+3]!=0x1)
        {
            read8();
            read8();
            continue;
        }
        if(false==ADM_DCAGetInfo(buffer+start, limit-start,&info,&offset))
        {
            read8();
            read8();
            read8();
            read8();
            continue;
        }
        ADM_assert(info.frameSizeInBytes<=sizeMax);
        if(needBytes(info.frameSizeInBytes)==false)
        {
            ADM_warning("DCA: Not enough data\n");
            return false;
        }
        *osize=info.frameSizeInBytes;
        read(*osize,obuffer);
        *nbSample=info.samples;
        *dts=lastDts;
        advanceDtsBySample(*nbSample);
        return 1;
    }
}
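/*
    For reference, the byte-by-byte test above looks for the DTS core sync word
    0x7FFE8001 (16-bit big-endian core stream). A self-contained sketch of the same
    check on a plain buffer; the function name is illustrative only.
*/
#include <cstdint>
#include <cstddef>
static bool isDtsCoreSyncSketch(const uint8_t *p, size_t available)
{
    if(available<4)
        return false;
    // 0x7F 0xFE 0x80 0x01 : DTS core, 16-bit big-endian variant
    return p[0]==0x7F && p[1]==0xFE && p[2]==0x80 && p[3]==0x01;
}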
bool xiphExtraData2Adm(uint8_t *extraData, int extraLen,uint8_t **newExtra,int *newExtraLen)
{
    *newExtra=NULL;
    *newExtraLen=0;
    uint8_t *oldata=extraData;
    int oldlen=extraLen;
    int len1,len2,len3;
    uint8_t *head;
    if(*oldata!=2) // 3 packets - 1 = 2
    {
        ADM_warning("[MKV] weird vorbis audio, expect problems\n");
        return false;
    }
    // First packet length
    head=oldata+1;
    len1=xypheLacingRead(&head);
    len2=xypheLacingRead(&head);
    int consumed=head-oldata;
    len3=oldlen-consumed; // left in extradata
    if(len3<0)
    {
        ADM_warning("Error in vorbis header, len3 too small %d %d / %d\n",len1,len2,len3);
        return false;
    }
    len3-=(len1+len2);
    ADM_info("Found packets len : %d- %d- %d, total size %d\n",len1,len2,len3,oldlen);
    // Now build our own packet...
    // Allocate uint32 for alignment purposes
    uint32_t *buffer=new uint32_t[3+(4+len1+len2+len3)/4];
    uint32_t nwlen=len1+len2+len3+sizeof(uint32_t)*3; // in bytes
    uint8_t *cp=(uint8_t *)(buffer+3); // data part
    memcpy(cp,head,len1); cp+=len1; head+=len1;
    memcpy(cp,head,len2); cp+=len2; head+=len2;
    memcpy(cp,head,len3);
    buffer[0]=len1;
    buffer[1]=len2;
    buffer[2]=len3;
    // Hand over the new extradata
    *newExtra=(uint8_t *)(buffer);
    *newExtraLen=nwlen;
    return true;
}
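/*
    xypheLacingRead() is referenced above but not included in this excerpt. The sketch
    below is an assumption that it follows the standard Xiph lacing convention: the
    length is the sum of 0xFF bytes plus a final byte below 255, and the read pointer
    is advanced past the consumed bytes. The name is illustrative only.
*/
#include <cstdint>
static int xiphLacingReadSketch(uint8_t **head)
{
    int len=0;
    uint8_t *p=*head;
    while(*p==0xff)   // 255 means "add 255 and keep reading"
    {
        len+=255;
        p++;
    }
    len+=*p++;        // terminating byte (< 255) ends the laced length
    *head=p;
    return len;
}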
/**
 * @return true if an NVENC-capable CUDA device was found
 */
static bool probeCuda()
{
    ADM_info("Probing cuda\n");
    if(!loadCuda())
    {
        ADM_warning("Cannot load cuda\n");
        return false;
    }
    ADM_warning("Cuda loaded, probing..\n");
    if(!cudaCall(init(0)))
        return false;
    int deviceCount=0;
    if(!cudaCall(getDeviceCount(&deviceCount)))
        return false;
    if (!deviceCount)
    {
        ADM_warning("No Cuda device available\n");
        return false;
    }
    ADM_info("found %d CUDA devices\n", deviceCount);
    for (int i = 0; i < deviceCount; ++i)
    {
        CUdevice dev;
        char chipName[128];
        int major,minor,ver;
        cudaAbortOnFail(getDevice(&dev,i));
        cudaAbortOnFail(getDeviceName(chipName,sizeof(chipName),dev));
        cudaAbortOnFail(getDeviceCapabilities(&major,&minor,dev));
        ver = (major << 4) | minor;
        ADM_info("Found chip, GPU %s, SM %d.%d\n",chipName,major,minor);
        if(ver>=0x30)
        {
            ADM_info(" this chip has nvenc\n");
            if(!nvEncAvailable)
            {
                nvEncAvailable=true;
                selectedDevice=dev;
            }
        }
    }
    return nvEncAvailable;
abortCudaProbe:
    return false;
}
/**
    \fn GoToIntraTime_noDecoding
    \brief Go to an intra frame at the given (exact) time, but do not decode frames
    \return true on success, false on error
*/
bool ADM_Composer::GoToIntraTime_noDecoding(uint64_t time,uint32_t *toframe)
{
    uint32_t s;
    uint64_t segTime;
    // Search the seg..
    if(false==_segments.convertLinearTimeToSeg(time,&s,&segTime))
    {
        ADM_warning("GoToIntraTime failed!\n");
        return false;
    }
    _SEGMENT *seg=_segments.getSegment(s);
    ADM_assert(seg);
    _VIDEOS *ref=_segments.getRefVideo(seg->_reference);
    ADM_assert(ref);
    //
    uint64_t refTime=seg->_refStartTimeUs+segTime;
    uint32_t frame=_segments.intraTimeToFrame(seg->_reference,refTime);
    if(s!=_currentSegment)
    {
        if(false==switchToSegment(s))
        {
            ADM_warning("Cannot go to segment %"PRIu32"\n",s);
            return false;
        }
    }
    if(toframe)
        *toframe=frame;
    ref->lastSentFrame=frame; // For copy
    // Initialize _nextFrameDts, in fact next DTS
    uint64_t pts,dts;
    ref->_aviheader->getPtsDts(frame,&pts,&dts);
    if(dts==ADM_NO_PTS)
    {
        if(pts==ADM_NO_PTS)
        {
            ADM_warning("No PTS nor DTS, cannot set start DTS\n");
            return false; // Fixme: we could still guess the DTS
        }
        // convert to linear time
        _segments.dtsFromPts(seg->_reference,pts,&dts);
    }
    time=(int64_t)dts;
    time-=seg->_refStartTimeUs;
    time+=seg->_startTimeUs;
    dts=time;
    _nextFrameDts=dts;
    seg->_dropBframes=1;
    return true;
}
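/*
    Both goToTimeVideo() and GoToIntraTime_noDecoding() apply the same timestamp
    mapping: a timestamp in the reference video is shifted into the editor's linear
    timeline by subtracting the segment's _refStartTimeUs and adding its _startTimeUs.
    A minimal standalone sketch of that mapping, using an illustrative struct rather
    than the real _SEGMENT type.
*/
#include <cstdint>
struct SegmentSketch
{
    uint64_t startTimeUs;     // where the segment begins on the linear timeline
    uint64_t refStartTimeUs;  // where it begins inside the reference video
};
static uint64_t refToLinearSketch(const SegmentSketch &seg, uint64_t refTimeUs)
{
    // assumes refTimeUs >= seg.refStartTimeUs, as the callers above ensure
    return refTimeUs - seg.refStartTimeUs + seg.startTimeUs;
}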
/**
    \fn displayImage
*/
bool vdpauRender::displayImage(ADMImage *pic)
{
    // Blit pic into our video surface
    VdpVideoSurface myInput=input;
    int next=currentSurface^1;
    int ipitches[3];
    uint32_t pitches[3];
    uint8_t *planes[3];
    pic->GetPitches(ipitches);
    pic->GetReadPlanes(planes);
    for(int i=0;i<3;i++)
        pitches[i]=(uint32_t)ipitches[i];
    // Put our stuff in input...
    // if input is already a VDPAU surface, no need to reupload it...
    if(pic->refType==ADM_HW_VDPAU)
    {
        // cookie is a render...
        struct vdpau_render_state *rndr = (struct vdpau_render_state *)pic->refDescriptor.refHwImage;
        myInput=rndr->surface;
        aprintf("VDPAU: This is already a vdpau image, just passing along surface=%d\n",rndr->surface);
    }else
    {
        aprintf("VDPAU: This is NOT a vdpau image, converting\n");
        //printf("Blitting surface\n");
        if(VDP_STATUS_OK!=admVdpau::surfacePutBits(input, planes,pitches))
        {
            ADM_warning("[Vdpau] video surface : Cannot putbits\n");
            return false;
        }
    }
    // Call mixer...
    if(VDP_STATUS_OK!=admVdpau::mixerRender(mixer,myInput,surface[next], pic->_width,pic->_height))
    {
        ADM_warning("[Vdpau] Cannot mixerRender\n");
        return false;
    }
    // Display!
    if(VDP_STATUS_OK!=admVdpau::presentationQueueDisplay(queue,surface[next]))
    {
        ADM_warning("[Vdpau] Cannot display on presentation queue\n");
        return false;
    }
    currentSurface=next;
    return true;
}
/**
    \fn setAudioCodec
*/
bool ADM_Composer::setAudioCodec(int dex,const char *codec, CONFcouple *c)
{
    if(!audioCodecSetByName(dex,codec))
    {
        ADM_warning("Cannot set codec %s, track %d\n",codec,dex);
        return false;
    }
    if(!setAudioExtraConf(dex,c))
    {
        ADM_warning("Cannot set configuration for codec %s, track %d\n",codec,dex);
        return false;
    }
    //#warning memleak on *c ?
    return true;
}
/**
    \fn getNextFrame
    \brief Get a processed frame
*/
bool lumaOnlyFilter::getNextFrame(uint32_t *fn,ADMImage *image)
{
    // Get the output of the previous filter first
    if(false==previousFilter->getNextFrame(fn,image))
    {
        ADM_warning("lumaOnlyFilter : Cannot get frame\n");
        return false;
    }
    // Blank the chroma planes in place (set them to 128) so only luma remains
    int w=info.width;
    int h=info.height;
    int pitches[3];
    uint8_t *ptr[3];
    image->GetPitches(pitches);
    image->GetWritePlanes((uint8_t **)ptr);
    w>>=1;
    h>>=1;
    for(int i=1;i<3;i++)
    {
        uint8_t *p=ptr[i];
        uint32_t d=pitches[i];
        for(int y=0;y<h;y++)
        {
            memset(p,128,w);
            p+=d;
        }
    }
    return true;
}
/**
    \fn convertLinearTimeToSeg
    \brief convert linear time to a segment + offset in the segment
*/
bool ADM_EditorSegment::convertLinearTimeToSeg(uint64_t frameTime, uint32_t *seg, uint64_t *segTime)
{
    if(!frameTime && segments.size()) // pick the first one
    {
        ADM_info("Frame time=0, taking first segment\n");
        *seg=0;
        *segTime=0; // ??
        return true;
    }
    for(int i=0;i<segments.size();i++)
    {
        if(segments[i]._startTimeUs<=frameTime && segments[i]._startTimeUs+segments[i]._durationUs>frameTime)
        {
            *seg=i;
            *segTime=frameTime-segments[i]._startTimeUs;
            return true;
        }
    }
    int max=segments.size();
    if(max)
    {
        _SEGMENT *last=&(segments[max-1]);
        if(frameTime==last->_startTimeUs+last->_durationUs)
        {
            ADM_info("End of last segment\n");
            *seg=max-1;
            *segTime=frameTime-last->_startTimeUs;
            return true;
        }
    }
    ADM_warning("Cannot find segment matching time %"PRIu64" ms\n",frameTime/1000);
    dump();
    return false;
}
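/*
    A small worked example of the lookup above, with illustrative numbers: three
    segments of 10 s each start at 0, 10 s and 20 s on the linear timeline, so a
    query at 25 s lands in segment 2 with a 5 s offset. It uses a plain struct
    instead of the real _SEGMENT/ADM_EditorSegment types, purely to show the
    invariant startTimeUs <= t < startTimeUs + durationUs.
*/
#include <cstdint>
#include <cassert>
#include <vector>
struct LinearSegSketch { uint64_t startTimeUs, durationUs; };
static bool linearToSegSketch(const std::vector<LinearSegSketch> &segs, uint64_t t,
                              uint32_t *seg, uint64_t *offset)
{
    for(uint32_t i=0;i<segs.size();i++)
        if(segs[i].startTimeUs<=t && t<segs[i].startTimeUs+segs[i].durationUs)
        {
            *seg=i;
            *offset=t-segs[i].startTimeUs;
            return true;
        }
    return false;
}
static void linearToSegExample(void)
{
    std::vector<LinearSegSketch> segs={{0,10000000},{10000000,10000000},{20000000,10000000}};
    uint32_t s; uint64_t off;
    assert(linearToSegSketch(segs,25000000,&s,&off) && s==2 && off==5000000);
}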
/**
    \fn getNextFrame
    \brief Get a processed frame
*/
bool unstackFieldFilter::getNextFrame(uint32_t *fn,ADMImage *image)
{
    // Get the output of the previous filter first
    if(false==previousFilter->getNextFrame(fn,current))
    {
        ADM_warning("unstackField : Cannot get frame\n");
        return false;
    }
    // Weave the two stacked half-height fields back into interleaved lines
    image->copyInfo(current);
    for(int i=PLANAR_Y;i<PLANAR_LAST;i++)
    {
        ADM_PLANE plane=(ADM_PLANE)i;
        uint32_t srcPitch=current->GetPitch(plane);
        uint32_t dstPitch=image->GetPitch(plane);
        uint8_t *src=current->GetReadPtr(plane);
        uint8_t *dst=image->GetWritePtr(plane);
        uint32_t w=info.width;
        uint32_t h=info.height;
        if(plane)
        {
            w>>=1;
            h>>=1;
        }
        // Even lines come from the top half, odd lines from the bottom half
        BitBlit(dst,          dstPitch*2, src,                srcPitch, w, h/2);
        BitBlit(dst+dstPitch, dstPitch*2, src+(srcPitch*h)/2, srcPitch, w, h/2);
    }
    return true;
}
/**
    \fn updateRefVideo
    \brief Update start time
*/
bool ADM_EditorSegment::updateRefVideo(void)
{
    int n=videos.size();
    ADM_assert(n);
    _VIDEOS *ref=getRefVideo(n-1);
    vidHeader *demuxer=ref->_aviheader;
    uint64_t pts,dts;
    demuxer->getPtsDts(0,&pts,&dts);
    if(pts!=ADM_NO_PTS && pts>0)
    {
        ADM_warning("Updating firstFramePts: the first frame has a PTS > 0, adjusting to %"PRIu64" ms\n",pts/1000);
        ref->firstFramePts=pts;
    }else
    {
        ADM_info("First PTS is %s\n",ADM_us2plain(pts));
    }
    updateStartTime();
    //
    n=segments.size();
    if(n)
    {
        _SEGMENT *seg=getSegment(n-1);
        uint64_t dur=ref->_aviheader->getVideoDuration();
        printf("Current duration %"PRIu64" ms, real one %"PRIu64" ms\n",dur/1000,seg->_durationUs/1000);
    }
    return true;
}
/**
    \fn getNextFrame
    \brief Get a processed frame
*/
bool rotateGl::getNextFrame(uint32_t *fn,ADMImage *image)
{
    // Get the output of the previous filter first
    if(false==previousFilter->getNextFrame(fn,original))
    {
        ADM_warning("glRotate : Cannot get frame\n");
        return false;
    }
    widget->makeCurrent();
    glPushMatrix();
    // size is the last one...
    fboY->bind();
    glProgramY->setUniformValue("myTextureU", 1);
    glProgramY->setUniformValue("myTextureV", 2);
    glProgramY->setUniformValue("myTextureY", 0);
    uploadAllPlanes(original);
    render(image,PLANAR_Y,fboY);
    downloadTextures(image,fboY);
    fboY->release();
    firstRun=false;
    glPopMatrix();
    widget->doneCurrent();
    image->copyInfo(original);
    return true;
}