// Drains at most one queued bitmap per call, encodes it, and manages the
// retained "last frame" used to flush the stream when encoding stops.
// Returns true while a retained frame exists (caller keeps this alive).
bool process(bool ending) {
    // Encoder not ready / shut down: nothing to do.
    if (!getStatus()) return false;
    //Try getting a bitmap for encoding
    GBitmap* bitmap = NULL;
    if (mFrameBitmapList.tryPopFront(bitmap)) {
        // Normal (non-final) encode of the freshly popped frame.
        encodeFrame(bitmap, false);
    }
    //Delete previous bitmap
    // Steady state: recycle the previously retained frame and keep the new
    // one as the candidate "last frame" for a future flush.
    if (!ending && bitmap) {
        if (mLastFrame) pushProcessedBitmap(mLastFrame);
        mLastFrame = bitmap;
    }
    // NOTE(review): when `ending` is true AND a bitmap was popped above, that
    // bitmap is neither retained nor pushed back — looks like a leak; confirm
    // against GBitmap ownership rules.
    //If we're stopping encoding, but didn't have a frame, re-encode the last frame
    if (ending && !bitmap && mLastFrame) {
        // Final flush: re-encode the retained frame with the end-of-stream
        // flag, then release it.
        encodeFrame(mLastFrame, true);
        pushProcessedBitmap(mLastFrame);
        mLastFrame = NULL;
    }
    // We'll live while we have a last frame
    return (mLastFrame != NULL);
}
// Encodes one raw video frame (org) into a coded frame (dst).
// Both arguments must be non-null VideoFrame instances; on any failure an
// error is logged and false is returned. On success the coded frame inherits
// the source dimensions.
bool VideoEncoderX264or5::doProcessFrame(Frame *org, Frame *dst)
{
    if (org == NULL || dst == NULL) {
        utils::errorMsg("Error encoding video frame: org or dst are NULL");
        return false;
    }

    // The pipeline hands us generic Frames; this encoder only handles video.
    VideoFrame* input  = dynamic_cast<VideoFrame*>(org);
    VideoFrame* output = dynamic_cast<VideoFrame*>(dst);
    if (input == NULL || output == NULL) {
        utils::errorMsg("Error encoding video frame: org and dst MUST be VideoFrame");
        return false;
    }

    // Adapt encoder settings to the incoming frame before touching pictures.
    if (!reconfigure(input, output)) {
        utils::errorMsg("Error encoding video frame: reconfigure failed");
        return false;
    }

    if (!fill_x264or5_picture(input)) {
        utils::errorMsg("Could not fill x264_picture_t from frame");
        return false;
    }

    if (!encodeFrame(output)) {
        utils::errorMsg("Could not encode video frame");
        return false;
    }

    // Propagate the source dimensions onto the encoded frame.
    output->setSize(input->getWidth(), input->getHeight());
    return true;
}
main() { int fd; unsigned char* dataptr; huffmanTable stdTable = { { 0 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 3 , 0 }, { 1 , 2 , 0 , 3 , 4 , 5 , 6 , 7 , 8 }, }; int i; unsigned long long before , after; jEncoder enc = initEncoder(512,512); fd = open("prelude.pm",O_RDWR); read(fd,frame,262144); setupTables(enc,&stdTable); dmGetUST(&before); dataptr = (char*)getBitsPtr(enc); printf("%02x %02x %d\n",dataptr[55] , dataptr[56] , i); for(i = 0; i < 1000; i++) { encodeFrame(enc,(char*)frame); /* printf("%02x %02x %d\n",dataptr[55] , dataptr[56] , i);*/ } dmGetUST(&after); printf("%lld\n",(after - before)/i); printf("compressed size: %d\n",getCompressedSize(enc)); fd = open("jout",O_WRONLY|O_CREAT,0666); write(fd,getBitsPtr(enc),getCompressedSize(enc)); close(fd); }
void VideoOutput::append(const shared_ptr<PixelTransferBuffer>& frame) { debugAssert(frame->width() == m_settings.width); debugAssert(frame->height() == m_settings.height); encodeFrame(static_cast<const uint8*>(frame->mapRead()), frame->format()); frame->unmap(); }
// Decode/encode benchmark: runs img04.jpg .. img14.jpg through decodeFrame()
// and encodeFrame(), printing the wall-clock time of each stage per image.
int main() {
    // Static so the 1 MiB compressed-input buffer does not live on the stack.
    static char buf[1024 * 1024];

    for (int idx = 4; idx <= 14; idx++) {
        char path[30];
        snprintf(path, sizeof(path), "img%02d.jpg", idx);

        size_t sz = sizeof(buf);
        bool loaded = readFile(path, buf, &sz);
        assert(loaded);

        // Destination for the decoded RGB pixels.
        unsigned char raw[3 * SCREEN_WIDTH * SCREEN_HEIGHT];

        double t0 = now();
        decodeFrame((unsigned char*)buf, sz, raw);
        double t1 = now();
        print("dec[%d]:%f", idx, (t1 - t0));

        t0 = now();
        encodeFrame(raw, idx);
        t1 = now();
        print("enc[%d]:%f", idx, (t1 - t0));
    }
    return 0;
}
void VideoOutput::append(const shared_ptr<Texture>& frame, bool invertY) { debugAssert(frame->width() == m_settings.width); debugAssert(frame->height() == m_settings.height); shared_ptr<PixelTransferBuffer> buffer = frame->toPixelTransferBuffer(TextureFormat::RGB8()); encodeFrame(static_cast<const uint8*>(buffer->mapRead()), ImageFormat::RGB8(), invertY); buffer->unmap(); }
/*
 * Wraps the supplied flag buffer in a Prn_Frame_t and forwards it to
 * encodeFrame(), returning its result.
 *
 * Fix: the original left every Prn_Frame_t member other than pFlagBuf,
 * FlagBufLen and i8DataId uninitialized before passing the struct to
 * encodeFrame(); the struct is now zero-initialized so untouched fields are
 * deterministic (see p25_print_text, where fields such as i8TypeOfFrame and
 * pCmdBuf exist on this type).
 */
int encodeFlagBuf(uint8 *pFlagBuf, int FlagBufLen)
{
    Prn_Frame_t frame = {0};

    frame.pFlagBuf   = pFlagBuf;
    frame.FlagBufLen = FlagBufLen;
    frame.i8DataId   = global_context.i8DataId;

    return encodeFrame(&frame);
}
/*
 * Sends a text buffer to the printer over the serial port and waits for the
 * two protocol acknowledgements.
 *
 * Returns  0 on success,
 *         -5 if the "data received" ack is missing/invalid (2 s timeout),
 *         -6 if the "print finished" ack is missing/invalid (10 s timeout).
 */
int p25_print_text(HANDLE hFile,BYTE *ucTextBuf,DWORD iDataLen)
{
    int iIndex;                /* NOTE(review): set but never used */
    /* Rolling frame id, cycles 0..9 across calls (sent as ASCII '0'..'9'). */
    static BYTE bDataID = 0;
    BYTE ucSendBuf[SNDLEN];
    BYTE ucRecvBuf[1000];
    long iALen;
    Prn_Frame_t frame;
    char eResponse[8];

    Com_Port_Reset(hFile);
    iIndex = 0;
    if(bDataID==10) bDataID=0;

    /* Build the data frame: 0x44 = frame type (presumably "text data" —
     * confirm against the printer protocol spec). */
    memset(ucSendBuf,0,SNDLEN);
    frame.CmdBufLen=iDataLen;
    frame.i8DataId=bDataID+0x30;bDataID++;
    frame.i8TypeOfFrame=0x44;
    frame.pCmdBuf=ucTextBuf;
    /* encodeFrame() serializes the frame into ucSendBuf and returns the
     * on-wire length, which replaces iDataLen from here on. */
    iDataLen=encodeFrame(ucSendBuf,&frame);
    Com_Port_Send(hFile,ucSendBuf,iDataLen);

    /* First ack: "data received". Two accepted forms — the full 4-byte match
     * or a 3-byte match skipping the first byte (apparently tolerating a
     * dropped leading byte). */
    memset(ucRecvBuf, 0 ,6);
    iALen=Com_Port_Recv(hFile,ucRecvBuf, 6, 2000);
    if(iALen>0&&((memcmp(ucRecvBuf, resp_data_recved, 4) == 0)||(memcmp(ucRecvBuf, resp_data_recved+1, 3) == 0))) { } else { return -5; }

    /* Second ack: "print finished". The expected response embeds the frame
     * id we just sent (byte 2 of the encoded frame) at offset 3. */
    memset(ucRecvBuf, 0 ,8);
    memcpy(eResponse,resp_print_finished,5);
    eResponse[3] = ucSendBuf[2];
    iALen=Com_Port_Recv(hFile, ucRecvBuf, 7, 10000);
    /* Same two-window comparison as above (full match or shifted by one). */
    if(iALen<=0||((memcmp(ucRecvBuf, eResponse, 5) != 0)&&(memcmp(ucRecvBuf, eResponse+1, 4)!=0))) { return -6; }
    return 0;
}
// Appends one frame to the video stream by capturing the current contents of
// the render device's back (default) or front buffer.
void VideoOutput::append(RenderDevice* rd, bool backbuffer) {
    debugAssert(rd->width() == m_settings.width);
    debugAssert(rd->height() == m_settings.height);

    // Select which framebuffer to capture; the previous binding is restored
    // once the screenshot has been taken.
    const RenderDevice::ReadBuffer previous = rd->readBuffer();
    rd->setReadBuffer(backbuffer ? RenderDevice::READ_BACK
                                 : RenderDevice::READ_FRONT);
    debugAssertGLOk();

    // TODO: Optimize using GLPixelTransferBuffer and glReadPixels instead of screenshotPic
    shared_ptr<Image> snapshot = rd->screenshotPic(false, false);
    rd->setReadBuffer(previous);

    shared_ptr<CPUPixelTransferBuffer> pixels = snapshot->toPixelTransferBuffer();
    encodeFrame(static_cast<const uint8*>(pixels->buffer()), pixels->format(), true);
}