Example #1
File: StAVPacket.cpp Project: gkv311/sview
void StAVPacket::setAVpkt(const AVPacket& theCopy) {
    // free old data
    free();
    if(theCopy.data == NULL) {
        return;
    }

    // copy values
    myIsOwn  = true;
    myPacket = theCopy;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 0, 0))
    myPacket.buf = NULL;
#endif

    // now copy data with special padding space
    myPacket.data = stMemAllocAligned<uint8_t*>((theCopy.size + AV_INPUT_BUFFER_PADDING_SIZE), 16); // data must be aligned to 16 bytes for SSE!
    stMemCpy (myPacket.data, theCopy.data, theCopy.size);
    stMemZero(myPacket.data + (ptrdiff_t )theCopy.size, AV_INPUT_BUFFER_PADDING_SIZE);

#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 118, 0))
    if(myPacket.side_data_elems > 0) {
        size_t aSize = theCopy.side_data_elems * sizeof(*theCopy.side_data);
        // weird anonymous structure...
        uint8_t** aPtr = (uint8_t** )&myPacket.side_data;
        *aPtr = stMemAllocZeroAligned<uint8_t*>(aSize, 16);
        for(int anIter = 0; anIter < theCopy.side_data_elems; ++anIter) {
            aSize = theCopy.side_data[anIter].size;
            myPacket.side_data[anIter] = theCopy.side_data[anIter];
            myPacket.side_data[anIter].data = stMemAllocAligned<uint8_t*>(aSize + AV_INPUT_BUFFER_PADDING_SIZE, 16);
            stMemCpy (myPacket.side_data[anIter].data, theCopy.side_data[anIter].data, aSize);
            stMemZero(myPacket.side_data[anIter].data + (ptrdiff_t )aSize, AV_INPUT_BUFFER_PADDING_SIZE);
        }
    }
#endif
}
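A minimal sketch of the same copy-with-padding idea using only standard C++ and the FFmpeg padding constant. It assumes stMemAllocAligned, stMemCpy and stMemZero are thin wrappers over aligned allocation, memcpy and memset; the helper name copyPacketData is illustrative and not part of sview.

#include <cstdint>
#include <cstdlib>
#include <cstring>
extern "C" {
#include <libavcodec/avcodec.h> // AV_INPUT_BUFFER_PADDING_SIZE
}

// copy packet bytes into a 16-byte-aligned buffer and zero the FFmpeg padding
// tail, so SIMD code reading slightly past the payload never touches garbage;
// the result should be released with std::free()
static uint8_t* copyPacketData(const uint8_t* theData, size_t theSize) {
    // std::aligned_alloc() requires the size to be a multiple of the alignment
    const size_t aFullSize = (theSize + AV_INPUT_BUFFER_PADDING_SIZE + 15) & ~size_t(15);
    uint8_t* aBuffer = static_cast<uint8_t*>(std::aligned_alloc(16, aFullSize));
    if(aBuffer == NULL) {
        return NULL;
    }
    std::memcpy(aBuffer, theData, theSize);
    std::memset(aBuffer + theSize, 0, aFullSize - theSize); // zero the padding tail
    return aBuffer;
}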
Example #2
void StGLTable::setupTable(const int theNbRows,
                           const int theNbColumns) {
    // destroy old content
    for(size_t aRowIter = 0; aRowIter < myTable.size(); ++aRowIter) {
        StArrayList<StGLTableItem*>& aRow = myTable.changeValue(aRowIter);
        for(size_t aColIter = 0; aColIter < aRow.size(); ++aColIter) {
            StGLTableItem* anItem = aRow.changeValue(aColIter);
            delete anItem;
        }
    }
    myTable.clear();

    // initialize new empty content
    for(int aRowIter = 0; aRowIter < theNbRows; ++aRowIter) {
        myTable.add(StArrayList<StGLTableItem*>());
        StArrayList<StGLTableItem*>& aRow = myTable.changeLast();
        aRow.initArray(theNbColumns);
        for(size_t aColIter = 0; aColIter < aRow.size(); ++aColIter) {
            aRow.changeValue(aColIter) = new StGLTableItem(this);
        }
    }
    myRowBottoms.initArray(theNbRows);
    myColRights .initArray(theNbColumns);
    stMemZero(&myRowBottoms.changeFirst(), sizeof(int) * myRowBottoms.size());
    stMemZero(&myColRights .changeFirst(), sizeof(int) * myColRights .size());
}
Example #3
void StPCMBuffer::resize(const size_t theSizeMin,
                         const bool   theToReduce) {
    if(mySizeBytes >= theSizeMin
    && !theToReduce) {
        return; // do not reduce buffer
    }

    mySizeBytes = theSizeMin;
    stMemZero(myPlanes, sizeof(myPlanes));
    stMemFreeAligned(myBuffer);
    myBuffer = stMemAllocAligned<uint8_t*>(mySizeBytes, 16);
    stMemZero(myBuffer, mySizeBytes);
    setupChannels(myChMap, myPlanesNb);
}
Example #4
StPCMBuffer::StPCMBuffer(const StPCMformat thePCMFormat)
: myBuffer(NULL),
  mySizeBytes(ST_MAX_AUDIO_FRAME_SIZE),
  myPlaneSize(0),
  myPlanesNb(1),
  mySampleSize(0),
  myPCMFormat(thePCMFormat),
  myPCMFreq(FREQ_44100),
  myChMap(StChannelMap::CH10, StChannelMap::PCM) {
    myBuffer = stMemAllocAligned<uint8_t*>(mySizeBytes, 16); // data must be aligned to 16 bytes for SSE!
    stMemZero(myBuffer, mySizeBytes);
    stMemZero(myPlanes, sizeof(myPlanes));
    myPlanes[0] = myBuffer; // single plane for interleaved data
    setFormat(thePCMFormat);
}
Example #5
StVideoTimer::StVideoTimer(const StHandle<StVideoQueue>& theVideo,
                           const StHandle<StAudioQueue>& theAudio,
                           const double                  theDelayVVFixedMs)
: myVideo(theVideo),
  myAudio(theAudio),
  myToQuitEv(false),
  myTimer(false),
  myTimerThrCurr(theDelayVVFixedMs),
  myTimerThrNext(theDelayVVFixedMs),
  myAudioPtsCurrSec(-1.0),
  myVideoPtsCurrSec(myVideo->getPts()),
  myVideoPtsNextSec(-1.0),
  myDelayTimer(0.0),
  myDiffVA(0.0),
  myDelayVAFixed(0),
  myDelayVV(0.0),
  myDelayVVAver(theDelayVVFixedMs),
  myDelayVVFixed(theDelayVVFixedMs),
  mySpeedFastSkip(3.0),
  mySpeedFast(1.5),
  mySpeedFastRev(1.0 / mySpeedFast),
  mySpeedSlow(0.4),
  mySpeedSlowRev(1.0 / mySpeedSlow),
  myIsBenchmark(false) {
    stMemZero(mySpeedDesc, sizeof(mySpeedDesc));
    myThread = new StThread(refreshThread, (void* )this, "StVideoTimer");
}
Example #6
StXDisplay::StXDisplay()
: hDisplay(NULL),
  hVisInfo(NULL),
  hInputMethod(None),
  hInputCtx(None),
  wndProtocols(None),
  wndDestroyAtom(None),
  xDNDEnter(None),
  xDNDPosition(None),
  xDNDStatus(None),
  xDNDTypeList(None),
  xDNDActionCopy(None),
  xDNDDrop(None),
  xDNDLeave(None),
  xDNDFinished(None),
  xDNDSelection(None),
  xDNDProxy(None),
  xDNDAware(None),
  xDNDUriList(None),
  xDNDPlainText(None),
  xDNDPrimary(None),
  XA_TARGETS(None),
  XA_COMPOUND_TEXT(None),
  XA_UTF8_STRING(None),
  XA_CLIPBOARD(None) {
    stMemZero(&FBCfg, sizeof(GLXFBConfig)); // should be just a pointer
    open();
}
Example #7
void StGLFontEntry::release(StGLContext& theCtx) {
    for(size_t anIter = 0; anIter < myFbos.size(); ++anIter) {
        StHandle<StGLFrameBuffer>& aFbo = myFbos.changeValue(anIter);
        aFbo->release(theCtx);
        aFbo.nullify();
    }
    for(size_t anIter = 0; anIter < myTextures.size(); ++anIter) {
        StHandle<StGLTexture>& aTexture = myTextures.changeValue(anIter);
        aTexture->release(theCtx);
        aTexture.nullify();
    }
    myTextures.clear();
    myFbos.clear();

    myAscender    = 0.0f;
    myLineSpacing = 0.0f;
    myTileSizeX   = 0;
    myTileSizeY   = 0;
    stMemZero(&myLastTilePx, sizeof(myLastTilePx));
    myTiles.clear();
    for(size_t aStyleIt = 0; aStyleIt < StFTFont::StylesNB; ++aStyleIt) {
        myGlyphMaps[aStyleIt].clear();
    }
    myLastTileId = size_t(-1);
}
Example #8
bool StGLFontEntry::createTexture(StGLContext& theCtx) {
    const GLint aMaxSize = theCtx.getMaxTextureSize();

    GLint aGlyphsNb = 0;
    if(myFont->hasCJK()
    || myFont->hasKorean()) {
        // italic does not make sense for Chinese
        // limit overall number of glyphs in the single texture to 4k
        // (single font file might contain about 20k-50k glyphs)
        aGlyphsNb = stMin(4000, 2 * myFont->getGlyphsNumber() - GLint(myLastTileId) + 1);
    } else {
        // western might contain reg/bold/italic/bolditalic styles
        // limit overall number of glyphs in the single texture to 1k
        // (single font file might contain about 6k glyphs for different languages)
        aGlyphsNb = stMin(1000, 4 * myFont->getGlyphsNumber() - GLint(myLastTileId) + 1);
    }

    const GLsizei aTextureSizeX = getPowerOfTwo(aGlyphsNb * myTileSizeX, aMaxSize);
    const size_t  aTilesPerRow  = aTextureSizeX / myTileSizeX;
    GLsizei aTextureSizeY = stMin(getEvenNumber(GLint((aGlyphsNb / aTilesPerRow) + 1) * myTileSizeY), aMaxSize);
    if(!theCtx.arbNPTW) {
        aTextureSizeY = getPowerOfTwo(aTextureSizeY, aMaxSize);
    }

    stMemZero(&myLastTilePx, sizeof(myLastTilePx));
    myLastTilePx.bottom() = myTileSizeY;

    myTextures.add(new StGLTexture(theCtx.arbTexRG ? GL_R8 : GL_ALPHA));
    myFbos.add(new StGLFrameBuffer());
    StHandle<StGLTexture>&     aTexture = myTextures[myTextures.size() - 1];
    StHandle<StGLFrameBuffer>& aFbo     = myFbos    [myTextures.size() - 1];
    if(!aTexture->initTrash(theCtx, aTextureSizeX, aTextureSizeY)) {
        return false;
    }
    aTexture->bind(theCtx);
    theCtx.core11fwd->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    theCtx.core11fwd->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    aTexture->unbind(theCtx);

    // destruction of temporary FBO produces broken texture on Catalyst drivers for unknown reason
    //StGLFrameBuffer::clearTexture(theCtx, aTexture);
#if !defined(GL_ES_VERSION_2_0)
    if(theCtx.arbTexClear) {
        theCtx.core11fwd->glPixelStorei(GL_UNPACK_LSB_FIRST,  GL_FALSE);
        theCtx.core11fwd->glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
        theCtx.core11fwd->glPixelStorei(GL_UNPACK_ALIGNMENT,  1);
        const stUByte_t THE_BLACK = 0;
        theCtx.extAll->glClearTexImage(aTexture->getTextureId(), 0, theCtx.arbTexRG ? GL_RED : GL_ALPHA, GL_UNSIGNED_BYTE, &THE_BLACK);
    } else if(aFbo->init(theCtx, aTexture, false)) {
        aFbo->clearTexture(theCtx);
    } else {
        ST_ERROR_LOG("Fail to bind " + (theCtx.arbTexRG ? "GL_R8" : "GL_ALPHA8") + " texture to FBO!");
    }
#else
    (void )aFbo;
#endif

    return true;
}
Example #9
void StJpegParser::reset() {
    // destroy all images
    myImages.nullify();
    myComment.clear();
    myStFormat = ST_V_SRC_AUTODETECT;
    myLength = 0;
    stMemZero(myOffsets, sizeof(myOffsets));
}
Example #10
StJpegParser::StJpegParser(const StCString& theFilePath)
: StRawFile(theFilePath),
  myImages(NULL),
  myStFormat(StFormat_AUTO) {
    stMemZero(myOffsets, sizeof(myOffsets));
#if !defined(_MSC_VER)
    (void )markerString;
#endif
}
Example #11
StGLFontEntry::StGLFontEntry(const StHandle<StFTFont>& theFont)
: myFont(theFont),
  myAscender(0.0f),
  myLineSpacing(0.0f),
  myTileSizeX(0),
  myTileSizeY(0),
  myLastTileId(size_t(-1)),
  myGlyphMap(NULL) {
    stMemZero(&myLastTilePx, sizeof(myLastTilePx));
    if(!myFont.isNull()) {
        myFont->setActiveStyle(StFTFont::Style_Regular);
    }
    myGlyphMap = &myGlyphMaps[StFTFont::Style_Regular];
}
Example #12
File: StAVPacket.cpp Project: gkv311/sview
void StAVPacket::avInitPacket() {
    stMemZero(&myPacket, sizeof(AVPacket));
    myPacket.pts = stAV::NOPTS_VALUE;
    myPacket.dts = stAV::NOPTS_VALUE;
    myPacket.pos = -1;
    /*myPacket.duration = 0;
    myPacket.convergence_duration = 0;
    myPacket.flags = 0;
    myPacket.stream_index = 0;
    myPacket.destruct = NULL; //av_destruct_packet_nofree;
    myPacket.data = NULL;
    myPacket.size = 0;
    myPacket.side_data = NULL;
    myPacket.side_data_elems = 0;*/
}
Example #13
bool StWinHandles::registerClass(const StStringUtfWide& theName,
                                 WNDPROC                theProc) {
    HINSTANCE aModule = GetModuleHandleW(NULL);
    WNDCLASSW aClass; stMemZero(&aClass, sizeof(aClass));
    // redraw on resize, and request own DC for window
    aClass.style         = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
    aClass.lpfnWndProc   = theProc;
    aClass.cbClsExtra    = 0;
    aClass.cbWndExtra    = 0;
    aClass.hInstance     = aModule;
    aClass.hIcon         = LoadIconW(aModule, L"A");
    aClass.hCursor       = LoadCursor(NULL, IDC_ARROW);
    aClass.hbrBackground = NULL;
    aClass.lpszMenuName  = NULL;
    aClass.lpszClassName = theName.toCString();
    if(RegisterClassW(&aClass) == 0) {
        stError(StString("WinAPI: Failed to register window class '") + theName.toUtf8() + "'");
        return false;
    }
    return true;
}
Example #14
void StGLRootWidget::stglScissorRect(const StRectI_t& theRect,
                                     StGLBoxPx&       theScissorRect) const {
    const GLint aVPortWidth  = myViewport[2];
    const GLint aVPortHeight = myViewport[3];
    const GLint aRootWidth   = getRectPx().width();
    const GLint aRootHeight  = getRectPx().height();
    if(aRootWidth <= 0 || aRootHeight <= 0) {
        // just prevent division by zero - should never happen
        stMemZero(&theScissorRect, sizeof(StGLBoxPx));
        return;
    }

    // viewport could have different size in case of rendering to FBO
    const GLdouble aWidthFactor  = GLdouble(aVPortWidth)  / GLdouble(aRootWidth);
    const GLdouble aHeightFactor = GLdouble(aVPortHeight) / GLdouble(aRootHeight);

    theScissorRect.x() = myViewport[0] + GLint(aWidthFactor  * GLdouble(theRect.left())) + myScrDispXPx;
    theScissorRect.y() = myViewport[1] + GLint(aHeightFactor * GLdouble(aRootHeight - theRect.bottom()));

    theScissorRect.width()  = GLint(aWidthFactor  * GLdouble(theRect.width()));
    theScissorRect.height() = GLint(aHeightFactor * GLdouble(theRect.height()));
}
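For intuition, a worked numeric pass through the same mapping, assuming a 960x540 root widget rendered into a 1920x1080 FBO viewport with myViewport = {0, 0, 1920, 1080} and myScrDispXPx = 0 (all values illustrative):

    // aWidthFactor  = 1920 / 960 = 2.0;  aHeightFactor = 1080 / 540 = 2.0
    // theRect: left = 100, bottom = 300, width = 200, height = 50
    // theScissorRect.x()      = 0 + GLint(2.0 * 100)         = 200
    // theScissorRect.y()      = 0 + GLint(2.0 * (540 - 300)) = 480 // GL origin is bottom-left
    // theScissorRect.width()  = GLint(2.0 * 200) = 400
    // theScissorRect.height() = GLint(2.0 * 50)  = 100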
Example #15
void StKeysState::reset() {
    StMutexAuto aLock(myLock);
    stMemZero(myKeys, sizeof(myKeys));
}
Example #16
void StKeysState::resetRegisteredKeys() {
    stMemZero(myRegKeys, sizeof(myRegKeys));
}
Example #17
StKeysState::StKeysState() {
    stMemZero(myKeys,    sizeof(myKeys));
    stMemZero(myRegKeys, sizeof(myRegKeys));
}
Example #18
File: StAVImage.cpp Project: zodsoft/sview
bool StAVImage::load(const StString& theFilePath,
                     ImageType       theImageType,
                     uint8_t*        theDataPtr,
                     int             theDataSize) {

    // reset current data
    StImage::nullify();
    setState();
    close();
    myMetadata.clear();

    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_decoder_by_name("png");
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_decoder_by_name("mjpeg");
            break;
        }
        case ST_TYPE_EXR: {
            myCodec = avcodec_find_decoder_by_name("exr");
            break;
        }
        case ST_TYPE_WEBP:
        case ST_TYPE_WEBPLL: {
            myCodec = avcodec_find_decoder_by_name("webp");
            break;
        }
        default: {
            break;
        }
    }

    if(theImageType == ST_TYPE_NONE
    || (theDataPtr == NULL && !StFileNode::isFileExists(theFilePath))) {
        // open the image file and detect its type; it could be a non-local file!
    #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
        int avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), myImageFormat, NULL);
    #else
        int avErrCode = av_open_input_file (&myFormatCtx, theFilePath.toCString(), myImageFormat, 0, NULL);
    #endif
        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1
        || myFormatCtx->streams[0]->codec->codec_id == 0) {
            if(myFormatCtx != NULL) {
            #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 17, 0))
                avformat_close_input(&myFormatCtx);
            #else
                av_close_input_file(myFormatCtx);
                myFormatCtx = NULL;
            #endif
            }

        #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
            avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), NULL, NULL);
        #else
            avErrCode = av_open_input_file(&myFormatCtx, theFilePath.toCString(), NULL, 0, NULL);
        #endif
        }

        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1) {
            setState(StString("AVFormat library, couldn't open image file. Error: ") + stAV::getAVErrorDescription(avErrCode));
            close();
            return false;
        }

        // find the decoder for the video stream
        myCodecCtx = myFormatCtx->streams[0]->codec;
        if(theImageType == ST_TYPE_NONE) {
            myCodec = avcodec_find_decoder(myCodecCtx->codec_id);
        }
    }

    if(myCodec == NULL) {
        setState("AVCodec library, video codec not found");
        close();
        return false;
    } else if(myFormatCtx == NULL) {
        // use given image type to load decoder
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
        myCodecCtx = avcodec_alloc_context3(myCodec);
    #else
        myCodecCtx = avcodec_alloc_context();
    #endif
    }

    // sanity check
    if(myCodecCtx == NULL) {
        setState("AVCodec library, codec context is NULL");
        close();
        return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // read one packet or file
    StRawFile aRawFile(theFilePath);
    StAVPacket anAvPkt;
    if(theDataPtr != NULL && theDataSize != 0) {
        anAvPkt.getAVpkt()->data = theDataPtr;
        anAvPkt.getAVpkt()->size = theDataSize;
    } else {
        if(myFormatCtx != NULL) {
            if(av_read_frame(myFormatCtx, anAvPkt.getAVpkt()) < 0) {
                setState("AVFormat library, could not read first packet");
                close();
                return false;
            }
        } else {
            if(!aRawFile.readFile()) {
                setState("StAVImage, could not read the file");
                close();
                return false;
            }
            anAvPkt.getAVpkt()->data = (uint8_t* )aRawFile.getBuffer();
            anAvPkt.getAVpkt()->size = (int )aRawFile.getSize();
        }
    }
    anAvPkt.setKeyFrame();

    // decode one frame
    int isFrameFinished = 0;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 23, 0))
    avcodec_decode_video2(myCodecCtx, myFrame.Frame, &isFrameFinished, anAvPkt.getAVpkt());
#else
    avcodec_decode_video(myCodecCtx, myFrame.Frame, &isFrameFinished,
                         theDataPtr, theDataSize);
#endif

    if(isFrameFinished == 0) {
        // that's not an image! should we try to decode more packets?
        setState("AVCodec library, input file is not an Image!");
        close();
        return false;
    }

    // check frame size
    if(myCodecCtx->width <= 0 || myCodecCtx->height <= 0) {
        setState("AVCodec library, codec returns wrong frame size");
        close();
        return false;
    }

    // read aspect ratio
    if(myCodecCtx->sample_aspect_ratio.num == 0
    || myCodecCtx->sample_aspect_ratio.den == 0) {
        setPixelRatio(1.0f);
    } else {
        const GLfloat aRatio = GLfloat(myCodecCtx->sample_aspect_ratio.num) / GLfloat(myCodecCtx->sample_aspect_ratio.den);
        if(aRatio > 70.0f) {
            ST_DEBUG_LOG("AVCodec library, igning wrong PAR " + myCodecCtx->sample_aspect_ratio.num + ":" + myCodecCtx->sample_aspect_ratio.den);
            setPixelRatio(1.0f);
        } else {
            setPixelRatio(aRatio);
        }
    }

#ifdef ST_AV_NEWSTEREO
    // currently it is unlikely... but maybe in the future?
    AVFrameSideData* aSideData = av_frame_get_side_data(myFrame.Frame, AV_FRAME_DATA_STEREO3D);
    if(aSideData != NULL) {
        AVStereo3D* aStereo = (AVStereo3D* )aSideData->data;
        mySrcFormat = stAV::stereo3dAvToSt(aStereo->type);
        if(aStereo->flags & AV_STEREO3D_FLAG_INVERT) {
            mySrcFormat = st::formatReversed(mySrcFormat);
        }
    } else {
        mySrcFormat = StFormat_AUTO;
    }
#endif

    // it is unlikely that there would be any metadata from the format...
    // but let's try
    if(myFormatCtx != NULL) {
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
    }

    // collect metadata from the frame
    stAV::meta::Dict* aFrameMetadata = stAV::meta::getFrameMetadata(myFrame.Frame);
    for(stAV::meta::Tag* aTag = stAV::meta::findTag(aFrameMetadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
        aTag != NULL;
        aTag = stAV::meta::findTag(aFrameMetadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
        myMetadata.add(StDictEntry(aTag->key, aTag->value));
    }

    stAV::dimYUV aDimsYUV;
    if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGR24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgBGR, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGRA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgBGRA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY8) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY16) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray16, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB48) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB48, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA64) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA64, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(stAV::isFormatYUVPlanar(myCodecCtx, aDimsYUV)) {
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 29, 0))
        if(myCodecCtx->color_range == AVCOL_RANGE_JPEG) {
            aDimsYUV.isFullScale = true;
        }
    #endif
        setColorModel(StImage::ImgColor_YUV);
        setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Full : StImage::ImgScale_Mpeg);
        StImagePlane::ImgFormat aPlaneFrmt = StImagePlane::ImgGray;
        if(aDimsYUV.bitsPerComp == 9) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg9  : StImage::ImgScale_Mpeg9);
        } else if(aDimsYUV.bitsPerComp == 10) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg10 : StImage::ImgScale_Mpeg10);
        } else if(aDimsYUV.bitsPerComp == 16) {
            aPlaneFrmt = StImagePlane::ImgGray16;
        }

        changePlane(0).initWrapper(aPlaneFrmt, myFrame.getPlane(0),
                                   size_t(aDimsYUV.widthY), size_t(aDimsYUV.heightY), myFrame.getLineSize(0));
        changePlane(1).initWrapper(aPlaneFrmt, myFrame.getPlane(1),
                                   size_t(aDimsYUV.widthU), size_t(aDimsYUV.heightU), myFrame.getLineSize(1));
        changePlane(2).initWrapper(aPlaneFrmt, myFrame.getPlane(2),
                                   size_t(aDimsYUV.widthV), size_t(aDimsYUV.heightV), myFrame.getLineSize(2));
    } else {
        ///ST_DEBUG_LOG("StAVImage, perform conversion from Pixel format '" + avcodec_get_pix_fmt_name(myCodecCtx->pix_fmt) + "' to RGB");
        // initialize software scaler/converter
        SwsContext* pToRgbCtx = sws_getContext(myCodecCtx->width, myCodecCtx->height, myCodecCtx->pix_fmt,    // source
                                               myCodecCtx->width, myCodecCtx->height, stAV::PIX_FMT::RGB24, // destination
                                               SWS_BICUBIC, NULL, NULL, NULL);
        if(pToRgbCtx == NULL) {
            setState("SWScale library, failed to create SWScaler context");
            close();
            return false;
        }

        // initialize additional buffer for converted RGB data
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initTrash(StImagePlane::ImgRGB,
                                 myCodecCtx->width, myCodecCtx->height);

        uint8_t* rgbData[4]; stMemZero(rgbData,     sizeof(rgbData));
        int  rgbLinesize[4]; stMemZero(rgbLinesize, sizeof(rgbLinesize));
        rgbData[0]     = changePlane(0).changeData();
        rgbLinesize[0] = (int )changePlane(0).getSizeRowBytes();

        sws_scale(pToRgbCtx,
                  myFrame.Frame->data, myFrame.Frame->linesize,
                  0, myCodecCtx->height,
                  rgbData, rgbLinesize);
        // reset original data
        closeAvCtx();

        sws_freeContext(pToRgbCtx);
    }

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, loaded image '") + aFileName + "' " + getDescription());

    // we should not close the file because the decoded image data is in the codec context cache
    return true;
}
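As a side note for newer FFmpeg builds: avcodec_decode_video2() used above was deprecated around FFmpeg 3.1 (libavcodec 57.48), where the same single-image decode is expressed with the send/receive API. A minimal sketch, not taken from sview:

// decode exactly one frame from one packet with the post-3.1 FFmpeg API
static bool decodeSingleFrame(AVCodecContext* theCtx, AVPacket* thePacket, AVFrame* theFrame) {
    if(avcodec_send_packet(theCtx, thePacket) < 0) {
        return false; // the decoder rejected the packet
    }
    return avcodec_receive_frame(theCtx, theFrame) == 0; // 0 means a complete frame is ready
}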

bool StAVImage::save(const StString& theFilePath,
                     ImageType       theImageType,
                     StFormat        theSrcFormat) {
    close();
    setState();
    if(isNull()) {
        return false;
    }

    PixelFormat aPFormatAV = (PixelFormat )getAVPixelFormat(*this);
    StImage anImage;
    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_encoder_by_name("png");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'png' not found");
                close();
                return false;
            }
            if(aPFormatAV == stAV::PIX_FMT::RGB24
            || aPFormatAV == stAV::PIX_FMT::RGBA32
            || aPFormatAV == stAV::PIX_FMT::GRAY8) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                anImage.changePlane().initTrash(StImagePlane::ImgRGB, getSizeX(), getSizeY(), getAligned(getSizeX() * 3));
                PixelFormat aPFrmtTarget = stAV::PIX_FMT::RGB24;
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }
        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif

            // setup encoder
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->compression_level = 9; // 0..9
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_encoder_by_name("mjpeg");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'mjpeg' not found");
                close();
                return false;
            }

            if(aPFormatAV == stAV::PIX_FMT::YUVJ420P
            || aPFormatAV == stAV::PIX_FMT::YUVJ422P
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ444P not supported by FFmpeg... yet?
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ440P
               ) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                PixelFormat aPFrmtTarget = aPFormatAV == stAV::PIX_FMT::YUV420P ? stAV::PIX_FMT::YUVJ420P : stAV::PIX_FMT::YUVJ422P;
                anImage.setColorModel(StImage::ImgColor_YUV);
                anImage.setColorScale(StImage::ImgScale_Mpeg);
                anImage.changePlane(0).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(0).changeData(), '\0', anImage.getPlane(0).getSizeBytes());
                anImage.changePlane(1).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(1).changeData(), '\0', anImage.getPlane(1).getSizeBytes());
                anImage.changePlane(2).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(2).changeData(), '\0', anImage.getPlane(2).getSizeBytes());
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }

        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->time_base.num = 1;
            myCodecCtx->time_base.den = 1;
            myCodecCtx->qmin = myCodecCtx->qmax = 5; // quality factor - lower is better
            break;
        }
        case ST_TYPE_NONE:
        default:
            close();
            return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // wrap own data into AVFrame
    myFrame.Frame->format = myCodecCtx->pix_fmt;
    myFrame.Frame->width  = myCodecCtx->width;
    myFrame.Frame->height = myCodecCtx->height;
    fillPointersAV(anImage, myFrame.Frame->data, myFrame.Frame->linesize);

#ifdef ST_AV_NEWSTEREO
    bool isReversed = false;
    AVStereo3DType anAvStereoType = stAV::stereo3dStToAv(theSrcFormat, isReversed);
    if(anAvStereoType != (AVStereo3DType )-1) {
        AVStereo3D* aStereo = av_stereo3d_create_side_data(myFrame.Frame);
        if(aStereo != NULL) {
            aStereo->type = anAvStereoType;
            if(isReversed) {
                aStereo->flags |= AV_STEREO3D_FLAG_INVERT;
            }
        }
    }
#endif

    StJpegParser aRawFile(theFilePath);
    if(!aRawFile.openFile(StRawFile::WRITE)) {
        setState("Can not open the file for writing");
        close();
        return false;
    }

    // allocate the buffer, large enough (stupid formula copied from ffmpeg.c)
    int aBuffSize = int(getSizeX() * getSizeY() * 10);
    aRawFile.initBuffer(aBuffSize);

    // encode the image
    StAVPacket aPacket;
    aPacket.getAVpkt()->data = (uint8_t* )aRawFile.changeBuffer();
    aPacket.getAVpkt()->size = aBuffSize;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54, 2, 100))
    int isGotPacket = 0;
    int anEncSize   = avcodec_encode_video2(myCodecCtx, aPacket.getAVpkt(), myFrame.Frame, &isGotPacket);
    if(anEncSize == 0 && isGotPacket != 0) {
        anEncSize = aPacket.getSize();
    }
#else
    int anEncSize = avcodec_encode_video(myCodecCtx, aPacket.changeData(), aPacket.getSize(), myFrame.Frame);
#endif
    if(anEncSize <= 0) {
        setState("AVCodec library, fail to encode the image");
        close();
        return false;
    }
    aRawFile.setDataSize((size_t )anEncSize);

    // save metadata when possible
    if(theImageType == ST_TYPE_JPEG
    || theImageType == ST_TYPE_JPS) {
        if(aRawFile.parse()) {
            if(theSrcFormat != StFormat_AUTO) {
                aRawFile.setupJps(theSrcFormat);
            }
        } else {
            ST_ERROR_LOG("AVCodec library, created JPEG can not be parsed!");
        }
    }

    // store current content
    aRawFile.writeFile();
    // and finally close the file handle
    aRawFile.closeFile();

    close();

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, saved image '") + aFileName + "' " + getDescription());

    return true;
}
Example #19
StJpegParser::StJpegParser(const StCString& theFilePath)
: StRawFile(theFilePath),
  myImages(NULL),
  myStFormat(ST_V_SRC_AUTODETECT) {
    stMemZero(myOffsets, sizeof(myOffsets));
}
Example #20
void StPCMBuffer::clear() {
    myPlaneSize = 0;
    stMemZero(myBuffer, mySizeBytes);
}