Example #1
StADLsdk::StADLsdk()
    : myLib(),
      myAdaptersInfoList(NULL),
      myNumAdapters(0) {
    //
    stMemSet(&myFunctions, 0, sizeof(ADLsdkFunctions));
}
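Every example on this page clears or pattern-fills a block of memory before use. A minimal sketch of what such helpers could look like is shown below, assuming stMemSet and stMemZero (used later in Example #10) are thin inline wrappers over the standard memset(); the real sView helpers may add alignment or debug checks.

#include <cstring> // std::memset

// illustrative wrappers only - not the actual sView declarations
inline void stMemSet(void* thePtr, int theValue, size_t theSize) {
    ::memset(thePtr, theValue, theSize);
}

inline void stMemZero(void* thePtr, size_t theSize) {
    ::memset(thePtr, 0, theSize);
}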
Example #2
StGLWidget::StGLWidget(StGLWidget* theParent,
                       const int theLeft, const int theTop,
                       const StGLCorner theCorner,
                       const int theWidth, const int theHeight)
: myRoot(theParent != NULL ? theParent->myRoot : NULL),
  myParent(theParent),
  myChildren(),
  myPrev(NULL),
  myNext(NULL),
  userData(0),
  rectPx(theTop, theTop + theHeight, theLeft, theLeft + theWidth),
  myCorner(theCorner),
  opacityValue(0.0),
  opacityOnMs(2500.0),
  opacityOffMs(5000.0),
  opacityOnTimer(false),
  opacityOffTimer(true),
  isResized(true),
  myHasFocus(false),
  myIsTopWidget(false) {
    if(myParent != NULL) {
        myParent->getChildren()->add(this);
    }
    stMemSet(mouseClicked, 0, sizeof(mouseClicked));
}
Example #3
StDXManager::StDXManager()
: myD3dLib(NULL),
  myD3dDevice(NULL),
  myRefreshRate(D3DPRESENT_RATE_DEFAULT),
  myWithAqbs(false) {
    stMemSet(&myD3dParams, 0, sizeof(myD3dParams));
    stMemSet(&myCurrMode,  0, sizeof(myCurrMode));
    myD3dParams.Windowed         = FALSE;
    myD3dParams.SwapEffect       = D3DSWAPEFFECT_DISCARD; // discards the previous frames
    myD3dParams.BackBufferFormat = D3DFMT_R5G6B5;         // display format
    myD3dParams.BackBufferCount  = 1;                     // number of back buffers
    myD3dParams.BackBufferHeight = 2;
    myD3dParams.BackBufferWidth  = 2;
    myD3dParams.AutoDepthStencilFormat     = D3DFMT_D16_LOCKABLE;
    myD3dParams.EnableAutoDepthStencil     = FALSE; // no need for our purposes!
    myD3dParams.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
    myD3dParams.PresentationInterval       = D3DPRESENT_INTERVAL_DEFAULT;
}
Example #4
void StADLsdk::close() {
    if(myFunctions.ADL_Main_Control_Destroy != NULL) {
        myFunctions.ADL_Main_Control_Destroy();
    }
    ADL_Main_Memory_Free(myAdaptersInfoList);
    myAdaptersInfoList = NULL;
    myLib.close();
    myNumAdapters = 0;
    stMemSet(&myFunctions, 0, sizeof(ADLsdkFunctions));
}
Example #5
bool StImagePlane::initZero(StImagePlane::ImgFormat thePixelFormat,
                            const size_t theSizeX,
                            const size_t theSizeY,
                            const size_t theSizeRowBytes,
                            const int theValue) {
    if(!initTrash(thePixelFormat, theSizeX, theSizeY, theSizeRowBytes)) {
        return false;
    }
    stMemSet(myDataPtr, theValue, getSizeBytes());
    return true;
}
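A short usage sketch for initZero(); the dimensions, row stride and include path are illustrative assumptions. It allocates an 8-bit grayscale plane via initTrash() and zero-fills it through the stMemSet() call shown above.

#include <StImage/StImagePlane.h> // assumed include path

static bool allocBlackPlane(StImagePlane& thePlane) {
    // 640x480 pixels, one byte per pixel, tightly packed rows, fill value 0
    return thePlane.initZero(StImagePlane::ImgGray, 640, 480, 640, 0);
}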
Example #6
// we move this function to another object file
// to optimize static linkage (avoid unnecessary dependencies)
StArrayList<StString> StProcess::getArguments() {
    StArrayList<StString> aList;
#if (defined(_WIN32) || defined(__WIN32__))
    int argc = 0;
    stUtfWide_t** argv = CommandLineToArgvW(GetCommandLineW(), &argc);
    for(int aParamId = 0; aParamId < argc; ++aParamId) {
        aList.add(StString(argv[aParamId]));
    }
    // free memory allocated for CommandLineToArgvW arguments.
    LocalFree(argv);
#elif (defined(__APPLE__))
    if(_NSGetArgc() == NULL || _NSGetArgv() == NULL) {
        return aList; // is it possible?
    }
    int anArgsNb = *_NSGetArgc();
    char** anArgVec = *_NSGetArgv();
    for(int aParamId = 0; aParamId < anArgsNb; ++aParamId) {
        // automatically convert filenames from decomposed form used by Mac OS X file systems
        aList.add(stFromUtf8Mac(anArgVec[aParamId]));
    }
#elif (defined(__linux__) || defined(__linux))
    stUtf8_t aCmdlineInfoFile[4096];
    sprintf(aCmdlineInfoFile, "/proc/%d/cmdline", getpid());
    std::ifstream iFile;
    iFile.open(aCmdlineInfoFile);
    if(iFile.is_open()) {
        char aCmdlineInfoBuff[4096];
        while(!iFile.eof()) {
            stMemSet(aCmdlineInfoBuff, 0, sizeof(aCmdlineInfoBuff));
            iFile.getline(aCmdlineInfoBuff, 4096, '\0');
            if(aCmdlineInfoBuff[0] != '\0') {
                aList.add(StString(aCmdlineInfoBuff));
            }
        }
        iFile.close();
    }
#endif
    return aList;
}
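A hedged call-site sketch for getArguments(); the include path is assumed, while the size()/getValue() accessors and the ST_DEBUG_LOG pattern follow Examples #7 and #8.

#include <StThreads/StProcess.h> // assumed include path

static void logArguments() {
    const StArrayList<StString> anArgs = StProcess::getArguments();
    for(size_t anArgId = 0; anArgId < anArgs.size(); ++anArgId) {
        ST_DEBUG_LOG(StString("arg: ") + anArgs.getValue(anArgId));
    }
}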
Example #7
IDirect3DDevice9* StDXManager::createAqbsDevice(const UINT                   theAdapterId,
                                                const HWND                   theWinHandle,
                                                const D3DPRESENT_PARAMETERS& theD3dParams) {
    // first create a temporary windowed device
    IDirect3DDevice9* aD3dDevTmp = StDXManager::createAqbsTmpDevice(theAdapterId, theWinHandle, theD3dParams);
    if(aD3dDevTmp == NULL) {
        return NULL;
    }

    // create a surface to be used to communicate with the driver
    StHandle<StDXAqbsControl> anAqbsControl = new StDXAqbsControl(aD3dDevTmp);
    if(!anAqbsControl->isValid()) {
        ST_DEBUG_LOG("StDXManager::createAqbsDevice(), fail to create AQBS sufrace");
        aD3dDevTmp->Release();
        aD3dDevTmp = NULL;
        return NULL;
    }

    // send the command to the driver using the temporary surface
    if(!anAqbsControl->enableStereo()) {
        ST_DEBUG_LOG("StDXManager::createAqbsDevice(), fail to enable stereo via AQBS sufrace");
        anAqbsControl.nullify();
        aD3dDevTmp->Release();
        aD3dDevTmp = NULL;
        return NULL;
    }
    myWithAqbs = true;

    // see what stereo modes are available
    ATIDX9GETDISPLAYMODES aDispModeParams;
    stMemSet(&aDispModeParams, 0, sizeof(ATIDX9GETDISPLAYMODES));

    // send stereo command to get the number of available stereo modes.
    if(!anAqbsControl->sendCommand(ATI_STEREO_GETDISPLAYMODES,
                                   (BYTE* )&aDispModeParams, sizeof(ATIDX9GETDISPLAYMODES))) {
        ST_DEBUG_LOG("StDXManager::createAqbsDevice(), fail to enumerate stereo modes via AQBS sufrace");
        anAqbsControl.nullify();
        aD3dDevTmp->Release();
        aD3dDevTmp = NULL;
        return NULL;
    }

    if(aDispModeParams.dwNumModes != 0) {
        // allocating memory to get the list of modes.
        aDispModeParams.pStereoModes = new D3DDISPLAYMODE[aDispModeParams.dwNumModes];

        // send stereo command to get the list of stereo modes
        if(!anAqbsControl->sendCommand(ATI_STEREO_GETDISPLAYMODES,
                                       (BYTE* )&aDispModeParams, sizeof(ATIDX9GETDISPLAYMODES))) {
            ST_DEBUG_LOG("StDXManager::createAqbsDevice(), fail to retrieve stereo modes via AQBS sufrace");
            anAqbsControl.nullify();
            aD3dDevTmp->Release();
            aD3dDevTmp = NULL;
            delete[] aDispModeParams.pStereoModes;
            return NULL;
        }
    }
    anAqbsControl.nullify();

    int aResFormatMatch = -1;
///ST_DEBUG_LOG(" DX CUDD " + printDisplayFormat(theD3dParams));
///ST_DEBUG_LOG(" DX CURR " + printDisplayFormat(myCurrMode));
    for(int aDispModeIter = int(aDispModeParams.dwNumModes - 1); aDispModeIter >= 0; --aDispModeIter) {
        const D3DDISPLAYMODE& aDispMode = aDispModeParams.pStereoModes[aDispModeIter];
///ST_DEBUG_LOG(" DX ST  " + printDisplayFormat(aDispMode));
        if(aDispMode.Width  != theD3dParams.BackBufferWidth
        || aDispMode.Height != theD3dParams.BackBufferHeight
        || aDispMode.Format != theD3dParams.BackBufferFormat) {
            continue;
        }
        aResFormatMatch = aDispModeIter;
        break;
    }

    if(aResFormatMatch < 0) {
        ST_DEBUG_LOG("StDXManager::createAqbsDevice(), stereo display format doesn't found");
        aD3dDevTmp->Release();
        aD3dDevTmp = NULL;
        delete[] aDispModeParams.pStereoModes;
        return NULL;
    }

    int aRefreshMatch = -1;
    UINT aRefreshMax = 0;
    for(int aDispModeIter = aResFormatMatch; aDispModeIter >= 0; --aDispModeIter) {
        const D3DDISPLAYMODE& aDispMode = aDispModeParams.pStereoModes[aDispModeIter];
ST_DEBUG_LOG(" DX ST  " + printDisplayFormat(aDispMode));
        if(aDispMode.Width  != theD3dParams.BackBufferWidth
        || aDispMode.Height != theD3dParams.BackBufferHeight
        || aDispMode.Format != theD3dParams.BackBufferFormat) {
            continue;
        }
        if(aDispMode.RefreshRate == myRefreshRate) {
            aRefreshMatch = aDispModeIter; // found a match with the current refresh
            break;
        } else if(aDispMode.RefreshRate > aRefreshMax) {
            aRefreshMax = aDispMode.RefreshRate;
            aRefreshMatch = aDispModeIter;
        }
    }

ST_DEBUG_LOG(" DXSSS  " + printDisplayFormat(aDispModeParams.pStereoModes[aRefreshMatch]));

    // a valid multisample value other than 0 or 1 must be set for stereo (e.g. 2)
    D3DPRESENT_PARAMETERS aD3dParams = theD3dParams;
    aD3dParams.MultiSampleType = D3DMULTISAMPLE_2_SAMPLES;
    aD3dParams.Flags = 0; // can't lock the back buffer
    aD3dParams.EnableAutoDepthStencil     = FALSE; // need to create a special depth buffer
    aD3dParams.FullScreen_RefreshRateInHz = aDispModeParams.pStereoModes[aRefreshMatch].RefreshRate;
    aD3dParams.BackBufferFormat           = aDispModeParams.pStereoModes[aRefreshMatch].Format;
    myRefreshRate = aDispModeParams.pStereoModes[aRefreshMatch].RefreshRate;
    delete[] aDispModeParams.pStereoModes;

    // override original parameters
    myD3dParams = aD3dParams;
    return aD3dDevTmp;
}
Example #8
bool StProcess::execProcess(const StString&          theExecutablePath,
                            const StArray<StString>& theArguments) {
    if(!StFileNode::isFileExists(theExecutablePath)) {
        return false;
    }
#ifdef _WIN32
    // convert to wide strings
    StStringUtfWide anExecutablePathW = theExecutablePath.toUtfWide();
    StArrayList<StStringUtfWide> anArgumentsW(theArguments.size());
    StStringUtfWide aSplitter = ' ';
    StStringUtfWide aCmdLineW = StStringUtfWide('\"') + anExecutablePathW + StStringUtfWide("\" ");
    for(size_t anElem = 0;;) {
        // TODO (Kirill Gavrilov#9) we should probably quote arguments with spaces...
        // how to handle this correctly in the same way for UNIX / Windows?
        aCmdLineW += theArguments[anElem++].toUtfWide();
        if(anElem >= theArguments.size()) {
            break;
        }
        aCmdLineW += aSplitter;
    }

    STARTUPINFOW aStartInfo;
    PROCESS_INFORMATION aProcessInfo;
    stMemSet(&aStartInfo, 0, sizeof(aStartInfo));
    aStartInfo.cb = sizeof(aStartInfo);
    stMemSet(&aProcessInfo, 0, sizeof(aProcessInfo));

    // start the process
    if(!CreateProcessW(anExecutablePathW.toCString(), (wchar_t* )aCmdLineW.toCString(),
        NULL, NULL, FALSE, 0, NULL, NULL, &aStartInfo, &aProcessInfo)) {
        return false;
    }

    // close process and thread handles
    CloseHandle(aProcessInfo.hProcess);
    CloseHandle(aProcessInfo.hThread);
    return true;
#else
    char** anArgList = new char*[theArguments.size() + 2];
    anArgList[0] = (char* )theExecutablePath.toCString();
    for(size_t anArgId = 0; anArgId < theArguments.size(); ++anArgId) {
        anArgList[anArgId + 1] = (char* )theArguments.getValue(anArgId).toCString();
    }
    anArgList[theArguments.size() + 1] = NULL;

    pid_t aChildPid = vfork();
    if(aChildPid == -1) {
        // fork fail
        delete[] anArgList;
        return false;
    } else if(aChildPid != 0) {
        // the parent process regains control only after the child
        // calls exit() or one of the exec() functions
        delete[] anArgList;
        return true;
    }

    // child process
    execv(theExecutablePath.toCString(), anArgList);
    // fail
    _exit(1);
#endif
}
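A minimal call-site sketch for execProcess(); the executable path and arguments are hypothetical. The function returns false when the binary does not exist or CreateProcessW()/vfork() fails, and true once the child process has been started.

StArrayList<StString> anArgs(2);
anArgs.add(StString("--fullscreen"));
anArgs.add(StString("/tmp/image.jps"));
const bool isLaunched = StProcess::execProcess(StString("/usr/bin/sView"), anArgs);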
Example #9
void* __stdcall StADLsdk::ADL_Main_Memory_Alloc(int theSizeBytes) {
    void* aPtr = stMemAllocAligned(theSizeBytes);
    stMemSet(aPtr, 0, theSizeBytes);
    return aPtr;
}
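For context, a hedged sketch of how this zero-initializing allocator is typically handed over to ADL; ADL_Main_Control_Create and ADL_OK come from AMD's public ADL headers, and the myFunctions.ADL_Main_Control_Create pointer is assumed to be loaded the same way as ADL_Main_Control_Destroy in Example #4.

// somewhere inside StADLsdk initialization - illustrative only
if(myFunctions.ADL_Main_Control_Create != NULL
&& myFunctions.ADL_Main_Control_Create(StADLsdk::ADL_Main_Memory_Alloc, 1) == ADL_OK) {
    // ADL is ready; every buffer it hands back was zeroed by the callback above
}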
Example #10
bool StAVImage::load(const StString& theFilePath,
                     ImageType       theImageType,
                     uint8_t*        theDataPtr,
                     int             theDataSize) {

    // reset current data
    StImage::nullify();
    setState();
    close();
    myMetadata.clear();

    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_decoder_by_name("png");
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_decoder_by_name("mjpeg");
            break;
        }
        case ST_TYPE_EXR: {
            myCodec = avcodec_find_decoder_by_name("exr");
            break;
        }
        case ST_TYPE_WEBP:
        case ST_TYPE_WEBPLL: {
            myCodec = avcodec_find_decoder_by_name("webp");
            break;
        }
        default: {
            break;
        }
    }

    if(theImageType == ST_TYPE_NONE
    || (theDataPtr == NULL && !StFileNode::isFileExists(theFilePath))) {
        // open the image file and detect its type; it could be a non-local file!
    #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
        int avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), myImageFormat, NULL);
    #else
        int avErrCode = av_open_input_file (&myFormatCtx, theFilePath.toCString(), myImageFormat, 0, NULL);
    #endif
        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1
        || myFormatCtx->streams[0]->codec->codec_id == 0) {
            if(myFormatCtx != NULL) {
            #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 17, 0))
                avformat_close_input(&myFormatCtx);
            #else
                av_close_input_file(myFormatCtx);
                myFormatCtx = NULL;
            #endif
            }

        #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
            avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), NULL, NULL);
        #else
            avErrCode = av_open_input_file(&myFormatCtx, theFilePath.toCString(), NULL, 0, NULL);
        #endif
        }

        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1) {
            setState(StString("AVFormat library, couldn't open image file. Error: ") + stAV::getAVErrorDescription(avErrCode));
            close();
            return false;
        }

        // find the decoder for the video stream
        myCodecCtx = myFormatCtx->streams[0]->codec;
        if(theImageType == ST_TYPE_NONE) {
            myCodec = avcodec_find_decoder(myCodecCtx->codec_id);
        }
    }

    if(myCodec == NULL) {
        setState("AVCodec library, video codec not found");
        close();
        return false;
    } else if(myFormatCtx == NULL) {
        // use given image type to load decoder
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
        myCodecCtx = avcodec_alloc_context3(myCodec);
    #else
        myCodecCtx = avcodec_alloc_context();
    #endif
    }

    // stupid check
    if(myCodecCtx == NULL) {
        setState("AVCodec library, codec context is NULL");
        close();
        return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // read one packet or file
    StRawFile aRawFile(theFilePath);
    StAVPacket anAvPkt;
    if(theDataPtr != NULL && theDataSize != 0) {
        anAvPkt.getAVpkt()->data = theDataPtr;
        anAvPkt.getAVpkt()->size = theDataSize;
    } else {
        if(myFormatCtx != NULL) {
            if(av_read_frame(myFormatCtx, anAvPkt.getAVpkt()) < 0) {
                setState("AVFormat library, could not read first packet");
                close();
                return false;
            }
        } else {
            if(!aRawFile.readFile()) {
                setState("StAVImage, could not read the file");
                close();
                return false;
            }
            anAvPkt.getAVpkt()->data = (uint8_t* )aRawFile.getBuffer();
            anAvPkt.getAVpkt()->size = (int )aRawFile.getSize();
        }
    }
    anAvPkt.setKeyFrame();

    // decode one frame
    int isFrameFinished = 0;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 23, 0))
    avcodec_decode_video2(myCodecCtx, myFrame.Frame, &isFrameFinished, anAvPkt.getAVpkt());
#else
    avcodec_decode_video(myCodecCtx, myFrame.Frame, &isFrameFinished,
                         theDataPtr, theDataSize);
#endif

    if(isFrameFinished == 0) {
        // that's not an image! try to decode more packets?
        setState("AVCodec library, input file is not an Image!");
        close();
        return false;
    }

    // check frame size
    if(myCodecCtx->width <= 0 || myCodecCtx->height <= 0) {
        setState("AVCodec library, codec returns wrong frame size");
        close();
        return false;
    }

    // read aspect ratio
    if(myCodecCtx->sample_aspect_ratio.num == 0
    || myCodecCtx->sample_aspect_ratio.den == 0) {
        setPixelRatio(1.0f);
    } else {
        const GLfloat aRatio = GLfloat(myCodecCtx->sample_aspect_ratio.num) / GLfloat(myCodecCtx->sample_aspect_ratio.den);
        if(aRatio > 70.0f) {
            ST_DEBUG_LOG("AVCodec library, igning wrong PAR " + myCodecCtx->sample_aspect_ratio.num + ":" + myCodecCtx->sample_aspect_ratio.den);
            setPixelRatio(1.0f);
        } else {
            setPixelRatio(aRatio);
        }
    }

#ifdef ST_AV_NEWSTEREO
    // currently it is unlikely... but maybe in the future?
    AVFrameSideData* aSideData = av_frame_get_side_data(myFrame.Frame, AV_FRAME_DATA_STEREO3D);
    if(aSideData != NULL) {
        AVStereo3D* aStereo = (AVStereo3D* )aSideData->data;
        mySrcFormat = stAV::stereo3dAvToSt(aStereo->type);
        if(aStereo->flags & AV_STEREO3D_FLAG_INVERT) {
            mySrcFormat = st::formatReversed(mySrcFormat);
        }
    } else {
        mySrcFormat = StFormat_AUTO;
    }
#endif

    // it is unlikely that there would be any metadata coming from the format...
    // but let's try
    if(myFormatCtx != NULL) {
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
    }

    // collect metadata from the frame
    stAV::meta::Dict* aFrameMetadata = stAV::meta::getFrameMetadata(myFrame.Frame);
    for(stAV::meta::Tag* aTag = stAV::meta::findTag(aFrameMetadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
        aTag != NULL;
        aTag = stAV::meta::findTag(aFrameMetadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
        myMetadata.add(StDictEntry(aTag->key, aTag->value));
    }

    stAV::dimYUV aDimsYUV;
    if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGR24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgBGR, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGRA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgBGRA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY8) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY16) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray16, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB48) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB48, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA64) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA64, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(stAV::isFormatYUVPlanar(myCodecCtx, aDimsYUV)) {
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 29, 0))
        if(myCodecCtx->color_range == AVCOL_RANGE_JPEG) {
            aDimsYUV.isFullScale = true;
        }
    #endif
        setColorModel(StImage::ImgColor_YUV);
        setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Full : StImage::ImgScale_Mpeg);
        StImagePlane::ImgFormat aPlaneFrmt = StImagePlane::ImgGray;
        if(aDimsYUV.bitsPerComp == 9) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg9  : StImage::ImgScale_Mpeg9);
        } else if(aDimsYUV.bitsPerComp == 10) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg10 : StImage::ImgScale_Mpeg10);
        } else if(aDimsYUV.bitsPerComp == 16) {
            aPlaneFrmt = StImagePlane::ImgGray16;
        }

        changePlane(0).initWrapper(aPlaneFrmt, myFrame.getPlane(0),
                                   size_t(aDimsYUV.widthY), size_t(aDimsYUV.heightY), myFrame.getLineSize(0));
        changePlane(1).initWrapper(aPlaneFrmt, myFrame.getPlane(1),
                                   size_t(aDimsYUV.widthU), size_t(aDimsYUV.heightU), myFrame.getLineSize(1));
        changePlane(2).initWrapper(aPlaneFrmt, myFrame.getPlane(2),
                                   size_t(aDimsYUV.widthV), size_t(aDimsYUV.heightV), myFrame.getLineSize(2));
    } else {
        ///ST_DEBUG_LOG("StAVImage, perform conversion from Pixel format '" + avcodec_get_pix_fmt_name(myCodecCtx->pix_fmt) + "' to RGB");
        // initialize software scaler/converter
        SwsContext* pToRgbCtx = sws_getContext(myCodecCtx->width, myCodecCtx->height, myCodecCtx->pix_fmt,    // source
                                               myCodecCtx->width, myCodecCtx->height, stAV::PIX_FMT::RGB24, // destination
                                               SWS_BICUBIC, NULL, NULL, NULL);
        if(pToRgbCtx == NULL) {
            setState("SWScale library, failed to create SWScaler context");
            close();
            return false;
        }

        // initialize additional buffer for converted RGB data
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initTrash(StImagePlane::ImgRGB,
                                 myCodecCtx->width, myCodecCtx->height);

        uint8_t* rgbData[4]; stMemZero(rgbData,     sizeof(rgbData));
        int  rgbLinesize[4]; stMemZero(rgbLinesize, sizeof(rgbLinesize));
        rgbData[0]     = changePlane(0).changeData();
        rgbLinesize[0] = (int )changePlane(0).getSizeRowBytes();

        sws_scale(pToRgbCtx,
                  myFrame.Frame->data, myFrame.Frame->linesize,
                  0, myCodecCtx->height,
                  rgbData, rgbLinesize);
        // reset original data
        closeAvCtx();

        sws_freeContext(pToRgbCtx);
    }

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, loaded image '") + aFileName + "' " + getDescription());

    // we should not close the file because the decoded image data is kept in the codec context cache
    return true;
}

bool StAVImage::save(const StString& theFilePath,
                     ImageType       theImageType,
                     StFormat        theSrcFormat) {
    close();
    setState();
    if(isNull()) {
        return false;
    }

    PixelFormat aPFormatAV = (PixelFormat )getAVPixelFormat(*this);
    StImage anImage;
    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_encoder_by_name("png");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'png' not found");
                close();
                return false;
            }
            if(aPFormatAV == stAV::PIX_FMT::RGB24
            || aPFormatAV == stAV::PIX_FMT::RGBA32
            || aPFormatAV == stAV::PIX_FMT::GRAY8) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                anImage.changePlane().initTrash(StImagePlane::ImgRGB, getSizeX(), getSizeY(), getAligned(getSizeX() * 3));
                PixelFormat aPFrmtTarget = stAV::PIX_FMT::RGB24;
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }
        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif

            // setup encoder
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->compression_level = 9; // 0..9
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_encoder_by_name("mjpeg");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'mjpeg' not found");
                close();
                return false;
            }

            if(aPFormatAV == stAV::PIX_FMT::YUVJ420P
            || aPFormatAV == stAV::PIX_FMT::YUVJ422P
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ444P not supported by FFmpeg... yet?
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ440P
               ) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                PixelFormat aPFrmtTarget = aPFormatAV == stAV::PIX_FMT::YUV420P ? stAV::PIX_FMT::YUVJ420P : stAV::PIX_FMT::YUVJ422P;
                anImage.setColorModel(StImage::ImgColor_YUV);
                anImage.setColorScale(StImage::ImgScale_Mpeg);
                anImage.changePlane(0).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(0).changeData(), '\0', anImage.getPlane(0).getSizeBytes());
                anImage.changePlane(1).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(1).changeData(), '\0', anImage.getPlane(1).getSizeBytes());
                anImage.changePlane(2).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(2).changeData(), '\0', anImage.getPlane(2).getSizeBytes());
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }

        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->time_base.num = 1;
            myCodecCtx->time_base.den = 1;
            myCodecCtx->qmin = myCodecCtx->qmax = 5; // quality factor - lesser is better
            break;
        }
        case ST_TYPE_NONE:
        default:
            close();
            return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // wrap own data into AVFrame
    myFrame.Frame->format = myCodecCtx->pix_fmt;
    myFrame.Frame->width  = myCodecCtx->width;
    myFrame.Frame->height = myCodecCtx->height;
    fillPointersAV(anImage, myFrame.Frame->data, myFrame.Frame->linesize);

#ifdef ST_AV_NEWSTEREO
    bool isReversed = false;
    AVStereo3DType anAvStereoType = stAV::stereo3dStToAv(theSrcFormat, isReversed);
    if(anAvStereoType != (AVStereo3DType )-1) {
        AVStereo3D* aStereo = av_stereo3d_create_side_data(myFrame.Frame);
        if(aStereo != NULL) {
            aStereo->type = anAvStereoType;
            if(isReversed) {
                aStereo->flags |= AV_STEREO3D_FLAG_INVERT;
            }
        }
    }
#endif

    StJpegParser aRawFile(theFilePath);
    if(!aRawFile.openFile(StRawFile::WRITE)) {
        setState("Can not open the file for writing");
        close();
        return false;
    }

    // allocate the buffer, large enough (stupid formula copied from ffmpeg.c)
    int aBuffSize = int(getSizeX() * getSizeY() * 10);
    aRawFile.initBuffer(aBuffSize);

    // encode the image
    StAVPacket aPacket;
    aPacket.getAVpkt()->data = (uint8_t* )aRawFile.changeBuffer();
    aPacket.getAVpkt()->size = aBuffSize;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54, 2, 100))
    int isGotPacket = 0;
    int anEncSize   = avcodec_encode_video2(myCodecCtx, aPacket.getAVpkt(), myFrame.Frame, &isGotPacket);
    if(anEncSize == 0 && isGotPacket != 0) {
        anEncSize = aPacket.getSize();
    }
#else
    int anEncSize = avcodec_encode_video(myCodecCtx, aPacket.changeData(), aPacket.getSize(), myFrame.Frame);
#endif
    if(anEncSize <= 0) {
        setState("AVCodec library, fail to encode the image");
        close();
        return false;
    }
    aRawFile.setDataSize((size_t )anEncSize);

    // save metadata when possible
    if(theImageType == ST_TYPE_JPEG
    || theImageType == ST_TYPE_JPS) {
        if(aRawFile.parse()) {
            if(theSrcFormat != StFormat_AUTO) {
                aRawFile.setupJps(theSrcFormat);
            }
        } else {
            ST_ERROR_LOG("AVCodec library, created JPEG can not be parsed!");
        }
    }

    // store current content
    aRawFile.writeFile();
    // and finally close the file handle
    aRawFile.closeFile();

    close();

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, saved image '") + aFileName + "' " + getDescription());

    return true;
}
Example #11
bool StTestEmbed::createNative() {
    myParent = (StNativeWin_t )NULL;
    StRectI_t aRect;
    aRect.top()    = 128;
    aRect.bottom() = 128 + 400;
    aRect.left()   = 128;
    aRect.right()  = 128 + 400;
#if defined(_WIN32)
    WNDCLASSW aWinClass;
    stMemSet(&aWinClass, 0, sizeof(aWinClass));
    HINSTANCE anAppInst = GetModuleHandle(NULL);
    aWinClass.lpfnWndProc   = (WNDPROC )embedWindowProc;
    aWinClass.hInstance     = anAppInst;
    aWinClass.hIcon         = LoadIcon(NULL, IDI_WINLOGO);
    aWinClass.hCursor       = LoadCursor(NULL, IDC_ARROW);
    aWinClass.lpszClassName = L"DummyClass";
    if(!RegisterClassW(&aWinClass)) {
        st::cout << stostream_text("RegisterClass() failed:\nCannot register window class 'DummyClass'.\n");
        return false;
    }
    HWND aWin = CreateWindowW(L"DummyClass", L"DummyWindow",
                              WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
                              aRect.left(), aRect.top(), aRect.width(), aRect.height(), NULL, NULL, anAppInst, NULL);
    ShowWindow(aWin, SW_SHOWNORMAL);
    myParent = aWin;
    return true;
#elif defined(__linux__)
    // open a connection to the X server
    Display* aDisplay = XOpenDisplay(NULL);
    if(aDisplay == NULL) {
        st::cout << stostream_text("XOpenDisplay() failed!\n");
        return false;
    }

    Window aWin = XCreateSimpleWindow(aDisplay, RootWindow(aDisplay, DefaultScreen(aDisplay)),
                                      aRect.left(), aRect.top(), aRect.width(), aRect.height(),
                                      0, 0, BlackPixel(aDisplay, DefaultScreen(aDisplay)));
    if(aWin == 0) {
        st::cout << stostream_text("XCreateSimpleWindow() failed!\n");
        XCloseDisplay(aDisplay);
        return false;
    }

    XSetStandardProperties(aDisplay, aWin, "DummyWindow", "DummyWindow",
                           None, NULL, 0, NULL);
    XSelectInput(aDisplay, aWin,
                 KeyPressMask | KeyReleaseMask | ButtonPressMask | ButtonReleaseMask | StructureNotifyMask);

    // handle close window event
    //XSetWMProtocols(aDisplay, aWin, &(stXDisplay->wndDestroyAtom), 1);

    // request the X window to be displayed on the screen
    XMapWindow(aDisplay, aWin);

    // flushes the output buffer
    XFlush(aDisplay);
    myParent = (void* )aWin;
    myDisplay = aDisplay;
    return true;
#else
    st::cout << stostream_text("StTestEmbed::createNative() not implemented on this platform!\n");
    return false;
#endif
}