Example #1
bool StAVImage::load(const StString& theFilePath,
                     ImageType       theImageType,
                     uint8_t*        theDataPtr,
                     int             theDataSize) {

    // reset current data
    StImage::nullify();
    setState();
    close();
    myMetadata.clear();

    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_decoder_by_name("png");
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_decoder_by_name("mjpeg");
            break;
        }
        case ST_TYPE_EXR: {
            myCodec = avcodec_find_decoder_by_name("exr");
            break;
        }
        case ST_TYPE_WEBP:
        case ST_TYPE_WEBPLL: {
            myCodec = avcodec_find_decoder_by_name("webp");
            break;
        }
        default: {
            break;
        }
    }

    if(theImageType == ST_TYPE_NONE
    || (theDataPtr == NULL && !StFileNode::isFileExists(theFilePath))) {
        // open the image file and detect its type; it could be a non-local file!
    #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
        int avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), myImageFormat, NULL);
    #else
        int avErrCode = av_open_input_file (&myFormatCtx, theFilePath.toCString(), myImageFormat, 0, NULL);
    #endif
        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1
        || myFormatCtx->streams[0]->codec->codec_id == 0) {
            if(myFormatCtx != NULL) {
            #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 17, 0))
                avformat_close_input(&myFormatCtx);
            #else
                av_close_input_file(myFormatCtx);
                myFormatCtx = NULL;
            #endif
            }

        #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0))
            avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), NULL, NULL);
        #else
            avErrCode = av_open_input_file(&myFormatCtx, theFilePath.toCString(), NULL, 0, NULL);
        #endif
        }

        if(avErrCode != 0
        || myFormatCtx->nb_streams < 1) {
            setState(StString("AVFormat library, couldn't open image file. Error: ") + stAV::getAVErrorDescription(avErrCode));
            close();
            return false;
        }

        // find the decoder for the video stream
        myCodecCtx = myFormatCtx->streams[0]->codec;
        if(theImageType == ST_TYPE_NONE) {
            myCodec = avcodec_find_decoder(myCodecCtx->codec_id);
        }
    }

    if(myCodec == NULL) {
        setState("AVCodec library, video codec not found");
        close();
        return false;
    } else if(myFormatCtx == NULL) {
        // no format context; use the given image type to set up the decoder context
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
        myCodecCtx = avcodec_alloc_context3(myCodec);
    #else
        myCodecCtx = avcodec_alloc_context();
    #endif
    }

    // sanity check
    if(myCodecCtx == NULL) {
        setState("AVCodec library, codec context is NULL");
        close();
        return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // read one packet or file
    StRawFile aRawFile(theFilePath);
    StAVPacket anAvPkt;
    if(theDataPtr != NULL && theDataSize != 0) {
        anAvPkt.getAVpkt()->data = theDataPtr;
        anAvPkt.getAVpkt()->size = theDataSize;
    } else {
        if(myFormatCtx != NULL) {
            if(av_read_frame(myFormatCtx, anAvPkt.getAVpkt()) < 0) {
                setState("AVFormat library, could not read first packet");
                close();
                return false;
            }
        } else {
            if(!aRawFile.readFile()) {
                setState("StAVImage, could not read the file");
                close();
                return false;
            }
            anAvPkt.getAVpkt()->data = (uint8_t* )aRawFile.getBuffer();
            anAvPkt.getAVpkt()->size = (int )aRawFile.getSize();
        }
    }
    anAvPkt.setKeyFrame();

    // decode one frame
    int isFrameFinished = 0;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 23, 0))
    avcodec_decode_video2(myCodecCtx, myFrame.Frame, &isFrameFinished, anAvPkt.getAVpkt());
#else
    avcodec_decode_video(myCodecCtx, myFrame.Frame, &isFrameFinished,
                         theDataPtr, theDataSize);
#endif

    if(isFrameFinished == 0) {
        // that's not an image! should we try to decode more packets?
        setState("AVCodec library, input file is not an Image!");
        close();
        return false;
    }

    // check frame size
    if(myCodecCtx->width <= 0 || myCodecCtx->height <= 0) {
        setState("AVCodec library, codec returns wrong frame size");
        close();
        return false;
    }

    // read aspect ratio
    if(myCodecCtx->sample_aspect_ratio.num == 0
    || myCodecCtx->sample_aspect_ratio.den == 0) {
        setPixelRatio(1.0f);
    } else {
        const GLfloat aRatio = GLfloat(myCodecCtx->sample_aspect_ratio.num) / GLfloat(myCodecCtx->sample_aspect_ratio.den);
        if(aRatio > 70.0f) {
            ST_DEBUG_LOG("AVCodec library, igning wrong PAR " + myCodecCtx->sample_aspect_ratio.num + ":" + myCodecCtx->sample_aspect_ratio.den);
            setPixelRatio(1.0f);
        } else {
            setPixelRatio(aRatio);
        }
    }

#ifdef ST_AV_NEWSTEREO
    // currently it is unlikely... but maybe in the future?
    AVFrameSideData* aSideData = av_frame_get_side_data(myFrame.Frame, AV_FRAME_DATA_STEREO3D);
    if(aSideData != NULL) {
        AVStereo3D* aStereo = (AVStereo3D* )aSideData->data;
        mySrcFormat = stAV::stereo3dAvToSt(aStereo->type);
        if(aStereo->flags & AV_STEREO3D_FLAG_INVERT) {
            mySrcFormat = st::formatReversed(mySrcFormat);
        }
    } else {
        mySrcFormat = StFormat_AUTO;
    }
#endif

    // it is unlikely that there would be any metadata from the format...
    // but let's try
    if(myFormatCtx != NULL) {
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
        for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
            aTag != NULL;
            aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
            myMetadata.add(StDictEntry(aTag->key, aTag->value));
        }
    }

    // collect metadata from the frame
    stAV::meta::Dict* aFrameMetadata = stAV::meta::getFrameMetadata(myFrame.Frame);
    for(stAV::meta::Tag* aTag = stAV::meta::findTag(aFrameMetadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX);
        aTag != NULL;
        aTag = stAV::meta::findTag(aFrameMetadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) {
        myMetadata.add(StDictEntry(aTag->key, aTag->value));
    }

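    // wrap the decoded planes into StImage without copying when the pixel format
    // is supported directly; otherwise fall back to an RGB conversion below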
    stAV::dimYUV aDimsYUV;
    if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGR24) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgBGR, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGRA32) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgBGRA, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY8) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY16) {
        setColorModel(StImage::ImgColor_GRAY);
        changePlane(0).initWrapper(StImagePlane::ImgGray16, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB48) {
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initWrapper(StImagePlane::ImgRGB48, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA64) {
        setColorModel(StImage::ImgColor_RGBA);
        changePlane(0).initWrapper(StImagePlane::ImgRGBA64, myFrame.getPlane(0),
                                   myCodecCtx->width, myCodecCtx->height,
                                   myFrame.getLineSize(0));
    } else if(stAV::isFormatYUVPlanar(myCodecCtx, aDimsYUV)) {
    #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 29, 0))
        if(myCodecCtx->color_range == AVCOL_RANGE_JPEG) {
            aDimsYUV.isFullScale = true;
        }
    #endif
        setColorModel(StImage::ImgColor_YUV);
        setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Full : StImage::ImgScale_Mpeg);
        StImagePlane::ImgFormat aPlaneFrmt = StImagePlane::ImgGray;
        if(aDimsYUV.bitsPerComp == 9) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg9  : StImage::ImgScale_Mpeg9);
        } else if(aDimsYUV.bitsPerComp == 10) {
            aPlaneFrmt = StImagePlane::ImgGray16;
            setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg10 : StImage::ImgScale_Mpeg10);
        } else if(aDimsYUV.bitsPerComp == 16) {
            aPlaneFrmt = StImagePlane::ImgGray16;
        }

        changePlane(0).initWrapper(aPlaneFrmt, myFrame.getPlane(0),
                                   size_t(aDimsYUV.widthY), size_t(aDimsYUV.heightY), myFrame.getLineSize(0));
        changePlane(1).initWrapper(aPlaneFrmt, myFrame.getPlane(1),
                                   size_t(aDimsYUV.widthU), size_t(aDimsYUV.heightU), myFrame.getLineSize(1));
        changePlane(2).initWrapper(aPlaneFrmt, myFrame.getPlane(2),
                                   size_t(aDimsYUV.widthV), size_t(aDimsYUV.heightV), myFrame.getLineSize(2));
    } else {
        ///ST_DEBUG_LOG("StAVImage, perform conversion from Pixel format '" + avcodec_get_pix_fmt_name(myCodecCtx->pix_fmt) + "' to RGB");
        // initialize software scaler/converter
        SwsContext* pToRgbCtx = sws_getContext(myCodecCtx->width, myCodecCtx->height, myCodecCtx->pix_fmt,    // source
                                               myCodecCtx->width, myCodecCtx->height, stAV::PIX_FMT::RGB24, // destination
                                               SWS_BICUBIC, NULL, NULL, NULL);
        if(pToRgbCtx == NULL) {
            setState("SWScale library, failed to create SWScaler context");
            close();
            return false;
        }

        // initialize additional buffer for converted RGB data
        setColorModel(StImage::ImgColor_RGB);
        changePlane(0).initTrash(StImagePlane::ImgRGB,
                                 myCodecCtx->width, myCodecCtx->height);

        uint8_t* rgbData[4]; stMemZero(rgbData,     sizeof(rgbData));
        int  rgbLinesize[4]; stMemZero(rgbLinesize, sizeof(rgbLinesize));
        rgbData[0]     = changePlane(0).changeData();
        rgbLinesize[0] = (int )changePlane(0).getSizeRowBytes();

        sws_scale(pToRgbCtx,
                  myFrame.Frame->data, myFrame.Frame->linesize,
                  0, myCodecCtx->height,
                  rgbData, rgbLinesize);
        // the decoded data has been copied, so the codec context can be released
        closeAvCtx();

        sws_freeContext(pToRgbCtx);
    }

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, loaded image '") + aFileName + "' " + getDescription());

    // do not call close() here; the decoded image data still resides in the codec context cache
    return true;
}
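
A minimal usage sketch for the loader above. The default constructor, the getState() accessor and the scope of the ImageType enumerators are assumptions not shown in this listing:

// hypothetical caller of StAVImage::load(); sketch only, not part of the original code
// ("/tmp/left.jps" is a made-up path, getState() and the enum scope are assumed)
StAVImage anImage;
if(!anImage.load(StString("/tmp/left.jps"), StAVImage::ST_TYPE_JPS, NULL, 0)) {
    ST_ERROR_LOG(StString("StAVImage, loading failed: ") + anImage.getState());
}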

bool StAVImage::save(const StString& theFilePath,
                     ImageType       theImageType,
                     StFormat        theSrcFormat) {
    close();
    setState();
    if(isNull()) {
        return false;
    }

    PixelFormat aPFormatAV = (PixelFormat )getAVPixelFormat(*this);
    StImage anImage;
    switch(theImageType) {
        case ST_TYPE_PNG:
        case ST_TYPE_PNS: {
            myCodec = avcodec_find_encoder_by_name("png");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'png' not found");
                close();
                return false;
            }
            if(aPFormatAV == stAV::PIX_FMT::RGB24
            || aPFormatAV == stAV::PIX_FMT::RGBA32
            || aPFormatAV == stAV::PIX_FMT::GRAY8) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                anImage.changePlane().initTrash(StImagePlane::ImgRGB, getSizeX(), getSizeY(), getAligned(getSizeX() * 3));
                PixelFormat aPFrmtTarget = stAV::PIX_FMT::RGB24;
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }
        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif

            // setup encoder
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->compression_level = 9; // 0..9
            break;
        }
        case ST_TYPE_JPEG:
        case ST_TYPE_MPO:
        case ST_TYPE_JPS: {
            myCodec = avcodec_find_encoder_by_name("mjpeg");
            if(myCodec == NULL) {
                setState("AVCodec library, video codec 'mjpeg' not found");
                close();
                return false;
            }

            if(aPFormatAV == stAV::PIX_FMT::YUVJ420P
            || aPFormatAV == stAV::PIX_FMT::YUVJ422P
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ444P not supported by FFmpeg... yet?
            //|| aPFormatAV == stAV::PIX_FMT::YUVJ440P
               ) {
                anImage.initWrapper(*this);
            } else {
                // convert to compatible pixel format
                PixelFormat aPFrmtTarget = aPFormatAV == stAV::PIX_FMT::YUV420P ? stAV::PIX_FMT::YUVJ420P : stAV::PIX_FMT::YUVJ422P;
                anImage.setColorModel(StImage::ImgColor_YUV);
                anImage.setColorScale(StImage::ImgScale_Mpeg);
                anImage.changePlane(0).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(0).changeData(), '\0', anImage.getPlane(0).getSizeBytes());
                anImage.changePlane(1).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(1).changeData(), '\0', anImage.getPlane(1).getSizeBytes());
                anImage.changePlane(2).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX()));
                stMemSet(anImage.changePlane(2).changeData(), '\0', anImage.getPlane(2).getSizeBytes());
                if(!convert(*this,   aPFormatAV,
                            anImage, aPFrmtTarget)) {
                    setState("SWScale library, failed to create SWScaler context");
                    close();
                    return false;
                }
                aPFormatAV = aPFrmtTarget;
            }

        #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
            myCodecCtx = avcodec_alloc_context3(myCodec);
        #else
            myCodecCtx = avcodec_alloc_context();
        #endif
            myCodecCtx->pix_fmt = aPFormatAV;
            myCodecCtx->width   = (int )anImage.getSizeX();
            myCodecCtx->height  = (int )anImage.getSizeY();
            myCodecCtx->time_base.num = 1;
            myCodecCtx->time_base.den = 1;
            myCodecCtx->qmin = myCodecCtx->qmax = 5; // quality factor: lower is better
            break;
        }
        case ST_TYPE_NONE:
        default:
            close();
            return false;
    }

    // open VIDEO codec
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0))
    if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) {
#else
    if(avcodec_open(myCodecCtx, myCodec) < 0) {
#endif
        setState("AVCodec library, could not open video codec");
        close();
        return false;
    }

    // wrap own data into AVFrame
    myFrame.Frame->format = myCodecCtx->pix_fmt;
    myFrame.Frame->width  = myCodecCtx->width;
    myFrame.Frame->height = myCodecCtx->height;
    fillPointersAV(anImage, myFrame.Frame->data, myFrame.Frame->linesize);

#ifdef ST_AV_NEWSTEREO
    bool isReversed = false;
    AVStereo3DType anAvStereoType = stAV::stereo3dStToAv(theSrcFormat, isReversed);
    if(anAvStereoType != (AVStereo3DType )-1) {
        AVStereo3D* aStereo = av_stereo3d_create_side_data(myFrame.Frame);
        if(aStereo != NULL) {
            aStereo->type = anAvStereoType;
            if(isReversed) {
                aStereo->flags |= AV_STEREO3D_FLAG_INVERT;
            }
        }
    }
#endif

    StJpegParser aRawFile(theFilePath);
    if(!aRawFile.openFile(StRawFile::WRITE)) {
        setState("Can not open the file for writing");
        close();
        return false;
    }

    // allocate a buffer that is certainly large enough (formula copied from ffmpeg.c)
    int aBuffSize = int(getSizeX() * getSizeY() * 10);
    aRawFile.initBuffer(aBuffSize);

    // encode the image
    StAVPacket aPacket;
    aPacket.getAVpkt()->data = (uint8_t* )aRawFile.changeBuffer();
    aPacket.getAVpkt()->size = aBuffSize;
#if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54, 2, 100))
    int isGotPacket = 0;
    int anEncSize   = avcodec_encode_video2(myCodecCtx, aPacket.getAVpkt(), myFrame.Frame, &isGotPacket);
    if(anEncSize == 0 && isGotPacket != 0) {
        anEncSize = aPacket.getSize();
    }
#else
    int anEncSize = avcodec_encode_video(myCodecCtx, aPacket.changeData(), aPacket.getSize(), myFrame.Frame);
#endif
    if(anEncSize <= 0) {
        setState("AVCodec library, fail to encode the image");
        close();
        return false;
    }
    aRawFile.setDataSize((size_t )anEncSize);

    // save metadata when possible
    if(theImageType == ST_TYPE_JPEG
    || theImageType == ST_TYPE_JPS) {
        if(aRawFile.parse()) {
            if(theSrcFormat != StFormat_AUTO) {
                aRawFile.setupJps(theSrcFormat);
            }
        } else {
            ST_ERROR_LOG("AVCodec library, created JPEG can not be parsed!");
        }
    }

    // store current content
    aRawFile.writeFile();
    // and finally close the file handle
    aRawFile.closeFile();

    close();

    // set debug information
    StString aDummy, aFileName;
    StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName);
    setState(StString("AVCodec library, saved image '") + aFileName + "' " + getDescription());

    return true;
}
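
A matching round-trip sketch for the encoder path, under the same assumptions about construction and accessors; StFormat_AUTO is taken from the listing above:

// hypothetical round trip: decode a JPEG and re-encode it as PNG
// (sketch only; the calling convention outside this class is assumed)
StAVImage anImage;
if(anImage.load(StString("/tmp/input.jpg"), StAVImage::ST_TYPE_JPEG, NULL, 0)
&& anImage.save(StString("/tmp/output.png"), StAVImage::ST_TYPE_PNG, StFormat_AUTO)) {
    ST_DEBUG_LOG(anImage.getState()); // "AVCodec library, saved image ..."
}
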
Example #2
bool StAVVideoMuxer::save(const StString& theFile) {
    if(myCtxListSrc.isEmpty()
    || theFile.isEmpty()) {
        return false;
    }

    StString aFormatName = myCtxListSrc[0]->iformat->name;
    const char* aFormatStr = formatToMetadata(myStereoFormat);

    std::vector<StRemuxContext> aSrcCtxList;
    unsigned int aStreamCount = 0;

    StAVOutContext aCtxOut;
    if(!aCtxOut.findFormat(NULL, theFile.toCString())) {
        signals.onError(StString("Unable to find a suitable output format for '") + theFile + "'.");
        return false;
    } else if(!aCtxOut.create(theFile)) {
        signals.onError(StString("Could not create output context."));
        return false;
    }

    for(size_t aCtxId = 0; aCtxId < myCtxListSrc.size(); ++aCtxId) {
        StRemuxContext aCtxSrc;
        aCtxSrc.Context = myCtxListSrc[aCtxId];
        if(aCtxId == 0) {
            av_dict_copy(&aCtxOut.Context->metadata, aCtxSrc.Context->metadata, AV_DICT_DONT_OVERWRITE);
            av_dict_set(&aCtxOut.Context->metadata, "STEREO_MODE", aFormatStr, 0);
        }
        for(unsigned int aStreamId = 0; aStreamId < aCtxSrc.Context->nb_streams; ++aStreamId) {
            aCtxSrc.Streams.add((unsigned int )-1);
            AVStream* aStreamSrc = aCtxSrc.Context->streams[aStreamId];
            if(stAV::getCodecType(aStreamSrc) == AVMEDIA_TYPE_VIDEO) {
                if(addStream(aCtxOut.Context, aStreamSrc)) {
                    aCtxSrc.Streams[aStreamId] = aStreamCount++;
                }
            }
        }
        aSrcCtxList.push_back(aCtxSrc);
    }

    // add audio streams after video
    for(size_t aCtxId = 0; aCtxId < myCtxListSrc.size(); ++aCtxId) {
        StRemuxContext& aCtxSrc = aSrcCtxList[aCtxId];
        for(unsigned int aStreamId = 0; aStreamId < aCtxSrc.Context->nb_streams; ++aStreamId) {
            AVStream* aStreamSrc = aCtxSrc.Context->streams[aStreamId];
            if(stAV::getCodecType(aStreamSrc) == AVMEDIA_TYPE_AUDIO
            && addStream(aCtxOut.Context, aStreamSrc)) {
                aCtxSrc.Streams[aStreamId] = aStreamCount++;
            }
        }
    }

    // add other streams (subtitles) at the end
    for(size_t aCtxId = 0; aCtxId < myCtxListSrc.size(); ++aCtxId) {
        StRemuxContext& aCtxSrc = aSrcCtxList[aCtxId];
        for(unsigned int aStreamId = 0; aStreamId < aCtxSrc.Context->nb_streams; ++aStreamId) {
            AVStream* aStreamSrc = aCtxSrc.Context->streams[aStreamId];
            if(stAV::getCodecType(aStreamSrc) != AVMEDIA_TYPE_VIDEO
            && stAV::getCodecType(aStreamSrc) != AVMEDIA_TYPE_AUDIO
            && addStream(aCtxOut.Context, aStreamSrc)) {
                aCtxSrc.Streams[aStreamId] = aStreamCount++;
            }
        }
    }

    av_dump_format(aCtxOut.Context, 0, theFile.toCString(), 1);
    if(!(aCtxOut.Context->oformat->flags & AVFMT_NOFILE)) {
        const int aState = avio_open2(&aCtxOut.Context->pb, theFile.toCString(), AVIO_FLAG_WRITE, NULL, NULL);
        if(aState < 0) {
            signals.onError(StString("Could not open output file '") + theFile + "' (" + stAV::getAVErrorDescription(aState) + ")");
            return false;
        }
    }

    int aState = avformat_write_header(aCtxOut.Context, NULL);
    if(aState < 0) {
        signals.onError(StString("Error occurred when opening output file (") + stAV::getAVErrorDescription(aState) + ").");
        return false;
    }

    StAVPacket aPacket;
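    // remux loop: pull one packet from each still-active source per pass and
    // interleave it into the output until every source context is drained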
    for(;;) {
        size_t aNbEmpty = 0;
        for(size_t aCtxId = 0; aCtxId < aSrcCtxList.size(); ++aCtxId) {
            StRemuxContext& aCtxSrc = aSrcCtxList[aCtxId];
            if(!aCtxSrc.State) {
                ++aNbEmpty;
                continue;
            }

            if(av_read_frame(aCtxSrc.Context, aPacket.getAVpkt()) < 0) {
                aCtxSrc.State = false;
                ++aNbEmpty;
                continue;
            }

            unsigned int aStreamOutIndex = aCtxSrc.Streams[aPacket.getStreamId()];
            if(aStreamOutIndex == (unsigned int )-1) {
                continue;
            }

            AVStream* aStreamIn  = aCtxSrc.Context->streams[aPacket.getStreamId()];
            AVStream* aStreamOut = aCtxOut.Context->streams[aStreamOutIndex];

        #ifdef ST_LIBAV_FORK
            const AVRounding aRoundParams = AV_ROUND_NEAR_INF;
        #else
            const AVRounding aRoundParams = AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        #endif
            aPacket.getAVpkt()->pts      = av_rescale_q_rnd(aPacket.getPts(), aStreamIn->time_base, aStreamOut->time_base, aRoundParams);
            aPacket.getAVpkt()->dts      = av_rescale_q_rnd(aPacket.getDts(), aStreamIn->time_base, aStreamOut->time_base, aRoundParams);
            aPacket.getAVpkt()->duration = static_cast<int >(av_rescale_q(aPacket.getDuration(), aStreamIn->time_base, aStreamOut->time_base));
            aPacket.getAVpkt()->pos      = -1;

            aState = av_interleaved_write_frame(aCtxOut.Context, aPacket.getAVpkt());
            if(aState < 0) {
                signals.onError(StString("Error muxing packet (") + stAV::getAVErrorDescription(aState) + ").");
                return false;
            }
            aPacket.free();
        }
        if(aNbEmpty == aSrcCtxList.size()) {
            break;
        }
    }
    av_write_trailer(aCtxOut.Context);
    return true;
}
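
A sketch of how this muxer might be driven. Only save() appears in the listing, so addFile(), setStereoFormat() and the StFormat enumerator used here are assumed names, standing in for whatever API actually fills myCtxListSrc and myStereoFormat:

// hypothetical driver for StAVVideoMuxer::save() (sketch only; addFile(), setStereoFormat()
// and StFormat_SideBySide_LR are assumed names, not confirmed by the listing above)
StAVVideoMuxer aMuxer;
aMuxer.addFile(StString("/tmp/left.mkv"));
aMuxer.addFile(StString("/tmp/right.mkv"));
aMuxer.setStereoFormat(StFormat_SideBySide_LR);
if(!aMuxer.save(StString("/tmp/stereo.mkv"))) {
    // errors are reported through signals.onError() inside save()
}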