// Joystick button released: left/right buttons cycle the selected plane,
// the B button confirms the current selection.
void StageMenu::onJoyButtonUp(SDL_JoyButtonEvent event) {
    if (event.button == RIGHT_BUTTON) {
        changePlane(selected_plane + 1);
    } else if (event.button == LEFT_BUTTON) {
        changePlane(selected_plane - 1);
    } else if (event.button == B_BUTTON) {
        to_select_plane = true;
    }
}
// Keyboard counterpart of the joystick handler: arrow keys cycle the
// selected plane, 'b' confirms the current selection.
void StageMenu::onKeyPressed(SDL_KeyboardEvent event) {
    const SDL_Keycode aKey = event.keysym.sym;
    if (aKey == SDLK_RIGHT) {
        changePlane(selected_plane + 1);
    } else if (aKey == SDLK_LEFT) {
        changePlane(selected_plane - 1);
    } else if (aKey == SDLK_b) {
        to_select_plane = true;
    }
}
/* Interactively add a passenger: show all flights, prompt for a flight
 * number (0 = EXIT aborts), and if the chosen plane still has room,
 * hand it to changePlane() to register the passenger. */
void addPassenger(Flight** flights, int numFlights) {
    displayAllFlight(*flights, numFlights);
    printf("\nFlight number (0 = exit): ");
    int flightNumber = getNumber();

    /* Re-prompt until a valid flight number is entered; EXIT bails out. */
    while (!isValidFlightNumber(flightNumber, *flights, numFlights)) {
        if (flightNumber == EXIT) {
            return;
        }
        printf("\nFlight number (0 = exit): ");
        flightNumber = getNumber();
    }

    int flightIndex = getFlightIndex(flightNumber, *flights, numFlights);
    /* Full planes are skipped silently. */
    if (!isFullPlane(flightNumber, (*flights)[flightIndex].plane)) {
        changePlane(&((*flights)[flightIndex].plane));
    }
}
bool StAVImage::load(const StString& theFilePath, ImageType theImageType, uint8_t* theDataPtr, int theDataSize) { // reset current data StImage::nullify(); setState(); close(); myMetadata.clear(); switch(theImageType) { case ST_TYPE_PNG: case ST_TYPE_PNS: { myCodec = avcodec_find_decoder_by_name("png"); break; } case ST_TYPE_JPEG: case ST_TYPE_MPO: case ST_TYPE_JPS: { myCodec = avcodec_find_decoder_by_name("mjpeg"); break; } case ST_TYPE_EXR: { myCodec = avcodec_find_decoder_by_name("exr"); break; } case ST_TYPE_WEBP: case ST_TYPE_WEBPLL: { myCodec = avcodec_find_decoder_by_name("webp"); break; } default: { break; } } if(theImageType == ST_TYPE_NONE || (theDataPtr == NULL && !StFileNode::isFileExists(theFilePath))) { // open image file and detect its type, its could be non local file! #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0)) int avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), myImageFormat, NULL); #else int avErrCode = av_open_input_file (&myFormatCtx, theFilePath.toCString(), myImageFormat, 0, NULL); #endif if(avErrCode != 0 || myFormatCtx->nb_streams < 1 || myFormatCtx->streams[0]->codec->codec_id == 0) { if(myFormatCtx != NULL) { #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 17, 0)) avformat_close_input(&myFormatCtx); #else av_close_input_file(myFormatCtx); myFormatCtx = NULL; #endif } #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0)) avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), NULL, NULL); #else avErrCode = av_open_input_file(&myFormatCtx, theFilePath.toCString(), NULL, 0, NULL); #endif } if(avErrCode != 0 || myFormatCtx->nb_streams < 1) { setState(StString("AVFormat library, couldn't open image file. 
Error: ") + stAV::getAVErrorDescription(avErrCode)); close(); return false; } // find the decoder for the video stream myCodecCtx = myFormatCtx->streams[0]->codec; if(theImageType == ST_TYPE_NONE) { myCodec = avcodec_find_decoder(myCodecCtx->codec_id); } } if(myCodec == NULL) { setState("AVCodec library, video codec not found"); close(); return false; } else if(myFormatCtx == NULL) { // use given image type to load decoder #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif } // stupid check if(myCodecCtx == NULL) { setState("AVCodec library, codec context is NULL"); close(); return false; } // open VIDEO codec #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) { #else if(avcodec_open(myCodecCtx, myCodec) < 0) { #endif setState("AVCodec library, could not open video codec"); close(); return false; } // read one packet or file StRawFile aRawFile(theFilePath); StAVPacket anAvPkt; if(theDataPtr != NULL && theDataSize != 0) { anAvPkt.getAVpkt()->data = theDataPtr; anAvPkt.getAVpkt()->size = theDataSize; } else { if(myFormatCtx != NULL) { if(av_read_frame(myFormatCtx, anAvPkt.getAVpkt()) < 0) { setState("AVFormat library, could not read first packet"); close(); return false; } } else { if(!aRawFile.readFile()) { setState("StAVImage, could not read the file"); close(); return false; } anAvPkt.getAVpkt()->data = (uint8_t* )aRawFile.getBuffer(); anAvPkt.getAVpkt()->size = (int )aRawFile.getSize(); } } anAvPkt.setKeyFrame(); // decode one frame int isFrameFinished = 0; #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 23, 0)) avcodec_decode_video2(myCodecCtx, myFrame.Frame, &isFrameFinished, anAvPkt.getAVpkt()); #else avcodec_decode_video(myCodecCtx, myFrame.Frame, &isFrameFinished, theDataPtr, theDataSize); #endif if(isFrameFinished == 0) { // thats not an image!!! try to decode more packets??? 
setState("AVCodec library, input file is not an Image!"); close(); return false; } // check frame size if(myCodecCtx->width <= 0 || myCodecCtx->height <= 0) { setState("AVCodec library, codec returns wrong frame size"); close(); return false; } // read aspect ratio if(myCodecCtx->sample_aspect_ratio.num == 0 || myCodecCtx->sample_aspect_ratio.den == 0) { setPixelRatio(1.0f); } else { const GLfloat aRatio = GLfloat(myCodecCtx->sample_aspect_ratio.num) / GLfloat(myCodecCtx->sample_aspect_ratio.den); if(aRatio > 70.0f) { ST_DEBUG_LOG("AVCodec library, igning wrong PAR " + myCodecCtx->sample_aspect_ratio.num + ":" + myCodecCtx->sample_aspect_ratio.den); setPixelRatio(1.0f); } else { setPixelRatio(aRatio); } } #ifdef ST_AV_NEWSTEREO // currently it is unlikelly... but maybe in future? AVFrameSideData* aSideData = av_frame_get_side_data(myFrame.Frame, AV_FRAME_DATA_STEREO3D); if(aSideData != NULL) { AVStereo3D* aStereo = (AVStereo3D* )aSideData->data; mySrcFormat = stAV::stereo3dAvToSt(aStereo->type); if(aStereo->flags & AV_STEREO3D_FLAG_INVERT) { mySrcFormat = st::formatReversed(mySrcFormat); } } else { mySrcFormat = StFormat_AUTO; } #endif // it is unlikely that there would be any metadata from format... 
// but lets try if(myFormatCtx != NULL) { for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(myFormatCtx->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } } // collect metadata from the frame stAV::meta::Dict* aFrameMetadata = stAV::meta::getFrameMetadata(myFrame.Frame); for(stAV::meta::Tag* aTag = stAV::meta::findTag(aFrameMetadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(aFrameMetadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } stAV::dimYUV aDimsYUV; if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB24) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgRGB, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGR24) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgBGR, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA32) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgRGBA, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGRA32) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgBGRA, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == 
stAV::PIX_FMT::GRAY8) { setColorModel(StImage::ImgColor_GRAY); changePlane(0).initWrapper(StImagePlane::ImgGray, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY16) { setColorModel(StImage::ImgColor_GRAY); changePlane(0).initWrapper(StImagePlane::ImgGray16, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB48) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgRGB48, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA64) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgRGBA64, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(stAV::isFormatYUVPlanar(myCodecCtx, aDimsYUV)) { #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 29, 0)) if(myCodecCtx->color_range == AVCOL_RANGE_JPEG) { aDimsYUV.isFullScale = true; } #endif setColorModel(StImage::ImgColor_YUV); setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Full : StImage::ImgScale_Mpeg); StImagePlane::ImgFormat aPlaneFrmt = StImagePlane::ImgGray; if(aDimsYUV.bitsPerComp == 9) { aPlaneFrmt = StImagePlane::ImgGray16; setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg9 : StImage::ImgScale_Mpeg9); } else if(aDimsYUV.bitsPerComp == 10) { aPlaneFrmt = StImagePlane::ImgGray16; setColorScale(aDimsYUV.isFullScale ? 
StImage::ImgScale_Jpeg10 : StImage::ImgScale_Mpeg10); } else if(aDimsYUV.bitsPerComp == 16) { aPlaneFrmt = StImagePlane::ImgGray16; } changePlane(0).initWrapper(aPlaneFrmt, myFrame.getPlane(0), size_t(aDimsYUV.widthY), size_t(aDimsYUV.heightY), myFrame.getLineSize(0)); changePlane(1).initWrapper(aPlaneFrmt, myFrame.getPlane(1), size_t(aDimsYUV.widthU), size_t(aDimsYUV.heightU), myFrame.getLineSize(1)); changePlane(2).initWrapper(aPlaneFrmt, myFrame.getPlane(2), size_t(aDimsYUV.widthV), size_t(aDimsYUV.heightV), myFrame.getLineSize(2)); } else { ///ST_DEBUG_LOG("StAVImage, perform conversion from Pixel format '" + avcodec_get_pix_fmt_name(myCodecCtx->pix_fmt) + "' to RGB"); // initialize software scaler/converter SwsContext* pToRgbCtx = sws_getContext(myCodecCtx->width, myCodecCtx->height, myCodecCtx->pix_fmt, // source myCodecCtx->width, myCodecCtx->height, stAV::PIX_FMT::RGB24, // destination SWS_BICUBIC, NULL, NULL, NULL); if(pToRgbCtx == NULL) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } // initialize additional buffer for converted RGB data setColorModel(StImage::ImgColor_RGB); changePlane(0).initTrash(StImagePlane::ImgRGB, myCodecCtx->width, myCodecCtx->height); uint8_t* rgbData[4]; stMemZero(rgbData, sizeof(rgbData)); int rgbLinesize[4]; stMemZero(rgbLinesize, sizeof(rgbLinesize)); rgbData[0] = changePlane(0).changeData(); rgbLinesize[0] = (int )changePlane(0).getSizeRowBytes(); sws_scale(pToRgbCtx, myFrame.Frame->data, myFrame.Frame->linesize, 0, myCodecCtx->height, rgbData, rgbLinesize); // reset original data closeAvCtx(); sws_freeContext(pToRgbCtx); } // set debug information StString aDummy, aFileName; StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName); setState(StString("AVCodec library, loaded image '") + aFileName + "' " + getDescription()); // we should not close the file because decoded image data is in codec context cache return true; } bool StAVImage::save(const StString& theFilePath, 
ImageType theImageType, StFormat theSrcFormat) { close(); setState(); if(isNull()) { return false; } PixelFormat aPFormatAV = (PixelFormat )getAVPixelFormat(*this); StImage anImage; switch(theImageType) { case ST_TYPE_PNG: case ST_TYPE_PNS: { myCodec = avcodec_find_encoder_by_name("png"); if(myCodec == NULL) { setState("AVCodec library, video codec 'png' not found"); close(); return false; } if(aPFormatAV == stAV::PIX_FMT::RGB24 || aPFormatAV == stAV::PIX_FMT::RGBA32 || aPFormatAV == stAV::PIX_FMT::GRAY8) { anImage.initWrapper(*this); } else { // convert to compatible pixel format anImage.changePlane().initTrash(StImagePlane::ImgRGB, getSizeX(), getSizeY(), getAligned(getSizeX() * 3)); PixelFormat aPFrmtTarget = stAV::PIX_FMT::RGB24; if(!convert(*this, aPFormatAV, anImage, aPFrmtTarget)) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } aPFormatAV = aPFrmtTarget; } #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif // setup encoder myCodecCtx->pix_fmt = aPFormatAV; myCodecCtx->width = (int )anImage.getSizeX(); myCodecCtx->height = (int )anImage.getSizeY(); myCodecCtx->compression_level = 9; // 0..9 break; } case ST_TYPE_JPEG: case ST_TYPE_MPO: case ST_TYPE_JPS: { myCodec = avcodec_find_encoder_by_name("mjpeg"); if(myCodec == NULL) { setState("AVCodec library, video codec 'mjpeg' not found"); close(); return false; } if(aPFormatAV == stAV::PIX_FMT::YUVJ420P || aPFormatAV == stAV::PIX_FMT::YUVJ422P //|| aPFormatAV == stAV::PIX_FMT::YUVJ444P not supported by FFmpeg... yet? //|| aPFormatAV == stAV::PIX_FMT::YUVJ440P ) { anImage.initWrapper(*this); } else { // convert to compatible pixel format PixelFormat aPFrmtTarget = aPFormatAV == stAV::PIX_FMT::YUV420P ? 
stAV::PIX_FMT::YUVJ420P : stAV::PIX_FMT::YUVJ422P; anImage.setColorModel(StImage::ImgColor_YUV); anImage.setColorScale(StImage::ImgScale_Mpeg); anImage.changePlane(0).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(0).changeData(), '\0', anImage.getPlane(0).getSizeBytes()); anImage.changePlane(1).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(1).changeData(), '\0', anImage.getPlane(1).getSizeBytes()); anImage.changePlane(2).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(2).changeData(), '\0', anImage.getPlane(2).getSizeBytes()); if(!convert(*this, aPFormatAV, anImage, aPFrmtTarget)) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } aPFormatAV = aPFrmtTarget; } #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif myCodecCtx->pix_fmt = aPFormatAV; myCodecCtx->width = (int )anImage.getSizeX(); myCodecCtx->height = (int )anImage.getSizeY(); myCodecCtx->time_base.num = 1; myCodecCtx->time_base.den = 1; myCodecCtx->qmin = myCodecCtx->qmax = 5; // quality factor - lesser is better break; } case ST_TYPE_NONE: default: close(); return false; } // open VIDEO codec #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) { #else if(avcodec_open(myCodecCtx, myCodec) < 0) { #endif setState("AVCodec library, could not open video codec"); close(); return false; } // wrap own data into AVFrame myFrame.Frame->format = myCodecCtx->pix_fmt; myFrame.Frame->width = myCodecCtx->width; myFrame.Frame->height = myCodecCtx->height; fillPointersAV(anImage, myFrame.Frame->data, myFrame.Frame->linesize); #ifdef ST_AV_NEWSTEREO bool isReversed = false; AVStereo3DType anAvStereoType = stAV::stereo3dStToAv(theSrcFormat, 
isReversed); if(anAvStereoType != (AVStereo3DType )-1) { AVStereo3D* aStereo = av_stereo3d_create_side_data(myFrame.Frame); if(aStereo != NULL) { aStereo->type = anAvStereoType; if(isReversed) { aStereo->flags |= AV_STEREO3D_FLAG_INVERT; } } } #endif StJpegParser aRawFile(theFilePath); if(!aRawFile.openFile(StRawFile::WRITE)) { setState("Can not open the file for writing"); close(); return false; } // allocate the buffer, large enough (stupid formula copied from ffmpeg.c) int aBuffSize = int(getSizeX() * getSizeY() * 10); aRawFile.initBuffer(aBuffSize); // encode the image StAVPacket aPacket; aPacket.getAVpkt()->data = (uint8_t* )aRawFile.changeBuffer(); aPacket.getAVpkt()->size = aBuffSize; #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54, 2, 100)) int isGotPacket = 0; int anEncSize = avcodec_encode_video2(myCodecCtx, aPacket.getAVpkt(), myFrame.Frame, &isGotPacket); if(anEncSize == 0 && isGotPacket != 0) { anEncSize = aPacket.getSize(); } #else int anEncSize = avcodec_encode_video(myCodecCtx, aPacket.changeData(), aPacket.getSize(), myFrame.Frame); #endif if(anEncSize <= 0) { setState("AVCodec library, fail to encode the image"); close(); return false; } aRawFile.setDataSize((size_t )anEncSize); // save metadata when possible if(theImageType == ST_TYPE_JPEG || theImageType == ST_TYPE_JPS) { if(aRawFile.parse()) { if(theSrcFormat != StFormat_AUTO) { aRawFile.setupJps(theSrcFormat); } } else { ST_ERROR_LOG("AVCodec library, created JPEG can not be parsed!"); } } // store current content aRawFile.writeFile(); // and finally close the file handle aRawFile.closeFile(); close(); // set debug information StString aDummy, aFileName; StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName); setState(StString("AVCodec library, saved image '") + aFileName + "' " + getDescription()); return true; }
bool StFreeImage::load(const StString& theFilePath, ImageType theImageType, uint8_t* theDataPtr, int theDataSize) { if(!StFreeImage::init()) { setState("FreeImage library is not initialized"); return false; } // reset current data StImage::nullify(); setState(); close(); FREE_IMAGE_FORMAT aFIF = convertToFIF(theImageType); if(theDataPtr != NULL && theDataSize != 0 && aFIF != FIF_UNKNOWN) { FIMEMORY* aFIMemory = FreeImage_OpenMemory(theDataPtr, theDataSize); if(aFIMemory == NULL) { setState("FreeImage library, internal error"); return false; } myDIB = FreeImage_LoadFromMemory(aFIF, aFIMemory, 0); FreeImage_CloseMemory(aFIMemory); } else { // check the file signature and deduce its format #if defined(_WIN32) StStringUtfWide aFilePathWide = theFilePath.toUtfWide(); aFIF = FreeImage_GetFileType(aFilePathWide.toCString(), 0); #else aFIF = FreeImage_GetFileType(theFilePath.toCString(), 0); #endif if(aFIF == FIF_UNKNOWN) { // no signature? try to guess the file format from the file extension #if defined(_WIN32) aFIF = FreeImage_GetFIFFromFilename(aFilePathWide.toCString()); #else aFIF = FreeImage_GetFIFFromFilename(theFilePath.toCString()); #endif } if((aFIF == FIF_UNKNOWN) || !FreeImage_FIFSupportsReading(aFIF)) { setState("FreeImage library does not support image format"); return false; } int loadFlags = 0; if(aFIF == FIF_GIF) { // GIF_PLAYBACK - 'Play' the GIF to generate each frame (as 32bpp) instead of returning raw frame data when loading loadFlags = 2; } else if(aFIF == FIF_ICO) { // ICO_MAKEALPHA - convert to 32bpp and create an alpha channel from the AND-mask when loading loadFlags = 1; } #if defined(_WIN32) myDIB = FreeImage_Load(aFIF, aFilePathWide.toCString(), loadFlags); #else myDIB = FreeImage_Load(aFIF, theFilePath.toCString(), loadFlags); #endif } if(myDIB == NULL) { setState("FreeImage library, loading file failed"); return false; } StImagePlane::ImgFormat stImgFormat = convertFromFreeFormat(FreeImage_GetImageType(myDIB), FreeImage_GetColorType(myDIB), 
FreeImage_GetBPP(myDIB)); if(stImgFormat == StImagePlane::ImgUNKNOWN) { setState(StString("StFreeImage, image format ") + FreeImage_GetImageType(myDIB) + ", " + FreeImage_GetColorType(myDIB) + " doesn't supported by application"); close(); return false; } setColorModelPacked(stImgFormat); changePlane(0).initWrapper(stImgFormat, FreeImage_GetBits(myDIB), FreeImage_GetWidth(myDIB), FreeImage_GetHeight(myDIB), FreeImage_GetPitch(myDIB)); // FreeImage data always bottom-up... changePlane(0).setTopDown(false); // set debug information StString aDummy, aFileName; StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName); setState(StString("FreeImage library, loaded image '") + aFileName + "' " + getDescription()); // we should not close the file because we create a wrapper over FreeImage native object return true; }