//----------------------------------------------------------
// Routes a mouse-drag update to the active picker element.
// Returns true when the event is consumed (cursor inside the
// picker background), false otherwise.
bool ofxColorPicker_<ColorType>::mouseUpdate(ofMouseEventArgs& mouse){
	if(!rectBackground.inside(mouse)){
		return false;
	}
	switch(state){
		case ChangingScale:{
			// Vertical position along the bar maps to scale; top edge = 1.
			int offsetY = mouse.y - rectColorScaleBar.y;
			float newScale = 1.f - saturate(offsetY / rectColorScaleBar.height);
			setColorScale(newScale);
			setNeedsRedraw();
			break;
		}
		case ChangingWheel:{
			// Convert the wheel-relative cursor position to polar coordinates.
			auto local = mouse - rectColorWheel.position.xy();
			auto polar = getPolarCoordinate(local, colorWheelRadius);
			colorAngle = polar.angle;
			colorRadius = saturate(polar.radius);
			// Guard flag prevents the color setter from re-entering this path.
			bSettingColor = true;
			color = getCircularColor<ColorType>(colorAngle, colorRadius, colorScale);
			bSettingColor = false;
			setNeedsRedraw();
			break;
		}
		default:
			break;
	}
	return true;
}
bool AdaptiveGridTest::init() { float gz = 128.f; setColorScale(.43f / gz); setNodeDrawSize(gz * .008f); m_msh.fillBox(BoundingBox(-250.f, -50.f, -250.f, 250.f, 50.f, 250.f), gz); m_distFunc.addSphere(Vector3F( 9.f, 17.f, -1.f), 27.f ); m_distFunc.addSphere(Vector3F(-54.f, -13.f, -1.f), 64.f ); m_distFunc.addSphere(Vector3F(38.f, -10.f, -22.f), 21.1f ); m_distFunc.addSphere(Vector3F(-100.f, -3420.1f, -100.f), 3400.f ); #define MAX_BUILD_LEVEL 5 #define MAX_BUILD_ERROR .5f m_msh.adaptiveBuild<BDistanceFunction>(&m_distFunc, MAX_BUILD_LEVEL, MAX_BUILD_ERROR); std::cout<<"\n triangulating"<<std::endl; m_msh.triangulateFront(); #if 0 checkTetraVolumeExt<DistanceNode, ITetrahedron>(m_msh.nodes(), m_msh.numTetrahedrons(), m_msh.tetrahedrons() ); #endif std::cout.flush(); return true; }
// Adjusts the color scale with the scroll wheel while the cursor is
// over the scale bar. Returns true when the event was consumed.
bool ofxColorPicker_<ColorType>::mouseScrolled(ofMouseEventArgs & mouse){
	if(rectColorScaleBar.inside(mouse)){
		// 0.001f (was 0.001): keeps the arithmetic in float and avoids a
		// silent float -> double promotion followed by a narrowing back.
		setColorScale(saturate(colorScale + mouse.scrollY * 0.001f));
		setNeedsRedraw();
		return true;
	}else{
		return false;
	}
}
// Constructs the bar bound to the given color scale; selection and
// real-value ranges start out empty. The gradient runs bottom-to-top
// over the bar's fixed pixel rectangle.
ColorBar::ColorBar(ColorScale *colorScale)
    : _colorScale(colorScale)
    , _minReal(0.0)
    , _maxReal(0.0)
    , _minSel(0)
    , _maxSel(0)
    , _leftButtonPress(false)
{
    _gradient = QLinearGradient(BAR_START_X, BAR_START_Y + BAR_HEIGHT,
                                BAR_START_X, BAR_START_Y);
    setColorScale(colorScale);
}
//------------------------------------------------------------------------------ //## Basic TEST_F(Test_UI_UIElement, RenderEffects) { //* [ ] UISprite はデフォルトで BlendMode=Alpha { auto tex1 = Assets::loadTexture(LN_ASSETFILE("Sprite1.png")); //* [ ] Visible auto sprite1 = newObject<UISprite>(); sprite1->setTexture(tex1); sprite1->setPosition(0, 0, 0); sprite1->setVisible(false); //* [ ] BlendMode auto sprite2 = newObject<UISprite>(); sprite2->setTexture(tex1); sprite2->setBlendMode(BlendMode::Add); sprite2->setPosition(32, 0, 0); //* [ ] Opacity auto sprite3 = newObject<UISprite>(); sprite3->setTexture(tex1); sprite3->setPosition(64, 0, 0); sprite3->setOpacity(0.5); //* [ ] ColorScale auto sprite4 = newObject<UISprite>(); sprite4->setTexture(tex1); sprite4->setPosition(96, 0, 0); sprite4->setColorScale(Color(1, 0, 0, 1)); //* [ ] BlendColor auto sprite5 = newObject<UISprite>(); sprite5->setTexture(tex1); sprite5->setPosition(0, 32, 0); sprite5->setBlendColor(Color(1, 0, 0, 1)); //* [ ] Tone auto sprite6 = newObject<UISprite>(); sprite6->setTexture(tex1); sprite6->setPosition(32, 32, 0); sprite6->setTone(ColorTone(0.5, 0.3, 0.1, 1.0)); TestEnv::updateFrame(); ASSERT_SCREEN(LN_ASSETFILE("Result/Test_UI_UIElement-RenderEffects-1.png")); LN_TEST_CLEAN_SCENE; } }
//------------------------------------------------------------------------------ //## Basic TEST_F(Test_Visual_VisualComponent, BuiltinEffects) { // TODO: Builtin shader ごとに行う { auto tex1 = Assets::loadTexture(LN_ASSETFILE("Sprite1.png")); //* [ ] Hide auto sprite1 = Sprite::create(tex1, 3, 3); //sprite1->setShadingModel(ShadingModel::UnLighting); sprite1->setPosition(-6, 0, 0); sprite1->setVisible(false); //* [ ] Normal auto sprite2 = Sprite::create(tex1, 3, 3); //sprite2->setShadingModel(ShadingModel::UnLighting); sprite2->setPosition(-3, 0, 0); //* [ ] Opacity auto sprite3 = Sprite::create(tex1, 3, 3); //sprite3->setShadingModel(ShadingModel::UnLighting); sprite3->setPosition(-6, 3, 0); sprite3->setBlendMode(BlendMode::Alpha); sprite3->setOpacity(0.5); //* [ ] ColorScale auto sprite4 = Sprite::create(tex1, 3, 3); //sprite4->setShadingModel(ShadingModel::UnLighting); sprite4->setPosition(-3, 3, 0); sprite4->setColorScale(Color(1, 0, 0, 1)); //* [ ] BlendColor auto sprite5 = Sprite::create(tex1, 3, 3); //sprite5->setShadingModel(ShadingModel::UnLighting); sprite5->setPosition(0, 3, 0); sprite5->setBlendColor(Color(1, 0, 0, 1)); //* [ ] Tone auto sprite6 = Sprite::create(tex1, 3, 3); //sprite6->setShadingModel(ShadingModel::UnLighting); sprite6->setPosition(3, 3, 0); sprite6->setTone(ColorTone(0.5, 0.3, 0.1, 1.0)); TestEnv::updateFrame(); ASSERT_SCREEN(LN_ASSETFILE("Visual/Result/Test_Visual_VisualComponent-BuiltinEffects-1.png")); LN_TEST_CLEAN_SCENE; } }
// Applies the requested fixed-function texture-env blend mode to this
// texture unit. No-op for an invalid unit index, or when the mode is
// already active and the GL state is not dirty.
void LLTexUnit::setTextureBlendType(eTextureBlendType type)
{
	if (mIndex < 0) return;

	// Early out when nothing would change.
	if (mCurrBlendType == type && !gGL.mDirty)
	{
		return;
	}

	gGL.flush();
	activate();
	mCurrBlendType = type;

	S32 scale = 1; // color scale factor applied after the env mode is set
	switch (type)
	{
		case TB_REPLACE:
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
			break;
		case TB_ADD:
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_ADD);
			break;
		case TB_MULT:
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
			break;
		case TB_MULT_X2:
			// Modulate, then double the result via the color scale.
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
			scale = 2;
			break;
		case TB_ALPHA_BLEND:
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
			break;
		case TB_COMBINE:
			glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_COMBINE_ARB);
			break;
		default:
			llerrs << "Unknown Texture Blend Type: " << type << llendl;
			break;
	}
	setColorScale(scale);
	setAlphaScale(1);
}
/**
 * Set the spectrum color scale.
 */
void SVConnections::spectrumColorScale()
{
  setColorScale(ColorMaps::SPECTRUM, ColorMaps::GRAY);
}
/**
 * Set the multi color scale.
 */
void SVConnections::multiColorScale()
{
  setColorScale(ColorMaps::MULTI, ColorMaps::GRAY);
}
/**
 * Set the optimal color scale.
 */
void SVConnections::optimalColorScale()
{
  setColorScale(ColorMaps::OPTIMAL, ColorMaps::GRAY);
}
/**
 * Set the rainbow color scale.
 */
void SVConnections::rainbowColorScale()
{
  setColorScale(ColorMaps::RAINBOW, ColorMaps::GRAY);
}
/**
 * Set the green and yellow color scale.
 */
void SVConnections::greenYellowColorScale()
{
  setColorScale(ColorMaps::GREEN_YELLOW, ColorMaps::GRAY);
}
/**
 * Set the inverse gray color scale.
 */
void SVConnections::negativeGrayColorScale()
{
  setColorScale(ColorMaps::NEGATIVE_GRAY, ColorMaps::HEAT);
}
/**
 * Set the heat color scale.
 */
void SVConnections::heatColorScale()
{
  setColorScale(ColorMaps::HEAT, ColorMaps::GRAY);
}
// Decodes an image file (or an in-memory buffer) via FFmpeg into this StImage.
// Phases: pick a decoder by the hinted type (or probe the container),
// open the codec, read/decode one frame, derive pixel ratio and stereo
// metadata, then wrap the decoded planes (RGB/BGR/GRAY/YUV variants)
// or fall back to an SWScale conversion to RGB24. Returns false with an
// error state string on any failure. The decoded data stays owned by the
// codec context, so the context is intentionally NOT closed on success.
// NOTE(review): the #if ladders select FFmpeg APIs by library version;
// both branches must be kept in sync when editing.
bool StAVImage::load(const StString& theFilePath, ImageType theImageType, uint8_t* theDataPtr, int theDataSize) { // reset current data StImage::nullify(); setState(); close(); myMetadata.clear(); switch(theImageType) { case ST_TYPE_PNG: case ST_TYPE_PNS: { myCodec = avcodec_find_decoder_by_name("png"); break; } case ST_TYPE_JPEG: case ST_TYPE_MPO: case ST_TYPE_JPS: { myCodec = avcodec_find_decoder_by_name("mjpeg"); break; } case ST_TYPE_EXR: { myCodec = avcodec_find_decoder_by_name("exr"); break; } case ST_TYPE_WEBP: case ST_TYPE_WEBPLL: { myCodec = avcodec_find_decoder_by_name("webp"); break; } default: { break; } } if(theImageType == ST_TYPE_NONE || (theDataPtr == NULL && !StFileNode::isFileExists(theFilePath))) { // open image file and detect its type, its could be non local file! #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0)) int avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), myImageFormat, NULL); #else int avErrCode = av_open_input_file (&myFormatCtx, theFilePath.toCString(), myImageFormat, 0, NULL); #endif if(avErrCode != 0 || myFormatCtx->nb_streams < 1 || myFormatCtx->streams[0]->codec->codec_id == 0) { if(myFormatCtx != NULL) { #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 17, 0)) avformat_close_input(&myFormatCtx); #else av_close_input_file(myFormatCtx); myFormatCtx = NULL; #endif } #if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(53, 2, 0)) avErrCode = avformat_open_input(&myFormatCtx, theFilePath.toCString(), NULL, NULL); #else avErrCode = av_open_input_file(&myFormatCtx, theFilePath.toCString(), NULL, 0, NULL); #endif } if(avErrCode != 0 || myFormatCtx->nb_streams < 1) { setState(StString("AVFormat library, couldn't open image file. 
Error: ") + stAV::getAVErrorDescription(avErrCode)); close(); return false; } // find the decoder for the video stream myCodecCtx = myFormatCtx->streams[0]->codec; if(theImageType == ST_TYPE_NONE) { myCodec = avcodec_find_decoder(myCodecCtx->codec_id); } } if(myCodec == NULL) { setState("AVCodec library, video codec not found"); close(); return false; } else if(myFormatCtx == NULL) { // use given image type to load decoder #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif } // stupid check if(myCodecCtx == NULL) { setState("AVCodec library, codec context is NULL"); close(); return false; } // open VIDEO codec #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) { #else if(avcodec_open(myCodecCtx, myCodec) < 0) { #endif setState("AVCodec library, could not open video codec"); close(); return false; } // read one packet or file StRawFile aRawFile(theFilePath); StAVPacket anAvPkt; if(theDataPtr != NULL && theDataSize != 0) { anAvPkt.getAVpkt()->data = theDataPtr; anAvPkt.getAVpkt()->size = theDataSize; } else { if(myFormatCtx != NULL) { if(av_read_frame(myFormatCtx, anAvPkt.getAVpkt()) < 0) { setState("AVFormat library, could not read first packet"); close(); return false; } } else { if(!aRawFile.readFile()) { setState("StAVImage, could not read the file"); close(); return false; } anAvPkt.getAVpkt()->data = (uint8_t* )aRawFile.getBuffer(); anAvPkt.getAVpkt()->size = (int )aRawFile.getSize(); } } anAvPkt.setKeyFrame(); // decode one frame int isFrameFinished = 0; #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 23, 0)) avcodec_decode_video2(myCodecCtx, myFrame.Frame, &isFrameFinished, anAvPkt.getAVpkt()); #else avcodec_decode_video(myCodecCtx, myFrame.Frame, &isFrameFinished, theDataPtr, theDataSize); #endif if(isFrameFinished == 0) { // thats not an image!!! try to decode more packets??? 
setState("AVCodec library, input file is not an Image!"); close(); return false; } // check frame size if(myCodecCtx->width <= 0 || myCodecCtx->height <= 0) { setState("AVCodec library, codec returns wrong frame size"); close(); return false; } // read aspect ratio if(myCodecCtx->sample_aspect_ratio.num == 0 || myCodecCtx->sample_aspect_ratio.den == 0) { setPixelRatio(1.0f); } else { const GLfloat aRatio = GLfloat(myCodecCtx->sample_aspect_ratio.num) / GLfloat(myCodecCtx->sample_aspect_ratio.den); if(aRatio > 70.0f) { ST_DEBUG_LOG("AVCodec library, igning wrong PAR " + myCodecCtx->sample_aspect_ratio.num + ":" + myCodecCtx->sample_aspect_ratio.den); setPixelRatio(1.0f); } else { setPixelRatio(aRatio); } } #ifdef ST_AV_NEWSTEREO // currently it is unlikelly... but maybe in future? AVFrameSideData* aSideData = av_frame_get_side_data(myFrame.Frame, AV_FRAME_DATA_STEREO3D); if(aSideData != NULL) { AVStereo3D* aStereo = (AVStereo3D* )aSideData->data; mySrcFormat = stAV::stereo3dAvToSt(aStereo->type); if(aStereo->flags & AV_STEREO3D_FLAG_INVERT) { mySrcFormat = st::formatReversed(mySrcFormat); } } else { mySrcFormat = StFormat_AUTO; } #endif // it is unlikely that there would be any metadata from format... 
// but lets try if(myFormatCtx != NULL) { for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(myFormatCtx->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } for(stAV::meta::Tag* aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(myFormatCtx->streams[0]->metadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } } // collect metadata from the frame stAV::meta::Dict* aFrameMetadata = stAV::meta::getFrameMetadata(myFrame.Frame); for(stAV::meta::Tag* aTag = stAV::meta::findTag(aFrameMetadata, "", NULL, stAV::meta::SEARCH_IGNORE_SUFFIX); aTag != NULL; aTag = stAV::meta::findTag(aFrameMetadata, "", aTag, stAV::meta::SEARCH_IGNORE_SUFFIX)) { myMetadata.add(StDictEntry(aTag->key, aTag->value)); } stAV::dimYUV aDimsYUV; if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB24) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgRGB, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGR24) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgBGR, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA32) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgRGBA, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::BGRA32) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgBGRA, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == 
stAV::PIX_FMT::GRAY8) { setColorModel(StImage::ImgColor_GRAY); changePlane(0).initWrapper(StImagePlane::ImgGray, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::GRAY16) { setColorModel(StImage::ImgColor_GRAY); changePlane(0).initWrapper(StImagePlane::ImgGray16, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGB48) { setColorModel(StImage::ImgColor_RGB); changePlane(0).initWrapper(StImagePlane::ImgRGB48, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(myCodecCtx->pix_fmt == stAV::PIX_FMT::RGBA64) { setColorModel(StImage::ImgColor_RGBA); changePlane(0).initWrapper(StImagePlane::ImgRGBA64, myFrame.getPlane(0), myCodecCtx->width, myCodecCtx->height, myFrame.getLineSize(0)); } else if(stAV::isFormatYUVPlanar(myCodecCtx, aDimsYUV)) { #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(52, 29, 0)) if(myCodecCtx->color_range == AVCOL_RANGE_JPEG) { aDimsYUV.isFullScale = true; } #endif setColorModel(StImage::ImgColor_YUV); setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Full : StImage::ImgScale_Mpeg); StImagePlane::ImgFormat aPlaneFrmt = StImagePlane::ImgGray; if(aDimsYUV.bitsPerComp == 9) { aPlaneFrmt = StImagePlane::ImgGray16; setColorScale(aDimsYUV.isFullScale ? StImage::ImgScale_Jpeg9 : StImage::ImgScale_Mpeg9); } else if(aDimsYUV.bitsPerComp == 10) { aPlaneFrmt = StImagePlane::ImgGray16; setColorScale(aDimsYUV.isFullScale ? 
StImage::ImgScale_Jpeg10 : StImage::ImgScale_Mpeg10); } else if(aDimsYUV.bitsPerComp == 16) { aPlaneFrmt = StImagePlane::ImgGray16; } changePlane(0).initWrapper(aPlaneFrmt, myFrame.getPlane(0), size_t(aDimsYUV.widthY), size_t(aDimsYUV.heightY), myFrame.getLineSize(0)); changePlane(1).initWrapper(aPlaneFrmt, myFrame.getPlane(1), size_t(aDimsYUV.widthU), size_t(aDimsYUV.heightU), myFrame.getLineSize(1)); changePlane(2).initWrapper(aPlaneFrmt, myFrame.getPlane(2), size_t(aDimsYUV.widthV), size_t(aDimsYUV.heightV), myFrame.getLineSize(2)); } else { ///ST_DEBUG_LOG("StAVImage, perform conversion from Pixel format '" + avcodec_get_pix_fmt_name(myCodecCtx->pix_fmt) + "' to RGB"); // initialize software scaler/converter SwsContext* pToRgbCtx = sws_getContext(myCodecCtx->width, myCodecCtx->height, myCodecCtx->pix_fmt, // source myCodecCtx->width, myCodecCtx->height, stAV::PIX_FMT::RGB24, // destination SWS_BICUBIC, NULL, NULL, NULL); if(pToRgbCtx == NULL) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } // initialize additional buffer for converted RGB data setColorModel(StImage::ImgColor_RGB); changePlane(0).initTrash(StImagePlane::ImgRGB, myCodecCtx->width, myCodecCtx->height); uint8_t* rgbData[4]; stMemZero(rgbData, sizeof(rgbData)); int rgbLinesize[4]; stMemZero(rgbLinesize, sizeof(rgbLinesize)); rgbData[0] = changePlane(0).changeData(); rgbLinesize[0] = (int )changePlane(0).getSizeRowBytes(); sws_scale(pToRgbCtx, myFrame.Frame->data, myFrame.Frame->linesize, 0, myCodecCtx->height, rgbData, rgbLinesize); // reset original data closeAvCtx(); sws_freeContext(pToRgbCtx); } // set debug information StString aDummy, aFileName; StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName); setState(StString("AVCodec library, loaded image '") + aFileName + "' " + getDescription()); // we should not close the file because decoded image data is in codec context cache return true; }
// Encodes this StImage to a file via FFmpeg ("png" or "mjpeg" encoder,
// chosen by theImageType). Converts to an encoder-compatible pixel
// format when needed, attaches stereo-3D side data when available,
// writes JPS/JPEG stereo metadata via StJpegParser, and returns false
// with an error state string on any failure.
bool StAVImage::save(const StString& theFilePath, ImageType theImageType, StFormat theSrcFormat) { close(); setState(); if(isNull()) { return false; } PixelFormat aPFormatAV = (PixelFormat )getAVPixelFormat(*this); StImage anImage; switch(theImageType) { case ST_TYPE_PNG: case ST_TYPE_PNS: { myCodec = avcodec_find_encoder_by_name("png"); if(myCodec == NULL) { setState("AVCodec library, video codec 'png' not found"); close(); return false; } if(aPFormatAV == stAV::PIX_FMT::RGB24 || aPFormatAV == stAV::PIX_FMT::RGBA32 || aPFormatAV == stAV::PIX_FMT::GRAY8) { anImage.initWrapper(*this); } else { // convert to compatible pixel format anImage.changePlane().initTrash(StImagePlane::ImgRGB, getSizeX(), getSizeY(), getAligned(getSizeX() * 3)); PixelFormat aPFrmtTarget = stAV::PIX_FMT::RGB24; if(!convert(*this, aPFormatAV, anImage, aPFrmtTarget)) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } aPFormatAV = aPFrmtTarget; } #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif // setup encoder myCodecCtx->pix_fmt = aPFormatAV; myCodecCtx->width = (int )anImage.getSizeX(); myCodecCtx->height = (int )anImage.getSizeY(); myCodecCtx->compression_level = 9; // 0..9 break; } case ST_TYPE_JPEG: case ST_TYPE_MPO: case ST_TYPE_JPS: { myCodec = avcodec_find_encoder_by_name("mjpeg"); if(myCodec == NULL) { setState("AVCodec library, video codec 'mjpeg' not found"); close(); return false; } if(aPFormatAV == stAV::PIX_FMT::YUVJ420P || aPFormatAV == stAV::PIX_FMT::YUVJ422P //|| aPFormatAV == stAV::PIX_FMT::YUVJ444P not supported by FFmpeg... yet? //|| aPFormatAV == stAV::PIX_FMT::YUVJ440P ) { anImage.initWrapper(*this); } else { // convert to compatible pixel format PixelFormat aPFrmtTarget = aPFormatAV == stAV::PIX_FMT::YUV420P ? 
stAV::PIX_FMT::YUVJ420P : stAV::PIX_FMT::YUVJ422P; anImage.setColorModel(StImage::ImgColor_YUV); anImage.setColorScale(StImage::ImgScale_Mpeg); anImage.changePlane(0).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(0).changeData(), '\0', anImage.getPlane(0).getSizeBytes()); anImage.changePlane(1).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(1).changeData(), '\0', anImage.getPlane(1).getSizeBytes()); anImage.changePlane(2).initTrash(StImagePlane::ImgGray, getSizeX(), getSizeY(), getAligned(getSizeX())); stMemSet(anImage.changePlane(2).changeData(), '\0', anImage.getPlane(2).getSizeBytes()); if(!convert(*this, aPFormatAV, anImage, aPFrmtTarget)) { setState("SWScale library, failed to create SWScaler context"); close(); return false; } aPFormatAV = aPFrmtTarget; } #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) myCodecCtx = avcodec_alloc_context3(myCodec); #else myCodecCtx = avcodec_alloc_context(); #endif myCodecCtx->pix_fmt = aPFormatAV; myCodecCtx->width = (int )anImage.getSizeX(); myCodecCtx->height = (int )anImage.getSizeY(); myCodecCtx->time_base.num = 1; myCodecCtx->time_base.den = 1; myCodecCtx->qmin = myCodecCtx->qmax = 5; // quality factor - lesser is better break; } case ST_TYPE_NONE: default: close(); return false; } // open VIDEO codec #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(53, 8, 0)) if(avcodec_open2(myCodecCtx, myCodec, NULL) < 0) { #else if(avcodec_open(myCodecCtx, myCodec) < 0) { #endif setState("AVCodec library, could not open video codec"); close(); return false; } // wrap own data into AVFrame myFrame.Frame->format = myCodecCtx->pix_fmt; myFrame.Frame->width = myCodecCtx->width; myFrame.Frame->height = myCodecCtx->height; fillPointersAV(anImage, myFrame.Frame->data, myFrame.Frame->linesize); #ifdef ST_AV_NEWSTEREO bool isReversed = false; AVStereo3DType anAvStereoType = stAV::stereo3dStToAv(theSrcFormat, 
isReversed); if(anAvStereoType != (AVStereo3DType )-1) { AVStereo3D* aStereo = av_stereo3d_create_side_data(myFrame.Frame); if(aStereo != NULL) { aStereo->type = anAvStereoType; if(isReversed) { aStereo->flags |= AV_STEREO3D_FLAG_INVERT; } } } #endif StJpegParser aRawFile(theFilePath); if(!aRawFile.openFile(StRawFile::WRITE)) { setState("Can not open the file for writing"); close(); return false; } // allocate the buffer, large enough (stupid formula copied from ffmpeg.c) int aBuffSize = int(getSizeX() * getSizeY() * 10); aRawFile.initBuffer(aBuffSize); // encode the image StAVPacket aPacket; aPacket.getAVpkt()->data = (uint8_t* )aRawFile.changeBuffer(); aPacket.getAVpkt()->size = aBuffSize; #if(LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(54, 2, 100)) int isGotPacket = 0; int anEncSize = avcodec_encode_video2(myCodecCtx, aPacket.getAVpkt(), myFrame.Frame, &isGotPacket); if(anEncSize == 0 && isGotPacket != 0) { anEncSize = aPacket.getSize(); } #else int anEncSize = avcodec_encode_video(myCodecCtx, aPacket.changeData(), aPacket.getSize(), myFrame.Frame); #endif if(anEncSize <= 0) { setState("AVCodec library, fail to encode the image"); close(); return false; } aRawFile.setDataSize((size_t )anEncSize); // save metadata when possible if(theImageType == ST_TYPE_JPEG || theImageType == ST_TYPE_JPS) { if(aRawFile.parse()) { if(theSrcFormat != StFormat_AUTO) { aRawFile.setupJps(theSrcFormat); } } else { ST_ERROR_LOG("AVCodec library, created JPEG can not be parsed!"); } } // store current content aRawFile.writeFile(); // and finally close the file handle aRawFile.closeFile(); close(); // set debug information StString aDummy, aFileName; StFileNode::getFolderAndFile(theFilePath, aDummy, aFileName); setState(StString("AVCodec library, saved image '") + aFileName + "' " + getDescription()); return true; }
// Configures the ARB texture-env combiner for this unit: selects the
// combine operation (op) and its two sources (src1/src2) for either the
// alpha or the color (RGB) channel, caches the new state, and applies
// the matching color/alpha scale. TBO_REPLACE and unknown ops take a
// short path that sets only source0/operand0 and resets the scale to 1.
// Early-outs when the requested combiner state is already active and
// the GL state is not dirty.
void LLTexUnit::setTextureCombiner(eTextureBlendOp op, eTextureBlendSrc src1, eTextureBlendSrc src2, bool isAlpha) { if (mIndex < 0) return; activate(); if (mCurrBlendType != TB_COMBINE || gGL.mDirty) { mCurrBlendType = TB_COMBINE; gGL.flush(); glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_COMBINE_ARB); } // We want an early out, because this function does a LOT of stuff. if ( ( (isAlpha && (mCurrAlphaOp == op) && (mCurrAlphaSrc1 == src1) && (mCurrAlphaSrc2 == src2)) || (!isAlpha && (mCurrColorOp == op) && (mCurrColorSrc1 == src1) && (mCurrColorSrc2 == src2)) ) && !gGL.mDirty) { return; } gGL.flush(); // Get the gl source enums according to the eTextureBlendSrc sources passed in GLint source1 = getTextureSource(src1); GLint source2 = getTextureSource(src2); // Get the gl operand enums according to the eTextureBlendSrc sources passed in GLint operand1 = getTextureSourceType(src1, isAlpha); GLint operand2 = getTextureSourceType(src2, isAlpha); // Default the scale amount to 1 S32 scale_amount = 1; GLenum comb_enum, src0_enum, src1_enum, src2_enum, operand0_enum, operand1_enum, operand2_enum; if (isAlpha) { // Set enums to ALPHA ones comb_enum = GL_COMBINE_ALPHA_ARB; src0_enum = GL_SOURCE0_ALPHA_ARB; src1_enum = GL_SOURCE1_ALPHA_ARB; src2_enum = GL_SOURCE2_ALPHA_ARB; operand0_enum = GL_OPERAND0_ALPHA_ARB; operand1_enum = GL_OPERAND1_ALPHA_ARB; operand2_enum = GL_OPERAND2_ALPHA_ARB; // cache current combiner mCurrAlphaOp = op; mCurrAlphaSrc1 = src1; mCurrAlphaSrc2 = src2; } else { // Set enums to RGB ones comb_enum = GL_COMBINE_RGB_ARB; src0_enum = GL_SOURCE0_RGB_ARB; src1_enum = GL_SOURCE1_RGB_ARB; src2_enum = GL_SOURCE2_RGB_ARB; operand0_enum = GL_OPERAND0_RGB_ARB; operand1_enum = GL_OPERAND1_RGB_ARB; operand2_enum = GL_OPERAND2_RGB_ARB; // cache current combiner mCurrColorOp = op; mCurrColorSrc1 = src1; mCurrColorSrc2 = src2; } switch(op) { case TBO_REPLACE: // Slightly special syntax (no second sources), just set all and return. 
glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_REPLACE); glTexEnvi(GL_TEXTURE_ENV, src0_enum, source1); glTexEnvi(GL_TEXTURE_ENV, operand0_enum, operand1); (isAlpha) ? setAlphaScale(1) : setColorScale(1); return; case TBO_MULT: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_MODULATE); break; case TBO_MULT_X2: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_MODULATE); scale_amount = 2; break; case TBO_MULT_X4: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_MODULATE); scale_amount = 4; break; case TBO_ADD: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_ADD); break; case TBO_ADD_SIGNED: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_ADD_SIGNED_ARB); break; case TBO_SUBTRACT: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_SUBTRACT_ARB); break; case TBO_LERP_VERT_ALPHA: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_INTERPOLATE); glTexEnvi(GL_TEXTURE_ENV, src2_enum, GL_PRIMARY_COLOR_ARB); glTexEnvi(GL_TEXTURE_ENV, operand2_enum, GL_SRC_ALPHA); break; case TBO_LERP_TEX_ALPHA: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_INTERPOLATE); glTexEnvi(GL_TEXTURE_ENV, src2_enum, GL_TEXTURE); glTexEnvi(GL_TEXTURE_ENV, operand2_enum, GL_SRC_ALPHA); break; case TBO_LERP_PREV_ALPHA: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_INTERPOLATE); glTexEnvi(GL_TEXTURE_ENV, src2_enum, GL_PREVIOUS_ARB); glTexEnvi(GL_TEXTURE_ENV, operand2_enum, GL_SRC_ALPHA); break; case TBO_LERP_CONST_ALPHA: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_INTERPOLATE); glTexEnvi(GL_TEXTURE_ENV, src2_enum, GL_CONSTANT_ARB); glTexEnvi(GL_TEXTURE_ENV, operand2_enum, GL_SRC_ALPHA); break; case TBO_LERP_VERT_COLOR: glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_INTERPOLATE); glTexEnvi(GL_TEXTURE_ENV, src2_enum, GL_PRIMARY_COLOR_ARB); glTexEnvi(GL_TEXTURE_ENV, operand2_enum, (isAlpha) ? GL_SRC_ALPHA : GL_SRC_COLOR); break; default: llwarns << "Unknown eTextureBlendOp: " << op << ". Setting op to replace." << llendl; // Slightly special syntax (no second sources), just set all and return. 
glTexEnvi(GL_TEXTURE_ENV, comb_enum, GL_REPLACE); glTexEnvi(GL_TEXTURE_ENV, src0_enum, source1); glTexEnvi(GL_TEXTURE_ENV, operand0_enum, operand1); (isAlpha) ? setAlphaScale(1) : setColorScale(1); return; } // Set sources, operands, and scale accordingly glTexEnvi(GL_TEXTURE_ENV, src0_enum, source1); glTexEnvi(GL_TEXTURE_ENV, operand0_enum, operand1); glTexEnvi(GL_TEXTURE_ENV, src1_enum, source2); glTexEnvi(GL_TEXTURE_ENV, operand1_enum, operand2); (isAlpha) ? setAlphaScale(scale_amount) : setColorScale(scale_amount); }
/**
 * Set the gray color scale.
 */
void SVConnections::grayColorScale()
{
  setColorScale(ColorMaps::GRAY, ColorMaps::HEAT);
}