Example #1
File: basic.cpp Project: igorepi/openfx
/* set up and run a processor */
void
BasicPlugin::setupAndProcess(ImageScalerBase &processor, const OFX::RenderArguments &args)
{
  // get a dst image
  std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));
  OFX::BitDepthEnum dstBitDepth       = dst->getPixelDepth();
  OFX::PixelComponentEnum dstComponents  = dst->getPixelComponents();

  // fetch main input image
  std::auto_ptr<OFX::Image> src(srcClip_->fetchImage(args.time));

  // make sure bit depths are sane
  if(src.get()) {
    OFX::BitDepthEnum    srcBitDepth      = src->getPixelDepth();
    OFX::PixelComponentEnum srcComponents = src->getPixelComponents();

    // see if they have the same depths and bytes and all
    if(srcBitDepth != dstBitDepth || srcComponents != dstComponents)
      throw int(1); // HACK!! need to throw a sensible exception here!
  }

  // auto ptr for the mask.
  // Should do this inside the if statement below but the MS compiler I have doesn't have
  // a 'reset' function on the auto_ptr class
  std::auto_ptr<OFX::Image> mask(getContext() != OFX::eContextFilter ? maskClip_->fetchImage(args.time) : 0);

  // do we do masking
  if(getContext() != OFX::eContextFilter) {
    // say we are masking
    processor.doMasking(true);

    // Set it in the processor 
    processor.setMaskImg(mask.get());
  }

  // get the scale parameter values: the alpha scale, then the overall scale applied to r, g and b
  double a = aScale_->getValueAtTime(args.time);
  double r, g, b;
  r = g = b = scale_->getValueAtTime(args.time);

  // see if the individual component scales are enabled
  if(componentScalesEnabled_->getValueAtTime(args.time)) {
    r *= rScale_->getValueAtTime(args.time);
    g *= gScale_->getValueAtTime(args.time);
    b *= bScale_->getValueAtTime(args.time);
  }

  // set the images
  processor.setDstImg(dst.get());
  processor.setSrcImg(src.get());


  // set the render window
  processor.setRenderWindow(args.renderWindow);

  // set the scales
  processor.setScales((float)r, (float)g, (float)b, (float)a);

  // Call the base class process member, this will call the derived templated process code
  processor.process();
}
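
For context: in the OFX Support library examples this setupAndProcess overload is called from the plugin's render() override, which instantiates a concrete templated processor for the destination clip's bit depth and components. Below is a minimal, hypothetical sketch of that calling side (the actual BasicPlugin::render may differ); it assumes an ImageScaler<PIX, nComponents, max> template derived from ImageScalerBase.

/* hypothetical render() dispatch -- a sketch, not the original code */
void
BasicPlugin::render(const OFX::RenderArguments &args)
{
  // instantiate the processor according to the destination clip's format
  OFX::BitDepthEnum dstBitDepth = dstClip_->getPixelDepth();
  OFX::PixelComponentEnum dstComponents = dstClip_->getPixelComponents();

  if(dstComponents == OFX::ePixelComponentRGBA) {
    switch(dstBitDepth) {
    case OFX::eBitDepthUByte  : { ImageScaler<unsigned char, 4, 255>    proc(*this); setupAndProcess(proc, args); break; }
    case OFX::eBitDepthUShort : { ImageScaler<unsigned short, 4, 65535> proc(*this); setupAndProcess(proc, args); break; }
    case OFX::eBitDepthFloat  : { ImageScaler<float, 4, 1>              proc(*this); setupAndProcess(proc, args); break; }
    default : OFX::throwSuiteStatusException(kOfxStatErrUnsupported);
    }
  }
}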
Example #2
/* set up and run a processor */
void
InvertPlugin::setupAndProcess(InvertBase &processor, const OFX::RenderArguments &args)
{
  // get a dst image
  std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));
  OFX::BitDepthEnum dstBitDepth       = dst->getPixelDepth();
  OFX::PixelComponentEnum dstComponents  = dst->getPixelComponents();

  // fetch main input image
  std::auto_ptr<OFX::Image> src(srcClip_->fetchImage(args.time));

  // make sure bit depths are sane
  if(src.get()) {
    OFX::BitDepthEnum    srcBitDepth      = src->getPixelDepth();
    OFX::PixelComponentEnum srcComponents = src->getPixelComponents();

    // see if they have the same depths and bytes and all
    if(srcBitDepth != dstBitDepth || srcComponents != dstComponents)
      throw int(1); // HACK!! need to throw a sensible exception here!
  }

  // set the images
  processor.setDstImg(dst.get());
  processor.setSrcImg(src.get());

  // set the render window
  processor.setRenderWindow(args.renderWindow);

  // Call the base class process member, this will call the derived templated process code
  processor.process();
}
Example #3
/* set up and run a processor */
void
CrossFadePlugin::setupAndProcess(OFX::ImageBlenderBase &processor, const OFX::RenderArguments &args)
{
  // get a dst image
  std::auto_ptr<OFX::Image>  dst(dstClip_->fetchImage(args.time));
  OFX::BitDepthEnum          dstBitDepth    = dst->getPixelDepth();
  OFX::PixelComponentEnum    dstComponents  = dst->getPixelComponents();

  // fetch the two source images
  std::auto_ptr<OFX::Image> fromImg(fromClip_->fetchImage(args.time));
  std::auto_ptr<OFX::Image> toImg(toClip_->fetchImage(args.time));

  // make sure bit depths are sane
  if(fromImg.get()) checkComponents(*fromImg, dstBitDepth, dstComponents);
  if(toImg.get()) checkComponents(*toImg, dstBitDepth, dstComponents);

  // get the transition value
  float blend = (float)transition_->getValueAtTime(args.time);

  // set the images
  processor.setDstImg(dst.get());
  processor.setFromImg(fromImg.get());
  processor.setToImg(toImg.get());

  // set the render window
  processor.setRenderWindow(args.renderWindow);

  // set the scales
  processor.setBlend(blend);

  // Call the base class process member, this will call the derived templated process code
  processor.process();
}
Example #4
/* set up and run a processor */
void
InvertPlugin::setupAndProcess(InvertBase &processor, const OFX::RenderArguments &args)
{
    // get a dst image
    std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));
    if (!dst.get()) {
        OFX::throwSuiteStatusException(kOfxStatFailed);
    }
    OFX::BitDepthEnum dstBitDepth       = dst->getPixelDepth();
    OFX::PixelComponentEnum dstComponents  = dst->getPixelComponents();

    // fetch main input image
    std::auto_ptr<OFX::Image> src(srcClip_->fetchImage(args.time));

    // make sure bit depths are sane
    if (src.get()) {
        OFX::BitDepthEnum    srcBitDepth      = src->getPixelDepth();
        OFX::PixelComponentEnum srcComponents = src->getPixelComponents();

        // see if they have the same depths and bytes and all
        if (srcBitDepth != dstBitDepth || srcComponents != dstComponents) {
            OFX::throwSuiteStatusException(kOfxStatErrImageFormat);
        }
    }

    // auto ptr for the mask.
    std::auto_ptr<OFX::Image> mask((getContext() != OFX::eContextFilter) ? maskClip_->fetchImage(args.time) : 0);

    // do we do masking
    if (getContext() != OFX::eContextFilter && maskClip_->isConnected()) {
        // say we are masking
        processor.doMasking(true);

        // Set it in the processor
        processor.setMaskImg(mask.get());
    }

    bool red, green, blue, alpha;
    _paramProcessR->getValueAtTime(args.time, red);
    _paramProcessG->getValueAtTime(args.time, green);
    _paramProcessB->getValueAtTime(args.time, blue);
    _paramProcessA->getValueAtTime(args.time, alpha);
    double mix;
    _mix->getValueAtTime(args.time, mix);
    bool maskInvert;
    _maskInvert->getValueAtTime(args.time, maskInvert);
    processor.setValues(red, green, blue, alpha, mix, maskInvert);

    // set the images
    processor.setDstImg(dst.get());
    processor.setSrcImg(src.get());

    // set the render window
    processor.setRenderWindow(args.renderWindow);

    // Call the base class process member, this will call the derived templated process code
    processor.process();
}
Example #5
/** @brief render for the filter */
void
FilterPlugin::render(const OFX::RenderArguments &args)
{
  OFX::Image *src = 0, *dst = 0;

  try {
    // get a src image
    src = srcClip_->fetchImage(args.time);

    // get a dst image
    dst = dstClip_->fetchImage(args.time);

    // push some pixels
    // blah;
    // blah;
    // blah;
  }

  catch(...) {
    delete src;
    delete dst;
    throw;
  }

  // delete them
  delete src;
  delete dst;
}
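
Example #5 above manages the fetched images with raw pointers and has to delete them both in the catch handler and on the normal path. The other snippets in this section use std::auto_ptr instead, which releases the images on every exit path. A minimal RAII sketch of the same skeleton, assuming the same clip members (hypothetical, not the original code):

/** @brief render for the filter, RAII sketch */
void
FilterPlugin::render(const OFX::RenderArguments &args)
{
  // auto_ptr releases the fetched images even if an exception propagates
  std::auto_ptr<OFX::Image> src(srcClip_->fetchImage(args.time));
  std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));

  // push some pixels
  // blah;
}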
Example #6
File: Tester.cpp Project: olear/openfx
void GenericTestPlugin::setupAndProcess(GenericTestBase &processor, const OFX::RenderArguments &args)
{
  std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));
  OFX::BitDepthEnum dstBitDepth       = dst->getPixelDepth();
  OFX::PixelComponentEnum dstComponents  = dst->getPixelComponents();
  std::auto_ptr<OFX::Image> src(srcClip_->fetchImage(args.time));

  if(src.get()) 
  {
    OFX::BitDepthEnum    srcBitDepth      = src->getPixelDepth();
    OFX::PixelComponentEnum srcComponents = src->getPixelComponents();

    // see if they have the same depths and bytes and all
    if(srcBitDepth != dstBitDepth || srcComponents != dstComponents)
      throw int(1); // HACK!! need to throw a sensible exception here!
  }

  processor.setDstImg(dst.get());
  processor.setSrcImg(src.get());
  processor.setRenderWindow(args.renderWindow);
  processor.process();
}
Example #7
	virtual void setup( const OFX::RenderArguments& args )
	{
		// destination view
//		TUTTLE_COUT_INFOS;
//		TUTTLE_COUT_VAR( "dst - fetchImage " << time );
		_dst.reset( _clipDst->fetchImage( args.time ) );
		if( !_dst.get() )
			BOOST_THROW_EXCEPTION( exception::ImageNotReady()
				<< exception::dev() + "Error on clip " + quotes(_clipDst->name()) );
		if( _dst->getRowDistanceBytes() == 0 )
			BOOST_THROW_EXCEPTION( exception::WrongRowBytes()
				<< exception::dev() + "Error on clip " + quotes(_clipDst->name()) );
		
		//		_dstPixelRod = _dst->getRegionOfDefinition(); // bug in nuke, returns bounds
		_dstPixelRod       = _clipDst->getPixelRod( args.time, args.renderScale );
		_dstPixelRodSize.x = ( this->_dstPixelRod.x2 - this->_dstPixelRod.x1 );
		_dstPixelRodSize.y = ( this->_dstPixelRod.y2 - this->_dstPixelRod.y1 );
	}
Example #8
    virtual void setup(const OFX::RenderArguments& args)
    {
        ImageProcessor::setup(args);

        // source view
        _src.reset(_clipSrc->fetchImage(args.time));
        if(!_src.get())
            BOOST_THROW_EXCEPTION(exception::ImageNotReady()
                                  << exception::dev() + "Error on clip " + quotes(_clipSrc->name()));
        if(_src->getRowDistanceBytes() == 0)
            BOOST_THROW_EXCEPTION(exception::WrongRowBytes()
                                  << exception::dev() + "Error on clip " + quotes(_clipSrc->name()));

        if(OFX::getImageEffectHostDescription()->hostName == "uk.co.thefoundry.nuke")
        {
            // bug in nuke, getRegionOfDefinition() on OFX::Image returns bounds
            _srcPixelRod = _clipSrc->getPixelRod(args.time, args.renderScale);
        }
        else
        {
            _srcPixelRod = _src->getRegionOfDefinition();
        }
    }
Example #9
File: noise.cpp Project: Anna83/openfx
/* set up and run a processor */
void
NoisePlugin::setupAndProcess(NoiseGeneratorBase &processor, const OFX::RenderArguments &args)
{
  // get a dst image
  std::auto_ptr<OFX::Image>  dst(dstClip_->fetchImage(args.time));
  OFX::BitDepthEnum         dstBitDepth    = dst->getPixelDepth();
  OFX::PixelComponentEnum   dstComponents  = dst->getPixelComponents();

  // set the images
  processor.setDstImg(dst.get());

  // set the render window
  processor.setRenderWindow(args.renderWindow);

  // set the scales
  processor.setNoiseLevel((float)noise_->getValueAtTime(args.time));

  // set the seed based on the current time; doubling it gives different seeds on different fields
  processor.setSeed(uint32_t(args.time * 2.0f + 2000.0f));

  // Call the base class process member, this will call the derived templated process code
  processor.process();
}
Example #10
	virtual void setup( const OFX::RenderArguments& args )
	{
		// destination view
		_dst.reset( _clipDst->fetchImage( args.time ) );
		if( !_dst.get() )
			BOOST_THROW_EXCEPTION( exception::ImageNotReady() );
		if( _dst->getRowBytes() == 0 )
			BOOST_THROW_EXCEPTION( exception::WrongRowBytes() );
		//		_dstPixelRod = _dst->getRegionOfDefinition(); // bug in nuke, returns bounds
		_dstPixelRod       = _clipDst->getPixelRod( args.time, args.renderScale );
		_dstPixelRodSize.x = ( this->_dstPixelRod.x2 - this->_dstPixelRod.x1 );
		_dstPixelRodSize.y = ( this->_dstPixelRod.y2 - this->_dstPixelRod.y1 );
		_dstView           = getView( _dst.get(), _dstPixelRod );

		#ifndef TUTTLE_PRODUCTION
		// init dst buffer with red to highlight uninitialized pixels
		const OfxRectI dstBounds = this->translateRoWToOutputClipCoordinates( _dst->getBounds() );
		View dstToFill           = boost::gil::subimage_view( _dstView,
		                                                      dstBounds.x1, dstBounds.y1,
		                                                      dstBounds.x2 - dstBounds.x1, dstBounds.y2 - dstBounds.y1 );
		const boost::gil::rgba32f_pixel_t errorColor( 1.0, 0.0, 0.0, 1.0 );
		fill_pixels( dstToFill, errorColor );
		#endif
	}
Example #11
/* set up and run a processor */
void
AnaglyphPlugin::setupAndProcess(AnaglyphBase &processor, const OFX::RenderArguments &args)
{
    // get a dst image
    std::auto_ptr<OFX::Image> dst(dstClip_->fetchImage(args.time));
    if (!dst.get()) {
        OFX::throwSuiteStatusException(kOfxStatFailed);
    }
    if (dst->getRenderScale().x != args.renderScale.x ||
        dst->getRenderScale().y != args.renderScale.y ||
        dst->getField() != args.fieldToRender) {
        setPersistentMessage(OFX::Message::eMessageError, "", "OFX Host gave image with wrong scale or field properties");
        OFX::throwSuiteStatusException(kOfxStatFailed);
    }
    OFX::BitDepthEnum dstBitDepth       = dst->getPixelDepth();
    OFX::PixelComponentEnum dstComponents  = dst->getPixelComponents();

    // fetch main input image
    std::auto_ptr<OFX::Image> srcLeft(srcClip_->fetchStereoscopicImage(args.time,0));
    if (srcLeft.get()) {
        if (srcLeft->getRenderScale().x != args.renderScale.x ||
            srcLeft->getRenderScale().y != args.renderScale.y ||
            srcLeft->getField() != args.fieldToRender) {
            setPersistentMessage(OFX::Message::eMessageError, "", "OFX Host gave image with wrong scale or field properties");
            OFX::throwSuiteStatusException(kOfxStatFailed);
        }
    }
    std::auto_ptr<OFX::Image> srcRight(srcClip_->fetchStereoscopicImage(args.time,1));
    if (srcRight.get()) {
        if (srcRight->getRenderScale().x != args.renderScale.x ||
            srcRight->getRenderScale().y != args.renderScale.y ||
            srcRight->getField() != args.fieldToRender) {
            setPersistentMessage(OFX::Message::eMessageError, "", "OFX Host gave image with wrong scale or field properties");
            OFX::throwSuiteStatusException(kOfxStatFailed);
        }
    }

    // make sure bit depths are sane
    if (srcLeft.get()) {
        OFX::BitDepthEnum    srcBitDepth      = srcLeft->getPixelDepth();
        OFX::PixelComponentEnum srcComponents = srcLeft->getPixelComponents();

        // see if they have the same depths and bytes and all
        if (srcBitDepth != dstBitDepth || srcComponents != dstComponents)
            OFX::throwSuiteStatusException(kOfxStatErrImageFormat);
    }
    if (srcRight.get()) {
        OFX::BitDepthEnum    srcBitDepth      = srcRight->getPixelDepth();
        OFX::PixelComponentEnum srcComponents = srcRight->getPixelComponents();

        // see if they have the same depths and bytes and all
        if (srcBitDepth != dstBitDepth || srcComponents != dstComponents)
            OFX::throwSuiteStatusException(kOfxStatErrImageFormat);
    }

    double amtcolour = amtcolour_->getValueAtTime(args.time);
    bool swap = swap_->getValueAtTime(args.time);
    int offset = offset_->getValueAtTime(args.time);

    // set the images
    processor.setDstImg(dst.get());
    processor.setSrcLeftImg(srcLeft.get());
    processor.setSrcRightImg(srcRight.get());

    // set the render window
    processor.setRenderWindow(args.renderWindow);

    // set the parameters
    processor.setAmtColour(amtcolour);
    processor.setSwap(swap);
    processor.setOffset(offset);

    // Call the base class process member, this will call the derived templated process code
    processor.process();
}
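
The renderScale/field check above is written out three times, once for the destination and once for each stereoscopic source. A hypothetical helper (not part of the original plugin) could factor out the throwing part; the persistent error message is left to the caller for brevity:

// hypothetical helper, assuming the OFX::Image accessors used above
static void
checkImageProperties(const OFX::Image &img, const OFX::RenderArguments &args)
{
    if (img.getRenderScale().x != args.renderScale.x ||
        img.getRenderScale().y != args.renderScale.y ||
        img.getField() != args.fieldToRender) {
        OFX::throwSuiteStatusException(kOfxStatFailed);
    }
}

// usage: if (srcLeft.get()) { checkImageProperties(*srcLeft, args); }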
Example #12
File: retimer.cpp Project: apextw/Ramen
/* set up and run a processor */
void
RetimerPlugin::setupAndProcess(OFX::ImageBlenderBase &processor, const OFX::RenderArguments &args)
{
    // get a dst image
    std::auto_ptr<OFX::Image>  dst(dstClip_->fetchImage(args.time));
    OFX::BitDepthEnum          dstBitDepth    = dst->getPixelDepth();
    OFX::PixelComponentEnum    dstComponents  = dst->getPixelComponents();

    // figure the frame we should be retiming from
    double sourceTime;

    if(getContext() == OFX::eContextRetimer) {
        // the host is specifying it, so fetch it from the "sourceTime" pseudo-param
        sourceTime = sourceTime_->getValueAtTime(args.time);
    }
    else {
        // we have our own param, which is a speed, so we integrate it to get the time we want
        sourceTime = speed_->integrate(0, args.time);
    }

    // figure the two images we are blending between
    double fromTime, toTime;
    double blend;

    if(args.fieldToRender == OFX::eFieldNone) {
        // unfielded, easy peasy
        fromTime = floor(sourceTime);
        toTime = fromTime + 1;
        blend = sourceTime - fromTime;
    }
    else {
        // Fielded clips, pook. We are rendering field doubled images,
        // and so need to blend between fields, not frames.
        double frac = sourceTime - floor(sourceTime);
        if(frac < 0.5) {
            // need to go between the first and second fields of this frame
            fromTime = floor(sourceTime); // this will get the first field
            toTime   = fromTime + 0.5;    // this will get the second field of the same frame
            blend    = frac * 2.0;        // and the blend is between those two
        }
        else { // frac >= 0.5
            fromTime = floor(sourceTime) + 0.5; // this will get the second field of this frame
            toTime   = floor(sourceTime) + 1.0; // this will get the first field of the next frame
            blend    = (frac - 0.5) * 2.0;
        }
    }

    // fetch the two source images
    std::auto_ptr<OFX::Image> fromImg(srcClip_->fetchImage(fromTime));
    std::auto_ptr<OFX::Image> toImg(srcClip_->fetchImage(toTime));

    // make sure bit depths are sane
    if(fromImg.get()) checkComponents(*fromImg, dstBitDepth, dstComponents);
    if(toImg.get()) checkComponents(*toImg, dstBitDepth, dstComponents);

    // set the images
    processor.setDstImg(dst.get());
    processor.setFromImg(fromImg.get());
    processor.setToImg(toImg.get());

    // set the render window
    processor.setRenderWindow(args.renderWindow);

    // set the blend between
    processor.setBlend((float)blend);

    // Call the base class process member, this will call the derived templated process code
    processor.process();
}
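
A concrete number makes the fielded mapping above easier to follow. The standalone sketch below (hypothetical, merely restating the logic of Example #12) prints the result for sourceTime = 10.3: frac = 0.3 < 0.5, so fromTime = 10.0 (first field of frame 10), toTime = 10.5 (second field of the same frame) and blend = 0.6.

// standalone illustration of the fielded retime mapping (not plugin code)
#include <cmath>
#include <cstdio>

int main()
{
    const double sourceTime = 10.3;
    const double frac = sourceTime - std::floor(sourceTime);

    double fromTime, toTime, blend;
    if (frac < 0.5) {
        fromTime = std::floor(sourceTime);       // first field of this frame
        toTime   = fromTime + 0.5;               // second field of the same frame
        blend    = frac * 2.0;
    }
    else {
        fromTime = std::floor(sourceTime) + 0.5; // second field of this frame
        toTime   = std::floor(sourceTime) + 1.0; // first field of the next frame
        blend    = (frac - 0.5) * 2.0;
    }
    std::printf("fromTime=%g toTime=%g blend=%g\n", fromTime, toTime, blend);
    return 0;
}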