Example #1
File: basic.cpp Project: chajaeik/openfx
// are the settings of the effect performing an identity operation
static OfxStatus
isIdentity( OfxImageEffectHandle  effect,
	    OfxPropertySetHandle inArgs,
	    OfxPropertySetHandle outArgs)
{
  // get the render window and the time from the inArgs
  OfxTime time;
  OfxRectI renderWindow;
  
  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);
  gPropHost->propGetIntN(inArgs, kOfxImageEffectPropRenderWindow, 4, &renderWindow.x1);

  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);

  double scaleValue, sR = 1, sG = 1, sB = 1, sA = 1;
  gParamHost->paramGetValueAtTime(myData->scaleParam, time, &scaleValue);

  if(ofxuGetClipPixelsAreRGBA(myData->sourceClip)) {
    gParamHost->paramGetValueAtTime(myData->scaleRParam, time, &sR);
    gParamHost->paramGetValueAtTime(myData->scaleGParam, time, &sG);
    gParamHost->paramGetValueAtTime(myData->scaleBParam, time, &sB);
    gParamHost->paramGetValueAtTime(myData->scaleAParam, time, &sA);
  }

  // if the scale values are all 1, then we have an identity xfm on the Source clip
  if(scaleValue == 1.0 && sR==1 && sG == 1 && sB == 1 && sA == 1) {
    // set the property in the out args indicating which is the identity clip
    gPropHost->propSetString(outArgs, kOfxPropName, 0, kOfxImageEffectSimpleSourceClipName);
    return kOfxStatOK;
  }

  // otherwise do the default, which in this case is to render
  return kOfxStatReplyDefault;
}
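Every one of these callbacks calls getMyInstanceData, which the listing does not show. A minimal sketch of such a helper, assuming the plugin stashed the pointer on the instance's property set under kOfxPropInstanceData during createInstance:

static MyInstanceData *
getMyInstanceData(OfxImageEffectHandle effect)
{
  // fetch the property set of this effect instance
  OfxPropertySetHandle effectProps;
  gEffectHost->getPropertySet(effect, &effectProps);

  // read back the pointer assumed to have been stored by createInstance
  MyInstanceData *myData = NULL;
  gPropHost->propGetPointer(effectProps, kOfxPropInstanceData, 0, (void **) &myData);
  return myData;
}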
Example #2
// are the settings of the effect performing an identity operation
static OfxStatus
isIdentity(OfxImageEffectHandle  effect,
	   OfxPropertySetHandle inArgs,
	   OfxPropertySetHandle outArgs)
{
  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);
  
  // get the src depth
  int srcDepth = ofxuGetClipPixelDepth(myData->sourceClip);

  // get the dst depth
  int dstDepth = ofxuGetClipPixelDepth(myData->outputClip);

  // if the depths are the same we have no work to do!
  if(srcDepth == dstDepth) {
    // set the property in the out args indicating which is the identity clip
    gPropHost->propSetString(outArgs, kOfxPropName, 0, "Source");
    return kOfxStatOK;
  }

  // otherwise do the default, which in this case is to render
  return kOfxStatReplyDefault;
}
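ofxuGetClipPixelDepth comes from the example's utility code and is not shown here; a hedged sketch of what such a helper might do, reading the clip's pixel depth string and mapping it to a bit count:

static int
ofxuGetClipPixelDepth(OfxImageClipHandle clip)
{
  // fetch the clip's property set and read its pixel depth
  OfxPropertySetHandle props;
  gEffectHost->clipGetPropertySet(clip, &props);

  char *depthStr;
  gPropHost->propGetString(props, kOfxImageEffectPropPixelDepth, 0, &depthStr);

  // map the string constant onto a bit count
  if(strcmp(depthStr, kOfxBitDepthByte) == 0)  return 8;
  if(strcmp(depthStr, kOfxBitDepthShort) == 0) return 16;
  if(strcmp(depthStr, kOfxBitDepthFloat) == 0) return 32;
  return 0; // kOfxBitDepthNone or unrecognised
}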
Example #3
File: basic.cpp Project: chajaeik/openfx
// Function that sets the enabledness of the per-component scale parameters
// depending on the value of the 'scaleComponents' parameter.
// This function is called when the 'scaleComponents' value is changed
// or when the input clip has been changed
static void
setPerComponentScaleEnabledness( OfxImageEffectHandle effect)
{
  // get my instance data
  MyInstanceData *myData = getMyInstanceData(effect);

  // get the value of the per-component scale param
  int perComponentScale;
  gParamHost->paramGetValue(myData->perComponentScaleParam, &perComponentScale);

  if(ofxuIsClipConnected(effect, kOfxImageEffectSimpleSourceClipName)) {
    OfxPropertySetHandle props; gEffectHost->clipGetPropertySet(myData->sourceClip, &props);

    // get the input clip format
    char *pixelType;
    gPropHost->propGetString(props, kOfxImageEffectPropComponents, 0, &pixelType);

    // only enable the scales if the input is an RGBA input
    perComponentScale = perComponentScale && !(strcmp(pixelType, kOfxImageComponentAlpha) == 0);
  }

  // set the enabled/disabled state of the parameter
  setParamEnabledness(effect, "scaleR", perComponentScale);
  setParamEnabledness(effect, "scaleG", perComponentScale);
  setParamEnabledness(effect, "scaleB", perComponentScale);
  setParamEnabledness(effect, "scaleA", perComponentScale);
}
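setParamEnabledness is another helper assumed to live elsewhere in basic.cpp; a sketch under that assumption, toggling kOfxParamPropEnabled on the named parameter:

static void
setParamEnabledness(OfxImageEffectHandle effect, const char *paramName, int enabledState)
{
  // fetch the parameter set for this effect
  OfxParamSetHandle paramSet;
  gEffectHost->getParamSet(effect, &paramSet);

  // fetch the handle and property set of the named parameter
  OfxParamHandle param;
  OfxPropertySetHandle paramProps;
  gParamHost->paramGetHandle(paramSet, paramName, &param, &paramProps);

  // enable or disable it in the host's UI
  gPropHost->propSetInt(paramProps, kOfxParamPropEnabled, 0, enabledState);
}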
Example #4
File: basic.cpp Project: chajaeik/openfx
// Set our clip preferences 
static OfxStatus 
getClipPreferences( OfxImageEffectHandle  effect,  OfxPropertySetHandle inArgs,  OfxPropertySetHandle outArgs)
{
  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);
  
  // get the component type and bit depth of our main input
  int  bitDepth;
  bool isRGBA;
  ofxuClipGetFormat(myData->sourceClip, bitDepth, isRGBA, true); // get the unmapped clip component

  // get the strings used to label the various bit depths
  const char *bitDepthStr = bitDepth == 8 ? kOfxBitDepthByte : (bitDepth == 16 ? kOfxBitDepthShort : kOfxBitDepthFloat);
  const char *componentStr = isRGBA ? kOfxImageComponentRGBA : kOfxImageComponentAlpha;

  // set our output to be the same as the input, in both components and bit depth
  gPropHost->propSetString(outArgs, "OfxImageClipPropComponents_Output", 0, componentStr);
  if(gHostSupportsMultipleBitDepths)
    gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Output", 0, bitDepthStr);

  // if a general effect, we may have a mask input, check that for types
  if(myData->isGeneralEffect) {
    if(ofxuIsClipConnected(effect, "Mask")) {
      // set the mask input to be a single channel image of the same bitdepth as the source
      gPropHost->propSetString(outArgs, "OfxImageClipPropComponents_Mask", 0, kOfxImageComponentAlpha);
      if(gHostSupportsMultipleBitDepths) 
	gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Mask", 0, bitDepthStr);
    }
  }

  return kOfxStatOK;
}
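ofxuClipGetFormat is likewise part of the example's utility code; a sketch of a plausible implementation, assuming the final bool selects the unmapped clip properties:

static void
ofxuClipGetFormat(OfxImageClipHandle clip, int &bitDepth, bool &isRGBA, bool unmapped)
{
  // fetch the clip's property set
  OfxPropertySetHandle props;
  gEffectHost->clipGetPropertySet(clip, &props);

  // read either the unmapped or the mapped depth and components
  char *depthStr, *compStr;
  gPropHost->propGetString(props, unmapped ? kOfxImageClipPropUnmappedPixelDepth
                                           : kOfxImageEffectPropPixelDepth, 0, &depthStr);
  gPropHost->propGetString(props, unmapped ? kOfxImageClipPropUnmappedComponents
                                           : kOfxImageEffectPropComponents, 0, &compStr);

  bitDepth = strcmp(depthStr, kOfxBitDepthByte) == 0 ? 8 :
             strcmp(depthStr, kOfxBitDepthShort) == 0 ? 16 : 32;
  isRGBA   = strcmp(compStr, kOfxImageComponentRGBA) == 0;
}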
Example #5
// instance destruction
static OfxStatus
destroyInstance(OfxImageEffectHandle effect)
{
  // get my instance data
  MyInstanceData *myData = getMyInstanceData(effect);

  // and delete it
  if(myData)
    delete myData;
  return kOfxStatOK;
}
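The matching createInstance (not shown) is what allocates the data that destroyInstance deletes; a minimal sketch, with the cached handles left as a comment since their exact fields are assumptions:

static OfxStatus
createInstance(OfxImageEffectHandle effect)
{
  // fetch the property set of this effect instance
  OfxPropertySetHandle effectProps;
  gEffectHost->getPropertySet(effect, &effectProps);

  // allocate the per-instance data that destroyInstance later deletes
  MyInstanceData *myData = new MyInstanceData;

  // the real example would also cache clip and parameter handles here,
  // e.g. myData->sourceClip, myData->outputClip, myData->scaleParam

  // stash the pointer so getMyInstanceData can retrieve it
  gPropHost->propSetPointer(effectProps, kOfxPropInstanceData, 0, (void *) myData);

  return kOfxStatOK;
}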
Example #6
// tells the host how much of the input we need to fill the given window
OfxStatus
getSpatialRoI( OfxImageEffectHandle  effect,  OfxPropertySetHandle inArgs,  OfxPropertySetHandle outArgs)
{
  // get the RoI the effect is interested in from inArgs
  OfxRectD roi;
  gPropHost->propGetDoubleN(inArgs, kOfxImageEffectPropRegionOfInterest, 4, &roi.x1);

  // the input needed is the same as the output, so set that on the source clip
  gPropHost->propSetDoubleN(outArgs, "OfxImageClipPropRoI_Source", 4, &roi.x1);

  return kOfxStatOK;
}
Example #7
File: basic.cpp Project: chajaeik/openfx
// Tells the host how many frames we can fill, only called in the general context.
// This is actually redundant, as it is the default behaviour, but it is included
// for illustrative purposes.
OfxStatus 
getTemporalDomain( OfxImageEffectHandle  effect,  OfxPropertySetHandle inArgs,  OfxPropertySetHandle outArgs)
{
  MyInstanceData *myData = getMyInstanceData(effect);

  double sourceRange[2];
  
  // get the frame range of the source clip
  OfxPropertySetHandle props; gEffectHost->clipGetPropertySet(myData->sourceClip, &props);
  gPropHost->propGetDoubleN(props, kOfxImageEffectPropFrameRange, 2, sourceRange);

  // set it on the out args
  gPropHost->propSetDoubleN(outArgs, kOfxImageEffectPropFrameRange, 2, sourceRange);
  
  return kOfxStatOK;
}
Example #8
// tells the host what region we are capable of filling
OfxStatus
getSpatialRoD( OfxImageEffectHandle  effect,  OfxPropertySetHandle inArgs,  OfxPropertySetHandle outArgs)
{
  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);

  OfxTime time;
  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);

  // my RoD is the same as my input's
  OfxRectD rod;
  gEffectHost->clipGetRegionOfDefinition(myData->sourceClip, time, &rod);

  // set the rod in the out args
  gPropHost->propSetDoubleN(outArgs, kOfxImageEffectPropRegionOfDefinition, 4, &rod.x1);

  return kOfxStatOK;
}
Example #9
File: basic.cpp Project: chajaeik/openfx
// tells the host how much of the input we need to fill the given window
OfxStatus 
getSpatialRoI( OfxImageEffectHandle  effect,  OfxPropertySetHandle inArgs,  OfxPropertySetHandle outArgs)
{
  // get the RoI the effect is interested in from inArgs
  OfxRectD roi;
  gPropHost->propGetDoubleN(inArgs, kOfxImageEffectPropRegionOfInterest, 4, &roi.x1);

  // the input needed is the same as the output, so set that on the source clip
  gPropHost->propSetDoubleN(outArgs, "OfxImageClipPropRoI_Source", 4, &roi.x1);

  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);

  // if a general effect, we need to know the mask as well
  if(myData->isGeneralEffect && ofxuIsClipConnected(effect, "Mask")) {
    gPropHost->propSetDoubleN(outArgs, "OfxImageClipPropRoI_Mask", 4, &roi.x1);
  }
  return kOfxStatOK;
}
Example #10
// Set our clip preferences
static OfxStatus
getClipPreferences( OfxImageEffectHandle  effect,  OfxPropertySetHandle /*inArgs*/,  OfxPropertySetHandle outArgs)
{
  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);

  // get the component type and bit depth of our main input
  int  bitDepth;
  bool isRGBA;
  ofxuClipGetFormat(myData->sourceClip, bitDepth, isRGBA, true); // get the unmapped clip component

  // get the strings used to label the various bit depths
  const char *bitDepthStr = bitDepth == 8 ? kOfxBitDepthByte : (bitDepth == 16 ? kOfxBitDepthShort : kOfxBitDepthFloat);
  const char *componentStr = isRGBA ? kOfxImageComponentRGBA : kOfxImageComponentAlpha;

  // set our output to be the same as the input, in both components and bit depth
  gPropHost->propSetString(outArgs, "OfxImageClipPropComponents_Output", 0, componentStr);
  if(gHostSupportsMultipleBitDepths)
    gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Output", 0, bitDepthStr);

  return kOfxStatOK;
}
Example #11
// Set our clip preferences 
static OfxStatus 
getClipPreferences(OfxImageEffectHandle effect,
		   OfxPropertySetHandle inArgs,
		   OfxPropertySetHandle outArgs)
{
  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);
  
  // fetch the depth parameter value
  int depthVal;
  gParamHost->paramGetValue(myData->depthParam, &depthVal);

  // and set the output depths based on that
  switch(gDepthParamToBytes[depthVal]) {
  // byte
  case 8 : gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Output", 0, kOfxBitDepthByte); break;
  // short
  case 16 : gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Output", 0, kOfxBitDepthShort); break;
  // float
  case 32 : gPropHost->propSetString(outArgs, "OfxImageClipPropDepth_Output", 0, kOfxBitDepthFloat); break;
  }

  return kOfxStatOK;
}
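gDepthParamToBytes is assumed to be a small lookup table that maps the choice parameter's index onto a bit depth (despite its name), along these lines:

// assumed mapping from the 'depth' choice parameter (entries 0, 1, 2) to bits per component
static const int gDepthParamToBytes[3] = {8, 16, 32};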
Example #12
// the process code  that the host sees
static OfxStatus render(OfxImageEffectHandle effect,
                        OfxPropertySetHandle inArgs,
                        OfxPropertySetHandle outArgs)
{
  // get the render window and the time from the inArgs
  OfxTime time;
  OfxRectI renderWindow;
  OfxStatus status = kOfxStatOK;
  
  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);
  gPropHost->propGetIntN(inArgs, kOfxImageEffectPropRenderWindow, 4, &renderWindow.x1);

  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(effect);

  // property handles and members of each image
  // in reality, we would put this in a struct as the C++ support layer does
  OfxPropertySetHandle sourceImg = NULL, outputImg = NULL;
  int srcRowBytes, srcBitDepth, dstRowBytes, dstBitDepth;
  bool srcIsAlpha, dstIsAlpha;
  OfxRectI dstRect, srcRect;
  void *src, *dst;

  try {
    outputImg = ofxuGetImage(myData->outputClip, time, dstRowBytes, dstBitDepth, dstIsAlpha, dstRect, dst);
    if(outputImg == NULL) throw OfxuNoImageException();

    sourceImg = ofxuGetImage(myData->sourceClip, time, srcRowBytes, srcBitDepth, srcIsAlpha, srcRect, src);
    if(sourceImg == NULL) throw OfxuNoImageException();
    
    int nComponents = dstIsAlpha ? 1 : 4;
    
    // set up the processor that we pass to the individual constructors
    Processor proc(effect, nComponents,
                   src, srcRect, srcRowBytes,
                   dst, dstRect, dstRowBytes,
                   renderWindow);
    
    // now instantiate the templated processor depending on src and dest pixel types, 9 cases in all
    switch(dstBitDepth) {
    case 8 : {
      switch(srcBitDepth) {
      case 8 :  {ProcessPix<unsigned char,  255,   0, unsigned char, 255, 0> pixProc(proc); break;}
      case 16 : {ProcessPix<unsigned short, 65535, 0, unsigned char, 255, 0> pixProc(proc); break;}
      case 32 : {ProcessPix<float,          1,     1, unsigned char, 255, 0> pixProc(proc); break;}
      }
    }
      break;

    case 16 : {
      switch(srcBitDepth) {
      case 8 :  {ProcessPix<unsigned char,  255,   0, unsigned short, 65535, 0> pixProc(proc); break;}
      case 16 : {ProcessPix<unsigned short, 65535, 0, unsigned short, 65535, 0> pixProc(proc); break;}
      case 32 : {ProcessPix<float,          1,     1, unsigned short, 65535, 0> pixProc(proc); break;}
      }
    }
      break;

    case 32 : {
      switch(srcBitDepth) {
      case 8 :  {ProcessPix<unsigned char,  255,   0, float, 1, 1> pixProc(proc); break;}
      case 16 : {ProcessPix<unsigned short, 65535, 0, float, 1, 1> pixProc(proc); break;}
      case 32 : {ProcessPix<float,          1,     1, float, 1, 1> pixProc(proc); break;}
      }
    }                          
      break;
    }
  }
  catch(OfxuNoImageException &ex) {
    // if we were interrupted, the failed fetch is fine, just return kOfxStatOK
    // otherwise, something weird happened
    if(!gEffectHost->abort(effect)) {
      status = kOfxStatFailed;
    }
  }

  // release the data pointers
  if(sourceImg)
    gEffectHost->clipReleaseImage(sourceImg);
  if(outputImg)
    gEffectHost->clipReleaseImage(outputImg);
  
  return status;
}
Example #13
File: basic.cpp Project: chajaeik/openfx
// the process code  that the host sees
static OfxStatus render( OfxImageEffectHandle  instance,
                         OfxPropertySetHandle inArgs,
                         OfxPropertySetHandle outArgs)
{
  // get the render window and the time from the inArgs
  OfxTime time;
  OfxRectI renderWindow;
  OfxStatus status = kOfxStatOK;

  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);
  gPropHost->propGetIntN(inArgs, kOfxImageEffectPropRenderWindow, 4, &renderWindow.x1);

  // retrieve any instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(instance);

  // property handles and members of each image
  // in reality, we would put this in a struct as the C++ support layer does
  OfxPropertySetHandle sourceImg = NULL, outputImg = NULL, maskImg = NULL;
  int srcRowBytes, srcBitDepth, dstRowBytes, dstBitDepth, maskRowBytes, maskBitDepth;
  bool srcIsAlpha, dstIsAlpha, maskIsAlpha;
  OfxRectI dstRect, srcRect, maskRect;
  void *src, *dst, *mask = NULL;

  try {
    // get the source image
    sourceImg = ofxuGetImage(myData->sourceClip, time, srcRowBytes, srcBitDepth, srcIsAlpha, srcRect, src);
    if(sourceImg == NULL) throw OfxuNoImageException();

    // get the output image
    outputImg = ofxuGetImage(myData->outputClip, time, dstRowBytes, dstBitDepth, dstIsAlpha, dstRect, dst);
    if(outputImg == NULL) throw OfxuNoImageException();

    if(myData->isGeneralEffect) {
      // is the mask connected?
      if(ofxuIsClipConnected(instance, "Mask")) {
        maskImg = ofxuGetImage(myData->maskClip, time, maskRowBytes, maskBitDepth, maskIsAlpha, maskRect, mask);

        if(maskImg != NULL) {                        
          // and see that it is a single component
          if(!maskIsAlpha || maskBitDepth != srcBitDepth) {
            throw OfxuStatusException(kOfxStatErrImageFormat);
          }  
        }
      }
    }

    // see if they have the same depths and bytes and all
    if(srcBitDepth != dstBitDepth || srcIsAlpha != dstIsAlpha) {
      throw OfxuStatusException(kOfxStatErrImageFormat);
    }

    // are we doing per-component scaling? (boolean params are fetched as ints)
    int scaleComponents;
    gParamHost->paramGetValueAtTime(myData->perComponentScaleParam, time, &scaleComponents);

    // get the scale parameters
    double scale, rScale = 1, gScale = 1, bScale = 1, aScale = 1;
    gParamHost->paramGetValueAtTime(myData->scaleParam, time, &scale);

    if(scaleComponents) {
      gParamHost->paramGetValueAtTime(myData->scaleRParam, time, &rScale);
      gParamHost->paramGetValueAtTime(myData->scaleGParam, time, &gScale);
      gParamHost->paramGetValueAtTime(myData->scaleBParam, time, &bScale);
      gParamHost->paramGetValueAtTime(myData->scaleAParam, time, &aScale);
    }
    rScale *= scale; gScale *= scale; bScale *= scale; aScale *= scale;
  
    // do the rendering
    if(!dstIsAlpha) {
      switch(dstBitDepth) {
      case 8 : {      
        ProcessRGBA<OfxRGBAColourB, unsigned char, 255, 0> fred(instance, rScale, gScale, bScale, aScale,
                                                                src, srcRect, srcRowBytes,
                                                                dst, dstRect, dstRowBytes,
                                                                mask, maskRect, maskRowBytes,
                                                                renderWindow);
        fred.process();                                          
      }
        break;

      case 16 : {
        ProcessRGBA<OfxRGBAColourS, unsigned short, 65535, 0> fred(instance, rScale, gScale, bScale, aScale,
                                                                   src, srcRect, srcRowBytes,
                                                                   dst, dstRect, dstRowBytes,
                                                                   mask, maskRect, maskRowBytes,
                                                                   renderWindow);
        fred.process();           
      }                          
        break;

      case 32 : {
        ProcessRGBA<OfxRGBAColourF, float, 1, 1> fred(instance, rScale, gScale, bScale, aScale,
                                                      src, srcRect, srcRowBytes,
                                                      dst, dstRect, dstRowBytes,
                                                      mask, maskRect, maskRowBytes,
                                                      renderWindow);
        fred.process();                                          
        break;
      }
      }
    }
    else {
      switch(dstBitDepth) {
      case 8 : {
        ProcessAlpha<unsigned char, unsigned char, 255, 0> fred(instance, scale, 
                                                                src, srcRect, srcRowBytes,
                                                                dst, dstRect, dstRowBytes,
                                                                mask, maskRect, maskRowBytes,
                                                                renderWindow);
        fred.process();                                                                                  
      }
        break;

      case 16 : {
        ProcessAlpha<unsigned short, unsigned short, 65535, 0> fred(instance, scale, 
                                                                    src, srcRect, srcRowBytes,
                                                                    dst, dstRect, dstRowBytes,
                                                                    mask, maskRect, maskRowBytes,
                                                                    renderWindow);
        fred.process();           
      }                          
        break;

      case 32 : {
        ProcessAlpha<float, float, 1, 1> fred(instance, scale, 
                                              src, srcRect, srcRowBytes,
                                              dst, dstRect, dstRowBytes,
                                              mask, maskRect, maskRowBytes,
                                              renderWindow);
        fred.process();           
      }                          
        break;
      }
    }
  }
  catch(OfxuNoImageException &ex) {
    // if we were interrupted, the failed fetch is fine, just return kOfxStatOK
    // otherwise, something weird happened
    if(!gEffectHost->abort(instance)) {
      status = kOfxStatFailed;
    }
  }
  catch(OfxuStatusException &ex) {
    status = ex.status();
  }

  // release the data pointers
  if(maskImg)
    gEffectHost->clipReleaseImage(maskImg);
  if(sourceImg)
    gEffectHost->clipReleaseImage(sourceImg);
  if(outputImg)
    gEffectHost->clipReleaseImage(outputImg);
  
  return status;
}
Example #14
// the process code  that the host sees
static OfxStatus render( OfxImageEffectHandle  instance,
                         OfxPropertySetHandle inArgs,
                         OfxPropertySetHandle outArgs)
{
  // get the render window and the time from the inArgs
  OfxTime time;
  OfxRectI renderWindow;
  OfxStatus status = kOfxStatOK;

  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);
  gPropHost->propGetIntN(inArgs, kOfxImageEffectPropRenderWindow, 4, &renderWindow.x1);

  // Retrieve instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(instance);

  // property handles and members of each image
  OfxPropertySetHandle sourceImg = NULL, outputImg = NULL;
  int srcRowBytes, srcBitDepth, dstRowBytes, dstBitDepth;
  bool srcIsAlpha, dstIsAlpha;
  OfxRectI dstRect, srcRect;
  void *src, *dst;

  DPRINT(("Render: window = [%d, %d - %d, %d]\n",
	  renderWindow.x1, renderWindow.y1,
	  renderWindow.x2, renderWindow.y2));

  int isOpenCLEnabled = 0;
  if (gHostSupportsOpenCL)
  {
      gPropHost->propGetInt(inArgs, kOfxImageEffectPropOpenCLEnabled, 0, &isOpenCLEnabled);
      DPRINT(("render: OpenCL rendering %s\n", isOpenCLEnabled ? "enabled" : "DISABLED"));
  }

  cl_context clContext = NULL;
  cl_command_queue cmdQ = NULL;
  cl_device_id deviceId = NULL;
  if (isOpenCLEnabled)
  {
      void* voidPtrCmdQ;
      gPropHost->propGetPointer(inArgs, kOfxImageEffectPropOpenCLCommandQueue, 0, &voidPtrCmdQ);
      cmdQ = reinterpret_cast<cl_command_queue>(voidPtrCmdQ);

      clGetCommandQueueInfo(cmdQ, CL_QUEUE_CONTEXT, sizeof(cl_context), &clContext, NULL);
      clGetCommandQueueInfo(cmdQ, CL_QUEUE_DEVICE, sizeof(cl_device_id), &deviceId, NULL);
  }
  else
  {
      clContext = GetContext(deviceId);
      cmdQ = clCreateCommandQueue(clContext, deviceId, 0, NULL);
  }

  char deviceName[128];
  clGetDeviceInfo(deviceId, CL_DEVICE_NAME, 128, deviceName, NULL);
  DPRINT(("Using %s for plugin\n", deviceName));

  cl_kernel kernel = GetKernel(clContext);

  // get the source image
  sourceImg = ofxuGetImage(myData->sourceClip, time, srcRowBytes, srcBitDepth, srcIsAlpha, srcRect, src);

  // get the output image
  outputImg = ofxuGetImage(myData->outputClip, time, dstRowBytes, dstBitDepth, dstIsAlpha, dstRect, dst);

  // get the scale parameter
  double rGain = 1, gGain = 1, bGain = 1;
  gParamHost->paramGetValueAtTime(myData->rGainParam, time, &rGain);
  gParamHost->paramGetValueAtTime(myData->gGainParam, time, &gGain);
  gParamHost->paramGetValueAtTime(myData->bGainParam, time, &bGain);
  DPRINT(("Gain(%f %f %f)\n", rGain, gGain, bGain));

  float w = (renderWindow.x2 - renderWindow.x1);
  float h = (renderWindow.y2 - renderWindow.y1);

  if (isOpenCLEnabled)
  {
      DPRINT(("Using OpenCL transfers (same device)\n"));

      RunKernel(cmdQ, deviceId, kernel, w, h, rGain, gGain, bGain, (cl_mem)src, (cl_mem)dst);
  }
  else
  {
      DPRINT(("Using CPU transfers\n"));

      const size_t bufferSize = w * h * 4 * sizeof(float);

      // Allocate the temporary buffers on the plugin device
      cl_mem inBuffer = clCreateBuffer(clContext, CL_MEM_READ_ONLY, bufferSize, NULL, NULL);
      cl_mem outBuffer = clCreateBuffer(clContext, CL_MEM_WRITE_ONLY, bufferSize, NULL, NULL);

      // Copy the buffer from the CPU to the plugin device
      clEnqueueWriteBuffer(cmdQ, inBuffer, CL_TRUE, 0, bufferSize, src, 0, NULL, NULL);

      RunKernel(cmdQ, deviceId, kernel, w, h, rGain, gGain, bGain, inBuffer, outBuffer);

      // Copy the buffer from the plugin device to the CPU
      clEnqueueReadBuffer(cmdQ, outBuffer, CL_TRUE, 0, bufferSize, dst, 0, NULL, NULL);

      clFinish(cmdQ);

      // Free the temporary buffers on the plugin device
      clReleaseMemObject(inBuffer);
      clReleaseMemObject(outBuffer);
  }

  if (sourceImg)
  {
      gEffectHost->clipReleaseImage(sourceImg);
  }

  if (outputImg)
  {
      gEffectHost->clipReleaseImage(outputImg);
  }

  return status;
}
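RunKernel, GetContext and GetKernel are helpers defined elsewhere in this plugin. As a rough sketch, RunKernel would bind the kernel arguments and enqueue one work-item per pixel; the kernel's exact argument order here is an assumption:

static void RunKernel(cl_command_queue cmdQ, cl_device_id /*deviceId*/, cl_kernel kernel,
                      int width, int height, float rGain, float gGain, float bGain,
                      cl_mem input, cl_mem output)
{
  // bind the arguments in the order the kernel is assumed to expect
  clSetKernelArg(kernel, 0, sizeof(int),    &width);
  clSetKernelArg(kernel, 1, sizeof(int),    &height);
  clSetKernelArg(kernel, 2, sizeof(float),  &rGain);
  clSetKernelArg(kernel, 3, sizeof(float),  &gGain);
  clSetKernelArg(kernel, 4, sizeof(float),  &bGain);
  clSetKernelArg(kernel, 5, sizeof(cl_mem), &input);
  clSetKernelArg(kernel, 6, sizeof(cl_mem), &output);

  // launch one work-item per pixel of the render window
  size_t globalSize[2] = { (size_t)width, (size_t)height };
  clEnqueueNDRangeKernel(cmdQ, kernel, 2, NULL, globalSize, NULL, 0, NULL, NULL);
}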
Example #15
// the process code  that the host sees
static OfxStatus render( OfxImageEffectHandle  instance,
                         OfxPropertySetHandle inArgs,
                         OfxPropertySetHandle /*outArgs*/)
{
  // get the render window and the time from the inArgs
  OfxTime time;
  OfxRectI renderWindow;
  OfxStatus status = kOfxStatOK;

  gPropHost->propGetDouble(inArgs, kOfxPropTime, 0, &time);
  gPropHost->propGetIntN(inArgs, kOfxImageEffectPropRenderWindow, 4, &renderWindow.x1);

  // Retrieve instance data associated with this effect
  MyInstanceData *myData = getMyInstanceData(instance);

  // property handles and members of each image
  OfxPropertySetHandle sourceImg = NULL, outputImg = NULL;
  int gl_enabled = 0;
  int source_texture_index = -1, source_texture_target = -1;
  int output_texture_index = -1, output_texture_target = -1;
  char *tmps;

  DPRINT(("render: openGLSuite %s\n", gOpenGLSuite ? "found" : "not found"));
  if (gOpenGLSuite) {
    gPropHost->propGetInt(inArgs, kOfxImageEffectPropOpenGLEnabled, 0, &gl_enabled);
    DPRINT(("render: openGL rendering %s\n", gl_enabled ? "enabled" : "DISABLED"));
  }
  DPRINT(("Render: window = [%d, %d - %d, %d]\n",
	  renderWindow.x1, renderWindow.y1,
	  renderWindow.x2, renderWindow.y2));

  // For this test, we only process in OpenGL mode.
  if (!gl_enabled) {
    return kOfxStatErrImageFormat;
  }

  // get the output image texture
  status = gOpenGLSuite->clipLoadTexture(myData->outputClip, time, NULL, NULL, &outputImg);
  DPRINT(("openGL: clipLoadTexture (output) returns status %d\n", status));
  if (status != kOfxStatOK) {
    return status;
  }
  status = gPropHost->propGetInt(outputImg, kOfxImageEffectPropOpenGLTextureIndex,
				 0, &output_texture_index);
  if (status != kOfxStatOK) {
    return status;
  }
  status = gPropHost->propGetInt(outputImg, kOfxImageEffectPropOpenGLTextureTarget,
				 0, &output_texture_target);
  if (status != kOfxStatOK) {
    return status;
  }
  status = gPropHost->propGetString(outputImg, kOfxImageEffectPropPixelDepth, 0, &tmps);
  if (status != kOfxStatOK) {
    return status;
  }
  DPRINT(("openGL: output texture index %d, target %d, depth %s\n",
	  output_texture_index, output_texture_target, tmps));

  status = gOpenGLSuite->clipLoadTexture(myData->sourceClip, time, NULL, NULL, &sourceImg);
  DPRINT(("openGL: clipLoadTexture (source) returns status %d\n", status));
  if (status != kOfxStatOK) {
    return status;
  }

  status = gPropHost->propGetInt(sourceImg, kOfxImageEffectPropOpenGLTextureIndex,
				 0, &source_texture_index);
  if (status != kOfxStatOK) {
    return status;
  }
  status = gPropHost->propGetInt(sourceImg, kOfxImageEffectPropOpenGLTextureTarget,
				 0, &source_texture_target);
  if (status != kOfxStatOK) {
    return status;
  }
  status = gPropHost->propGetString(sourceImg, kOfxImageEffectPropPixelDepth, 0, &tmps);
  if (status != kOfxStatOK) {
    return status;
  }
  DPRINT(("openGL: source texture index %d, target %d, depth %d\n",
	  source_texture_index, source_texture_target, tmps));
  // XXX: check status for errors

  // get the scale parameter
  double scale = 1;
  double source_scale = 1;
  gParamHost->paramGetValueAtTime(myData->scaleParam, time, &scale);
  gParamHost->paramGetValueAtTime(myData->sourceScaleParam, time, &source_scale);

  float w = (renderWindow.x2 - renderWindow.x1);
  float h = (renderWindow.y2 - renderWindow.y1);

  glPushAttrib(GL_ALL_ATTRIB_BITS);
  glDisable(GL_BLEND);

  // Draw black into dest to start
  glBegin(GL_QUADS);
  glColor4f(0, 0, 0, 1); //Set the colour to opaque black
  glVertex2f(0, 0);
  glVertex2f(0, h);
  glVertex2f(w, h);
  glVertex2f(w, 0);
  glEnd();

  //
  // Copy source texture to output by drawing a big textured quad
  //

  // set up texture (how much of this is needed?)
  glEnable(source_texture_target);
  glBindTexture(source_texture_target, source_texture_index);
  glTexParameteri(source_texture_target, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameteri(source_texture_target, GL_TEXTURE_WRAP_T, GL_REPEAT);
  glTexParameteri(source_texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(source_texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);

  // textures are oriented with Y up (standard orientation)
  float tymin = 0;
  float tymax = 1;

  // now draw the textured quad containing the source
  glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
  glBegin(GL_QUADS);
  glTexCoord2f (0, tymin);
  glVertex2f   (0, 0);
  glTexCoord2f (1.0, tymin);
  glVertex2f   (w * source_scale, 0);
  glTexCoord2f (1.0, tymax);
  glVertex2f   (w * source_scale, h * source_scale);
  glTexCoord2f (0, tymax);
  glVertex2f   (0, h * source_scale);
  glEnd ();

  glDisable(source_texture_target);

  // Now draw some stuff on top of it to show we really did something
#define WIDTH 200
#define HEIGHT 100
  glBegin(GL_QUADS);
  glColor3f(1.0f, 0, 0); //Set the colour to red
  glVertex2f(10, 10);
  glVertex2f(10, HEIGHT * scale);
  glVertex2f(WIDTH * scale, HEIGHT * scale);
  glVertex2f(WIDTH * scale, 10);
  glEnd();

  // done; clean up.
  glPopAttrib();

  // release the data pointers
  if(sourceImg)
    gOpenGLSuite->clipFreeTexture(sourceImg);
  if(outputImg)
    gOpenGLSuite->clipFreeTexture(outputImg);

  return status;
}