Example No. 1
static void
DrawSurfaceWithTextureCoords(DrawTarget *aDest,
                             const gfx::Rect& aDestRect,
                             SourceSurface *aSource,
                             const gfx::Rect& aTextureCoords,
                             gfx::Filter aFilter,
                             float aOpacity,
                             SourceSurface *aMask,
                             const Matrix* aMaskTransform)
{
  // Convert aTextureCoords into aSource's coordinate space
  gfxRect sourceRect(aTextureCoords.x * aSource->GetSize().width,
                     aTextureCoords.y * aSource->GetSize().height,
                     aTextureCoords.width * aSource->GetSize().width,
                     aTextureCoords.height * aSource->GetSize().height);

  // Floating point error can accumulate above and we know our visible region
  // is integer-aligned, so round it out.
  sourceRect.Round();

  // Compute a transform that maps sourceRect to aDestRect.
  Matrix matrix =
    gfxUtils::TransformRectToRect(sourceRect,
                                  gfx::IntPoint(aDestRect.x, aDestRect.y),
                                  gfx::IntPoint(aDestRect.XMost(), aDestRect.y),
                                  gfx::IntPoint(aDestRect.XMost(), aDestRect.YMost()));

  // Only use REPEAT if aTextureCoords is outside (0, 0, 1, 1).
  gfx::Rect unitRect(0, 0, 1, 1);
  ExtendMode mode = unitRect.Contains(aTextureCoords) ? ExtendMode::CLAMP : ExtendMode::REPEAT;

  FillRectWithMask(aDest, aDestRect, aSource, aFilter, DrawOptions(aOpacity),
                   mode, aMask, aMaskTransform, &matrix);
}
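
A minimal, self-contained sketch of the CLAMP-vs-REPEAT decision above. SimpleRect and ChooseExtendMode are hypothetical stand-ins, not the real gfx::Rect/ExtendMode API: texture coordinates wholly inside the unit rect can be clamped, while anything reaching outside [0, 1] needs wrapping.

// Hypothetical stand-ins for gfx::Rect and ExtendMode; illustration only.
#include <cstdio>

struct SimpleRect {
  float x, y, width, height;
  float XMost() const { return x + width; }
  float YMost() const { return y + height; }
  bool Contains(const SimpleRect& aOther) const {
    return x <= aOther.x && y <= aOther.y &&
           XMost() >= aOther.XMost() && YMost() >= aOther.YMost();
  }
};

enum class ExtendMode { CLAMP, REPEAT };

ExtendMode ChooseExtendMode(const SimpleRect& aTextureCoords) {
  const SimpleRect unitRect{0.0f, 0.0f, 1.0f, 1.0f};
  return unitRect.Contains(aTextureCoords) ? ExtendMode::CLAMP : ExtendMode::REPEAT;
}

int main() {
  // Entirely inside the unit rect -> CLAMP; spills past u = 1 -> REPEAT.
  printf("%d\n", ChooseExtendMode({0.25f, 0.25f, 0.5f, 0.5f}) == ExtendMode::CLAMP);  // 1
  printf("%d\n", ChooseExtendMode({0.75f, 0.0f, 0.5f, 1.0f}) == ExtendMode::REPEAT);  // 1
  return 0;
}
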
Example No. 2
void
AndroidGeckoLayerClient::SyncFrameMetrics(const gfx::Point& aScrollOffset, float aZoom, const gfx::Rect& aCssPageRect,
                                          bool aLayersUpdated, const gfx::Rect& aDisplayPort, float aDisplayResolution,
                                          bool aIsFirstPaint, gfx::Margin& aFixedLayerMargins, gfx::Point& aOffset)
{
    NS_ASSERTION(!isNull(), "SyncFrameMetrics called on null layer client!");
    JNIEnv *env = GetJNIForThread();    // this is called on the compositor thread
    if (!env)
        return;

    AutoLocalJNIFrame jniFrame(env);

    // convert the displayport rect from scroll-relative CSS pixels to document-relative device pixels
    int dpX = NS_lround((aDisplayPort.x * aDisplayResolution) + aScrollOffset.x);
    int dpY = NS_lround((aDisplayPort.y * aDisplayResolution) + aScrollOffset.y);
    int dpW = NS_lround(aDisplayPort.width * aDisplayResolution);
    int dpH = NS_lround(aDisplayPort.height * aDisplayResolution);

    jobject viewTransformJObj = env->CallObjectMethod(wrapped_obj, jSyncFrameMetricsMethod,
            aScrollOffset.x, aScrollOffset.y, aZoom,
            aCssPageRect.x, aCssPageRect.y, aCssPageRect.XMost(), aCssPageRect.YMost(),
            aLayersUpdated, dpX, dpY, dpW, dpH, aDisplayResolution,
            aIsFirstPaint);

    if (jniFrame.CheckForException())
        return;

    NS_ABORT_IF_FALSE(viewTransformJObj, "No view transform object!");

    AndroidViewTransform viewTransform;
    viewTransform.Init(viewTransformJObj);
    viewTransform.GetFixedLayerMargins(env, aFixedLayerMargins);
    aOffset.x = viewTransform.GetOffsetX(env);
    aOffset.y = viewTransform.GetOffsetY(env);
}
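
A numeric sketch of the displayport conversion above, with assumed input values: scale the scroll-relative CSS rect by the display resolution, then translate by the scroll offset (which, judging by the code above, is already in device pixels).

// Assumed example values; mirrors the dpX/dpY/dpW/dpH computation above.
#include <cmath>
#include <cstdio>

int main() {
  const float scrollX = 120.0f, scrollY = 640.0f;  // scroll offset, device pixels
  const float dispX = -50.0f, dispY = -100.0f;     // displayport origin, scroll-relative CSS pixels
  const float dispW = 480.0f, dispH = 900.0f;      // displayport size, CSS pixels
  const float resolution = 2.0f;                   // device pixels per CSS pixel

  const int dpX = (int)std::lround(dispX * resolution + scrollX);  // 20
  const int dpY = (int)std::lround(dispY * resolution + scrollY);  // 440
  const int dpW = (int)std::lround(dispW * resolution);            // 960
  const int dpH = (int)std::lround(dispH * resolution);            // 1800

  printf("displayport (device px): %d,%d %dx%d\n", dpX, dpY, dpW, dpH);
  return 0;
}
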
Example No. 3
bool
VRDisplayOpenVR::SubmitFrame(void* aTextureHandle,
                             ::vr::ETextureType aTextureType,
                             const IntSize& aSize,
                             const gfx::Rect& aLeftEyeRect,
                             const gfx::Rect& aRightEyeRect)
{
  MOZ_ASSERT(mSubmitThread->GetThread() == NS_GetCurrentThread());
  if (!mIsPresenting) {
    return false;
  }

  ::vr::Texture_t tex;
  tex.handle = aTextureHandle;
  tex.eType = aTextureType;
  tex.eColorSpace = ::vr::EColorSpace::ColorSpace_Auto;

  ::vr::VRTextureBounds_t bounds;
  bounds.uMin = aLeftEyeRect.X();
  bounds.vMin = 1.0 - aLeftEyeRect.Y();
  bounds.uMax = aLeftEyeRect.XMost();
  bounds.vMax = 1.0 - aLeftEyeRect.YMost();

  ::vr::EVRCompositorError err;
  err = mVRCompositor->Submit(::vr::EVREye::Eye_Left, &tex, &bounds);
  if (err != ::vr::EVRCompositorError::VRCompositorError_None) {
    printf_stderr("OpenVR Compositor Submit() failed.\n");
  }

  bounds.uMin = aRightEyeRect.X();
  bounds.vMin = 1.0 - aRightEyeRect.Y();
  bounds.uMax = aRightEyeRect.XMost();
  bounds.vMax = 1.0 - aRightEyeRect.YMost();

  err = mVRCompositor->Submit(::vr::EVREye::Eye_Right, &tex, &bounds);
  if (err != ::vr::EVRCompositorError::VRCompositorError_None) {
    printf_stderr("OpenVR Compositor Submit() failed.\n");
  }

  mVRCompositor->PostPresentHandoff();
  return true;
}
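
A sketch of the texture-bounds mapping used above, with hypothetical eye rects and stand-in types: the u range follows the eye rect directly, while the v range is flipped (1 - y), presumably because the submitted texture and the bounds use opposite vertical origins.

// EyeRect/TexBounds are stand-ins for gfx::Rect and ::vr::VRTextureBounds_t.
#include <cstdio>

struct EyeRect { float x, y, width, height; };
struct TexBounds { float uMin, vMin, uMax, vMax; };

TexBounds BoundsForEye(const EyeRect& aEyeRect) {
  TexBounds bounds;
  bounds.uMin = aEyeRect.x;
  bounds.vMin = 1.0f - aEyeRect.y;
  bounds.uMax = aEyeRect.x + aEyeRect.width;
  bounds.vMax = 1.0f - (aEyeRect.y + aEyeRect.height);
  return bounds;
}

int main() {
  // Left eye occupying the left half of a shared texture.
  TexBounds left = BoundsForEye({0.0f, 0.0f, 0.5f, 1.0f});
  printf("u: [%g, %g]  v: [%g, %g]\n", left.uMin, left.uMax, left.vMin, left.vMax);
  // Prints u: [0, 0.5]  v: [1, 0] -- vMin > vMax, i.e. vertically flipped.
  return 0;
}
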
Example No. 4
void
AndroidGeckoLayerClient::SetPageRect(const gfx::Rect& aCssPageRect)
{
    NS_ASSERTION(!isNull(), "SetPageRect called on null layer client!");
    JNIEnv *env = GetJNIForThread();    // this is called on the compositor thread
    if (!env)
        return;

    AutoLocalJNIFrame jniFrame(env, 0);
    env->CallVoidMethod(wrapped_obj, jSetPageRect,
                        aCssPageRect.x, aCssPageRect.y, aCssPageRect.XMost(), aCssPageRect.YMost());
}
Example No. 5
void
CompositorD3D9::ClearRect(const gfx::Rect& aRect)
{
  D3DRECT rect;
  rect.x1 = aRect.X();
  rect.y1 = aRect.Y();
  rect.x2 = aRect.XMost();
  rect.y2 = aRect.YMost();

  device()->Clear(1, &rect, D3DCLEAR_TARGET,
                  0x00000000, 0, 0);
}
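
A minimal sketch of the rect conversion done above: gfx::Rect stores a float origin plus size, while D3DRECT wants integer corner coordinates (x1, y1)-(x2, y2). FloatRect is a stand-in, and the D3DRECT below mirrors the d3d9.h struct so the sketch stays self-contained.

#include <cstdio>

struct FloatRect {                        // stand-in for gfx::Rect
  float x, y, width, height;
  float XMost() const { return x + width; }
  float YMost() const { return y + height; }
};

struct D3DRECT { long x1, y1, x2, y2; };  // mirrors the Direct3D 9 definition

D3DRECT ToD3DRect(const FloatRect& aRect) {
  D3DRECT rect;
  rect.x1 = (long)aRect.x;
  rect.y1 = (long)aRect.y;
  rect.x2 = (long)aRect.XMost();
  rect.y2 = (long)aRect.YMost();
  return rect;
}

int main() {
  D3DRECT r = ToD3DRect({16.0f, 32.0f, 256.0f, 128.0f});
  printf("(%ld,%ld)-(%ld,%ld)\n", r.x1, r.y1, r.x2, r.y2);  // (16,32)-(272,160)
  return 0;
}
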
Example No. 6
void
AndroidGeckoLayerClient::SetFirstPaintViewport(const nsIntPoint& aOffset, float aZoom, const nsIntRect& aPageRect, const gfx::Rect& aCssPageRect)
{
    NS_ASSERTION(!isNull(), "SetFirstPaintViewport called on null layer client!");
    JNIEnv *env = GetJNIForThread();    // this is called on the compositor thread
    if (!env)
        return;

    AutoLocalJNIFrame jniFrame(env, 0);
    env->CallVoidMethod(wrapped_obj, jSetFirstPaintViewport, (float)aOffset.x, (float)aOffset.y, aZoom,
                        (float)aPageRect.x, (float)aPageRect.y, (float)aPageRect.XMost(), (float)aPageRect.YMost(),
                        aCssPageRect.x, aCssPageRect.y, aCssPageRect.XMost(), aCssPageRect.YMost());
}
Example No. 7
void
CompositorD3D11::ClearRect(const gfx::Rect& aRect)
{
  mContext->OMSetBlendState(mAttachments->mDisabledBlendState, sBlendFactor, 0xFFFFFFFF);

  Matrix4x4 identity;
  memcpy(&mVSConstants.layerTransform, &identity._11, 64);

  mVSConstants.layerQuad = aRect;
  mVSConstants.renderTargetOffset[0] = 0;
  mVSConstants.renderTargetOffset[1] = 0;
  mPSConstants.layerOpacity[0] = 1.0f;

  D3D11_RECT scissor;
  scissor.left = aRect.x;
  scissor.right = aRect.XMost();
  scissor.top = aRect.y;
  scissor.bottom = aRect.YMost();
  mContext->RSSetScissorRects(1, &scissor);
  mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
  mContext->VSSetShader(mAttachments->mVSQuadShader[MaskType::MaskNone], nullptr, 0);

  mContext->PSSetShader(mAttachments->mSolidColorShader[MaskType::MaskNone], nullptr, 0);
  mPSConstants.layerColor[0] = 0;
  mPSConstants.layerColor[1] = 0;
  mPSConstants.layerColor[2] = 0;
  mPSConstants.layerColor[3] = 0;

  if (!UpdateConstantBuffers()) {
    NS_WARNING("Failed to update shader constant buffers");
    return;
  }

  mContext->Draw(4, 0);

  mContext->OMSetBlendState(mAttachments->mPremulBlendState, sBlendFactor, 0xFFFFFFFF);
}
Example No. 8
size_t
DecomposeIntoNoRepeatRects(const gfx::Rect& aRect,
                           const gfx::Rect& aTexCoordRect,
                           decomposedRectArrayT* aLayerRects,
                           decomposedRectArrayT* aTextureRects)
{
  gfx::Rect texCoordRect = aTexCoordRect;

  // If the texture should be flipped, it will have negative height. Detect that
  // here and compensate for it. We will flip each rect as we emit it.
  bool flipped = false;
  if (texCoordRect.height < 0) {
    flipped = true;
    texCoordRect.y += texCoordRect.height;
    texCoordRect.height = -texCoordRect.height;
  }

  // Wrap the texture coordinates so they are within [0,1] and cap width/height
  // at 1. We rely on this below.
  texCoordRect = gfx::Rect(gfx::Point(WrapTexCoord(texCoordRect.x),
                                      WrapTexCoord(texCoordRect.y)),
                           gfx::Size(std::min(texCoordRect.width, 1.0f),
                                     std::min(texCoordRect.height, 1.0f)));

  NS_ASSERTION(texCoordRect.x >= 0.0f && texCoordRect.x <= 1.0f &&
               texCoordRect.y >= 0.0f && texCoordRect.y <= 1.0f &&
               texCoordRect.width >= 0.0f && texCoordRect.width <= 1.0f &&
               texCoordRect.height >= 0.0f && texCoordRect.height <= 1.0f &&
               texCoordRect.XMost() >= 0.0f && texCoordRect.XMost() <= 2.0f &&
               texCoordRect.YMost() >= 0.0f && texCoordRect.YMost() <= 2.0f,
               "We just wrapped the texture coordinates, didn't we?");

  // Get the top left and bottom right points of the rectangle. Note that
  // tl.x/tl.y are within [0,1] but br.x/br.y are within [0,2].
  gfx::Point tl = texCoordRect.TopLeft();
  gfx::Point br = texCoordRect.BottomRight();

  NS_ASSERTION(tl.x >= 0.0f && tl.x <= 1.0f &&
               tl.y >= 0.0f && tl.y <= 1.0f &&
               br.x >= tl.x && br.x <= 2.0f &&
               br.y >= tl.y && br.y <= 2.0f &&
               FuzzyLTE(br.x - tl.x, 1.0f) &&
               FuzzyLTE(br.y - tl.y, 1.0f),
               "Somehow generated invalid texture coordinates");

  // Then check if we wrap in either the x or y axis.
  bool xwrap = br.x > 1.0f;
  bool ywrap = br.y > 1.0f;

  // If xwrap is false, the texture will be sampled from tl.x .. br.x.
  // If xwrap is true, then it will be split into tl.x .. 1.0, and
  // 0.0 .. WrapTexCoord(br.x). Same for the Y axis. The destination
  // rectangle is also split appropriately, according to the calculated
  // xmid/ymid values.
  if (!xwrap && !ywrap) {
    SetRects(0, aLayerRects, aTextureRects,
             aRect.x, aRect.y, aRect.XMost(), aRect.YMost(),
             tl.x, tl.y, br.x, br.y,
             flipped);
    return 1;
  }

  // If we are dealing with wrapping br.x and br.y are greater than 1.0 so
  // wrap them here as well.
  br = gfx::Point(xwrap ? WrapTexCoord(br.x) : br.x,
                  ywrap ? WrapTexCoord(br.y) : br.y);

  // If we wrap around along the x axis, we will draw first from
  // tl.x .. 1.0 and then from 0.0 .. br.x (which we just wrapped above).
  // The same applies for the Y axis. The midpoints we calculate here are
  // only valid if we actually wrap around.
  GLfloat xmid = aRect.x + (1.0f - tl.x) / texCoordRect.width * aRect.width;
  GLfloat ymid = aRect.y + (1.0f - tl.y) / texCoordRect.height * aRect.height;

  NS_ASSERTION(!xwrap ||
               (xmid > aRect.x &&
                xmid < aRect.XMost() &&
                FuzzyEqual((xmid - aRect.x) + (aRect.XMost() - xmid), aRect.width)),
               "xmid should be within [x,XMost()] and the wrapped rect should have the same width");
  NS_ASSERTION(!ywrap ||
               (ymid > aRect.y &&
                ymid < aRect.YMost() &&
                FuzzyEqual((ymid - aRect.y) + (aRect.YMost() - ymid), aRect.height)),
               "ymid should be within [y,YMost()] and the wrapped rect should have the same height");

  if (!xwrap && ywrap) {
    SetRects(0, aLayerRects, aTextureRects,
             aRect.x, aRect.y, aRect.XMost(), ymid,
             tl.x, tl.y, br.x, 1.0f,
             flipped);
    SetRects(1, aLayerRects, aTextureRects,
             aRect.x, ymid, aRect.XMost(), aRect.YMost(),
             tl.x, 0.0f, br.x, br.y,
             flipped);
    return 2;
  }

  if (xwrap && !ywrap) {
    SetRects(0, aLayerRects, aTextureRects,
             aRect.x, aRect.y, xmid, aRect.YMost(),
             tl.x, tl.y, 1.0f, br.y,
             flipped);
    SetRects(1, aLayerRects, aTextureRects,
             xmid, aRect.y, aRect.XMost(), aRect.YMost(),
             0.0f, tl.y, br.x, br.y,
             flipped);
    return 2;
  }

  SetRects(0, aLayerRects, aTextureRects,
           aRect.x, aRect.y, xmid, ymid,
           tl.x, tl.y, 1.0f, 1.0f,
           flipped);
  SetRects(1, aLayerRects, aTextureRects,
           xmid, aRect.y, aRect.XMost(), ymid,
           0.0f, tl.y, br.x, 1.0f,
           flipped);
  SetRects(2, aLayerRects, aTextureRects,
           aRect.x, ymid, xmid, aRect.YMost(),
           tl.x, 0.0f, 1.0f, br.y,
           flipped);
  SetRects(3, aLayerRects, aTextureRects,
           xmid, ymid, aRect.XMost(), aRect.YMost(),
           0.0f, 0.0f, br.x, br.y,
           flipped);
  return 4;
}
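
A one-dimensional sketch of the wrapping split performed above, with assumed values: a texture span that crosses u = 1 is emitted as two pieces, [tl, 1] and [0, wrap(br)], and the destination rect is cut at the corresponding xmid, exactly as in the xwrap branches.

// WrapTexCoord here is an assumed fract()-style helper, matching how it is used above.
#include <cmath>
#include <cstdio>

float WrapTexCoord(float aValue) { return aValue - std::floor(aValue); }

int main() {
  const float tl = 0.75f;          // left texture coordinate
  const float width = 0.5f;        // texture span (<= 1 after wrapping)
  const float br = tl + width;     // 1.25f, so this span wraps

  const float destX = 100.0f, destWidth = 200.0f;  // destination rect along x

  if (br > 1.0f) {
    // The fraction of the span drawn before wrapping decides where to cut the destination.
    const float xmid = destX + (1.0f - tl) / width * destWidth;  // 100 + 0.25/0.5 * 200 = 200
    printf("piece 1: dest [%g, %g]  tex [%g, 1]\n", destX, xmid, tl);
    printf("piece 2: dest [%g, %g]  tex [0, %g]\n", xmid, destX + destWidth, WrapTexCoord(br));
  }
  return 0;
}
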
Example No. 9
void
CompositorD3D11::DrawQuad(const gfx::Rect& aRect,
                          const gfx::Rect& aClipRect,
                          const EffectChain& aEffectChain,
                          gfx::Float aOpacity,
                          const gfx::Matrix4x4& aTransform)
{
  MOZ_ASSERT(mCurrentRT, "No render target");
  memcpy(&mVSConstants.layerTransform, &aTransform._11, 64);
  IntPoint origin = mCurrentRT->GetOrigin();
  mVSConstants.renderTargetOffset[0] = origin.x;
  mVSConstants.renderTargetOffset[1] = origin.y;

  mPSConstants.layerOpacity[0] = aOpacity;

  bool restoreBlendMode = false;

  MaskType maskType = MaskType::MaskNone;

  if (aEffectChain.mSecondaryEffects[EffectTypes::MASK]) {
    if (aTransform.Is2D()) {
      maskType = MaskType::Mask2d;
    } else {
      MOZ_ASSERT(aEffectChain.mPrimaryEffect->mType == EffectTypes::RGB);
      maskType = MaskType::Mask3d;
    }

    EffectMask* maskEffect =
      static_cast<EffectMask*>(aEffectChain.mSecondaryEffects[EffectTypes::MASK].get());
    TextureSourceD3D11* source = maskEffect->mMaskTexture->AsSourceD3D11();

    if (!source) {
      NS_WARNING("Missing texture source!");
      return;
    }

    RefPtr<ID3D11ShaderResourceView> view;
    HRESULT hr = mDevice->CreateShaderResourceView(source->GetD3D11Texture(), nullptr, byRef(view));
    if (Failed(hr)) {
      // XXX - There's a chance we won't be able to render anything, should we
      // just crash release builds?
      return;
    }

    ID3D11ShaderResourceView* srView = view;
    mContext->PSSetShaderResources(3, 1, &srView);

    const gfx::Matrix4x4& maskTransform = maskEffect->mMaskTransform;
    NS_ASSERTION(maskTransform.Is2D(), "How did we end up with a 3D transform here?!");
    Rect bounds = Rect(Point(), Size(maskEffect->mSize));

    mVSConstants.maskQuad = maskTransform.As2D().TransformBounds(bounds);
  }


  D3D11_RECT scissor;
  scissor.left = aClipRect.x;
  scissor.right = aClipRect.XMost();
  scissor.top = aClipRect.y;
  scissor.bottom = aClipRect.YMost();
  mContext->RSSetScissorRects(1, &scissor);
  mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
  mContext->VSSetShader(mAttachments->mVSQuadShader[maskType], nullptr, 0);

  const Rect* pTexCoordRect = nullptr;

  switch (aEffectChain.mPrimaryEffect->mType) {
  case EffectTypes::SOLID_COLOR: {
      SetPSForEffect(aEffectChain.mPrimaryEffect, maskType, SurfaceFormat::UNKNOWN);

      Color color =
        static_cast<EffectSolidColor*>(aEffectChain.mPrimaryEffect.get())->mColor;
      mPSConstants.layerColor[0] = color.r * color.a * aOpacity;
      mPSConstants.layerColor[1] = color.g * color.a * aOpacity;
      mPSConstants.layerColor[2] = color.b * color.a * aOpacity;
      mPSConstants.layerColor[3] = color.a * aOpacity;
    }
    break;
  case EffectTypes::RGB:
  case EffectTypes::RENDER_TARGET:
    {
      TexturedEffect* texturedEffect =
        static_cast<TexturedEffect*>(aEffectChain.mPrimaryEffect.get());

      pTexCoordRect = &texturedEffect->mTextureCoords;

      TextureSourceD3D11* source = texturedEffect->mTexture->AsSourceD3D11();

      if (!source) {
        NS_WARNING("Missing texture source!");
        return;
      }

      SetPSForEffect(aEffectChain.mPrimaryEffect, maskType, texturedEffect->mTexture->GetFormat());

      RefPtr<ID3D11ShaderResourceView> view;
      HRESULT hr = mDevice->CreateShaderResourceView(source->GetD3D11Texture(), nullptr, byRef(view));
      if (Failed(hr)) {
        // XXX - There's a chance we won't be able to render anything, should we
        // just crash release builds?
        return;
      }

      ID3D11ShaderResourceView* srView = view;
      mContext->PSSetShaderResources(0, 1, &srView);

      if (!texturedEffect->mPremultiplied) {
        mContext->OMSetBlendState(mAttachments->mNonPremulBlendState, sBlendFactor, 0xFFFFFFFF);
        restoreBlendMode = true;
      }

      SetSamplerForFilter(texturedEffect->mFilter);
    }
    break;
  case EffectTypes::YCBCR: {
      EffectYCbCr* ycbcrEffect =
        static_cast<EffectYCbCr*>(aEffectChain.mPrimaryEffect.get());

      SetSamplerForFilter(Filter::LINEAR);

      pTexCoordRect = &ycbcrEffect->mTextureCoords;

      const int Y = 0, Cb = 1, Cr = 2;
      TextureSource* source = ycbcrEffect->mTexture;

      if (!source) {
        NS_WARNING("No texture to composite");
        return;
      }

      SetPSForEffect(aEffectChain.mPrimaryEffect, maskType, ycbcrEffect->mTexture->GetFormat());

      if (!source->GetSubSource(Y) || !source->GetSubSource(Cb) || !source->GetSubSource(Cr)) {
        // This can happen if we failed to upload the textures, most likely
        // because of unsupported dimensions (we don't tile YCbCr textures).
        return;
      }

      TextureSourceD3D11* sourceY  = source->GetSubSource(Y)->AsSourceD3D11();
      TextureSourceD3D11* sourceCb = source->GetSubSource(Cb)->AsSourceD3D11();
      TextureSourceD3D11* sourceCr = source->GetSubSource(Cr)->AsSourceD3D11();

      HRESULT hr;

      RefPtr<ID3D11ShaderResourceView> views[3];

      hr = mDevice->CreateShaderResourceView(sourceY->GetD3D11Texture(),
                                             nullptr, byRef(views[0]));
      if (Failed(hr)) {
        return;
      }

      hr = mDevice->CreateShaderResourceView(sourceCb->GetD3D11Texture(),
                                             nullptr, byRef(views[1]));
      if (Failed(hr)) {
        return;
      }

      hr = mDevice->CreateShaderResourceView(sourceCr->GetD3D11Texture(),
                                             nullptr, byRef(views[2]));
      if (Failed(hr)) {
        return;
      }

      ID3D11ShaderResourceView* srViews[3] = { views[0], views[1], views[2] };
      mContext->PSSetShaderResources(0, 3, srViews);
    }
    break;
  case EffectTypes::COMPONENT_ALPHA:
    {
      MOZ_ASSERT(gfxPrefs::ComponentAlphaEnabled());
      MOZ_ASSERT(mAttachments->mComponentBlendState);
      EffectComponentAlpha* effectComponentAlpha =
        static_cast<EffectComponentAlpha*>(aEffectChain.mPrimaryEffect.get());

      TextureSourceD3D11* sourceOnWhite = effectComponentAlpha->mOnWhite->AsSourceD3D11();
      TextureSourceD3D11* sourceOnBlack = effectComponentAlpha->mOnBlack->AsSourceD3D11();

      if (!sourceOnWhite || !sourceOnBlack) {
        NS_WARNING("Missing texture source(s)!");
        return;
      }

      SetPSForEffect(aEffectChain.mPrimaryEffect, maskType, effectComponentAlpha->mOnWhite->GetFormat());

      SetSamplerForFilter(effectComponentAlpha->mFilter);

      pTexCoordRect = &effectComponentAlpha->mTextureCoords;

      RefPtr<ID3D11ShaderResourceView> views[2];

      HRESULT hr;

      hr = mDevice->CreateShaderResourceView(sourceOnBlack->GetD3D11Texture(), nullptr, byRef(views[0]));
      if (Failed(hr)) {
        return;
      }
      hr = mDevice->CreateShaderResourceView(sourceOnWhite->GetD3D11Texture(), nullptr, byRef(views[1]));
      if (Failed(hr)) {
        return;
      }

      ID3D11ShaderResourceView* srViews[2] = { views[0], views[1] };
      mContext->PSSetShaderResources(0, 2, srViews);

      mContext->OMSetBlendState(mAttachments->mComponentBlendState, sBlendFactor, 0xFFFFFFFF);
      restoreBlendMode = true;
    }
    break;
  default:
    NS_WARNING("Unknown shader type");
    return;
  }

  if (pTexCoordRect) {
    Rect layerRects[4];
    Rect textureRects[4];
    size_t rects = DecomposeIntoNoRepeatRects(aRect,
                                              *pTexCoordRect,
                                              &layerRects,
                                              &textureRects);
    for (size_t i = 0; i < rects; i++) {
      mVSConstants.layerQuad = layerRects[i];
      mVSConstants.textureCoords = textureRects[i];

      if (!UpdateConstantBuffers()) {
        NS_WARNING("Failed to update shader constant buffers");
        break;
      }
      mContext->Draw(4, 0);
    }
  } else {
    mVSConstants.layerQuad = aRect;

    if (!UpdateConstantBuffers()) {
      NS_WARNING("Failed to update shader constant buffers");
    } else {
      mContext->Draw(4, 0);
    }
  }

  if (restoreBlendMode) {
    mContext->OMSetBlendState(mAttachments->mPremulBlendState, sBlendFactor, 0xFFFFFFFF);
  }
}
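
A small sketch of the SOLID_COLOR constant setup above, with assumed inputs: the layer color is premultiplied by its own alpha and by the layer opacity before being uploaded, so the premultiplied-alpha blend state composites it correctly.

#include <cstdio>

struct Color { float r, g, b, a; };  // stand-in for gfx::Color

void PremultiplyIntoConstants(const Color& aColor, float aOpacity, float aOut[4]) {
  aOut[0] = aColor.r * aColor.a * aOpacity;
  aOut[1] = aColor.g * aColor.a * aOpacity;
  aOut[2] = aColor.b * aColor.a * aOpacity;
  aOut[3] = aColor.a * aOpacity;
}

int main() {
  float layerColor[4];
  PremultiplyIntoConstants({1.0f, 0.5f, 0.0f, 0.5f}, 0.8f, layerColor);
  printf("%g %g %g %g\n", layerColor[0], layerColor[1], layerColor[2], layerColor[3]);
  // 0.4 0.2 0 0.4
  return 0;
}
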
Example No. 10
void
CompositorD3D9::DrawQuad(const gfx::Rect &aRect,
                         const gfx::Rect &aClipRect,
                         const EffectChain &aEffectChain,
                         gfx::Float aOpacity,
                         const gfx::Matrix4x4 &aTransform)
{
  if (!mDeviceManager) {
    return;
  }

  IDirect3DDevice9* d3d9Device = device();
  MOZ_ASSERT(d3d9Device, "We should be able to get a device now");

  MOZ_ASSERT(mCurrentRT, "No render target");
  d3d9Device->SetVertexShaderConstantF(CBmLayerTransform, &aTransform._11, 4);

  IntPoint origin = mCurrentRT->GetOrigin();
  float renderTargetOffset[] = { float(origin.x), float(origin.y), 0, 0 };
  d3d9Device->SetVertexShaderConstantF(CBvRenderTargetOffset,
                                       renderTargetOffset,
                                       1);
  d3d9Device->SetVertexShaderConstantF(CBvLayerQuad,
                                       ShaderConstantRect(aRect.x,
                                                          aRect.y,
                                                          aRect.width,
                                                          aRect.height),
                                       1);
  if (aEffectChain.mPrimaryEffect->mType != EffectTypes::SOLID_COLOR) {
    float opacity[4];
    /*
     * We always upload a 4 component float, but the shader will use only the
     * first component since it's declared as a 'float'.
     */
    opacity[0] = aOpacity;
    d3d9Device->SetPixelShaderConstantF(CBfLayerOpacity, opacity, 1);
  }

  bool isPremultiplied = true;

  MaskType maskType = MaskType::MaskNone;

  if (aEffectChain.mSecondaryEffects[EffectTypes::MASK]) {
    if (aTransform.Is2D()) {
      maskType = MaskType::Mask2d;
    } else {
      maskType = MaskType::Mask3d;
    }
  }

  RECT scissor;
  scissor.left = aClipRect.x;
  scissor.right = aClipRect.XMost();
  scissor.top = aClipRect.y;
  scissor.bottom = aClipRect.YMost();
  d3d9Device->SetScissorRect(&scissor);

  uint32_t maskTexture = 0;
  switch (aEffectChain.mPrimaryEffect->mType) {
  case EffectTypes::SOLID_COLOR:
    {
      // output color is premultiplied, so we need to adjust all channels.
      Color layerColor =
        static_cast<EffectSolidColor*>(aEffectChain.mPrimaryEffect.get())->mColor;
      float color[4];
      color[0] = layerColor.r * layerColor.a * aOpacity;
      color[1] = layerColor.g * layerColor.a * aOpacity;
      color[2] = layerColor.b * layerColor.a * aOpacity;
      color[3] = layerColor.a * aOpacity;

      d3d9Device->SetPixelShaderConstantF(CBvColor, color, 1);

      maskTexture = mDeviceManager
        ->SetShaderMode(DeviceManagerD3D9::SOLIDCOLORLAYER, maskType);
    }
    break;
  case EffectTypes::RENDER_TARGET:
  case EffectTypes::RGB:
    {
      TexturedEffect* texturedEffect =
        static_cast<TexturedEffect*>(aEffectChain.mPrimaryEffect.get());

      Rect textureCoords = texturedEffect->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      SetSamplerForFilter(texturedEffect->mFilter);

      TextureSourceD3D9* source = texturedEffect->mTexture->AsSourceD3D9();
      d3d9Device->SetTexture(0, source->GetD3D9Texture());

      maskTexture = mDeviceManager
        ->SetShaderMode(ShaderModeForEffectType(aEffectChain.mPrimaryEffect->mType,
                                                texturedEffect->mTexture->GetFormat()),
                        maskType);

      isPremultiplied = texturedEffect->mPremultiplied;
    }
    break;
  case EffectTypes::YCBCR:
    {
      EffectYCbCr* ycbcrEffect =
        static_cast<EffectYCbCr*>(aEffectChain.mPrimaryEffect.get());

      SetSamplerForFilter(Filter::LINEAR);

      Rect textureCoords = ycbcrEffect->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      const int Y = 0, Cb = 1, Cr = 2;
      TextureSource* source = ycbcrEffect->mTexture;

      if (!source) {
        NS_WARNING("No texture to composite");
        return;
      }

      if (!source->GetSubSource(Y) || !source->GetSubSource(Cb) || !source->GetSubSource(Cr)) {
        // This can happen if we failed to upload the textures, most likely
        // because of unsupported dimensions (we don't tile YCbCr textures).
        return;
      }

      TextureSourceD3D9* sourceY  = source->GetSubSource(Y)->AsSourceD3D9();
      TextureSourceD3D9* sourceCb = source->GetSubSource(Cb)->AsSourceD3D9();
      TextureSourceD3D9* sourceCr = source->GetSubSource(Cr)->AsSourceD3D9();


      MOZ_ASSERT(sourceY->GetD3D9Texture());
      MOZ_ASSERT(sourceCb->GetD3D9Texture());
      MOZ_ASSERT(sourceCr->GetD3D9Texture());

      /*
       * Send 3d control data and metadata
       */
      if (mDeviceManager->GetNv3DVUtils()) {
        Nv_Stereo_Mode mode;
        switch (source->AsSourceD3D9()->GetStereoMode()) {
        case StereoMode::LEFT_RIGHT:
          mode = NV_STEREO_MODE_LEFT_RIGHT;
          break;
        case StereoMode::RIGHT_LEFT:
          mode = NV_STEREO_MODE_RIGHT_LEFT;
          break;
        case StereoMode::BOTTOM_TOP:
          mode = NV_STEREO_MODE_BOTTOM_TOP;
          break;
        case StereoMode::TOP_BOTTOM:
          mode = NV_STEREO_MODE_TOP_BOTTOM;
          break;
        case StereoMode::MONO:
          mode = NV_STEREO_MODE_MONO;
          break;
        }

        // Send control data even in mono case so driver knows to leave stereo mode.
        mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);

        if (source->AsSourceD3D9()->GetStereoMode() != StereoMode::MONO) {
          mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);

          nsRefPtr<IDirect3DSurface9> renderTarget;
          d3d9Device->GetRenderTarget(0, getter_AddRefs(renderTarget));
          mDeviceManager->GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)aRect.width,
                                                             (unsigned int)aRect.height,
                                                             (HANDLE)(sourceY->GetD3D9Texture()),
                                                             (HANDLE)(renderTarget));
        }
      }

      // Linear scaling is default here, adhering to mFilter is difficult since
      // presumably even with point filtering we'll still want chroma upsampling
      // to be linear. In the current approach we can't.
      device()->SetTexture(Y, sourceY->GetD3D9Texture());
      device()->SetTexture(Cb, sourceCb->GetD3D9Texture());
      device()->SetTexture(Cr, sourceCr->GetD3D9Texture());
      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::YCBCRLAYER, maskType);
    }
    break;
  case EffectTypes::COMPONENT_ALPHA:
    {
      MOZ_ASSERT(gfxPrefs::ComponentAlphaEnabled());
      EffectComponentAlpha* effectComponentAlpha =
        static_cast<EffectComponentAlpha*>(aEffectChain.mPrimaryEffect.get());
      TextureSourceD3D9* sourceOnWhite = effectComponentAlpha->mOnWhite->AsSourceD3D9();
      TextureSourceD3D9* sourceOnBlack = effectComponentAlpha->mOnBlack->AsSourceD3D9();

      Rect textureCoords = effectComponentAlpha->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      SetSamplerForFilter(effectComponentAlpha->mFilter);

      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::COMPONENTLAYERPASS1, maskType);
      SetMask(aEffectChain, maskTexture);
      d3d9Device->SetTexture(0, sourceOnBlack->GetD3D9Texture());
      d3d9Device->SetTexture(1, sourceOnWhite->GetD3D9Texture());
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ZERO);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCCOLOR);
      d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::COMPONENTLAYERPASS2, maskType);
      SetMask(aEffectChain, maskTexture);
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_ONE);
      d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

      // Restore defaults
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
      d3d9Device->SetTexture(1, nullptr);
    }
    return;
  default:
    NS_WARNING("Unknown shader type");
    return;
  }

  SetMask(aEffectChain, maskTexture);

  if (!isPremultiplied) {
    d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
  }

  d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

  if (!isPremultiplied) {
    d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
  }
}
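
A sketch of what toggling D3DRS_SRCBLEND between D3DBLEND_SRCALPHA and D3DBLEND_ONE amounts to, shown as a single software-blended channel with assumed values: a non-premultiplied source needs the src * srcAlpha factor, while a premultiplied source already carries its alpha in the color channels. The destination blend stays INVSRCALPHA in both cases.

#include <cstdio>

float OverNonPremultiplied(float aSrc, float aSrcAlpha, float aDst) {
  return aSrc * aSrcAlpha + aDst * (1.0f - aSrcAlpha);   // SRCBLEND = SRCALPHA
}

float OverPremultiplied(float aSrcPremul, float aSrcAlpha, float aDst) {
  return aSrcPremul + aDst * (1.0f - aSrcAlpha);         // SRCBLEND = ONE
}

int main() {
  // The same source pixel, expressed both ways, blends to the same result.
  const float srcAlpha = 0.25f, src = 0.8f, dst = 0.4f;
  printf("%g\n", OverNonPremultiplied(src, srcAlpha, dst));          // 0.5
  printf("%g\n", OverPremultiplied(src * srcAlpha, srcAlpha, dst));  // 0.5
  return 0;
}
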
Example No. 11
void
CompositorD3D11::DrawQuad(const gfx::Rect& aRect,
                          const gfx::Rect& aClipRect,
                          const EffectChain& aEffectChain,
                          gfx::Float aOpacity,
                          const gfx::Matrix4x4& aTransform,
                          const gfx::Point& aOffset)
{
  MOZ_ASSERT(mCurrentRT, "No render target");
  memcpy(&mVSConstants.layerTransform, &aTransform._11, 64);
  mVSConstants.renderTargetOffset[0] = aOffset.x;
  mVSConstants.renderTargetOffset[1] = aOffset.y;
  mVSConstants.layerQuad = aRect;

  mPSConstants.layerOpacity[0] = aOpacity;

  bool restoreBlendMode = false;

  MaskType maskType = MaskNone;

  if (aEffectChain.mSecondaryEffects[EFFECT_MASK]) {
    if (aTransform.Is2D()) {
      maskType = Mask2d;
    } else {
      MOZ_ASSERT(aEffectChain.mPrimaryEffect->mType == EFFECT_BGRA);
      maskType = Mask3d;
    }

    EffectMask* maskEffect =
      static_cast<EffectMask*>(aEffectChain.mSecondaryEffects[EFFECT_MASK].get());
    TextureSourceD3D11* source = maskEffect->mMaskTexture->AsSourceD3D11();

    RefPtr<ID3D11ShaderResourceView> view;
    mDevice->CreateShaderResourceView(source->GetD3D11Texture(), nullptr, byRef(view));

    ID3D11ShaderResourceView* srView = view;
    mContext->PSSetShaderResources(3, 1, &srView);

    const gfx::Matrix4x4& maskTransform = maskEffect->mMaskTransform;
    NS_ASSERTION(maskTransform.Is2D(), "How did we end up with a 3D transform here?!");
    Rect bounds = Rect(Point(), Size(maskEffect->mSize));

    mVSConstants.maskQuad = maskTransform.As2D().TransformBounds(bounds);
  }


  D3D11_RECT scissor;
  scissor.left = aClipRect.x;
  scissor.right = aClipRect.XMost();
  scissor.top = aClipRect.y;
  scissor.bottom = aClipRect.YMost();
  mContext->RSSetScissorRects(1, &scissor);
  mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
  mContext->VSSetShader(mAttachments->mVSQuadShader[maskType], nullptr, 0);

  SetPSForEffect(aEffectChain.mPrimaryEffect, maskType);

  switch (aEffectChain.mPrimaryEffect->mType) {
  case EFFECT_SOLID_COLOR: {
      Color color =
        static_cast<EffectSolidColor*>(aEffectChain.mPrimaryEffect.get())->mColor;
      mPSConstants.layerColor[0] = color.r * color.a * aOpacity;
      mPSConstants.layerColor[1] = color.g * color.a * aOpacity;
      mPSConstants.layerColor[2] = color.b * color.a * aOpacity;
      mPSConstants.layerColor[3] = color.a * aOpacity;
    }
    break;
  case EFFECT_BGRX:
  case EFFECT_BGRA:
  case EFFECT_RENDER_TARGET:
    {
      TexturedEffect* texturedEffect =
        static_cast<TexturedEffect*>(aEffectChain.mPrimaryEffect.get());

      mVSConstants.textureCoords = texturedEffect->mTextureCoords;

      TextureSourceD3D11* source = texturedEffect->mTexture->AsSourceD3D11();

      RefPtr<ID3D11ShaderResourceView> view;
      mDevice->CreateShaderResourceView(source->GetD3D11Texture(), nullptr, byRef(view));

      ID3D11ShaderResourceView* srView = view;
      mContext->PSSetShaderResources(0, 1, &srView);

      if (!texturedEffect->mPremultiplied) {
        mContext->OMSetBlendState(mAttachments->mNonPremulBlendState, sBlendFactor, 0xFFFFFFFF);
        restoreBlendMode = true;
      }

      SetSamplerForFilter(texturedEffect->mFilter);
    }
    break;
  case EFFECT_YCBCR: {
      EffectYCbCr* ycbcrEffect =
        static_cast<EffectYCbCr*>(aEffectChain.mPrimaryEffect.get());

      SetSamplerForFilter(FILTER_LINEAR);

      mVSConstants.textureCoords = ycbcrEffect->mTextureCoords;

      TextureSourceD3D11* source = ycbcrEffect->mTexture->AsSourceD3D11();
      TextureSourceD3D11::YCbCrTextures textures = source->GetYCbCrTextures();

      RefPtr<ID3D11ShaderResourceView> views[3];
      mDevice->CreateShaderResourceView(textures.mY, nullptr, byRef(views[0]));
      mDevice->CreateShaderResourceView(textures.mCb, nullptr, byRef(views[1]));
      mDevice->CreateShaderResourceView(textures.mCr, nullptr, byRef(views[2]));

      ID3D11ShaderResourceView* srViews[3] = { views[0], views[1], views[2] };
      mContext->PSSetShaderResources(0, 3, srViews);
    }
    break;
  case EFFECT_COMPONENT_ALPHA:
    {
      MOZ_ASSERT(gfxPlatform::ComponentAlphaEnabled());
      EffectComponentAlpha* effectComponentAlpha =
        static_cast<EffectComponentAlpha*>(aEffectChain.mPrimaryEffect.get());
      TextureSourceD3D11* sourceOnWhite = effectComponentAlpha->mOnWhite->AsSourceD3D11();
      TextureSourceD3D11* sourceOnBlack = effectComponentAlpha->mOnBlack->AsSourceD3D11();
      SetSamplerForFilter(effectComponentAlpha->mFilter);

      mVSConstants.textureCoords = effectComponentAlpha->mTextureCoords;
      RefPtr<ID3D11ShaderResourceView> views[2];
      mDevice->CreateShaderResourceView(sourceOnBlack->GetD3D11Texture(), nullptr, byRef(views[0]));
      mDevice->CreateShaderResourceView(sourceOnWhite->GetD3D11Texture(), nullptr, byRef(views[1]));

      ID3D11ShaderResourceView* srViews[2] = { views[0], views[1] };
      mContext->PSSetShaderResources(0, 2, srViews);

      mContext->OMSetBlendState(mAttachments->mComponentBlendState, sBlendFactor, 0xFFFFFFFF);
      restoreBlendMode = true;
    }
    break;
  default:
    NS_WARNING("Unknown shader type");
    return;
  }
  UpdateConstantBuffers();

  mContext->Draw(4, 0);
  if (restoreBlendMode) {
    mContext->OMSetBlendState(mAttachments->mPremulBlendState, sBlendFactor, 0xFFFFFFFF);
  }
}
Example No. 12
void
CompositorD3D9::DrawQuad(const gfx::Rect &aRect,
                         const gfx::Rect &aClipRect,
                         const EffectChain &aEffectChain,
                         gfx::Float aOpacity,
                         const gfx::Matrix4x4& aTransform,
                         const gfx::Rect& aVisibleRect)
{
  if (!mDeviceManager) {
    return;
  }

  IDirect3DDevice9* d3d9Device = device();
  MOZ_ASSERT(d3d9Device, "We should be able to get a device now");

  MOZ_ASSERT(mCurrentRT, "No render target");
  d3d9Device->SetVertexShaderConstantF(CBmLayerTransform, &aTransform._11, 4);

  IntPoint origin = mCurrentRT->GetOrigin();
  float renderTargetOffset[] = { float(origin.x), float(origin.y), 0, 0 };
  d3d9Device->SetVertexShaderConstantF(CBvRenderTargetOffset,
                                       renderTargetOffset,
                                       1);
  d3d9Device->SetVertexShaderConstantF(CBvLayerQuad,
                                       ShaderConstantRect(aRect.x,
                                                          aRect.y,
                                                          aRect.width,
                                                          aRect.height),
                                       1);

  if (aEffectChain.mPrimaryEffect->mType != EffectTypes::SOLID_COLOR) {
    float opacity[4];
    /*
     * We always upload a 4 component float, but the shader will use only the
     * first component since it's declared as a 'float'.
     */
    opacity[0] = aOpacity;
    d3d9Device->SetPixelShaderConstantF(CBfLayerOpacity, opacity, 1);
  }

  bool isPremultiplied = true;

  MaskType maskType = MaskType::MaskNone;

  if (aEffectChain.mSecondaryEffects[EffectTypes::MASK]) {
    maskType = MaskType::Mask;
  }

  gfx::Rect backdropDest;
  gfx::IntRect backdropRect;
  gfx::Matrix4x4 backdropTransform;
  RefPtr<IDirect3DTexture9> backdropTexture;
  gfx::CompositionOp blendMode = gfx::CompositionOp::OP_OVER;

  if (aEffectChain.mSecondaryEffects[EffectTypes::BLEND_MODE]) {
    EffectBlendMode *blendEffect =
      static_cast<EffectBlendMode*>(aEffectChain.mSecondaryEffects[EffectTypes::BLEND_MODE].get());
    blendMode = blendEffect->mBlendMode;

    // Pixel Shader Model 2.0 is too limited to perform blending in the same way
    // as Direct3D 11 - there are too many instructions, and we don't have
    // configurable shaders (as we do with OGL) that would avoid a huge shader
    // matrix.
    //
    // Instead, we use a multi-step process for blending on D3D9:
    //  (1) Capture the backdrop into a temporary surface.
    //  (2) Render the effect chain onto the backdrop, with OP_SOURCE.
    //  (3) Capture the backdrop again into another surface - these are our source pixels.
    //  (4) Perform a final blend step using software.
    //  (5) Blit the blended result back to the render target.
    if (BlendOpIsMixBlendMode(blendMode)) {
      backdropRect = ComputeBackdropCopyRect(
        aRect, aClipRect, aTransform, &backdropTransform, &backdropDest);

      // If this fails, don't set a blend op.
      backdropTexture = CreateTexture(backdropRect, mCurrentRT, backdropRect.TopLeft());
      if (!backdropTexture) {
        blendMode = gfx::CompositionOp::OP_OVER;
      }
    }
  }

  RECT scissor;
  scissor.left = aClipRect.x;
  scissor.right = aClipRect.XMost();
  scissor.top = aClipRect.y;
  scissor.bottom = aClipRect.YMost();
  d3d9Device->SetScissorRect(&scissor);

  uint32_t maskTexture = 0;
  switch (aEffectChain.mPrimaryEffect->mType) {
  case EffectTypes::SOLID_COLOR:
    {
      // output color is premultiplied, so we need to adjust all channels.
      Color layerColor =
        static_cast<EffectSolidColor*>(aEffectChain.mPrimaryEffect.get())->mColor;
      float color[4];
      color[0] = layerColor.r * layerColor.a * aOpacity;
      color[1] = layerColor.g * layerColor.a * aOpacity;
      color[2] = layerColor.b * layerColor.a * aOpacity;
      color[3] = layerColor.a * aOpacity;

      d3d9Device->SetPixelShaderConstantF(CBvColor, color, 1);

      maskTexture = mDeviceManager
        ->SetShaderMode(DeviceManagerD3D9::SOLIDCOLORLAYER, maskType);
    }
    break;
  case EffectTypes::RENDER_TARGET:
  case EffectTypes::RGB:
    {
      TexturedEffect* texturedEffect =
        static_cast<TexturedEffect*>(aEffectChain.mPrimaryEffect.get());

      Rect textureCoords = texturedEffect->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      SetSamplerForFilter(texturedEffect->mFilter);

      TextureSourceD3D9* source = texturedEffect->mTexture->AsSourceD3D9();
      d3d9Device->SetTexture(0, source->GetD3D9Texture());

      maskTexture = mDeviceManager
        ->SetShaderMode(ShaderModeForEffectType(aEffectChain.mPrimaryEffect->mType,
                                                texturedEffect->mTexture->GetFormat()),
                        maskType);

      isPremultiplied = texturedEffect->mPremultiplied;
    }
    break;
  case EffectTypes::YCBCR:
    {
      EffectYCbCr* ycbcrEffect =
        static_cast<EffectYCbCr*>(aEffectChain.mPrimaryEffect.get());

      SetSamplerForFilter(Filter::LINEAR);

      Rect textureCoords = ycbcrEffect->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      const int Y = 0, Cb = 1, Cr = 2;
      TextureSource* source = ycbcrEffect->mTexture;

      if (!source) {
        NS_WARNING("No texture to composite");
        return;
      }

      if (!source->GetSubSource(Y) || !source->GetSubSource(Cb) || !source->GetSubSource(Cr)) {
        // This can happen if we failed to upload the textures, most likely
        // because of unsupported dimensions (we don't tile YCbCr textures).
        return;
      }

      TextureSourceD3D9* sourceY  = source->GetSubSource(Y)->AsSourceD3D9();
      TextureSourceD3D9* sourceCb = source->GetSubSource(Cb)->AsSourceD3D9();
      TextureSourceD3D9* sourceCr = source->GetSubSource(Cr)->AsSourceD3D9();


      MOZ_ASSERT(sourceY->GetD3D9Texture());
      MOZ_ASSERT(sourceCb->GetD3D9Texture());
      MOZ_ASSERT(sourceCr->GetD3D9Texture());

      /*
       * Send 3d control data and metadata
       */
      if (mDeviceManager->GetNv3DVUtils()) {
        Nv_Stereo_Mode mode;
        switch (source->AsSourceD3D9()->GetStereoMode()) {
        case StereoMode::LEFT_RIGHT:
          mode = NV_STEREO_MODE_LEFT_RIGHT;
          break;
        case StereoMode::RIGHT_LEFT:
          mode = NV_STEREO_MODE_RIGHT_LEFT;
          break;
        case StereoMode::BOTTOM_TOP:
          mode = NV_STEREO_MODE_BOTTOM_TOP;
          break;
        case StereoMode::TOP_BOTTOM:
          mode = NV_STEREO_MODE_TOP_BOTTOM;
          break;
        case StereoMode::MONO:
          mode = NV_STEREO_MODE_MONO;
          break;
        }

        // Send control data even in mono case so driver knows to leave stereo mode.
        mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);

        if (source->AsSourceD3D9()->GetStereoMode() != StereoMode::MONO) {
          mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);

          RefPtr<IDirect3DSurface9> renderTarget;
          d3d9Device->GetRenderTarget(0, getter_AddRefs(renderTarget));
          mDeviceManager->GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)aRect.width,
                                                             (unsigned int)aRect.height,
                                                             (HANDLE)(sourceY->GetD3D9Texture()),
                                                             (HANDLE)(renderTarget));
        }
      }

      // Linear scaling is default here, adhering to mFilter is difficult since
      // presumably even with point filtering we'll still want chroma upsampling
      // to be linear. In the current approach we can't.
      device()->SetTexture(Y, sourceY->GetD3D9Texture());
      device()->SetTexture(Cb, sourceCb->GetD3D9Texture());
      device()->SetTexture(Cr, sourceCr->GetD3D9Texture());
      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::YCBCRLAYER, maskType);
    }
    break;
  case EffectTypes::COMPONENT_ALPHA:
    {
      MOZ_ASSERT(gfxPrefs::ComponentAlphaEnabled());
      EffectComponentAlpha* effectComponentAlpha =
        static_cast<EffectComponentAlpha*>(aEffectChain.mPrimaryEffect.get());
      TextureSourceD3D9* sourceOnWhite = effectComponentAlpha->mOnWhite->AsSourceD3D9();
      TextureSourceD3D9* sourceOnBlack = effectComponentAlpha->mOnBlack->AsSourceD3D9();

      Rect textureCoords = effectComponentAlpha->mTextureCoords;
      d3d9Device->SetVertexShaderConstantF(CBvTextureCoords,
                                           ShaderConstantRect(
                                             textureCoords.x,
                                             textureCoords.y,
                                             textureCoords.width,
                                             textureCoords.height),
                                           1);

      SetSamplerForFilter(effectComponentAlpha->mFilter);

      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::COMPONENTLAYERPASS1, maskType);
      SetMask(aEffectChain, maskTexture);
      d3d9Device->SetTexture(0, sourceOnBlack->GetD3D9Texture());
      d3d9Device->SetTexture(1, sourceOnWhite->GetD3D9Texture());
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ZERO);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCCOLOR);
      d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

      maskTexture = mDeviceManager->SetShaderMode(DeviceManagerD3D9::COMPONENTLAYERPASS2, maskType);
      SetMask(aEffectChain, maskTexture);
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_ONE);
      d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

      // Restore defaults
      d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
      d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
      d3d9Device->SetTexture(1, nullptr);
    }
    return;
  default:
    NS_WARNING("Unknown shader type");
    return;
  }

  SetMask(aEffectChain, maskTexture);

  if (BlendOpIsMixBlendMode(blendMode)) {
    // Use SOURCE instead of OVER to get the original source pixels without
    // having to render to another intermediate target.
    d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_ZERO);
  }
  if (!isPremultiplied) {
    d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
  }

  d3d9Device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);

  // Restore defaults.
  if (BlendOpIsMixBlendMode(blendMode)) {
    d3d9Device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
  }
  if (!isPremultiplied) {
    d3d9Device->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_ONE);
  }

  // Final pass - if mix-blending, do it now that we have the backdrop and
  // source textures.
  if (BlendOpIsMixBlendMode(blendMode)) {
    FinishMixBlend(
      backdropRect,
      backdropDest,
      backdropTransform,
      backdropTexture,
      blendMode);
  }
}
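
A sketch of step (4) from the mix-blend comment above, the software blend of backdrop and source pixels. It shows two representative mix-blend formulas on a single normalized channel; this is an illustration, not the real FinishMixBlend implementation.

#include <cstdio>

// Channels are normalized to [0, 1] and assumed unpremultiplied here.
float BlendMultiply(float aBackdrop, float aSource) {
  return aBackdrop * aSource;
}

float BlendScreen(float aBackdrop, float aSource) {
  return aBackdrop + aSource - aBackdrop * aSource;
}

int main() {
  const float backdrop = 0.5f, source = 0.25f;
  printf("multiply: %g\n", BlendMultiply(backdrop, source));  // 0.125
  printf("screen:   %g\n", BlendScreen(backdrop, source));    // 0.625
  return 0;
}
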
void
CompositorD3D11::DrawVRDistortion(const gfx::Rect& aRect,
                                  const gfx::Rect& aClipRect,
                                  const EffectChain& aEffectChain,
                                  gfx::Float aOpacity,
                                  const gfx::Matrix4x4& aTransform)
{
  MOZ_ASSERT(aEffectChain.mPrimaryEffect->mType == EffectTypes::VR_DISTORTION);

  if (aEffectChain.mSecondaryEffects[EffectTypes::MASK] ||
      aEffectChain.mSecondaryEffects[EffectTypes::BLEND_MODE])
  {
    NS_WARNING("DrawVRDistortion: ignoring secondary effect!");
  }

  HRESULT hr;

  EffectVRDistortion* vrEffect =
    static_cast<EffectVRDistortion*>(aEffectChain.mPrimaryEffect.get());

  TextureSourceD3D11* source = vrEffect->mTexture->AsSourceD3D11();
  gfx::IntSize size = vrEffect->mRenderTarget->GetSize(); // XXX source->GetSize()

  VRHMDInfo* hmdInfo = vrEffect->mHMD;
  VRDistortionConstants shaderConstants;

  // do we need to recreate the VR buffers, since the config has changed?
  if (hmdInfo->GetConfiguration() != mAttachments->mVRConfiguration) {
    D3D11_SUBRESOURCE_DATA sdata = { 0 };
    CD3D11_BUFFER_DESC desc(0, D3D11_BIND_VERTEX_BUFFER, D3D11_USAGE_IMMUTABLE);

    // XXX as an optimization, we should really pack the indices and vertices for both eyes
    // into one buffer instead of needing one eye each.  Then we can just bind them once.
    for (uint32_t eye = 0; eye < 2; eye++) {
      const gfx::VRDistortionMesh& mesh = hmdInfo->GetDistortionMesh(eye);

      desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
      desc.ByteWidth = mesh.mVertices.Length() * sizeof(gfx::VRDistortionVertex);
      sdata.pSysMem = mesh.mVertices.Elements();
      
      hr = mDevice->CreateBuffer(&desc, &sdata, byRef(mAttachments->mVRDistortionVertices[eye]));
      if (FAILED(hr)) {
        NS_WARNING("CreateBuffer failed");
        return;
      }

      desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
      desc.ByteWidth = mesh.mIndices.Length() * sizeof(uint16_t);
      sdata.pSysMem = mesh.mIndices.Elements();

      hr = mDevice->CreateBuffer(&desc, &sdata, byRef(mAttachments->mVRDistortionIndices[eye]));
      if (FAILED(hr)) {
        NS_WARNING("CreateBuffer failed");
        return;
      }

      mAttachments->mVRDistortionIndexCount[eye] = mesh.mIndices.Length();
    }

    mAttachments->mVRConfiguration = hmdInfo->GetConfiguration();
  }

  // XXX do I need to set a scissor rect? Is this the right scissor rect?
  D3D11_RECT scissor;
  scissor.left = aClipRect.x;
  scissor.right = aClipRect.XMost();
  scissor.top = aClipRect.y;
  scissor.bottom = aClipRect.YMost();
  mContext->RSSetScissorRects(1, &scissor);

  // Triangle lists and same layout for both eyes
  mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
  mContext->IASetInputLayout(mAttachments->mVRDistortionInputLayout);

  // Shaders for this HMD
  mContext->VSSetShader(mAttachments->mVRDistortionVS[mAttachments->mVRConfiguration.hmdType], nullptr, 0);
  mContext->PSSetShader(mAttachments->mVRDistortionPS[mAttachments->mVRConfiguration.hmdType], nullptr, 0);

  // This is the source texture SRV for the pixel shader
  // XXX, um should we cache this SRV?
  RefPtr<ID3D11ShaderResourceView> view;
  mDevice->CreateShaderResourceView(source->GetD3D11Texture(), nullptr, byRef(view));
  ID3D11ShaderResourceView* srView = view;
  mContext->PSSetShaderResources(0, 1, &srView);


  gfx::IntSize vpSizeInt = mCurrentRT->GetSize();
  gfx::Size vpSize(vpSizeInt.width, vpSizeInt.height);
  ID3D11Buffer* vbuffer;
  UINT vsize, voffset;

  for (uint32_t eye = 0; eye < 2; eye++) {
    gfx::IntRect eyeViewport;
    eyeViewport.x = eye * size.width / 2;
    eyeViewport.y = 0;
    eyeViewport.width = size.width / 2;
    eyeViewport.height = size.height;

    hmdInfo->FillDistortionConstants(eye,
                                     size, eyeViewport,
                                     vpSize, aRect,
                                     shaderConstants);

    // D3D has clip space top-left as -1,1 so we need to flip the Y coordinate offset here
    shaderConstants.destinationScaleAndOffset[1] = - shaderConstants.destinationScaleAndOffset[1];

    // XXX I really want to write a templated helper for these next 4 lines
    D3D11_MAPPED_SUBRESOURCE resource;
    mContext->Map(mAttachments->mVRDistortionConstants, 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
    *(gfx::VRDistortionConstants*)resource.pData = shaderConstants;
    mContext->Unmap(mAttachments->mVRDistortionConstants, 0);

    // XXX is there a better way to change a bunch of these things from what they were set to
    // in BeginFrame/etc?
    vbuffer = mAttachments->mVRDistortionVertices[eye];
    vsize = sizeof(gfx::VRDistortionVertex);
    voffset = 0;
    mContext->IASetVertexBuffers(0, 1, &vbuffer, &vsize, &voffset);
    mContext->IASetIndexBuffer(mAttachments->mVRDistortionIndices[eye], DXGI_FORMAT_R16_UINT, 0);

    ID3D11Buffer* constBuf = mAttachments->mVRDistortionConstants;
    mContext->VSSetConstantBuffers(0, 1, &constBuf);

    mContext->DrawIndexed(mAttachments->mVRDistortionIndexCount[eye], 0, 0);
  }

  // restore previous configurations
  vbuffer = mAttachments->mVertexBuffer;
  vsize = sizeof(Vertex);
  voffset = 0;
  mContext->IASetVertexBuffers(0, 1, &vbuffer, &vsize, &voffset);
  mContext->IASetIndexBuffer(nullptr, DXGI_FORMAT_R16_UINT, 0);
  mContext->IASetInputLayout(mAttachments->mInputLayout);
}
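
A sketch of the per-eye viewport computation in the loop above, with an assumed texture size: each eye renders into its own horizontal half of the shared render-target texture.

#include <cstdio>

struct IntRect { int x, y, width, height; };  // stand-in for gfx::IntRect

IntRect EyeViewport(int aTextureWidth, int aTextureHeight, int aEye /* 0 = left, 1 = right */) {
  IntRect viewport;
  viewport.x = aEye * aTextureWidth / 2;
  viewport.y = 0;
  viewport.width = aTextureWidth / 2;
  viewport.height = aTextureHeight;
  return viewport;
}

int main() {
  for (int eye = 0; eye < 2; eye++) {
    IntRect vp = EyeViewport(2560, 1440, eye);
    printf("eye %d: %d,%d %dx%d\n", eye, vp.x, vp.y, vp.width, vp.height);
  }
  // eye 0: 0,0 1280x1440
  // eye 1: 1280,0 1280x1440
  return 0;
}
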