Code Example #1
File: check.hpp Project: aelovikov/mesos
template <typename T>
Option<std::string> _check(const Option<T>& o)
{
  if (o.isNone()) {
    return Some("is NONE");
  }
  return None();
}
Code Example #2
File: ImageLayerMLGPU.cpp Project: yrliou/gecko-dev
void
ImageLayerMLGPU::ComputeEffectiveTransforms(const gfx::Matrix4x4& aTransformToSurface)
{
  Matrix4x4 local = GetLocalTransform();

  // Snap image edges to pixel boundaries.
  gfxRect sourceRect(0, 0, 0, 0);
  if (mHost && mHost->IsAttached()) {
    IntSize size = mHost->GetImageSize();
    sourceRect.SizeTo(size.width, size.height);
  }

  // Snap our local transform first, and snap the inherited transform as well.
  // This makes our snapping equivalent to what would happen if our content
  // was drawn into a PaintedLayer (gfxContext would snap using the local
  // transform, then we'd snap again when compositing the PaintedLayer).
  mEffectiveTransform =
      SnapTransform(local, sourceRect, nullptr) *
      SnapTransformTranslation(aTransformToSurface, nullptr);
  mEffectiveTransformForBuffer = mEffectiveTransform;

  if (mScaleMode == ScaleMode::STRETCH &&
      mScaleToSize.width != 0.0 &&
      mScaleToSize.height != 0.0)
  {
    Size scale(
      sourceRect.width / mScaleToSize.width,
      sourceRect.height / mScaleToSize.height);
    mScale = Some(scale);
  }

  ComputeEffectiveTransformForMaskLayers(aTransformToSurface);
}
Code Example #3
bool
WebRenderCanvasRendererSync::CreateCompositable()
{
  if (!mCanvasClient) {
    TextureFlags flags = TextureFlags::DEFAULT;
    if (mOriginPos == gl::OriginPos::BottomLeft) {
      flags |= TextureFlags::ORIGIN_BOTTOM_LEFT;
    }

    if (!mIsAlphaPremultiplied) {
      flags |= TextureFlags::NON_PREMULTIPLIED;
    }

    mCanvasClient = CanvasClient::CreateCanvasClient(GetCanvasClientType(),
                                                     GetForwarder(),
                                                     flags);
    if (!mCanvasClient) {
      return false;
    }

    mCanvasClient->Connect();
  }

  if (mExternalImageId.isNothing()) {
    mExternalImageId = Some(mManager->WrBridge()->AllocExternalImageIdForCompositable(mCanvasClient));
  }

  return true;
}
Code Example #4
File: SourceBuffer.cpp Project: paulmadore/luckyde
void
SourceBuffer::Complete(nsresult aStatus)
{
  MutexAutoLock lock(mMutex);

  if (MOZ_UNLIKELY(mStatus)) {
    MOZ_ASSERT_UNREACHABLE("Called Complete more than once");
    return;
  }

  if (MOZ_UNLIKELY(NS_SUCCEEDED(aStatus) && IsEmpty())) {
    // It's illegal to succeed without writing anything.
    aStatus = NS_ERROR_FAILURE;
  }

  mStatus = Some(aStatus);

  // Resume any waiting consumers now that we're complete.
  ResumeWaitingConsumers();

  // If we still have active consumers, just return.
  if (mConsumerCount > 0) {
    return;
  }

  // Attempt to compact our buffer down to a single chunk.
  Compact();
}
Code Example #5
void
MediaSource::SetLiveSeekableRange(double aStart, double aEnd, ErrorResult& aRv)
{
  MOZ_ASSERT(NS_IsMainThread());

  // 1. If the readyState attribute is not "open" then throw an InvalidStateError
  // exception and abort these steps.
  if (mReadyState != MediaSourceReadyState::Open) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  // 2. If start is negative or greater than end, then throw a TypeError
  // exception and abort these steps.
  if (aStart < 0 || aStart > aEnd) {
    aRv.Throw(NS_ERROR_DOM_TYPE_ERR);
    return;
  }

  // 3. Set live seekable range to be a new normalized TimeRanges object
  // containing a single range whose start position is start and end position is
  // end.
  mLiveSeekableRange =
    Some(media::TimeInterval(media::TimeUnit::FromSeconds(aStart),
                             media::TimeUnit::FromSeconds(aEnd)));
}
Code Example #6
bool WebRenderCanvasRendererAsync::CreateCompositable() {
  if (!mCanvasClient) {
    TextureFlags flags = TextureFlags::DEFAULT;
    if (mOriginPos == gl::OriginPos::BottomLeft) {
      flags |= TextureFlags::ORIGIN_BOTTOM_LEFT;
    }

    if (!mIsAlphaPremultiplied) {
      flags |= TextureFlags::NON_PREMULTIPLIED;
    }

    mCanvasClient = CanvasClient::CreateCanvasClient(GetCanvasClientType(),
                                                     GetForwarder(), flags);
    if (!mCanvasClient) {
      return false;
    }

    mCanvasClient->Connect();
  }

  if (!mPipelineId) {
    // Alloc async image pipeline id.
    mPipelineId = Some(
        mManager->WrBridge()->GetCompositorBridgeChild()->GetNextPipelineId());
    mManager->AddPipelineIdForCompositable(mPipelineId.ref(),
                                           mCanvasClient->GetIPCHandle());
  }

  return true;
}
Code Example #7
void
DeviceManagerDx::ImportDeviceInfo(const D3D11DeviceStatus& aDeviceStatus)
{
  MOZ_ASSERT(!ProcessOwnsCompositor());

  mDeviceStatus = Some(aDeviceStatus);
}
Code Example #8
void
H264Converter::DecodeFirstSample(MediaRawData* aSample)
{
  if (mNeedKeyframe && !aSample->mKeyframe) {
    mDecodePromise.Resolve(DecodedData(), __func__);
    return;
  }

  mNeedAVCC =
    Some(mDecoder->NeedsConversion() == ConversionRequired::kNeedAVCC);

  if (!*mNeedAVCC
      && !mp4_demuxer::AnnexB::ConvertSampleToAnnexB(aSample, mNeedKeyframe)) {
    mDecodePromise.Reject(
      MediaResult(NS_ERROR_OUT_OF_MEMORY,
                  RESULT_DETAIL("ConvertSampleToAnnexB")),
      __func__);
    return;
  }

  mNeedKeyframe = false;

  RefPtr<H264Converter> self = this;
  mDecoder->Decode(aSample)
    ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__,
           [self, this](const MediaDataDecoder::DecodedData& aResults) {
             mDecodePromiseRequest.Complete();
             mDecodePromise.Resolve(aResults, __func__);
           },
           [self, this](const MediaResult& aError) {
             mDecodePromiseRequest.Complete();
             mDecodePromise.Reject(aError, __func__);
           })
    ->Track(mDecodePromiseRequest);
}
Code Example #9
File: DataTransfer.cpp Project: philbooth/gecko-dev
NS_IMETHODIMP
DataTransfer::MozClearDataAt(const nsAString& aFormat, uint32_t aIndex)
{
  ErrorResult rv;
  MozClearDataAt(aFormat, aIndex, Some(nsContentUtils::SubjectPrincipal()), rv);
  return rv.StealNSResult();
}
Code Example #10
void
WebRenderLayerScrollData::Initialize(WebRenderScrollData& aOwner,
                                     Layer* aLayer,
                                     int32_t aDescendantCount)
{
  MOZ_ASSERT(aDescendantCount >= 0); // Ensure value is valid
  MOZ_ASSERT(mDescendantCount == -1); // Don't allow re-setting an already set value
  mDescendantCount = aDescendantCount;

  MOZ_ASSERT(aLayer);
  for (uint32_t i = 0; i < aLayer->GetScrollMetadataCount(); i++) {
    mScrollIds.AppendElement(aOwner.AddMetadata(aLayer->GetScrollMetadata(i)));
  }

  mIsScrollInfoLayer = aLayer->AsContainerLayer() && !aLayer->GetFirstChild();
  mTransform = aLayer->GetTransform();
  mTransformIsPerspective = aLayer->GetTransformIsPerspective();
  mEventRegions = aLayer->GetEventRegions();
  mReferentId = aLayer->AsRefLayer()
      ? Some(aLayer->AsRefLayer()->GetReferentId())
      : Nothing();
  mEventRegionsOverride = aLayer->AsContainerLayer()
      ? aLayer->AsContainerLayer()->GetEventRegionsOverride()
      : EventRegionsOverride::NoOverride;
  mScrollThumbData = aLayer->GetScrollThumbData();
  mScrollbarTargetContainerId = aLayer->GetScrollbarTargetContainerId();
  mIsScrollbarContainer = aLayer->IsScrollbarContainer();
  mFixedPosScrollContainerId = aLayer->GetFixedPositionScrollContainerId();
}
Code Example #11
File: VectorImage.cpp Project: miketaylr/gecko-dev
nsresult
VectorImage::OnImageDataComplete(nsIRequest* aRequest,
                                 nsISupports* aContext,
                                 nsresult aStatus,
                                 bool aLastPart)
{
  // Call our internal OnStopRequest method, which only talks to our embedded
  // SVG document. This won't have any effect on our ProgressTracker.
  nsresult finalStatus = OnStopRequest(aRequest, aContext, aStatus);

  // Give precedence to Necko failure codes.
  if (NS_FAILED(aStatus)) {
    finalStatus = aStatus;
  }
  
  Progress loadProgress = LoadCompleteProgress(aLastPart, mError, finalStatus);

  if (mIsFullyLoaded || mError) {
    // Our document is loaded, so we're ready to notify now.
    mProgressTracker->SyncNotifyProgress(loadProgress);
  } else {
    // Record our progress so far; we'll actually send the notifications in
    // OnSVGDocumentLoaded or OnSVGDocumentError.
    mLoadProgress = Some(loadProgress);
  }

  return finalStatus;
}
Code Example #12
File: leveldb.cpp Project: ankurcha/mesos
Try<Option<Entry> > LevelDBStorageProcess::read(const string& name)
{
  CHECK_NONE(error);

  leveldb::ReadOptions options;

  string value;

  leveldb::Status status = db->Get(options, name, &value);

  if (status.IsNotFound()) {
    return None();
  } else if (!status.ok()) {
    return Error(status.ToString());
  }

  google::protobuf::io::ArrayInputStream stream(value.data(), value.size());

  Entry entry;

  if (!entry.ParseFromZeroCopyStream(&stream)) {
    return Error("Failed to deserialize Entry");
  }

  return Some(entry);
}
Code Example #13
File: MP3FrameParser.cpp Project: jld/gecko-dev
Result<bool, nsresult> FrameParser::VBRHeader::ParseVBRI(
    BufferReader* aReader) {
  static const uint32_t TAG = BigEndian::readUint32("VBRI");
  static const uint32_t OFFSET = 32 + FrameParser::FrameHeader::SIZE;
  static const uint32_t FRAME_COUNT_OFFSET = OFFSET + 14;
  static const uint32_t MIN_FRAME_SIZE = OFFSET + 26;

  MOZ_ASSERT(aReader);
  // ParseVBRI assumes that the BufferReader offset points to the beginning of a
  // frame, therefore as a simple check, we look for the presence of a frame
  // sync at that position.
  auto sync = aReader->PeekU16();
  if (sync.isOk()) {  // To avoid compiler complaints about 'set but unused'.
    MOZ_ASSERT((sync.unwrap() & 0xFFE0) == 0xFFE0);
  }

  // Seek backward to the original position before leaving this scope.
  const size_t prevReaderOffset = aReader->Offset();
  auto scopeExit = MakeScopeExit([&] { aReader->Seek(prevReaderOffset); });

  // VBRI have a fixed relative position, so let's check for it there.
  if (aReader->Remaining() > MIN_FRAME_SIZE) {
    aReader->Seek(prevReaderOffset + OFFSET);
    uint32_t tag, frames;
    MOZ_TRY_VAR(tag, aReader->ReadU32());
    if (tag == TAG) {
      aReader->Seek(prevReaderOffset + FRAME_COUNT_OFFSET);
      MOZ_TRY_VAR(frames, aReader->ReadU32());
      mNumAudioFrames = Some(frames);
      mType = VBRI;
      return true;
    }
  }
  return false;
}
Code Example #14
bool
GenericScrollAnimation::DoSample(FrameMetrics& aFrameMetrics, const TimeDuration& aDelta)
{
  TimeStamp now = mApzc.GetFrameTime();
  CSSToParentLayerScale2D zoom = aFrameMetrics.GetZoom();

  // If the animation is finished, make sure the final position is correct by
  // using one last displacement. Otherwise, compute the delta via the timing
  // function as normal.
  bool finished = mAnimationPhysics->IsFinished(now);
  nsPoint sampledDest = mAnimationPhysics->PositionAt(now);
  ParentLayerPoint displacement =
    (CSSPoint::FromAppUnits(sampledDest) - aFrameMetrics.GetScrollOffset()) * zoom;

  if (finished) {
    mApzc.mX.SetVelocity(0);
    mApzc.mY.SetVelocity(0);
  } else if (!IsZero(displacement)) {
    // Convert velocity from AppUnits/Seconds to ParentLayerCoords/Milliseconds
    nsSize velocity = mAnimationPhysics->VelocityAt(now);
    ParentLayerPoint velocityPL =
      CSSPoint::FromAppUnits(nsPoint(velocity.width, velocity.height)) * zoom;
    mApzc.mX.SetVelocity(velocityPL.x / 1000.0);
    mApzc.mY.SetVelocity(velocityPL.y / 1000.0);
  }

  // Note: we ignore overscroll for generic animations.
  ParentLayerPoint adjustedOffset, overscroll;
  mApzc.mX.AdjustDisplacement(displacement.x, adjustedOffset.x, overscroll.x,
                              mDirectionForcedToOverscroll
                                == Some(ScrollDirection::eHorizontal));
  mApzc.mY.AdjustDisplacement(displacement.y, adjustedOffset.y, overscroll.y,
                              mDirectionForcedToOverscroll
                                == Some(ScrollDirection::eVertical));

  // If we expected to scroll, but there's no more scroll range on either axis,
  // then end the animation early. Note that the initial displacement could be 0
  // if the compositor ran very quickly (<1ms) after the animation was created.
  // When that happens we want to make sure the animation continues.
  if (!IsZero(displacement) && IsZero(adjustedOffset)) {
    // Nothing more to do - end the animation.
    return false;
  }

  aFrameMetrics.ScrollBy(adjustedOffset / zoom);
  return !finished;
}
Code Example #15
/* static */ Maybe<ComputedTimingFunction>
TimingParams::ParseEasing(const nsAString& aEasing,
                          nsIDocument* aDocument,
                          ErrorResult& aRv)
{
  MOZ_ASSERT(aDocument);

  nsCSSValue value;
  nsCSSParser parser;
  parser.ParseLonghandProperty(eCSSProperty_animation_timing_function,
                               aEasing,
                               aDocument->GetDocumentURI(),
                               aDocument->GetDocumentURI(),
                               aDocument->NodePrincipal(),
                               value);

  switch (value.GetUnit()) {
    case eCSSUnit_List: {
      const nsCSSValueList* list = value.GetListValue();
      if (list->mNext) {
        // don't support a list of timing functions
        break;
      }
      switch (list->mValue.GetUnit()) {
        case eCSSUnit_Enumerated:
          // Return Nothing() if "linear" is passed in.
          if (list->mValue.GetIntValue() ==
              NS_STYLE_TRANSITION_TIMING_FUNCTION_LINEAR) {
            return Nothing();
          }
          MOZ_FALLTHROUGH;
        case eCSSUnit_Cubic_Bezier:
        case eCSSUnit_Steps: {
          nsTimingFunction timingFunction;
          nsRuleNode::ComputeTimingFunction(list->mValue, timingFunction);
          ComputedTimingFunction computedTimingFunction;
          computedTimingFunction.Init(timingFunction);
          return Some(computedTimingFunction);
        }
        default:
          MOZ_ASSERT_UNREACHABLE("unexpected animation-timing-function list "
                                 "item unit");
          break;
      }
      break;
    }
    case eCSSUnit_Inherit:
    case eCSSUnit_Initial:
    case eCSSUnit_Unset:
    case eCSSUnit_TokenStream:
    case eCSSUnit_Null:
      break;
    default:
      MOZ_ASSERT_UNREACHABLE("unexpected animation-timing-function unit");
      break;
  }
  aRv.ThrowTypeError<dom::MSG_INVALID_EASING_ERROR>();
  return Nothing();
}
Code Example #16
void
DeviceManagerDx::ForceDeviceReset(ForcedDeviceResetReason aReason)
{
  Telemetry::Accumulate(Telemetry::FORCED_DEVICE_RESET_REASON, uint32_t(aReason));
  {
    MutexAutoLock lock(mDeviceLock);
    mDeviceResetReason = Some(DeviceResetReason::FORCED_RESET);
  }
}
Code Example #17
File: DataTransfer.cpp Project: philbooth/gecko-dev
NS_IMETHODIMP
DataTransfer::ClearData(const nsAString& aFormat)
{
  Optional<nsAString> format;
  format = &aFormat;
  ErrorResult rv;
  ClearData(format, Some(nsContentUtils::SubjectPrincipal()), rv);
  return rv.StealNSResult();
}
Code Example #18
Maybe<uint32_t> GetCubebMSGLatencyInFrames()
{
  StaticMutexAutoLock lock(sMutex);
  if (!sCubebMSGLatencyPrefSet) {
    return Maybe<uint32_t>();
  }
  MOZ_ASSERT(sCubebMSGLatencyInFrames > 0);
  return Some(sCubebMSGLatencyInFrames);
}
Code Example #19
File: check.hpp Project: Bbarrett/mesos
template <typename T>
Option<std::string> _check(const Result<T>& r)
{
  if (r.isError()) {
    return r.error();
  } else if (r.isNone()) {
    return Some("is NONE");
  }
  return None();
}
Code Example #20
File: SandboxFilter.cpp Project: Jar-win/Waterfox
virtual Maybe<ResultExpr> EvaluateSocketCall(int aCall) const override {
    switch (aCall) {
    case SYS_RECVMSG:
    case SYS_SENDMSG:
        return Some(Allow());
    default:
        return Nothing();
    }
}
Code Example #21
    AtomParser(const nsACString& aType, const MediaLargeByteBuffer* aData)
    {
      const nsCString mType(aType); // for logging macro.
      mp4_demuxer::ByteReader reader(aData);
      mp4_demuxer::AtomType initAtom("ftyp");
      mp4_demuxer::AtomType mediaAtom("moof");

      while (reader.Remaining() >= 8) {
        uint64_t size = reader.ReadU32();
        const uint8_t* typec = reader.Peek(4);
        uint32_t type = reader.ReadU32();
        MSE_DEBUGV(AtomParser, "Checking atom:'%c%c%c%c'",
                   typec[0], typec[1], typec[2], typec[3]);
        if (mInitOffset.isNothing() &&
            mp4_demuxer::AtomType(type) == initAtom) {
          mInitOffset = Some(reader.Offset());
        }
        if (mMediaOffset.isNothing() &&
            mp4_demuxer::AtomType(type) == mediaAtom) {
          mMediaOffset = Some(reader.Offset());
        }
        if (mInitOffset.isSome() && mMediaOffset.isSome()) {
          // We have everything we need.
          break;
        }
        if (size == 1) {
          // 64 bits size.
          if (!reader.CanReadType<uint64_t>()) {
            break;
          }
          size = reader.ReadU64();
        } else if (size == 0) {
          // Atom extends to the end of the buffer, it can't have what we're
          // looking for.
          break;
        }
        if (reader.Remaining() < size - 8) {
          // Incomplete atom.
          break;
        }
        reader.Read(size - 8);
      }
      reader.DiscardRemaining();
    }
Code Example #22
Maybe<TimeUnit>
MediaCodecDataDecoder::GetOutputDuration()
{
  if (mDurations.empty()) {
    return Nothing();
  }
  const Maybe<TimeUnit> duration = Some(mDurations.front());
  mDurations.pop_front();
  return duration;
}
Code Example #23
File: state.hpp Project: ChrisPaprocki/mesos
inline process::Future<Option<Variable>> State::_store(
    const internal::state::Entry& entry,
    const bool& b) // TODO(benh): Remove 'const &' after fixing libprocess.
{
  if (b) {
    return Some(Variable(entry));
  }

  return None();
}
Code Example #24
File: packet.cpp Project: omochi/ikadenwa-ios
Optional<PacketType> PacketTypeFromString(const std::string& str) {
    if (str.length() <= 0) {
        return None();
    }
    int value = str[0] - '0';
    if (!IsValidPacketTypeValue(value)) {
        return None();
    }
    return Some(static_cast<PacketType>(value));
}
Code Example #25
Maybe<uint32_t>
WebGLBuffer::GetIndexedFetchMaxVert(const GLenum type, const uint64_t byteOffset,
                                    const uint32_t indexCount) const
{
    if (!mIndexCache)
        return Nothing();

    const IndexRange range = { type, byteOffset, indexCount };
    auto res = mIndexRanges.insert({ range, Nothing() });
    if (mIndexRanges.size() > kMaxIndexRanges) {
        mContext->GeneratePerfWarning("[%p] Clearing mIndexRanges after exceeding %u.",
                                      this, kMaxIndexRanges);
        mIndexRanges.clear();
        res = mIndexRanges.insert({ range, Nothing() });
    }

    const auto& itr = res.first;
    const auto& didInsert = res.second;

    auto& maxFetchIndex = itr->second;
    if (didInsert) {
        const auto& data = mIndexCache.get();

        const auto start = (const uint8_t*)data + byteOffset;

        Maybe<uint32_t> ignoredVal;
        if (mContext->IsWebGL2()) {
            ignoredVal = Some(UINT32_MAX);
        }

        switch (type) {
        case LOCAL_GL_UNSIGNED_BYTE:
            maxFetchIndex = MaxForRange<uint8_t>(start, indexCount, ignoredVal);
            break;
        case LOCAL_GL_UNSIGNED_SHORT:
            maxFetchIndex = MaxForRange<uint16_t>(start, indexCount, ignoredVal);
            break;
        case LOCAL_GL_UNSIGNED_INT:
            maxFetchIndex = MaxForRange<uint32_t>(start, indexCount, ignoredVal);
            break;
        default:
            MOZ_CRASH();
        }
        const auto displayMaxVertIndex = maxFetchIndex ? int64_t(maxFetchIndex.value())
                                                       : -1;
        mContext->GeneratePerfWarning("[%p] New range #%u: (0x%04x, %" PRIu64 ", %u):"
                                      " %" PRIi64,
                                      this, uint32_t(mIndexRanges.size()), range.type,
                                      range.byteOffset, range.indexCount,
                                      displayMaxVertIndex);
    }

    return maxFetchIndex;
}
Code Example #26
TEST_F(APZHitTestingTester, HitTestingRespectsScrollClip_Bug1257288) {
  // Create the layer tree.
  const char* layerTreeSyntax = "c(tt)";
  // LayerID                     0 12
  nsIntRegion layerVisibleRegion[] = {
    nsIntRegion(IntRect(0,0,200,200)),
    nsIntRegion(IntRect(0,0,200,200)),
    nsIntRegion(IntRect(0,0,200,100))
  };
  root = CreateLayerTree(layerTreeSyntax, layerVisibleRegion, nullptr, lm, layers);

  // Add root scroll metadata to the first painted layer.
  SetScrollableFrameMetrics(layers[1], FrameMetrics::START_SCROLL_ID, CSSRect(0,0,200,200));

  // Add root and subframe scroll metadata to the second painted layer.
  // Give the subframe metadata a scroll clip corresponding to the subframe's
  // composition bounds.
  // Importantly, give the layer a layer clip which leaks outside of the
  // subframe's composition bounds.
  ScrollMetadata rootMetadata = BuildScrollMetadata(
      FrameMetrics::START_SCROLL_ID, CSSRect(0,0,200,200),
      ParentLayerRect(0,0,200,200));
  ScrollMetadata subframeMetadata = BuildScrollMetadata(
      FrameMetrics::START_SCROLL_ID + 1, CSSRect(0,0,200,200),
      ParentLayerRect(0,0,200,100));
  subframeMetadata.SetScrollClip(Some(LayerClip(ParentLayerIntRect(0,0,200,100))));
  layers[2]->SetScrollMetadata({subframeMetadata, rootMetadata});
  layers[2]->SetClipRect(Some(ParentLayerIntRect(0,0,200,200)));
  SetEventRegionsBasedOnBottommostMetrics(layers[2]);

  // Build the hit testing tree.
  ScopedLayerTreeRegistration registration(manager, 0, root, mcc);
  manager->UpdateHitTestingTree(nullptr, root, false, 0, 0);

  // Pan on a region that's inside layers[2]'s layer clip, but outside
  // its subframe metadata's scroll clip.
  Pan(manager, 120, 110);

  // Test that the subframe hasn't scrolled.
  EXPECT_EQ(CSSPoint(0,0), ApzcOf(layers[2], 0)->GetFrameMetrics().GetScrollOffset());
}
Code Example #27
static Maybe<nsRect>
EdgeInclusiveIntersection(const nsRect& aRect, const nsRect& aOtherRect)
{
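    // "Edge inclusive" means rectangles that merely touch still intersect:
    // a zero-width or zero-height overlap returns Some(...) with an empty
    // rect rather than Nothing().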
    nscoord left = std::max(aRect.x, aOtherRect.x);
    nscoord top = std::max(aRect.y, aOtherRect.y);
    nscoord right = std::min(aRect.XMost(), aOtherRect.XMost());
    nscoord bottom = std::min(aRect.YMost(), aOtherRect.YMost());
    if (left > right || top > bottom) {
        return Nothing();
    }
    return Some(nsRect(left, top, right - left, bottom - top));
}
Code Example #28
nsresult
RasterImage::OnImageDataComplete(nsIRequest*, nsISupports*, nsresult aStatus,
                                 bool aLastPart)
{
  MOZ_ASSERT(NS_IsMainThread());

  // Record that we have all the data we're going to get now.
  mHasSourceData = true;

  // Let decoders know that there won't be any more data coming.
  mSourceBuffer->Complete(aStatus);

  // Allow a synchronous metadata decode if mSyncLoad was set, or if we're
  // running on a single thread (in which case waiting for the async metadata
  // decoder could delay this image's load event quite a bit), or if this image
  // is transient.
  bool canSyncDecodeMetadata = mSyncLoad || mTransient ||
                               DecodePool::NumberOfCores() < 2;

  if (canSyncDecodeMetadata && !mHasSize) {
    // We're loading this image synchronously, so it needs to be usable after
    // this call returns.  Since we haven't gotten our size yet, we need to do a
    // synchronous metadata decode here.
    DecodeMetadata(FLAG_SYNC_DECODE);
  }

  // Determine our final status, giving precedence to Necko failure codes. We
  // check after running the metadata decode in case it triggered an error.
  nsresult finalStatus = mError ? NS_ERROR_FAILURE : NS_OK;
  if (NS_FAILED(aStatus)) {
    finalStatus = aStatus;
  }

  // If loading failed, report an error.
  if (NS_FAILED(finalStatus)) {
    DoError();
  }

  Progress loadProgress = LoadCompleteProgress(aLastPart, mError, finalStatus);

  if (!mHasSize && !mError) {
    // We don't have our size yet, so we'll fire the load event in SetSize().
    MOZ_ASSERT(!canSyncDecodeMetadata,
               "Firing load async after metadata sync decode?");
    NotifyProgress(FLAG_ONLOAD_BLOCKED);
    mLoadProgress = Some(loadProgress);
    return finalStatus;
  }

  NotifyForLoadEvent(loadProgress);

  return finalStatus;
}
Code Example #29
File: WorkerScope.cpp Project: luke-chang/gecko-1
already_AddRefed<Promise>
WorkerGlobalScope::CreateImageBitmap(JSContext* aCx,
                                     const ImageBitmapSource& aImage,
                                     int32_t aSx, int32_t aSy, int32_t aSw, int32_t aSh,
                                     ErrorResult& aRv)
{
  if (aImage.IsArrayBuffer() || aImage.IsArrayBufferView()) {
    aRv.Throw(NS_ERROR_NOT_IMPLEMENTED);
    return nullptr;
  }

  return ImageBitmap::Create(this, aImage, Some(gfx::IntRect(aSx, aSy, aSw, aSh)), aRv);
}
Code Example #30
Maybe<LayerPoint>
HitTestingTreeNode::Untransform(const ParentLayerPoint& aPoint) const
{
  // convert into Layer coordinate space
  gfx::Matrix4x4 localTransform = mTransform;
  if (mApzc) {
    localTransform = localTransform * mApzc->GetCurrentAsyncTransformWithOverscroll();
  }
  gfx::Point4D point = localTransform.Inverse().ProjectPoint(aPoint.ToUnknownPoint());
  return point.HasPositiveWCoord()
        ? Some(ViewAs<LayerPixel>(point.As2DPoint()))
        : Nothing();
}