inline bool RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type, RegExpGuard *g) { Key key(keyAtom, flags, type); Map::AddPtr p = map_.lookupForAdd(key); if (p) { g->init(*p->value); return true; } RegExpShared *shared = cx->runtime->new_<RegExpShared>(cx->runtime, flags); if (!shared) goto error; if (!shared->compile(cx, source)) goto error; /* Re-lookup in case there was a GC. */ if (!map_.relookupOrAdd(p, key, shared)) goto error; /* * Since 'error' deletes 'shared', only guard 'shared' on success. This is * safe since 'shared' cannot be deleted by GC until after the call to * map_.add() directly above. */ g->init(*shared); return true; error: Foreground::delete_(shared); js_ReportOutOfMemory(cx); return false; }
/*
 * Sweep phase: destroy RegExpShared instances that were not (validly) marked
 * during the preceding trace, and drop the match-result template object if it
 * is about to be finalized.
 */
void
RegExpCompartment::sweep(JSRuntime *rt)
{
    for (Set::Enum e(set_); !e.empty(); e.popFront()) {
        RegExpShared *shared = e.front();

        // Sometimes RegExpShared instances are marked without the
        // compartment being subsequently cleared. This can happen if a GC is
        // restarted while in progress (i.e. performing a full GC in the
        // middle of an incremental GC) or if a RegExpShared referenced via the
        // stack is traced but is not in a zone being collected.
        //
        // Because of this we only treat the marked_ bit as a hint, and destroy
        // the RegExpShared if it was accidentally marked earlier but wasn't
        // marked by the current trace.
        bool keep = shared->marked() &&
                    !IsStringAboutToBeFinalized(shared->source.unsafeGet());
        // A shared is only kept if every jitCode it holds also survives.
        // NOTE(review): the finalization checks are presumably also updating
        // the stored pointers for moved cells, which is why the probe is
        // performed through unsafeGet() — confirm against the GC barrier API.
        for (size_t i = 0; i < ArrayLength(shared->compilationArray); i++) {
            RegExpShared::RegExpCompilation &compilation = shared->compilationArray[i];
            if (keep && compilation.jitCode)
                keep = !IsJitCodeAboutToBeFinalized(compilation.jitCode.unsafeGet());
        }
        if (keep) {
            // Survivor: reset the mark hint for the next collection.
            shared->clearMarked();
        } else {
            js_delete(shared);
            e.removeFront();
        }
    }

    // Clear the cached template object if the GC is about to finalize it.
    if (matchResultTemplateObject_ &&
        IsObjectAboutToBeFinalized(matchResultTemplateObject_.unsafeGet()))
    {
        matchResultTemplateObject_.set(nullptr);
    }
}
/*
 * Report the malloc heap bytes owned by this RegExpCompartment, excluding the
 * object itself: the set's table storage plus every cached RegExpShared.
 */
size_t
RegExpCompartment::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
    size_t bytes = set_.sizeOfExcludingThis(mallocSizeOf);
    for (Set::Enum iter(set_); !iter.empty(); iter.popFront())
        bytes += iter.front()->sizeOfIncludingThis(mallocSizeOf);
    return bytes;
}
/*
 * Fetch the RegExpShared of a cross-compartment regexp wrapper. The shared
 * is first resolved in the wrapped object's compartment, then an equivalent
 * shared owned by the current compartment is produced for the caller.
 */
bool
CrossCompartmentWrapper::regexp_toShared(JSContext* cx, HandleObject wrapper, RegExpGuard* g) const
{
    RegExpGuard wrappedGuard(cx);
    {
        // Enter the target compartment only for the duration of the lookup.
        AutoCompartment call(cx, wrappedObject(wrapper));
        if (!Wrapper::regexp_toShared(cx, wrapper, &wrappedGuard))
            return false;
    }

    // Re-key by source and flags to get a shared that belongs to the
    // current compartment.
    RegExpShared* wrappedShared = wrappedGuard.re();
    return cx->compartment()->regExps.get(cx, wrappedShared->getSource(),
                                          wrappedShared->getFlags(), g);
}
/*
 * Sweep phase: delete every RegExpShared that reports itself dead for this
 * collection, and drop the match-result template object if it is about to be
 * finalized.
 */
void
RegExpCompartment::sweep(JSRuntime* rt)
{
    // The set is lazily initialized; nothing to sweep until then.
    if (!set_.initialized())
        return;

    for (Set::Enum iter(set_); !iter.empty(); iter.popFront()) {
        RegExpShared* entry = iter.front();
        if (entry->needsSweep(rt)) {
            js_delete(entry);
            iter.removeFront();
        }
    }

    if (matchResultTemplateObject_ && IsAboutToBeFinalized(&matchResultTemplateObject_))
        matchResultTemplateObject_.set(nullptr);
}
/*
 * Execute |re| against |input| starting at |*lastIndex|, filling |matches|.
 * On a successful match the statics |res| (if provided) are updated from the
 * match pairs; a failure to update them is reported as an execution error.
 */
static RegExpRunStatus
ExecuteRegExpImpl(JSContext *cx, RegExpStatics *res, RegExpShared &re, HandleLinearString input,
                  size_t *lastIndex, MatchPairs &matches)
{
    RegExpRunStatus status = re.execute(cx, input, lastIndex, matches);

    if (status == RegExpRunStatus_Success && res && !res->updateFromMatchPairs(cx, input, matches))
        return RegExpRunStatus_Error;

    return status;
}
/*
 * Execute |re| against |chars|/|length| starting at |*lastIndex|, routing the
 * results through |matches|, which selects between the two execution modes.
 * On success the statics |res| (if provided) are updated: lazily in the
 * match-only case, eagerly from the match pairs otherwise.
 */
static RegExpRunStatus
ExecuteRegExpImpl(JSContext *cx, RegExpStatics *res, RegExpShared &re,
                  Handle<JSLinearString*> input, const jschar *chars, size_t length,
                  size_t *lastIndex, MatchConduit &matches)
{
    RegExpRunStatus status;

    /* Switch between MatchOnly and IncludeSubpatterns modes. */
    if (matches.isPair) {
        /* Remember the starting index: the lazy statics need it. */
        size_t lastIndex_orig = *lastIndex;
        /* Only one MatchPair slot provided: execute short-circuiting regexp. */
        status = re.executeMatchOnly(cx, chars, length, lastIndex, *matches.u.pair);
        if (status == RegExpRunStatus_Success && res)
            res->updateLazily(cx, input, &re, lastIndex_orig);
    } else {
        /* Vector of MatchPairs provided: execute full regexp. */
        status = re.execute(cx, chars, length, lastIndex, *matches.u.pairs);
        if (status == RegExpRunStatus_Success && res) {
            /*
             * Bug fix: the updateFromMatchPairs return value was previously
             * ignored; propagate its failure as an error like the sibling
             * ExecuteRegExpImpl overloads do.
             */
            if (!res->updateFromMatchPairs(cx, input, *matches.u.pairs))
                return RegExpRunStatus_Error;
        }
    }

    return status;
}
/* static */ void
RegExpObject::trace(JSTracer* trc, JSObject* obj)
{
    RegExpObject& regexp = obj->as<RegExpObject>();
    RegExpShared* shared = regexp.maybeShared();
    if (!shared)
        return;

    // When tracing through the object normally, we have the option of
    // unlinking the object from its RegExpShared so that the RegExpShared may
    // be collected. To detect this we need to test all the following
    // conditions, since:
    // 1. During TraceRuntime, isHeapBusy() is true, but the tracer might not
    //    be a marking tracer.
    // 2. When a write barrier executes, IsMarkingTracer is true, but
    //    isHeapBusy() will be false.
    bool unlinkShared = trc->runtime()->isHeapBusy() &&
                        trc->isMarkingTracer() &&
                        !obj->asTenured().zone()->isPreservingCode();
    if (unlinkShared)
        regexp.NativeObject::setPrivate(nullptr);
    else
        shared->trace(trc);
}
/*
 * Execute |re| against |input| starting at |searchIndex|. If |matches| is
 * provided the statics |res| (if any) are updated eagerly from it (failure
 * becomes an execution error); otherwise they are updated lazily.
 */
static RegExpRunStatus
ExecuteRegExpImpl(JSContext* cx, RegExpStatics* res, RegExpShared& re, HandleLinearString input,
                  size_t searchIndex, MatchPairs* matches)
{
    RegExpRunStatus status = re.execute(cx, input, searchIndex, matches);

    // Nothing more to do unless the match succeeded and statics are wanted.
    if (status != RegExpRunStatus_Success || !res)
        return status;

    if (!matches) {
        // No pairs requested: record just enough to materialize them lazily.
        res->updateLazily(cx, input, &re, searchIndex);
        return status;
    }

    return res->updateFromMatchPairs(cx, input, *matches)
           ? status
           : RegExpRunStatus_Error;
}