Update On Mon Feb 14 19:28:44 CET 2022

github-action[bot] 2022-02-14 19:28:45 +01:00
parent 7018db8329
commit 42533c2e4d
148 changed files with 2002 additions and 1455 deletions


@ -12,6 +12,7 @@ prefs =
[browser_aaa_eventTelemetry_run_first.js]
skip-if =
asan || tsan || ccov || debug || (os == "win" && !debug) # bug 1605494 is more prevalent on linux, Bug 1627419
os == 'linux' && bits == 64 && !debug # Bug 1648862
[browser_alertDismissedAfterChangingPassword.js]
skip-if =
os == "mac" && os_version == "10.15" && !debug # Bug 1684513


@ -652,7 +652,11 @@
<checkbox id="useSmoothScrolling"
data-l10n-id="browsing-use-smooth-scrolling"
preference="general.smoothScroll"/>
#ifdef MOZ_WIDGET_GTK
<checkbox id="useOverlayScrollbars"
data-l10n-id="browsing-gtk-use-non-overlay-scrollbars"
preference="widget.gtk.overlay-scrollbars.enabled"/>
#endif
#ifdef XP_WIN
<checkbox id="useOnScreenKeyboard"
hidden="true"


@ -120,6 +120,7 @@ Preferences.addAll([
page-down, and other such page movements */
{ id: "general.autoScroll", type: "bool" },
{ id: "general.smoothScroll", type: "bool" },
{ id: "widget.gtk.overlay-scrollbars.enabled", type: "bool", inverted: true },
{ id: "layout.spellcheckDefault", type: "int" },
{


@ -29,6 +29,7 @@ support-files =
[browser_aboutHomeLoading.js]
skip-if =
tsan # Intermittently times out, see 1622698 (frequent on TSan).
os == 'linux' && bits == 64 && !debug # Bug 1622698
[browser_action_searchengine.js]
[browser_action_searchengine_alias.js]
[browser_autocomplete_a11y_label.js]


@ -517,6 +517,10 @@ browsing-use-smooth-scrolling =
.label = Use smooth scrolling
.accesskey = m
browsing-gtk-use-non-overlay-scrollbars =
.label = Always show scrollbars
.accesskey = o
browsing-use-onscreen-keyboard =
.label = Show a touch keyboard when necessary
.accesskey = c


@ -465,7 +465,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "59fde7f7f5132e2787f3acf3430d1774c53ae5f5"
"revision": "879b99194c7b241e4a68b6ea73f295c41539db01"
},
"es-AR": {
"pin": false,
@ -519,7 +519,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "667fed3eb1c31e90335509da877a15a7d7ce5336"
"revision": "b28109480a27ad850ad0521f82e192980fcddaa6"
},
"es-MX": {
"pin": false,
@ -933,7 +933,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "37dbc92c07b4a80c1d22477aa3c7d9456e0043d4"
"revision": "8dde0697ad8ac3b42c3b31c0be78de70264acb6e"
},
"it": {
"pin": false,
@ -1407,7 +1407,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "4057635ff38376e899f8ba45a2a464a591aa635f"
"revision": "b21705e160f0724c3f53a07c8b81592c175e3fa4"
},
"pt-PT": {
"pin": false,
@ -1497,7 +1497,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "cb67fb4ce51bd276ef579e85639e2e40f810ae4e"
"revision": "5540ebe380fa088f0e2c3e5981d8378fc1a66d04"
},
"sc": {
"pin": false,
@ -1605,7 +1605,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "640dbd6479e3fef1773bd9877ee65797d9f068a9"
"revision": "94847137bceb69343cbd4f3b0b3ad44d08f09502"
},
"son": {
"pin": false,
@ -1659,7 +1659,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "e7c7978d2fd74f4e2098253a41b099604b88d61b"
"revision": "fc2038de2801930ce585dd1817c3de3367523630"
},
"sv-SE": {
"pin": false,
@ -1803,7 +1803,7 @@
"win64-aarch64-devedition",
"win64-devedition"
],
"revision": "6f771afed474ec6bbc5d8cc89bcab7872535e540"
"revision": "e08aeb9558a4527729b1b157425c72d9cde1e6eb"
},
"trs": {
"pin": false,


@ -13,6 +13,8 @@ PromiseTestUtils.allowMatchingRejectionsGlobally(/File closed/);
// On debug test runner, it takes about 50s to run the test.
requestLongerTimeout(4);
/* eslint-disable mozilla/no-arbitrary-setTimeout */
const { fetch } = require("devtools/shared/DevToolsUtils");
const debuggerHeadURL =
@ -23,6 +25,33 @@ const helpersContextURL =
CHROME_URL_ROOT + "../../../debugger/test/mochitest/helpers/context.js";
add_task(async function runTest() {
let { content: debuggerHead } = await fetch(debuggerHeadURL);
// Also include the debugger helpers which are separated from debugger's head to be
// reused in other modules.
const { content: debuggerHelpers } = await fetch(helpersURL);
const { content: debuggerContextHelpers } = await fetch(helpersContextURL);
debuggerHead = debuggerHead + debuggerContextHelpers + debuggerHelpers;
// We remove its import of shared-head, which isn't available in the browser toolbox process
// and isn't needed thanks to testHead's symbols.
debuggerHead = debuggerHead.replace(
/Services.scriptloader.loadSubScript[^\)]*\);/g,
""
);
const ToolboxTask = await initBrowserToolboxTask({
enableBrowserToolboxFission: true,
});
await ToolboxTask.importFunctions({
// head.js uses this method
registerCleanupFunction: () => {},
waitForDispatch,
waitUntil,
});
await ToolboxTask.importScript(debuggerHead);
info("### First test breakpoint in the parent process script");
const s = Cu.Sandbox("http://mozilla.org");
// Use a unique id for the fake script name in order to be able to run
@ -52,36 +81,10 @@ add_task(async function runTest() {
// Execute the function every second in order to trigger the breakpoint
const interval = setInterval(s.plop, 1000);
let { content: debuggerHead } = await fetch(debuggerHeadURL);
// Also include the debugger helpers which are separated from debugger's head to be
// reused in other modules.
const { content: debuggerHelpers } = await fetch(helpersURL);
const { content: debuggerContextHelpers } = await fetch(helpersContextURL);
debuggerHead = debuggerHead + debuggerContextHelpers + debuggerHelpers;
// We remove its import of shared-head, which isn't available in browser toolbox process
// And isn't needed thanks to testHead's symbols
debuggerHead = debuggerHead.replace(
/Services.scriptloader.loadSubScript[^\)]*\);/g,
""
);
const ToolboxTask = await initBrowserToolboxTask({
enableBrowserToolboxFission: true,
});
await ToolboxTask.importFunctions({
// head.js uses this method
registerCleanupFunction: () => {},
waitForDispatch,
waitUntil,
});
await ToolboxTask.importScript(debuggerHead);
await ToolboxTask.spawn(`"${testUrl}"`, async _testUrl => {
/* global createDebuggerContext, waitForSources, waitForPaused,
/* global gToolbox, createDebuggerContext, waitForSources, waitForPaused,
addBreakpoint, assertPausedAtSourceAndLine, stepIn, findSource,
removeBreakpoint, resume, selectSource */
removeBreakpoint, resume, selectSource, assertNotPaused, assertBreakpoint */
const { Services } = ChromeUtils.import(
"resource://gre/modules/Services.jsm"
);
@ -90,7 +93,6 @@ add_task(async function runTest() {
Services.prefs.clearUserPref("devtools.debugger.pending-selected-location");
info("Waiting for debugger load");
/* global gToolbox */
await gToolbox.selectTool("jsdebugger");
const dbg = createDebuggerContext(gToolbox);
const window = dbg.win;
@ -131,19 +133,99 @@ add_task(async function runTest() {
const source = findSource(dbg, fileName);
assertPausedAtSourceAndLine(dbg, source.id, 2);
is(
dbg.selectors.getBreakpointCount(),
1,
"There is exactly one breakpoint"
);
await stepIn(dbg);
assertPausedAtSourceAndLine(dbg, source.id, 3);
is(
dbg.selectors.getBreakpointCount(),
1,
"We still have only one breakpoint after step-in"
);
// Remove the breakpoint before resuming in order to prevent hitting the breakpoint
// again during test closing.
await removeBreakpoint(dbg, source.id, 2);
await resume(dbg);
// Give the interval a chance to re-execute
await new Promise(r => setTimeout(r, 1000));
is(dbg.selectors.getBreakpointCount(), 0, "There are no more breakpoints");
assertNotPaused(dbg);
});
clearInterval(interval);
info("### Now test breakpoint in a privileged content process script");
const testUrl2 = `http://mozilla.org/content-process-test-${id}.js`;
await SpecialPowers.spawn(gBrowser.selectedBrowser, [testUrl2], testUrl => {
// Use a sandbox in order to have a URL to set a breakpoint
const s = Cu.Sandbox("http://mozilla.org");
Cu.evalInSandbox(
"(" +
function() {
this.foo = function foo() {
return 1;
};
} +
").call(this)",
s,
"1.8",
testUrl,
0
);
content.interval = content.setInterval(s.foo, 1000);
});
await ToolboxTask.spawn(`"${testUrl2}"`, async _testUrl => {
const dbg = createDebuggerContext(gToolbox);
const fileName = _testUrl.match(/content-process-test.*\.js/)[0];
await waitForSources(dbg, _testUrl);
await selectSource(dbg, fileName);
const onPaused = waitForPaused(dbg);
await addBreakpoint(dbg, fileName, 2);
await onPaused;
const source = findSource(dbg, fileName);
assertPausedAtSourceAndLine(dbg, source.id, 2);
await assertBreakpoint(dbg, 2);
is(dbg.selectors.getBreakpointCount(), 1, "We have exactly one breakpoint");
await stepIn(dbg);
assertPausedAtSourceAndLine(dbg, source.id, 3);
is(
dbg.selectors.getBreakpointCount(),
1,
"We still have only one breakpoint after step-in"
);
// Remove the breakpoint before resuming in order to prevent hitting the breakpoint
// again during test closing.
await removeBreakpoint(dbg, source.id, 2);
await resume(dbg);
// Give the interval a chance to re-execute
await new Promise(r => setTimeout(r, 1000));
is(dbg.selectors.getBreakpointCount(), 0, "There are no more breakpoints");
assertNotPaused(dbg);
});
await SpecialPowers.spawn(gBrowser.selectedBrowser, [], () => {
content.clearInterval(content.interval);
});
await ToolboxTask.destroy();
});


@ -73,6 +73,7 @@
flex: 1 1 auto;
height: auto;
overflow: auto;
min-width: max-content;
}
#http-custom-input-and-map-form :is(.http-custom-input, .map-add-new-inputs) {
@ -138,10 +139,6 @@
background-color: var(--theme-selection-background);
}
.tabpanel-summary-container .http-custom-section {
border: 1px solid var(--grey-25);
}
.network-monitor .http-custom-request {
display: block;
padding: 0;


@ -126,7 +126,7 @@ class ContentProcessStartup {
);
break;
case "debug:remove-session-data-entry":
this.addSessionDataEntry(
this.removeSessionDataEntry(
msg.data.watcherActorID,
msg.data.type,
msg.data.entries


@ -400,7 +400,7 @@ already_AddRefed<BrowsingContext> BrowsingContext::CreateDetached(
fields.mDefaultLoadFlags =
inherit ? inherit->GetDefaultLoadFlags() : nsIRequest::LOAD_NORMAL;
fields.mOrientationLock = mozilla::hal::eScreenOrientation_None;
fields.mOrientationLock = mozilla::hal::ScreenOrientation::None;
fields.mUseGlobalHistory = inherit ? inherit->GetUseGlobalHistory() : false;


@ -12,6 +12,7 @@
#include "mozilla/dom/ContentParent.h"
#include "mozilla/dom/ContentChild.h"
#include "nsReadableUtils.h"
#include "mozilla/HalIPCUtils.h"
namespace mozilla {
namespace dom {


@ -4886,7 +4886,7 @@ void nsDocShell::ActivenessMaybeChanged() {
// Update orientation when the top-level browsing context becomes active.
if (isActive && mBrowsingContext->IsTop()) {
// We only care about the top-level browsing context.
uint16_t orientation = mBrowsingContext->GetOrientationLock();
auto orientation = mBrowsingContext->GetOrientationLock();
ScreenOrientation::UpdateActiveOrientationLock(orientation);
}
@ -9459,13 +9459,13 @@ nsresult nsDocShell::InternalLoad(nsDocShellLoadState* aLoadState,
// lock the orientation of the document to the document's default
// orientation. We don't explicitly check for a top-level browsing context
// here because orientation is only set on top-level browsing contexts.
if (mBrowsingContext->GetOrientationLock() != hal::eScreenOrientation_None) {
if (mBrowsingContext->GetOrientationLock() != hal::ScreenOrientation::None) {
MOZ_ASSERT(mBrowsingContext->IsTop());
MOZ_ALWAYS_SUCCEEDS(
mBrowsingContext->SetOrientationLock(hal::eScreenOrientation_None));
mBrowsingContext->SetOrientationLock(hal::ScreenOrientation::None));
if (mBrowsingContext->IsActive()) {
ScreenOrientation::UpdateActiveOrientationLock(
hal::eScreenOrientation_None);
hal::ScreenOrientation::None);
}
}


@ -141,7 +141,7 @@ bool AppendIndexedPropertyNames(JSContext* aCx, BrowsingContext* aContext,
}
for (int32_t i = 0; i < length; ++i) {
aIndexedProps.infallibleAppend(INT_TO_JSID(i));
aIndexedProps.infallibleAppend(JS::PropertyKey::Int(i));
}
return true;
}


@ -36,13 +36,13 @@ NS_IMPL_RELEASE_INHERITED(ScreenOrientation, DOMEventTargetHelper)
static OrientationType InternalOrientationToType(
hal::ScreenOrientation aOrientation) {
switch (aOrientation) {
case hal::eScreenOrientation_PortraitPrimary:
case hal::ScreenOrientation::PortraitPrimary:
return OrientationType::Portrait_primary;
case hal::eScreenOrientation_PortraitSecondary:
case hal::ScreenOrientation::PortraitSecondary:
return OrientationType::Portrait_secondary;
case hal::eScreenOrientation_LandscapePrimary:
case hal::ScreenOrientation::LandscapePrimary:
return OrientationType::Landscape_primary;
case hal::eScreenOrientation_LandscapeSecondary:
case hal::ScreenOrientation::LandscapeSecondary:
return OrientationType::Landscape_secondary;
default:
MOZ_CRASH("Bad aOrientation value");
@ -53,13 +53,13 @@ static hal::ScreenOrientation OrientationTypeToInternal(
OrientationType aOrientation) {
switch (aOrientation) {
case OrientationType::Portrait_primary:
return hal::eScreenOrientation_PortraitPrimary;
return hal::ScreenOrientation::PortraitPrimary;
case OrientationType::Portrait_secondary:
return hal::eScreenOrientation_PortraitSecondary;
return hal::ScreenOrientation::PortraitSecondary;
case OrientationType::Landscape_primary:
return hal::eScreenOrientation_LandscapePrimary;
return hal::ScreenOrientation::LandscapePrimary;
case OrientationType::Landscape_secondary:
return hal::eScreenOrientation_LandscapeSecondary;
return hal::ScreenOrientation::LandscapeSecondary;
default:
MOZ_CRASH("Bad aOrientation value");
}
@ -154,7 +154,7 @@ ScreenOrientation::LockOrientationTask::~LockOrientationTask() = default;
bool ScreenOrientation::LockOrientationTask::OrientationLockContains(
OrientationType aOrientationType) {
return mOrientationLock & OrientationTypeToInternal(aOrientationType);
return bool(mOrientationLock & OrientationTypeToInternal(aOrientationType));
}
NS_IMETHODIMP
@ -179,7 +179,7 @@ ScreenOrientation::LockOrientationTask::Run() {
return NS_OK;
}
if (mOrientationLock == hal::eScreenOrientation_None) {
if (mOrientationLock == hal::ScreenOrientation::None) {
mScreenOrientation->UnlockDeviceOrientation();
mPromise->MaybeResolveWithUndefined();
mDocument->ClearOrientationPendingPromise();
@ -211,7 +211,7 @@ ScreenOrientation::LockOrientationTask::Run() {
BrowsingContext* bc = mDocument->GetBrowsingContext();
if (OrientationLockContains(bc->GetCurrentOrientationType()) ||
(mOrientationLock == hal::eScreenOrientation_Default &&
(mOrientationLock == hal::ScreenOrientation::Default &&
bc->GetCurrentOrientationAngle() == 0)) {
// Orientation lock will not cause an orientation change.
mPromise->MaybeResolveWithUndefined();
@ -223,37 +223,37 @@ ScreenOrientation::LockOrientationTask::Run() {
already_AddRefed<Promise> ScreenOrientation::Lock(
OrientationLockType aOrientation, ErrorResult& aRv) {
hal::ScreenOrientation orientation = hal::eScreenOrientation_None;
hal::ScreenOrientation orientation = hal::ScreenOrientation::None;
switch (aOrientation) {
case OrientationLockType::Any:
orientation = hal::eScreenOrientation_PortraitPrimary |
hal::eScreenOrientation_PortraitSecondary |
hal::eScreenOrientation_LandscapePrimary |
hal::eScreenOrientation_LandscapeSecondary;
orientation = hal::ScreenOrientation::PortraitPrimary |
hal::ScreenOrientation::PortraitSecondary |
hal::ScreenOrientation::LandscapePrimary |
hal::ScreenOrientation::LandscapeSecondary;
break;
case OrientationLockType::Natural:
orientation |= hal::eScreenOrientation_Default;
orientation |= hal::ScreenOrientation::Default;
break;
case OrientationLockType::Landscape:
orientation = hal::eScreenOrientation_LandscapePrimary |
hal::eScreenOrientation_LandscapeSecondary;
orientation = hal::ScreenOrientation::LandscapePrimary |
hal::ScreenOrientation::LandscapeSecondary;
break;
case OrientationLockType::Portrait:
orientation = hal::eScreenOrientation_PortraitPrimary |
hal::eScreenOrientation_PortraitSecondary;
orientation = hal::ScreenOrientation::PortraitPrimary |
hal::ScreenOrientation::PortraitSecondary;
break;
case OrientationLockType::Portrait_primary:
orientation = hal::eScreenOrientation_PortraitPrimary;
orientation = hal::ScreenOrientation::PortraitPrimary;
break;
case OrientationLockType::Portrait_secondary:
orientation = hal::eScreenOrientation_PortraitSecondary;
orientation = hal::ScreenOrientation::PortraitSecondary;
break;
case OrientationLockType::Landscape_primary:
orientation = hal::eScreenOrientation_LandscapePrimary;
orientation = hal::ScreenOrientation::LandscapePrimary;
break;
case OrientationLockType::Landscape_secondary:
orientation = hal::eScreenOrientation_LandscapeSecondary;
orientation = hal::ScreenOrientation::LandscapeSecondary;
break;
default:
NS_WARNING("Unexpected orientation type");
@ -401,7 +401,7 @@ RefPtr<MozPromise<bool, bool, false>> ScreenOrientation::LockDeviceOrientation(
}
void ScreenOrientation::Unlock(ErrorResult& aRv) {
RefPtr<Promise> p = LockInternal(hal::eScreenOrientation_None, aRv);
RefPtr<Promise> p = LockInternal(hal::ScreenOrientation::None, aRv);
}
void ScreenOrientation::UnlockDeviceOrientation() {
@ -517,10 +517,10 @@ void ScreenOrientation::Notify(const hal::ScreenConfiguration& aConfiguration) {
}
hal::ScreenOrientation orientation = aConfiguration.orientation();
if (orientation != hal::eScreenOrientation_PortraitPrimary &&
orientation != hal::eScreenOrientation_PortraitSecondary &&
orientation != hal::eScreenOrientation_LandscapePrimary &&
orientation != hal::eScreenOrientation_LandscapeSecondary) {
if (orientation != hal::ScreenOrientation::PortraitPrimary &&
orientation != hal::ScreenOrientation::PortraitSecondary &&
orientation != hal::ScreenOrientation::LandscapePrimary &&
orientation != hal::ScreenOrientation::LandscapeSecondary) {
// The platform may notify of some other values from
// an orientation lock, but we only care about real
// changes to screen orientation which result in one of
@ -559,7 +559,7 @@ void ScreenOrientation::Notify(const hal::ScreenConfiguration& aConfiguration) {
void ScreenOrientation::UpdateActiveOrientationLock(
hal::ScreenOrientation aOrientation) {
if (aOrientation == hal::eScreenOrientation_None) {
if (aOrientation == hal::ScreenOrientation::None) {
hal::UnlockScreenOrientation();
} else {
hal::LockScreenOrientation(aOrientation)


@ -206,8 +206,8 @@ bool WindowNamedPropertiesHandler::ownPropNames(
if (!doc || !doc->IsHTMLOrXHTML()) {
// Define to @@toStringTag on this object to keep Object.prototype.toString
// backwards compatible.
JS::Rooted<jsid> toStringTagId(aCx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
aCx, JS::SymbolCode::toStringTag)));
JS::Rooted<jsid> toStringTagId(
aCx, JS::GetWellKnownSymbolKey(aCx, JS::SymbolCode::toStringTag));
return aProps.append(toStringTagId);
}
@ -221,8 +221,8 @@ bool WindowNamedPropertiesHandler::ownPropNames(
return false;
}
JS::Rooted<jsid> toStringTagId(aCx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
aCx, JS::SymbolCode::toStringTag)));
JS::Rooted<jsid> toStringTagId(
aCx, JS::GetWellKnownSymbolKey(aCx, JS::SymbolCode::toStringTag));
if (!docProps.append(toStringTagId)) {
return false;
}
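
For orientation (not part of the diff): the repeated pattern above replaces the two-step SYMBOL_TO_JSID(JS::GetWellKnownSymbol(cx, ...)) with the JS::GetWellKnownSymbolKey helper that this commit declares in js/public/Id.h further down. A minimal sketch of the new call shape:

    #include "js/Id.h"      // JS::PropertyKey, JS::GetWellKnownSymbolKey
    #include "js/Symbol.h"  // JS::SymbolCode

    // Returns the @@toStringTag property key directly, with no intermediate
    // JS::Symbol* -> jsid conversion step.
    static JS::PropertyKey ToStringTagKey(JSContext* cx) {
      return JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::toStringTag);
    }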


@ -1081,7 +1081,7 @@ bool nsOuterWindowProxy::AppendIndexedPropertyNames(
return false;
}
for (int32_t i = 0; i < int32_t(length); ++i) {
if (!props.append(INT_TO_JSID(i))) {
if (!props.append(JS::PropertyKey::Int(i))) {
return false;
}
}


@ -810,12 +810,11 @@ static bool DefineConstructor(JSContext* cx, JS::Handle<JSObject*> global,
if (!nameStr) {
return false;
}
JS::Rooted<JS::PropertyKey> nameKey(cx,
JS::PropertyKey::fromNonIntAtom(nameStr));
JS::Rooted<JS::PropertyKey> nameKey(cx, JS::PropertyKey::NonIntAtom(nameStr));
return DefineConstructor(cx, global, nameKey, constructor);
}
// name must be an atom (or JS::PropertyKey::fromNonIntAtom will assert).
// name must be an atom (or JS::PropertyKey::NonIntAtom will assert).
static JSObject* CreateInterfaceObject(
JSContext* cx, JS::Handle<JSObject*> global,
JS::Handle<JSObject*> constructorProto, const JSClass* constructorClass,
@ -848,8 +847,8 @@ static JSObject* CreateInterfaceObject(
->wantsInterfaceHasInstance) {
if (isChrome ||
StaticPrefs::dom_webidl_crosscontext_hasinstance_enabled()) {
JS::Rooted<jsid> hasInstanceId(cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
cx, JS::SymbolCode::hasInstance)));
JS::Rooted<jsid> hasInstanceId(
cx, JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::hasInstance));
if (!JS_DefineFunctionById(
cx, constructor, hasInstanceId, InterfaceHasInstance, 1,
// Flags match those of Function[Symbol.hasInstance]
@ -906,7 +905,7 @@ static JSObject* CreateInterfaceObject(
return nullptr;
}
JS::Rooted<jsid> nameStr(cx, JS::PropertyKey::fromNonIntAtom(name));
JS::Rooted<jsid> nameStr(cx, JS::PropertyKey::NonIntAtom(name));
if (defineOnGlobal && !DefineConstructor(cx, global, nameStr, constructor)) {
return nullptr;
}
@ -974,8 +973,8 @@ static JSObject* CreateInterfacePrototypeObject(
}
}
JS::Rooted<jsid> unscopableId(cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
cx, JS::SymbolCode::unscopables)));
JS::Rooted<jsid> unscopableId(
cx, JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::unscopables));
// Readonly and non-enumerable to match Array.prototype.
if (!JS_DefinePropertyById(cx, ourProto, unscopableId, unscopableObj,
JSPROP_READONLY)) {
@ -983,8 +982,8 @@ static JSObject* CreateInterfacePrototypeObject(
}
}
JS::Rooted<jsid> toStringTagId(cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
cx, JS::SymbolCode::toStringTag)));
JS::Rooted<jsid> toStringTagId(
cx, JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::toStringTag));
if (!JS_DefinePropertyById(cx, ourProto, toStringTagId, name,
JSPROP_READONLY)) {
return nullptr;
@ -1260,9 +1259,11 @@ static int CompareIdsAtIndices(const void* aElement1, const void* aElement2,
const uint16_t index2 = *static_cast<const uint16_t*>(aElement2);
const PropertyInfo* infos = static_cast<PropertyInfo*>(aClosure);
MOZ_ASSERT(JSID_BITS(infos[index1].Id()) != JSID_BITS(infos[index2].Id()));
uintptr_t rawBits1 = infos[index1].Id().asRawBits();
uintptr_t rawBits2 = infos[index2].Id().asRawBits();
MOZ_ASSERT(rawBits1 != rawBits2);
return JSID_BITS(infos[index1].Id()) < JSID_BITS(infos[index2].Id()) ? -1 : 1;
return rawBits1 < rawBits2 ? -1 : 1;
}
// {JSPropertySpec,JSFunctionSpec} use {JSPropertySpec,JSFunctionSpec}::Name
@ -1466,10 +1467,10 @@ struct IdToIndexComparator {
explicit IdToIndexComparator(const jsid& aId, const PropertyInfo* aInfos)
: mId(aId), mInfos(aInfos) {}
int operator()(const uint16_t aIndex) const {
if (JSID_BITS(mId) == JSID_BITS(mInfos[aIndex].Id())) {
if (mId.asRawBits() == mInfos[aIndex].Id().asRawBits()) {
return 0;
}
return JSID_BITS(mId) < JSID_BITS(mInfos[aIndex].Id()) ? -1 : 1;
return mId.asRawBits() < mInfos[aIndex].Id().asRawBits() ? -1 : 1;
}
};


@ -976,7 +976,7 @@ MOZ_ALWAYS_INLINE bool MaybeWrapValue(JSContext* cx,
return JS_WrapValue(cx, rval);
}
MOZ_ASSERT(rval.isSymbol());
JS_MarkCrossZoneId(cx, SYMBOL_TO_JSID(rval.toSymbol()));
JS_MarkCrossZoneId(cx, JS::PropertyKey::Symbol(rval.toSymbol()));
}
return true;
}


@ -3733,8 +3733,7 @@ class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
name = alias[2:]
symbolJSID = (
"SYMBOL_TO_JSID(JS::GetWellKnownSymbol(aCx, JS::SymbolCode::%s))"
% name
"JS::GetWellKnownSymbolKey(aCx, JS::SymbolCode::%s)" % name
)
prop = "%sId" % name
getSymbolJSID = CGGeneric(
@ -4391,7 +4390,7 @@ def InitUnforgeablePropertiesOnHolder(
fill(
"""
JS::RootedId toPrimitive(aCx,
SYMBOL_TO_JSID(JS::GetWellKnownSymbol(aCx, JS::SymbolCode::toPrimitive)));
JS::GetWellKnownSymbolKey(aCx, JS::SymbolCode::toPrimitive));
if (!JS_DefinePropertyById(aCx, ${holderName}, toPrimitive,
JS::UndefinedHandleValue,
JSPROP_READONLY | JSPROP_PERMANENT)) {
@ -15064,7 +15063,7 @@ class CGDOMJSProxyHandler_ownPropNames(ClassMethod):
uint32_t length = UnwrapProxy(proxy)->Length(${callerType});
MOZ_ASSERT(int32_t(length) >= 0);
for (int32_t i = 0; i < int32_t(length); ++i) {
if (!props.append(INT_TO_JSID(i))) {
if (!props.append(JS::PropertyKey::Int(i))) {
return false;
}
}


@ -203,7 +203,7 @@ struct PropertyInfo {
void SetId(jsid aId) {
static_assert(sizeof(jsid) == sizeof(mIdBits),
"jsid should fit in mIdBits");
mIdBits = JSID_BITS(aId);
mIdBits = aId.asRawBits();
}
MOZ_ALWAYS_INLINE jsid Id() const { return jsid::fromRawBits(mIdBits); }
};


@ -21,7 +21,7 @@ using namespace JS;
namespace mozilla::dom {
jsid s_length_id = JSID_VOID;
jsid s_length_id = JS::PropertyKey::Void();
bool DefineStaticJSVals(JSContext* cx) {
return AtomizeAndPinJSString(cx, s_length_id, "length");


@ -25,7 +25,7 @@ class PinnedStringId {
jsid id;
public:
constexpr PinnedStringId() : id(JSID_VOID) {}
constexpr PinnedStringId() : id(JS::PropertyKey::Void()) {}
bool init(JSContext* cx, const char* string) {
JSString* str = JS_AtomizeAndPinString(cx, string);


@ -191,7 +191,7 @@ bool WebIDLGlobalNameHash::GetNames(JSContext* aCx, JS::Handle<JSObject*> aObj,
(!entry.mEnabled || entry.mEnabled(aCx, aObj))) {
JSString* str =
JS_AtomizeStringN(aCx, sNames + entry.mNameOffset, entry.mNameLength);
if (!str || !aNames.append(JS::PropertyKey::fromNonIntAtom(str))) {
if (!str || !aNames.append(JS::PropertyKey::NonIntAtom(str))) {
return false;
}
}
@ -262,7 +262,7 @@ bool WebIDLGlobalNameHash::NewEnumerateSystemGlobal(
if (!entry.mEnabled || entry.mEnabled(aCx, aObj)) {
JSString* str =
JS_AtomizeStringN(aCx, sNames + entry.mNameOffset, entry.mNameLength);
if (!str || !aProperties.append(JS::PropertyKey::fromNonIntAtom(str))) {
if (!str || !aProperties.append(JS::PropertyKey::NonIntAtom(str))) {
return false;
}
}


@ -28,7 +28,7 @@ using mozilla::TimeDuration from "mozilla/TimeStamp.h";
using class mozilla::TimeStamp from "mozilla/TimeStamp.h";
using mozilla::ScreenRotation from "mozilla/WidgetUtils.h";
using nsCSSPropertyID from "nsCSSPropertyID.h";
using hal::ScreenOrientation from "mozilla/HalScreenConfiguration.h";
using hal::ScreenOrientation from "mozilla/HalIPCUtils.h";
using struct mozilla::layers::TextureInfo from "mozilla/layers/CompositorTypes.h";
using mozilla::CSSPoint from "Units.h";
using mozilla::CSSRect from "Units.h";

hal/HalIPCUtils.h (new file, 23 lines)

@ -0,0 +1,23 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_HalIPCUtils_h
#define mozilla_HalIPCUtils_h
#include "HalScreenConfiguration.h"
#include "ipc/EnumSerializer.h"
namespace IPC {
template <>
struct ParamTraits<mozilla::hal::ScreenOrientation>
: public BitFlagsEnumSerializer<mozilla::hal::ScreenOrientation,
mozilla::hal::kAllScreenOrientationBits> {};
} // namespace IPC
#endif
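
Aside (an assumption about the serializer, not text from the patch): ScreenOrientation is a bit-flags enum, so the IPC layer has to accept any combination of the defined bits rather than a single contiguous range, which is what kAllScreenOrientationBits supplies. A hypothetical sketch of the kind of validity check this enables; the real logic lives in ipc/EnumSerializer.h:

    #include <cstdint>

    // Hypothetical stand-in for a bit-flags validity check on a deserialized value.
    static bool IsLegalScreenOrientationValue(uint32_t aValue) {
      constexpr uint32_t kAllBits = (1u << 5) - 1;  // matches kAllScreenOrientationBits
      return (aValue & ~kAllBits) == 0;             // reject unknown bits from the wire
    }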


@ -8,27 +8,30 @@
#define mozilla_HalScreenConfiguration_h
#include "mozilla/Observer.h"
#include "mozilla/TypedEnumBits.h"
namespace mozilla {
namespace hal {
namespace mozilla::hal {
// Make sure that any change to ScreenOrientation values are also made in
// mobile/android/geckoview/src/main/java/org/mozilla/gecko/GeckoScreenOrientation.java
typedef uint32_t ScreenOrientation;
enum class ScreenOrientation : uint32_t {
None = 0,
PortraitPrimary = 1u << 0,
PortraitSecondary = 1u << 1,
LandscapePrimary = 1u << 2,
LandscapeSecondary = 1u << 3,
// Default will use the natural orientation for the device; it could be
// PortraitPrimary or LandscapePrimary depending on the display resolution
Default = 1u << 4,
};
static const ScreenOrientation eScreenOrientation_None = 0;
static const ScreenOrientation eScreenOrientation_PortraitPrimary = 1u << 0;
static const ScreenOrientation eScreenOrientation_PortraitSecondary = 1u << 1;
static const ScreenOrientation eScreenOrientation_LandscapePrimary = 1u << 2;
static const ScreenOrientation eScreenOrientation_LandscapeSecondary = 1u << 3;
// eScreenOrientation_Default will use the natural orientation for the deivce,
// it could be PortraitPrimary or LandscapePrimary depends on display resolution
static const ScreenOrientation eScreenOrientation_Default = 1u << 4;
constexpr auto kAllScreenOrientationBits = ScreenOrientation((1 << 5) - 1);
MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(ScreenOrientation);
class ScreenConfiguration;
typedef Observer<ScreenConfiguration> ScreenConfigurationObserver;
using ScreenConfigurationObserver = Observer<ScreenConfiguration>;
} // namespace hal
} // namespace mozilla
} // namespace mozilla::hal
#endif // mozilla_HalScreenConfiguration_h
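
For readers following the call-site changes elsewhere in this commit (for example the bool(mOrientationLock & ...) cast added in ScreenOrientation.cpp): once ScreenOrientation becomes an enum class, bitwise results no longer convert to bool implicitly, and the operators themselves come from MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS. A self-contained sketch of the same pattern, using a hypothetical Flags enum with hand-written operators:

    #include <cstdint>

    enum class Flags : uint32_t {
      None = 0,
      A = 1u << 0,
      B = 1u << 1,
    };

    // Hand-written equivalents of what the macro generates.
    constexpr Flags operator|(Flags l, Flags r) {
      return Flags(uint32_t(l) | uint32_t(r));
    }
    constexpr Flags operator&(Flags l, Flags r) {
      return Flags(uint32_t(l) & uint32_t(r));
    }

    static bool Contains(Flags set, Flags bit) {
      // enum class values don't convert to bool implicitly, hence the explicit
      // cast, mirroring the bool(...) added to OrientationLockContains.
      return bool(set & bit);
    }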


@ -16,10 +16,7 @@
using namespace mozilla::dom;
using namespace mozilla::hal;
namespace java = mozilla::java;
namespace mozilla {
namespace hal_impl {
namespace mozilla::hal_impl {
void Vibrate(const nsTArray<uint32_t>& pattern, WindowIdentifier&&) {
// Ignore the WindowIdentifier parameter; it's here only because hal::Vibrate,
@ -102,47 +99,56 @@ void GetCurrentScreenConfiguration(ScreenConfiguration* aScreenConfiguration) {
aScreenConfiguration->angle() = bridge->GetScreenAngle();
}
static bool IsSupportedScreenOrientation(hal::ScreenOrientation aOrientation) {
// The Android backend only supports these orientations.
static constexpr ScreenOrientation kSupportedOrientations[] = {
ScreenOrientation::PortraitPrimary,
ScreenOrientation::PortraitSecondary,
ScreenOrientation::PortraitPrimary | ScreenOrientation::PortraitSecondary,
ScreenOrientation::LandscapePrimary,
ScreenOrientation::LandscapeSecondary,
ScreenOrientation::LandscapePrimary |
ScreenOrientation::LandscapeSecondary,
ScreenOrientation::PortraitPrimary |
ScreenOrientation::PortraitSecondary |
ScreenOrientation::LandscapePrimary |
ScreenOrientation::LandscapeSecondary,
ScreenOrientation::Default,
};
for (auto supportedOrientation : kSupportedOrientations) {
if (aOrientation == supportedOrientation) {
return true;
}
}
return false;
}
RefPtr<MozPromise<bool, bool, false>> LockScreenOrientation(
const hal::ScreenOrientation& aOrientation) {
switch (aOrientation) {
// The Android backend only supports these orientations.
case eScreenOrientation_PortraitPrimary:
case eScreenOrientation_PortraitSecondary:
case eScreenOrientation_PortraitPrimary |
eScreenOrientation_PortraitSecondary:
case eScreenOrientation_LandscapePrimary:
case eScreenOrientation_LandscapeSecondary:
case eScreenOrientation_LandscapePrimary |
eScreenOrientation_LandscapeSecondary:
case eScreenOrientation_PortraitPrimary |
eScreenOrientation_PortraitSecondary |
eScreenOrientation_LandscapePrimary |
eScreenOrientation_LandscapeSecondary:
case eScreenOrientation_Default: {
java::GeckoRuntime::LocalRef runtime = java::GeckoRuntime::GetInstance();
if (runtime != NULL) {
auto result = runtime->LockScreenOrientation(aOrientation);
auto geckoResult = java::GeckoResult::LocalRef(std::move(result));
return geckoResult
? MozPromise<bool, bool, false>::FromGeckoResult(geckoResult)
: MozPromise<bool, bool, false>::CreateAndReject(false,
__func__);
} else {
return MozPromise<bool, bool, false>::CreateAndReject(false, __func__);
}
}
default:
NS_WARNING("Unsupported screen orientation type");
return MozPromise<bool, bool, false>::CreateAndReject(false, __func__);
using LockPromise = MozPromise<bool, bool, false>;
if (!IsSupportedScreenOrientation(aOrientation)) {
NS_WARNING("Unsupported screen orientation type");
return LockPromise::CreateAndReject(false, __func__);
}
java::GeckoRuntime::LocalRef runtime = java::GeckoRuntime::GetInstance();
if (!runtime) {
return LockPromise::CreateAndReject(false, __func__);
}
auto result = runtime->LockScreenOrientation(uint32_t(aOrientation));
auto geckoResult = java::GeckoResult::LocalRef(std::move(result));
if (!geckoResult) {
return LockPromise::CreateAndReject(false, __func__);
}
return LockPromise::FromGeckoResult(geckoResult);
}
void UnlockScreenOrientation() {
java::GeckoRuntime::LocalRef runtime = java::GeckoRuntime::GetInstance();
if (runtime != NULL) {
if (runtime) {
runtime->UnlockScreenOrientation();
}
}
} // namespace hal_impl
} // namespace mozilla
} // namespace mozilla::hal_impl


@ -20,8 +20,8 @@ inline void GetCurrentScreenConfiguration(
aScreenConfiguration->orientation() =
aScreenConfiguration->rect().Width() >=
aScreenConfiguration->rect().Height()
? hal::eScreenOrientation_LandscapePrimary
: hal::eScreenOrientation_PortraitPrimary;
? hal::ScreenOrientation::LandscapePrimary
: hal::ScreenOrientation::PortraitPrimary;
}
} // namespace fallback


@ -11,6 +11,7 @@ EXPORTS.mozilla += [
"Hal.h",
"HalBatteryInformation.h",
"HalImpl.h",
"HalIPCUtils.h",
"HalNetworkInformation.h",
"HalSandbox.h",
"HalScreenConfiguration.h",


@ -10,7 +10,7 @@ include protocol PBrowser;
include "mozilla/dom/ReferrerInfoUtils.h";
include "mozilla/GfxMessageUtils.h";
using hal::ScreenOrientation from "mozilla/HalScreenConfiguration.h";
using hal::ScreenOrientation from "mozilla/HalIPCUtils.h";
using mozilla::hal::SensorType from "mozilla/HalSensor.h";
using mozilla::hal::WakeLockControl from "mozilla/HalTypes.h";
using mozilla::hal::ProcessPriority from "mozilla/HalTypes.h";


@ -21,7 +21,7 @@ class JS_PUBLIC_API JSString;
namespace JS {
class JS_PUBLIC_API BigInt;
struct JS_PUBLIC_API PropertyKey;
class JS_PUBLIC_API PropertyKey;
class JS_PUBLIC_API Symbol;
class JS_PUBLIC_API Value;
} // namespace JS


@ -7,20 +7,24 @@
#ifndef js_Id_h
#define js_Id_h
// [SMDOC] Property Key / JSID
// [SMDOC] PropertyKey / jsid
//
// A jsid is an identifier for a property or method of an object which is
// either a 31-bit unsigned integer, interned string or symbol.
// A PropertyKey is an identifier for a property of an object which is either a
// 31-bit unsigned integer, interned string or symbol.
//
// Also, there is an additional jsid value, JSID_VOID, which does not occur in
// JS scripts but may be used to indicate the absence of a valid jsid. A void
// jsid is not a valid id and only arises as an exceptional API return value,
// such as in JS_NextProperty. Embeddings must not pass JSID_VOID into JSAPI
// entry points expecting a jsid and do not need to handle JSID_VOID in hooks
// receiving a jsid except when explicitly noted in the API contract.
// Also, there is an additional PropertyKey value, PropertyKey::Void(), which
// does not occur in JS scripts but may be used to indicate the absence of a
// valid key. A void PropertyKey is not a valid key and only arises as an
// exceptional API return value. Embeddings must not pass a void PropertyKey
// into JSAPI entry points expecting a PropertyKey and do not need to handle
// void keys in hooks receiving a PropertyKey except when explicitly noted in
// the API contract.
//
// A jsid is not implicitly convertible to or from a Value; JS_ValueToId or
// JS_IdToValue must be used instead.
// A PropertyKey is not implicitly convertible to or from a Value; JS_ValueToId
// or JS_IdToValue must be used instead.
//
// jsid is an alias for JS::PropertyKey. New code should use PropertyKey instead
// of jsid.
#include "mozilla/Maybe.h"
@ -33,76 +37,82 @@
#include "js/TracingAPI.h"
#include "js/TypeDecls.h"
// All jsids with the low bit set are integer ids. This means the other type
// tags must all be even.
#define JSID_TYPE_INT_BIT 0x1
// Use 0 for JSID_TYPE_STRING to avoid a bitwise op for atom <-> id conversions.
#define JSID_TYPE_STRING 0x0
#define JSID_TYPE_VOID 0x2
#define JSID_TYPE_SYMBOL 0x4
// (0x6 is unused)
#define JSID_TYPE_MASK 0x7
namespace JS {
enum class SymbolCode : uint32_t;
struct PropertyKey {
size_t asBits;
class PropertyKey {
uintptr_t asBits_;
constexpr PropertyKey() : asBits(JSID_TYPE_VOID) {}
public:
// All keys with the low bit set are integer keys. This means the other type
// tags must all be even. These constants are public only for the JITs.
static constexpr uintptr_t IntTagBit = 0x1;
// Use 0 for StringTypeTag to avoid a bitwise op for atom <-> id conversions.
static constexpr uintptr_t StringTypeTag = 0x0;
static constexpr uintptr_t VoidTypeTag = 0x2;
static constexpr uintptr_t SymbolTypeTag = 0x4;
// (0x6 is unused)
static constexpr uintptr_t TypeMask = 0x7;
static constexpr MOZ_ALWAYS_INLINE PropertyKey fromRawBits(size_t bits) {
static constexpr uint32_t IntMin = 0;
static constexpr uint32_t IntMax = INT32_MAX;
constexpr PropertyKey() : asBits_(VoidTypeTag) {}
static constexpr MOZ_ALWAYS_INLINE PropertyKey fromRawBits(uintptr_t bits) {
PropertyKey id;
id.asBits = bits;
id.asBits_ = bits;
return id;
}
bool operator==(const PropertyKey& rhs) const { return asBits == rhs.asBits; }
bool operator!=(const PropertyKey& rhs) const { return asBits != rhs.asBits; }
bool operator==(const PropertyKey& rhs) const {
return asBits_ == rhs.asBits_;
}
bool operator!=(const PropertyKey& rhs) const {
return asBits_ != rhs.asBits_;
}
MOZ_ALWAYS_INLINE bool isVoid() const {
MOZ_ASSERT_IF((asBits & JSID_TYPE_MASK) == JSID_TYPE_VOID,
asBits == JSID_TYPE_VOID);
return asBits == JSID_TYPE_VOID;
MOZ_ASSERT_IF((asBits_ & TypeMask) == VoidTypeTag, asBits_ == VoidTypeTag);
return asBits_ == VoidTypeTag;
}
MOZ_ALWAYS_INLINE bool isInt() const {
return !!(asBits & JSID_TYPE_INT_BIT);
}
MOZ_ALWAYS_INLINE bool isInt() const { return !!(asBits_ & IntTagBit); }
MOZ_ALWAYS_INLINE bool isString() const {
return (asBits & JSID_TYPE_MASK) == JSID_TYPE_STRING;
return (asBits_ & TypeMask) == StringTypeTag;
}
MOZ_ALWAYS_INLINE bool isSymbol() const {
return (asBits & JSID_TYPE_MASK) == JSID_TYPE_SYMBOL;
return (asBits_ & TypeMask) == SymbolTypeTag;
}
MOZ_ALWAYS_INLINE bool isGCThing() const { return isString() || isSymbol(); }
constexpr uintptr_t asRawBits() const { return asBits_; }
MOZ_ALWAYS_INLINE int32_t toInt() const {
MOZ_ASSERT(isInt());
uint32_t bits = static_cast<uint32_t>(asBits) >> 1;
uint32_t bits = static_cast<uint32_t>(asBits_) >> 1;
return static_cast<int32_t>(bits);
}
MOZ_ALWAYS_INLINE JSString* toString() const {
MOZ_ASSERT(isString());
// Use XOR instead of `& ~JSID_TYPE_MASK` because small immediates can be
// Use XOR instead of `& ~TypeMask` because small immediates can be
// encoded more efficiently on some platforms.
return reinterpret_cast<JSString*>(asBits ^ JSID_TYPE_STRING);
return reinterpret_cast<JSString*>(asBits_ ^ StringTypeTag);
}
MOZ_ALWAYS_INLINE JS::Symbol* toSymbol() const {
MOZ_ASSERT(isSymbol());
return reinterpret_cast<JS::Symbol*>(asBits ^ JSID_TYPE_SYMBOL);
return reinterpret_cast<JS::Symbol*>(asBits_ ^ SymbolTypeTag);
}
js::gc::Cell* toGCThing() const {
MOZ_ASSERT(isGCThing());
return reinterpret_cast<js::gc::Cell*>(asBits & ~(size_t)JSID_TYPE_MASK);
return reinterpret_cast<js::gc::Cell*>(asBits_ & ~TypeMask);
}
GCCellPtr toGCCellPtr() const {
@ -118,20 +128,32 @@ struct PropertyKey {
bool isWellKnownSymbol(JS::SymbolCode code) const;
// This API can be used by embedders to convert pinned (aka interned) strings,
// as created by JS_AtomizeAndPinString, into PropertyKeys. This means the
// string does not have to be explicitly rooted.
//
// Only use this API when absolutely necessary, otherwise use JS_StringToId.
static PropertyKey fromPinnedString(JSString* str);
// A void PropertyKey. This is equivalent to a PropertyKey created by the
// default constructor.
static constexpr PropertyKey Void() { return PropertyKey(); }
static constexpr bool fitsInInt(int32_t i) { return i >= 0; }
static constexpr PropertyKey Int(int32_t i) {
MOZ_ASSERT(fitsInInt(i));
uint32_t bits = (static_cast<uint32_t>(i) << 1) | IntTagBit;
return PropertyKey::fromRawBits(bits);
}
static PropertyKey Symbol(JS::Symbol* sym) {
MOZ_ASSERT(sym != nullptr);
MOZ_ASSERT((uintptr_t(sym) & TypeMask) == 0);
MOZ_ASSERT(!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(sym)));
return PropertyKey::fromRawBits(uintptr_t(sym) | SymbolTypeTag);
}
// Must not be used on atoms that are representable as integer PropertyKey.
// Prefer NameToId or AtomToId over this function:
//
// A PropertyName is an atom that does not contain an integer in the range
// [0, UINT32_MAX]. However, PropertyKey can only hold an integer in the range
// [0, JSID_INT_MAX] (where JSID_INT_MAX == 2^31-1). Thus, for the range of
// integers (JSID_INT_MAX, UINT32_MAX], to represent as a 'id', it must be
// [0, IntMax] (where IntMax == 2^31-1). Thus, for the range of integers
// (IntMax, UINT32_MAX], to represent as a 'id', it must be
// the case id.isString() and id.toString()->isIndex(). In most
// cases when creating a PropertyKey, code does not have to care about
// this corner case because:
@ -146,19 +168,26 @@ struct PropertyKey {
// Thus, it is only the rare third case which needs this function, which
// handles any JSAtom* that is known not to be representable with an int
// PropertyKey.
static PropertyKey fromNonIntAtom(JSAtom* atom) {
MOZ_ASSERT((size_t(atom) & JSID_TYPE_MASK) == 0);
static PropertyKey NonIntAtom(JSAtom* atom) {
MOZ_ASSERT((uintptr_t(atom) & TypeMask) == 0);
MOZ_ASSERT(PropertyKey::isNonIntAtom(atom));
return PropertyKey::fromRawBits(size_t(atom) | JSID_TYPE_STRING);
return PropertyKey::fromRawBits(uintptr_t(atom) | StringTypeTag);
}
// The JSAtom/JSString type exposed to embedders is opaque.
static PropertyKey fromNonIntAtom(JSString* str) {
MOZ_ASSERT((size_t(str) & JSID_TYPE_MASK) == 0);
static PropertyKey NonIntAtom(JSString* str) {
MOZ_ASSERT((uintptr_t(str) & TypeMask) == 0);
MOZ_ASSERT(PropertyKey::isNonIntAtom(str));
return PropertyKey::fromRawBits(size_t(str) | JSID_TYPE_STRING);
return PropertyKey::fromRawBits(uintptr_t(str) | StringTypeTag);
}
// This API can be used by embedders to convert pinned (aka interned) strings,
// as created by JS_AtomizeAndPinString, into PropertyKeys. This means the
// string does not have to be explicitly rooted.
//
// Only use this API when absolutely necessary, otherwise use JS_StringToId.
static PropertyKey fromPinnedString(JSString* str);
// Internal API!
// All string PropertyKeys are actually atomized.
MOZ_ALWAYS_INLINE bool isAtom() const { return isString(); }
@ -184,36 +213,11 @@ struct PropertyKey {
using jsid = JS::PropertyKey;
#define JSID_BITS(id) (id.asBits)
#define JSID_INT_MIN 0
#define JSID_INT_MAX INT32_MAX
static MOZ_ALWAYS_INLINE bool INT_FITS_IN_JSID(int32_t i) { return i >= 0; }
static MOZ_ALWAYS_INLINE jsid INT_TO_JSID(int32_t i) {
jsid id;
MOZ_ASSERT(INT_FITS_IN_JSID(i));
uint32_t bits = (static_cast<uint32_t>(i) << 1) | JSID_TYPE_INT_BIT;
JSID_BITS(id) = static_cast<size_t>(bits);
return id;
}
static MOZ_ALWAYS_INLINE jsid SYMBOL_TO_JSID(JS::Symbol* sym) {
jsid id;
MOZ_ASSERT(sym != nullptr);
MOZ_ASSERT((size_t(sym) & JSID_TYPE_MASK) == 0);
MOZ_ASSERT(!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(sym)));
JSID_BITS(id) = (size_t(sym) | JSID_TYPE_SYMBOL);
return id;
}
constexpr const jsid JSID_VOID;
extern JS_PUBLIC_DATA const JS::HandleId JSID_VOIDHANDLE;
namespace JS {
// Handle<PropertyKey> version of PropertyKey::Void().
extern JS_PUBLIC_DATA const JS::HandleId VoidHandlePropertyKey;
template <>
struct GCPolicy<jsid> {
static void trace(JSTracer* trc, jsid* idp, const char* name) {
@ -242,6 +246,15 @@ MOZ_ALWAYS_INLINE void AssertIdIsNotGray(jsid id) {
}
#endif
/**
* Get one of the well-known symbols defined by ES6 as PropertyKey. This is
* equivalent to calling JS::GetWellKnownSymbol and then creating a PropertyKey.
*
* `which` must be in the range [0, WellKnownSymbolLimit).
*/
extern JS_PUBLIC_API PropertyKey GetWellKnownSymbolKey(JSContext* cx,
SymbolCode which);
} // namespace JS
namespace js {
@ -319,6 +332,8 @@ class WrappedPtrOperations<JS::PropertyKey, Wrapper> {
return id().isWellKnownSymbol(code);
}
uintptr_t asRawBits() const { return id().asRawBits(); }
// Internal API
bool isAtom() const { return id().isAtom(); }
bool isAtom(JSAtom* atom) const { return id().isAtom(atom); }
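
To summarize the renames that the rest of this commit applies mechanically (a sketch, not code from the patch): the old free macros become static members of JS::PropertyKey, and raw-bits access moves from the JSID_BITS macro to asRawBits():

    #include "js/Id.h"  // JS::PropertyKey (aka jsid)

    static void PropertyKeyExamples(JS::Symbol* sym) {
      // Formerly INT_TO_JSID(3), SYMBOL_TO_JSID(sym), JSID_VOID and JSID_BITS(id).
      JS::PropertyKey intId  = JS::PropertyKey::Int(3);
      JS::PropertyKey symId  = JS::PropertyKey::Symbol(sym);
      JS::PropertyKey voidId = JS::PropertyKey::Void();
      uintptr_t bits = intId.asRawBits();
      (void)symId;
      (void)voidId;
      (void)bits;
    }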


@ -42,7 +42,7 @@ class JS_PUBLIC_API TempAllocPolicy;
namespace JS {
struct JS_PUBLIC_API PropertyKey;
class JS_PUBLIC_API PropertyKey;
typedef unsigned char Latin1Char;


@ -27,7 +27,7 @@ class JS_PUBLIC_API JSString;
namespace JS {
class JS_PUBLIC_API BigInt;
struct JS_PUBLIC_API PropertyKey;
class JS_PUBLIC_API PropertyKey;
class JS_PUBLIC_API Value;
} // namespace JS


@ -894,7 +894,7 @@ static inline bool ObjectMayHaveExtraIndexedOwnProperties(JSObject* obj) {
}
return ClassMayResolveId(*obj->runtimeFromAnyThread()->commonNames,
obj->getClass(), INT_TO_JSID(0), obj);
obj->getClass(), PropertyKey::Int(0), obj);
}
/*
@ -1018,7 +1018,7 @@ static MOZ_ALWAYS_INLINE bool IsArraySpecies(JSContext* cx,
return true;
}
jsid speciesId = SYMBOL_TO_JSID(cx->wellKnownSymbols().species);
jsid speciesId = PropertyKey::Symbol(cx->wellKnownSymbols().species);
JSFunction* getter;
if (!GetGetterPure(cx, &ctor.toObject(), speciesId, &getter)) {
return false;
@ -1529,7 +1529,7 @@ static DenseElementResult ArrayReverseDenseKernel(JSContext* cx,
}
obj->setDenseElementHole(index);
return SuppressDeletedProperty(cx, obj, INT_TO_JSID(index));
return SuppressDeletedProperty(cx, obj, PropertyKey::Int(index));
};
RootedValue origlo(cx), orighi(cx);
@ -2730,7 +2730,7 @@ static bool CopyArrayElements(JSContext* cx, HandleObject obj, uint64_t begin,
// Use dense storage for new indexed properties where possible.
{
uint32_t index = 0;
uint32_t limit = std::min<uint32_t>(count, JSID_INT_MAX);
uint32_t limit = std::min<uint32_t>(count, PropertyKey::IntMax);
for (; index < limit; index++) {
bool hole;
if (!CheckForInterrupt(cx) ||
@ -4148,8 +4148,7 @@ static bool array_proto_finish(JSContext* cx, JS::HandleObject ctor,
}
#endif
RootedId id(cx, SYMBOL_TO_JSID(
cx->wellKnownSymbols().get(JS::SymbolCode::unscopables)));
RootedId id(cx, PropertyKey::Symbol(cx->wellKnownSymbols().unscopables));
value.setObject(*unscopables);
return DefineDataProperty(cx, proto, id, value, JSPROP_READONLY);
}
@ -4348,8 +4347,8 @@ void js::ArraySpeciesLookup::initialize(JSContext* cx) {
}
// Look up the '@@species' value on Array
Maybe<PropertyInfo> speciesProp =
arrayCtor->lookup(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().species));
Maybe<PropertyInfo> speciesProp = arrayCtor->lookup(
cx, PropertyKey::Symbol(cx->wellKnownSymbols().species));
if (speciesProp.isNothing() || !arrayCtor->hasGetter(*speciesProp)) {
return;
}


@ -656,7 +656,7 @@ static bool JA(JSContext* cx, HandleObject obj, StringifyContext* scx) {
MOZ_ASSERT(obj->is<ArrayObject>());
MOZ_ASSERT(obj->is<NativeObject>());
RootedNativeObject nativeObj(cx, &obj->as<NativeObject>());
if (i <= JSID_INT_MAX) {
if (i <= PropertyKey::IntMax) {
MOZ_ASSERT(
nativeObj->containsDenseElement(i) != nativeObj->isIndexed(),
"the array must either be small enough to remain "
@ -1199,7 +1199,7 @@ bool BuildImmutableProperty(JSContext* cx, HandleValue value, HandleId name,
// Step 1.b.iv
for (uint32_t i = 0; i < len; i++) {
// Step 1.b.iv.1
childName.set(INT_TO_JSID(i));
childName.set(PropertyKey::Int(i));
// Step 1.b.iv.2
if (!GetProperty(cx, arr, value, childName, &childValue)) {


@ -497,8 +497,7 @@ const JSPropertySpec MapObject::staticProperties[] = {
// 23.1.3.12 Map.prototype[@@iterator]()
// The initial value of the @@iterator property is the same function object
// as the initial value of the "entries" property.
RootedId iteratorId(
cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(cx, JS::SymbolCode::iterator)));
RootedId iteratorId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
return NativeDefineDataProperty(cx, nativeProto, iteratorId, entriesFn, 0);
}
@ -1297,8 +1296,7 @@ const JSPropertySpec SetObject::staticProperties[] = {
// 23.2.3.11 Set.prototype[@@iterator]()
// See above.
RootedId iteratorId(
cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(cx, JS::SymbolCode::iterator)));
RootedId iteratorId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
return NativeDefineDataProperty(cx, nativeProto, iteratorId, valuesFn, 0);
}


@ -739,7 +739,8 @@ bool ModuleNamespaceObject::ProxyHandler::ownPropertyKeys(
props.infallibleAppend(AtomToId(&names[i].toString()->asAtom()));
}
props.infallibleAppend(SYMBOL_TO_JSID(cx->wellKnownSymbols().toStringTag));
props.infallibleAppend(
PropertyKey::Symbol(cx->wellKnownSymbols().toStringTag));
return true;
}


@ -1489,8 +1489,9 @@ static bool TryEnumerableOwnPropertiesNative(JSContext* cx, HandleObject obj,
JSString* str;
if (kind != EnumerableOwnPropertiesKind::Values) {
static_assert(NativeObject::MAX_DENSE_ELEMENTS_COUNT <= JSID_INT_MAX,
"dense elements don't exceed JSID_INT_MAX");
static_assert(
NativeObject::MAX_DENSE_ELEMENTS_COUNT <= PropertyKey::IntMax,
"dense elements don't exceed PropertyKey::IntMax");
str = Int32ToString<CanGC>(cx, i);
if (!str) {
return false;
@ -1533,8 +1534,9 @@ static bool TryEnumerableOwnPropertiesNative(JSContext* cx, HandleObject obj,
for (uint32_t i = 0; i < len; i++) {
JSString* str;
if (kind != EnumerableOwnPropertiesKind::Values) {
static_assert(NativeObject::MAX_DENSE_ELEMENTS_COUNT <= JSID_INT_MAX,
"dense elements don't exceed JSID_INT_MAX");
static_assert(
NativeObject::MAX_DENSE_ELEMENTS_COUNT <= PropertyKey::IntMax,
"dense elements don't exceed PropertyKey::IntMax");
str = Int32ToString<CanGC>(cx, i);
if (!str) {
return false;
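
One detail worth spelling out (not from the patch itself): integer property keys only cover [0, PropertyKey::IntMax], i.e. [0, 2^31 - 1], which is what these static_asserts about MAX_DENSE_ELEMENTS_COUNT rely on; larger indices must be expressed as atomized string keys. A minimal sketch of the fits-in-int check, assuming a uint64_t index:

    #include <cstdint>
    #include "js/Id.h"  // JS::PropertyKey

    // True when an array index can be represented as an integer PropertyKey
    // (PropertyKey::IntMax == INT32_MAX); larger indices need a string key.
    static bool FitsInIntKey(uint64_t index) {
      return index <= JS::PropertyKey::IntMax;
    }

For int32_t inputs the in-tree helper is PropertyKey::fitsInInt, as declared in the js/public/Id.h hunk above.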


@ -469,7 +469,7 @@ bool js::IsRegExp(JSContext* cx, HandleValue value, bool* result) {
/* Steps 2-3. */
RootedValue isRegExp(cx);
RootedId matchId(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().match));
RootedId matchId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().match));
if (!GetProperty(cx, obj, obj, matchId, &isRegExp)) {
return false;
}
@ -1988,7 +1988,7 @@ bool js::RegExpPrototypeOptimizableRaw(JSContext* cx, JSObject* proto) {
// those values should be tested in selfhosted JS.
bool has = false;
if (!HasOwnDataPropertyPure(
cx, proto, SYMBOL_TO_JSID(cx->wellKnownSymbols().match), &has)) {
cx, proto, PropertyKey::Symbol(cx->wellKnownSymbols().match), &has)) {
return false;
}
if (!has) {
@ -1996,7 +1996,8 @@ bool js::RegExpPrototypeOptimizableRaw(JSContext* cx, JSObject* proto) {
}
if (!HasOwnDataPropertyPure(
cx, proto, SYMBOL_TO_JSID(cx->wellKnownSymbols().search), &has)) {
cx, proto, PropertyKey::Symbol(cx->wellKnownSymbols().search),
&has)) {
return false;
}
if (!has) {


@ -4108,7 +4108,7 @@ static bool ReadGeckoProfilingStack(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
idx = INT_TO_JSID(inlineFrameNo);
idx = PropertyKey::Int(inlineFrameNo);
if (!JS_DefinePropertyById(cx, inlineStack, idx, inlineFrameInfo, 0)) {
return false;
}
@ -4117,7 +4117,7 @@ static bool ReadGeckoProfilingStack(JSContext* cx, unsigned argc, Value* vp) {
}
// Push inline array into main array.
idx = INT_TO_JSID(physicalFrameNo);
idx = PropertyKey::Int(physicalFrameNo);
if (!JS_DefinePropertyById(cx, stack, idx, inlineStack, 0)) {
return false;
}
@ -4210,7 +4210,7 @@ JSObject* ShellAllocationMetadataBuilder::build(
RootedValue callee(cx);
for (NonBuiltinScriptFrameIter iter(cx); !iter.done(); ++iter) {
if (iter.isFunctionFrame() && iter.compartment() == cx->compartment()) {
id = INT_TO_JSID(stackIndex);
id = PropertyKey::Int(stackIndex);
RootedObject callee(cx, iter.callee(cx));
if (!JS_DefinePropertyById(cx, stack, id, callee, JSPROP_ENUMERATE)) {
oomUnsafe.crash("ShellAllocationMetadataBuilder::build");


@ -1871,8 +1871,8 @@ static bool DefineToStringTag(JSContext* cx, HandleObject obj,
return false;
}
RootedId toStringTagId(cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(
cx, JS::SymbolCode::toStringTag)));
RootedId toStringTagId(
cx, JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::toStringTag));
return JS_DefinePropertyById(cx, obj, toStringTagId, toStringTagStr,
JSPROP_READONLY);
}


@ -1670,7 +1670,7 @@ DebuggerArguments* DebuggerArguments::create(JSContext* cx, HandleObject proto,
if (!getobj) {
return nullptr;
}
id = INT_TO_JSID(i);
id = PropertyKey::Int(i);
if (!NativeDefineAccessorProperty(cx, obj, id, getobj, nullptr,
JSPROP_ENUMERATE)) {
return nullptr;


@ -1664,7 +1664,7 @@ bool DebuggerScript::CallData::getAllOffsets() {
RootedObject offsets(cx);
RootedValue offsetsv(cx);
RootedId id(cx, INT_TO_JSID(lineno));
RootedId id(cx, PropertyKey::Int(lineno));
bool found;
if (!HasOwnProperty(cx, result, id, &found)) {


@ -18,7 +18,7 @@ struct IdValuePair {
JS::Value value;
jsid id;
IdValuePair() : value(JS::UndefinedValue()), id(JSID_VOID) {}
IdValuePair() : value(JS::UndefinedValue()), id(JS::PropertyKey::Void()) {}
explicit IdValuePair(jsid idArg) : value(JS::UndefinedValue()), id(idArg) {}
IdValuePair(jsid idArg, const Value& valueArg) : value(valueArg), id(idArg) {}


@ -134,7 +134,7 @@ bool InterpretObjLiteralObj(JSContext* cx, HandlePlainObject obj,
!insn.getKey().isArrayIndex());
if (kind == PropertySetKind::Normal && insn.getKey().isArrayIndex()) {
propId = INT_TO_JSID(insn.getKey().getArrayIndex());
propId = PropertyKey::Int(insn.getKey().getArrayIndex());
} else {
JSAtom* jsatom =
atomCache.getExistingAtomAt(cx, insn.getKey().getAtomIndex());


@ -56,11 +56,9 @@ struct TaggedPtr<JS::Value> {
template <>
struct TaggedPtr<jsid> {
static jsid wrap(JSString* str) {
return JS::PropertyKey::fromNonIntAtom(str);
}
static jsid wrap(JS::Symbol* sym) { return SYMBOL_TO_JSID(sym); }
static jsid empty() { return JSID_VOID; }
static jsid wrap(JSString* str) { return JS::PropertyKey::NonIntAtom(str); }
static jsid wrap(JS::Symbol* sym) { return PropertyKey::Symbol(sym); }
static jsid empty() { return JS::PropertyKey::Void(); }
};
template <>


@ -19,12 +19,11 @@ class PropertyKey(object):
# think of any way to avoid copying these values here, short of using
# inferior calls for every operation (which, I hear, is broken from
# pretty-printers in some recent GDBs).
TYPE_STRING = 0x0
TYPE_INT = 0x1
TYPE_VOID = 0x2
TYPE_SYMBOL = 0x4
TYPE_EMPTY = 0x6
TYPE_MASK = 0x7
StringTypeTag = 0x0
IntTagBit = 0x1
VoidTypeTag = 0x2
SymbolTypeTag = 0x4
TypeMask = 0x7
def __init__(self, value, cache):
self.value = value
@ -32,18 +31,16 @@ class PropertyKey(object):
self.concrete_type = self.value.type.strip_typedefs()
def to_string(self):
bits = self.value["asBits"]
tag = bits & PropertyKey.TYPE_MASK
if tag == PropertyKey.TYPE_STRING:
bits = self.value["asBits_"]
tag = bits & PropertyKey.TypeMask
if tag == PropertyKey.StringTypeTag:
body = bits.cast(self.cache.JSString_ptr_t)
elif tag & PropertyKey.TYPE_INT:
elif tag & PropertyKey.IntTagBit:
body = bits >> 1
elif tag == PropertyKey.TYPE_VOID:
return "JSID_VOID"
elif tag == PropertyKey.TYPE_SYMBOL:
body = (bits & ~PropertyKey.TYPE_MASK).cast(self.cache.JSSymbol_ptr_t)
elif tag == PropertyKey.TYPE_EMPTY:
return "JSID_EMPTY"
elif tag == PropertyKey.VoidTypeTag:
return "JS::VoidPropertyKey"
elif tag == PropertyKey.SymbolTypeTag:
body = (bits & ~PropertyKey.TypeMask).cast(self.cache.JSSymbol_ptr_t)
else:
body = "<unrecognized>"
return "$jsid(%s)" % (body,)


@ -8,14 +8,14 @@ FRAGMENT(jsid, simple) {
JS::Rooted<JSString*> string(cx, JS_NewStringCopyZ(cx, chars));
JS::Rooted<JSString*> interned(cx, JS_AtomizeAndPinString(cx, chars));
JS::Rooted<jsid> string_id(cx, JS::PropertyKey::fromPinnedString(interned));
JS::Rooted<jsid> int_id(cx, INT_TO_JSID(1729));
JS::Rooted<jsid> int_id(cx, JS::PropertyKey::Int(1729));
JS::Rooted<jsid> unique_symbol_id(
cx, SYMBOL_TO_JSID(JS::NewSymbol(cx, interned)));
cx, JS::PropertyKey::Symbol(JS::NewSymbol(cx, interned)));
JS::Rooted<jsid> registry_symbol_id(
cx, SYMBOL_TO_JSID(JS::GetSymbolFor(cx, interned)));
cx, JS::PropertyKey::Symbol(JS::GetSymbolFor(cx, interned)));
JS::Rooted<jsid> well_known_symbol_id(
cx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(cx, JS::SymbolCode::iterator)));
jsid void_id = JSID_VOID;
cx, JS::GetWellKnownSymbolKey(cx, JS::SymbolCode::iterator));
jsid void_id = JS::PropertyKey::Void();
breakpoint();


@ -11,7 +11,7 @@ unique_symbol_pretty = str(gdb.parse_and_eval("unique_symbol_id")).split("@")[0]
assert_eq(unique_symbol_pretty, '$jsid(Symbol("moon"))')
assert_pretty("registry_symbol_id", '$jsid(Symbol.for("moon"))')
assert_pretty("well_known_symbol_id", "$jsid(Symbol.iterator)")
assert_pretty("void_id", "JSID_VOID")
assert_pretty("void_id", "JS::VoidPropertyKey")
run_fragment("jsid.handles")


@ -347,12 +347,12 @@ static bool ValueToNameOrSymbolId(JSContext* cx, HandleValue idVal,
}
if (!id.isAtom() && !id.isSymbol()) {
id.set(JSID_VOID);
id.set(JS::PropertyKey::Void());
return true;
}
if (id.isAtom() && id.toAtom()->isIndex()) {
id.set(JSID_VOID);
id.set(JS::PropertyKey::Void());
return true;
}
@ -3502,7 +3502,7 @@ AttachDecision CheckPrivateFieldIRGenerator::tryAttachStub() {
}
JSObject* obj = &val_.toObject();
ObjOperandId objId = writer.guardToObject(valId);
PropertyKey key = SYMBOL_TO_JSID(idVal_.toSymbol());
PropertyKey key = PropertyKey::Symbol(idVal_.toSymbol());
ThrowCondition condition;
ThrowMsgKind msgKind;
@ -4740,7 +4740,7 @@ AttachDecision InstanceOfIRGenerator::tryAttachStub() {
// property value.
PropertyResult hasInstanceProp;
NativeObject* hasInstanceHolder = nullptr;
jsid hasInstanceID = SYMBOL_TO_JSID(cx_->wellKnownSymbols().hasInstance);
jsid hasInstanceID = PropertyKey::Symbol(cx_->wellKnownSymbols().hasInstance);
if (!LookupPropertyPure(cx_, fun, hasInstanceID, &hasInstanceHolder,
&hasInstanceProp) ||
!hasInstanceProp.isNativeProperty()) {
@ -5017,7 +5017,8 @@ static bool IsArrayPrototypeOptimizable(JSContext* cx, ArrayObject* arr,
*arrProto = proto;
// The object must not have an own @@iterator property.
PropertyKey iteratorKey = SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator);
PropertyKey iteratorKey =
PropertyKey::Symbol(cx->wellKnownSymbols().iterator);
if (arr->lookupPure(iteratorKey)) {
return false;
}

View file

@ -223,7 +223,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
addStubField(uintptr_t(ptr), StubField::Type::RawPointer);
}
void writeIdField(jsid id) {
addStubField(uintptr_t(JSID_BITS(id)), StubField::Type::Id);
addStubField(id.asRawBits(), StubField::Type::Id);
}
void writeValueField(const Value& val) {
addStubField(val.asRawBits(), StubField::Type::Value);

View file

@ -2871,9 +2871,9 @@ void MacroAssembler::Push(PropertyKey key, Register scratchReg) {
if (key.isString()) {
JSString* str = key.toString();
MOZ_ASSERT((uintptr_t(str) & JSID_TYPE_MASK) == 0);
static_assert(JSID_TYPE_STRING == 0,
"need to orPtr JSID_TYPE_STRING tag if it's not 0");
MOZ_ASSERT((uintptr_t(str) & PropertyKey::TypeMask) == 0);
static_assert(PropertyKey::StringTypeTag == 0,
"need to orPtr StringTypeTag if it's not 0");
Push(ImmGCPtr(str));
} else {
MOZ_ASSERT(key.isSymbol());
@ -2882,7 +2882,7 @@ void MacroAssembler::Push(PropertyKey key, Register scratchReg) {
}
} else {
MOZ_ASSERT(key.isInt());
Push(ImmWord(key.asBits));
Push(ImmWord(key.asRawBits()));
}
}
@ -2891,19 +2891,19 @@ void MacroAssembler::movePropertyKey(PropertyKey key, Register dest) {
// See comment in |Push(PropertyKey, ...)| above for an explanation.
if (key.isString()) {
JSString* str = key.toString();
MOZ_ASSERT((uintptr_t(str) & JSID_TYPE_MASK) == 0);
static_assert(JSID_TYPE_STRING == 0,
MOZ_ASSERT((uintptr_t(str) & PropertyKey::TypeMask) == 0);
static_assert(PropertyKey::StringTypeTag == 0,
"need to orPtr JSID_TYPE_STRING tag if it's not 0");
movePtr(ImmGCPtr(str), dest);
} else {
MOZ_ASSERT(key.isSymbol());
JS::Symbol* sym = key.toSymbol();
movePtr(ImmGCPtr(sym), dest);
orPtr(Imm32(JSID_TYPE_SYMBOL), dest);
orPtr(Imm32(PropertyKey::SymbolTypeTag), dest);
}
} else {
MOZ_ASSERT(key.isInt());
movePtr(ImmWord(key.asBits), dest);
movePtr(ImmWord(key.asRawBits()), dest);
}
}
@ -2976,7 +2976,7 @@ void MacroAssembler::PushEmptyRooted(VMFunctionData::RootType rootType) {
Push(UndefinedValue());
break;
case VMFunctionData::RootId:
Push(ImmWord(JSID_BITS(JSID_VOID)));
Push(ImmWord(JS::PropertyKey::Void().asRawBits()));
break;
}
}
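As a rough illustration of why the asserts in the Push/movePropertyKey changes above hold (a sketch under assumed helpers, not the MacroAssembler or PropertyKey API): string and symbol pointers are aligned enough that their low tag bits are zero, so a string key's word is just the pointer (StringTypeTag is 0) while a symbol key's word is the pointer with SymbolTypeTag or'ed in.

#include <cassert>
#include <cstdint>

constexpr uintptr_t kTypeMask = 0x7;
constexpr uintptr_t kSymbolTypeTag = 0x4;

// Hypothetical encoders mirroring the logic above; real code goes through
// JS::PropertyKey and the assembler, not helpers like these.
uintptr_t EncodeStringKey(const void* str) {
  uintptr_t bits = reinterpret_cast<uintptr_t>(str);
  assert((bits & kTypeMask) == 0);  // alignment keeps the tag bits free
  return bits;                      // StringTypeTag is 0, nothing to or in
}

uintptr_t EncodeSymbolKey(const void* sym) {
  uintptr_t bits = reinterpret_cast<uintptr_t>(sym);
  assert((bits & kTypeMask) == 0);
  return bits | kSymbolTypeTag;
}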

View file

@ -1712,7 +1712,7 @@ static MOZ_ALWAYS_INLINE bool ValueToAtomOrSymbolPure(JSContext* cx,
}
*id = AtomToId(atom);
} else if (idVal.isSymbol()) {
*id = SYMBOL_TO_JSID(idVal.toSymbol());
*id = PropertyKey::Symbol(idVal.toSymbol());
} else {
if (!ValueToIdPure(idVal, id)) {
return false;
@ -1720,7 +1720,7 @@ static MOZ_ALWAYS_INLINE bool ValueToAtomOrSymbolPure(JSContext* cx,
}
// Watch out for ids that may be stored in dense elements.
static_assert(NativeObject::MAX_DENSE_ELEMENTS_COUNT < JSID_INT_MAX,
static_assert(NativeObject::MAX_DENSE_ELEMENTS_COUNT < PropertyKey::IntMax,
"All dense elements must have integer jsids");
if (MOZ_UNLIKELY(id->isInt())) {
return false;
@ -1928,7 +1928,7 @@ bool HasNativeElementPure(JSContext* cx, NativeObject* obj, int32_t index,
return true;
}
jsid id = INT_TO_JSID(index);
jsid id = PropertyKey::Int(index);
uint32_t unused;
if (obj->shape()->lookup(cx, id, &unused)) {
vp[0].setBoolean(true);

View file

@ -1997,7 +1997,7 @@ JS_PUBLIC_API bool JSPropertySpec::getValue(JSContext* cx,
bool PropertySpecNameToId(JSContext* cx, JSPropertySpec::Name name,
MutableHandleId id) {
if (name.isSymbol()) {
id.set(SYMBOL_TO_JSID(cx->wellKnownSymbols().get(name.symbol())));
id.set(PropertyKey::Symbol(cx->wellKnownSymbols().get(name.symbol())));
} else {
JSAtom* atom = Atomize(cx, name.string(), strlen(name.string()));
if (!atom) {
@ -2178,8 +2178,8 @@ JS_PUBLIC_API JSFunction* JS::NewFunctionFromSpec(JSContext* cx,
#ifdef DEBUG
if (fs->name.isSymbol()) {
MOZ_ASSERT(SYMBOL_TO_JSID(cx->wellKnownSymbols().get(fs->name.symbol())) ==
id);
JS::Symbol* sym = cx->wellKnownSymbols().get(fs->name.symbol());
MOZ_ASSERT(PropertyKey::Symbol(sym) == id);
} else {
MOZ_ASSERT(id.isString() &&
StringEqualsAscii(id.toLinearString(), fs->name.string()));
@ -3364,6 +3364,11 @@ JS_PUBLIC_API JS::Symbol* JS::GetWellKnownSymbol(JSContext* cx,
return cx->wellKnownSymbols().get(which);
}
JS_PUBLIC_API JS::PropertyKey JS::GetWellKnownSymbolKey(JSContext* cx,
JS::SymbolCode which) {
return PropertyKey::Symbol(cx->wellKnownSymbols().get(which));
}
#ifdef DEBUG
static bool PropertySpecNameIsDigits(JSPropertySpec::Name name) {
if (name.isSymbol()) {

View file

@ -244,7 +244,7 @@ bool js::SetPropertyIgnoringNamedGetter(
bool BaseProxyHandler::getOwnEnumerablePropertyKeys(
JSContext* cx, HandleObject proxy, MutableHandleIdVector props) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, ENUMERATE);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), ENUMERATE);
MOZ_ASSERT(props.length() == 0);
if (!ownPropertyKeys(cx, proxy, props)) {
@ -285,7 +285,7 @@ bool BaseProxyHandler::getOwnEnumerablePropertyKeys(
bool BaseProxyHandler::enumerate(JSContext* cx, HandleObject proxy,
MutableHandleIdVector props) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, ENUMERATE);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), ENUMERATE);
// GetPropertyKeys will invoke getOwnEnumerablePropertyKeys along the proto
// chain for us.
@ -339,7 +339,7 @@ bool BaseProxyHandler::nativeCall(JSContext* cx, IsAcceptableThis test,
bool BaseProxyHandler::hasInstance(JSContext* cx, HandleObject proxy,
MutableHandleValue v, bool* bp) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), GET);
cx->check(proxy, v);
return JS::InstanceofOperator(cx, proxy, v, bp);
}
@ -389,7 +389,7 @@ bool BaseProxyHandler::setImmutablePrototype(JSContext* cx, HandleObject proxy,
bool BaseProxyHandler::getElements(JSContext* cx, HandleObject proxy,
uint32_t begin, uint32_t end,
ElementAdder* adder) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), GET);
return js::GetElementsWithAdder(cx, proxy, proxy, begin, end, adder);
}

View file

@ -226,7 +226,7 @@ bool Proxy::ownPropertyKeys(JSContext* cx, HandleObject proxy,
return false;
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::ENUMERATE, true);
if (!policy.allowed()) {
return policy.returnValue();
@ -583,7 +583,7 @@ bool Proxy::getOwnEnumerablePropertyKeys(JSContext* cx, HandleObject proxy,
return false;
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::ENUMERATE, true);
if (!policy.allowed()) {
return policy.returnValue();
@ -621,7 +621,7 @@ bool Proxy::enumerate(JSContext* cx, HandleObject proxy,
return AppendUnique(cx, props, protoProps);
}
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::ENUMERATE, true);
// If the policy denies access but wants us to return true, we need
@ -644,7 +644,7 @@ bool Proxy::call(JSContext* cx, HandleObject proxy, const CallArgs& args) {
// Because vp[0] is JS_CALLEE on the way in and JS_RVAL on the way out, we
// can only set our default value once we're sure that we're not calling the
// trap.
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::CALL, true);
if (!policy.allowed()) {
args.rval().setUndefined();
@ -664,7 +664,7 @@ bool Proxy::construct(JSContext* cx, HandleObject proxy, const CallArgs& args) {
// Because vp[0] is JS_CALLEE on the way in and JS_RVAL on the way out, we
// can only set our default value once we're sure that we're not calling the
// trap.
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::CALL, true);
if (!policy.allowed()) {
args.rval().setUndefined();
@ -695,7 +695,7 @@ bool Proxy::hasInstance(JSContext* cx, HandleObject proxy, MutableHandleValue v,
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
*bp = false; // default result if we refuse to perform this action
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::GET, true);
if (!policy.allowed()) {
return policy.returnValue();
@ -729,7 +729,7 @@ const char* Proxy::className(JSContext* cx, HandleObject proxy) {
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::GET, /* mayThrow = */ false);
// Do the safe thing if the policy rejects.
if (!policy.allowed()) {
@ -745,7 +745,7 @@ JSString* Proxy::fun_toString(JSContext* cx, HandleObject proxy,
return nullptr;
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::GET, /* mayThrow = */ false);
// Do the safe thing if the policy rejects.
if (!policy.allowed()) {
@ -781,7 +781,7 @@ bool Proxy::getElements(JSContext* cx, HandleObject proxy, uint32_t begin,
return false;
}
const BaseProxyHandler* handler = proxy->as<ProxyObject>().handler();
AutoEnterPolicy policy(cx, handler, proxy, JSID_VOIDHANDLE,
AutoEnterPolicy policy(cx, handler, proxy, JS::VoidHandlePropertyKey,
BaseProxyHandler::GET,
/* mayThrow = */ true);
if (!policy.allowed()) {

View file

@ -65,7 +65,7 @@ bool ForwardingProxyHandler::defineProperty(JSContext* cx, HandleObject proxy,
bool ForwardingProxyHandler::ownPropertyKeys(
JSContext* cx, HandleObject proxy, MutableHandleIdVector props) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, ENUMERATE);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), ENUMERATE);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return GetPropertyKeys(
cx, target, JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
@ -81,7 +81,7 @@ bool ForwardingProxyHandler::delete_(JSContext* cx, HandleObject proxy,
bool ForwardingProxyHandler::enumerate(JSContext* cx, HandleObject proxy,
MutableHandleIdVector props) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, ENUMERATE);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), ENUMERATE);
MOZ_ASSERT(
!hasPrototype()); // Should never be called if there's a prototype.
RootedObject target(cx, proxy->as<ProxyObject>().target());
@ -155,7 +155,7 @@ bool ForwardingProxyHandler::set(JSContext* cx, HandleObject proxy, HandleId id,
bool ForwardingProxyHandler::call(JSContext* cx, HandleObject proxy,
const CallArgs& args) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, CALL);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), CALL);
RootedValue target(cx, proxy->as<ProxyObject>().private_());
InvokeArgs iargs(cx);
@ -168,7 +168,7 @@ bool ForwardingProxyHandler::call(JSContext* cx, HandleObject proxy,
bool ForwardingProxyHandler::construct(JSContext* cx, HandleObject proxy,
const CallArgs& args) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, CALL);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), CALL);
RootedValue target(cx, proxy->as<ProxyObject>().private_());
if (!IsConstructor(target)) {
@ -200,7 +200,7 @@ bool ForwardingProxyHandler::hasOwn(JSContext* cx, HandleObject proxy,
bool ForwardingProxyHandler::getOwnEnumerablePropertyKeys(
JSContext* cx, HandleObject proxy, MutableHandleIdVector props) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, ENUMERATE);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), ENUMERATE);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return GetPropertyKeys(cx, target, JSITER_OWNONLY, props);
}
@ -220,7 +220,7 @@ bool ForwardingProxyHandler::nativeCall(JSContext* cx, IsAcceptableThis test,
bool ForwardingProxyHandler::hasInstance(JSContext* cx, HandleObject proxy,
MutableHandleValue v, bool* bp) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), GET);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return HasInstance(cx, target, v, bp);
}
@ -239,7 +239,7 @@ bool ForwardingProxyHandler::isArray(JSContext* cx, HandleObject proxy,
const char* ForwardingProxyHandler::className(JSContext* cx,
HandleObject proxy) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), GET);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return GetObjectClassName(cx, target);
}
@ -247,7 +247,7 @@ const char* ForwardingProxyHandler::className(JSContext* cx,
JSString* ForwardingProxyHandler::fun_toString(JSContext* cx,
HandleObject proxy,
bool isToSource) const {
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
assertEnteredPolicy(cx, proxy, JS::PropertyKey::Void(), GET);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return fun_toStringHelper(cx, target, isToSource);
}

View file

@ -624,7 +624,7 @@ bool ArgumentsObject::reifyIterator(JSContext* cx,
return true;
}
RootedId iteratorId(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator));
RootedId iteratorId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
RootedValue val(cx);
if (!ArgumentsObject::getArgumentsIterator(cx, &val)) {
return false;
@ -713,13 +713,13 @@ bool MappedArgumentsObject::obj_enumerate(JSContext* cx, HandleObject obj) {
return false;
}
id = SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator);
id = PropertyKey::Symbol(cx->wellKnownSymbols().iterator);
if (!HasOwnProperty(cx, argsobj, id, &found)) {
return false;
}
for (unsigned i = 0; i < argsobj->initialLength(); i++) {
id = INT_TO_JSID(i);
id = PropertyKey::Int(i);
if (!HasOwnProperty(cx, argsobj, id, &found)) {
return false;
}
@ -1000,13 +1000,13 @@ bool UnmappedArgumentsObject::obj_enumerate(JSContext* cx, HandleObject obj) {
return false;
}
id = SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator);
id = PropertyKey::Symbol(cx->wellKnownSymbols().iterator);
if (!HasOwnProperty(cx, argsobj, id, &found)) {
return false;
}
for (unsigned i = 0; i < argsobj->initialLength(); i++) {
id = INT_TO_JSID(i);
id = PropertyKey::Int(i);
if (!HasOwnProperty(cx, argsobj, id, &found)) {
return false;
}

View file

@ -686,8 +686,7 @@ static bool IsInternalDotName(JSContext* cx, HandleId id) {
static bool CheckUnscopables(JSContext* cx, HandleObject obj, HandleId id,
bool* scopable) {
RootedId unscopablesId(
cx,
SYMBOL_TO_JSID(cx->wellKnownSymbols().get(JS::SymbolCode::unscopables)));
cx, PropertyKey::Symbol(cx->wellKnownSymbols().unscopables));
RootedValue v(cx);
if (!GetProperty(cx, obj, obj, unscopablesId, &v)) {
return false;

View file

@ -52,7 +52,7 @@ bool ForOfIterator::init(HandleValue iterable,
MOZ_ASSERT(index == NOT_ARRAY);
RootedValue callee(cx);
RootedId iteratorId(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator));
RootedId iteratorId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
if (!GetProperty(cx, iterableObj, iterable, iteratorId, &callee)) {
return false;
}

View file

@ -767,8 +767,8 @@ bool js::DefinePropertiesAndFunctions(JSContext* cx, HandleObject obj,
}
bool js::DefineToStringTag(JSContext* cx, HandleObject obj, JSAtom* tag) {
RootedId toStringTagId(cx,
SYMBOL_TO_JSID(cx->wellKnownSymbols().toStringTag));
RootedId toStringTagId(
cx, PropertyKey::Symbol(cx->wellKnownSymbols().toStringTag));
RootedValue tagString(cx, StringValue(tag));
return DefineDataProperty(cx, obj, toStringTagId, tagString, JSPROP_READONLY);
}

View file

@ -14,9 +14,10 @@
using namespace js;
static const jsid voidIdValue = JSID_VOID;
const JS::HandleId JSID_VOIDHANDLE =
JS::HandleId::fromMarkedLocation(&voidIdValue);
static const JS::PropertyKey voidKeyValue = JS::PropertyKey::Void();
const JS::HandleId JS::VoidHandlePropertyKey =
JS::HandleId::fromMarkedLocation(&voidKeyValue);
bool JS::PropertyKey::isPrivateName() const {
return isSymbol() && toSymbol()->isPrivateName();
@ -40,8 +41,8 @@ bool JS::PropertyKey::isWellKnownSymbol(JS::SymbolCode code) const {
if (!atom->isIndex(&index)) {
return true;
}
static_assert(JSID_INT_MIN == 0);
return index > JSID_INT_MAX;
static_assert(PropertyKey::IntMin == 0);
return index > PropertyKey::IntMax;
}
/* static */ bool JS::PropertyKey::isNonIntAtom(JSString* str) {

View file

@ -809,7 +809,7 @@ extern bool JS::InstanceofOperator(JSContext* cx, HandleObject obj,
/* Step 2. */
RootedValue hasInstance(cx);
RootedId id(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().hasInstance));
RootedId id(cx, PropertyKey::Symbol(cx->wellKnownSymbols().hasInstance));
if (!GetProperty(cx, obj, obj, id, &hasInstance)) {
return false;
}

View file

@ -221,7 +221,7 @@ static bool EnumerateNativeProperties(JSContext* cx, HandleNativeObject pobj,
} else {
// Dense arrays never get so large that i would not fit into an
// integer id.
if (!Enumerate<CheckForDuplicates>(cx, pobj, INT_TO_JSID(i),
if (!Enumerate<CheckForDuplicates>(cx, pobj, PropertyKey::Int(i),
/* enumerable = */ true, flags,
visited, props)) {
return false;
@ -236,15 +236,15 @@ static bool EnumerateNativeProperties(JSContext* cx, HandleNativeObject pobj,
// Fail early if the typed array is enormous, because this will be very
// slow and will likely report OOM. This also means we don't need to
// handle indices greater than JSID_INT_MAX in the loop below.
static_assert(JSID_INT_MAX == INT32_MAX);
// handle indices greater than PropertyKey::IntMax in the loop below.
static_assert(PropertyKey::IntMax == INT32_MAX);
if (len > INT32_MAX) {
ReportOutOfMemory(cx);
return false;
}
for (size_t i = 0; i < len; i++) {
if (!Enumerate<CheckForDuplicates>(cx, pobj, INT_TO_JSID(i),
if (!Enumerate<CheckForDuplicates>(cx, pobj, PropertyKey::Int(i),
/* enumerable = */ true, flags,
visited, props)) {
return false;
@ -468,10 +468,24 @@ struct SortComparatorIds {
return true;
}
size_t ta = JSID_BITS(a.get()) & JSID_TYPE_MASK;
size_t tb = JSID_BITS(b.get()) & JSID_TYPE_MASK;
if (ta != tb) {
*lessOrEqualp = (ta <= tb);
enum class KeyType { Void, Int, String, Symbol };
auto keyType = [](PropertyKey key) {
if (key.isString()) {
return KeyType::String;
}
if (key.isInt()) {
return KeyType::Int;
}
if (key.isSymbol()) {
return KeyType::Symbol;
}
MOZ_ASSERT(key.isVoid());
return KeyType::Void;
};
if (keyType(a) != keyType(b)) {
*lessOrEqualp = (keyType(a) <= keyType(b));
return true;
}

View file

@ -21,14 +21,14 @@
namespace js {
MOZ_ALWAYS_INLINE jsid AtomToId(JSAtom* atom) {
static_assert(JSID_INT_MIN == 0);
static_assert(JS::PropertyKey::IntMin == 0);
uint32_t index;
if (atom->isIndex(&index) && index <= JSID_INT_MAX) {
return INT_TO_JSID(int32_t(index));
if (atom->isIndex(&index) && index <= JS::PropertyKey::IntMax) {
return JS::PropertyKey::Int(int32_t(index));
}
return JS::PropertyKey::fromNonIntAtom(atom);
return JS::PropertyKey::NonIntAtom(atom);
}
// Use the NameToId method instead!
@ -42,11 +42,11 @@ MOZ_ALWAYS_INLINE bool ValueToIntId(const Value& v, jsid* id) {
return false;
}
if (!INT_FITS_IN_JSID(i)) {
if (!PropertyKey::fitsInInt(i)) {
return false;
}
*id = INT_TO_JSID(i);
*id = PropertyKey::Int(i);
return true;
}
@ -64,7 +64,7 @@ inline bool ValueToIdPure(const Value& v, jsid* id) {
}
if (v.isSymbol()) {
*id = SYMBOL_TO_JSID(v.toSymbol());
*id = PropertyKey::Symbol(v.toSymbol());
return true;
}
@ -89,7 +89,7 @@ inline bool PrimitiveValueToId(
}
if (v.isSymbol()) {
idp.set(SYMBOL_TO_JSID(v.toSymbol()));
idp.set(PropertyKey::Symbol(v.toSymbol()));
return true;
}
}
@ -134,8 +134,8 @@ inline mozilla::RangedPtr<T> BackfillIndexInCharBuffer(
bool IndexToIdSlow(JSContext* cx, uint32_t index, MutableHandleId idp);
inline bool IndexToId(JSContext* cx, uint32_t index, MutableHandleId idp) {
if (index <= JSID_INT_MAX) {
idp.set(INT_TO_JSID(index));
if (index <= PropertyKey::IntMax) {
idp.set(PropertyKey::Int(index));
return true;
}
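A small illustration of the split above (an assumed helper, not the real API): indices up to PropertyKey::IntMax take the fast integer-key path, while anything larger falls through to IndexToIdSlow, which formats the index as characters, atomizes it, and wraps the atom with NonIntAtom, as the hunk that follows shows.

#include <cstdint>
#include <string>

constexpr uint32_t kIntMax = INT32_MAX;  // stands in for PropertyKey::IntMax

// Illustrative only: describes which kind of key an array index becomes.
std::string DescribeIndexKey(uint32_t index) {
  if (index <= kIntMax) {
    // Fast path: fits in an int key; its raw bits would be (index << 1) | 1.
    return "int key " + std::to_string(index);
  }
  // Slow path: the index is printed to characters and atomized, and the
  // resulting atom becomes a non-int atom key.
  return "atom key \"" + std::to_string(index) + "\"";
}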

View file

@ -934,7 +934,7 @@ JSAtom* js::AtomizeUTF8Chars(JSContext* cx, const char* utf8Chars,
}
bool js::IndexToIdSlow(JSContext* cx, uint32_t index, MutableHandleId idp) {
MOZ_ASSERT(index > JSID_INT_MAX);
MOZ_ASSERT(index > JS::PropertyKey::IntMax);
char16_t buf[UINT32_CHAR_BUFFER_LENGTH];
RangedPtr<char16_t> end(std::end(buf), buf, std::end(buf));
@ -945,7 +945,7 @@ bool js::IndexToIdSlow(JSContext* cx, uint32_t index, MutableHandleId idp) {
return false;
}
idp.set(JS::PropertyKey::fromNonIntAtom(atom));
idp.set(JS::PropertyKey::NonIntAtom(atom));
return true;
}

View file

@ -291,8 +291,8 @@ static MOZ_ALWAYS_INLINE bool HasNoToPrimitiveMethodPure(JSObject* obj,
#ifdef DEBUG
NativeObject* pobj;
PropertyResult prop;
MOZ_ASSERT(
LookupPropertyPure(cx, obj, SYMBOL_TO_JSID(toPrimitive), &pobj, &prop));
MOZ_ASSERT(LookupPropertyPure(cx, obj, PropertyKey::Symbol(toPrimitive),
&pobj, &prop));
MOZ_ASSERT(prop.isNotFound());
#endif
return true;
@ -300,7 +300,7 @@ static MOZ_ALWAYS_INLINE bool HasNoToPrimitiveMethodPure(JSObject* obj,
NativeObject* pobj;
PropertyResult prop;
if (!LookupPropertyPure(cx, holder, SYMBOL_TO_JSID(toPrimitive), &pobj,
if (!LookupPropertyPure(cx, holder, PropertyKey::Symbol(toPrimitive), &pobj,
&prop)) {
return false;
}

View file

@ -2829,7 +2829,7 @@ JS_PUBLIC_API void js::DumpValue(const Value& val, js::GenericPrinter& out) {
}
JS_PUBLIC_API void js::DumpId(jsid id, js::GenericPrinter& out) {
out.printf("jsid %p = ", (void*)JSID_BITS(id));
out.printf("jsid %p = ", (void*)id.asRawBits());
dumpValue(IdToValue(id), out);
out.putChar('\n');
}
@ -2845,7 +2845,7 @@ static void DumpProperty(const NativeObject* obj, PropMap* map, uint32_t index,
} else if (id.isSymbol()) {
id.toSymbol()->dump(out);
} else {
out.printf("id %p", reinterpret_cast<void*>(JSID_BITS(id)));
out.printf("id %p", reinterpret_cast<void*>(id.asRawBits()));
}
if (prop.isDataProperty()) {
@ -3431,7 +3431,7 @@ void JSObject::traceChildren(JSTracer* trc) {
bool ctorGetSucceeded = GetPropertyPure(
cx, obj, NameToId(cx->names().constructor), ctor.address());
if (ctorGetSucceeded && ctor.isObject() && &ctor.toObject() == defaultCtor) {
jsid speciesId = SYMBOL_TO_JSID(cx->wellKnownSymbols().species);
jsid speciesId = PropertyKey::Symbol(cx->wellKnownSymbols().species);
JSFunction* getter;
if (GetGetterPure(cx, defaultCtor, speciesId, &getter) && getter &&
isDefaultSpecies(cx, getter)) {
@ -3461,7 +3461,7 @@ void JSObject::traceChildren(JSTracer* trc) {
// Step 5.
RootedObject ctorObj(cx, &ctor.toObject());
RootedValue s(cx);
RootedId speciesId(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().species));
RootedId speciesId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().species));
if (!GetProperty(cx, ctorObj, ctor, speciesId, &s)) {
return nullptr;
}

View file

@ -60,7 +60,7 @@ inline void NativeObject::setDenseElementHole(uint32_t index) {
}
inline void NativeObject::removeDenseElementForSparseIndex(uint32_t index) {
MOZ_ASSERT(containsPure(INT_TO_JSID(index)));
MOZ_ASSERT(containsPure(PropertyKey::Int(index)));
if (containsDenseElement(index)) {
setDenseElementHole(index);
}

View file

@ -1071,7 +1071,7 @@ static MOZ_ALWAYS_INLINE bool CallAddPropertyHookDense(JSContext* cx,
if (MOZ_UNLIKELY(addProperty)) {
MOZ_ASSERT(!cx->isHelperThreadContext());
RootedId id(cx, INT_TO_JSID(index));
RootedId id(cx, PropertyKey::Int(index));
if (!CallJSAddPropertyOp(cx, addProperty, obj, id, value)) {
obj->setDenseElementHole(index);
return false;
@ -1807,8 +1807,8 @@ static bool DefineNonexistentProperty(JSContext* cx, HandleNativeObject obj,
bool js::AddOrUpdateSparseElementHelper(JSContext* cx, HandleArrayObject obj,
int32_t int_id, HandleValue v,
bool strict) {
MOZ_ASSERT(INT_FITS_IN_JSID(int_id));
RootedId id(cx, INT_TO_JSID(int_id));
MOZ_ASSERT(PropertyKey::fitsInInt(int_id));
RootedId id(cx, PropertyKey::Int(int_id));
// This helper doesn't handle the case where the index may be in the dense
// elements
@ -2095,8 +2095,8 @@ bool js::GetSparseElementHelper(JSContext* cx, HandleArrayObject obj,
MOZ_ASSERT_IF(obj->staticPrototype() != nullptr,
!ObjectMayHaveExtraIndexedProperties(obj->staticPrototype()));
MOZ_ASSERT(INT_FITS_IN_JSID(int_id));
RootedId id(cx, INT_TO_JSID(int_id));
MOZ_ASSERT(PropertyKey::fitsInInt(int_id));
RootedId id(cx, PropertyKey::Int(int_id));
uint32_t index;
PropMap* map = obj->shape()->lookup(cx, id, &index);

View file

@ -203,11 +203,11 @@ inline bool GetElementNoGC(JSContext* cx, JSObject* obj,
return false;
}
if (index > JSID_INT_MAX) {
if (index > PropertyKey::IntMax) {
return false;
}
return GetPropertyNoGC(cx, obj, receiver, INT_TO_JSID(index), vp);
return GetPropertyNoGC(cx, obj, receiver, PropertyKey::Int(index), vp);
}
static MOZ_ALWAYS_INLINE bool ClassMayResolveId(const JSAtomState& names,
@ -242,7 +242,7 @@ MOZ_ALWAYS_INLINE bool MaybeHasInterestingSymbolProperty(
JSObject** holder /* = nullptr */) {
MOZ_ASSERT(symbol->isInterestingSymbol());
jsid id = SYMBOL_TO_JSID(symbol);
jsid id = PropertyKey::Symbol(symbol);
do {
if (obj->maybeHasInterestingSymbolProperty() ||
MOZ_UNLIKELY(
@ -267,7 +267,7 @@ MOZ_ALWAYS_INLINE bool GetInterestingSymbolProperty(
if (!MaybeHasInterestingSymbolProperty(cx, obj, sym, &holder)) {
#ifdef DEBUG
JS::Rooted<JS::Value> receiver(cx, JS::ObjectValue(*obj));
JS::Rooted<jsid> id(cx, SYMBOL_TO_JSID(sym));
JS::Rooted<jsid> id(cx, PropertyKey::Symbol(sym));
if (!GetProperty(cx, obj, receiver, id, vp)) {
return false;
}
@ -280,7 +280,7 @@ MOZ_ALWAYS_INLINE bool GetInterestingSymbolProperty(
JS::Rooted<JSObject*> holderRoot(cx, holder);
JS::Rooted<JS::Value> receiver(cx, JS::ObjectValue(*obj));
JS::Rooted<jsid> id(cx, SYMBOL_TO_JSID(sym));
JS::Rooted<jsid> id(cx, PropertyKey::Symbol(sym));
return GetProperty(cx, holderRoot, receiver, id, vp);
}

View file

@ -66,8 +66,8 @@ bool js::ForOfPIC::Chain::initialize(JSContext* cx) {
disabled_ = true;
// Look up Array.prototype[@@iterator], ensure it's a slotful shape.
mozilla::Maybe<PropertyInfo> iterProp =
arrayProto->lookup(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator));
mozilla::Maybe<PropertyInfo> iterProp = arrayProto->lookup(
cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
if (iterProp.isNothing() || !iterProp->isDataProperty()) {
return true;
}
@ -154,7 +154,7 @@ bool js::ForOfPIC::Chain::tryOptimizeArray(JSContext* cx,
}
// Ensure array doesn't define @@iterator directly.
if (array->lookup(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator))) {
if (array->lookup(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator))) {
return true;
}

View file

@ -114,8 +114,8 @@ void js::PromiseLookup::initialize(JSContext* cx) {
// Check condition 4:
// Look up the '@@species' value on Promise.
mozilla::Maybe<PropertyInfo> speciesProp =
promiseCtor->lookup(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().species));
mozilla::Maybe<PropertyInfo> speciesProp = promiseCtor->lookup(
cx, PropertyKey::Symbol(cx->wellKnownSymbols().species));
if (speciesProp.isNothing() || !promiseCtor->hasGetter(*speciesProp)) {
return;
}

View file

@ -919,7 +919,7 @@ class DictionaryPropMap final : public PropMap {
clearHeaderFlagBits(HasPrevFlag);
}
void clearProperty(uint32_t index) { keys_[index] = JSID_VOID; }
void clearProperty(uint32_t index) { keys_[index] = PropertyKey::Void(); }
static void skipTrailingHoles(MutableHandle<DictionaryPropMap*> map,
uint32_t* mapLength);

View file

@ -27,7 +27,7 @@ static MOZ_ALWAYS_INLINE HashNumber HashPropertyKey(PropertyKey key) {
if (key.isSymbol()) {
return key.toSymbol()->hash();
}
return mozilla::HashGeneric(key.asBits);
return mozilla::HashGeneric(key.asRawBits());
}
// Like HashPropertyKey but optimized for callers that only use atom or symbol

View file

@ -1102,7 +1102,7 @@ static MOZ_ALWAYS_INLINE void MakeRangeGCSafe(Value* beg, Value* end) {
}
static MOZ_ALWAYS_INLINE void MakeRangeGCSafe(jsid* beg, jsid* end) {
std::fill(beg, end, INT_TO_JSID(0));
std::fill(beg, end, PropertyKey::Int(0));
}
static MOZ_ALWAYS_INLINE void MakeRangeGCSafe(jsid* vec, size_t len) {

View file

@ -1334,7 +1334,7 @@ static_assert(sizeof(PropertyName) == sizeof(JSString),
"string subclasses must be binary-compatible with JSString");
static MOZ_ALWAYS_INLINE jsid NameToId(PropertyName* name) {
return JS::PropertyKey::fromNonIntAtom(name);
return JS::PropertyKey::NonIntAtom(name);
}
using PropertyNameVector = JS::GCVector<PropertyName*>;

View file

@ -1469,7 +1469,7 @@ static bool TryAppendNativeProperties(JSContext* cx, HandleObject obj,
continue;
}
if (!entries.append(INT_TO_JSID(i - 1))) {
if (!entries.append(PropertyKey::Int(i - 1))) {
return false;
}

View file

@ -1156,7 +1156,7 @@ static JSObject* GetBufferSpeciesConstructor(
if (GetOwnPropertyPure(cx, proto, NameToId(cx->names().constructor), &ctor,
&found) &&
ctor.isObject() && &ctor.toObject() == defaultCtor) {
jsid speciesId = SYMBOL_TO_JSID(cx->wellKnownSymbols().species);
jsid speciesId = PropertyKey::Symbol(cx->wellKnownSymbols().species);
JSFunction* getter;
if (GetOwnGetterPure(cx, defaultCtor, speciesId, &getter) && getter &&
IsArrayBufferSpecies(cx, getter)) {
@ -1371,7 +1371,7 @@ template <typename T>
// Step 5.
RootedValue callee(cx);
RootedId iteratorId(cx, SYMBOL_TO_JSID(cx->wellKnownSymbols().iterator));
RootedId iteratorId(cx, PropertyKey::Symbol(cx->wellKnownSymbols().iterator));
if (!GetProperty(cx, other, other, iteratorId, &callee)) {
return nullptr;
}

View file

@ -598,13 +598,12 @@ bool TypedObject::obj_newEnumerate(JSContext* cx, HandleObject obj,
}
RootedId id(cx);
for (size_t index = 0; index < indexCount; index++) {
id = INT_TO_JSID(index);
id = PropertyKey::Int(index);
properties.infallibleAppend(id);
}
if (typeDef.kind() == wasm::TypeDefKind::Array) {
properties.infallibleAppend(
JS::PropertyKey::fromNonIntAtom(cx->runtime()->commonNames->length));
properties.infallibleAppend(NameToId(cx->runtime()->commonNames->length));
}
return true;

View file

@ -87,7 +87,7 @@ class MOZ_STACK_CLASS StackScopedCloneData : public StructuredCloneHolderBase {
}
FunctionForwarderOptions forwarderOptions;
if (!xpc::NewFunctionForwarder(aCx, JSID_VOIDHANDLE, obj,
if (!xpc::NewFunctionForwarder(aCx, JS::VoidHandlePropertyKey, obj,
forwarderOptions, &functionValue)) {
return nullptr;
}
@ -398,7 +398,7 @@ bool NewFunctionForwarder(JSContext* cx, HandleId idArg, HandleObject callable,
FunctionForwarderOptions& options,
MutableHandleValue vp) {
RootedId id(cx, idArg);
if (id == JSID_VOIDHANDLE) {
if (id.isVoid()) {
id = GetJSIDByIndex(cx, XPCJSContext::IDX_EMPTYSTRING);
}

View file

@ -1101,7 +1101,7 @@ XPCJSContext::~XPCJSContext() {
XPCJSContext::XPCJSContext()
: mCallContext(nullptr),
mAutoRoots(nullptr),
mResolveName(JSID_VOID),
mResolveName(JS::PropertyKey::Void()),
mResolvingWrapper(nullptr),
mWatchdogManager(GetWatchdogManager()),
mSlowScriptSecondHalf(false),

View file

@ -116,8 +116,8 @@ static JSObject* GetIDPrototype(JSContext* aCx, const JSClass* aClass) {
JS_NewObjectWithGivenProto(aCx, nullptr, idProto));
RootedObject cidProto(aCx,
JS_NewObjectWithGivenProto(aCx, nullptr, idProto));
RootedId hasInstance(
aCx, SYMBOL_TO_JSID(GetWellKnownSymbol(aCx, SymbolCode::hasInstance)));
RootedId hasInstance(aCx,
GetWellKnownSymbolKey(aCx, SymbolCode::hasInstance));
const uint32_t kFlags =
JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;

View file

@ -2977,7 +2977,7 @@ void XPCJSRuntime::Initialize(JSContext* cx) {
mLoaderGlobal.init(cx, nullptr);
// these jsids filled in later when we have a JSContext to work with.
mStrIDs[0] = JSID_VOID;
mStrIDs[0] = JS::PropertyKey::Void();
nsScriptSecurityManager::GetScriptSecurityManager()->InitJSCallbacks(cx);
@ -3081,7 +3081,7 @@ bool XPCJSRuntime::InitializeStrings(JSContext* cx) {
for (unsigned i = 0; i < XPCJSContext::IDX_TOTAL_COUNT; i++) {
str = JS_AtomizeAndPinString(cx, mStrings[i]);
if (!str) {
mStrIDs[0] = JSID_VOID;
mStrIDs[0] = JS::PropertyKey::Void();
return false;
}
mStrIDs[i] = PropertyKey::fromPinnedString(str);

View file

@ -286,7 +286,7 @@ already_AddRefed<XPCNativeInterface> XPCNativeInterface::NewInstance(
NS_ERROR("bad constant name");
return nullptr;
}
jsid name = PropertyKey::fromNonIntAtom(str);
jsid name = PropertyKey::NonIntAtom(str);
// XXX need better way to find dups
// MOZ_ASSERT(!LookupMemberByID(name),"duplicate method/constant name");
@ -311,7 +311,7 @@ already_AddRefed<XPCNativeInterface> XPCNativeInterface::NewInstance(
return nullptr;
}
RootedId interfaceName(cx, PropertyKey::fromNonIntAtom(str));
RootedId interfaceName(cx, PropertyKey::NonIntAtom(str));
// Use placement new to create an object with the right amount of space
// to hold the members array

View file

@ -723,7 +723,7 @@ bool XPC_WN_Helper_Call(JSContext* cx, unsigned argc, Value* vp) {
// N.B. we want obj to be the callee, not JS_THIS(cx, vp)
RootedObject obj(cx, &args.callee());
XPCCallContext ccx(cx, obj, nullptr, JSID_VOIDHANDLE, args.length(),
XPCCallContext ccx(cx, obj, nullptr, JS::VoidHandlePropertyKey, args.length(),
args.array(), args.rval().address());
if (!ccx.IsValid()) {
return false;
@ -741,7 +741,7 @@ bool XPC_WN_Helper_Construct(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
XPCCallContext ccx(cx, obj, nullptr, JSID_VOIDHANDLE, args.length(),
XPCCallContext ccx(cx, obj, nullptr, JS::VoidHandlePropertyKey, args.length(),
args.array(), args.rval().address());
if (!ccx.IsValid()) {
return false;
@ -908,7 +908,7 @@ bool XPC_WN_CallMethod(JSContext* cx, unsigned argc, Value* vp) {
}
obj = FixUpThisIfBroken(obj, funobj);
XPCCallContext ccx(cx, obj, funobj, JSID_VOIDHANDLE, args.length(),
XPCCallContext ccx(cx, obj, funobj, JS::VoidHandlePropertyKey, args.length(),
args.array(), vp);
XPCWrappedNative* wrapper = ccx.GetWrapper();
THROW_AND_RETURN_IF_BAD_WRAPPER(cx, wrapper);
@ -937,7 +937,7 @@ bool XPC_WN_GetterSetter(JSContext* cx, unsigned argc, Value* vp) {
RootedObject obj(cx, &args.thisv().toObject());
obj = FixUpThisIfBroken(obj, funobj);
XPCCallContext ccx(cx, obj, funobj, JSID_VOIDHANDLE, args.length(),
XPCCallContext ccx(cx, obj, funobj, JS::VoidHandlePropertyKey, args.length(),
args.array(), vp);
XPCWrappedNative* wrapper = ccx.GetWrapper();
THROW_AND_RETURN_IF_BAD_WRAPPER(cx, wrapper);

View file

@ -678,7 +678,7 @@ class MOZ_STACK_CLASS XPCCallContext final {
explicit XPCCallContext(JSContext* cx, JS::HandleObject obj = nullptr,
JS::HandleObject funobj = nullptr,
JS::HandleId id = JSID_VOIDHANDLE,
JS::HandleId id = JS::VoidHandlePropertyKey,
unsigned argc = NO_ARGS, JS::Value* argv = nullptr,
JS::Value* rval = nullptr);
@ -2361,7 +2361,7 @@ class MOZ_STACK_CLASS CreateObjectInOptions : public OptionsBase {
public:
explicit CreateObjectInOptions(JSContext* cx = xpc_GetSafeJSContext(),
JSObject* options = nullptr)
: OptionsBase(cx, options), defineAs(cx, JSID_VOID) {}
: OptionsBase(cx, options), defineAs(cx, JS::PropertyKey::Void()) {}
virtual bool Parse() override { return ParseId("defineAs", &defineAs); }
@ -2373,7 +2373,7 @@ class MOZ_STACK_CLASS ExportFunctionOptions : public OptionsBase {
explicit ExportFunctionOptions(JSContext* cx = xpc_GetSafeJSContext(),
JSObject* options = nullptr)
: OptionsBase(cx, options),
defineAs(cx, JSID_VOID),
defineAs(cx, JS::PropertyKey::Void()),
allowCrossOriginArguments(false) {}
virtual bool Parse() override {

View file

@ -68,7 +68,7 @@ bool AppendCrossOriginWhitelistedPropNames(JSContext* cx,
}
for (auto code : sCrossOriginWhitelistedSymbolCodes) {
props.infallibleAppend(SYMBOL_TO_JSID(JS::GetWellKnownSymbol(cx, code)));
props.infallibleAppend(JS::GetWellKnownSymbolKey(cx, code));
}
return true;

View file

@ -935,8 +935,8 @@ bool JSXrayTraits::enumerateNames(JSContext* cx, HandleObject wrapper,
// Fail early if the typed array is enormous, because this will be very
// slow and will likely report OOM. This also means we don't need to
// handle indices greater than JSID_INT_MAX in the loop below.
static_assert(JSID_INT_MAX >= INT32_MAX);
// handle indices greater than PropertyKey::IntMax in the loop below.
static_assert(PropertyKey::IntMax >= INT32_MAX);
if (length > INT32_MAX) {
JS_ReportOutOfMemory(cx);
return false;
@ -946,7 +946,7 @@ bool JSXrayTraits::enumerateNames(JSContext* cx, HandleObject wrapper,
return false;
}
for (int32_t i = 0; i < int32_t(length); ++i) {
props.infallibleAppend(INT_TO_JSID(i));
props.infallibleAppend(PropertyKey::Int(i));
}
} else if (key == JSProto_Function) {
if (!props.append(GetJSIDByIndex(cx, XPCJSContext::IDX_LENGTH))) {
@ -2004,7 +2004,8 @@ bool XrayWrapper<Base, Traits>::defineProperty(JSContext* cx,
template <typename Base, typename Traits>
bool XrayWrapper<Base, Traits>::ownPropertyKeys(
JSContext* cx, HandleObject wrapper, MutableHandleIdVector props) const {
assertEnteredPolicy(cx, wrapper, JSID_VOID, BaseProxyHandler::ENUMERATE);
assertEnteredPolicy(cx, wrapper, JS::PropertyKey::Void(),
BaseProxyHandler::ENUMERATE);
return getPropertyKeys(
cx, wrapper, JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
@ -2112,7 +2113,8 @@ bool XrayWrapper<Base, Traits>::enumerate(
template <typename Base, typename Traits>
bool XrayWrapper<Base, Traits>::call(JSContext* cx, HandleObject wrapper,
const JS::CallArgs& args) const {
assertEnteredPolicy(cx, wrapper, JSID_VOID, BaseProxyHandler::CALL);
assertEnteredPolicy(cx, wrapper, JS::PropertyKey::Void(),
BaseProxyHandler::CALL);
// Hard cast the singleton since SecurityWrapper doesn't have one.
return Traits::call(cx, wrapper, args, Base::singleton);
}
@ -2120,7 +2122,8 @@ bool XrayWrapper<Base, Traits>::call(JSContext* cx, HandleObject wrapper,
template <typename Base, typename Traits>
bool XrayWrapper<Base, Traits>::construct(JSContext* cx, HandleObject wrapper,
const JS::CallArgs& args) const {
assertEnteredPolicy(cx, wrapper, JSID_VOID, BaseProxyHandler::CALL);
assertEnteredPolicy(cx, wrapper, JS::PropertyKey::Void(),
BaseProxyHandler::CALL);
// Hard cast the singleton since SecurityWrapper doesn't have one.
return Traits::construct(cx, wrapper, args, Base::singleton);
}
@ -2137,7 +2140,8 @@ bool XrayWrapper<Base, Traits>::hasInstance(JSContext* cx,
JS::HandleObject wrapper,
JS::MutableHandleValue v,
bool* bp) const {
assertEnteredPolicy(cx, wrapper, JSID_VOID, BaseProxyHandler::GET);
assertEnteredPolicy(cx, wrapper, JS::PropertyKey::Void(),
BaseProxyHandler::GET);
// CrossCompartmentWrapper::hasInstance unwraps |wrapper|'s Xrays and enters
// its compartment. Any present XrayWrappers should be preserved, so the
@ -2265,7 +2269,8 @@ template <typename Base, typename Traits>
bool XrayWrapper<Base, Traits>::getPropertyKeys(
JSContext* cx, HandleObject wrapper, unsigned flags,
MutableHandleIdVector props) const {
assertEnteredPolicy(cx, wrapper, JSID_VOID, BaseProxyHandler::ENUMERATE);
assertEnteredPolicy(cx, wrapper, JS::PropertyKey::Void(),
BaseProxyHandler::ENUMERATE);
// Enumerate expando properties first. Note that the expando object lives
// in the target compartment.

View file

@ -5538,107 +5538,12 @@ void PresShell::SynthesizeMouseMove(bool aFromScroll) {
}
}
/**
* Find the first floating view with a frame and a widget in a postorder
* traversal of the view tree that contains the point. Thus more deeply nested
* floating views are preferred over their ancestors, and floating views earlier
* in the view hierarchy (i.e., added later) are preferred over their siblings.
* This is adequate for finding the "topmost" floating view under a point, given
* that floating views don't support having a specific z-index.
*
* We cannot exit early when aPt is outside the view bounds, because floating
* views aren't necessarily included in their parent's bounds, so this could
* traverse the entire view hierarchy --- use carefully.
*
* aPt is relative to aRelativeToView with the viewport type
* aRelativeToViewportType. aRelativeToView will always have a frame. If aView
* has a frame then aRelativeToView will be aView. (The reason aRelativeToView
* and aView are separate is because we need to traverse into views without
* frames (ie the inner view of a subdocument frame) but we can only easily
* transform between views using TransformPoint which takes frames.)
*/
static nsView* FindFloatingViewContaining(nsView* aRelativeToView,
ViewportType aRelativeToViewportType,
nsView* aView, nsPoint aPt) {
MOZ_ASSERT(aRelativeToView->GetFrame());
if (aView->GetVisibility() == nsViewVisibility_kHide) {
// No need to look into descendants.
return nullptr;
}
bool crossingZoomBoundary = false;
nsIFrame* frame = aView->GetFrame();
if (frame) {
if (!frame->IsVisibleConsideringAncestors(
nsIFrame::VISIBILITY_CROSS_CHROME_CONTENT_BOUNDARY) ||
!frame->PresShell()->IsActive()) {
return nullptr;
}
// We start out in visual coords and then if we cross the zoom boundary we
// become in layout coords. The zoom boundary always occurs in a document
// with IsRootContentDocumentCrossProcess. The root view of such a document
// is outside the zoom boundary and any child view must be inside the zoom
// boundary because we only create views for certain kinds of frames and
// none of them can be between the root frame and the zoom boundary.
if (aRelativeToViewportType == ViewportType::Visual) {
if (!aRelativeToView->GetParent() ||
aRelativeToView->GetViewManager() !=
aRelativeToView->GetParent()->GetViewManager()) {
if (aRelativeToView->GetFrame()
->PresContext()
->IsRootContentDocumentCrossProcess()) {
crossingZoomBoundary = true;
}
}
}
ViewportType nextRelativeToViewportType = aRelativeToViewportType;
if (crossingZoomBoundary) {
nextRelativeToViewportType = ViewportType::Layout;
}
nsLayoutUtils::TransformResult result = nsLayoutUtils::TransformPoint(
RelativeTo{aRelativeToView->GetFrame(), aRelativeToViewportType},
RelativeTo{frame, nextRelativeToViewportType}, aPt);
if (result != nsLayoutUtils::TRANSFORM_SUCCEEDED) {
return nullptr;
}
aRelativeToView = aView;
aRelativeToViewportType = nextRelativeToViewportType;
}
for (nsView* v = aView->GetFirstChild(); v; v = v->GetNextSibling()) {
nsView* r = FindFloatingViewContaining(aRelativeToView,
aRelativeToViewportType, v, aPt);
if (r) return r;
}
if (!frame || !aView->GetFloating() || !aView->HasWidget()) {
return nullptr;
}
// Even though aPt is in visual coordinates until we cross the zoom boundary
// it is valid to compare it to view coords (which are in layout coords)
// because visual coords are the same as layout coords for every view outside
// of the zoom boundary except for the root view of the root content document.
// For the root view of the root content document, its bounds don't actually
// correspond to what is visible when we have a MobileViewportManager. So we
// skip the hit test. This is okay because the point has already been hit
// tested: 1) if we are the root view in the process then the point comes from a
// real mouse event so it must have been over our widget, or 2) if we are the
// root of a subdocument then hittesting against the view of the subdocument
// frame that contains us already happened and succeeded before getting here.
if (!crossingZoomBoundary) {
if (aView->GetDimensions().Contains(aPt)) {
return aView;
}
}
return nullptr;
static nsView* FindFloatingViewContaining(nsPresContext* aRootPresContext,
nsIWidget* aRootWidget,
const LayoutDeviceIntPoint& aPt) {
nsIFrame* popupFrame =
nsLayoutUtils::GetPopupFrameForPoint(aRootPresContext, aRootWidget, aPt);
return popupFrame ? popupFrame->GetView() : nullptr;
}
/*
@ -5804,12 +5709,13 @@ void PresShell::ProcessSynthMouseMoveEvent(bool aFromScroll) {
// the mouse is over. pointVM is the VM of that pres shell.
nsViewManager* pointVM = nullptr;
// This could be a bit slow (traverses entire view hierarchy)
// but it's OK to do it once per synthetic mouse event
if (rootView->GetFrame()) {
view = FindFloatingViewContaining(rootView, ViewportType::Visual, rootView,
mMouseLocation);
view = FindFloatingViewContaining(
mPresContext, rootView->GetWidget(),
LayoutDeviceIntPoint::FromAppUnitsToNearest(
mMouseLocation + rootView->ViewToWidgetOffset(), APD));
}
nsView* pointView = view;
if (!view) {
view = rootView;

View file

@ -3402,7 +3402,7 @@
- name: dom.serviceWorkers.navigationPreload.enabled
type: RelaxedAtomicBool
value: @IS_NIGHTLY_BUILD@
value: true
mirror: always
# Mitigates ServiceWorker navigation faults by bypassing the ServiceWorker on
@ -12679,6 +12679,12 @@
value: false
mirror: always
# Whether we use overlay scrollbars on GTK.
- name: widget.gtk.overlay-scrollbars.enabled
type: RelaxedAtomicBool
value: @EARLY_BETA_OR_EARLIER@
mirror: always
# Whether we honor the scrollbar colors from the gtk theme.
- name: widget.gtk.theme-scrollbar-colors.enabled
type: bool

View file

@ -183,6 +183,16 @@ void PrintToConsole(const char* aFmt, ...) {
va_end(args);
}
ProfileChunkedBuffer& profiler_get_core_buffer() {
// This needs its own mutex, because it is used concurrently from functions
// guarded by gPSMutex as well as others without safety (e.g.,
// profiler_add_marker). It is *not* used inside the critical section of the
// sampler, because mutexes cannot be used there.
static ProfileChunkedBuffer sProfileChunkedBuffer{
ProfileChunkedBuffer::ThreadSafety::WithMutex};
return sProfileChunkedBuffer;
}
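The hunk above replaces the CorePS::mCoreBuffer member with a function-local static, so the buffer keeps process lifetime and stays reachable without taking the main profiler lock. A minimal sketch of the pattern, with illustrative names:

#include <mutex>
#include <vector>

struct ThreadSafeBuffer {
  std::mutex mutex;         // stands in for the buffer's own internal mutex
  std::vector<char> bytes;  // stands in for the chunked storage
};

ThreadSafeBuffer& GetCoreBuffer() {
  // Constructed on first use; C++11 guarantees the initialization is
  // race-free, and the object lives until process exit.
  static ThreadSafeBuffer sBuffer;
  return sBuffer;
}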
Atomic<int, MemoryOrdering::Relaxed> gSkipSampling;
constexpr static bool ValidateFeatures() {
@ -316,12 +326,7 @@ typedef const PSAutoLock& PSLockRef;
class CorePS {
private:
CorePS()
: mProcessStartTime(TimeStamp::ProcessCreation()),
// This needs its own mutex, because it is used concurrently from
// functions guarded by gPSMutex as well as others without safety (e.g.,
// profiler_add_marker). It is *not* used inside the critical section of
// the sampler, because mutexes cannot be used there.
mCoreBuffer(ProfileChunkedBuffer::ThreadSafety::WithMutex)
: mProcessStartTime(TimeStamp::ProcessCreation())
#ifdef USE_LUL_STACKWALK
,
mLul(nullptr)
@ -380,9 +385,6 @@ class CorePS {
// No PSLockRef is needed for this field because it's immutable.
PS_GET_LOCKLESS(const TimeStamp&, ProcessStartTime)
// No PSLockRef is needed for this field because it's thread-safe.
PS_GET_LOCKLESS(ProfileChunkedBuffer&, CoreBuffer)
PS_GET(const Vector<UniquePtr<RegisteredThread>>&, RegisteredThreads)
static void AppendRegisteredThread(
@ -488,17 +490,6 @@ class CorePS {
// The time that the process started.
const TimeStamp mProcessStartTime;
// The thread-safe blocks-oriented buffer into which all profiling data is
// recorded.
// ActivePS controls the lifetime of the underlying contents buffer: When
// ActivePS does not exist, mCoreBuffer is empty and rejects all reads&writes;
// see ActivePS for further details.
// Note: This needs to live here outside of ActivePS, because some producers
// are indirectly controlled (e.g., by atomic flags) and therefore may still
// attempt to write some data shortly after ActivePS has shut down and deleted
// the underlying buffer in memory.
ProfileChunkedBuffer mCoreBuffer;
// Info on all the registered threads.
// ThreadIds in mRegisteredThreads are unique.
Vector<UniquePtr<RegisteredThread>> mRegisteredThreads;
@ -524,11 +515,6 @@ class CorePS {
CorePS* CorePS::sInstance = nullptr;
ProfileChunkedBuffer& profiler_get_core_buffer() {
MOZ_ASSERT(CorePS::Exists());
return CorePS::CoreBuffer();
}
class SamplerThread;
static SamplerThread* NewSamplerThread(PSLockRef aLock, uint32_t aGeneration,
@ -626,11 +612,14 @@ class ActivePS {
mInterval(aInterval),
mFeatures(AdjustFeatures(aFeatures, aFilterCount)),
mProfileBufferChunkManager(
size_t(ClampToAllowedEntries(aCapacity.Value())) * scBytesPerEntry,
ChunkSizeForEntries(aCapacity.Value())),
MakeUnique<ProfileBufferChunkManagerWithLocalLimit>(
size_t(ClampToAllowedEntries(aCapacity.Value())) *
scBytesPerEntry,
ChunkSizeForEntries(aCapacity.Value()))),
mProfileBuffer([this]() -> ProfileChunkedBuffer& {
CorePS::CoreBuffer().SetChunkManager(mProfileBufferChunkManager);
return CorePS::CoreBuffer();
ProfileChunkedBuffer& buffer = profiler_get_core_buffer();
buffer.SetChunkManager(*mProfileBufferChunkManager);
return buffer;
}()),
// The new sampler thread doesn't start sampling immediately because the
// main loop within Run() is blocked until this function's caller
@ -650,7 +639,12 @@ class ActivePS {
}
}
~ActivePS() { CorePS::CoreBuffer().ResetChunkManager(); }
~ActivePS() {
if (mProfileBufferChunkManager) {
// We still control the chunk manager, remove it from the core buffer.
profiler_get_core_buffer().ResetChunkManager();
}
}
bool ThreadSelected(const char* aThreadName) {
if (mFiltersLowered.empty()) {
@ -736,6 +730,12 @@ class ActivePS {
return n;
}
static UniquePtr<ProfileBufferChunkManagerWithLocalLimit>
ExtractBaseProfilerChunkManager(PSLockRef) {
MOZ_ASSERT(sInstance);
return std::move(sInstance->mProfileBufferChunkManager);
}
static bool ShouldProfileThread(PSLockRef aLock, ThreadInfo* aInfo) {
MOZ_ASSERT(sInstance);
return sInstance->ThreadSelected(aInfo->Name());
@ -766,7 +766,9 @@ class ActivePS {
static void FulfillChunkRequests(PSLockRef) {
MOZ_ASSERT(sInstance);
sInstance->mProfileBufferChunkManager.FulfillChunkRequests();
if (sInstance->mProfileBufferChunkManager) {
sInstance->mProfileBufferChunkManager->FulfillChunkRequests();
}
}
static ProfileBuffer& Buffer(PSLockRef) {
@ -1016,7 +1018,8 @@ class ActivePS {
Vector<std::string> mFiltersLowered;
// The chunk manager used by `mProfileBuffer` below.
ProfileBufferChunkManagerWithLocalLimit mProfileBufferChunkManager;
// May become null if it gets transferred to the Gecko Profiler.
UniquePtr<ProfileBufferChunkManagerWithLocalLimit> mProfileBufferChunkManager;
// The buffer into which all samples are recorded.
ProfileBuffer mProfileBuffer;
@ -1060,6 +1063,19 @@ uint32_t ActivePS::sNextGeneration = 0;
#undef PS_GET_LOCKLESS
#undef PS_GET_AND_SET
namespace detail {
[[nodiscard]] MFBT_API UniquePtr<ProfileBufferChunkManagerWithLocalLimit>
ExtractBaseProfilerChunkManager() {
PSAutoLock lock;
if (MOZ_UNLIKELY(!ActivePS::Exists(lock))) {
return nullptr;
}
return ActivePS::ExtractBaseProfilerChunkManager(lock);
}
} // namespace detail
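The chunk manager is now held through a UniquePtr precisely so ownership can be handed off (per the comment above, it "may become null if it gets transferred to the Gecko Profiler"). A sketch of that transfer pattern with illustrative types; once extracted, every remaining use must null-check, as FulfillChunkRequests above now does.

#include <memory>
#include <mutex>

struct ChunkManager {};

class Owner {
  std::mutex mMutex;
  std::unique_ptr<ChunkManager> mManager = std::make_unique<ChunkManager>();

 public:
  // Moves ownership out to the caller; the member becomes null.
  std::unique_ptr<ChunkManager> Extract() {
    std::lock_guard<std::mutex> lock(mMutex);
    return std::move(mManager);
  }

  void Use() {
    std::lock_guard<std::mutex> lock(mMutex);
    if (mManager) {
      // Only touch the manager while this object still owns it.
    }
  }
};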
Atomic<uint32_t, MemoryOrdering::Relaxed> RacyFeatures::sActiveAndFeatures(0);
/* static */
@ -1175,6 +1191,26 @@ ProfilingStack* AutoProfilerLabel::GetProfilingStack() {
// constraints. TLSRegisteredThread is responsible for updating it.
MOZ_THREAD_LOCAL(ProfilingStack*) AutoProfilerLabel::sProfilingStack;
namespace detail {
[[nodiscard]] MFBT_API TimeStamp GetThreadRegistrationTime() {
if (!CorePS::Exists()) {
return {};
}
PSAutoLock lock;
RegisteredThread* registeredThread =
TLSRegisteredThread::RegisteredThread(lock);
if (!registeredThread) {
return {};
}
return registeredThread->Info()->RegisterTime();
}
} // namespace detail
// The name of the main thread.
static const char* const kMainThreadName = "GeckoMain";
@ -2338,11 +2374,11 @@ void SamplerThread::Run() {
LOG("Stack sample too big for local storage, needed %u bytes",
unsigned(state.mRangeEnd - previousState.mRangeEnd));
} else if (state.mRangeEnd - previousState.mRangeEnd >=
*CorePS::CoreBuffer().BufferLength()) {
*profiler_get_core_buffer().BufferLength()) {
LOG("Stack sample too big for profiler storage, needed %u bytes",
unsigned(state.mRangeEnd - previousState.mRangeEnd));
} else {
CorePS::CoreBuffer().AppendContents(localBuffer);
profiler_get_core_buffer().AppendContents(localBuffer);
}
// Clean up for the next run.
@ -3651,7 +3687,7 @@ bool profiler_is_locked_on_current_thread() {
// - The buffer mutex, used directly in some functions without locking the
// main mutex, e.g., marker-related functions.
return PSAutoLock::IsLockedOnCurrentThread() ||
CorePS::CoreBuffer().IsThreadSafeAndLockedOnCurrentThread();
profiler_get_core_buffer().IsThreadSafeAndLockedOnCurrentThread();
}
// This is a simplified version of profiler_add_marker that can be easily passed

View file

@ -104,6 +104,7 @@ EXPORTS.mozilla += [
"public/ProfileBufferEntrySerialization.h",
"public/ProfileBufferIndex.h",
"public/ProfileChunkedBuffer.h",
"public/ProfileChunkedBufferDetail.h",
"public/ProgressLogger.h",
"public/ProportionValue.h",
]

View file

@ -15,9 +15,26 @@
#include "mozilla/BaseProfilerUtils.h"
#include "mozilla/Span.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/Types.h"
#include "mozilla/UniquePtr.h"
namespace mozilla::profiler::detail {
namespace mozilla {
class ProfileBufferChunkManagerWithLocalLimit;
namespace baseprofiler::detail {
[[nodiscard]] MFBT_API UniquePtr<ProfileBufferChunkManagerWithLocalLimit>
ExtractBaseProfilerChunkManager();
// If the current thread is registered, returns its registration time, otherwise
// a null timestamp.
[[nodiscard]] MFBT_API TimeStamp GetThreadRegistrationTime();
} // namespace baseprofiler::detail
namespace profiler::detail {
// True if the filter is exactly "pid:<aPid>".
[[nodiscard]] MFBT_API bool FilterHasPid(
@ -38,6 +55,8 @@ namespace mozilla::profiler::detail {
baseprofiler::BaseProfilerProcessId aPid =
baseprofiler::profiler_current_process_id());
} // namespace mozilla::profiler::detail
} // namespace profiler::detail
} // namespace mozilla
#endif // BaseAndGeckoProfilerDetail_h

View file

@ -89,17 +89,17 @@ ProfileBufferBlockIndex AddMarker(
#ifndef MOZ_GECKO_PROFILER
return {};
#else
if ((aOptions.ThreadId().IsUnspecified() ||
aOptions.ThreadId().ThreadId() == profiler_current_thread_id())
? !baseprofiler::profiler_thread_is_being_profiled()
// If targeting another thread, we can only check if the profiler
// is active&unpaused.
: !baseprofiler::detail::RacyFeatures::IsActiveAndUnpaused()) {
// Record base markers whenever the core buffer is in session.
// TODO: When profiler_thread_is_being_profiled becomes available from
// mozglue, use it instead.
ProfileChunkedBuffer& coreBuffer =
::mozilla::baseprofiler::profiler_get_core_buffer();
if (!coreBuffer.IsInSession()) {
return {};
}
return ::mozilla::baseprofiler::AddMarkerToBuffer(
base_profiler_markers_detail::CachedBaseCoreBuffer(), aName, aCategory,
std::move(aOptions), aMarkerType, aPayloadArguments...);
coreBuffer, aName, aCategory, std::move(aOptions), aMarkerType,
aPayloadArguments...);
#endif
}

View file

@ -31,14 +31,6 @@ MFBT_API ProfileChunkedBuffer& profiler_get_core_buffer();
namespace mozilla::base_profiler_markers_detail {
// Get the core buffer from the profiler, and cache it in a
// non-templated-function static reference.
inline ProfileChunkedBuffer& CachedBaseCoreBuffer() {
static ProfileChunkedBuffer& coreBuffer =
baseprofiler::profiler_get_core_buffer();
return coreBuffer;
}
struct Streaming {
// A `MarkerDataDeserializer` is a free function that can read a serialized
// payload from an `EntryReader` and streams it as JSON object properties.

View file

@ -7,406 +7,25 @@
#ifndef ProfileChunkedBuffer_h
#define ProfileChunkedBuffer_h
#include "mozilla/Attributes.h"
#include "mozilla/BaseProfilerDetail.h"
#include "mozilla/NotNull.h"
#include "mozilla/ProfileBufferChunkManager.h"
#include "mozilla/ProfileBufferChunkManagerSingle.h"
#include "mozilla/ProfileBufferEntrySerialization.h"
#include "mozilla/RefCounted.h"
#include "mozilla/ProfileChunkedBufferDetail.h"
#include "mozilla/RefPtr.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/Unused.h"
#include <cstdio>
#include <utility>
#ifdef DEBUG
# include <cstdio>
#endif
namespace mozilla {
namespace detail {
// Internal accessor pointing at a position inside a chunk.
// It can handle two groups of chunks (typically the extant chunks stored in
// the store manager, and the current chunk).
// The main operations are:
// - ReadEntrySize() to read an entry size, 0 means failure.
// - operator+=(Length) to skip a number of bytes.
// - EntryReader() creates an entry reader at the current position for a given
// size (it may fail with an empty reader), and skips the entry.
// Note that there is no "past-the-end" position -- as soon as InChunkPointer
// reaches the end, it becomes effectively null.
class InChunkPointer {
public:
using Byte = ProfileBufferChunk::Byte;
using Length = ProfileBufferChunk::Length;
// Nullptr-like InChunkPointer, may be used as end iterator.
InChunkPointer()
: mChunk(nullptr), mNextChunkGroup(nullptr), mOffsetInChunk(0) {}
// InChunkPointer over one or two chunk groups, pointing at the given
// block index (if still in range).
// This constructor should only be used with *trusted* block index values!
InChunkPointer(const ProfileBufferChunk* aChunk,
const ProfileBufferChunk* aNextChunkGroup,
ProfileBufferBlockIndex aBlockIndex)
: mChunk(aChunk), mNextChunkGroup(aNextChunkGroup) {
if (mChunk) {
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else if (mNextChunkGroup) {
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else {
mOffsetInChunk = 0;
}
// Try to advance to given position.
if (!AdvanceToGlobalRangePosition(aBlockIndex)) {
// Block does not exist anymore (or block doesn't look valid), reset the
// in-chunk pointer.
mChunk = nullptr;
mNextChunkGroup = nullptr;
}
}
// InChunkPointer over one or two chunk groups, will start at the first
// block (if any). This may be slow, so avoid using it too much.
InChunkPointer(const ProfileBufferChunk* aChunk,
const ProfileBufferChunk* aNextChunkGroup,
ProfileBufferIndex aIndex = ProfileBufferIndex(0))
: mChunk(aChunk), mNextChunkGroup(aNextChunkGroup) {
if (mChunk) {
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else if (mNextChunkGroup) {
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else {
mOffsetInChunk = 0;
}
// Try to advance to given position.
if (!AdvanceToGlobalRangePosition(aIndex)) {
// Block does not exist anymore, reset the in-chunk pointer.
mChunk = nullptr;
mNextChunkGroup = nullptr;
}
}
// Compute the current position in the global range.
// 0 if null (including if we've reached the end).
[[nodiscard]] ProfileBufferIndex GlobalRangePosition() const {
if (IsNull()) {
return 0;
}
return mChunk->RangeStart() + mOffsetInChunk;
}
// Move InChunkPointer forward to the block at the given global block
// position, which is assumed to be valid exactly -- but it may be obsolete.
// 0 stays where it is (if valid already).
// MOZ_ASSERTs if the index is invalid.
[[nodiscard]] bool AdvanceToGlobalRangePosition(
ProfileBufferBlockIndex aBlockIndex) {
if (IsNull()) {
// Pointer is null already. (Not asserting because it's acceptable.)
return false;
}
if (!aBlockIndex) {
// Special null position, just stay where we are.
return ShouldPointAtValidBlock();
}
if (aBlockIndex.ConvertToProfileBufferIndex() < GlobalRangePosition()) {
// Past the requested position, stay where we are (assuming the current
// position was valid).
return ShouldPointAtValidBlock();
}
for (;;) {
if (aBlockIndex.ConvertToProfileBufferIndex() <
mChunk->RangeStart() + mChunk->OffsetPastLastBlock()) {
// Target position is in this chunk's written space, move to it.
mOffsetInChunk =
aBlockIndex.ConvertToProfileBufferIndex() - mChunk->RangeStart();
return ShouldPointAtValidBlock();
}
// Position is after this chunk, try next chunk.
GoToNextChunk();
if (IsNull()) {
return false;
}
// Skip whatever block tail there is, we don't allow pointing in the
// middle of a block.
mOffsetInChunk = mChunk->OffsetFirstBlock();
if (aBlockIndex.ConvertToProfileBufferIndex() < GlobalRangePosition()) {
// Past the requested position, meaning that the given position was in-
// between blocks -> Failure.
MOZ_ASSERT(false, "AdvanceToGlobalRangePosition - In-between blocks");
return false;
}
}
}
// Move InChunkPointer forward to the block at or after the given global
// range position.
// 0 stays where it is (if valid already).
[[nodiscard]] bool AdvanceToGlobalRangePosition(
ProfileBufferIndex aPosition) {
if (aPosition == 0) {
// Special position '0', just stay where we are.
// Success if this position is already valid.
return !IsNull();
}
for (;;) {
ProfileBufferIndex currentPosition = GlobalRangePosition();
if (currentPosition == 0) {
// Pointer is null.
return false;
}
if (aPosition <= currentPosition) {
// At or past the requested position, stay where we are.
return true;
}
if (aPosition < mChunk->RangeStart() + mChunk->OffsetPastLastBlock()) {
// Target position is in this chunk's written space, move to it.
for (;;) {
// Skip the current block.
mOffsetInChunk += ReadEntrySize();
if (mOffsetInChunk >= mChunk->OffsetPastLastBlock()) {
// Reached the end of the chunk, this can happen for the last
// block, let's just continue to the next chunk.
break;
}
if (aPosition <= mChunk->RangeStart() + mOffsetInChunk) {
// We're at or after the position, return at this block position.
return true;
}
}
}
// Position is after this chunk, try next chunk.
GoToNextChunk();
if (IsNull()) {
return false;
}
// Skip whatever block tail there is, we don't allow pointing in the
// middle of a block.
mOffsetInChunk = mChunk->OffsetFirstBlock();
}
}
[[nodiscard]] Byte ReadByte() {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
Byte byte = mChunk->ByteAt(mOffsetInChunk);
if (MOZ_UNLIKELY(++mOffsetInChunk == mChunk->OffsetPastLastBlock())) {
Adjust();
}
return byte;
}
// Read and skip a ULEB128-encoded size.
// 0 means failure (0-byte entries are not allowed.)
// Note that this doesn't guarantee that there are actually that many bytes
// available to read! (EntryReader() below may gracefully fail.)
[[nodiscard]] Length ReadEntrySize() {
ULEB128Reader<Length> reader;
if (IsNull()) {
return 0;
}
for (;;) {
const bool isComplete = reader.FeedByteIsComplete(ReadByte());
if (MOZ_UNLIKELY(IsNull())) {
// End of chunks, so there's no actual entry after this anyway.
return 0;
}
if (MOZ_LIKELY(isComplete)) {
if (MOZ_UNLIKELY(reader.Value() > mChunk->BufferBytes())) {
// Don't allow entries larger than a chunk.
return 0;
}
return reader.Value();
}
}
}
InChunkPointer& operator+=(Length aLength) {
MOZ_ASSERT(!IsNull());
mOffsetInChunk += aLength;
Adjust();
return *this;
}
[[nodiscard]] ProfileBufferEntryReader EntryReader(Length aLength) {
if (IsNull() || aLength == 0) {
return ProfileBufferEntryReader();
}
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
// We should be pointing at the entry, past the entry size.
const ProfileBufferIndex entryIndex = GlobalRangePosition();
// Verify that there's enough space before for the size (starting at index
// 1 at least).
MOZ_ASSERT(entryIndex >= 1u + ULEB128Size(aLength));
const Length remaining = mChunk->OffsetPastLastBlock() - mOffsetInChunk;
Span<const Byte> mem0 = mChunk->BufferSpan();
mem0 = mem0.From(mOffsetInChunk);
if (aLength <= remaining) {
// Move to the end of this block, which could make this null if we have
// reached the end of all buffers.
*this += aLength;
return ProfileBufferEntryReader(
mem0.To(aLength),
// Block starts before the entry size.
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
entryIndex - ULEB128Size(aLength)),
// Block ends right after the entry (could be null for last entry).
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
GlobalRangePosition()));
}
// We need to go to the next chunk for the 2nd part of this block.
GoToNextChunk();
if (IsNull()) {
return ProfileBufferEntryReader();
}
Span<const Byte> mem1 = mChunk->BufferSpan();
const Length tail = aLength - remaining;
MOZ_ASSERT(tail <= mChunk->BufferBytes());
MOZ_ASSERT(tail == mChunk->OffsetFirstBlock());
// We are in the correct chunk, move the offset to the end of the block.
mOffsetInChunk = tail;
// And adjust as needed, which could make this null if we have reached the
// end of all buffers.
Adjust();
return ProfileBufferEntryReader(
mem0, mem1.To(tail),
// Block starts before the entry size.
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
entryIndex - ULEB128Size(aLength)),
// Block ends right after the entry (could be null for last entry).
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
GlobalRangePosition()));
}
[[nodiscard]] bool IsNull() const { return !mChunk; }
[[nodiscard]] bool operator==(const InChunkPointer& aOther) const {
if (IsNull() || aOther.IsNull()) {
return IsNull() && aOther.IsNull();
}
return mChunk == aOther.mChunk && mOffsetInChunk == aOther.mOffsetInChunk;
}
[[nodiscard]] bool operator!=(const InChunkPointer& aOther) const {
return !(*this == aOther);
}
[[nodiscard]] Byte operator*() const {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
return mChunk->ByteAt(mOffsetInChunk);
}
InChunkPointer& operator++() {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
if (MOZ_UNLIKELY(++mOffsetInChunk == mChunk->OffsetPastLastBlock())) {
mOffsetInChunk = 0;
GoToNextChunk();
Adjust();
}
return *this;
}
private:
void GoToNextChunk() {
MOZ_ASSERT(!IsNull());
const ProfileBufferIndex expectedNextRangeStart =
mChunk->RangeStart() + mChunk->BufferBytes();
mChunk = mChunk->GetNext();
if (!mChunk) {
// Reached the end of the current chunk group, try the next one (which
// may be null too, especially on the 2nd try).
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
}
if (mChunk && mChunk->RangeStart() == 0) {
// Reached a chunk without a valid (non-null) range start, assume there
// are only unused chunks from here on.
mChunk = nullptr;
}
MOZ_ASSERT(!mChunk || mChunk->RangeStart() == expectedNextRangeStart,
"We don't handle discontinuous buffers (yet)");
// Non-DEBUG fallback: Stop reading past discontinuities.
// (They should be rare, only happening on temporary OOMs.)
// TODO: Handle discontinuities (by skipping over incomplete blocks).
if (mChunk && mChunk->RangeStart() != expectedNextRangeStart) {
mChunk = nullptr;
}
}
// We want `InChunkPointer` to always point at a valid byte (or be null).
// After some operations, `mOffsetInChunk` may point past the end of the
// current `mChunk`, in which case we need to adjust our position to be inside
// the appropriate chunk. E.g., if we're 10 bytes after the end of the current
// chunk, we should end up at offset 10 in the next chunk.
// Note that we may "fall off" the last chunk and make this `InChunkPointer`
// effectively null.
void Adjust() {
while (mChunk && mOffsetInChunk >= mChunk->OffsetPastLastBlock()) {
// TODO: Try to adjust offset between chunks relative to mRangeStart
// differences. But we don't handle discontinuities yet.
if (mOffsetInChunk < mChunk->BufferBytes()) {
mOffsetInChunk -= mChunk->BufferBytes();
} else {
mOffsetInChunk -= mChunk->OffsetPastLastBlock();
}
GoToNextChunk();
}
}
// Check if the current position is likely to point at a valid block.
// (Size should be reasonable, and block should fully fit inside buffer.)
// MOZ_ASSERTs on failure, to catch incorrect uses of block indices (which
// should only point at valid blocks if still in range). Non-asserting build
// fallback should still be handled.
[[nodiscard]] bool ShouldPointAtValidBlock() const {
if (IsNull()) {
// Pointer is null, no blocks here.
MOZ_ASSERT(false, "ShouldPointAtValidBlock - null pointer");
return false;
}
// Use a copy, so we don't modify `*this`.
InChunkPointer pointer = *this;
// Try to read the entry size.
Length entrySize = pointer.ReadEntrySize();
if (entrySize == 0) {
// Entry size of zero means we read 0 or a way-too-big value.
MOZ_ASSERT(false, "ShouldPointAtValidBlock - invalid size");
return false;
}
// See if the last byte of the entry is still inside the buffer.
pointer += entrySize - 1;
MOZ_ASSERT(!IsNull(), "ShouldPointAtValidBlock - past end of buffer");
return !IsNull();
}
const ProfileBufferChunk* mChunk;
const ProfileBufferChunk* mNextChunkGroup;
Length mOffsetInChunk;
};
} // namespace detail
// Thread-safe buffer that can store blocks of different sizes during defined
// sessions, using Chunks (from a ChunkManager) as storage.
//
@ -518,10 +137,16 @@ class ProfileChunkedBuffer {
}
}
// Stop using the current chunk manager, and return it if owned here.
[[nodiscard]] UniquePtr<ProfileBufferChunkManager> ExtractChunkManager() {
// Set the current chunk manager, unless it's already the one provided.
// The caller is responsible for keeping the chunk manager alive as long as
// it's used here (until the next (Re)SetChunkManager, or
// ~ProfileChunkedBuffer).
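// For example (illustrative names), a caller that owns `chunkManager` can do:
//   buffer.SetChunkManagerIfDifferent(chunkManager);
// Calling it again with the same manager is a no-op; passing a different
// manager first resets the current one, then installs the new one.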
void SetChunkManagerIfDifferent(ProfileBufferChunkManager& aChunkManager) {
baseprofiler::detail::BaseProfilerMaybeAutoLock lock(mMutex);
return ResetChunkManager(lock);
if (!mChunkManager || mChunkManager != &aChunkManager) {
Unused << ResetChunkManager(lock);
SetChunkManager(aChunkManager, lock);
}
}
// Clear the contents of this buffer, ready to receive new chunks.
@ -862,7 +487,7 @@ class ProfileChunkedBuffer {
mBuffer->mMutex.AssertCurrentThreadOwns();
}
detail::InChunkPointer mNextBlockPointer;
profiler::detail::InChunkPointer mNextBlockPointer;
ProfileBufferBlockIndex mCurrentBlockIndex;
@ -1020,7 +645,7 @@ class ProfileChunkedBuffer {
ProfileBufferBlockIndex>,
"ReadEach callback must take ProfileBufferEntryReader& and "
"optionally a ProfileBufferBlockIndex");
detail::InChunkPointer p{aChunks0, aChunks1};
profiler::detail::InChunkPointer p{aChunks0, aChunks1};
while (!p.IsNull()) {
// The position right before an entry size *is* a block index.
const ProfileBufferBlockIndex blockIndex =
@ -1078,7 +703,7 @@ class ProfileChunkedBuffer {
std::is_invocable_v<Callback, Maybe<ProfileBufferEntryReader>&&>,
"ReadAt callback must take a Maybe<ProfileBufferEntryReader>&&");
Maybe<ProfileBufferEntryReader> maybeEntryReader;
if (detail::InChunkPointer p{aChunks0, aChunks1}; !p.IsNull()) {
if (profiler::detail::InChunkPointer p{aChunks0, aChunks1}; !p.IsNull()) {
// If the pointer position is before the given position, try to advance.
if (p.GlobalRangePosition() >=
aMinimumBlockIndex.ConvertToProfileBufferIndex() ||
@ -1586,11 +1211,8 @@ class ProfileChunkedBuffer {
// asynchronously, and either side may be destroyed during the request.
// It cannot use the `ProfileChunkedBuffer` mutex, because that buffer and its
// mutex could be destroyed during the request.
class RequestedChunkRefCountedHolder
: public external::AtomicRefCounted<RequestedChunkRefCountedHolder> {
class RequestedChunkRefCountedHolder {
public:
MOZ_DECLARE_REFCOUNTED_TYPENAME(RequestedChunkRefCountedHolder)
enum class State { Unused, Requested, Fulfilled };
// Get the current state. Note that it may change after the function
@ -1636,9 +1258,32 @@ class ProfileChunkedBuffer {
return maybeChunk;
}
// Ref-counting implementation. Hand-rolled, because mozilla::RefCounted
// logs AddRefs and Releases in xpcom, but this object could be AddRef'd
// by the Base Profiler before xpcom starts, then Release'd by the Gecko
// Profiler in xpcom, leading to apparent negative leaks.
void AddRef() {
baseprofiler::detail::BaseProfilerAutoLock lock(mRequestMutex);
++mRefCount;
}
void Release() {
{
baseprofiler::detail::BaseProfilerAutoLock lock(mRequestMutex);
if (--mRefCount > 0) {
return;
}
}
delete this;
}
private:
~RequestedChunkRefCountedHolder() = default;
// Mutex guarding the following members.
mutable baseprofiler::detail::BaseProfilerMutex mRequestMutex;
int mRefCount = 0;
State mState = State::Unused;
UniquePtr<ProfileBufferChunk> mRequestedChunk;
};
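
The hand-rolled AddRef/Release above is what lets RefPtr keep managing this holder across the Base Profiler / Gecko Profiler boundary without going through xpcom's refcount logging. A minimal usage sketch, assuming the holder is shared between requester and fulfiller as the class comment describes (and ignoring where the class is nested):

  RefPtr<RequestedChunkRefCountedHolder> requested =
      new RequestedChunkRefCountedHolder();  // AddRef() -> refcount 1.
  RefPtr<RequestedChunkRefCountedHolder> fulfiller = requested;  // refcount 2.
  // Each RefPtr calls Release() when it goes away; the last Release(), which
  // may happen in a different library than the first AddRef(), takes
  // mRequestMutex to decrement the count and then deletes the holder.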

View file

@ -0,0 +1,400 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef ProfileChunkedBufferDetail_h
#define ProfileChunkedBufferDetail_h
#include "mozilla/Assertions.h"
#include "mozilla/Likely.h"
#include "mozilla/ProfileBufferChunk.h"
#include "mozilla/ProfileBufferEntrySerialization.h"
namespace mozilla::profiler::detail {
// Internal accessor pointing at a position inside a chunk.
// It can handle two groups of chunks (typically the extant chunks stored in
// the store manager, and the current chunk).
// The main operations are:
// - ReadEntrySize() to read an entry size, 0 means failure.
// - operator+=(Length) to skip a number of bytes.
// - EntryReader() creates an entry reader at the current position for a given
// size (it may fail with an empty reader), and skips the entry.
// Note that there is no "past-the-end" position -- as soon as InChunkPointer
// reaches the end, it becomes effectively null.
class InChunkPointer {
public:
using Byte = ProfileBufferChunk::Byte;
using Length = ProfileBufferChunk::Length;
// Nullptr-like InChunkPointer, may be used as end iterator.
InChunkPointer()
: mChunk(nullptr), mNextChunkGroup(nullptr), mOffsetInChunk(0) {}
// InChunkPointer over one or two chunk groups, pointing at the given
// block index (if still in range).
// This constructor should only be used with *trusted* block index values!
InChunkPointer(const ProfileBufferChunk* aChunk,
const ProfileBufferChunk* aNextChunkGroup,
ProfileBufferBlockIndex aBlockIndex)
: mChunk(aChunk), mNextChunkGroup(aNextChunkGroup) {
if (mChunk) {
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else if (mNextChunkGroup) {
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else {
mOffsetInChunk = 0;
}
// Try to advance to given position.
if (!AdvanceToGlobalRangePosition(aBlockIndex)) {
// Block does not exist anymore (or block doesn't look valid), reset the
// in-chunk pointer.
mChunk = nullptr;
mNextChunkGroup = nullptr;
}
}
// InChunkPointer over one or two chunk groups, will start at the first
// block (if any). This may be slow, so avoid using it too much.
InChunkPointer(const ProfileBufferChunk* aChunk,
const ProfileBufferChunk* aNextChunkGroup,
ProfileBufferIndex aIndex = ProfileBufferIndex(0))
: mChunk(aChunk), mNextChunkGroup(aNextChunkGroup) {
if (mChunk) {
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else if (mNextChunkGroup) {
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
mOffsetInChunk = mChunk->OffsetFirstBlock();
Adjust();
} else {
mOffsetInChunk = 0;
}
// Try to advance to given position.
if (!AdvanceToGlobalRangePosition(aIndex)) {
// Block does not exist anymore, reset the in-chunk pointer.
mChunk = nullptr;
mNextChunkGroup = nullptr;
}
}
// Compute the current position in the global range.
// 0 if null (including if we've reached the end).
[[nodiscard]] ProfileBufferIndex GlobalRangePosition() const {
if (IsNull()) {
return 0;
}
return mChunk->RangeStart() + mOffsetInChunk;
}
// Move InChunkPointer forward to the block at the given global block
// position, which is assumed to be valid exactly -- but it may be obsolete.
// 0 stays where it is (if valid already).
// MOZ_ASSERTs if the index is invalid.
[[nodiscard]] bool AdvanceToGlobalRangePosition(
ProfileBufferBlockIndex aBlockIndex) {
if (IsNull()) {
// Pointer is null already. (Not asserting because it's acceptable.)
return false;
}
if (!aBlockIndex) {
// Special null position, just stay where we are.
return ShouldPointAtValidBlock();
}
if (aBlockIndex.ConvertToProfileBufferIndex() < GlobalRangePosition()) {
// Past the requested position, stay where we are (assuming the current
// position was valid).
return ShouldPointAtValidBlock();
}
for (;;) {
if (aBlockIndex.ConvertToProfileBufferIndex() <
mChunk->RangeStart() + mChunk->OffsetPastLastBlock()) {
// Target position is in this chunk's written space, move to it.
mOffsetInChunk =
aBlockIndex.ConvertToProfileBufferIndex() - mChunk->RangeStart();
return ShouldPointAtValidBlock();
}
// Position is after this chunk, try next chunk.
GoToNextChunk();
if (IsNull()) {
return false;
}
// Skip whatever block tail there is, we don't allow pointing in the
// middle of a block.
mOffsetInChunk = mChunk->OffsetFirstBlock();
if (aBlockIndex.ConvertToProfileBufferIndex() < GlobalRangePosition()) {
// Past the requested position, meaning that the given position was in-
// between blocks -> Failure.
MOZ_ASSERT(false, "AdvanceToGlobalRangePosition - In-between blocks");
return false;
}
}
}
// Move InChunkPointer forward to the block at or after the given global
// range position.
// 0 stays where it is (if valid already).
[[nodiscard]] bool AdvanceToGlobalRangePosition(
ProfileBufferIndex aPosition) {
if (aPosition == 0) {
// Special position '0', just stay where we are.
// Success if this position is already valid.
return !IsNull();
}
for (;;) {
ProfileBufferIndex currentPosition = GlobalRangePosition();
if (currentPosition == 0) {
// Pointer is null.
return false;
}
if (aPosition <= currentPosition) {
// At or past the requested position, stay where we are.
return true;
}
if (aPosition < mChunk->RangeStart() + mChunk->OffsetPastLastBlock()) {
// Target position is in this chunk's written space, move to it.
for (;;) {
// Skip the current block.
mOffsetInChunk += ReadEntrySize();
if (mOffsetInChunk >= mChunk->OffsetPastLastBlock()) {
// Reached the end of the chunk, this can happen for the last
// block, let's just continue to the next chunk.
break;
}
if (aPosition <= mChunk->RangeStart() + mOffsetInChunk) {
// We're at or after the position, return at this block position.
return true;
}
}
}
// Position is after this chunk, try next chunk.
GoToNextChunk();
if (IsNull()) {
return false;
}
// Skip whatever block tail there is, we don't allow pointing in the
// middle of a block.
mOffsetInChunk = mChunk->OffsetFirstBlock();
}
}
[[nodiscard]] Byte ReadByte() {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
Byte byte = mChunk->ByteAt(mOffsetInChunk);
if (MOZ_UNLIKELY(++mOffsetInChunk == mChunk->OffsetPastLastBlock())) {
Adjust();
}
return byte;
}
// Read and skip a ULEB128-encoded size.
// 0 means failure (0-byte entries are not allowed.)
// Note that this doesn't guarantee that there are actually that many bytes
// available to read! (EntryReader() below may gracefully fail.)
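// For example, a size of 300 (binary 1'0010'1100) is encoded as the two bytes
// 0xAC 0x02: each byte carries 7 value bits, lowest bits first, with the high
// bit of a byte flagging that another byte follows.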
[[nodiscard]] Length ReadEntrySize() {
ULEB128Reader<Length> reader;
if (IsNull()) {
return 0;
}
for (;;) {
const bool isComplete = reader.FeedByteIsComplete(ReadByte());
if (MOZ_UNLIKELY(IsNull())) {
// End of chunks, so there's no actual entry after this anyway.
return 0;
}
if (MOZ_LIKELY(isComplete)) {
if (MOZ_UNLIKELY(reader.Value() > mChunk->BufferBytes())) {
// Don't allow entries larger than a chunk.
return 0;
}
return reader.Value();
}
}
}
InChunkPointer& operator+=(Length aLength) {
MOZ_ASSERT(!IsNull());
mOffsetInChunk += aLength;
Adjust();
return *this;
}
[[nodiscard]] ProfileBufferEntryReader EntryReader(Length aLength) {
if (IsNull() || aLength == 0) {
return ProfileBufferEntryReader();
}
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
// We should be pointing at the entry, past the entry size.
const ProfileBufferIndex entryIndex = GlobalRangePosition();
// Verify that there's enough space before for the size (starting at index
// 1 at least).
MOZ_ASSERT(entryIndex >= 1u + ULEB128Size(aLength));
const Length remaining = mChunk->OffsetPastLastBlock() - mOffsetInChunk;
Span<const Byte> mem0 = mChunk->BufferSpan();
mem0 = mem0.From(mOffsetInChunk);
if (aLength <= remaining) {
// Move to the end of this block, which could make this null if we have
// reached the end of all buffers.
*this += aLength;
return ProfileBufferEntryReader(
mem0.To(aLength),
// Block starts before the entry size.
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
entryIndex - ULEB128Size(aLength)),
// Block ends right after the entry (could be null for last entry).
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
GlobalRangePosition()));
}
// We need to go to the next chunk for the 2nd part of this block.
GoToNextChunk();
if (IsNull()) {
return ProfileBufferEntryReader();
}
Span<const Byte> mem1 = mChunk->BufferSpan();
const Length tail = aLength - remaining;
MOZ_ASSERT(tail <= mChunk->BufferBytes());
MOZ_ASSERT(tail == mChunk->OffsetFirstBlock());
// We are in the correct chunk, move the offset to the end of the block.
mOffsetInChunk = tail;
// And adjust as needed, which could make this null if we have reached the
// end of all buffers.
Adjust();
return ProfileBufferEntryReader(
mem0, mem1.To(tail),
// Block starts before the entry size.
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
entryIndex - ULEB128Size(aLength)),
// Block ends right after the entry (could be null for last entry).
ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
GlobalRangePosition()));
}
[[nodiscard]] bool IsNull() const { return !mChunk; }
[[nodiscard]] bool operator==(const InChunkPointer& aOther) const {
if (IsNull() || aOther.IsNull()) {
return IsNull() && aOther.IsNull();
}
return mChunk == aOther.mChunk && mOffsetInChunk == aOther.mOffsetInChunk;
}
[[nodiscard]] bool operator!=(const InChunkPointer& aOther) const {
return !(*this == aOther);
}
[[nodiscard]] Byte operator*() const {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
return mChunk->ByteAt(mOffsetInChunk);
}
InChunkPointer& operator++() {
MOZ_ASSERT(!IsNull());
MOZ_ASSERT(mOffsetInChunk < mChunk->OffsetPastLastBlock());
if (MOZ_UNLIKELY(++mOffsetInChunk == mChunk->OffsetPastLastBlock())) {
mOffsetInChunk = 0;
GoToNextChunk();
Adjust();
}
return *this;
}
private:
void GoToNextChunk() {
MOZ_ASSERT(!IsNull());
const ProfileBufferIndex expectedNextRangeStart =
mChunk->RangeStart() + mChunk->BufferBytes();
mChunk = mChunk->GetNext();
if (!mChunk) {
// Reached the end of the current chunk group, try the next one (which
// may be null too, especially on the 2nd try).
mChunk = mNextChunkGroup;
mNextChunkGroup = nullptr;
}
if (mChunk && mChunk->RangeStart() == 0) {
// Reached a chunk without a valid (non-null) range start, assume there
// are only unused chunks from here on.
mChunk = nullptr;
}
MOZ_ASSERT(!mChunk || mChunk->RangeStart() == expectedNextRangeStart,
"We don't handle discontinuous buffers (yet)");
// Non-DEBUG fallback: Stop reading past discontinuities.
// (They should be rare, only happening on temporary OOMs.)
// TODO: Handle discontinuities (by skipping over incomplete blocks).
if (mChunk && mChunk->RangeStart() != expectedNextRangeStart) {
mChunk = nullptr;
}
}
// We want `InChunkPointer` to always point at a valid byte (or be null).
// After some operations, `mOffsetInChunk` may point past the end of the
// current `mChunk`, in which case we need to adjust our position to be inside
// the appropriate chunk. E.g., if we're 10 bytes after the end of the current
// chunk, we should end up at offset 10 in the next chunk.
// Note that we may "fall off" the last chunk and make this `InChunkPointer`
// effectively null.
void Adjust() {
while (mChunk && mOffsetInChunk >= mChunk->OffsetPastLastBlock()) {
// TODO: Try to adjust offset between chunks relative to mRangeStart
// differences. But we don't handle discontinuities yet.
if (mOffsetInChunk < mChunk->BufferBytes()) {
mOffsetInChunk -= mChunk->BufferBytes();
} else {
mOffsetInChunk -= mChunk->OffsetPastLastBlock();
}
GoToNextChunk();
}
}
// Check if the current position is likely to point at a valid block.
// (Size should be reasonable, and block should fully fit inside buffer.)
// MOZ_ASSERTs on failure, to catch incorrect uses of block indices (which
// should only point at valid blocks if still in range). Non-asserting build
// fallback should still be handled.
[[nodiscard]] bool ShouldPointAtValidBlock() const {
if (IsNull()) {
// Pointer is null, no blocks here.
MOZ_ASSERT(false, "ShouldPointAtValidBlock - null pointer");
return false;
}
// Use a copy, so we don't modify `*this`.
InChunkPointer pointer = *this;
// Try to read the entry size.
Length entrySize = pointer.ReadEntrySize();
if (entrySize == 0) {
// Entry size of zero means we read 0 or a way-too-big value.
MOZ_ASSERT(false, "ShouldPointAtValidBlock - invalid size");
return false;
}
// See if the last byte of the entry is still inside the buffer.
pointer += entrySize - 1;
MOZ_ASSERT(!IsNull(), "ShouldPointAtValidBlock - past end of buffer");
return !IsNull();
}
const ProfileBufferChunk* mChunk;
const ProfileBufferChunk* mNextChunkGroup;
Length mOffsetInChunk;
};
} // namespace mozilla::profiler::detail
#endif // ProfileChunkedBufferDetail_h
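
For orientation, a sketch of how this moved InChunkPointer is driven, mirroring the ReadEach/ReadAt hunks in ProfileChunkedBuffer.h above (aChunks0/aChunks1 are the two chunk groups, as in those hunks):

  profiler::detail::InChunkPointer p{aChunks0, aChunks1};
  while (!p.IsNull()) {
    // The position right before an entry size *is* a block index.
    const ProfileBufferBlockIndex blockIndex =
        ProfileBufferBlockIndex::CreateFromProfileBufferIndex(
            p.GlobalRangePosition());
    const auto entrySize = p.ReadEntrySize();
    if (entrySize == 0) {
      break;  // End of chunks, or an invalid/oversized size.
    }
    ProfileBufferEntryReader entryReader = p.EntryReader(entrySize);
    // `entryReader` may be empty if the rest of the entry is unavailable;
    // otherwise hand it (together with blockIndex) to the per-entry callback.
  }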

View file

@ -8,7 +8,7 @@
# Adding a new metric? We have docs for that!
# https://mozilla.github.io/glean/book/user/metrics/adding-new-metrics.html
---
$schema: moz://mozilla.org/schemas/glean/metrics/1-0-0
$schema: moz://mozilla.org/schemas/glean/metrics/2-0-0
mach:
command:

Some files were not shown because too many files have changed in this diff.