Update On Fri Oct 27 20:51:05 CEST 2023

This commit is contained in:
  parent 9c3b252298
  commit 5d3e899968

215 changed files with 5544 additions and 5903 deletions

Cargo.lock (generated), 24 changed lines
@@ -1572,7 +1572,7 @@ dependencies = [
 [[package]]
 name = "error-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "error-support-macros",
  "lazy_static",

@@ -1584,7 +1584,7 @@ dependencies = [
 [[package]]
 name = "error-support-macros"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -2790,7 +2790,7 @@ dependencies = [
 [[package]]
 name = "interrupt-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "lazy_static",
  "parking_lot",

@@ -3951,7 +3951,7 @@ dependencies = [
 [[package]]
 name = "nss_build_common"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"

 [[package]]
 name = "nsstring"

@@ -4590,7 +4590,7 @@ checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
 [[package]]
 name = "remote_settings"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "parking_lot",
  "serde",

@@ -5121,7 +5121,7 @@ dependencies = [
 [[package]]
 name = "sql-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "ffi-support",
  "interrupt-support",

@@ -5302,7 +5302,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 [[package]]
 name = "suggest"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "anyhow",
  "chrono",

@@ -5348,7 +5348,7 @@ dependencies = [
 [[package]]
 name = "sync-guid"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "base64 0.21.3",
  "rand",

@@ -5359,7 +5359,7 @@ dependencies = [
 [[package]]
 name = "sync15"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "anyhow",
  "error-support",

@@ -5391,7 +5391,7 @@ dependencies = [
 [[package]]
 name = "tabs"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "anyhow",
  "error-support",

@@ -6031,7 +6031,7 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 [[package]]
 name = "viaduct"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "ffi-support",
  "log",

@@ -6187,7 +6187,7 @@ dependencies = [
 [[package]]
 name = "webext-storage"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=04f012eab55b080a17c92cf54ad83668276ac365#04f012eab55b080a17c92cf54ad83668276ac365"
+source = "git+https://github.com/mozilla/application-services?rev=c82bccfa500813f273f4db0ead64fc73bfa2b34c#c82bccfa500813f273f4db0ead64fc73bfa2b34c"
 dependencies = [
  "anyhow",
  "error-support",
Cargo.toml, 14 changed lines

@@ -197,13 +197,13 @@ warp = { git = "https://github.com/glandium/warp", rev = "4af45fae95bc98b0eba1ef
 malloc_size_of_derive = { path = "xpcom/rust/malloc_size_of_derive" }

 # application-services overrides to make updating them all simpler.
-interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-sql-support = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-suggest = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-sync15 = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-tabs = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-viaduct = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
-webext-storage = { git = "https://github.com/mozilla/application-services", rev = "04f012eab55b080a17c92cf54ad83668276ac365" }
+interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+sql-support = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+suggest = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+sync15 = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+tabs = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+viaduct = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }
+webext-storage = { git = "https://github.com/mozilla/application-services", rev = "c82bccfa500813f273f4db0ead64fc73bfa2b34c" }

 # ICU4X 1.2 with synstructure 0.13.x / syn 2.x. When updating to next version, this should be removed.
 diplomat = { git = "https://github.com/rust-diplomat/diplomat", rev = "8d125999893fedfdf30595e97334c21ec4b18da9" }
@@ -28,6 +28,8 @@ $(MDDEPDIR)/buildid.h.stub $(MDDEPDIR)/source-repo.h.stub: FORCE
 endif
 source-repo.h: $(MDDEPDIR)/source-repo.h.stub
 buildid.h: $(MDDEPDIR)/buildid.h.stub
+# Add explicit dependencies that moz.build can't declare yet.
+build/$(MDDEPDIR)/application.ini.stub: source-repo.h buildid.h

 BUILD_BACKEND_FILES := $(addprefix backend.,$(addsuffix Backend,$(BUILD_BACKENDS)))

@@ -98,11 +100,11 @@ install-test-files:
 include $(topsrcdir)/build/moz-automation.mk

 # Dummy rule for the cases below where we don't depend on dist/include
-recurse_pre-export::
+recurse_pre-export:

 # For the binaries rule, not all the install manifests matter, so force only
 # the interesting ones to be done.
-recurse_pre-export:: install-manifests
+recurse_pre-export: install-manifests
 binaries::
 	@$(MAKE) install-manifests install_manifests=dist/include
@@ -95,7 +95,7 @@ void NotificationController::Shutdown() {
   mDocument = nullptr;
   mPresShell = nullptr;

-  mTextHash.Clear();
+  mTextArray.Clear();
   mContentInsertions.Clear();
   mNotifications.Clear();
   mFocusEvent = nullptr;

@@ -465,7 +465,7 @@ bool NotificationController::IsUpdatePending() {
   return mPresShell->IsLayoutFlushObserver() ||
          mObservingState == eRefreshProcessingForUpdate || WaitingForParent() ||
          mContentInsertions.Count() != 0 || mNotifications.Length() != 0 ||
-         mTextHash.Count() != 0 ||
+         !mTextArray.IsEmpty() ||
          !mDocument->HasLoadState(DocAccessible::eTreeConstructed);
 }

@@ -735,8 +735,14 @@ void NotificationController::WillRefresh(mozilla::TimeStamp aTime) {

   mDocument->ProcessPendingUpdates();

-  // Process rendered text change notifications.
-  for (nsIContent* textNode : mTextHash) {
+  // Process rendered text change notifications. Even though we want to process
+  // them in the order in which they were queued, we still want to avoid
+  // duplicates.
+  nsTHashSet<nsIContent*> textHash;
+  for (nsIContent* textNode : mTextArray) {
+    if (!textHash.EnsureInserted(textNode)) {
+      continue;  // Already processed.
+    }
     LocalAccessible* textAcc = mDocument->GetAccessible(textNode);

     // If the text node is not in tree or doesn't have a frame, or placed in

@@ -825,7 +831,8 @@ void NotificationController::WillRefresh(mozilla::TimeStamp aTime) {
       }
     }
   }
-  mTextHash.Clear();
+  textHash.Clear();
+  mTextArray.Clear();

   // Process content inserted notifications to update the tree.
   // Processing an insertion can indirectly run script (e.g. querying a XUL

@@ -1008,7 +1015,7 @@ void NotificationController::WillRefresh(mozilla::TimeStamp aTime) {
   // Stop further processing if there are no new notifications of any kind or
   // events and document load is processed.
   if (mContentInsertions.Count() == 0 && mNotifications.IsEmpty() &&
-      !mFocusEvent && mEvents.IsEmpty() && mTextHash.Count() == 0 &&
+      !mFocusEvent && mEvents.IsEmpty() && mTextArray.IsEmpty() &&
      mHangingChildDocuments.IsEmpty() &&
      mDocument->HasLoadState(DocAccessible::eCompletelyLoaded) &&
      mPresShell->RemoveRefreshObserver(this, FlushType::Display)) {

@@ -142,7 +142,7 @@ class NotificationController final : public EventQueue,
     MOZ_ASSERT(aTextNode->GetPrimaryFrame()->StyleVisibility()->IsVisible(),
                "A text node is not visible");

-    mTextHash.Insert(aTextNode);
+    mTextArray.AppendElement(aTextNode);

     ScheduleProcessing();
   }

@@ -340,8 +340,11 @@ class NotificationController final : public EventQueue,

   /**
    * Pending accessible tree update notifications for rendered text changes.
+   * When there are a lot of nearby text insertions (e.g. during a reflow), it
+   * is much more performant to process them in order because we then benefit
+   * from the layout line cursor. Therefore, we use an array here.
    */
-  nsTHashSet<nsCOMPtrHashKey<nsIContent>> mTextHash;
+  nsTArray<nsCOMPtr<nsIContent>> mTextArray;

   /**
    * Other notifications like DOM events. Don't make this an AutoTArray; we
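The NotificationController change above trades the hash set mTextHash for the ordered array mTextArray, so rendered-text notifications are processed in the order they were queued (letting layout reuse its line cursor), while a throwaway hash set built at processing time still filters duplicates. A minimal sketch of that pattern, using standard containers and hypothetical names rather than the actual Gecko types:

// Illustrative sketch only: std:: containers instead of nsTArray/nsTHashSet,
// and made-up names, not the real Gecko API.
#include <cstdio>
#include <unordered_set>
#include <vector>

struct TextNode {
  int id;
};

// The queue keeps insertion order; duplicates are allowed at enqueue time and
// filtered once, when the queue is drained.
std::vector<TextNode*> gTextQueue;

void QueueTextChange(TextNode* aNode) {
  // O(1) append, no dedup here (unlike the old hash set).
  gTextQueue.push_back(aNode);
}

void ProcessTextChanges() {
  std::unordered_set<TextNode*> seen;  // plays the role of the local textHash
  for (TextNode* node : gTextQueue) {
    if (!seen.insert(node).second) {
      continue;  // already processed this node
    }
    std::printf("process text node %d\n", node->id);
  }
  gTextQueue.clear();  // mirrors textHash.Clear(); mTextArray.Clear();
}

int main() {
  TextNode a{1}, b{2};
  QueueTextChange(&a);
  QueueTextChange(&b);
  QueueTextChange(&a);  // duplicate: skipped on drain, first-seen order kept
  ProcessTextChanges();
}

The cost is a temporary set during processing; the gain is a stable, queue-order traversal.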
@@ -359,7 +359,27 @@ void DocAccessible::QueueCacheUpdate(LocalAccessible* aAcc,
   if (!mIPCDoc) {
     return;
   }
-  uint64_t& domain = mQueuedCacheUpdates.LookupOrInsert(aAcc, 0);
+  // These strong references aren't necessary because WithEntryHandle is
+  // guaranteed to run synchronously. However, static analysis complains without
+  // them.
+  RefPtr<DocAccessible> self = this;
+  RefPtr<LocalAccessible> acc = aAcc;
+  size_t arrayIndex =
+      mQueuedCacheUpdatesHash.WithEntryHandle(aAcc, [self, acc](auto&& entry) {
+        if (entry.HasEntry()) {
+          // This LocalAccessible has already been queued. Return its index in
+          // the queue array so we can update its queued domains.
+          return entry.Data();
+        }
+        // Add this LocalAccessible to the queue array.
+        size_t index = self->mQueuedCacheUpdatesArray.Length();
+        self->mQueuedCacheUpdatesArray.EmplaceBack(std::make_pair(acc, 0));
+        // Also add it to the hash map so we can avoid processing the same
+        // LocalAccessible twice.
+        return entry.Insert(index);
+      });
+  auto& [arrayAcc, domain] = mQueuedCacheUpdatesArray[arrayIndex];
+  MOZ_ASSERT(arrayAcc == aAcc);
   domain |= aNewDomain;
   Controller()->ScheduleProcessing();
 }

@@ -476,10 +496,11 @@ void DocAccessible::Shutdown() {
   }

   mChildDocuments.Clear();
-  // mQueuedCacheUpdates can contain a reference to this document (ex. if the
+  // mQueuedCacheUpdates* can contain a reference to this document (ex. if the
   // doc is scrollable and we're sending a scroll position update). Clear the
   // map here to avoid creating ref cycles.
-  mQueuedCacheUpdates.Clear();
+  mQueuedCacheUpdatesArray.Clear();
+  mQueuedCacheUpdatesHash.Clear();

   // XXX thinking about ordering?
   if (mIPCDoc) {

@@ -1463,9 +1484,7 @@ void DocAccessible::ProcessQueuedCacheUpdates() {
   // DO NOT ADD CODE ABOVE THIS BLOCK: THIS CODE IS MEASURING TIMINGS.

   nsTArray<CacheData> data;
-  for (auto iter = mQueuedCacheUpdates.Iter(); !iter.Done(); iter.Next()) {
-    LocalAccessible* acc = iter.Key();
-    uint64_t domain = iter.UserData();
+  for (auto [acc, domain] : mQueuedCacheUpdatesArray) {
     if (acc && acc->IsInDocument() && !acc->IsDefunct()) {
       RefPtr<AccAttributes> fields =
           acc->BundleFieldsForCache(domain, CacheUpdateType::Update);

@@ -1478,7 +1497,8 @@ void DocAccessible::ProcessQueuedCacheUpdates() {
     }
   }

-  mQueuedCacheUpdates.Clear();
+  mQueuedCacheUpdatesArray.Clear();
+  mQueuedCacheUpdatesHash.Clear();

   if (mViewportCacheDirty) {
     RefPtr<AccAttributes> fields =
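QueueCacheUpdate above builds what is effectively an insertion-ordered map: an array of (accessible, domain) pairs preserves order, and a hash map from accessible to array index deduplicates, with new domains OR'd into the already-queued entry. A hedged sketch of the same bookkeeping with standard containers (illustrative names, not the Gecko API; WithEntryHandle is approximated here by try_emplace):

// Sketch of the "array + index map" queue, with hypothetical names.
#include <cstdint>
#include <cstdio>
#include <unordered_map>
#include <utility>
#include <vector>

struct Accessible {
  int id;
};

std::vector<std::pair<Accessible*, uint64_t>> gQueueArray;  // preserves order
std::unordered_map<Accessible*, size_t> gQueueIndex;        // avoids duplicates

void QueueCacheUpdate(Accessible* aAcc, uint64_t aNewDomain) {
  // Look up or insert in one probe; the mapped value is the array index.
  auto [it, inserted] = gQueueIndex.try_emplace(aAcc, gQueueArray.size());
  if (inserted) {
    // First update queued for this accessible: append a fresh entry.
    gQueueArray.emplace_back(aAcc, uint64_t(0));
  }
  // Either way, OR the new domain into the queued entry.
  gQueueArray[it->second].second |= aNewDomain;
}

void ProcessQueuedCacheUpdates() {
  for (auto& [acc, domain] : gQueueArray) {
    std::printf("send cache update for %d, domains 0x%llx\n", acc->id,
                static_cast<unsigned long long>(domain));
  }
  // Both structures must be cleared together to stay in sync.
  gQueueArray.clear();
  gQueueIndex.clear();
}

int main() {
  Accessible a{1}, b{2};
  QueueCacheUpdate(&a, 0x1);
  QueueCacheUpdate(&b, 0x2);
  QueueCacheUpdate(&a, 0x4);  // merged into a's entry, order unchanged
  ProcessQueuedCacheUpdates();
}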
@@ -110,7 +110,7 @@ class DocAccessible : public HyperTextAccessible,
   void DocType(nsAString& aType) const;

   /**
-   * Adds an entry to mQueuedCacheUpdates indicating aAcc requires
+   * Adds an entry to queued cache updates indicating aAcc requires
    * a cache update on domain aNewDomain. If we've already queued an update
    * for aAcc, aNewDomain is or'd with the existing domain(s)
    * and the map is updated. Otherwise, the entry is simply inserted.

@@ -525,7 +525,7 @@ class DocAccessible : public HyperTextAccessible,

   /**
    * Called from NotificationController to process this doc's
-   * mQueuedCacheUpdates list. For each acc in the map, this function
+   * queued cache updates. For each acc in the map, this function
    * sends a cache update with its corresponding CacheDomain.
    */
   void ProcessQueuedCacheUpdates();

@@ -784,12 +784,21 @@ class DocAccessible : public HyperTextAccessible,
   // Exclusively owned by IPDL so don't manually delete it!
   DocAccessibleChild* mIPCDoc;

-  // A hash map between LocalAccessibles and CacheDomains, tracking
-  // cache updates that have been queued during the current tick
-  // but not yet sent. It is possible for this map to contain a reference
-  // to the document it lives on. We clear the list in Shutdown() to
-  // avoid cyclical references.
-  nsTHashMap<RefPtr<LocalAccessible>, uint64_t> mQueuedCacheUpdates;
+  // These data structures map between LocalAccessibles and CacheDomains,
+  // tracking cache updates that have been queued during the current tick but
+  // not yet sent. If there are a lot of nearby text cache updates (e.g. during
+  // a reflow), it is much more performant to process them in order because we
+  // then benefit from the layout line cursor. However, we still only want to
+  // process each LocalAccessible only once. Therefore, we use an array for
+  // ordering and a hash map to avoid duplicates, since Gecko has no ordered
+  // set data structure. The array contains pairs of LocalAccessible and cache
+  // domain. The hash map maps from LocalAccessible to the corresponding index
+  // in the array. These data structures must be kept in sync. It is possible
+  // for these to contain a reference to the document they live on. We clear
+  // them in Shutdown() to avoid cyclical references.
+  nsTArray<std::pair<RefPtr<LocalAccessible>, uint64_t>>
+      mQueuedCacheUpdatesArray;
+  nsTHashMap<LocalAccessible*, size_t> mQueuedCacheUpdatesHash;

   // A set of Accessibles moved during this tick. Only used in content
   // processes.
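The comment on the new members repeats the caveat that applied to the old hash map: queued entries can hold a strong reference back to the document that owns them, so Shutdown() clears both containers to break the cycle. A small illustration of that failure mode, sketched with std::shared_ptr standing in for RefPtr (assumed, simplified types, not the Gecko classes):

#include <cstdio>
#include <memory>
#include <vector>

struct Node : std::enable_shared_from_this<Node> {
  // Queued updates may hold strong references, possibly back to this object.
  std::vector<std::shared_ptr<Node>> mQueued;

  void QueueSelfUpdate() { mQueued.push_back(shared_from_this()); }

  // Without this, a document whose queue references itself never reaches a
  // refcount of zero; the cycle has to be broken explicitly at shutdown.
  void Shutdown() { mQueued.clear(); }

  ~Node() { std::printf("destroyed\n"); }
};

int main() {
  auto doc = std::make_shared<Node>();
  doc->QueueSelfUpdate();  // doc now keeps itself alive through its own queue
  doc->Shutdown();         // clear the queue, break the cycle
  doc.reset();             // only now is "destroyed" printed
}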
@@ -27,6 +27,12 @@ class AnalysisExplainer extends MozLitElement {
     productUrl: { type: String, reflect: true },
   };

+  static get queries() {
+    return {
+      reviewQualityExplainerLink: "#review-quality-url",
+    };
+  }
+
   getGradesDescriptionTemplate() {
     return html`
       <section id="analysis-explainer-grades-wrapper">

@@ -134,6 +140,7 @@ class AnalysisExplainer extends MozLitElement {
           @click=${this.handleReviewQualityUrlClicked}
         >
           <a
+            id="review-quality-url"
             data-l10n-name="review-quality-url"
             target="_blank"
             href="${window.RPMGetFormatURLPref(
@@ -5,11 +5,11 @@
 @import url("chrome://global/skin/in-content/common.css");

 :host {
-  --background-term-a: #1BB861;
-  --background-term-b: #0D70DF;
-  --background-term-c: #FFB811;
-  --background-term-d: #F87919;
-  --background-term-f: #D51235;
+  --background-term-a: #B3FFE3;
+  --background-term-b: #80EBFF;
+  --background-term-c: #FFEA80;
+  --background-term-d: #FFB587;
+  --background-term-f: #FF848B;
   --in-content-box-border-color: rgba(0, 0, 0, 0.15);
   --inner-border: 1px solid var(--in-content-box-border-color);
   --letter-grade-width: 1.5rem;

@@ -68,6 +68,7 @@
   box-sizing: border-box;
   display: flex;
   font-size: 0.87rem;
+  font-weight: var(--font-weight-default);
   margin: 0;
   padding: 0.125rem 0.5rem;
 }

@@ -79,17 +80,15 @@
 }

 :host([letter="A"]) #letter-grade-description {
-  background-color: color-mix(in srgb, var(--background-term-a) 15%, #FFF);
+  background-color: rgba(231, 255, 246, 1);
 }

 :host([letter="B"]) #letter-grade-term {
   background-color: var(--background-term-b);
-  /* Override dark font */
-  color: #FFF;
 }

 :host([letter="B"]) #letter-grade-description {
-  background-color: color-mix(in srgb, var(--background-term-b) 15%, #FFF);
+  background-color: rgba(222, 250, 255, 1);
 }

 :host([letter="C"]) #letter-grade-term {

@@ -97,7 +96,7 @@
 }

 :host([letter="C"]) #letter-grade-description {
-  background-color: color-mix(in srgb, var(--background-term-c) 15%, #FFF);
+  background-color: rgba(255, 249, 218, 1);
 }

 :host([letter="D"]) #letter-grade-term {

@@ -105,17 +104,15 @@
 }

 :host([letter="D"]) #letter-grade-description {
-  background-color: color-mix(in srgb, var(--background-term-d) 15%, #FFF);
+  background-color: rgba(252, 230, 213, 1);
 }

 :host([letter="F"]) #letter-grade-term {
   background-color: var(--background-term-f);
-  /* Override dark font */
-  color: #FFF;
 }

 :host([letter="F"]) #letter-grade-description {
-  background-color: color-mix(in srgb, var(--background-term-f) 15%, #FFF);
+  background-color: rgba(255, 228, 230, 1);
 }

 @media (prefers-contrast) {
@@ -264,6 +264,57 @@ add_task(async function test_close_telemetry_recorded() {
   await SpecialPowers.popPrefEnv();
 });

+add_task(async function test_powered_by_fakespot_link() {
+  await Services.fog.testFlushAllChildren();
+  Services.fog.testResetFOG();
+
+  await BrowserTestUtils.withNewTab(
+    {
+      url: "about:shoppingsidebar",
+      gBrowser,
+    },
+    async browser => {
+      await clickPoweredByFakespotLink(browser, MOCK_ANALYZED_PRODUCT_RESPONSE);
+    }
+  );
+
+  await Services.fog.testFlushAllChildren();
+
+  let fakespotLinkEvents =
+    Glean.shopping.surfacePoweredByFakespotLinkClicked.testGetValue();
+  assertEventMatches(fakespotLinkEvents[0], {
+    category: "shopping",
+    name: "surface_powered_by_fakespot_link_clicked",
+  });
+});
+
+add_task(async function test_review_quality_explainer_link() {
+  await Services.fog.testFlushAllChildren();
+  Services.fog.testResetFOG();
+
+  await BrowserTestUtils.withNewTab(
+    {
+      url: "about:shoppingsidebar",
+      gBrowser,
+    },
+    async browser => {
+      await clickReviewQualityExplainerLink(
+        browser,
+        MOCK_ANALYZED_PRODUCT_RESPONSE
+      );
+    }
+  );
+
+  await Services.fog.testFlushAllChildren();
+
+  let qualityExplainerEvents =
+    Glean.shopping.surfaceShowQualityExplainerUrlClicked.testGetValue();
+  assertEventMatches(qualityExplainerEvents[0], {
+    category: "shopping",
+    name: "surface_show_quality_explainer_url_clicked",
+  });
+});
+
 function clickReAnalyzeLink(browser, data) {
   return SpecialPowers.spawn(browser, [data], async mockData => {
     let shoppingContainer =

@@ -372,3 +423,41 @@ function clickCheckReviewQualityButton(browser, data) {
     button.click();
   });
 }
+
+function clickPoweredByFakespotLink(browser, data) {
+  return SpecialPowers.spawn(browser, [data], async mockData => {
+    let shoppingContainer =
+      content.document.querySelector("shopping-container").wrappedJSObject;
+    shoppingContainer.data = Cu.cloneInto(mockData, content);
+    await shoppingContainer.updateComplete;
+
+    let settingsEl = shoppingContainer.settingsEl;
+    await settingsEl.updateComplete;
+    let fakespotLink = settingsEl.fakespotLearnMoreLinkEl;
+
+    // Prevent link navigation for test.
+    fakespotLink.href = undefined;
+    await fakespotLink.updateComplete;
+
+    fakespotLink.click();
+  });
+}
+
+function clickReviewQualityExplainerLink(browser, data) {
+  return SpecialPowers.spawn(browser, [data], async mockData => {
+    let shoppingContainer =
+      content.document.querySelector("shopping-container").wrappedJSObject;
+    shoppingContainer.data = Cu.cloneInto(mockData, content);
+    await shoppingContainer.updateComplete;
+
+    let analysisExplainerEl = shoppingContainer.analysisExplainerEl;
+    await analysisExplainerEl.updateComplete;
+    let reviewQualityLink = analysisExplainerEl.reviewQualityExplainerLink;
+
+    // Prevent link navigation for test.
+    reviewQualityLink.href = undefined;
+    await reviewQualityLink.updateComplete;
+
+    reviewQualityLink.click();
+  });
+}
@@ -339,7 +339,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "d9b19ce5f66d0775eb73972cb703c4d5bdcba12a"
+      "revision": "6d6e43ed0b027727988ee45f5a97ec8455ac72b0"
     },
     "da": {
       "pin": false,

@@ -357,7 +357,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "e7fa863121ded5e9fd91f0077e2f9d7dbb850dd5"
+      "revision": "f7ec6aa04b2be76a70e87d7f72ee31561600e1a9"
     },
     "de": {
       "pin": false,

@@ -501,7 +501,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "9451a8023fd17b61c9c39bbc615a1e0b2dc80fd6"
+      "revision": "ff2b76625250103365832ad64ab97254ccef8ab4"
     },
     "es-ES": {
       "pin": false,

@@ -645,7 +645,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
      ],
-      "revision": "102c4c846a8db758524221cd8ed7def072cb860b"
+      "revision": "73dba2a5ab0ccc3379f1c33871ea18623e92ef7e"
     },
     "fur": {
       "pin": false,

@@ -753,7 +753,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "2dd1ab054b0d61da2b63e1ff90434471633abf23"
+      "revision": "7cb2575a39cb2e44c1bcc1b9abf4c8227f97ea13"
     },
     "gu-IN": {
       "pin": false,

@@ -951,7 +951,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "d6f1d7da4a796f1f95d788f6701100d6f93bc2ef"
+      "revision": "00c51ae66e84149e4bb0cc6e9f1f7ba5887bb245"
     },
     "it": {
       "pin": false,

@@ -1011,7 +1011,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "de68f2631fcc1bb0f58949b21eaf20d04ada876d"
+      "revision": "f18521f4d8b4bc986026ea37ba3404f845f9c052"
     },
     "kab": {
       "pin": false,

@@ -1191,7 +1191,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "753d5f733f7f9b68cc435dc969ef4275677f6a83"
+      "revision": "5909e52cd13ec891fc10945831c8ddbfc457d620"
     },
     "meh": {
       "pin": false,

@@ -1587,7 +1587,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "7c59f793a45add29468cd15c3189431e13365f98"
+      "revision": "9c48d4b58333645f875f8e965a7f42984d27ced1"
     },
     "sk": {
       "pin": false,

@@ -1983,7 +1983,7 @@
         "win64-aarch64-devedition",
         "win64-devedition"
       ],
-      "revision": "2b8e43d48161b3dbf1bd7964ed409f61392cd6b7"
+      "revision": "fdc961a0a739b33fb8a0d44628ccb47d13e2a48c"
     },
     "zh-TW": {
       "pin": false,
@@ -44,7 +44,7 @@ include backend.mk

 # Add e.g. `export:: $(EXPORT_TARGETS)` rules. The *_TARGETS variables are defined
 # in backend.mk.
-$(foreach tier,$(RUNNABLE_TIERS),$(eval $(tier):: $($(call varize,$(tier))_TARGETS)))
+$(foreach tier,$(RUNNABLE_TIERS),$(eval $(if $(filter .,$(DEPTH)),recurse_$(tier):,$(tier)::) $($(call varize,$(tier))_TARGETS)))
 endif

 endif
@@ -432,6 +432,33 @@ $(1):

 endef

+# make_cargo_rule(target, real-target [, extra-deps])
+# Generates a rule suitable to rebuild $(target) only if its dependencies are
+# obsolete.
+# It relies on the fact that upon build, cargo generates a dependency file named
+# `$(target).d'. Unfortunately the lhs of the rule has an absolute path,
+# so we extract it under the name $(target)_deps below.
+#
+# If the dependencies are empty, the file was not created so we force a rebuild.
+# Otherwise we add it to the dependency list.
+#
+# The actual rule is a bit tricky. The `+' prefix allow for recursive parallel
+# make, and it's skipped (`:') if we already triggered a rebuild as part of the
+# dependency chain.
+#
+# Another tricky thing: some dependencies may contain escaped spaces, and they
+# need to be preserved, but $(foreach) splits on spaces, so we replace escaped
+# spaces with some unlikely string for the foreach, and replace them back in the
+# loop itself.
+define make_cargo_rule
+$(notdir $(1))_deps := $$(wordlist 2, 10000000, $$(if $$(wildcard $(basename $(1)).d),$$(shell cat $(basename $(1)).d)))
+$(1): $(CARGO_FILE) $(3) $$(if $$($(notdir $(1))_deps),$$($(notdir $(1))_deps),$(2))
+	$$(REPORT_BUILD)
+	$$(if $$($(notdir $(1))_deps),+$(MAKE) $(2),:)
+
+$$(foreach dep, $$(call normalize_sep,$$(subst \ ,_^_^_^_,$$($(notdir $(1))_deps))),$$(eval $$(call make_default_rule,$$(subst _^_^_^_,\ ,$$(dep)))))
+endef
+
 ifdef RUST_LIBRARY_FILE

 rust_features_flag := --features '$(if $(RUST_LIBRARY_FEATURES),$(RUST_LIBRARY_FEATURES) )mozilla-central-workspace-hack'

@@ -449,15 +476,9 @@ endif
 # has full visibility into how changes in Rust sources might affect the final
 # build.
 force-cargo-library-build:
-	$(REPORT_BUILD)
 	$(call BUILDSTATUS,START_Rust $(notdir $(RUST_LIBRARY_FILE)))
 	$(call CARGO_BUILD) --lib $(cargo_target_flag) $(rust_features_flag) -- $(cargo_rustc_flags)
 	$(call BUILDSTATUS,END_Rust $(notdir $(RUST_LIBRARY_FILE)))

-RUST_LIBRARY_DEP_FILE := $(basename $(RUST_LIBRARY_FILE)).d
-RUST_LIBRARY_DEPS := $(wordlist 2, 10000000, $(if $(wildcard $(RUST_LIBRARY_DEP_FILE)),$(shell cat $(RUST_LIBRARY_DEP_FILE))))
-$(RUST_LIBRARY_FILE): $(CARGO_FILE) $(if $(RUST_LIBRARY_DEPS),$(RUST_LIBRARY_DEPS), force-cargo-library-build)
-	$(if $(RUST_LIBRARY_DEPS),+$(MAKE) force-cargo-library-build,:)
 # When we are building in --enable-release mode; we add an additional check to confirm
 # that we are not importing any networking-related functions in rust code. This reduces
 # the chance of proxy bypasses originating from rust code.

@@ -468,15 +489,14 @@ ifeq ($(OS_ARCH), Linux)
 ifeq (,$(rustflags_sancov)$(MOZ_ASAN)$(MOZ_TSAN)$(MOZ_UBSAN))
 ifndef MOZ_LTO_RUST_CROSS
 ifneq (,$(filter -Clto,$(cargo_rustc_flags)))
-	$(call py_action,check_binary $(@F),--networking $@)
+	$(call py_action,check_binary $(@F),--networking $(RUST_LIBRARY_FILE))
 endif
 endif
 endif
 endif
 endif

-$(foreach dep, $(call normalize_sep,$(RUST_LIBRARY_DEPS)),$(eval $(call make_default_rule,$(dep))))
+$(eval $(call make_cargo_rule,$(RUST_LIBRARY_FILE),force-cargo-library-build))

 SUGGEST_INSTALL_ON_FAILURE = (ret=$$?; if [ $$ret = 101 ]; then echo If $1 is not installed, install it using: cargo install $1; fi; exit $$ret)

@@ -513,12 +533,11 @@ ifdef HOST_RUST_LIBRARY_FILE
 host_rust_features_flag := --features '$(if $(HOST_RUST_LIBRARY_FEATURES),$(HOST_RUST_LIBRARY_FEATURES) )mozilla-central-workspace-hack'

 force-cargo-host-library-build:
-	$(REPORT_BUILD)
 	$(call BUILDSTATUS,START_Rust $(notdir $(HOST_RUST_LIBRARY_FILE)))
 	$(call CARGO_BUILD) --lib $(cargo_host_flag) $(host_rust_features_flag)
 	$(call BUILDSTATUS,END_Rust $(notdir $(HOST_RUST_LIBRARY_FILE)))

-$(HOST_RUST_LIBRARY_FILE): force-cargo-host-library-build ;
+$(eval $(call make_cargo_rule,$(HOST_RUST_LIBRARY_FILE),force-cargo-host-library-build))

 ifndef CARGO_NO_AUTO_ARG
 force-cargo-host-library-%:

@@ -538,33 +557,11 @@ ifdef RUST_PROGRAMS
 program_features_flag := --features mozilla-central-workspace-hack

 force-cargo-program-build: $(call resfile,module)
-	$(REPORT_BUILD)
 	$(call BUILDSTATUS,START_Rust $(RUST_CARGO_PROGRAMS))
 	$(call CARGO_BUILD) $(addprefix --bin ,$(RUST_CARGO_PROGRAMS)) $(cargo_target_flag) $(program_features_flag) -- $(addprefix -C link-arg=$(CURDIR)/,$(call resfile,module)) $(CARGO_RUSTCFLAGS)
 	$(call BUILDSTATUS,END_Rust $(RUST_CARGO_PROGRAMS))

-# RUST_PROGRAM_DEPENDENCIES(RUST_PROGRAM)
-# Generates a rule suitable to rebuild RUST_PROGRAM only if its dependencies are
-# obsolete.
-# It relies on the fact that upon build, cargo generates a dependency file named
-# `$(RUST_PROGRAM).d'. Unfortunately the lhs of the rule has an absolute path,
-# so we extract it under the name $(RUST_PROGRAM)_deps below.
-#
-# If the dependencies are empty, the file was not created so we force a rebuild.
-# Otherwise we add it to the dependency list.
-#
-# The actual rule is a bit tricky. The `+' prefix allow for recursive parallel
-# make, and it's skipped (`:') if we already triggered a rebuild as part of the
-# dependency chain.
-#
-define RUST_PROGRAM_DEPENDENCIES
-$(1)_deps := $(wordlist 2, 10000000, $(if $(wildcard $(1).d),$(shell cat $(1).d)))
-$(1): $(CARGO_FILE) $(call resfile,module) $$(if $$($(1)_deps),$$($(1)_deps),force-cargo-program-build)
-	$$(if $$($(1)_deps),+$(MAKE) force-cargo-program-build,:)
-$$(foreach dep,$$(call normalize_sep, %.h,$$($(1)_deps)),$$(eval $$(call make_default_rule,$$(dep))))
-endef
-
-$(foreach RUST_PROGRAM,$(RUST_PROGRAMS), $(eval $(call RUST_PROGRAM_DEPENDENCIES,$(RUST_PROGRAM))))
+$(foreach RUST_PROGRAM,$(RUST_PROGRAMS), $(eval $(call make_cargo_rule,$(RUST_PROGRAM),force-cargo-program-build,$(call resfile,module))))

 ifndef CARGO_NO_AUTO_ARG
 force-cargo-program-%:

@@ -583,16 +580,14 @@ ifdef HOST_RUST_PROGRAMS
 host_program_features_flag := --features mozilla-central-workspace-hack

 force-cargo-host-program-build:
-	$(REPORT_BUILD)
 	$(call BUILDSTATUS,START_Rust $(HOST_RUST_CARGO_PROGRAMS))
 	$(call CARGO_BUILD) $(addprefix --bin ,$(HOST_RUST_CARGO_PROGRAMS)) $(cargo_host_flag) $(host_program_features_flag)
 	$(call BUILDSTATUS,END_Rust $(HOST_RUST_CARGO_PROGRAMS))

-$(HOST_RUST_PROGRAMS): force-cargo-host-program-build ;
+$(foreach HOST_RUST_PROGRAM,$(HOST_RUST_PROGRAMS), $(eval $(call make_cargo_rule,$(HOST_RUST_PROGRAM),force-cargo-host-program-build)))

 ifndef CARGO_NO_AUTO_ARG
 force-cargo-host-program-%:
 	$(REPORT_BUILD)
 	$(call BUILDSTATUS,START_Rust $(HOST_RUST_CARGO_PROGRAMS))
 	$(call RUN_CARGO,$*) $(addprefix --bin ,$(HOST_RUST_CARGO_PROGRAMS)) $(cargo_host_flag) $(host_program_features_flag)
 	$(call BUILDSTATUS,END_Rust $(HOST_RUST_CARGO_PROGRAMS))
@@ -159,7 +159,7 @@ ifeq (.,$(DEPTH))
 # This is required so that the pre-export tier sees the rules in
 # mobile/android
 ifeq ($(MOZ_WIDGET_TOOLKIT),android)
-recurse_pre-export:: mobile/android/pre-export
+recurse_pre-export: mobile/android/pre-export
 endif

 # CSS2Properties.webidl needs ServoCSSPropList.py from layout/style

@@ -168,10 +168,6 @@ dom/bindings/export: layout/style/ServoCSSPropList.py
 # Various telemetry histogram files need ServoCSSPropList.py from layout/style
 toolkit/components/telemetry/export: layout/style/ServoCSSPropList.py

-# The update agent needs to link to the updatecommon library, but the build system does not
-# currently have a good way of expressing this dependency.
-toolkit/components/updateagent/target: toolkit/mozapps/update/common/target
-
 ifeq ($(TARGET_ENDIANNESS),big)
 config/external/icu/data/target-objects: config/external/icu/data/$(MDDEPDIR)/icudt$(MOZ_ICU_VERSION)b.dat.stub
 config/external/icu/data/$(MDDEPDIR)/icudt$(MOZ_ICU_VERSION)b.dat.stub: config/external/icu/icupkg/host

@@ -196,27 +192,7 @@ endif

 # Interdependencies that moz.build world don't know about yet for compilation.
 # Note some others are hardcoded or "guessed" in recursivemake.py and emitter.py
-ifndef MOZ_FOLD_LIBS
-ifndef MOZ_SYSTEM_NSS
-netwerk/test/http3server/target: security/nss/lib/nss/nss_nss3/target security/nss/lib/ssl/ssl_ssl3/target
-endif
-ifndef MOZ_SYSTEM_NSPR
-netwerk/test/http3server/target: config/external/nspr/pr/target
-endif
-else
-ifndef MOZ_SYSTEM_NSS
-netwerk/test/http3server/target: security/target
-endif
-endif
-
 ifdef RELRHACK
 # When building with RELR-based ELF hack, we need to build the relevant parts
 # before any target.
 $(filter %/target,$(compile_targets)): build/unix/elfhack/host build/unix/elfhack/inject/target-objects
 endif

 ifdef MOZ_USING_WASM_SANDBOXING
 security/rlbox/pre-compile media/libsoundtouch/src/pre-compile: config/external/wasm2c_sandbox_compiler/host
 dom/media/ogg/target-objects extensions/spellcheck/hunspell/glue/target-objects gfx/thebes/target-objects parser/expat/target-objects parser/htmlparser/target-objects gfx/ots/src/target-objects: security/rlbox/pre-compile
 dom/media/target-objects dom/media/mediasink/target-objects: media/libsoundtouch/src/pre-compile
 endif

@@ -234,15 +210,7 @@ $(addprefix build/unix/stdc++compat/,target host) build/clang-plugin/host: config/host
 # that run cbindgen, tricking Make into keeping them early.
 $(rust_targets): $(DEPTH)/.cargo/config
 ifndef TEST_MOZBUILD
-pre-export:: $(DEPTH)/.cargo/config
+recurse_pre-export: $(DEPTH)/.cargo/config
 endif

-# When building gtest as part of the build (LINK_GTEST_DURING_COMPILE),
-# force the build system to get to it first, so that it can be linked
-# quickly without LTO, allowing the build system to go ahead with
-# plain gkrust and libxul while libxul-gtest is being linked and
-# dump-sym'ed.
-ifneq (,$(filter toolkit/library/gtest/rust/target-objects,$(compile_targets)))
-toolkit/library/rust/target-objects: toolkit/library/gtest/rust/target-objects
-endif
 endif
@@ -24,7 +24,7 @@ USE_AUTOTARGETS_MK = 1
 include $(MOZILLA_DIR)/config/makefiles/makeutils.mk

 ifdef REBUILD_CHECK
-REPORT_BUILD = $(info $(shell $(PYTHON3) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
+REPORT_BUILD = $(info $(shell $(PYTHON3) $(MOZILLA_DIR)/config/rebuild_check.py $@ $?))
 REPORT_BUILD_VERBOSE = $(REPORT_BUILD)
 else
 REPORT_BUILD = $(info $(relativesrcdir)/$(notdir $@))

@@ -637,7 +637,6 @@ $(ASOBJS):
 endif

 define syms_template
 syms:: $(2)
 $(2): $(1)
 ifdef MOZ_CRASHREPORTER
 	$$(call py_action,dumpsymbols $$@,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))

@@ -674,6 +673,7 @@ endif

 else ifdef MOZ_CRASHREPORTER
 $(foreach file,$(DUMP_SYMS_TARGETS),$(eval $(call syms_template,$(file),$(notdir $(file))_syms.track)))
+syms:: $(foreach file,$(DUMP_SYMS_TARGETS),$(notdir $(file))_syms.track)
 endif

 ifneq (,$(RUST_TESTS)$(RUST_LIBRARY_FILE)$(HOST_RUST_LIBRARY_FILE)$(RUST_PROGRAMS)$(HOST_RUST_PROGRAMS))

@@ -1000,7 +1000,7 @@ $(foreach category,$(INSTALL_TARGETS),\
 )

 $(foreach tier,$(INSTALL_TARGETS_TIERS), \
-  $(eval $(tier):: $(INSTALL_TARGETS_FILES_$(tier)) $(INSTALL_TARGETS_EXECUTABLES_$(tier))) \
+  $(eval $(if $(filter .,$(DEPTH)),recurse_$(tier):,$(tier)::) $(INSTALL_TARGETS_FILES_$(tier)) $(INSTALL_TARGETS_EXECUTABLES_$(tier))) \
 )

 install_targets_sanity = $(if $(filter-out $(notdir $@),$(notdir $(<))),$(error Looks like $@ has an unexpected dependency on $< which breaks INSTALL_TARGETS))

@@ -1070,7 +1070,7 @@ $(foreach category,$(PP_TARGETS), \
 )

 $(foreach tier,$(PP_TARGETS_TIERS), \
-  $(eval $(tier):: $(PP_TARGETS_RESULTS_$(tier))) \
+  $(eval $(if $(filter .,$(DEPTH)),recurse_$(tier):,$(tier)::) $(PP_TARGETS_RESULTS_$(tier))) \
 )

 PP_TARGETS_ALL_RESULTS := $(sort $(foreach tier,$(PP_TARGETS_TIERS),$(PP_TARGETS_RESULTS_$(tier))))
@@ -60,9 +60,11 @@ export default class Exception extends PureComponent {
     }

     const location = createLocation({
-      column: columnNumber - 1,
-      line: lineNumber,
       source: selectedSource,
+      line: lineNumber,
+      // Exceptions are reported with column being 1-based
+      // while the frontend uses 0-based column.
+      column: columnNumber - 1,
     });

     const { line, column } = toEditorPosition(location);
@@ -109,7 +109,8 @@ class Breakpoint extends PureComponent {
     const { column, line } = this.selectedLocation;

     const isWasm = source?.isWasm;
-    const columnVal = column ? `:${column}` : "";
+    // column is 0-based everywhere, but we want to display 1-based to the user.
+    const columnVal = column ? `:${column + 1}` : "";
     const bpLocation = isWasm
       ? `0x${line.toString(16).toUpperCase()}`
       : `${line}${columnVal}`;
@@ -309,7 +309,7 @@ describe("QuickOpenModal", () => {
     };
     wrapper.find("Connect(SearchInput)").simulate("keydown", event);
     expect(props.selectSpecificLocation).toHaveBeenCalledWith({
-      column: 12,
+      column: 11,
       line: 34,
       source: {
         id: "foo",

@@ -336,7 +336,7 @@ describe("QuickOpenModal", () => {
     };
     wrapper.find("Connect(SearchInput)").simulate("keydown", event);
     expect(props.selectSpecificLocation).toHaveBeenCalledWith({
-      column: 12,
+      column: 11,
       line: 34,
       source: {
         id: sourceId,

@@ -507,7 +507,7 @@ describe("QuickOpenModal", () => {
     };
     wrapper.find("Connect(SearchInput)").simulate("keydown", event);
     expect(props.selectSpecificLocation).toHaveBeenCalledWith({
-      column: 4,
+      column: 3,
       line: 3,
       source: { id },
       sourceActorId: undefined,
@@ -67,18 +67,13 @@ export function fromEditorLine(sourceId, line, sourceIsWasm) {
 }

 export function toEditorPosition(location) {
+  // Note that Spidermonkey, Debugger frontend and CodeMirror are all consistant regarding column
+  // and are 0-based. But only CodeMirror consider the line to be 0-based while the two others
+  // consider lines to be 1-based.
   return {
     line: toEditorLine(location.source.id, location.line),
     column:
-      isWasm(location.source.id) || !location.column ? 0 : location.column,
-  };
-}
-
-export function toEditorRange(sourceId, location) {
-  const { start, end } = location;
-  return {
-    start: toEditorPosition({ ...start, sourceId }),
-    end: toEditorPosition({ ...end, sourceId }),
+      isWasm(location.source.id) || (!location.column ? 0 : location.column),
   };
 }
@@ -5,7 +5,6 @@
 import {
   toEditorLine,
   toEditorPosition,
-  toEditorRange,
   toSourceLine,
   scrollToPosition,
   markText,

@@ -38,20 +37,6 @@ describe("toEditorPosition", () => {
   });
 });

-describe("toEditorRange", () => {
-  it("returns an editor range", () => {
-    const testId = "test-123";
-    const loc = {
-      start: { source: { id: testId }, line: 100, column: 25 },
-      end: { source: { id: testId }, line: 200, column: 0 },
-    };
-    expect(toEditorRange(testId, loc)).toEqual({
-      start: { line: 99, column: 25 },
-      end: { line: 199, column: 0 },
-    });
-  });
-});
-
 describe("toSourceLine", () => {
   it("returns a source line", () => {
     const testId = "test-123";
@@ -27,6 +27,10 @@ export function createLocation({
     sourceActor,
     sourceActorId: sourceActor?.id,

+    // `line` is 1-based while `column` is 0-based.
+    // This data is mostly coming from and driven by
+    // JSScript::lineno and JSScript::column
+    // https://searchfox.org/mozilla-central/rev/d81e60336d9f498ad3985491dc17c2b77969ade4/js/src/vm/JSScript.h#1544-1547
     line,
     column,
   };
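The comment above pins down the convention the rest of this patch series aligns tests and UI code with: location objects carry a 1-based line and a 0-based column (mirroring JSScript::lineno and JSScript::column), anything typed by or shown to the user is 1-based for both, and CodeMirror-style editors want a 0-based line. A minimal sketch of those conversions, with made-up helper names rather than the real frontend functions:

#include <cassert>
#include <cstdint>

struct Location {
  uint32_t line;    // 1-based, as in JSScript::lineno
  uint32_t column;  // 0-based, as in frontend location objects
};

// ":30:90" typed by a user means line 30, column 90, both 1-based.
Location FromUserInput(uint32_t userLine, uint32_t userColumn) {
  return {userLine, userColumn - 1};
}

// What a breakpoint list would print, e.g. "5:18" for column index 17.
uint32_t DisplayColumn(const Location& aLoc) { return aLoc.column + 1; }

// CodeMirror-style editors index lines from 0.
uint32_t EditorLine(const Location& aLoc) { return aLoc.line - 1; }

int main() {
  Location loc = FromUserInput(30, 90);
  assert(loc.line == 30 && loc.column == 89);
  assert(DisplayColumn(loc) == 90);
  assert(EditorLine(loc) == 29);
}

This is why the test expectations below shift by one: user-visible and on-the-wire columns gained one relative to the stored 0-based index.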
@@ -45,10 +45,14 @@ export function parseLineColumn(query) {
   if (isNaN(lineNumber)) {
     return null;
   }

+  if (isNaN(columnNumber)) {
+    return { line: lineNumber };
+  }
+  // columnNumber here is the user input value which is 1-based.
+  // Whereas in location objects, line is 1-based, and column is 0-based.
   return {
     line: lineNumber,
-    ...(!isNaN(columnNumber) ? { column: columnNumber } : null),
+    column: columnNumber - 1,
   };
 }
@@ -29,7 +29,7 @@ cases(
     {
       name: "line and column",
       query: ":30:90",
-      location: { column: 90, line: 30 },
+      location: { column: 89, line: 30 },
     },
   ]
 );

@@ -50,7 +50,7 @@ add_task(async function testBreakpointsListForMultipleTargets() {
   is(
     breakpointItems[0].textContent,
-    "func();5:17",
+    "func();5:18",
     "The info displayed for the 1st breakpoint is correct"
   );

@@ -66,7 +66,7 @@ add_task(async function testBreakpointsListForMultipleTargets() {
   is(
     breakpointItems[1].textContent,
-    "return x + y;3:4",
+    "return x + y;3:5",
     "The info displayed for the 2nd breakpoint is correct"
   );

@@ -34,7 +34,7 @@ add_task(async function () {
     dbg,
     findSource(dbg, "pause-points.js").id,
     lineToContinueTo,
-    4
+    5
   );
   ok(true, "Debugger continued to the expected line");

@@ -19,7 +19,7 @@ add_task(async function () {
     dbg,
     findSource(dbg, "pause-points.js").id,
     31,
-    4
+    5
   );
   await resume(dbg);

@@ -33,7 +33,7 @@ add_task(async function () {
     dbg,
     findSource(dbg, "pause-points.js").id,
     31,
-    4
+    5
   );
   await resume(dbg);
 });

@@ -12,7 +12,7 @@ add_task(async function () {
     dbg,
     "mapTestFunction",
     "router.js",
-    { line: 13, column: 2 },
+    { line: 13, column: 3 },
     async () => {
       await assertScopes(dbg, [
         "Module",

@@ -28,7 +28,7 @@ add_task(async function () {
     15,
   ]);

-  await addBreakpoint(dbg, "doc-inline-script-offset.html", 15, 66);
+  await addBreakpoint(dbg, "doc-inline-script-offset.html", 15, 67);

   const onReloaded = reload(dbg);
   await waitForPaused(dbg);

@@ -95,7 +95,7 @@ function assertBreakpointsList(dbg, source) {
   is(
     breakpointItems[0].textContent,
-    "return x + y;3:4",
+    "return x + y;3:5",
     "The info displayed for the 1st breakpoint is correct"
   );
 }

@@ -25,7 +25,7 @@ add_task(async function debuggerStatementOnUnload() {
   await waitForPaused(dbg);
   await waitForInlinePreviews(dbg);
-  assertPausedAtSourceAndLine(dbg, findSource(dbg, TEST_URL_1).id, 1, 55);
+  assertPausedAtSourceAndLine(dbg, findSource(dbg, TEST_URL_1).id, 1, 56);

   await evaluated;
   is(

@@ -66,7 +66,7 @@ add_task(async function exceptionsOnUnload() {
   // Cover catching exception on unload
   await waitForPaused(dbg);
-  assertPausedAtSourceAndLine(dbg, findSource(dbg, TEST_URL_2).id, 2, 48);
+  assertPausedAtSourceAndLine(dbg, findSource(dbg, TEST_URL_2).id, 2, 49);

   // But also that previous inline exceptions are still visible
   await assertInlineExceptionPreview(dbg, 3, 4, {

@@ -53,7 +53,7 @@ add_task(async function () {
     dbg,
     prettySource.id,
     LINE_INDEX_TO_BREAK_ON,
-    15
+    16
   );
   await resume(dbg);

@@ -66,7 +66,7 @@ add_task(async function () {
     dbg,
     prettySource.id,
     LINE_INDEX_TO_BREAK_ON,
-    18
+    19
   );
   const assertScopesForSecondColumnBreakpoint = topBlockItems =>
     assertScopes(dbg, [

@@ -89,7 +89,7 @@ add_task(async function () {
     dbg,
     prettySource.id,
     LINE_INDEX_TO_BREAK_ON,
-    18
+    19
   );
   await assertScopesForSecondColumnBreakpoint([["i", "1"]]);
   await resume(dbg);

@@ -103,7 +103,7 @@ add_task(async function () {
     dbg,
     prettySource.id,
     LINE_INDEX_TO_BREAK_ON,
-    18
+    19
   );
   await assertScopesForSecondColumnBreakpoint([["i", "2"]]);
   await resume(dbg);

@@ -81,7 +81,7 @@ add_task(async function () {
   await selectSource(dbg, "pretty.js");

   info("Add breakpoint to pretty.js (generated source)");
-  await addBreakpoint(dbg, "pretty.js", 4, 7);
+  await addBreakpoint(dbg, "pretty.js", 4, 8);

   await prettyPrint(dbg);

@@ -121,6 +121,6 @@ async function assertBreakpointsInNonPrettyAndPrettySources(dbg) {
   info("Assert pause and display on the correct line in the minified source");
   const minifiedSource = findSource(dbg, "pretty.js");
-  await assertPausedAtSourceAndLine(dbg, minifiedSource.id, 4, 7);
+  await assertPausedAtSourceAndLine(dbg, minifiedSource.id, 4, 8);
   await assertBreakpoint(dbg, 4);
 }

@@ -16,12 +16,12 @@ add_task(async function () {
     "preview-getter.js"
   );

-  await loadAndAddBreakpoint(dbg, "preview-getter.js", 5, 4);
+  await loadAndAddBreakpoint(dbg, "preview-getter.js", 5, 5);
   invokeInTab("funcA");
   await waitForPaused(dbg);

   info("Hovers over 'this' token to display the preview.");
-  const { tokenEl } = await tryHovering(dbg, 5, 8, "previewPopup");
+  const { tokenEl } = await tryHovering(dbg, 5, 5, "previewPopup");

   info("Wait for properties to be loaded");
   await waitUntil(

@@ -132,6 +132,9 @@ function assertColumn(dbg, columnNumber) {
   let value = dbg.selectors.getSelectedLocation().column;
   if (value === undefined) {
     value = null;
+  } else {
+    // column is 0-based, while we want to mention 1-based in the test.
+    value++;
   }
   is(value, columnNumber, `goto column is ${columnNumber}`);
 }

@@ -15,7 +15,7 @@ add_task(async function () {
     dbg,
     "webpack3-babel6",
     "eval-maps",
-    { line: 14, column: 4 },
+    { line: 14, column: 5 },
     ["one === 1", "two === 4", "three === 5"]
   );

@@ -23,7 +23,7 @@ add_task(async function () {
     dbg,
     "webpack3-babel6",
     "esmodules-cjs",
-    { line: 18, column: 2 },
+    { line: 18, column: 3 },
     [
       `aDefault === "a-default"`,
       `anAliased === "an-original"`,

@@ -41,7 +41,7 @@ add_task(async function () {
     dbg,
     "webpack3-babel6",
     "shadowed-vars",
-    { line: 18, column: 6 },
+    { line: 18, column: 7 },
     [`aVar === "var3"`, `aLet === "let3"`, `aConst === "const3"`]
   );

@@ -49,7 +49,7 @@ add_task(async function () {
     dbg,
     "webpack3-babel6",
     "babel-classes",
-    { line: 8, column: 16 },
+    { line: 8, column: 17 },
     [`this.hasOwnProperty("bound")`]
   );
 });

@@ -42,11 +42,11 @@ function testForOf(dbg) {
     dbg,
     "webpack3-babel6",
     "for-of",
-    { line: 5, column: 4 },
+    { line: 5, column: 5 },
     [
       {
         line: 5,
-        column: 7,
+        column: 4,
         expression: "doThing",
         result: "doThing(arg)",
       },

@@ -58,7 +58,7 @@ function testForOf(dbg) {
       },
       {
         line: 8,
-        column: 16,
+        column: 12,
         expression: "doThing",
         result: "doThing(arg)",
       },

@@ -71,7 +71,7 @@ function testShadowing(dbg) {
     dbg,
     "webpack3-babel6",
     "shadowed-vars",
-    { line: 18, column: 6 },
+    { line: 18, column: 7 },
     [
       // These aren't what the user would expect, but we test them anyway since
       // they reflect what this actually returns. These shadowed bindings read

@@ -79,37 +79,37 @@ function testShadowing(dbg) {
       // actual value is different.
       {
         line: 2,
-        column: 9,
+        column: 10,
         expression: "aVar",
         result: '"var3"',
       },
       {
         line: 3,
-        column: 9,
+        column: 10,
         expression: "aLet",
         result: '"let3"',
       },
       {
         line: 4,
-        column: 11,
+        column: 12,
         expression: "aConst",
         result: '"const3"',
       },
       {
         line: 10,
-        column: 11,
+        column: 12,
         expression: "aVar",
         result: '"var3"',
       },
       {
         line: 11,
-        column: 11,
+        column: 12,
         expression: "aLet",
         result: '"let3"',
       },
       {
         line: 12,
-        column: 13,
+        column: 14,
         expression: "aConst",
         result: '"const3"',
       },

@@ -117,19 +117,19 @@ function testShadowing(dbg) {
       // These actually result in the values the user would expect.
       {
         line: 14,
-        column: 13,
+        column: 14,
         expression: "aVar",
         result: '"var3"',
       },
       {
         line: 15,
-        column: 13,
+        column: 14,
         expression: "aLet",
         result: '"let3"',
       },
       {
         line: 16,
-        column: 13,
+        column: 14,
         expression: "aConst",
         result: '"const3"',
       },

@@ -142,35 +142,35 @@ function testImportedBindings(dbg) {
     dbg,
     "webpack3-babel6",
     "esmodules-cjs",
-    { line: 20, column: 2 },
+    { line: 20, column: 3 },
     [
       {
         line: 20,
-        column: 16,
+        column: 17,
         expression: "aDefault",
         result: '"a-default"',
       },
       {
         line: 21,
-        column: 16,
+        column: 17,
         expression: "anAliased",
         result: '"an-original"',
       },
       {
         line: 22,
-        column: 16,
+        column: 17,
         expression: "aNamed",
         result: '"a-named"',
       },
       {
         line: 23,
-        column: 16,
+        column: 17,
         expression: "anotherNamed",
         result: '"a-named"',
       },
       {
         line: 24,
-        column: 16,
+        column: 17,
         expression: "aNamespace",
         fields: [
           ["aNamed", '"a-named"'],

@@ -179,25 +179,25 @@ function testImportedBindings(dbg) {
       },
       {
         line: 29,
-        column: 20,
+        column: 21,
         expression: "aDefault2",
         result: '"a-default2"',
       },
      {
        line: 30,
-        column: 20,
+        column: 21,
        expression: "anAliased2",
        result: '"an-original2"',
      },
      {
        line: 31,
-        column: 20,
+        column: 21,
        expression: "aNamed2",
        result: '"a-named2"',
      },
      {
        line: 32,
-        column: 20,
+        column: 21,
        expression: "anotherNamed2",
        result: '"a-named2"',
      },

@@ -148,7 +148,7 @@ async function testBabelBindingsWithFlow(dbg) {
     dbg,
     target,
     "babel-bindings-with-flow",
-    { line: 9, column: 2 },
+    { line: 9, column: 3 },
     [
       "root",
       ["value", '"a-named"'],

@@ -177,7 +177,7 @@ async function testBabelFlowtypeBindings(dbg) {
     dbg,
     target,
     "babel-flowtype-bindings",
-    { line: 9, column: 2 },
+    { line: 9, column: 3 },
     [
       "Module",
       ["aConst", '"a-const"'],

@@ -196,7 +196,7 @@ async function testEvalMaps(dbg) {
   for (const target of ["webpack3", "webpack4"]) {
     const { defaultExport } = targetToFlags(target);

-    await breakpointScopes(dbg, target, "eval-maps", { line: 14, column: 4 }, [
+    await breakpointScopes(dbg, target, "eval-maps", { line: 14, column: 5 }, [
       "Block",
       ["<this>", "Window"],
       ["three", "5"],

@@ -230,7 +230,7 @@ async function testEvalMaps(dbg) {
     dbg,
     target,
     "eval-maps",
-    { line: 14, column: maybeLineStart(4) },
+    { line: 14, column: maybeLineStart(5) },
     [
       "Block",
       ["three", "5"],

@@ -251,7 +251,7 @@ async function testForOf(dbg) {
   for (const target of ["webpack3", "webpack4"]) {
     const { defaultExport } = targetToFlags(target);

-    await breakpointScopes(dbg, target, "for-of", { line: 5, column: 0 }, [
+    await breakpointScopes(dbg, target, "for-of", { line: 5, column: 1 }, [
       "Block",
       ["<this>", "Window"],
       ["x", "1"],

@@ -282,7 +282,7 @@ async function testForOf(dbg) {
     dbg,
     target,
     "for-of",
-    { line: 5, column: maybeLineStart(4) },
+    { line: 5, column: maybeLineStart(5) },
     [
       "For",
       ["x", "1"],

@@ -302,7 +302,7 @@ async function testShadowedVars(dbg) {
     dbg,
     target,
     "shadowed-vars",
-    { line: 18, column: 0 },
+    { line: 18, column: 1 },
     [
       "Block",
       ["<this>", "Window"],

@@ -345,7 +345,7 @@ async function testShadowedVars(dbg) {
     dbg,
     target,
     "shadowed-vars",
-    { line: 18, column: maybeLineStart(6) },
+    { line: 18, column: maybeLineStart(7) },
     [
       "Block",
       ["aConst", rollupOptimized || '"const3"'],

@@ -381,7 +381,7 @@ async function testLineStartBindingsES6(dbg) {
     dbg,
     target,
     "line-start-bindings-es6",
-    { line: 19, column: 0 },
+    { line: 19, column: 1 },
     [
       "Block",
       ["<this>", "{\u2026}"],

@@ -421,7 +421,7 @@ async function testLineStartBindingsES6(dbg) {
     dbg,
     target,
     "line-start-bindings-es6",
-    { line: 19, column: maybeLineStart(4) },
+    { line: 19, column: maybeLineStart(5) },
     [
       "Function Body",
       ["<this>", "{\u2026}"],

@@ -442,7 +442,7 @@ async function testThisArgumentsBindings(dbg) {
     dbg,
     target,
     "this-arguments-bindings",
-    { line: 4, column: 0 },
+    { line: 4, column: 1 },
     [
       "Block",
       ["<this>", '"this-value"'],

@@ -467,7 +467,7 @@ async function testThisArgumentsBindings(dbg) {
     dbg,
     target,
     "this-arguments-bindings",
-    { line: 8, column: 0 },
+    { line: 8, column: 1 },
     [
       "Block",
       ["<this>", '"this-value"'],

@@ -508,7 +508,7 @@ async function testThisArgumentsBindings(dbg) {
     dbg,
     target,
     "this-arguments-bindings",
-    { line: 4, column: maybeLineStart(4) },
+    { line: 4, column: maybeLineStart(5) },
     [
       "Function Body",
       ["<this>", '"this-value"'],

@@ -530,7 +530,7 @@ async function testThisArgumentsBindings(dbg) {
     dbg,
     target,
     "this-arguments-bindings",
-    { line: 8, column: maybeLineStart(6) },
+    { line: 8, column: maybeLineStart(7) },
     [
|
@ -530,7 +530,7 @@ async function testThisArgumentsBindings(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"this-arguments-bindings",
|
||||
{ line: 8, column: maybeLineStart(6) },
|
||||
{ line: 8, column: maybeLineStart(7) },
|
||||
[
|
||||
"arrow",
|
||||
["<this>", '"this-value"'],
|
||||
|
@ -553,7 +553,7 @@ async function testThisArgumentsBindings(dbg) {
|
|||
|
||||
async function testClasses(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "classes", { line: 6, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "classes", { line: 6, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "{}"],
|
||||
["arguments", "Arguments"],
|
||||
|
@ -570,7 +570,7 @@ async function testClasses(dbg) {
|
|||
"root()",
|
||||
]);
|
||||
|
||||
await breakpointScopes(dbg, target, "classes", { line: 16, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "classes", { line: 16, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "{}"],
|
||||
["three", "3"],
|
||||
|
@ -607,7 +607,7 @@ async function testClasses(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"classes",
|
||||
{ line: 6, column: maybeLineStart(6) },
|
||||
{ line: 6, column: maybeLineStart(7) },
|
||||
[
|
||||
"Class",
|
||||
target === "rollup" || isParcel
|
||||
|
@ -628,7 +628,7 @@ async function testClasses(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"classes",
|
||||
{ line: 16, column: maybeLineStart(6) },
|
||||
{ line: 16, column: maybeLineStart(7) },
|
||||
[
|
||||
"Function Body",
|
||||
["three", rollupOptimized || "3"],
|
||||
|
@ -648,7 +648,7 @@ async function testClasses(dbg) {
|
|||
|
||||
async function testForLoops(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 5, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 5, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["i", "1"],
|
||||
|
@ -660,7 +660,7 @@ async function testForLoops(dbg) {
|
|||
["module", "(optimized away)"],
|
||||
"root()",
|
||||
]);
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 9, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 9, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["i", '"2"'],
|
||||
|
@ -672,7 +672,7 @@ async function testForLoops(dbg) {
|
|||
["module", "(optimized away)"],
|
||||
"root()",
|
||||
]);
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 13, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "for-loops", { line: 13, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["i", "3"],
|
||||
|
@ -702,7 +702,7 @@ async function testForLoops(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"for-loops",
|
||||
{ line: 5, column: maybeLineStart(4) },
|
||||
{ line: 5, column: maybeLineStart(5) },
|
||||
[
|
||||
"For",
|
||||
["i", "1"],
|
||||
|
@ -716,7 +716,7 @@ async function testForLoops(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"for-loops",
|
||||
{ line: 9, column: maybeLineStart(4) },
|
||||
{ line: 9, column: maybeLineStart(5) },
|
||||
[
|
||||
"For",
|
||||
["i", '"2"'],
|
||||
|
@ -730,7 +730,7 @@ async function testForLoops(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"for-loops",
|
||||
{ line: 13, column: maybeLineStart(4) },
|
||||
{ line: 13, column: maybeLineStart(5) },
|
||||
[
|
||||
"For",
|
||||
["i", target === "rollup" ? "3" : rollupOptimized || "3"],
|
||||
|
@ -745,7 +745,7 @@ async function testForLoops(dbg) {
|
|||
|
||||
async function testFunctions(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "functions", { line: 6, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "functions", { line: 6, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "(optimized away)"],
|
||||
["arguments", "Arguments"],
|
||||
|
@ -787,7 +787,7 @@ async function testFunctions(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"functions",
|
||||
{ line: 6, column: maybeLineStart(8) },
|
||||
{ line: 6, column: maybeLineStart(9) },
|
||||
[
|
||||
"arrow",
|
||||
["p3", "undefined"],
|
||||
|
@ -814,7 +814,7 @@ async function testFunctions(dbg) {
|
|||
|
||||
async function testSwitches(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "switches", { line: 7, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "switches", { line: 7, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["val", "2"],
|
||||
|
@ -827,7 +827,7 @@ async function testSwitches(dbg) {
|
|||
"root()",
|
||||
]);
|
||||
|
||||
await breakpointScopes(dbg, target, "switches", { line: 10, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "switches", { line: 10, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["val", "3"],
|
||||
|
@ -859,7 +859,7 @@ async function testSwitches(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"switches",
|
||||
{ line: 7, column: maybeLineStart(6) },
|
||||
{ line: 7, column: maybeLineStart(7) },
|
||||
[
|
||||
"Switch",
|
||||
["val", rollupOptimized || "2"],
|
||||
|
@ -874,7 +874,7 @@ async function testSwitches(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"switches",
|
||||
{ line: 10, column: maybeLineStart(6) },
|
||||
{ line: 10, column: maybeLineStart(7) },
|
||||
[
|
||||
"Block",
|
||||
["val", rollupOptimized || "3"],
|
||||
|
@ -891,7 +891,7 @@ async function testSwitches(dbg) {
|
|||
|
||||
async function testTryCatches(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "try-catches", { line: 8, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "try-catches", { line: 8, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["two", "2"],
|
||||
|
@ -923,7 +923,7 @@ async function testTryCatches(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"try-catches",
|
||||
{ line: 8, column: maybeLineStart(4) },
|
||||
{ line: 8, column: maybeLineStart(5) },
|
||||
[
|
||||
"Block",
|
||||
["two", rollupOptimized || "2"],
|
||||
|
@ -944,7 +944,7 @@ async function testLexAndNonlex(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"lex-and-nonlex",
|
||||
{ line: 3, column: 0 },
|
||||
{ line: 3, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "undefined"],
|
||||
|
@ -977,7 +977,7 @@ async function testLexAndNonlex(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"lex-and-nonlex",
|
||||
{ line: 3, column: maybeLineStart(4) },
|
||||
{ line: 3, column: maybeLineStart(5) },
|
||||
[
|
||||
"Function Body",
|
||||
target === "rollup" || target === "parcel" ? "class Thing" : "Thing()",
|
||||
|
@ -1001,7 +1001,7 @@ async function testTypescriptClasses(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"typescript-classes",
|
||||
{ line: 50, column: 2 },
|
||||
{ line: 50, column: 3 },
|
||||
[
|
||||
"Module",
|
||||
"AnotherThing()",
|
||||
|
@ -1031,7 +1031,7 @@ async function testTypescriptClasses(dbg) {
|
|||
|
||||
async function testTypeModule(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "type-module", { line: 7, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "type-module", { line: 7, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["arguments", "Arguments"],
|
||||
|
@ -1060,7 +1060,7 @@ async function testTypeModule(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"type-module",
|
||||
{ line: 7, column: maybeLineStart(2) },
|
||||
{ line: 7, column: maybeLineStart(3) },
|
||||
[
|
||||
"Module",
|
||||
["alsoModuleScoped", "2"],
|
||||
|
@ -1077,7 +1077,7 @@ async function testTypeScriptCJS(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"type-script-cjs",
|
||||
{ line: 7, column: 0 },
|
||||
{ line: 7, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1108,7 +1108,7 @@ async function testTypeScriptCJS(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"type-script-cjs",
|
||||
{ line: 7, column: 2 },
|
||||
{ line: 7, column: 3 },
|
||||
[
|
||||
"Module",
|
||||
"alsoModuleScopes",
|
||||
|
@ -1133,7 +1133,7 @@ async function testOutOfOrderDeclarationsCJS(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"out-of-order-declarations-cjs",
|
||||
{ line: 8, column: 4 },
|
||||
{ line: 8, column: 5 },
|
||||
[
|
||||
"callback",
|
||||
"fn(inner)",
|
||||
|
@ -1165,7 +1165,7 @@ async function testOutOfOrderDeclarationsCJS(dbg) {
|
|||
|
||||
async function testModulesCJS(dbg) {
|
||||
for (const target of ["webpack3", "webpack4"]) {
|
||||
await breakpointScopes(dbg, target, "modules-cjs", { line: 7, column: 0 }, [
|
||||
await breakpointScopes(dbg, target, "modules-cjs", { line: 7, column: 1 }, [
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
["arguments", "Arguments"],
|
||||
|
@ -1188,7 +1188,7 @@ async function testModulesCJS(dbg) {
|
|||
"webpack4-babel6",
|
||||
"webpack4-babel7",
|
||||
]) {
|
||||
await breakpointScopes(dbg, target, "modules-cjs", { line: 7, column: 2 }, [
|
||||
await breakpointScopes(dbg, target, "modules-cjs", { line: 7, column: 3 }, [
|
||||
"Module",
|
||||
["alsoModuleScoped", "2"],
|
||||
["moduleScoped", "1"],
|
||||
|
@ -1202,7 +1202,7 @@ async function testWebpackLineMappings(dbg) {
|
|||
dbg,
|
||||
"webpack3",
|
||||
"webpack-line-mappings",
|
||||
{ line: 11, column: 0 },
|
||||
{ line: 11, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", '"this-value"'],
|
||||
|
@ -1232,7 +1232,7 @@ async function testWebpackLineMappings(dbg) {
|
|||
dbg,
|
||||
"webpack4",
|
||||
"webpack-line-mappings",
|
||||
{ line: 11, column: 0 },
|
||||
{ line: 11, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", '"this-value"'],
|
||||
|
@ -1267,7 +1267,7 @@ async function testWebpackFunctions(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"webpack-functions",
|
||||
{ line: 4, column: 0 },
|
||||
{ line: 4, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "{\u2026}"],
|
||||
|
@ -1289,7 +1289,7 @@ async function testESModules(dbg) {
|
|||
dbg,
|
||||
"webpack3",
|
||||
"esmodules",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1319,7 +1319,7 @@ async function testESModules(dbg) {
|
|||
dbg,
|
||||
"webpack4",
|
||||
"esmodules",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1360,7 +1360,7 @@ async function testESModules(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"esmodules",
|
||||
{ line: 20, column: maybeLineStart(2) },
|
||||
{ line: 20, column: maybeLineStart(3) },
|
||||
[
|
||||
"Module",
|
||||
["aDefault", '"a-default"'],
|
||||
|
@ -1388,7 +1388,7 @@ async function testESModules(dbg) {
|
|||
// fully and includes the () of the call in the range of the identifier.
|
||||
// this means that Rollup, has to map locations for calls to imports,
|
||||
// it can fail. This will be addressed in Babel eventually.
|
||||
await breakpointScopes(dbg, target, "esmodules", { line: 20, column: 2 }, [
|
||||
await breakpointScopes(dbg, target, "esmodules", { line: 20, column: 3 }, [
|
||||
"root",
|
||||
["<this>", "Window"],
|
||||
["arguments", "Arguments"],
|
||||
|
@ -1416,7 +1416,7 @@ async function testESModulesCJS(dbg) {
|
|||
dbg,
|
||||
"webpack3",
|
||||
"esmodules-cjs",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1446,7 +1446,7 @@ async function testESModulesCJS(dbg) {
|
|||
dbg,
|
||||
"webpack4",
|
||||
"esmodules-cjs",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1484,7 +1484,7 @@ async function testESModulesCJS(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"esmodules-cjs",
|
||||
{ line: 20, column: 2 },
|
||||
{ line: 20, column: 3 },
|
||||
[
|
||||
"Module",
|
||||
["aDefault", '"a-default"'],
|
||||
|
@ -1513,7 +1513,7 @@ async function testESModulesES6(dbg) {
|
|||
dbg,
|
||||
"webpack3",
|
||||
"esmodules-es6",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1543,7 +1543,7 @@ async function testESModulesES6(dbg) {
|
|||
dbg,
|
||||
"webpack4",
|
||||
"esmodules-es6",
|
||||
{ line: 20, column: 0 },
|
||||
{ line: 20, column: 1 },
|
||||
[
|
||||
"Block",
|
||||
["<this>", "Window"],
|
||||
|
@ -1584,7 +1584,7 @@ async function testESModulesES6(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"esmodules-es6",
|
||||
{ line: 20, column: maybeLineStart(2) },
|
||||
{ line: 20, column: maybeLineStart(3) },
|
||||
[
|
||||
"Module",
|
||||
["aDefault", '"a-default"'],
|
||||
|
@ -1616,7 +1616,7 @@ async function testESModulesES6(dbg) {
|
|||
dbg,
|
||||
target,
|
||||
"esmodules-es6",
|
||||
{ line: 20, column: 2 },
|
||||
{ line: 20, column: 3 },
|
||||
[
|
||||
"root",
|
||||
["<this>", "Window"],
|
||||
|
|
|
@ -16,7 +16,7 @@ add_task(async function () {
|
|||
await waitForSources(dbg, url);
|
||||
const source = findSource(dbg, url);
|
||||
await selectSource(dbg, source);
|
||||
await addBreakpoint(dbg, source, 20, 2);
|
||||
await addBreakpoint(dbg, source, 20, 3);
|
||||
invokeInTab("webpack3Babel6EsmodulesCjs");
|
||||
await waitForPaused(dbg);
|
||||
|
||||
|
@ -25,21 +25,21 @@ add_task(async function () {
|
|||
await waitForLoadedScopes(dbg);
|
||||
ok(getOriginalScope(dbg) != null, "Scopes are now mapped");
|
||||
|
||||
await assertPreviewTextValue(dbg, 20, 16, {
|
||||
await assertPreviewTextValue(dbg, 20, 17, {
|
||||
result: '"a-default"',
|
||||
expression: "aDefault",
|
||||
});
|
||||
|
||||
info("3. Hover on a token with mapScopes disabled");
|
||||
await toggleMapScopes(dbg);
|
||||
await assertPreviewTextValue(dbg, 21, 16, {
|
||||
await assertPreviewTextValue(dbg, 21, 17, {
|
||||
result: "undefined",
|
||||
expression: "anAliased",
|
||||
});
|
||||
|
||||
info("4. StepOver with mapScopes disabled");
|
||||
await stepOver(dbg);
|
||||
await assertPreviewTextValue(dbg, 20, 16, {
|
||||
await assertPreviewTextValue(dbg, 20, 17, {
|
||||
result: "undefined",
|
||||
expression: "aDefault",
|
||||
});
|
||||
|
|
|
@ -27,7 +27,7 @@ add_task(async function () {
|
|||
await selectSource(dbg, mainSrc);
|
||||
|
||||
// Test that breakpoint is not off by a line.
|
||||
await addBreakpoint(dbg, mainSrc, 4, 2);
|
||||
await addBreakpoint(dbg, mainSrc, 4, 3);
|
||||
is(getBreakpointCount(), 1, "One breakpoint exists");
|
||||
ok(
|
||||
getBreakpoint(createLocation({ source: mainSrc, line: 4, column: 2 })),
|
||||
|
@ -38,7 +38,7 @@ add_task(async function () {
|
|||
invokeInTab("logMessage");
|
||||
|
||||
await waitForPaused(dbg);
|
||||
assertPausedAtSourceAndLine(dbg, mainSrc.id, 4, 2);
|
||||
assertPausedAtSourceAndLine(dbg, mainSrc.id, 4, 3);
|
||||
|
||||
// Tests the existence of the sourcemap link in the original source.
|
||||
ok(findElement(dbg, "sourceMapLink"), "Sourcemap link in original source");
|
||||
|
|
|
@ -24,8 +24,8 @@ add_task(async function () {
|
|||
);
|
||||
|
||||
await addBreakpoint(dbg, entrySrc, 5);
|
||||
await addBreakpoint(dbg, entrySrc, 15, 0);
|
||||
await disableBreakpoint(dbg, entrySrc, 15, 0);
|
||||
await addBreakpoint(dbg, entrySrc, 15, 1);
|
||||
await disableBreakpoint(dbg, entrySrc, 15, 1);
|
||||
|
||||
// Test reloading the debugger
|
||||
const onReloaded = reload(dbg, "opts.js");
|
||||
|
|
|
@ -27,7 +27,7 @@ add_task(async function () {
|
|||
await selectSource(dbg, mainSrc);
|
||||
|
||||
// Test that breakpoint is not off by a line.
|
||||
await addBreakpoint(dbg, mainSrc, 4, 2);
|
||||
await addBreakpoint(dbg, mainSrc, 4, 3);
|
||||
is(getBreakpointCount(), 1, "One breakpoint exists");
|
||||
ok(
|
||||
getBreakpoint(createLocation({ source: mainSrc, line: 4, column: 2 })),
|
||||
|
|
|
@ -25,7 +25,7 @@ add_task(async function () {
|
|||
await selectSource(dbg, sortedSrc);
|
||||
|
||||
// Test that breakpoint is not off by a line.
|
||||
await addBreakpoint(dbg, sortedSrc, 9, 4);
|
||||
await addBreakpoint(dbg, sortedSrc, 9, 5);
|
||||
is(dbg.selectors.getBreakpointCount(), 1, "One breakpoint exists");
|
||||
ok(
|
||||
dbg.selectors.getBreakpoint(
|
||||
|
@ -37,7 +37,7 @@ add_task(async function () {
|
|||
invokeInTab("test");
|
||||
|
||||
await waitForPaused(dbg);
|
||||
assertPausedAtSourceAndLine(dbg, sortedSrc.id, 9, 4);
|
||||
assertPausedAtSourceAndLine(dbg, sortedSrc.id, 9, 5);
|
||||
|
||||
is(getScopeNodeLabel(dbg, 1), "Block");
|
||||
is(getScopeNodeLabel(dbg, 2), "na");
|
||||
|
|
|
@ -25,7 +25,7 @@ add_task(async function () {
|
|||
// We have to remove the first breakpoint, set on the first worker.
|
||||
// All the workers use the same source.
|
||||
// The first worker is loaded on the html page load.
|
||||
await removeBreakpoint(dbg, workerSource.id, 1, 12);
|
||||
await removeBreakpoint(dbg, workerSource.id, 1, 13);
|
||||
await resume(dbg);
|
||||
|
||||
// Make sure that suspending activity in the worker when attaching does not
|
||||
|
@ -36,5 +36,5 @@ add_task(async function () {
|
|||
|
||||
// We should be paused in the message listener in simple-worker.js
|
||||
assertPausedAtSourceAndLine(dbg, workerSource.id, 10);
|
||||
await removeBreakpoint(dbg, workerSource.id, 10, 2);
|
||||
await removeBreakpoint(dbg, workerSource.id, 10, 3);
|
||||
});
|
||||
|
|
|
@@ -443,8 +443,10 @@ function assertPausedAtSourceAndLine(
  );
  const pauseColumn = getVisibleSelectedFrameColumn(dbg);
  if (expectedColumn) {
    // `pauseColumn` is 0-based, coming from internal state,
    // while `expectedColumn` is manually passed from test scripts and so is 1-based.
    is(
      pauseColumn,
      pauseColumn + 1,
      expectedColumn,
      "Redux state for currently selected frame's column is correct"
    );
@@ -475,10 +477,14 @@ function assertPausedAtSourceAndLine(
  );

  if (expectedColumn) {
    // `column` is 0-based, coming from internal state,
    // while `expectedColumn` is manually passed from test scripts and so is 1-based.
    is(
      column,
      column + 1,
      expectedColumn,
      `Frame paused at column ${column}, but expected column ${expectedColumn}`
      `Frame paused at column ${
        column + 1
      }, but expected column ${expectedColumn}`
    );
  }
}
@@ -1041,7 +1047,8 @@ async function addBreakpoint(dbg, source, line, column, options) {
  const bpCount = dbg.selectors.getBreakpointCount();
  const onBreakpoint = waitForDispatch(dbg.store, "SET_BREAKPOINT");
  await dbg.actions.addBreakpoint(
    createLocation({ source, line, column }),
    // column is 0-based internally, but tests are using 1-based.
    createLocation({ source, line, column: column - 1 }),
    options
  );
  await onBreakpoint;
@@ -1064,11 +1071,17 @@ async function addBreakpointViaGutter(dbg, line) {
}

function disableBreakpoint(dbg, source, line, column) {
  column = column || getFirstBreakpointColumn(dbg, source, line);
  if (column === 0) {
    throw new Error("disableBreakpoint expect a 1-based column argument");
  }
  // `internalColumn` is 0-based internally, while `column` manually defined in test scripts is 1-based.
  const internalColumn = column
    ? column - 1
    : getFirstBreakpointColumn(dbg, source, line);
  const location = createLocation({
    source,
    line,
    column,
    column: internalColumn,
  });
  const bp = getBreakpointForLocation(dbg, location);
  return dbg.actions.disableBreakpoint(bp);
@@ -1107,13 +1120,16 @@ async function loadAndAddBreakpoint(dbg, filename, line, column) {
  await addBreakpoint(dbg, source, line, column);

  is(getBreakpointCount(), 1, "One breakpoint exists");
  if (!getBreakpoint(createLocation({ source, line, column }))) {
  // column is 0-based internally, but tests are using 1-based.
  if (!getBreakpoint(createLocation({ source, line, column: column - 1 }))) {
    const breakpoints = getBreakpointsMap();
    const id = Object.keys(breakpoints).pop();
    const loc = breakpoints[id].location;
    ok(
      false,
      `Breakpoint has correct line ${line}, column ${column}, but was line ${loc.line} column ${loc.column}`
      `Breakpoint has correct line ${line}, column ${column}, but was line ${
        loc.line
      } column ${loc.column + 1}`
    );
  }

@@ -1257,7 +1273,8 @@ async function expandAllSourceNodes(dbg, treeNode) {
 */
function removeBreakpoint(dbg, sourceId, line, column) {
  const source = dbg.selectors.getSource(sourceId);
  column = column || getFirstBreakpointColumn(dbg, source, line);
  // column is 0-based internally, but tests are using 1-based.
  column = column ? column - 1 : getFirstBreakpointColumn(dbg, source, line);
  const location = createLocation({
    source,
    line,
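The breakpoint-helper hunks above all encode the same convention: columns written in test scripts are 1-based, while the debugger's internal locations are 0-based, so the helpers subtract one on the way in and add one on the way out. A minimal sketch of that convention in plain JavaScript follows; the function names are illustrative only and are not part of the patch.

// Illustrative only: the 1-based (test script) <-> 0-based (internal) column
// mapping that addBreakpoint, disableBreakpoint and removeBreakpoint now apply.
function toInternalColumn(testColumn) {
  // Test scripts pass 1-based columns; internal locations store 0-based ones.
  return testColumn - 1;
}

function toTestColumn(internalColumn) {
  // Internal state reports 0-based columns; assertions compare 1-based ones.
  return internalColumn + 1;
}

// Example: a breakpoint placed at column 3 in a test maps to internal column 2,
// and a pause reported internally at column 2 is asserted as column 3.
console.log(toInternalColumn(3)); // 2
console.log(toTestColumn(2));     // 3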
@@ -510,7 +510,9 @@ class InactivePropertyHelper {
      // because the text-wrap: balance; property only applies to
      // the first block. And fragmented elements (with multiple
      // blocks) are excluded from line balancing for the time being.
      return blockLineCounts[0] > TEXT_WRAP_BALANCE_LIMIT;
      return (
        blockLineCounts && blockLineCounts[0] > TEXT_WRAP_BALANCE_LIMIT
      );
    },
    fixId: "inactive-css-text-wrap-balance-lines-exceeded-fix",
    msgId: "inactive-css-text-wrap-balance-lines-exceeded",
@@ -524,7 +526,7 @@ class InactivePropertyHelper {
        return false;
      }
      const blockLineCounts = InspectorUtils.getBlockLineCounts(this.node);
      const isFragmented = blockLineCounts.length > 1;
      const isFragmented = blockLineCounts && blockLineCounts.length > 1;
      return isFragmented;
    },
    fixId: "inactive-css-text-wrap-balance-fragmented-fix",

@@ -53,6 +53,19 @@ export default [
    rules: ["div { text-wrap: balance; column-count: 2; }"],
    isActive: false,
  },
  {
    info: "text-wrap: balance; does not throw if element is not a block",
    property: "text-wrap",
    createTestElement: rootNode => {
      const element = document.createElement("div");
      element.textContent = LOREM_IPSUM;
      rootNode.append(element);
      return element;
    },
    tagName: "div",
    rules: ["div { text-wrap: balance; display: inline; }"],
    isActive: true,
  },
  {
    info: "text-wrap: initial; is active",
    property: "text-wrap",
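Both InactivePropertyHelper hunks above make the same defensive change: the block line counts can be missing when the element does not generate a block, which is what the new `display: inline` test case exercises, so the checks now guard before reading an index or `.length`. A small sketch of the guarded pattern follows; treating the input as "array of per-block line counts, or null" is an assumption drawn from these hunks, not from documented InspectorUtils behavior.

// Sketch of the null-tolerant checks; `blockLineCounts` is either an array of
// line counts per block fragment, or null/undefined for non-block elements.
function exceedsBalanceLimit(blockLineCounts, limit) {
  return Boolean(blockLineCounts && blockLineCounts[0] > limit);
}

function isFragmented(blockLineCounts) {
  return Boolean(blockLineCounts && blockLineCounts.length > 1);
}

console.log(exceedsBalanceLimit(null, 10));    // false: inline element, no block
console.log(exceedsBalanceLimit([12, 3], 10)); // true: first block has 12 lines
console.log(isFragmented([12, 3]));            // true: more than one block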
@@ -70,6 +70,9 @@ struct DevTools : public ::testing::Test {
  JSObject* createGlobal() {
    /* Create the global object. */
    JS::RealmOptions options;
    // dummy
    options.behaviors().setReduceTimerPrecisionCallerType(
        JS::RTPCallerTypeToken{0});
    return JS_NewGlobalObject(cx, getGlobalClass(), nullptr,
                              JS::FireOnNewGlobalHook, options);
  }

@@ -10,6 +10,9 @@
DEF_TEST(DoesCrossCompartmentBoundaries, {
  // Create a new global to get a new compartment.
  JS::RealmOptions options;
  // dummy
  options.behaviors().setReduceTimerPrecisionCallerType(
      JS::RTPCallerTypeToken{0});
  JS::Rooted<JSObject*> newGlobal(
      cx, JS_NewGlobalObject(cx, getGlobalClass(), nullptr,
                             JS::FireOnNewGlobalHook, options));

@@ -10,6 +10,9 @@
DEF_TEST(DoesntCrossCompartmentBoundaries, {
  // Create a new global to get a new compartment.
  JS::RealmOptions options;
  // dummy
  options.behaviors().setReduceTimerPrecisionCallerType(
      JS::RTPCallerTypeToken{0});
  JS::Rooted<JSObject*> newGlobal(
      cx, JS_NewGlobalObject(cx, getGlobalClass(), nullptr,
                             JS::FireOnNewGlobalHook, options));
@@ -10453,7 +10453,9 @@ nsresult nsDocShell::DoURILoad(nsDocShellLoadState* aLoadState,
    if (context->HasValidTransientUserGestureActivation()) {
      aLoadState->SetHasValidUserGestureActivation(true);
    }
    aLoadState->SetTriggeringWindowId(context->Id());
    if (!aLoadState->TriggeringWindowId()) {
      aLoadState->SetTriggeringWindowId(context->Id());
    }
    if (!aLoadState->TriggeringStorageAccess()) {
      Document* contextDoc = context->GetExtantDoc();
      if (contextDoc) {
@@ -115,7 +115,6 @@
#include "mozilla/StaticPrefs_browser.h"
#include "mozilla/StaticPrefs_docshell.h"
#include "mozilla/StaticPrefs_dom.h"
#include "mozilla/StaticPrefs_editor.h"
#include "mozilla/StaticPrefs_fission.h"
#include "mozilla/StaticPrefs_full_screen_api.h"
#include "mozilla/StaticPrefs_layout.h"
@@ -5386,12 +5385,13 @@ bool Document::ExecCommand(const nsAString& aHTMLCommandName, bool aShowUI,
      SetUseCounter(eUseCounter_custom_DocumentExecCommandContentReadOnly);
      break;
    case Command::EnableCompatibleJoinSplitNodeDirection:
      // We don't allow to take the legacy behavior back if the new one is
      // enabled by default.
      if (StaticPrefs::
              editor_join_split_direction_compatible_with_the_other_browsers() &&
          !adjustedValue.EqualsLiteral("true") &&
          !aSubjectPrincipal.IsSystemPrincipal()) {
      // We didn't allow to enable the legacy behavior once we've enabled the
      // new behavior by default. For keeping the behavior at supporting both
      // mode, we should keep returning `false` if the web app to enable the
      // legacy mode. Additionally, we don't support the legacy direction
      // anymore. Therefore, we can return `false` here even if the caller is
      // an addon or chrome script.
      if (!adjustedValue.EqualsLiteral("true")) {
        return false;
      }
      break;
@@ -16313,7 +16313,13 @@ bool Document::RecomputeResistFingerprinting() {
          ("Finished RecomputeResistFingerprinting with result %x",
           mShouldResistFingerprinting));

  return previous != mShouldResistFingerprinting;
  bool changed = previous != mShouldResistFingerprinting;
  if (changed) {
    if (auto win = nsGlobalWindowInner::Cast(GetInnerWindow())) {
      win->RefreshReduceTimerPrecisionCallerType();
    }
  }
  return changed;
}

bool Document::ShouldResistFingerprinting(RFPTarget aTarget) const {
@@ -108,6 +108,9 @@ already_AddRefed<nsDocShellLoadState> LocationBase::CheckURL(
  loadState->SetHasValidUserGestureActivation(
      doc->HasValidTransientUserGestureActivation());

  loadState->SetTriggeringWindowId(doc->InnerWindowID());
  loadState->SetTriggeringStorageAccess(doc->UsingStorageAccess());

  return loadState.forget();
}

@@ -714,6 +714,12 @@ nsresult nsFrameLoader::ReallyStartLoadingInternal() {

  loadState->SetFirstParty(false);

  Document* ownerDoc = mOwnerContent->OwnerDoc();
  if (ownerDoc) {
    loadState->SetTriggeringStorageAccess(ownerDoc->UsingStorageAccess());
    loadState->SetTriggeringWindowId(ownerDoc->InnerWindowID());
  }

  // If we're loading the default about:blank document in a <browser> element,
  // prevent the load from causing a process switch by explicitly overriding
  // remote type selection.

@@ -3621,6 +3621,12 @@ void nsGlobalWindowInner::RefreshRealmPrincipal() {
      nsJSPrincipals::get(mDoc->NodePrincipal()));
}

void nsGlobalWindowInner::RefreshReduceTimerPrecisionCallerType() {
  JS::SetRealmReduceTimerPrecisionCallerType(
      js::GetNonCCWObjectRealm(GetWrapperPreserveColor()),
      RTPCallerTypeToToken(GetRTPCallerType()));
}

already_AddRefed<nsIWidget> nsGlobalWindowInner::GetMainWidget() {
  FORWARD_TO_OUTER(GetMainWidget, (), nullptr);
}

@@ -363,6 +363,7 @@ class nsGlobalWindowInner final : public mozilla::dom::EventTarget,

  // Inner windows only.
  void RefreshRealmPrincipal();
  void RefreshReduceTimerPrecisionCallerType();

  // For accessing protected field mFullscreen
  friend class FullscreenTransitionTask;

@@ -2414,6 +2414,8 @@ nsresult nsGlobalWindowOuter::SetNewDocument(Document* aDocument,

  MOZ_RELEASE_ASSERT(newInnerWindow->mDoc == aDocument);

  newInnerWindow->RefreshReduceTimerPrecisionCallerType();

  if (!aState) {
    if (reUseInnerWindow) {
      // The StorageAccess state may have changed. Invalidate the cached
@@ -2488,7 +2490,7 @@ nsresult nsGlobalWindowOuter::SetNewDocument(Document* aDocument,
            &nsGlobalWindowInner::FireOnNewGlobalObject));
  }

  if (newInnerWindow && !newInnerWindow->mHasNotifiedGlobalCreated && mDoc) {
  if (!newInnerWindow->mHasNotifiedGlobalCreated && mDoc) {
    // We should probably notify. However if this is the, arguably bad,
    // situation when we're creating a temporary non-chrome-about-blank
    // document in a chrome docshell, don't notify just yet. Instead wait
@@ -9,24 +9,25 @@
  <script src="/tests/SimpleTest/SimpleTest.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
  <script type="application/javascript">
  SimpleTest.waitForExplicitFinish();
  let path = location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/file_youtube_flash_embed.html';
  onmessage = function(e) {
    let msg = JSON.parse(e.data);
    if (msg.fn == "finish") {
      SimpleTest.finish();
      return;
    }
    self[msg.fn].apply(null, msg.args);
  }
  function onLoad() {
    // The test file must be loaded into youtube.com domain
    // because it needs unprivileged access to fullscreenEnabled.
    ifr.src = "https://mochitest.youtube.com" + path;
  }
  SimpleTest.waitForExplicitFinish();
  document.addEventListener("DOMContentLoaded", async function() {
    await SpecialPowers.pushPrefEnv({ set: [["plugins.rewrite_youtube_embeds", true]] });
    let path = location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/file_youtube_flash_embed.html';
    window.onmessage = function(e) {
      let msg = JSON.parse(e.data);
      if (msg.fn == "finish") {
        SimpleTest.finish();
        return;
      }
      self[msg.fn].apply(null, msg.args);
    }
    // The test file must be loaded into youtube.com domain
    // because it needs unprivileged access to fullscreenEnabled.
    ifr.src = "https://mochitest.youtube.com" + path;
  });
  </script>
</head>
<body onload="onLoad()">
<body>
  <iframe id="ifr" allowfullscreen></iframe>
</body>
</html>
@@ -2954,6 +2954,15 @@ bool CreateGlobal(JSContext* aCx, T* aNative, nsWrapperCache* aCache,
    if (!CreateGlobalOptions<T>::PostCreateGlobal(aCx, aGlobal)) {
      return false;
    }

    // Initializing this at this point for nsGlobalWindowInner makes no sense,
    // because GetRTPCallerType doesn't return the correct result before
    // the global is completely initialized with a document.
    if constexpr (!std::is_base_of_v<nsGlobalWindowInner, T>) {
      JS::SetRealmReduceTimerPrecisionCallerType(
          js::GetNonCCWObjectRealm(aGlobal),
          RTPCallerTypeToToken(aNative->GetRTPCallerType()));
    }
  }

  if (aInitStandardClasses && !JS::InitRealmStandardClasses(aCx)) {

@@ -138,6 +138,10 @@ JSObject* SimpleGlobalObject::Create(GlobalType globalType,
  RefPtr<SimpleGlobalObject> globalObject =
      new SimpleGlobalObject(global, globalType);

  JS::SetRealmReduceTimerPrecisionCallerType(
      js::GetNonCCWObjectRealm(global),
      RTPCallerTypeToToken(globalObject->GetRTPCallerType()));

  // Pass on ownership of globalObject to |global|.
  JS::SetObjectISupports(global, globalObject.forget().take());
@ -795,6 +795,7 @@ enum WebIDLUtilityActorName {
|
|||
"mfMediaEngineCDM",
|
||||
"jSOracle",
|
||||
"windowsUtils",
|
||||
"windowsFileDialog",
|
||||
};
|
||||
|
||||
dictionary UtilityActorsDictionary {
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
#include "js/CompileOptions.h" // JS::CompileOptions, JS::OwningCompileOptions, JS::DecodeOptions, JS::OwningDecodeOptions, JS::DelazificationOption
|
||||
#include "js/ContextOptions.h" // JS::ContextOptionsRef
|
||||
#include "js/experimental/JSStencil.h" // JS::Stencil, JS::InstantiationStorage
|
||||
#include "js/experimental/CompileScript.h" // JS::FrontendContext, JS::NewFrontendContext, JS::DestroyFrontendContext, JS::SetNativeStackQuota, JS::CompilationStorage, JS::CompileGlobalScriptToStencil, JS::CompileModuleScriptToStencil, JS::DecodeStencil, JS::PrepareForInstantiate
|
||||
#include "js/experimental/CompileScript.h" // JS::FrontendContext, JS::NewFrontendContext, JS::DestroyFrontendContext, JS::SetNativeStackQuota, JS::ThreadStackQuotaForSize, JS::CompilationStorage, JS::CompileGlobalScriptToStencil, JS::CompileModuleScriptToStencil, JS::DecodeStencil, JS::PrepareForInstantiate
|
||||
#include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
|
||||
#include "js/loader/ScriptLoadRequest.h"
|
||||
#include "ScriptCompression.h"
|
||||
|
@ -1871,16 +1871,10 @@ class ScriptOrModuleCompileTask final : public CompileOrDecodeTask {
|
|||
}
|
||||
|
||||
private:
|
||||
static size_t ThreadStackQuotaForSize(size_t size) {
|
||||
// Set the stack quota to 10% less that the actual size.
|
||||
// NOTE: This follows what JS helper thread does.
|
||||
return size_t(double(size) * 0.9);
|
||||
}
|
||||
|
||||
already_AddRefed<JS::Stencil> Compile() {
|
||||
size_t stackSize = TaskController::GetThreadStackSize();
|
||||
JS::SetNativeStackQuota(mFrontendContext,
|
||||
ThreadStackQuotaForSize(stackSize));
|
||||
JS::ThreadStackQuotaForSize(stackSize));
|
||||
|
||||
JS::CompilationStorage compileStorage;
|
||||
auto compile = [&](auto& source) {
|
||||
|
|
|
@ -47,7 +47,10 @@ class ServiceWorkerRegistrarTest : public ServiceWorkerRegistrar {
|
|||
RegisterServiceWorkerInternal(aData);
|
||||
}
|
||||
|
||||
nsTArray<ServiceWorkerRegistrationData>& TestGetData() { return mData; }
|
||||
nsTArray<ServiceWorkerRegistrationData>& TestGetData()
|
||||
MOZ_NO_THREAD_SAFETY_ANALYSIS {
|
||||
return mData;
|
||||
}
|
||||
};
|
||||
|
||||
already_AddRefed<nsIFile> GetFile() {
|
||||
|
|
|
@ -81,7 +81,7 @@ class XMLHttpRequestStringBuffer final {
|
|||
private:
|
||||
~XMLHttpRequestStringBuffer() = default;
|
||||
|
||||
nsString& UnsafeData() { return mData; }
|
||||
nsString& UnsafeData() MOZ_NO_THREAD_SAFETY_ANALYSIS { return mData; }
|
||||
|
||||
Mutex mMutex;
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
#include "XULTreeElement.h"
|
||||
#include "js/CompilationAndEvaluation.h"
|
||||
#include "js/CompileOptions.h" // JS::CompileOptions, JS::OwningCompileOptions, , JS::ReadOnlyCompileOptions, JS::ReadOnlyDecodeOptions, JS::DecodeOptions
|
||||
#include "js/experimental/CompileScript.h" // JS::NewFrontendContext, JS::DestroyFrontendContext, JS::SetNativeStackQuota, JS::CompileGlobalScriptToStencil, JS::CompilationStorage
|
||||
#include "js/experimental/CompileScript.h" // JS::NewFrontendContext, JS::DestroyFrontendContext, JS::SetNativeStackQuota, JS::ThreadStackQuotaForSize, JS::CompileGlobalScriptToStencil, JS::CompilationStorage
|
||||
#include "js/experimental/JSStencil.h" // JS::Stencil, JS::FrontendContext
|
||||
#include "js/SourceText.h"
|
||||
#include "js/Transcoding.h"
|
||||
|
@ -1856,17 +1856,11 @@ class ScriptCompileTask final : public Task {
|
|||
}
|
||||
|
||||
private:
|
||||
static size_t ThreadStackQuotaForSize(size_t size) {
|
||||
// Set the stack quota to 10% less that the actual size.
|
||||
// NOTE: This follows what JS helper thread does.
|
||||
return size_t(double(size) * 0.9);
|
||||
}
|
||||
|
||||
void Compile() {
|
||||
// NOTE: The stack limit must be set from the same thread that compiles.
|
||||
size_t stackSize = TaskController::GetThreadStackSize();
|
||||
JS::SetNativeStackQuota(mFrontendContext,
|
||||
ThreadStackQuotaForSize(stackSize));
|
||||
JS::ThreadStackQuotaForSize(stackSize));
|
||||
|
||||
JS::SourceText<Utf8Unit> srcBuf;
|
||||
if (NS_WARN_IF(!srcBuf.init(mFrontendContext, mText.get(), mTextLength,
|
||||
|
|
|
@ -6751,8 +6751,8 @@ void EditorBase::TopLevelEditSubActionData::WillDeleteContent(
|
|||
}
|
||||
|
||||
void EditorBase::TopLevelEditSubActionData::DidSplitContent(
|
||||
EditorBase& aEditorBase, nsIContent& aSplitContent, nsIContent& aNewContent,
|
||||
SplitNodeDirection aSplitNodeDirection) {
|
||||
EditorBase& aEditorBase, nsIContent& aSplitContent,
|
||||
nsIContent& aNewContent) {
|
||||
MOZ_ASSERT(aEditorBase.AsHTMLEditor());
|
||||
|
||||
if (!aEditorBase.mInitSucceeded || aEditorBase.Destroyed()) {
|
||||
|
@ -6763,14 +6763,9 @@ void EditorBase::TopLevelEditSubActionData::DidSplitContent(
|
|||
return; // Temporarily disabled by edit sub-action handler.
|
||||
}
|
||||
|
||||
DebugOnly<nsresult> rvIgnored =
|
||||
aSplitNodeDirection == SplitNodeDirection::LeftNodeIsNewOne
|
||||
? AddRangeToChangedRange(*aEditorBase.AsHTMLEditor(),
|
||||
EditorRawDOMPoint(&aNewContent, 0),
|
||||
EditorRawDOMPoint(&aSplitContent, 0))
|
||||
: AddRangeToChangedRange(*aEditorBase.AsHTMLEditor(),
|
||||
EditorRawDOMPoint::AtEndOf(aSplitContent),
|
||||
EditorRawDOMPoint::AtEndOf(aNewContent));
|
||||
DebugOnly<nsresult> rvIgnored = AddRangeToChangedRange(
|
||||
*aEditorBase.AsHTMLEditor(), EditorRawDOMPoint::AtEndOf(aSplitContent),
|
||||
EditorRawDOMPoint::AtEndOf(aNewContent));
|
||||
NS_WARNING_ASSERTION(NS_SUCCEEDED(rvIgnored),
|
||||
"TopLevelEditSubActionData::AddRangeToChangedRange() "
|
||||
"failed, but ignored");
|
||||
|
|
|
@ -849,8 +849,7 @@ class EditorBase : public nsIEditor,
|
|||
void WillDeleteContent(EditorBase& aEditorBase,
|
||||
nsIContent& aRemovingContent);
|
||||
void DidSplitContent(EditorBase& aEditorBase, nsIContent& aSplitContent,
|
||||
nsIContent& aNewContent,
|
||||
SplitNodeDirection aSplitNodeDirection);
|
||||
nsIContent& aNewContent);
|
||||
void DidJoinContents(EditorBase& aEditorBase,
|
||||
const EditorRawDOMPoint& aJoinedPoint);
|
||||
void DidInsertText(EditorBase& aEditorBase,
|
||||
|
|
|
@ -44,10 +44,8 @@ enum class CollectChildrenOption; // HTMLEditUtils.h
|
|||
enum class EditAction; // mozilla/EditAction.h
|
||||
enum class EditorCommandParamType : uint16_t; // mozilla/EditorCommands.h
|
||||
enum class EditSubAction : int32_t; // mozilla/EditAction.h
|
||||
enum class JoinNodesDirection; // JoinSplitNodeDirection.h
|
||||
enum class ParagraphSeparator; // mozilla/HTMLEditor.h
|
||||
enum class SpecifiedStyle : uint8_t; // mozilla/PendingStyles.h
|
||||
enum class SplitNodeDirection; // JoinSplitNodeDirection.h
|
||||
enum class SuggestCaret; // EditorUtils.h
|
||||
enum class WithTransaction; // HTMLEditHelpers.h
|
||||
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
#include "EditorDOMPoint.h"
|
||||
#include "EditorForwards.h"
|
||||
#include "EditorUtils.h" // for CaretPoint
|
||||
#include "JoinSplitNodeDirection.h"
|
||||
|
||||
#include "mozilla/AlreadyAddRefed.h"
|
||||
#include "mozilla/Attributes.h"
|
||||
|
@ -303,8 +302,7 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
if (!DidSplit()) {
|
||||
return nullptr;
|
||||
}
|
||||
return mDirection == SplitNodeDirection::LeftNodeIsNewOne ? mPreviousNode
|
||||
: mNextNode;
|
||||
return mNextNode;
|
||||
}
|
||||
template <typename NodeType>
|
||||
MOZ_KNOWN_LIVE NodeType* GetNewContentAs() const {
|
||||
|
@ -327,9 +325,6 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
// node, the container of the split point is the original node.
|
||||
return mGivenSplitPoint.GetContainerAs<nsIContent>();
|
||||
}
|
||||
if (mDirection == SplitNodeDirection::LeftNodeIsNewOne) {
|
||||
return mNextNode ? mNextNode : mPreviousNode;
|
||||
}
|
||||
return mPreviousNode ? mPreviousNode : mNextNode;
|
||||
}
|
||||
template <typename NodeType>
|
||||
|
@ -359,7 +354,6 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
return EditorDOMPointType::After(mPreviousNode);
|
||||
}
|
||||
|
||||
SplitNodeResult() = delete;
|
||||
SplitNodeResult(const SplitNodeResult&) = delete;
|
||||
SplitNodeResult& operator=(const SplitNodeResult&) = delete;
|
||||
SplitNodeResult(SplitNodeResult&&) = default;
|
||||
|
@ -386,27 +380,22 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
*
|
||||
* @param aNewNode The node which is newly created.
|
||||
* @param aSplitNode The node which was split.
|
||||
* @param aDirection The split direction which the HTML editor tried to split
|
||||
* a node with.
|
||||
* @param aNewCaretPoint
|
||||
* An optional new caret position. If this is omitted,
|
||||
* the point between new node and split node will be
|
||||
* suggested.
|
||||
*/
|
||||
SplitNodeResult(nsIContent& aNewNode, nsIContent& aSplitNode,
|
||||
SplitNodeDirection aDirection,
|
||||
const Maybe<EditorDOMPoint>& aNewCaretPoint = Nothing())
|
||||
: CaretPoint(aNewCaretPoint.isSome()
|
||||
? aNewCaretPoint.ref()
|
||||
: EditorDOMPoint::AtEndOf(*PreviousNode(
|
||||
aDirection, &aNewNode, &aSplitNode))),
|
||||
mPreviousNode(PreviousNode(aDirection, &aNewNode, &aSplitNode)),
|
||||
mNextNode(NextNode(aDirection, &aNewNode, &aSplitNode)),
|
||||
mDirection(aDirection) {}
|
||||
: EditorDOMPoint::AtEndOf(aSplitNode)),
|
||||
mPreviousNode(&aSplitNode),
|
||||
mNextNode(&aNewNode) {}
|
||||
|
||||
SplitNodeResult ToHandledResult() const {
|
||||
CaretPointHandled();
|
||||
SplitNodeResult result(mDirection);
|
||||
SplitNodeResult result;
|
||||
result.mPreviousNode = GetPreviousContent();
|
||||
result.mNextNode = GetNextContent();
|
||||
MOZ_DIAGNOSTIC_ASSERT(result.Handled());
|
||||
|
@ -427,9 +416,9 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
* result should be specified.
|
||||
*/
|
||||
static inline SplitNodeResult HandledButDidNotSplitDueToEndOfContainer(
|
||||
nsIContent& aNotSplitNode, SplitNodeDirection aDirection,
|
||||
nsIContent& aNotSplitNode,
|
||||
const SplitNodeResult* aDeeperSplitNodeResult = nullptr) {
|
||||
SplitNodeResult result(aDirection);
|
||||
SplitNodeResult result;
|
||||
result.mPreviousNode = &aNotSplitNode;
|
||||
// Caret should be put at the last split point instead of current node.
|
||||
if (aDeeperSplitNodeResult) {
|
||||
|
@ -440,9 +429,9 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
}
|
||||
|
||||
static inline SplitNodeResult HandledButDidNotSplitDueToStartOfContainer(
|
||||
nsIContent& aNotSplitNode, SplitNodeDirection aDirection,
|
||||
nsIContent& aNotSplitNode,
|
||||
const SplitNodeResult* aDeeperSplitNodeResult = nullptr) {
|
||||
SplitNodeResult result(aDirection);
|
||||
SplitNodeResult result;
|
||||
result.mNextNode = &aNotSplitNode;
|
||||
// Caret should be put at the last split point instead of current node.
|
||||
if (aDeeperSplitNodeResult) {
|
||||
|
@ -455,9 +444,8 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
template <typename PT, typename CT>
|
||||
static inline SplitNodeResult NotHandled(
|
||||
const EditorDOMPointBase<PT, CT>& aGivenSplitPoint,
|
||||
SplitNodeDirection aDirection,
|
||||
const SplitNodeResult* aDeeperSplitNodeResult = nullptr) {
|
||||
SplitNodeResult result(aDirection);
|
||||
SplitNodeResult result;
|
||||
result.mGivenSplitPoint = aGivenSplitPoint;
|
||||
// Caret should be put at the last split point instead of current node.
|
||||
if (aDeeperSplitNodeResult) {
|
||||
|
@ -495,21 +483,7 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
#endif
|
||||
|
||||
private:
|
||||
explicit SplitNodeResult(SplitNodeDirection aDirection)
|
||||
: mDirection(aDirection) {}
|
||||
|
||||
// Helper methods to consider previous/next node from new/old node and split
|
||||
// direction.
|
||||
static nsIContent* PreviousNode(SplitNodeDirection aDirection,
|
||||
nsIContent* aNewOne, nsIContent* aOldOne) {
|
||||
return aDirection == SplitNodeDirection::LeftNodeIsNewOne ? aNewOne
|
||||
: aOldOne;
|
||||
}
|
||||
static nsIContent* NextNode(SplitNodeDirection aDirection,
|
||||
nsIContent* aNewOne, nsIContent* aOldOne) {
|
||||
return aDirection == SplitNodeDirection::LeftNodeIsNewOne ? aOldOne
|
||||
: aNewOne;
|
||||
}
|
||||
SplitNodeResult() = default;
|
||||
|
||||
// When methods which return this class split some nodes actually, they
|
||||
// need to set a set of left node and right node to this class. However,
|
||||
|
@ -526,8 +500,6 @@ class MOZ_STACK_CLASS SplitNodeResult final : public CaretPoint {
|
|||
// which cannot be represented as a node. Therefore, we need EditorDOMPoint
|
||||
// for representing the point.
|
||||
EditorDOMPoint mGivenSplitPoint;
|
||||
|
||||
SplitNodeDirection mDirection;
|
||||
};
|
||||
|
||||
/*****************************************************************************
|
||||
|
@ -566,11 +538,9 @@ class MOZ_STACK_CLASS JoinNodesResult final {
|
|||
*
|
||||
* @param aJoinedPoint First child of right node or first character.
|
||||
* @param aRemovedContent The node which was removed from the parent.
|
||||
* @param aDirection The join direction which the HTML editor tried
|
||||
* to join the nodes with.
|
||||
*/
|
||||
JoinNodesResult(const EditorDOMPoint& aJoinedPoint,
|
||||
nsIContent& aRemovedContent, JoinNodesDirection aDirection)
|
||||
nsIContent& aRemovedContent)
|
||||
: mJoinedPoint(aJoinedPoint), mRemovedContent(&aRemovedContent) {
|
||||
MOZ_DIAGNOSTIC_ASSERT(aJoinedPoint.IsInContentNode());
|
||||
}
|
||||
|
|
|
@ -6445,8 +6445,7 @@ HTMLEditor::RemoveBlockContainerElementWithTransactionBetween(
|
|||
if (NS_WARN_IF(!rightmostElement)) {
|
||||
return Err(NS_ERROR_FAILURE);
|
||||
}
|
||||
MOZ_ASSERT_IF(GetSplitNodeDirection() == SplitNodeDirection::LeftNodeIsNewOne,
|
||||
rightmostElement == &aBlockContainerElement);
|
||||
|
||||
{
|
||||
// MOZ_KnownLive(rightmostElement) because it's grabbed by
|
||||
// unwrappedSplitResult.
|
||||
|
@ -8293,8 +8292,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::HandleInsertParagraphInParagraph(
|
|||
// If insertParagraph does not create a new paragraph, default to
|
||||
// insertLineBreak.
|
||||
if (!createNewParagraph) {
|
||||
return SplitNodeResult::NotHandled(pointToSplit,
|
||||
GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(pointToSplit);
|
||||
}
|
||||
const EditorDOMPoint pointToInsertBR = pointToSplit.ParentPoint();
|
||||
MOZ_ASSERT(pointToInsertBR.IsSet());
|
||||
|
@ -8327,8 +8325,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::HandleInsertParagraphInParagraph(
|
|||
// If insertParagraph does not create a new paragraph, default to
|
||||
// insertLineBreak.
|
||||
if (!createNewParagraph) {
|
||||
return SplitNodeResult::NotHandled(pointToSplit,
|
||||
GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(pointToSplit);
|
||||
}
|
||||
const auto pointToInsertBR =
|
||||
EditorDOMPoint::After(*pointToSplit.ContainerAs<Text>());
|
||||
|
@ -8350,8 +8347,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::HandleInsertParagraphInParagraph(
|
|||
// If insertParagraph does not create a new paragraph, default to
|
||||
// insertLineBreak.
|
||||
if (!createNewParagraph) {
|
||||
return SplitNodeResult::NotHandled(pointToSplit,
|
||||
GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(pointToSplit);
|
||||
}
|
||||
|
||||
// If we're splitting the paragraph at middle of a text node, we should
|
||||
|
@ -8444,8 +8440,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::HandleInsertParagraphInParagraph(
|
|||
// If insertParagraph does not create a new paragraph, default to
|
||||
// insertLineBreak.
|
||||
if (!createNewParagraph) {
|
||||
return SplitNodeResult::NotHandled(pointToSplit,
|
||||
GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(pointToSplit);
|
||||
}
|
||||
Result<CreateElementResult, nsresult> insertBRElementResult =
|
||||
InsertBRElement(WithTransaction::Yes, pointToSplit);
|
||||
|
@ -9512,8 +9507,7 @@ HTMLEditor::MaybeSplitAncestorsForInsertWithTransaction(
|
|||
// ancestor nodes. In this case, we should return the given split point
|
||||
// as is.
|
||||
if (pointToInsert.GetContainer() == aStartOfDeepestRightNode.GetContainer()) {
|
||||
return SplitNodeResult::NotHandled(aStartOfDeepestRightNode,
|
||||
GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(aStartOfDeepestRightNode);
|
||||
}
|
||||
|
||||
Result<SplitNodeResult, nsresult> splitNodeResult =
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
#include "HTMLEditUtils.h"
|
||||
#include "InsertNodeTransaction.h"
|
||||
#include "JoinNodesTransaction.h"
|
||||
#include "JoinSplitNodeDirection.h"
|
||||
#include "MoveNodeTransaction.h"
|
||||
#include "PendingStyles.h"
|
||||
#include "ReplaceTextTransaction.h"
|
||||
|
@ -239,26 +238,9 @@ HTMLEditor::AttributeFilter HTMLEditor::CopyAllAttributesExceptIdAndDir =
|
|||
aAttr.NodeInfo()->NameAtom() == nsGkAtoms::dir));
|
||||
};
|
||||
|
||||
static bool ShouldUseTraditionalJoinSplitDirection(const Document& aDocument) {
|
||||
if (nsIPrincipal* principal = aDocument.GetPrincipalForPrefBasedHacks()) {
|
||||
if (principal->IsURIInPrefList("editor.join_split_direction."
|
||||
"force_use_traditional_direction")) {
|
||||
return true;
|
||||
}
|
||||
if (principal->IsURIInPrefList("editor.join_split_direction."
|
||||
"force_use_compatible_direction")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return !StaticPrefs::
|
||||
editor_join_split_direction_compatible_with_the_other_browsers();
|
||||
}
|
||||
|
||||
HTMLEditor::HTMLEditor(const Document& aDocument)
|
||||
: EditorBase(EditorBase::EditorType::HTML),
|
||||
mCRInParagraphCreatesParagraph(false),
|
||||
mUseGeckoTraditionalJoinSplitBehavior(
|
||||
ShouldUseTraditionalJoinSplitDirection(aDocument)),
|
||||
mIsObjectResizingEnabled(
|
||||
StaticPrefs::editor_resizing_enabled_by_default()),
|
||||
mIsResizing(false),
|
||||
|
@ -5090,7 +5072,6 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeWithTransaction(
|
|||
!ignoredError.Failed(),
|
||||
"OnStartToHandleTopLevelEditSubAction() failed, but ignored");
|
||||
|
||||
mMaybeHasJoinSplitTransactions = true;
|
||||
RefPtr<SplitNodeTransaction> transaction =
|
||||
SplitNodeTransaction::Create(*this, aStartOfRightNode);
|
||||
nsresult rv = DoTransactionInternal(transaction);
|
||||
|
@ -5109,15 +5090,14 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeWithTransaction(
|
|||
if (NS_WARN_IF(!newContent) || NS_WARN_IF(!splitContent)) {
|
||||
return Err(NS_ERROR_FAILURE);
|
||||
}
|
||||
TopLevelEditSubActionDataRef().DidSplitContent(
|
||||
*this, *splitContent, *newContent, transaction->GetSplitNodeDirection());
|
||||
TopLevelEditSubActionDataRef().DidSplitContent(*this, *splitContent,
|
||||
*newContent);
|
||||
if (NS_WARN_IF(!newContent->IsInComposedDoc()) ||
|
||||
NS_WARN_IF(!splitContent->IsInComposedDoc())) {
|
||||
return Err(NS_ERROR_EDITOR_UNEXPECTED_DOM_TREE);
|
||||
}
|
||||
|
||||
return SplitNodeResult(*newContent, *splitContent,
|
||||
transaction->GetSplitNodeDirection());
|
||||
return SplitNodeResult(*newContent, *splitContent);
|
||||
}
|
||||
|
||||
Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeDeepWithTransaction(
|
||||
|
@ -5138,8 +5118,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeDeepWithTransaction(
|
|||
EditorDOMPoint atStartOfRightNode(aDeepestStartOfRightNode);
|
||||
// lastResult is as explained by its name, the last result which may not be
|
||||
// split a node actually.
|
||||
SplitNodeResult lastResult =
|
||||
SplitNodeResult::NotHandled(atStartOfRightNode, GetSplitNodeDirection());
|
||||
SplitNodeResult lastResult = SplitNodeResult::NotHandled(atStartOfRightNode);
|
||||
MOZ_ASSERT(lastResult.AtSplitPoint<EditorRawDOMPoint>()
|
||||
.IsSetAndValidInComposedDoc());
|
||||
|
||||
|
@ -5202,7 +5181,7 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeDeepWithTransaction(
|
|||
// allowed to create empty container node, try to split its parent after it.
|
||||
else if (!atStartOfRightNode.IsStartOfContainer()) {
|
||||
lastResult = SplitNodeResult::HandledButDidNotSplitDueToEndOfContainer(
|
||||
*splittingContent, GetSplitNodeDirection(), &lastResult);
|
||||
*splittingContent, &lastResult);
|
||||
MOZ_ASSERT(lastResult.AtSplitPoint<EditorRawDOMPoint>()
|
||||
.IsSetAndValidInComposedDoc());
|
||||
if (splittingContent == &aMostAncestorToSplit) {
|
||||
|
@ -5217,15 +5196,14 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeDeepWithTransaction(
|
|||
else {
|
||||
if (splittingContent == &aMostAncestorToSplit) {
|
||||
return SplitNodeResult::HandledButDidNotSplitDueToStartOfContainer(
|
||||
*splittingContent, GetSplitNodeDirection(), &lastResult);
|
||||
*splittingContent, &lastResult);
|
||||
}
|
||||
|
||||
// Try to split its parent before current node.
|
||||
// XXX This is logically wrong. If we've already split something but
|
||||
// this is the last splitable content node in the limiter, this
|
||||
// method will return "not handled".
|
||||
lastResult = SplitNodeResult::NotHandled(
|
||||
atStartOfRightNode, GetSplitNodeDirection(), &lastResult);
|
||||
lastResult = SplitNodeResult::NotHandled(atStartOfRightNode, &lastResult);
|
||||
MOZ_ASSERT(lastResult.AtSplitPoint<EditorRawDOMPoint>()
|
||||
.IsSetAndValidInComposedDoc());
|
||||
atStartOfRightNode.Set(splittingContent);
|
||||
|
@ -5237,9 +5215,8 @@ Result<SplitNodeResult, nsresult> HTMLEditor::SplitNodeDeepWithTransaction(
|
|||
}
|
||||
|
||||
Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
|
||||
const EditorDOMPoint& aStartOfRightNode, nsIContent& aNewNode,
|
||||
SplitNodeDirection aDirection) {
|
||||
// Ensure computing the offset if it's intialized with a child content node.
|
||||
const EditorDOMPoint& aStartOfRightNode, nsIContent& aNewNode) {
|
||||
// Ensure computing the offset if it's initialized with a child content node.
|
||||
Unused << aStartOfRightNode.Offset();
|
||||
|
||||
// XXX Perhaps, aStartOfRightNode may be invalid if this is a redo
|
||||
|
@ -5286,11 +5263,8 @@ Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
|
|||
// Fix the child before mutation observer may touch the DOM tree.
|
||||
nsIContent* firstChildOfRightNode = aStartOfRightNode.GetChild();
|
||||
IgnoredErrorResult error;
|
||||
parent->InsertBefore(aNewNode,
|
||||
aDirection == SplitNodeDirection::LeftNodeIsNewOne
|
||||
? aStartOfRightNode.GetContainer()
|
||||
: aStartOfRightNode.GetContainer()->GetNextSibling(),
|
||||
error);
|
||||
parent->InsertBefore(
|
||||
aNewNode, aStartOfRightNode.GetContainer()->GetNextSibling(), error);
|
||||
if (MOZ_UNLIKELY(error.Failed())) {
|
||||
NS_WARNING("nsINode::InsertBefore() failed");
|
||||
return Err(error.StealNSResult());
|
||||
|
@@ -5303,21 +5277,13 @@ Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
  // If we are splitting a text node, we need to move its some data to the
  // new text node.
  if (aStartOfRightNode.IsInTextNode()) {
    if (!(aDirection == SplitNodeDirection::LeftNodeIsNewOne &&
          aStartOfRightNode.IsStartOfContainer()) &&
        !(aDirection == SplitNodeDirection::RightNodeIsNewOne &&
          aStartOfRightNode.IsEndOfContainer())) {
    if (!aStartOfRightNode.IsEndOfContainer()) {
      Text* originalTextNode = aStartOfRightNode.ContainerAs<Text>();
      Text* newTextNode = aNewNode.AsText();
      nsAutoString movingText;
      const uint32_t cutStartOffset =
          aDirection == SplitNodeDirection::LeftNodeIsNewOne
              ? 0u
              : aStartOfRightNode.Offset();
      const uint32_t cutStartOffset = aStartOfRightNode.Offset();
      const uint32_t cutLength =
          aDirection == SplitNodeDirection::LeftNodeIsNewOne
              ? aStartOfRightNode.Offset()
              : originalTextNode->Length() - aStartOfRightNode.Offset();
          originalTextNode->Length() - aStartOfRightNode.Offset();
      IgnoredErrorResult error;
      originalTextNode->SubstringData(cutStartOffset, cutLength, movingText,
                                      error);
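
In other words, the branch that survives above always moves the substring after the split offset into the new node, so the new node is always the right one. A rough DOM-level analogy using plain Text.splitText() (not the editor's internal code path, just the equivalent observable result):

    // <p>abcdef</p>, splitting the text node at offset 3
    const textNode = document.querySelector("p").firstChild;
    const newRightNode = textNode.splitText(3);
    // textNode.data === "abc" (kept, left); newRightNode.data === "def" (new, right)
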
@ -5339,43 +5305,14 @@ Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
|
|||
}
|
||||
}
|
||||
// If the node has been moved to different parent, we should do nothing
|
||||
// since web apps should handle eventhing in such case.
|
||||
// since web apps should handle everything in such case.
|
||||
else if (firstChildOfRightNode &&
|
||||
aStartOfRightNode.GetContainer() !=
|
||||
firstChildOfRightNode->GetParentNode()) {
|
||||
NS_WARNING(
|
||||
"The web app interupped us and touched the DOM tree, we stopped "
|
||||
"The web app interrupted us and touched the DOM tree, we stopped "
|
||||
"splitting anything");
|
||||
} else if (aDirection == SplitNodeDirection::LeftNodeIsNewOne) {
|
||||
// If Splitting at end of container which is not a text node, we need to
|
||||
// move all children if the left node is new one. Otherwise, nothing to do.
|
||||
if (!firstChildOfRightNode) {
|
||||
// XXX Why do we ignore an error while moving nodes from the right
|
||||
// node to the left node?
|
||||
nsresult rv = MoveAllChildren(*aStartOfRightNode.GetContainer(),
|
||||
EditorRawDOMPoint(&aNewNode, 0u));
|
||||
if (NS_WARN_IF(rv == NS_ERROR_EDITOR_DESTROYED)) {
|
||||
return Err(NS_ERROR_EDITOR_DESTROYED);
|
||||
}
|
||||
NS_WARNING_ASSERTION(NS_SUCCEEDED(rv),
|
||||
"HTMLEditor::MoveAllChildren() failed, but ignored");
|
||||
}
|
||||
// If the left node is new one and splitting middle of it, we need to
|
||||
// previous siblings of the given point to the new left node.
|
||||
else if (firstChildOfRightNode->GetPreviousSibling()) {
|
||||
// XXX Why do we ignore an error while moving nodes from the right node
|
||||
// to the left node?
|
||||
nsresult rv = MovePreviousSiblings(*firstChildOfRightNode,
|
||||
EditorRawDOMPoint(&aNewNode, 0u));
|
||||
if (NS_WARN_IF(rv == NS_ERROR_EDITOR_DESTROYED)) {
|
||||
return Err(NS_ERROR_EDITOR_DESTROYED);
|
||||
}
|
||||
NS_WARNING_ASSERTION(
|
||||
NS_SUCCEEDED(rv),
|
||||
"HTMLEditor::MovePreviousSiblings() failed, but ignored");
|
||||
}
|
||||
} else {
|
||||
MOZ_ASSERT(aDirection == SplitNodeDirection::RightNodeIsNewOne);
|
||||
// If the right node is new one and there is no children or splitting at
|
||||
// end of the node, we need to do nothing.
|
||||
if (!firstChildOfRightNode) {
|
||||
|
@ -5449,28 +5386,6 @@ Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
|
|||
return;
|
||||
}
|
||||
|
||||
if (aDirection == SplitNodeDirection::LeftNodeIsNewOne) {
|
||||
// If the container is the right node and offset is before the split
|
||||
// point, the content was moved into aNewNode. So, just changing the
|
||||
// container will point proper position.
|
||||
if (aOffset < aStartOfRightNode.Offset()) {
|
||||
aContainer = &aNewNode;
|
||||
return;
|
||||
}
|
||||
|
||||
// If the container is the right node and offset equals or is larger
|
||||
// than the split point, we need to decrease the offset since some
|
||||
// content before the split point was moved to aNewNode.
|
||||
if (aOffset >= aStartOfRightNode.Offset()) {
|
||||
aOffset -= aStartOfRightNode.Offset();
|
||||
return;
|
||||
}
|
||||
|
||||
NS_WARNING("The stored offset was smaller than the right node offset");
|
||||
aOffset = 0u;
|
||||
return;
|
||||
}
|
||||
|
||||
// If the container is the left node and offset is after the split
|
||||
// point, the content was moved from the right node to aNewNode.
|
||||
// So, we need to change the container to aNewNode and decrease the
|
||||
|
@ -5516,23 +5431,19 @@ Result<SplitNodeResult, nsresult> HTMLEditor::DoSplitNode(
|
|||
// some of the descendants, they should check by themselves.
|
||||
if (NS_WARN_IF(parent != aStartOfRightNode.GetContainer()->GetParentNode()) ||
|
||||
NS_WARN_IF(parent != aNewNode.GetParentNode()) ||
|
||||
(aDirection == SplitNodeDirection::LeftNodeIsNewOne &&
|
||||
NS_WARN_IF(aNewNode.GetNextSibling() !=
|
||||
aStartOfRightNode.GetContainer())) ||
|
||||
(aDirection == SplitNodeDirection::RightNodeIsNewOne &&
|
||||
NS_WARN_IF(aNewNode.GetPreviousSibling() !=
|
||||
aStartOfRightNode.GetContainer()))) {
|
||||
NS_WARN_IF(aNewNode.GetPreviousSibling() !=
|
||||
aStartOfRightNode.GetContainer())) {
|
||||
return Err(NS_ERROR_EDITOR_UNEXPECTED_DOM_TREE);
|
||||
}
|
||||
|
||||
DebugOnly<nsresult> rvIgnored = RangeUpdaterRef().SelAdjSplitNode(
|
||||
*aStartOfRightNode.ContainerAs<nsIContent>(), aStartOfRightNode.Offset(),
|
||||
aNewNode, aDirection);
|
||||
aNewNode);
|
||||
NS_WARNING_ASSERTION(NS_SUCCEEDED(rvIgnored),
|
||||
"RangeUpdater::SelAdjSplitNode() failed, but ignored");
|
||||
|
||||
return SplitNodeResult(aNewNode, *aStartOfRightNode.ContainerAs<nsIContent>(),
|
||||
aDirection);
|
||||
return SplitNodeResult(aNewNode,
|
||||
*aStartOfRightNode.ContainerAs<nsIContent>());
|
||||
}
|
||||
|
||||
Result<JoinNodesResult, nsresult> HTMLEditor::JoinNodesWithTransaction(
|
||||
|
@ -5564,7 +5475,6 @@ Result<JoinNodesResult, nsresult> HTMLEditor::JoinNodesWithTransaction(
|
|||
return Err(NS_ERROR_FAILURE);
|
||||
}
|
||||
|
||||
mMaybeHasJoinSplitTransactions = true;
|
||||
const nsresult rv = DoTransactionInternal(transaction);
|
||||
// FYI: Now, DidJoinNodesTransaction() must have been run if succeeded.
|
||||
if (NS_WARN_IF(Destroyed())) {
|
||||
|
@ -5592,8 +5502,7 @@ Result<JoinNodesResult, nsresult> HTMLEditor::JoinNodesWithTransaction(
|
|||
}
|
||||
|
||||
return JoinNodesResult(transaction->CreateJoinedPoint<EditorDOMPoint>(),
|
||||
*transaction->GetRemovedContent(),
|
||||
transaction->GetJoinNodesDirection());
|
||||
*transaction->GetRemovedContent());
|
||||
}
|
||||
|
||||
void HTMLEditor::DidJoinNodesTransaction(
|
||||
|
@ -5625,8 +5534,7 @@ void HTMLEditor::DidJoinNodesTransaction(
|
|||
mTextServicesDocument) {
|
||||
textServicesDocument->DidJoinContents(
|
||||
aTransaction.CreateJoinedPoint<EditorRawDOMPoint>(),
|
||||
*aTransaction.GetRemovedContent(),
|
||||
aTransaction.GetJoinNodesDirection());
|
||||
*aTransaction.GetRemovedContent());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5634,9 +5542,7 @@ void HTMLEditor::DidJoinNodesTransaction(
|
|||
for (auto& listener : mActionListeners.Clone()) {
|
||||
DebugOnly<nsresult> rvIgnored = listener->DidJoinContents(
|
||||
aTransaction.CreateJoinedPoint<EditorRawDOMPoint>(),
|
||||
aTransaction.GetRemovedContent(),
|
||||
aTransaction.GetJoinNodesDirection() ==
|
||||
JoinNodesDirection::LeftNodeIntoRightNode);
|
||||
aTransaction.GetRemovedContent());
|
||||
NS_WARNING_ASSERTION(
|
||||
NS_SUCCEEDED(rvIgnored),
|
||||
"nsIEditActionListener::DidJoinContents() failed, but ignored");
|
||||
|
@ -5645,16 +5551,11 @@ void HTMLEditor::DidJoinNodesTransaction(
|
|||
}
|
||||
|
||||
nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
||||
nsIContent& aContentToRemove,
|
||||
JoinNodesDirection aDirection) {
|
||||
nsIContent& aContentToRemove) {
|
||||
MOZ_ASSERT(IsEditActionDataAvailable());
|
||||
|
||||
const uint32_t keepingContentLength = aContentToKeep.Length();
|
||||
const uint32_t removingContentLength = aContentToRemove.Length();
|
||||
const EditorDOMPoint oldPointAtRightContent(
|
||||
aDirection == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? &aContentToKeep
|
||||
: &aContentToRemove);
|
||||
const EditorDOMPoint oldPointAtRightContent(&aContentToRemove);
|
||||
if (MOZ_LIKELY(oldPointAtRightContent.IsSet())) {
|
||||
Unused << oldPointAtRightContent.Offset(); // Fix the offset
|
||||
}
|
||||
|
@ -5699,19 +5600,6 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
|||
MOZ_ASSERT(savingRange.mEndContainer);
|
||||
auto AdjustDOMPoint = [&](nsCOMPtr<nsINode>& aContainer,
|
||||
uint32_t& aOffset) {
|
||||
if (aDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
|
||||
// If range boundary points aContentToKeep and aContentToRemove
|
||||
// is its left node, remember it as being at end of the removing
|
||||
// node. Then, only chaning the container to aContentToKeep will
|
||||
// point start of the current first content of aContentToKeep.
|
||||
if (aContainer == atRemovingNode.GetContainer() &&
|
||||
atRemovingNode.Offset() < aOffset &&
|
||||
aOffset <= atNodeToKeep.Offset()) {
|
||||
aContainer = &aContentToRemove;
|
||||
aOffset = removingContentLength;
|
||||
}
|
||||
return;
|
||||
}
|
||||
// If range boundary points aContentToRemove and aContentToKeep is
|
||||
// its left node, remember it as being at end of aContentToKeep.
|
||||
// Then, it will point start of the first content of moved content
|
||||
|
@ -5738,16 +5626,8 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
|||
if (aContentToKeep.IsText() && aContentToRemove.IsText()) {
|
||||
nsAutoString rightText;
|
||||
nsAutoString leftText;
|
||||
const nsIContent& rightTextNode =
|
||||
aDirection == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? aContentToKeep
|
||||
: aContentToRemove;
|
||||
const nsIContent& leftTextNode =
|
||||
aDirection == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? aContentToRemove
|
||||
: aContentToKeep;
|
||||
rightTextNode.AsText()->GetData(rightText);
|
||||
leftTextNode.AsText()->GetData(leftText);
|
||||
aContentToRemove.AsText()->GetData(rightText);
|
||||
aContentToKeep.AsText()->GetData(leftText);
|
||||
leftText += rightText;
|
||||
IgnoredErrorResult ignoredError;
|
||||
DoSetText(MOZ_KnownLive(*aContentToKeep.AsText()), leftText,
|
||||
|
@@ -5763,34 +5643,15 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
  AutoTArray<OwningNonNull<nsIContent>, 64> arrayOfChildContents;
  HTMLEditUtils::CollectAllChildren(aContentToRemove, arrayOfChildContents);

  if (aDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
    for (const OwningNonNull<nsIContent>& child :
         Reversed(arrayOfChildContents)) {
      // Note that it's safe to pass the reference node to insert the child
      // without making it grabbed by nsINode::mNextSibling before touching
      // the DOM tree.
      IgnoredErrorResult error;
      aContentToKeep.InsertBefore(child, aContentToKeep.GetFirstChild(),
                                  error);
      if (NS_WARN_IF(Destroyed())) {
        return NS_ERROR_EDITOR_DESTROYED;
      }
      if (error.Failed()) {
        NS_WARNING("nsINode::InsertBefore() failed");
        return error.StealNSResult();
      }
  for (const OwningNonNull<nsIContent>& child : arrayOfChildContents) {
    IgnoredErrorResult error;
    aContentToKeep.AppendChild(child, error);
    if (NS_WARN_IF(Destroyed())) {
      return NS_ERROR_EDITOR_DESTROYED;
    }
  } else {
    for (const OwningNonNull<nsIContent>& child : arrayOfChildContents) {
      IgnoredErrorResult error;
      aContentToKeep.AppendChild(child, error);
      if (NS_WARN_IF(Destroyed())) {
        return NS_ERROR_EDITOR_DESTROYED;
      }
      if (error.Failed()) {
        NS_WARNING("nsINode::AppendChild() failed");
        return error.StealNSResult();
      }
    if (error.Failed()) {
      NS_WARNING("nsINode::AppendChild() failed");
      return error.StealNSResult();
    }
  }
  return NS_OK;
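
In other words, the surviving loop above always appends the removed node's children to the end of the kept node, i.e. the right node is merged into the left one. A rough DOM-level analogy with plain DOM calls (not the editor's internal code path):

    // <p id="left">abc</p><p id="right">def</p>  ->  <p id="left">abcdef</p>
    const left = document.getElementById("left");
    const right = document.getElementById("right");
    while (right.firstChild) {
      left.appendChild(right.firstChild);  // children land at the end of the kept node
    }
    right.remove();
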
@ -5806,12 +5667,9 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
|||
|
||||
if (MOZ_LIKELY(oldPointAtRightContent.IsSet())) {
|
||||
DebugOnly<nsresult> rvIgnored = RangeUpdaterRef().SelAdjJoinNodes(
|
||||
EditorRawDOMPoint(
|
||||
&aContentToKeep,
|
||||
aDirection == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? std::min(removingContentLength, aContentToKeep.Length())
|
||||
: std::min(keepingContentLength, aContentToKeep.Length())),
|
||||
aContentToRemove, oldPointAtRightContent, aDirection);
|
||||
EditorRawDOMPoint(&aContentToKeep, std::min(keepingContentLength,
|
||||
aContentToKeep.Length())),
|
||||
aContentToRemove, oldPointAtRightContent);
|
||||
NS_WARNING_ASSERTION(NS_SUCCEEDED(rvIgnored),
|
||||
"RangeUpdater::SelAdjJoinNodes() failed, but ignored");
|
||||
}
|
||||
|
@ -5848,23 +5706,6 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
|||
|
||||
auto AdjustDOMPoint = [&](nsCOMPtr<nsINode>& aContainer,
|
||||
uint32_t& aOffset) {
|
||||
if (aDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
|
||||
// Now, all content of aContentToRemove are moved to start of
|
||||
// aContentToKeep. Therefore, if a range boundary was in
|
||||
// aContentToRemove, we just need to change the container to
|
||||
// aContentToKeep.
|
||||
if (aContainer == &aContentToRemove) {
|
||||
aContainer = &aContentToKeep;
|
||||
return;
|
||||
}
|
||||
// And also if the range boundary was in aContentToKeep, we need to
|
||||
// adjust the offset because the content in aContentToRemove was
|
||||
// instarted before ex-start content of aContentToKeep.
|
||||
if (aContainer == &aContentToKeep) {
|
||||
aOffset += removingContentLength;
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Now, all content of aContentToRemove are moved to end of
|
||||
// aContentToKeep. Therefore, if a range boundary was in
|
||||
// aContentToRemove, we need to change the container to aContentToKeep and
|
||||
|
@ -5901,10 +5742,7 @@ nsresult HTMLEditor::DoJoinNodes(nsIContent& aContentToKeep,
|
|||
|
||||
if (allowedTransactionsToChangeSelection) {
|
||||
// Editor wants us to set selection at join point.
|
||||
DebugOnly<nsresult> rvIgnored = CollapseSelectionTo(
|
||||
aDirection == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? EditorRawDOMPoint(&aContentToKeep, removingContentLength)
|
||||
: EditorRawDOMPoint(&aContentToKeep, 0u));
|
||||
DebugOnly<nsresult> rvIgnored = CollapseSelectionToStartOf(aContentToKeep);
|
||||
if (MOZ_UNLIKELY(rv == NS_ERROR_EDITOR_DESTROYED)) {
|
||||
NS_WARNING(
|
||||
"EditorBase::CollapseSelectionTo() caused destroying the editor");
|
||||
|
|
|
@@ -454,42 +454,6 @@ class HTMLEditor final : public EditorBase,
    return mIsAbsolutelyPositioningEnabled;
  }

  /**
   * Enable/disable Gecko's traditional join/split node direction, that is,
   * creating left node at splitting a node and removing left node at joining 2
   * nodes. This is acceptable only before first join/split transaction is
   * created.
   */
  bool EnableCompatibleJoinSplitNodeDirection(bool aEnable) {
    if (!CanChangeJoinSplitNodeDirection()) {
      return false;
    }
    mUseGeckoTraditionalJoinSplitBehavior = !aEnable;
    return true;
  }

  /**
   * Return true if the instance works with the legacy join/split node
   * direction.
   */
  [[nodiscard]] bool IsCompatibleJoinSplitNodeDirectionEnabled() const {
    return !mUseGeckoTraditionalJoinSplitBehavior;
  }

  /**
   * Return true if web apps can still change the join split node direction.
   * For saving the footprint, each transaction does not store join/split node
   * direction at first run.  Therefore, join/split node transactions need to
   * refer the direction of corresponding HTMLEditor.  So if the direction were
   * changed after creating join/split transactions, they would break the DOM
   * tree with undoing/redoing within wrong direction.  Therefore, once this
   * instance created a join or split node transaction, this returns false to
   * block to change the direction.
   */
  [[nodiscard]] bool CanChangeJoinSplitNodeDirection() const {
    return !mMaybeHasJoinSplitTransactions;
  }

  /**
   * returns the deepest absolutely positioned container of the selection
   * if it exists or null.
@ -1024,12 +988,9 @@ class HTMLEditor final : public EditorBase,
|
|||
* @param aNewNode The new node called. The previous or following
|
||||
* content of aStartOfRightNode will be moved into
|
||||
* this node.
|
||||
* @param aDirection Whether aNewNode will have previous or following
|
||||
* content of aStartOfRightNode.
|
||||
*/
|
||||
MOZ_CAN_RUN_SCRIPT Result<SplitNodeResult, nsresult> DoSplitNode(
|
||||
const EditorDOMPoint& aStartOfRightNode, nsIContent& aNewNode,
|
||||
SplitNodeDirection aDirection);
|
||||
const EditorDOMPoint& aStartOfRightNode, nsIContent& aNewNode);
|
||||
|
||||
/**
|
||||
* DoJoinNodes() merges contents in aContentToRemove to aContentToKeep and
|
||||
|
@ -1042,13 +1003,9 @@ class HTMLEditor final : public EditorBase,
|
|||
* @param aContentToRemove The node that will be joined with aContentToKeep.
|
||||
* There is no requirement that the two nodes be of
|
||||
* the same type.
|
||||
* @param aDirection Whether aContentToKeep is right node or left node,
|
||||
* and whether aContentToRemove is left node or right
|
||||
* node.
|
||||
*/
|
||||
[[nodiscard]] MOZ_CAN_RUN_SCRIPT nsresult
|
||||
DoJoinNodes(nsIContent& aContentToKeep, nsIContent& aContentToRemove,
|
||||
JoinNodesDirection aDirection);
|
||||
DoJoinNodes(nsIContent& aContentToKeep, nsIContent& aContentToRemove);
|
||||
|
||||
/**
|
||||
* Routines for managing the preservation of selection across
|
||||
|
@ -2709,12 +2666,6 @@ class HTMLEditor final : public EditorBase,
|
|||
MOZ_CAN_RUN_SCRIPT nsresult OnDocumentModified();
|
||||
|
||||
protected: // Called by helper classes.
|
||||
/**
|
||||
* Get split/join node(s) direction for **this** instance.
|
||||
*/
|
||||
[[nodiscard]] inline SplitNodeDirection GetSplitNodeDirection() const;
|
||||
[[nodiscard]] inline JoinNodesDirection GetJoinNodesDirection() const;
|
||||
|
||||
MOZ_CAN_RUN_SCRIPT void OnStartToHandleTopLevelEditSubAction(
|
||||
EditSubAction aTopLevelEditSubAction,
|
||||
nsIEditor::EDirection aDirectionOfTopLevelEditSubAction,
|
||||
|
@ -4459,10 +4410,6 @@ class HTMLEditor final : public EditorBase,
|
|||
|
||||
bool mCRInParagraphCreatesParagraph;
|
||||
|
||||
// Whether use Blink/WebKit compatible joining nodes and split a node
|
||||
// direction or Gecko's traditional direction.
|
||||
bool mUseGeckoTraditionalJoinSplitBehavior;
|
||||
|
||||
// resizing
|
||||
bool mIsObjectResizingEnabled;
|
||||
bool mIsResizing;
|
||||
|
@ -4556,10 +4503,6 @@ class HTMLEditor final : public EditorBase,
|
|||
|
||||
bool mHasBeforeInputBeenCanceled = false;
|
||||
|
||||
// Set to true once the instance creates a JoinNodesTransaction or
|
||||
// SplitNodeTransaction. See also CanChangeJoinSplitNodeDirection().
|
||||
bool mMaybeHasJoinSplitTransactions = false;
|
||||
|
||||
ParagraphSeparator mDefaultParagraphSeparator;
|
||||
|
||||
friend class AlignStateAtSelection; // CollectEditableTargetNodes,
|
||||
|
@ -4581,8 +4524,7 @@ class HTMLEditor final : public EditorBase,
|
|||
// RemoveEmptyInclusiveAncestorInlineElements,
|
||||
// mComposerUpdater, mHasBeforeInputBeenCanceled
|
||||
friend class JoinNodesTransaction; // DidJoinNodesTransaction, DoJoinNodes,
|
||||
// DoSplitNode, GetJoinNodesDirection,
|
||||
// RangeUpdaterRef
|
||||
// DoSplitNode, // RangeUpdaterRef
|
||||
friend class ListElementSelectionState; // CollectEditTargetNodes,
|
||||
// CollectNonEditableNodes
|
||||
friend class ListItemElementSelectionState; // CollectEditTargetNodes,
|
||||
|
@ -4596,8 +4538,7 @@ class HTMLEditor final : public EditorBase,
|
|||
// CollectNonEditableNodes,
|
||||
// CollectTableChildren
|
||||
friend class SlurpBlobEventListener; // BlobReader
|
||||
friend class SplitNodeTransaction; // DoJoinNodes, DoSplitNode,
|
||||
// GetSplitNodeDirection
|
||||
friend class SplitNodeTransaction; // DoJoinNodes, DoSplitNode
|
||||
friend class TransactionManager; // DidDoTransaction, DidRedoTransaction,
|
||||
// DidUndoTransaction
|
||||
friend class
|
||||
|
|
|
@ -6,12 +6,12 @@
|
|||
#include "EditorCommands.h"
|
||||
|
||||
#include "EditorBase.h" // for EditorBase
|
||||
#include "ErrorList.h"
|
||||
#include "HTMLEditor.h" // for HTMLEditor
|
||||
|
||||
#include "mozilla/BasePrincipal.h" // for nsIPrincipal::IsSystemPrincipal()
|
||||
#include "mozilla/StaticPrefs_editor.h"
|
||||
#include "mozilla/dom/Element.h" // for Element
|
||||
#include "mozilla/dom/Document.h" // for Document
|
||||
#include "mozilla/dom/Element.h" // for Element
|
||||
#include "mozilla/dom/Document.h" // for Document
|
||||
#include "mozilla/dom/HTMLInputElement.h" // for HTMLInputElement
|
||||
#include "mozilla/dom/HTMLTextAreaElement.h" // for HTMLTextAreaElement
|
||||
|
||||
|
@ -50,9 +50,6 @@ bool SetDocumentStateCommand::IsCommandEnabled(Command aCommand,
|
|||
switch (aCommand) {
|
||||
case Command::SetDocumentReadOnly:
|
||||
return !!aEditorBase;
|
||||
case Command::EnableCompatibleJoinSplitNodeDirection:
|
||||
return aEditorBase && aEditorBase->IsHTMLEditor() &&
|
||||
aEditorBase->AsHTMLEditor()->CanChangeJoinSplitNodeDirection();
|
||||
default:
|
||||
// The other commands are always enabled if given editor is an HTMLEditor.
|
||||
return aEditorBase && aEditorBase->IsHTMLEditor();
|
||||
|
@@ -170,16 +167,12 @@ nsresult SetDocumentStateCommand::DoCommandParam(
      return NS_OK;
    }
    case Command::EnableCompatibleJoinSplitNodeDirection:
      MOZ_ASSERT_IF(
          StaticPrefs::
                  editor_join_split_direction_compatible_with_the_other_browsers() &&
              aPrincipal && !aPrincipal->IsSystemPrincipal(),
          aBoolParam.value());
      return MOZ_KnownLive(aEditorBase.AsHTMLEditor())
                     ->EnableCompatibleJoinSplitNodeDirection(
                         aBoolParam.value())
                 ? NS_OK
                 : NS_SUCCESS_DOM_NO_OPERATION;
      // Now we don't support the legacy join/split node direction anymore, but
      // this result may be used for the feature detection whether Gecko
      // supports the new direction mode.  Therefore, even though we do nothing,
      // but we should return NS_OK to return `true` from
      // `Document.execCommand()`.
      return NS_OK;
    default:
      return NS_ERROR_NOT_IMPLEMENTED;
  }

@@ -370,8 +363,11 @@ nsresult SetDocumentStateCommand::GetCommandStateParams(
      if (NS_WARN_IF(!htmlEditor)) {
        return NS_ERROR_INVALID_ARG;
      }
      return aParams.SetBool(
          STATE_ALL, htmlEditor->IsCompatibleJoinSplitNodeDirectionEnabled());
      // Now we don't support the legacy join/split node direction anymore, but
      // this result may be used for the feature detection whether Gecko
      // supports the new direction mode.  Therefore, we should return `true`
      // even though executing the command does nothing.
      return aParams.SetBool(STATE_ALL, true);
    }
    default:
      return NS_ERROR_NOT_IMPLEMENTED;
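
Per the comments above, the command now acts purely as a feature-detection hook: execCommand() reports success and the command state is always true. A minimal sketch of how a page might probe it (the command name comes from the mochitest later in this commit; editableDoc is an assumed document containing the editable element):

    const command = "enableCompatibleJoinSplitDirection";
    if (editableDoc.queryCommandSupported(command) &&
        editableDoc.queryCommandState(command)) {
      // Gecko joins and splits nodes in the Blink/WebKit-compatible direction.
    }
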
@ -10,8 +10,7 @@
|
|||
|
||||
#include "EditorDOMPoint.h"
|
||||
#include "HTMLEditHelpers.h"
|
||||
#include "JoinSplitNodeDirection.h" // for JoinNodesDirection and SplitNodeDirection
|
||||
#include "SelectionState.h" // for RangeItem
|
||||
#include "SelectionState.h" // for RangeItem
|
||||
|
||||
#include "ErrorList.h" // for nsresult
|
||||
|
||||
|
@ -32,18 +31,6 @@ namespace mozilla {
|
|||
|
||||
using namespace dom;
|
||||
|
||||
SplitNodeDirection HTMLEditor::GetSplitNodeDirection() const {
|
||||
return MOZ_LIKELY(mUseGeckoTraditionalJoinSplitBehavior)
|
||||
? SplitNodeDirection::LeftNodeIsNewOne
|
||||
: SplitNodeDirection::RightNodeIsNewOne;
|
||||
}
|
||||
|
||||
JoinNodesDirection HTMLEditor::GetJoinNodesDirection() const {
|
||||
return MOZ_LIKELY(mUseGeckoTraditionalJoinSplitBehavior)
|
||||
? JoinNodesDirection::LeftNodeIntoRightNode
|
||||
: JoinNodesDirection::RightNodeIntoLeftNode;
|
||||
}
|
||||
|
||||
Result<CreateElementResult, nsresult>
|
||||
HTMLEditor::ReplaceContainerAndCloneAttributesWithTransaction(
|
||||
Element& aOldContainer, const nsAtom& aTagName) {
|
||||
|
|
|
@ -857,8 +857,7 @@ HTMLEditor::AutoInlineStyleSetter::SplitTextNodeAndApplyStyleToMiddleNode(
|
|||
[&]() MOZ_CAN_RUN_SCRIPT -> Result<SplitNodeResult, nsresult> {
|
||||
EditorDOMPoint atEnd(&aText, aEndOffset);
|
||||
if (atEnd.IsEndOfContainer()) {
|
||||
return SplitNodeResult::NotHandled(atEnd,
|
||||
aHTMLEditor.GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(atEnd);
|
||||
}
|
||||
// We need to split off back of text node
|
||||
Result<SplitNodeResult, nsresult> splitNodeResult =
|
||||
|
@ -887,8 +886,7 @@ HTMLEditor::AutoInlineStyleSetter::SplitTextNodeAndApplyStyleToMiddleNode(
|
|||
: &aText,
|
||||
aStartOffset);
|
||||
if (atStart.IsStartOfContainer()) {
|
||||
return SplitNodeResult::NotHandled(atStart,
|
||||
aHTMLEditor.GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(atStart);
|
||||
}
|
||||
// We need to split off front of text node
|
||||
Result<SplitNodeResult, nsresult> splitNodeResult =
|
||||
|
@ -2129,7 +2127,7 @@ HTMLEditor::SplitAncestorStyledInlineElementsAt(
|
|||
// If the point is in a non-content node, e.g., in the document node, we
|
||||
// should split nothing.
|
||||
if (MOZ_UNLIKELY(!aPointToSplit.IsInContentNode())) {
|
||||
return SplitNodeResult::NotHandled(aPointToSplit, GetSplitNodeDirection());
|
||||
return SplitNodeResult::NotHandled(aPointToSplit);
|
||||
}
|
||||
|
||||
// We assume that this method is called only when we're removing style(s).
|
||||
|
@ -2160,8 +2158,7 @@ HTMLEditor::SplitAncestorStyledInlineElementsAt(
|
|||
}
|
||||
|
||||
// Split any matching style nodes above the point.
|
||||
SplitNodeResult result =
|
||||
SplitNodeResult::NotHandled(aPointToSplit, GetSplitNodeDirection());
|
||||
SplitNodeResult result = SplitNodeResult::NotHandled(aPointToSplit);
|
||||
MOZ_ASSERT(!result.Handled());
|
||||
EditorDOMPoint pointToPutCaret;
|
||||
for (OwningNonNull<Element>& element : arrayOfParents) {
|
||||
|
|
|
@ -5,10 +5,9 @@
|
|||
|
||||
#include "JoinNodesTransaction.h"
|
||||
|
||||
#include "EditorDOMPoint.h" // for EditorDOMPoint, etc.
|
||||
#include "HTMLEditHelpers.h" // for SplitNodeResult
|
||||
#include "JoinSplitNodeDirection.h" // JoinNodesDirection
|
||||
#include "HTMLEditor.h" // for HTMLEditor
|
||||
#include "EditorDOMPoint.h" // for EditorDOMPoint, etc.
|
||||
#include "HTMLEditHelpers.h" // for SplitNodeResult
|
||||
#include "HTMLEditor.h" // for HTMLEditor
|
||||
#include "HTMLEditorInlines.h"
|
||||
#include "HTMLEditUtils.h"
|
||||
|
||||
|
@ -42,14 +41,8 @@ JoinNodesTransaction::JoinNodesTransaction(HTMLEditor& aHTMLEditor,
|
|||
nsIContent& aLeftContent,
|
||||
nsIContent& aRightContent)
|
||||
: mHTMLEditor(&aHTMLEditor),
|
||||
mRemovedContent(aHTMLEditor.GetJoinNodesDirection() ==
|
||||
JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? &aLeftContent
|
||||
: &aRightContent),
|
||||
mKeepingContent(aHTMLEditor.GetJoinNodesDirection() ==
|
||||
JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? &aRightContent
|
||||
: &aLeftContent) {
|
||||
mRemovedContent(&aRightContent),
|
||||
mKeepingContent(&aLeftContent) {
|
||||
// printf("JoinNodesTransaction size: %zu\n", sizeof(JoinNodesTransaction));
|
||||
static_assert(sizeof(JoinNodesTransaction) <= 64,
|
||||
"Transaction classes may be created a lot and may be alive "
|
||||
|
@ -71,9 +64,7 @@ std::ostream& operator<<(std::ostream& aStream,
|
|||
aStream << " (" << *aTransaction.mKeepingContent << ")";
|
||||
}
|
||||
aStream << ", mJoinedOffset=" << aTransaction.mJoinedOffset
|
||||
<< ", mHTMLEditor=" << aTransaction.mHTMLEditor.get()
|
||||
<< ", GetJoinNodesDirection()="
|
||||
<< aTransaction.GetJoinNodesDirection() << " }";
|
||||
<< ", mHTMLEditor=" << aTransaction.mHTMLEditor.get() << " }";
|
||||
return aStream;
|
||||
}
|
||||
|
||||
|
@ -84,16 +75,6 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(JoinNodesTransaction, EditTransactionBase,
|
|||
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(JoinNodesTransaction)
|
||||
NS_INTERFACE_MAP_END_INHERITING(EditTransactionBase)
|
||||
|
||||
SplitNodeDirection JoinNodesTransaction::GetSplitNodeDirection() const {
|
||||
return MOZ_LIKELY(mHTMLEditor) ? mHTMLEditor->GetSplitNodeDirection()
|
||||
: SplitNodeDirection::LeftNodeIsNewOne;
|
||||
}
|
||||
|
||||
JoinNodesDirection JoinNodesTransaction::GetJoinNodesDirection() const {
|
||||
return MOZ_LIKELY(mHTMLEditor) ? mHTMLEditor->GetJoinNodesDirection()
|
||||
: JoinNodesDirection::LeftNodeIntoRightNode;
|
||||
}
|
||||
|
||||
bool JoinNodesTransaction::CanDoIt() const {
|
||||
if (NS_WARN_IF(!mKeepingContent) || NS_WARN_IF(!mRemovedContent) ||
|
||||
NS_WARN_IF(!mHTMLEditor) ||
|
||||
|
@ -140,27 +121,20 @@ nsresult JoinNodesTransaction::DoTransactionInternal(
|
|||
// For now, setting mJoinedOffset to removed content length so that
|
||||
// CreateJoinedPoint returns a point in mKeepingContent whose offset is
|
||||
// the result if all content in mRemovedContent are moved to start or end of
|
||||
// mKeepingContent without any intervation. The offset will be adjusted
|
||||
// mKeepingContent without any intervention. The offset will be adjusted
|
||||
// below.
|
||||
mJoinedOffset =
|
||||
GetJoinNodesDirection() == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? mRemovedContent->Length()
|
||||
: mKeepingContent->Length();
|
||||
mJoinedOffset = mKeepingContent->Length();
|
||||
|
||||
const OwningNonNull<HTMLEditor> htmlEditor = *mHTMLEditor;
|
||||
const OwningNonNull<nsIContent> removingContent = *mRemovedContent;
|
||||
const OwningNonNull<nsIContent> keepingContent = *mKeepingContent;
|
||||
nsresult rv;
|
||||
// Let's try to get actual joined point with the tacker.
|
||||
EditorDOMPoint joinNodesPoint =
|
||||
GetJoinNodesDirection() == JoinNodesDirection::LeftNodeIntoRightNode
|
||||
? EditorDOMPoint(keepingContent, 0u)
|
||||
: EditorDOMPoint::AtEndOf(keepingContent);
|
||||
auto joinNodesPoint = EditorDOMPoint::AtEndOf(keepingContent);
|
||||
{
|
||||
AutoTrackDOMPoint trackJoinNodePoint(htmlEditor->RangeUpdaterRef(),
|
||||
&joinNodesPoint);
|
||||
rv = htmlEditor->DoJoinNodes(keepingContent, removingContent,
|
||||
GetJoinNodesDirection());
|
||||
rv = htmlEditor->DoJoinNodes(keepingContent, removingContent);
|
||||
NS_WARNING_ASSERTION(NS_SUCCEEDED(rv), "HTMLEditor::DoJoinNodes() failed");
|
||||
}
|
||||
// Adjust join node offset to the actual offset where the original first
|
||||
|
@ -189,9 +163,8 @@ NS_IMETHODIMP JoinNodesTransaction::UndoTransaction() {
|
|||
const OwningNonNull<HTMLEditor> htmlEditor = *mHTMLEditor;
|
||||
const OwningNonNull<nsIContent> removedContent = *mRemovedContent;
|
||||
|
||||
Result<SplitNodeResult, nsresult> splitNodeResult =
|
||||
htmlEditor->DoSplitNode(CreateJoinedPoint<EditorDOMPoint>(),
|
||||
removedContent, GetSplitNodeDirection());
|
||||
Result<SplitNodeResult, nsresult> splitNodeResult = htmlEditor->DoSplitNode(
|
||||
CreateJoinedPoint<EditorDOMPoint>(), removedContent);
|
||||
if (MOZ_UNLIKELY(splitNodeResult.isErr())) {
|
||||
NS_WARNING("HTMLEditor::DoSplitNode() failed");
|
||||
return splitNodeResult.unwrapErr();
|
||||
|
|
|
@ -60,11 +60,6 @@ class JoinNodesTransaction final : public EditTransactionBase {
|
|||
|
||||
MOZ_CAN_RUN_SCRIPT NS_IMETHOD RedoTransaction() override;
|
||||
|
||||
// Note that we don't support join/split node direction switching per
|
||||
// transaction.
|
||||
[[nodiscard]] SplitNodeDirection GetSplitNodeDirection() const;
|
||||
[[nodiscard]] JoinNodesDirection GetJoinNodesDirection() const;
|
||||
|
||||
/**
|
||||
* GetExistingContent() and GetRemovedContent() never returns nullptr
|
||||
* unless the cycle collector clears them out.
|
||||
|
|
|
@@ -1,51 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef JoinSplitNodeDirection_h
#define JoinSplitNodeDirection_h

#include <ostream>

namespace mozilla {

// JoinNodesDirection is also affected to which one is new node at splitting
// a node because a couple of undo/redo.
enum class JoinNodesDirection {
  LeftNodeIntoRightNode,
  RightNodeIntoLeftNode,
};

static inline std::ostream& operator<<(std::ostream& aStream,
                                       JoinNodesDirection aJoinNodesDirection) {
  if (aJoinNodesDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
    return aStream << "JoinNodesDirection::LeftNodeIntoRightNode";
  }
  if (aJoinNodesDirection == JoinNodesDirection::RightNodeIntoLeftNode) {
    return aStream << "JoinNodesDirection::RightNodeIntoLeftNode";
  }
  return aStream << "Invalid value";
}

// SplitNodeDirection is also affected to which one is removed at joining a
// node because a couple of undo/redo.
enum class SplitNodeDirection {
  LeftNodeIsNewOne,
  RightNodeIsNewOne,
};

static inline std::ostream& operator<<(std::ostream& aStream,
                                       SplitNodeDirection aSplitNodeDirection) {
  if (aSplitNodeDirection == SplitNodeDirection::LeftNodeIsNewOne) {
    return aStream << "SplitNodeDirection::LeftNodeIsNewOne";
  }
  if (aSplitNodeDirection == SplitNodeDirection::RightNodeIsNewOne) {
    return aStream << "SplitNodeDirection::RightNodeIsNewOne";
  }
  return aStream << "Invalid value";
}

}  // namespace mozilla

#endif  // JoinSplitNodeDirection_h
@ -8,7 +8,6 @@
|
|||
#include "AutoRangeArray.h" // for AutoRangeArray
|
||||
#include "EditorUtils.h" // for EditorUtils, AutoRangeArray
|
||||
#include "ErrorList.h"
|
||||
#include "JoinSplitNodeDirection.h" // for JoinNodesDirection, SplitNodeDirection
|
||||
|
||||
#include "mozilla/Assertions.h" // for MOZ_ASSERT, etc.
|
||||
#include "mozilla/IntegerRange.h" // for IntegerRange
|
||||
|
@ -300,8 +299,7 @@ void RangeUpdater::SelAdjDeleteNode(nsINode& aNodeToDelete) {
|
|||
|
||||
nsresult RangeUpdater::SelAdjSplitNode(nsIContent& aOriginalContent,
|
||||
uint32_t aSplitOffset,
|
||||
nsIContent& aNewContent,
|
||||
SplitNodeDirection aSplitNodeDirection) {
|
||||
nsIContent& aNewContent) {
|
||||
if (mLocked) {
|
||||
// lock set by Will/DidReplaceParent, etc...
|
||||
return NS_OK;
|
||||
|
@ -319,28 +317,15 @@ nsresult RangeUpdater::SelAdjSplitNode(nsIContent& aOriginalContent,
|
|||
auto AdjustDOMPoint = [&](nsCOMPtr<nsINode>& aContainer,
|
||||
uint32_t& aOffset) -> void {
|
||||
if (aContainer == atNewNode.GetContainer()) {
|
||||
if (aSplitNodeDirection == SplitNodeDirection::LeftNodeIsNewOne) {
|
||||
// When we create a left node, we insert it before the right node.
|
||||
// In this case,
|
||||
// - `{}<right/>` should become `{}<left/><right/>` (0 -> 0)
|
||||
// - `<right/>{}` should become `<left/><right/>{}` (1 -> 2)
|
||||
// - `{<right/>}` should become `{<left/><right/>}` (0 -> 0, 1 -> 2}
|
||||
// Therefore, we need to increate the offset only when the offset is
|
||||
// larger than the offset at the left node.
|
||||
if (aOffset > atNewNode.Offset()) {
|
||||
aOffset++;
|
||||
}
|
||||
} else {
|
||||
// When we create a right node, we insert it after the left node.
|
||||
// In this case,
|
||||
// - `{}<left/>` should become `{}<left/><right/>` (0 -> 0)
|
||||
// - `<left/>{}` should become `<left/><right/>{}` (1 -> 2)
|
||||
// - `{<left/>}` should become `{<left/><right/>}` (0 -> 0, 1 -> 2}
|
||||
// Therefore, we need to increate the offset only when the offset equals
|
||||
// or is larger than the offset at the right node.
|
||||
if (aOffset >= atNewNode.Offset()) {
|
||||
aOffset++;
|
||||
}
|
||||
// When we create a right node, we insert it after the left node.
|
||||
// In this case,
|
||||
// - `{}<left/>` should become `{}<left/><right/>` (0 -> 0)
|
||||
// - `<left/>{}` should become `<left/><right/>{}` (1 -> 2)
|
||||
// - `{<left/>}` should become `{<left/><right/>}` (0 -> 0, 1 -> 2}
|
||||
// Therefore, we need to increate the offset only when the offset equals
|
||||
// or is larger than the offset at the right node.
|
||||
if (aOffset >= atNewNode.Offset()) {
|
||||
aOffset++;
|
||||
}
|
||||
}
|
||||
// If point is in the range which are moved from aOriginalContent to
|
||||
|
@@ -350,13 +335,7 @@ nsresult RangeUpdater::SelAdjSplitNode(nsIContent& aOriginalContent,
    if (aContainer != &aOriginalContent) {
      return;
    }
    if (aSplitNodeDirection == SplitNodeDirection::LeftNodeIsNewOne) {
      if (aOffset > aSplitOffset) {
        aOffset -= aSplitOffset;
      } else {
        aContainer = &aNewContent;
      }
    } else if (aOffset >= aSplitOffset) {
    if (aOffset >= aSplitOffset) {
      aContainer = &aNewContent;
      aOffset -= aSplitOffset;
    }
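
The remaining branch above encodes a single rule: a stored point in the original node at or after the split offset now belongs to the new (right) node, shifted left by the split offset; earlier points are untouched. A minimal sketch of that mapping (illustrative names, not the RangeUpdater API):

    function adjustPointAfterSplit(point, originalNode, newRightNode, splitOffset) {
      if (point.container === originalNode && point.offset >= splitOffset) {
        return { container: newRightNode, offset: point.offset - splitOffset };
      }
      return point;  // points before the split offset stay in the original node
    }
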
@ -375,8 +354,7 @@ nsresult RangeUpdater::SelAdjSplitNode(nsIContent& aOriginalContent,
|
|||
nsresult RangeUpdater::SelAdjJoinNodes(
|
||||
const EditorRawDOMPoint& aStartOfRightContent,
|
||||
const nsIContent& aRemovedContent,
|
||||
const EditorDOMPoint& aOldPointAtRightContent,
|
||||
JoinNodesDirection aJoinNodesDirection) {
|
||||
const EditorDOMPoint& aOldPointAtRightContent) {
|
||||
MOZ_ASSERT(aStartOfRightContent.IsSetAndValid());
|
||||
MOZ_ASSERT(aOldPointAtRightContent.IsSet()); // Invalid point in most cases
|
||||
MOZ_ASSERT(aOldPointAtRightContent.HasOffset());
|
||||
|
@ -402,9 +380,7 @@ nsresult RangeUpdater::SelAdjJoinNodes(
|
|||
// right node, the offset should be same. Otherwise, we need to advance
|
||||
// the offset to length of the removed content.
|
||||
aContainer = aStartOfRightContent.GetContainer();
|
||||
if (aJoinNodesDirection == JoinNodesDirection::RightNodeIntoLeftNode) {
|
||||
aOffset += aStartOfRightContent.Offset();
|
||||
}
|
||||
aOffset += aStartOfRightContent.Offset();
|
||||
}
|
||||
// TODO: If aOldPointAtRightContent.GetContainer() was in aRemovedContent,
|
||||
// we fail to adjust container and offset here because we need to
|
||||
|
@ -423,12 +399,6 @@ nsresult RangeUpdater::SelAdjJoinNodes(
|
|||
aContainer = aStartOfRightContent.GetContainer();
|
||||
aOffset = aStartOfRightContent.Offset();
|
||||
}
|
||||
} else if (aContainer == aStartOfRightContent.GetContainer()) {
|
||||
// If the point is in joined node, and removed content is moved to
|
||||
// start of the joined node, we need to adjust the offset.
|
||||
if (aJoinNodesDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
|
||||
aOffset += aStartOfRightContent.Offset();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -220,12 +220,9 @@ class MOZ_STACK_CLASS RangeUpdater final {
|
|||
* it.
|
||||
* @param aNewContent The new content node which was inserted into
|
||||
* the DOM tree.
|
||||
* @param aSplitNodeDirection Whether aNewNode was inserted before or after
|
||||
* aOriginalContent.
|
||||
*/
|
||||
nsresult SelAdjSplitNode(nsIContent& aOriginalContent, uint32_t aSplitOffset,
|
||||
nsIContent& aNewContent,
|
||||
SplitNodeDirection aSplitNodeDirection);
|
||||
nsIContent& aNewContent);
|
||||
|
||||
/**
|
||||
* SelAdjJoinNodes() is called immediately after joining aRemovedContent and
|
||||
|
@ -242,8 +239,7 @@ class MOZ_STACK_CLASS RangeUpdater final {
|
|||
*/
|
||||
nsresult SelAdjJoinNodes(const EditorRawDOMPoint& aStartOfRightContent,
|
||||
const nsIContent& aRemovedContent,
|
||||
const EditorDOMPoint& aOldPointAtRightContent,
|
||||
JoinNodesDirection aJoinNodesDirection);
|
||||
const EditorDOMPoint& aOldPointAtRightContent);
|
||||
void SelAdjInsertText(const dom::Text& aTextNode, uint32_t aOffset,
|
||||
uint32_t aInsertedLength);
|
||||
void SelAdjDeleteText(const dom::Text& aTextNode, uint32_t aOffset,
|
||||
|
|
|
@ -10,8 +10,7 @@
|
|||
#include "HTMLEditor.h" // for HTMLEditor
|
||||
#include "HTMLEditorInlines.h"
|
||||
#include "HTMLEditUtils.h"
|
||||
#include "JoinSplitNodeDirection.h" // for SplitNodeDirection
|
||||
#include "SelectionState.h" // for AutoTrackDOMPoint and RangeUpdater
|
||||
#include "SelectionState.h" // for AutoTrackDOMPoint and RangeUpdater
|
||||
|
||||
#include "mozilla/Logging.h"
|
||||
#include "mozilla/Maybe.h"
|
||||
|
@ -71,9 +70,7 @@ std::ostream& operator<<(std::ostream& aStream,
|
|||
aStream << " (" << *aTransaction.mSplitContent << ")";
|
||||
}
|
||||
aStream << ", mSplitOffset=" << aTransaction.mSplitOffset
|
||||
<< ", mHTMLEditor=" << aTransaction.mHTMLEditor.get()
|
||||
<< ", GetSplitNodeDirection()="
|
||||
<< aTransaction.GetSplitNodeDirection() << " }";
|
||||
<< ", mHTMLEditor=" << aTransaction.mHTMLEditor.get() << " }";
|
||||
return aStream;
|
||||
}
|
||||
|
||||
|
@ -86,16 +83,6 @@ NS_IMPL_RELEASE_INHERITED(SplitNodeTransaction, EditTransactionBase)
|
|||
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(SplitNodeTransaction)
|
||||
NS_INTERFACE_MAP_END_INHERITING(EditTransactionBase)
|
||||
|
||||
SplitNodeDirection SplitNodeTransaction::GetSplitNodeDirection() const {
|
||||
return MOZ_LIKELY(mHTMLEditor) ? mHTMLEditor->GetSplitNodeDirection()
|
||||
: SplitNodeDirection::LeftNodeIsNewOne;
|
||||
}
|
||||
|
||||
JoinNodesDirection SplitNodeTransaction::GetJoinNodesDirection() const {
|
||||
return MOZ_LIKELY(mHTMLEditor) ? mHTMLEditor->GetJoinNodesDirection()
|
||||
: JoinNodesDirection::LeftNodeIntoRightNode;
|
||||
}
|
||||
|
||||
NS_IMETHODIMP SplitNodeTransaction::DoTransaction() {
|
||||
MOZ_LOG(GetLogModule(), LogLevel::Info,
|
||||
("%p SplitNodeTransaction::%s this=%s", this, __FUNCTION__,
|
||||
|
@ -157,7 +144,7 @@ Result<SplitNodeResult, nsresult> SplitNodeTransaction::DoTransactionInternal(
|
|||
Result<SplitNodeResult, nsresult> splitNodeResult = aHTMLEditor.DoSplitNode(
|
||||
EditorDOMPoint(&aSplittingContent,
|
||||
std::min(aSplitOffset, aSplittingContent.Length())),
|
||||
aNewContent, GetSplitNodeDirection());
|
||||
aNewContent);
|
||||
if (MOZ_UNLIKELY(splitNodeResult.isErr())) {
|
||||
NS_WARNING("HTMLEditor::DoSplitNode() failed");
|
||||
return splitNodeResult;
|
||||
|
@ -183,21 +170,12 @@ NS_IMETHODIMP SplitNodeTransaction::UndoTransaction() {
|
|||
const OwningNonNull<HTMLEditor> htmlEditor = *mHTMLEditor;
|
||||
const OwningNonNull<nsIContent> keepingContent = *mSplitContent;
|
||||
const OwningNonNull<nsIContent> removingContent = *mNewContent;
|
||||
nsresult rv;
|
||||
EditorDOMPoint joinedPoint;
|
||||
{
|
||||
// Unfortunately, we cannot track joining point if moving right node content
|
||||
// into left node since it cannot track changes from web apps and HTMLEditor
|
||||
// never removes the content of the left node. So it should be true that
|
||||
// we don't need to track the point in the direction.
|
||||
Maybe<AutoTrackDOMPoint> trackJoinedPoint;
|
||||
if (GetJoinNodesDirection() == JoinNodesDirection::LeftNodeIntoRightNode) {
|
||||
joinedPoint.Set(keepingContent, 0u);
|
||||
trackJoinedPoint.emplace(htmlEditor->RangeUpdaterRef(), &joinedPoint);
|
||||
}
|
||||
rv = htmlEditor->DoJoinNodes(keepingContent, removingContent,
|
||||
GetJoinNodesDirection());
|
||||
}
|
||||
// Unfortunately, we cannot track joining point if moving right node content
|
||||
// into left node since it cannot track changes from web apps and HTMLEditor
|
||||
// never removes the content of the left node. So it should be true that
|
||||
// we don't need to track the point in this case.
|
||||
nsresult rv = htmlEditor->DoJoinNodes(keepingContent, removingContent);
|
||||
if (NS_SUCCEEDED(rv)) {
|
||||
// Adjust split offset for redo here
|
||||
if (joinedPoint.IsSet()) {
|
||||
|
|
|
@ -53,11 +53,6 @@ class SplitNodeTransaction final : public EditTransactionBase {
|
|||
|
||||
MOZ_CAN_RUN_SCRIPT NS_IMETHOD RedoTransaction() override;
|
||||
|
||||
// Note that we don't support join/split node direction switching per
|
||||
// transaction.
|
||||
[[nodiscard]] SplitNodeDirection GetSplitNodeDirection() const;
|
||||
[[nodiscard]] JoinNodesDirection GetJoinNodesDirection() const;
|
||||
|
||||
nsIContent* GetSplitContent() const { return mSplitContent; }
|
||||
nsIContent* GetNewContent() const { return mNewContent; }
|
||||
nsINode* GetParentNode() const { return mParentNode; }
|
||||
|
|
|
@ -22,172 +22,32 @@ SimpleTest.waitForFocus(async () => {
|
|||
}
|
||||
|
||||
await resetIframe();
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [["editor.join_split_direction.compatible_with_the_other_browsers", false]],
|
||||
});
|
||||
(function test_command_when_legacy_behavior_is_enabled_by_default() {
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><br></div>";
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandSupported("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_legacy_behavior_is_enabled_by_default: command should be supported"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_legacy_behavior_is_enabled_by_default: command should be enabled"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_legacy_behavior_is_enabled_by_default: command state should be false"
|
||||
);
|
||||
is(
|
||||
iframe.contentDocument.queryCommandValue("enableCompatibleJoinSplitDirection"),
|
||||
"",
|
||||
"test_command_when_legacy_behavior_is_enabled_by_default: command value should be empty string"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "true"),
|
||||
"test_command_when_legacy_behavior_is_enabled_by_default: command to enable it should return true"
|
||||
);
|
||||
})();
|
||||
|
||||
(function test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default() {
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandSupported("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default: command should be supported"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default: command should be enabled"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default: command state should be true"
|
||||
);
|
||||
is(
|
||||
iframe.contentDocument.queryCommandValue("enableCompatibleJoinSplitDirection"),
|
||||
"",
|
||||
"test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default: command value should be empty string"
|
||||
);
|
||||
|
||||
ok(
|
||||
iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "false"),
|
||||
"test_command_when_enabling_new_behavior_when_legacy_one_is_enabled_by_default: command to disable it should return true"
|
||||
);
|
||||
})();
|
||||
|
||||
(function test_command_when_disabling_new_behavior_when_the_legacy_one_is_enabled_by_default() {
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandSupported("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_disabling_new_behavior_when_the_legacy_one_is_enabled_by_default: command should be supported"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_disabling_new_behavior_when_the_legacy_one_is_enabled_by_default: command should be enabled"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_disabling_new_behavior_when_the_legacy_one_is_enabled_by_default: command state should be false"
|
||||
);
|
||||
is(
|
||||
iframe.contentDocument.queryCommandValue("enableCompatibleJoinSplitDirection"),
|
||||
"",
|
||||
"test_command_when_disabling_new_behavior_when_the_legacy_one_is_enabled_by_default: command value should be empty string"
|
||||
);
|
||||
})();
|
||||
|
||||
await resetIframe();
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [["editor.join_split_direction.compatible_with_the_other_browsers", true]],
|
||||
});
|
||||
(function test_command_when_new_behavior_is_enabled_by_default() {
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><br></div>";
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandSupported("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command should be supported"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command should be enabled"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command state should be true"
|
||||
);
|
||||
is(
|
||||
iframe.contentDocument.queryCommandValue("enableCompatibleJoinSplitDirection"),
|
||||
"",
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command value should be empty string"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "false"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command to disable it should return false"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command state should be true even after executing the command to disable it"
|
||||
);
|
||||
})();
|
||||
|
||||
await resetIframe();
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [["editor.join_split_direction.compatible_with_the_other_browsers", false]],
|
||||
});
|
||||
(function test_command_disabled_after_joining_nodes() {
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><p>abc</p><p>def</p></div>";
|
||||
iframe.contentWindow.getSelection().collapse(iframe.contentDocument.querySelector("p + p").firstChild, 0);
|
||||
iframe.contentDocument.execCommand("delete");
|
||||
ok(
|
||||
!iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "true"),
|
||||
"test_command_disabled_after_joining_nodes: command should return false"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command should be disabled"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_when_new_behavior_is_enabled_by_default: command state should be false"
|
||||
);
|
||||
})();
|
||||
|
||||
await resetIframe();
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [["editor.join_split_direction.compatible_with_the_other_browsers", false]],
|
||||
});
|
||||
(function test_command_disabled_after_splitting_node() {
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><p>abcdef</p></div>";
|
||||
iframe.contentWindow.getSelection().collapse(iframe.contentDocument.querySelector("p").firstChild, "abc".length);
|
||||
iframe.contentDocument.execCommand("insertParagraph");
|
||||
ok(
|
||||
!iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "true"),
|
||||
"test_command_disabled_after_splitting_node: command should return false"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_disabled_after_splitting_node: command should be disabled"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"test_command_disabled_after_splitting_node: command state should be false"
|
||||
);
|
||||
})();
|
||||
|
||||
await resetIframe();
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [["editor.join_split_direction.compatible_with_the_other_browsers", false]],
|
||||
});
|
||||
(function test_split_direction_after_enabling_new_direction() {
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><p>abc</p><p>def</p></div>";
|
||||
const rightP = iframe.contentDocument.querySelector("p + p");
|
||||
iframe.contentWindow.getSelection().collapse(rightP.firstChild, 0);
|
||||
iframe.contentDocument.execCommand("delete");
|
||||
iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "true");
|
||||
is(
|
||||
iframe.contentDocument.querySelector("p"),
|
||||
rightP,
|
||||
"test_split_direction_after_enabling_new_direction: left paragraph should be deleted and right paragraph should be alive"
|
||||
);
|
||||
})();
|
||||
iframe.contentDocument.body.innerHTML = "<div contenteditable><br></div>";
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandSupported("enableCompatibleJoinSplitDirection"),
|
||||
"command should be supported"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandEnabled("enableCompatibleJoinSplitDirection"),
|
||||
"command should be enabled"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"command state should be true"
|
||||
);
|
||||
is(
|
||||
iframe.contentDocument.queryCommandValue("enableCompatibleJoinSplitDirection"),
|
||||
"",
|
||||
"command value should be empty string"
|
||||
);
|
||||
ok(
|
||||
!iframe.contentDocument.execCommand("enableCompatibleJoinSplitDirection", false, "false"),
|
||||
"command to disable it should return false"
|
||||
);
|
||||
ok(
|
||||
iframe.contentDocument.queryCommandState("enableCompatibleJoinSplitDirection"),
|
||||
"command state should be true even after executing the command to disable it"
|
||||
);
|
||||
|
||||
SimpleTest.finish();
|
||||
});
|
||||
|
|
|
@ -50,16 +50,10 @@ interface nsIEditActionListener : nsISupports
* right node. Otherwise, it points start of inserted
* right node content in the left node.
* @param aRemovedNode The removed node.
* @param aLeftNodeWasRemoved
* true if left node is removed and its contents were
* moved into start of the right node.
* false if right node is removed and its contents were
* moved into end of the left node.
*/
[noscript]
void DidJoinContents([const] in EditorRawDOMPointRef aJoinedPoint,
[const] in Node aRemovedNode,
in bool aLeftNodeWasRemoved);
[const] in Node aRemovedNode);

/**
* Called after the editor inserts text.
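For context on the nsIEditActionListener change above: the aLeftNodeWasRemoved flag is gone, so a listener now only receives the joined point and the removed node. Below is a minimal, hypothetical C++ listener against the new shape; it is not part of this patch. The class name, the include set, and the method body are invented for illustration, and only the DidJoinContents signature follows the updated IDL and the TextServicesDocument implementation further down.

// Hypothetical sketch only -- not part of this patch. Class name and body are
// invented; the DidJoinContents signature mirrors the updated IDL above.
#include "nsIEditActionListener.h"   // generated from nsIEditActionListener.idl
#include "mozilla/EditorDOMPoint.h"  // EditorRawDOMPoint (assumed export path)

class JoinObserver final : public nsIEditActionListener {
 public:
  NS_DECL_ISUPPORTS

  // The direction flag is no longer reported; only the joined point and the
  // node that was removed are passed to listeners.
  NS_IMETHOD DidJoinContents(const mozilla::EditorRawDOMPoint& aJoinedPoint,
                             const nsINode* aRemovedNode) override {
    // e.g. drop any cached offsets that referred to aRemovedNode.
    return NS_OK;
  }

  // Remaining nsIEditActionListener methods omitted from this sketch.

 private:
  ~JoinObserver() = default;
};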
@ -10,7 +10,6 @@
#include "FilteredContentIterator.h" // for FilteredContentIterator
#include "HTMLEditHelpers.h" // for BlockInlineCheck
#include "HTMLEditUtils.h" // for HTMLEditUtils
#include "JoinSplitNodeDirection.h" // for JoinNodesDirection

#include "mozilla/Assertions.h" // for MOZ_ASSERT, etc
#include "mozilla/IntegerRange.h" // for IntegerRange
@ -1338,8 +1337,7 @@ void TextServicesDocument::DidDeleteContent(const nsIContent& aChildContent) {
}

void TextServicesDocument::DidJoinContents(
const EditorRawDOMPoint& aJoinedPoint, const nsIContent& aRemovedContent,
JoinNodesDirection aJoinNodesDirection) {
const EditorRawDOMPoint& aJoinedPoint, const nsIContent& aRemovedContent) {
// Make sure that both nodes are text nodes -- otherwise we don't care.
if (!aJoinedPoint.IsInTextNode() || !aRemovedContent.IsText()) {
return;
@ -1367,30 +1365,19 @@ void TextServicesDocument::DidJoinContents(
const size_t removedIndex = *maybeRemovedIndex;
const size_t joinedIndex = *maybeJoinedIndex;

if (aJoinNodesDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
if (MOZ_UNLIKELY(removedIndex > joinedIndex)) {
NS_ASSERTION(removedIndex < joinedIndex, "Indexes out of order.");
return;
}
NS_ASSERTION(mOffsetTable[joinedIndex]->mOffsetInTextNode == 0,
"Unexpected offset value for joinedIndex.");
} else {
if (MOZ_UNLIKELY(joinedIndex > removedIndex)) {
NS_ASSERTION(joinedIndex < removedIndex, "Indexes out of order.");
return;
}
NS_ASSERTION(mOffsetTable[removedIndex]->mOffsetInTextNode == 0,
"Unexpected offset value for rightIndex.");
if (MOZ_UNLIKELY(joinedIndex > removedIndex)) {
NS_ASSERTION(joinedIndex < removedIndex, "Indexes out of order.");
return;
}
NS_ASSERTION(mOffsetTable[removedIndex]->mOffsetInTextNode == 0,
"Unexpected offset value for rightIndex.");

// Run through the table and change all entries referring to
// the removed node so that they now refer to the joined node,
// and adjust offsets if necessary.
const uint32_t movedTextDataLength =
aJoinNodesDirection == JoinNodesDirection::LeftNodeIntoRightNode
? aJoinedPoint.Offset()
: aJoinedPoint.ContainerAs<Text>()->TextDataLength() -
aJoinedPoint.Offset();
aJoinedPoint.ContainerAs<Text>()->TextDataLength() -
aJoinedPoint.Offset();
for (uint32_t i = removedIndex; i < mOffsetTable.Length(); i++) {
const UniquePtr<OffsetEntry>& entry = mOffsetTable[i];
LockOffsetEntryArrayLengthInDebugBuild(observer, mOffsetTable);
@ -1399,26 +1386,9 @@ void TextServicesDocument::DidJoinContents(
}
if (entry->mIsValid) {
entry->mTextNode = aJoinedPoint.ContainerAs<Text>();
if (aJoinNodesDirection == JoinNodesDirection::RightNodeIntoLeftNode) {
// The text was moved from aRemovedContent to end of the container of
// aJoinedPoint.
entry->mOffsetInTextNode += movedTextDataLength;
}
}
}

if (aJoinNodesDirection == JoinNodesDirection::LeftNodeIntoRightNode) {
// The text was moved from aRemovedContent to start of the container of
// aJoinedPoint.
for (uint32_t i = joinedIndex; i < mOffsetTable.Length(); i++) {
const UniquePtr<OffsetEntry>& entry = mOffsetTable[i];
LockOffsetEntryArrayLengthInDebugBuild(observer, mOffsetTable);
if (entry->mTextNode != aJoinedPoint.ContainerAs<Text>()) {
break;
}
if (entry->mIsValid) {
entry->mOffsetInTextNode += movedTextDataLength;
}
// The text was moved from aRemovedContent to end of the container of
// aJoinedPoint.
entry->mOffsetInTextNode += movedTextDataLength;
}
}
@ -2784,16 +2754,12 @@ TextServicesDocument::DidDeleteNode(nsINode* aChild, nsresult aResult) {
}

NS_IMETHODIMP TextServicesDocument::DidJoinContents(
const EditorRawDOMPoint& aJoinedPoint, const nsINode* aRemovedNode,
bool aLeftNodeWasRemoved) {
const EditorRawDOMPoint& aJoinedPoint, const nsINode* aRemovedNode) {
if (MOZ_UNLIKELY(NS_WARN_IF(!aJoinedPoint.IsSetAndValid()) ||
NS_WARN_IF(!aRemovedNode->IsContent()))) {
return NS_OK;
}
DidJoinContents(aJoinedPoint, *aRemovedNode->AsContent(),
aLeftNodeWasRemoved
? JoinNodesDirection::LeftNodeIntoRightNode
: JoinNodesDirection::RightNodeIntoLeftNode);
DidJoinContents(aJoinedPoint, *aRemovedNode->AsContent());
return NS_OK;
}
@ -27,7 +27,6 @@ namespace mozilla {
class EditorBase;
class FilteredContentIterator;
class OffsetEntry;
enum class JoinNodesDirection; // Declared in HTMLEditHelpers.h

namespace dom {
class AbstractRange;
@ -374,8 +373,7 @@ class TextServicesDocument final : public nsIEditActionListener {
*/
void DidDeleteContent(const nsIContent& aChildContent);
void DidJoinContents(const EditorRawDOMPoint& aJoinedPoint,
const nsIContent& aRemovedContent,
JoinNodesDirection aJoinNodesDirection);
const nsIContent& aRemovedContent);

private:
// TODO: We should get rid of this method since `aAbstractRange` has
@ -12,6 +12,7 @@ include protocol PJSOracle;

#if defined(XP_WIN)
include protocol PWindowsUtils;
include protocol PWinFileDialog;
#endif

#if defined(MOZ_SANDBOX) && defined(MOZ_DEBUG) && defined(ENABLE_TESTS)
@ -102,6 +103,7 @@ child:

#if defined(XP_WIN)
async StartWindowsUtilsService(Endpoint<PWindowsUtilsChild> aEndpoint);
async StartWinFileDialogService(Endpoint<PWinFileDialogChild> aEndpoint);

async GetUntrustedModulesData() returns (UntrustedModulesData? data);
@ -5,14 +5,15 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "UtilityProcessChild.h"

#include "mozilla/ipc/UtilityProcessManager.h"
#include "mozilla/ipc/UtilityProcessSandboxing.h"
#include "mozilla/AppShutdown.h"
#include "mozilla/Logging.h"
#include "mozilla/dom/ContentParent.h"
#include "mozilla/dom/JSOracleChild.h"
#include "mozilla/dom/MemoryReportRequest.h"
#include "mozilla/ipc/CrashReporterClient.h"
#include "mozilla/ipc/Endpoint.h"
#include "mozilla/AppShutdown.h"
#include "mozilla/ipc/UtilityProcessManager.h"
#include "mozilla/ipc/UtilityProcessSandboxing.h"
#include "mozilla/Preferences.h"
#include "mozilla/RemoteDecoderManagerParent.h"

@ -33,6 +34,7 @@
#if defined(XP_WIN)
# include "mozilla/WinDllServices.h"
# include "mozilla/dom/WindowsUtilsChild.h"
# include "mozilla/widget/filedialog/WinFileDialogChild.h"
#endif

#include "nsDebugImpl.h"
@ -265,7 +267,7 @@ UtilityProcessChild::RecvStartUtilityAudioDecoderService(
MarkerOptions(MarkerTiming::IntervalUntilNowFrom(mChildStartTime)));
mUtilityAudioDecoderInstance = new UtilityAudioDecoderParent();
if (!mUtilityAudioDecoderInstance) {
return IPC_FAIL(this, "Failing to create UtilityAudioDecoderParent");
return IPC_FAIL(this, "Failed to create UtilityAudioDecoderParent");
}

mUtilityAudioDecoderInstance->Start(std::move(aEndpoint));
@ -279,7 +281,7 @@ mozilla::ipc::IPCResult UtilityProcessChild::RecvStartJSOracleService(
MarkerOptions(MarkerTiming::IntervalUntilNowFrom(mChildStartTime)));
mJSOracleInstance = new mozilla::dom::JSOracleChild();
if (!mJSOracleInstance) {
return IPC_FAIL(this, "Failing to create JSOracleParent");
return IPC_FAIL(this, "Failed to create JSOracleParent");
}

mJSOracleInstance->Start(std::move(aEndpoint));
@ -302,6 +304,29 @@ mozilla::ipc::IPCResult UtilityProcessChild::RecvStartWindowsUtilsService(
return IPC_OK();
}

mozilla::ipc::IPCResult UtilityProcessChild::RecvStartWinFileDialogService(
Endpoint<widget::filedialog::PWinFileDialogChild>&& aEndpoint) {
PROFILER_MARKER_UNTYPED(
"UtilityProcessChild::RecvStartWinFileDialogService", OTHER,
MarkerOptions(MarkerTiming::IntervalUntilNowFrom(mChildStartTime)));

MOZ_RELEASE_ASSERT(!mFileDialogInstance,
"attempted to double-start file dialog service");

auto instance = MakeRefPtr<widget::filedialog::WinFileDialogChild>();
if (!instance) {
return IPC_FAIL(this, "Failed to create WinFileDialogChild");
}

bool const ok = std::move(aEndpoint).Bind(instance.get());
if (!ok) {
return IPC_FAIL(this, "Failed to bind created WinFileDialogChild");
}

mFileDialogInstance = std::move(instance);
return IPC_OK();
}

mozilla::ipc::IPCResult UtilityProcessChild::RecvGetUntrustedModulesData(
GetUntrustedModulesDataResolver&& aResolver) {
RefPtr<DllServices> dllSvc(DllServices::Get());
@ -72,6 +72,9 @@ class UtilityProcessChild final : public PUtilityProcessChild {
mozilla::ipc::IPCResult RecvStartWindowsUtilsService(
Endpoint<PWindowsUtilsChild>&& aEndpoint);

mozilla::ipc::IPCResult RecvStartWinFileDialogService(
Endpoint<PWinFileDialogChild>&& aEndpoint);

mozilla::ipc::IPCResult RecvGetUntrustedModulesData(
GetUntrustedModulesDataResolver&& aResolver);
mozilla::ipc::IPCResult RecvUnblockUntrustedModulesThread();
@ -97,6 +100,7 @@ class UtilityProcessChild final : public PUtilityProcessChild {
RefPtr<dom::JSOracleChild> mJSOracleInstance{};
#ifdef XP_WIN
RefPtr<PWindowsUtilsChild> mWindowsUtilsInstance;
RefPtr<PWinFileDialogChild> mFileDialogInstance;
#endif

AsyncBlockers mShutdownBlockers;
@ -24,6 +24,7 @@

#ifdef XP_WIN
# include "mozilla/dom/WindowsUtilsParent.h"
# include "mozilla/widget/filedialog/WinFileDialogParent.h"
#endif

#include "mozilla/GeckoArgs.h"
@ -449,6 +450,45 @@ UtilityProcessManager::GetWindowsUtilsPromise() {

void UtilityProcessManager::ReleaseWindowsUtils() { mWindowsUtils = nullptr; }

RefPtr<UtilityProcessManager::WinFileDialogPromise>
UtilityProcessManager::CreateWinFileDialogAsync() {
using Promise = WinFileDialogPromise;
TimeStamp startTime = TimeStamp::Now();
auto wfdp = MakeRefPtr<widget::filedialog::WinFileDialogParent>();

return StartUtility(wfdp, SandboxingKind::WINDOWS_FILE_DIALOG)
->Then(
GetMainThreadSerialEventTarget(), __PRETTY_FUNCTION__,
[wfdp, startTime]() mutable {
LOGD("CreateWinFileDialogAsync() resolve: wfdp = [%p]", wfdp.get());
if (!wfdp->CanSend()) {
MOZ_ASSERT(false, "WinFileDialogParent can't send");
return Promise::CreateAndReject(NS_ERROR_FAILURE,
__PRETTY_FUNCTION__);
}
PROFILER_MARKER_TEXT(
"UtilityProcessManager::CreateWinFileDialogAsync", OTHER,
MarkerOptions(MarkerTiming::IntervalUntilNowFrom(startTime)),
"Resolve"_ns);

return Promise::CreateAndResolve(
widget::filedialog::ProcessProxy(std::move(wfdp)),
__PRETTY_FUNCTION__);
},
[self = RefPtr(this), startTime](nsresult error) {
LOGD("CreateWinFileDialogAsync() reject");
if (!self->IsShutdown()) {
MOZ_ASSERT_UNREACHABLE("failure when starting file-dialog actor");
}
PROFILER_MARKER_TEXT(
"UtilityProcessManager::CreateWinFileDialogAsync", OTHER,
MarkerOptions(MarkerTiming::IntervalUntilNowFrom(startTime)),
"Reject"_ns);

return Promise::CreateAndReject(error, __PRETTY_FUNCTION__);
});
}

#endif // XP_WIN

bool UtilityProcessManager::IsProcessLaunching(SandboxingKind aSandbox) {
@ -23,6 +23,10 @@ class JSOracleParent;
class WindowsUtilsParent;
} // namespace dom

namespace widget::filedialog {
class ProcessProxy;
} // namespace widget::filedialog

namespace ipc {

class UtilityProcessParent;
@ -34,12 +38,17 @@ class UtilityProcessManager final : public UtilityProcessHost::Listener {
friend class UtilityProcessParent;

public:
template <typename T>
using Promise = MozPromise<T, nsresult, true>;

using StartRemoteDecodingUtilityPromise =
MozPromise<Endpoint<PRemoteDecoderManagerChild>, nsresult, true>;
Promise<Endpoint<PRemoteDecoderManagerChild>>;
using JSOraclePromise = GenericNonExclusivePromise;

using WindowsUtilsPromise =
MozPromise<RefPtr<dom::WindowsUtilsParent>, nsresult, true>;
#ifdef XP_WIN
using WindowsUtilsPromise = Promise<RefPtr<dom::WindowsUtilsParent>>;
using WinFileDialogPromise = Promise<widget::filedialog::ProcessProxy>;
#endif

static RefPtr<UtilityProcessManager> GetSingleton();
@ -64,6 +73,10 @@ class UtilityProcessManager final : public UtilityProcessHost::Listener {
// Releases the WindowsUtils actor so that it can be destroyed.
// Subsequent attempts to use WindowsUtils will create a new process.
void ReleaseWindowsUtils();

// Get a new Windows file-dialog utility-process actor. These are never
// reused; this will always return a fresh actor.
RefPtr<WinFileDialogPromise> CreateWinFileDialogAsync();
#endif

void OnProcessUnexpectedShutdown(UtilityProcessHost* aHost);
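Aside (not part of the patch): since WinFileDialogPromise is just Promise<widget::filedialog::ProcessProxy>, i.e. an exclusive MozPromise, a caller of CreateWinFileDialogAsync() would consume it with the usual Then() pattern. The sketch below is illustrative only; RequestFileDialog() and both lambda bodies are invented, and only the manager API comes from the headers in this commit.

// Hypothetical caller-side sketch, assuming the in-tree UtilityProcessManager.h
// and nsThreadUtils.h. RequestFileDialog() and the lambda bodies are invented.
#include "mozilla/ipc/UtilityProcessManager.h"
#include "nsThreadUtils.h"

void RequestFileDialog() {
  using mozilla::ipc::UtilityProcessManager;
  RefPtr<UtilityProcessManager> manager = UtilityProcessManager::GetSingleton();
  manager->CreateWinFileDialogAsync()->Then(
      mozilla::GetMainThreadSerialEventTarget(), __func__,
      [](mozilla::widget::filedialog::ProcessProxy&& aProxy) {
        // aProxy wraps the freshly started WinFileDialogParent actor;
        // drive the remote file dialog through it here.
      },
      [](nsresult aError) {
        // Launching the utility process or binding the actor failed;
        // fall back to an in-process dialog or surface the error.
      });
}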
@ -27,6 +27,21 @@ std::vector<std::string> split(const std::string& str, char s) {
}

bool IsUtilitySandboxEnabled(const char* envVar, SandboxingKind aKind) {
#ifdef XP_WIN
// Sandboxing the Windows file dialog is probably not useful.
//
// (Additionally, it causes failures in our test environments: when running
// tests on windows-11-2009-qr machines, sandboxed child processes can't see
// or interact with any other process's windows -- which means they can't
// select a window from the parent process as the file dialog's parent. This
// occurs regardless of the sandbox preferences, which is why we disable
// sandboxing entirely rather than use a maximally permissive preference-set.
// This behavior has not been seen in user-facing environments.)
if (aKind == SandboxingKind::WINDOWS_FILE_DIALOG) {
return false;
}
#endif

if (envVar == nullptr) {
return true;
}
@ -29,6 +29,7 @@ enum SandboxingKind : uint64_t {
#endif
#ifdef XP_WIN
WINDOWS_UTILS,
WINDOWS_FILE_DIALOG,
#endif

COUNT,
@ -57,8 +57,15 @@ skip-if = [
"ccov",
]

["browser_utility_filepicker_crashed.js"]
run-if = ["os == 'win'"]
skip-if = [
"!crashreporter",
"ccov",
]

["browser_utility_geolocation_crashed.js"]
run-if = ["os == 'win'"] # Geolocation is remoted only on Windows 8+
run-if = ["os == 'win'"]
skip-if = [
"!crashreporter",
"ccov",
170
ipc/glue/test/browser/browser_utility_filepicker_crashed.js
Normal file
@ -0,0 +1,170 @@
|
|||
/* Any copyright is dedicated to the Public Domain.
|
||||
* http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
"use strict";
|
||||
|
||||
SimpleTest.requestCompleteLog();
|
||||
|
||||
// Wait until the child process with the given PID has indeed been terminated.
|
||||
//
|
||||
// Note that `checkUtilityExists`, and other functions deriving from the output
|
||||
// of `ChromeUtils.requestProcInfo()`, do not suffice for this purpose! It is an
|
||||
// attested failure mode that the file-dialog utility process has been removed
|
||||
// from the proc-info list, but is still live with the file-picker dialog still
|
||||
// displayed.
|
||||
function untilChildProcessDead(pid) {
|
||||
return utilityProcessTest().untilChildProcessDead(pid);
|
||||
}
|
||||
|
||||
async function fileDialogProcessExists() {
|
||||
return !!(await tryGetUtilityPid("windowsFileDialog"));
|
||||
}
|
||||
|
||||
// Poll for the creation of a file dialog process.
|
||||
function untilFileDialogProcessExists(options = { maxTime: 2000 }) {
|
||||
// milliseconds
|
||||
const maxTime = options.maxTime ?? 2000,
|
||||
pollTime = options.pollTime ?? 100;
|
||||
const count = maxTime / pollTime;
|
||||
|
||||
return TestUtils.waitForCondition(
|
||||
() => tryGetUtilityPid("windowsFileDialog", { quiet: true }),
|
||||
"waiting for file dialog process",
|
||||
pollTime, // interval
|
||||
count // maxTries
|
||||
);
|
||||
}
|
||||
|
||||
function openFileDialog() {
|
||||
const process = (async () => {
|
||||
await untilFileDialogProcessExists();
|
||||
let pid = await tryGetUtilityPid("windowsFileDialog");
|
||||
ok(pid, `pid should be acquired in openFileDialog::process (got ${pid})`);
|
||||
// HACK: Wait briefly for the file dialog to open.
|
||||
//
|
||||
// If this is not done, we may attempt to crash the process while it's in
|
||||
// the middle of creating and showing the file dialog window. There _should_
|
||||
// be no problem with this, but `::MiniDumpWriteDump()` occasionally fails
|
||||
// with mysterious errors (`ERROR_BAD_LENGTH`) if we crashed the process
|
||||
// while that was happening, yielding no minidump and therefore a failing
|
||||
// test.
|
||||
//
|
||||
// Use of an arbitrary timeout could presumably be avoided by setting a
|
||||
// window hook for the file dialog being shown and `await`ing on that.
|
||||
//
|
||||
// eslint-disable-next-line mozilla/no-arbitrary-setTimeout
|
||||
await new Promise(res => setTimeout(res, 500));
|
||||
return pid;
|
||||
})();
|
||||
|
||||
const file = new Promise((resolve, reject) => {
|
||||
info("Opening Windows file dialog");
|
||||
let fp = Cc["@mozilla.org/filepicker;1"].createInstance(Ci.nsIFilePicker);
|
||||
fp.init(window, "Test: browser_utility_filepicker_crashed.js", fp.modeOpen);
|
||||
fp.open(result => {
|
||||
ok(
|
||||
result == fp.returnCancel,
|
||||
"filepicker should resolve to cancellation"
|
||||
);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
return { process, file };
|
||||
}
|
||||
|
||||
add_setup(async function () {
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [
|
||||
// remote, no fallback
|
||||
["widget.windows.utility_process_file_picker", 2],
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
function makeTask(description, Describe, action) {
|
||||
let task = async function () {
|
||||
if (await fileDialogProcessExists()) {
|
||||
// If this test proceeds, it will probably cause whatever other test has a
|
||||
// file dialog open to fail.
|
||||
//
|
||||
// (We shouldn't be running two such tests in parallel on the same Fx
|
||||
// instance, but that's not obvious at this level.)
|
||||
ok(false, "another test has a file dialog open; aborting");
|
||||
return;
|
||||
}
|
||||
|
||||
const { process, file } = openFileDialog();
|
||||
const pid = await process;
|
||||
const untilDead = untilChildProcessDead(pid);
|
||||
|
||||
info(Describe + " the file-dialog utility process");
|
||||
await action();
|
||||
|
||||
// the file-picker's callback should have been promptly cancelled
|
||||
const _before = Date.now();
|
||||
await file;
|
||||
const _after = Date.now();
|
||||
const delta = _after - _before;
|
||||
info(`file callback resolved after ${description} after ${delta}ms`);
|
||||
|
||||
// depending on the test configuration, this may take some time while
|
||||
// cleanup occurs
|
||||
await untilDead;
|
||||
};
|
||||
|
||||
// give this task a legible name
|
||||
Object.defineProperty(task, "name", {
|
||||
value: "testFileDialogProcess-" + Describe.replace(" ", ""),
|
||||
});
|
||||
|
||||
return task;
|
||||
}
|
||||
|
||||
for (let [description, Describe, action] of [
|
||||
["crash", "Crash", () => crashSomeUtilityActor("windowsFileDialog")],
|
||||
[
|
||||
"being killed",
|
||||
"Kill",
|
||||
() => cleanUtilityProcessShutdown("windowsFileDialog", true),
|
||||
],
|
||||
// Unfortunately, a controlled shutdown doesn't actually terminate the utility
|
||||
// process; the file dialog remains open. (This is expected to be resolved with
|
||||
// bug 1837008.)
|
||||
/* [
|
||||
"shutdown",
|
||||
"Shut down",
|
||||
() => cleanUtilityProcessShutdown("windowsFileDialog"),
|
||||
] */
|
||||
]) {
|
||||
add_task(makeTask(description, Describe, action));
|
||||
add_task(testCleanup);
|
||||
}
|
||||
|
||||
async function testCleanup() {
|
||||
const killFileDialogProcess = async () => {
|
||||
if (await tryGetUtilityPid("windowsFileDialog", { quiet: true })) {
|
||||
await cleanUtilityProcessShutdown("windowsFileDialog", true);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
// If a test failure occurred, the file dialog process may or may not already
|
||||
// exist...
|
||||
if (await killFileDialogProcess()) {
|
||||
console.warn("File dialog process found and killed");
|
||||
return;
|
||||
}
|
||||
|
||||
// ... and if not, may or may not be pending creation.
|
||||
info("Active file dialog process not found; waiting...");
|
||||
try {
|
||||
await untilFileDialogProcessExists({ maxTime: 1000 });
|
||||
} catch (e) {
|
||||
info("File dialog process not found during cleanup (as expected)");
|
||||
return;
|
||||
}
|
||||
await killFileDialogProcess();
|
||||
console.warn("Delayed file dialog process found and killed");
|
||||
}
|
|
@ -21,7 +21,7 @@ async function startUtilityProcess(actors = []) {

// Returns an array of process infos for utility processes of the given type
// or all utility processes if actor is not defined.
async function getUtilityProcesses(actor = undefined) {
async function getUtilityProcesses(actor = undefined, options = {}) {
let procInfos = (await ChromeUtils.requestProcInfo()).children.filter(p => {
return (
p.type === "utility" &&
@ -30,19 +30,23 @@ async function getUtilityProcesses(actor = undefined) {
);
});

info(`Utility process infos = ${JSON.stringify(procInfos)}`);
if (!options?.quiet) {
info(`Utility process infos = ${JSON.stringify(procInfos)}`);
}
return procInfos;
}

async function getUtilityPid(actor) {
let process = await getUtilityProcesses(actor);
is(process.length, 1, `exactly one ${actor} process exists`);
return process[0].pid;
async function tryGetUtilityPid(actor, options = {}) {
let process = await getUtilityProcesses(actor, options);
if (!options?.quiet) {
ok(process.length <= 1, `at most one ${actor} process exists`);
}
return process[0]?.pid;
}

async function checkUtilityExists(actor) {
info(`Looking for a running ${actor} utility process`);
const utilityPid = await getUtilityPid(actor);
const utilityPid = await tryGetUtilityPid(actor);
ok(utilityPid > 0, `Found ${actor} utility process ${utilityPid}`);
return utilityPid;
}
@ -55,7 +59,7 @@ async function checkUtilityExists(actor) {
async function cleanUtilityProcessShutdown(actor, preferKill = false) {
info(`${preferKill ? "Kill" : "Clean shutdown"} Utility Process ${actor}`);

const utilityPid = await getUtilityPid(actor);
const utilityPid = await tryGetUtilityPid(actor);
ok(utilityPid !== undefined, `Must have PID for ${actor} utility process`);

const utilityProcessGone = TestUtils.topicObserved(
@ -5,12 +5,22 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#if defined(ENABLE_TESTS)
# include "mozilla/ipc/UtilityProcessTest.h"
# include "mozilla/ipc/UtilityProcessManager.h"
# include "mozilla/ipc/UtilityProcessTest.h"
# include "mozilla/dom/Promise.h"
# include "mozilla/ProcInfo.h"
# include "mozilla/IntentionalCrash.h"

# ifdef XP_WIN
# include <handleapi.h>
# include <processthreadsapi.h>
# include <tlhelp32.h>

# include "mozilla/WinHandleWatcher.h"
# include "nsISupports.h"
# include "nsWindowsHelpers.h"
# endif

namespace mozilla::ipc {

static UtilityActorName UtilityActorNameFromString(
@ -55,6 +65,51 @@ static SandboxingKind FindUtilityProcessWithActor(UtilityActorName aActorName) {
|
|||
return SandboxingKind::COUNT;
|
||||
}
|
||||
|
||||
# ifdef XP_WIN
|
||||
namespace {
|
||||
// Promise implementation for `UntilChildProcessDead`.
|
||||
//
|
||||
// Resolves the provided JS promise when the provided Windows HANDLE becomes
|
||||
// signaled.
|
||||
class WinHandlePromiseImpl final {
|
||||
public:
|
||||
NS_INLINE_DECL_REFCOUNTING(WinHandlePromiseImpl)
|
||||
|
||||
using HandlePtr = mozilla::UniqueFileHandle;
|
||||
|
||||
// Takes ownership of aHandle.
|
||||
static void Create(mozilla::UniqueFileHandle handle,
|
||||
RefPtr<mozilla::dom::Promise> promise) {
|
||||
MOZ_ASSERT(handle);
|
||||
MOZ_ASSERT(promise);
|
||||
|
||||
RefPtr obj{new WinHandlePromiseImpl(std::move(handle), std::move(promise))};
|
||||
|
||||
// WARNING: This creates an owning-reference cycle: (self -> HandleWatcher
|
||||
// -> Runnable -> self). `obj` will therefore only be destroyed when and
|
||||
// if the HANDLE is signaled.
|
||||
obj->watcher.Watch(obj->handle.get(), GetCurrentSerialEventTarget(),
|
||||
NewRunnableMethod("WinHandlePromiseImpl::Resolve", obj,
|
||||
&WinHandlePromiseImpl::Resolve));
|
||||
}
|
||||
|
||||
private:
|
||||
WinHandlePromiseImpl(mozilla::UniqueFileHandle handle,
|
||||
RefPtr<mozilla::dom::Promise> promise)
|
||||
: handle(std::move(handle)), promise(std::move(promise)) {}
|
||||
|
||||
~WinHandlePromiseImpl() { watcher.Stop(); }
|
||||
|
||||
void Resolve() { promise->MaybeResolveWithUndefined(); }
|
||||
|
||||
mozilla::UniqueFileHandle handle;
|
||||
HandleWatcher watcher;
|
||||
RefPtr<mozilla::dom::Promise> promise;
|
||||
};
|
||||
|
||||
} // namespace
|
||||
# endif
|
||||
|
||||
NS_IMETHODIMP
|
||||
UtilityProcessTest::StartProcess(const nsTArray<nsCString>& aActorsToRegister,
|
||||
JSContext* aCx,
|
||||
|
@ -112,6 +167,67 @@ UtilityProcessTest::NoteIntentionalCrash(uint32_t aPid) {
|
|||
return NS_OK;
|
||||
}
|
||||
|
||||
NS_IMETHODIMP
|
||||
UtilityProcessTest::UntilChildProcessDead(
|
||||
uint32_t pid, JSContext* cx, ::mozilla::dom::Promise** aOutPromise) {
|
||||
NS_ENSURE_ARG(aOutPromise);
|
||||
*aOutPromise = nullptr;
|
||||
|
||||
# ifdef XP_WIN
|
||||
if (pid == 0) {
|
||||
return NS_ERROR_INVALID_ARG;
|
||||
}
|
||||
|
||||
nsIGlobalObject* global = xpc::CurrentNativeGlobal(cx);
|
||||
if (NS_WARN_IF(!global)) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
ErrorResult erv;
|
||||
RefPtr<dom::Promise> promise = dom::Promise::Create(global, erv);
|
||||
if (NS_WARN_IF(erv.Failed())) {
|
||||
return erv.StealNSResult();
|
||||
}
|
||||
|
||||
// Get a fresh handle to the child process with the specified PID.
|
||||
mozilla::UniqueFileHandle handle;
|
||||
{
|
||||
bool failed = false;
|
||||
GeckoChildProcessHost::GetAll([&](GeckoChildProcessHost* aProc) {
|
||||
if (handle || failed) {
|
||||
return;
|
||||
}
|
||||
if (aProc->GetChildProcessId() != pid) {
|
||||
return;
|
||||
}
|
||||
|
||||
HANDLE handle_ = nullptr;
|
||||
if (!::DuplicateHandle(
|
||||
::GetCurrentProcess(), aProc->GetChildProcessHandle(),
|
||||
::GetCurrentProcess(), &handle_, SYNCHRONIZE, FALSE, 0)) {
|
||||
failed = true;
|
||||
} else {
|
||||
handle.reset(handle_);
|
||||
}
|
||||
});
|
||||
|
||||
if (failed || !handle) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
}
|
||||
|
||||
// Create and attach the resolver for the promise, giving the handle over to
|
||||
// it.
|
||||
WinHandlePromiseImpl::Create(std::move(handle), promise);
|
||||
|
||||
promise.forget(aOutPromise);
|
||||
|
||||
return NS_OK;
|
||||
# else // !defined(XP_WIN)
|
||||
return NS_ERROR_NOT_IMPLEMENTED;
|
||||
# endif
|
||||
}
|
||||
|
||||
NS_IMETHODIMP
|
||||
UtilityProcessTest::StopProcess(const char* aActorName) {
|
||||
using namespace mozilla::dom;
|
||||
|
|
|
@ -11,7 +11,7 @@ interface nsIUtilityProcessTest : nsISupports
/**
* ** Test-only Method **
*
* Allowing to start Utility Process from JS code.
* Start a generic utility process from JS code.
*
* actorsToAdd: An array of actor names, taken from WebIDLUtilityActorName.
* Unlike normal utility processes, test processes launched this way do not
@ -20,6 +20,18 @@ interface nsIUtilityProcessTest : nsISupports
[implicit_jscontext]
Promise startProcess([optional] in Array<ACString> actorsToAdd);

/**
* ** Test-only Method **
*
* Report when a child process is actually dead (as opposed to merely having
* been removed from our internal list of child processes). Must be called
* while the process is still live.
*
* Only implemented on Windows.
*/
[implicit_jscontext]
Promise untilChildProcessDead(in uint32_t pid);

/**
* ** Test-only Method **
*
@ -32,7 +32,8 @@
#include "mozilla/FloatingPoint.h" // mozilla::{IsFinite,}, mozilla::UnspecifiedNaN
#include "mozilla/MathAlgorithms.h" // mozilla::Abs

#include "js/Conversions.h" // JS::ToInteger
#include "js/Conversions.h" // JS::ToInteger
#include "js/RealmOptions.h" // JS::RTPCallerTypeToken
#include "js/TypeDecls.h"
#include "js/Value.h" // JS::CanonicalizeNaN, JS::DoubleValue, JS::Value
@ -187,15 +188,26 @@ JS_PUBLIC_API double DayFromYear(double year);
JS_PUBLIC_API double DayWithinYear(double time, double year);

// The callback will be a wrapper function that accepts a double (the time
// to clamp and jitter). Inside the JS Engine, other parameters that may be
// needed are all constant, so they are handled inside the wrapper function
using ReduceMicrosecondTimePrecisionCallback = double (*)(double, JSContext*);
// to clamp and jitter) and a JS::RTPCallerTypeToken (a wrapper for
// mozilla::RTPCallerType) that can be used to decide the proper clamping
// behavior to use. Inside the JS Engine, other parameters that may be needed
// are all constant, so they are handled inside the wrapper function
using ReduceMicrosecondTimePrecisionCallback =
double (*)(double, JS::RTPCallerTypeToken, JSContext*);

// Set a callback into the toolkit/components/resistfingerprinting function that
// will centralize time resolution and jitter into one place.
// Defining such a callback requires all Realms that are created afterwards
// to have a set JS::RTPCallerTypeToken, via RealmBehaviors or
// JS::SetRealmReduceTimerPrecisionCallerType.
JS_PUBLIC_API void SetReduceMicrosecondTimePrecisionCallback(
ReduceMicrosecondTimePrecisionCallback callback);

// Get the previously set ReduceMicrosecondTimePrecisionCallback callback or
// nullptr.
JS_PUBLIC_API ReduceMicrosecondTimePrecisionCallback
GetReduceMicrosecondTimePrecisionCallback();

// Sets the time resolution for fingerprinting protection, and whether jitter
// should occur. If resolution is set to zero, then no rounding or jitter will
// occur. This is used if the callback above is not specified.
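To make the new signature concrete, here is a rough, hypothetical embedder-side callback (not Firefox's actual resistfingerprinting implementation). It shows only the shape required by ReduceMicrosecondTimePrecisionCallback; the token-to-resolution mapping and ResolutionForCaller() are invented.

// Illustrative sketch only. The mapping below is invented; in Gecko the real
// logic lives in toolkit/components/resistfingerprinting.
#include <cmath>
#include "js/Date.h"          // JS::SetReduceMicrosecondTimePrecisionCallback
#include "js/RealmOptions.h"  // JS::RTPCallerTypeToken

static double ResolutionForCaller(JS::RTPCallerTypeToken aToken) {
  // Hypothetical mapping from the opaque caller-type token to a clamping
  // resolution (same unit as the incoming time value).
  return aToken.value == 0 ? 100.0 : 20.0;
}

static double ClampTimeValue(double aTime, JS::RTPCallerTypeToken aToken,
                             JSContext* /* aCx */) {
  const double resolution = ResolutionForCaller(aToken);
  return std::floor(aTime / resolution) * resolution;  // clamp only, no jitter
}

// Registered once, before creating Realms that carry an RTPCallerTypeToken:
//   JS::SetReduceMicrosecondTimePrecisionCallback(ClampTimeValue);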
@ -14,6 +14,7 @@
#define js_RealmOptions_h

#include "mozilla/Assertions.h" // MOZ_ASSERT
#include "mozilla/Maybe.h"

#include "jstypes.h" // JS_PUBLIC_API
@ -320,6 +321,12 @@ class JS_PUBLIC_API RealmCreationOptions {
bool alwaysUseFdlibm_ = false;
};

// This is a wrapper for mozilla::RTPCallerType, that can't easily
// be exposed to the JS engine for layering reasons.
struct RTPCallerTypeToken {
uint8_t value;
};

/**
* RealmBehaviors specifies behaviors of a realm that can be changed after the
* realm's been created.
@ -328,6 +335,17 @@ class JS_PUBLIC_API RealmBehaviors {
public:
RealmBehaviors() = default;

// When a JS::ReduceMicrosecondTimePrecisionCallback callback is defined via
// JS::SetReduceMicrosecondTimePrecisionCallback, a JS::RTPCallerTypeToken (a
// wrapper for mozilla::RTPCallerType) needs to be set for every Realm.
mozilla::Maybe<RTPCallerTypeToken> reduceTimerPrecisionCallerType() const {
return rtpCallerType;
}
RealmBehaviors& setReduceTimerPrecisionCallerType(RTPCallerTypeToken type) {
rtpCallerType = mozilla::Some(type);
return *this;
}

// For certain globals, we know enough about the code that will run in them
// that we can discard script source entirely.
bool discardSource() const { return discardSource_; }
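A minimal sketch of what this requirement looks like from the embedder side (illustrative only: the helper functions and token values are invented; only the RealmOptions/RealmBehaviors API and SetRealmReduceTimerPrecisionCallerType come from this header):

// Hypothetical sketch: attach a caller-type token at realm creation, or update
// it later on an existing realm. Token values here are invented.
#include "js/RealmOptions.h"
#include "js/Realm.h"  // JS::Realm (assumed include)

static JS::RealmOptions OptionsWithCallerType() {
  JS::RealmOptions options;
  options.behaviors().setReduceTimerPrecisionCallerType(
      JS::RTPCallerTypeToken{1});  // invented token value
  return options;
}

static void UpdateCallerType(JS::Realm* aRealm) {
  // For realms that already exist when the caller type becomes known.
  JS::SetRealmReduceTimerPrecisionCallerType(aRealm, JS::RTPCallerTypeToken{2});
}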
@ -342,29 +360,6 @@ class JS_PUBLIC_API RealmBehaviors {
return *this;
}

class Override {
public:
Override() : mode_(Default) {}

bool get(bool defaultValue) const {
if (mode_ == Default) {
return defaultValue;
}
return mode_ == ForceTrue;
}

void set(bool overrideValue) {
mode_ = overrideValue ? ForceTrue : ForceFalse;
}

void reset() { mode_ = Default; }

private:
enum Mode { Default, ForceTrue, ForceFalse };

Mode mode_;
};

// A Realm can stop being "live" in all the ways that matter before its global
// is actually GCed. Consumers that tear down parts of a Realm or its global
// before that point should set isNonLive accordingly.
@ -375,6 +370,7 @@ class JS_PUBLIC_API RealmBehaviors {
}

private:
mozilla::Maybe<RTPCallerTypeToken> rtpCallerType;
bool discardSource_ = false;
bool clampAndJitterTime_ = true;
bool isNonLive_ = false;
@ -423,6 +419,11 @@ extern JS_PUBLIC_API const RealmBehaviors& RealmBehaviorsRef(JSContext* cx);

extern JS_PUBLIC_API void SetRealmNonLive(Realm* realm);

// This behaves like RealmBehaviors::setReduceTimerPrecisionCallerType, but
// can be used even after the Realm has already been created.
extern JS_PUBLIC_API void SetRealmReduceTimerPrecisionCallerType(
Realm* realm, RTPCallerTypeToken type);

} // namespace JS

#endif // js_RealmOptions_h
@ -42,6 +42,12 @@ JS_PUBLIC_API void DestroyFrontendContext(JS::FrontendContext* fc);
JS_PUBLIC_API void SetNativeStackQuota(JS::FrontendContext* fc,
JS::NativeStackSize stackSize);

// Return the stack quota that can be passed to SetNativeStackQuota, for given
// stack size.
// This subtracts a margin from given stack size, to make sure the stack quota
// check performed internally is sufficient.
JS_PUBLIC_API JS::NativeStackSize ThreadStackQuotaForSize(size_t stackSize);

// Returns true if there was any error reported to given FrontendContext.
JS_PUBLIC_API bool HadFrontendErrors(JS::FrontendContext* fc);
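Illustrative usage of the new helper (a sketch under assumptions: NewFrontendContext/DestroyFrontendContext are the surrounding FrontendContext API in this header, and the 512 KiB stack size is an invented example value, not a recommendation):

// Hypothetical sketch: size a FrontendContext's stack quota from the actual
// thread stack size. The include path and the 512 KiB figure are assumptions.
#include "js/experimental/CompileScript.h"

static JS::FrontendContext* NewContextForStack(size_t aThreadStackSize) {
  JS::FrontendContext* fc = JS::NewFrontendContext();
  if (!fc) {
    return nullptr;
  }
  // ThreadStackQuotaForSize() already subtracts a safety margin, so the
  // internal over-recursion checks fire before the real stack runs out.
  JS::SetNativeStackQuota(fc, JS::ThreadStackQuotaForSize(aThreadStackSize));
  return fc;
}

// e.g. NewContextForStack(512 * 1024), paired with JS::DestroyFrontendContext(fc).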