author     wolfbeast <mcwerewolf@gmail.com>  2018-07-18 08:24:24 +0200
committer  wolfbeast <mcwerewolf@gmail.com>  2018-07-18 08:24:24 +0200
commit     fc61780b35af913801d72086456f493f63197da6 (patch)
tree       f85891288a7bd988da9f0f15ae64e5c63f00d493 /js/src
parent     69f7f9e5f1475891ce11cc4f431692f965b0cd30 (diff)
parent     50d3e596bbe89c95615f96eb71f6bc5be737a1db (diff)
download   uxp-fc61780b35af913801d72086456f493f63197da6.tar.gz
Merge commit '50d3e596bbe89c95615f96eb71f6bc5be737a1db' into Basilisk-release (tag: v2018.07.18)

# Conflicts:
#   browser/app/profile/firefox.js
#   browser/components/preferences/jar.mn
Diffstat (limited to 'js/src')
-rw-r--r-- | js/src/builtin/Array.js | 13
-rw-r--r-- | js/src/builtin/IntlTimeZoneData.h | 2
-rw-r--r-- | js/src/builtin/Map.js | 6
-rw-r--r-- | js/src/builtin/Set.js | 6
-rw-r--r-- | js/src/builtin/String.js | 9
-rw-r--r-- | js/src/builtin/TestingFunctions.cpp | 30
-rw-r--r-- | js/src/gc/GCRuntime.h | 18
-rw-r--r-- | js/src/gc/Nursery.cpp | 8
-rw-r--r-- | js/src/gc/Statistics.h | 3
-rw-r--r-- | js/src/jit/BaselineBailouts.cpp | 56
-rw-r--r-- | js/src/jit/CodeGenerator.cpp | 26
-rw-r--r-- | js/src/jit/CodeGenerator.h | 1
-rw-r--r-- | js/src/jit/InlinableNatives.h | 10
-rw-r--r-- | js/src/jit/IonBuilder.h | 1
-rw-r--r-- | js/src/jit/JitFrameIterator.h | 4
-rw-r--r-- | js/src/jit/JitFrames.cpp | 4
-rw-r--r-- | js/src/jit/Lowering.cpp | 10
-rw-r--r-- | js/src/jit/Lowering.h | 1
-rw-r--r-- | js/src/jit/MCallOptimize.cpp | 51
-rw-r--r-- | js/src/jit/MIR.h | 42
-rw-r--r-- | js/src/jit/MOpcodes.h | 1
-rw-r--r-- | js/src/jit/shared/LIR-shared.h | 23
-rw-r--r-- | js/src/jit/shared/LOpcodes-shared.h | 1
-rw-r--r-- | js/src/jsgc.cpp | 27
-rw-r--r-- | js/src/jsgc.h | 51
-rw-r--r-- | js/src/jsnativestack.cpp | 68
-rw-r--r-- | js/src/jsstr.cpp | 10
-rw-r--r-- | js/src/tests/Intl/DateTimeFormat/timeZone_backward_links.js | 2
-rw-r--r-- | js/src/tests/Intl/DateTimeFormat/timeZone_backzone.js | 2
-rw-r--r-- | js/src/tests/Intl/DateTimeFormat/timeZone_backzone_links.js | 2
-rw-r--r-- | js/src/tests/Intl/DateTimeFormat/timeZone_notbackward_links.js | 2
-rw-r--r-- | js/src/tests/js1_8_5/extensions/clone-errors.js | 1
-rw-r--r-- | js/src/tests/js1_8_5/extensions/clone-transferables.js | 12
-rw-r--r-- | js/src/vm/EnvironmentObject.h | 3
-rw-r--r-- | js/src/vm/HelperThreads.cpp | 4
-rw-r--r-- | js/src/vm/SelfHosting.cpp | 45
-rw-r--r-- | js/src/vm/StructuredClone.cpp | 389

37 files changed, 591 insertions(+), 353 deletions(-)
diff --git a/js/src/builtin/Array.js b/js/src/builtin/Array.js
index 360dd2af17..30e6fb35fb 100644
--- a/js/src/builtin/Array.js
+++ b/js/src/builtin/Array.js
@@ -711,13 +711,14 @@ function CreateArrayIterator(obj, kind) {
// http://www.ecma-international.org/ecma-262/6.0/index.html#sec-%arrayiteratorprototype%.next
function ArrayIteratorNext() {
// Step 1-3.
- if (!IsObject(this) || !IsArrayIterator(this)) {
+ var obj;
+ if (!IsObject(this) || (obj = GuardToArrayIterator(this)) === null) {
return callFunction(CallArrayIteratorMethodIfWrapped, this,
"ArrayIteratorNext");
}
// Step 4.
- var a = UnsafeGetReservedSlot(this, ITERATOR_SLOT_TARGET);
+ var a = UnsafeGetReservedSlot(obj, ITERATOR_SLOT_TARGET);
var result = { value: undefined, done: false };
// Step 5.
@@ -728,10 +729,10 @@ function ArrayIteratorNext() {
// Step 6.
// The index might not be an integer, so we have to do a generic get here.
- var index = UnsafeGetReservedSlot(this, ITERATOR_SLOT_NEXT_INDEX);
+ var index = UnsafeGetReservedSlot(obj, ITERATOR_SLOT_NEXT_INDEX);
// Step 7.
- var itemKind = UnsafeGetInt32FromReservedSlot(this, ITERATOR_SLOT_ITEM_KIND);
+ var itemKind = UnsafeGetInt32FromReservedSlot(obj, ITERATOR_SLOT_ITEM_KIND);
// Step 8-9.
var len = IsPossiblyWrappedTypedArray(a)
@@ -740,13 +741,13 @@ function ArrayIteratorNext() {
// Step 10.
if (index >= len) {
- UnsafeSetReservedSlot(this, ITERATOR_SLOT_TARGET, null);
+ UnsafeSetReservedSlot(obj, ITERATOR_SLOT_TARGET, null);
result.done = true;
return result;
}
// Step 11.
- UnsafeSetReservedSlot(this, ITERATOR_SLOT_NEXT_INDEX, index + 1);
+ UnsafeSetReservedSlot(obj, ITERATOR_SLOT_NEXT_INDEX, index + 1);
// Step 16.
if (itemKind === ITEM_KIND_VALUE) {
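
The Array.js hunk above replaces the boolean IsArrayIterator() check with GuardToArrayIterator(), which returns the guarded iterator object or null, so every reserved-slot access afterwards goes through the guarded object rather than the raw |this|. The same pattern is applied to the Map, Set, and String iterators further down. Below is a minimal sketch of that guard idiom in ordinary JavaScript; the class and helper names are illustrative stand-ins, since the real code relies on self-hosting intrinsics that plain scripts cannot call.

    // Illustrative only: FakeArrayIterator stands in for the engine-internal
    // ArrayIteratorObject, and the guard mirrors GuardToArrayIterator's contract.
    class FakeArrayIterator {
      constructor(target) { this.target = target; this.nextIndex = 0; }
    }

    function GuardToFakeArrayIterator(value) {
      // Return the object itself when it is the expected kind, otherwise null.
      return value instanceof FakeArrayIterator ? value : null;
    }

    function FakeArrayIteratorNext() {
      var obj;
      if (typeof this !== "object" || this === null ||
          (obj = GuardToFakeArrayIterator(this)) === null) {
        // The real code forwards to a wrapper-aware fallback here.
        throw new TypeError("not an array iterator");
      }
      // Past the guard, all state lives on |obj|, not on the raw |this|.
      if (obj.nextIndex >= obj.target.length)
        return { value: undefined, done: true };
      return { value: obj.target[obj.nextIndex++], done: false };
    }
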
diff --git a/js/src/builtin/IntlTimeZoneData.h b/js/src/builtin/IntlTimeZoneData.h
index 3d5c1b0d5b..fa808c0b94 100644
--- a/js/src/builtin/IntlTimeZoneData.h
+++ b/js/src/builtin/IntlTimeZoneData.h
@@ -1,5 +1,5 @@
// Generated by make_intl_data.py. DO NOT EDIT.
-// tzdata version = 2018d
+// tzdata version = 2018e
#ifndef builtin_IntlTimeZoneData_h
#define builtin_IntlTimeZoneData_h
diff --git a/js/src/builtin/Map.js b/js/src/builtin/Map.js
index 580629a132..434cd6529b 100644
--- a/js/src/builtin/Map.js
+++ b/js/src/builtin/Map.js
@@ -62,8 +62,8 @@ function MapIteratorNext() {
var O = this;
// Steps 2-3.
- if (!IsObject(O) || !IsMapIterator(O))
- return callFunction(CallMapIteratorMethodIfWrapped, O, "MapIteratorNext");
+ if (!IsObject(O) || (O = GuardToMapIterator(O)) === null)
+ return callFunction(CallMapIteratorMethodIfWrapped, this, "MapIteratorNext");
// Steps 4-5 (implemented in _GetNextMapEntryForIterator).
// Steps 8-9 (omitted).
@@ -82,7 +82,7 @@ function MapIteratorNext() {
// Steps 10.b-c (omitted).
// Step 6.
- var itemKind = UnsafeGetInt32FromReservedSlot(this, ITERATOR_SLOT_ITEM_KIND);
+ var itemKind = UnsafeGetInt32FromReservedSlot(O, ITERATOR_SLOT_ITEM_KIND);
var result;
if (itemKind === ITEM_KIND_KEY) {
diff --git a/js/src/builtin/Set.js b/js/src/builtin/Set.js
index 9af6cf8d1a..e2571e66a8 100644
--- a/js/src/builtin/Set.js
+++ b/js/src/builtin/Set.js
@@ -64,8 +64,8 @@ function SetIteratorNext() {
var O = this;
// Steps 2-3.
- if (!IsObject(O) || !IsSetIterator(O))
- return callFunction(CallSetIteratorMethodIfWrapped, O, "SetIteratorNext");
+ if (!IsObject(O) || (O = GuardToSetIterator(O)) === null)
+ return callFunction(CallSetIteratorMethodIfWrapped, this, "SetIteratorNext");
// Steps 4-5 (implemented in _GetNextSetEntryForIterator).
// Steps 8-9 (omitted).
@@ -83,7 +83,7 @@ function SetIteratorNext() {
// Steps 10.b-c (omitted).
// Step 6.
- var itemKind = UnsafeGetInt32FromReservedSlot(this, ITERATOR_SLOT_ITEM_KIND);
+ var itemKind = UnsafeGetInt32FromReservedSlot(O, ITERATOR_SLOT_ITEM_KIND);
var result;
if (itemKind === ITEM_KIND_VALUE) {
diff --git a/js/src/builtin/String.js b/js/src/builtin/String.js
index 6d1d335a05..e5b2ad5525 100644
--- a/js/src/builtin/String.js
+++ b/js/src/builtin/String.js
@@ -529,16 +529,17 @@ function String_iterator() {
}
function StringIteratorNext() {
- if (!IsObject(this) || !IsStringIterator(this)) {
+ var obj;
+ if (!IsObject(this) || (obj = GuardToStringIterator(this)) === null) {
return callFunction(CallStringIteratorMethodIfWrapped, this,
"StringIteratorNext");
}
- var S = UnsafeGetStringFromReservedSlot(this, ITERATOR_SLOT_TARGET);
+ var S = UnsafeGetStringFromReservedSlot(obj, ITERATOR_SLOT_TARGET);
// We know that JSString::MAX_LENGTH <= INT32_MAX (and assert this in
// SelfHosting.cpp) so our current index can never be anything other than
// an Int32Value.
- var index = UnsafeGetInt32FromReservedSlot(this, ITERATOR_SLOT_NEXT_INDEX);
+ var index = UnsafeGetInt32FromReservedSlot(obj, ITERATOR_SLOT_NEXT_INDEX);
var size = S.length;
var result = { value: undefined, done: false };
@@ -556,7 +557,7 @@ function StringIteratorNext() {
}
}
- UnsafeSetReservedSlot(this, ITERATOR_SLOT_NEXT_INDEX, index + charCount);
+ UnsafeSetReservedSlot(obj, ITERATOR_SLOT_NEXT_INDEX, index + charCount);
result.value = callFunction(String_substring, S, index, index + charCount);
return result;
diff --git a/js/src/builtin/TestingFunctions.cpp b/js/src/builtin/TestingFunctions.cpp
index 00637a7a5d..373b6c9edc 100644
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -2088,7 +2088,7 @@ class CloneBufferObject : public NativeObject {
Rooted<CloneBufferObject*> obj(cx, Create(cx));
if (!obj)
return nullptr;
- auto data = js::MakeUnique<JSStructuredCloneData>();
+ auto data = js::MakeUnique<JSStructuredCloneData>(buffer->scope());
if (!data) {
ReportOutOfMemory(cx);
return nullptr;
@@ -2141,8 +2141,11 @@ class CloneBufferObject : public NativeObject {
return false;
size_t nbytes = JS_GetStringLength(args[0].toString());
MOZ_ASSERT(nbytes % sizeof(uint64_t) == 0);
- auto buf = js::MakeUnique<JSStructuredCloneData>(nbytes, nbytes, nbytes);
- js_memcpy(buf->Start(), str, nbytes);
+ auto buf = js::MakeUnique<JSStructuredCloneData>(JS::StructuredCloneScope::DifferentProcess);
+ if (!buf->AppendBytes(str, nbytes)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
JS_free(cx, str);
obj->setData(buf.release());
@@ -2186,7 +2189,7 @@ class CloneBufferObject : public NativeObject {
ReportOutOfMemory(cx);
return false;
}
- auto iter = obj->data()->Iter();
+ auto iter = obj->data()->Start();
obj->data()->ReadBytes(iter, buffer.get(), size);
JSString* str = JS_NewStringCopyN(cx, buffer.get(), size);
if (!str)
@@ -2244,6 +2247,8 @@ ParseCloneScope(JSContext* cx, HandleString str)
scope.emplace(JS::StructuredCloneScope::SameProcessDifferentThread);
else if (strcmp(scopeStr.ptr(), "DifferentProcess") == 0)
scope.emplace(JS::StructuredCloneScope::DifferentProcess);
+ else if (strcmp(scopeStr.ptr(), "DifferentProcessForIndexedDB") == 0)
+ scope.emplace(JS::StructuredCloneScope::DifferentProcessForIndexedDB);
return scope;
}
@@ -4370,19 +4375,22 @@ JS_FN_HELP("rejectPromise", RejectPromise, 2, 0,
" clone buffer object. 'policy' may be an options hash. Valid keys:\n"
" 'SharedArrayBuffer' - either 'allow' (the default) or 'deny'\n"
" to specify whether SharedArrayBuffers may be serialized.\n"
-"\n"
-" 'scope' - SameProcessSameThread, SameProcessDifferentThread, or\n"
-" DifferentProcess. Determines how some values will be serialized.\n"
-" Clone buffers may only be deserialized with a compatible scope."),
+" 'scope' - SameProcessSameThread, SameProcessDifferentThread,\n"
+" DifferentProcess, or DifferentProcessForIndexedDB. Determines how some\n"
+" values will be serialized. Clone buffers may only be deserialized with a\n"
+" compatible scope. NOTE - For DifferentProcess/DifferentProcessForIndexedDB,\n"
+" must also set SharedArrayBuffer:'deny' if data contains any shared memory\n"
+" object."),
JS_FN_HELP("deserialize", Deserialize, 1, 0,
"deserialize(clonebuffer[, opts])",
" Deserialize data generated by serialize. 'opts' is an options hash with one\n"
" recognized key 'scope', which limits the clone buffers that are considered\n"
" valid. Allowed values: 'SameProcessSameThread', 'SameProcessDifferentThread',\n"
-" and 'DifferentProcess'. So for example, a DifferentProcess clone buffer\n"
-" may be deserialized in any scope, but a SameProcessSameThread clone buffer\n"
-" cannot be deserialized in a DifferentProcess scope."),
+" 'DifferentProcess', and 'DifferentProcessForIndexedDB'. So for example, a\n"
+" DifferentProcessForIndexedDB clone buffer may be deserialized in any scope, but\n"
+" a SameProcessSameThread clone buffer cannot be deserialized in a\n"
+" DifferentProcess scope."),
JS_FN_HELP("detachArrayBuffer", DetachArrayBuffer, 1, 0,
"detachArrayBuffer(buffer)",
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 19737c9ee5..5c2576efde 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -73,7 +73,7 @@ class ChunkPool
// Performs extra allocation off the main thread so that when memory is
// required on the main thread it will already be available and waiting.
-class BackgroundAllocTask : public GCParallelTask
+class BackgroundAllocTask : public GCParallelTaskHelper<BackgroundAllocTask>
{
// Guarded by the GC lock.
JSRuntime* runtime;
@@ -85,12 +85,11 @@ class BackgroundAllocTask : public GCParallelTask
BackgroundAllocTask(JSRuntime* rt, ChunkPool& pool);
bool enabled() const { return enabled_; }
- protected:
- void run() override;
+ void run();
};
-// Search the provided Chunks for free arenas and decommit them.
-class BackgroundDecommitTask : public GCParallelTask
+// Search the provided Chunks for free arenas and decommit them.
+class BackgroundDecommitTask : public GCParallelTaskHelper<BackgroundDecommitTask>
{
public:
using ChunkVector = mozilla::Vector<Chunk*>;
@@ -98,8 +97,7 @@ class BackgroundDecommitTask : public GCParallelTask
explicit BackgroundDecommitTask(JSRuntime *rt) : runtime(rt) {}
void setChunksToScan(ChunkVector &chunks);
- protected:
- void run() override;
+ void run();
private:
JSRuntime* runtime;
@@ -1171,8 +1169,10 @@ class GCRuntime
/*
* Concurrent sweep infrastructure.
*/
- void startTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked);
- void joinTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked);
+ void startTask(GCParallelTask& task, gcstats::Phase phase,
+ AutoLockHelperThreadState& locked);
+ void joinTask(GCParallelTask& task, gcstats::Phase phase,
+ AutoLockHelperThreadState& locked);
/*
* List head of arenas allocated during the sweep phase.
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp
index aa50bf29e5..55ca5a059d 100644
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -43,19 +43,19 @@ using mozilla::PodZero;
static const uintptr_t CanaryMagicValue = 0xDEADB15D;
-struct js::Nursery::FreeMallocedBuffersTask : public GCParallelTask
+struct js::Nursery::FreeMallocedBuffersTask : public GCParallelTaskHelper<FreeMallocedBuffersTask>
{
explicit FreeMallocedBuffersTask(FreeOp* fop) : fop_(fop) {}
bool init() { return buffers_.init(); }
void transferBuffersToFree(MallocedBuffersSet& buffersToFree,
const AutoLockHelperThreadState& lock);
- ~FreeMallocedBuffersTask() override { join(); }
+ ~FreeMallocedBuffersTask() { join(); }
+
+ void run();
private:
FreeOp* fop_;
MallocedBuffersSet buffers_;
-
- virtual void run() override;
};
struct js::Nursery::SweepAction
diff --git a/js/src/gc/Statistics.h b/js/src/gc/Statistics.h
index c9e5871e3b..ca1969b2ce 100644
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -22,9 +22,6 @@
using mozilla::Maybe;
namespace js {
-
-class GCParallelTask;
-
namespace gcstats {
enum Phase : uint8_t {
diff --git a/js/src/jit/BaselineBailouts.cpp b/js/src/jit/BaselineBailouts.cpp
index 3ab722b3d6..ad2757ae16 100644
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -419,41 +419,6 @@ struct BaselineStackBuilder
}
};
-// Ensure that all value locations are readable from the SnapshotIterator.
-// Remove RInstructionResults from the JitActivation if the frame got recovered
-// ahead of the bailout.
-class SnapshotIteratorForBailout : public SnapshotIterator
-{
- JitActivation* activation_;
- JitFrameIterator& iter_;
-
- public:
- SnapshotIteratorForBailout(JitActivation* activation, JitFrameIterator& iter)
- : SnapshotIterator(iter, activation->bailoutData()->machineState()),
- activation_(activation),
- iter_(iter)
- {
- MOZ_ASSERT(iter.isBailoutJS());
- }
-
- ~SnapshotIteratorForBailout() {
- // The bailout is complete, we no longer need the recover instruction
- // results.
- activation_->removeIonFrameRecovery(fp_);
- }
-
- // Take previously computed result out of the activation, or compute the
- // results of all recover instructions contained in the snapshot.
- MOZ_MUST_USE bool init(JSContext* cx) {
-
- // Under a bailout, there is no need to invalidate the frame after
- // evaluating the recover instruction, as the invalidation is only
- // needed to cause of the frame which has been introspected.
- MaybeReadFallback recoverBailout(cx, activation_, &iter_, MaybeReadFallback::Fallback_DoNothing);
- return initInstructionResults(recoverBailout);
- }
-};
-
#ifdef DEBUG
static inline bool
IsInlinableFallback(ICFallbackStub* icEntry)
@@ -1476,6 +1441,7 @@ jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation, JitFrameIter
{
MOZ_ASSERT(bailoutInfo != nullptr);
MOZ_ASSERT(*bailoutInfo == nullptr);
+ MOZ_ASSERT(iter.isBailoutJS());
TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
TraceLogStopEvent(logger, TraceLogger_IonMonkey);
@@ -1488,6 +1454,12 @@ jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation, JitFrameIter
activation->removeRematerializedFramesFromDebugger(cx, iter.fp());
});
+ // Always remove the RInstructionResults from the JitActivation, even in
+ // case of failures as the stack frame is going away after the bailout.
+ auto removeIonFrameRecovery = mozilla::MakeScopeExit([&] {
+ activation->removeIonFrameRecovery(iter.jsFrame());
+ });
+
// The caller of the top frame must be one of the following:
// IonJS - Ion calling into Ion.
// BaselineStub - Baseline calling into Ion.
@@ -1561,9 +1533,19 @@ jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation, JitFrameIter
}
JitSpew(JitSpew_BaselineBailouts, " Incoming frame ptr = %p", builder.startFrame());
- SnapshotIteratorForBailout snapIter(activation, iter);
- if (!snapIter.init(cx))
+ // Under a bailout, there is no need to invalidate the frame after
+ // evaluating the recover instruction, as the invalidation is only needed in
+ // cases where the frame is introspected ahead of the bailout.
+ MaybeReadFallback recoverBailout(cx, activation, &iter, MaybeReadFallback::Fallback_DoNothing);
+
+ // Ensure that all value locations are readable from the SnapshotIterator.
+ // Get the RInstructionResults from the JitActivation if the frame got
+ // recovered ahead of the bailout.
+ SnapshotIterator snapIter(iter, activation->bailoutData()->machineState());
+ if (!snapIter.initInstructionResults(recoverBailout)) {
+ ReportOutOfMemory(cx);
return BAILOUT_RETURN_FATAL_ERROR;
+ }
#ifdef TRACK_SNAPSHOTS
snapIter.spewBailingFrom();
diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index 7b2f8214ba..16d0260929 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -11529,6 +11529,32 @@ CodeGenerator::visitHasClass(LHasClass* ins)
}
void
+CodeGenerator::visitGuardToClass(LGuardToClass* ins)
+{
+ Register lhs = ToRegister(ins->lhs());
+ Register output = ToRegister(ins->output());
+ Register temp = ToRegister(ins->temp());
+
+ Label notEqual;
+
+ masm.branchTestObjClass(Assembler::NotEqual, lhs, temp, ins->mir()->getClass(), &notEqual);
+ masm.mov(lhs, output);
+
+ if (ins->mir()->type() == MIRType::Object) {
+ // Can't return null-return here, so bail
+ bailoutFrom(&notEqual, ins->snapshot());
+ } else {
+ Label done;
+ masm.jump(&done);
+
+ masm.bind(&notEqual);
+ masm.mov(ImmPtr(0), output);
+
+ masm.bind(&done);
+ }
+}
+
+void
CodeGenerator::visitWasmParameter(LWasmParameter* lir)
{
}
diff --git a/js/src/jit/CodeGenerator.h b/js/src/jit/CodeGenerator.h
index d3126651bc..b226f6cc91 100644
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -377,6 +377,7 @@ class CodeGenerator final : public CodeGeneratorSpecific
void visitIsObject(LIsObject* lir);
void visitIsObjectAndBranch(LIsObjectAndBranch* lir);
void visitHasClass(LHasClass* lir);
+ void visitGuardToClass(LGuardToClass* lir);
void visitWasmParameter(LWasmParameter* lir);
void visitWasmParameterI64(LWasmParameterI64* lir);
void visitWasmReturn(LWasmReturn* ret);
diff --git a/js/src/jit/InlinableNatives.h b/js/src/jit/InlinableNatives.h
index 89c2ff0a45..18535389ad 100644
--- a/js/src/jit/InlinableNatives.h
+++ b/js/src/jit/InlinableNatives.h
@@ -117,14 +117,16 @@
_(IntrinsicDefineDataProperty) \
_(IntrinsicObjectHasPrototype) \
\
- _(IntrinsicIsArrayIterator) \
- _(IntrinsicIsMapIterator) \
- _(IntrinsicIsSetIterator) \
- _(IntrinsicIsStringIterator) \
+ _(IntrinsicGuardToArrayIterator) \
+ _(IntrinsicGuardToMapIterator) \
+ _(IntrinsicGuardToSetIterator) \
_(IntrinsicIsListIterator) \
+ _(IntrinsicGuardToStringIterator) \
\
+ _(IntrinsicGuardToMapObject) \
_(IntrinsicGetNextMapEntryForIterator) \
\
+ _(IntrinsicGuardToSetObject) \
_(IntrinsicGetNextSetEntryForIterator) \
\
_(IntrinsicArrayBufferByteLength) \
diff --git a/js/src/jit/IonBuilder.h b/js/src/jit/IonBuilder.h
index 35ad120f7d..f24ef30c84 100644
--- a/js/src/jit/IonBuilder.h
+++ b/js/src/jit/IonBuilder.h
@@ -969,6 +969,7 @@ class IonBuilder
const Class* clasp2 = nullptr,
const Class* clasp3 = nullptr,
const Class* clasp4 = nullptr);
+ InliningStatus inlineGuardToClass(CallInfo& callInfo, const Class* clasp);
InliningStatus inlineIsConstructing(CallInfo& callInfo);
InliningStatus inlineSubstringKernel(CallInfo& callInfo);
InliningStatus inlineObjectHasPrototype(CallInfo& callInfo);
diff --git a/js/src/jit/JitFrameIterator.h b/js/src/jit/JitFrameIterator.h
index ba5efef6a5..76d04d092e 100644
--- a/js/src/jit/JitFrameIterator.h
+++ b/js/src/jit/JitFrameIterator.h
@@ -322,9 +322,7 @@ class RInstructionResults
MOZ_MUST_USE bool init(JSContext* cx, uint32_t numResults);
bool isInitialized() const;
-#ifdef DEBUG
size_t length() const;
-#endif
JitFrameLayout* frame() const;
@@ -511,13 +509,13 @@ class SnapshotIterator
return recover_.moreInstructions();
}
- protected:
// Register a vector used for storing the results of the evaluation of
// recover instructions. This vector should be registered before the
// beginning of the iteration. This function is in charge of allocating
// enough space for all instructions results, and return false iff it fails.
MOZ_MUST_USE bool initInstructionResults(MaybeReadFallback& fallback);
+ protected:
// This function is used internally for computing the result of the recover
// instructions.
MOZ_MUST_USE bool computeInstructionResults(JSContext* cx, RInstructionResults* results) const;
diff --git a/js/src/jit/JitFrames.cpp b/js/src/jit/JitFrames.cpp
index f11f17225c..019be46ddb 100644
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1688,13 +1688,11 @@ RInstructionResults::isInitialized() const
return initialized_;
}
-#ifdef DEBUG
size_t
RInstructionResults::length() const
{
return results_->length();
}
-#endif
JitFrameLayout*
RInstructionResults::frame() const
@@ -2150,7 +2148,7 @@ SnapshotIterator::initInstructionResults(MaybeReadFallback& fallback)
}
MOZ_ASSERT(results->isInitialized());
- MOZ_ASSERT(results->length() == recover_.numInstructions() - 1);
+ MOZ_RELEASE_ASSERT(results->length() == recover_.numInstructions() - 1);
instructionResults_ = results;
return true;
}
diff --git a/js/src/jit/Lowering.cpp b/js/src/jit/Lowering.cpp
index 730697163f..709de99873 100644
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -4171,6 +4171,16 @@ LIRGenerator::visitHasClass(MHasClass* ins)
}
void
+LIRGenerator::visitGuardToClass(MGuardToClass* ins)
+{
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::ObjectOrNull || ins->type() == MIRType::Object);
+ LGuardToClass* lir = new(alloc()) LGuardToClass(useRegister(ins->object()), temp());
+ assignSnapshot(lir, Bailout_TypeBarrierO);
+ define(lir, ins);
+}
+
+void
LIRGenerator::visitWasmAddOffset(MWasmAddOffset* ins)
{
MOZ_ASSERT(ins->base()->type() == MIRType::Int32);
diff --git a/js/src/jit/Lowering.h b/js/src/jit/Lowering.h
index b2805cb7a6..9b4095aec0 100644
--- a/js/src/jit/Lowering.h
+++ b/js/src/jit/Lowering.h
@@ -289,6 +289,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitIsConstructor(MIsConstructor* ins);
void visitIsObject(MIsObject* ins);
void visitHasClass(MHasClass* ins);
+ void visitGuardToClass(MGuardToClass* ins);
void visitWasmAddOffset(MWasmAddOffset* ins);
void visitWasmBoundsCheck(MWasmBoundsCheck* ins);
void visitWasmLoadGlobalVar(MWasmLoadGlobalVar* ins);
diff --git a/js/src/jit/MCallOptimize.cpp b/js/src/jit/MCallOptimize.cpp
index 202aef497b..01755094af 100644
--- a/js/src/jit/MCallOptimize.cpp
+++ b/js/src/jit/MCallOptimize.cpp
@@ -280,14 +280,14 @@ IonBuilder::inlineNativeCall(CallInfo& callInfo, JSFunction* target)
return inlineIsConstructing(callInfo);
case InlinableNative::IntrinsicSubstringKernel:
return inlineSubstringKernel(callInfo);
- case InlinableNative::IntrinsicIsArrayIterator:
- return inlineHasClass(callInfo, &ArrayIteratorObject::class_);
- case InlinableNative::IntrinsicIsMapIterator:
- return inlineHasClass(callInfo, &MapIteratorObject::class_);
- case InlinableNative::IntrinsicIsSetIterator:
- return inlineHasClass(callInfo, &SetIteratorObject::class_);
- case InlinableNative::IntrinsicIsStringIterator:
- return inlineHasClass(callInfo, &StringIteratorObject::class_);
+ case InlinableNative::IntrinsicGuardToArrayIterator:
+ return inlineGuardToClass(callInfo, &ArrayIteratorObject::class_);
+ case InlinableNative::IntrinsicGuardToMapIterator:
+ return inlineGuardToClass(callInfo, &MapIteratorObject::class_);
+ case InlinableNative::IntrinsicGuardToSetIterator:
+ return inlineGuardToClass(callInfo, &SetIteratorObject::class_);
+ case InlinableNative::IntrinsicGuardToStringIterator:
+ return inlineGuardToClass(callInfo, &StringIteratorObject::class_);
case InlinableNative::IntrinsicIsListIterator:
return inlineHasClass(callInfo, &ListIteratorObject::class_);
case InlinableNative::IntrinsicDefineDataProperty:
@@ -296,10 +296,14 @@ IonBuilder::inlineNativeCall(CallInfo& callInfo, JSFunction* target)
return inlineObjectHasPrototype(callInfo);
// Map intrinsics.
+ case InlinableNative::IntrinsicGuardToMapObject:
+ return inlineGuardToClass(callInfo, &MapObject::class_);
case InlinableNative::IntrinsicGetNextMapEntryForIterator:
return inlineGetNextEntryForIterator(callInfo, MGetNextEntryForIterator::Map);
// Set intrinsics.
+ case InlinableNative::IntrinsicGuardToSetObject:
+ return inlineGuardToClass(callInfo, &SetObject::class_);
case InlinableNative::IntrinsicGetNextSetEntryForIterator:
return inlineGetNextEntryForIterator(callInfo, MGetNextEntryForIterator::Set);
@@ -2220,6 +2224,37 @@ IonBuilder::inlineHasClass(CallInfo& callInfo,
}
IonBuilder::InliningStatus
+IonBuilder::inlineGuardToClass(CallInfo& callInfo, const Class* clasp)
+{
+ MOZ_ASSERT(!callInfo.constructing());
+ MOZ_ASSERT(callInfo.argc() == 1);
+
+ if (callInfo.getArg(0)->type() != MIRType::Object)
+ return InliningStatus_NotInlined;
+
+ if (getInlineReturnType() != MIRType::ObjectOrNull &&
+ getInlineReturnType() != MIRType::Object)
+ {
+ return InliningStatus_NotInlined;
+ }
+
+ TemporaryTypeSet* types = callInfo.getArg(0)->resultTypeSet();
+ const Class* knownClass = types ? types->getKnownClass(constraints()) : nullptr;
+
+ if (knownClass && knownClass == clasp) {
+ current->push(callInfo.getArg(0));
+ } else {
+ MGuardToClass* guardToClass = MGuardToClass::New(alloc(), callInfo.getArg(0),
+ clasp, getInlineReturnType());
+ current->add(guardToClass);
+ current->push(guardToClass);
+ }
+
+ callInfo.setImplicitlyUsedUnchecked();
+ return InliningStatus_Inlined;
+}
+
+IonBuilder::InliningStatus
IonBuilder::inlineGetNextEntryForIterator(CallInfo& callInfo, MGetNextEntryForIterator::Mode mode)
{
if (callInfo.argc() != 2 || callInfo.constructing()) {
diff --git a/js/src/jit/MIR.h b/js/src/jit/MIR.h
index 2de91e2df4..6ec05af764 100644
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -13192,6 +13192,48 @@ class MHasClass
}
};
+class MGuardToClass
+ : public MUnaryInstruction,
+ public SingleObjectPolicy::Data
+{
+ const Class* class_;
+
+ MGuardToClass(MDefinition* object, const Class* clasp, MIRType resultType)
+ : MUnaryInstruction(object)
+ , class_(clasp)
+ {
+ MOZ_ASSERT(object->type() == MIRType::Object ||
+ (object->type() == MIRType::Value && object->mightBeType(MIRType::Object)));
+ MOZ_ASSERT(resultType == MIRType::Object || resultType == MIRType::ObjectOrNull);
+ setResultType(resultType);
+ setMovable();
+ if (resultType == MIRType::Object) {
+ // We will bail out if the class type is incorrect,
+ // so we need to ensure we don't eliminate this instruction
+ setGuard();
+ }
+ }
+
+ public:
+ INSTRUCTION_HEADER(GuardToClass)
+ TRIVIAL_NEW_WRAPPERS
+ NAMED_OPERANDS((0, object))
+
+ const Class* getClass() const {
+ return class_;
+ }
+ AliasSet getAliasSet() const override {
+ return AliasSet::None();
+ }
+ bool congruentTo(const MDefinition* ins) const override {
+ if (!ins->isGuardToClass())
+ return false;
+ if (getClass() != ins->toGuardToClass()->getClass())
+ return false;
+ return congruentIfOperandsEqual(ins);
+ }
+};
+
class MCheckReturn
: public MBinaryInstruction,
public BoxInputsPolicy::Data
diff --git a/js/src/jit/MOpcodes.h b/js/src/jit/MOpcodes.h
index bb2ab8190d..fddc1e637d 100644
--- a/js/src/jit/MOpcodes.h
+++ b/js/src/jit/MOpcodes.h
@@ -272,6 +272,7 @@ namespace jit {
_(IsCallable) \
_(IsObject) \
_(HasClass) \
+ _(GuardToClass) \
_(CopySign) \
_(Rotate) \
_(NewDerivedTypedObject) \
diff --git a/js/src/jit/shared/LIR-shared.h b/js/src/jit/shared/LIR-shared.h
index 9dcb527c5d..f4adcc63c0 100644
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -7867,6 +7867,29 @@ class LHasClass : public LInstructionHelper<1, 1, 0>
}
};
+class LGuardToClass : public LInstructionHelper<1, 1, 1>
+{
+ public:
+ LIR_HEADER(GuardToClass);
+ explicit LGuardToClass(const LAllocation& lhs, const LDefinition& temp)
+ {
+ setOperand(0, lhs);
+ setTemp(0, temp);
+ }
+
+ const LAllocation* lhs() {
+ return getOperand(0);
+ }
+
+ const LDefinition* temp() {
+ return getTemp(0);
+ }
+
+ MGuardToClass* mir() const {
+ return mir_->toGuardToClass();
+ }
+};
+
template<size_t Defs, size_t Ops>
class LWasmSelectBase : public LInstructionHelper<Defs, Ops, 0>
{
diff --git a/js/src/jit/shared/LOpcodes-shared.h b/js/src/jit/shared/LOpcodes-shared.h
index 3eea1b449b..fe2ab5ea35 100644
--- a/js/src/jit/shared/LOpcodes-shared.h
+++ b/js/src/jit/shared/LOpcodes-shared.h
@@ -386,6 +386,7 @@
_(IsObject) \
_(IsObjectAndBranch) \
_(HasClass) \
+ _(GuardToClass) \
_(RecompileCheck) \
_(MemoryBarrier) \
_(AssertRangeI) \
diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index 45301dac87..3d4dae9bb9 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2156,7 +2156,7 @@ ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned maxL
return { begin, last->next };
}
-struct UpdatePointersTask : public GCParallelTask
+struct UpdatePointersTask : public GCParallelTaskHelper<UpdatePointersTask>
{
// Maximum number of arenas to update in one block.
#ifdef DEBUG
@@ -2172,14 +2172,13 @@ struct UpdatePointersTask : public GCParallelTask
arenas_.end = nullptr;
}
- ~UpdatePointersTask() override { join(); }
+ void run();
private:
JSRuntime* rt_;
ArenasToUpdate* source_;
ArenaListSegment arenas_;
- virtual void run() override;
bool getArenasToUpdate();
void updateArenas();
};
@@ -2276,7 +2275,7 @@ GCRuntime::updateCellPointers(MovingTracer* trc, Zone* zone, AllocKinds kinds, s
for (size_t i = 0; i < bgTaskCount && !bgArenas.done(); i++) {
bgTasks[i].emplace(rt, &bgArenas, lock);
startTask(*bgTasks[i], gcstats::PHASE_COMPACT_UPDATE_CELLS, lock);
- tasksStarted = i;
+ tasksStarted++;
}
}
@@ -2985,7 +2984,6 @@ js::gc::BackgroundDecommitTask::run()
AutoLockGC lock(runtime);
for (Chunk* chunk : toDecommit) {
-
// The arena list is not doubly-linked, so we have to work in the free
// list order and not in the natural order.
while (chunk->info.numArenasFreeCommitted) {
@@ -4359,7 +4357,8 @@ GCRuntime::endMarkingZoneGroup()
marker.setMarkColorBlack();
}
-class GCSweepTask : public GCParallelTask
+template <typename Derived>
+class GCSweepTask : public GCParallelTaskHelper<Derived>
{
GCSweepTask(const GCSweepTask&) = delete;
@@ -4369,13 +4368,13 @@ class GCSweepTask : public GCParallelTask
public:
explicit GCSweepTask(JSRuntime* rt) : runtime(rt) {}
GCSweepTask(GCSweepTask&& other)
- : GCParallelTask(mozilla::Move(other)),
+ : GCParallelTaskHelper<Derived>(mozilla::Move(other)),
runtime(other.runtime)
{}
};
// Causes the given WeakCache to be swept when run.
-class SweepWeakCacheTask : public GCSweepTask
+class SweepWeakCacheTask : public GCSweepTask<SweepWeakCacheTask>
{
JS::WeakCache<void*>& cache;
@@ -4387,15 +4386,15 @@ class SweepWeakCacheTask : public GCSweepTask
: GCSweepTask(mozilla::Move(other)), cache(other.cache)
{}
- void run() override {
+ void run() {
cache.sweep();
}
};
#define MAKE_GC_SWEEP_TASK(name) \
- class name : public GCSweepTask { \
- void run() override; \
+ class name : public GCSweepTask<name> { \
public: \
+ void run(); \
explicit name (JSRuntime* rt) : GCSweepTask(rt) {} \
}
MAKE_GC_SWEEP_TASK(SweepAtomsTask);
@@ -4447,7 +4446,8 @@ SweepMiscTask::run()
}
void
-GCRuntime::startTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked)
+GCRuntime::startTask(GCParallelTask& task, gcstats::Phase phase,
+ AutoLockHelperThreadState& locked)
{
if (!task.startWithLockHeld(locked)) {
AutoUnlockHelperThreadState unlock(locked);
@@ -4457,7 +4457,8 @@ GCRuntime::startTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperT
}
void
-GCRuntime::joinTask(GCParallelTask& task, gcstats::Phase phase, AutoLockHelperThreadState& locked)
+GCRuntime::joinTask(GCParallelTask& task, gcstats::Phase phase,
+ AutoLockHelperThreadState& locked)
{
gcstats::AutoPhase ap(stats, task, phase);
task.joinWithLockHeld(locked);
diff --git a/js/src/jsgc.h b/js/src/jsgc.h
index 7ad176d84e..d3cf31fe77 100644
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -12,6 +12,7 @@
#include "mozilla/Atomics.h"
#include "mozilla/EnumeratedArray.h"
#include "mozilla/MemoryReporting.h"
+#include "mozilla/Move.h"
#include "mozilla/TypeTraits.h"
#include "js/GCAPI.h"
@@ -936,10 +937,19 @@ class GCHelperState
};
// A generic task used to dispatch work to the helper thread system.
-// Users should derive from GCParallelTask add what data they need and
-// override |run|.
+// Users supply a function pointer to call.
+//
+// Note that we don't use virtual functions here because destructors can write
+// the vtable pointer on entry, which can cause races if synchronization
+// happens there.
class GCParallelTask
{
+ public:
+ using TaskFunc = void (*)(GCParallelTask*);
+
+ private:
+ TaskFunc func_;
+
// The state of the parallel computation.
enum TaskState {
NotStarted,
@@ -956,19 +966,24 @@ class GCParallelTask
// A flag to signal a request for early completion of the off-thread task.
mozilla::Atomic<bool> cancel_;
- virtual void run() = 0;
-
public:
- GCParallelTask() : state(NotStarted), duration_(0) {}
+ explicit GCParallelTask(TaskFunc func)
+ : func_(func),
+ state(NotStarted),
+ duration_(0),
+ cancel_(false)
+ {}
+
GCParallelTask(GCParallelTask&& other)
- : state(other.state),
+ : func_(other.func_),
+ state(other.state),
duration_(0),
cancel_(false)
{}
// Derived classes must override this to ensure that join() gets called
// before members get destructed.
- virtual ~GCParallelTask();
+ ~GCParallelTask();
// Time spent in the most recent invocation of this task.
int64_t duration() const { return duration_; }
@@ -997,12 +1012,34 @@ class GCParallelTask
bool isRunningWithLockHeld(const AutoLockHelperThreadState& locked) const;
bool isRunning() const;
+ void runTask() {
+ func_(this);
+ }
+
// This should be friended to HelperThread, but cannot be because it
// would introduce several circular dependencies.
public:
void runFromHelperThread(AutoLockHelperThreadState& locked);
};
+// CRTP template to handle cast to derived type when calling run().
+template <typename Derived>
+class GCParallelTaskHelper : public GCParallelTask
+{
+ public:
+ GCParallelTaskHelper()
+ : GCParallelTask(&runTaskTyped)
+ {}
+ GCParallelTaskHelper(GCParallelTaskHelper&& other)
+ : GCParallelTask(mozilla::Move(other))
+ {}
+
+ private:
+ static void runTaskTyped(GCParallelTask* task) {
+ static_cast<Derived*>(task)->run();
+ }
+};
+
typedef void (*IterateChunkCallback)(JSRuntime* rt, void* data, gc::Chunk* chunk);
typedef void (*IterateZoneCallback)(JSRuntime* rt, void* data, JS::Zone* zone);
typedef void (*IterateArenaCallback)(JSRuntime* rt, void* data, gc::Arena* arena,
diff --git a/js/src/jsnativestack.cpp b/js/src/jsnativestack.cpp
index 05928ea3df..166a5a4f77 100644
--- a/js/src/jsnativestack.cpp
+++ b/js/src/jsnativestack.cpp
@@ -21,6 +21,18 @@
# include <unistd.h>
# endif
+# if defined(XP_LINUX) && !defined(ANDROID) && defined(__GLIBC__)
+# include <dlfcn.h>
+# include <sys/syscall.h>
+# include <sys/types.h>
+# include <unistd.h>
+static pid_t
+gettid()
+{
+ return syscall(__NR_gettid);
+}
+# endif
+
#else
# error "Unsupported platform"
@@ -88,6 +100,52 @@ js::GetNativeStackBaseImpl()
context.uc_stack.ss_size;
}
+#elif defined(XP_LINUX) && !defined(ANDROID) && defined(__GLIBC__)
+void*
+js::GetNativeStackBaseImpl()
+{
+ // On the main thread, get the stack base from glibc's __libc_stack_end rather than the pthread
+ // APIs, to avoid filesystem calls to /proc/self/maps. Non-main threads spawned with pthreads can
+ // read this information directly from their pthread struct, but when using the pthreads API, the
+ // main thread must parse /proc/self/maps to figure out the mapped stack address space ranges.
+ // We want to avoid reading from /proc/ so that the application can run in restricted
+ // environments where /proc may not be mounted (e.g. chroot).
+ if (gettid() == getpid()) {
+ void** pLibcStackEnd = (void**)dlsym(RTLD_DEFAULT, "__libc_stack_end");
+
+ // If __libc_stack_end is not found, architecture specific frame pointer hopping will need
+ // to be implemented.
+ MOZ_RELEASE_ASSERT(pLibcStackEnd, "__libc_stack_end unavailable, unable to setup stack range for JS.");
+ void* stackBase = *pLibcStackEnd;
+ MOZ_RELEASE_ASSERT(stackBase, "Invalid stack base, unable to setup stack range for JS.");
+
+ // We don't need to fix stackBase, as it already roughly points to beginning of the stack.
+ return stackBase;
+ }
+
+ // Non-main threads have the required info stored in memory, so no filesystem calls are made.
+ pthread_t thread = pthread_self();
+ pthread_attr_t sattr;
+ pthread_attr_init(&sattr);
+ pthread_getattr_np(thread, &sattr);
+
+ // stackBase will be the *lowest* address on all architectures.
+ void* stackBase = nullptr;
+ size_t stackSize = 0;
+ int rc = pthread_attr_getstack(&sattr, &stackBase, &stackSize);
+ if (rc) {
+ MOZ_CRASH("Call to pthread_attr_getstack failed, unable to setup stack range for JS.");
+ }
+ MOZ_RELEASE_ASSERT(stackBase, "Invalid stack base, unable to setup stack range for JS.");
+ pthread_attr_destroy(&sattr);
+
+# if JS_STACK_GROWTH_DIRECTION > 0
+ return stackBase;
+# else
+ return static_cast<char*>(stackBase) + stackSize;
+# endif
+}
+
#else /* XP_UNIX */
void*
@@ -156,11 +214,15 @@ js::GetNativeStackBaseImpl()
// the truth.
rc = pthread_attr_getstack(&sattr, &stackBase, &stackSize);
# else
+ // Use the default pthread_attr_getstack() call. Note that this function
+ // differs between libc implementations and could imply /proc access etc.
+ // which may not work in restricted environments.
rc = pthread_attr_getstack(&sattr, &stackBase, &stackSize);
# endif
- if (rc)
- MOZ_CRASH();
- MOZ_ASSERT(stackBase);
+ if (rc) {
+ MOZ_CRASH("Call to pthread_attr_getstack failed, unable to setup stack range for JS.");
+ }
+ MOZ_RELEASE_ASSERT(stackBase, "Invalid stack base, unable to setup stack range for JS.");
pthread_attr_destroy(&sattr);
# if JS_STACK_GROWTH_DIRECTION > 0
diff --git a/js/src/jsstr.cpp b/js/src/jsstr.cpp
index 01b4076262..4151d012b6 100644
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -1534,7 +1534,7 @@ RopeMatch(JSContext* cx, JSRope* text, JSLinearString* pat, int* match)
return true;
}
-/* ES6 draft rc4 21.1.3.7. */
+/* ES6 2015 ST 21.1.3.7 String.prototype.includes */
bool
js::str_includes(JSContext* cx, unsigned argc, Value* vp)
{
@@ -1591,6 +1591,13 @@ js::str_includes(JSContext* cx, unsigned argc, Value* vp)
return true;
}
+/* ES6 draft <RC4 String.prototype.contains for compatibility */
+bool
+js::str_contains(JSContext* cx, unsigned argc, Value* vp)
+{
+ return js::str_includes(cx, argc, vp);
+}
+
/* ES6 20120927 draft 15.5.4.7. */
bool
js::str_indexOf(JSContext* cx, unsigned argc, Value* vp)
@@ -2555,6 +2562,7 @@ static const JSFunctionSpec string_methods[] = {
JS_SELF_HOSTED_FN("padEnd", "String_pad_end", 2,0),
JS_SELF_HOSTED_FN("codePointAt", "String_codePointAt", 1,0),
JS_FN("includes", str_includes, 1,0),
+ JS_FN("contains", str_contains, 1,0),
JS_FN("indexOf", str_indexOf, 1,0),
JS_FN("lastIndexOf", str_lastIndexOf, 1,0),
JS_FN("startsWith", str_startsWith, 1,0),
diff --git a/js/src/tests/Intl/DateTimeFormat/timeZone_backward_links.js b/js/src/tests/Intl/DateTimeFormat/timeZone_backward_links.js
index 7b3a46a60a..d87abd7be4 100644
--- a/js/src/tests/Intl/DateTimeFormat/timeZone_backward_links.js
+++ b/js/src/tests/Intl/DateTimeFormat/timeZone_backward_links.js
@@ -1,7 +1,7 @@
// |reftest| skip-if(!this.hasOwnProperty("Intl"))
// Generated by make_intl_data.py. DO NOT EDIT.
-// tzdata version = 2018d
+// tzdata version = 2018e
const tzMapper = [
x => x,
diff --git a/js/src/tests/Intl/DateTimeFormat/timeZone_backzone.js b/js/src/tests/Intl/DateTimeFormat/timeZone_backzone.js
index ed63df9217..b96dac96f8 100644
--- a/js/src/tests/Intl/DateTimeFormat/timeZone_backzone.js
+++ b/js/src/tests/Intl/DateTimeFormat/timeZone_backzone.js
@@ -1,7 +1,7 @@
// |reftest| skip-if(!this.hasOwnProperty("Intl"))
// Generated by make_intl_data.py. DO NOT EDIT.
-// tzdata version = 2018d
+// tzdata version = 2018e
const tzMapper = [
x => x,
diff --git a/js/src/tests/Intl/DateTimeFormat/timeZone_backzone_links.js b/js/src/tests/Intl/DateTimeFormat/timeZone_backzone_links.js
index 215808765c..66ef3075dd 100644
--- a/js/src/tests/Intl/DateTimeFormat/timeZone_backzone_links.js
+++ b/js/src/tests/Intl/DateTimeFormat/timeZone_backzone_links.js
@@ -1,7 +1,7 @@
// |reftest| skip-if(!this.hasOwnProperty("Intl"))
// Generated by make_intl_data.py. DO NOT EDIT.
-// tzdata version = 2018d
+// tzdata version = 2018e
const tzMapper = [
x => x,
diff --git a/js/src/tests/Intl/DateTimeFormat/timeZone_notbackward_links.js b/js/src/tests/Intl/DateTimeFormat/timeZone_notbackward_links.js
index 48242dfbdc..8d44204bcf 100644
--- a/js/src/tests/Intl/DateTimeFormat/timeZone_notbackward_links.js
+++ b/js/src/tests/Intl/DateTimeFormat/timeZone_notbackward_links.js
@@ -1,7 +1,7 @@
// |reftest| skip-if(!this.hasOwnProperty("Intl"))
// Generated by make_intl_data.py. DO NOT EDIT.
-// tzdata version = 2018d
+// tzdata version = 2018e
const tzMapper = [
x => x,
diff --git a/js/src/tests/js1_8_5/extensions/clone-errors.js b/js/src/tests/js1_8_5/extensions/clone-errors.js
index f65578a06b..d2ccea2e8c 100644
--- a/js/src/tests/js1_8_5/extensions/clone-errors.js
+++ b/js/src/tests/js1_8_5/extensions/clone-errors.js
@@ -25,6 +25,7 @@ check({get x() { throw new Error("fail"); }});
// Mismatched scopes.
for (let [write_scope, read_scope] of [['SameProcessSameThread', 'SameProcessDifferentThread'],
['SameProcessSameThread', 'DifferentProcess'],
+ ['SameProcessDifferentThread', 'DifferentProcessForIndexedDB'],
['SameProcessDifferentThread', 'DifferentProcess']])
{
var ab = new ArrayBuffer(12);
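
The clone-errors.js change above adds a mismatched-scope case for the new DifferentProcessForIndexedDB scope: per the serialize/deserialize help text, a clone buffer may only be deserialized with a compatible scope, so for example a SameProcessSameThread buffer cannot be read under a DifferentProcess scope. The same behavior can be reproduced directly in the JS shell:

    var buf = serialize({ n: 1 }, [], { scope: "SameProcessSameThread" });
    deserialize(buf, { scope: "SameProcessSameThread" });   // ok
    var threw = false;
    try {
        // Incompatible (narrower-to-wider) scope: expected to be rejected.
        deserialize(buf, { scope: "DifferentProcess" });
    } catch (e) {
        threw = true;
    }
    assertEq(threw, true);
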
diff --git a/js/src/tests/js1_8_5/extensions/clone-transferables.js b/js/src/tests/js1_8_5/extensions/clone-transferables.js
index 673684b954..9aad27208b 100644
--- a/js/src/tests/js1_8_5/extensions/clone-transferables.js
+++ b/js/src/tests/js1_8_5/extensions/clone-transferables.js
@@ -3,11 +3,15 @@
// http://creativecommons.org/licenses/publicdomain/
function* buffer_options() {
- for (var scope of ["SameProcessSameThread", "SameProcessDifferentThread", "DifferentProcess"]) {
- for (var size of [0, 8, 16, 200, 1000, 4096, 8192, 65536]) {
- yield { scope, size };
+ for (var scope of ["SameProcessSameThread",
+ "SameProcessDifferentThread",
+ "DifferentProcess",
+ "DifferentProcessForIndexedDB"])
+ {
+ for (var size of [0, 8, 16, 200, 1000, 4096, 8192, 65536]) {
+ yield { scope, size };
+ }
}
- }
}
diff --git a/js/src/vm/EnvironmentObject.h b/js/src/vm/EnvironmentObject.h
index d457ca8392..0322861164 100644
--- a/js/src/vm/EnvironmentObject.h
+++ b/js/src/vm/EnvironmentObject.h
@@ -930,6 +930,9 @@ class DebugEnvironments
void mark(JSTracer* trc);
void sweep(JSRuntime* rt);
void finish();
+#ifdef JSGC_HASH_TABLE_CHECKS
+ void checkHashTablesAfterMovingGC(JSRuntime* runtime);
+#endif
// If a live frame has a synthesized entry in missingEnvs, make sure it's not
// collected.
diff --git a/js/src/vm/HelperThreads.cpp b/js/src/vm/HelperThreads.cpp
index 7381a97b5c..bd29d0c796 100644
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -1144,7 +1144,7 @@ js::GCParallelTask::runFromMainThread(JSRuntime* rt)
MOZ_ASSERT(state == NotStarted);
MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(rt));
uint64_t timeStart = PRMJ_Now();
- run();
+ runTask();
duration_ = PRMJ_Now() - timeStart;
}
@@ -1155,7 +1155,7 @@ js::GCParallelTask::runFromHelperThread(AutoLockHelperThreadState& locked)
AutoUnlockHelperThreadState parallelSection(locked);
gc::AutoSetThreadIsPerformingGC performingGC;
uint64_t timeStart = PRMJ_Now();
- run();
+ runTask();
duration_ = PRMJ_Now() - timeStart;
}
diff --git a/js/src/vm/SelfHosting.cpp b/js/src/vm/SelfHosting.cpp
index 653807ce85..08670c8331 100644
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -189,6 +189,22 @@ intrinsic_IsInstanceOfBuiltin(JSContext* cx, unsigned argc, Value* vp)
return true;
}
+template<typename T>
+static bool
+intrinsic_GuardToBuiltin(JSContext* cx, unsigned argc, Value* vp)
+{
+ CallArgs args = CallArgsFromVp(argc, vp);
+ MOZ_ASSERT(args.length() == 1);
+ MOZ_ASSERT(args[0].isObject());
+
+ if (args[0].toObject().is<T>()) {
+ args.rval().setObject(args[0].toObject());
+ return true;
+ }
+ args.rval().setNull();
+ return true;
+}
+
/**
* Self-hosting intrinsic returning the original constructor for a builtin
* the name of which is the first and only argument.
@@ -2297,18 +2313,18 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_FN("_SetCanonicalName", intrinsic_SetCanonicalName, 2,0),
- JS_INLINABLE_FN("IsArrayIterator",
- intrinsic_IsInstanceOfBuiltin<ArrayIteratorObject>, 1,0,
- IntrinsicIsArrayIterator),
- JS_INLINABLE_FN("IsMapIterator",
- intrinsic_IsInstanceOfBuiltin<MapIteratorObject>, 1,0,
- IntrinsicIsMapIterator),
- JS_INLINABLE_FN("IsSetIterator",
- intrinsic_IsInstanceOfBuiltin<SetIteratorObject>, 1,0,
- IntrinsicIsSetIterator),
- JS_INLINABLE_FN("IsStringIterator",
- intrinsic_IsInstanceOfBuiltin<StringIteratorObject>, 1,0,
- IntrinsicIsStringIterator),
+ JS_INLINABLE_FN("GuardToArrayIterator",
+ intrinsic_GuardToBuiltin<ArrayIteratorObject>, 1,0,
+ IntrinsicGuardToArrayIterator),
+ JS_INLINABLE_FN("GuardToMapIterator",
+ intrinsic_GuardToBuiltin<MapIteratorObject>, 1,0,
+ IntrinsicGuardToMapIterator),
+ JS_INLINABLE_FN("GuardToSetIterator",
+ intrinsic_GuardToBuiltin<SetIteratorObject>, 1,0,
+ IntrinsicGuardToSetIterator),
+ JS_INLINABLE_FN("GuardToStringIterator",
+ intrinsic_GuardToBuiltin<StringIteratorObject>, 1,0,
+ IntrinsicGuardToStringIterator),
JS_INLINABLE_FN("IsListIterator",
intrinsic_IsInstanceOfBuiltin<ListIteratorObject>, 1,0,
IntrinsicIsListIterator),
@@ -2412,7 +2428,12 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_FN("CallStarGeneratorMethodIfWrapped",
CallNonGenericSelfhostedMethod<Is<StarGeneratorObject>>, 2, 0),
+ JS_INLINABLE_FN("GuardToMapObject", intrinsic_GuardToBuiltin<MapObject>, 1, 0,
+ IntrinsicGuardToMapObject),
JS_FN("IsWeakSet", intrinsic_IsInstanceOfBuiltin<WeakSetObject>, 1,0),
+
+ JS_INLINABLE_FN("GuardToSetObject", intrinsic_GuardToBuiltin<SetObject>, 1, 0,
+ IntrinsicGuardToSetObject),
JS_FN("CallWeakSetMethodIfWrapped",
CallNonGenericSelfhostedMethod<Is<WeakSetObject>>, 2, 0),
diff --git a/js/src/vm/StructuredClone.cpp b/js/src/vm/StructuredClone.cpp
index 3a062c3b8b..42e9090004 100644
--- a/js/src/vm/StructuredClone.cpp
+++ b/js/src/vm/StructuredClone.cpp
@@ -160,16 +160,16 @@ template<typename T, typename AllocPolicy>
struct BufferIterator {
typedef mozilla::BufferList<AllocPolicy> BufferList;
- explicit BufferIterator(BufferList& buffer)
+ explicit BufferIterator(const BufferList& buffer)
: mBuffer(buffer)
, mIter(buffer.Iter())
{
JS_STATIC_ASSERT(8 % sizeof(T) == 0);
}
- BufferIterator(const BufferIterator& other)
- : mBuffer(other.mBuffer)
- , mIter(other.mIter)
+ explicit BufferIterator(const JSStructuredCloneData& data)
+ : mBuffer(data.bufList_)
+ , mIter(data.Start())
{
}
@@ -228,17 +228,26 @@ struct BufferIterator {
return mIter.HasRoomFor(sizeof(T));
}
- BufferList& mBuffer;
+ const BufferList& mBuffer;
typename BufferList::IterImpl mIter;
};
+// SCOutput provides an interface to write raw data -- eg uint64_ts, doubles,
+// arrays of bytes -- into a structured clone data output stream. It also knows
+// how to free any transferable data within that stream.
+//
+// Note that it contains a full JSStructuredCloneData object, which holds the
+// callbacks necessary to read/write/transfer/free the data. For the purpose of
+// this class, only the freeTransfer callback is relevant; the rest of the callbacks
+// are used by the higher-level JSStructuredCloneWriter interface.
struct SCOutput {
public:
- using Iter = BufferIterator<uint64_t, TempAllocPolicy>;
+ using Iter = BufferIterator<uint64_t, SystemAllocPolicy>;
- explicit SCOutput(JSContext* cx);
+ SCOutput(JSContext* cx, JS::StructuredCloneScope scope);
JSContext* context() const { return cx; }
+ JS::StructuredCloneScope scope() const { return buf.scope(); }
bool write(uint64_t u);
bool writePair(uint32_t tag, uint32_t data);
@@ -251,22 +260,25 @@ struct SCOutput {
template <class T>
bool writeArray(const T* p, size_t nbytes);
- bool extractBuffer(JSStructuredCloneData* data);
- void discardTransferables(const JSStructuredCloneCallbacks* cb, void* cbClosure);
+ void setCallbacks(const JSStructuredCloneCallbacks* callbacks,
+ void* closure,
+ OwnTransferablePolicy policy)
+ {
+ buf.setCallbacks(callbacks, closure, policy);
+ }
+ void extractBuffer(JSStructuredCloneData* data) { *data = Move(buf); }
+ void discardTransferables();
uint64_t tell() const { return buf.Size(); }
uint64_t count() const { return buf.Size() / sizeof(uint64_t); }
- Iter iter() {
- return BufferIterator<uint64_t, TempAllocPolicy>(buf);
- }
+ Iter iter() { return Iter(buf); }
size_t offset(Iter dest) {
return dest - iter();
}
- private:
JSContext* cx;
- mozilla::BufferList<TempAllocPolicy> buf;
+ JSStructuredCloneData buf;
};
class SCInput {
@@ -356,13 +368,6 @@ struct JSStructuredCloneReader {
// be valid cross-process.)
JS::StructuredCloneScope allowedScope;
- // The scope the buffer was generated for (what sort of buffer it is.) The
- // scope is not just a permissions thing; it also affects the storage
- // format (eg a Transferred ArrayBuffer can be stored as a pointer for
- // SameProcessSameThread but must have its contents in the clone buffer for
- // DifferentProcess.)
- JS::StructuredCloneScope storedScope;
-
// Stack of objects with properties remaining to be read.
AutoValueVector objs;
@@ -386,13 +391,15 @@ struct JSStructuredCloneWriter {
const JSStructuredCloneCallbacks* cb,
void* cbClosure,
const Value& tVal)
- : out(cx), scope(scope), objs(out.context()),
+ : out(cx, scope), objs(out.context()),
counts(out.context()), entries(out.context()),
- memory(out.context()), callbacks(cb),
- closure(cbClosure), transferable(out.context(), tVal),
+ memory(out.context()),
+ transferable(out.context(), tVal),
transferableObjects(out.context(), GCHashSet<JSObject*>(cx)),
cloneDataPolicy(cloneDataPolicy)
- {}
+ {
+ out.setCallbacks(cb, cbClosure, OwnTransferablePolicy::NoTransferables);
+ }
~JSStructuredCloneWriter();
@@ -408,17 +415,10 @@ struct JSStructuredCloneWriter {
SCOutput& output() { return out; }
- bool extractBuffer(JSStructuredCloneData* data) {
- bool success = out.extractBuffer(data);
- if (success) {
- data->setOptionalCallbacks(callbacks, closure,
- OwnTransferablePolicy::OwnsTransferablesIfAny);
- }
- return success;
+ void extractBuffer(JSStructuredCloneData* newData) {
+ out.extractBuffer(newData);
}
- JS::StructuredCloneScope cloneScope() const { return scope; }
-
private:
JSStructuredCloneWriter() = delete;
JSStructuredCloneWriter(const JSStructuredCloneWriter&) = delete;
@@ -449,9 +449,6 @@ struct JSStructuredCloneWriter {
SCOutput out;
- // The (address space, thread) scope within which this clone is valid.
- JS::StructuredCloneScope scope;
-
// Vector of objects with properties remaining to be written.
//
// NB: These can span multiple compartments, so the compartment must be
@@ -477,12 +474,6 @@ struct JSStructuredCloneWriter {
SystemAllocPolicy>;
Rooted<CloneMemory> memory;
- // The user defined callbacks that will be used for cloning.
- const JSStructuredCloneCallbacks* callbacks;
-
- // Any value passed to JS_WriteStructuredClone.
- void* closure;
-
// Set of transferable objects
RootedValue transferable;
Rooted<GCHashSet<JSObject*>> transferableObjects;
@@ -542,7 +533,12 @@ WriteStructuredClone(JSContext* cx, HandleValue v, JSStructuredCloneData* bufp,
const Value& transferable)
{
JSStructuredCloneWriter w(cx, scope, cloneDataPolicy, cb, cbClosure, transferable);
- return w.init() && w.write(v) && w.extractBuffer(bufp);
+ if (!w.init())
+ return false;
+ if (!w.write(v))
+ return false;
+ w.extractBuffer(bufp);
+ return true;
}
bool
@@ -555,91 +551,15 @@ ReadStructuredClone(JSContext* cx, JSStructuredCloneData& data,
return r.read(vp);
}
-// If the given buffer contains Transferables, free them. Note that custom
-// Transferables will use the JSStructuredCloneCallbacks::freeTransfer() to
-// delete their transferables.
-template<typename AllocPolicy>
-static void
-DiscardTransferables(mozilla::BufferList<AllocPolicy>& buffer,
- const JSStructuredCloneCallbacks* cb, void* cbClosure)
-{
- auto point = BufferIterator<uint64_t, AllocPolicy>(buffer);
- if (point.done())
- return; // Empty buffer
-
- uint32_t tag, data;
- MOZ_RELEASE_ASSERT(point.canPeek());
- SCInput::getPair(point.peek(), &tag, &data);
- point.next();
-
- if (tag == SCTAG_HEADER) {
- if (point.done())
- return;
-
- MOZ_RELEASE_ASSERT(point.canPeek());
- SCInput::getPair(point.peek(), &tag, &data);
- point.next();
- }
-
- if (tag != SCTAG_TRANSFER_MAP_HEADER)
- return;
-
- if (TransferableMapHeader(data) == SCTAG_TM_TRANSFERRED)
- return;
-
- // freeTransfer should not GC
- JS::AutoSuppressGCAnalysis nogc;
-
- if (point.done())
- return;
-
- uint64_t numTransferables = NativeEndian::swapFromLittleEndian(point.peek());
- point.next();
- while (numTransferables--) {
- if (!point.canPeek())
- return;
-
- uint32_t ownership;
- SCInput::getPair(point.peek(), &tag, &ownership);
- point.next();
- MOZ_ASSERT(tag >= SCTAG_TRANSFER_MAP_PENDING_ENTRY);
- if (!point.canPeek())
- return;
-
- void* content;
- SCInput::getPtr(point.peek(), &content);
- point.next();
- if (!point.canPeek())
- return;
-
- uint64_t extraData = NativeEndian::swapFromLittleEndian(point.peek());
- point.next();
-
- if (ownership < JS::SCTAG_TMO_FIRST_OWNED)
- continue;
-
- if (ownership == JS::SCTAG_TMO_ALLOC_DATA) {
- js_free(content);
- } else if (ownership == JS::SCTAG_TMO_MAPPED_DATA) {
- JS_ReleaseMappedArrayBufferContents(content, extraData);
- } else if (cb && cb->freeTransfer) {
- cb->freeTransfer(tag, JS::TransferableOwnership(ownership), content, extraData, cbClosure);
- } else {
- MOZ_ASSERT(false, "unknown ownership");
- }
- }
-}
-
static bool
StructuredCloneHasTransferObjects(const JSStructuredCloneData& data)
{
- auto iter = data.Iter();
-
if (data.Size() < sizeof(uint64_t))
return false;
uint64_t u;
- data.ReadBytes(iter, reinterpret_cast<char*>(&u), sizeof(u));
+ BufferIterator<uint64_t, SystemAllocPolicy> iter(data);
+ MOZ_ALWAYS_TRUE(iter.readBytes(reinterpret_cast<char*>(&u), sizeof(u)));
uint32_t tag = uint32_t(u >> 32);
return (tag == SCTAG_TRANSFER_MAP_HEADER);
}
@@ -650,7 +570,7 @@ SCInput::SCInput(JSContext* cx, JSStructuredCloneData& data)
: cx(cx), point(data)
{
- static_assert(JSStructuredCloneData::kSegmentAlignment % 8 == 0,
+ static_assert(JSStructuredCloneData::BufferList::kSegmentAlignment % 8 == 0,
"structured clone buffer reads should be aligned");
MOZ_ASSERT(data.Size() % 8 == 0);
}
@@ -812,9 +732,8 @@ SCInput::readPtr(void** p)
return true;
}
-SCOutput::SCOutput(JSContext* cx)
- : cx(cx)
- , buf(0, 0, 4096, cx)
+SCOutput::SCOutput(JSContext* cx, JS::StructuredCloneScope scope)
+ : cx(cx), buf(scope)
{
}
@@ -822,7 +741,11 @@ bool
SCOutput::write(uint64_t u)
{
uint64_t v = NativeEndian::swapToLittleEndian(u);
- return buf.WriteBytes(reinterpret_cast<char*>(&v), sizeof(u));
+ if (!buf.AppendBytes(reinterpret_cast<char*>(&v), sizeof(u))) {
+ ReportOutOfMemory(context());
+ return false;
+ }
+ return true;
}
bool
@@ -883,7 +806,7 @@ SCOutput::writeArray(const T* p, size_t nelems)
for (size_t i = 0; i < nelems; i++) {
T value = swapToLittleEndian(p[i]);
- if (!buf.WriteBytes(reinterpret_cast<char*>(&value), sizeof(value)))
+ if (!buf.AppendBytes(reinterpret_cast<char*>(&value), sizeof(value)))
return false;
}
@@ -892,7 +815,7 @@ SCOutput::writeArray(const T* p, size_t nelems)
size_t padbytes = sizeof(uint64_t) * nwords - sizeof(T) * nelems;
char zero = 0;
for (size_t i = 0; i < padbytes; i++) {
- if (!buf.WriteBytes(&zero, sizeof(zero)))
+ if (!buf.AppendBytes(&zero, sizeof(zero)))
return false;
}
@@ -927,34 +850,101 @@ SCOutput::writePtr(const void* p)
return write(reinterpret_cast<uint64_t>(p));
}
-bool
-SCOutput::extractBuffer(JSStructuredCloneData* data)
-{
- bool success;
- mozilla::BufferList<SystemAllocPolicy> out =
- buf.MoveFallible<SystemAllocPolicy>(&success);
- if (!success) {
- ReportOutOfMemory(cx);
- return false;
- }
- *data = JSStructuredCloneData(Move(out));
- return true;
-}
-
void
-SCOutput::discardTransferables(const JSStructuredCloneCallbacks* cb, void* cbClosure)
+SCOutput::discardTransferables()
{
- DiscardTransferables(buf, cb, cbClosure);
+ buf.discardTransferables();
}
} /* namespace js */
-JSStructuredCloneData::~JSStructuredCloneData()
+
+// If the buffer contains Transferables, free them. Custom Transferables
+// are released through the embedder's
+// JSStructuredCloneCallbacks::freeTransfer() hook.
+void
+JSStructuredCloneData::discardTransferables()
{
if (!Size())
return;
- if (ownTransferables_ == OwnTransferablePolicy::OwnsTransferablesIfAny)
- DiscardTransferables(*this, callbacks_, closure_);
+
+ if (ownTransferables_ != OwnTransferablePolicy::OwnsTransferablesIfAny)
+ return;
+
+ // DifferentProcess clones cannot contain pointers, so nothing needs to be
+ // released.
+ if (scope_ == JS::StructuredCloneScope::DifferentProcess)
+ return;
+
+ FreeTransferStructuredCloneOp freeTransfer = nullptr;
+ if (callbacks_)
+ freeTransfer = callbacks_->freeTransfer;
+
+ auto point = BufferIterator<uint64_t, SystemAllocPolicy>(*this);
+ if (point.done())
+ return; // Empty buffer
+
+ uint32_t tag, data;
+ MOZ_RELEASE_ASSERT(point.canPeek());
+ SCInput::getPair(point.peek(), &tag, &data);
+ point.next();
+
+ if (tag == SCTAG_HEADER) {
+ if (point.done())
+ return;
+
+ MOZ_RELEASE_ASSERT(point.canPeek());
+ SCInput::getPair(point.peek(), &tag, &data);
+ point.next();
+ }
+
+ if (tag != SCTAG_TRANSFER_MAP_HEADER)
+ return;
+
+ if (TransferableMapHeader(data) == SCTAG_TM_TRANSFERRED)
+ return;
+
+ // freeTransfer should not GC
+ JS::AutoSuppressGCAnalysis nogc;
+
+ if (point.done())
+ return;
+
+ uint64_t numTransferables = NativeEndian::swapFromLittleEndian(point.peek());
+ point.next();
+ while (numTransferables--) {
+ if (!point.canPeek())
+ return;
+
+ uint32_t ownership;
+ SCInput::getPair(point.peek(), &tag, &ownership);
+ point.next();
+ MOZ_ASSERT(tag >= SCTAG_TRANSFER_MAP_PENDING_ENTRY);
+ if (!point.canPeek())
+ return;
+
+ void* content;
+ SCInput::getPtr(point.peek(), &content);
+ point.next();
+ if (!point.canPeek())
+ return;
+
+ uint64_t extraData = NativeEndian::swapFromLittleEndian(point.peek());
+ point.next();
+
+ if (ownership < JS::SCTAG_TMO_FIRST_OWNED)
+ continue;
+
+ if (ownership == JS::SCTAG_TMO_ALLOC_DATA) {
+ js_free(content);
+ } else if (ownership == JS::SCTAG_TMO_MAPPED_DATA) {
+ JS_ReleaseMappedArrayBufferContents(content, extraData);
+ } else if (freeTransfer) {
+ freeTransfer(tag, JS::TransferableOwnership(ownership), content, extraData, closure_);
+ } else {
+ MOZ_ASSERT(false, "unknown ownership");
+ }
+ }
}
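
For entries the engine cannot release itself, discardTransferables falls through to the embedder's freeTransfer hook. A hedged sketch of such a hook, matching the argument order of the call above; MyTransferable is hypothetical, and the header name follows mozilla-central:

    #include "js/StructuredClone.h"

    struct MyTransferable { /* externally owned payload */ };

    // Called only for entries the engine does not free itself, i.e. ownership
    // other than SCTAG_TMO_ALLOC_DATA or SCTAG_TMO_MAPPED_DATA.
    static void
    MyFreeTransfer(uint32_t tag, JS::TransferableOwnership ownership,
                   void* content, uint64_t extraData, void* closure)
    {
        if (ownership == JS::SCTAG_TMO_CUSTOM)
            delete static_cast<MyTransferable*>(content);
    }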
JS_STATIC_ASSERT(JSString::MAX_LENGTH < UINT32_MAX);
@@ -962,9 +952,8 @@ JS_STATIC_ASSERT(JSString::MAX_LENGTH < UINT32_MAX);
JSStructuredCloneWriter::~JSStructuredCloneWriter()
{
// Free any transferable data left lying around in the buffer
- if (out.count()) {
- out.discardTransferables(callbacks, closure);
- }
+ if (out.count())
+ out.discardTransferables();
}
bool
@@ -1038,7 +1027,7 @@ JSStructuredCloneWriter::parseTransferable()
bool
JSStructuredCloneWriter::reportDataCloneError(uint32_t errorId)
{
- ReportDataCloneError(context(), callbacks, errorId);
+ ReportDataCloneError(context(), out.buf.callbacks_, errorId);
return false;
}
@@ -1454,8 +1443,8 @@ JSStructuredCloneWriter::startWrite(HandleValue v)
return traverseSavedFrame(obj);
}
- if (callbacks && callbacks->write)
- return callbacks->write(context(), this, obj, closure);
+ if (out.buf.callbacks_ && out.buf.callbacks_->write)
+ return out.buf.callbacks_->write(context(), this, obj, out.buf.closure_);
/* else fall through */
}
@@ -1465,7 +1454,7 @@ JSStructuredCloneWriter::startWrite(HandleValue v)
bool
JSStructuredCloneWriter::writeHeader()
{
- return out.writePair(SCTAG_HEADER, (uint32_t)scope);
+ return out.writePair(SCTAG_HEADER, (uint32_t)output().scope());
}
bool
@@ -1523,6 +1512,7 @@ JSStructuredCloneWriter::transferOwnership()
JSContext* cx = context();
RootedObject obj(cx);
+ JS::StructuredCloneScope scope = output().scope();
for (auto tr = transferableObjects.all(); !tr.empty(); tr.popFront()) {
obj = tr.front();
@@ -1555,7 +1545,9 @@ JSStructuredCloneWriter::transferOwnership()
return false;
}
- if (scope == JS::StructuredCloneScope::DifferentProcess) {
+ if (scope == JS::StructuredCloneScope::DifferentProcess ||
+ scope == JS::StructuredCloneScope::DifferentProcessForIndexedDB)
+ {
// Write Transferred ArrayBuffers in DifferentProcess scope at
// the end of the clone buffer, and store the offset within the
// buffer to where the ArrayBuffer was written. Note that this
@@ -1592,9 +1584,9 @@ JSStructuredCloneWriter::transferOwnership()
extraData = nbytes;
}
} else {
- if (!callbacks || !callbacks->writeTransfer)
+ if (!out.buf.callbacks_ || !out.buf.callbacks_->writeTransfer)
return reportDataCloneError(JS_SCERR_TRANSFERABLE);
- if (!callbacks->writeTransfer(cx, obj, closure, &tag, &ownership, &content, &extraData))
+ if (!out.buf.callbacks_->writeTransfer(cx, obj, out.buf.closure_, &tag, &ownership, &content, &extraData))
return false;
MOZ_ASSERT(tag > SCTAG_TRANSFER_MAP_PENDING_ENTRY);
}
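
The non-ArrayBuffer branch above defers to the embedder's writeTransfer hook, which must fill in the same out-parameters the call site passes. A hedged sketch with a hypothetical closure and payload type; a real hook would detach the payload from obj and choose a tag above SCTAG_TRANSFER_MAP_PENDING_ENTRY, as the assertion requires:

    #include "jsapi.h"
    #include "js/StructuredClone.h"

    struct MyTransferable { /* externally owned payload */ };

    struct MyTransferClosure {
        uint32_t transferTag;  // embedder-chosen, > SCTAG_TRANSFER_MAP_PENDING_ENTRY
    };

    static bool
    MyWriteTransfer(JSContext* cx, JS::Handle<JSObject*> obj, void* closure,
                    uint32_t* tag, JS::TransferableOwnership* ownership,
                    void** content, uint64_t* extraData)
    {
        auto* my = static_cast<MyTransferClosure*>(closure);
        // Sketch only: hand back a heap block that the matching freeTransfer
        // hook (see the earlier sketch) can release with delete.
        *tag = my->transferTag;
        *ownership = JS::SCTAG_TMO_CUSTOM;
        *content = new MyTransferable();
        *extraData = 0;
        return true;
    }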
@@ -2187,25 +2179,33 @@ JSStructuredCloneReader::readHeader()
if (!in.getPair(&tag, &data))
return in.reportTruncated();
- if (tag != SCTAG_HEADER) {
+ JS::StructuredCloneScope storedScope;
+ if (tag == SCTAG_HEADER) {
+ MOZ_ALWAYS_TRUE(in.readPair(&tag, &data));
+ storedScope = JS::StructuredCloneScope(data);
+ } else {
// Old structured clone buffer. We must have read it from disk.
- storedScope = JS::StructuredCloneScope::DifferentProcess;
- return true;
+ storedScope = JS::StructuredCloneScope::DifferentProcessForIndexedDB;
}
- MOZ_ALWAYS_TRUE(in.readPair(&tag, &data));
- storedScope = JS::StructuredCloneScope(data);
-
- if (data != uint32_t(JS::StructuredCloneScope::SameProcessSameThread) &&
- data != uint32_t(JS::StructuredCloneScope::SameProcessDifferentThread) &&
- data != uint32_t(JS::StructuredCloneScope::DifferentProcess))
+ if (storedScope < JS::StructuredCloneScope::SameProcessSameThread ||
+ storedScope > JS::StructuredCloneScope::DifferentProcessForIndexedDB)
{
JS_ReportErrorNumberASCII(context(), GetErrorMessage, nullptr, JSMSG_SC_BAD_SERIALIZED_DATA,
"invalid structured clone scope");
return false;
}
+
+ if (allowedScope == JS::StructuredCloneScope::DifferentProcessForIndexedDB) {
+ // Bug 1434308 and bug 1458320 - the scopes stored in old IndexedDB
+ // clones are incorrect. Treat them as if they were DifferentProcess.
+ allowedScope = JS::StructuredCloneScope::DifferentProcess;
+ return true;
+ }
+
if (storedScope < allowedScope) {
- JS_ReportErrorNumberASCII(context(), GetErrorMessage, nullptr, JSMSG_SC_BAD_SERIALIZED_DATA,
+ JS_ReportErrorNumberASCII(context(), GetErrorMessage, nullptr,
+ JSMSG_SC_BAD_SERIALIZED_DATA,
"incompatible structured clone scope");
return false;
}
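
The range check above treats the scopes as an ordered set, from SameProcessSameThread up to DifferentProcessForIndexedDB, and the comparison that follows only accepts data stored for a scope at least as broad as the one the reader demands. A minimal restatement of that rule:

    // Sketch only: readable iff the data was serialized for a scope no narrower
    // than what the reader allows, i.e. the negation of the
    // "storedScope < allowedScope" rejection above.
    static bool
    ScopeIsReadable(JS::StructuredCloneScope stored, JS::StructuredCloneScope allowed)
    {
        return stored >= allowed;
    }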
@@ -2249,10 +2249,14 @@ JSStructuredCloneReader::readTransferMap()
return false;
if (tag == SCTAG_TRANSFER_MAP_ARRAY_BUFFER) {
- if (storedScope == JS::StructuredCloneScope::DifferentProcess) {
+ if (allowedScope == JS::StructuredCloneScope::DifferentProcess ||
+ allowedScope == JS::StructuredCloneScope::DifferentProcessForIndexedDB)
+ {
// Transferred ArrayBuffers in a DifferentProcess clone buffer
- // are treated as if they weren't Transferred at all.
- continue;
+ // are treated as if they weren't Transferred at all. We should
+ // only see SCTAG_TRANSFER_MAP_STORED_ARRAY_BUFFER.
+ ReportDataCloneError(cx, callbacks, JS_SCERR_TRANSFERABLE);
+ return false;
}
size_t nbytes = extraData;
@@ -2586,7 +2590,7 @@ JS_StructuredClone(JSContext* cx, HandleValue value, MutableHandleValue vp,
}
JSAutoStructuredCloneBuffer::JSAutoStructuredCloneBuffer(JSAutoStructuredCloneBuffer&& other)
- : scope_(other.scope_)
+ : scope_(other.scope()), data_(other.scope())
{
data_.ownTransferables_ = other.data_.ownTransferables_;
other.steal(&data_, &version_, &data_.callbacks_, &data_.closure_);
@@ -2604,45 +2608,14 @@ JSAutoStructuredCloneBuffer::operator=(JSAutoStructuredCloneBuffer&& other)
}
void
-JSAutoStructuredCloneBuffer::clear(const JSStructuredCloneCallbacks* optionalCallbacks,
- void* optionalClosure)
+JSAutoStructuredCloneBuffer::clear()
{
- if (!data_.Size())
- return;
-
- const JSStructuredCloneCallbacks* callbacks =
- optionalCallbacks ? optionalCallbacks : data_.callbacks_;
- void* closure = optionalClosure ? optionalClosure : data_.closure_;
-
- if (data_.ownTransferables_ == OwnTransferablePolicy::OwnsTransferablesIfAny)
- DiscardTransferables(data_, callbacks, closure);
+ data_.discardTransferables();
data_.ownTransferables_ = OwnTransferablePolicy::NoTransferables;
data_.Clear();
version_ = 0;
}
-bool
-JSAutoStructuredCloneBuffer::copy(const JSStructuredCloneData& srcData, uint32_t version,
- const JSStructuredCloneCallbacks* callbacks,
- void* closure)
-{
- // transferable objects cannot be copied
- if (StructuredCloneHasTransferObjects(srcData))
- return false;
-
- clear();
-
- auto iter = srcData.Iter();
- while (!iter.Done()) {
- data_.WriteBytes(iter.Data(), iter.RemainingInSegment());
- iter.Advance(srcData, iter.RemainingInSegment());
- }
-
- version_ = version;
- data_.setOptionalCallbacks(callbacks, closure, OwnTransferablePolicy::NoTransferables);
- return true;
-}
-
void
JSAutoStructuredCloneBuffer::adopt(JSStructuredCloneData&& data, uint32_t version,
const JSStructuredCloneCallbacks* callbacks,
@@ -2651,7 +2624,7 @@ JSAutoStructuredCloneBuffer::adopt(JSStructuredCloneData&& data, uint32_t versio
clear();
data_ = Move(data);
version_ = version;
- data_.setOptionalCallbacks(callbacks, closure, OwnTransferablePolicy::OwnsTransferablesIfAny);
+ data_.setCallbacks(callbacks, closure, OwnTransferablePolicy::OwnsTransferablesIfAny);
}
void
@@ -2668,7 +2641,7 @@ JSAutoStructuredCloneBuffer::steal(JSStructuredCloneData* data, uint32_t* versio
*data = Move(data_);
version_ = 0;
- data_.setOptionalCallbacks(nullptr, nullptr, OwnTransferablePolicy::NoTransferables);
+ data_.setCallbacks(nullptr, nullptr, OwnTransferablePolicy::NoTransferables);
}
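
Taken together, steal() and adopt() let an embedder hand serialized data, along with responsibility for any transferables, from one buffer to another. A hedged usage sketch; the argument lists follow the signatures visible in this diff, the helper name is illustrative, and both buffers are assumed to share the same scope:

    #include "jsapi.h"
    #include "js/StructuredClone.h"
    #include "mozilla/Move.h"

    static void
    HandOffCloneData(JSAutoStructuredCloneBuffer& from, JSAutoStructuredCloneBuffer& to)
    {
        JSStructuredCloneData data(from.scope());
        uint32_t version = 0;
        const JSStructuredCloneCallbacks* callbacks = nullptr;
        void* closure = nullptr;

        // steal() detaches the bytes and resets `from`; adopt() takes ownership,
        // including OwnsTransferablesIfAny, exactly as the code above sets it.
        from.steal(&data, &version, &callbacks, &closure);
        to.adopt(mozilla::Move(data), version, callbacks, closure);
    }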
bool
@@ -2782,5 +2755,5 @@ JS_ObjectNotWritten(JSStructuredCloneWriter* w, HandleObject obj)
JS_PUBLIC_API(JS::StructuredCloneScope)
JS_GetStructuredCloneScope(JSStructuredCloneWriter* w)
{
- return w->cloneScope();
+ return w->output().scope();
}