feat: Update V8 to 9.2.230.18 #121

Merged
merged 1 commit on Jul 23, 2021
1 change: 1 addition & 0 deletions NativeScript/NativeScript-Prefix.pch
@@ -7,6 +7,7 @@
#define SIZEOF_OFF_T 8
#endif

#define V8_ENABLE_WEBASSEMBLY 0
#define V8_CONCURRENT_MARKING 1
#define V8_ARRAY_BUFFER_EXTENSION 1
#define V8_ENABLE_LAZY_SOURCE_POSITIONS 1
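The prefix header predefines V8's build-configuration macros for the embedder; the new V8_ENABLE_WEBASSEMBLY define is set to 0, matching a V8 build with WebAssembly disabled. A minimal sketch of how such a flag is typically consumed downstream (the guarded bodies are illustrative, not taken from V8):

#include <cstdio>

#define V8_ENABLE_WEBASSEMBLY 0  // mirrors the prefix header above

void ReportWasmSupport() {
#if V8_ENABLE_WEBASSEMBLY
  std::puts("WebAssembly paths compiled in");
#else
  std::puts("WebAssembly paths compiled out");  // branch taken with the value above
#endif
}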
15 changes: 9 additions & 6 deletions NativeScript/include/cppgc/common.h
@@ -10,15 +10,18 @@

namespace cppgc {

// Indicator for the stack state of the embedder.
/**
* Indicator for the stack state of the embedder.
*/
enum class EmbedderStackState {
/**
* Stack may contain interesting heap pointers.
*/
kMayContainHeapPointers,
/**
* Stack does not contain any interesting heap pointers.
*/
kNoHeapPointers,
kUnknown V8_ENUM_DEPRECATE_SOON("Use kMayContainHeapPointers") =
kMayContainHeapPointers,
kNonEmpty V8_ENUM_DEPRECATE_SOON("Use kMayContainHeapPointers") =
kMayContainHeapPointers,
kEmpty V8_ENUM_DEPRECATE_SOON("Use kNoHeapPointers") = kNoHeapPointers,
};

} // namespace cppgc
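The hunk above drops the deprecated kUnknown/kNonEmpty/kEmpty aliases, leaving only the two documented enumerators. A hedged migration sketch for embedder code (the helper below is hypothetical, not part of cppgc):

#include "cppgc/common.h"

// Hypothetical embedder helper: pick the stack state without relying on the
// removed aliases.
cppgc::EmbedderStackState CurrentStackState(bool stack_may_reference_heap) {
  return stack_may_reference_heap
             ? cppgc::EmbedderStackState::kMayContainHeapPointers  // was kNonEmpty / kUnknown
             : cppgc::EmbedderStackState::kNoHeapPointers;         // was kEmpty
}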
2 changes: 2 additions & 0 deletions NativeScript/include/libplatform/v8-tracing.h
@@ -125,6 +125,8 @@ class V8_PLATFORM_EXPORT TraceWriter {
static TraceWriter* CreateJSONTraceWriter(std::ostream& stream,
const std::string& tag);

static TraceWriter* CreateSystemInstrumentationTraceWriter();

private:
// Disallow copy and assign
TraceWriter(const TraceWriter&) = delete;
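CreateSystemInstrumentationTraceWriter() is a new factory alongside the existing JSON writers. A hedged sketch of plugging it in, assuming the ring-buffer factory TraceBuffer::CreateTraceBufferRingBuffer from the same header; the chunk count is arbitrary and availability of system instrumentation depends on the platform V8 was built for:

#include "libplatform/v8-tracing.h"

using v8::platform::tracing::TraceBuffer;
using v8::platform::tracing::TraceWriter;

// Sketch: back a ring-buffer trace buffer with the new system writer, the same
// way a JSON writer would be used.
TraceBuffer* MakeSystemInstrumentationBuffer() {
  TraceWriter* writer = TraceWriter::CreateSystemInstrumentationTraceWriter();
  return TraceBuffer::CreateTraceBufferRingBuffer(/*max_chunks=*/1024, writer);
}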
15 changes: 14 additions & 1 deletion NativeScript/include/v8-inspector.h
@@ -105,7 +105,9 @@ class V8_EXPORT V8StackTrace {
virtual StringView topSourceURL() const = 0;
virtual int topLineNumber() const = 0;
virtual int topColumnNumber() const = 0;
virtual StringView topScriptId() const = 0;
virtual int topScriptId() const = 0;
V8_DEPRECATE_SOON("Use V8::StackTrace::topScriptId() instead.")
int topScriptIdAsInteger() const { return topScriptId(); }
virtual StringView topFunctionName() const = 0;

virtual ~V8StackTrace() = default;
@@ -129,6 +131,10 @@ class V8_EXPORT V8InspectorSession {
virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
virtual ~Inspectable() = default;
};
class V8_EXPORT CommandLineAPIScope {
public:
virtual ~CommandLineAPIScope() = default;
};
virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;

// Dispatching protocol messages.
@@ -138,6 +144,9 @@ class V8_EXPORT V8InspectorSession {
virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
supportedDomains() = 0;

virtual std::unique_ptr<V8InspectorSession::CommandLineAPIScope>
initializeCommandLineAPIScope(int executionContextId) = 0;

// Debugger actions.
virtual void schedulePauseOnNextStatement(StringView breakReason,
StringView breakDetails) = 0;
@@ -228,6 +237,10 @@ class V8_EXPORT V8InspectorClient {
const StringView& resourceName) {
return nullptr;
}

// The caller would defer to generating a random 64 bit integer if
// this method returns 0.
virtual int64_t generateUniqueId() { return 0; }
};

// These stack trace ids are intended to be passed between debuggers and be
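The client gains a generateUniqueId() hook (returning 0 keeps V8's fallback of generating a random 64-bit id), and sessions gain initializeCommandLineAPIScope(). A minimal sketch of an embedder client overriding the new hook; the counter scheme is illustrative only:

#include "v8-inspector.h"

// Illustrative override: any process-unique nonzero value is acceptable.
class InspectorClient : public v8_inspector::V8InspectorClient {
 public:
  int64_t generateUniqueId() override { return ++last_id_; }

 private:
  int64_t last_id_ = 0;
};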
95 changes: 72 additions & 23 deletions NativeScript/include/v8-internal.h
@@ -33,13 +33,21 @@ const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);
const int kApiSizetSize = sizeof(size_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for forwarding pointers stored in object headers.
// 0b00 at the lowest 2 bits in the header indicates that the map word is a
// forwarding pointer.
const int kForwardingTag = 0;
const int kForwardingTagSize = 2;
const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
@@ -120,23 +128,28 @@ constexpr bool HeapSandboxIsEnabled() {

using ExternalPointer_t = Address;

// If the heap sandbox is enabled, these tag values will be XORed with the
// If the heap sandbox is enabled, these tag values will be ORed with the
// external pointers in the external pointer table to prevent use of pointers of
// the wrong type.
enum ExternalPointerTag : Address {
kExternalPointerNullTag = static_cast<Address>(0ULL),
kArrayBufferBackingStoreTag = static_cast<Address>(1ULL << 48),
kTypedArrayExternalPointerTag = static_cast<Address>(2ULL << 48),
kDataViewDataPointerTag = static_cast<Address>(3ULL << 48),
kExternalStringResourceTag = static_cast<Address>(4ULL << 48),
kExternalStringResourceDataTag = static_cast<Address>(5ULL << 48),
kForeignForeignAddressTag = static_cast<Address>(6ULL << 48),
kNativeContextMicrotaskQueueTag = static_cast<Address>(7ULL << 48),
// TODO(v8:10391, saelo): Currently has to be zero so that raw zero values are
// also nullptr
kEmbedderDataSlotPayloadTag = static_cast<Address>(0ULL << 48),
// the wrong type. When a pointer is loaded, it is ANDed with the inverse of the
// expected type's tag. The tags are constructed in a way that guarantees that a
// failed type check will result in one or more of the top bits of the pointer
being set, rendering the pointer inaccessible. This construction allows
// performing the type check and removing GC marking bits from the pointer at
// the same time.
enum ExternalPointerTag : uint64_t {
kExternalPointerNullTag = 0x0000000000000000,
kArrayBufferBackingStoreTag = 0x00ff000000000000, // 0b000000011111111
kTypedArrayExternalPointerTag = 0x017f000000000000, // 0b000000101111111
kDataViewDataPointerTag = 0x01bf000000000000, // 0b000000110111111
kExternalStringResourceTag = 0x01df000000000000, // 0b000000111011111
kExternalStringResourceDataTag = 0x01ef000000000000, // 0b000000111101111
kForeignForeignAddressTag = 0x01f7000000000000, // 0b000000111110111
kNativeContextMicrotaskQueueTag = 0x01fb000000000000, // 0b000000111111011
kEmbedderDataSlotPayloadTag = 0x01fd000000000000, // 0b000000111111101
};

constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
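The comment above describes the new tag layout: a pointer is ORed with its type tag on store and ANDed with the inverted tag on load, so a mismatched tag leaves some of the top 16 bits set. A standalone sketch of that arithmetic, using tag values from the enum above (not V8's actual accessor code):

#include <cstdint>

constexpr uint64_t kArrayBufferBackingStoreTag   = 0x00ff000000000000;
constexpr uint64_t kTypedArrayExternalPointerTag = 0x017f000000000000;

constexpr uint64_t StoreTagged(uint64_t pointer, uint64_t tag) {
  return pointer | tag;  // the tag occupies otherwise-unused top bits
}

constexpr uint64_t LoadTagged(uint64_t entry, uint64_t expected_tag) {
  return entry & ~expected_tag;  // correct tag: top bits cleared; wrong tag: some survive
}

// A matching tag round-trips the pointer; a mismatched tag leaves a top bit
// set, so the result is no longer a usable pointer.
static_assert(LoadTagged(StoreTagged(0x1000, kArrayBufferBackingStoreTag),
                         kArrayBufferBackingStoreTag) == 0x1000, "");
static_assert((LoadTagged(StoreTagged(0x1000, kArrayBufferBackingStoreTag),
                          kTypedArrayExternalPointerTag) >> 48) != 0, "");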
@@ -177,6 +190,14 @@ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
* depend on functions and constants defined here.
*/
class Internals {
#ifdef V8_MAP_PACKING
V8_INLINE static constexpr internal::Address UnpackMapWord(
internal::Address mapword) {
// TODO(wenyuzhao): Clear header metadata.
return mapword ^ kMapWordXorMask;
}
#endif

public:
// These values match non-compiler-dependent values defined within
// the implementation of v8.
@@ -207,8 +228,14 @@ class Internals {
kNumIsolateDataSlots * kApiSystemPointerSize;
static const int kIsolateFastCCallCallerPcOffset =
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
static const int kIsolateStackGuardOffset =
static const int kIsolateFastApiCallTargetOffset =
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
static const int kIsolateCageBaseOffset =
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateLongTaskStatsCounterOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kIsolateStackGuardOffset =
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
static const int kIsolateRootsOffset =
kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

@@ -251,6 +278,17 @@ class Internals {
// incremental GC once the external memory reaches this limit.
static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

#ifdef V8_MAP_PACKING
static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
// The lowest two bits of mapwords are always `0b10`
static const uintptr_t kMapWordSignature = 0b10;
// XORing a (non-compressed) map with this mask ensures that the two
// low-order bits are 0b10. The 0 at the end makes this look like a Smi,
// although real Smis have all lower 32 bits unset. We only rely on these
// values passing as Smis in very few places.
static const int kMapWordXorMask = 0b11;
#endif

V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
@@ -277,6 +315,9 @@ class Internals {
V8_INLINE static int GetInstanceType(const internal::Address obj) {
typedef internal::Address A;
A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
#ifdef V8_MAP_PACKING
map = UnpackMapWord(map);
#endif
return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
}

@@ -327,6 +368,12 @@ class Internals {
return *reinterpret_cast<void* const*>(addr);
}

V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
kIsolateLongTaskStatsCounterOffset;
++(*reinterpret_cast<size_t*>(addr));
}

V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
kIsolateRootsOffset +
@@ -356,8 +403,9 @@ class Internals {
internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
internal::Address base =
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
Expand Down Expand Up @@ -409,18 +457,19 @@ class Internals {

#ifdef V8_COMPRESS_POINTERS
// See v8:7703 or src/ptr-compr.* for details about pointer compression.
static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;

V8_INLINE static internal::Address GetRootFromOnHeapAddress(
V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
internal::Address addr) {
return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
}

V8_INLINE static internal::Address DecompressTaggedAnyField(
internal::Address heap_object_ptr, uint32_t value) {
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
internal::Address base =
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
}

#endif // V8_COMPRESS_POINTERS
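Another v8-internal.h addition above is the V8_MAP_PACKING scheme: map words (tagged heap-object pointers ending in 0b01) are XORed with kMapWordXorMask so that they end in the 0b10 signature, and XORed again to unpack. A hedged standalone sketch of that round trip, mirroring the UnpackMapWord helper shown above rather than V8's actual implementation:

#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 0x1;      // tagged pointers end in 0b01
constexpr uintptr_t kMapWordSignature = 0b10;  // packed map words end in 0b10
constexpr uintptr_t kMapWordXorMask = 0b11;

constexpr uintptr_t PackMapWord(uintptr_t tagged_map_pointer) {
  return tagged_map_pointer ^ kMapWordXorMask;
}

constexpr uintptr_t UnpackMapWord(uintptr_t packed_map_word) {
  return packed_map_word ^ kMapWordXorMask;  // XOR is its own inverse
}

static_assert((PackMapWord(0x4000 | kHeapObjectTag) & 0b11) == kMapWordSignature, "");
static_assert(UnpackMapWord(PackMapWord(0x4000 | kHeapObjectTag)) ==
              (0x4000 | kHeapObjectTag), "");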
23 changes: 5 additions & 18 deletions NativeScript/include/v8-platform.h
@@ -181,9 +181,8 @@ class JobDelegate {
/**
* Returns true if the current task is called from the thread currently
* running JobHandle::Join().
* TODO(etiennep): Make pure virtual once custom embedders implement it.
*/
virtual bool IsJoiningThread() const { return false; }
virtual bool IsJoiningThread() const = 0;
};

/**
@@ -220,27 +219,20 @@ class JobHandle {
* Forces all existing workers to yield ASAP but doesn’t wait for them.
* Warning, this is dangerous if the Job's callback is bound to or has access
* to state which may be deleted after this call.
* TODO(etiennep): Cleanup once implemented by all embedders.
*/
virtual void CancelAndDetach() { Cancel(); }
virtual void CancelAndDetach() = 0;

/**
* Returns true if there's currently no work pending and no worker running.
* TODO(etiennep): Deprecate IsCompleted in favor of IsActive once implemented
* by all embedders.
* Returns true if there's any work pending or any worker running.
*/
virtual bool IsCompleted() = 0;
virtual bool IsActive() { return !IsCompleted(); }
virtual bool IsActive() = 0;

/**
* Returns true if associated with a Job and other methods may be called.
* Returns false after Join() or Cancel() was called. This may return true
* even if no workers are running and IsCompleted() returns true
* TODO(etiennep): Deprecate IsRunning in favor of IsValid once implemented by
* all embedders.
*/
virtual bool IsRunning() = 0;
virtual bool IsValid() { return IsRunning(); }
virtual bool IsValid() = 0;

/**
* Returns true if job priority can be changed.
@@ -270,10 +262,6 @@ class JobTask {
* it must not call back any JobHandle methods.
*/
virtual size_t GetMaxConcurrency(size_t worker_count) const = 0;

// TODO(1114823): Clean up once all overrides are removed.
V8_DEPRECATED("Use the version that takes |worker_count|.")
virtual size_t GetMaxConcurrency() const { return 0; }
};

/**
Expand Down Expand Up @@ -406,7 +394,6 @@ class PageAllocator {
kNoAccess,
kRead,
kReadWrite,
// TODO(hpayer): Remove this flag. Memory should never be rwx.
kReadWriteExecute,
kReadExecute,
// Set this when reserving memory that will later require kReadWriteExecute
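On the platform side, JobDelegate::IsJoiningThread(), JobHandle::CancelAndDetach(), IsActive(), and IsValid() become pure virtual, and the parameterless GetMaxConcurrency() overload is removed, so embedder job tasks must implement the |worker_count| signature. A hedged sketch of a JobTask against the updated interface; the work-counting scheme is hypothetical:

#include <atomic>
#include <cstddef>

#include "v8-platform.h"

// Hypothetical embedder job task: drains a shared counter of work items until
// the platform asks it to yield.
class DrainQueueTask : public v8::JobTask {
 public:
  explicit DrainQueueTask(std::atomic<size_t>* remaining) : remaining_(remaining) {}

  void Run(v8::JobDelegate* delegate) override {
    while (!delegate->ShouldYield()) {
      size_t prev = remaining_->fetch_sub(1, std::memory_order_relaxed);
      if (prev == 0) {  // nothing left; undo the decrement and stop
        remaining_->fetch_add(1, std::memory_order_relaxed);
        return;
      }
      // ... process one hypothetical work item ...
    }
  }

  size_t GetMaxConcurrency(size_t worker_count) const override {
    // Request as many workers as there are items left; the platform decides
    // how many, up to this value, actually run. |worker_count| is unused here.
    return remaining_->load(std::memory_order_relaxed);
  }

 private:
  std::atomic<size_t>* remaining_;
};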