// v8/isolate.rs
// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
2use crate::Array;
3use crate::CallbackScope;
4use crate::Context;
5use crate::Data;
6use crate::FixedArray;
7use crate::Function;
8use crate::FunctionCodeHandling;
9use crate::HandleScope;
10use crate::Local;
11use crate::Message;
12use crate::Module;
13use crate::Object;
14use crate::Platform;
15use crate::Promise;
16use crate::PromiseResolver;
17use crate::StartupData;
18use crate::String;
19use crate::V8::get_current_platform;
20use crate::Value;
21use crate::binding::v8__HeapSpaceStatistics;
22use crate::binding::v8__HeapStatistics;
23use crate::binding::v8__Isolate__UseCounterFeature;
24pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
25use crate::cppgc::Heap;
26use crate::external_references::ExternalReference;
27use crate::function::FunctionCallbackInfo;
28use crate::gc::GCCallbackFlags;
29use crate::gc::GCType;
30use crate::handle::FinalizerCallback;
31use crate::handle::FinalizerMap;
32use crate::isolate_create_params::CreateParams;
33use crate::isolate_create_params::raw;
34use crate::promise::PromiseRejectMessage;
35use crate::scope::data::ScopeData;
36use crate::snapshot::SnapshotCreator;
37use crate::support::MapFnFrom;
38use crate::support::MapFnTo;
39use crate::support::Opaque;
40use crate::support::ToCFn;
41use crate::support::UnitType;
42use crate::support::char;
43use crate::support::int;
44use crate::support::size_t;
45use crate::wasm::WasmStreaming;
46use crate::wasm::trampoline;
47use std::ffi::CStr;
48
49use std::any::Any;
50use std::any::TypeId;
51use std::borrow::Cow;
52use std::collections::HashMap;
53use std::ffi::c_void;
54use std::fmt::{self, Debug, Formatter};
55use std::hash::BuildHasher;
56use std::hash::Hasher;
57use std::mem::MaybeUninit;
58use std::mem::align_of;
59use std::mem::forget;
60use std::mem::needs_drop;
61use std::mem::size_of;
62use std::ops::Deref;
63use std::ops::DerefMut;
64use std::ptr;
65use std::ptr::NonNull;
66use std::ptr::addr_of_mut;
67use std::ptr::drop_in_place;
68use std::ptr::null_mut;
69use std::sync::Arc;
70use std::sync::Mutex;
71
/// Policy for running microtasks:
///   - explicit: microtasks are invoked with the
///     Isolate::PerformMicrotaskCheckpoint() method;
///   - auto: microtasks are invoked when the script call depth decrements
///     to zero.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  // The explicit discriminants keep the gap at 1 so values line up with the
  // C++ v8::MicrotasksPolicy enum across the repr(C) FFI boundary.
  Auto = 2,
}
84
/// Memory pressure level for the MemoryPressureNotification.
/// None hints V8 that there is no memory pressure.
/// Moderate hints V8 to speed up incremental garbage collection at the cost
/// of higher latency due to garbage collection pauses.
/// Critical hints V8 to free memory as soon as possible. Garbage collection
/// pauses at this level will be large.
// repr(C) with explicit discriminants: passed to C++ by value (see
// v8__Isolate__MemoryPressureNotification, which takes the level as a u8).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}
98
/// Time zone redetection indicator for
/// DateTimeConfigurationChangeNotification.
///
/// kSkip indicates V8 that the notification should not trigger redetecting
/// host time zone. kRedetect indicates V8 that host time zone should be
/// redetected, and used to set the default time zone.
///
/// The host time zone detection may require file system access or similar
/// operations unlikely to be available inside a sandbox. If v8 is run inside a
/// sandbox, the host time zone has to be detected outside the sandbox before
/// calling DateTimeConfigurationChangeNotification function.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}
116
/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
// Discriminants are implicit (0..=3); repr(C) keeps them interchangeable with
// the corresponding C++ enum when passed through the PromiseHook callback.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}
139
/// Types of garbage collections that can be requested via
/// [`Isolate::request_garbage_collection_for_testing`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}
148
/// Callback invoked for messages (e.g. uncaught exceptions) delivered to a
/// listener registered with `Isolate::add_message_listener`.
pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  /// Bit mask selecting which console/message severities a listener added
  /// with `AddMessageListenerWithErrorLevel` receives.
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    // Union of all five individual levels above.
    const ALL = (1 << 5) - 1;
  }
}
163
/// Hook invoked at promise lifecycle events; see [`PromiseHookType`] for when
/// each event fires.
pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

/// Callback invoked when a promise is rejected (or a rejection is handled
/// after the fact); the message carries the rejection kind and value.
pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

/// Outcome flag passed to [`WasmAsyncResolvePromiseCallback`], telling the
/// embedder whether to resolve or reject the given promise resolver.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}
/// Callback used by V8 to settle promises produced by async Wasm operations.
pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  *mut Isolate,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);
182
/// Returns whether Wasm code generation is allowed in the given context; the
/// string is the source about to be compiled.
pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
196
/// HostImportModuleDynamicallyCallback is called when we require the embedder
/// to load a module. This is used as part of the dynamic import syntax.
///
/// The host_defined_options are metadata provided by the host environment, which may be used
/// to customize or further specify how the module should be imported.
///
/// The resource_name is the identifier or path for the module or script making the import request.
///
/// The specifier is the name of the module that should be imported.
///
/// The import_attributes are import assertions for this request in the form:
/// [key1, value1, key2, value2, ...] where the keys and values are of type
/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
/// returned from ModuleRequest::GetImportAssertions(), this array does not
/// contain the source Locations of the assertions.
///
/// The embedder must compile, instantiate, evaluate the Module, and obtain its
/// namespace object.
///
/// The Promise returned from this function is forwarded to userland JavaScript.
/// The embedder must resolve this promise with the module namespace object. In
/// case of an exception, the embedder must reject this promise with the
/// exception. If the promise creation itself fails (e.g. due to stack
/// overflow), the embedder must propagate that exception by returning an empty
/// MaybeLocal.
///
/// # Example
///
/// ```
/// fn host_import_module_dynamically_callback_example<'s>(
///   scope: &mut v8::HandleScope<'s>,
///   host_defined_options: v8::Local<'s, v8::Data>,
///   resource_name: v8::Local<'s, v8::Value>,
///   specifier: v8::Local<'s, v8::String>,
///   import_attributes: v8::Local<'s, v8::FixedArray>,
/// ) -> Option<v8::Local<'s, v8::Promise>> {
///   todo!()
/// }
/// ```
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s> FnOnce(
    &mut HandleScope<'s>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this zero-sized closure type into the raw, platform-specific
  /// `extern "C"` function pointer that V8 expects (see the blanket impl).
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}
248
// On System V targets a MaybeLocal<Promise> is returned in a register, so the
// raw callback can return `*mut Promise` directly (null == empty MaybeLocal).
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

// On Windows x64 a struct return goes through a hidden first pointer argument,
// which is also returned; hence the extra leading `*mut *mut Promise`.
// NOTE(review): this alias is `pub` here but `pub(crate)` on unix, and no
// definition exists for windows targets other than x86_64 — confirm whether
// the asymmetry / cfg gap is intentional upstream.
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
269
// Blanket impl: any zero-sized closure with the right signature can be turned
// into the raw C callback. `UnitType` guarantees the closure captures nothing,
// so `F::get()` can conjure an instance inside the adapter.
impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s> FnOnce(
      &mut HandleScope<'s>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    // Layer 1: wrap the raw Context in a CallbackScope and invoke the user
    // closure with idiomatic Rust arguments.
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = &mut unsafe { CallbackScope::new(context) };
      (F::get())(
        scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    // Layer 2 (System V ABI): translate Option<Local<Promise>> into the
    // register-returned `*mut Promise`, with null meaning "empty MaybeLocal".
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Layer 2 (Windows x64 ABI): the result is written through the hidden
    // return-value pointer, which is then returned as well.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
356
/// HostImportModuleWithPhaseDynamicallyCallback is called when we
/// require the embedder to load a module with a specific phase. This is used
/// as part of the dynamic import syntax.
///
/// The referrer contains metadata about the script/module that calls
/// import.
///
/// The specifier is the name of the module that should be imported.
///
/// The phase is the phase of the import requested.
///
/// The import_attributes are import attributes for this request in the form:
/// [key1, value1, key2, value2, ...] where the keys and values are of type
/// v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
/// returned from ModuleRequest::GetImportAttributes(), this array does not
/// contain the source Locations of the attributes.
///
/// The Promise returned from this function is forwarded to userland
/// JavaScript. The embedder must resolve this promise according to the phase
/// requested:
/// - For ModuleImportPhase::kSource, the promise must be resolved with a
///   compiled ModuleSource object, or rejected with a SyntaxError if the
///   module does not support source representation.
/// - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
///   ModuleNamespace object of a module that has been compiled, instantiated,
///   and evaluated.
///
/// In case of an exception, the embedder must reject this promise with the
/// exception. If the promise creation itself fails (e.g. due to stack
/// overflow), the embedder must propagate that exception by returning an empty
/// MaybeLocal.
///
/// This callback is still experimental and is only invoked for source phase
/// imports.
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s> FnOnce(
    &mut HandleScope<'s>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  /// Converts this zero-sized closure type into the raw, platform-specific
  /// `extern "C"` function pointer that V8 expects (see the blanket impl).
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}
404
// Same ABI split as RawHostImportModuleDynamicallyCallback, with the extra
// ModuleImportPhase argument: register return on System V, hidden return-value
// pointer on Windows x64.
#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

// NOTE(review): `pub` here vs `pub(crate)` on unix, and no definition for
// windows targets other than x86_64 — confirm the asymmetry is intentional.
#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;
427
// Blanket impl mirroring the one for HostImportModuleDynamicallyCallback,
// threading the additional ModuleImportPhase argument through both adapter
// layers. `UnitType` guarantees the closure is zero-sized so `F::get()` can
// materialize it inside the `extern "C"` adapter.
impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s> FnOnce(
      &mut HandleScope<'s>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    // Layer 1: enter a CallbackScope for the raw Context, then call the user
    // closure with idiomatic arguments.
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = &mut unsafe { CallbackScope::new(context) };
      (F::get())(
        scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    // Layer 2 (System V ABI): null pointer encodes an empty MaybeLocal.
    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    // Layer 2 (Windows x64 ABI): write through and return the hidden
    // return-value pointer.
    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}
521
/// `HostCreateShadowRealmContextCallback` is called each time a `ShadowRealm`
/// is being constructed. You can use [`HandleScope::get_current_context`] to
/// get the [`Context`] in which the constructor is being run.
///
/// The method combines [`Context`] creation and the implementation-defined
/// abstract operation `HostInitializeShadowRealm` into one.
///
/// The embedder should use [`Context::new`] to create a new context. If the
/// creation fails, the embedder must propagate that exception by returning
/// [`None`].
// Note: this is a plain Rust `fn` pointer (not `extern "C"`); the crate wraps
// it in a platform-specific trampoline before handing it to V8 (see the
// cfg'd `v8__Isolate__SetHostCreateShadowRealmContextCallback` declarations).
pub type HostCreateShadowRealmContextCallback =
  for<'s> fn(scope: &mut HandleScope<'s>) -> Option<Local<'s, Context>>;
534
/// GC prologue/epilogue callback carrying a user-supplied `data` pointer;
/// registered via the Add/RemoveGCPrologue/EpilogueCallback FFI functions.
pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: *mut Isolate,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

/// Callback invoked on the isolate thread in response to
/// `IsolateHandle::request_interrupt`.
pub type InterruptCallback =
  unsafe extern "C" fn(isolate: &mut Isolate, data: *mut c_void);

/// Called when the heap approaches its limit; the returned value becomes the
/// new heap limit.
pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;
550
/// Details passed to the OOM error handler. Mirrors the C++ `v8::OOMDetails`
/// struct (hence repr(C)).
#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  // `char` here is the C character type from crate::support, not Rust's char.
  pub detail: *const char,
}

/// Handler installed via `v8__Isolate__SetOOMErrorHandler`; `location` is a
/// C string identifying where the OOM occurred.
pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);
559
// Windows x64 ABI: MaybeLocal<Value> returned on the stack.
// The hidden return-value pointer is the first argument and is also returned.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
// System V i386 ABI: Local<Value> returned in hidden pointer (struct).
// The single-field repr(C) wrapper makes both cases ABI-correct.
#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

/// Re-export of the generated use-counter feature enum.
pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
/// Callback receiving use-counter events; installed via
/// `v8__Isolate__SetUseCounterCallback`.
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);
587
// Raw FFI bindings to the C++ glue layer. Each `v8__Isolate__*` symbol wraps
// the corresponding `v8::Isolate` member function; pointer mutability mirrors
// the C++ const-ness of the receiver. These are only called from the safe
// wrappers below.
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut Isolate;
  fn v8__Isolate__Dispose(this: *mut Isolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const Isolate) -> u32;
  fn v8__Isolate__GetData(isolate: *const Isolate, slot: u32) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const Isolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut Isolate);
  fn v8__Isolate__Exit(this: *mut Isolate);
  fn v8__Isolate__GetCurrent() -> *mut Isolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut Isolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut Isolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut Isolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut Isolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut Isolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut Isolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut Isolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut Isolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut Isolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut Isolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut Isolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut Isolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut Isolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut Isolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut Isolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut Isolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut Isolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut Isolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  // The shadow-realm setter has two ABI-specific shapes, matching the
  // platform-dependent return convention of MaybeLocal<Context>.
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut Isolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const Isolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const Isolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const Isolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut Isolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut Isolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut Isolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut Isolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut Isolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(isolate: *const Isolate) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut Isolate,
    r#type: usize,
  );

  // Heap profiler: streams a heap snapshot to `callback` in chunks.
  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}
758
/// Isolate represents an isolated instance of the V8 engine.  V8 isolates have
/// completely separate states.  Objects from one isolate must not be used in
/// other isolates.  The embedder can create multiple isolates and use them in
/// parallel in multiple threads.  An isolate can be entered by at most one
/// thread at any given time.  The Locker/Unlocker API must be used to
/// synchronize.
///
/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
/// constructed and exited when dropped. Because of that v8::OwnedIsolate
/// instances must be dropped in the reverse order of creation
// Opaque marker type: never constructed by value in Rust; only ever used
// through pointers returned by the `v8__Isolate__*` FFI functions.
#[repr(C)]
#[derive(Debug)]
pub struct Isolate(Opaque);
772
773impl Isolate {
  // Isolate data slots used internally by rusty_v8.
  // Slot 0 holds the IsolateAnnex pointer; slot 1 holds the current
  // ScopeData pointer. Embedder-visible slots start after these, so the
  // public get_data/set_data offset their argument by INTERNAL_DATA_SLOT_COUNT.
  const ANNEX_SLOT: u32 = 0;
  const CURRENT_SCOPE_DATA_SLOT: u32 = 1;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;
778
  // Sanity check run at initialization: V8 must provide at least as many data
  // slots as rusty_v8 reserves for itself, otherwise the internal-slot scheme
  // above would clobber nonexistent slots.
  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }
786
787  fn new_impl(params: CreateParams) -> *mut Isolate {
788    crate::V8::assert_initialized();
789    let (raw_create_params, create_param_allocations) = params.finalize();
790    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
791    let isolate = unsafe { &mut *cxx_isolate };
792    isolate.initialize(create_param_allocations);
793    cxx_isolate
794  }
795
  // Post-construction setup shared by Isolate::new and the snapshot-creator
  // paths: verifies V8's slot capacity, then installs the IsolateAnnex that
  // owns `create_param_allocations` for the isolate's lifetime.
  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }
800
  /// Creates a new isolate.  Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose().  Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  // Returns the RAII wrapper rather than Self, hence the clippy allow.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }
812
  /// Creates an isolate set up for building a fresh startup snapshot.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  /// Creates a snapshot-building isolate seeded from an existing snapshot
  /// blob, so the new snapshot extends the old one.
  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }
833
834  /// Initial configuration parameters for a new Isolate.
835  #[inline(always)]
836  pub fn create_params() -> CreateParams {
837    CreateParams::default()
838  }
839
  // Returns a handle that can be sent to other threads to request
  // termination/interrupts; it stays valid (but inert) after the isolate dies,
  // via the annex's null-isolate protocol (see dispose_annex).
  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
844
  /// See [`IsolateHandle::terminate_execution`]
  // Thin convenience wrapper: delegates through a freshly created handle.
  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  /// See [`IsolateHandle::cancel_terminate_execution`]
  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  /// See [`IsolateHandle::is_execution_terminating`]
  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }
862
  // Allocates the IsolateAnnex and stashes a leaked Arc pointer to it in
  // internal data slot 0. The asserted-null check guards against installing
  // the annex twice; the leaked refcount is reclaimed in dispose_annex.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }
872
873  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
874    // Set the `isolate` pointer inside the annex struct to null, so any
875    // IsolateHandle that outlives the isolate will know that it can't call
876    // methods on the isolate.
877    let annex = self.get_annex_mut();
878    {
879      let _lock = annex.isolate_mutex.lock().unwrap();
880      annex.isolate = null_mut();
881    }
882
883    // Clear slots and drop owned objects that were taken out of `CreateParams`.
884    let create_param_allocations =
885      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
886    annex.slots.clear();
887
888    // Run through any remaining guaranteed finalizers.
889    for finalizer in annex.finalizer_map.drain() {
890      if let FinalizerCallback::Guaranteed(callback) = finalizer {
891        callback();
892      }
893    }
894
895    // Subtract one from the Arc<IsolateAnnex> reference count.
896    unsafe { Arc::from_raw(annex) };
897    self.set_data(0, null_mut());
898
899    create_param_allocations
900  }
901
  // Shared borrow of the IsolateAnnex stored in internal slot 0.
  // Panics if the annex has not been created yet (or was already disposed).
  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  // Mutable counterpart of `get_annex`; same panic conditions. Soundness
  // relies on `&mut self` guaranteeing exclusive access to the isolate.
  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }
917
918  pub(crate) fn set_snapshot_creator(
919    &mut self,
920    snapshot_creator: SnapshotCreator,
921  ) {
922    let prev = self
923      .get_annex_mut()
924      .maybe_snapshot_creator
925      .replace(snapshot_creator);
926    assert!(prev.is_none());
927  }
928
  /// Shared access to the finalizer registry stored in the annex.
  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }
932
  /// Exclusive access to the finalizer registry stored in the annex.
  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }
936
  /// Returns a new `Arc` to this isolate's annex, bumping the reference
  /// count so the annex can outlive the isolate (used by `IsolateHandle`).
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    // Reconstitute the Arc leaked by `create_annex`, clone it to increment
    // the refcount, then leak the clone again so the isolate's own reference
    // remains outstanding. Net effect: refcount +1, one Arc returned.
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
943
  /// Retrieve embedder-specific data from the isolate.
  /// Returns NULL if SetData has never been called for the given `slot`.
  pub fn get_data(&self, slot: u32) -> *mut c_void {
    // Public slots start after the slots rusty_v8 reserves for itself
    // (annex pointer, current scope data).
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }
949
  /// Associate embedder-specific data with the isolate. `slot` has to be
  /// between 0 and `Isolate::get_number_of_data_slots()`.
  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    // Offset past the rusty_v8-internal slots so embedder slot 0 does not
    // collide with the annex or scope-data slots.
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }
956
  /// Returns the maximum number of available embedder data slots. Valid slots
  /// are in the range of `0 <= n < Isolate::get_number_of_data_slots()`.
  pub fn get_number_of_data_slots(&self) -> u32 {
    // V8 reports the raw total; subtract the slots rusty_v8 reserves for its
    // own bookkeeping so callers only see the usable range.
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }
963
  /// Raw slot read, including the rusty_v8-internal slots (no offset applied).
  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self, slot) }
  }
968
  /// Raw slot write, including the rusty_v8-internal slots (no offset applied).
  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self, slot, data) }
  }
973
  /// Installs the root `ScopeData` for this isolate's scope stack.
  pub(crate) fn init_scope_root(&mut self) {
    ScopeData::new_root(self);
  }
977
  /// Tears down the root `ScopeData` installed by `init_scope_root`.
  pub(crate) fn dispose_scope_root(&mut self) {
    ScopeData::drop_root(self);
  }
981
  /// Returns a pointer to the `ScopeData` struct for the current scope.
  #[inline(always)]
  pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
    // A null slot means no scope is currently entered; the cast recovers the
    // `ScopeData` pointee type from the untyped `c_void` slot.
    let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
    NonNull::new(scope_data_ptr).map(NonNull::cast)
  }
988
989  /// Updates the slot that stores a `ScopeData` pointer for the current scope.
990  #[inline(always)]
991  pub(crate) fn set_current_scope_data(
992    &mut self,
993    scope_data: Option<NonNull<ScopeData>>,
994  ) {
995    let scope_data_ptr = scope_data
996      .map(NonNull::cast)
997      .map_or_else(null_mut, NonNull::as_ptr);
998    self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
999  }
1000
1001  /// Get a reference to embedder data added with `set_slot()`.
1002  #[inline(always)]
1003  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
1004    self
1005      .get_annex()
1006      .slots
1007      .get(&TypeId::of::<T>())
1008      .map(|slot| unsafe { slot.borrow::<T>() })
1009  }
1010
1011  /// Get a mutable reference to embedder data added with `set_slot()`.
1012  #[inline(always)]
1013  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
1014    self
1015      .get_annex_mut()
1016      .slots
1017      .get_mut(&TypeId::of::<T>())
1018      .map(|slot| unsafe { slot.borrow_mut::<T>() })
1019  }
1020
1021  /// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
1022  /// with an Isolate.
1023  ///
1024  /// This method gives ownership of value to the Isolate. Exactly one object of
1025  /// each type can be associated with an Isolate. If called more than once with
1026  /// an object of the same type, the earlier version will be dropped and
1027  /// replaced.
1028  ///
1029  /// Returns true if value was set without replacing an existing value.
1030  ///
1031  /// The value will be dropped when the isolate is dropped.
1032  #[inline(always)]
1033  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
1034    self
1035      .get_annex_mut()
1036      .slots
1037      .insert(TypeId::of::<T>(), RawSlot::new(value))
1038      .is_none()
1039  }
1040
1041  /// Removes the embedder data added with `set_slot()` and returns it if it exists.
1042  #[inline(always)]
1043  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
1044    self
1045      .get_annex_mut()
1046      .slots
1047      .remove(&TypeId::of::<T>())
1048      .map(|slot| unsafe { slot.into_inner::<T>() })
1049  }
1050
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting.  Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  ///
  /// NOTE(review): V8 expects every Enter to be balanced by a matching Exit
  /// on the same thread — confirm against the V8 Isolate API contract.
  #[inline(always)]
  pub unsafe fn enter(&mut self) {
    unsafe {
      v8__Isolate__Enter(self);
    }
  }
1063
  /// Exits this isolate by restoring the previously entered one in the
  /// current thread.  The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  ///
  /// The caller must uphold the "Requires" condition above; this isolate must
  /// be the one currently entered on this thread.
  #[inline(always)]
  pub unsafe fn exit(&mut self) {
    unsafe {
      v8__Isolate__Exit(self);
    }
  }
1078
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to guide heuristics.
  /// It is allowed to call this function from another thread while
  /// the isolate is executing long running JavaScript code.
  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    // The enum discriminant is passed as a raw byte across the FFI boundary.
    unsafe { v8__Isolate__MemoryPressureNotification(self, level as u8) }
  }
1087
  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    // Thin wrapper over the V8 Isolate::ClearKeptObjects entry point.
    unsafe { v8__Isolate__ClearKeptObjects(self) }
  }
1103
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    // Thin wrapper over the V8 Isolate::LowMemoryNotification entry point.
    unsafe { v8__Isolate__LowMemoryNotification(self) }
  }
1110
1111  /// Get statistics about the heap memory usage.
1112  #[inline(always)]
1113  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
1114    let inner = unsafe {
1115      let mut s = MaybeUninit::zeroed();
1116      v8__Isolate__GetHeapStatistics(self, s.as_mut_ptr());
1117      s.assume_init()
1118    };
1119    HeapStatistics(inner)
1120  }
1121
  /// Returns the number of spaces in the heap.
  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    // Used as the exclusive upper bound for `get_heap_space_statistics`.
    unsafe { v8__Isolate__NumberOfHeapSpaces(self) }
  }
1127
1128  /// Get the memory usage of a space in the heap.
1129  ///
1130  /// \param space_statistics The HeapSpaceStatistics object to fill in
1131  ///   statistics.
1132  /// \param index The index of the space to get statistics from, which ranges
1133  ///   from 0 to NumberOfHeapSpaces() - 1.
1134  /// \returns true on success.
1135  #[inline(always)]
1136  pub fn get_heap_space_statistics(
1137    &mut self,
1138    index: usize,
1139  ) -> Option<HeapSpaceStatistics> {
1140    let inner = unsafe {
1141      let mut s = MaybeUninit::zeroed();
1142      if !v8__Isolate__GetHeapSpaceStatistics(self, s.as_mut_ptr(), index) {
1143        return None;
1144      }
1145      s.assume_init()
1146    };
1147    Some(HeapSpaceStatistics(inner))
1148  }
1149
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` caps the number of stack frames captured, per the V8 API.
  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self,
        capture,
        frame_limit,
      );
    }
  }
1166
  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  ///
  /// Returns the boolean reported by V8's AddMessageListener.
  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self, callback) }
  }
1177
  /// Adds a message listener for the specified message levels.
  ///
  /// Returns the boolean reported by V8's AddMessageListenerWithErrorLevel.
  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self,
        callback,
        message_levels,
      )
    }
  }
1193
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(self, callback.map_fn_to());
    };
  }
1214
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    // Registers the hook with V8 via FFI.
    unsafe { v8__Isolate__SetPromiseHook(self, hook) }
  }
1221
  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    // Registers the callback with V8 via FFI.
    unsafe { v8__Isolate__SetPromiseRejectCallback(self, callback) }
  }
1231
  /// Registers a `WasmAsyncResolvePromiseCallback` with V8.
  /// NOTE(review): per the V8 API of the same name, this is invoked to
  /// resolve/reject promises produced by async Wasm operations — confirm.
  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe { v8__Isolate__SetWasmAsyncResolvePromiseCallback(self, callback) }
  }
1239
  /// Registers an `AllowWasmCodeGenerationCallback` with V8.
  /// NOTE(review): per the V8 API of the same name, V8 consults this callback
  /// before generating Wasm code in a context — confirm.
  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(self, callback);
    }
  }
1249
  #[inline(always)]
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(self, callback);
    }
  }
1261
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      // `to_c_fn` produces the C-ABI function pointer for this callback type.
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self,
        callback.to_c_fn(),
      );
    }
  }
1276
  /// This specifies the callback called by the upcoming dynamic
  /// import() and import.source() language feature to load modules.
  ///
  /// This API is experimental and is expected to be changed or removed in the
  /// future. The callback is currently only called for source-phase
  /// imports. Evaluation-phase imports use the existing
  /// HostImportModuleDynamicallyCallback callback.
  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      // `to_c_fn` produces the C-ABI function pointer for this callback type.
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self,
        callback.to_c_fn(),
      );
    }
  }
1296
  /// This specifies the callback called by the upcoming `ShadowRealm`
  /// construction language feature to retrieve host created globals.
  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    // C trampoline: fetches the Rust callback out of the isolate slot it was
    // stored in (below) and invokes it.
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let mut scope = unsafe { CallbackScope::new(initiator_context) };
      // The slot is guaranteed populated: this trampoline is only registered
      // after `set_slot(callback)` has succeeded.
      let callback = scope
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      // Null represents an empty MaybeLocal<Context> on the C++ side.
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    // Windows x64 ABI: MaybeLocal<Context> must be returned on the stack.
    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    // On repeat calls, replacing the slot value suffices; the C-side
    // trampoline only needs to be registered the first time.
    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self,
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self,
          rust_shadow_realm_callback,
        );
      }
    }
  }
1344
  /// Sets a callback for counting the number of times a feature of V8 is used.
  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    // Registers the callback with V8 via FFI.
    unsafe {
      v8__Isolate__SetUseCounterCallback(self, callback);
    }
  }
1352
  /// Enables the host application to receive a notification before a
  /// garbage collection. Allocations are allowed in the callback function,
  /// but the callback is not re-entrant: if the allocation inside it will
  /// trigger the garbage collection, the callback won't be called again.
  /// It is possible to specify the GCType filter for your callback. But it is
  /// not possible to register the same callback function two times with
  /// different GCType filters.
  ///
  /// `data` is forwarded verbatim to `callback`, per the V8 API.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(self, callback, data, gc_type_filter);
    }
  }
1372
  /// This function removes callback which was installed by
  /// AddGCPrologueCallback function.
  ///
  /// The (callback, data) pair must match the one used at registration,
  /// per the V8 API.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__RemoveGCPrologueCallback(self, callback, data) }
  }
1384
  /// Enables the host application to receive a notification after a
  /// garbage collection.
  ///
  /// `data` is forwarded verbatim to `callback`, per the V8 API.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(self, callback, data, gc_type_filter);
    }
  }
1399
  /// This function removes a callback which was added by
  /// `AddGCEpilogueCallback`.
  ///
  /// The (callback, data) pair must match the one used at registration,
  /// per the V8 API.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__RemoveGCEpilogueCallback(self, callback, data) }
  }
1411
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  ///
  /// `data` is forwarded verbatim to `callback`, per the V8 API.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__AddNearHeapLimitCallback(self, callback, data) };
  }
1424
  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    // `heap_limit` is in bytes, matching the limit passed to the callback.
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(self, callback, heap_limit);
    };
  }
1439
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// Returns the value reported by V8; per the V8 API this is the adjusted
  /// total of registered external memory.
  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(self, change_in_bytes)
    }
  }
1456
  /// Returns the cppgc `Heap` attached to this isolate, if any.
  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    // SAFETY: `as_ref` maps a null return to `None`; a non-null pointer is
    // presumably valid for as long as the isolate — confirm against V8 docs.
    unsafe { v8__Isolate__GetCppHeap(self).as_ref() }
  }
1461
  /// Sets the callback V8 invokes on an out-of-memory error
  /// (V8's SetOOMErrorHandler).
  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self, callback) };
  }
1466
  /// Returns the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self) }
  }
1472
  /// Sets the policy controlling how Microtasks are invoked.
  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self, policy) }
  }
1478
  /// Runs the default MicrotaskQueue until it gets empty and perform other
  /// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
  /// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
  /// callbacks are swallowed.
  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    // Thin wrapper over V8's Isolate::PerformMicrotaskCheckpoint.
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self) }
  }
1487
  /// Enqueues the callback to the default MicrotaskQueue
  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    // Passes the raw Function pointer behind the Local across the FFI.
    unsafe { v8__Isolate__EnqueueMicrotask(self, &*microtask) }
  }
1493
  /// Set whether calling Atomics.wait (a function that may block) is allowed in
  /// this isolate. This can also be configured via
  /// CreateParams::allow_atomics_wait.
  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self, allow) }
  }
1501
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType + Fn(&mut HandleScope, Local<Value>, WasmStreaming),
  {
    // The closure value itself is never stored: `F: UnitType` means it is
    // zero-sized, so `trampoline::<F>()` can reconstruct it from the type
    // alone.
    unsafe { v8__Isolate__SetWasmStreamingCallback(self, trampoline::<F>()) }
  }
1516
  /// Notification that the embedder has changed the time zone, daylight savings
  /// time or other date / time configuration parameters. V8 keeps a cache of
  /// various values used for date / time computation. This notification will
  /// reset those cached values for the current context so that date / time
  /// configuration changes would be reflected.
  ///
  /// This API should not be called more than needed as it will negatively impact
  /// the performance of date operations.
  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self,
        time_zone_detection,
      );
    }
  }
1537
  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self) }
  }
1545
1546  /// Request garbage collection with a specific embedderstack state in this
1547  /// Isolate. It is only valid to call this function if --expose_gc was
1548  /// specified.
1549  ///
1550  /// This should only be used for testing purposes and not to enforce a garbage
1551  /// collection schedule. It has strong negative impact on the garbage
1552  /// collection performance. Use IdleNotificationDeadline() or
1553  /// LowMemoryNotification() instead to influence the garbage collection
1554  /// schedule.
1555  #[inline(always)]
1556  pub fn request_garbage_collection_for_testing(
1557    &mut self,
1558    r#type: GarbageCollectionType,
1559  ) {
1560    unsafe {
1561      v8__Isolate__RequestGarbageCollectionForTesting(
1562        self,
1563        match r#type {
1564          GarbageCollectionType::Full => 0,
1565          GarbageCollectionType::Minor => 1,
1566        },
1567      );
1568    }
1569  }
1570
  /// Disposes the isolate.  The isolate must not be entered by any
  /// thread to be disposable.
  ///
  /// # Safety
  ///
  /// `self` must not be used again after this call; V8 frees the isolate.
  /// NOTE(review): the annex appears to need disposal first (see
  /// `dispose_annex`) — confirm against the drop path.
  unsafe fn dispose(&mut self) {
    // No test case in rusty_v8 show this, but there have been situations in
    // deno where dropping Annex before the states causes a segfault.
    unsafe {
      v8__Isolate__Dispose(self);
    }
  }
1580
  /// Take a heap snapshot. The callback is invoked one or more times
  /// with byte slices containing the snapshot serialized as JSON.
  /// It's the callback's responsibility to reassemble them into
  /// a single document, e.g., by writing them to a file.
  /// Note that Chrome DevTools refuses to load snapshots without
  /// a .heapsnapshot suffix.
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    // C trampoline: recovers the `F` closure from the opaque `arg` pointer
    // and forwards each serialized chunk to it.
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        // SAFETY: `arg` is the address of `callback` below, which outlives
        // the synchronous TakeHeapSnapshot call.
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          // Empty chunk: avoid `from_raw_parts` on a `data` pointer that may
          // not be valid when `size` is zero.
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(self, trampoline::<F>, arg as _);
    }
  }
1614
1615  /// Set the default context to be included in the snapshot blob.
1616  /// The snapshot will not contain the global proxy, and we expect one or a
1617  /// global object template to create one, to be provided upon deserialization.
1618  ///
1619  /// # Panics
1620  ///
1621  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1622  #[inline(always)]
1623  pub fn set_default_context(&mut self, context: Local<Context>) {
1624    let snapshot_creator = self
1625      .get_annex_mut()
1626      .maybe_snapshot_creator
1627      .as_mut()
1628      .unwrap();
1629    snapshot_creator.set_default_context(context);
1630  }
1631
1632  /// Add additional context to be included in the snapshot blob.
1633  /// The snapshot will include the global proxy.
1634  ///
1635  /// Returns the index of the context in the snapshot blob.
1636  ///
1637  /// # Panics
1638  ///
1639  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1640  #[inline(always)]
1641  pub fn add_context(&mut self, context: Local<Context>) -> usize {
1642    let snapshot_creator = self
1643      .get_annex_mut()
1644      .maybe_snapshot_creator
1645      .as_mut()
1646      .unwrap();
1647    snapshot_creator.add_context(context)
1648  }
1649
1650  /// Attach arbitrary `v8::Data` to the isolate snapshot, which can be
1651  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1652  /// deserialization. This data does not survive when a new snapshot is created
1653  /// from an existing snapshot.
1654  ///
1655  /// # Panics
1656  ///
1657  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1658  #[inline(always)]
1659  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
1660  where
1661    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1662  {
1663    let snapshot_creator = self
1664      .get_annex_mut()
1665      .maybe_snapshot_creator
1666      .as_mut()
1667      .unwrap();
1668    snapshot_creator.add_isolate_data(data)
1669  }
1670
1671  /// Attach arbitrary `v8::Data` to the context snapshot, which can be
1672  /// retrieved via `HandleScope::get_context_data_from_snapshot_once()` after
1673  /// deserialization. This data does not survive when a new snapshot is
1674  /// created from an existing snapshot.
1675  ///
1676  /// # Panics
1677  ///
1678  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
1679  #[inline(always)]
1680  pub fn add_context_data<T>(
1681    &mut self,
1682    context: Local<Context>,
1683    data: Local<T>,
1684  ) -> usize
1685  where
1686    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
1687  {
1688    let snapshot_creator = self
1689      .get_annex_mut()
1690      .maybe_snapshot_creator
1691      .as_mut()
1692      .unwrap();
1693    snapshot_creator.add_context_data(context, data)
1694  }
1695}
1696
/// Isolate-owned bookkeeping that must be able to outlive the isolate itself
/// (any `IsolateHandle` keeps a reference); stored behind a leaked `Arc` in
/// an internal isolate data slot.
pub(crate) struct IsolateAnnex {
  // Allocations moved out of `CreateParams`; returned to the caller by
  // `dispose_annex` so they outlive the isolate's disposal.
  create_param_allocations: Box<dyn Any>,
  // Embedder data registered via `Isolate::set_slot()`, keyed by `TypeId`.
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  // Pending finalizers; `Guaranteed` ones still present are run by
  // `dispose_annex`.
  finalizer_map: FinalizerMap,
  // Installed via `set_snapshot_creator`; required by the snapshot
  // `set_default_context` / `add_*` methods.
  maybe_snapshot_creator: Option<SnapshotCreator>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut Isolate,
  isolate_mutex: Mutex<()>,
}
1712
// SAFETY: the annex is shared across threads via `IsolateHandle`. The raw
// `isolate` pointer is only read/used while `isolate_mutex` is held (or on
// the isolate's own thread), and it is nulled under the mutex in
// `dispose_annex`. NOTE(review): `slots`, `finalizer_map` and
// `maybe_snapshot_creator` carry no synchronization of their own — this
// relies on them being touched only from the isolate's thread; confirm.
unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}
1715
1716impl IsolateAnnex {
1717  fn new(
1718    isolate: &mut Isolate,
1719    create_param_allocations: Box<dyn Any>,
1720  ) -> Self {
1721    Self {
1722      create_param_allocations,
1723      slots: HashMap::default(),
1724      finalizer_map: FinalizerMap::default(),
1725      maybe_snapshot_creator: None,
1726      isolate,
1727      isolate_mutex: Mutex::new(()),
1728    }
1729  }
1730}
1731
impl Debug for IsolateAnnex {
  // Prints only the isolate pointer and mutex; `slots`, `finalizer_map` and
  // `maybe_snapshot_creator` are omitted from the output.
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}
1740
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
1749
1750impl IsolateHandle {
  // This function is marked unsafe because it must be called only with either
  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
  // isolate.
  //
  // Returns null if the isolate has already been disposed (the pointer is
  // nulled under the mutex in `dispose_annex`).
  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
    self.0.isolate
  }
1757
  // Clones the isolate's annex Arc, so the handle keeps the annex (and its
  // liveness flag) alive even after the isolate is disposed.
  #[inline(always)]
  fn new(isolate: &Isolate) -> Self {
    Self(isolate.get_annex_arc())
  }
1762
1763  /// Forcefully terminate the current thread of JavaScript execution
1764  /// in the given isolate.
1765  ///
1766  /// This method can be used by any thread even if that thread has not
1767  /// acquired the V8 lock with a Locker object.
1768  ///
1769  /// Returns false if Isolate was already destroyed.
1770  #[inline(always)]
1771  pub fn terminate_execution(&self) -> bool {
1772    let _lock = self.0.isolate_mutex.lock().unwrap();
1773    if self.0.isolate.is_null() {
1774      false
1775    } else {
1776      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
1777      true
1778    }
1779  }
1780
1781  /// Resume execution capability in the given isolate, whose execution
1782  /// was previously forcefully terminated using TerminateExecution().
1783  ///
1784  /// When execution is forcefully terminated using TerminateExecution(),
1785  /// the isolate can not resume execution until all JavaScript frames
1786  /// have propagated the uncatchable exception which is generated.  This
1787  /// method allows the program embedding the engine to handle the
1788  /// termination event and resume execution capability, even if
1789  /// JavaScript frames remain on the stack.
1790  ///
1791  /// This method can be used by any thread even if that thread has not
1792  /// acquired the V8 lock with a Locker object.
1793  ///
1794  /// Returns false if Isolate was already destroyed.
1795  #[inline(always)]
1796  pub fn cancel_terminate_execution(&self) -> bool {
1797    let _lock = self.0.isolate_mutex.lock().unwrap();
1798    if self.0.isolate.is_null() {
1799      false
1800    } else {
1801      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
1802      true
1803    }
1804  }
1805
1806  /// Is V8 terminating JavaScript execution.
1807  ///
1808  /// Returns true if JavaScript execution is currently terminating
1809  /// because of a call to TerminateExecution.  In that case there are
1810  /// still JavaScript frames on the stack and the termination
1811  /// exception is still active.
1812  ///
1813  /// Returns false if Isolate was already destroyed.
1814  #[inline(always)]
1815  pub fn is_execution_terminating(&self) -> bool {
1816    let _lock = self.0.isolate_mutex.lock().unwrap();
1817    if self.0.isolate.is_null() {
1818      false
1819    } else {
1820      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
1821    }
1822  }
1823
1824  /// Request V8 to interrupt long running JavaScript code and invoke
1825  /// the given |callback| passing the given |data| to it. After |callback|
1826  /// returns control will be returned to the JavaScript code.
1827  /// There may be a number of interrupt requests in flight.
1828  /// Can be called from another thread without acquiring a |Locker|.
1829  /// Registered |callback| must not reenter interrupted Isolate.
1830  ///
1831  /// Returns false if Isolate was already destroyed.
1832  // Clippy warns that this method is dereferencing a raw pointer, but it is
1833  // not: https://github.com/rust-lang/rust-clippy/issues/3045
1834  #[allow(clippy::not_unsafe_ptr_arg_deref)]
1835  #[inline(always)]
1836  pub fn request_interrupt(
1837    &self,
1838    callback: InterruptCallback,
1839    data: *mut c_void,
1840  ) -> bool {
1841    let _lock = self.0.isolate_mutex.lock().unwrap();
1842    if self.0.isolate.is_null() {
1843      false
1844    } else {
1845      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
1846      true
1847    }
1848  }
1849}
1850
/// Same as Isolate but gets disposed when it goes out of scope.
///
/// An OwnedIsolate is entered upon creation and exited when dropped, so
/// instances must be dropped in the reverse order of creation (this is
/// asserted in the `Drop` impl).
#[derive(Debug)]
pub struct OwnedIsolate {
  // Non-null pointer to the underlying C++ v8::Isolate.
  cxx_isolate: NonNull<Isolate>,
}
1856
1857impl OwnedIsolate {
1858  pub(crate) fn new(cxx_isolate: *mut Isolate) -> Self {
1859    let mut isolate = Self::new_already_entered(cxx_isolate);
1860    unsafe {
1861      isolate.enter();
1862    }
1863    isolate
1864  }
1865
1866  pub(crate) fn new_already_entered(cxx_isolate: *mut Isolate) -> Self {
1867    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
1868    let mut owned_isolate = Self { cxx_isolate };
1869    owned_isolate.init_scope_root();
1870    owned_isolate
1871  }
1872}
1873
impl Drop for OwnedIsolate {
  /// Tears the isolate down in a fixed sequence: scope root, exit, annex,
  /// platform shutdown notification, and finally the isolate itself.
  fn drop(&mut self) {
    unsafe {
      // A still-present snapshot creator means `create_blob` was never
      // called; the snapshot would be silently lost, so treat it as a
      // usage error.
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If isolate was created using v8::Isolate::snapshot_creator, you should use v8::OwnedIsolate::create_blob before dropping an isolate."
      );
      // Safety: We need to check `this == Isolate::GetCurrent()` before calling exit()
      assert!(
        self.cxx_isolate.as_mut() as *mut Isolate == v8__Isolate__GetCurrent(),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      // Teardown sequence: scope root, exit the isolate, release the annex
      // (slots, finalizers), notify the platform, then dispose the isolate.
      self.dispose_scope_root();
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}
1895
impl OwnedIsolate {
  /// Creates a snapshot data blob.
  /// This must not be called from within a handle scope.
  ///
  /// # Panics
  ///
  /// Panics if the isolate was not created using [`Isolate::snapshot_creator`]
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    // The unwrap here is the documented panic: no snapshot creator was
    // attached to this isolate.
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    // create_param_allocations is needed during CreateBlob
    // so v8 can read external references; it must stay alive until the
    // end of this function.
    let _create_param_allocations = unsafe {
      self.dispose_scope_root();
      self.dispose_annex()
    };

    // The isolate is owned by the snapshot creator; we need to forget it
    // here as the snapshot creator will drop it when running the destructor.
    // (Forgetting also skips our Drop impl, which would otherwise exit and
    // dispose the isolate a second time.)
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}
1924
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: `cxx_isolate` is non-null (NonNull) and remains valid until
    // this OwnedIsolate is dropped, which is where it is disposed.
    unsafe { self.cxx_isolate.as_ref() }
  }
}
1931
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: same invariant as `Deref`; `&mut self` guarantees exclusive
    // access to the wrapped isolate pointer.
    unsafe { self.cxx_isolate.as_mut() }
  }
}
1937
impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Deref coercion converts `&mut OwnedIsolate` to `&mut Isolate`
    // via the `DerefMut` impl above.
    self
  }
}
1943
impl AsMut<Isolate> for Isolate {
  fn as_mut(&mut self) -> &mut Isolate {
    // Identity conversion, so generic code can accept `impl AsMut<Isolate>`
    // uniformly for both `Isolate` and `OwnedIsolate`.
    self
  }
}
1949
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
///
/// This is a transparent wrapper over the raw binding struct
/// `v8__HeapStatistics`; the accessor methods below expose its fields.
pub struct HeapStatistics(v8__HeapStatistics);
1955
impl HeapStatistics {
  /// Total size of the V8 heap, in bytes.
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  /// Portion of the heap that holds executable memory, in bytes.
  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  /// Physical memory committed for the heap, in bytes.
  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  /// Available (free) heap memory, in bytes.
  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  /// Total size of memory reserved for global handles, in bytes.
  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  /// Size of global handles currently in use, in bytes.
  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  /// Heap memory currently in use, in bytes.
  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  /// The maximum size the heap may grow to, in bytes.
  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  /// Memory V8 has obtained through malloc, in bytes.
  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  /// Memory allocated outside the V8 heap but registered with it
  /// (external memory), in bytes.
  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  /// Peak amount of memory V8 has obtained through malloc, in bytes.
  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  /// Number of native contexts currently alive.
  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  /// Number of contexts that were detached but not yet garbage collected.
  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  /// Returns whether V8 overwrites heap garbage with a bit pattern
  /// ("zapping"); a debugging feature of V8 builds.
  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}
2029
/// Statistics about a single V8 heap space; a transparent wrapper over the
/// raw binding struct `v8__HeapSpaceStatistics`.
pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
2031
2032impl HeapSpaceStatistics {
2033  pub fn space_name(&self) -> &'static CStr {
2034    unsafe { CStr::from_ptr(self.0.space_name_) }
2035  }
2036
2037  pub fn space_size(&self) -> usize {
2038    self.0.space_size_
2039  }
2040
2041  pub fn space_used_size(&self) -> usize {
2042    self.0.space_used_size_
2043  }
2044
2045  pub fn space_available_size(&self) -> usize {
2046    self.0.space_available_size_
2047  }
2048
2049  pub fn physical_space_size(&self) -> usize {
2050    self.0.physical_space_size_
2051  }
2052}
2053
// Adapts a plain Rust closure into the raw C callback signature V8 uses for
// `PrepareStackTraceCallback`. Two variants exist because the C++ return
// type (`MaybeLocal<Value>`) is passed differently depending on platform ABI.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + Fn(
      &mut HandleScope<'s>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack. The callee
  // writes the result through `ret_ptr` (a hidden return-slot pointer
  // supplied by the caller) and returns that same pointer.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      // SAFETY: presumably valid because V8 invokes this callback with a
      // live context on the isolate's thread — see CallbackScope::new.
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      // Write the resulting Local's address into the return slot.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI: the pointer-sized result is returned by value.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}
2086
/// A hasher specialized for `std::any::TypeId` values. A `TypeId` is itself
/// already the output of a hash function (a 64-bit value), so there is no
/// point in mixing its bits any further; they are passed through verbatim.
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  // Holds the single `u64` fed in via `write_u64`; `None` until then.
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  // `TypeId` hashes itself via `write_u64`, never via raw bytes; reaching
  // this method means the hasher is used with a key type it wasn't made for.
  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // Even on Rust versions with a 128-bit TypeId, its internal hash
    // function feeds only the bottom 64 bits here. Record the value and
    // check (debug builds only) that this is the first and only write.
    let previous = self.state.replace(value);
    debug_assert_eq!(previous, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    self.state.unwrap()
  }
}
2113
2114/// Factory for instances of `TypeIdHasher`. This is the type that one would
2115/// pass to the constructor of some map/set type in order to make it use
2116/// `TypeIdHasher` instead of the default hasher implementation.
2117#[derive(Copy, Clone, Default)]
2118pub(crate) struct BuildTypeIdHasher;
2119
2120impl BuildHasher for BuildTypeIdHasher {
2121  type Hasher = TypeIdHasher;
2122
2123  #[inline]
2124  fn build_hasher(&self) -> Self::Hasher {
2125    Default::default()
2126  }
2127}
2128
// Compile-time backing for `TypeIdHasher`'s assumption: `TypeId` must have
// exactly the size and alignment of a `u64` or a `u128` (it differs across
// Rust versions). If a future Rust changes `TypeId`'s layout, this fails to
// compile instead of silently mis-hashing.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};
2139
/// Type-erased storage for a single value of arbitrary `'static` type.
///
/// Values that fit within a `usize` (and are no more aligned than one) are
/// stored inline in `data`; larger values are boxed, with the `Box` itself
/// stored inline. `dtor`, when present, knows how to drop whatever is stored.
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}

// Inline storage: one pointer-sized, possibly-uninitialized word.
type RawSlotData = MaybeUninit<usize>;
// Type-erased destructor for whatever value occupies `RawSlotData`.
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();
2147
impl RawSlot {
  /// Stores `value`, boxing it first if it is too large or over-aligned to
  /// fit in the inline `usize`-sized storage.
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  // SAFETY: a valid value of type `T` must have been stored in the slot
  // earlier. There is no verification that the type param provided by the
  // caller is correct.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        // Boxed case: the inline word holds a `Box<T>`; `&Box<T>` deref-
        // coerces to `&T` at the return.
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        // Inline case: the value lives directly in `data`.
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  // Safety: see [`RawSlot::borrow`].
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      // Move the value (unboxing if necessary) out of the inline storage.
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      // Skip Drop: the value has been moved out, so running the stored
      // dtor now would double-drop it.
      forget(self);
      value
    }
  }

  // True when `T` cannot be stored inline and must go behind a `Box`.
  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  // `B` is either `T` itself or `Box<T>`; by construction (see `new`) it
  // always fits inline, which the assert double-checks.
  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    // Only register a destructor when `B` actually needs dropping.
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  // SAFETY: a valid value of type `T` or `Box<T>` must be stored in the slot.
  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}
2227
2228impl Drop for RawSlot {
2229  fn drop(&mut self) {
2230    if let Some(dtor) = self.dtor {
2231      unsafe { dtor(&mut self.data) };
2232    }
2233  }
2234}