use bitflags::bitflags;

use crate::Array;
use crate::CallbackScope;
use crate::Context;
use crate::Data;
use crate::FixedArray;
use crate::Function;
use crate::FunctionCodeHandling;
use crate::HandleScope;
use crate::Local;
use crate::Message;
use crate::Module;
use crate::Object;
use crate::Platform;
use crate::Promise;
use crate::PromiseResolver;
use crate::StartupData;
use crate::String;
use crate::V8::get_current_platform;
use crate::Value;
use crate::binding::v8__HeapSpaceStatistics;
use crate::binding::v8__HeapStatistics;
use crate::binding::v8__Isolate__UseCounterFeature;
pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
use crate::cppgc::Heap;
use crate::external_references::ExternalReference;
use crate::function::FunctionCallbackInfo;
use crate::gc::GCCallbackFlags;
use crate::gc::GCType;
use crate::handle::FinalizerCallback;
use crate::handle::FinalizerMap;
use crate::isolate_create_params::CreateParams;
use crate::isolate_create_params::raw;
use crate::promise::PromiseRejectMessage;
use crate::scope::data::ScopeData;
use crate::snapshot::SnapshotCreator;
use crate::support::MapFnFrom;
use crate::support::MapFnTo;
use crate::support::Opaque;
use crate::support::ToCFn;
use crate::support::UnitType;
use crate::support::char;
use crate::support::int;
use crate::support::size_t;
use crate::wasm::WasmStreaming;
use crate::wasm::trampoline;

use std::any::Any;
use std::any::TypeId;
use std::borrow::Cow;
use std::collections::HashMap;
use std::ffi::CStr;
use std::ffi::c_void;
use std::fmt::{self, Debug, Formatter};
use std::hash::BuildHasher;
use std::hash::Hasher;
use std::mem::MaybeUninit;
use std::mem::align_of;
use std::mem::forget;
use std::mem::needs_drop;
use std::mem::size_of;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr;
use std::ptr::NonNull;
use std::ptr::addr_of_mut;
use std::ptr::drop_in_place;
use std::ptr::null_mut;
use std::sync::Arc;
use std::sync::Mutex;

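/// Controls how microtasks are run on this isolate (a summary of V8's
/// `MicrotasksPolicy` semantics): with `Explicit`, microtasks run only when
/// `Isolate::perform_microtask_checkpoint()` is invoked; with `Auto`, they
/// run automatically whenever the embedder call depth returns to zero.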
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (a C++ RAII-style policy) is intentionally not exposed here.
  Auto = 2,
}

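/// Memory pressure level hint passed to
/// `Isolate::memory_pressure_notification()`. `Moderate` and `Critical`
/// nudge V8 toward progressively more aggressive garbage collection and
/// memory reduction.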
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum MemoryPressureLevel {
  None = 0,
  Moderate = 1,
  Critical = 2,
}

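/// Whether `date_time_configuration_change_notification()` should skip or
/// redo host time-zone detection when the embedder reports a configuration
/// change.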
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum TimeZoneDetection {
  Skip = 0,
  Redetect = 1,
}

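/// The stage of a promise's lifecycle reported to a `PromiseHook`: creation
/// (`Init`), settlement (`Resolve`), and the window around reaction-job
/// execution (`Before`/`After`).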
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum GarbageCollectionType {
  Full,
  Minor,
}

pub type MessageCallback = unsafe extern "C" fn(Local<Message>, Local<Value>);

bitflags! {
  #[derive(Debug, Clone, Copy, PartialEq, Eq)]
  #[repr(transparent)]
  pub struct MessageErrorLevel: int {
    const LOG = 1 << 0;
    const DEBUG = 1 << 1;
    const INFO = 1 << 2;
    const ERROR = 1 << 3;
    const WARNING = 1 << 4;
    const ALL = (1 << 5) - 1;
  }
}

pub type PromiseHook =
  unsafe extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);

pub type PromiseRejectCallback = unsafe extern "C" fn(PromiseRejectMessage);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub enum WasmAsyncSuccess {
  Success,
  Fail,
}

pub type WasmAsyncResolvePromiseCallback = unsafe extern "C" fn(
  *mut Isolate,
  Local<Context>,
  Local<PromiseResolver>,
  Local<Value>,
  WasmAsyncSuccess,
);

pub type AllowWasmCodeGenerationCallback =
  unsafe extern "C" fn(Local<Context>, Local<String>) -> bool;

pub type HostInitializeImportMetaObjectCallback =
  unsafe extern "C" fn(Local<Context>, Local<Module>, Local<Object>);

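/// Embedder callback that services dynamic `import()` requests. The
/// implementation must return a promise that eventually settles with the
/// module namespace object, or `None` if an exception has been scheduled.
///
/// Note: the raw callback type below differs per target because on Windows
/// x64 the C++ ABI returns the `MaybeLocal<Promise>` through a hidden
/// out-parameter rather than in a register; the `abi_adapter` shims
/// produced by `to_c_fn()` paper over that difference.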
pub trait HostImportModuleDynamicallyCallback:
  UnitType
  + for<'s> FnOnce(
    &mut HandleScope<'s>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback;
}

#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;

impl<F> HostImportModuleDynamicallyCallback for F
where
  F: UnitType
    + for<'s> FnOnce(
      &mut HandleScope<'s>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleDynamicallyCallback {
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = &mut unsafe { CallbackScope::new(context) };
      (F::get())(
        scope,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
    }

    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}

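/// Like [`HostImportModuleDynamicallyCallback`], but for dynamic imports
/// that carry a [`ModuleImportPhase`], i.e. source-phase imports in
/// addition to regular evaluation-phase imports.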
pub trait HostImportModuleWithPhaseDynamicallyCallback:
  UnitType
  + for<'s> FnOnce(
    &mut HandleScope<'s>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> Option<Local<'s, Promise>>
{
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback;
}

#[cfg(target_family = "unix")]
pub(crate) type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut Promise;

#[cfg(all(target_family = "windows", target_arch = "x86_64"))]
pub type RawHostImportModuleWithPhaseDynamicallyCallback =
  for<'s> unsafe extern "C" fn(
    *mut *mut Promise,
    Local<'s, Context>,
    Local<'s, Data>,
    Local<'s, Value>,
    Local<'s, String>,
    ModuleImportPhase,
    Local<'s, FixedArray>,
  ) -> *mut *mut Promise;

impl<F> HostImportModuleWithPhaseDynamicallyCallback for F
where
  F: UnitType
    + for<'s> FnOnce(
      &mut HandleScope<'s>,
      Local<'s, Data>,
      Local<'s, Value>,
      Local<'s, String>,
      ModuleImportPhase,
      Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>>,
{
  #[inline(always)]
  fn to_c_fn(self) -> RawHostImportModuleWithPhaseDynamicallyCallback {
    #[inline(always)]
    fn scope_adapter<'s, F: HostImportModuleWithPhaseDynamicallyCallback>(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> Option<Local<'s, Promise>> {
      let scope = &mut unsafe { CallbackScope::new(context) };
      (F::get())(
        scope,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
    }

    #[cfg(target_family = "unix")]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut Promise {
      scope_adapter::<F>(
        context,
        host_defined_options,
        resource_name,
        specifier,
        import_phase,
        import_attributes,
      )
      .map_or_else(null_mut, |return_value| return_value.as_non_null().as_ptr())
    }

    #[cfg(all(target_family = "windows", target_arch = "x86_64"))]
    #[inline(always)]
    unsafe extern "C" fn abi_adapter<
      's,
      F: HostImportModuleWithPhaseDynamicallyCallback,
    >(
      return_value: *mut *mut Promise,
      context: Local<'s, Context>,
      host_defined_options: Local<'s, Data>,
      resource_name: Local<'s, Value>,
      specifier: Local<'s, String>,
      import_phase: ModuleImportPhase,
      import_attributes: Local<'s, FixedArray>,
    ) -> *mut *mut Promise {
      unsafe {
        std::ptr::write(
          return_value,
          scope_adapter::<F>(
            context,
            host_defined_options,
            resource_name,
            specifier,
            import_phase,
            import_attributes,
          )
          .map(|return_value| return_value.as_non_null().as_ptr())
          .unwrap_or_else(null_mut),
        );
        return_value
      }
    }

    abi_adapter::<F>
  }
}

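/// Callback invoked when a `ShadowRealm` is constructed: it must create and
/// return the new realm's context, or `None` if an exception has been
/// scheduled.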
pub type HostCreateShadowRealmContextCallback =
  for<'s> fn(scope: &mut HandleScope<'s>) -> Option<Local<'s, Context>>;

pub type GcCallbackWithData = unsafe extern "C" fn(
  isolate: *mut Isolate,
  r#type: GCType,
  flags: GCCallbackFlags,
  data: *mut c_void,
);

pub type InterruptCallback =
  unsafe extern "C" fn(isolate: &mut Isolate, data: *mut c_void);

pub type NearHeapLimitCallback = unsafe extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;

#[repr(C)]
pub struct OomDetails {
  pub is_heap_oom: bool,
  pub detail: *const char,
}

pub type OomErrorCallback =
  unsafe extern "C" fn(location: *const char, details: &OomDetails);

#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    *mut *const Value,
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> *mut *const Value;

#[cfg(not(target_os = "windows"))]
#[repr(C)]
pub struct PrepareStackTraceCallbackRet(*const Value);

#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> =
  unsafe extern "C" fn(
    Local<'s, Context>,
    Local<'s, Value>,
    Local<'s, Array>,
  ) -> PrepareStackTraceCallbackRet;

pub type UseCounterFeature = v8__Isolate__UseCounterFeature;
pub type UseCounterCallback =
  unsafe extern "C" fn(&mut Isolate, UseCounterFeature);

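// Raw FFI declarations for the C++ glue functions that wrap the
// corresponding `v8::Isolate` member functions.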
unsafe extern "C" {
  fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut Isolate;
  fn v8__Isolate__Dispose(this: *mut Isolate);
  fn v8__Isolate__GetNumberOfDataSlots(this: *const Isolate) -> u32;
  fn v8__Isolate__GetData(isolate: *const Isolate, slot: u32) -> *mut c_void;
  fn v8__Isolate__SetData(
    isolate: *const Isolate,
    slot: u32,
    data: *mut c_void,
  );
  fn v8__Isolate__Enter(this: *mut Isolate);
  fn v8__Isolate__Exit(this: *mut Isolate);
  fn v8__Isolate__GetCurrent() -> *mut Isolate;
  fn v8__Isolate__MemoryPressureNotification(this: *mut Isolate, level: u8);
  fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
  fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
  fn v8__Isolate__GetHeapStatistics(
    this: *mut Isolate,
    s: *mut v8__HeapStatistics,
  );
  fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
    this: *mut Isolate,
    capture: bool,
    frame_limit: i32,
  );
  fn v8__Isolate__AddMessageListener(
    isolate: *mut Isolate,
    callback: MessageCallback,
  ) -> bool;
  fn v8__Isolate__AddMessageListenerWithErrorLevel(
    isolate: *mut Isolate,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool;
  fn v8__Isolate__AddGCPrologueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCPrologueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__AddGCEpilogueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  );
  fn v8__Isolate__RemoveGCEpilogueCallback(
    isolate: *mut Isolate,
    callback: GcCallbackWithData,
    data: *mut c_void,
  );
  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut Isolate) -> size_t;
  fn v8__Isolate__GetHeapSpaceStatistics(
    isolate: *mut Isolate,
    space_statistics: *mut v8__HeapSpaceStatistics,
    index: size_t,
  ) -> bool;
  fn v8__Isolate__AddNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__RemoveNearHeapLimitCallback(
    isolate: *mut Isolate,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  );
  fn v8__Isolate__SetOOMErrorHandler(
    isolate: *mut Isolate,
    callback: OomErrorCallback,
  );
  fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
    isolate: *mut Isolate,
    change_in_bytes: i64,
  ) -> i64;
  fn v8__Isolate__GetCppHeap(isolate: *mut Isolate) -> *mut Heap;
  fn v8__Isolate__SetPrepareStackTraceCallback(
    isolate: *mut Isolate,
    callback: PrepareStackTraceCallback,
  );
  fn v8__Isolate__SetPromiseHook(isolate: *mut Isolate, hook: PromiseHook);
  fn v8__Isolate__SetPromiseRejectCallback(
    isolate: *mut Isolate,
    callback: PromiseRejectCallback,
  );
  fn v8__Isolate__SetWasmAsyncResolvePromiseCallback(
    isolate: *mut Isolate,
    callback: WasmAsyncResolvePromiseCallback,
  );
  fn v8__Isolate__SetAllowWasmCodeGenerationCallback(
    isolate: *mut Isolate,
    callback: AllowWasmCodeGenerationCallback,
  );
  fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
    isolate: *mut Isolate,
    callback: HostInitializeImportMetaObjectCallback,
  );
  fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
    isolate: *mut Isolate,
    callback: RawHostImportModuleDynamicallyCallback,
  );
  fn v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
    isolate: *mut Isolate,
    callback: RawHostImportModuleWithPhaseDynamicallyCallback,
  );
  #[cfg(not(target_os = "windows"))]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(
      initiator_context: Local<Context>,
    ) -> *mut Context,
  );
  #[cfg(target_os = "windows")]
  fn v8__Isolate__SetHostCreateShadowRealmContextCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context,
  );
  fn v8__Isolate__SetUseCounterCallback(
    isolate: *mut Isolate,
    callback: UseCounterCallback,
  );
  fn v8__Isolate__RequestInterrupt(
    isolate: *const Isolate,
    callback: InterruptCallback,
    data: *mut c_void,
  );
  fn v8__Isolate__TerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__IsExecutionTerminating(isolate: *const Isolate) -> bool;
  fn v8__Isolate__CancelTerminateExecution(isolate: *const Isolate);
  fn v8__Isolate__GetMicrotasksPolicy(
    isolate: *const Isolate,
  ) -> MicrotasksPolicy;
  fn v8__Isolate__SetMicrotasksPolicy(
    isolate: *mut Isolate,
    policy: MicrotasksPolicy,
  );
  fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut Isolate);
  fn v8__Isolate__EnqueueMicrotask(
    isolate: *mut Isolate,
    function: *const Function,
  );
  fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut Isolate, allow: bool);
  fn v8__Isolate__SetWasmStreamingCallback(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(*const FunctionCallbackInfo),
  );
  fn v8__Isolate__DateTimeConfigurationChangeNotification(
    isolate: *mut Isolate,
    time_zone_detection: TimeZoneDetection,
  );
  fn v8__Isolate__HasPendingBackgroundTasks(isolate: *const Isolate) -> bool;
  fn v8__Isolate__RequestGarbageCollectionForTesting(
    isolate: *mut Isolate,
    r#type: usize,
  );

  fn v8__HeapProfiler__TakeHeapSnapshot(
    isolate: *mut Isolate,
    callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
    arg: *mut c_void,
  );
}

#[repr(C)]
#[derive(Debug)]
pub struct Isolate(Opaque);

impl Isolate {
  const ANNEX_SLOT: u32 = 0;
  const CURRENT_SCOPE_DATA_SLOT: u32 = 1;
  const INTERNAL_DATA_SLOT_COUNT: u32 = 2;

  #[inline(always)]
  fn assert_embedder_data_slot_count_and_offset_correct(&self) {
    assert!(
      unsafe { v8__Isolate__GetNumberOfDataSlots(self) }
        >= Self::INTERNAL_DATA_SLOT_COUNT
    )
  }

  fn new_impl(params: CreateParams) -> *mut Isolate {
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let isolate = unsafe { &mut *cxx_isolate };
    isolate.initialize(create_param_allocations);
    cxx_isolate
  }

  pub(crate) fn initialize(&mut self, create_param_allocations: Box<dyn Any>) {
    self.assert_embedder_data_slot_count_and_offset_correct();
    self.create_annex(create_param_allocations);
  }

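  /// Creates a new isolate. The returned `OwnedIsolate` is entered on
  /// creation and exited (and the isolate disposed) when it is dropped.
  ///
  /// A minimal usage sketch, assuming the V8 platform has already been
  /// initialized (`v8::V8::initialize_platform` + `v8::V8::initialize`):
  ///
  /// ```ignore
  /// let isolate = &mut v8::Isolate::new(v8::CreateParams::default());
  /// let scope = &mut v8::HandleScope::new(isolate);
  /// let context = v8::Context::new(scope, Default::default());
  /// let scope = &mut v8::ContextScope::new(scope, context);
  /// // ... compile and run scripts in `scope` ...
  /// ```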
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    OwnedIsolate::new(Self::new_impl(params))
  }

  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator(
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::new(external_references, params)
  }

  #[allow(clippy::new_ret_no_self)]
  pub fn snapshot_creator_from_existing_snapshot(
    existing_snapshot_blob: StartupData,
    external_references: Option<Cow<'static, [ExternalReference]>>,
    params: Option<CreateParams>,
  ) -> OwnedIsolate {
    SnapshotCreator::from_existing_snapshot(
      existing_snapshot_blob,
      external_references,
      params,
    )
  }

  #[inline(always)]
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }

  #[inline(always)]
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }

  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }

  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    self.thread_safe_handle().cancel_terminate_execution()
  }

  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    self.thread_safe_handle().is_execution_terminating()
  }

  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    let annex_ptr = Arc::into_raw(annex_arc);
    assert!(self.get_data_internal(Self::ANNEX_SLOT).is_null());
    self.set_data_internal(Self::ANNEX_SLOT, annex_ptr as *mut _);
  }

  unsafe fn dispose_annex(&mut self) -> Box<dyn Any> {
    // Clear the annex's back-pointer to the isolate, so that
    // `IsolateHandle`s that outlive the isolate become inert.
    let annex = self.get_annex_mut();
    {
      let _lock = annex.isolate_mutex.lock().unwrap();
      annex.isolate = null_mut();
    }

    // Take out the allocations that back `CreateParams` and drop the slots.
    let create_param_allocations =
      std::mem::replace(&mut annex.create_param_allocations, Box::new(()));
    annex.slots.clear();

    // Run the finalizers that are guaranteed to be called.
    for finalizer in annex.finalizer_map.drain() {
      if let FinalizerCallback::Guaranteed(callback) = finalizer {
        callback();
      }
    }

    // Drop the annex's own Arc reference and clear the now-dangling pointer
    // out of the annex slot.
    unsafe { Arc::from_raw(annex) };
    self.set_data_internal(Self::ANNEX_SLOT, null_mut());

    create_param_allocations
  }

  #[inline(always)]
  fn get_annex(&self) -> &IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *const IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &*annex_ptr }
  }

  #[inline(always)]
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    let annex_ptr =
      self.get_data_internal(Self::ANNEX_SLOT) as *mut IsolateAnnex;
    assert!(!annex_ptr.is_null());
    unsafe { &mut *annex_ptr }
  }

  pub(crate) fn set_snapshot_creator(
    &mut self,
    snapshot_creator: SnapshotCreator,
  ) {
    let prev = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .replace(snapshot_creator);
    assert!(prev.is_none());
  }

  pub(crate) fn get_finalizer_map(&self) -> &FinalizerMap {
    &self.get_annex().finalizer_map
  }

  pub(crate) fn get_finalizer_map_mut(&mut self) -> &mut FinalizerMap {
    &mut self.get_annex_mut().finalizer_map
  }

  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }

  pub fn get_data(&self, slot: u32) -> *mut c_void {
    self.get_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot)
  }

  #[inline(always)]
  pub fn set_data(&mut self, slot: u32, data: *mut c_void) {
    self.set_data_internal(Self::INTERNAL_DATA_SLOT_COUNT + slot, data);
  }

  pub fn get_number_of_data_slots(&self) -> u32 {
    let n = unsafe { v8__Isolate__GetNumberOfDataSlots(self) };
    n - Self::INTERNAL_DATA_SLOT_COUNT
  }

  #[inline(always)]
  pub(crate) fn get_data_internal(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self, slot) }
  }

  #[inline(always)]
  pub(crate) fn set_data_internal(&mut self, slot: u32, data: *mut c_void) {
    unsafe { v8__Isolate__SetData(self, slot, data) }
  }

  pub(crate) fn init_scope_root(&mut self) {
    ScopeData::new_root(self);
  }

  pub(crate) fn dispose_scope_root(&mut self) {
    ScopeData::drop_root(self);
  }

  #[inline(always)]
  pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
    let scope_data_ptr = self.get_data_internal(Self::CURRENT_SCOPE_DATA_SLOT);
    NonNull::new(scope_data_ptr).map(NonNull::cast)
  }

  #[inline(always)]
  pub(crate) fn set_current_scope_data(
    &mut self,
    scope_data: Option<NonNull<ScopeData>>,
  ) {
    let scope_data_ptr = scope_data
      .map(NonNull::cast)
      .map_or_else(null_mut, NonNull::as_ptr);
    self.set_data_internal(Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr);
  }

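  /// Isolates carry a type-keyed map of embedder "slots" (at most one value
  /// per Rust type, stored in the isolate annex). `get_slot`, `get_slot_mut`,
  /// `set_slot` and `remove_slot` below operate on that map.
  ///
  /// An illustrative sketch using a hypothetical `Counter` state type:
  ///
  /// ```ignore
  /// struct Counter(u32);
  /// isolate.set_slot(Counter(0));
  /// isolate.get_slot_mut::<Counter>().unwrap().0 += 1;
  /// assert_eq!(isolate.remove_slot::<Counter>().unwrap().0, 1);
  /// ```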
  #[inline(always)]
  pub fn get_slot<T: 'static>(&self) -> Option<&T> {
    self
      .get_annex()
      .slots
      .get(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow::<T>() })
  }

  #[inline(always)]
  pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
    self
      .get_annex_mut()
      .slots
      .get_mut(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.borrow_mut::<T>() })
  }

  #[inline(always)]
  pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
    self
      .get_annex_mut()
      .slots
      .insert(TypeId::of::<T>(), RawSlot::new(value))
      .is_none()
  }

  #[inline(always)]
  pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
    self
      .get_annex_mut()
      .slots
      .remove(&TypeId::of::<T>())
      .map(|slot| unsafe { slot.into_inner::<T>() })
  }

  #[inline(always)]
  pub unsafe fn enter(&mut self) {
    unsafe {
      v8__Isolate__Enter(self);
    }
  }

  #[inline(always)]
  pub unsafe fn exit(&mut self) {
    unsafe {
      v8__Isolate__Exit(self);
    }
  }

  #[inline(always)]
  pub fn memory_pressure_notification(&mut self, level: MemoryPressureLevel) {
    unsafe { v8__Isolate__MemoryPressureNotification(self, level as u8) }
  }

  #[inline(always)]
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self) }
  }

  #[inline(always)]
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self) }
  }

  #[inline(always)]
  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      v8__Isolate__GetHeapStatistics(self, s.as_mut_ptr());
      s.assume_init()
    };
    HeapStatistics(inner)
  }

  #[inline(always)]
  pub fn number_of_heap_spaces(&mut self) -> usize {
    unsafe { v8__Isolate__NumberOfHeapSpaces(self) }
  }

  #[inline(always)]
  pub fn get_heap_space_statistics(
    &mut self,
    index: usize,
  ) -> Option<HeapSpaceStatistics> {
    let inner = unsafe {
      let mut s = MaybeUninit::zeroed();
      if !v8__Isolate__GetHeapSpaceStatistics(self, s.as_mut_ptr(), index) {
        return None;
      }
      s.assume_init()
    };
    Some(HeapSpaceStatistics(inner))
  }

  #[inline(always)]
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self,
        capture,
        frame_limit,
      );
    }
  }

  #[inline(always)]
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self, callback) }
  }

  #[inline(always)]
  pub fn add_message_listener_with_error_level(
    &mut self,
    callback: MessageCallback,
    message_levels: MessageErrorLevel,
  ) -> bool {
    unsafe {
      v8__Isolate__AddMessageListenerWithErrorLevel(
        self,
        callback,
        message_levels,
      )
    }
  }

  #[inline(always)]
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(self, callback.map_fn_to());
    };
  }

  #[inline(always)]
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self, hook) }
  }

  #[inline(always)]
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe { v8__Isolate__SetPromiseRejectCallback(self, callback) }
  }

  #[inline(always)]
  pub fn set_wasm_async_resolve_promise_callback(
    &mut self,
    callback: WasmAsyncResolvePromiseCallback,
  ) {
    unsafe { v8__Isolate__SetWasmAsyncResolvePromiseCallback(self, callback) }
  }

  #[inline(always)]
  pub fn set_allow_wasm_code_generation_callback(
    &mut self,
    callback: AllowWasmCodeGenerationCallback,
  ) {
    unsafe {
      v8__Isolate__SetAllowWasmCodeGenerationCallback(self, callback);
    }
  }

  #[inline(always)]
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(self, callback);
    }
  }

  #[inline(always)]
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(
        self,
        callback.to_c_fn(),
      );
    }
  }

  #[inline(always)]
  pub fn set_host_import_module_with_phase_dynamically_callback(
    &mut self,
    callback: impl HostImportModuleWithPhaseDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleWithPhaseDynamicallyCallback(
        self,
        callback.to_c_fn(),
      );
    }
  }

  pub fn set_host_create_shadow_realm_context_callback(
    &mut self,
    callback: HostCreateShadowRealmContextCallback,
  ) {
    #[inline]
    unsafe extern "C" fn rust_shadow_realm_callback(
      initiator_context: Local<Context>,
    ) -> *mut Context {
      let mut scope = unsafe { CallbackScope::new(initiator_context) };
      // Copy the fn pointer out of the slot, so `scope` is no longer
      // borrowed when the callback takes exclusive access to it.
      let callback = *scope
        .get_slot::<HostCreateShadowRealmContextCallback>()
        .unwrap();
      let context = callback(&mut scope);
      context.map_or_else(null_mut, |l| l.as_non_null().as_ptr())
    }

    #[cfg(target_os = "windows")]
    unsafe extern "C" fn rust_shadow_realm_callback_windows(
      rv: *mut *mut Context,
      initiator_context: Local<Context>,
    ) -> *mut *mut Context {
      unsafe {
        let ret = rust_shadow_realm_callback(initiator_context);
        rv.write(ret);
      }
      rv
    }

    let slot_didnt_exist_before = self.set_slot(callback);
    if slot_didnt_exist_before {
      unsafe {
        #[cfg(target_os = "windows")]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self,
          rust_shadow_realm_callback_windows,
        );
        #[cfg(not(target_os = "windows"))]
        v8__Isolate__SetHostCreateShadowRealmContextCallback(
          self,
          rust_shadow_realm_callback,
        );
      }
    }
  }

  #[inline(always)]
  pub fn set_use_counter_callback(&mut self, callback: UseCounterCallback) {
    unsafe {
      v8__Isolate__SetUseCounterCallback(self, callback);
    }
  }

  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn add_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCPrologueCallback(self, callback, data, gc_type_filter);
    }
  }

  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn remove_gc_prologue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__RemoveGCPrologueCallback(self, callback, data) }
  }

  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn add_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
    gc_type_filter: GCType,
  ) {
    unsafe {
      v8__Isolate__AddGCEpilogueCallback(self, callback, data, gc_type_filter);
    }
  }

  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn remove_gc_epilogue_callback(
    &mut self,
    callback: GcCallbackWithData,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__RemoveGCEpilogueCallback(self, callback, data) }
  }

  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__AddNearHeapLimitCallback(self, callback, data) };
  }

  #[inline(always)]
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(self, callback, heap_limit);
    };
  }

  #[inline(always)]
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(self, change_in_bytes)
    }
  }

  #[inline(always)]
  pub fn get_cpp_heap(&mut self) -> Option<&Heap> {
    unsafe { v8__Isolate__GetCppHeap(self).as_ref() }
  }

  #[inline(always)]
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self, callback) };
  }

  #[inline(always)]
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self) }
  }

  #[inline(always)]
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self, policy) }
  }

  #[inline(always)]
  pub fn perform_microtask_checkpoint(&mut self) {
    unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self) }
  }

  #[inline(always)]
  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
    unsafe { v8__Isolate__EnqueueMicrotask(self, &*microtask) }
  }

  #[inline(always)]
  pub fn set_allow_atomics_wait(&mut self, allow: bool) {
    unsafe { v8__Isolate__SetAllowAtomicsWait(self, allow) }
  }

  #[inline(always)]
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType + Fn(&mut HandleScope, Local<Value>, WasmStreaming),
  {
    unsafe { v8__Isolate__SetWasmStreamingCallback(self, trampoline::<F>()) }
  }

  #[inline(always)]
  pub fn date_time_configuration_change_notification(
    &mut self,
    time_zone_detection: TimeZoneDetection,
  ) {
    unsafe {
      v8__Isolate__DateTimeConfigurationChangeNotification(
        self,
        time_zone_detection,
      );
    }
  }

  #[inline(always)]
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self) }
  }

  #[inline(always)]
  pub fn request_garbage_collection_for_testing(
    &mut self,
    r#type: GarbageCollectionType,
  ) {
    unsafe {
      v8__Isolate__RequestGarbageCollectionForTesting(
        self,
        match r#type {
          GarbageCollectionType::Full => 0,
          GarbageCollectionType::Minor => 1,
        },
      );
    }
  }

  unsafe fn dispose(&mut self) {
    unsafe {
      v8__Isolate__Dispose(self);
    }
  }

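  /// Takes a snapshot of the V8 heap and streams it, serialized as JSON, to
  /// `callback` in one or more chunks. The callback's boolean return value
  /// is forwarded through the C++ output-stream trampoline (a sketch of the
  /// contract: return `true` to keep receiving chunks).
  ///
  /// ```ignore
  /// let mut json = Vec::new();
  /// isolate.take_heap_snapshot(|chunk| {
  ///   json.extend_from_slice(chunk);
  ///   true
  /// });
  /// ```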
  pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
  where
    F: FnMut(&[u8]) -> bool,
  {
    unsafe extern "C" fn trampoline<F>(
      arg: *mut c_void,
      data: *const u8,
      size: usize,
    ) -> bool
    where
      F: FnMut(&[u8]) -> bool,
    {
      unsafe {
        let mut callback = NonNull::<F>::new_unchecked(arg as _);
        if size > 0 {
          (callback.as_mut())(std::slice::from_raw_parts(data, size))
        } else {
          (callback.as_mut())(&[])
        }
      }
    }

    let arg = addr_of_mut!(callback);
    unsafe {
      v8__HeapProfiler__TakeHeapSnapshot(self, trampoline::<F>, arg as _);
    }
  }

  #[inline(always)]
  pub fn set_default_context(&mut self, context: Local<Context>) {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.set_default_context(context);
  }

  #[inline(always)]
  pub fn add_context(&mut self, context: Local<Context>) -> usize {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context(context)
  }

  #[inline(always)]
  pub fn add_isolate_data<T>(&mut self, data: Local<T>) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_isolate_data(data)
  }

  #[inline(always)]
  pub fn add_context_data<T>(
    &mut self,
    context: Local<Context>,
    data: Local<T>,
  ) -> usize
  where
    for<'l> Local<'l, T>: Into<Local<'l, Data>>,
  {
    let snapshot_creator = self
      .get_annex_mut()
      .maybe_snapshot_creator
      .as_mut()
      .unwrap();
    snapshot_creator.add_context_data(context, data)
  }
}

pub(crate) struct IsolateAnnex {
  create_param_allocations: Box<dyn Any>,
  slots: HashMap<TypeId, RawSlot, BuildTypeIdHasher>,
  finalizer_map: FinalizerMap,
  maybe_snapshot_creator: Option<SnapshotCreator>,
  isolate: *mut Isolate,
  isolate_mutex: Mutex<()>,
}

unsafe impl Send for IsolateAnnex {}
unsafe impl Sync for IsolateAnnex {}

impl IsolateAnnex {
  fn new(
    isolate: &mut Isolate,
    create_param_allocations: Box<dyn Any>,
  ) -> Self {
    Self {
      create_param_allocations,
      slots: HashMap::default(),
      finalizer_map: FinalizerMap::default(),
      maybe_snapshot_creator: None,
      isolate,
      isolate_mutex: Mutex::new(()),
    }
  }
}

impl Debug for IsolateAnnex {
  fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
    f.debug_struct("IsolateAnnex")
      .field("isolate", &self.isolate)
      .field("isolate_mutex", &self.isolate_mutex)
      .finish()
  }
}

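/// A thread-safe handle to an [`Isolate`], backed by the reference-counted
/// isolate annex. It remains valid after the isolate is dropped (the annex
/// outlives it), in which case its methods become no-ops that return
/// `false`.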
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);

impl IsolateHandle {
  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
    self.0.isolate
  }

  #[inline(always)]
  fn new(isolate: &Isolate) -> Self {
    Self(isolate.get_annex_arc())
  }

  #[inline(always)]
  pub fn terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
      true
    }
  }

  #[inline(always)]
  pub fn cancel_terminate_execution(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
      true
    }
  }

  #[inline(always)]
  pub fn is_execution_terminating(&self) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
    }
  }

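  /// Requests V8 to interrupt long-running JavaScript and invoke `callback`
  /// with `data` once execution reaches a safe point. An illustrative
  /// sketch (`on_interrupt` is a hypothetical embedder function):
  ///
  /// ```ignore
  /// unsafe extern "C" fn on_interrupt(
  ///   _isolate: &mut Isolate,
  ///   _data: *mut c_void,
  /// ) {
  ///   // Runs on the isolate's thread at the next interrupt check.
  /// }
  /// handle.request_interrupt(on_interrupt, std::ptr::null_mut());
  /// ```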
  #[allow(clippy::not_unsafe_ptr_arg_deref)]
  #[inline(always)]
  pub fn request_interrupt(
    &self,
    callback: InterruptCallback,
    data: *mut c_void,
  ) -> bool {
    let _lock = self.0.isolate_mutex.lock().unwrap();
    if self.0.isolate.is_null() {
      false
    } else {
      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
      true
    }
  }
}

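/// An isolate that owns its underlying C++ `v8::Isolate`: it is entered on
/// creation, and exited and disposed when dropped. `Deref`/`DerefMut` give
/// access to the wrapped [`Isolate`].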
#[derive(Debug)]
pub struct OwnedIsolate {
  cxx_isolate: NonNull<Isolate>,
}

impl OwnedIsolate {
  pub(crate) fn new(cxx_isolate: *mut Isolate) -> Self {
    let mut isolate = Self::new_already_entered(cxx_isolate);
    unsafe {
      isolate.enter();
    }
    isolate
  }

  pub(crate) fn new_already_entered(cxx_isolate: *mut Isolate) -> Self {
    let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
    let mut owned_isolate = Self { cxx_isolate };
    owned_isolate.init_scope_root();
    owned_isolate
  }
}

impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    unsafe {
      let snapshot_creator = self.get_annex_mut().maybe_snapshot_creator.take();
      assert!(
        snapshot_creator.is_none(),
        "If an isolate was created with v8::Isolate::snapshot_creator, v8::OwnedIsolate::create_blob must be called before the isolate is dropped."
      );
      assert!(
        self.cxx_isolate.as_mut() as *mut Isolate == v8__Isolate__GetCurrent(),
        "v8::OwnedIsolate instances must be dropped in the reverse order of creation. They are entered upon creation and exited upon being dropped."
      );
      self.dispose_scope_root();
      self.exit();
      self.dispose_annex();
      Platform::notify_isolate_shutdown(&get_current_platform(), self);
      self.dispose();
    }
  }
}

impl OwnedIsolate {
  #[inline(always)]
  pub fn create_blob(
    mut self,
    function_code_handling: FunctionCodeHandling,
  ) -> Option<StartupData> {
    let mut snapshot_creator =
      self.get_annex_mut().maybe_snapshot_creator.take().unwrap();

    let _create_param_allocations = unsafe {
      self.dispose_scope_root();
      self.dispose_annex()
    };

    // The C++ SnapshotCreator owns and disposes the isolate, so skip our
    // own Drop logic for it.
    std::mem::forget(self);
    snapshot_creator.create_blob(function_code_handling)
  }
}

impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    unsafe { self.cxx_isolate.as_ref() }
  }
}

impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    unsafe { self.cxx_isolate.as_mut() }
  }
}

impl AsMut<Isolate> for OwnedIsolate {
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}

impl AsMut<Isolate> for Isolate {
  fn as_mut(&mut self) -> &mut Isolate {
    self
  }
}

pub struct HeapStatistics(v8__HeapStatistics);

impl HeapStatistics {
  #[inline(always)]
  pub fn total_heap_size(&self) -> usize {
    self.0.total_heap_size_
  }

  #[inline(always)]
  pub fn total_heap_size_executable(&self) -> usize {
    self.0.total_heap_size_executable_
  }

  #[inline(always)]
  pub fn total_physical_size(&self) -> usize {
    self.0.total_physical_size_
  }

  #[inline(always)]
  pub fn total_available_size(&self) -> usize {
    self.0.total_available_size_
  }

  #[inline(always)]
  pub fn total_global_handles_size(&self) -> usize {
    self.0.total_global_handles_size_
  }

  #[inline(always)]
  pub fn used_global_handles_size(&self) -> usize {
    self.0.used_global_handles_size_
  }

  #[inline(always)]
  pub fn used_heap_size(&self) -> usize {
    self.0.used_heap_size_
  }

  #[inline(always)]
  pub fn heap_size_limit(&self) -> usize {
    self.0.heap_size_limit_
  }

  #[inline(always)]
  pub fn malloced_memory(&self) -> usize {
    self.0.malloced_memory_
  }

  #[inline(always)]
  pub fn external_memory(&self) -> usize {
    self.0.external_memory_
  }

  #[inline(always)]
  pub fn peak_malloced_memory(&self) -> usize {
    self.0.peak_malloced_memory_
  }

  #[inline(always)]
  pub fn number_of_native_contexts(&self) -> usize {
    self.0.number_of_native_contexts_
  }

  #[inline(always)]
  pub fn number_of_detached_contexts(&self) -> usize {
    self.0.number_of_detached_contexts_
  }

  #[inline(always)]
  pub fn does_zap_garbage(&self) -> bool {
    self.0.does_zap_garbage_
  }
}

pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);

impl HeapSpaceStatistics {
  pub fn space_name(&self) -> &'static CStr {
    unsafe { CStr::from_ptr(self.0.space_name_) }
  }

  pub fn space_size(&self) -> usize {
    self.0.space_size_
  }

  pub fn space_used_size(&self) -> usize {
    self.0.space_used_size_
  }

  pub fn space_available_size(&self) -> usize {
    self.0.space_available_size_
  }

  pub fn physical_space_size(&self) -> usize {
    self.0.physical_space_size_
  }
}

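// Adapts an ordinary Rust closure to `PrepareStackTraceCallback`. As with
// the dynamic-import callbacks above, Windows needs a separate mapping
// because the C++ ABI there returns the value through an out-parameter.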
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + Fn(
      &mut HandleScope<'s>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      PrepareStackTraceCallbackRet(&*r as *const _)
    };
    f.to_c_fn()
  }
}

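/// `TypeId` values are already well-distributed hashes, so this hasher just
/// passes the value through. `write()` panics because a `TypeId` is expected
/// to hash itself by feeding exactly one `u64` via `write_u64`.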
#[derive(Clone, Default)]
pub(crate) struct TypeIdHasher {
  state: Option<u64>,
}

impl Hasher for TypeIdHasher {
  fn write(&mut self, _bytes: &[u8]) {
    panic!("TypeIdHasher::write() called unexpectedly");
  }

  #[inline]
  fn write_u64(&mut self, value: u64) {
    // A `TypeId` hashes itself by emitting a single `u64`.
    let prev_state = self.state.replace(value);
    debug_assert_eq!(prev_state, None);
  }

  #[inline]
  fn finish(&self) -> u64 {
    self.state.unwrap()
  }
}

#[derive(Copy, Clone, Default)]
pub(crate) struct BuildTypeIdHasher;

impl BuildHasher for BuildTypeIdHasher {
  type Hasher = TypeIdHasher;

  #[inline]
  fn build_hasher(&self) -> Self::Hasher {
    Default::default()
  }
}

// `TypeId` is expected to have the size and alignment of a `u64` or `u128`;
// these compile-time checks catch a change in its representation.
const _: () = {
  assert!(
    size_of::<TypeId>() == size_of::<u64>()
      || size_of::<TypeId>() == size_of::<u128>()
  );
  assert!(
    align_of::<TypeId>() == align_of::<u64>()
      || align_of::<TypeId>() == align_of::<u128>()
  );
};

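/// Type-erased storage for one annex slot value. Values that fit in (and
/// align to) a `usize` are stored inline; larger values are boxed. `dtor`
/// remembers how to drop whichever representation was written.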
pub(crate) struct RawSlot {
  data: RawSlotData,
  dtor: Option<RawSlotDtor>,
}

type RawSlotData = MaybeUninit<usize>;
type RawSlotDtor = unsafe fn(&mut RawSlotData) -> ();

impl RawSlot {
  #[inline]
  pub fn new<T: 'static>(value: T) -> Self {
    if Self::needs_box::<T>() {
      Self::new_internal(Box::new(value))
    } else {
      Self::new_internal(value)
    }
  }

  // Safety: `T` must be the exact type that was passed to `RawSlot::new()`.
  #[inline]
  pub unsafe fn borrow<T: 'static>(&self) -> &T {
    unsafe {
      if Self::needs_box::<T>() {
        &*(self.data.as_ptr() as *const Box<T>)
      } else {
        &*(self.data.as_ptr() as *const T)
      }
    }
  }

  // Safety: as for `borrow()`.
  #[inline]
  pub unsafe fn borrow_mut<T: 'static>(&mut self) -> &mut T {
    unsafe {
      if Self::needs_box::<T>() {
        &mut *(self.data.as_mut_ptr() as *mut Box<T>)
      } else {
        &mut *(self.data.as_mut_ptr() as *mut T)
      }
    }
  }

  // Safety: as for `borrow()`. Consumes the slot without running its dtor.
  #[inline]
  pub unsafe fn into_inner<T: 'static>(self) -> T {
    unsafe {
      let value = if Self::needs_box::<T>() {
        *std::ptr::read(self.data.as_ptr() as *mut Box<T>)
      } else {
        std::ptr::read(self.data.as_ptr() as *mut T)
      };
      forget(self);
      value
    }
  }

  const fn needs_box<T: 'static>() -> bool {
    size_of::<T>() > size_of::<RawSlotData>()
      || align_of::<T>() > align_of::<RawSlotData>()
  }

  #[inline]
  fn new_internal<B: 'static>(value: B) -> Self {
    assert!(!Self::needs_box::<B>());
    let mut self_ = Self {
      data: RawSlotData::zeroed(),
      dtor: None,
    };
    unsafe {
      ptr::write(self_.data.as_mut_ptr() as *mut B, value);
    }
    if needs_drop::<B>() {
      self_.dtor.replace(Self::drop_internal::<B>);
    };
    self_
  }

  unsafe fn drop_internal<B: 'static>(data: &mut RawSlotData) {
    assert!(!Self::needs_box::<B>());
    unsafe {
      drop_in_place(data.as_mut_ptr() as *mut B);
    }
  }
}

impl Drop for RawSlot {
  fn drop(&mut self) {
    if let Some(dtor) = self.dtor {
      unsafe { dtor(&mut self.data) };
    }
  }
}