// v8/handle.rs
1use std::borrow::Borrow;
2use std::cell::Cell;
3use std::ffi::c_void;
4use std::hash::Hash;
5use std::hash::Hasher;
6use std::marker::PhantomData;
7use std::mem::forget;
8use std::mem::transmute;
9use std::ops::Deref;
10use std::ptr::NonNull;
11
12use crate::Data;
13use crate::HandleScope;
14use crate::Isolate;
15use crate::IsolateHandle;
16use crate::support::Opaque;
17
// FFI declarations for the C++ glue functions backing the handle types in
// this module (`Local`, `Global` and weak globals, `TracedReference`,
// `Eternal`). The glue layer is not visible here — TODO confirm against the
// crate's C++ bindings.
unsafe extern "C" {
  // Creates a new local handle referring to the same object as `other`.
  fn v8__Local__New(isolate: *mut Isolate, other: *const Data) -> *const Data;
  // Allocates a new global storage cell holding `data`; returns a pointer to
  // the cell's slot.
  fn v8__Global__New(isolate: *mut Isolate, data: *const Data) -> *const Data;
  // Like `v8__Global__New`, but weak: `callback` is invoked (first pass) with
  // `parameter` after the referenced object has been garbage collected.
  fn v8__Global__NewWeak(
    isolate: *mut Isolate,
    data: *const Data,
    parameter: *const c_void,
    callback: unsafe extern "C" fn(*const WeakCallbackInfo),
  ) -> *const Data;
  // Destroys the storage cell pointed to by `data`.
  fn v8__Global__Reset(data: *const Data);
  fn v8__WeakCallbackInfo__GetIsolate(
    this: *const WeakCallbackInfo,
  ) -> *mut Isolate;
  fn v8__WeakCallbackInfo__GetParameter(
    this: *const WeakCallbackInfo,
  ) -> *mut c_void;
  // Schedules a second-pass callback; only valid from a first-pass callback.
  fn v8__WeakCallbackInfo__SetSecondPassCallback(
    this: *const WeakCallbackInfo,
    callback: unsafe extern "C" fn(*const WeakCallbackInfo),
  );

  fn v8__TracedReference__CONSTRUCT(this: *mut TracedReference<Data>);
  fn v8__TracedReference__DESTRUCT(this: *mut TracedReference<Data>);
  fn v8__TracedReference__Reset(
    this: *mut TracedReference<Data>,
    isolate: *mut Isolate,
    data: *mut Data,
  );
  fn v8__TracedReference__Get(
    this: *const TracedReference<Data>,
    isolate: *mut Isolate,
  ) -> *const Data;

  fn v8__Eternal__CONSTRUCT(this: *mut Eternal<Data>);
  fn v8__Eternal__DESTRUCT(this: *mut Eternal<Data>);
  fn v8__Eternal__Clear(this: *mut Eternal<Data>);
  fn v8__Eternal__Get(
    this: *const Eternal<Data>,
    isolate: *mut Isolate,
  ) -> *const Data;
  fn v8__Eternal__Set(
    this: *mut Eternal<Data>,
    isolate: *mut Isolate,
    data: *mut Data,
  );
  fn v8__Eternal__IsEmpty(this: *const Eternal<Data>) -> bool;
}
65
66/// An object reference managed by the v8 garbage collector.
67///
68/// All objects returned from v8 have to be tracked by the garbage
69/// collector so that it knows that the objects are still alive.  Also,
70/// because the garbage collector may move objects, it is unsafe to
71/// point directly to an object.  Instead, all objects are stored in
72/// handles which are known by the garbage collector and updated
73/// whenever an object moves.  Handles should always be passed by value
74/// (except in cases like out-parameters) and they should never be
75/// allocated on the heap.
76///
77/// There are two types of handles: local and persistent handles.
78///
79/// Local handles are light-weight and transient and typically used in
80/// local operations.  They are managed by HandleScopes. That means that a
81/// HandleScope must exist on the stack when they are created and that they are
82/// only valid inside of the `HandleScope` active during their creation.
83/// For passing a local handle to an outer `HandleScope`, an
84/// `EscapableHandleScope` and its `Escape()` method must be used.
85///
86/// Persistent handles can be used when storing objects across several
87/// independent operations and have to be explicitly deallocated when they're no
88/// longer used.
89///
90/// It is safe to extract the object stored in the handle by
91/// dereferencing the handle (for instance, to extract the `*Object` from
92/// a `Local<Object>`); the value will still be governed by a handle
93/// behind the scenes and the same rules apply to these values as to
94/// their handles.
95///
96/// Note: Local handles in Rusty V8 differ from the V8 C++ API in that they are
97/// never empty. In situations where empty handles are needed, use
98/// `Option<Local>`.
#[repr(C)]
#[derive(Debug)]
// Layout note: `#[repr(C)]` makes `Local<T>` exactly one non-null pointer, so
// it can cross the FFI boundary and a `&[Local<T>]` can be reinterpreted as
// `&[*const T]` (see `slice_into_raw`). The `PhantomData` ties the handle to
// the `'s` lifetime of the scope that created it.
pub struct Local<'s, T>(NonNull<T>, PhantomData<&'s ()>);
102
impl<'s, T> Local<'s, T> {
  /// Construct a new Local from an existing Handle.
  ///
  /// # Panics
  ///
  /// Panics if `handle` is hosted by an `Isolate` other than the one the
  /// scope belongs to, or by an `Isolate` that has been disposed.
  #[inline(always)]
  pub fn new(
    scope: &mut HandleScope<'s, ()>,
    handle: impl Handle<Data = T>,
  ) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(scope);
    unsafe {
      scope.cast_local(|sd| {
        v8__Local__New(sd.get_isolate_ptr(), data.cast().as_ptr()) as *const T
      })
    }
    // `v8__Local__New` copies an existing (never-empty) handle, so the
    // result is expected to be non-null.
    .unwrap()
  }

  /// Create a local handle by downcasting from one of its super types.
  /// This function is unsafe because the cast is unchecked.
  ///
  /// # Safety
  ///
  /// The caller must guarantee that the value referenced by `other` really is
  /// an instance of `T`; no runtime type check is performed.
  #[inline(always)]
  pub unsafe fn cast_unchecked<A>(other: Local<'s, A>) -> Self
  where
    Local<'s, A>: TryFrom<Self>,
  {
    // Layout-wise this is sound: `Local` is a single `#[repr(C)]` pointer for
    // every `T`, so the transmute only changes the static type.
    unsafe { transmute(other) }
  }

  // Converts a possibly-null raw pointer into `Some(Local)` / `None`.
  // Safety: a non-null `ptr` must refer to a live V8 handle slot that remains
  // valid for the lifetime `'s`.
  #[inline(always)]
  pub(crate) unsafe fn from_raw(ptr: *const T) -> Option<Self> {
    NonNull::new(ptr as *mut _).map(|nn| unsafe { Self::from_non_null(nn) })
  }

  // Like `from_raw`, but the caller additionally asserts `ptr` is non-null.
  #[inline(always)]
  pub(crate) unsafe fn from_raw_unchecked(ptr: *const T) -> Self {
    Self(
      unsafe { NonNull::new_unchecked(ptr as *mut _) },
      PhantomData,
    )
  }

  // Safety: `nn` must refer to a live V8 handle slot valid for `'s`.
  #[inline(always)]
  pub(crate) unsafe fn from_non_null(nn: NonNull<T>) -> Self {
    Self(nn, PhantomData)
  }

  // Exposes the wrapped pointer without any conversion.
  #[inline(always)]
  pub(crate) fn as_non_null(self) -> NonNull<T> {
    self.0
  }

  // Reinterprets a slice of locals as a slice of raw pointers.
  #[inline(always)]
  pub(crate) fn slice_into_raw(slice: &[Self]) -> &[*const T] {
    // SAFETY: `Local` is `#[repr(C)]` over a single `NonNull<T>`, which has
    // the same layout as `*const T`, so the two slice types are layout
    // compatible.
    unsafe { &*(slice as *const [Self] as *const [*const T]) }
  }
}
158
// A `Local` is just a pointer plus a lifetime marker; copying it is free and
// does not duplicate the underlying V8 object.
impl<T> Copy for Local<'_, T> {}
160
impl<T> Clone for Local<'_, T> {
  // Delegates to the `Copy` impl; no interaction with V8 is needed.
  fn clone(&self) -> Self {
    *self
  }
}
166
impl<T> Deref for Local<'_, T> {
  type Target = T;
  fn deref(&self) -> &T {
    // SAFETY: the pointer is non-null by construction, and the `'s` lifetime
    // on `Local` keeps the borrow within the scope that governs the handle
    // (see the type-level documentation above).
    unsafe { self.0.as_ref() }
  }
}
173
impl<'s, T> Local<'s, T> {
  /// Attempts to cast the contained type to another,
  /// returning an error if the conversion fails.
  ///
  /// # Examples
  ///
  /// ```ignore
  /// let value: Local<'_, Value> = get_v8_value();
  ///
  /// if let Ok(func) = value.try_cast::<Function>() {
  ///   //
  /// }
  /// ```
  #[inline(always)]
  pub fn try_cast<A>(
    self,
  ) -> Result<Local<'s, A>, <Self as TryInto<Local<'s, A>>>::Error>
  where
    Self: TryInto<Local<'s, A>>,
  {
    self.try_into()
  }

  /// Attempts to cast the contained type to another,
  /// panicking if the conversion fails.
  ///
  /// # Panics
  ///
  /// Panics if the `TryInto` conversion to `Local<A>` returns an error.
  ///
  /// # Example
  ///
  /// ```ignore
  /// let value: Local<'_, Value> = get_v8_value();
  ///
  /// let func = value.cast::<Function>();
  /// ```
  #[inline(always)]
  pub fn cast<A>(self) -> Local<'s, A>
  where
    Self: TryInto<Local<'s, A>, Error: std::fmt::Debug>,
  {
    self.try_into().unwrap()
  }
}
215
216/// An object reference that is independent of any handle scope. Where
217/// a Local handle only lives as long as the HandleScope in which it was
218/// allocated, a global handle remains valid until it is dropped.
219///
220/// A global handle contains a reference to a storage cell within
221/// the V8 engine which holds an object value and which is updated by
222/// the garbage collector whenever the object is moved.
223///
224/// You can create a `v8::Local` out of `v8::Global` using
225/// `v8::Local::new(scope, global_handle)`.
#[derive(Debug)]
pub struct Global<T> {
  // Pointer to the storage cell allocated by `v8__Global__New`. The cell is
  // owned by V8 and released via `v8__Global__Reset` in `Drop`.
  data: NonNull<T>,
  // Thread-safe handle used to detect whether the host `Isolate` is still
  // alive (its raw isolate pointer is null once the isolate is disposed).
  isolate_handle: IsolateHandle,
}
231
impl<T> Global<T> {
  /// Construct a new Global from an existing Handle.
  ///
  /// # Panics
  ///
  /// Panics if `handle` is hosted by a different `Isolate`, or by an
  /// `Isolate` that has been disposed.
  #[inline(always)]
  pub fn new(isolate: &mut Isolate, handle: impl Handle<Data = T>) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    unsafe { Self::new_raw(isolate, data) }
  }

  /// Implementation helper function that contains the code that can be shared
  /// between `Global::new()` and `Global::clone()`.
  ///
  /// # Safety
  ///
  /// `isolate` must be a valid, non-disposed isolate pointer, and `data` must
  /// refer to a V8 heap object hosted by that isolate.
  #[inline(always)]
  unsafe fn new_raw(isolate: *mut Isolate, data: NonNull<T>) -> Self {
    let data = data.cast().as_ptr();
    unsafe {
      // A fresh storage cell is allocated here; `data` is rebound to point
      // into that new cell, not at the source handle's slot.
      let data = v8__Global__New(isolate, data) as *const T;
      let data = NonNull::new_unchecked(data as *mut _);
      let isolate_handle = (*isolate).thread_safe_handle();
      Self {
        data,
        isolate_handle,
      }
    }
  }

  /// Consume this `Global` and return the underlying raw pointer.
  ///
  /// The returned raw pointer must be converted back into a `Global` by using
  /// [`Global::from_raw`], otherwise the V8 value referenced by this global
  /// handle will be pinned on the V8 heap permanently and never get garbage
  /// collected.
  #[inline(always)]
  pub fn into_raw(self) -> NonNull<T> {
    let data = self.data;
    // `forget` skips `Drop`, so the storage cell is intentionally leaked
    // until `from_raw` reconstitutes the handle.
    forget(self);
    data
  }

  /// Converts a raw pointer created with [`Global::into_raw()`] back to its
  /// original `Global`.
  ///
  /// # Safety
  ///
  /// `data` must have come from [`Global::into_raw()`] on a handle hosted by
  /// this `isolate`, and must not be used again afterwards.
  #[inline(always)]
  pub unsafe fn from_raw(isolate: &mut Isolate, data: NonNull<T>) -> Self {
    let isolate_handle = isolate.thread_safe_handle();
    Self {
      data,
      isolate_handle,
    }
  }

  /// Returns a reference to the stored value. See [`Handle::open`] for the
  /// panic conditions (wrong or disposed isolate).
  #[inline(always)]
  pub fn open<'a>(&'a self, scope: &mut Isolate) -> &'a T {
    Handle::open(self, scope)
  }
}
286
impl<T> Clone for Global<T> {
  // Creates a new, independent storage cell for the same value.
  // Panics if the host isolate has been disposed (`get_isolate` checks).
  fn clone(&self) -> Self {
    let HandleInfo { data, host } = self.get_handle_info();
    unsafe { Self::new_raw(host.get_isolate().as_mut(), data) }
  }
}
293
294impl<T> Drop for Global<T> {
295  fn drop(&mut self) {
296    unsafe {
297      if self.isolate_handle.get_isolate_ptr().is_null() {
298        // This `Global` handle is associated with an `Isolate` that has already
299        // been disposed.
300      } else {
301        // Destroy the storage cell that contains the contents of this Global.
302        v8__Global__Reset(self.data.cast().as_ptr());
303      }
304    }
305  }
306}
307
/// An implementation of [`Handle`] that can be constructed unsafely from a
/// reference.
pub(crate) struct UnsafeRefHandle<'a, T> {
  // Borrowed view of the V8 heap object; per the constructor's safety
  // contract, it must be derived from a live `Local` or `Global` handle.
  reference: &'a T,
  // Identifies the isolate that hosts `reference`.
  isolate_handle: IsolateHandle,
}
impl<'a, T> UnsafeRefHandle<'a, T> {
  /// Constructs an `UnsafeRefHandle`.
  ///
  /// # Safety
  ///
  /// `reference` must be derived from a [`Local`] or [`Global`] handle, and its
  /// lifetime must not outlive that handle. Furthermore, `isolate` must be the
  /// isolate associated with the handle (for [`Local`], the current isolate;
  /// for [`Global`], the isolate you would pass to the [`Global::open()`]
  /// method).
  #[inline(always)]
  pub unsafe fn new(reference: &'a T, isolate: &mut Isolate) -> Self {
    // No validation happens here; the safety contract above is the only
    // thing tying `reference` to `isolate`.
    UnsafeRefHandle {
      reference,
      isolate_handle: isolate.thread_safe_handle(),
    }
  }
}
332
pub trait Handle: Sized {
  /// The type of V8 heap object this handle refers to.
  type Data;

  #[doc(hidden)]
  fn get_handle_info(&self) -> HandleInfo<Self::Data>;

  /// Returns a reference to the V8 heap object that this handle represents.
  /// The handle does not get cloned, nor is it converted to a `Local` handle.
  ///
  /// # Panics
  ///
  /// This function panics in the following situations:
  /// - The handle is not hosted by the specified Isolate.
  /// - The Isolate that hosts this handle has been disposed.
  fn open<'a>(&'a self, isolate: &mut Isolate) -> &'a Self::Data {
    let HandleInfo { data, host } = self.get_handle_info();
    host.assert_match_isolate(isolate);
    // SAFETY: the assertion above established that the handle belongs to the
    // given live isolate.
    unsafe { &*data.as_ptr() }
  }

  /// Reads the inner value contained in this handle, _without_ verifying that
  /// this handle is hosted by the currently active `Isolate`.
  ///
  /// # Safety
  ///
  /// Using a V8 heap object with another `Isolate` than the `Isolate` that
  /// hosts it is not permitted under any circumstance. Doing so leads to
  /// undefined behavior, likely a crash.
  ///
  /// # Panics
  ///
  /// This function panics if the `Isolate` that hosts the handle has been
  /// disposed.
  unsafe fn get_unchecked(&self) -> &Self::Data {
    let HandleInfo { data, host } = self.get_handle_info();
    // Only disposal is checked; isolate identity is the caller's burden.
    if let HandleHost::DisposedIsolate = host {
      panic!("attempt to access Handle hosted by disposed Isolate");
    }
    unsafe { &*data.as_ptr() }
  }
}
374
impl<T> Handle for Local<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // A `Local` cannot name its host isolate directly, so it reports the
    // `Scope` host (see the note on `HandleHost::Scope`).
    HandleInfo::new(self.as_non_null(), HandleHost::Scope)
  }
}
381
impl<'a, 's: 'a, T> Handle for &'a Local<'s, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // Same as the by-value impl: the host isolate cannot be named.
    HandleInfo::new(self.as_non_null(), HandleHost::Scope)
  }
}
388
impl<T> Handle for Global<T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // The host is resolved through the thread-safe isolate handle; this
    // yields `HandleHost::DisposedIsolate` if the isolate is gone.
    HandleInfo::new(self.data, (&self.isolate_handle).into())
  }
}
395
impl<T> Handle for &Global<T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // Mirrors the by-value `Global` impl.
    HandleInfo::new(self.data, (&self.isolate_handle).into())
  }
}
402
impl<T> Handle for UnsafeRefHandle<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // The data pointer comes straight from the borrowed reference; host
    // resolution goes through the stored isolate handle.
    HandleInfo::new(
      NonNull::from(self.reference),
      (&self.isolate_handle).into(),
    )
  }
}
412
impl<T> Handle for &UnsafeRefHandle<'_, T> {
  type Data = T;
  fn get_handle_info(&self) -> HandleInfo<T> {
    // Mirrors the by-value `UnsafeRefHandle` impl.
    HandleInfo::new(
      NonNull::from(self.reference),
      (&self.isolate_handle).into(),
    )
  }
}
422
impl<T> Borrow<T> for Local<'_, T> {
  fn borrow(&self) -> &T {
    // Deref coercion turns `&Local<T>` into `&T` here.
    self
  }
}
428
impl<T> Borrow<T> for Global<T> {
  // Unlike `Local`, borrowing from a `Global` can panic: the host isolate
  // may have been disposed since the handle was created.
  fn borrow(&self) -> &T {
    let HandleInfo { data, host } = self.get_handle_info();
    if let HandleHost::DisposedIsolate = host {
      panic!("attempt to access Handle hosted by disposed Isolate");
    }
    // SAFETY: the host isolate is still alive per the check above.
    unsafe { &*data.as_ptr() }
  }
}
438
// `PartialEq` (below) compares the referenced values, so full equivalence
// only holds when the payload type itself is `Eq`.
impl<T> Eq for Local<'_, T> where T: Eq {}
impl<T> Eq for Global<T> where T: Eq {}
441
impl<T: Hash> Hash for Local<'_, T> {
  fn hash<H: Hasher>(&self, state: &mut H) {
    // Hash the referenced V8 value (via `Deref`), not the pointer, so the
    // result is consistent with the value-based `PartialEq` impl.
    (**self).hash(state);
  }
}
447
impl<T: Hash> Hash for Global<T> {
  // Hashes the referenced value. Panics if the host isolate has been
  // disposed, because the value can no longer be accessed safely.
  fn hash<H: Hasher>(&self, state: &mut H) {
    unsafe {
      if self.isolate_handle.get_isolate_ptr().is_null() {
        panic!("can't hash Global after its host Isolate has been disposed");
      }
      self.data.as_ref().hash(state);
    }
  }
}
458
impl<T, Rhs: Handle> PartialEq<Rhs> for Local<'_, T>
where
  T: PartialEq<Rhs::Data>,
{
  fn eq(&self, other: &Rhs) -> bool {
    let i1 = self.get_handle_info();
    let i2 = other.get_handle_info();
    // Equal iff both handles are hosted by the same isolate (to the extent
    // that can be determined — see `HandleHost::match_host`) and the values
    // they reference compare equal. Panics if either host is disposed.
    i1.host.match_host(i2.host, None)
      && unsafe { i1.data.as_ref() == i2.data.as_ref() }
  }
}
470
impl<T, Rhs: Handle> PartialEq<Rhs> for Global<T>
where
  T: PartialEq<Rhs::Data>,
{
  fn eq(&self, other: &Rhs) -> bool {
    let i1 = self.get_handle_info();
    let i2 = other.get_handle_info();
    // Same semantics as the `Local` impl: host match, then value equality.
    i1.host.match_host(i2.host, None)
      && unsafe { i1.data.as_ref() == i2.data.as_ref() }
  }
}
482
#[derive(Copy, Debug, Clone)]
pub struct HandleInfo<T> {
  // Pointer to the referenced V8 heap object (or a global's storage cell).
  data: NonNull<T>,
  // Where the handle lives: a scope, a known isolate, or a disposed isolate.
  host: HandleHost,
}
488
impl<T> HandleInfo<T> {
  // Trivial constructor; the fields stay private to this module.
  fn new(data: NonNull<T>, host: HandleHost) -> Self {
    Self { data, host }
  }
}
494
#[derive(Copy, Debug, Clone)]
enum HandleHost {
  // Note: the `HandleHost::Scope` variant does not mean the handle has no
  // associated `Isolate`. It only means the handle is a `Local` handle that
  // was unable to provide a pointer to the `Isolate` that hosts it (the
  // handle) and the currently entered scope.
  Scope,
  // The handle's host isolate is known and alive.
  Isolate(NonNull<Isolate>),
  // The handle's host isolate has been disposed; the handle is unusable.
  DisposedIsolate,
}
506
impl From<&'_ mut Isolate> for HandleHost {
  // A live isolate reference always yields the `Isolate` variant.
  fn from(isolate: &'_ mut Isolate) -> Self {
    Self::Isolate(NonNull::from(isolate))
  }
}
512
impl From<&'_ IsolateHandle> for HandleHost {
  fn from(isolate_handle: &IsolateHandle) -> Self {
    // A null isolate pointer indicates the isolate has been disposed.
    NonNull::new(unsafe { isolate_handle.get_isolate_ptr() })
      .map_or(Self::DisposedIsolate, Self::Isolate)
  }
}
519
impl HandleHost {
  /// Compares two `HandleHost` values, returning `true` if they refer to the
  /// same `Isolate`, or `false` if they refer to different isolates.
  ///
  /// If the caller knows which `Isolate` the currently entered scope (if any)
  /// belongs to, it should pass on this information via the second argument
  /// (`scope_isolate_opt`).
  ///
  /// # Panics
  ///
  /// This function panics if one of the `HandleHost` values refers to an
  /// `Isolate` that has been disposed.
  ///
  /// # Safety / Bugs
  ///
  /// The current implementation is a bit too forgiving. If it cannot decide
  /// whether two hosts refer to the same `Isolate`, it just returns `true`.
  /// Note that this can only happen when the caller does _not_ provide a value
  /// for the `scope_isolate_opt` argument.
  fn match_host(
    self,
    other: Self,
    scope_isolate_opt: Option<&mut Isolate>,
  ) -> bool {
    let scope_isolate_opt_nn = scope_isolate_opt.map(NonNull::from);
    match (self, other, scope_isolate_opt_nn) {
      (Self::Scope, Self::Scope, _) => true,
      (Self::Isolate(ile1), Self::Isolate(ile2), _) => ile1 == ile2,
      // A `Scope` host can only be matched against a known isolate when the
      // caller tells us which isolate the current scope belongs to.
      (Self::Scope, Self::Isolate(ile1), Some(ile2)) => ile1 == ile2,
      (Self::Isolate(ile1), Self::Scope, Some(ile2)) => ile1 == ile2,
      // TODO(pisciaureus): If the caller didn't provide a `scope_isolate_opt`
      // value that works, we can't do a meaningful check. So all we do for now
      // is pretend the Isolates match and hope for the best. This eventually
      // needs to be tightened up.
      (Self::Scope, Self::Isolate(_), _) => true,
      (Self::Isolate(_), Self::Scope, _) => true,
      // Handles hosted in an Isolate that has been disposed aren't good for
      // anything, even if a pair of handles used to be hosted in the same
      // now-disposed Isolate.
      (Self::DisposedIsolate, ..) | (_, Self::DisposedIsolate, _) => {
        panic!("attempt to access Handle hosted by disposed Isolate")
      }
    }
  }

  // Like `match_host`, but panics on a mismatch instead of returning `false`.
  fn assert_match_host(self, other: Self, scope_opt: Option<&mut Isolate>) {
    assert!(
      self.match_host(other, scope_opt),
      "attempt to use Handle in an Isolate that is not its host"
    );
  }

  // Convenience wrapper: does this host refer to the given isolate?
  #[allow(dead_code)]
  fn match_isolate(self, isolate: &mut Isolate) -> bool {
    self.match_host(isolate.into(), Some(isolate))
  }

  // Panicking variant of `match_isolate`.
  fn assert_match_isolate(self, isolate: &mut Isolate) {
    self.assert_match_host(isolate.into(), Some(isolate));
  }

  // Returns the host isolate pointer. Panics for the `Scope` variant (no
  // isolate pointer available) and for `DisposedIsolate`.
  fn get_isolate(self) -> NonNull<Isolate> {
    match self {
      Self::Scope => panic!("host Isolate for Handle not available"),
      Self::Isolate(ile) => ile,
      Self::DisposedIsolate => panic!("attempt to access disposed Isolate"),
    }
  }

  // Panics in the same situations as `get_isolate`.
  #[allow(dead_code)]
  fn get_isolate_handle(self) -> IsolateHandle {
    unsafe { self.get_isolate().as_ref() }.thread_safe_handle()
  }
}
594
595/// An object reference that does not prevent garbage collection for the object,
596/// and which allows installing finalization callbacks which will be called
597/// after the object has been GC'd.
598///
599/// Note that finalization callbacks are tied to the lifetime of a `Weak<T>`,
600/// and will not be called after the `Weak<T>` is dropped.
601///
602/// # `Clone`
603///
604/// Since finalization callbacks are specific to a `Weak<T>` instance, cloning
605/// will create a new object reference without a finalizer, as if created by
606/// [`Self::new`]. You can use [`Self::clone_with_finalizer`] to attach a
607/// finalization callback to the clone.
#[derive(Debug)]
pub struct Weak<T> {
  // `None` for an empty handle. Boxed so the `WeakData` has a stable address,
  // which is handed to V8 as the weak-callback parameter (see `new_raw`).
  data: Option<Box<WeakData<T>>>,
  // Used to reach the host isolate (e.g. its finalizer map) and to detect
  // isolate disposal.
  isolate_handle: IsolateHandle,
}
613
impl<T> Weak<T> {
  /// Creates a weak handle from an existing handle, without a finalizer.
  ///
  /// # Panics
  ///
  /// Panics if `handle` is hosted by a different `Isolate`, or by an
  /// `Isolate` that has been disposed.
  pub fn new(isolate: &mut Isolate, handle: impl Handle<Data = T>) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    Self::new_raw(isolate, data, None)
  }

  /// Create a weak handle with a finalization callback installed.
  ///
  /// There is no guarantee as to *when* or even *if* the finalization callback
  /// will be invoked. The invocation is performed solely on a best effort
  /// basis. GC-based finalization should *not* be relied upon for any critical
  /// form of resource management! Consider using
  /// [`Self::with_guaranteed_finalizer`] instead.
  ///
  /// The callback does not have access to the inner value, because it has
  /// already been collected by the time it runs.
  pub fn with_finalizer(
    isolate: &mut Isolate,
    handle: impl Handle<Data = T>,
    finalizer: Box<dyn FnOnce(&mut Isolate)>,
  ) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    // The finalizer is stored in the isolate's finalizer map; only its id
    // travels with the weak handle.
    let finalizer_id = isolate
      .get_finalizer_map_mut()
      .add(FinalizerCallback::Regular(finalizer));
    Self::new_raw(isolate, data, Some(finalizer_id))
  }

  /// Create a weak handle with a finalization callback installed, which is
  /// guaranteed to run at some point.
  ///
  /// Unlike [`Self::with_finalizer`], whose finalization callbacks are not
  /// guaranteed to run, this method is guaranteed to be called before the
  /// isolate is destroyed. It can therefore be used for critical resource
  /// management. Note that other than that, there is still no guarantee as to
  /// *when* the callback will be called.
  ///
  /// Unlike regular finalizers, guaranteed finalizers aren't passed a mutable
  /// [`Isolate`] reference, since they might be called when the isolate is
  /// being destroyed, at which point it might be no longer valid to use.
  /// Accessing the isolate (with unsafe code) from the finalizer callback is
  /// therefore unsound, unless you prove the isolate is not being destroyed.
  pub fn with_guaranteed_finalizer(
    isolate: &mut Isolate,
    handle: impl Handle<Data = T>,
    finalizer: Box<dyn FnOnce()>,
  ) -> Self {
    let HandleInfo { data, host } = handle.get_handle_info();
    host.assert_match_isolate(isolate);
    let finalizer_id = isolate
      .get_finalizer_map_mut()
      .add(FinalizerCallback::Guaranteed(finalizer));
    Self::new_raw(isolate, data, Some(finalizer_id))
  }

  // Shared constructor: registers a weak reference with V8 and wires up the
  // first-pass GC callback.
  fn new_raw(
    isolate: *mut Isolate,
    data: NonNull<T>,
    finalizer_id: Option<FinalizerId>,
  ) -> Self {
    // Box the bookkeeping record first so its address is stable; that address
    // is what V8 receives as the callback parameter below.
    let weak_data = Box::new(WeakData {
      pointer: Default::default(),
      finalizer_id,
      weak_dropped: Cell::new(false),
    });
    let data = data.cast().as_ptr();
    let data = unsafe {
      v8__Global__NewWeak(
        isolate,
        data,
        weak_data.deref() as *const _ as *const c_void,
        Self::first_pass_callback,
      )
    };
    // Record the storage-cell pointer; `first_pass_callback` clears it when
    // the referenced object is garbage collected.
    weak_data
      .pointer
      .set(Some(unsafe { NonNull::new_unchecked(data as *mut _) }));
    Self {
      data: Some(weak_data),
      isolate_handle: unsafe { (*isolate).thread_safe_handle() },
    }
  }

  /// Creates a new empty handle, identical to one for an object that has
  /// already been GC'd.
  pub fn empty(isolate: &mut Isolate) -> Self {
    Weak {
      data: None,
      isolate_handle: isolate.thread_safe_handle(),
    }
  }

  /// Clones this handle and installs a finalizer callback on the clone, as if
  /// by calling [`Self::with_finalizer`].
  ///
  /// Note that if this handle is empty (its value has already been GC'd), the
  /// finalization callback will never run.
  pub fn clone_with_finalizer(
    &self,
    finalizer: Box<dyn FnOnce(&mut Isolate)>,
  ) -> Self {
    self.clone_raw(Some(FinalizerCallback::Regular(finalizer)))
  }

  /// Clones this handle and installs a guaranteed finalizer callback on the
  /// clone, as if by calling [`Self::with_guaranteed_finalizer`].
  ///
  /// Note that if this handle is empty (its value has already been GC'd), the
  /// finalization callback will never run.
  pub fn clone_with_guaranteed_finalizer(
    &self,
    finalizer: Box<dyn FnOnce()>,
  ) -> Self {
    self.clone_raw(Some(FinalizerCallback::Guaranteed(finalizer)))
  }

  // Shared clone helper; an empty (GC'd) handle clones to another empty one.
  fn clone_raw(&self, finalizer: Option<FinalizerCallback>) -> Self {
    if let Some(data) = self.get_pointer() {
      // SAFETY: We're in the isolate's thread, because Weak<T> isn't Send or
      // Sync.
      let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
      if isolate_ptr.is_null() {
        unreachable!("Isolate was dropped but weak handle wasn't reset.");
      }

      let finalizer_id = if let Some(finalizer) = finalizer {
        let isolate = unsafe { &mut *isolate_ptr };
        Some(isolate.get_finalizer_map_mut().add(finalizer))
      } else {
        None
      };
      Self::new_raw(isolate_ptr, data, finalizer_id)
    } else {
      Weak {
        data: None,
        isolate_handle: self.isolate_handle.clone(),
      }
    }
  }

  /// Converts an optional raw pointer created with [`Weak::into_raw()`] back to
  /// its original `Weak`.
  ///
  /// # Safety
  ///
  /// If this method is called with `Some`, the pointer is invalidated and it
  /// cannot be used with this method again. Additionally, it is unsound to call
  /// this method with an isolate other than that in which the original `Weak`
  /// was created.
  pub unsafe fn from_raw(
    isolate: &mut Isolate,
    data: Option<NonNull<WeakData<T>>>,
  ) -> Self {
    Weak {
      // Reclaim ownership of the box that `into_raw` leaked.
      data: data.map(|raw| unsafe { Box::from_raw(raw.cast().as_ptr()) }),
      isolate_handle: isolate.thread_safe_handle(),
    }
  }

  /// Consume this `Weak` handle and return the underlying raw pointer, or
  /// `None` if the value has been GC'd.
  ///
  /// The return value can be converted back into a `Weak` by using
  /// [`Weak::from_raw`]. Note that `Weak` allocates some memory, and if this
  /// method returns `Some`, the pointer must be converted back into a `Weak`
  /// for it to be freed.
  ///
  /// Note that this method might return `Some` even after the V8 value has been
  /// GC'd.
  pub fn into_raw(mut self) -> Option<NonNull<WeakData<T>>> {
    if let Some(data) = self.data.take() {
      let has_finalizer = if let Some(finalizer_id) = data.finalizer_id {
        // SAFETY: We're in the isolate's thread because Weak isn't Send or Sync
        let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
        if isolate_ptr.is_null() {
          // Disposed isolates have no finalizers.
          false
        } else {
          let isolate = unsafe { &mut *isolate_ptr };
          isolate.get_finalizer_map().map.contains_key(&finalizer_id)
        }
      } else {
        false
      };

      if data.pointer.get().is_none() && !has_finalizer {
        // If the pointer is None and we're not waiting for the second pass,
        // drop the box and return None.
        None
      } else {
        // `weak_dropped` must still be false: that flag is only set in
        // `Drop`, and this method consumed `self` without dropping the box.
        assert!(!data.weak_dropped.get());
        Some(unsafe { NonNull::new_unchecked(Box::into_raw(data)) })
      }
    } else {
      None
    }
  }

  // Returns the storage-cell pointer, or `None` if the value was GC'd, the
  // handle is empty, or the host isolate has been disposed.
  fn get_pointer(&self) -> Option<NonNull<T>> {
    if let Some(data) = &self.data {
      // It seems like when the isolate is dropped, even the first pass callback
      // might not be called.
      if unsafe { self.isolate_handle.get_isolate_ptr() }.is_null() {
        None
      } else {
        data.pointer.get()
      }
    } else {
      None
    }
  }

  /// Returns `true` if the referenced value has been GC'd, the handle was
  /// created empty, or the host isolate has been disposed.
  pub fn is_empty(&self) -> bool {
    self.get_pointer().is_none()
  }

  /// Upgrades this weak handle to a `Global`, or returns `None` if the value
  /// is gone. Panics if `isolate` is not this handle's host isolate.
  pub fn to_global(&self, isolate: &mut Isolate) -> Option<Global<T>> {
    if let Some(data) = self.get_pointer() {
      let handle_host: HandleHost = (&self.isolate_handle).into();
      handle_host.assert_match_isolate(isolate);
      Some(unsafe { Global::new_raw(isolate, data) })
    } else {
      None
    }
  }

  /// Creates a `Local` for the referenced value in the given scope, or
  /// returns `None` if the value is gone. Panics if the scope belongs to a
  /// different isolate.
  pub fn to_local<'s>(
    &self,
    scope: &mut HandleScope<'s, ()>,
  ) -> Option<Local<'s, T>> {
    if let Some(data) = self.get_pointer() {
      let handle_host: HandleHost = (&self.isolate_handle).into();
      handle_host.assert_match_isolate(scope);
      let local = unsafe {
        scope.cast_local(|sd| {
          v8__Local__New(sd.get_isolate_ptr(), data.cast().as_ptr()) as *const T
        })
      };
      Some(local.unwrap())
    } else {
      None
    }
  }

  // Finalization callbacks.

  // Runs right after the referenced object is GC'd: releases the storage
  // cell and, if a finalizer exists, requests the second pass.
  unsafe extern "C" fn first_pass_callback(wci: *const WeakCallbackInfo) {
    // SAFETY: If this callback is called, then the weak handle hasn't been
    // reset, which means the `Weak` instance which owns the pinned box that the
    // parameter points to hasn't been dropped.
    let weak_data = unsafe {
      let ptr = v8__WeakCallbackInfo__GetParameter(wci);
      &*(ptr as *mut WeakData<T>)
    };

    // Clear the pointer (making the handle "empty") and destroy the cell.
    let data = weak_data.pointer.take().unwrap();
    unsafe {
      v8__Global__Reset(data.cast().as_ptr());
    }

    // Only set the second pass callback if there could be a finalizer.
    if weak_data.finalizer_id.is_some() {
      unsafe {
        v8__WeakCallbackInfo__SetSecondPassCallback(
          wci,
          Self::second_pass_callback,
        );
      };
    }
  }

  // Runs some time after the first pass: invokes the finalizer (if still
  // registered) and frees the `WeakData` box if the `Weak` was dropped.
  unsafe extern "C" fn second_pass_callback(wci: *const WeakCallbackInfo) {
    // SAFETY: This callback is guaranteed by V8 to be called in the isolate's
    // thread before the isolate is disposed.
    let isolate = unsafe { &mut *v8__WeakCallbackInfo__GetIsolate(wci) };

    // SAFETY: This callback might be called well after the first pass callback,
    // which means the corresponding Weak might have been dropped. In Weak's
    // Drop impl we make sure that if the second pass callback hasn't yet run, the
    // Box<WeakData<T>> is leaked, so it will still be alive by the time this
    // callback is called.
    let weak_data = unsafe {
      let ptr = v8__WeakCallbackInfo__GetParameter(wci);
      &*(ptr as *mut WeakData<T>)
    };
    let finalizer: Option<FinalizerCallback> = {
      let finalizer_id = weak_data.finalizer_id.unwrap();
      isolate.get_finalizer_map_mut().map.remove(&finalizer_id)
    };

    if weak_data.weak_dropped.get() {
      // SAFETY: If weak_dropped is true, the corresponding Weak has been dropped,
      // so it's safe to take ownership of the Box<WeakData<T>> and drop it.
      let _ = unsafe {
        Box::from_raw(weak_data as *const WeakData<T> as *mut WeakData<T>)
      };
    }

    match finalizer {
      Some(FinalizerCallback::Regular(finalizer)) => finalizer(isolate),
      Some(FinalizerCallback::Guaranteed(finalizer)) => finalizer(),
      None => {}
    }
  }
}
919
impl<T> Clone for Weak<T> {
  // Clones the weak reference without attaching a finalizer (see the type
  // docs; use `clone_with_finalizer` to attach one to the clone).
  fn clone(&self) -> Self {
    self.clone_raw(None)
  }
}
925
impl<T> Drop for Weak<T> {
  fn drop(&mut self) {
    // Removes this handle's finalizer from the isolate's finalizer map.
    // Returns whether the finalizer existed.
    let remove_finalizer = |finalizer_id: Option<FinalizerId>| -> bool {
      if let Some(finalizer_id) = finalizer_id {
        // SAFETY: We're in the isolate's thread because `Weak` isn't Send or Sync.
        let isolate_ptr = unsafe { self.isolate_handle.get_isolate_ptr() };
        // A null isolate pointer means the isolate has already been disposed,
        // and its finalizer map with it — nothing left to remove.
        if !isolate_ptr.is_null() {
          let isolate = unsafe { &mut *isolate_ptr };
          let finalizer =
            isolate.get_finalizer_map_mut().map.remove(&finalizer_id);
          return finalizer.is_some();
        }
      }
      false
    };

    if let Some(data) = self.get_pointer() {
      // If the pointer is not None, the first pass callback hasn't been
      // called yet, and resetting will prevent it from being called.
      unsafe { v8__Global__Reset(data.cast().as_ptr()) };
      remove_finalizer(self.data.as_ref().unwrap().finalizer_id);
    } else if let Some(weak_data) = self.data.take() {
      // The second pass callback removes the finalizer, so if there is one,
      // the second pass hasn't yet run, and WeakData will have to be alive.
      // In that case we leak the WeakData but remove the finalizer.
      if remove_finalizer(weak_data.finalizer_id) {
        weak_data.weak_dropped.set(true);
        Box::leak(weak_data);
      }
    }
  }
}
959
960impl<T> Eq for Weak<T> where T: Eq {}
961
962impl<T, Rhs: Handle> PartialEq<Rhs> for Weak<T>
963where
964  T: PartialEq<Rhs::Data>,
965{
966  fn eq(&self, other: &Rhs) -> bool {
967    let HandleInfo {
968      data: other_data,
969      host: other_host,
970    } = other.get_handle_info();
971    let self_host: HandleHost = (&self.isolate_handle).into();
972    if !self_host.match_host(other_host, None) {
973      false
974    } else if let Some(self_data) = self.get_pointer() {
975      unsafe { self_data.as_ref() == other_data.as_ref() }
976    } else {
977      false
978    }
979  }
980}
981
982impl<T, T2> PartialEq<Weak<T2>> for Weak<T>
983where
984  T: PartialEq<T2>,
985{
986  fn eq(&self, other: &Weak<T2>) -> bool {
987    let self_host: HandleHost = (&self.isolate_handle).into();
988    let other_host: HandleHost = (&other.isolate_handle).into();
989    if !self_host.match_host(other_host, None) {
990      return false;
991    }
992    match (self.get_pointer(), other.get_pointer()) {
993      (Some(self_data), Some(other_data)) => unsafe {
994        self_data.as_ref() == other_data.as_ref()
995      },
996      (None, None) => true,
997      _ => false,
998    }
999  }
1000}
1001
/// The inner mechanism behind [`Weak`] and finalizations.
///
/// This struct is heap-allocated and will not move until it's dropped, so it
/// can be accessed by the finalization callbacks by creating a shared reference
/// from a pointer. The fields are wrapped in [`Cell`] so they are modifiable by
/// both the [`Weak`] and the finalization callbacks.
pub struct WeakData<T> {
  // The underlying weak global handle; `take()`n by the first pass callback
  // once V8 reclaims the object, leaving `None` behind.
  pointer: Cell<Option<NonNull<T>>>,
  // Key into the isolate's finalizer map, if a finalizer was registered.
  finalizer_id: Option<FinalizerId>,
  // Set by `Weak::drop` when the `Weak` is dropped while the second pass
  // callback is still pending; tells the second pass to free this box.
  weak_dropped: Cell<bool>,
}
1013
1014impl<T> std::fmt::Debug for WeakData<T> {
1015  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1016    f.debug_struct("WeakData")
1017      .field("pointer", &self.pointer)
1018      .finish_non_exhaustive()
1019  }
1020}
1021
// Opaque stand-in for the C++ `v8::WeakCallbackInfo`; only ever handled
// through raw pointers passed across the FFI boundary.
#[repr(C)]
struct WeakCallbackInfo(Opaque);
1024
1025type FinalizerId = usize;
1026
pub(crate) enum FinalizerCallback {
  // Receives the isolate; runs from the second pass weak callback while the
  // isolate is alive.
  Regular(Box<dyn FnOnce(&mut Isolate)>),
  // Takes no isolate argument; presumably so it can also run when the map is
  // drained (e.g. at isolate teardown) — hence "guaranteed". TODO confirm.
  Guaranteed(Box<dyn FnOnce()>),
}
1031
/// Per-isolate registry of finalizer callbacks, keyed by [`FinalizerId`]
/// (accessed via `Isolate::get_finalizer_map_mut`).
#[derive(Default)]
pub(crate) struct FinalizerMap {
  map: std::collections::HashMap<FinalizerId, FinalizerCallback>,
  // Next id to hand out; incremented on every `add` (overflow is a known
  // TODO there).
  next_id: FinalizerId,
}
1037
1038impl FinalizerMap {
1039  fn add(&mut self, finalizer: FinalizerCallback) -> FinalizerId {
1040    let id = self.next_id;
1041    // TODO: Overflow.
1042    self.next_id += 1;
1043    self.map.insert(id, finalizer);
1044    id
1045  }
1046
1047  pub(crate) fn drain(
1048    &mut self,
1049  ) -> impl Iterator<Item = FinalizerCallback> + '_ {
1050    self.map.drain().map(|(_, finalizer)| finalizer)
1051  }
1052}
1053
/// A traced handle without destructor that clears the handle. The embedder needs
/// to ensure that the handle is not accessed once the V8 object has been
/// reclaimed. For more details see BasicTracedReference.
#[repr(C)]
pub struct TracedReference<T> {
  // Raw storage sized to match the C++ `v8::TracedReference`; only ever
  // manipulated through the `v8__TracedReference__*` FFI functions.
  data: [u8; crate::binding::v8__TracedReference_SIZE],
  // Carries the referenced type for type-safe `Local<T>` conversions; no `T`
  // is stored in Rust.
  _phantom: PhantomData<T>,
}
1062
1063impl<T> TracedReference<T> {
1064  /// An empty TracedReference without storage cell.
1065  pub fn empty() -> Self {
1066    let mut this = std::mem::MaybeUninit::uninit();
1067    unsafe {
1068      v8__TracedReference__CONSTRUCT(this.as_mut_ptr() as _);
1069      this.assume_init()
1070    }
1071  }
1072
1073  /// Construct a TracedReference from a Local.
1074  ///
1075  /// A new storage cell is created pointing to the same object.
1076  pub fn new(scope: &mut HandleScope<()>, data: Local<T>) -> Self {
1077    let mut this = Self::empty();
1078    this.reset(scope, Some(data));
1079    this
1080  }
1081
1082  pub fn get<'s>(
1083    &self,
1084    scope: &mut HandleScope<'s, ()>,
1085  ) -> Option<Local<'s, T>> {
1086    unsafe {
1087      scope.cast_local(|sd| {
1088        v8__TracedReference__Get(
1089          self as *const Self as *const TracedReference<Data>,
1090          sd.get_isolate_ptr(),
1091        ) as *const T
1092      })
1093    }
1094  }
1095
1096  /// Always resets the reference. Creates a new reference from `other` if it is
1097  /// non-empty.
1098  pub fn reset(&mut self, scope: &mut HandleScope<()>, data: Option<Local<T>>) {
1099    unsafe {
1100      v8__TracedReference__Reset(
1101        self as *mut Self as *mut TracedReference<Data>,
1102        scope.get_isolate_ptr(),
1103        data
1104          .map_or(std::ptr::null_mut(), |h| h.as_non_null().as_ptr())
1105          .cast(),
1106      );
1107    }
1108  }
1109}
1110
1111impl<T> Drop for TracedReference<T> {
1112  fn drop(&mut self) {
1113    unsafe {
1114      v8__TracedReference__DESTRUCT(
1115        self as *mut Self as *mut TracedReference<Data>,
1116      );
1117    }
1118  }
1119}
1120
/// Eternal handles are set-once handles that live for the lifetime of the isolate.
#[repr(C)]
pub struct Eternal<T> {
  // Raw storage sized to match the C++ `v8::Eternal`; only ever manipulated
  // through the `v8__Eternal__*` FFI functions.
  data: [u8; crate::binding::v8__Eternal_SIZE],
  // Carries the referenced type for type-safe `Local<T>` conversions; no `T`
  // is stored in Rust.
  _phantom: PhantomData<T>,
}
1127
impl<T> Eternal<T> {
  /// Creates an empty `Eternal` that does not refer to any value yet.
  pub fn empty() -> Self {
    let mut this = std::mem::MaybeUninit::uninit();
    // SAFETY: `assume_init` relies on the C++ constructor initializing the
    // whole `data` buffer.
    unsafe {
      v8__Eternal__CONSTRUCT(this.as_mut_ptr() as _);
      this.assume_init()
    }
  }

  /// Clears the handle so it becomes empty again.
  // NOTE(review): takes `&self` yet casts to `*mut`; the mutation happens on
  // the C++ side — confirm this relies on V8-side interior mutability.
  pub fn clear(&self) {
    unsafe {
      v8__Eternal__Clear(self as *const Self as *mut Eternal<Data>);
    }
  }

  /// Points the handle at the same object as `data`. Per the type-level docs,
  /// eternal handles are set-once and live for the isolate's lifetime.
  pub fn set(&self, scope: &mut HandleScope<()>, data: Local<T>) {
    unsafe {
      v8__Eternal__Set(
        self as *const Self as *mut Eternal<Data>,
        scope.get_isolate_ptr(),
        data.as_non_null().as_ptr().cast(),
      );
    }
  }

  /// Returns the referenced value as a `Local`, or `None` if the handle is
  /// empty.
  pub fn get<'s>(
    &self,
    scope: &mut HandleScope<'s, ()>,
  ) -> Option<Local<'s, T>> {
    unsafe {
      scope.cast_local(|sd| {
        v8__Eternal__Get(
          self as *const Self as *const Eternal<Data>,
          sd.get_isolate_ptr(),
        ) as *const T
      })
    }
  }

  /// Returns `true` if the handle does not refer to a value.
  pub fn is_empty(&self) -> bool {
    unsafe { v8__Eternal__IsEmpty(self as *const Self as *const Eternal<Data>) }
  }
}
1171
1172impl<T> Drop for Eternal<T> {
1173  fn drop(&mut self) {
1174    unsafe {
1175      v8__Eternal__DESTRUCT(self as *mut Self as *mut Eternal<Data>);
1176    }
1177  }
1178}
1179
/// A Local<T> passed from V8 without an inherent scope.
/// The value must be "unsealed" with Scope::unseal to bind
/// it to a lifetime.
// `repr(transparent)` keeps the layout identical to the raw non-null pointer
// V8 hands us.
#[derive(Debug)]
#[repr(transparent)]
pub struct SealedLocal<T>(pub(crate) NonNull<T>);