#[macro_use]
pub(crate) mod macros;
#[doc(hidden)]
pub mod macro_util;
use core::{
marker::PhantomData,
mem::{self, ManuallyDrop},
num::NonZeroUsize,
ptr::NonNull,
};
use super::*;
/// A `PhantomData` of `T` which is unconditionally `Send` and `Sync`,
/// regardless of whether `T` is.
pub(crate) struct SendSyncPhantomData<T: ?Sized>(PhantomData<T>);

// SAFETY: `SendSyncPhantomData` is a zero-sized marker which stores no `T`,
// so sending one across threads transfers no `T` data. Types embedding this
// marker must not rely on it to imply anything about `T`'s own thread-safety
// — NOTE(review): confirm that the embedding types uphold this.
unsafe impl<T: ?Sized> Send for SendSyncPhantomData<T> {}
// SAFETY: As above — no `T` is stored, so sharing a reference to this marker
// shares no `T` data.
unsafe impl<T: ?Sized> Sync for SendSyncPhantomData<T> {}

impl<T: ?Sized> Default for SendSyncPhantomData<T> {
    fn default() -> SendSyncPhantomData<T> {
        SendSyncPhantomData(PhantomData)
    }
}

// All values of this zero-sized type are identical, so equality is trivially
// `true`.
impl<T: ?Sized> PartialEq for SendSyncPhantomData<T> {
    fn eq(&self, _other: &Self) -> bool {
        true
    }
}

impl<T: ?Sized> Eq for SendSyncPhantomData<T> {}

impl<T: ?Sized> Clone for SendSyncPhantomData<T> {
    fn clone(&self) -> Self {
        SendSyncPhantomData(PhantomData)
    }
}
// Miri-only hook, declared (not defined) here so that, when running under
// Miri, we can promise that a pointer has a given alignment even when Miri's
// symbolic alignment checking cannot prove it on its own — NOTE(review): the
// exact semantics are defined by Miri's intrinsic of this name; confirm
// against Miri's documentation.
#[cfg(miri)]
extern "Rust" {
    pub(crate) fn miri_promise_symbolic_alignment(ptr: *const (), align: usize);
}
/// A type from which a memory address can be extracted as a `usize`.
pub(crate) trait AsAddress {
    /// Returns the address of `self` as a `usize`, discarding provenance and
    /// any wide-pointer metadata.
    fn addr(self) -> usize;
}

impl<T: ?Sized> AsAddress for &T {
    #[inline(always)]
    fn addr(self) -> usize {
        // Coerce to a raw pointer and defer to the `*const T` impl.
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}

impl<T: ?Sized> AsAddress for &mut T {
    #[inline(always)]
    fn addr(self) -> usize {
        // Coerce to a raw pointer and defer to the `*const T` impl.
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}

impl<T: ?Sized> AsAddress for NonNull<T> {
    #[inline(always)]
    fn addr(self) -> usize {
        AsAddress::addr(self.as_ptr())
    }
}

impl<T: ?Sized> AsAddress for *const T {
    #[inline(always)]
    fn addr(self) -> usize {
        // `.cast::<()>()` drops any wide-pointer metadata (vtable or length);
        // the `as usize` cast then discards provenance, which is why the
        // nightly `lossy_provenance_casts` lint is allowed in test builds.
        #[allow(clippy::as_conversions)]
        #[cfg_attr(
            __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS,
            allow(lossy_provenance_casts)
        )]
        return self.cast::<()>() as usize;
    }
}

impl<T: ?Sized> AsAddress for *mut T {
    #[inline(always)]
    fn addr(self) -> usize {
        // Coerce to a const pointer and defer to the `*const T` impl.
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}
/// Returns `Ok(())` iff the address of `t` is a multiple of `align_of::<U>()`.
#[inline(always)]
pub(crate) fn validate_aligned_to<T: AsAddress, U>(t: T) -> Result<(), AlignmentError<(), U>> {
    // `align_of::<U>()` is always non-zero, so the modulo operation cannot
    // divide by zero.
    #[allow(clippy::arithmetic_side_effects)]
    let is_aligned = t.addr() % mem::align_of::<U>() == 0;
    if is_aligned {
        Ok(())
    } else {
        // SAFETY: We only reach this branch when `t`'s address is not a
        // multiple of `align_of::<U>()`, i.e. the misalignment condition this
        // error represents — NOTE(review): confirm against
        // `AlignmentError::new_unchecked`'s documented contract.
        Err(unsafe { AlignmentError::new_unchecked(()) })
    }
}
/// Returns the number of trailing padding bytes needed to round `len` up to
/// the next multiple of `align`.
///
/// `align` must be a power of two (per the kani contract below).
#[cfg_attr(
    kani,
    kani::requires(len <= isize::MAX as usize),
    kani::requires(align.is_power_of_two()),
    kani::ensures(|&p| (len + p) % align.get() == 0),
    // Ensures that we add the minimum required padding.
    kani::ensures(|&p| p < align.get()),
)]
pub(crate) const fn padding_needed_for(len: usize, align: NonZeroUsize) -> usize {
    #[cfg(kani)]
    #[kani::proof_for_contract(padding_needed_for)]
    fn proof() {
        padding_needed_for(kani::any(), kani::any());
    }
    // Since `align` is a power of two, `align - 1` is a mask selecting the
    // sub-alignment bits of an address. `align` is non-zero, so the
    // subtraction cannot underflow.
    #[allow(clippy::arithmetic_side_effects)]
    let low_bits = align.get() - 1;
    // `len.wrapping_neg()` is `2^N - len` (mod `2^N`); its low bits are
    // exactly the distance from `len` up to the next multiple of `align`.
    // This is identical to the `!(len.wrapping_sub(1)) & mask` formulation,
    // since `!x == x.wrapping_neg() - 1` in two's complement.
    len.wrapping_neg() & low_bits
}
/// Rounds `n` down to the largest multiple of `align` which is `<= n`.
///
/// `align` must be a power of two; this is checked via `debug_assert!` unless
/// the `no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0` cfg disables
/// panicking in `const fn`.
#[inline(always)]
#[cfg_attr(
    kani,
    kani::requires(align.is_power_of_two()),
    kani::ensures(|&m| m <= n && m % align.get() == 0),
    // Guarantees that `m` is the *largest* value such that `m % align == 0`.
    kani::ensures(|&m| {
        // If this `checked_add` fails, then the next multiple would wrap
        // around, which trivially satisfies the "largest value" requirement.
        m.checked_add(align.get()).map(|next_mul| next_mul > n).unwrap_or(true)
    })
)]
pub(crate) const fn round_down_to_next_multiple_of_alignment(
    n: usize,
    align: NonZeroUsize,
) -> usize {
    #[cfg(kani)]
    #[kani::proof_for_contract(round_down_to_next_multiple_of_alignment)]
    fn proof() {
        round_down_to_next_multiple_of_alignment(kani::any(), kani::any());
    }
    let align = align.get();
    #[cfg(not(no_zerocopy_panic_in_const_and_vec_try_reserve_1_57_0))]
    debug_assert!(align.is_power_of_two());
    // Zeroing out the bits of `n` below the alignment yields the largest
    // aligned value not exceeding `n`. `align` is non-zero, so the
    // subtraction cannot underflow.
    #[allow(clippy::arithmetic_side_effects)]
    let aligned_bits = !(align - 1);
    n & aligned_bits
}
/// Returns the larger of `a` and `b` (`a` if they are equal).
pub(crate) const fn max(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if a.get() >= b.get() {
        a
    } else {
        b
    }
}
/// Returns the smaller of `a` and `b` (`a` if they are equal).
pub(crate) const fn min(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if a.get() <= b.get() {
        a
    } else {
        b
    }
}
/// Copies the bytes of `src` into the prefix of `dst`.
///
/// # Safety
///
/// The caller must ensure that `src.len() <= dst.len()`; this precondition is
/// only checked via `debug_assert!`.
#[inline(always)]
pub(crate) unsafe fn copy_unchecked(src: &[u8], dst: &mut [u8]) {
    debug_assert!(src.len() <= dst.len());
    let count = src.len();
    // SAFETY: `src` is valid for `count` reads by its own length, and `dst`
    // is valid for `count` writes by the caller's promise that it is at least
    // as long as `src`. The `&`/`&mut` borrows guarantee that the two regions
    // do not overlap.
    unsafe {
        core::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), count);
    };
}
/// Reinterprets the bytes of `src` as a `Dst` without any validity checks.
///
/// The sizes of `Src` and `Dst` are statically checked to be equal via
/// `static_assert!`.
///
/// # Safety
///
/// The caller must ensure that the bytes of `src` constitute a valid `Dst`.
#[inline(always)]
pub(crate) const unsafe fn transmute_unchecked<Src, Dst>(src: Src) -> Dst {
    static_assert!(Src, Dst => core::mem::size_of::<Src>() == core::mem::size_of::<Dst>());
    // A `repr(C)` union lays both fields out at offset 0, so reading `dst`
    // after writing `src` reinterprets the same bytes. `ManuallyDrop`
    // prevents either field from being dropped by the union itself.
    #[repr(C)]
    union Transmute<Src, Dst> {
        src: ManuallyDrop<Src>,
        dst: ManuallyDrop<Dst>,
    }
    // SAFETY: Both fields start at offset 0 and have equal sizes (checked
    // above), so reading `dst` reads exactly the bytes of `src`. The caller
    // promises those bytes are a valid `Dst`.
    unsafe { ManuallyDrop::into_inner(Transmute { src: ManuallyDrop::new(src) }.dst) }
}
/// Allocates a `Box<T>` whose (possibly unsized) layout is determined by
/// `meta`, obtaining memory from `allocate`.
///
/// Returns `Err(AllocError)` if the size computed from `meta` overflows, is
/// too large to round up to `align` within `isize::MAX`, produces an invalid
/// `Layout`, or if `allocate` returns null.
///
/// # Safety
///
/// NOTE(review): the caller contract (in particular, what `allocate` must
/// guarantee — e.g. behaving like `alloc::alloc::alloc`) is not documented in
/// this chunk; confirm against this function's call sites.
#[must_use = "has no side effects (other than allocation)"]
#[cfg(feature = "alloc")]
#[inline]
pub(crate) unsafe fn new_box<T>(
    meta: T::PointerMetadata,
    allocate: unsafe fn(core::alloc::Layout) -> *mut u8,
) -> Result<alloc::boxed::Box<T>, AllocError>
where
    T: ?Sized + crate::KnownLayout,
{
    // Total size in bytes of a `T` described by `meta`; `None` signals
    // overflow.
    let size = match T::size_for_metadata(meta) {
        Some(size) => size,
        None => return Err(AllocError),
    };
    let align = T::LAYOUT.align.get();
    // Reject sizes which could exceed `isize::MAX` once rounded up to
    // `align`, as required for valid `Layout`s.
    #[allow(clippy::as_conversions)]
    let max_alloc = (isize::MAX as usize).saturating_sub(align);
    if size > max_alloc {
        return Err(AllocError);
    }
    let layout = Layout::from_size_align(size, align).or(Err(AllocError))?;
    let ptr = if layout.size() != 0 {
        // SAFETY: NOTE(review): presumably `allocate`'s contract requires a
        // non-zero-sized layout, which the branch condition guarantees;
        // confirm against the allocator functions passed by callers.
        let ptr = unsafe { allocate(layout) };
        match NonNull::new(ptr) {
            Some(ptr) => ptr,
            None => return Err(AllocError),
        }
    } else {
        // Zero-sized case: do not call the allocator; instead, fabricate a
        // dangling, well-aligned pointer.
        let align = T::LAYOUT.align.get();
        // NOTE(review): the transmute (rather than an `as` cast) appears
        // deliberate — presumably so the pointer is not tied to any
        // allocation's provenance; confirm the intended rationale.
        #[allow(unknown_lints)]
        #[allow(clippy::useless_transmute, integer_to_ptr_transmutes)]
        let dangling = unsafe { mem::transmute::<usize, *mut u8>(align) };
        // SAFETY: `align` comes from a `NonZeroUsize` (`T::LAYOUT.align`), so
        // `dangling` is non-null.
        unsafe { NonNull::new_unchecked(dangling) }
    };
    let ptr = T::raw_from_ptr_len(ptr, meta);
    #[allow(clippy::undocumented_unsafe_blocks)]
    Ok(unsafe { alloc::boxed::Box::from_raw(ptr.as_ptr()) })
}
/// Support for [`MetadataOf`]: pointer metadata paired with layout
/// validation.
mod len_of {
    use super::*;

    /// The pointer metadata of a `T` (e.g. a trailing-slice element count).
    ///
    /// NOTE(review): the `unsafe` constructors below suggest this type
    /// carries a validity invariant on `meta` (such as the described size
    /// being in bounds); the invariant is not documented in this chunk —
    /// confirm against other uses of `MetadataOf` before relying on it.
    pub(crate) struct MetadataOf<T: ?Sized + KnownLayout> {
        // The raw metadata value.
        meta: T::PointerMetadata,
        // Ties this metadata to the `T` it describes without storing a `T`.
        _p: PhantomData<T>,
    }

    impl<T: ?Sized + KnownLayout> Copy for MetadataOf<T> {}

    impl<T: ?Sized + KnownLayout> Clone for MetadataOf<T> {
        fn clone(&self) -> Self {
            *self
        }
    }

    impl<T: ?Sized> MetadataOf<T>
    where
        T: KnownLayout,
    {
        /// Constructs a `MetadataOf` from `meta` iff `meta` does not exceed
        /// the length of the existing value `t`.
        #[inline(always)]
        pub(crate) fn new_in_bounds(t: &T, meta: usize) -> Option<Self>
        where
            T: KnownLayout<PointerMetadata = usize>,
        {
            if meta <= Ptr::from_ref(t).len() {
                // SAFETY: `meta` is bounded by the length of the live
                // allocation `t`, so it describes a size no larger than
                // `t`'s own.
                Some(unsafe { Self::new_unchecked(meta) })
            } else {
                None
            }
        }

        /// Constructs a `MetadataOf` from `meta` without any checks.
        ///
        /// # Safety
        ///
        /// The caller must uphold this type's validity invariant on `meta` —
        /// NOTE(review): see the note on the type; the invariant is not
        /// documented here.
        pub(crate) unsafe fn new_unchecked(meta: T::PointerMetadata) -> Self {
            Self { meta, _p: PhantomData }
        }

        /// Returns the underlying metadata value.
        pub(crate) fn get(&self) -> T::PointerMetadata
        where
            T::PointerMetadata: Copy,
        {
            self.meta
        }

        /// Returns the number of trailing padding bytes of a `T` described by
        /// this metadata.
        #[inline]
        pub(crate) fn padding_needed_for(&self) -> usize
        where
            T: KnownLayout<PointerMetadata = usize>,
        {
            let trailing_slice_layout = crate::trailing_slice_layout::<T>();
            #[allow(
                unstable_name_collisions,
                clippy::incompatible_msrv,
                clippy::multiple_unsafe_ops_per_block
            )]
            // SAFETY: Relies on `self.meta` satisfying this type's
            // construction invariant so that
            // `offset + meta * elem_size` cannot overflow — NOTE(review):
            // confirm that invariant at the `new_unchecked` call sites.
            let unpadded_size = unsafe {
                let trailing_size = self.meta.unchecked_mul(trailing_slice_layout.elem_size);
                trailing_size.unchecked_add(trailing_slice_layout.offset)
            };
            util::padding_needed_for(unpadded_size, T::LAYOUT.align)
        }

        /// Validates that the byte range at `addr` of length `bytes_len` can
        /// be cast to a `T`, returning the resulting `T` metadata and the
        /// byte index at which the `T` is split from the remaining bytes.
        ///
        /// If `meta` is `Some`, the target is treated as having the fixed
        /// size implied by that metadata rather than `T::LAYOUT`'s own size
        /// info, and the provided metadata is returned in place of the
        /// computed element count.
        #[inline(always)]
        pub(crate) fn validate_cast_and_convert_metadata(
            addr: usize,
            bytes_len: MetadataOf<[u8]>,
            cast_type: CastType,
            meta: Option<T::PointerMetadata>,
        ) -> Result<(MetadataOf<T>, MetadataOf<[u8]>), MetadataCastError> {
            let layout = match meta {
                None => T::LAYOUT,
                Some(meta) => {
                    let size = match T::size_for_metadata(meta) {
                        Some(size) => size,
                        None => return Err(MetadataCastError::Size),
                    };
                    DstLayout {
                        align: T::LAYOUT.align,
                        size_info: crate::SizeInfo::Sized { size },
                        statically_shallow_unpadded: false,
                    }
                }
            };
            let (elems, split_at) =
                layout.validate_cast_and_convert_metadata(addr, bytes_len.get(), cast_type)?;
            let elems = T::PointerMetadata::from_elem_count(elems);
            // Prefer caller-supplied metadata over the computed element
            // count.
            let elems = meta.unwrap_or(elems);
            // SAFETY: `validate_cast_and_convert_metadata` succeeded, so the
            // resulting `elems` and `split_at` describe an in-bounds cast —
            // NOTE(review): this presumes success of that call implies the
            // `MetadataOf` invariant; confirm.
            let elems = unsafe { MetadataOf::new_unchecked(elems) };
            let split_at = unsafe { MetadataOf::<[u8]>::new_unchecked(split_at) };
            Ok((elems, split_at))
        }
    }
}
pub(crate) use len_of::MetadataOf;
/// Polyfills of standard-library APIs — NOTE(review): presumably these exist
/// because the corresponding inherent methods post-date the crate's MSRV;
/// confirm against the MSRV policy.
pub(crate) mod polyfills {
    use core::ptr::{self, NonNull};

    /// Polyfill of `NonNull::slice_from_raw_parts`.
    #[allow(unused)]
    pub(crate) trait NonNullExt<T> {
        /// Forms a `NonNull<[T]>` slice pointer from a thin element pointer
        /// and a length.
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]>;
    }

    impl<T> NonNullExt<T> for NonNull<T> {
        #[cfg_attr(
            all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
            coverage(off)
        )]
        #[inline(always)]
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> {
            let ptr = ptr::slice_from_raw_parts_mut(data.as_ptr(), len);
            // SAFETY: `ptr` has the same data address as `data`, which is
            // `NonNull`, so `ptr` is non-null.
            unsafe { NonNull::new_unchecked(ptr) }
        }
    }

    /// Polyfill of the integer `unchecked_add`/`unchecked_sub`/
    /// `unchecked_mul` methods.
    #[allow(unused)]
    pub(crate) trait NumExt {
        /// # Safety
        ///
        /// The caller must ensure the addition does not overflow.
        unsafe fn unchecked_add(self, rhs: Self) -> Self;
        /// # Safety
        ///
        /// The caller must ensure the subtraction does not underflow.
        unsafe fn unchecked_sub(self, rhs: Self) -> Self;
        /// # Safety
        ///
        /// The caller must ensure the multiplication does not overflow.
        unsafe fn unchecked_mul(self, rhs: Self) -> Self;
    }

    impl NumExt for usize {
        #[cfg_attr(
            all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
            coverage(off)
        )]
        #[inline(always)]
        unsafe fn unchecked_add(self, rhs: usize) -> usize {
            match self.checked_add(rhs) {
                Some(x) => x,
                None => {
                    // SAFETY: The caller promises the addition does not
                    // overflow, so the `None` arm is unreachable.
                    unsafe { core::hint::unreachable_unchecked() }
                }
            }
        }
        #[cfg_attr(
            all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
            coverage(off)
        )]
        #[inline(always)]
        unsafe fn unchecked_sub(self, rhs: usize) -> usize {
            match self.checked_sub(rhs) {
                Some(x) => x,
                None => {
                    // SAFETY: The caller promises the subtraction does not
                    // underflow, so the `None` arm is unreachable.
                    unsafe { core::hint::unreachable_unchecked() }
                }
            }
        }
        #[cfg_attr(
            all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
            coverage(off)
        )]
        #[inline(always)]
        unsafe fn unchecked_mul(self, rhs: usize) -> usize {
            match self.checked_mul(rhs) {
                Some(x) => x,
                None => {
                    // SAFETY: The caller promises the multiplication does not
                    // overflow, so the `None` arm is unreachable.
                    unsafe { core::hint::unreachable_unchecked() }
                }
            }
        }
    }
}
#[cfg(test)]
pub(crate) mod testutil {
    use crate::*;

    /// A `T` stored with (at least) the alignment of `A`, thanks to the
    /// zero-sized `[A; 0]` field.
    #[derive(Default)]
    pub(crate) struct Align<T, A> {
        pub(crate) t: T,
        _a: [A; 0],
    }

    impl<T: Default, A> Align<T, A> {
        /// Resets `t` to its default value.
        pub(crate) fn set_default(&mut self) {
            self.t = T::default();
        }
    }

    impl<T, A> Align<T, A> {
        pub(crate) const fn new(t: T) -> Align<T, A> {
            Align { t, _a: [] }
        }
    }

    /// Stores `t` one byte past the start of an `A`-aligned container (after
    /// the leading `_u: u8`), so `t`'s address is odd relative to the
    /// container start; `repr(C)` pins the field order, and `T: Unaligned`
    /// keeps the offset-1 placement layout-valid — used to exercise
    /// unaligned-access code paths.
    #[repr(C)]
    pub(crate) struct ForceUnalign<T: Unaligned, A> {
        _u: u8,
        pub(crate) t: T,
        _a: [A; 0],
    }

    impl<T: Unaligned, A> ForceUnalign<T, A> {
        pub(crate) fn new(t: T) -> ForceUnalign<T, A> {
            ForceUnalign { _u: 0, t, _a: [] }
        }
    }

    /// A `u64` wrapper with a guaranteed alignment of 8 on every target (via
    /// `repr(align(8))`), for tests that need a type with a known, large
    /// alignment.
    #[derive(
        KnownLayout,
        Immutable,
        FromBytes,
        IntoBytes,
        Eq,
        PartialEq,
        Ord,
        PartialOrd,
        Default,
        Debug,
        Copy,
        Clone,
    )]
    #[repr(C, align(8))]
    pub(crate) struct AU64(pub(crate) u64);

    impl AU64 {
        /// Reinterprets `self` as its 8-byte (native-endian) representation.
        pub(crate) fn to_bytes(self) -> [u8; 8] {
            crate::transmute!(self)
        }
    }

    impl Display for AU64 {
        #[cfg_attr(
            all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS),
            coverage(off)
        )]
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            Display::fmt(&self.0, f)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_round_down_to_next_multiple_of_alignment() {
        // Reference implementation: round down via integer division.
        fn reference(n: usize, align: NonZeroUsize) -> usize {
            (n / align.get()) * align.get()
        }
        // Exercise every power-of-two alignment from 1 through 16.
        for pow in 0..5 {
            let align = NonZeroUsize::new(1 << pow).unwrap();
            for n in 0..256 {
                let expected = reference(n, align);
                let actual = round_down_to_next_multiple_of_alignment(n, align);
                assert_eq!(
                    actual, expected,
                    "round_down_to_next_multiple_of_alignment({}, {})",
                    n, align
                );
            }
        }
    }

    // `round_down_to_next_multiple_of_alignment` debug-asserts that `align`
    // is a power of two; 3 is not, so this call must panic (on toolchains
    // where the assertion is compiled in).
    #[rustversion::since(1.57.0)]
    #[test]
    #[should_panic]
    fn test_round_down_to_next_multiple_of_alignment_zerocopy_panic_in_const_and_vec_try_reserve() {
        round_down_to_next_multiple_of_alignment(0, NonZeroUsize::new(3).unwrap());
    }
}