Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
vendor/bytemuck/tests/array_tests.rs (vendored, normal file, 12 lines)
@@ -0,0 +1,12 @@
#[test]
pub fn test_cast_array() {
  let x = [0u32, 1u32, 2u32];
  let _: [u16; 6] = bytemuck::cast(x);
}

#[cfg(feature = "min_const_generics")]
#[test]
pub fn test_cast_long_array() {
  let x = [0u32; 65];
  let _: [u16; 130] = bytemuck::cast(x);
}
vendor/bytemuck/tests/cast_slice_tests.rs (vendored, normal file, 197 lines)
@@ -0,0 +1,197 @@
#![allow(clippy::unnecessary_cast)]
#![allow(clippy::manual_slice_size_calculation)]

use core::mem::size_of;

use bytemuck::*;

#[test]
fn test_try_cast_slice() {
  // some align4 data
  let u32_slice: &[u32] = &[4, 5, 6];
  // the same data as align1
  let the_bytes: &[u8] = try_cast_slice(u32_slice).unwrap();

  assert_eq!(
    u32_slice.as_ptr() as *const u32 as usize,
    the_bytes.as_ptr() as *const u8 as usize
  );
  assert_eq!(
    u32_slice.len() * size_of::<u32>(),
    the_bytes.len() * size_of::<u8>()
  );

  // by taking one byte off the front, we're definitely mis-aligned for u32.
  let mis_aligned_bytes = &the_bytes[1..];
  assert_eq!(
    try_cast_slice::<u8, u32>(mis_aligned_bytes),
    Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned)
  );

  // by taking one byte off the end, we're aligned but would have slop bytes for
  // u32
  let the_bytes_len_minus1 = the_bytes.len() - 1;
  let slop_bytes = &the_bytes[..the_bytes_len_minus1];
  assert_eq!(
    try_cast_slice::<u8, u32>(slop_bytes),
    Err(PodCastError::OutputSliceWouldHaveSlop)
  );

  // if we don't mess with it we can up-alignment cast
  try_cast_slice::<u8, u32>(the_bytes).unwrap();
}

#[test]
fn test_try_cast_slice_mut() {
  // some align4 data
  let u32_slice: &mut [u32] = &mut [4, 5, 6];
  let u32_len = u32_slice.len();
  let u32_ptr = u32_slice.as_ptr();

  // the same data as align1
  let the_bytes: &mut [u8] = try_cast_slice_mut(u32_slice).unwrap();
  let the_bytes_len = the_bytes.len();
  let the_bytes_ptr = the_bytes.as_ptr();

  assert_eq!(
    u32_ptr as *const u32 as usize,
    the_bytes_ptr as *const u8 as usize
  );
  assert_eq!(u32_len * size_of::<u32>(), the_bytes_len * size_of::<u8>());

  // by taking one byte off the front, we're definitely mis-aligned for u32.
  let mis_aligned_bytes = &mut the_bytes[1..];
  assert_eq!(
    try_cast_slice_mut::<u8, u32>(mis_aligned_bytes),
    Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned)
  );

  // by taking one byte off the end, we're aligned but would have slop bytes for
  // u32
  let the_bytes_len_minus1 = the_bytes.len() - 1;
  let slop_bytes = &mut the_bytes[..the_bytes_len_minus1];
  assert_eq!(
    try_cast_slice_mut::<u8, u32>(slop_bytes),
    Err(PodCastError::OutputSliceWouldHaveSlop)
  );

  // if we don't mess with it we can up-alignment cast
  try_cast_slice_mut::<u8, u32>(the_bytes).unwrap();
}

#[test]
fn test_types() {
  let _: i32 = cast(1.0_f32);
  let _: &mut i32 = cast_mut(&mut 1.0_f32);
  let _: &i32 = cast_ref(&1.0_f32);
  let _: &[i32] = cast_slice(&[1.0_f32]);
  let _: &mut [i32] = cast_slice_mut(&mut [1.0_f32]);
  //
  let _: Result<i32, PodCastError> = try_cast(1.0_f32);
  let _: Result<&mut i32, PodCastError> = try_cast_mut(&mut 1.0_f32);
  let _: Result<&i32, PodCastError> = try_cast_ref(&1.0_f32);
  let _: Result<&[i32], PodCastError> = try_cast_slice(&[1.0_f32]);
  let _: Result<&mut [i32], PodCastError> = try_cast_slice_mut(&mut [1.0_f32]);
}

#[test]
fn test_bytes_of() {
  assert_eq!(bytes_of(&0xaabbccdd_u32), &0xaabbccdd_u32.to_ne_bytes());
  assert_eq!(
    bytes_of_mut(&mut 0xaabbccdd_u32),
    &mut 0xaabbccdd_u32.to_ne_bytes()
  );
  let mut a = 0xaabbccdd_u32;
  let a_addr = &a as *const _ as usize;
  // ensure addresses match.
  assert_eq!(bytes_of(&a).as_ptr() as usize, a_addr);
  assert_eq!(bytes_of_mut(&mut a).as_ptr() as usize, a_addr);
}

#[test]
fn test_try_from_bytes() {
  let u32s = [0xaabbccdd, 0x11223344_u32];
  let bytes = bytemuck::cast_slice::<u32, u8>(&u32s);
  assert_eq!(try_from_bytes::<u32>(&bytes[..4]), Ok(&u32s[0]));
  assert_eq!(
    try_from_bytes::<u32>(&bytes[..5]),
    Err(PodCastError::SizeMismatch)
  );
  assert_eq!(
    try_from_bytes::<u32>(&bytes[..3]),
    Err(PodCastError::SizeMismatch)
  );
  assert_eq!(
    try_from_bytes::<u32>(&bytes[1..5]),
    Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned)
  );
}

#[test]
fn test_try_from_bytes_mut() {
  let mut abcd = 0xaabbccdd;
  let mut u32s = [abcd, 0x11223344_u32];
  let bytes = bytemuck::cast_slice_mut::<u32, u8>(&mut u32s);
  assert_eq!(try_from_bytes_mut::<u32>(&mut bytes[..4]), Ok(&mut abcd));
  assert_eq!(try_from_bytes_mut::<u32>(&mut bytes[..4]), Ok(&mut abcd));
  assert_eq!(
    try_from_bytes_mut::<u32>(&mut bytes[..5]),
    Err(PodCastError::SizeMismatch)
  );
  assert_eq!(
    try_from_bytes_mut::<u32>(&mut bytes[..3]),
    Err(PodCastError::SizeMismatch)
  );
  assert_eq!(
    try_from_bytes::<u32>(&bytes[1..5]),
    Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned)
  );
}

#[test]
fn test_from_bytes() {
  let abcd = 0xaabbccdd_u32;
  let aligned_bytes = bytemuck::bytes_of(&abcd);
  assert_eq!(from_bytes::<u32>(aligned_bytes), &abcd);
  assert!(core::ptr::eq(from_bytes(aligned_bytes), &abcd));
}

#[test]
fn test_from_bytes_mut() {
  let mut a = 0xaabbccdd_u32;
  let a_addr = &a as *const _ as usize;
  let aligned_bytes = bytemuck::bytes_of_mut(&mut a);
  assert_eq!(*from_bytes_mut::<u32>(aligned_bytes), 0xaabbccdd_u32);
  assert_eq!(
    from_bytes_mut::<u32>(aligned_bytes) as *const u32 as usize,
    a_addr
  );
}

// like #[should_panic], but can be a part of another test, instead of requiring
// it to be it's own test.
macro_rules! should_panic {
  ($ex:expr) => {
    assert!(
      std::panic::catch_unwind(|| {
        let _ = $ex;
      })
      .is_err(),
      concat!("should have panicked: `", stringify!($ex), "`")
    );
  };
}

#[test]
fn test_panics() {
  should_panic!(cast_slice::<u8, u32>(&[1u8, 2u8]));
  should_panic!(cast_slice_mut::<u8, u32>(&mut [1u8, 2u8]));
  should_panic!(from_bytes::<u32>(&[1u8, 2]));
  should_panic!(from_bytes::<u32>(&[1u8, 2, 3, 4, 5]));
  should_panic!(from_bytes_mut::<u32>(&mut [1u8, 2]));
  should_panic!(from_bytes_mut::<u32>(&mut [1u8, 2, 3, 4, 5]));
  // use cast_slice on some u32s to get some align>=4 bytes, so we can know
  // we'll give from_bytes unaligned ones.
  let aligned_bytes = bytemuck::cast_slice::<u32, u8>(&[0, 0]);
  should_panic!(from_bytes::<u32>(&aligned_bytes[1..5]));
}
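The file above pins down the three ways a Pod slice cast can fail: the target type needs stricter alignment than the input provides, the byte length does not divide into whole target elements (slop), or a single-value cast has the wrong size. As a minimal sketch of the same API outside the test harness (the names `pixels` and `raw` are illustrative, not from the vendored file):

// Editor's sketch, not part of the vendored tests: round-tripping a u32
// buffer through the byte-level view exercised throughout the file above.
use bytemuck::{bytes_of, cast_slice, try_cast_slice};

fn main() {
  let pixels: [u32; 4] = [0xff00_ff00, 0x00ff_00ff, 0, u32::MAX];

  // Casting to a smaller-alignment element type cannot fail.
  let raw: &[u8] = cast_slice(&pixels);
  assert_eq!(raw.len(), 16);

  // Casting back is fallible: the bytes must be u32-aligned and the length
  // must be a multiple of 4. A misaligned sub-slice is rejected, exactly as
  // the tests above assert.
  assert!(try_cast_slice::<u8, u32>(&raw[1..5]).is_err());
  assert_eq!(try_cast_slice::<u8, u32>(raw).unwrap(), &pixels[..]);

  // A single value can be viewed as its native-endian bytes.
  assert_eq!(bytes_of(&1u32), &1u32.to_ne_bytes());
}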
vendor/bytemuck/tests/checked_tests.rs (vendored, normal file, 419 lines)
@@ -0,0 +1,419 @@
#![allow(clippy::unnecessary_cast)]
#![allow(clippy::manual_slice_size_calculation)]

use core::{
  mem::size_of,
  num::{NonZeroU32, NonZeroU8},
};

use bytemuck::{checked::CheckedCastError, *};

#[test]
fn test_try_cast_slice() {
  // some align4 data
  let nonzero_u32_slice: &[NonZeroU32] = &[
    NonZeroU32::new(4).unwrap(),
    NonZeroU32::new(5).unwrap(),
    NonZeroU32::new(6).unwrap(),
  ];

  // contains bytes with invalid bitpattern for NonZeroU8
  assert_eq!(
    checked::try_cast_slice::<NonZeroU32, NonZeroU8>(nonzero_u32_slice),
    Err(CheckedCastError::InvalidBitPattern)
  );

  // the same data as align1
  let the_bytes: &[u8] = checked::try_cast_slice(nonzero_u32_slice).unwrap();

  assert_eq!(
    nonzero_u32_slice.as_ptr() as *const NonZeroU32 as usize,
    the_bytes.as_ptr() as *const u8 as usize
  );
  assert_eq!(
    nonzero_u32_slice.len() * size_of::<NonZeroU32>(),
    the_bytes.len() * size_of::<u8>()
  );

  // by taking one byte off the front, we're definitely mis-aligned for
  // NonZeroU32.
  let mis_aligned_bytes = &the_bytes[1..];
  assert_eq!(
    checked::try_cast_slice::<u8, NonZeroU32>(mis_aligned_bytes),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );

  // by taking one byte off the end, we're aligned but would have slop bytes for
  // NonZeroU32
  let the_bytes_len_minus1 = the_bytes.len() - 1;
  let slop_bytes = &the_bytes[..the_bytes_len_minus1];
  assert_eq!(
    checked::try_cast_slice::<u8, NonZeroU32>(slop_bytes),
    Err(CheckedCastError::PodCastError(PodCastError::OutputSliceWouldHaveSlop))
  );

  // if we don't mess with it we can up-alignment cast
  checked::try_cast_slice::<u8, NonZeroU32>(the_bytes).unwrap();
}

#[test]
fn test_try_cast_slice_mut() {
  // some align4 data
  let u32_slice: &mut [u32] = &mut [4, 5, 6];

  // contains bytes with invalid bitpattern for NonZeroU8
  assert_eq!(
    checked::try_cast_slice_mut::<u32, NonZeroU8>(u32_slice),
    Err(CheckedCastError::InvalidBitPattern)
  );

  // some align4 data
  let u32_slice: &mut [u32] = &mut [0x4444_4444, 0x5555_5555, 0x6666_6666];
  let u32_len = u32_slice.len();
  let u32_ptr = u32_slice.as_ptr();

  // the same data as align1, nonzero bytes
  let the_nonzero_bytes: &mut [NonZeroU8] =
    checked::try_cast_slice_mut(u32_slice).unwrap();
  let the_nonzero_bytes_len = the_nonzero_bytes.len();
  let the_nonzero_bytes_ptr = the_nonzero_bytes.as_ptr();

  assert_eq!(
    u32_ptr as *const u32 as usize,
    the_nonzero_bytes_ptr as *const NonZeroU8 as usize
  );
  assert_eq!(
    u32_len * size_of::<u32>(),
    the_nonzero_bytes_len * size_of::<NonZeroU8>()
  );

  // the same data as align1
  let the_bytes: &mut [u8] = checked::try_cast_slice_mut(u32_slice).unwrap();
  let the_bytes_len = the_bytes.len();
  let the_bytes_ptr = the_bytes.as_ptr();

  assert_eq!(
    u32_ptr as *const u32 as usize,
    the_bytes_ptr as *const u8 as usize
  );
  assert_eq!(
    u32_len * size_of::<u32>(),
    the_bytes_len * size_of::<NonZeroU8>()
  );

  // by taking one byte off the front, we're definitely mis-aligned for u32.
  let mis_aligned_bytes = &mut the_bytes[1..];
  assert_eq!(
    checked::try_cast_slice_mut::<u8, NonZeroU32>(mis_aligned_bytes),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );

  // by taking one byte off the end, we're aligned but would have slop bytes for
  // NonZeroU32
  let the_bytes_len_minus1 = the_bytes.len() - 1;
  let slop_bytes = &mut the_bytes[..the_bytes_len_minus1];
  assert_eq!(
    checked::try_cast_slice_mut::<u8, NonZeroU32>(slop_bytes),
    Err(CheckedCastError::PodCastError(PodCastError::OutputSliceWouldHaveSlop))
  );

  // if we don't mess with it we can up-alignment cast, since there are no
  // zeroes in the original slice
  checked::try_cast_slice_mut::<u8, NonZeroU32>(the_bytes).unwrap();
}

#[test]
fn test_types() {
  let _: NonZeroU32 = checked::cast(1.0_f32);
  let _: &mut NonZeroU32 = checked::cast_mut(&mut 1.0_f32);
  let _: &NonZeroU32 = checked::cast_ref(&1.0_f32);
  let _: &[NonZeroU32] = checked::cast_slice(&[1.0_f32]);
  let _: &mut [NonZeroU32] = checked::cast_slice_mut(&mut [1.0_f32]);
  //
  let _: Result<NonZeroU32, CheckedCastError> = checked::try_cast(1.0_f32);
  let _: Result<&mut NonZeroU32, CheckedCastError> =
    checked::try_cast_mut(&mut 1.0_f32);
  let _: Result<&NonZeroU32, CheckedCastError> =
    checked::try_cast_ref(&1.0_f32);
  let _: Result<&[NonZeroU32], CheckedCastError> =
    checked::try_cast_slice(&[1.0_f32]);
  let _: Result<&mut [NonZeroU32], CheckedCastError> =
    checked::try_cast_slice_mut(&mut [1.0_f32]);
}

#[test]
fn test_try_pod_read_unaligned() {
  let u32s = [0xaabbccdd, 0x11223344_u32];
  let bytes = bytemuck::checked::cast_slice::<u32, u8>(&u32s);

  #[cfg(target_endian = "big")]
  assert_eq!(
    checked::try_pod_read_unaligned::<NonZeroU32>(&bytes[1..5]),
    Ok(NonZeroU32::new(0xbbccdd11).unwrap())
  );
  #[cfg(target_endian = "little")]
  assert_eq!(
    checked::try_pod_read_unaligned::<NonZeroU32>(&bytes[1..5]),
    Ok(NonZeroU32::new(0x44aabbcc).unwrap())
  );

  let u32s = [0; 2];
  let bytes = bytemuck::checked::cast_slice::<u32, u8>(&u32s);

  assert_eq!(
    checked::try_pod_read_unaligned::<NonZeroU32>(&bytes[1..5]),
    Err(CheckedCastError::InvalidBitPattern)
  );
}

#[test]
fn test_try_from_bytes() {
  let nonzero_u32s = [
    NonZeroU32::new(0xaabbccdd).unwrap(),
    NonZeroU32::new(0x11223344).unwrap(),
  ];
  let bytes = bytemuck::checked::cast_slice::<NonZeroU32, u8>(&nonzero_u32s);
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..4]),
    Ok(&nonzero_u32s[0])
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..5]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..3]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[1..5]),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );

  let zero_u32s = [0, 0x11223344_u32];
  let bytes = bytemuck::checked::cast_slice::<u32, u8>(&zero_u32s);
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..4]),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[4..]),
    Ok(&NonZeroU32::new(zero_u32s[1]).unwrap())
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..5]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[..3]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[1..5]),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );
}

#[test]
fn test_try_from_bytes_mut() {
  let a = 0xaabbccdd_u32;
  let b = 0x11223344_u32;
  let mut u32s = [a, b];
  let bytes = bytemuck::checked::cast_slice_mut::<u32, u8>(&mut u32s);
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..4]),
    Ok(&mut NonZeroU32::new(a).unwrap())
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[4..]),
    Ok(&mut NonZeroU32::new(b).unwrap())
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..5]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..3]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[1..5]),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );

  let mut u32s = [0, b];
  let bytes = bytemuck::checked::cast_slice_mut::<u32, u8>(&mut u32s);
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..4]),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[4..]),
    Ok(&mut NonZeroU32::new(b).unwrap())
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..5]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes_mut::<NonZeroU32>(&mut bytes[..3]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
  assert_eq!(
    checked::try_from_bytes::<NonZeroU32>(&bytes[1..5]),
    Err(CheckedCastError::PodCastError(
      PodCastError::TargetAlignmentGreaterAndInputNotAligned
    ))
  );
}

#[test]
fn test_from_bytes() {
  let abcd = 0xaabbccdd_u32;
  let aligned_bytes = bytemuck::bytes_of(&abcd);
  assert_eq!(
    checked::from_bytes::<NonZeroU32>(aligned_bytes),
    &NonZeroU32::new(abcd).unwrap()
  );
  assert!(core::ptr::eq(
    checked::from_bytes(aligned_bytes) as *const NonZeroU32 as *const u32,
    &abcd
  ));
}

#[test]
fn test_from_bytes_mut() {
  let mut a = 0xaabbccdd_u32;
  let a_addr = &a as *const _ as usize;
  let aligned_bytes = bytemuck::bytes_of_mut(&mut a);
  assert_eq!(
    *checked::from_bytes_mut::<NonZeroU32>(aligned_bytes),
    NonZeroU32::new(0xaabbccdd).unwrap()
  );
  assert_eq!(
    checked::from_bytes_mut::<NonZeroU32>(aligned_bytes) as *const NonZeroU32
      as usize,
    a_addr
  );
}

// like #[should_panic], but can be a part of another test, instead of requiring
// it to be it's own test.
macro_rules! should_panic {
  ($ex:expr) => {
    assert!(
      std::panic::catch_unwind(|| {
        let _ = $ex;
      })
      .is_err(),
      concat!("should have panicked: `", stringify!($ex), "`")
    );
  };
}

#[test]
fn test_panics() {
  should_panic!(checked::cast::<u32, NonZeroU32>(0));
  should_panic!(checked::cast_ref::<u32, NonZeroU32>(&0));
  should_panic!(checked::cast_mut::<u32, NonZeroU32>(&mut 0));
  should_panic!(checked::cast_slice::<u8, NonZeroU32>(&[1u8, 2u8]));
  should_panic!(checked::cast_slice_mut::<u8, NonZeroU32>(&mut [1u8, 2u8]));
  should_panic!(checked::from_bytes::<NonZeroU32>(&[1u8, 2]));
  should_panic!(checked::from_bytes::<NonZeroU32>(&[1u8, 2, 3, 4, 5]));
  should_panic!(checked::from_bytes_mut::<NonZeroU32>(&mut [1u8, 2]));
  should_panic!(checked::from_bytes_mut::<NonZeroU32>(&mut [1u8, 2, 3, 4, 5]));
  // use cast_slice on some u32s to get some align>=4 bytes, so we can know
  // we'll give from_bytes unaligned ones.
  let aligned_bytes = bytemuck::cast_slice::<u32, u8>(&[0, 0]);
  should_panic!(checked::from_bytes::<NonZeroU32>(aligned_bytes));
  should_panic!(checked::from_bytes::<NonZeroU32>(&aligned_bytes[1..5]));
  should_panic!(checked::pod_read_unaligned::<NonZeroU32>(
    &aligned_bytes[1..5]
  ));
}

#[test]
fn test_char() {
  assert_eq!(checked::try_cast::<u32, char>(0), Ok('\0'));
  assert_eq!(checked::try_cast::<u32, char>(0xd7ff), Ok('\u{d7ff}'));
  assert_eq!(
    checked::try_cast::<u32, char>(0xd800),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(
    checked::try_cast::<u32, char>(0xdfff),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(checked::try_cast::<u32, char>(0xe000), Ok('\u{e000}'));
  assert_eq!(checked::try_cast::<u32, char>(0x10ffff), Ok('\u{10ffff}'));
  assert_eq!(
    checked::try_cast::<u32, char>(0x110000),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(
    checked::try_cast::<u32, char>(-1i32 as u32),
    Err(CheckedCastError::InvalidBitPattern)
  );
}

#[test]
fn test_bool() {
  assert_eq!(checked::try_cast::<u8, bool>(0), Ok(false));
  assert_eq!(checked::try_cast::<u8, bool>(1), Ok(true));
  for i in 2..=255 {
    assert_eq!(
      checked::try_cast::<u8, bool>(i),
      Err(CheckedCastError::InvalidBitPattern)
    );
  }

  assert_eq!(checked::try_from_bytes::<bool>(&[1]), Ok(&true));
  assert_eq!(
    checked::try_from_bytes::<bool>(&[3]),
    Err(CheckedCastError::InvalidBitPattern)
  );
  assert_eq!(
    checked::try_from_bytes::<bool>(&[0, 1]),
    Err(CheckedCastError::PodCastError(PodCastError::SizeMismatch))
  );
}

#[test]
fn test_all_nonzero() {
  use core::num::*;
  macro_rules! test_nonzero {
    ($nonzero:ty: $primitive:ty) => {
      assert_eq!(
        checked::try_cast::<$primitive, $nonzero>(0),
        Err(CheckedCastError::InvalidBitPattern)
      );
      assert_eq!(
        checked::try_cast::<$primitive, $nonzero>(1),
        Ok(<$nonzero>::new(1).unwrap())
      );
    };
  }

  test_nonzero!(NonZeroU8: u8);
  test_nonzero!(NonZeroI8: i8);
  test_nonzero!(NonZeroU16: u16);
  test_nonzero!(NonZeroI16: i16);
  test_nonzero!(NonZeroU32: u32);
  test_nonzero!(NonZeroI32: i32);
  test_nonzero!(NonZeroU64: u64);
  test_nonzero!(NonZeroI64: i64);
  test_nonzero!(NonZeroU128: u128);
  test_nonzero!(NonZeroI128: i128);
  test_nonzero!(NonZeroUsize: usize);
  test_nonzero!(NonZeroIsize: isize);
}
vendor/bytemuck/tests/derive.rs (vendored, normal file, 77 lines)
@@ -0,0 +1,77 @@
#![cfg(feature = "derive")]
#![allow(dead_code)]

use bytemuck::{ByteEq, ByteHash, Pod, TransparentWrapper, Zeroable};
use std::marker::PhantomData;

#[derive(Copy, Clone, Pod, Zeroable, ByteEq, ByteHash)]
#[repr(C)]
struct Test {
  a: u16,
  b: u16,
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
struct TransparentSingle {
  a: u16,
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
#[transparent(u16)]
struct TransparentWithZeroSized {
  a: u16,
  b: (),
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
struct TransparentWithGeneric<T: ?Sized> {
  a: T,
}

/// Ensuring that no additional bounds are emitted.
/// See https://github.com/Lokathor/bytemuck/issues/145
fn test_generic<T>(x: T) -> TransparentWithGeneric<T> {
  TransparentWithGeneric::wrap(x)
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
#[transparent(T)]
struct TransparentWithGenericAndZeroSized<T: ?Sized> {
  a: (),
  b: T,
}

/// Ensuring that no additional bounds are emitted.
/// See https://github.com/Lokathor/bytemuck/issues/145
fn test_generic_with_zst<T>(x: T) -> TransparentWithGenericAndZeroSized<T> {
  TransparentWithGenericAndZeroSized::wrap(x)
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
struct TransparentUnsized {
  a: dyn std::fmt::Debug,
}

type DynDebug = dyn std::fmt::Debug;

#[derive(TransparentWrapper)]
#[repr(transparent)]
#[transparent(DynDebug)]
struct TransparentUnsizedWithZeroSized {
  a: (),
  b: DynDebug,
}

#[derive(TransparentWrapper)]
#[repr(transparent)]
#[transparent(DynDebug)]
struct TransparentUnsizedWithGenericZeroSizeds<T: ?Sized, U: ?Sized> {
  a: PhantomData<T>,
  b: PhantomData<U>,
  c: DynDebug,
}
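derive.rs is compile-only: it checks that the `Pod`, `Zeroable`, `ByteEq`, `ByteHash`, and `TransparentWrapper` derives accept these shapes without emitting extra bounds. A short sketch of what the derived impls buy you, mirroring the `Test` struct above (assumes bytemuck's `derive` feature; the function name `demo` is illustrative):

// Editor's sketch (assumes the "derive" feature), mirroring the `Test`
// struct from derive.rs to show what the derived impls provide.
use bytemuck::{bytes_of, Pod, Zeroable};

#[derive(Copy, Clone, Pod, Zeroable)]
#[repr(C)]
struct Test {
  a: u16,
  b: u16,
}

fn demo() {
  // Pod + #[repr(C)]: the struct is exactly its four bytes.
  let t = Test { a: 1, b: 2 };
  assert_eq!(bytes_of(&t).len(), 4);

  // Zeroable: an all-zero value without spelling out each field.
  let z: Test = Zeroable::zeroed();
  assert_eq!(bytes_of(&z), &[0u8; 4]);
}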
vendor/bytemuck/tests/doc_tests.rs (vendored, normal file, 123 lines)
@@ -0,0 +1,123 @@
#![allow(clippy::disallowed_names)]

//! Cargo miri doesn't run doctests yet, so we duplicate these here. It's
//! probably not that important to sweat keeping these perfectly up to date, but
//! we should try to catch the cases where the primary tests are doctests.
use bytemuck::*;

// Miri doesn't run on doctests, so... copypaste to the rescue.
#[test]
fn test_transparent_slice() {
  #[repr(transparent)]
  struct Slice<T>([T]);

  unsafe impl<T> TransparentWrapper<[T]> for Slice<T> {}

  let s = Slice::wrap_ref(&[1u32, 2, 3]);
  assert_eq!(&s.0, &[1, 2, 3]);

  let mut buf = [1, 2, 3u8];
  let _sm = Slice::wrap_mut(&mut buf);
}

#[test]
fn test_transparent_basic() {
  #[derive(Default)]
  struct SomeStruct(u32);

  #[repr(transparent)]
  struct MyWrapper(SomeStruct);

  unsafe impl TransparentWrapper<SomeStruct> for MyWrapper {}

  // interpret a reference to &SomeStruct as a &MyWrapper
  let thing = SomeStruct::default();
  let wrapped_ref: &MyWrapper = MyWrapper::wrap_ref(&thing);

  // Works with &mut too.
  let mut mut_thing = SomeStruct::default();
  let wrapped_mut: &mut MyWrapper = MyWrapper::wrap_mut(&mut mut_thing);
  let _ = (wrapped_ref, wrapped_mut);
}

// Work around miri not running doctests
#[test]
fn test_contiguous_doc() {
  #[repr(u8)]
  #[derive(Debug, Copy, Clone, PartialEq)]
  enum Foo {
    A = 0,
    B = 1,
    C = 2,
    D = 3,
    E = 4,
  }
  unsafe impl Contiguous for Foo {
    type Int = u8;
    const MIN_VALUE: u8 = Foo::A as u8;
    const MAX_VALUE: u8 = Foo::E as u8;
  }

  assert_eq!(Foo::from_integer(3).unwrap(), Foo::D);
  assert_eq!(Foo::from_integer(8), None);
  assert_eq!(Foo::C.into_integer(), 2);
  assert_eq!(Foo::B.into_integer(), Foo::B as u8);
}

#[test]
fn test_offsetof_vertex() {
  #[repr(C)]
  struct Vertex {
    pos: [f32; 2],
    uv: [u16; 2],
    color: [u8; 4],
  }
  unsafe impl Zeroable for Vertex {}

  let pos = offset_of!(Zeroable::zeroed(), Vertex, pos);
  let uv = offset_of!(Zeroable::zeroed(), Vertex, uv);
  let color = offset_of!(Zeroable::zeroed(), Vertex, color);

  assert_eq!(pos, 0);
  assert_eq!(uv, 8);
  assert_eq!(color, 12);
}

#[test]
fn test_offsetof_nonpod() {
  #[derive(Default)]
  struct Foo {
    a: u8,
    b: &'static str,
    c: i32,
  }

  let a_offset = offset_of!(Default::default(), Foo, a);
  let b_offset = offset_of!(Default::default(), Foo, b);
  let c_offset = offset_of!(Default::default(), Foo, c);

  assert_ne!(a_offset, b_offset);
  assert_ne!(b_offset, c_offset);
  // We can't check against hardcoded values for a repr(Rust) type,
  // but prove to ourself this way.

  let foo = Foo::default();
  // Note: offsets are in bytes.
  let as_bytes = &foo as *const _ as *const u8;

  // We're using wrapping_offset here because it's not worth
  // the unsafe block, but it would be valid to use `add` instead,
  // as it cannot overflow.
  assert_eq!(
    &foo.a as *const _ as usize,
    as_bytes.wrapping_add(a_offset) as usize
  );
  assert_eq!(
    &foo.b as *const _ as usize,
    as_bytes.wrapping_add(b_offset) as usize
  );
  assert_eq!(
    &foo.c as *const _ as usize,
    as_bytes.wrapping_add(c_offset) as usize
  );
}
vendor/bytemuck/tests/offset_of_tests.rs (vendored, normal file, 60 lines)
@@ -0,0 +1,60 @@
#![allow(clippy::disallowed_names)]
use bytemuck::{offset_of, Zeroable};

#[test]
fn test_offset_of_vertex() {
  #[repr(C)]
  struct Vertex {
    pos: [f32; 2],
    uv: [u16; 2],
    color: [u8; 4],
  }
  unsafe impl Zeroable for Vertex {}

  let pos = offset_of!(Zeroable::zeroed(), Vertex, pos);
  let uv = offset_of!(Zeroable::zeroed(), Vertex, uv);
  let color = offset_of!(Zeroable::zeroed(), Vertex, color);

  assert_eq!(pos, 0);
  assert_eq!(uv, 8);
  assert_eq!(color, 12);
}

#[test]
fn test_offset_of_foo() {
  #[derive(Default)]
  struct Foo {
    a: u8,
    b: &'static str,
    c: i32,
  }

  let a_offset = offset_of!(Default::default(), Foo, a);
  let b_offset = offset_of!(Default::default(), Foo, b);
  let c_offset = offset_of!(Default::default(), Foo, c);

  assert_ne!(a_offset, b_offset);
  assert_ne!(b_offset, c_offset);
  // We can't check against hardcoded values for a repr(Rust) type,
  // but prove to ourself this way.

  let foo = Foo::default();
  // Note: offsets are in bytes.
  let as_bytes = &foo as *const _ as *const u8;

  // we're using wrapping_offset here because it's not worth
  // the unsafe block, but it would be valid to use `add` instead,
  // as it cannot overflow.
  assert_eq!(
    &foo.a as *const _ as usize,
    as_bytes.wrapping_add(a_offset) as usize
  );
  assert_eq!(
    &foo.b as *const _ as usize,
    as_bytes.wrapping_add(b_offset) as usize
  );
  assert_eq!(
    &foo.c as *const _ as usize,
    as_bytes.wrapping_add(c_offset) as usize
  );
}
vendor/bytemuck/tests/std_tests.rs (vendored, normal file, 46 lines)
@@ -0,0 +1,46 @@
#![allow(clippy::uninlined_format_args)]
//! The integration tests seem to always have `std` linked, so things that would
//! depend on that can go here.

use bytemuck::*;

#[test]
fn test_transparent_vtabled() {
  use core::fmt::Display;

  #[repr(transparent)]
  struct DisplayTraitObj(dyn Display);

  unsafe impl TransparentWrapper<dyn Display> for DisplayTraitObj {}

  impl Display for DisplayTraitObj {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
      self.0.fmt(f)
    }
  }

  let v = DisplayTraitObj::wrap_ref(&5i32);
  let s = format!("{}", v);
  assert_eq!(s, "5");

  let mut x = 100i32;
  let v_mut = DisplayTraitObj::wrap_mut(&mut x);
  let s = format!("{}", v_mut);
  assert_eq!(s, "100");
}

#[test]
#[cfg(feature = "extern_crate_alloc")]
fn test_large_box_alloc() {
  type SuperPage = [[u8; 4096]; 4096];
  let _: Box<SuperPage> = try_zeroed_box().unwrap();
}

#[test]
#[cfg(feature = "extern_crate_alloc")]
fn test_zero_sized_box_alloc() {
  #[repr(align(4096))]
  struct Empty;
  unsafe impl Zeroable for Empty {}
  let _: Box<Empty> = try_zeroed_box().unwrap();
}
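The two `extern_crate_alloc` tests above lean on `try_zeroed_box` allocating and zeroing the value directly on the heap, so the 16 MiB `SuperPage` never has to exist as a stack temporary. A minimal sketch of that pattern, using the same API and type alias as the test (feature assumptions as in the vendored file):

// Editor's sketch of the allocation pattern exercised by
// test_large_box_alloc (assumes the "extern_crate_alloc" feature).
use bytemuck::try_zeroed_box;

fn main() {
  // 4096 * 4096 = 16 MiB, larger than a typical default thread stack.
  type SuperPage = [[u8; 4096]; 4096];
  let page: Box<SuperPage> = try_zeroed_box().expect("allocation failed");
  assert_eq!(page[0][0], 0);
}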
vendor/bytemuck/tests/transparent.rs (vendored, normal file, 116 lines)
@@ -0,0 +1,116 @@
// Currently this test doesn't actually check the output of the functions.
// It's only here for miri to check for any potential undefined behaviour.
// TODO: check function results

#[test]
fn test_transparent_wrapper() {
  // An external type defined in a different crate.
  #[derive(Debug, Copy, Clone, Default)]
  struct Foreign(u8);

  use bytemuck::TransparentWrapper;

  #[derive(Debug, Copy, Clone)]
  #[repr(transparent)]
  struct Wrapper(Foreign);

  unsafe impl TransparentWrapper<Foreign> for Wrapper {}

  // Traits can be implemented on crate-local wrapper.
  unsafe impl bytemuck::Zeroable for Wrapper {}
  unsafe impl bytemuck::Pod for Wrapper {}

  impl PartialEq<u8> for Foreign {
    fn eq(&self, &other: &u8) -> bool {
      self.0 == other
    }
  }

  impl PartialEq<u8> for Wrapper {
    fn eq(&self, &other: &u8) -> bool {
      self.0 == other
    }
  }

  let _: u8 = bytemuck::cast(Wrapper::wrap(Foreign::default()));
  let _: Foreign = Wrapper::peel(bytemuck::cast(u8::default()));

  let _: &u8 = bytemuck::cast_ref(Wrapper::wrap_ref(&Foreign::default()));
  let _: &Foreign = Wrapper::peel_ref(bytemuck::cast_ref(&u8::default()));

  let _: &mut u8 =
    bytemuck::cast_mut(Wrapper::wrap_mut(&mut Foreign::default()));
  let _: &mut Foreign =
    Wrapper::peel_mut(bytemuck::cast_mut(&mut u8::default()));

  let _: &[u8] =
    bytemuck::cast_slice(Wrapper::wrap_slice(&[Foreign::default()]));
  let _: &[Foreign] =
    Wrapper::peel_slice(bytemuck::cast_slice(&[u8::default()]));

  let _: &mut [u8] =
    bytemuck::cast_slice_mut(Wrapper::wrap_slice_mut(
      &mut [Foreign::default()],
    ));
  let _: &mut [Foreign] =
    Wrapper::peel_slice_mut(bytemuck::cast_slice_mut(&mut [u8::default()]));

  let _: &[u8] = bytemuck::bytes_of(Wrapper::wrap_ref(&Foreign::default()));
  let _: &Foreign = Wrapper::peel_ref(bytemuck::from_bytes(&[u8::default()]));

  let _: &mut [u8] =
    bytemuck::bytes_of_mut(Wrapper::wrap_mut(&mut Foreign::default()));
  let _: &mut Foreign =
    Wrapper::peel_mut(bytemuck::from_bytes_mut(&mut [u8::default()]));

  // not sure if this is the right usage
  let _ =
    bytemuck::pod_align_to::<_, u8>(Wrapper::wrap_slice(&[Foreign::default()]));
  // counterpart?

  // not sure if this is the right usage
  let _ = bytemuck::pod_align_to_mut::<_, u8>(Wrapper::wrap_slice_mut(&mut [
    Foreign::default(),
  ]));
  // counterpart?

  #[cfg(feature = "extern_crate_alloc")]
  {
    use bytemuck::allocation::TransparentWrapperAlloc;
    use std::rc::Rc;

    let a: Vec<Foreign> = vec![Foreign::default(); 2];

    let b: Vec<Wrapper> = Wrapper::wrap_vec(a);
    assert_eq!(b, [0, 0]);

    let c: Vec<Foreign> = Wrapper::peel_vec(b);
    assert_eq!(c, [0, 0]);

    let d: Box<Foreign> = Box::new(Foreign::default());

    let e: Box<Wrapper> = Wrapper::wrap_box(d);
    assert_eq!(&*e, &0);
    let f: Box<Foreign> = Wrapper::peel_box(e);
    assert_eq!(&*f, &0);

    let g: Rc<Foreign> = Rc::new(Foreign::default());

    let h: Rc<Wrapper> = Wrapper::wrap_rc(g);
    assert_eq!(&*h, &0);
    let i: Rc<Foreign> = Wrapper::peel_rc(h);
    assert_eq!(&*i, &0);

    #[cfg(target_has_atomic = "ptr")]
    {
      use std::sync::Arc;

      let j: Arc<Foreign> = Arc::new(Foreign::default());

      let k: Arc<Wrapper> = Wrapper::wrap_arc(j);
      assert_eq!(&*k, &0);
      let l: Arc<Foreign> = Wrapper::peel_arc(k);
      assert_eq!(&*l, &0);
    }
  }
}
vendor/bytemuck/tests/wrapper_forgets.rs (vendored, normal file, 13 lines)
@@ -0,0 +1,13 @@
use bytemuck::TransparentWrapper;

#[repr(transparent)]
struct Wrap(Box<u32>);

// SAFETY: it's #[repr(transparent)]
unsafe impl TransparentWrapper<Box<u32>> for Wrap {}

fn main() {
  let value = Box::new(5);
  // This used to duplicate the wrapped value, creating a double free :(
  Wrap::wrap(value);
}
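wrapper_forgets.rs is a regression test: `TransparentWrapper::wrap` must take ownership of the inner value without dropping it, or wrapping a `Box` would free it twice. A sketch of the full round trip that behaviour enables, re-declaring the same `Wrap` type for self-containment (the function name `round_trip` is illustrative):

// Editor's sketch: ownership moves into the wrapper on wrap() and back
// out on peel(), so the Box is dropped exactly once.
use bytemuck::TransparentWrapper;

#[repr(transparent)]
struct Wrap(Box<u32>);

// SAFETY: Wrap is #[repr(transparent)] over Box<u32>.
unsafe impl TransparentWrapper<Box<u32>> for Wrap {}

fn round_trip() {
  let wrapped: Wrap = Wrap::wrap(Box::new(5u32));
  let inner: Box<u32> = Wrap::peel(wrapped);
  assert_eq!(*inner, 5);
}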