Add T: ?Sized to every Smart Pointer in the Library #8

Open · wants to merge 7 commits into base: master
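This PR relaxes the implicit `T: Sized` bound on the library's smart pointers so they can hold slices and trait objects. A rough sketch of what the change enables, pieced together from the new tests and `Owned::from_box` below (the surrounding function and import paths are assumptions, not part of the diff):

// Assumes the crate's Collector and Owned are in scope.
use core::any::Any;

fn demo() {
    let mut collector = Collector::new();
    let handle = collector.handle();

    // An unsized payload behind Owned, ill-formed before this change:
    let slice: Owned<[i32]> = Owned::from_box(&handle, Box::new([0, 1, 2, 3]));
    assert_eq!(slice[2], 2);

    // Trait objects work too:
    let any: Owned<dyn Any + Send> = Owned::from_box(&handle, Box::new(4u8));
    assert!(any.is::<u8>());

    drop(slice);
    drop(any);
    collector.collect(); // deferred drops run here
}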
2 changes: 2 additions & 0 deletions rust-toolchain.toml
@@ -0,0 +1,2 @@
[toolchain]
channel = "stable"
147 changes: 141 additions & 6 deletions src/collector.rs
@@ -1,15 +1,30 @@
use core::mem::ManuallyDrop;
use core::alloc::Layout;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

extern crate alloc;
use alloc::boxed::Box;

#[repr(C)]
struct NodeHeader {
pub(crate) struct NodeHeader {
link: NodeLink,
/// Stores the metadata needed for unsized types. With the nightly `ptr_metadata` feature this could hold just the metadata.
/// Initialized when the data is put into the drop queue. Requires that the `Node` never move, which the library's design already guarantees.
/// This isn't a `*mut Node<T>`, as that would prohibit unsize coercion.
///
/// All fat pointers in Rust have the same size, so this also works for `dyn` types. Once the `ptr_metadata` API is stabilized, this should use it.
self_ptr: MaybeUninit<*mut [()]>,
drop: unsafe fn(*mut NodeHeader),
}

impl NodeHeader {
/// SAFETY: `this` must be valid, and `self_ptr` in the `NodeHeader` must be initialized.
/// `T` must be the same type the `NodeHeader` was initialized with.
pub(crate) unsafe fn get_node_ptr<T: ?Sized>(this: *mut NodeHeader) -> *mut Node<T> {
core::mem::transmute_copy(&(*this).self_ptr)
}
}
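Reviewer note: `get_node_ptr` relies on every fat pointer having the same size and layout, so the bits of a `*mut Node<T>` can round-trip through the erased `*mut [()]` slot. A standalone sketch of that round-trip (hypothetical `roundtrip` function, illustration only):

use core::any::Any;
use core::mem::{transmute_copy, MaybeUninit};

fn roundtrip(p: *mut dyn Any) -> *mut dyn Any {
    let mut erased: MaybeUninit<*mut [()]> = MaybeUninit::uninit();
    unsafe {
        // Store the fat pointer's bits, as write_self_ptr does below.
        erased.as_mut_ptr().cast::<*mut dyn Any>().write(p);
        // Read them back, as get_node_ptr does above.
        transmute_copy::<_, *mut dyn Any>(&erased)
    }
}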

#[repr(C)]
union NodeLink {
collector: *mut CollectorInner,
@@ -27,14 +42,16 @@ union NodeLink {
/// [`Owned`]: crate::Owned
/// [`Shared`]: crate::Shared
#[repr(C)]
pub struct Node<T> {
pub struct Node<T: ?Sized> {
header: NodeHeader,
/// The data stored in this allocation.
pub data: T,
}

unsafe fn drop_node<T>(node: *mut NodeHeader) {
let _ = Box::from_raw(node as *mut Node<T>);
unsafe fn drop_node<T: ?Sized>(node: *mut NodeHeader) {
// self_ptr is initialized by queue_drop before this is called. If T: Sized, this reads only the first half of self_ptr; the rest is uninit.
let self_ptr = NodeHeader::get_node_ptr(node);
let _: Box<Node<T>> = Box::from_raw(self_ptr);
}

impl<T: Send + 'static> Node<T> {
@@ -61,14 +78,84 @@ impl<T: Send + 'static> Node<T> {
link: NodeLink {
collector: handle.collector,
},
self_ptr: MaybeUninit::uninit(),
drop: drop_node::<T>,
},
data,
}))
}
}

impl<T> Node<T> {
impl<T: Send + ?Sized + 'static> Node<T> {
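/// Allocates a `Node<T>` by moving the value out of `data`; works for unsized `T` such as slices and trait objects.
/// Frees the original box's allocation without dropping the moved value.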
pub fn alloc_from_box(handle: &Handle, data: Box<T>) -> *mut Node<T> {
unsafe {
let src_ptr = &raw const *data;
let node = Self::alloc_for_layout(handle, Layout::for_value(&*data), |mem| {
// Equivalent to mem.with_metadata_of(src_ptr), but on stable Rust.
let offset = mem.byte_offset_from(src_ptr);
let src_ptr: *const Node<T> = src_ptr as *const Node<T>;
src_ptr.byte_offset(offset).cast_mut()
});

let size = size_of_val(&*data);
// Move the payload's bytes into the new node.
core::ptr::copy_nonoverlapping(
src_ptr as *const u8,
(&raw mut (*node).data).cast(),
size,
);
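            // Free the original allocation without dropping the moved-out value:
            // ManuallyDrop<T> has T's layout and alignment but no drop glue.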
core::mem::forget(data);
let src_ptr = src_ptr as *mut core::mem::ManuallyDrop<T>;
let src: Box<ManuallyDrop<T>> = Box::from_raw(src_ptr);
drop(src);
node
}
}
}
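The closure passed to `alloc_for_layout` above re-attaches the slice/vtable metadata by pure offset arithmetic, a stable-Rust stand-in for the nightly `with_metadata_of`. Isolated to a hypothetical, slices-only helper:

unsafe fn with_len_of(mem: *mut u8, src: *const [u8]) -> *mut [u8] {
    unsafe {
        // Move src's address to mem while keeping its length metadata;
        // roughly mem.with_metadata_of(src) on nightly.
        let offset = mem.byte_offset_from(src);
        (src as *mut [u8]).byte_offset(offset)
    }
}

One caveat worth a review comment: `byte_offset_from` is documented for two pointers into the same allocation, while `mem` and the source box are distinct allocations here; the nightly `with_metadata_of` API avoids that wrinkle.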

impl<T: ?Sized> Node<T> {
/// The `T` in the returned `Node` is not initialized.
/// This function increments the collector's alloc counter.
///
/// # convert_ptr
/// Must attach any required pointer metadata; the returned pointer must point to the same address it was given.
/// This hook is what makes creating a `Node<Shared<T>>` possible.
pub(crate) unsafe fn alloc_for_layout(
handle: &Handle,
layout: Layout,
// A callback, so this can also create `Shared` nodes.
convert_ptr: impl FnOnce(*mut u8) -> *mut Node<T>,
) -> *mut Node<T> {
let node_layout = Layout::new::<Node<()>>()
.extend(layout)
.unwrap()
.0
.pad_to_align();

// increase the alloc count directly before allocating
unsafe {
(*handle.collector).allocs.fetch_add(1, Ordering::Relaxed);
}
let mem_ptr = alloc::alloc::alloc(node_layout);
if mem_ptr.is_null() {
alloc::alloc::handle_alloc_error(node_layout);
}

let node_ptr = convert_ptr(mem_ptr);
// Debug-assert that convert_ptr at least preserved the address.
debug_assert!(node_ptr.byte_offset_from(mem_ptr) == 0);

// init the NodeHeader
(&raw mut (*node_ptr).header).write(NodeHeader {
link: NodeLink {
collector: handle.collector,
},
drop: drop_node::<T>,
self_ptr: MaybeUninit::uninit(),
});
node_ptr
}
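For review, the header/payload layout computed above, made concrete for a small payload (hypothetical check, not part of the PR):

fn layout_demo() {
    use core::alloc::Layout;
    // A 5-byte, align-1 payload lands directly after the header.
    let payload = Layout::array::<u8>(5).unwrap();
    let (node_layout, offset) = Layout::new::<Node<()>>().extend(payload).unwrap();
    assert_eq!(offset, core::mem::size_of::<Node<()>>());
    assert!(node_layout.pad_to_align().size() >= offset + 5);
}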

/// Adds a `Node` to its associated [`Collector`]'s drop queue. The `Node`
/// and its contained data may be dropped at a later time when
/// [`Collector::collect`] or [`Collector::collect_one`] is called.
@@ -95,12 +182,27 @@ impl<T> Node<T> {
/// [`Collector::collect_one`]: crate::Collector::collect_one
/// [`Node::alloc`]: crate::Node::alloc
pub unsafe fn queue_drop(node: *mut Node<T>) {
Self::write_self_ptr(node);
let collector = (*node).header.link.collector;
(*node).header.link.next = ManuallyDrop::new(AtomicPtr::new(core::ptr::null_mut()));
let tail = (*collector).tail.swap(node as *mut NodeHeader, Ordering::AcqRel);
(*tail).link.next.store(node as *mut NodeHeader, Ordering::Relaxed);
}

/// Prepares the node to be shared via `AtomicPtr`.
/// Required before dropping it or sharing it through `SharedCell`.
/// Moving the `Node` after calling this method invalidates `self_ptr` again.
///
/// SAFETY: `node` must be a valid pointer.
pub(crate) unsafe fn write_self_ptr(node: *mut Node<T>) {
(*node)
.header
.self_ptr
.as_mut_ptr()
.cast::<*mut Node<T>>()
.write(node);
}
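Reviewer note: the invariant above, restated as a call sequence (crate-internal sketch; `handle` is assumed in scope):

let node: *mut Node<[u8]> = Node::alloc(&handle, [1u8, 2, 3]);
unsafe {
    // The node's address is final once boxed, so self_ptr stays valid.
    Node::write_self_ptr(node); // safe to publish via AtomicPtr from here on
    Node::queue_drop(node);     // queue_drop also (re)writes self_ptr itself
}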

/// Gets a [`Handle`] to this `Node`'s associated [`Collector`].
///
/// The argument must point to a valid `Node` previously allocated with
@@ -183,6 +285,7 @@ impl Collector {
link: NodeLink {
next: ManuallyDrop::new(AtomicPtr::new(core::ptr::null_mut())),
},
self_ptr: MaybeUninit::uninit(),
drop: drop_node::<()>,
},
data: (),
@@ -358,6 +461,38 @@ mod tests {
}
}

#[test]
fn unsize() {
let mut collector = Collector::new();
let node: *mut Node<[u8]> = Node::alloc(&collector.handle(), [0, 1, 2, 3]);
unsafe { Node::queue_drop(node); }
collector.collect();
assert!(collector.try_cleanup().is_ok());
}

#[test]
fn dyn_coercion() {
let mut collector = Collector::new();
let node: *mut Node<dyn core::any::Any> = Node::alloc(&collector.handle(), 4u8);
unsafe {
Node::queue_drop(node);
}
collector.collect();
assert!(collector.try_cleanup().is_ok());
}

#[test]
fn from_box() {
let mut collector = Collector::new();
let boxed_slice: Box<[i32]> = Box::new([0, 1, 2, 3]);
let node = Node::alloc_from_box(&collector.handle(), boxed_slice);
unsafe {
Node::queue_drop(node);
}
collector.collect();
assert!(collector.try_cleanup().is_ok());
}

#[test]
fn collector() {
let counter = Arc::new(AtomicUsize::new(0));
26 changes: 19 additions & 7 deletions src/owned.rs
@@ -4,6 +4,8 @@ use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;

extern crate alloc;

/// An owned smart pointer with deferred collection, analogous to `Box`.
///
/// When an `Owned<T>` is dropped, its contents are added to the drop queue
@@ -13,13 +15,14 @@ use core::ptr::NonNull;
///
/// [`Collector`]: crate::Collector
/// [`Handle`]: crate::Handle
pub struct Owned<T> {
#[repr(transparent)]
pub struct Owned<T: ?Sized> {
node: NonNull<Node<T>>,
phantom: PhantomData<T>,
}

unsafe impl<T: Send> Send for Owned<T> {}
unsafe impl<T: Sync> Sync for Owned<T> {}
unsafe impl<T: Send + ?Sized> Send for Owned<T> {}
unsafe impl<T: Sync + ?Sized> Sync for Owned<T> {}

impl<T: Send + 'static> Owned<T> {
/// Constructs a new `Owned<T>`.
@@ -39,31 +42,40 @@ impl<T: Send + 'static> Owned<T> {
}
}

impl<T: Send + ?Sized + 'static> Owned<T> {
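/// Constructs an `Owned<T>` from an already-boxed value. Unlike `Owned::new`,
/// this supports unsized `T`, e.g. `Owned<[i32]>` or `Owned<dyn Any + Send>`.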
pub fn from_box(handle: &Handle, data: alloc::boxed::Box<T>) -> Self {
Owned {
node: unsafe { NonNull::new_unchecked(Node::alloc_from_box(handle, data)) },
phantom: PhantomData
}
}
}

impl<T: Clone + Send + 'static> Clone for Owned<T> {
fn clone(&self) -> Self {
let handle = unsafe { Node::handle(self.node.as_ptr()) };
Owned::new(&handle, self.deref().clone())
}
}

impl<T> Deref for Owned<T> {
impl<T: ?Sized> Deref for Owned<T> {
type Target = T;

fn deref(&self) -> &Self::Target {
unsafe { &self.node.as_ref().data }
}
}

impl<T> DerefMut for Owned<T> {
impl<T: ?Sized> DerefMut for Owned<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut self.node.as_mut().data }
}
}

impl<T> Drop for Owned<T> {
impl<T: ?Sized> Drop for Owned<T> {
fn drop(&mut self) {
unsafe {
Node::queue_drop(self.node.as_ptr());
}
}
}
}