Skip to content

Commit

Permalink
Support #![no_std] for the zerogc API
Browse files Browse the repository at this point in the history
An implementation doesn't necessarily require the stdlib
or even a system allocator. However, as a matter of practice,
"zerogc-simple" currently requires both.
  • Loading branch information
Techcable committed Aug 10, 2020
1 parent 9e256b0 commit 3bd9123
Show file tree
Hide file tree
Showing 7 changed files with 106 additions and 77 deletions.
14 changes: 13 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,16 @@ indexmap = { version = "1.4", optional = true }
members = ["libs/simple", "libs/derive"]

[profile.dev]
opt-level = 1
opt-level = 1

[features]
default = ["std"]
# Depend on the standard library (optional)
#
# This implements `Trace` for standard-library collections like `HashMap`
std = []
# Depend on `extern crate alloc` in addition to the Rust `core`
# This is implied by using the standard library (feature="std")
#
# This implements `Trace` for `Box` and collections like `Vec`
alloc = []
2 changes: 1 addition & 1 deletion src/cell.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
//! for fields that are wrapped in a [GcCell].
//! Just mark the field with `#[zerogc(mutable(public))]`
//! and it'll generate a safe wrapper.
use std::cell::Cell;
use core::cell::Cell;

use crate::{GcSafe, Trace, GcVisitor, NullTrace, TraceImmutable, GcDirectBarrier,};

Expand Down
12 changes: 8 additions & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
const_panic, // RFC 2345 - Const asserts
)]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
//! Zero overhead tracing garbage collection for rust,
//! by abusing the borrow checker.
//!
Expand All @@ -15,13 +16,16 @@
//! 6. Collection can only happen with an explicit `safepoint` call and has no overhead between these calls,
//! 7. API supports moving objects (allowing copying/generational GCs)
// Pull in `extern crate alloc` only when an allocator is available.
//
// NOTE: this must be an *outer* `#[cfg]` on the item, not a crate-level
// `#![cfg(...)]`. A crate-level `#![cfg(any(feature = "alloc", feature = "std"))]`
// would compile the entire library away under `--no-default-features`,
// defeating the point of supporting `#![no_std]` in the first place.
#[cfg(any(feature = "alloc", feature = "std"))]
extern crate alloc;

/*
* I want this library to use 'mostly' stable features,
* unless there's good justification to use an unstable feature.
*/
use std::mem;
use std::ops::{Deref, DerefMut};
use std::fmt::Debug;
use core::mem;
use core::ops::{Deref, DerefMut};
use core::fmt::Debug;

#[macro_use]
mod manually_traced;
Expand Down Expand Up @@ -558,7 +562,7 @@ unsafe impl<T> TraceImmutable for AssumeNotTraced<T> {
unsafe impl<T> NullTrace for AssumeNotTraced<T> {}
/// No tracing implies GcSafe
unsafe impl<T> GcSafe for AssumeNotTraced<T> {
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
}
unsafe_gc_brand!(AssumeNotTraced, T);

Expand Down
63 changes: 60 additions & 3 deletions src/manually_traced/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@
//!
//! `RefCell` and `Cell` are intentionally ignored and do not have implementations.
//! Some collectors may need write barriers to protect their internals.
use core::num::Wrapping;

use crate::{Trace, GcSafe, GcVisitor, NullTrace, GcBrand, GcSystem, TraceImmutable};
use crate::prelude::*;
use crate::GcDirectBarrier;

macro_rules! trace_tuple {
{ $($param:ident)* } => {
Expand Down Expand Up @@ -113,7 +115,7 @@ macro_rules! trace_array {
}
unsafe impl<T: $crate::NullTrace> $crate::NullTrace for [T; $size] {}
unsafe impl<T: GcSafe> GcSafe for [T; $size] {
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
}
unsafe impl<'new_gc, S: GcSystem, T> $crate::GcBrand<'new_gc, S> for [T; $size]
where S: GcSystem, T: GcBrand<'new_gc, S>,
Expand Down Expand Up @@ -203,5 +205,60 @@ unsafe impl<T: TraceImmutable> TraceImmutable for [T] {
}
unsafe impl<T: NullTrace> NullTrace for [T] {}
unsafe impl<T: GcSafe> GcSafe for [T] {
const NEEDS_DROP: bool = std::mem::needs_drop::<T>();
const NEEDS_DROP: bool = core::mem::needs_drop::<T>();
}

unsafe impl<T: Trace> Trace for Option<T> {
    /// An `Option` needs tracing exactly when its payload does
    const NEEDS_TRACE: bool = T::NEEDS_TRACE;

    #[inline]
    fn visit<V: GcVisitor>(&mut self, visitor: &mut V) -> Result<(), V::Err> {
        // Only a present payload has anything to trace
        if let Some(ref mut inner) = *self {
            visitor.visit(inner)
        } else {
            Ok(())
        }
    }
}
unsafe impl<T: TraceImmutable> TraceImmutable for Option<T> {
    /// Immutably trace the payload, if any
    #[inline]
    fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), <V as GcVisitor>::Err> {
        match self.as_ref() {
            Some(inner) => visitor.visit_immutable(inner),
            None => Ok(()),
        }
    }
}
/// An `Option` contains no GC references of its own,
/// so it is null-traceable whenever its payload is
unsafe impl<T: NullTrace> NullTrace for Option<T> {}
unsafe impl<T: GcSafe> GcSafe for Option<T> {
    // Dropping an `Option` only does work if dropping the payload does
    const NEEDS_DROP: bool = T::NEEDS_DROP;
}
unsafe impl<'gc, OwningRef, V> GcDirectBarrier<'gc, OwningRef> for Option<V>
    where V: GcDirectBarrier<'gc, OwningRef> {
    /// Forward the write barrier to the payload (if present),
    /// adjusting the offset to account for where it sits inside the `Option`
    #[inline]
    unsafe fn write_barrier(&self, owner: &OwningRef, start_offset: usize) {
        // Direct-write is sound because the payload is stored inline
        if let Some(ref inner) = *self {
            /*
             * Compute the payload's offset within the Option by hand.
             * Under the null-pointer optimization the payload lives at
             * offset zero, but other layouts may place it elsewhere.
             */
            let inner_offset = (inner as *const V as usize)
                - (self as *const Self as usize);
            inner.write_barrier(owner, start_offset + inner_offset)
        }
        // `None` has no payload, so there is nothing to protect
    }
}
unsafe_gc_brand!(Option, T);

// Tracing a `Wrapping` just means tracing the value it wraps
unsafe_trace_deref!(Wrapping, T; immut = false; |wrapping| &mut wrapping.0);
unsafe impl<T: TraceImmutable> TraceImmutable for Wrapping<T> {
    /// Delegate immutable tracing straight to the wrapped value
    #[inline]
    fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), V::Err> {
        let Wrapping(ref inner) = *self;
        visitor.visit_immutable(inner)
    }
}
9 changes: 6 additions & 3 deletions src/manually_traced/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ macro_rules! unsafe_trace_deref {
/// We trust ourselves to not do anything bad as long as our paramaters don't
unsafe impl<$($param),*> GcSafe for $target<$($param),*>
where $($param: GcSafe + TraceImmutable),* {
const NEEDS_DROP: bool = std::mem::needs_drop::<Self>();
const NEEDS_DROP: bool = core::mem::needs_drop::<Self>();
}
};
($target:ident, $($param:ident),*; immut = false; |$value:ident| $extract:expr) => {
Expand All @@ -202,7 +202,7 @@ macro_rules! unsafe_trace_deref {
/// We trust ourselves to not do anything bad as long as our paramaters don't
unsafe impl<$($param),*> GcSafe for $target<$($param),*>
where $($param: GcSafe),* {
const NEEDS_DROP: bool = std::mem::needs_drop::<Self>();
const NEEDS_DROP: bool = core::mem::needs_drop::<Self>();
}
};
}
Expand Down Expand Up @@ -304,7 +304,7 @@ macro_rules! unsafe_trace_primitive {
unsafe impl $crate::NullTrace for $target {}
/// No drop/custom behavior -> GcSafe
unsafe impl GcSafe for $target {
const NEEDS_DROP: bool = std::mem::needs_drop::<$target>();
const NEEDS_DROP: bool = core::mem::needs_drop::<$target>();
}
unsafe impl<'gc, OwningRef> $crate::GcDirectBarrier<'gc, OwningRef> for $target {
#[inline(always)]
Expand Down Expand Up @@ -363,6 +363,9 @@ macro_rules! unsafe_gc_brand {
}

mod core;
#[cfg(any(feature = "alloc", feature = "std"))]
mod stdalloc;
#[cfg(feature = "std")]
mod stdlib;
#[cfg(feature = "indexmap")]
mod indexmap;
17 changes: 17 additions & 0 deletions src/manually_traced/stdalloc.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
//! Implementations for types in the standard `alloc` crate
//!
//! These can be used in `#![no_std]` crates without requiring
//! the entire standard library.
use alloc::rc::Rc;
use alloc::sync::Arc;
use alloc::vec::Vec;
use alloc::boxed::Box;

use crate::prelude::*;

// NOTE: Delegate to slice to avoid code duplication
unsafe_trace_deref!(Vec, target = { [T] }; T);
unsafe_trace_deref!(Box, target = T);
// We can only trace `Rc` and `Arc` if the inner type implements `TraceImmutable`,
// since shared ownership only ever hands out `&T`
unsafe_trace_deref!(Rc, T; immut = required; |rc| &**rc);
unsafe_trace_deref!(Arc, T; immut = required; |arc| &**arc);
66 changes: 1 addition & 65 deletions src/manually_traced/stdlib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,9 @@
//! Types that are in `libcore` and are `#![no_std]` should go in the core module,
//! but anything that requires the rest of the stdlib (including collections and allocations),
//! should go in this module.
use crate::prelude::*;

use crate::{Trace, GcSafe, GcVisitor, TraceImmutable, NullTrace, GcBrand, GcSystem, GcDirectBarrier};
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use std::sync::Arc;
use std::num::Wrapping;

// NOTE: Delegate to slice to avoid code duplication
unsafe_trace_deref!(Vec, target = { [T] }; T);
unsafe_trace_deref!(Box, target = T);
// We can only trace `Rc` and `Arc` if the inner type implements `TraceImmutable`
unsafe_trace_deref!(Rc, T; immut = required; |rc| &**rc);
unsafe_trace_deref!(Arc, T; immut = required; |arc| &**arc);
// We can trace `Wrapping` by simply tracing its interior
unsafe_trace_deref!(Wrapping, T; immut = false; |wrapping| &mut wrapping.0);
unsafe impl<T: TraceImmutable> TraceImmutable for Wrapping<T> {
#[inline]
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), V::Err> {
visitor.visit_immutable(&self.0)
}
}

unsafe_immutable_trace_iterable!(HashMap<K, V>; element = { (&K, &V) });
unsafe impl<K: TraceImmutable, V: Trace> Trace for HashMap<K, V> {
Expand Down Expand Up @@ -64,49 +46,3 @@ unsafe impl<V: TraceImmutable> Trace for HashSet<V> {
}
}
unsafe_gc_brand!(HashSet, immut = required; V);

unsafe impl<T: Trace> Trace for Option<T> {
const NEEDS_TRACE: bool = T::NEEDS_TRACE;

#[inline]
fn visit<V: GcVisitor>(&mut self, visitor: &mut V) -> Result<(), V::Err> {
match *self {
None => Ok(()),
Some(ref mut value) => visitor.visit(value),
}
}
}
unsafe impl<T: TraceImmutable> TraceImmutable for Option<T> {
#[inline]
fn visit_immutable<V: GcVisitor>(&self, visitor: &mut V) -> Result<(), <V as GcVisitor>::Err> {
match *self {
None => Ok(()),
Some(ref value) => visitor.visit_immutable(value),
}
}
}
unsafe impl<T: NullTrace> NullTrace for Option<T> {}
unsafe impl<T: GcSafe> GcSafe for Option<T> {
const NEEDS_DROP: bool = T::NEEDS_DROP;
}
unsafe impl<'gc, OwningRef, V> GcDirectBarrier<'gc, OwningRef> for Option<V>
where V: GcDirectBarrier<'gc, OwningRef> {
#[inline]
unsafe fn write_barrier(&self, owner: &OwningRef, start_offset: usize) {
// Implementing direct write is safe because we store our value inline
match *self {
None => { /* Nothing to trigger the barrier for :) */ },
Some(ref value) => {
/*
* We must manually compute the offset
* Null pointer-optimized types will have offset of zero,
* while other types may not
*/
let value_offset = (value as *const V as usize) -
(self as *const Self as usize);
value.write_barrier(owner, start_offset + value_offset)
},
}
}
}
unsafe_gc_brand!(Option, T);

0 comments on commit 3bd9123

Please sign in to comment.