Skip to content

Commit

Permalink
[alloc] Start to separate allocation logic into its own crate
Browse files Browse the repository at this point in the history
GC now has a separate and customizable allocator interface.
This should allow the tracing and compacting code to integrate tightly
with the allocator internals, while still maintaining some flexibility and code reuse.

This is the first step towards separating `zerogc-simple` into
multiple different implementations. I am planning to re-use much
of this code when I implement generational gc (#14)
  • Loading branch information
Techcable committed Jan 19, 2021
1 parent 161f5d0 commit 58f44aa
Show file tree
Hide file tree
Showing 8 changed files with 1,156 additions and 412 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ readme = "README.md"
indexmap = { version = "1.6", optional = true }

[workspace]
members = ["libs/simple", "libs/derive", "libs/context"]
members = ["libs/simple", "libs/derive", "libs/context", "libs/alloc"]

[profile.dev]
opt-level = 1
Expand Down
26 changes: 26 additions & 0 deletions libs/alloc/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
[package]
name = "zerogc-alloc"
description = "Set of modular allocators for zerogc"
version = "0.1.4"
authors = ["Techcable <[email protected]>"]
readme = "../../README.md"
license = "MIT"
edition = "2018"

[dependencies]
zerogc = { path = "../..", version = "0.1.3" }
# Lazy init (TODO: Replace with `std::lazy` when accepted)
once_cell = { version = "1.5.2", optional = true }
# Better locking
parking_lot = { version = "0.11", optional = true, features = ["nightly"] }
# Concurrency utils
# NOTE: the key was previously misspelled `vresion`, which leaves the
# dependency with no version requirement and fails the manifest check.
crossbeam-utils = { version = "0.8.1", optional = true }

[features]
default = ["malloc", "sync"]
# Enable support for the system allocator
malloc = []
# Enable support for "small object" allocators
small-objects = ["once_cell"]
# Support thread-safety (for all collectors)
sync = ["parking_lot", "once_cell/parking_lot", "crossbeam-utils"]
200 changes: 200 additions & 0 deletions libs/alloc/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,200 @@
//! A set of modular allocators for zerogc.
//!
//! ## Current implementations:
//!
//! ### "Small object" allocator
//! Allocates separate memory-arenas for each size of object.
//! Only supports "small" objects less than a certain size.
//!
//! When objects are freed, they are added to an internal free list.
//! This free list is checked before allocating any more chunks of memory
//! from the operating system.
//!
//! ### Malloc allocator
//! Allocates memory directly using the [standard allocator API](std::alloc)
//!
//! This is excellent for debugging.
#![deny(missing_docs)]
// Features that we always use
#![feature(
alloc_layout_extra, // Needed to compute Layout of objects
const_alloc_layout, // We want to const-fold away layout computations
allocator_api, // We allocate via the standard API
slice_ptr_get, // We want to have NonNull::as_mut
untagged_unions, // This should already be stable....
)]
// Features required for 'small object' allocator
#![cfg_attr(feature = "small-objects", feature(
new_uninit, // Needed to allocate fixed-size arrays via `Box<[SmallArena; NUM_ARENAS]>`
cell_update, // Cell::update is just useful :)
generic_associated_types, // Used to be generic over `std::cell::RefMut` and `MutexGuard`
))]

use std::ptr::NonNull;
use std::alloc::Layout;

#[cfg(feature = "malloc")]
mod malloc;
#[cfg(feature = "small-objects")]
mod small_objects;

/// The most basic interface to allocation
///
/// ## Safety
/// The allocator must obey the API description and allocate chunks of
/// memory of the correct size.
pub unsafe trait SimpleAllocator {
    /// The minimum size of supported memory.
    ///
    /// Anything less than this is wasted space.
    ///
    /// Allocators are required to satisfy requests smaller than this,
    /// although they can just round up internally.
    const MIN_SIZE: usize = 0;
    /// The maximum size of objects supported by the allocator,
    /// or `None` if there is no inherent limitation
    const MAX_SIZE: Option<usize> = None;
    /// The maximum alignment supported by the allocator,
    /// or `None` if there is no inherent limitation.
    const MAX_ALIGNMENT: Option<usize> = None;

    /// Allocate a chunk of memory
    /// whose size is not known at compile time.
    fn alloc(&self, layout: Layout) -> Result<AllocatedObject, AllocationError>;

    /// Allocate a chunk of memory
    /// whose layout is statically known in advance.
    ///
    /// This is likely faster than [SimpleAllocator::alloc], because it can statically
    /// determine which part of the allocator to use.
    #[inline]
    fn alloc_fixed<const SIZE: usize, const ALIGN: usize>(&self) -> Result<AllocatedObject, AllocationError> {
        self.alloc(Layout::from_size_align(SIZE, ALIGN).unwrap())
    }

    /// Free the specified object
    ///
    /// ## Safety
    /// Undefined behavior if the specified object is invalid,
    /// or came from a different allocator.
    unsafe fn free(&self, mem: AllocatedObject);

    /// Free the specified object, whose layout is statically known
    ///
    /// ## Safety
    /// Undefined behavior if the specified object is invalid,
    /// or came from a different allocator.
    #[inline]
    unsafe fn free_fixed<const SIZE: usize, const ALIGN: usize>(&self, mem: AllocatedObject) {
        // In debug builds, verify the static layout matches the object's
        // actual layout before delegating to the dynamic `free`.
        debug_assert_eq!(mem.size(), SIZE);
        debug_assert_eq!(mem.align(), ALIGN);
        self.free(mem)
    }

    /// Returns the memory currently in use
    fn used_memory(&self) -> usize;

    /// Returns the total amount of memory currently reserved by this allocator.
    ///
    /// Not all of this memory is necessarily being used by allocated objects,
    /// although it can't be used by other parts of the program.
    ///
    /// For the amount of memory currently in use, see [SimpleAllocator::used_memory]
    #[deprecated(note = "Should this require computation or be a simple getter?")]
    fn reserved_memory(&self) -> usize;

    /// Release all the memory currently in use,
    /// marking it as unused.
    ///
    /// Returns the amount of memory freed.
    ///
    /// Like [Vec::clear], this method doesn't actually return anything to
    /// the operating system and continues to reserve it. It simply marks the memory as unused.
    ///
    /// It is equivalent to manually calling free on every object
    /// that is currently allocated.
    ///
    /// ## Safety
    /// Undefined behavior if any of the freed memory is ever used again.
    unsafe fn unchecked_reset(&self) -> usize;
}

/// A chunk of allocated memory
///
/// This is a simple wrapper type, pairing the allocated pointer
/// with the layout it was allocated with.
#[derive(Clone, Debug)]
#[must_use]
pub struct AllocatedObject {
    /// A pointer to the start of the allocated memory
    pub ptr: NonNull<u8>,
    /// The layout of the memory
    ///
    /// This may include more space than requested
    /// if the allocator had excess.
    pub layout: Layout
}

impl AllocatedObject {
    /// The size of the object, in bytes (taken from its layout)
    ///
    /// Marked `#[inline]` for consistency with the other trivial,
    /// cross-crate accessors in this crate (`alloc_fixed`, `free_fixed`).
    #[inline]
    pub const fn size(&self) -> usize {
        self.layout.size()
    }
    /// The required alignment of the object (taken from its layout)
    #[inline]
    pub const fn align(&self) -> usize {
        self.layout.align()
    }
}

/// An error caused when allocating a chunk of memory
///
/// This indicates a recoverable error, not a developer error.
/// Invalid usages will cause panics.
#[derive(Clone, Debug)]
pub enum AllocationError {
    /// Indicates that there was insufficient memory to allocate
    /// from the standard library
    StdError {
        /// The underlying cause of the allocation failure
        cause: std::alloc::AllocError,
        /// The layout that failed to allocate
        ///
        /// This may include internal metadata,
        /// so it may end up being larger than actually requested.
        layout: Layout
    },
    /// Indicates that the specified size is invalid
    InvalidSize {
        /// The requested size
        size: usize,
        /// A static message explaining why the size is invalid
        // NOTE(review): the double reference `&'static &'static str` looks
        // unintended -- should this just be `&'static str`? Confirm no other
        // module relies on the current type before changing this public field.
        cause: &'static &'static str
    },
    /// Indicates that the specified alignment is unsupported
    UnsupportedAlignment {
        /// The requested alignment
        align: usize
    }
}
impl AllocationError {
/// Treat this error as a fatal error and panic with an appropriate message
///
/// By default, rust allocations tend to panic on failure so
/// this should be fairly common.
///
/// This is analogous to [std::alloc::handle_alloc_error] from the
/// standard allocator API.
#[cold]
pub fn consider_fatal(&self) -> ! {
match *self {
AllocationError::StdError { cause: _, layout } => {
std::alloc::handle_alloc_error(layout)
},
AllocationError::InvalidSize { size, cause } => {
panic!("Invalid size {}: {}", size, cause);
},
AllocationError::UnsupportedAlignment { align } => {
panic!("Unsupported alignment: {}", align);
}
}
}
}
110 changes: 110 additions & 0 deletions libs/alloc/src/malloc.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
use std::ptr::NonNull;
use crate::{SimpleAllocator, AllocatedObject, AllocationError};

use std::alloc::{Allocator, Global, Layout};
use std::sync::atomic::{AtomicUsize, Ordering};
use crossbeam_utils::atomic::AtomicCell;

/// The header of objects allocated in the std allocator
///
/// This is used to implement a linked list of active objects
#[repr(C)]
struct ObjHeader {
    /// The previously-allocated header (the next link in the list),
    /// or `None` if this is the end of the list
    prev: AtomicCell<Option<NonNull<ObjHeader>>>,
}

/// Compute the padding needed between an `ObjHeader` and a value
/// with the specified layout, so the value stays properly aligned.
#[inline]
pub const fn header_padding(inner_type: Layout) -> usize {
    Layout::new::<ObjHeader>().padding_needed_for(inner_type.align())
}
/// Compute the combined layout of an `ObjHeader` followed by
/// (any needed padding and) a value with the specified layout.
///
/// The resulting alignment is the stricter of the header's
/// and the inner value's.
#[inline]
pub const fn layout_including_header(inner_type: Layout) -> Layout {
    let mut required_align = std::mem::align_of::<ObjHeader>();
    if inner_type.align() > required_align {
        required_align = inner_type.align()
    }
    // NOTE(review): `from_size_align_unchecked` requires that the size,
    // rounded up to `required_align`, does not overflow `isize::MAX`.
    // Nothing here checks that -- confirm callers cannot pass layouts
    // large enough to overflow this sum.
    unsafe {
        Layout::from_size_align_unchecked(
            std::mem::size_of::<ObjHeader>()
                + header_padding(inner_type)
                + inner_type.size(),
            required_align
        )
    }
}

/// An allocator that defers to a standard [Allocator] (the global
/// allocator by default), tracking live objects in a linked list
/// of [ObjHeader]s.
#[derive(Default)]
pub struct StdAllocator<A: Allocator = Global> {
    /// The underlying allocator used for the actual memory
    alloc: A,
    /// The total amount of used memory, in bytes
    total_used: AtomicUsize,
    /// The most recently allocated header (head of the object list),
    /// or `None` if nothing is allocated
    last: AtomicCell<Option<NonNull<ObjHeader>>>
}
impl<A: Allocator> StdAllocator<A> {
    /// Link a freshly-allocated header in at the head of the object list.
    ///
    /// Uses a CAS loop: if another thread swings `self.last` first,
    /// reload the observed head and retry.
    ///
    /// ## Safety
    /// `header` must point to a valid, uniquely-owned `ObjHeader`
    /// that is not already part of the list.
    #[inline]
    unsafe fn insert_header(&self, mut header: NonNull<ObjHeader>) {
        let mut last = self.last.load();
        loop {
            // Point our `prev` at the current head *before* publishing
            // ourselves, so the list is always well-formed for readers.
            header.as_mut().prev.store(last);
            match self.last.compare_exchange(last, Some(header)) {
                Ok(_) => return,
                Err(actual_last) => {
                    // Lost the race: retry against the head another
                    // thread just installed.
                    last = actual_last;
                }
            }
        }
    }
    /// Unlink the specified header from the object list.
    ///
    /// ## Safety
    /// `header` must currently be part of this allocator's list.
    #[inline]
    unsafe fn remove_header(&self, header: NonNull<ObjHeader>) {
        // TODO: not yet implemented -- needed before `free` can work
        unimplemented!()
    }
}
impl StdAllocator {
    /// Create a new, empty allocator backed by the global allocator.
    pub fn new() -> Self {
        Default::default()
    }
}
unsafe impl<A: Allocator> SimpleAllocator for StdAllocator<A> {
    /// We use at least one word for linking
    const MIN_SIZE: usize = std::mem::size_of::<usize>();
    /// There is no *inherent* limitation on sizes in the allocator API
    const MAX_SIZE: Option<usize> = None;
    /// There is no *inherent* limitation on alignment in the allocator API
    const MAX_ALIGNMENT: Option<usize> = None;

    #[inline]
    fn alloc(&self, layout: Layout) -> Result<AllocatedObject, AllocationError> {
        // Reserve extra room in front for the linked-list header
        // (plus padding to preserve the requested alignment).
        let total_layout = layout_including_header(layout);
        let bytes = match self.alloc.allocate(total_layout) {
            Ok(bytes) => bytes,
            Err(cause) => {
                return Err(AllocationError::StdError { cause, layout: total_layout })
            }
        };
        // Keep the usage counter in sync so `used_memory` reflects live
        // allocations. BUGFIX: previously nothing ever updated
        // `total_used`, so `used_memory`/`reserved_memory` always
        // reported zero.
        self.total_used.fetch_add(total_layout.size(), Ordering::AcqRel);
        unsafe {
            self.insert_header(bytes.cast::<ObjHeader>());
            // Hand the caller a pointer just past the header + padding
            let res = bytes.as_mut_ptr().add(std::mem::size_of::<ObjHeader>()
                + header_padding(layout));
            Ok(AllocatedObject {
                layout, ptr: NonNull::new_unchecked(res)
            })
        }
    }

    unsafe fn free(&self, mem: AllocatedObject) {
        // TODO: must unlink via `remove_header`, deallocate, and
        // `fetch_sub` the total layout size from `total_used`
        unimplemented!("{:?}", mem)
    }

    #[inline]
    fn used_memory(&self) -> usize {
        self.total_used.load(Ordering::Acquire)
    }

    /// This allocator reserves nothing beyond what is in use,
    /// so this is the same as `used_memory`.
    #[inline]
    fn reserved_memory(&self) -> usize {
        self.used_memory()
    }

    unsafe fn unchecked_reset(&self) -> usize {
        unimplemented!()
    }
}
Loading

0 comments on commit 58f44aa

Please sign in to comment.