diff --git a/.cargo/config.toml b/.cargo/config.toml index 7a41013..a520d27 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,2 +1,6 @@ [build] -rustflags = ["-Z", "tls_model=initial-exec"] \ No newline at end of file +rustflags = ["-Z", "tls_model=initial-exec"] + +[alias] +# Helper command to run a program with a malloc implementation +x = "run -p mallockit-dev-tools --quiet --" diff --git a/Cargo.lock b/Cargo.lock index 1eb11ff..b2d36ab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -258,62 +258,6 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" -[[package]] -name = "crossbeam" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-epoch", - "crossbeam-queue", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" - [[package]] name = "ctor" version = "0.1.26" @@ -742,7 +686,6 @@ version = "0.1.0" dependencies = [ "atomic", "cargo_metadata", - "crossbeam", "ctor", "errno 0.2.8", "libc", @@ -762,6 +705,14 @@ dependencies = [ "num_cpus", ] +[[package]] +name = "mallockit-dev-tools" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", +] + [[package]] name = "mallockit-macros" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 543dc43..43ecc96 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "mallockit", "mallockit/macros", + "mallockit/dev", "bump", "buddy", "hoard", @@ -30,7 +31,6 @@ errno = "0.2.8" spin = { version = "0.9.3", features = ["std"] } ctor = "0.1" num_cpus = "1.13.1" -crossbeam = "0.8.1" atomic = "0.5.1" quote = "1.0.20" syn = "1.0.98" diff --git a/README.md b/README.md index b4d0822..c77765f 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,13 @@ ## Getting Started ```console -$ cargo build --release -$ env LD_PRELOAD=./target/release/libbump.so cargo # or some other command +$ cargo build -p hoard --release --features malloc +$ env LD_PRELOAD=./target/release/libhoard.so cargo --help # or some other command ``` #### Run on macOS ```console -$ env DYLD_INSERT_LIBRARIES=./target/release/libbump.dylib cargo # or some other command +$ env 
DYLD_INSERT_LIBRARIES=./target/release/libhoard.dylib cargo --help # or some other command ``` *Note: If you'd like to hijack the system apps and libraries as well, disable System Integrity Protection (SIP). Do this at your own risk 😉* diff --git a/buddy/Cargo.toml b/buddy/Cargo.toml index ff5a22a..63cd4cb 100644 --- a/buddy/Cargo.toml +++ b/buddy/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "buddy" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/buddy/src/lib.rs b/buddy/src/lib.rs index 841ed4a..95d25f3 100644 --- a/buddy/src/lib.rs +++ b/buddy/src/lib.rs @@ -49,10 +49,8 @@ impl Mutator for BuddyMutator { fn new() -> Self { Self { - freelist: FreeListAllocator::new::(Lazy::new(|| { - &Self::plan().freelist_space - })), - los: LargeObjectAllocator::new(Lazy::new(|| &Self::plan().large_object_space)), + freelist: FreeListAllocator::new::(&Self::plan().freelist_space), + los: LargeObjectAllocator::new(&Self::plan().large_object_space), } } diff --git a/bump/Cargo.toml b/bump/Cargo.toml index a5d97e3..95543d6 100644 --- a/bump/Cargo.toml +++ b/bump/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "bump" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/bump/src/lib.rs b/bump/src/lib.rs index 22fb816..573d985 100644 --- a/bump/src/lib.rs +++ b/bump/src/lib.rs @@ -41,7 +41,7 @@ impl Mutator for BumpMutator { fn new() -> Self { Self { - bump: BumpAllocator::new(Lazy::new(|| &Self::plan().immortal)), + bump: BumpAllocator::new(&Self::plan().immortal), } } diff --git a/examples/malloc-override/example.sh b/examples/malloc-override/example.sh new file mode 100644 index 0000000..7ad01d5 --- /dev/null +++ b/examples/malloc-override/example.sh @@ -0,0 +1,3 @@ +cargo build -p hoard --release --features malloc + +env LD_PRELOAD=./target/release/libhoard.so cargo --help \ No newline at end of file diff --git a/hoard/Cargo.toml b/hoard/Cargo.toml index de02769..7cc74b9 100644 --- a/hoard/Cargo.toml +++ b/hoard/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "hoard" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/hoard/src/hoard_space.rs b/hoard/src/hoard_space.rs index ff65a31..df68ff5 100644 --- a/hoard/src/hoard_space.rs +++ b/hoard/src/hoard_space.rs @@ -1,25 +1,28 @@ use super::{page_resource::BlockPageResource, Allocator, Space, SpaceId}; use crate::{pool::Pool, super_block::SuperBlock}; use mallockit::{ - space::meta::{Box, Meta}, + space::{ + meta::{Box, Meta}, + page_resource::MemRegion, + }, util::{mem::alloc::discrete_tlab::DiscreteTLAB, *}, }; /// Global heap pub struct HoardSpace { id: SpaceId, - pr: BlockPageResource, + pr: BlockPageResource, pub(crate) pool: Pool, } impl Space for HoardSpace { const MAX_ALLOCATION_SIZE: usize = SuperBlock::BYTES / 4; - type PR = BlockPageResource; + type PR = BlockPageResource; fn new(id: SpaceId) -> Self { Self { id, - pr: BlockPageResource::new(id, SuperBlock::LOG_BYTES), + pr: BlockPageResource::new(id), pool: Pool::new(true), } } @@ -59,11 +62,7 @@ impl HoardSpace { return Some(block); } // Acquire new memory - let addr = self - .acquire::(1 
<< (SuperBlock::LOG_BYTES - Size4K::LOG_BYTES))? - .start - .start(); - let block = SuperBlock::new(addr); + let block = self.pr.acquire_block()?; block.init(local.static_ref(), size_class); debug_assert!(!block.is_full()); debug_assert!(block.is_empty()); @@ -78,25 +77,25 @@ impl HoardSpace { } pub fn release_block(&self, block: SuperBlock) { - self.release::(Page::new(block.start())); + self.pr.release_block(block) } } /// Thread-local heap pub struct HoardAllocator { - space: Lazy<&'static HoardSpace, Local>, + space: &'static HoardSpace, tlab: DiscreteTLAB<{ SizeClass::<4>::from_bytes(Self::LARGEST_SMALL_OBJECT).as_usize() + 1 }>, - local: Lazy, Local>, + local: Box, } impl HoardAllocator { const LOCAL_HEAP_THRESHOLD: usize = 16 * 1024 * 1024; const LARGEST_SMALL_OBJECT: usize = 1024; - pub const fn new(space: Lazy<&'static HoardSpace, Local>, _space_id: SpaceId) -> Self { + pub fn new(space: &'static HoardSpace, _space_id: SpaceId) -> Self { Self { space, tlab: DiscreteTLAB::new(), - local: Lazy::new(|| Box::new_in(Pool::new(false), Meta)), + local: Box::new_in(Pool::new(false), Meta), } } } @@ -109,7 +108,7 @@ impl Allocator for HoardAllocator { return Some(cell); } } - self.local.alloc_cell(size_class, &self.space) + self.local.alloc_cell(size_class, self.space) } fn dealloc(&mut self, cell: Address) { @@ -120,7 +119,7 @@ impl Allocator for HoardAllocator { { self.tlab.push(block.size_class, cell); } else { - self.local.free_cell(cell, &self.space); + self.local.free_cell(cell, self.space); } } } diff --git a/hoard/src/lib.rs b/hoard/src/lib.rs index 81fce97..f367c38 100644 --- a/hoard/src/lib.rs +++ b/hoard/src/lib.rs @@ -55,8 +55,8 @@ impl Mutator for HoardMutator { fn new() -> Self { Self { - hoard: HoardAllocator::new(Lazy::new(|| &Self::plan().hoard_space), HOARD_SPACE), - los: LargeObjectAllocator::new(Lazy::new(|| &Self::plan().large_object_space)), + hoard: HoardAllocator::new(&Self::plan().hoard_space, HOARD_SPACE), + los: LargeObjectAllocator::new(&Self::plan().large_object_space), } } diff --git a/hoard/src/pool.rs b/hoard/src/pool.rs index 7899ea1..c34981f 100644 --- a/hoard/src/pool.rs +++ b/hoard/src/pool.rs @@ -1,6 +1,9 @@ use crate::{hoard_space::HoardSpace, super_block::SuperBlock}; use array_const_fn_init::array_const_fn_init; -use mallockit::util::{mem::size_class::SizeClass, Address, Lazy, Local}; +use mallockit::{ + space::page_resource::MemRegion, + util::{mem::size_class::SizeClass, Address}, +}; use spin::{relax::Yield, MutexGuard}; use std::sync::atomic::{AtomicUsize, Ordering}; @@ -36,7 +39,7 @@ impl BlockList { fn group(block: SuperBlock, alloc: bool) -> usize { let u = block.used_bytes() + if alloc { block.size_class.bytes() } else { 0 } - + (Address::ZERO + SuperBlock::HEADER_BYTES) + + (Address::ZERO + SuperBlock::META_BYTES) .align_up(block.size_class.bytes()) .as_usize(); (u << 2) >> SuperBlock::LOG_BYTES @@ -252,7 +255,7 @@ impl Pool { &self, size_class: SizeClass, blocks: &mut BlockList, - space: &Lazy<&'static HoardSpace, Local>, + space: &'static HoardSpace, ) -> SuperBlock { // Get a block from global pool let block = space @@ -273,7 +276,7 @@ impl Pool { pub fn alloc_cell( &mut self, size_class: SizeClass, - space: &Lazy<&'static HoardSpace, Local>, + space: &'static HoardSpace, ) -> Option
{ debug_assert!(!self.global); let mut blocks = unsafe { self.blocks.get_unchecked(size_class.as_usize()).lock() }; @@ -289,7 +292,7 @@ impl Pool { } #[cold] - pub fn free_cell(&self, cell: Address, space: &Lazy<&'static HoardSpace, Local>) { + pub fn free_cell(&self, cell: Address, space: &'static HoardSpace) { let block = SuperBlock::containing(cell); let mut owner = block.owner; let mut blocks = owner.lock_blocks(block.size_class); @@ -305,7 +308,7 @@ impl Pool { fn free_cell_slow_impl( &self, cell: Address, - space: &Lazy<&'static HoardSpace, Local>, + space: &'static HoardSpace, blocks: &mut BlockList, ) { let block = SuperBlock::containing(cell); @@ -328,7 +331,7 @@ impl Pool { fn flush_block_slow( &self, size_class: SizeClass, - space: &Lazy<&'static HoardSpace, Local>, + space: &'static HoardSpace, blocks: &mut BlockList, ) { // Transit a mostly-empty block to the global pool diff --git a/hoard/src/super_block.rs b/hoard/src/super_block.rs index 83c4d99..e090193 100644 --- a/hoard/src/super_block.rs +++ b/hoard/src/super_block.rs @@ -1,6 +1,9 @@ -use std::num::NonZeroUsize; +use std::{ + num::NonZeroUsize, + ops::{Deref, DerefMut}, +}; -use mallockit::util::mem::{aligned_block::AlignedBlockConfig, size_class::SizeClass}; +use mallockit::{space::page_resource::MemRegion, util::mem::size_class::SizeClass}; use crate::pool::Pool; @@ -16,35 +19,58 @@ pub struct BlockMeta { pub group: u8, head_cell: Address, pub owner: &'static Pool, + pr_next: Option, } -#[mallockit::aligned_block] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct SuperBlock(NonZeroUsize); -impl AlignedBlockConfig for SuperBlock { - type Header = BlockMeta; +impl MemRegion for SuperBlock { + type Meta = BlockMeta; + const LOG_BYTES: usize = 18; - fn from_address(address: Address) -> Self { - debug_assert!(!address.is_zero()); - debug_assert!(Self::is_aligned(address)); - Self(unsafe { NonZeroUsize::new_unchecked(usize::from(address)) }) + fn start(&self) -> Address { + Address::from(self.0.get()) } - fn into_address(self) -> Address { - Address::from(self.0.get()) + fn from_address(addr: Address) -> Self { + debug_assert!(!addr.is_zero()); + debug_assert!(Self::is_aligned(addr)); + Self(unsafe { NonZeroUsize::new_unchecked(usize::from(addr)) }) + } + + fn set_next(&self, next: Option) { + unsafe { self.meta_mut().pr_next = next }; + } + + #[allow(clippy::misnamed_getters)] + fn next(&self) -> Option { + self.pr_next + } +} + +impl Deref for SuperBlock { + type Target = BlockMeta; + + fn deref(&self) -> &Self::Target { + self.meta() + } +} + +impl DerefMut for SuperBlock { + fn deref_mut(&mut self) -> &mut Self::Target { + unsafe { self.meta_mut() } } } impl SuperBlock { pub fn init(mut self, _local: &'static Pool, size_class: SizeClass) { - debug_assert_eq!(SuperBlock::HEADER_BYTES, Address::BYTES * 6); + debug_assert_eq!(Self::META_BYTES, Address::BYTES * 8); self.size_class = size_class; let size = size_class.bytes(); self.head_cell = Address::ZERO; - self.bump_cursor = (Address::ZERO + Self::HEADER_BYTES) - .align_up(size) - .as_usize() as u32; + self.bump_cursor = (Address::ZERO + Self::META_BYTES).align_up(size).as_usize() as u32; self.used_bytes = 0; } diff --git a/mallockit/Cargo.toml b/mallockit/Cargo.toml index 1333b13..cfbf3a9 100644 --- a/mallockit/Cargo.toml +++ b/mallockit/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "mallockit" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html @@ -12,7 +12,6 @@ errno = { workspace = true } spin = { workspace = true } ctor = { workspace = true } num_cpus = { workspace = true } -crossbeam = { workspace = true } atomic = { workspace = true } shell-words = { workspace = true } mallockit-macros = { path = "./macros" } diff --git a/mallockit/dev/Cargo.toml b/mallockit/dev/Cargo.toml new file mode 100644 index 0000000..3f67845 --- /dev/null +++ b/mallockit/dev/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "mallockit-dev-tools" +version = "0.1.0" +edition = "2021" +publish = false + + + +[dependencies] +anyhow = { version = "1.0.86", features = ["backtrace"] } +clap = { version = "4.5.4", features = ["derive"] } diff --git a/mallockit/dev/src/main.rs b/mallockit/dev/src/main.rs new file mode 100644 index 0000000..283369d --- /dev/null +++ b/mallockit/dev/src/main.rs @@ -0,0 +1,116 @@ +use std::process::Command; + +use clap::Parser; + +/// Run a program with a custom allocator +#[derive(Parser, Debug)] +struct Options { + /// Malloc implementation to use + malloc: String, + + /// Space or comma separated list of features to activate + #[arg(short = 'F', long)] + features: Vec, + + /// Activate all available features + #[arg(long, default_value_t = false)] + all_features: bool, + + /// Do not activate the `default` feature + #[arg(long, default_value_t = false)] + no_default_features: bool, + + /// Build artifacts in release mode, with optimizations + #[arg(short, long, default_value_t = false)] + release: bool, + + /// Build artifacts with the specified profile + #[arg(short, long)] + profile: Option, + + /// Run the program under GDB + #[arg(long, default_value_t = false)] + gdb: bool, + + /// Run the program under LLDB + #[arg(long, default_value_t = false)] + lldb: bool, + + /// The program to run, with arguments + #[arg(last = true, allow_hyphen_values = true)] + command: Vec, +} + +fn build_crate(opts: &Options) -> anyhow::Result<()> { + let mut cmd = Command::new("cargo"); + cmd.arg("build").args(["-p", &opts.malloc]); + let mut features = opts.features.clone(); + if !features.contains(&"malloc".to_owned()) && !opts.all_features { + features.push("malloc".to_string()); + } + if !features.is_empty() { + cmd.arg("--features").arg(features.join(",")); + } + if opts.all_features { + cmd.arg("--all-features"); + } + if opts.no_default_features { + cmd.arg("--no-default-features"); + } + if opts.release { + cmd.arg("--release"); + } + if let Some(profile) = &opts.profile { + cmd.arg("--profile").arg(profile); + } + let status = cmd.status()?; + if !status.success() { + std::process::exit(status.code().unwrap_or(1)); + } + Ok(()) +} + +const ENV: &str = if cfg!(target_os = "macos") { + "DYLD_INSERT_LIBRARIES" +} else { + "LD_PRELOAD" +}; + +const EXT: &str = if cfg!(target_os = "macos") { + "dylib" +} else { + "so" +}; + +fn main() -> anyhow::Result<()> { + let options = Options::parse(); + // 1. Build the crate + build_crate(&options)?; + // 2. 
Run the program + let args = options.command.clone(); + let profile = if options.release { "release" } else { "debug" }; + let dylib = format!("target/{}/lib{}.{}", profile, options.malloc, EXT); + let mut cmd = if !options.gdb && !options.lldb { + let mut cmd = Command::new(&args[0]); + cmd.args(&args[1..]); + cmd.env(ENV, dylib); + cmd + } else if options.gdb { + let mut cmd = Command::new("rust-gdb"); + cmd.args(["-ex", &format!("set environment {ENV}={dylib}")]); + cmd.arg("--args").args(&args); + cmd + } else { + let mut cmd = Command::new("rust-lldb"); + cmd.args(["-o", &format!("env {ENV}={dylib}")]); + cmd.arg("--source-quietly"); + cmd.arg("--").args(&args); + cmd + }; + cmd.env("RUST_BACKTRACE", "1"); + let status = cmd.status()?; + if !status.success() { + std::process::exit(status.code().unwrap_or(1)); + } + Ok(()) +} diff --git a/mallockit/macros/Cargo.toml b/mallockit/macros/Cargo.toml index 30c3a79..95f959d 100644 --- a/mallockit/macros/Cargo.toml +++ b/mallockit/macros/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "mallockit-macros" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/mallockit/macros/src/lib.rs b/mallockit/macros/src/lib.rs index 2ba8114..38f9bfb 100644 --- a/mallockit/macros/src/lib.rs +++ b/mallockit/macros/src/lib.rs @@ -9,28 +9,43 @@ pub fn plan(_attr: TokenStream, item: TokenStream) -> TokenStream { #input mod __mallockit_plan { - pub(super) static PLAN: ::mallockit::util::Lazy = ::mallockit::util::Lazy::new(|| ::new()); + type Plan = super::#name; - ::mallockit::export_malloc_api!(PLAN, super::super::#name); - } + static PLAN: ::mallockit::util::Lazy = ::mallockit::util::Lazy::new(|| ::new()); - ::mallockit::export_rust_global_alloc_api!(PLAN, #name); + #[cfg(any(feature = "malloc", feature = "mallockit/malloc"))] + #[::mallockit::ctor] + unsafe fn ctor() { + ::mallockit::util::sys::hooks::process_start(&*PLAN); + } - impl ::mallockit::plan::Singleton for #name { - fn singleton() -> &'static Self { - unsafe { &__mallockit_plan::PLAN } + #[cfg(target_os = "macos")] + #[no_mangle] + extern "C" fn mallockit_initialize_macos_tls() -> *mut u8 { + use ::mallockit::mutator::TLS; + ::Mutator::current() as *mut ::Mutator as _ } + + impl ::mallockit::plan::Singleton for super::#name { + fn singleton() -> &'static Self { + unsafe { &PLAN } + } + } + + ::mallockit::export_malloc_api!(PLAN, super::super::#name); + ::mallockit::export_rust_global_alloc_api!(super::super::#name); } - #[cfg(any(feature = "malloc", feature = "mallockit/malloc"))] - #[global_allocator] - static __GLOBAL_ALLOCATOR: ::mallockit::space::meta::Meta = ::mallockit::space::meta::Meta; + pub use __mallockit_plan::__mallockit_rust_api::Global; - include!(concat!( - env!("CARGO_MANIFEST_DIR"), - "/../target/generated_tests.rs" - )); - ::mallockit::rust_allocator_tests!(Global); + #[cfg(test)] + mod tests { + include!(concat!( + env!("CARGO_MANIFEST_DIR"), + "/../target/generated_tests.rs" + )); + ::mallockit::rust_allocator_tests!(crate::Global); + } }; result.into() } @@ -43,22 +58,19 @@ pub fn mutator(_attr: TokenStream, item: TokenStream) -> TokenStream { #[repr(align(256))] #input - #[cfg(not(target_os = "macos"))] - mod __mallockit_mutator { - #[thread_local] - pub(super) static mut MUTATOR: mallockit::util::Lazy = mallockit::util::Lazy::new(|| ::new()); - } - - impl mallockit::mutator::TLS for #name { + impl 
::mallockit::mutator::TLS for #name { fn new() -> Self { - ::new() + ::new() } #[cfg(not(target_os = "macos"))] fn current() -> &'static mut Self { - unsafe { &mut *__mallockit_mutator::MUTATOR } + #[thread_local] + static mut MUTATOR: ::mallockit::util::Lazy<#name, ::mallockit::util::Local> = ::mallockit::util::Lazy::new(|| <#name as ::mallockit::Mutator>::new()); + unsafe { &mut * MUTATOR } } } + }; result.into() } diff --git a/mallockit/src/mutator.rs b/mallockit/src/mutator.rs index d897769..b7a91db 100644 --- a/mallockit/src/mutator.rs +++ b/mallockit/src/mutator.rs @@ -33,25 +33,33 @@ pub trait Mutator: Sized + 'static + TLS { fn dealloc(&mut self, ptr: Address); - fn realloc(&mut self, ptr: Address, new_size: usize) -> Option
<Address> {
+    fn realloc(&mut self, ptr: Address, new_layout: Layout) -> Option<Address> {
         let layout = Self::Plan::get_layout(ptr);
-        if layout.size() >= new_size {
+        if layout.size() >= new_layout.size() && layout.align() >= new_layout.align() {
             return Some(ptr);
         }
-        let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
         let new_ptr = self.alloc(new_layout);
         if let Some(new_ptr) = new_ptr {
             unsafe {
                 ptr::copy_nonoverlapping(
                     ptr.as_ptr::<u8>(),
                     new_ptr.as_mut_ptr::<u8>(),
                     usize::min(layout.size(), new_layout.size()),
                 );
             }
             self.dealloc(ptr);
         }
         new_ptr
     }
+
+    fn realloc_zeroed(&mut self, ptr: Address, new_layout: Layout) -> Option<Address>
{ + let size = new_layout.size(); + let new_ptr = self.realloc(ptr, new_layout); + if let Some(new_ptr) = new_ptr { + unsafe { ptr::write_bytes(new_ptr.as_mut_ptr::(), 0, size) }; + } + new_ptr + } } pub(crate) struct InternalTLS { diff --git a/mallockit/src/space/freelist_space.rs b/mallockit/src/space/freelist_space.rs index adea5ed..c3f09ca 100644 --- a/mallockit/src/space/freelist_space.rs +++ b/mallockit/src/space/freelist_space.rs @@ -1,4 +1,7 @@ -use super::{page_resource::BlockPageResource, Allocator, Space, SpaceId}; +use super::{ + page_resource::{BlockPageResource, MemRegion}, + Allocator, Space, SpaceId, +}; use crate::util::bits::{BitField, BitFieldSlot}; use crate::util::mem::freelist::intrusive_freelist::AddressSpaceConfig; use crate::util::mem::freelist::intrusive_freelist::IntrusiveFreeList; @@ -10,6 +13,30 @@ use std::{ops::Range, sync::atomic::AtomicUsize}; // type ActivePageSize = Size4K; type ActivePageSize = Size2M; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(C)] +pub struct Chunk(Address); + +impl MemRegion for Chunk { + const LOG_BYTES: usize = ActivePageSize::LOG_BYTES; + + fn start(&self) -> Address { + self.0 + } + + fn from_address(addr: Address) -> Self { + Self(addr) + } + + fn set_next(&self, _next: Option) { + unreachable!() + } + + fn next(&self) -> Option { + unreachable!() + } +} + pub struct AddressSpace; impl AddressSpaceConfig for AddressSpace { @@ -20,18 +47,18 @@ impl AddressSpaceConfig for AddressSpace { pub struct FreeListSpace { id: SpaceId, - pr: BlockPageResource, + pr: BlockPageResource, pages: Mutex>>, } impl Space for FreeListSpace { const MAX_ALLOCATION_SIZE: usize = Size4K::BYTES; - type PR = BlockPageResource; + type PR = BlockPageResource; fn new(id: SpaceId) -> Self { Self { id, - pr: BlockPageResource::new(id, ActivePageSize::LOG_BYTES), + pr: BlockPageResource::new(id), pages: Mutex::new(None), } } @@ -121,27 +148,25 @@ impl Cell { } pub struct FreeListAllocator { - space: Lazy<&'static FreeListSpace, Local>, - freelist: Lazy, Local>, + space: &'static FreeListSpace, + freelist: IntrusiveFreeList, } impl FreeListAllocator { - pub const fn new(space: Lazy<&'static FreeListSpace, Local>) -> Self { + pub fn new(space: &'static FreeListSpace) -> Self { Self { space, - freelist: Lazy::new(|| { - IntrusiveFreeList::new(false, HEAP.get_space_range(SPACE_ID).start) - }), + freelist: IntrusiveFreeList::new(false, HEAP.get_space_range(SPACE_ID).start), } } #[cold] fn alloc_cell_slow(&mut self, bytes: usize) -> Option> { - let page = match self.space.get_coalesced_page() { - Some(page) => page, - _ => self.space.acquire::(1)?.start, + let range = match self.space.get_coalesced_page() { + Some(page) => page.range(), + _ => self.space.pr.acquire_block()?.data(), }; - self.freelist.add_units(page.start(), ActivePageSize::BYTES); + self.freelist.add_units(range.start, ActivePageSize::BYTES); self.alloc_cell(bytes) } diff --git a/mallockit/src/space/immortal_space.rs b/mallockit/src/space/immortal_space.rs index 6e0ec57..f1cbf63 100644 --- a/mallockit/src/space/immortal_space.rs +++ b/mallockit/src/space/immortal_space.rs @@ -30,13 +30,13 @@ impl Space for ImmortalSpace { } pub struct BumpAllocator { - space: Lazy<&'static ImmortalSpace, Local>, + space: &'static ImmortalSpace, allocation_area: AllocationArea, retry: bool, } impl BumpAllocator { - pub const fn new(space: Lazy<&'static ImmortalSpace, Local>) -> Self { + pub const fn new(space: &'static ImmortalSpace) -> Self { Self { space, allocation_area: AllocationArea::EMPTY, diff --git 
a/mallockit/src/space/large_object_space.rs b/mallockit/src/space/large_object_space.rs index a6c983c..d205ba9 100644 --- a/mallockit/src/space/large_object_space.rs +++ b/mallockit/src/space/large_object_space.rs @@ -4,7 +4,7 @@ use super::{ page_resource::{FreelistPageResource, PageResource}, Allocator, Space, SpaceId, }; -use crate::util::{Address, Lazy, Local, Page, PageSize, Size4K}; +use crate::util::{Address, Page, PageSize, Size4K}; pub struct LargeObjectSpace { id: SpaceId, @@ -62,7 +62,7 @@ pub struct LargeObjectAllocator< > where [(); bins::(MAX_CACHEABLE_SIZE)]: Sized, { - space: Lazy<&'static LargeObjectSpace, Local>, + space: &'static LargeObjectSpace, bins: [Address; bins::(MAX_CACHEABLE_SIZE)], max_live: usize, live: usize, @@ -77,7 +77,7 @@ where { const CACHE_ENABLED: bool = bins::(MAX_CACHEABLE_SIZE) > 0; - pub const fn new(los: Lazy<&'static LargeObjectSpace, Local>) -> Self { + pub fn new(los: &'static LargeObjectSpace) -> Self { Self { space: los, bins: [Address::ZERO; bins::(MAX_CACHEABLE_SIZE)], @@ -89,7 +89,7 @@ where } fn space(&self) -> &'static LargeObjectSpace { - *self.space + self.space } fn alloc_slow(&mut self, layout: Layout) -> Option
{ diff --git a/mallockit/src/space/meta/meta_allocator.rs b/mallockit/src/space/meta/meta_allocator.rs index a39bd06..6e38e0b 100644 --- a/mallockit/src/space/meta/meta_allocator.rs +++ b/mallockit/src/space/meta/meta_allocator.rs @@ -131,6 +131,7 @@ impl MetaLocal { } } +#[derive(Clone, Copy, Debug, Default)] pub struct Meta; unsafe impl Allocator for Meta { diff --git a/mallockit/src/space/page_resource/block_page_resource.rs b/mallockit/src/space/page_resource/block_page_resource.rs index 031b607..86c9882 100644 --- a/mallockit/src/space/page_resource/block_page_resource.rs +++ b/mallockit/src/space/page_resource/block_page_resource.rs @@ -1,50 +1,105 @@ use super::super::SpaceId; use super::PageResource; +use crate::space::meta::{Meta, Vec}; use crate::util::mem::heap::HEAP; use crate::util::*; use atomic::Atomic; -use crossbeam::queue::SegQueue; +use spin::Mutex; use std::iter::Step; use std::{ ops::Range, sync::atomic::{AtomicUsize, Ordering}, }; -pub struct BlockPageResource { +pub trait MemRegion: 'static + Sized + Clone + Copy { + type Meta = (); + + const LOG_BYTES: usize; + const BYTES: usize = 1 << Self::LOG_BYTES; + + const META_BYTES: usize = std::mem::size_of::().next_power_of_two(); + + fn start(&self) -> Address; + fn from_address(addr: Address) -> Self; + fn set_next(&self, next: Option); + fn next(&self) -> Option; + + fn data_start(&self) -> Address { + self.start() + Self::META_BYTES + } + + fn end(&self) -> Address { + self.start() + Self::BYTES + } + + fn data(&self) -> Range
<Address> {
+        self.data_start()..self.end()
+    }
+
+    fn range(&self) -> Range<Address>
{ + self.start()..self.end() + } + + fn meta(&self) -> &Self::Meta { + unsafe { &*(self.start().as_ptr::()) } + } + + /// # Safety + /// The caller must ensure that the block is within its corresponding space, and the block is properly aligned. + #[allow(clippy::mut_from_ref)] + unsafe fn meta_mut(&self) -> &mut Self::Meta { + &mut *(self.start().as_mut_ptr::()) + } + + fn is_aligned(addr: Address) -> bool { + addr.is_aligned_to(Self::BYTES) + } + + fn align(addr: Address) -> Address { + addr.align_down(Self::BYTES) + } + + fn containing(addr: Address) -> Self { + let start = Self::align(addr); + Self::from_address(start) + } +} + +pub struct BlockPageResource { pub id: SpaceId, - log_bytes: usize, cursor: Atomic
<Address>,
     highwater: Address,
-    recycled_blocks: SegQueue<Address>
, + recycled_blocks_intrusive: Atomic>, + recycled_blocks_non_intrusive: Mutex>, reserved_bytes: AtomicUsize, } -impl BlockPageResource { - pub fn new(id: SpaceId, log_bytes: usize) -> Self { +impl BlockPageResource { + pub fn new(id: SpaceId) -> Self { debug_assert!(id.0 < 0b0000_1111); - debug_assert!(log_bytes >= Size4K::LOG_BYTES); + debug_assert!(B::LOG_BYTES >= Size4K::LOG_BYTES); let range = HEAP.get_space_range(id); Self { id, - log_bytes, cursor: Atomic::new(range.start), highwater: range.end, - recycled_blocks: SegQueue::new(), + recycled_blocks_intrusive: Atomic::new(None), + recycled_blocks_non_intrusive: Mutex::new(Vec::new_in(Meta)), reserved_bytes: AtomicUsize::new(0), } } #[cold] fn acquire_block_slow(&self, pages: usize) -> Option>> { - debug_assert!(self.log_bytes >= S::LOG_BYTES); - debug_assert_eq!(pages, 1 << (self.log_bytes - S::LOG_BYTES)); + debug_assert!(B::LOG_BYTES >= S::LOG_BYTES); + debug_assert_eq!(pages, 1 << (B::LOG_BYTES - S::LOG_BYTES)); let block = self .cursor .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |a| { if a >= self.highwater { None } else { - Some(a + (1usize << self.log_bytes)) + Some(a + (1usize << B::LOG_BYTES)) } }); match block { @@ -56,37 +111,71 @@ impl BlockPageResource { Err(_) => None, } } + + pub fn acquire_block(&self) -> Option { + if INTRUSIVE { + loop { + let head = self.recycled_blocks_intrusive.load(Ordering::Relaxed); + if let Some(block) = head { + if self + .recycled_blocks_intrusive + .compare_exchange(head, block.next(), Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { + self.reserved_bytes.fetch_add(B::BYTES, Ordering::Relaxed); + return Some(block); + } + } else { + break; + } + } + } else if let Some(addr) = self.recycled_blocks_non_intrusive.lock().pop() { + let block = B::from_address(addr); + self.reserved_bytes.fetch_add(B::BYTES, Ordering::Relaxed); + return Some(block); + } + let range = self.acquire_block_slow::(B::BYTES >> Size4K::LOG_BYTES)?; + let block = B::from_address(range.start.start()); + if INTRUSIVE { + block.set_next(None); + } + self.reserved_bytes.fetch_add(B::BYTES, Ordering::Relaxed); + Some(block) + } + + pub fn release_block(&self, block: B) { + if INTRUSIVE { + loop { + let head = self.recycled_blocks_intrusive.load(Ordering::Relaxed); + block.set_next(head); + if self + .recycled_blocks_intrusive + .compare_exchange(head, Some(block), Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { + break; + } + } + } else { + self.recycled_blocks_non_intrusive + .lock() + .push(block.start()); + } + self.reserved_bytes + .fetch_sub(1 << B::LOG_BYTES, Ordering::Relaxed); + } } -impl PageResource for BlockPageResource { +impl PageResource for BlockPageResource { fn reserved_bytes(&self) -> usize { self.reserved_bytes.load(Ordering::Relaxed) } - fn acquire_pages(&self, pages: usize) -> Option>> { - debug_assert!(self.log_bytes >= S::LOG_BYTES); - debug_assert_eq!(pages, 1 << (self.log_bytes - S::LOG_BYTES)); - if let Some(addr) = self.recycled_blocks.pop() { - let start = Page::::new(addr); - let end = Step::forward(start, pages); - self.reserved_bytes - .fetch_add(1 << self.log_bytes, Ordering::Relaxed); - return Some(start..end); - } - if let Some(result) = self.acquire_block_slow(pages) { - self.reserved_bytes - .fetch_add(1 << self.log_bytes, Ordering::Relaxed); - return Some(result); - } - None + fn acquire_pages(&self, _pages: usize) -> Option>> { + unreachable!("Use `alloc_block` instead") } - fn release_pages(&self, start: Page) { - // NOTE: `SegQueue::push` space expansion indirectly calls 
`malloc`. - // It's safe for now since we don't call `release_pages` in `malloc`. - // Review this later when we start to work on lazy sweeping. - self.recycled_blocks.push(start.start()); - self.reserved_bytes - .fetch_sub(1 << self.log_bytes, Ordering::Relaxed); + fn release_pages(&self, _start: Page) { + unreachable!("Use `release_block` instead") } } diff --git a/mallockit/src/space/page_resource/freelist_page_resource.rs b/mallockit/src/space/page_resource/freelist_page_resource.rs index 7f05854..60f8253 100644 --- a/mallockit/src/space/page_resource/freelist_page_resource.rs +++ b/mallockit/src/space/page_resource/freelist_page_resource.rs @@ -38,7 +38,9 @@ impl FreelistPageResource { id, freelist: Mutex::new(freelist), reserved_bytes: AtomicUsize::new(0), - meta: RwLock::new(unsafe { std::mem::transmute(meta) }), + meta: RwLock::new(unsafe { + std::mem::transmute::, Vec>(meta) + }), base, } } diff --git a/mallockit/src/space/page_table.rs b/mallockit/src/space/page_table.rs index 442bc58..0c28701 100644 --- a/mallockit/src/space/page_table.rs +++ b/mallockit/src/space/page_table.rs @@ -148,7 +148,11 @@ impl PageTable { Some(_) => unreachable!(), _ => { let table = Box::leak(meta_box!(PageTable:: { - table: unsafe { mem::transmute([0usize; 512]) }, + table: unsafe { + mem::transmute::<[usize; 512], [PageTableEntry; 512]>( + [0usize; 512], + ) + }, phantom: PhantomData, })); self.table[index].set_next_page_table(table); @@ -168,7 +172,9 @@ impl PageTable { impl PageTable { pub(crate) const fn new() -> Self { Self { - table: unsafe { mem::transmute([0usize; 512]) }, + table: unsafe { + mem::transmute::<[usize; 512], [PageTableEntry; 512]>([0usize; 512]) + }, phantom: PhantomData, } } diff --git a/mallockit/src/stat.rs b/mallockit/src/stat.rs index 483f7f0..72b60cf 100644 --- a/mallockit/src/stat.rs +++ b/mallockit/src/stat.rs @@ -6,26 +6,29 @@ use std::{ }, }; -use crossbeam::queue::SegQueue; +use spin::Mutex; -use crate::util::Lazy; +use crate::{ + space::meta::{Meta, Vec}, + util::Lazy, +}; -static COUNTERS: SegQueue> = SegQueue::new(); +static COUNTERS: Mutex>> = Mutex::new(Vec::new_in(Meta)); -pub type CounterRef = Lazy>; +pub type CounterRef = Lazy>; -pub const fn define_counter() -> Lazy> { +pub const fn define_counter() -> Lazy> { Lazy::new(|| { - let c = Arc::new(Counter::new(NAME)); - COUNTERS.push(c.clone()); + let c = Arc::new_in(Counter::new(NAME), Meta); + COUNTERS.lock().push(c.clone()); c }) } -static TOTAL_ALLOCATIONS: Lazy> = define_counter::<"total-allocations">(); -static LARGE_ALLOCATIONS: Lazy> = define_counter::<"large-allocations">(); -static TOTAL_DEALLOCATIONS: Lazy> = define_counter::<"total-deallocations">(); -static LARGE_DEALLOCATIONS: Lazy> = define_counter::<"large-deallocations">(); +static TOTAL_ALLOCATIONS: Lazy> = define_counter::<"total-allocations">(); +static LARGE_ALLOCATIONS: Lazy> = define_counter::<"large-allocations">(); +static TOTAL_DEALLOCATIONS: Lazy> = define_counter::<"total-deallocations">(); +static LARGE_DEALLOCATIONS: Lazy> = define_counter::<"large-deallocations">(); static ALIGNMENTS: [Counter; 11] = [ Counter::new(""), // 1 @@ -126,19 +129,19 @@ pub(crate) fn report() {} #[cfg(feature = "stat")] pub(crate) fn report() { - println!("alignment:"); + eprintln!("alignment:"); for i in 0..ALIGNMENTS.len() { - println!(" - {} = {}", i, ALIGNMENTS[i].get()); + eprintln!(" - {} = {}", i, ALIGNMENTS[i].get()); } - println!(" - others = {}", OTHER_ALIGNMENT.get()); - println!(""); - println!("size:"); + eprintln!(" - others = {}", 
OTHER_ALIGNMENT.get()); + eprintln!(""); + eprintln!("size:"); for i in 0..SIZES.len() { - println!(" - {} = {}", i, SIZES[i].get()); + eprintln!(" - {} = {}", i, SIZES[i].get()); } - println!(" - others = {}", OTHER_SIZE.get()); - println!(""); - while let Some(c) = COUNTERS.pop() { - println!("{}: {}", c.0, c.get()); + eprintln!(" - others = {}", OTHER_SIZE.get()); + eprintln!(""); + while let Some(c) = COUNTERS.lock().pop() { + eprintln!("{}: {}", c.0, c.get()); } } diff --git a/mallockit/src/util/constants.rs b/mallockit/src/util/constants.rs new file mode 100644 index 0000000..1de30e6 --- /dev/null +++ b/mallockit/src/util/constants.rs @@ -0,0 +1,12 @@ +#[cfg(not(any( + target_os = "macos", + all(target_os = "windows", target_pointer_width = "64") +)))] +pub const LOG_MIN_ALIGNMENT: usize = 4; // should be 8? +#[cfg(any( + target_os = "macos", + all(target_os = "windows", target_pointer_width = "64") +))] +pub const LOG_MIN_ALIGNMENT: usize = 4; + +pub const MIN_ALIGNMENT: usize = 1 << LOG_MIN_ALIGNMENT; diff --git a/mallockit/src/util/malloc/malloc_api.rs b/mallockit/src/util/malloc/malloc_api.rs index a2a2301..054936e 100644 --- a/mallockit/src/util/malloc/malloc_api.rs +++ b/mallockit/src/util/malloc/malloc_api.rs @@ -1,3 +1,4 @@ +use crate::util::constants::MIN_ALIGNMENT; use crate::util::mem::heap::HEAP; use crate::util::Address; use crate::util::Lazy; @@ -18,16 +19,7 @@ impl GetMutatorType for MallocAPI
<P>
{ #[allow(unused)] impl MallocAPI
<P>
{ - #[cfg(not(any( - target_os = "macos", - all(target_os = "windows", target_pointer_width = "64") - )))] - pub const MIN_ALIGNMENT: usize = 16; // should be 8? - #[cfg(any( - target_os = "macos", - all(target_os = "windows", target_pointer_width = "64") - ))] - pub const MIN_ALIGNMENT: usize = 16; + pub const MIN_ALIGNMENT: usize = MIN_ALIGNMENT; pub const PAGE_SIZE: usize = 4096; pub const fn new(plan: &'static Lazy
<P>
) -> Self { @@ -154,7 +146,8 @@ impl MallocAPI
<P>
{ return new_ptr.into(); } - match self.mutator().realloc(ptr.into(), new_size) { + let layout = Layout::from_size_align_unchecked(new_size, Self::MIN_ALIGNMENT); + match self.mutator().realloc(ptr.into(), layout) { Some(ptr) => ptr.into(), None => { if free_if_fail { @@ -231,128 +224,109 @@ impl MallocAPI
<P>
{ #[doc(hidden)] macro_rules! export_malloc_api { ($plan: expr, $plan_ty: ty) => { - pub mod __mallockit { + #[cfg(any(feature = "malloc", feature = "mallockit/malloc"))] + pub mod __mallockit_malloc_api { use super::*; use $crate::Plan; - type ConcretePlan = $plan_ty; - type Malloc = $crate::util::malloc::MallocAPI; - static MALLOC_IMPL: Malloc = - $crate::util::malloc::MallocAPI::::new(&$plan); - - #[cfg(any(feature = "malloc", feature = "mallockit/malloc"))] - #[$crate::ctor] - unsafe fn ctor() { - $crate::util::sys::hooks::process_start(&*$plan); - $crate::libc::atexit($crate::util::sys::hooks::process_exit); + type Malloc = $crate::util::malloc::MallocAPI<$plan_ty>; + static MALLOC_IMPL: Malloc = $crate::util::malloc::MallocAPI::<$plan_ty>::new(&$plan); + + #[$crate::interpose] + pub unsafe extern "C" fn malloc(size: usize) -> *mut u8 { + MALLOC_IMPL.alloc_or_enomem(size, Malloc::MIN_ALIGNMENT) } #[cfg(target_os = "macos")] - #[no_mangle] - pub extern "C" fn mallockit_initialize_macos_tls() -> *mut u8 { - MALLOC_IMPL.mutator() as *mut _ as _ + #[$crate::interpose] + pub unsafe extern "C" fn malloc_size(ptr: *mut u8) -> usize { + MALLOC_IMPL.malloc_size(ptr.into()) } - #[cfg(any(feature = "malloc", feature = "mallockit/malloc"))] - pub mod __malloc_api { - use super::{Malloc, MALLOC_IMPL}; - - #[$crate::interpose] - pub unsafe extern "C" fn malloc(size: usize) -> *mut u8 { - MALLOC_IMPL.alloc_or_enomem(size, Malloc::MIN_ALIGNMENT) - } - - #[cfg(target_os = "macos")] - #[$crate::interpose] - pub unsafe extern "C" fn malloc_size(ptr: *mut u8) -> usize { - MALLOC_IMPL.malloc_size(ptr.into()) - } - - // #[cfg(target_os = "macos")] - // #[$crate::interpose] - // pub unsafe fn malloc_good_size(ptr: *mut u8) -> usize { - // MALLOC_IMPL.malloc_size(ptr.into()) - // } + // #[cfg(target_os = "macos")] + // #[$crate::interpose] + // pub unsafe fn malloc_good_size(ptr: *mut u8) -> usize { + // MALLOC_IMPL.malloc_size(ptr.into()) + // } - #[cfg(target_os = "linux")] - #[$crate::interpose] - pub unsafe extern "C" fn malloc_usable_size(ptr: *mut u8) -> usize { - MALLOC_IMPL.malloc_size(ptr.into()) - } + #[cfg(target_os = "linux")] + #[$crate::interpose] + pub unsafe extern "C" fn malloc_usable_size(ptr: *mut u8) -> usize { + MALLOC_IMPL.malloc_size(ptr.into()) + } - #[$crate::interpose] - pub unsafe extern "C" fn free(ptr: *mut u8) { - MALLOC_IMPL.free(ptr) - } + #[$crate::interpose] + pub unsafe extern "C" fn free(ptr: *mut u8) { + MALLOC_IMPL.free(ptr) + } - #[cfg(target_os = "linux")] - #[$crate::interpose] - pub unsafe extern "C" fn cfree(ptr: *mut u8) { - MALLOC_IMPL.free(ptr) - } + #[cfg(target_os = "linux")] + #[$crate::interpose] + pub unsafe extern "C" fn cfree(ptr: *mut u8) { + MALLOC_IMPL.free(ptr) + } - #[$crate::interpose] - pub unsafe extern "C" fn calloc(count: usize, size: usize) -> *mut u8 { - let size = count * size; - let ptr = MALLOC_IMPL.alloc_or_enomem(size, Malloc::MIN_ALIGNMENT); - std::ptr::write_bytes(ptr, 0, size); - ptr - } + #[$crate::interpose] + pub unsafe extern "C" fn calloc(count: usize, size: usize) -> *mut u8 { + let size = count * size; + let ptr = MALLOC_IMPL.alloc_or_enomem(size, Malloc::MIN_ALIGNMENT); + std::ptr::write_bytes(ptr, 0, size); + ptr + } - #[cfg(any(target_os = "linux", target_os = "macos"))] - #[$crate::interpose] - pub unsafe extern "C" fn valloc(size: usize) -> *mut u8 { - MALLOC_IMPL.alloc_or_enomem(size, Malloc::PAGE_SIZE) - } + #[cfg(any(target_os = "linux", target_os = "macos"))] + #[$crate::interpose] + pub unsafe extern "C" fn valloc(size: 
usize) -> *mut u8 { + MALLOC_IMPL.alloc_or_enomem(size, Malloc::PAGE_SIZE) + } - #[cfg(target_os = "linux")] - #[$crate::interpose] - pub unsafe extern "C" fn pvalloc(size: usize) -> *mut u8 { - MALLOC_IMPL.alloc_or_enomem(size, Malloc::PAGE_SIZE) - } + #[cfg(target_os = "linux")] + #[$crate::interpose] + pub unsafe extern "C" fn pvalloc(size: usize) -> *mut u8 { + MALLOC_IMPL.alloc_or_enomem(size, Malloc::PAGE_SIZE) + } - #[$crate::interpose] - pub unsafe extern "C" fn realloc(ptr: *mut u8, size: usize) -> *mut u8 { - MALLOC_IMPL.reallocate_or_enomem( - ptr, - size, - cfg!(any(target_os = "linux", target_os = "windows")), - false, - ) - } + #[$crate::interpose] + pub unsafe extern "C" fn realloc(ptr: *mut u8, size: usize) -> *mut u8 { + MALLOC_IMPL.reallocate_or_enomem( + ptr, + size, + cfg!(any(target_os = "linux", target_os = "windows")), + false, + ) + } - #[cfg(target_os = "macos")] - #[$crate::interpose] - pub unsafe extern "C" fn reallocf(ptr: *mut u8, size: usize) -> *mut u8 { - MALLOC_IMPL.reallocate_or_enomem(ptr, size, false, true) - } + #[cfg(target_os = "macos")] + #[$crate::interpose] + pub unsafe extern "C" fn reallocf(ptr: *mut u8, size: usize) -> *mut u8 { + MALLOC_IMPL.reallocate_or_enomem(ptr, size, false, true) + } - #[cfg(any(target_os = "linux", target_os = "macos"))] - #[$crate::interpose] - pub unsafe extern "C" fn posix_memalign( - ptr: *mut *mut u8, - alignment: usize, - size: usize, - ) -> i32 { - MALLOC_IMPL.posix_memalign(ptr, alignment, size) - } + #[cfg(any(target_os = "linux", target_os = "macos"))] + #[$crate::interpose] + pub unsafe extern "C" fn posix_memalign( + ptr: *mut *mut u8, + alignment: usize, + size: usize, + ) -> i32 { + MALLOC_IMPL.posix_memalign(ptr, alignment, size) + } - #[cfg(target_os = "linux")] - #[$crate::interpose] - pub unsafe extern "C" fn memalign(alignment: usize, size: usize) -> *mut u8 { - MALLOC_IMPL.memalign(alignment, size) - } + #[cfg(target_os = "linux")] + #[$crate::interpose] + pub unsafe extern "C" fn memalign(alignment: usize, size: usize) -> *mut u8 { + MALLOC_IMPL.memalign(alignment, size) + } - #[cfg(target_os = "linux")] - #[$crate::interpose] - pub unsafe extern "C" fn aligned_alloc(alignment: usize, size: usize) -> *mut u8 { - MALLOC_IMPL.aligned_alloc(size, alignment, true, false) - } + #[cfg(target_os = "linux")] + #[$crate::interpose] + pub unsafe extern "C" fn aligned_alloc(alignment: usize, size: usize) -> *mut u8 { + MALLOC_IMPL.aligned_alloc(size, alignment, true, false) + } - #[cfg(target_os = "windows")] - #[$crate::interpose] - pub unsafe extern "C" fn _aligned_malloc(size: usize, alignment: usize) -> *mut u8 { - MALLOC_IMPL.aligned_alloc(size, alignment, false, true) - } + #[cfg(target_os = "windows")] + #[$crate::interpose] + pub unsafe extern "C" fn _aligned_malloc(size: usize, alignment: usize) -> *mut u8 { + MALLOC_IMPL.aligned_alloc(size, alignment, false, true) } } }; @@ -384,53 +358,3 @@ macro_rules! export_malloc_api_macos { } }; } - -#[macro_export] -#[doc(hidden)] -macro_rules! 
export_rust_global_alloc_api { - ($plan: expr, $plan_ty: ty) => { - pub struct Global; - - unsafe impl ::std::alloc::Allocator for Global { - fn allocate( - &self, - mut layout: Layout, - ) -> Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> { - if layout.align() < 16 { - layout = layout.align_to(16).unwrap(); - } - layout = unsafe { layout.pad_to_align_unchecked() }; - let start = <$plan_ty as $crate::Plan>::Mutator::current() - .alloc(layout) - .unwrap_or($crate::util::Address::ZERO); - let slice = unsafe { - ::std::slice::from_raw_parts_mut(start.as_mut() as *mut u8, layout.size()) - }; - Ok(::std::ptr::NonNull::from(slice)) - } - unsafe fn deallocate( - &self, - ptr: ::std::ptr::NonNull, - layout: ::std::alloc::Layout, - ) { - <$plan_ty as $crate::Plan>::Mutator::current().dealloc(ptr.as_ptr().into()) - } - } - - unsafe impl ::std::alloc::GlobalAlloc for Global { - unsafe fn alloc(&self, mut layout: ::std::alloc::Layout) -> *mut u8 { - if layout.align() < 16 { - layout = layout.align_to(16).unwrap(); - } - layout = layout.pad_to_align_unchecked(); - <$plan_ty as $crate::Plan>::Mutator::current() - .alloc(layout) - .unwrap_or($crate::util::Address::ZERO) - .into() - } - unsafe fn dealloc(&self, ptr: *mut u8, _layout: ::std::alloc::Layout) { - <$plan_ty as $crate::Plan>::Mutator::current().dealloc(ptr.into()) - } - } - }; -} diff --git a/mallockit/src/util/malloc/mod.rs b/mallockit/src/util/malloc/mod.rs index 8648133..5cef82e 100644 --- a/mallockit/src/util/malloc/mod.rs +++ b/mallockit/src/util/malloc/mod.rs @@ -2,5 +2,7 @@ pub mod macos_malloc_zone; #[macro_use] mod malloc_api; +#[macro_use] +mod rust_alloc; pub use malloc_api::*; diff --git a/mallockit/src/util/malloc/rust_alloc.rs b/mallockit/src/util/malloc/rust_alloc.rs new file mode 100644 index 0000000..994e206 --- /dev/null +++ b/mallockit/src/util/malloc/rust_alloc.rs @@ -0,0 +1,176 @@ +#[macro_export] +#[doc(hidden)] +macro_rules! 
export_rust_global_alloc_api { + ($plan_ty: ty) => { + pub mod __mallockit_rust_api { + use $crate::util::{Layout, LayoutUtils}; + use $crate::{Mutator, Plan}; + + pub struct Global; + + impl Global { + fn __fix_layout(mut layout: ::std::alloc::Layout) -> Layout { + if layout.align() < $crate::util::constants::MIN_ALIGNMENT { + layout = layout + .align_to($crate::util::constants::MIN_ALIGNMENT) + .unwrap(); + } + layout = unsafe { layout.pad_to_align_unchecked() }; + layout + } + } + + unsafe impl ::std::alloc::Allocator for Global { + fn allocate( + &self, + mut layout: ::std::alloc::Layout, + ) -> ::std::result::Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> + { + layout = Self::__fix_layout(layout); + let start = <$plan_ty as $crate::Plan>::Mutator::current() + .alloc(layout) + .unwrap_or($crate::util::Address::ZERO); + let slice = unsafe { + ::std::slice::from_raw_parts_mut(start.as_mut() as *mut u8, layout.size()) + }; + ::std::result::Result::Ok(::std::ptr::NonNull::from(slice)) + } + + fn allocate_zeroed( + &self, + mut layout: ::std::alloc::Layout, + ) -> ::std::result::Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> + { + layout = Self::__fix_layout(layout); + let start = <$plan_ty as $crate::Plan>::Mutator::current() + .alloc_zeroed(layout) + .unwrap_or($crate::util::Address::ZERO); + let slice = unsafe { + ::std::slice::from_raw_parts_mut(start.as_mut() as *mut u8, layout.size()) + }; + ::std::result::Result::Ok(::std::ptr::NonNull::from(slice)) + } + + unsafe fn deallocate( + &self, + ptr: ::std::ptr::NonNull, + layout: ::std::alloc::Layout, + ) { + <$plan_ty as $crate::Plan>::Mutator::current().dealloc(ptr.as_ptr().into()) + } + + unsafe fn grow( + &self, + ptr: ::std::ptr::NonNull, + old_layout: ::std::alloc::Layout, + mut new_layout: ::std::alloc::Layout, + ) -> ::std::result::Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> + { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + new_layout = Self::__fix_layout(new_layout); + let start = <$plan_ty as $crate::Plan>::Mutator::current() + .realloc(ptr.as_ptr().into(), new_layout) + .unwrap_or($crate::util::Address::ZERO); + let slice = unsafe { + ::std::slice::from_raw_parts_mut( + start.as_mut() as *mut u8, + new_layout.size(), + ) + }; + ::std::result::Result::Ok(::std::ptr::NonNull::from(slice)) + } + + unsafe fn grow_zeroed( + &self, + ptr: ::std::ptr::NonNull, + old_layout: ::std::alloc::Layout, + mut new_layout: ::std::alloc::Layout, + ) -> ::std::result::Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> + { + debug_assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`" + ); + + new_layout = Self::__fix_layout(new_layout); + let start = <$plan_ty as $crate::Plan>::Mutator::current() + .realloc_zeroed(ptr.as_ptr().into(), new_layout) + .unwrap_or($crate::util::Address::ZERO); + let slice = unsafe { + ::std::slice::from_raw_parts_mut( + start.as_mut() as *mut u8, + new_layout.size(), + ) + }; + ::std::result::Result::Ok(::std::ptr::NonNull::from(slice)) + } + + unsafe fn shrink( + &self, + ptr: ::std::ptr::NonNull, + old_layout: ::std::alloc::Layout, + mut new_layout: ::std::alloc::Layout, + ) -> ::std::result::Result<::std::ptr::NonNull<[u8]>, ::std::alloc::AllocError> + { + debug_assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" + ); + + 
new_layout = Self::__fix_layout(new_layout); + let start = <$plan_ty as $crate::Plan>::Mutator::current() + .realloc(ptr.as_ptr().into(), new_layout) + .unwrap_or($crate::util::Address::ZERO); + let slice = unsafe { + ::std::slice::from_raw_parts_mut( + start.as_mut() as *mut u8, + new_layout.size(), + ) + }; + ::std::result::Result::Ok(::std::ptr::NonNull::from(slice)) + } + } + + unsafe impl ::std::alloc::GlobalAlloc for Global { + unsafe fn alloc(&self, mut layout: ::std::alloc::Layout) -> *mut u8 { + layout = Self::__fix_layout(layout); + <$plan_ty as $crate::Plan>::Mutator::current() + .alloc(layout) + .unwrap_or($crate::util::Address::ZERO) + .into() + } + + unsafe fn alloc_zeroed(&self, mut layout: ::std::alloc::Layout) -> *mut u8 { + layout = Self::__fix_layout(layout); + <$plan_ty as $crate::Plan>::Mutator::current() + .alloc_zeroed(layout) + .unwrap_or($crate::util::Address::ZERO) + .into() + } + + unsafe fn dealloc(&self, ptr: *mut u8, _layout: ::std::alloc::Layout) { + <$plan_ty as $crate::Plan>::Mutator::current().dealloc(ptr.into()) + } + + unsafe fn realloc( + &self, + ptr: *mut u8, + layout: ::std::alloc::Layout, + new_size: usize, + ) -> *mut u8 { + let mut new_layout = + unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) }; + new_layout = Self::__fix_layout(new_layout); + <$plan_ty as $crate::Plan>::Mutator::current() + .realloc(ptr.into(), new_layout) + .unwrap_or($crate::util::Address::ZERO) + .into() + } + } + } + }; +} diff --git a/mallockit/src/util/mem/size_class.rs b/mallockit/src/util/mem/size_class.rs index 1b5f9f3..1862ddb 100644 --- a/mallockit/src/util/mem/size_class.rs +++ b/mallockit/src/util/mem/size_class.rs @@ -1,10 +1,14 @@ -use std::{alloc::Layout, usize}; +use std::alloc::Layout; + +use crate::util::constants::LOG_MIN_ALIGNMENT; use super::layout_utils::LayoutUtils; +const LOG_MIN_ALIGNMENT_U8: u8 = LOG_MIN_ALIGNMENT as u8; + #[repr(transparent)] #[derive(Debug, Clone, Copy)] -pub struct SizeClass(pub u8); +pub struct SizeClass(pub u8); impl SizeClass { pub const fn as_usize(self) -> usize { diff --git a/mallockit/src/util/mod.rs b/mallockit/src/util/mod.rs index 053b80f..42e8537 100644 --- a/mallockit/src/util/mod.rs +++ b/mallockit/src/util/mod.rs @@ -18,6 +18,7 @@ macro_rules! 
name_list { } pub mod bits; +pub mod constants; mod lazy; #[macro_use] pub mod malloc; diff --git a/mallockit/src/util/sys/hooks.rs b/mallockit/src/util/sys/hooks.rs index 604c650..30323c7 100644 --- a/mallockit/src/util/sys/hooks.rs +++ b/mallockit/src/util/sys/hooks.rs @@ -3,7 +3,7 @@ use std::panic::PanicInfo; use crate::Plan; fn panic_handler(panic_info: &PanicInfo<'_>) { - println!("{}", panic_info); + crate::println!("{}", panic_info); std::process::abort(); } @@ -13,11 +13,14 @@ pub fn set_panic_handler() { pub extern "C" fn process_start(plan: &'static impl Plan) { set_panic_handler(); + unsafe { + libc::atexit(process_exit); + } #[cfg(target_os = "macos")] crate::util::malloc::macos_malloc_zone::init(); plan.init(); } -pub extern "C" fn process_exit() { +extern "C" fn process_exit() { crate::stat::report(); } diff --git a/rust-toolchain b/rust-toolchain index b2833d7..f4ea31c 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2024-03-01 \ No newline at end of file +nightly-2024-05-01 \ No newline at end of file diff --git a/sanity/Cargo.toml b/sanity/Cargo.toml index d481025..91a192a 100644 --- a/sanity/Cargo.toml +++ b/sanity/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "sanity" version = { workspace = true } -authors = ["Wenyu Zhao "] +authors = ["Wenyu Zhao "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/sanity/src/lib.rs b/sanity/src/lib.rs index ae08beb..26a542b 100644 --- a/sanity/src/lib.rs +++ b/sanity/src/lib.rs @@ -41,7 +41,7 @@ impl Mutator for SanityMutator { fn new() -> Self { Self { - los: LargeObjectAllocator::new(Lazy::new(|| &Self::plan().large_object_space)), + los: LargeObjectAllocator::new(&Self::plan().large_object_space), } }