Skip to content

Commit

Permalink
Use volatile to prevent the compiler from optimizing alias pointers
Browse files Browse the repository at this point in the history
  • Loading branch information
jjyr committed Oct 11, 2024
1 parent 1a3e2e2 commit d407874
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 28 deletions.
18 changes: 12 additions & 6 deletions src/buddy_alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,15 +80,19 @@ struct Node {
impl Node {
/// Initialize `list` as an empty circular doubly-linked list:
/// the single node points at itself in both directions.
fn init(list: *mut Node) {
    unsafe {
        // Use `write` so the (possibly uninitialized) memory at `list`
        // is never read or dropped before being overwritten.
        list.write(Node {
            next: list,
            prev: list,
        });
    }
}

/// Unlink `list` from its circular doubly-linked list.
fn remove(list: *mut Node) {
    unsafe {
        // Volatile writes prevent the compiler from optimizing stores
        // through aliasing pointers (prev/next can alias `list` itself
        // in short lists).
        // Details: https://github.com/jjyr/buddy-alloc/issues/16
        core::ptr::write_volatile(&mut (*(*list).prev).next, (*list).next);
        core::ptr::write_volatile(&mut (*(*list).next).prev, (*list).prev);
    }
}

Expand All @@ -108,8 +112,10 @@ impl Node {
};
// pointer aligned to 16 bytes(MIN_LEAF_SIZE_ALIGN), so it's safe to use write
p.write(n_list);
(*(*list).next).prev = p;
(*list).next = p;
// To prevent the compiler from optimizing alias pointers
// details https://github.com/jjyr/buddy-alloc/issues/16
core::ptr::write_volatile(&mut (*(*list).next).prev, p);
core::ptr::write_volatile(&mut (*list).next, p);
}
}

Expand Down
26 changes: 19 additions & 7 deletions src/fast_alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,19 @@ struct Node {
impl Node {
/// Initialize `list` as an empty circular doubly-linked list:
/// the single node points at itself in both directions.
fn init(list: *mut Node) {
    unsafe {
        // Use `write` so the (possibly uninitialized) memory at `list`
        // is never read or dropped before being overwritten.
        list.write(Node {
            next: list,
            prev: list,
        });
    }
}

/// Unlink `list` from its circular doubly-linked list.
fn remove(list: *mut Node) {
    unsafe {
        // Volatile writes prevent the compiler from optimizing stores
        // through aliasing pointers (prev/next can alias `list` itself
        // in short lists).
        // Details: https://github.com/jjyr/buddy-alloc/issues/16
        core::ptr::write_volatile(&mut (*(*list).prev).next, (*list).next);
        core::ptr::write_volatile(&mut (*(*list).next).prev, (*list).prev);
    }
}

Expand All @@ -40,9 +44,11 @@ impl Node {
prev: list,
next: (*list).next,
};
p.write_unaligned(n_list);
(*(*list).next).prev = p;
(*list).next = p;
p.write(n_list);
// To prevent the compiler from optimizing alias pointers
// details https://github.com/jjyr/buddy-alloc/issues/16
core::ptr::write_volatile(&mut (*(*list).next).prev, p);
core::ptr::write_volatile(&mut (*list).next, p);
}
}

Expand Down Expand Up @@ -107,6 +113,12 @@ impl FastAlloc {
let base_addr = base_addr as usize;
let end_addr = base_addr + nblocks * BLOCK_SIZE;

debug_assert_eq!(
base_addr % BLOCK_SIZE,
0,
"base_addr must align to block size"
);

// Actual blocks to create here
let cblocks = core::cmp::min(nblocks, initialized_nodes);

Expand Down
39 changes: 24 additions & 15 deletions src/tests/fast_alloc.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
use crate::fast_alloc::{FastAlloc, FastAllocParam, BLOCK_SIZE};

/// 4 KiB test buffer forced to 64-byte alignment so its base address
/// satisfies the allocator's block-size alignment check
/// (presumably BLOCK_SIZE divides 64 — confirm against fast_alloc.rs).
#[repr(align(64))]
struct AlignedBuf([u8; 4096]);

impl Default for AlignedBuf {
    /// Returns a zero-filled buffer.
    fn default() -> Self {
        AlignedBuf([0; 4096])
    }
}

fn with_allocator<F: FnOnce(FastAlloc)>(f: F, buf: &[u8]) {
let allocator = unsafe {
let addr = buf.as_ptr();
Expand All @@ -12,7 +21,7 @@ fn with_allocator<F: FnOnce(FastAlloc)>(f: F, buf: &[u8]) {

#[test]
fn test_basic_malloc() {
let buf = [0u8; 4096];
let buf = AlignedBuf::default();
// alloc a min block
with_allocator(
|mut allocator| {
Expand All @@ -24,66 +33,66 @@ fn test_basic_malloc() {
assert_eq!(p_addr, p as usize);
assert_eq!(unsafe { *p }, 42);
},
&buf,
&buf.0,
);
}

#[test]
fn test_multiple_malloc() {
    // Allocate block-sized chunks until the heap is exhausted;
    // every allocation within capacity must succeed.
    let buf = AlignedBuf::default();
    with_allocator(
        |mut allocator| {
            let mut available_bytes = buf.0.len();
            // alloc several sized blocks
            while available_bytes >= BLOCK_SIZE {
                let bytes = BLOCK_SIZE;
                assert!(!allocator.malloc(bytes).is_null());
                available_bytes -= bytes;
            }
        },
        &buf.0,
    );
}

#[test]
fn test_small_size_malloc() {
    // Drain the heap with block-sized allocations, then verify that
    // even a 1-byte request fails once memory is exhausted.
    let buf = AlignedBuf::default();
    with_allocator(
        |mut allocator| {
            let mut available_bytes = buf.0.len();
            while available_bytes >= BLOCK_SIZE {
                assert!(!allocator.malloc(BLOCK_SIZE).is_null());
                available_bytes -= BLOCK_SIZE;
            }
            // memory should be drained, we can't allocate even 1 byte
            assert!(allocator.malloc(1).is_null());
        },
        &buf.0,
    );
}

#[test]
fn test_fail_malloc() {
    // A request larger than a single block must fail.
    let buf = AlignedBuf::default();
    // not enough memory since we only have HEAP_SIZE bytes,
    // and the allocator itself occupied few bytes
    with_allocator(
        |mut allocator| {
            let p = allocator.malloc(BLOCK_SIZE + 1);
            assert!(p.is_null());
        },
        &buf.0,
    );
}

#[test]
fn test_malloc_and_free() {
fn _test_malloc_and_free(times: usize) {
let buf = [0u8; 4096];
let buf = AlignedBuf::default();
with_allocator(
|mut allocator| {
for _i in 0..times {
let mut available_bytes = buf.len();
let mut available_bytes = buf.0.len();
let mut ptrs = Vec::new();
// alloc several sized blocks
while available_bytes >= BLOCK_SIZE {
Expand All @@ -102,15 +111,15 @@ fn test_malloc_and_free() {
}
}
},
&buf,
&buf.0,
);
}
_test_malloc_and_free(10);
}

#[test]
fn test_free_bug() {
let buf = [0u8; 4096];
let buf = AlignedBuf::default();
with_allocator(
|mut allocator| {
let p1 = allocator.malloc(32);
Expand All @@ -120,6 +129,6 @@ fn test_free_bug() {
allocator.free(p2);
allocator.free(p3);
},
&buf,
&buf.0,
);
}

0 comments on commit d407874

Please sign in to comment.