diff --git a/benches/memory_allocator_benchmark.rs b/benches/memory_allocator_benchmark.rs
index 9628469..f78154a 100644
--- a/benches/memory_allocator_benchmark.rs
+++ b/benches/memory_allocator_benchmark.rs
@@ -182,7 +182,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("mutil thread random size", |b| {
b.iter(|| mutil_thread_random_size(black_box(&HEAP_ALLOCATOR)))
});
- c.bench_function("threadtest", |b| b.iter(|| thread_test()));
+ c.bench_function("threadtest", |b| b.iter(thread_test));
}
criterion_group!(benches, criterion_benchmark);
diff --git a/src/frame.rs b/src/frame.rs
index 60f82b3..c84ef2c 100644
--- a/src/frame.rs
+++ b/src/frame.rs
@@ -11,7 +11,7 @@ use spin::Mutex;
/// A frame allocator that uses buddy system, requiring a global allocator.
///
-/// The max order of the allocator is determined by the const generic parameter `ORDER` (`MAX_ORDER = ORDER - 1`).
+/// The max order of the allocator is determined by the const generic parameter `ORDER` (`MAX_ORDER = ORDER - 1`).
/// The frame allocator will only be able to allocate ranges of size up to 2MAX_ORDER, out of a total
/// range of size at most 2MAX_ORDER + 1 - 1.
///
@@ -169,6 +169,12 @@ impl FrameAllocator {
}
}
+impl Default for FrameAllocator {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
/// A locked version of `FrameAllocator`
///
/// # Usage
@@ -187,6 +193,7 @@ impl FrameAllocator {
/// assert_eq!(num, Some(0));
/// ```
#[cfg(feature = "use_spin")]
+#[derive(Default)]
pub struct LockedFrameAllocator(Mutex>);
#[cfg(feature = "use_spin")]
diff --git a/src/lib.rs b/src/lib.rs
index 85eb30a..5e3a7df 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -92,7 +92,7 @@ impl Heap {
while current_start + size_of::() <= end {
let lowbit = current_start & (!current_start + 1);
let mut size = min(lowbit, prev_power_of_two(end - current_start));
-
+
// If the order of size is larger than the max order,
// split it into smaller blocks.
let mut order = size.trailing_zeros() as usize;
@@ -214,6 +214,12 @@ impl Heap {
}
}
+impl Default for Heap {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
impl fmt::Debug for Heap {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_struct("Heap")
@@ -247,6 +253,7 @@ impl fmt::Debug for Heap {
/// }
/// ```
#[cfg(feature = "use_spin")]
+#[derive(Default)]
pub struct LockedHeap(Mutex>);
#[cfg(feature = "use_spin")]
diff --git a/src/linked_list.rs b/src/linked_list.rs
index 806f9d0..7a15925 100644
--- a/src/linked_list.rs
+++ b/src/linked_list.rs
@@ -66,6 +66,12 @@ impl LinkedList {
}
}
+impl Default for LinkedList {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
impl fmt::Debug for LinkedList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_list().entries(self.iter()).finish()
diff --git a/src/test.rs b/src/test.rs
index c99d53c..3ed859a 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -91,9 +91,10 @@ fn test_heap_oom() {
#[test]
fn test_heap_oom_rescue() {
- static mut SPACE: [usize; 100] = [0; 100];
+ const SPACE_SIZE: usize = 100;
+ static mut SPACE: [usize; SPACE_SIZE] = [0; SPACE_SIZE];
let heap = LockedHeapWithRescue::new(|heap: &mut Heap<32>, _layout: &Layout| unsafe {
- heap.add_to_heap(SPACE.as_ptr() as usize, SPACE.as_ptr().add(100) as usize);
+ heap.init(&raw mut SPACE as usize, SPACE_SIZE * core::mem::size_of::<usize>());
});
unsafe {