// src/allocator/fixed_size_block.rs
 1  use alloc::alloc::Layout;
 2  use core::ptr;
 3  use super::Locked;
 4  use alloc::alloc::GlobalAlloc;
 5  use core::{mem, ptr::NonNull};
 6  
/// A node in a free list, stored directly inside the free block it
/// describes (the block must therefore be large and aligned enough to
/// hold a `ListNode` — checked by asserts in `dealloc`).
struct ListNode {
    /// Next free block of the same size class, if any.
    next: Option<&'static mut ListNode>,
}
10  
/// The block sizes to use, in increasing order.
///
/// Each size is also used as the block's alignment, so every entry must be
/// a power of two (`Layout::from_size_align` in `alloc` requires it).
const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];
12  
/// An allocator that serves fixed-size blocks from per-size-class free
/// lists and defers to a linked-list heap for requests that fit no class.
pub struct FixedSizeBlockAllocator {
    /// One free-list head per entry in `BLOCK_SIZES` (same index order).
    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
    /// Used for oversized requests and to carve fresh blocks when a
    /// free list is empty.
    fallback_allocator: linked_list_allocator::Heap,
}
17  
18  impl FixedSizeBlockAllocator {
19      pub const fn new() -> Self {
20          const EMPTY: Option<&'static mut ListNode> = None;
21          FixedSizeBlockAllocator {
22              list_heads: [EMPTY; BLOCK_SIZES.len()],
23              fallback_allocator: linked_list_allocator::Heap::empty(),
24          }
25      }
26  
27      pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
28          unsafe { self.fallback_allocator.init(heap_start, heap_size); }
29      }
30  
31      fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
32          match self.fallback_allocator.allocate_first_fit(layout) {
33              Ok(ptr) => ptr.as_ptr(),
34              Err(_) => ptr::null_mut(),
35          }
36      }
37  }
38  
39  unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
40      unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
41          let mut allocator = self.lock();
42          match list_index(&layout) {
43              Some(index) => {
44                  match allocator.list_heads[index].take() {
45                      Some(node) => {
46                          allocator.list_heads[index] = node.next.take();
47                          node as *mut ListNode as *mut u8
48                      }
49                      None => {
50                          let block_size = BLOCK_SIZES[index];
51                          let block_align = block_size;
52                          let layout = Layout::from_size_align(block_size, block_align)
53                              .unwrap();
54                          allocator.fallback_alloc(layout)
55                      }
56                  }
57              }
58              None => allocator.fallback_alloc(layout),
59          }
60      }
61  
62      unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
63          let mut allocator = self.lock();
64          match list_index(&layout) {
65              Some(index) => {
66                  let new_node = ListNode {
67                      next: allocator.list_heads[index].take(),
68                  };
69                  assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
70                  assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
71                  let new_node_ptr = ptr as *mut ListNode;
72                  unsafe {
73                      new_node_ptr.write(new_node);
74                      allocator.list_heads[index] = Some(&mut *new_node_ptr);
75                  }
76              }
77              None => {
78                  let ptr = NonNull::new(ptr).unwrap();
79                  unsafe {
80                      allocator.fallback_allocator.deallocate(ptr, layout);
81                  }
82              }
83          }
84      }
85  }
86  
87  fn list_index(layout: &Layout) -> Option<usize> {
88      let required_block_size = layout.size().max(layout.align());
89      BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
90  }