1use core::alloc::GlobalAlloc;
2use core::sync::atomic::{AtomicUsize, Ordering};
3
4use slab_allocator_rs::LockedHeap;
5
6use crate::early_println;
7use crate::vm::vmem::MemoryArea;
8
/// The kernel's global heap allocator, registered with `#[global_allocator]`.
///
/// Declared `static mut` because [`Allocator::init`] takes `&mut self`;
/// `init_heap` accesses it under `#[allow(static_mut_refs)]`.
/// NOTE(review): assumes `init_heap` runs once, before any allocation and
/// before secondary cores/threads start — confirm against boot sequence.
#[global_allocator]
static mut ALLOCATOR: Allocator = Allocator::new();
11
/// Heap allocator state: a lazily-initialized slab heap plus simple
/// allocation statistics.
struct Allocator {
    // `None` until `init` is called; allocating before then panics.
    inner: Option<LockedHeap>,
    // Number of currently outstanding allocations (alloc minus dealloc).
    allocated_count: AtomicUsize,
    // Total bytes currently allocated, as requested via `Layout::size()`.
    allocated_bytes: AtomicUsize,
}
18
19unsafe impl GlobalAlloc for Allocator {
20 unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
21 if let Some(ref inner) = self.inner {
22 let ptr = unsafe { inner.alloc(layout) };
24 self.allocated_count.fetch_add(1, Ordering::SeqCst);
26 self.allocated_bytes.fetch_add(layout.size(), Ordering::SeqCst);
27 ptr
29 } else {
30 panic!("Allocator not initialized, cannot allocate memory.");
31 }
32 }
33
34 unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
35 if let Some(ref inner) = self.inner {
36 unsafe { inner.dealloc(ptr, layout) }
37 self.allocated_count.fetch_sub(1, Ordering::SeqCst);
39 self.allocated_bytes.fetch_sub(layout.size(), Ordering::SeqCst);
40 } else {
42 panic!("Allocator not initialized, cannot deallocate memory.");
43 }
44 }
45}
46
47impl Allocator {
48 pub const fn new() -> Self {
49 Allocator { inner: None, allocated_count: AtomicUsize::new(0), allocated_bytes: AtomicUsize::new(0) }
50 }
51
52 pub unsafe fn init(&mut self, start: usize, size: usize) {
53 if self.inner.is_some() {
54 early_println!("Allocator already initialized.");
55 return;
56 }
57
58 let heap = unsafe { LockedHeap::new(start, size) };
59 self.inner = Some(heap);
60 }
61}
62
63#[allow(static_mut_refs)]
64pub fn init_heap(area: MemoryArea) {
65 let size = area.size();
66 if size == 0 {
67 early_println!("Heap size is zero, skipping initialization.");
68 return;
69 }
70
71 unsafe {
72 ALLOCATOR.init(area.start, size);
73 }
74
75 early_println!("Heap initialized: {:#x} - {:#x}", area.start, area.end);
76}