1use core::alloc::GlobalAlloc;
2use core::sync::atomic::{AtomicUsize, Ordering};
3
4use slab_allocator_rs::LockedHeap;
5
6use crate::early_println;
7use crate::vm::vmem::MemoryArea;
8
// The kernel-wide heap allocator. `#[global_allocator]` routes every
// `alloc`/`dealloc` in the kernel (Box, Vec, ...) through this instance.
// Pinned into `.data` so the static lives in initialized memory rather than
// `.bss` — presumably so it is valid before/while `.bss` is being cleared
// during early boot (NOTE(review): confirm against the boot sequence).
#[global_allocator]
#[unsafe(link_section = ".data")]
static ALLOCATOR: Allocator = Allocator::new();
15
/// Global heap allocator wrapper: a lazily-initialized slab heap plus
/// simple allocation statistics.
struct Allocator {
    // Backing slab heap; `spin::Once` keeps it uninitialized until
    // `init` is called (see `init_heap`), and makes initialization race-free.
    inner: spin::Once<LockedHeap>,
    // Number of currently live allocations (incremented on alloc,
    // decremented on dealloc).
    allocated_count: AtomicUsize,
    // Total bytes of currently live allocations, as requested via `Layout`.
    allocated_bytes: AtomicUsize,
}
22
23unsafe impl GlobalAlloc for Allocator {
24 unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
25 if let Some(inner) = self.inner.get() {
26 let ptr = unsafe { inner.alloc(layout) };
28 self.allocated_count.fetch_add(1, Ordering::SeqCst);
30 self.allocated_bytes
31 .fetch_add(layout.size(), Ordering::SeqCst);
32 return ptr;
34 }
35 panic!("Allocator not initialized, cannot allocate memory.");
36 }
37
38 unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
39 if let Some(inner) = self.inner.get() {
40 unsafe { inner.dealloc(ptr, layout) }
41 self.allocated_count.fetch_sub(1, Ordering::SeqCst);
43 self.allocated_bytes
44 .fetch_sub(layout.size(), Ordering::SeqCst);
45 return;
46 }
47 panic!("Allocator not initialized, cannot deallocate memory.");
48 }
49}
50
51impl Allocator {
52 pub const fn new() -> Self {
53 Allocator {
54 inner: spin::Once::new(),
55 allocated_count: AtomicUsize::new(0),
56 allocated_bytes: AtomicUsize::new(0),
57 }
58 }
59
60 pub unsafe fn init(&self, start: usize, size: usize) {
61 let _ = self
62 .inner
63 .call_once(|| unsafe { LockedHeap::new(start, size) });
64 }
65}
66
67pub fn init_heap(area: MemoryArea) {
68 let size = area.size();
69 if size == 0 {
70 early_println!("Heap size is zero, skipping initialization.");
71 return;
72 }
73
74 unsafe {
75 ALLOCATOR.init(area.start, size);
76 }
77
78 early_println!("Heap initialized: {:#x} - {:#x}", area.start, area.end);
79}