1#[allow(dead_code)]
3mod atomic128;
4mod slab_allocator;
5
6use crate::mm::ContigPages;
7use abyss::{addressing::Kva, spinlock::SpinLock};
8use core::{
9 alloc::{AllocError, Layout},
10 ptr::NonNull,
11};
12use slab_allocator::SlabAllocator;
13
/// Kernel heap allocator built from power-of-two slab size classes.
///
/// Each `SlabAllocator<OBJ, SLAB>` serves objects of `OBJ` bytes out of
/// backing slabs of `SLAB` bytes, covering object sizes from 64 B up to
/// 128 KiB. Requests larger than the biggest class fall through to the
/// page-based `Palloc` backend (see the `GlobalAlloc` impl below).
pub struct Allocator {
    // NOTE(review): only the two smallest classes are `pub` while the
    // rest are private — presumably some external code pokes at s64/s128
    // directly; confirm, otherwise unify the field visibility.
    pub s64: SlabAllocator<0x40, 0x1000>,
    pub s128: SlabAllocator<0x80, 0x1000>,
    s256: SlabAllocator<0x100, 0x1000>,
    s512: SlabAllocator<0x200, 0x1000>,
    s1024: SlabAllocator<0x400, 0x1000>,
    s2048: SlabAllocator<0x800, 0x2000>,
    s4096: SlabAllocator<0x1000, 0x4000>,
    s8192: SlabAllocator<0x2000, 0x8000>,
    s16384: SlabAllocator<0x4000, 0x10000>,
    s32768: SlabAllocator<0x8000, 0x20000>,
    s65536: SlabAllocator<0x10000, 0x40000>,
    s131072: SlabAllocator<0x20000, 0x80000>,
    /// Page-granularity backend: supplies backing memory to the slabs
    /// and serves requests larger than the biggest slab class.
    allocator: Palloc,
}
42
/// Maps an allocation size in bytes to the index of the slab size class
/// that serves it: 0 for sizes up to 64 B, then one index per
/// power-of-two step (1 → 128 B, 2 → 256 B, …, 11 → 128 KiB). Sizes
/// beyond the largest class produce indices >= 12, which `dispatch!`
/// translates into a fallback to the page allocator.
///
/// Callers guard zero-sized requests before dispatching, so `size - 1`
/// in the `else` arm cannot underflow (the `else` arm needs size > 64
/// anyway).
#[inline]
const fn index_from_size(size: usize) -> u32 {
    if size <= 64 {
        0
    } else {
        // ceil(log2(size)) - 6, via leading_zeros. Use `usize::BITS`
        // instead of a hard-coded 64 so the arithmetic stays correct on
        // targets where `usize` is not 64 bits wide.
        usize::BITS - (size - 1).leading_zeros() - 6
    }
}
51
// Routes `$size` to the matching slab field of `$self_`, binds `$t` to
// a shared reference to that field, and evaluates `$code`, producing
// `Ok($code)`. Sizes whose index from `index_from_size` exceeds the
// largest class (>= 12) produce `Err($size)` so the caller can fall
// back to the page allocator instead.
//
// The `expr_2021` fragment specifiers pin the pre-2024-edition
// expression grammar for the captured expressions.
macro_rules! dispatch {
    ($self_:expr_2021, $size:expr_2021, |$t:ident| $code:expr_2021) => {{
        match index_from_size($size) {
            0 => {
                let $t = &$self_.s64;
                Ok($code)
            }
            1 => {
                let $t = &$self_.s128;
                Ok($code)
            }
            2 => {
                let $t = &$self_.s256;
                Ok($code)
            }
            3 => {
                let $t = &$self_.s512;
                Ok($code)
            }
            4 => {
                let $t = &$self_.s1024;
                Ok($code)
            }
            5 => {
                let $t = &$self_.s2048;
                Ok($code)
            }
            6 => {
                let $t = &$self_.s4096;
                Ok($code)
            }
            7 => {
                let $t = &$self_.s8192;
                Ok($code)
            }
            8 => {
                let $t = &$self_.s16384;
                Ok($code)
            }
            9 => {
                let $t = &$self_.s32768;
                Ok($code)
            }
            10 => {
                let $t = &$self_.s65536;
                Ok($code)
            }
            11 => {
                let $t = &$self_.s131072;
                Ok($code)
            }
            // No slab class fits: hand the size back to the caller.
            _ => Err($size),
        }
    }};
}
107
108impl Allocator {
109 const fn new() -> Self {
111 Self {
112 s64: SlabAllocator::new(),
113 s128: SlabAllocator::new(),
114 s256: SlabAllocator::new(),
115 s512: SlabAllocator::new(),
116 s1024: SlabAllocator::new(),
117 s2048: SlabAllocator::new(),
118 s4096: SlabAllocator::new(),
119 s8192: SlabAllocator::new(),
120 s16384: SlabAllocator::new(),
121 s32768: SlabAllocator::new(),
122 s65536: SlabAllocator::new(),
123 s131072: SlabAllocator::new(),
124 allocator: Palloc,
125 }
126 }
127}
128
// Global-allocator façade: small requests are served from the slab size
// classes, oversized ones fall through to the page allocator.
unsafe impl core::alloc::GlobalAlloc for Allocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let size = layout.size();
        if size == 0 {
            // Zero-sized allocations get a dangling (non-null) pointer;
            // it is never dereferenced, and dealloc skips size 0 too.
            core::ptr::NonNull::dangling().as_ptr()
        } else {
            // Alignments larger than the request size are unsupported.
            // The page path aligns blocks to `size` (new_with_align),
            // so `align <= size` implies the layout is satisfied there;
            // presumably the slab classes return blocks aligned to
            // their power-of-two object size as well — confirm in
            // SlabAllocator.
            assert!(
                layout.align() <= size,
                "align: {:?} size: {:?}",
                layout.align(),
                size
            );
            unsafe {
                // `dispatch!` picks the slab class for `size`; Err means
                // the request is too large for any class.
                match dispatch!(self, size, |allocator| allocator.alloc(&self.allocator)) {
                    Ok(o) => o,
                    Err(size) => self.allocator.allocate(size),
                }
                .map(|n| n.as_ptr() as *mut u8)
                // GlobalAlloc signals allocation failure with null.
                .unwrap_or(core::ptr::null_mut())
            }
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            if layout.size() != 0 {
                debug_assert!(layout.align() <= layout.size());
                // Mirror alloc's routing: the same size maps to the same
                // slab class; Err means the block came straight from the
                // page allocator.
                if let Err(_size) = dispatch!(self, layout.size(), |allocator| allocator
                    .dealloc(ptr as usize, &self.allocator))
                {
                    self.allocator.deallocate(ptr, layout.size());
                }
            }
        }
    }
}
165
/// Thin façade over the contiguous-page allocator in `crate::mm`; backs
/// slab refills and serves requests too large for any slab class.
struct Palloc;

impl Palloc {
    /// Allocates `size` bytes of contiguous pages aligned to `size`.
    ///
    /// The `ContigPages` handle is deliberately `forget`-ed so the
    /// pages are not reclaimed when it goes out of scope; ownership is
    /// reconstructed from the raw address in [`Palloc::deallocate`].
    ///
    /// # Safety
    /// The returned memory must eventually be released via `deallocate`
    /// with the same `size`.
    unsafe fn allocate(&self, size: usize) -> Result<NonNull<[u8]>, AllocError> {
        unsafe {
            match crate::mm::ContigPages::new_with_align(size, size) {
                Some(pg) => {
                    let va = pg.kva().into_usize();
                    // Leak the handle on purpose; see doc comment.
                    core::mem::forget(pg);
                    NonNull::new(core::slice::from_raw_parts_mut(va as *mut u8, size))
                        .ok_or(AllocError)
                }
                _ => Err(AllocError),
            }
        }
    }

    /// Releases memory obtained from [`Palloc::allocate`] by rebuilding
    /// the `ContigPages` handle from the virtual address and dropping
    /// it at the end of the statement.
    ///
    /// # Safety
    /// `ptr`/`size` must describe exactly one prior `allocate` result.
    unsafe fn deallocate(&self, ptr: *mut u8, size: usize) {
        unsafe {
            ContigPages::from_va(Kva::new(ptr as usize).unwrap(), size);
        }
    }

    /// Runs `f` while holding the `aux` spinlock, serializing callers
    /// against each other.
    ///
    /// NOTE(review): the guard is released explicitly with
    /// `_guard.unlock()`; if the abyss guard also unlocks on drop this
    /// would double-unlock — presumably `unlock` consumes the guard.
    /// Confirm against the `abyss::spinlock` API.
    fn serialize<F>(&self, aux: &SpinLock<()>, f: F)
    where
        F: FnOnce(),
    {
        let _guard = aux.lock();
        f();
        _guard.unlock();
    }
}
198
/// Installs `Allocator` as the program-wide global heap allocator;
/// `new()` is `const`, so the static is initialized at compile time.
#[global_allocator]
static ALLOCATOR: Allocator = Allocator::new();