1use super::{Palloc, atomic128::AtomicU128};
4use abyss::spinlock::SpinLock;
5use core::{
6 alloc::AllocError,
7 ptr::NonNull,
8 sync::atomic::{AtomicU64, Ordering},
9};
10
/// Unpacks a 128-bit tagged-pointer word into its `(pointer, tag)` halves.
///
/// The pointer lives in the high 64 bits, the tag in the low 64 bits —
/// the inverse of [`from_pointer_tag`].
#[inline]
pub fn into_pointer_tag<T>(v: u128) -> (*mut T, u64)
where
    T: Sized,
{
    // High half is the address; the `as u64` cast truncates to the low half.
    let addr = (v >> 64) as usize;
    let tag = v as u64;
    (addr as *mut T, tag)
}
21
/// Packs a pointer and a 64-bit tag into one 128-bit word.
///
/// The pointer occupies the high 64 bits, the tag the low 64 bits —
/// the inverse of [`into_pointer_tag`].
#[inline]
pub fn from_pointer_tag<T>(p: *mut T, v: u64) -> u128
where
    T: Sized,
{
    let high = (p as usize as u128) << 64;
    let low = u128::from(v);
    high | low
}
29
/// Lock-free slab allocator handing out fixed `BSIZE`-byte blocks, refilled
/// from the backing page allocator in `GROW_SIZE`-byte chunks.
///
/// The free list is a Treiber stack whose head is a 128-bit packed
/// (block pointer, stamp) word, CAS-updated as a unit to defeat ABA.
pub struct SlabAllocator<const BSIZE: usize, const GROW_SIZE: usize> {
    // Packed (top-of-free-list pointer, stamp); 0 means the list is empty.
    head: AtomicU128,
    // Monotonic counter; a fresh value is tagged into `head` on every push
    // so a pop's CAS cannot succeed against a recycled address (ABA guard).
    stamp: AtomicU64,
    // Serializes slab growth so concurrent empty-list allocs refill once.
    grow_aux: SpinLock<()>,
}
36
/// Overlay for a free block: while a block sits on the free list, its payload
/// bytes are repurposed to hold the packed (next pointer, stamp) link.
/// `#[repr(transparent)]` keeps the layout identical to the `AtomicU128`.
#[doc(hidden)]
#[repr(transparent)]
pub(super) struct Block<'a> {
    // Packed (ptr, stamp) of the next free block; 0 terminates the list.
    next: AtomicU128,
    // Zero-sized marker tying the link's lifetime to the next block.
    _l: core::marker::PhantomData<&'a Block<'a>>,
}
43
// Cold path for a failed redzone check: panics with a hex dump of the memory
// around the corrupted block. `#[inline(never)]` keeps this bulky formatting
// code out of the inlined alloc/dealloc fast paths.
//
// `_where` names the operation that detected the corruption ("alloc" or
// "dealloc"); `dir` names which guard zone failed ("head" or "tail").
#[cfg(feature = "redzone")]
#[inline(never)]
fn verify_redzone_panic(bsize: usize, ptr: usize, _where: &str, dir: &str) {
    panic!(
        "Heap corruption detected on {}! (redzone {})\nbucket: {:?} head: {:x}\n{:?}",
        _where,
        dir,
        bsize,
        ptr,
        DebugArea { ptr, b: bsize }
    );
}
56
/// Checks the guard zones on both sides of the payload at `ptr` and panics
/// (with a memory dump) if any guard byte no longer holds the `RZ` pattern.
#[cfg(feature = "redzone")]
fn verify_redzone(rz_size: usize, bsize: usize, ptr: usize, _where: &str) {
    // A guard zone is intact when every one of its bytes still reads `RZ`.
    // SAFETY (caller-provided): `rz_size` bytes below and above the payload
    // are this block's guard zones and are mapped.
    let intact = |start: usize| unsafe {
        core::slice::from_raw_parts(start as *const u8, rz_size)
            .iter()
            .all(|&n| n == RZ)
    };

    // Guard zone immediately below the payload.
    if !intact(ptr - rz_size) {
        verify_redzone_panic(bsize, ptr, _where, "head");
    }

    // Guard zone immediately above the payload.
    if !intact(ptr + bsize) {
        verify_redzone_panic(bsize, ptr, _where, "tail");
    }
}
75
/// Fill pattern written into (and expected back from) redzone guard bytes.
const RZ: u8 = 0x34;
77
impl<const BSIZE: usize, const GROW_SIZE: usize> SlabAllocator<BSIZE, GROW_SIZE> {
    // Layout constants. With the "redzone" feature each block is laid out as
    // [BSIZE guard | BSIZE payload | BSIZE guard], so the per-block footprint
    // and the grow request both triple; without it the payload is the whole
    // block and no guard bytes exist.
    #[cfg(feature = "redzone")]
    const REDZONE_SIZE: usize = BSIZE;
    #[cfg(feature = "redzone")]
    const BLOCK_SIZE: usize = BSIZE * 3;
    #[cfg(feature = "redzone")]
    const G_SIZE: usize = GROW_SIZE * 3;

    #[cfg(not(feature = "redzone"))]
    const REDZONE_SIZE: usize = 0;
    #[cfg(not(feature = "redzone"))]
    const BLOCK_SIZE: usize = BSIZE;
    #[cfg(not(feature = "redzone"))]
    const G_SIZE: usize = GROW_SIZE;

    /// Creates an empty allocator: a null free-list head (packed value 0),
    /// a zero ABA stamp, and an unlocked grow lock.
    pub(super) const fn new() -> Self {
        Self {
            head: AtomicU128::new(0),
            stamp: AtomicU64::new(0),
            grow_aux: SpinLock::new(()),
        }
    }

    /// Refills the free list: carves one `G_SIZE`-byte chunk from `allocator`
    /// into `BLOCK_SIZE`-byte blocks and pushes each block's payload onto the
    /// free list via [`Self::dealloc`].
    ///
    /// # Errors
    /// Propagates `AllocError` when the backing allocation fails.
    ///
    /// # Safety
    /// Caller must uphold whatever contract `Palloc::allocate` imposes.
    /// NOTE(review): free payloads are reused as `AtomicU128` links, so this
    /// relies on BSIZE being at least 16 bytes and payload addresses being
    /// 16-byte aligned — confirm against Palloc's size/alignment guarantees.
    pub(super) unsafe fn grow(&self, allocator: &Palloc) -> Result<(), AllocError> {
        unsafe {
            let base = allocator.allocate(Self::G_SIZE)?.cast::<u8>().as_ptr() as usize;

            // Paint the whole chunk (payloads included) with the redzone
            // pattern; only the guard bytes are checked later.
            #[cfg(feature = "redzone")]
            core::slice::from_raw_parts_mut(base as *mut u8, Self::G_SIZE).fill(RZ);

            // Push each payload, skipping the leading guard zone (if any).
            for i in (0..Self::G_SIZE).step_by(Self::BLOCK_SIZE) {
                self.dealloc(base + i + Self::REDZONE_SIZE, allocator)
            }
            Ok(())
        }
    }

    /// Returns a block to the free list (lock-free Treiber-stack push).
    ///
    /// # Safety
    /// `ptr` must be the payload address of a block owned by this slab
    /// (freshly carved by [`Self::grow`] or previously handed out by
    /// [`Self::alloc`]) and must not be touched again until re-allocated.
    #[inline]
    pub(super) unsafe fn dealloc(&self, ptr: usize, _allocator: &Palloc) {
        unsafe {
            // Catch writes past either end of the payload before recycling it.
            #[cfg(feature = "redzone")]
            verify_redzone(Self::REDZONE_SIZE, BSIZE, ptr, "dealloc");

            // The free block's own bytes store the packed next link.
            let blk = (ptr as *mut Block).as_mut().unwrap();
            // Fresh monotonic stamp, packed next to the pointer in `head`, so
            // a concurrent pop cannot CAS against a recycled address (ABA).
            let stamp = self.stamp.fetch_add(1, Ordering::Relaxed);
            loop {
                let head = self.head.load(Ordering::Relaxed);
                blk.next.store(head, Ordering::Relaxed);
                let next = from_pointer_tag(blk as *mut Block, stamp);
                // Release publishes the `blk.next` write above to the thread
                // that later pops this block via an Acquire load of `head`.
                if self
                    .head
                    .compare_exchange(head, next, Ordering::Release, Ordering::Relaxed)
                    .is_ok()
                {
                    break;
                }
            }
        }
    }

    /// Pops one `BSIZE`-byte block off the free list, growing the slab when
    /// the list is empty. Returns an (uninitialized) `BSIZE`-long slice.
    ///
    /// # Errors
    /// `AllocError` when the list is empty and [`Self::grow`] cannot refill it.
    ///
    /// # Safety
    /// Caller must uphold `Palloc`'s contract; the returned bytes previously
    /// held the free-list link (and, with redzones, the `RZ` fill pattern).
    #[inline]
    pub(super) unsafe fn alloc(&self, allocator: &Palloc) -> Result<NonNull<[u8]>, AllocError> {
        unsafe {
            loop {
                // Acquire pairs with the Release push in `dealloc`, making
                // the popped block's `next` link visible below.
                let head = self.head.load(Ordering::Acquire);
                let (ptr, _) = into_pointer_tag::<Block>(head);
                if ptr.is_null() {
                    // Empty list: grow under the grow lock, re-checking
                    // emptiness inside so only one thread actually refills.
                    // NOTE(review): assumes `Palloc::serialize` runs the
                    // closure while holding `grow_aux` — confirm.
                    let mut o = Ok(());
                    allocator.serialize(&self.grow_aux, || {
                        if into_pointer_tag::<()>(self.head.load(Ordering::Acquire))
                            .0
                            .is_null()
                        {
                            o = self.grow(allocator);
                        }
                    });
                    o?;
                    continue;
                }

                // Validate guard zones before handing the payload out.
                #[cfg(feature = "redzone")]
                verify_redzone(Self::REDZONE_SIZE, BSIZE, ptr as usize, "alloc");

                // This may read a block another thread is popping at the same
                // time; the stamp packed into `head` makes our CAS fail in
                // that case, and blocks are never handed back to `Palloc` in
                // this module, so the read targets still-mapped memory.
                let next = (*ptr).next.load(Ordering::Relaxed);
                if self
                    .head
                    .compare_exchange(head, next, Ordering::Relaxed, Ordering::Relaxed)
                    .is_ok()
                {
                    break Ok(NonNull::slice_from_raw_parts(
                        NonNull::new(ptr as *mut u8).ok_or(AllocError)?,
                        BSIZE,
                    ));
                }
            }
        }
    }
}
181
// Hex-dump helper used by the redzone corruption panic: its `Debug` impl
// renders the memory around `ptr`.
struct DebugArea {
    // Address at the center of the dump (the suspect block's payload).
    ptr: usize,
    // Region size in bytes; one region of this size is dumped below, at,
    // and above `ptr` (aligned down to 16).
    b: usize,
}
186
impl core::fmt::Debug for DebugArea {
    /// Renders a three-region hex dump — the `self.b` bytes below, at, and
    /// above `self.ptr & !0xf` — 16 bytes per line, for the corruption panic.
    /// NOTE(review): assumes `self.b` is a multiple of 16 and that the region
    /// `self.b` bytes below `ptr` is mapped — confirm at the panic call site.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Dumps one 16-byte line starting at address `b`. Write errors are
        // deliberately swallowed (`let _ =`) so the panic message renders as
        // far as possible even if the formatter fails midway.
        fn fmt_line(f: &mut core::fmt::Formatter<'_>, b: usize) {
            let _ = write!(f, "{b:016x} |");
            for i in 0..8 {
                unsafe {
                    // SAFETY: diagnostic-only raw byte read; assumes the
                    // address is mapped (see NOTE above).
                    let _ = write!(f, " {:02x}", ((b + i) as *const u8).as_ref().unwrap());
                }
            }
            let _ = write!(f, " |");
            for i in 0..8 {
                unsafe {
                    let _ = write!(f, " {:02x}", ((b + i + 8) as *const u8).as_ref().unwrap());
                }
            }
            let _ = writeln!(f);
        }

        // Align the dump window down to a 16-byte boundary.
        let b = self.ptr & !0xf;
        let _ = writeln!(
            f,
            "                 | 00 01 02 03 04 05 06 07 | 08 09 0a 0b 0c 0d 0e 0f"
        );
        let _ = writeln!(
            f,
            "-----------------+-------------------------+------------------------"
        );
        // Region below the block (head redzone neighborhood).
        for i in 0..self.b / 0x10 {
            fmt_line(f, b - self.b + 0x10 * i);
        }
        let _ = writeln!(
            f,
            "-----------------+-------------------------+------------------------"
        );
        // The block itself.
        for i in 0..self.b / 0x10 {
            fmt_line(f, b + 0x10 * i);
        }
        let _ = writeln!(
            f,
            "-----------------+-------------------------+------------------------"
        );
        // Region above the block (tail redzone neighborhood).
        for i in 0..self.b / 0x10 {
            fmt_line(f, b + self.b + 0x10 * i);
        }
        let _ = writeln!(
            f,
            "-----------------+-------------------------+------------------------"
        );
        Ok(())
    }
}