kernel/alloc/kbox.rs
// SPDX-License-Identifier: GPL-2.0

//! Implementation of [`Box`].

#[allow(unused_imports)] // Used in doc comments.
use super::allocator::{KVmalloc, Kmalloc, Vmalloc, VmallocPageIter};
use super::{AllocError, Allocator, Flags, NumaNode};
use core::alloc::Layout;
use core::borrow::{Borrow, BorrowMut};
use core::fmt;
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::pin::Pin;
use core::ptr::NonNull;
use core::result::Result;

use crate::ffi::c_void;
use crate::init::InPlaceInit;
use crate::page::AsPageIter;
use crate::types::ForeignOwnable;
use pin_init::{InPlaceWrite, Init, PinInit, ZeroableOption};

/// The kernel's [`Box`] type -- a heap allocation for a single value of type `T`.
///
/// This is the kernel's version of the Rust stdlib's `Box`. There are several differences,
/// for example no `noalias` attribute is emitted and partially moving out of a `Box` is not
/// supported. There are also several API differences, e.g. `Box` always requires an [`Allocator`]
/// implementation to be passed as a generic parameter, page [`Flags`] must be supplied when
/// allocating memory, and all functions that may allocate memory are fallible.
///
/// `Box` works with any of the kernel's allocators, e.g. [`Kmalloc`], [`Vmalloc`] or [`KVmalloc`].
/// There are aliases for `Box` with these allocators ([`KBox`], [`VBox`], [`KVBox`]).
///
/// When dropping a [`Box`], the value is also dropped and the heap memory is automatically freed.
///
/// # Examples
///
/// ```
/// let b = KBox::<u64>::new(24_u64, GFP_KERNEL)?;
///
/// assert_eq!(*b, 24_u64);
/// # Ok::<(), Error>(())
/// ```
///
/// ```
/// # use kernel::bindings;
/// const SIZE: usize = bindings::KMALLOC_MAX_SIZE as usize + 1;
/// struct Huge([u8; SIZE]);
///
/// assert!(KBox::<Huge>::new_uninit(GFP_KERNEL | __GFP_NOWARN).is_err());
/// ```
///
/// ```
/// # use kernel::bindings;
/// const SIZE: usize = bindings::KMALLOC_MAX_SIZE as usize + 1;
/// struct Huge([u8; SIZE]);
///
/// assert!(KVBox::<Huge>::new_uninit(GFP_KERNEL).is_ok());
/// ```
///
/// [`Box`]es can also be used to store trait objects by coercing their type:
///
/// ```
/// trait FooTrait {}
///
/// struct FooStruct;
/// impl FooTrait for FooStruct {}
///
/// let _ = KBox::new(FooStruct, GFP_KERNEL)? as KBox<dyn FooTrait>;
/// # Ok::<(), Error>(())
/// ```
///
/// # Invariants
///
/// `self.0` is always properly aligned and either points to memory allocated with `A` or, for
/// zero-sized types, is a dangling, well aligned pointer.
#[repr(transparent)]
#[cfg_attr(CONFIG_RUSTC_HAS_COERCE_POINTEE, derive(core::marker::CoercePointee))]
pub struct Box<#[cfg_attr(CONFIG_RUSTC_HAS_COERCE_POINTEE, pointee)] T: ?Sized, A: Allocator>(
    NonNull<T>,
    PhantomData<A>,
);

// This is to allow coercion from `Box<T, A>` to `Box<U, A>` if `T` can be converted to the
// dynamically-sized type (DST) `U`.
#[cfg(not(CONFIG_RUSTC_HAS_COERCE_POINTEE))]
impl<T, U, A> core::ops::CoerceUnsized<Box<U, A>> for Box<T, A>
where
    T: ?Sized + core::marker::Unsize<U>,
    U: ?Sized,
    A: Allocator,
{
}

// This is to allow `Box<U, A>` to be dispatched on when `Box<T, A>` can be coerced into `Box<U,
// A>`.
#[cfg(not(CONFIG_RUSTC_HAS_COERCE_POINTEE))]
impl<T, U, A> core::ops::DispatchFromDyn<Box<U, A>> for Box<T, A>
where
    T: ?Sized + core::marker::Unsize<U>,
    U: ?Sized,
    A: Allocator,
{
}

/// Type alias for [`Box`] with a [`Kmalloc`] allocator.
///
/// # Examples
///
/// ```
/// let b = KBox::new(24_u64, GFP_KERNEL)?;
///
/// assert_eq!(*b, 24_u64);
/// # Ok::<(), Error>(())
/// ```
pub type KBox<T> = Box<T, super::allocator::Kmalloc>;

/// Type alias for [`Box`] with a [`Vmalloc`] allocator.
///
/// # Examples
///
/// ```
/// let b = VBox::new(24_u64, GFP_KERNEL)?;
///
/// assert_eq!(*b, 24_u64);
/// # Ok::<(), Error>(())
/// ```
pub type VBox<T> = Box<T, super::allocator::Vmalloc>;

/// Type alias for [`Box`] with a [`KVmalloc`] allocator.
///
/// # Examples
///
/// ```
/// let b = KVBox::new(24_u64, GFP_KERNEL)?;
///
/// assert_eq!(*b, 24_u64);
/// # Ok::<(), Error>(())
/// ```
pub type KVBox<T> = Box<T, super::allocator::KVmalloc>;

// SAFETY: All zeros is equivalent to `None` (option layout optimization guarantee:
// <https://doc.rust-lang.org/stable/std/option/index.html#representation>).
unsafe impl<T, A: Allocator> ZeroableOption for Box<T, A> {}

// SAFETY: `Box` is `Send` if `T` is `Send` because the `Box` owns a `T`.
unsafe impl<T, A> Send for Box<T, A>
where
    T: Send + ?Sized,
    A: Allocator,
{
}

// SAFETY: `Box` is `Sync` if `T` is `Sync` because the `Box` owns a `T`.
unsafe impl<T, A> Sync for Box<T, A>
where
    T: Sync + ?Sized,
    A: Allocator,
{
}

impl<T, A> Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    /// Creates a new `Box<T, A>` from a raw pointer.
    ///
    /// # Safety
    ///
    /// For non-ZSTs, `raw` must point at an allocation allocated with `A` that is sufficiently
    /// aligned for and holds a valid `T`. The caller passes ownership of the allocation to the
    /// `Box`.
    ///
    /// For ZSTs, `raw` must be a dangling, well aligned pointer.
    #[inline]
    pub const unsafe fn from_raw(raw: *mut T) -> Self {
        // INVARIANT: Validity of `raw` is guaranteed by the safety preconditions of this function.
        // SAFETY: By the safety preconditions of this function, `raw` is not a NULL pointer.
        Self(unsafe { NonNull::new_unchecked(raw) }, PhantomData)
    }

    /// Consumes the `Box<T, A>` and returns a raw pointer.
    ///
    /// This will not run the destructor of `T` and for non-ZSTs the allocation will stay alive
    /// indefinitely. Use [`Box::from_raw`] to recover the [`Box`], drop the value and free the
    /// allocation, if any.
    ///
    /// # Examples
    ///
    /// ```
    /// let x = KBox::new(24, GFP_KERNEL)?;
    /// let ptr = KBox::into_raw(x);
    /// // SAFETY: `ptr` comes from a previous call to `KBox::into_raw`.
    /// let x = unsafe { KBox::from_raw(ptr) };
    ///
    /// assert_eq!(*x, 24);
    /// # Ok::<(), Error>(())
    /// ```
    #[inline]
    pub fn into_raw(b: Self) -> *mut T {
        ManuallyDrop::new(b).0.as_ptr()
    }

    /// Consumes and leaks the `Box<T, A>` and returns a mutable reference.
    ///
    /// See [`Box::into_raw`] for more details.
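    ///
    /// # Examples
    ///
    /// A minimal sketch of leaking a value and later reclaiming the allocation:
    ///
    /// ```
    /// let b = KBox::new(24_u64, GFP_KERNEL)?;
    /// let r = KBox::leak(b);
    ///
    /// assert_eq!(*r, 24_u64);
    ///
    /// // Reclaim the leaked allocation so this example does not leak memory.
    /// let ptr: *mut u64 = r;
    ///
    /// // SAFETY: `ptr` comes from the call to `KBox::leak()` above and is not used afterwards.
    /// drop(unsafe { KBox::from_raw(ptr) });
    /// # Ok::<(), Error>(())
    /// ```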
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T {
        // SAFETY: `Box::into_raw` always returns a properly aligned and dereferenceable pointer
        // which points to an initialized instance of `T`.
        unsafe { &mut *Box::into_raw(b) }
    }
}

impl<T, A> Box<MaybeUninit<T>, A>
where
    A: Allocator,
{
    /// Converts a `Box<MaybeUninit<T>, A>` to a `Box<T, A>`.
    ///
    /// It is undefined behavior to call this function while the value inside of `self` is not yet
    /// fully initialized.
    ///
    /// # Safety
    ///
    /// Callers must ensure that the value inside of `self` is in an initialized state.
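    ///
    /// # Examples
    ///
    /// A minimal sketch of manual initialization followed by the conversion:
    ///
    /// ```
    /// let mut b = KBox::<u64>::new_uninit(GFP_KERNEL)?;
    ///
    /// (*b).write(42);
    ///
    /// // SAFETY: The value was initialized by the call to `MaybeUninit::write()` above.
    /// let b = unsafe { KBox::assume_init(b) };
    ///
    /// assert_eq!(*b, 42);
    /// # Ok::<(), Error>(())
    /// ```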
    pub unsafe fn assume_init(self) -> Box<T, A> {
        let raw = Self::into_raw(self);

        // SAFETY: `raw` comes from a previous call to `Box::into_raw`. By the safety requirements
        // of this function, the value inside the `Box` is in an initialized state. Hence, it is
        // safe to reconstruct the `Box` as `Box<T, A>`.
        unsafe { Box::from_raw(raw.cast()) }
    }

    /// Writes the value and converts to `Box<T, A>`.
    pub fn write(mut self, value: T) -> Box<T, A> {
        (*self).write(value);

        // SAFETY: We've just initialized `self`'s value.
        unsafe { self.assume_init() }
    }
}

impl<T, A> Box<T, A>
where
    A: Allocator,
{
    /// Creates a new `Box<T, A>` and initializes its contents with `x`.
    ///
    /// New memory is allocated with `A`. The allocation may fail, in which case an error is
    /// returned. For ZSTs no memory is allocated.
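    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// struct Point {
    ///     x: i32,
    ///     y: i32,
    /// }
    ///
    /// let p = KBox::new(Point { x: 1, y: 2 }, GFP_KERNEL)?;
    ///
    /// assert_eq!(p.x, 1);
    /// assert_eq!(p.y, 2);
    /// # Ok::<(), Error>(())
    /// ```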
    pub fn new(x: T, flags: Flags) -> Result<Self, AllocError> {
        let b = Self::new_uninit(flags)?;
        Ok(Box::write(b, x))
    }

    /// Creates a new `Box<T, A>` with uninitialized contents.
    ///
    /// New memory is allocated with `A`. The allocation may fail, in which case an error is
    /// returned. For ZSTs no memory is allocated.
    ///
    /// # Examples
    ///
    /// ```
    /// let b = KBox::<u64>::new_uninit(GFP_KERNEL)?;
    /// let b = KBox::write(b, 24);
    ///
    /// assert_eq!(*b, 24_u64);
    /// # Ok::<(), Error>(())
    /// ```
    pub fn new_uninit(flags: Flags) -> Result<Box<MaybeUninit<T>, A>, AllocError> {
        let layout = Layout::new::<MaybeUninit<T>>();
        let ptr = A::alloc(layout, flags, NumaNode::NO_NODE)?;

        // INVARIANT: `ptr` is either a dangling pointer or points to memory allocated with `A`,
        // which is sufficient in size and alignment for storing a `T`.
        Ok(Box(ptr.cast(), PhantomData))
    }

    /// Constructs a new `Pin<Box<T, A>>`. If `T` does not implement [`Unpin`], then `x` will be
    /// pinned in memory and can't be moved.
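    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let p = KBox::pin(42_u64, GFP_KERNEL)?;
    ///
    /// assert_eq!(*p, 42_u64);
    /// # Ok::<(), Error>(())
    /// ```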
    #[inline]
    pub fn pin(x: T, flags: Flags) -> Result<Pin<Box<T, A>>, AllocError>
    where
        A: 'static,
    {
        Ok(Self::new(x, flags)?.into())
    }

    /// Constructs a pinned slice of elements `Pin<Box<[T], A>>`.
    ///
    /// This is a convenient means for creating e.g. slices of structures containing spinlocks or
    /// mutexes.
    ///
    /// # Examples
    ///
    /// ```
    /// use kernel::sync::{new_spinlock, SpinLock};
    ///
    /// struct Inner {
    ///     a: u32,
    ///     b: u32,
    /// }
    ///
    /// #[pin_data]
    /// struct Example {
    ///     c: u32,
    ///     #[pin]
    ///     d: SpinLock<Inner>,
    /// }
    ///
    /// impl Example {
    ///     fn new() -> impl PinInit<Self, Error> {
    ///         try_pin_init!(Self {
    ///             c: 10,
    ///             d <- new_spinlock!(Inner { a: 20, b: 30 }),
    ///         })
    ///     }
    /// }
    ///
    /// // Allocate a boxed slice of 10 `Example`s.
    /// let s = KBox::pin_slice(
    ///     |_i| Example::new(),
    ///     10,
    ///     GFP_KERNEL
    /// )?;
    ///
    /// assert_eq!(s[5].c, 10);
    /// assert_eq!(s[3].d.lock().a, 20);
    /// # Ok::<(), Error>(())
    /// ```
    pub fn pin_slice<Func, Item, E>(
        mut init: Func,
        len: usize,
        flags: Flags,
    ) -> Result<Pin<Box<[T], A>>, E>
    where
        Func: FnMut(usize) -> Item,
        Item: PinInit<T, E>,
        E: From<AllocError>,
    {
        let mut buffer = super::Vec::<T, A>::with_capacity(len, flags)?;
        for i in 0..len {
            let ptr = buffer.spare_capacity_mut().as_mut_ptr().cast();
            // SAFETY:
            // - `ptr` is a valid pointer to uninitialized memory.
            // - `ptr` is not used if an error is returned.
            // - `ptr` won't be moved until it is dropped, i.e. it is pinned.
            unsafe { init(i).__pinned_init(ptr)? };

            // SAFETY:
            // - `i + 1 <= len`, hence we don't exceed the capacity, due to the call to
            //   `with_capacity()` above.
            // - The new value at index `buffer.len()` is the only element being added here, and
            //   it has been initialized above by `init(i).__pinned_init(ptr)`.
            unsafe { buffer.inc_len(1) };
        }

        let (ptr, _, _) = buffer.into_raw_parts();
        let slice = core::ptr::slice_from_raw_parts_mut(ptr, len);

        // SAFETY: `slice` points to an allocation allocated with `A` (`buffer`) and holds a valid
        // `[T]`.
        Ok(Pin::from(unsafe { Box::from_raw(slice) }))
    }

    /// Converts a [`Box<T,A>`] to a [`Pin<Box<T,A>>`]. If `T` does not implement
    /// [`Unpin`], then `*this` will be pinned in memory and can't be moved.
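    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let b = KBox::new(1_u32, GFP_KERNEL)?;
    /// let pinned = KBox::into_pin(b);
    ///
    /// assert_eq!(*pinned, 1_u32);
    /// # Ok::<(), Error>(())
    /// ```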
    pub fn into_pin(this: Self) -> Pin<Self> {
        this.into()
    }

    /// Forgets the contents (does not run the destructor), but keeps the allocation.
    fn forget_contents(this: Self) -> Box<MaybeUninit<T>, A> {
        let ptr = Self::into_raw(this);

        // SAFETY: `ptr` is valid, because it came from `Box::into_raw`.
        unsafe { Box::from_raw(ptr.cast()) }
    }

    /// Drops the contents, but keeps the allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// let value = KBox::new([0; 32], GFP_KERNEL)?;
    /// assert_eq!(*value, [0; 32]);
    /// let value = KBox::drop_contents(value);
    /// // Now we can re-use `value`:
    /// let value = KBox::write(value, [1; 32]);
    /// assert_eq!(*value, [1; 32]);
    /// # Ok::<(), Error>(())
    /// ```
    pub fn drop_contents(this: Self) -> Box<MaybeUninit<T>, A> {
        let ptr = this.0.as_ptr();

        // SAFETY: `ptr` is valid, because it came from `this`. After this call we never access the
        // value stored in `this` again.
        unsafe { core::ptr::drop_in_place(ptr) };

        Self::forget_contents(this)
    }

    /// Moves the `Box`'s value out of the `Box` and consumes the `Box`.
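    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let b = KBox::new(7_u64, GFP_KERNEL)?;
    ///
    /// assert_eq!(KBox::into_inner(b), 7_u64);
    /// # Ok::<(), Error>(())
    /// ```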
    pub fn into_inner(b: Self) -> T {
        // SAFETY: By the type invariant `&*b` is valid for `read`.
        let value = unsafe { core::ptr::read(&*b) };
        let _ = Self::forget_contents(b);
        value
    }
}

impl<T, A> From<Box<T, A>> for Pin<Box<T, A>>
where
    T: ?Sized,
    A: Allocator,
{
    /// Converts a `Box<T, A>` into a `Pin<Box<T, A>>`. If `T` does not implement [`Unpin`], then
    /// `*b` will be pinned in memory and can't be moved.
    ///
    /// This moves `b` into `Pin` without moving `*b` or allocating and copying any memory.
    fn from(b: Box<T, A>) -> Self {
        // SAFETY: The value wrapped inside a `Pin<Box<T, A>>` cannot be moved or replaced as long
        // as `T` does not implement `Unpin`.
        unsafe { Pin::new_unchecked(b) }
    }
}

impl<T, A> InPlaceWrite<T> for Box<MaybeUninit<T>, A>
where
    A: Allocator + 'static,
{
    type Initialized = Box<T, A>;

    fn write_init<E>(mut self, init: impl Init<T, E>) -> Result<Self::Initialized, E> {
        let slot = self.as_mut_ptr();
        // SAFETY: When init errors/panics, slot will get deallocated but not dropped,
        // slot is valid.
        unsafe { init.__init(slot)? };
        // SAFETY: All fields have been initialized.
        Ok(unsafe { Box::assume_init(self) })
    }

    fn write_pin_init<E>(mut self, init: impl PinInit<T, E>) -> Result<Pin<Self::Initialized>, E> {
        let slot = self.as_mut_ptr();
        // SAFETY: When init errors/panics, slot will get deallocated but not dropped,
        // slot is valid and will not be moved, because we pin it later.
        unsafe { init.__pinned_init(slot)? };
        // SAFETY: All fields have been initialized.
        Ok(unsafe { Box::assume_init(self) }.into())
    }
}

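/// # Examples
///
/// A sketch of in-place pinned initialization via [`InPlaceInit`], following the same pattern as
/// the [`Box::pin_slice`] example above:
///
/// ```
/// # use kernel::init::InPlaceInit;
/// use kernel::sync::{new_spinlock, SpinLock};
///
/// #[pin_data]
/// struct Example {
///     c: u32,
///     #[pin]
///     d: SpinLock<u32>,
/// }
///
/// impl Example {
///     fn new() -> impl PinInit<Self, Error> {
///         try_pin_init!(Self {
///             c: 10,
///             d <- new_spinlock!(20),
///         })
///     }
/// }
///
/// let e = KBox::try_pin_init(Example::new(), GFP_KERNEL)?;
///
/// assert_eq!(e.c, 10);
/// assert_eq!(*e.d.lock(), 20);
/// # Ok::<(), Error>(())
/// ```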
impl<T, A> InPlaceInit<T> for Box<T, A>
where
    A: Allocator + 'static,
{
    type PinnedSelf = Pin<Self>;

    #[inline]
    fn try_pin_init<E>(init: impl PinInit<T, E>, flags: Flags) -> Result<Pin<Self>, E>
    where
        E: From<AllocError>,
    {
        Box::<_, A>::new_uninit(flags)?.write_pin_init(init)
    }

    #[inline]
    fn try_init<E>(init: impl Init<T, E>, flags: Flags) -> Result<Self, E>
    where
        E: From<AllocError>,
    {
        Box::<_, A>::new_uninit(flags)?.write_init(init)
    }
}

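/// # Examples
///
/// A sketch of a round trip through a foreign pointer:
///
/// ```
/// use kernel::types::ForeignOwnable;
///
/// let b = KBox::new(42_u32, GFP_KERNEL)?;
/// let ptr = b.into_foreign();
///
/// // SAFETY: `ptr` comes from the call to `into_foreign()` above and is not used afterwards.
/// let b = unsafe { KBox::<u32>::from_foreign(ptr) };
///
/// assert_eq!(*b, 42);
/// # Ok::<(), Error>(())
/// ```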
// SAFETY: The pointer returned by `into_foreign` comes from a well aligned
// pointer to `T` allocated by `A`.
unsafe impl<T: 'static, A> ForeignOwnable for Box<T, A>
where
    A: Allocator,
{
    const FOREIGN_ALIGN: usize = if core::mem::align_of::<T>() < A::MIN_ALIGN {
        A::MIN_ALIGN
    } else {
        core::mem::align_of::<T>()
    };

    type Borrowed<'a> = &'a T;
    type BorrowedMut<'a> = &'a mut T;

    fn into_foreign(self) -> *mut c_void {
        Box::into_raw(self).cast()
    }

    unsafe fn from_foreign(ptr: *mut c_void) -> Self {
        // SAFETY: The safety requirements of this function ensure that `ptr` comes from a previous
        // call to `Self::into_foreign`.
        unsafe { Box::from_raw(ptr.cast()) }
    }

    unsafe fn borrow<'a>(ptr: *mut c_void) -> &'a T {
        // SAFETY: The safety requirements of this method ensure that the object remains alive and
        // immutable for the duration of 'a.
        unsafe { &*ptr.cast() }
    }

    unsafe fn borrow_mut<'a>(ptr: *mut c_void) -> &'a mut T {
        let ptr = ptr.cast();
        // SAFETY: The safety requirements of this method ensure that the pointer is valid and that
        // nothing else will access the value for the duration of 'a.
        unsafe { &mut *ptr }
    }
}

// SAFETY: The pointer returned by `into_foreign` comes from a well aligned
// pointer to `T` allocated by `A`.
unsafe impl<T: 'static, A> ForeignOwnable for Pin<Box<T, A>>
where
    A: Allocator,
{
    const FOREIGN_ALIGN: usize = <Box<T, A> as ForeignOwnable>::FOREIGN_ALIGN;
    type Borrowed<'a> = Pin<&'a T>;
    type BorrowedMut<'a> = Pin<&'a mut T>;

    fn into_foreign(self) -> *mut c_void {
        // SAFETY: We are still treating the box as pinned.
        Box::into_raw(unsafe { Pin::into_inner_unchecked(self) }).cast()
    }

    unsafe fn from_foreign(ptr: *mut c_void) -> Self {
        // SAFETY: The safety requirements of this function ensure that `ptr` comes from a previous
        // call to `Self::into_foreign`.
        unsafe { Pin::new_unchecked(Box::from_raw(ptr.cast())) }
    }

    unsafe fn borrow<'a>(ptr: *mut c_void) -> Pin<&'a T> {
        // SAFETY: The safety requirements for this function ensure that the object is still alive,
        // so it is safe to dereference the raw pointer.
        // The safety requirements of `from_foreign` also ensure that the object remains alive for
        // the lifetime of the returned value.
        let r = unsafe { &*ptr.cast() };

        // SAFETY: This pointer originates from a `Pin<Box<T>>`.
        unsafe { Pin::new_unchecked(r) }
    }

    unsafe fn borrow_mut<'a>(ptr: *mut c_void) -> Pin<&'a mut T> {
        let ptr = ptr.cast();
        // SAFETY: The safety requirements for this function ensure that the object is still alive,
        // so it is safe to dereference the raw pointer.
        // The safety requirements of `from_foreign` also ensure that the object remains alive for
        // the lifetime of the returned value.
        let r = unsafe { &mut *ptr };

        // SAFETY: This pointer originates from a `Pin<Box<T>>`.
        unsafe { Pin::new_unchecked(r) }
    }
}

impl<T, A> Deref for Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    type Target = T;

    fn deref(&self) -> &T {
        // SAFETY: `self.0` is always properly aligned, dereferenceable and points to an
        // initialized instance of `T`.
        unsafe { self.0.as_ref() }
    }
}

impl<T, A> DerefMut for Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: `self.0` is always properly aligned, dereferenceable and points to an
        // initialized instance of `T`.
        unsafe { self.0.as_mut() }
    }
}

/// # Examples
///
/// ```
/// # use core::borrow::Borrow;
/// # use kernel::alloc::KBox;
/// struct Foo<B: Borrow<u32>>(B);
///
/// // Owned instance.
/// let owned = Foo(1);
///
/// // Owned instance using `KBox`.
/// let owned_kbox = Foo(KBox::new(1, GFP_KERNEL)?);
///
/// let i = 1;
/// // Borrowed from `i`.
/// let borrowed = Foo(&i);
/// # Ok::<(), Error>(())
/// ```
impl<T, A> Borrow<T> for Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn borrow(&self) -> &T {
        self.deref()
    }
}

/// # Examples
///
/// ```
/// # use core::borrow::BorrowMut;
/// # use kernel::alloc::KBox;
/// struct Foo<B: BorrowMut<u32>>(B);
///
/// // Owned instance.
/// let owned = Foo(1);
///
/// // Owned instance using `KBox`.
/// let owned_kbox = Foo(KBox::new(1, GFP_KERNEL)?);
///
/// let mut i = 1;
/// // Borrowed from `i`.
/// let borrowed = Foo(&mut i);
/// # Ok::<(), Error>(())
/// ```
impl<T, A> BorrowMut<T> for Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn borrow_mut(&mut self) -> &mut T {
        self.deref_mut()
    }
}

impl<T, A> fmt::Display for Box<T, A>
where
    T: ?Sized + fmt::Display,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <T as fmt::Display>::fmt(&**self, f)
    }
}

impl<T, A> fmt::Debug for Box<T, A>
where
    T: ?Sized + fmt::Debug,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <T as fmt::Debug>::fmt(&**self, f)
    }
}

impl<T, A> Drop for Box<T, A>
where
    T: ?Sized,
    A: Allocator,
{
    fn drop(&mut self) {
        let layout = Layout::for_value::<T>(self);

        // SAFETY: The pointer in `self.0` is guaranteed to be valid by the type invariant.
        unsafe { core::ptr::drop_in_place::<T>(self.deref_mut()) };

        // SAFETY:
        // - `self.0` was previously allocated with `A`.
        // - `layout` is equal to the `Layout` `self.0` was allocated with.
        unsafe { A::free(self.0.cast(), layout) };
    }
}

/// # Examples
///
/// ```
/// # use kernel::prelude::*;
/// use kernel::alloc::allocator::VmallocPageIter;
/// use kernel::page::{AsPageIter, PAGE_SIZE};
///
/// let mut vbox = VBox::new((), GFP_KERNEL)?;
///
/// assert!(vbox.page_iter().next().is_none());
///
/// let mut vbox = VBox::<[u8; PAGE_SIZE]>::new_uninit(GFP_KERNEL)?;
///
/// let page = vbox.page_iter().next().expect("At least one page should be available.\n");
///
/// // SAFETY: There is no concurrent read or write to the same page.
/// unsafe { page.fill_zero_raw(0, PAGE_SIZE)? };
/// # Ok::<(), Error>(())
/// ```
impl<T> AsPageIter for VBox<T> {
    type Iter<'a>
        = VmallocPageIter<'a>
    where
        T: 'a;

    fn page_iter(&mut self) -> Self::Iter<'_> {
        let ptr = self.0.cast();
        let size = core::mem::size_of::<T>();

        // SAFETY:
        // - `ptr` is a valid pointer to the beginning of a `Vmalloc` allocation.
        // - `ptr` is guaranteed to be valid for the lifetime of `'a`.
        // - `size` is the size of the `Vmalloc` allocation `ptr` points to.
        unsafe { VmallocPageIter::new(ptr, size) }
    }
}