core/ptr/non_null.rs
1use crate::cmp::Ordering;
2use crate::marker::Unsize;
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21/// However, the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` was chosen to be covariant over `T`. This makes it
24/// possible to use `NonNull<T>` when building covariant types, but introduces the
25/// risk of unsoundness if used in a type that shouldn't actually be covariant.
26/// (The opposite choice was made for `*mut T` even though technically the unsoundness
27/// could only be caused by calling unsafe functions.)
28///
29/// Covariance is correct for most safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
30/// and `LinkedList`. This is the case because they provide a public API that follows the
31/// normal shared XOR mutable rules of Rust.
32///
33/// If your type cannot safely be covariant, you must ensure it contains some
34/// additional field to provide invariance. Often this field will be a [`PhantomData`]
35/// type like `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
36///
37/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
38/// not change the fact that mutating through a (pointer derived from a) shared
39/// reference is undefined behavior unless the mutation happens inside an
40/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
41/// reference. When using this `From` instance without an `UnsafeCell<T>`,
42/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
43/// is never used for mutation.
44///
45/// # Representation
46///
47/// Thanks to the [null pointer optimization],
48/// `NonNull<T>` and `Option<NonNull<T>>`
49/// are guaranteed to have the same size and alignment:
50///
51/// ```
52/// use std::ptr::NonNull;
53///
54/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
55/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
56///
57/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
58/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
59/// ```
60///
61/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
62/// [`PhantomData`]: crate::marker::PhantomData
63/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
64/// [null pointer optimization]: crate::option#representation
65#[stable(feature = "nonnull", since = "1.25.0")]
66#[repr(transparent)]
67#[rustc_layout_scalar_valid_range_start(1)]
68#[rustc_nonnull_optimization_guaranteed]
69#[rustc_diagnostic_item = "NonNull"]
70pub struct NonNull<T: ?Sized> {
71 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
72 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
73 pointer: *const T,
74}
75
76/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
77// N.B., this impl is unnecessary, but should provide better error messages.
78#[stable(feature = "nonnull", since = "1.25.0")]
79impl<T: ?Sized> !Send for NonNull<T> {}
80
81/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: ?Sized> !Sync for NonNull<T> {}
85
86impl<T: Sized> NonNull<T> {
87 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
88 ///
89 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
90 ///
91 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
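    ///
    /// # Examples
    ///
    /// A small illustrative sketch (the address value is arbitrary):
    ///
    /// ```
    /// #![feature(nonnull_provenance)]
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x4000usize).unwrap();
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// // The pointer has the requested address but no provenance,
    /// // so it must never be dereferenced.
    /// assert_eq!(ptr.addr(), addr);
    /// ```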
92 #[unstable(feature = "nonnull_provenance", issue = "135243")]
93 #[must_use]
94 #[inline]
95 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
96 let pointer = crate::ptr::without_provenance(addr.get());
97 // SAFETY: we know `addr` is non-zero.
98 unsafe { NonNull { pointer } }
99 }
100
101 /// Creates a new `NonNull` that is dangling, but well-aligned.
102 ///
103 /// This is useful for initializing types which lazily allocate, like
104 /// `Vec::new` does.
105 ///
106 /// Note that the pointer value may potentially represent a valid pointer to
107 /// a `T`, which means this must not be used as a "not yet initialized"
108 /// sentinel value. Types that lazily allocate must track initialization by
109 /// some other means.
110 ///
111 /// # Examples
112 ///
113 /// ```
114 /// use std::ptr::NonNull;
115 ///
116 /// let ptr = NonNull::<u32>::dangling();
117 /// // Important: don't try to access the value of `ptr` without
118 /// // initializing it first! The pointer is not null but isn't valid either!
119 /// ```
120 #[stable(feature = "nonnull", since = "1.25.0")]
121 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
122 #[must_use]
123 #[inline]
124 pub const fn dangling() -> Self {
125 let align = crate::ptr::Alignment::of::<T>();
126 NonNull::without_provenance(align.as_nonzero())
127 }
128
129 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
130 /// [provenance][crate::ptr#provenance].
131 ///
132 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
133 ///
134 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
135 #[unstable(feature = "nonnull_provenance", issue = "135243")]
136 #[inline]
137 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
138 // SAFETY: we know `addr` is non-zero.
139 unsafe {
140 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
141 NonNull::new_unchecked(ptr)
142 }
143 }
144
145    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
146    /// that the value be initialized.
147 ///
148 /// For the mutable counterpart see [`as_uninit_mut`].
149 ///
150 /// [`as_ref`]: NonNull::as_ref
151 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
152 ///
153 /// # Safety
154 ///
155 /// When calling this method, you have to ensure that
156 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
157 /// Note that because the created reference is to `MaybeUninit<T>`, the
158 /// source pointer can point to uninitialized memory.
159 #[inline]
160 #[must_use]
161 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
162 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
163 // SAFETY: the caller must guarantee that `self` meets all the
164 // requirements for a reference.
165 unsafe { &*self.cast().as_ptr() }
166 }
167
168    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
169    /// that the value be initialized.
170 ///
171 /// For the shared counterpart see [`as_uninit_ref`].
172 ///
173 /// [`as_mut`]: NonNull::as_mut
174 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
175 ///
176 /// # Safety
177 ///
178 /// When calling this method, you have to ensure that
179 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
180 /// Note that because the created reference is to `MaybeUninit<T>`, the
181 /// source pointer can point to uninitialized memory.
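    ///
    /// # Examples
    ///
    /// A minimal sketch of initializing a value through the returned reference:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut storage = MaybeUninit::<u32>::uninit();
    /// let ptr = NonNull::from(&mut storage).cast::<u32>();
    ///
    /// // The memory is not initialized yet, so `as_mut` would not be allowed,
    /// // but a `&mut MaybeUninit<u32>` is fine.
    /// let slot = unsafe { ptr.as_uninit_mut() };
    /// slot.write(7);
    ///
    /// assert_eq!(unsafe { storage.assume_init() }, 7);
    /// ```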
182 #[inline]
183 #[must_use]
184 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
185 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
186 // SAFETY: the caller must guarantee that `self` meets all the
187 // requirements for a reference.
188 unsafe { &mut *self.cast().as_ptr() }
189 }
190}
191
192impl<T: ?Sized> NonNull<T> {
193 /// Creates a new `NonNull`.
194 ///
195 /// # Safety
196 ///
197 /// `ptr` must be non-null.
198 ///
199 /// # Examples
200 ///
201 /// ```
202 /// use std::ptr::NonNull;
203 ///
204 /// let mut x = 0u32;
205 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
206 /// ```
207 ///
208 /// *Incorrect* usage of this function:
209 ///
210 /// ```rust,no_run
211 /// use std::ptr::NonNull;
212 ///
213 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
214 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
215 /// ```
216 #[stable(feature = "nonnull", since = "1.25.0")]
217 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
218 #[inline]
219 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
220 // SAFETY: the caller must guarantee that `ptr` is non-null.
221 unsafe {
222 assert_unsafe_precondition!(
223 check_language_ub,
224 "NonNull::new_unchecked requires that the pointer is non-null",
225 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
226 );
227 NonNull { pointer: ptr as _ }
228 }
229 }
230
231 /// Creates a new `NonNull` if `ptr` is non-null.
232 ///
233 /// # Panics during const evaluation
234 ///
235 /// This method will panic during const evaluation if the pointer cannot be
236 /// determined to be null or not. See [`is_null`] for more information.
237 ///
238 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
239 ///
240 /// # Examples
241 ///
242 /// ```
243 /// use std::ptr::NonNull;
244 ///
245 /// let mut x = 0u32;
246 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
247 ///
248 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
249 /// unreachable!();
250 /// }
251 /// ```
252 #[stable(feature = "nonnull", since = "1.25.0")]
253 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
254 #[inline]
255 pub const fn new(ptr: *mut T) -> Option<Self> {
256 if !ptr.is_null() {
257 // SAFETY: The pointer is already checked and is not null
258 Some(unsafe { Self::new_unchecked(ptr) })
259 } else {
260 None
261 }
262 }
263
264 /// Converts a reference to a `NonNull` pointer.
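    ///
    /// # Examples
    ///
    /// A brief illustration of round-tripping through the pointer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from_ref(&x);
    /// assert_eq!(unsafe { ptr.read() }, 42);
    /// ```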
265 #[stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
266 #[rustc_const_stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
267 #[inline]
268 pub const fn from_ref(r: &T) -> Self {
269 // SAFETY: A reference cannot be null.
270 unsafe { NonNull { pointer: r as *const T } }
271 }
272
273 /// Converts a mutable reference to a `NonNull` pointer.
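    ///
    /// # Examples
    ///
    /// A brief illustration of writing through the pointer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// unsafe { ptr.write(7) };
    /// assert_eq!(x, 7);
    /// ```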
274 #[stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
275 #[rustc_const_stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
276 #[inline]
277 pub const fn from_mut(r: &mut T) -> Self {
278 // SAFETY: A mutable reference cannot be null.
279 unsafe { NonNull { pointer: r as *mut T } }
280 }
281
282 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
283 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
284 ///
285 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
286 ///
287 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
288 #[unstable(feature = "ptr_metadata", issue = "81513")]
289 #[inline]
290 pub const fn from_raw_parts(
291 data_pointer: NonNull<impl super::Thin>,
292 metadata: <T as super::Pointee>::Metadata,
293 ) -> NonNull<T> {
294        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
295 unsafe {
296 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
297 }
298 }
299
300    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
301 ///
302 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
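    ///
    /// # Examples
    ///
    /// A small sketch of splitting a slice pointer apart and putting it back together:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let arr = [0u8; 4];
    /// let ptr: NonNull<[u8]> = NonNull::from(&arr[..]);
    /// let (data, len) = ptr.to_raw_parts();
    /// assert_eq!(len, 4);
    /// assert_eq!(NonNull::<[u8]>::from_raw_parts(data, len), ptr);
    /// ```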
303 #[unstable(feature = "ptr_metadata", issue = "81513")]
304 #[must_use = "this returns the result of the operation, \
305 without modifying the original"]
306 #[inline]
307 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
308 (self.cast(), super::metadata(self.as_ptr()))
309 }
310
311 /// Gets the "address" portion of the pointer.
312 ///
313 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
314 ///
315 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
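    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    /// // The non-zero address matches the address of the underlying raw pointer.
    /// assert_eq!(ptr.addr().get(), ptr.as_ptr().addr());
    /// ```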
316 #[must_use]
317 #[inline]
318 #[stable(feature = "strict_provenance", since = "1.84.0")]
319 pub fn addr(self) -> NonZero<usize> {
320 // SAFETY: The pointer is guaranteed by the type to be non-null,
321 // meaning that the address will be non-zero.
322 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
323 }
324
325 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
326 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
327 ///
328 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
329 ///
330 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
331 #[unstable(feature = "nonnull_provenance", issue = "135243")]
332 pub fn expose_provenance(self) -> NonZero<usize> {
333 // SAFETY: The pointer is guaranteed by the type to be non-null,
334 // meaning that the address will be non-zero.
335 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
336 }
337
338 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
339 /// `self`.
340 ///
341 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
342 ///
343 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
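    ///
    /// # Examples
    ///
    /// A small sketch that re-addresses a pointer within the same allocation:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut data = [1u8, 2, 3];
    /// let ptr: NonNull<u8> = NonNull::from(&mut data).cast();
    /// // Point at the second element, keeping the provenance of `ptr`.
    /// let second = ptr.with_addr(ptr.addr().checked_add(1).unwrap());
    /// assert_eq!(unsafe { second.read() }, 2);
    /// ```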
344 #[must_use]
345 #[inline]
346 #[stable(feature = "strict_provenance", since = "1.84.0")]
347 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
348        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
349 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
350 }
351
352 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
353 /// [provenance][crate::ptr#provenance] of `self`.
354 ///
355 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
356 ///
357 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
358 #[must_use]
359 #[inline]
360 #[stable(feature = "strict_provenance", since = "1.84.0")]
361 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
362 self.with_addr(f(self.addr()))
363 }
364
365 /// Acquires the underlying `*mut` pointer.
366 ///
367 /// # Examples
368 ///
369 /// ```
370 /// use std::ptr::NonNull;
371 ///
372 /// let mut x = 0u32;
373 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
374 ///
375 /// let x_value = unsafe { *ptr.as_ptr() };
376 /// assert_eq!(x_value, 0);
377 ///
378 /// unsafe { *ptr.as_ptr() += 2; }
379 /// let x_value = unsafe { *ptr.as_ptr() };
380 /// assert_eq!(x_value, 2);
381 /// ```
382 #[stable(feature = "nonnull", since = "1.25.0")]
383 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
384 #[rustc_never_returns_null_ptr]
385 #[must_use]
386 #[inline(always)]
387 pub const fn as_ptr(self) -> *mut T {
388 // This is a transmute for the same reasons as `NonZero::get`.
389
390 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
391 // and `*mut T` have the same layout, so transitively we can transmute
392 // our `NonNull` to a `*mut T` directly.
393 unsafe { mem::transmute::<Self, *mut T>(self) }
394 }
395
396 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
397 /// must be used instead.
398 ///
399 /// For the mutable counterpart see [`as_mut`].
400 ///
401 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
402 /// [`as_mut`]: NonNull::as_mut
403 ///
404 /// # Safety
405 ///
406 /// When calling this method, you have to ensure that
407 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
408 ///
409 /// # Examples
410 ///
411 /// ```
412 /// use std::ptr::NonNull;
413 ///
414 /// let mut x = 0u32;
415 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
416 ///
417 /// let ref_x = unsafe { ptr.as_ref() };
418 /// println!("{ref_x}");
419 /// ```
420 ///
421 /// [the module documentation]: crate::ptr#safety
422 #[stable(feature = "nonnull", since = "1.25.0")]
423 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
424 #[must_use]
425 #[inline(always)]
426 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
427 // SAFETY: the caller must guarantee that `self` meets all the
428 // requirements for a reference.
429 // `cast_const` avoids a mutable raw pointer deref.
430 unsafe { &*self.as_ptr().cast_const() }
431 }
432
433 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
434 /// must be used instead.
435 ///
436 /// For the shared counterpart see [`as_ref`].
437 ///
438 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
439 /// [`as_ref`]: NonNull::as_ref
440 ///
441 /// # Safety
442 ///
443 /// When calling this method, you have to ensure that
444 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
445    /// # Examples
446 ///
447 /// ```
448 /// use std::ptr::NonNull;
449 ///
450 /// let mut x = 0u32;
451 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
452 ///
453 /// let x_ref = unsafe { ptr.as_mut() };
454 /// assert_eq!(*x_ref, 0);
455 /// *x_ref += 2;
456 /// assert_eq!(*x_ref, 2);
457 /// ```
458 ///
459 /// [the module documentation]: crate::ptr#safety
460 #[stable(feature = "nonnull", since = "1.25.0")]
461 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
462 #[must_use]
463 #[inline(always)]
464 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
465 // SAFETY: the caller must guarantee that `self` meets all the
466 // requirements for a mutable reference.
467 unsafe { &mut *self.as_ptr() }
468 }
469
470 /// Casts to a pointer of another type.
471 ///
472 /// # Examples
473 ///
474 /// ```
475 /// use std::ptr::NonNull;
476 ///
477 /// let mut x = 0u32;
478 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
479 ///
480 /// let casted_ptr = ptr.cast::<i8>();
481 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
482 /// ```
483 #[stable(feature = "nonnull_cast", since = "1.27.0")]
484 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
485 #[must_use = "this returns the result of the operation, \
486 without modifying the original"]
487 #[inline]
488 pub const fn cast<U>(self) -> NonNull<U> {
489 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
490 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
491 }
492
493 /// Adds an offset to a pointer.
494 ///
495 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
496 /// offset of `3 * size_of::<T>()` bytes.
497 ///
498 /// # Safety
499 ///
500 /// If any of the following conditions are violated, the result is Undefined Behavior:
501 ///
502 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
503 ///
504 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
505 /// [allocated object], and the entire memory range between `self` and the result must be in
506 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
507 /// of the address space.
508 ///
509 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
510 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
511 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
512 /// safe.
513 ///
514 /// [allocated object]: crate::ptr#allocated-object
515 ///
516 /// # Examples
517 ///
518 /// ```
519 /// use std::ptr::NonNull;
520 ///
521 /// let mut s = [1, 2, 3];
522 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
523 ///
524 /// unsafe {
525 /// println!("{}", ptr.offset(1).read());
526 /// println!("{}", ptr.offset(2).read());
527 /// }
528 /// ```
529 #[inline(always)]
530 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
531 #[must_use = "returns a new pointer rather than modifying its argument"]
532 #[stable(feature = "non_null_convenience", since = "1.80.0")]
533 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
534 pub const unsafe fn offset(self, count: isize) -> Self
535 where
536 T: Sized,
537 {
538 // SAFETY: the caller must uphold the safety contract for `offset`.
539        // Additionally, the safety contract of `offset` guarantees that the resulting pointer
540        // points into an allocation; since there can't be an allocation at null, it's safe to
541        // construct a `NonNull`.
542 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
543 }
544
545 /// Calculates the offset from a pointer in bytes.
546 ///
547 /// `count` is in units of **bytes**.
548 ///
549 /// This is purely a convenience for casting to a `u8` pointer and
550 /// using [offset][pointer::offset] on it. See that method for documentation
551 /// and safety requirements.
552 ///
553 /// For non-`Sized` pointees this operation changes only the data pointer,
554 /// leaving the metadata untouched.
555 #[must_use]
556 #[inline(always)]
557 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
558 #[stable(feature = "non_null_convenience", since = "1.80.0")]
559 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
560 pub const unsafe fn byte_offset(self, count: isize) -> Self {
561 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
562 // the same safety contract.
563        // Additionally, the safety contract of `offset` guarantees that the resulting pointer
564        // points into an allocation; since there can't be an allocation at null, it's safe to
565        // construct a `NonNull`.
566 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
567 }
568
569 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
570 ///
571 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
572 /// offset of `3 * size_of::<T>()` bytes.
573 ///
574 /// # Safety
575 ///
576 /// If any of the following conditions are violated, the result is Undefined Behavior:
577 ///
578 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
579 ///
580 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
581 /// [allocated object], and the entire memory range between `self` and the result must be in
582 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
583 /// of the address space.
584 ///
585 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
586 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
587 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
588 /// safe.
589 ///
590 /// [allocated object]: crate::ptr#allocated-object
591 ///
592 /// # Examples
593 ///
594 /// ```
595 /// use std::ptr::NonNull;
596 ///
597 /// let s: &str = "123";
598 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
599 ///
600 /// unsafe {
601 /// println!("{}", ptr.add(1).read() as char);
602 /// println!("{}", ptr.add(2).read() as char);
603 /// }
604 /// ```
605 #[inline(always)]
606 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
607 #[must_use = "returns a new pointer rather than modifying its argument"]
608 #[stable(feature = "non_null_convenience", since = "1.80.0")]
609 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
610 pub const unsafe fn add(self, count: usize) -> Self
611 where
612 T: Sized,
613 {
614 // SAFETY: the caller must uphold the safety contract for `offset`.
615        // Additionally, the safety contract of `offset` guarantees that the resulting pointer
616        // points into an allocation; since there can't be an allocation at null, it's safe to
617        // construct a `NonNull`.
618 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
619 }
620
621 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
622 ///
623 /// `count` is in units of bytes.
624 ///
625 /// This is purely a convenience for casting to a `u8` pointer and
626 /// using [`add`][NonNull::add] on it. See that method for documentation
627 /// and safety requirements.
628 ///
629 /// For non-`Sized` pointees this operation changes only the data pointer,
630 /// leaving the metadata untouched.
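    ///
    /// # Examples
    ///
    /// A brief sketch mirroring the example for [`add`](#method.add), but in bytes:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let s: &str = "123";
    /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
    ///
    /// unsafe {
    ///     assert_eq!(ptr.byte_add(2).read(), b'3');
    /// }
    /// ```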
631 #[must_use]
632 #[inline(always)]
633 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
634 #[stable(feature = "non_null_convenience", since = "1.80.0")]
635 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
636 pub const unsafe fn byte_add(self, count: usize) -> Self {
637 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
638 // safety contract.
639        // Additionally, the safety contract of `add` guarantees that the resulting pointer points
640        // into an allocation; since there can't be an allocation at null, it's safe to construct
641        // a `NonNull`.
642 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
643 }
644
645 /// Subtracts an offset from a pointer (convenience for
646 /// `.offset((count as isize).wrapping_neg())`).
647 ///
648 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
649 /// offset of `3 * size_of::<T>()` bytes.
650 ///
651 /// # Safety
652 ///
653 /// If any of the following conditions are violated, the result is Undefined Behavior:
654 ///
655 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
656 ///
657 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
658 /// [allocated object], and the entire memory range between `self` and the result must be in
659 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
660 /// of the address space.
661 ///
662 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
663 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
664 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
665 /// safe.
666 ///
667 /// [allocated object]: crate::ptr#allocated-object
668 ///
669 /// # Examples
670 ///
671 /// ```
672 /// use std::ptr::NonNull;
673 ///
674 /// let s: &str = "123";
675 ///
676 /// unsafe {
677 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
678 /// println!("{}", end.sub(1).read() as char);
679 /// println!("{}", end.sub(2).read() as char);
680 /// }
681 /// ```
682 #[inline(always)]
683 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
684 #[must_use = "returns a new pointer rather than modifying its argument"]
685 #[stable(feature = "non_null_convenience", since = "1.80.0")]
686 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
687 pub const unsafe fn sub(self, count: usize) -> Self
688 where
689 T: Sized,
690 {
691 if T::IS_ZST {
692 // Pointer arithmetic does nothing when the pointee is a ZST.
693 self
694 } else {
695 // SAFETY: the caller must uphold the safety contract for `offset`.
696 // Because the pointee is *not* a ZST, that means that `count` is
697 // at most `isize::MAX`, and thus the negation cannot overflow.
698 unsafe { self.offset((count as isize).unchecked_neg()) }
699 }
700 }
701
702 /// Calculates the offset from a pointer in bytes (convenience for
703 /// `.byte_offset((count as isize).wrapping_neg())`).
704 ///
705 /// `count` is in units of bytes.
706 ///
707 /// This is purely a convenience for casting to a `u8` pointer and
708 /// using [`sub`][NonNull::sub] on it. See that method for documentation
709 /// and safety requirements.
710 ///
711 /// For non-`Sized` pointees this operation changes only the data pointer,
712 /// leaving the metadata untouched.
713 #[must_use]
714 #[inline(always)]
715 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
716 #[stable(feature = "non_null_convenience", since = "1.80.0")]
717 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
718 pub const unsafe fn byte_sub(self, count: usize) -> Self {
719 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
720 // safety contract.
721        // Additionally, the safety contract of `sub` guarantees that the resulting pointer points
722        // into an allocation; since there can't be an allocation at null, it's safe to construct
723        // a `NonNull`.
724 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
725 }
726
727 /// Calculates the distance between two pointers within the same allocation. The returned value is in
728 /// units of T: the distance in bytes divided by `size_of::<T>()`.
729 ///
730 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
731 /// except that it has a lot more opportunities for UB, in exchange for the compiler
732 /// better understanding what you are doing.
733 ///
734 /// The primary motivation of this method is for computing the `len` of an array/slice
735 /// of `T` that you are currently representing as a "start" and "end" pointer
736 /// (and "end" is "one past the end" of the array).
737 /// In that case, `end.offset_from(start)` gets you the length of the array.
738 ///
739    /// All of the following safety requirements are trivially satisfied for this use case.
740 ///
741 /// [`offset`]: #method.offset
742 ///
743 /// # Safety
744 ///
745 /// If any of the following conditions are violated, the result is Undefined Behavior:
746 ///
747 /// * `self` and `origin` must either
748 ///
749 /// * point to the same address, or
750 /// * both be *derived from* a pointer to the same [allocated object], and the memory range between
751 /// the two pointers must be in bounds of that object. (See below for an example.)
752 ///
753 /// * The distance between the pointers, in bytes, must be an exact multiple
754 /// of the size of `T`.
755 ///
756 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
757 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
758 /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
759 /// than `isize::MAX` bytes.
760 ///
761 /// The requirement for pointers to be derived from the same allocated object is primarily
762 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
763 /// objects is not known at compile-time. However, the requirement also exists at
764 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
765 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
766 /// origin as isize) / size_of::<T>()`.
767 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
768 ///
769 /// [`add`]: #method.add
770 /// [allocated object]: crate::ptr#allocated-object
771 ///
772 /// # Panics
773 ///
774 /// This function panics if `T` is a Zero-Sized Type ("ZST").
775 ///
776 /// # Examples
777 ///
778 /// Basic usage:
779 ///
780 /// ```
781 /// use std::ptr::NonNull;
782 ///
783 /// let a = [0; 5];
784 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
785 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
786 /// unsafe {
787 /// assert_eq!(ptr2.offset_from(ptr1), 2);
788 /// assert_eq!(ptr1.offset_from(ptr2), -2);
789 /// assert_eq!(ptr1.offset(2), ptr2);
790 /// assert_eq!(ptr2.offset(-2), ptr1);
791 /// }
792 /// ```
793 ///
794 /// *Incorrect* usage:
795 ///
796 /// ```rust,no_run
797 /// use std::ptr::NonNull;
798 ///
799 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
800 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
801 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
802 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
803 /// let diff_plus_1 = diff.wrapping_add(1);
804 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
805 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
806 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
807 /// // computing their offset is undefined behavior, even though
808 /// // they point to addresses that are in-bounds of the same object!
809 ///
810 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
811 /// ```
812 #[inline]
813 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
814 #[stable(feature = "non_null_convenience", since = "1.80.0")]
815 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
816 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
817 where
818 T: Sized,
819 {
820 // SAFETY: the caller must uphold the safety contract for `offset_from`.
821 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
822 }
823
824 /// Calculates the distance between two pointers within the same allocation. The returned value is in
825 /// units of **bytes**.
826 ///
827 /// This is purely a convenience for casting to a `u8` pointer and
828 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
829 /// documentation and safety requirements.
830 ///
831 /// For non-`Sized` pointees this operation considers only the data pointers,
832 /// ignoring the metadata.
833 #[inline(always)]
834 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
835 #[stable(feature = "non_null_convenience", since = "1.80.0")]
836 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
837 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
838 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
839 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
840 }
841
842    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
843
844 /// Calculates the distance between two pointers within the same allocation, *where it's known that
845 /// `self` is equal to or greater than `origin`*. The returned value is in
846 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
847 ///
848 /// This computes the same value that [`offset_from`](#method.offset_from)
849 /// would compute, but with the added precondition that the offset is
850 /// guaranteed to be non-negative. This method is equivalent to
851 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
852 /// but it provides slightly more information to the optimizer, which can
853 /// sometimes allow it to optimize slightly better with some backends.
854 ///
855    /// This method can be thought of as recovering the `count` that was passed
856 /// to [`add`](#method.add) (or, with the parameters in the other order,
857 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
858 /// that their safety preconditions are met:
859 /// ```rust
860 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
861 /// ptr.offset_from_unsigned(origin) == count
862 /// # &&
863 /// origin.add(count) == ptr
864 /// # &&
865 /// ptr.sub(count) == origin
866 /// # } }
867 /// ```
868 ///
869 /// # Safety
870 ///
871 /// - The distance between the pointers must be non-negative (`self >= origin`)
872 ///
873 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
874 /// apply to this method as well; see it for the full details.
875 ///
876 /// Importantly, despite the return type of this method being able to represent
877 /// a larger offset, it's still *not permitted* to pass pointers which differ
878 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
879 /// always be less than or equal to `isize::MAX as usize`.
880 ///
881 /// # Panics
882 ///
883 /// This function panics if `T` is a Zero-Sized Type ("ZST").
884 ///
885 /// # Examples
886 ///
887 /// ```
888 /// use std::ptr::NonNull;
889 ///
890 /// let a = [0; 5];
891 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
892 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
893 /// unsafe {
894 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
895 /// assert_eq!(ptr1.add(2), ptr2);
896 /// assert_eq!(ptr2.sub(2), ptr1);
897 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
898 /// }
899 ///
900 /// // This would be incorrect, as the pointers are not correctly ordered:
901 /// // ptr1.offset_from_unsigned(ptr2)
902 /// ```
903 #[inline]
904 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
905 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
906 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
907    pub const unsafe fn offset_from_unsigned(self, origin: NonNull<T>) -> usize
908 where
909 T: Sized,
910 {
911 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
912        unsafe { self.as_ptr().offset_from_unsigned(origin.as_ptr()) }
913 }
914
915 /// Calculates the distance between two pointers within the same allocation, *where it's known that
916 /// `self` is equal to or greater than `origin`*. The returned value is in
917 /// units of **bytes**.
918 ///
919 /// This is purely a convenience for casting to a `u8` pointer and
920 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
921 /// See that method for documentation and safety requirements.
922 ///
923 /// For non-`Sized` pointees this operation considers only the data pointers,
924 /// ignoring the metadata.
925 #[inline(always)]
926 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
927 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
928 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
929 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
930 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
931 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
932 }
933
934 /// Reads the value from `self` without moving it. This leaves the
935 /// memory in `self` unchanged.
936 ///
937 /// See [`ptr::read`] for safety concerns and examples.
938 ///
939 /// [`ptr::read`]: crate::ptr::read()
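    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 12u32;
    /// let ptr = NonNull::from(&x);
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```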
940 #[inline]
941 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
942 #[stable(feature = "non_null_convenience", since = "1.80.0")]
943 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
944 pub const unsafe fn read(self) -> T
945 where
946 T: Sized,
947 {
948 // SAFETY: the caller must uphold the safety contract for `read`.
949 unsafe { ptr::read(self.as_ptr()) }
950 }
951
952 /// Performs a volatile read of the value from `self` without moving it. This
953 /// leaves the memory in `self` unchanged.
954 ///
955 /// Volatile operations are intended to act on I/O memory, and are guaranteed
956 /// to not be elided or reordered by the compiler across other volatile
957 /// operations.
958 ///
959 /// See [`ptr::read_volatile`] for safety concerns and examples.
960 ///
961 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
962 #[inline]
963 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
964 #[stable(feature = "non_null_convenience", since = "1.80.0")]
965 pub unsafe fn read_volatile(self) -> T
966 where
967 T: Sized,
968 {
969 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
970 unsafe { ptr::read_volatile(self.as_ptr()) }
971 }
972
973 /// Reads the value from `self` without moving it. This leaves the
974 /// memory in `self` unchanged.
975 ///
976 /// Unlike `read`, the pointer may be unaligned.
977 ///
978 /// See [`ptr::read_unaligned`] for safety concerns and examples.
979 ///
980 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
981 #[inline]
982 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
983 #[stable(feature = "non_null_convenience", since = "1.80.0")]
984 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
985 pub const unsafe fn read_unaligned(self) -> T
986 where
987 T: Sized,
988 {
989 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
990 unsafe { ptr::read_unaligned(self.as_ptr()) }
991 }
992
993 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
994 /// and destination may overlap.
995 ///
996 /// NOTE: this has the *same* argument order as [`ptr::copy`].
997 ///
998 /// See [`ptr::copy`] for safety concerns and examples.
999 ///
1000 /// [`ptr::copy`]: crate::ptr::copy()
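    ///
    /// # Examples
    ///
    /// A small sketch copying three elements between two arrays:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let src = [1u32, 2, 3];
    /// let mut dst = [0u32; 3];
    /// let src_ptr = NonNull::from(&src).cast::<u32>();
    /// let dst_ptr = NonNull::from(&mut dst).cast::<u32>();
    /// unsafe { src_ptr.copy_to(dst_ptr, 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```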
1001 #[inline(always)]
1002 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1003 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1004 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1005 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1006 where
1007 T: Sized,
1008 {
1009 // SAFETY: the caller must uphold the safety contract for `copy`.
1010 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1011 }
1012
1013 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1014 /// and destination may *not* overlap.
1015 ///
1016 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1017 ///
1018 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1019 ///
1020 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1021 #[inline(always)]
1022 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1023 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1024 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1025 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1026 where
1027 T: Sized,
1028 {
1029 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1030 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1031 }
1032
1033 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1034 /// and destination may overlap.
1035 ///
1036 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1037 ///
1038 /// See [`ptr::copy`] for safety concerns and examples.
1039 ///
1040 /// [`ptr::copy`]: crate::ptr::copy()
1041 #[inline(always)]
1042 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1043 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1044 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1045 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1046 where
1047 T: Sized,
1048 {
1049 // SAFETY: the caller must uphold the safety contract for `copy`.
1050 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1051 }
1052
1053 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1054 /// and destination may *not* overlap.
1055 ///
1056 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1057 ///
1058 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1059 ///
1060 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1061 #[inline(always)]
1062 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1063 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1064 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1065 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1066 where
1067 T: Sized,
1068 {
1069 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1070 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1071 }
1072
1073 /// Executes the destructor (if any) of the pointed-to value.
1074 ///
1075 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1076 ///
1077 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
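    ///
    /// # Examples
    ///
    /// A small sketch, using `ManuallyDrop` so the destructor runs exactly once:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    /// use std::ptr::NonNull;
    ///
    /// let mut v = ManuallyDrop::new(vec![1, 2, 3]);
    /// let ptr = NonNull::from(&mut *v);
    /// // Run the `Vec`'s destructor; `ManuallyDrop` prevents a second drop.
    /// unsafe { ptr.drop_in_place() };
    /// ```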
1078 #[inline(always)]
1079 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1080 pub unsafe fn drop_in_place(self) {
1081 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1082 unsafe { ptr::drop_in_place(self.as_ptr()) }
1083 }
1084
1085 /// Overwrites a memory location with the given value without reading or
1086 /// dropping the old value.
1087 ///
1088 /// See [`ptr::write`] for safety concerns and examples.
1089 ///
1090 /// [`ptr::write`]: crate::ptr::write()
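    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    /// unsafe { ptr.write(5) };
    /// assert_eq!(x, 5);
    /// ```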
1091 #[inline(always)]
1092 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1093 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1094 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1095 pub const unsafe fn write(self, val: T)
1096 where
1097 T: Sized,
1098 {
1099 // SAFETY: the caller must uphold the safety contract for `write`.
1100 unsafe { ptr::write(self.as_ptr(), val) }
1101 }
1102
1103 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1104 /// bytes of memory starting at `self` to `val`.
1105 ///
1106 /// See [`ptr::write_bytes`] for safety concerns and examples.
1107 ///
1108 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
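    ///
    /// # Examples
    ///
    /// A short sketch zeroing a small buffer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [1u8; 4];
    /// let ptr = NonNull::from(&mut buf).cast::<u8>();
    /// unsafe { ptr.write_bytes(0, 4) };
    /// assert_eq!(buf, [0u8; 4]);
    /// ```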
1109 #[inline(always)]
1110 #[doc(alias = "memset")]
1111 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1112 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1113 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1114 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1115 where
1116 T: Sized,
1117 {
1118 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1119 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1120 }
1121
1122 /// Performs a volatile write of a memory location with the given value without
1123 /// reading or dropping the old value.
1124 ///
1125 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1126 /// to not be elided or reordered by the compiler across other volatile
1127 /// operations.
1128 ///
1129 /// See [`ptr::write_volatile`] for safety concerns and examples.
1130 ///
1131 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1132 #[inline(always)]
1133 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1134 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1135 pub unsafe fn write_volatile(self, val: T)
1136 where
1137 T: Sized,
1138 {
1139 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1140 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1141 }
1142
1143 /// Overwrites a memory location with the given value without reading or
1144 /// dropping the old value.
1145 ///
1146 /// Unlike `write`, the pointer may be unaligned.
1147 ///
1148 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1149 ///
1150 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1151 #[inline(always)]
1152 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1153 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1154 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1155 pub const unsafe fn write_unaligned(self, val: T)
1156 where
1157 T: Sized,
1158 {
1159 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1160 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1161 }
1162
1163 /// Replaces the value at `self` with `src`, returning the old
1164 /// value, without dropping either.
1165 ///
1166 /// See [`ptr::replace`] for safety concerns and examples.
1167 ///
1168 /// [`ptr::replace`]: crate::ptr::replace()
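    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 1u32;
    /// let ptr = NonNull::from(&mut x);
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```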
1169 #[inline(always)]
1170 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1171 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1172 pub const unsafe fn replace(self, src: T) -> T
1173 where
1174 T: Sized,
1175 {
1176 // SAFETY: the caller must uphold the safety contract for `replace`.
1177 unsafe { ptr::replace(self.as_ptr(), src) }
1178 }
1179
1180 /// Swaps the values at two mutable locations of the same type, without
1181 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1182 /// otherwise equivalent.
1183 ///
1184 /// See [`ptr::swap`] for safety concerns and examples.
1185 ///
1186 /// [`ptr::swap`]: crate::ptr::swap()
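    ///
    /// # Examples
    ///
    /// A small sketch swapping two values:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa = NonNull::from(&mut a);
    /// let pb = NonNull::from(&mut b);
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```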
1187 #[inline(always)]
1188 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1189 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1190 pub const unsafe fn swap(self, with: NonNull<T>)
1191 where
1192 T: Sized,
1193 {
1194 // SAFETY: the caller must uphold the safety contract for `swap`.
1195 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1196 }
1197
1198 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1199 /// `align`.
1200 ///
1201 /// If it is not possible to align the pointer, the implementation returns
1202 /// `usize::MAX`.
1203 ///
1204 /// The offset is expressed in number of `T` elements, and not bytes.
1205 ///
1206 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1207 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1208 /// the returned offset is correct in all terms other than alignment.
1209 ///
1210 /// When this is called during compile-time evaluation (which is unstable), the implementation
1211 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1212 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1213 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1214 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1215 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1216 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1217 /// for unstable APIs.)
1218 ///
1219 /// # Panics
1220 ///
1221 /// The function panics if `align` is not a power-of-two.
1222 ///
1223 /// # Examples
1224 ///
1225 /// Accessing adjacent `u8` as `u16`
1226 ///
1227 /// ```
1228 /// use std::ptr::NonNull;
1229 ///
1230 /// # unsafe {
1231 /// let x = [5_u8, 6, 7, 8, 9];
1232 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1233 /// let offset = ptr.align_offset(align_of::<u16>());
1234 ///
1235 /// if offset < x.len() - 1 {
1236 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1237 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1238 /// } else {
1239 /// // while the pointer can be aligned via `offset`, it would point
1240 /// // outside the allocation
1241 /// }
1242 /// # }
1243 /// ```
1244 #[inline]
1245 #[must_use]
1246 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1247 pub fn align_offset(self, align: usize) -> usize
1248 where
1249 T: Sized,
1250 {
1251 if !align.is_power_of_two() {
1252 panic!("align_offset: align is not a power-of-two");
1253 }
1254
1255 {
1256 // SAFETY: `align` has been checked to be a power of 2 above.
1257 unsafe { ptr::align_offset(self.as_ptr(), align) }
1258 }
1259 }
1260
1261 /// Returns whether the pointer is properly aligned for `T`.
1262 ///
1263 /// # Examples
1264 ///
1265 /// ```
1266 /// use std::ptr::NonNull;
1267 ///
1268 /// // On some platforms, the alignment of i32 is less than 4.
1269 /// #[repr(align(4))]
1270 /// struct AlignedI32(i32);
1271 ///
1272 /// let data = AlignedI32(42);
1273 /// let ptr = NonNull::<AlignedI32>::from(&data);
1274 ///
1275 /// assert!(ptr.is_aligned());
1276 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1277 /// ```
1278 #[inline]
1279 #[must_use]
1280 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1281 pub fn is_aligned(self) -> bool
1282 where
1283 T: Sized,
1284 {
1285 self.as_ptr().is_aligned()
1286 }
1287
1288 /// Returns whether the pointer is aligned to `align`.
1289 ///
1290 /// For non-`Sized` pointees this operation considers only the data pointer,
1291 /// ignoring the metadata.
1292 ///
1293 /// # Panics
1294 ///
1295 /// The function panics if `align` is not a power-of-two (this includes 0).
1296 ///
1297 /// # Examples
1298 ///
1299 /// ```
1300 /// #![feature(pointer_is_aligned_to)]
1301 ///
1302 /// // On some platforms, the alignment of i32 is less than 4.
1303 /// #[repr(align(4))]
1304 /// struct AlignedI32(i32);
1305 ///
1306 /// let data = AlignedI32(42);
1307 /// let ptr = &data as *const AlignedI32;
1308 ///
1309 /// assert!(ptr.is_aligned_to(1));
1310 /// assert!(ptr.is_aligned_to(2));
1311 /// assert!(ptr.is_aligned_to(4));
1312 ///
1313 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1314 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1315 ///
1316 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1317 /// ```
1318 #[inline]
1319 #[must_use]
1320 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1321 pub fn is_aligned_to(self, align: usize) -> bool {
1322 self.as_ptr().is_aligned_to(align)
1323 }
1324}
1325
1326impl<T> NonNull<[T]> {
1327 /// Creates a non-null raw slice from a thin pointer and a length.
1328 ///
1329 /// The `len` argument is the number of **elements**, not the number of bytes.
1330 ///
1331 /// This function is safe, but dereferencing the return value is unsafe.
1332 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1333 ///
1334 /// # Examples
1335 ///
1336 /// ```rust
1337 /// use std::ptr::NonNull;
1338 ///
1339 /// // create a slice pointer when starting out with a pointer to the first element
1340 /// let mut x = [5, 6, 7];
1341 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1342 /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
1343 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1344 /// ```
1345 ///
1346 /// (Note that this example artificially demonstrates a use of this method,
1347 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1348 #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
1349 #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
1350 #[must_use]
1351 #[inline]
1352 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
1353 // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
1354 unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
1355 }
1356
1357 /// Returns the length of a non-null raw slice.
1358 ///
1359 /// The returned value is the number of **elements**, not the number of bytes.
1360 ///
1361 /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
1362 /// because the pointer does not have a valid address.
1363 ///
1364 /// # Examples
1365 ///
1366 /// ```rust
1367 /// use std::ptr::NonNull;
1368 ///
1369 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1370 /// assert_eq!(slice.len(), 3);
1371 /// ```
1372 #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
1373 #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
1374 #[must_use]
1375 #[inline]
1376 pub const fn len(self) -> usize {
1377 self.as_ptr().len()
1378 }
1379
1380 /// Returns `true` if the non-null raw slice has a length of 0.
1381 ///
1382 /// # Examples
1383 ///
1384 /// ```rust
1385 /// use std::ptr::NonNull;
1386 ///
1387 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1388 /// assert!(!slice.is_empty());
1389 /// ```
1390 #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
1391 #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
1392 #[must_use]
1393 #[inline]
1394 pub const fn is_empty(self) -> bool {
1395 self.len() == 0
1396 }
1397
1398 /// Returns a non-null pointer to the slice's buffer.
1399 ///
1400 /// # Examples
1401 ///
1402 /// ```rust
1403 /// #![feature(slice_ptr_get)]
1404 /// use std::ptr::NonNull;
1405 ///
1406 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1407 /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
1408 /// ```
1409 #[inline]
1410 #[must_use]
1411 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1412 pub const fn as_non_null_ptr(self) -> NonNull<T> {
1413 self.cast()
1414 }
1415
1416 /// Returns a raw pointer to the slice's buffer.
1417 ///
1418 /// # Examples
1419 ///
1420 /// ```rust
1421 /// #![feature(slice_ptr_get)]
1422 /// use std::ptr::NonNull;
1423 ///
1424 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1425 /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
1426 /// ```
1427 #[inline]
1428 #[must_use]
1429 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1430 #[rustc_never_returns_null_ptr]
1431 pub const fn as_mut_ptr(self) -> *mut T {
1432 self.as_non_null_ptr().as_ptr()
1433 }
1434
1435 /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
1436 /// [`as_ref`], this does not require that the value be initialized.
1437 ///
1438 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1439 ///
1440 /// [`as_ref`]: NonNull::as_ref
1441 /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
1442 ///
1443 /// # Safety
1444 ///
1445 /// When calling this method, you have to ensure that all of the following is true:
1446 ///
1447 /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
1448 /// and it must be properly aligned. This means in particular:
1449 ///
1450 /// * The entire memory range of this slice must be contained within a single allocated object!
1451 /// Slices can never span across multiple allocated objects.
1452 ///
1453 /// * The pointer must be aligned even for zero-length slices. One
1454 /// reason for this is that enum layout optimizations may rely on references
1455 /// (including slices of any length) being aligned and non-null to distinguish
1456 /// them from other data. You can obtain a pointer that is usable as the data pointer
1457 /// for zero-length slices using [`NonNull::dangling()`].
1458 ///
1459 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1460 /// See the safety documentation of [`pointer::offset`].
1461 ///
1462 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1463 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1464 /// In particular, while this reference exists, the memory the pointer points to must
1465 /// not get mutated (except inside `UnsafeCell`).
1466 ///
1467 /// This applies even if the result of this method is unused!
1468 ///
1469 /// See also [`slice::from_raw_parts`].
1470 ///
1471 /// [valid]: crate::ptr#safety
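    ///
    /// # Examples
    ///
    /// A minimal sketch, mirroring the example on [`as_uninit_slice_mut`], that views a
    /// freshly allocated (and therefore still uninitialized) buffer through this method:
    ///
    /// ```rust
    /// #![feature(allocator_api, ptr_as_uninit)]
    ///
    /// use std::alloc::{Allocator, Layout, Global};
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
    /// // Viewing the bytes as `&[MaybeUninit<u8>]` is allowed even though they are
    /// // uninitialized; viewing them as `&[u8]` (e.g. via `as_ref`) would not be.
    /// # #[allow(unused_variables)]
    /// let slice: &[MaybeUninit<u8>] = unsafe { memory.as_uninit_slice() };
    /// # // Prevent leaks for Miri.
    /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
    /// # Ok::<_, std::alloc::AllocError>(())
    /// ```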
1472 #[inline]
1473 #[must_use]
1474 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1475 pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
1476 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1477 unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
1478 }
1479
1480 /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
1481 /// [`as_mut`], this does not require that the value be initialized.
1482 ///
1483 /// For the shared counterpart see [`as_uninit_slice`].
1484 ///
1485 /// [`as_mut`]: NonNull::as_mut
1486 /// [`as_uninit_slice`]: NonNull::as_uninit_slice
1487 ///
1488 /// # Safety
1489 ///
1490 /// When calling this method, you have to ensure that all of the following is true:
1491 ///
1492 /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1493 /// many bytes, and it must be properly aligned. This means in particular:
1494 ///
1495 /// * The entire memory range of this slice must be contained within a single allocated object!
1496 /// Slices can never span across multiple allocated objects.
1497 ///
1498 /// * The pointer must be aligned even for zero-length slices. One
1499 /// reason for this is that enum layout optimizations may rely on references
1500 /// (including slices of any length) being aligned and non-null to distinguish
1501 /// them from other data. You can obtain a pointer that is usable as the data pointer
1502 /// for zero-length slices using [`NonNull::dangling()`].
1503 ///
1504 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1505 /// See the safety documentation of [`pointer::offset`].
1506 ///
1507 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1508 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1509 /// In particular, while this reference exists, the memory the pointer points to must
1510 /// not get accessed (read or written) through any other pointer.
1511 ///
1512 /// This applies even if the result of this method is unused!
1513 ///
1514 /// See also [`slice::from_raw_parts_mut`].
1515 ///
1516 /// [valid]: crate::ptr#safety
1517 ///
1518 /// # Examples
1519 ///
1520 /// ```rust
1521 /// #![feature(allocator_api, ptr_as_uninit)]
1522 ///
1523 /// use std::alloc::{Allocator, Layout, Global};
1524 /// use std::mem::MaybeUninit;
1525 /// use std::ptr::NonNull;
1526 ///
1527 /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
1528 /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
1529 /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
1530 /// # #[allow(unused_variables)]
1531 /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
1532 /// # // Prevent leaks for Miri.
1533 /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
1534 /// # Ok::<_, std::alloc::AllocError>(())
1535 /// ```
1536 #[inline]
1537 #[must_use]
1538 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1539 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
1540 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1541 unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
1542 }
1543
1544 /// Returns a raw pointer to an element or subslice, without doing bounds
1545 /// checking.
1546 ///
1547 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1548 /// is *[undefined behavior]* even if the resulting pointer is not used.
1549 ///
1550 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1551 ///
1552 /// # Examples
1553 ///
1554 /// ```
1555 /// #![feature(slice_ptr_get)]
1556 /// use std::ptr::NonNull;
1557 ///
1558 /// let x = &mut [1, 2, 4];
1559 /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
1560 ///
1561 /// unsafe {
1562 /// assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
1563 /// }
1564 /// ```
1565 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1566 #[inline]
1567 pub unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
1568 where
1569 I: SliceIndex<[T]>,
1570 {
1571 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1572 // As a consequence, the resulting pointer cannot be null.
1573 unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
1574 }
1575}
1576
1577#[stable(feature = "nonnull", since = "1.25.0")]
1578impl<T: ?Sized> Clone for NonNull<T> {
1579 #[inline(always)]
1580 fn clone(&self) -> Self {
1581 *self
1582 }
1583}
1584
1585#[stable(feature = "nonnull", since = "1.25.0")]
1586impl<T: ?Sized> Copy for NonNull<T> {}
1587
1588#[unstable(feature = "coerce_unsized", issue = "18598")]
1589impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1590
1591#[unstable(feature = "dispatch_from_dyn", issue = "none")]
1592impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1593
1594#[stable(feature = "pin", since = "1.33.0")]
1595unsafe impl<T: ?Sized> PinCoerceUnsized for NonNull<T> {}
1596
1597#[unstable(feature = "pointer_like_trait", issue = "none")]
1598impl<T> core::marker::PointerLike for NonNull<T> {}
1599
1600#[stable(feature = "nonnull", since = "1.25.0")]
1601impl<T: ?Sized> fmt::Debug for NonNull<T> {
1602 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1603 fmt::Pointer::fmt(&self.as_ptr(), f)
1604 }
1605}
1606
1607#[stable(feature = "nonnull", since = "1.25.0")]
1608impl<T: ?Sized> fmt::Pointer for NonNull<T> {
1609 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1610 fmt::Pointer::fmt(&self.as_ptr(), f)
1611 }
1612}
1613
1614#[stable(feature = "nonnull", since = "1.25.0")]
1615impl<T: ?Sized> Eq for NonNull<T> {}
1616
1617#[stable(feature = "nonnull", since = "1.25.0")]
1618impl<T: ?Sized> PartialEq for NonNull<T> {
1619 #[inline]
1620 #[allow(ambiguous_wide_pointer_comparisons)]
1621 fn eq(&self, other: &Self) -> bool {
1622 self.as_ptr() == other.as_ptr()
1623 }
1624}
1625
1626#[stable(feature = "nonnull", since = "1.25.0")]
1627impl<T: ?Sized> Ord for NonNull<T> {
1628 #[inline]
1629 #[allow(ambiguous_wide_pointer_comparisons)]
1630 fn cmp(&self, other: &Self) -> Ordering {
1631 self.as_ptr().cmp(&other.as_ptr())
1632 }
1633}
1634
1635#[stable(feature = "nonnull", since = "1.25.0")]
1636impl<T: ?Sized> PartialOrd for NonNull<T> {
1637 #[inline]
1638 #[allow(ambiguous_wide_pointer_comparisons)]
1639 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1640 self.as_ptr().partial_cmp(&other.as_ptr())
1641 }
1642}
1643
1644#[stable(feature = "nonnull", since = "1.25.0")]
1645impl<T: ?Sized> hash::Hash for NonNull<T> {
1646 #[inline]
1647 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1648 self.as_ptr().hash(state)
1649 }
1650}
1651
1652#[unstable(feature = "ptr_internals", issue = "none")]
1653impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
1654 #[inline]
1655 fn from(unique: Unique<T>) -> Self {
1656 unique.as_non_null_ptr()
1657 }
1658}
1659
1660#[stable(feature = "nonnull", since = "1.25.0")]
1661impl<T: ?Sized> From<&mut T> for NonNull<T> {
1662 /// Converts a `&mut T` to a `NonNull<T>`.
1663 ///
1664 /// This conversion is safe and infallible since references cannot be null.
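    ///
    /// # Examples
    ///
    /// A small sketch for illustration:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 5u32;
    /// let mut ptr: NonNull<u32> = NonNull::from(&mut x);
    /// // Writing through the pointer is sound here because it was derived from a
    /// // mutable reference and `x` is not otherwise accessed until afterwards.
    /// unsafe { *ptr.as_mut() += 1 };
    /// assert_eq!(x, 6);
    /// ```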
1665 #[inline]
1666 fn from(r: &mut T) -> Self {
1667 NonNull::from_mut(r)
1668 }
1669}
1670
1671#[stable(feature = "nonnull", since = "1.25.0")]
1672impl<T: ?Sized> From<&T> for NonNull<T> {
1673 /// Converts a `&T` to a `NonNull<T>`.
1674 ///
1675 /// This conversion is safe and infallible since references cannot be null.
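    ///
    /// # Examples
    ///
    /// A small sketch for illustration; note that a pointer obtained this way must not
    /// be used for mutation (see the type-level documentation):
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 5u32;
    /// let ptr: NonNull<u32> = NonNull::from(&x);
    /// // Reading through the pointer is fine; writing through it would be
    /// // undefined behavior because it originates from a shared reference.
    /// assert_eq!(unsafe { *ptr.as_ref() }, 5);
    /// ```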
1676 #[inline]
1677 fn from(r: &T) -> Self {
1678 NonNull::from_ref(r)
1679 }
1680}