core\ptr/
mut_ptr.rs

1use super::*;
2use crate::cmp::Ordering::{Equal, Greater, Less};
3use crate::intrinsics::const_eval_select;
4use crate::mem::{self, SizedTypeProperties};
5use crate::slice::{self, SliceIndex};
6
7impl<T: ?Sized> *mut T {
8    /// Returns `true` if the pointer is null.
9    ///
10    /// Note that unsized types have many possible null pointers, as only the
11    /// raw data pointer is considered, not their length, vtable, etc.
12    /// Therefore, two pointers that are null may still not compare equal to
13    /// each other.
14    ///
15    /// # Panics during const evaluation
16    ///
17    /// If this method is used during const evaluation, and `self` is a pointer
18    /// that is offset beyond the bounds of the memory it initially pointed to,
19    /// then there might not be enough information to determine whether the
20    /// pointer is null. This is because the absolute address in memory is not
21    /// known at compile time. If the nullness of the pointer cannot be
22    /// determined, this method will panic.
23    ///
24    /// In-bounds pointers are never null, so the method will never panic for
25    /// such pointers.
26    ///
27    /// # Examples
28    ///
29    /// ```
30    /// let mut s = [1, 2, 3];
31    /// let ptr: *mut u32 = s.as_mut_ptr();
32    /// assert!(!ptr.is_null());
33    /// ```
34    #[stable(feature = "rust1", since = "1.0.0")]
35    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
36    #[rustc_diagnostic_item = "ptr_is_null"]
37    #[inline]
38    pub const fn is_null(self) -> bool {
39        self.cast_const().is_null()
40    }
41
42    /// Casts to a pointer of another type.
43    #[stable(feature = "ptr_cast", since = "1.38.0")]
44    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
45    #[rustc_diagnostic_item = "ptr_cast"]
46    #[inline(always)]
47    pub const fn cast<U>(self) -> *mut U {
48        self as _
49    }
50
51    /// Try to cast to a pointer of another type by checking aligment.
52    ///
53    /// If the pointer is properly aligned to the target type, it will be
54    /// cast to the target type. Otherwise, `None` is returned.
55    ///
56    /// # Examples
57    ///
58    /// ```rust
59    /// #![feature(pointer_try_cast_aligned)]
60    ///
61    /// let mut x = 0u64;
62    ///
63    /// let aligned: *mut u64 = &mut x;
64    /// let unaligned = unsafe { aligned.byte_add(1) };
65    ///
66    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
67    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
68    /// ```
69    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
70    #[must_use = "this returns the result of the operation, \
71                  without modifying the original"]
72    #[inline]
73    pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
74        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
75    }
76
77    /// Uses the address value in a new pointer of another type.
78    ///
79    /// This operation will ignore the address part of its `meta` operand and discard existing
80    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
81    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
82    /// with new metadata such as slice lengths or `dyn`-vtable.
83    ///
84    /// The resulting pointer will have provenance of `self`. This operation is semantically the
85    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
86    /// `meta`, being fat or thin depending on the `meta` operand.
87    ///
88    /// # Examples
89    ///
90    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
91    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
92    /// recombined with its own original metadata.
93    ///
94    /// ```
95    /// #![feature(set_ptr_value)]
96    /// # use core::fmt::Debug;
97    /// let mut arr: [i32; 3] = [1, 2, 3];
98    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
99    /// let thin = ptr as *mut u8;
100    /// unsafe {
101    ///     ptr = thin.add(8).with_metadata_of(ptr);
102    ///     # assert_eq!(*(ptr as *mut i32), 3);
103    ///     println!("{:?}", &*ptr); // will print "3"
104    /// }
105    /// ```
106    ///
107    /// # *Incorrect* usage
108    ///
109    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
110    /// address allowed by `self`.
111    ///
112    /// ```rust,no_run
113    /// #![feature(set_ptr_value)]
114    /// let mut x = 0u32;
115    /// let mut y = 1u32;
116    ///
117    /// let x = (&mut x) as *mut u32;
118    /// let y = (&mut y) as *mut u32;
119    ///
120    /// let offset = (x as usize - y as usize) / 4;
121    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
122    ///
123    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
124    /// println!("{:?}", unsafe { &*bad });
125    #[unstable(feature = "set_ptr_value", issue = "75091")]
126    #[must_use = "returns a new pointer rather than modifying its argument"]
127    #[inline]
128    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
129    where
130        U: ?Sized,
131    {
132        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
133    }
134
135    /// Changes constness without changing the type.
136    ///
137    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
138    /// refactored.
139    ///
140    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
141    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
142    /// coercion.
143    ///
144    /// [`cast_mut`]: pointer::cast_mut
145    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
146    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
147    #[rustc_diagnostic_item = "ptr_cast_const"]
148    #[inline(always)]
149    pub const fn cast_const(self) -> *const T {
150        self as _
151    }
152
    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance_mut], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // (A plain `self as usize` cast would not do here: the `as` cast has the side-effect of
        // exposing the provenance -- see `expose_provenance`.)
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }
186
187    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
188    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
189    ///
190    /// This is equivalent to `self as usize`, which semantically discards provenance information.
191    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
192    /// provenance as 'exposed', so on platforms that support it you can later call
193    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
194    ///
195    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
196    /// that help you to stay conformant with the Rust memory model. It is recommended to use
197    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
198    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
199    ///
200    /// On most platforms this will produce a value with the same bytes as the original pointer,
201    /// because all the bytes are dedicated to describing the address. Platforms which need to store
202    /// additional information in the pointer may not support this operation, since the 'expose'
203    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
204    /// available.
205    ///
206    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
207    ///
208    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
209    #[inline(always)]
210    #[stable(feature = "exposed_provenance", since = "1.84.0")]
211    pub fn expose_provenance(self) -> usize {
212        self.cast::<()>() as usize
213    }
214
215    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
216    /// `self`.
217    ///
218    /// This is similar to a `addr as *mut T` cast, but copies
219    /// the *provenance* of `self` to the new pointer.
220    /// This avoids the inherent ambiguity of the unary cast.
221    ///
222    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
223    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
224    ///
225    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
226    #[must_use]
227    #[inline]
228    #[stable(feature = "strict_provenance", since = "1.84.0")]
229    pub fn with_addr(self, addr: usize) -> Self {
230        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
231        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
232        // provenance.
233        let self_addr = self.addr() as isize;
234        let dest_addr = addr as isize;
235        let offset = dest_addr.wrapping_sub(self_addr);
236        self.wrapping_byte_offset(offset)
237    }
238
239    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
240    /// pointer's [provenance][crate::ptr#provenance].
241    ///
242    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
243    ///
244    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
245    #[must_use]
246    #[inline]
247    #[stable(feature = "strict_provenance", since = "1.84.0")]
248    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
249        self.with_addr(f(self.addr()))
250    }
251
252    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
253    ///
254    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
255    #[unstable(feature = "ptr_metadata", issue = "81513")]
256    #[inline]
257    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
258        (self.cast(), super::metadata(self))
259    }
260
261    /// Returns `None` if the pointer is null, or else returns a shared reference to
262    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
263    /// must be used instead.
264    ///
265    /// For the mutable counterpart see [`as_mut`].
266    ///
267    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
268    /// [`as_mut`]: #method.as_mut
269    ///
270    /// # Safety
271    ///
272    /// When calling this method, you have to ensure that *either* the pointer is null *or*
273    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
274    ///
275    /// # Panics during const evaluation
276    ///
277    /// This method will panic during const evaluation if the pointer cannot be
278    /// determined to be null or not. See [`is_null`] for more information.
279    ///
280    /// [`is_null`]: #method.is_null-1
281    ///
282    /// # Examples
283    ///
284    /// ```
285    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
286    ///
287    /// unsafe {
288    ///     if let Some(val_back) = ptr.as_ref() {
289    ///         println!("We got back the value: {val_back}!");
290    ///     }
291    /// }
292    /// ```
293    ///
294    /// # Null-unchecked version
295    ///
296    /// If you are sure the pointer can never be null and are looking for some kind of
297    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
298    /// dereference the pointer directly.
299    ///
300    /// ```
301    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
302    ///
303    /// unsafe {
304    ///     let val_back = &*ptr;
305    ///     println!("We got back the value: {val_back}!");
306    /// }
307    /// ```
308    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
309    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
310    #[inline]
311    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
312        // SAFETY: the caller must guarantee that `self` is valid for a
313        // reference if it isn't null.
314        if self.is_null() { None } else { unsafe { Some(&*self) } }
315    }
316
317    /// Returns a shared reference to the value behind the pointer.
318    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
319    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
320    ///
321    /// For the mutable counterpart see [`as_mut_unchecked`].
322    ///
323    /// [`as_ref`]: #method.as_ref
324    /// [`as_uninit_ref`]: #method.as_uninit_ref
325    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
326    ///
327    /// # Safety
328    ///
329    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
330    ///
331    /// # Examples
332    ///
333    /// ```
334    /// #![feature(ptr_as_ref_unchecked)]
335    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
336    ///
337    /// unsafe {
338    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
339    /// }
340    /// ```
341    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
342    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
343    #[inline]
344    #[must_use]
345    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
346        // SAFETY: the caller must guarantee that `self` is valid for a reference
347        unsafe { &*self }
348    }
349
350    /// Returns `None` if the pointer is null, or else returns a shared reference to
351    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
352    /// that the value has to be initialized.
353    ///
354    /// For the mutable counterpart see [`as_uninit_mut`].
355    ///
356    /// [`as_ref`]: pointer#method.as_ref-1
357    /// [`as_uninit_mut`]: #method.as_uninit_mut
358    ///
359    /// # Safety
360    ///
361    /// When calling this method, you have to ensure that *either* the pointer is null *or*
362    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
363    /// Note that because the created reference is to `MaybeUninit<T>`, the
364    /// source pointer can point to uninitialized memory.
365    ///
366    /// # Panics during const evaluation
367    ///
368    /// This method will panic during const evaluation if the pointer cannot be
369    /// determined to be null or not. See [`is_null`] for more information.
370    ///
371    /// [`is_null`]: #method.is_null-1
372    ///
373    /// # Examples
374    ///
375    /// ```
376    /// #![feature(ptr_as_uninit)]
377    ///
378    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
379    ///
380    /// unsafe {
381    ///     if let Some(val_back) = ptr.as_uninit_ref() {
382    ///         println!("We got back the value: {}!", val_back.assume_init());
383    ///     }
384    /// }
385    /// ```
386    #[inline]
387    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
388    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
389    where
390        T: Sized,
391    {
392        // SAFETY: the caller must guarantee that `self` meets all the
393        // requirements for a reference.
394        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
395    }
396
    #[doc = include_str!("./docs/offset.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(2, *ptr.offset(1));
    ///     assert_eq!(3, *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // Helper for the UB check below: returns whether `this + count * size`
        // can be computed without wrapping around the address space.
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    // The absolute address is not known at compile time, so the
                    // overflow check cannot be performed; report it as passing.
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The obtained pointer is valid for writes since the caller must
        // guarantee that it points to the same allocation as `self`.
        unsafe { intrinsics::offset(self, count) }
    }
454
455    /// Adds a signed offset in bytes to a pointer.
456    ///
457    /// `count` is in units of **bytes**.
458    ///
459    /// This is purely a convenience for casting to a `u8` pointer and
460    /// using [offset][pointer::offset] on it. See that method for documentation
461    /// and safety requirements.
462    ///
463    /// For non-`Sized` pointees this operation changes only the data pointer,
464    /// leaving the metadata untouched.
465    #[must_use]
466    #[inline(always)]
467    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
468    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
469    #[track_caller]
470    pub const unsafe fn byte_offset(self, count: isize) -> Self {
471        // SAFETY: the caller must uphold the safety contract for `offset`.
472        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
473    }
474
475    /// Adds a signed offset to a pointer using wrapping arithmetic.
476    ///
477    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
478    /// offset of `3 * size_of::<T>()` bytes.
479    ///
480    /// # Safety
481    ///
482    /// This operation itself is always safe, but using the resulting pointer is not.
483    ///
484    /// The resulting pointer "remembers" the [allocation] that `self` points to
485    /// (this is called "[Provenance](ptr/index.html#provenance)").
486    /// The pointer must not be used to read or write other allocations.
487    ///
488    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
489    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
490    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
491    /// `x` and `y` point into the same allocation.
492    ///
493    /// Compared to [`offset`], this method basically delays the requirement of staying within the
494    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
495    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
496    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
497    /// can be optimized better and is thus preferable in performance-sensitive code.
498    ///
499    /// The delayed check only considers the value of the pointer that was dereferenced, not the
500    /// intermediate values used during the computation of the final result. For example,
501    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
502    /// words, leaving the allocation and then re-entering it later is permitted.
503    ///
504    /// [`offset`]: #method.offset
505    /// [allocation]: crate::ptr#allocation
506    ///
507    /// # Examples
508    ///
509    /// ```
510    /// // Iterate using a raw pointer in increments of two elements
511    /// let mut data = [1u8, 2, 3, 4, 5];
512    /// let mut ptr: *mut u8 = data.as_mut_ptr();
513    /// let step = 2;
514    /// let end_rounded_up = ptr.wrapping_offset(6);
515    ///
516    /// while ptr != end_rounded_up {
517    ///     unsafe {
518    ///         *ptr = 0;
519    ///     }
520    ///     ptr = ptr.wrapping_offset(step);
521    /// }
522    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
523    /// ```
524    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
525    #[must_use = "returns a new pointer rather than modifying its argument"]
526    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
527    #[inline(always)]
528    pub const fn wrapping_offset(self, count: isize) -> *mut T
529    where
530        T: Sized,
531    {
532        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
533        unsafe { intrinsics::arith_offset(self, count) as *mut T }
534    }
535
536    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
537    ///
538    /// `count` is in units of **bytes**.
539    ///
540    /// This is purely a convenience for casting to a `u8` pointer and
541    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
542    /// for documentation.
543    ///
544    /// For non-`Sized` pointees this operation changes only the data pointer,
545    /// leaving the metadata untouched.
546    #[must_use]
547    #[inline(always)]
548    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
549    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
550    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
551        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
552    }
553
554    /// Masks out bits of the pointer according to a mask.
555    ///
556    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
557    ///
558    /// For non-`Sized` pointees this operation changes only the data pointer,
559    /// leaving the metadata untouched.
560    ///
561    /// ## Examples
562    ///
563    /// ```
564    /// #![feature(ptr_mask)]
565    /// let mut v = 17_u32;
566    /// let ptr: *mut u32 = &mut v;
567    ///
568    /// // `u32` is 4 bytes aligned,
569    /// // which means that lower 2 bits are always 0.
570    /// let tag_mask = 0b11;
571    /// let ptr_mask = !tag_mask;
572    ///
573    /// // We can store something in these lower bits
574    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
575    ///
576    /// // Get the "tag" back
577    /// let tag = tagged_ptr.addr() & tag_mask;
578    /// assert_eq!(tag, 0b10);
579    ///
580    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
581    /// // To get original pointer `mask` can be used:
582    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
583    /// assert_eq!(unsafe { *masked_ptr }, 17);
584    ///
585    /// unsafe { *masked_ptr = 0 };
586    /// assert_eq!(v, 0);
587    /// ```
588    #[unstable(feature = "ptr_mask", issue = "98290")]
589    #[must_use = "returns a new pointer rather than modifying its argument"]
590    #[inline(always)]
591    pub fn mask(self, mask: usize) -> *mut T {
592        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
593    }
594
    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref`]: pointer#method.as_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either*
    /// the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }
650
651    /// Returns a unique reference to the value behind the pointer.
652    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
653    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
654    ///
655    /// For the shared counterpart see [`as_ref_unchecked`].
656    ///
657    /// [`as_mut`]: #method.as_mut
658    /// [`as_uninit_mut`]: #method.as_uninit_mut
659    /// [`as_ref_unchecked`]: #method.as_mut_unchecked
660    ///
661    /// # Safety
662    ///
663    /// When calling this method, you have to ensure that
664    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
665    ///
666    /// # Examples
667    ///
668    /// ```
669    /// #![feature(ptr_as_ref_unchecked)]
670    /// let mut s = [1, 2, 3];
671    /// let ptr: *mut u32 = s.as_mut_ptr();
672    /// let first_value = unsafe { ptr.as_mut_unchecked() };
673    /// *first_value = 4;
674    /// # assert_eq!(s, [4, 2, 3]);
675    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
676    /// ```
677    // FIXME: mention it in the docs for `as_mut` and `as_uninit_mut` once stabilized.
678    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
679    #[inline]
680    #[must_use]
681    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
682        // SAFETY: the caller must guarantee that `self` is valid for a reference
683        unsafe { &mut *self }
684    }
685
686    /// Returns `None` if the pointer is null, or else returns a unique reference to
687    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
688    /// that the value has to be initialized.
689    ///
690    /// For the shared counterpart see [`as_uninit_ref`].
691    ///
692    /// [`as_mut`]: #method.as_mut
693    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
694    ///
695    /// # Safety
696    ///
697    /// When calling this method, you have to ensure that *either* the pointer is null *or*
698    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
699    ///
700    /// # Panics during const evaluation
701    ///
702    /// This method will panic during const evaluation if the pointer cannot be
703    /// determined to be null or not. See [`is_null`] for more information.
704    ///
705    /// [`is_null`]: #method.is_null-1
706    #[inline]
707    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
708    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
709    where
710        T: Sized,
711    {
712        // SAFETY: the caller must guarantee that `self` meets all the
713        // requirements for a reference.
714        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
715    }
716
717    /// Returns whether two pointers are guaranteed to be equal.
718    ///
719    /// At runtime this function behaves like `Some(self == other)`.
720    /// However, in some contexts (e.g., compile-time evaluation),
721    /// it is not always possible to determine equality of two pointers, so this function may
722    /// spuriously return `None` for pointers that later actually turn out to have its equality known.
723    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
724    ///
725    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
726    /// version and unsafe code must not
727    /// rely on the result of this function for soundness. It is suggested to only use this function
728    /// for performance optimizations where spurious `None` return values by this function do not
729    /// affect the outcome, but just the performance.
730    /// The consequences of using this method to make runtime and compile-time code behave
731    /// differently have not been explored. This method should not be used to introduce such
732    /// differences, and it should also not be stabilized before we have a better understanding
733    /// of this issue.
734    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
735    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
736    #[inline]
737    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
738    where
739        T: Sized,
740    {
741        (self as *const T).guaranteed_eq(other as _)
742    }
743
744    /// Returns whether two pointers are guaranteed to be inequal.
745    ///
746    /// At runtime this function behaves like `Some(self != other)`.
747    /// However, in some contexts (e.g., compile-time evaluation),
748    /// it is not always possible to determine inequality of two pointers, so this function may
749    /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
750    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
751    ///
752    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
753    /// version and unsafe code must not
754    /// rely on the result of this function for soundness. It is suggested to only use this function
755    /// for performance optimizations where spurious `None` return values by this function do not
756    /// affect the outcome, but just the performance.
757    /// The consequences of using this method to make runtime and compile-time code behave
758    /// differently have not been explored. This method should not be used to introduce such
759    /// differences, and it should also not be stabilized before we have a better understanding
760    /// of this issue.
761    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
762    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
763    #[inline]
764    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
765    where
766        T: Sized,
767    {
768        (self as *const T).guaranteed_ne(other as _)
769    }
770
771    /// Calculates the distance between two pointers within the same allocation. The returned value is in
772    /// units of T: the distance in bytes divided by `size_of::<T>()`.
773    ///
774    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
775    /// except that it has a lot more opportunities for UB, in exchange for the compiler
776    /// better understanding what you are doing.
777    ///
778    /// The primary motivation of this method is for computing the `len` of an array/slice
779    /// of `T` that you are currently representing as a "start" and "end" pointer
780    /// (and "end" is "one past the end" of the array).
781    /// In that case, `end.offset_from(start)` gets you the length of the array.
782    ///
783    /// All of the following safety requirements are trivially satisfied for this usecase.
784    ///
785    /// [`offset`]: pointer#method.offset-1
786    ///
787    /// # Safety
788    ///
789    /// If any of the following conditions are violated, the result is Undefined Behavior:
790    ///
791    /// * `self` and `origin` must either
792    ///
793    ///   * point to the same address, or
794    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
795    ///     the two pointers must be in bounds of that object. (See below for an example.)
796    ///
797    /// * The distance between the pointers, in bytes, must be an exact multiple
798    ///   of the size of `T`.
799    ///
800    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
801    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
802    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
803    /// than `isize::MAX` bytes.
804    ///
805    /// The requirement for pointers to be derived from the same allocation is primarily
806    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
807    /// objects is not known at compile-time. However, the requirement also exists at
808    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
809    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
810    /// origin as isize) / size_of::<T>()`.
811    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
812    ///
813    /// [`add`]: #method.add
814    /// [allocation]: crate::ptr#allocation
815    ///
816    /// # Panics
817    ///
818    /// This function panics if `T` is a Zero-Sized Type ("ZST").
819    ///
820    /// # Examples
821    ///
822    /// Basic usage:
823    ///
824    /// ```
825    /// let mut a = [0; 5];
826    /// let ptr1: *mut i32 = &mut a[1];
827    /// let ptr2: *mut i32 = &mut a[3];
828    /// unsafe {
829    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
830    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
831    ///     assert_eq!(ptr1.offset(2), ptr2);
832    ///     assert_eq!(ptr2.offset(-2), ptr1);
833    /// }
834    /// ```
835    ///
836    /// *Incorrect* usage:
837    ///
838    /// ```rust,no_run
839    /// let ptr1 = Box::into_raw(Box::new(0u8));
840    /// let ptr2 = Box::into_raw(Box::new(1u8));
841    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
842    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
843    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
844    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
845    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
846    /// // computing their offset is undefined behavior, even though
847    /// // they point to addresses that are in-bounds of the same object!
848    /// unsafe {
849    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
850    /// }
851    /// ```
852    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
853    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
854    #[inline(always)]
855    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
856    pub const unsafe fn offset_from(self, origin: *const T) -> isize
857    where
858        T: Sized,
859    {
860        // SAFETY: the caller must uphold the safety contract for `offset_from`.
861        unsafe { (self as *const T).offset_from(origin) }
862    }
863
864    /// Calculates the distance between two pointers within the same allocation. The returned value is in
865    /// units of **bytes**.
866    ///
867    /// This is purely a convenience for casting to a `u8` pointer and
868    /// using [`offset_from`][pointer::offset_from] on it. See that method for
869    /// documentation and safety requirements.
870    ///
871    /// For non-`Sized` pointees this operation considers only the data pointers,
872    /// ignoring the metadata.
873    #[inline(always)]
874    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
875    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
876    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
877    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
878        // SAFETY: the caller must uphold the safety contract for `offset_from`.
879        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
880    }
881
882    /// Calculates the distance between two pointers within the same allocation, *where it's known that
883    /// `self` is equal to or greater than `origin`*. The returned value is in
884    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
885    ///
886    /// This computes the same value that [`offset_from`](#method.offset_from)
887    /// would compute, but with the added precondition that the offset is
888    /// guaranteed to be non-negative.  This method is equivalent to
889    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
890    /// but it provides slightly more information to the optimizer, which can
891    /// sometimes allow it to optimize slightly better with some backends.
892    ///
893    /// This method can be thought of as recovering the `count` that was passed
894    /// to [`add`](#method.add) (or, with the parameters in the other order,
895    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
896    /// that their safety preconditions are met:
897    /// ```rust
898    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
899    /// ptr.offset_from_unsigned(origin) == count
900    /// # &&
901    /// origin.add(count) == ptr
902    /// # &&
903    /// ptr.sub(count) == origin
904    /// # } }
905    /// ```
906    ///
907    /// # Safety
908    ///
909    /// - The distance between the pointers must be non-negative (`self >= origin`)
910    ///
911    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
912    ///   apply to this method as well; see it for the full details.
913    ///
914    /// Importantly, despite the return type of this method being able to represent
915    /// a larger offset, it's still *not permitted* to pass pointers which differ
916    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
917    /// always be less than or equal to `isize::MAX as usize`.
918    ///
919    /// # Panics
920    ///
921    /// This function panics if `T` is a Zero-Sized Type ("ZST").
922    ///
923    /// # Examples
924    ///
925    /// ```
926    /// let mut a = [0; 5];
927    /// let p: *mut i32 = a.as_mut_ptr();
928    /// unsafe {
929    ///     let ptr1: *mut i32 = p.add(1);
930    ///     let ptr2: *mut i32 = p.add(3);
931    ///
932    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
933    ///     assert_eq!(ptr1.add(2), ptr2);
934    ///     assert_eq!(ptr2.sub(2), ptr1);
935    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
936    /// }
937    ///
938    /// // This would be incorrect, as the pointers are not correctly ordered:
939    /// // ptr1.offset_from(ptr2)
940    /// ```
941    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
942    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
943    #[inline]
944    #[track_caller]
945    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
946    where
947        T: Sized,
948    {
949        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
950        unsafe { (self as *const T).offset_from_unsigned(origin) }
951    }
952
953    /// Calculates the distance between two pointers within the same allocation, *where it's known that
954    /// `self` is equal to or greater than `origin`*. The returned value is in
955    /// units of **bytes**.
956    ///
957    /// This is purely a convenience for casting to a `u8` pointer and
958    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
959    /// See that method for documentation and safety requirements.
960    ///
961    /// For non-`Sized` pointees this operation considers only the data pointers,
962    /// ignoring the metadata.
963    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
964    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
965    #[inline]
966    #[track_caller]
967    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
968        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
969        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
970    }
971
    #[doc = include_str!("./docs/add.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s: String = "123".to_string();
    /// let ptr: *mut u8 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     assert_eq!('2', *ptr.add(1) as char);
    ///     assert_eq!('3', *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Precondition check: `this + count * size` must not overflow.
        // During const evaluation this always reports "ok" (`true`), because
        // the interpreter catches out-of-bounds arithmetic itself; the real
        // check only runs at runtime.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    // The byte offset must fit in `isize`, and adding it to the
                    // address must not wrap around the address space.
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
1026
1027    /// Adds an unsigned offset in bytes to a pointer.
1028    ///
1029    /// `count` is in units of bytes.
1030    ///
1031    /// This is purely a convenience for casting to a `u8` pointer and
1032    /// using [add][pointer::add] on it. See that method for documentation
1033    /// and safety requirements.
1034    ///
1035    /// For non-`Sized` pointees this operation changes only the data pointer,
1036    /// leaving the metadata untouched.
1037    #[must_use]
1038    #[inline(always)]
1039    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1040    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1041    #[track_caller]
1042    pub const unsafe fn byte_add(self, count: usize) -> Self {
1043        // SAFETY: the caller must uphold the safety contract for `add`.
1044        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
1045    }
1046
    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!('3', *end.sub(1) as char);
    ///     assert_eq!('2', *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Precondition check: `this - count * size` must not underflow.
        // During const evaluation this always reports "ok" (`true`), because
        // the interpreter catches out-of-bounds arithmetic itself; the real
        // check only runs at runtime.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    // The byte offset must fit in `isize`, and the address must
                    // be at least that large so the subtraction cannot wrap.
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
1138
1139    /// Subtracts an unsigned offset in bytes from a pointer.
1140    ///
1141    /// `count` is in units of bytes.
1142    ///
1143    /// This is purely a convenience for casting to a `u8` pointer and
1144    /// using [sub][pointer::sub] on it. See that method for documentation
1145    /// and safety requirements.
1146    ///
1147    /// For non-`Sized` pointees this operation changes only the data pointer,
1148    /// leaving the metadata untouched.
1149    #[must_use]
1150    #[inline(always)]
1151    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1152    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1153    #[track_caller]
1154    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1155        // SAFETY: the caller must uphold the safety contract for `sub`.
1156        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1157    }
1158
1159    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1160    ///
1161    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1162    /// offset of `3 * size_of::<T>()` bytes.
1163    ///
1164    /// # Safety
1165    ///
1166    /// This operation itself is always safe, but using the resulting pointer is not.
1167    ///
1168    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1169    /// be used to read or write other allocations.
1170    ///
1171    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1172    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1173    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1174    /// `x` and `y` point into the same allocation.
1175    ///
1176    /// Compared to [`add`], this method basically delays the requirement of staying within the
1177    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
1178    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1179    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1180    /// can be optimized better and is thus preferable in performance-sensitive code.
1181    ///
1182    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1183    /// intermediate values used during the computation of the final result. For example,
1184    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1185    /// allocation and then re-entering it later is permitted.
1186    ///
1187    /// [`add`]: #method.add
1188    /// [allocation]: crate::ptr#allocation
1189    ///
1190    /// # Examples
1191    ///
1192    /// ```
1193    /// // Iterate using a raw pointer in increments of two elements
1194    /// let data = [1u8, 2, 3, 4, 5];
1195    /// let mut ptr: *const u8 = data.as_ptr();
1196    /// let step = 2;
1197    /// let end_rounded_up = ptr.wrapping_add(6);
1198    ///
1199    /// // This loop prints "1, 3, 5, "
1200    /// while ptr != end_rounded_up {
1201    ///     unsafe {
1202    ///         print!("{}, ", *ptr);
1203    ///     }
1204    ///     ptr = ptr.wrapping_add(step);
1205    /// }
1206    /// ```
1207    #[stable(feature = "pointer_methods", since = "1.26.0")]
1208    #[must_use = "returns a new pointer rather than modifying its argument"]
1209    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1210    #[inline(always)]
1211    pub const fn wrapping_add(self, count: usize) -> Self
1212    where
1213        T: Sized,
1214    {
1215        self.wrapping_offset(count as isize)
1216    }
1217
1218    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1219    ///
1220    /// `count` is in units of bytes.
1221    ///
1222    /// This is purely a convenience for casting to a `u8` pointer and
1223    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1224    ///
1225    /// For non-`Sized` pointees this operation changes only the data pointer,
1226    /// leaving the metadata untouched.
1227    #[must_use]
1228    #[inline(always)]
1229    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1230    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1231    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1232        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1233    }
1234
1235    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1236    ///
1237    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1238    /// offset of `3 * size_of::<T>()` bytes.
1239    ///
1240    /// # Safety
1241    ///
1242    /// This operation itself is always safe, but using the resulting pointer is not.
1243    ///
1244    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1245    /// be used to read or write other allocations.
1246    ///
1247    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1248    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1249    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1250    /// `x` and `y` point into the same allocation.
1251    ///
1252    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1253    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
1254    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1255    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1256    /// can be optimized better and is thus preferable in performance-sensitive code.
1257    ///
1258    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1259    /// intermediate values used during the computation of the final result. For example,
1260    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1261    /// allocation and then re-entering it later is permitted.
1262    ///
1263    /// [`sub`]: #method.sub
1264    /// [allocation]: crate::ptr#allocation
1265    ///
1266    /// # Examples
1267    ///
1268    /// ```
1269    /// // Iterate using a raw pointer in increments of two elements (backwards)
1270    /// let data = [1u8, 2, 3, 4, 5];
1271    /// let mut ptr: *const u8 = data.as_ptr();
1272    /// let start_rounded_down = ptr.wrapping_sub(2);
1273    /// ptr = ptr.wrapping_add(4);
1274    /// let step = 2;
1275    /// // This loop prints "5, 3, 1, "
1276    /// while ptr != start_rounded_down {
1277    ///     unsafe {
1278    ///         print!("{}, ", *ptr);
1279    ///     }
1280    ///     ptr = ptr.wrapping_sub(step);
1281    /// }
1282    /// ```
1283    #[stable(feature = "pointer_methods", since = "1.26.0")]
1284    #[must_use = "returns a new pointer rather than modifying its argument"]
1285    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1286    #[inline(always)]
1287    pub const fn wrapping_sub(self, count: usize) -> Self
1288    where
1289        T: Sized,
1290    {
1291        self.wrapping_offset((count as isize).wrapping_neg())
1292    }
1293
1294    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1295    ///
1296    /// `count` is in units of bytes.
1297    ///
1298    /// This is purely a convenience for casting to a `u8` pointer and
1299    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1300    ///
1301    /// For non-`Sized` pointees this operation changes only the data pointer,
1302    /// leaving the metadata untouched.
1303    #[must_use]
1304    #[inline(always)]
1305    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1306    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1307    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1308        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1309    }
1310
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
1328
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        // Thin wrapper over the free function of the same name.
        unsafe { read_volatile(self) }
    }
1349
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        // Thin wrapper over the free function of the same name.
        unsafe { read_unaligned(self) }
    }
1369
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`]
    /// (i.e. `self` is the source).
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }
1389
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`]
    /// (i.e. `self` is the source).
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1409
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *destination* here, hence the flipped argument order below.
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(src, self, count) }
    }
1429
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *destination* here, hence the flipped argument order below.
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(src, self, count) }
    }
1449
    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        // Works for `T: ?Sized` (no `T: Sized` bound), unlike most methods above.
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { drop_in_place(self) }
    }
1461
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: crate::ptr::write()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // Thin wrapper over the free function; `val` is moved into place without
        // running the old value's destructor.
        // SAFETY: the caller must uphold the safety contract for `write`.
        unsafe { write(self, val) }
    }
1479
    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
    #[doc(alias = "memset")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // Note the byte-level semantics: `count` is in units of `T`, but every
        // *byte* of the region is set to `val`.
        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
        unsafe { write_bytes(self, val, count) }
    }
1498
    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // Not `const`: volatile accesses are observable side effects and cannot
        // be evaluated at compile time.
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    }
1519
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // Thin wrapper over the free function, which tolerates unaligned `self`.
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    }
1539
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: crate::ptr::replace()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
    #[inline(always)]
    pub const unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // Thin wrapper over the free function; ownership of the old value is
        // transferred to the caller, so no destructor runs here.
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }
1556
    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: crate::ptr::swap()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
    #[inline(always)]
    pub const unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // Thin wrapper over the free function, which handles overlapping pointers.
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    }
1574
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let mut x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_mut_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     *u16_ptr = 0;
    ///
    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        // Validate eagerly so the unsafe helper below only ever sees a
        // power-of-two alignment.
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        // (`usize::MAX` is the "cannot align" sentinel, so no promise is made for it.)
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(
                self.wrapping_add(ret).cast_const().cast(),
                align,
            );
        }

        ret
    }
1638
    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        // `align_of::<T>()` is always a power of two, so the power-of-two check
        // inside `is_aligned_to` cannot panic here.
        self.is_aligned_to(align_of::<T>())
    }
1663
1664    /// Returns whether the pointer is aligned to `align`.
1665    ///
1666    /// For non-`Sized` pointees this operation considers only the data pointer,
1667    /// ignoring the metadata.
1668    ///
1669    /// # Panics
1670    ///
1671    /// The function panics if `align` is not a power-of-two (this includes 0).
1672    ///
1673    /// # Examples
1674    ///
1675    /// ```
1676    /// #![feature(pointer_is_aligned_to)]
1677    ///
1678    /// // On some platforms, the alignment of i32 is less than 4.
1679    /// #[repr(align(4))]
1680    /// struct AlignedI32(i32);
1681    ///
1682    /// let mut data = AlignedI32(42);
1683    /// let ptr = &mut data as *mut AlignedI32;
1684    ///
1685    /// assert!(ptr.is_aligned_to(1));
1686    /// assert!(ptr.is_aligned_to(2));
1687    /// assert!(ptr.is_aligned_to(4));
1688    ///
1689    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1690    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1691    ///
1692    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1693    /// ```
1694    #[must_use]
1695    #[inline]
1696    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1697    pub fn is_aligned_to(self, align: usize) -> bool {
1698        if !align.is_power_of_two() {
1699            panic!("is_aligned_to: align is not a power-of-two");
1700        }
1701
1702        self.addr() & (align - 1) == 0
1703    }
1704}
1705
1706impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The element count is the wide pointer's metadata; no dereference occurs.
        metadata(self)
    }
1728
1729    /// Returns `true` if the raw slice has a length of 0.
1730    ///
1731    /// # Examples
1732    ///
1733    /// ```
1734    /// use std::ptr;
1735    ///
1736    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1737    /// assert!(!slice.is_empty());
1738    /// ```
1739    #[inline(always)]
1740    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1741    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1742    pub const fn is_empty(self) -> bool {
1743        self.len() == 0
1744    }
1745
1746    /// Gets a raw, mutable pointer to the underlying array.
1747    ///
1748    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1749    #[unstable(feature = "slice_as_array", issue = "133508")]
1750    #[inline]
1751    #[must_use]
1752    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
1753        if self.len() == N {
1754            let me = self.as_mut_ptr() as *mut [T; N];
1755            Some(me)
1756        } else {
1757            None
1758        }
1759    }
1760
    /// Divides one mutable raw slice into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocation].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// Since `len` being in-bounds it is not a safety invariant of `*mut [T]` the
    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
    /// The explicit bounds check is only as useful as `len` is correct.
    ///
    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
    /// [in-bounds]: #method.add
    /// [allocation]: crate::ptr#allocation
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    /// #![feature(slice_ptr_get)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// let ptr = &mut v as *mut [_];
    /// unsafe {
    ///     let (left, right) = ptr.split_at_mut(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    /// }
    /// ```
    #[inline(always)]
    #[track_caller]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
        // Best-effort bounds check against the metadata-recorded length.
        assert!(mid <= self.len());
        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
        unsafe { self.split_at_mut_unchecked(mid) }
    }
1810
    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocation].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// [in-bounds]: #method.add
    /// [out-of-bounds index]: #method.add
    /// [allocation]: crate::ptr#allocation
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// // scoped to restrict the lifetime of the borrows
    /// unsafe {
    ///     let ptr = &mut v as *mut [_];
    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    ///     (&mut *left)[1] = 2;
    ///     (&mut *right)[1] = 4;
    /// }
    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
    /// ```
    #[inline(always)]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
        let tail = unsafe { ptr.add(mid) };
        // The two halves partition the original: `mid` elements from `ptr`,
        // then `len - mid` elements starting at `tail`.
        (
            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
        )
    }
1859
1860    /// Returns a raw pointer to the slice's buffer.
1861    ///
1862    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1863    ///
1864    /// # Examples
1865    ///
1866    /// ```rust
1867    /// #![feature(slice_ptr_get)]
1868    /// use std::ptr;
1869    ///
1870    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1871    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
1872    /// ```
1873    #[inline(always)]
1874    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1875    pub const fn as_mut_ptr(self) -> *mut T {
1876        self as *mut T
1877    }
1878
    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline(always)]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
    where
        I: SliceIndex<[T]>,
    {
        // Delegates to the `SliceIndex` impl, which also determines the output
        // type (`*mut T` for `usize`, `*mut [T]` for ranges).
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
    }
1908
    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocation]!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // In this branch the pointer is known non-null, as `from_raw_parts` requires.
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
1966
    /// Returns `None` if the pointer is null, or else returns a unique slice to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocation]!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // In this branch the pointer is known non-null, as `from_raw_parts_mut` requires.
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
        }
    }
2024}
2025
2026impl<T, const N: usize> *mut [T; N] {
2027    /// Returns a raw pointer to the array's buffer.
2028    ///
2029    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
2030    ///
2031    /// # Examples
2032    ///
2033    /// ```rust
2034    /// #![feature(array_ptr_get)]
2035    /// use std::ptr;
2036    ///
2037    /// let arr: *mut [i8; 3] = ptr::null_mut();
2038    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
2039    /// ```
2040    #[inline]
2041    #[unstable(feature = "array_ptr_get", issue = "119834")]
2042    pub const fn as_mut_ptr(self) -> *mut T {
2043        self as *mut T
2044    }
2045
    /// Returns a raw pointer to a mutable slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let mut arr = [1, 2, 5];
    /// let ptr: *mut [i32; 3] = &mut arr;
    /// unsafe {
    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
    /// }
    /// assert_eq!(arr, [3, 4, 5]);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_slice(self) -> *mut [T] {
        // Implicit unsizing coercion `*mut [T; N] -> *mut [T]`: the length `N`
        // becomes the slice pointer's metadata.
        self
    }
2065}
2066
/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*mut T) -> bool {
        // Built-in raw-pointer `==`; the lint is allowed because comparing wide
        // pointers (address plus metadata) is intended here.
        *self == *other
    }
}
2076
/// Pointer equality is an equivalence relation.
// Marker impl only: promotes the `PartialEq` impl above to a full equivalence.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
2080
2081/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2082#[stable(feature = "rust1", since = "1.0.0")]
2083impl<T: ?Sized> Ord for *mut T {
2084    #[inline]
2085    #[allow(ambiguous_wide_pointer_comparisons)]
2086    fn cmp(&self, other: &*mut T) -> Ordering {
2087        if self < other {
2088            Less
2089        } else if self == other {
2090            Equal
2091        } else {
2092            Greater
2093        }
2094    }
2095}
2096
/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        // Pointer ordering is total, so this always returns `Some`.
        Some(self.cmp(other))
    }

    // The individual comparison operators are overridden below to use the
    // built-in raw-pointer comparisons directly instead of going through `cmp`.

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}
2130
#[stable(feature = "raw_ptr_default", since = "1.88.0")]
impl<T: ?Sized + Thin> Default for *mut T {
    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
    fn default() -> Self {
        // The `Thin` bound restricts this to pointers without metadata, so a
        // bare null data pointer is a complete value.
        crate::ptr::null_mut()
    }
}