strict provenance: rename addr → bare_addr #121588

Closed · wants to merge 1 commit
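This PR mechanically renames the strict-provenance method `addr` to `bare_addr` on raw pointers, updating the compiler, the standard library, diagnostics, and docs. As orientation, here is a minimal sketch, not taken from the PR itself, of how the renamed API is intended to be used; it assumes a nightly toolchain with `#![feature(strict_provenance)]` and this PR applied.

```rust
#![feature(strict_provenance)] // nightly-only; `bare_addr` assumes this PR's rename

fn main() {
    let x = 42u32;
    let p: *const u32 = &x;

    // `bare_addr` (formerly `addr`) returns the address *without* provenance;
    // the resulting usize cannot simply be cast back into a usable pointer.
    let a: usize = p.bare_addr();

    // To get a pointer at some address, re-attach provenance from an existing
    // pointer with `with_addr` (or `map_addr`) instead of an `as` cast.
    let q: *const u32 = p.with_addr(a);
    assert_eq!(unsafe { *q }, 42);

    // Bit-tagging stays inside strict provenance as long as the address is
    // only ever manipulated through `map_addr`.
    let tagged = p.map_addr(|addr| addr | 0b1);
    assert_eq!(tagged.bare_addr() & 0b1, 0b1);
    let untagged = tagged.map_addr(|addr| addr & !0b1);
    assert_eq!(unsafe { *untagged }, 42);
}
```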
16 changes: 8 additions & 8 deletions compiler/rustc_arena/src/lib.rs
@@ -172,7 +172,7 @@ impl<T> TypedArena<T> {
fn can_allocate(&self, additional: usize) -> bool {
// FIXME: this should *likely* use `offset_from`, but more
// investigation is needed (including running tests in miri).
-let available_bytes = self.end.get().addr() - self.ptr.get().addr();
+let available_bytes = self.end.get().bare_addr() - self.ptr.get().bare_addr();
let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
available_bytes >= additional_bytes
}
@@ -245,7 +245,7 @@ impl<T> TypedArena<T> {
if mem::needs_drop::<T>() {
// FIXME: this should *likely* use `offset_from`, but more
// investigation is needed (including running tests in miri).
-let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
+let used_bytes = self.ptr.get().bare_addr() - last_chunk.start().bare_addr();
last_chunk.entries = used_bytes / mem::size_of::<T>();
}

@@ -271,9 +271,9 @@ impl<T> TypedArena<T> {
// chunks.
fn clear_last_chunk(&self, last_chunk: &mut ArenaChunk<T>) {
// Determine how much was filled.
-let start = last_chunk.start().addr();
+let start = last_chunk.start().bare_addr();
// We obtain the value of the pointer to the first uninitialized element.
-let end = self.ptr.get().addr();
+let end = self.ptr.get().bare_addr();
// We then calculate the number of elements to be dropped in the last chunk,
// which is the filled area's length.
let diff = if mem::size_of::<T>() == 0 {
@@ -396,11 +396,11 @@ impl DroplessArena {
self.start.set(chunk.start());

// Align the end to DROPLESS_ALIGNMENT.
-let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+let end = align_down(chunk.end().bare_addr(), DROPLESS_ALIGNMENT);

// Make sure we don't go past `start`. This should not happen since the allocation
// should be at least DROPLESS_ALIGNMENT - 1 bytes.
-debug_assert!(chunk.start().addr() <= end);
+debug_assert!(chunk.start().bare_addr() <= end);

self.end.set(chunk.end().with_addr(end));

@@ -415,9 +415,9 @@ impl DroplessArena {
// This loop executes once or twice: if allocation fails the first
// time, the `grow` ensures it will succeed the second time.
loop {
-let start = self.start.get().addr();
+let start = self.start.get().bare_addr();
let old_end = self.end.get();
-let end = old_end.addr();
+let end = old_end.bare_addr();

// Align allocated bytes so that `self.end` stays aligned to
// DROPLESS_ALIGNMENT.
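The arena hunks above compute byte counts by subtracting bare addresses of pointers into the same chunk; the FIXME comments note that `offset_from` is the more principled alternative. Below is a small sketch of both, illustrative only, assuming a nightly toolchain with this PR's rename.

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

fn main() {
    let buf = [0u64; 8];
    let start: *const u64 = buf.as_ptr();
    // SAFETY: one-past-the-end of the same allocation is a valid pointer.
    let end: *const u64 = unsafe { start.add(buf.len()) };

    // What the arena code does: subtract provenance-free addresses.
    let available_bytes = end.bare_addr() - start.bare_addr();
    assert_eq!(available_bytes, buf.len() * core::mem::size_of::<u64>());

    // What the FIXMEs suggest: `offset_from`, which stays in pointer space.
    // SAFETY: both pointers are derived from the same allocated object.
    let available_elems = unsafe { end.offset_from(start) } as usize;
    assert_eq!(available_elems, buf.len());
}
```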
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/mono_item.rs
@@ -138,7 +138,7 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
fn to_raw_string(&self) -> String {
match *self {
MonoItem::Fn(instance) => {
format!("Fn({:?}, {})", instance.def, instance.args.as_ptr().addr())
format!("Fn({:?}, {})", instance.def, instance.args.as_ptr().bare_addr())
}
MonoItem::Static(id) => format!("Static({id:?})"),
MonoItem::GlobalAsm(id) => format!("GlobalAsm({id:?})"),
2 changes: 1 addition & 1 deletion compiler/rustc_data_structures/src/tagged_ptr/copy.rs
@@ -104,7 +104,7 @@ where
#[inline]
pub fn tag(&self) -> T {
// Unpack the tag, according to the `self.packed` encoding scheme
-let tag = self.packed.addr().get() >> Self::TAG_BIT_SHIFT;
+let tag = self.packed.bare_addr().get() >> Self::TAG_BIT_SHIFT;

// Safety:
// The shift retrieves the original value from `T::into_usize`,
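The tagged-pointer hunk above (and the `GenericArg`/`Term` hunks further down) reads a tag out of a pointer's low bits via the bare address and a mask. Here is a simplified raw-pointer version of that pattern; it is a sketch rather than rustc's actual `CopyTaggedPtr`, which works on `NonNull` and a `Tag` trait, and it again assumes this PR's rename on nightly.

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

const TAG_MASK: usize = 0b11; // low bits freed by an alignment of at least 4

fn pack(ptr: *mut u64, tag: usize) -> *mut u64 {
    debug_assert_eq!(ptr.bare_addr() & TAG_MASK, 0, "pointer must be 4-aligned");
    debug_assert!(tag <= TAG_MASK);
    // `map_addr` ORs the tag into the low bits while keeping provenance intact.
    ptr.map_addr(|a| a | tag)
}

fn unpack(tagged: *mut u64) -> (*mut u64, usize) {
    // As in the compiler hunks: read the tag from the bare address,
    // then mask it off before dereferencing.
    let tag = tagged.bare_addr() & TAG_MASK;
    (tagged.map_addr(|a| a & !TAG_MASK), tag)
}

fn main() {
    let mut x = 5u64;
    let (p, tag) = unpack(pack(&mut x, 0b10));
    assert_eq!(tag, 0b10);
    unsafe { *p += 1 };
    assert_eq!(x, 6);
}
```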
4 changes: 2 additions & 2 deletions compiler/rustc_hir_typeck/messages.ftl
@@ -90,8 +90,8 @@ hir_typeck_lossy_provenance_int2ptr =

hir_typeck_lossy_provenance_ptr2int =
under strict provenance it is considered bad style to cast pointer `{$expr_ty}` to integer `{$cast_ty}`
-.suggestion = use `.addr()` to obtain the address of a pointer
-.help = if you can't comply with strict provenance and need to expose the pointer provenance you can use `.expose_addr()` instead
+.suggestion = use `.bare_addr()` to obtain the address of a pointer without its provenance -- but note that this cannot be cast back to a pointer later; you need to use `with_addr` instead
+.help = if you need to cast the address back to a pointer later, use `.expose_addr()` instead

hir_typeck_method_call_on_unknown_raw_pointee =
cannot call a method on a raw pointer with an unknown pointee type
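The reworded lint above steers users toward `.bare_addr()` plus `with_addr` for pure address work, and toward `.expose_addr()` only when the integer really must be cast back to a pointer later. Roughly, and as a hedged sketch assuming the nightly provenance feature gates of this era plus the PR's rename:

```rust
#![feature(strict_provenance, exposed_provenance)] // nightly sketch; gate names may vary by toolchain

fn main() {
    let x = 7u8;
    let p: *const u8 = &x;

    // What the suggestion produces instead of `p as usize`:
    let addr = p.bare_addr();     // address only; provenance is discarded
    let back = p.with_addr(addr); // provenance re-attached from `p`
    assert_eq!(unsafe { *back }, 7);

    // What the help message points to when the integer must round-trip,
    // e.g. through FFI or an integer-only data structure:
    let exposed: usize = p.expose_addr();
    let q: *const u8 = core::ptr::from_exposed_addr(exposed);
    assert_eq!(unsafe { *q }, 7);
}
```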
8 changes: 4 additions & 4 deletions compiler/rustc_hir_typeck/src/errors.rs
@@ -256,28 +256,28 @@ pub enum LossyProvenancePtr2IntSuggestion<'tcx> {
NeedsParensCast {
#[suggestion_part(code = "(")]
expr_span: Span,
#[suggestion_part(code = ").addr() as {cast_ty}")]
#[suggestion_part(code = ").bare_addr() as {cast_ty}")]
cast_span: Span,
cast_ty: Ty<'tcx>,
},
#[multipart_suggestion(hir_typeck_suggestion, applicability = "maybe-incorrect")]
NeedsParens {
#[suggestion_part(code = "(")]
expr_span: Span,
#[suggestion_part(code = ").addr()")]
#[suggestion_part(code = ").bare_addr()")]
cast_span: Span,
},
#[suggestion(
hir_typeck_suggestion,
code = ".addr() as {cast_ty}",
code = ".bare_addr() as {cast_ty}",
applicability = "maybe-incorrect"
)]
NeedsCast {
#[primary_span]
cast_span: Span,
cast_ty: Ty<'tcx>,
},
#[suggestion(hir_typeck_suggestion, code = ".addr()", applicability = "maybe-incorrect")]
#[suggestion(hir_typeck_suggestion, code = ".bare_addr()", applicability = "maybe-incorrect")]
Other {
#[primary_span]
cast_span: Span,
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/ty/generic_args.rs
@@ -149,7 +149,7 @@ impl<'tcx> GenericArg<'tcx> {
// pointers were originally created from `Interned` types in `pack()`,
// and this is just going in the other direction.
unsafe {
-match self.ptr.addr().get() & TAG_MASK {
+match self.ptr.bare_addr().get() & TAG_MASK {
REGION_TAG => GenericArgKind::Lifetime(ty::Region(Interned::new_unchecked(
ptr.cast::<ty::RegionKind<'tcx>>().as_ref(),
))),
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/ty/mod.rs
@@ -624,7 +624,7 @@ impl<'tcx> Term<'tcx> {
// pointers were originally created from `Interned` types in `pack()`,
// and this is just going in the other direction.
unsafe {
-match self.ptr.addr().get() & TAG_MASK {
+match self.ptr.bare_addr().get() & TAG_MASK {
TYPE_TAG => TermKind::Ty(Ty(Interned::new_unchecked(
ptr.cast::<WithCachedTypeInfo<ty::TyKind<'tcx>>>().as_ref(),
))),
2 changes: 1 addition & 1 deletion library/alloc/src/rc.rs
@@ -2840,7 +2840,7 @@ impl<T, A: Allocator> Weak<T, A> {
}

pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
-(ptr.cast::<()>()).addr() == usize::MAX
+(ptr.cast::<()>()).bare_addr() == usize::MAX
}

/// Helper type to allow accessing the reference counts without
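`is_dangling` above works purely on the address: `Weak::new()` stores a sentinel pointer whose address is `usize::MAX`, so "dangling" reduces to a bare-address comparison. The illustration below leans on that internal sentinel, so treat it as documentation of the helper rather than a public guarantee; it assumes this PR's rename on nightly.

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

// Mirrors the helper in the hunk above.
fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    ptr.cast::<()>().bare_addr() == usize::MAX
}

fn main() {
    use std::rc::{Rc, Weak};

    let empty: Weak<u32> = Weak::new();   // never had an allocation
    assert!(is_dangling(empty.as_ptr())); // sentinel address: usize::MAX

    let rc = Rc::new(3u32);
    assert!(!is_dangling(Rc::downgrade(&rc).as_ptr()));
}
```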
2 changes: 1 addition & 1 deletion library/alloc/src/vec/into_iter.rs
@@ -222,7 +222,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = if T::IS_ZST {
-self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
+self.end.bare_addr().wrapping_sub(self.ptr.as_ptr().bare_addr())
} else {
unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
};
4 changes: 2 additions & 2 deletions library/core/src/hash/mod.rs
@@ -956,7 +956,7 @@ mod impls {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
let (address, metadata) = self.to_raw_parts();
-state.write_usize(address.addr());
+state.write_usize(address.bare_addr());
metadata.hash(state);
}
}
@@ -966,7 +966,7 @@ mod impls {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
let (address, metadata) = self.to_raw_parts();
-state.write_usize(address.addr());
+state.write_usize(address.bare_addr());
metadata.hash(state);
}
}
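The `Hash` impls above feed only the bare address and the pointer metadata into the hasher. For a thin pointer the metadata is `()`, which hashes to nothing, so the pointer hash should match hashing the bare address directly; a small check of that, again as a nightly sketch under this PR's rename:

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn main() {
    let x = 9u16;
    let p: *const u16 = &x;

    // Hash the pointer through its `Hash` impl (address + metadata)...
    let mut h1 = DefaultHasher::new();
    p.hash(&mut h1);

    // ...and hash just the bare address; for a thin pointer the `()` metadata
    // contributes nothing, so the two digests should agree.
    let mut h2 = DefaultHasher::new();
    h2.write_usize(p.bare_addr());

    assert_eq!(h1.finish(), h2.finish());
}
```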
6 changes: 3 additions & 3 deletions library/core/src/intrinsics.rs
@@ -1267,7 +1267,7 @@ extern "rust-intrinsic" {
/// - If the code just wants to store data of arbitrary type in some buffer and needs to pick a
/// type for that buffer, it can use [`MaybeUninit`][crate::mem::MaybeUninit].
/// - If the code actually wants to work on the address the pointer points to, it can use `as`
-/// casts or [`ptr.addr()`][pointer::addr].
+/// casts or [`ptr.bare_addr()`][pointer::bare_addr].
///
/// Turning a `*mut T` into an `&mut T`:
///
@@ -2781,8 +2781,8 @@ pub(crate) fn is_valid_allocation_size(size: usize, len: usize) -> bool {
/// `count * size` do *not* overlap.
#[inline]
pub(crate) fn is_nonoverlapping(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
-let src_usize = src.addr();
-let dst_usize = dst.addr();
+let src_usize = src.bare_addr();
+let dst_usize = dst.bare_addr();
let Some(size) = size.checked_mul(count) else {
crate::panicking::panic_nounwind(
"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
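The `is_nonoverlapping` hunk is cut off mid-function by the collapsed context. Below is a self-contained sketch of the check it performs, comparing the distance between the two bare start addresses against the byte size of the ranges; it is not a verbatim copy of the library code, and the panic message is only illustrative.

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

/// Returns whether the ranges `[src, src + size * count)` and
/// `[dst, dst + size * count)` do not overlap.
fn is_nonoverlapping(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
    let src_usize = src.bare_addr();
    let dst_usize = dst.bare_addr();
    let Some(size) = size.checked_mul(count) else {
        panic!("is_nonoverlapping: `size_of::<T>() * count` overflows a usize");
    };
    // The ranges overlap exactly when the distance between their starts
    // is smaller than the length of one range.
    src_usize.abs_diff(dst_usize) >= size
}

fn main() {
    let a = [0u8; 16];
    let lo = a.as_ptr().cast::<()>();
    let hi = unsafe { a.as_ptr().add(8) }.cast::<()>();
    assert!(!is_nonoverlapping(lo, hi, 1, 16)); // 16-byte ranges starting 8 apart overlap
    assert!(is_nonoverlapping(lo, hi, 1, 8));   // 8-byte ranges starting 8 apart only touch
}
```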
16 changes: 8 additions & 8 deletions library/core/src/ptr/const_ptr.rs
@@ -35,7 +35,7 @@ impl<T: ?Sized> *const T {
pub const fn is_null(self) -> bool {
#[inline]
fn runtime_impl(ptr: *const u8) -> bool {
-ptr.addr() == 0
+ptr.bare_addr() == 0
}

#[inline]
@@ -203,7 +203,7 @@ impl<T: ?Sized> *const T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
-pub fn addr(self) -> usize {
+pub fn bare_addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
// provenance).
@@ -223,7 +223,7 @@ impl<T: ?Sized> *const T {
/// Provenance][super#strict-provenance] rules. Supporting
/// [`from_exposed_addr`][] complicates specification and reasoning and may not be supported by
/// tools that help you to stay conformant with the Rust memory model, so it is recommended to
-/// use [`addr`][pointer::addr] wherever possible.
+/// use [`bare_addr`][pointer::bare_addr] wherever possible.
///
/// On most platforms this will produce a value with the same bytes as the original pointer,
/// because all the bytes are dedicated to describing the address. Platforms which need to store
@@ -264,7 +264,7 @@ impl<T: ?Sized> *const T {
// In the mean-time, this operation is defined to be "as if" it was
// a wrapping_offset, so we can emulate it as such. This should properly
// restore pointer provenance even under today's compiler.
-let self_addr = self.addr() as isize;
+let self_addr = self.bare_addr() as isize;
let dest_addr = addr as isize;
let offset = dest_addr.wrapping_sub(self_addr);

@@ -282,7 +282,7 @@ impl<T: ?Sized> *const T {
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
-self.with_addr(f(self.addr()))
+self.with_addr(f(self.bare_addr()))
}

/// Decompose a (possibly wide) pointer into its data pointer and metadata components.
@@ -592,7 +592,7 @@ impl<T: ?Sized> *const T {
/// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
///
/// // Get the "tag" back
-/// let tag = tagged_ptr.addr() & tag_mask;
+/// let tag = tagged_ptr.bare_addr() & tag_mask;
/// assert_eq!(tag, 0b10);
///
/// // Note that `tagged_ptr` is unaligned, it's UB to read from it.
@@ -664,7 +664,7 @@ impl<T: ?Sized> *const T {
/// runtime and may be exploited by optimizations. If you wish to compute the difference between
/// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
/// origin as isize) / mem::size_of::<T>()`.
-// FIXME: recommend `addr()` instead of `as usize` once that is stable.
+// FIXME: recommend `bare_addr()` instead of `as usize` once that is stable.
///
/// [`add`]: #method.add
/// [allocated object]: crate::ptr#allocated-object
@@ -1611,7 +1611,7 @@ impl<T: ?Sized> *const T {

#[inline]
fn runtime_impl(ptr: *const (), align: usize) -> bool {
-ptr.addr() & (align - 1) == 0
+ptr.bare_addr() & (align - 1) == 0
}

#[inline]
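The `with_addr` hunk above is documented as behaving "as if" by a `wrapping_offset` in address space. Here is a minimal sketch of that emulation for thin pointers; it is illustrative only and not the library's exact implementation, which also restores pointer metadata.

```rust
#![feature(strict_provenance)] // sketch; `bare_addr` assumes this PR's rename

/// Emulates `ptr.with_addr(addr)` for a thin pointer: compute the offset in
/// address space and apply it with `wrapping_offset`, which keeps `ptr`'s
/// provenance attached to the result.
fn with_addr_emulated<T>(ptr: *const T, addr: usize) -> *const T {
    let self_addr = ptr.bare_addr() as isize;
    let dest_addr = addr as isize;
    let offset = dest_addr.wrapping_sub(self_addr);
    ptr.cast::<u8>().wrapping_offset(offset).cast::<T>()
}

fn main() {
    let pair = [1u32, 2u32];
    let p: *const u32 = pair.as_ptr();

    // Move to the second element by address, keeping the original provenance.
    let q = with_addr_emulated(p, p.bare_addr() + core::mem::size_of::<u32>());
    assert_eq!(unsafe { *q }, 2);
}
```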
10 changes: 5 additions & 5 deletions library/core/src/ptr/mod.rs
@@ -202,7 +202,7 @@
//! we provide the [`map_addr`][] method.
//!
//! To help make it clear that code is "following" Strict Provenance semantics, we also provide an
-//! [`addr`][] method which promises that the returned address is not part of a
+//! [`bare_addr`][] method which promises that the returned address is not part of a
//! pointer-usize-pointer roundtrip. In the future we may provide a lint for pointer<->integer
//! casts to help you audit if your code conforms to strict provenance.
//!
@@ -239,7 +239,7 @@
//! let tagged = ptr.map_addr(|addr| addr | HAS_DATA);
//!
//! // Check the flag:
-//! if tagged.addr() & HAS_DATA != 0 {
+//! if tagged.bare_addr() & HAS_DATA != 0 {
//! // Untag and read the pointer
//! let data = *tagged.map_addr(|addr| addr & FLAG_MASK);
//! assert_eq!(data, 17);
@@ -294,7 +294,7 @@
//! particular platform, and it's an open question as to how to specify this (if at all).
//! Notably, [CHERI][] relies on a compression scheme that can't handle a
//! pointer getting offset "too far" out of bounds. If this happens, the address
-//! returned by `addr` will be the value you expect, but the provenance will get invalidated
+//! returned by `bare_addr` will be the value you expect, but the provenance will get invalidated
//! and using it to read/write will fault. The details of this are architecture-specific
//! and based on alignment, but the buffer on either side of the pointer's range is pretty
//! generous (think kilobytes, not bytes).
@@ -342,7 +342,7 @@
//!
//! Exposed Provenance is provided by the [`expose_addr`] and [`from_exposed_addr`] methods, which
//! are meant to replace `as` casts between pointers and integers. [`expose_addr`] is a lot like
-//! [`addr`], but additionally adds the provenance of the pointer to a global list of 'exposed'
+//! [`bare_addr`], but additionally adds the provenance of the pointer to a global list of 'exposed'
//! provenances. (This list is purely conceptual, it exists for the purpose of specifying Rust but
//! is not materialized in actual executions, except in tools like [Miri].) [`from_exposed_addr`]
//! can be used to construct a pointer with one of these previously 'exposed' provenances.
@@ -372,7 +372,7 @@
//! [`wrapping_offset`]: pointer::wrapping_offset
//! [`with_addr`]: pointer::with_addr
//! [`map_addr`]: pointer::map_addr
-//! [`addr`]: pointer::addr
+//! [`bare_addr`]: pointer::bare_addr
//! [`ptr::dangling`]: core::ptr::dangling
//! [`expose_addr`]: pointer::expose_addr
//! [`from_exposed_addr`]: from_exposed_addr
16 changes: 8 additions & 8 deletions library/core/src/ptr/mut_ptr.rs
@@ -35,7 +35,7 @@ impl<T: ?Sized> *mut T {
pub const fn is_null(self) -> bool {
#[inline]
fn runtime_impl(ptr: *mut u8) -> bool {
-ptr.addr() == 0
+ptr.bare_addr() == 0
}

#[inline]
@@ -211,7 +211,7 @@ impl<T: ?Sized> *mut T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
-pub fn addr(self) -> usize {
+pub fn bare_addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
// provenance).
@@ -231,7 +231,7 @@ impl<T: ?Sized> *mut T {
/// Provenance][super#strict-provenance] rules. Supporting
/// [`from_exposed_addr_mut`][] complicates specification and reasoning and may not be supported
/// by tools that help you to stay conformant with the Rust memory model, so it is recommended
-/// to use [`addr`][pointer::addr] wherever possible.
+/// to use [`bare_addr`][pointer::bare_addr] wherever possible.
///
/// On most platforms this will produce a value with the same bytes as the original pointer,
/// because all the bytes are dedicated to describing the address. Platforms which need to store
@@ -272,7 +272,7 @@ impl<T: ?Sized> *mut T {
// In the mean-time, this operation is defined to be "as if" it was
// a wrapping_offset, so we can emulate it as such. This should properly
// restore pointer provenance even under today's compiler.
-let self_addr = self.addr() as isize;
+let self_addr = self.bare_addr() as isize;
let dest_addr = addr as isize;
let offset = dest_addr.wrapping_sub(self_addr);

@@ -290,7 +290,7 @@ impl<T: ?Sized> *mut T {
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
-self.with_addr(f(self.addr()))
+self.with_addr(f(self.bare_addr()))
}

/// Decompose a (possibly wide) pointer into its data pointer and metadata components.
@@ -607,7 +607,7 @@ impl<T: ?Sized> *mut T {
/// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
///
/// // Get the "tag" back
-/// let tag = tagged_ptr.addr() & tag_mask;
+/// let tag = tagged_ptr.bare_addr() & tag_mask;
/// assert_eq!(tag, 0b10);
///
/// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
@@ -839,7 +839,7 @@ impl<T: ?Sized> *mut T {
/// runtime and may be exploited by optimizations. If you wish to compute the difference between
/// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
/// origin as isize) / mem::size_of::<T>()`.
-// FIXME: recommend `addr()` instead of `as usize` once that is stable.
+// FIXME: recommend `bare_addr()` instead of `as usize` once that is stable.
///
/// [`add`]: #method.add
/// [allocated object]: crate::ptr#allocated-object
@@ -1884,7 +1884,7 @@ impl<T: ?Sized> *mut T {

#[inline]
fn runtime_impl(ptr: *mut (), align: usize) -> bool {
-ptr.addr() & (align - 1) == 0
+ptr.bare_addr() & (align - 1) == 0
}

#[inline]