diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index a10da08ddf347..fdca6b5654098 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -341,7 +341,7 @@ impl<'tcx> BorrowExplanation<'tcx> { } } } else if let LocalInfo::BlockTailTemp(info) = local_decl.local_info() { - let sp = info.span.find_oldest_ancestor_in_same_ctxt(); + let sp = info.span.find_ancestor_not_from_macro().unwrap_or(info.span); if info.tail_result_is_ignored { // #85581: If the first mutable borrow's scope contains // the second borrow, this suggestion isn't helpful. diff --git a/compiler/rustc_codegen_cranelift/patches/0027-sysroot_tests-128bit-atomic-operations.patch b/compiler/rustc_codegen_cranelift/patches/0027-sysroot_tests-128bit-atomic-operations.patch index f6e6bbc2387c2..f3d1d5c43ea10 100644 --- a/compiler/rustc_codegen_cranelift/patches/0027-sysroot_tests-128bit-atomic-operations.patch +++ b/compiler/rustc_codegen_cranelift/patches/0027-sysroot_tests-128bit-atomic-operations.patch @@ -19,7 +19,7 @@ index 1e336bf..35e6f54 100644 -#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))] #![cfg_attr(test, feature(cfg_select))] #![feature(alloc_layout_extra)] - #![feature(array_chunks)] + #![feature(array_ptr_get)] diff --git a/coretests/tests/atomic.rs b/coretests/tests/atomic.rs index b735957..ea728b6 100644 --- a/coretests/tests/atomic.rs diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 96501844264ce..4ccc2e5a97c77 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -148,6 +148,11 @@ impl From for LifetimeSyntax { /// `LifetimeSource::OutlivesBound` or `LifetimeSource::PreciseCapturing` /// — there's no way to "elide" these lifetimes. #[derive(Debug, Copy, Clone, HashStable_Generic)] +// Raise the alignment to at least 4 bytes; this is relied on in other parts of the compiler (for pointer tagging): +// https://github.com/rust-lang/rust/blob/ce5fdd7d42aba9a2925692e11af2bd39cf37798a/compiler/rustc_data_structures/src/tagged_ptr.rs#L163 +// Removing this `repr(align(4))` will cause the compiler to fail to build on platforms like `m68k` Linux, where the alignment of u32 and usize is only 2. +// Since `repr(align)` may only raise alignment, this has no effect on platforms where the alignment is already sufficient.
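Note (not part of the patch): the comment above relies on the fact that once a type's alignment is at least 4, the two low bits of any address of that type are zero and can carry a tag. A standalone sketch, with a made-up `Tagged` type standing in for `Lifetime`:

```rust
// Standalone illustration: with alignment of at least 4, the two low bits of
// an address are always zero, so a tagged-pointer scheme can pack a 2-bit tag
// there and mask it off again before use.
#[repr(align(4))]
struct Tagged(u32);

fn main() {
    let value = Tagged(7);
    let addr = &value as *const Tagged as usize;

    assert_eq!(std::mem::align_of::<Tagged>(), 4);
    assert_eq!(addr & 0b11, 0); // the low two bits are free for a tag

    let with_tag = addr | 0b01; // pack a tag into the low bits
    let without_tag = with_tag & !0b11; // strip the tag before the pointer is used
    assert_eq!(without_tag, addr);
    assert_eq!(value.0, 7);
}
```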
+#[repr(align(4))] pub struct Lifetime { #[stable_hasher(ignore)] pub hir_id: HirId, diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 33ae4f6c45cf1..278f6e56ec5b3 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -1395,7 +1395,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .macro_backtrace() .any(|x| matches!(x.kind, ExpnKind::Macro(MacroKind::Attr | MacroKind::Derive, ..))) { - let span = expr.span.find_oldest_ancestor_in_same_ctxt(); + let span = expr + .span + .find_ancestor_not_from_extern_macro(&self.tcx.sess.source_map()) + .unwrap_or(expr.span); let mut sugg = if self.precedence(expr) >= ExprPrecedence::Unambiguous { vec![(span.shrink_to_hi(), ".into()".to_owned())] @@ -2062,7 +2065,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { None => sugg.to_string(), }; - let span = expr.span.find_oldest_ancestor_in_same_ctxt(); + let span = expr + .span + .find_ancestor_not_from_extern_macro(&self.tcx.sess.source_map()) + .unwrap_or(expr.span); err.span_suggestion_verbose(span.shrink_to_hi(), msg, sugg, Applicability::HasPlaceholders); true } diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 11df071f0686f..00e40b515a327 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -185,7 +185,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults { let mut op_warned = false; if let Some(must_use_op) = must_use_op { - let span = expr.span.find_oldest_ancestor_in_same_ctxt(); + let span = expr.span.find_ancestor_not_from_macro().unwrap_or(expr.span); cx.emit_span_lint( UNUSED_MUST_USE, expr.span, @@ -511,7 +511,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults { ); } MustUsePath::Def(span, def_id, reason) => { - let span = span.find_oldest_ancestor_in_same_ctxt(); + let span = span.find_ancestor_not_from_macro().unwrap_or(*span); cx.emit_span_lint( UNUSED_MUST_USE, span, diff --git a/compiler/rustc_middle/src/thir.rs b/compiler/rustc_middle/src/thir.rs index 730c1147684b4..3dd6d2c892869 100644 --- a/compiler/rustc_middle/src/thir.rs +++ b/compiler/rustc_middle/src/thir.rs @@ -838,6 +838,8 @@ pub enum PatKind<'tcx> { /// * integer, bool, char or float (represented as a valtree), which will be handled by /// exhaustiveness to cover exactly its own value, similar to `&str`, but these values are /// much simpler. + /// * raw pointers derived from integers; other raw pointers will have already resulted in an + /// error. /// * `String`, if `string_deref_patterns` is enabled. Constant { value: mir::Const<'tcx>, diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs index dbc67da37b53c..3f72ccd9f89dc 100644 --- a/compiler/rustc_span/src/lib.rs +++ b/compiler/rustc_span/src/lib.rs @@ -716,12 +716,17 @@ impl Span { (!ctxt.is_root()).then(|| ctxt.outer_expn_data().call_site) } - /// Walk down the expansion ancestors to find a span that's contained within `outer`. /// - /// The span returned by this method may have a different [`SyntaxContext`] as `outer`. + /// Find the first ancestor span that's contained within `outer`. /// + /// This method traverses the macro expansion ancestors until it finds the first span + /// that's contained within `outer`. + /// + /// The span returned by this method may have a different [`SyntaxContext`] than `outer`.
/// If you need to extend the span, use [`find_ancestor_inside_same_ctxt`] instead, /// because joining spans with different syntax contexts can create unexpected results. /// + /// This is used to find the span of the macro call when a parent expr span, i.e. `outer`, is known. + /// /// [`find_ancestor_inside_same_ctxt`]: Self::find_ancestor_inside_same_ctxt pub fn find_ancestor_inside(mut self, outer: Span) -> Option { while !outer.contains(self) { @@ -730,8 +735,10 @@ impl Span { Some(self) } - /// Walk down the expansion ancestors to find a span with the same [`SyntaxContext`] as - /// `other`. + /// Find the first ancestor span with the same [`SyntaxContext`] as `other`. + /// + /// This method traverses the macro expansion ancestors until it finds a span + /// that has the same [`SyntaxContext`] as `other`. /// /// Like [`find_ancestor_inside_same_ctxt`], but specifically for when spans might not /// overlap. Take care when using this, and prefer [`find_ancestor_inside`] or @@ -747,9 +754,12 @@ impl Span { Some(self) } - /// Walk down the expansion ancestors to find a span that's contained within `outer` and + /// Find the first ancestor span that's contained within `outer` and /// has the same [`SyntaxContext`] as `outer`. /// + /// This method traverses the macro expansion ancestors until it finds a span + /// that is both contained within `outer` and has the same [`SyntaxContext`] as `outer`. + /// /// This method is the combination of [`find_ancestor_inside`] and /// [`find_ancestor_in_same_ctxt`] and should be preferred when extending the returned span. /// If you do not need to modify the span, use [`find_ancestor_inside`] instead. @@ -763,43 +773,43 @@ impl Span { Some(self) } - /// Recursively walk down the expansion ancestors to find the oldest ancestor span with the same - /// [`SyntaxContext`] the initial span. + /// Find the first ancestor span that does not come from an external macro. /// - /// This method is suitable for peeling through *local* macro expansions to find the "innermost" - /// span that is still local and shares the same [`SyntaxContext`]. For example, given + /// This method traverses the macro expansion ancestors until it finds a span + /// that is either from user-written code or from a local macro (defined in the current crate). /// - /// ```ignore (illustrative example, contains type error) - /// macro_rules! outer { - /// ($x: expr) => { - /// inner!($x) - /// } - /// } + /// External macros are those defined in dependencies or the standard library. + /// This method is useful for reporting errors in user-controllable code and avoiding + /// diagnostics inside external macros. /// - /// macro_rules! inner { - /// ($x: expr) => { - /// format!("error: {}", $x) - /// //~^ ERROR mismatched types - /// } - /// } + /// # See also /// - /// fn bar(x: &str) -> Result<(), Box> { - /// Err(outer!(x)) - /// } - /// ``` + /// - [`Self::find_ancestor_not_from_macro`] + /// - [`Self::in_external_macro`] + pub fn find_ancestor_not_from_extern_macro(mut self, sm: &SourceMap) -> Option { + while self.in_external_macro(sm) { + self = self.parent_callsite()?; + } + Some(self) + } + + /// Find the first ancestor span that does not come from any macro expansion. /// - /// if provided the initial span of `outer!(x)` inside `bar`, this method will recurse - /// the parent callsites until we reach `format!("error: {}", $x)`, at which point it is the - /// oldest ancestor span that is both still local and shares the same [`SyntaxContext`] as the - /// initial span. 
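Note (not part of the patch): the method being removed here walked ancestors only while the syntax context stayed the same; the replacement helpers instead walk parent call sites until a predicate holds. A toy model with hypothetical types, mirroring the `span.find_ancestor_not_from_macro().unwrap_or(span)` pattern used at the call sites earlier in this diff:

```rust
// Toy model (hypothetical names, not rustc's real types) of the ancestor walk
// the new helpers perform: hop to the macro call site until the span is usable
// for a suggestion, or give up if there are no more ancestors.
struct FakeSpan<'a> {
    from_expansion: bool,
    parent_callsite: Option<&'a FakeSpan<'a>>,
}

fn find_ancestor_not_from_macro<'a>(mut s: &'a FakeSpan<'a>) -> Option<&'a FakeSpan<'a>> {
    while s.from_expansion {
        s = s.parent_callsite?; // no more ancestors -> None
    }
    Some(s)
}

fn main() {
    let user_code = FakeSpan { from_expansion: false, parent_callsite: None };
    let macro_generated = FakeSpan { from_expansion: true, parent_callsite: Some(&user_code) };

    // Mirrors the call-site pattern in this patch:
    // `span.find_ancestor_not_from_macro().unwrap_or(span)`.
    let chosen = find_ancestor_not_from_macro(&macro_generated).unwrap_or(&macro_generated);
    assert!(!chosen.from_expansion);
}
```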
- pub fn find_oldest_ancestor_in_same_ctxt(self) -> Span { - let mut cur = self; - while cur.eq_ctxt(self) - && let Some(parent_callsite) = cur.parent_callsite() - { - cur = parent_callsite; + /// This method traverses the macro expansion ancestors until it finds a span + /// that originates from user-written code rather than any macro-generated code. + /// + /// This method is useful for reporting errors at the exact location users wrote code + /// and providing suggestions at directly editable locations. + /// + /// # See also + /// + /// - [`Self::find_ancestor_not_from_extern_macro`] + /// - [`Span::from_expansion`] + pub fn find_ancestor_not_from_macro(mut self) -> Option { + while self.from_expansion() { + self = self.parent_callsite()?; } - cur + Some(self) } /// Edition of the crate from which this span came. diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 6b6e4df4cba72..c091e496c5090 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -94,7 +94,6 @@ // tidy-alphabetical-start #![feature(alloc_layout_extra)] #![feature(allocator_api)] -#![feature(array_chunks)] #![feature(array_into_iter_constructors)] #![feature(array_windows)] #![feature(ascii_char)] diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index b4da56578c894..ce9f967cc387a 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -16,10 +16,6 @@ use core::cmp::Ordering::{self, Less}; use core::mem::MaybeUninit; #[cfg(not(no_global_oom_handling))] use core::ptr; -#[unstable(feature = "array_chunks", issue = "74985")] -pub use core::slice::ArrayChunks; -#[unstable(feature = "array_chunks", issue = "74985")] -pub use core::slice::ArrayChunksMut; #[unstable(feature = "array_windows", issue = "75027")] pub use core::slice::ArrayWindows; #[stable(feature = "inherent_ascii_escape", since = "1.60.0")] diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index a189c00a6b61d..d58240f3051e0 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -787,12 +787,12 @@ impl String { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "str_from_utf16_endian", issue = "116258")] pub fn from_utf16le(v: &[u8]) -> Result { - if v.len() % 2 != 0 { + let (chunks, []) = v.as_chunks::<2>() else { return Err(FromUtf16Error(())); - } + }; match (cfg!(target_endian = "little"), unsafe { v.align_to::() }) { (true, ([], v, [])) => Self::from_utf16(v), - _ => char::decode_utf16(v.array_chunks::<2>().copied().map(u16::from_le_bytes)) + _ => char::decode_utf16(chunks.iter().copied().map(u16::from_le_bytes)) .collect::>() .map_err(|_| FromUtf16Error(())), } @@ -830,11 +830,11 @@ impl String { (true, ([], v, [])) => Self::from_utf16_lossy(v), (true, ([], v, [_remainder])) => Self::from_utf16_lossy(v) + "\u{FFFD}", _ => { - let mut iter = v.array_chunks::<2>(); - let string = char::decode_utf16(iter.by_ref().copied().map(u16::from_le_bytes)) + let (chunks, remainder) = v.as_chunks::<2>(); + let string = char::decode_utf16(chunks.iter().copied().map(u16::from_le_bytes)) .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) .collect(); - if iter.remainder().is_empty() { string } else { string + "\u{FFFD}" } + if remainder.is_empty() { string } else { string + "\u{FFFD}" } } } } @@ -862,12 +862,12 @@ impl String { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "str_from_utf16_endian", issue = "116258")] pub fn from_utf16be(v: &[u8]) -> Result { - if v.len() % 2 != 0 { + let (chunks, []) = v.as_chunks::<2>() else { return 
Err(FromUtf16Error(())); - } + }; match (cfg!(target_endian = "big"), unsafe { v.align_to::() }) { (true, ([], v, [])) => Self::from_utf16(v), - _ => char::decode_utf16(v.array_chunks::<2>().copied().map(u16::from_be_bytes)) + _ => char::decode_utf16(chunks.iter().copied().map(u16::from_be_bytes)) .collect::>() .map_err(|_| FromUtf16Error(())), } @@ -905,11 +905,11 @@ impl String { (true, ([], v, [])) => Self::from_utf16_lossy(v), (true, ([], v, [_remainder])) => Self::from_utf16_lossy(v) + "\u{FFFD}", _ => { - let mut iter = v.array_chunks::<2>(); - let string = char::decode_utf16(iter.by_ref().copied().map(u16::from_be_bytes)) + let (chunks, remainder) = v.as_chunks::<2>(); + let string = char::decode_utf16(chunks.iter().copied().map(u16::from_be_bytes)) .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) .collect(); - if iter.remainder().is_empty() { string } else { string + "\u{FFFD}" } + if remainder.is_empty() { string } else { string + "\u{FFFD}" } } } } diff --git a/library/core/src/mem/drop_guard.rs b/library/core/src/mem/drop_guard.rs new file mode 100644 index 0000000000000..47ccb69acc806 --- /dev/null +++ b/library/core/src/mem/drop_guard.rs @@ -0,0 +1,155 @@ +use crate::fmt::{self, Debug}; +use crate::mem::ManuallyDrop; +use crate::ops::{Deref, DerefMut}; + +/// Wrap a value and run a closure when dropped. +/// +/// This is useful for quickly creating destructors inline. +/// +/// # Examples +/// +/// ```rust +/// # #![allow(unused)] +/// #![feature(drop_guard)] +/// +/// use std::mem::DropGuard; +/// +/// { +/// // Create a new guard around a string that will +/// // print its value when dropped. +/// let s = String::from("Chashu likes tuna"); +/// let mut s = DropGuard::new(s, |s| println!("{s}")); +/// +/// // Modify the string contained in the guard. +/// s.push_str("!!!"); +/// +/// // The guard will be dropped here, printing: +/// // "Chashu likes tuna!!!" +/// } +/// ``` +#[unstable(feature = "drop_guard", issue = "144426")] +#[doc(alias = "ScopeGuard")] +#[doc(alias = "defer")] +pub struct DropGuard +where + F: FnOnce(T), +{ + inner: ManuallyDrop, + f: ManuallyDrop, +} + +impl DropGuard +where + F: FnOnce(T), +{ + /// Create a new instance of `DropGuard`. + /// + /// # Example + /// + /// ```rust + /// # #![allow(unused)] + /// #![feature(drop_guard)] + /// + /// use std::mem::DropGuard; + /// + /// let value = String::from("Chashu likes tuna"); + /// let guard = DropGuard::new(value, |s| println!("{s}")); + /// ``` + #[unstable(feature = "drop_guard", issue = "144426")] + #[must_use] + pub const fn new(inner: T, f: F) -> Self { + Self { inner: ManuallyDrop::new(inner), f: ManuallyDrop::new(f) } + } + + /// Consumes the `DropGuard`, returning the wrapped value. + /// + /// This will not execute the closure. This is implemented as an associated + /// function to prevent any potential conflicts with any other methods called + /// `into_inner` from the `Deref` and `DerefMut` impls. + /// + /// It is typically preferred to call this function instead of `mem::forget` + /// because it will return the stored value and drop variables captured + /// by the closure instead of leaking their owned resources.
+ /// + /// # Example + /// + /// ```rust + /// # #![allow(unused)] + /// #![feature(drop_guard)] + /// + /// use std::mem::DropGuard; + /// + /// let value = String::from("Nori likes chicken"); + /// let guard = DropGuard::new(value, |s| println!("{s}")); + /// assert_eq!(DropGuard::into_inner(guard), "Nori likes chicken"); + /// ``` + #[unstable(feature = "drop_guard", issue = "144426")] + #[inline] + pub fn into_inner(guard: Self) -> T { + // First we ensure that dropping the guard will not trigger + // its destructor + let mut guard = ManuallyDrop::new(guard); + + // Next we manually read the stored value from the guard. + // + // SAFETY: this is safe because we've taken ownership of the guard. + let value = unsafe { ManuallyDrop::take(&mut guard.inner) }; + + // Finally we drop the stored closure. We do this *after* having read + // the value, so that even if the closure's `drop` function panics, + // unwinding still tries to drop the value. + // + // SAFETY: this is safe because we've taken ownership of the guard. + unsafe { ManuallyDrop::drop(&mut guard.f) }; + value + } +} + +#[unstable(feature = "drop_guard", issue = "144426")] +impl Deref for DropGuard +where + F: FnOnce(T), +{ + type Target = T; + + fn deref(&self) -> &T { + &*self.inner + } +} + +#[unstable(feature = "drop_guard", issue = "144426")] +impl DerefMut for DropGuard +where + F: FnOnce(T), +{ + fn deref_mut(&mut self) -> &mut T { + &mut *self.inner + } +} + +#[unstable(feature = "drop_guard", issue = "144426")] +impl Drop for DropGuard +where + F: FnOnce(T), +{ + fn drop(&mut self) { + // SAFETY: `DropGuard` is in the process of being dropped. + let inner = unsafe { ManuallyDrop::take(&mut self.inner) }; + + // SAFETY: `DropGuard` is in the process of being dropped. + let f = unsafe { ManuallyDrop::take(&mut self.f) }; + + f(inner); + } +} + +#[unstable(feature = "drop_guard", issue = "144426")] +impl Debug for DropGuard +where + T: Debug, + F: FnOnce(T), +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index 33407637ab3ff..db4c8e9e55150 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -21,6 +21,10 @@ mod transmutability; #[unstable(feature = "transmutability", issue = "99571")] pub use transmutability::{Assume, TransmuteFrom}; +mod drop_guard; +#[unstable(feature = "drop_guard", issue = "144426")] +pub use drop_guard::DropGuard; + // This one has to be a re-export (rather than wrapping the underlying intrinsic) so that we can do // the special magic "types have equal size" check at the call site. 
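Note (not part of the patch): with `DropGuard` defined above and re-exported through `std::mem`, deferred cleanup can be written as a small scope guard. A usage sketch, assuming a nightly toolchain with the `drop_guard` feature from this patch:

```rust
#![feature(drop_guard)]

use std::mem::DropGuard;

fn main() {
    let mut events: Vec<&str> = Vec::new();
    {
        // The guard derefs to the wrapped value, and the closure runs when the
        // guard goes out of scope, including on early returns and unwinding.
        let mut log = DropGuard::new(&mut events, |log| log.push("scope exited"));
        log.push("doing work");
    }
    assert_eq!(events, ["doing work", "scope exited"]);
}
```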
#[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index dbe3999b4a433..1a2a5182567b4 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -974,9 +974,10 @@ pub const fn dangling_mut() -> *mut T { #[must_use] #[inline(always)] #[stable(feature = "exposed_provenance", since = "1.84.0")] +#[rustc_const_unstable(feature = "const_exposed_provenance", issue = "144538")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[allow(fuzzy_provenance_casts)] // this *is* the explicit provenance API one should use instead -pub fn with_exposed_provenance(addr: usize) -> *const T { +pub const fn with_exposed_provenance(addr: usize) -> *const T { addr as *const T } @@ -1014,9 +1015,10 @@ pub fn with_exposed_provenance(addr: usize) -> *const T { #[must_use] #[inline(always)] #[stable(feature = "exposed_provenance", since = "1.84.0")] +#[rustc_const_unstable(feature = "const_exposed_provenance", issue = "144538")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[allow(fuzzy_provenance_casts)] // this *is* the explicit provenance API one should use instead -pub fn with_exposed_provenance_mut(addr: usize) -> *mut T { +pub const fn with_exposed_provenance_mut(addr: usize) -> *mut T { addr as *mut T } diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index 33132dcc7148d..ae910e0525209 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -2301,255 +2301,6 @@ impl ExactSizeIterator for ArrayWindows<'_, T, N> { } } -/// An iterator over a slice in (non-overlapping) chunks (`N` elements at a -/// time), starting at the beginning of the slice. -/// -/// When the slice len is not evenly divided by the chunk size, the last -/// up to `N-1` elements will be omitted but can be retrieved from -/// the [`remainder`] function from the iterator. -/// -/// This struct is created by the [`array_chunks`] method on [slices]. -/// -/// # Example -/// -/// ``` -/// #![feature(array_chunks)] -/// -/// let slice = ['l', 'o', 'r', 'e', 'm']; -/// let mut iter = slice.array_chunks::<2>(); -/// assert_eq!(iter.next(), Some(&['l', 'o'])); -/// assert_eq!(iter.next(), Some(&['r', 'e'])); -/// assert_eq!(iter.next(), None); -/// ``` -/// -/// [`array_chunks`]: slice::array_chunks -/// [`remainder`]: ArrayChunks::remainder -/// [slices]: slice -#[derive(Debug)] -#[unstable(feature = "array_chunks", issue = "74985")] -#[must_use = "iterators are lazy and do nothing unless consumed"] -pub struct ArrayChunks<'a, T: 'a, const N: usize> { - iter: Iter<'a, [T; N]>, - rem: &'a [T], -} - -impl<'a, T, const N: usize> ArrayChunks<'a, T, N> { - #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] - #[inline] - pub(super) const fn new(slice: &'a [T]) -> Self { - let (array_slice, rem) = slice.as_chunks(); - Self { iter: array_slice.iter(), rem } - } - - /// Returns the remainder of the original slice that is not going to be - /// returned by the iterator. The returned slice has at most `N-1` - /// elements. 
- #[must_use] - #[unstable(feature = "array_chunks", issue = "74985")] - pub fn remainder(&self) -> &'a [T] { - self.rem - } -} - -// FIXME(#26925) Remove in favor of `#[derive(Clone)]` -#[unstable(feature = "array_chunks", issue = "74985")] -impl Clone for ArrayChunks<'_, T, N> { - fn clone(&self) -> Self { - ArrayChunks { iter: self.iter.clone(), rem: self.rem } - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl<'a, T, const N: usize> Iterator for ArrayChunks<'a, T, N> { - type Item = &'a [T; N]; - - #[inline] - fn next(&mut self) -> Option<&'a [T; N]> { - self.iter.next() - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } - - #[inline] - fn count(self) -> usize { - self.iter.count() - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - self.iter.nth(n) - } - - #[inline] - fn last(self) -> Option { - self.iter.last() - } - - unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> &'a [T; N] { - // SAFETY: The safety guarantees of `__iterator_get_unchecked` are - // transferred to the caller. - unsafe { self.iter.__iterator_get_unchecked(i) } - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunks<'a, T, N> { - #[inline] - fn next_back(&mut self) -> Option<&'a [T; N]> { - self.iter.next_back() - } - - #[inline] - fn nth_back(&mut self, n: usize) -> Option { - self.iter.nth_back(n) - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl ExactSizeIterator for ArrayChunks<'_, T, N> { - fn is_empty(&self) -> bool { - self.iter.is_empty() - } -} - -#[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for ArrayChunks<'_, T, N> {} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl FusedIterator for ArrayChunks<'_, T, N> {} - -#[doc(hidden)] -#[unstable(feature = "array_chunks", issue = "74985")] -unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunks<'a, T, N> {} - -#[doc(hidden)] -#[unstable(feature = "array_chunks", issue = "74985")] -unsafe impl<'a, T, const N: usize> TrustedRandomAccessNoCoerce for ArrayChunks<'a, T, N> { - const MAY_HAVE_SIDE_EFFECT: bool = false; -} - -/// An iterator over a slice in (non-overlapping) mutable chunks (`N` elements -/// at a time), starting at the beginning of the slice. -/// -/// When the slice len is not evenly divided by the chunk size, the last -/// up to `N-1` elements will be omitted but can be retrieved from -/// the [`into_remainder`] function from the iterator. -/// -/// This struct is created by the [`array_chunks_mut`] method on [slices]. 
-/// -/// # Example -/// -/// ``` -/// #![feature(array_chunks)] -/// -/// let mut slice = ['l', 'o', 'r', 'e', 'm']; -/// let iter = slice.array_chunks_mut::<2>(); -/// ``` -/// -/// [`array_chunks_mut`]: slice::array_chunks_mut -/// [`into_remainder`]: ../../std/slice/struct.ArrayChunksMut.html#method.into_remainder -/// [slices]: slice -#[derive(Debug)] -#[unstable(feature = "array_chunks", issue = "74985")] -#[must_use = "iterators are lazy and do nothing unless consumed"] -pub struct ArrayChunksMut<'a, T: 'a, const N: usize> { - iter: IterMut<'a, [T; N]>, - rem: &'a mut [T], -} - -impl<'a, T, const N: usize> ArrayChunksMut<'a, T, N> { - #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] - #[inline] - pub(super) const fn new(slice: &'a mut [T]) -> Self { - let (array_slice, rem) = slice.as_chunks_mut(); - Self { iter: array_slice.iter_mut(), rem } - } - - /// Returns the remainder of the original slice that is not going to be - /// returned by the iterator. The returned slice has at most `N-1` - /// elements. - #[must_use = "`self` will be dropped if the result is not used"] - #[unstable(feature = "array_chunks", issue = "74985")] - pub fn into_remainder(self) -> &'a mut [T] { - self.rem - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl<'a, T, const N: usize> Iterator for ArrayChunksMut<'a, T, N> { - type Item = &'a mut [T; N]; - - #[inline] - fn next(&mut self) -> Option<&'a mut [T; N]> { - self.iter.next() - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } - - #[inline] - fn count(self) -> usize { - self.iter.count() - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - self.iter.nth(n) - } - - #[inline] - fn last(self) -> Option { - self.iter.last() - } - - unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> &'a mut [T; N] { - // SAFETY: The safety guarantees of `__iterator_get_unchecked` are transferred to - // the caller. - unsafe { self.iter.__iterator_get_unchecked(i) } - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunksMut<'a, T, N> { - #[inline] - fn next_back(&mut self) -> Option<&'a mut [T; N]> { - self.iter.next_back() - } - - #[inline] - fn nth_back(&mut self, n: usize) -> Option { - self.iter.nth_back(n) - } -} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl ExactSizeIterator for ArrayChunksMut<'_, T, N> { - fn is_empty(&self) -> bool { - self.iter.is_empty() - } -} - -#[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for ArrayChunksMut<'_, T, N> {} - -#[unstable(feature = "array_chunks", issue = "74985")] -impl FusedIterator for ArrayChunksMut<'_, T, N> {} - -#[doc(hidden)] -#[unstable(feature = "array_chunks", issue = "74985")] -unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunksMut<'a, T, N> {} - -#[doc(hidden)] -#[unstable(feature = "array_chunks", issue = "74985")] -unsafe impl<'a, T, const N: usize> TrustedRandomAccessNoCoerce for ArrayChunksMut<'a, T, N> { - const MAY_HAVE_SIDE_EFFECT: bool = false; -} - /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a /// time), starting at the end of the slice. 
/// diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 6fe5affc48be7..1f34a7931d247 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -52,8 +52,6 @@ pub use index::SliceIndex; pub use index::{range, try_range}; #[unstable(feature = "array_windows", issue = "75027")] pub use iter::ArrayWindows; -#[unstable(feature = "array_chunks", issue = "74985")] -pub use iter::{ArrayChunks, ArrayChunksMut}; #[stable(feature = "slice_group_by", since = "1.77.0")] pub use iter::{ChunkBy, ChunkByMut}; #[stable(feature = "rust1", since = "1.0.0")] @@ -1448,42 +1446,6 @@ impl [T] { (remainder, array_slice) } - /// Returns an iterator over `N` elements of the slice at a time, starting at the - /// beginning of the slice. - /// - /// The chunks are array references and do not overlap. If `N` does not divide the - /// length of the slice, then the last up to `N-1` elements will be omitted and can be - /// retrieved from the `remainder` function of the iterator. - /// - /// This method is the const generic equivalent of [`chunks_exact`]. - /// - /// # Panics - /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. - /// - /// # Examples - /// - /// ``` - /// #![feature(array_chunks)] - /// let slice = ['l', 'o', 'r', 'e', 'm']; - /// let mut iter = slice.array_chunks(); - /// assert_eq!(iter.next().unwrap(), &['l', 'o']); - /// assert_eq!(iter.next().unwrap(), &['r', 'e']); - /// assert!(iter.next().is_none()); - /// assert_eq!(iter.remainder(), &['m']); - /// ``` - /// - /// [`chunks_exact`]: slice::chunks_exact - #[unstable(feature = "array_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] - #[inline] - #[track_caller] - pub const fn array_chunks(&self) -> ArrayChunks<'_, T, N> { - assert!(N != 0, "chunk size must be non-zero"); - ArrayChunks::new(self) - } - /// Splits the slice into a slice of `N`-element arrays, /// assuming that there's no remainder. /// @@ -1646,44 +1608,6 @@ impl [T] { (remainder, array_slice) } - /// Returns an iterator over `N` elements of the slice at a time, starting at the - /// beginning of the slice. - /// - /// The chunks are mutable array references and do not overlap. If `N` does not divide - /// the length of the slice, then the last up to `N-1` elements will be omitted and - /// can be retrieved from the `into_remainder` function of the iterator. - /// - /// This method is the const generic equivalent of [`chunks_exact_mut`]. - /// - /// # Panics - /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. - /// - /// # Examples - /// - /// ``` - /// #![feature(array_chunks)] - /// let v = &mut [0, 0, 0, 0, 0]; - /// let mut count = 1; - /// - /// for chunk in v.array_chunks_mut() { - /// *chunk = [count; 2]; - /// count += 1; - /// } - /// assert_eq!(v, &[1, 1, 2, 2, 0]); - /// ``` - /// - /// [`chunks_exact_mut`]: slice::chunks_exact_mut - #[unstable(feature = "array_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] - #[inline] - #[track_caller] - pub const fn array_chunks_mut(&mut self) -> ArrayChunksMut<'_, T, N> { - assert!(N != 0, "chunk size must be non-zero"); - ArrayChunksMut::new(self) - } - /// Returns an iterator over overlapping windows of `N` elements of a slice, /// starting at the beginning of the slice. 
/// diff --git a/library/core/src/str/iter.rs b/library/core/src/str/iter.rs index bcf886484add4..d2985d8a18669 100644 --- a/library/core/src/str/iter.rs +++ b/library/core/src/str/iter.rs @@ -52,7 +52,7 @@ impl<'a> Iterator for Chars<'a> { const CHUNK_SIZE: usize = 32; if remainder >= CHUNK_SIZE { - let mut chunks = self.iter.as_slice().array_chunks::(); + let mut chunks = self.iter.as_slice().as_chunks::().0.iter(); let mut bytes_skipped: usize = 0; while remainder > CHUNK_SIZE diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs index 57bea505433d2..70c02ead35848 100644 --- a/library/core/src/sync/atomic.rs +++ b/library/core/src/sync/atomic.rs @@ -1245,8 +1245,8 @@ impl AtomicBool { /// Returning an `*mut` pointer from a shared reference to this atomic is safe because the /// atomic types work with interior mutability. All modifications of an atomic change the value /// through a shared reference, and can do so safely as long as they use atomic operations. Any - /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the same - /// restriction in [Memory model for atomic accesses]. + /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the + /// requirements of the [memory model]. /// /// # Examples /// @@ -1265,7 +1265,7 @@ impl AtomicBool { /// # } /// ``` /// - /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + /// [memory model]: self#memory-model-for-atomic-accesses #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] @@ -2489,8 +2489,8 @@ impl AtomicPtr { /// Returning an `*mut` pointer from a shared reference to this atomic is safe because the /// atomic types work with interior mutability. All modifications of an atomic change the value /// through a shared reference, and can do so safely as long as they use atomic operations. Any - /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the same - /// restriction in [Memory model for atomic accesses]. + /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the + /// requirements of the [memory model]. /// /// # Examples /// @@ -2510,7 +2510,7 @@ impl AtomicPtr { /// } /// ``` /// - /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + /// [memory model]: self#memory-model-for-atomic-accesses #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] @@ -3623,8 +3623,8 @@ macro_rules! atomic_int { /// Returning an `*mut` pointer from a shared reference to this atomic is safe because the /// atomic types work with interior mutability. All modifications of an atomic change the value /// through a shared reference, and can do so safely as long as they use atomic operations. Any - /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the same - /// restriction in [Memory model for atomic accesses]. + /// use of the returned raw pointer requires an `unsafe` block and still has to uphold the + /// requirements of the [memory model]. /// /// # Examples /// @@ -3645,7 +3645,7 @@ macro_rules! 
atomic_int { /// # } /// ``` /// - /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + /// [memory model]: self#memory-model-for-atomic-accesses #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] diff --git a/library/coretests/tests/lib.rs b/library/coretests/tests/lib.rs index c5bfd1574e295..029a7b00ad36e 100644 --- a/library/coretests/tests/lib.rs +++ b/library/coretests/tests/lib.rs @@ -2,7 +2,6 @@ #![cfg_attr(target_has_atomic = "128", feature(integer_atomics))] #![cfg_attr(test, feature(cfg_select))] #![feature(alloc_layout_extra)] -#![feature(array_chunks)] #![feature(array_ptr_get)] #![feature(array_try_from_fn)] #![feature(array_windows)] @@ -30,6 +29,7 @@ #![feature(core_private_diy_float)] #![feature(cstr_display)] #![feature(dec2flt)] +#![feature(drop_guard)] #![feature(duration_constants)] #![feature(duration_constructors)] #![feature(duration_constructors_lite)] diff --git a/library/coretests/tests/mem.rs b/library/coretests/tests/mem.rs index 9c15be4a8c4bd..e896c61ef4881 100644 --- a/library/coretests/tests/mem.rs +++ b/library/coretests/tests/mem.rs @@ -1,5 +1,6 @@ use core::mem::*; use core::{array, ptr}; +use std::cell::Cell; #[cfg(panic = "unwind")] use std::rc::Rc; @@ -795,3 +796,48 @@ fn const_maybe_uninit_zeroed() { assert_eq!(unsafe { (*UNINIT.0.cast::<[[u8; SIZE]; 1]>())[0] }, [0u8; SIZE]); } + +#[test] +fn drop_guards_only_dropped_by_closure_when_run() { + let value_drops = Cell::new(0); + let value = DropGuard::new((), |()| value_drops.set(1 + value_drops.get())); + let closure_drops = Cell::new(0); + let guard = DropGuard::new(value, |_| closure_drops.set(1 + closure_drops.get())); + assert_eq!(value_drops.get(), 0); + assert_eq!(closure_drops.get(), 0); + drop(guard); + assert_eq!(value_drops.get(), 1); + assert_eq!(closure_drops.get(), 1); +} + +#[test] +fn drop_guard_into_inner() { + let dropped = Cell::new(false); + let value = DropGuard::new(42, |_| dropped.set(true)); + let guard = DropGuard::new(value, |_| dropped.set(true)); + let inner = DropGuard::into_inner(guard); + assert_eq!(dropped.get(), false); + assert_eq!(*inner, 42); +} + +#[test] +#[cfg(panic = "unwind")] +fn drop_guard_always_drops_value_if_closure_drop_unwinds() { + // Create a value with a destructor, which we will validate ran successfully. + let mut value_was_dropped = false; + let value_with_tracked_destruction = DropGuard::new((), |_| value_was_dropped = true); + + // Create a closure that will begin unwinding when dropped. + let drop_bomb = DropGuard::new((), |_| panic!()); + let closure_that_panics_on_drop = move |_| { + let _drop_bomb = drop_bomb; + }; + + // This will run the closure, which will panic when dropped. This should + // run the destructor of the value we passed, which we validate. 
+ let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + let guard = DropGuard::new(value_with_tracked_destruction, closure_that_panics_on_drop); + DropGuard::into_inner(guard); + })); + assert!(value_was_dropped); +} diff --git a/library/coretests/tests/slice.rs b/library/coretests/tests/slice.rs index d17e681480c70..992f24cb18f20 100644 --- a/library/coretests/tests/slice.rs +++ b/library/coretests/tests/slice.rs @@ -611,190 +611,6 @@ fn test_chunks_exact_mut_zip() { assert_eq!(v1, [13, 14, 19, 20, 4]); } -#[test] -fn test_array_chunks_infer() { - let v: &[i32] = &[0, 1, 2, 3, 4, -4]; - let c = v.array_chunks(); - for &[a, b, c] in c { - assert_eq!(a + b + c, 3); - } - - let v2: &[i32] = &[0, 1, 2, 3, 4, 5, 6]; - let total = v2.array_chunks().map(|&[a, b]| a * b).sum::(); - assert_eq!(total, 2 * 3 + 4 * 5); -} - -#[test] -fn test_array_chunks_count() { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - let c = v.array_chunks::<3>(); - assert_eq!(c.count(), 2); - - let v2: &[i32] = &[0, 1, 2, 3, 4]; - let c2 = v2.array_chunks::<2>(); - assert_eq!(c2.count(), 2); - - let v3: &[i32] = &[]; - let c3 = v3.array_chunks::<2>(); - assert_eq!(c3.count(), 0); -} - -#[test] -fn test_array_chunks_nth() { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - let mut c = v.array_chunks::<2>(); - assert_eq!(c.nth(1).unwrap(), &[2, 3]); - assert_eq!(c.next().unwrap(), &[4, 5]); - - let v2: &[i32] = &[0, 1, 2, 3, 4, 5, 6]; - let mut c2 = v2.array_chunks::<3>(); - assert_eq!(c2.nth(1).unwrap(), &[3, 4, 5]); - assert_eq!(c2.next(), None); -} - -#[test] -fn test_array_chunks_nth_back() { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - let mut c = v.array_chunks::<2>(); - assert_eq!(c.nth_back(1).unwrap(), &[2, 3]); - assert_eq!(c.next().unwrap(), &[0, 1]); - assert_eq!(c.next(), None); - - let v2: &[i32] = &[0, 1, 2, 3, 4]; - let mut c2 = v2.array_chunks::<3>(); - assert_eq!(c2.nth_back(0).unwrap(), &[0, 1, 2]); - assert_eq!(c2.next(), None); - assert_eq!(c2.next_back(), None); - - let v3: &[i32] = &[0, 1, 2, 3, 4]; - let mut c3 = v3.array_chunks::<10>(); - assert_eq!(c3.nth_back(0), None); -} - -#[test] -fn test_array_chunks_last() { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - let c = v.array_chunks::<2>(); - assert_eq!(c.last().unwrap(), &[4, 5]); - - let v2: &[i32] = &[0, 1, 2, 3, 4]; - let c2 = v2.array_chunks::<2>(); - assert_eq!(c2.last().unwrap(), &[2, 3]); -} - -#[test] -fn test_array_chunks_remainder() { - let v: &[i32] = &[0, 1, 2, 3, 4]; - let c = v.array_chunks::<2>(); - assert_eq!(c.remainder(), &[4]); -} - -#[test] -fn test_array_chunks_zip() { - let v1: &[i32] = &[0, 1, 2, 3, 4]; - let v2: &[i32] = &[6, 7, 8, 9, 10]; - - let res = v1 - .array_chunks::<2>() - .zip(v2.array_chunks::<2>()) - .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) - .collect::>(); - assert_eq!(res, vec![14, 22]); -} - -#[test] -fn test_array_chunks_mut_infer() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5, 6]; - for a in v.array_chunks_mut() { - let sum = a.iter().sum::(); - *a = [sum; 3]; - } - assert_eq!(v, &[3, 3, 3, 12, 12, 12, 6]); - - let v2: &mut [i32] = &mut [0, 1, 2, 3, 4, 5, 6]; - v2.array_chunks_mut().for_each(|[a, b]| core::mem::swap(a, b)); - assert_eq!(v2, &[1, 0, 3, 2, 5, 4, 6]); -} - -#[test] -fn test_array_chunks_mut_count() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - let c = v.array_chunks_mut::<3>(); - assert_eq!(c.count(), 2); - - let v2: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let c2 = v2.array_chunks_mut::<2>(); - assert_eq!(c2.count(), 2); - - let v3: &mut [i32] = &mut []; - let c3 = 
v3.array_chunks_mut::<2>(); - assert_eq!(c3.count(), 0); -} - -#[test] -fn test_array_chunks_mut_nth() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - let mut c = v.array_chunks_mut::<2>(); - assert_eq!(c.nth(1).unwrap(), &[2, 3]); - assert_eq!(c.next().unwrap(), &[4, 5]); - - let v2: &mut [i32] = &mut [0, 1, 2, 3, 4, 5, 6]; - let mut c2 = v2.array_chunks_mut::<3>(); - assert_eq!(c2.nth(1).unwrap(), &[3, 4, 5]); - assert_eq!(c2.next(), None); -} - -#[test] -fn test_array_chunks_mut_nth_back() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - let mut c = v.array_chunks_mut::<2>(); - assert_eq!(c.nth_back(1).unwrap(), &[2, 3]); - assert_eq!(c.next().unwrap(), &[0, 1]); - assert_eq!(c.next(), None); - - let v2: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let mut c2 = v2.array_chunks_mut::<3>(); - assert_eq!(c2.nth_back(0).unwrap(), &[0, 1, 2]); - assert_eq!(c2.next(), None); - assert_eq!(c2.next_back(), None); - - let v3: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let mut c3 = v3.array_chunks_mut::<10>(); - assert_eq!(c3.nth_back(0), None); -} - -#[test] -fn test_array_chunks_mut_last() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - let c = v.array_chunks_mut::<2>(); - assert_eq!(c.last().unwrap(), &[4, 5]); - - let v2: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let c2 = v2.array_chunks_mut::<2>(); - assert_eq!(c2.last().unwrap(), &[2, 3]); -} - -#[test] -fn test_array_chunks_mut_remainder() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let c = v.array_chunks_mut::<2>(); - assert_eq!(c.into_remainder(), &[4]); -} - -#[test] -fn test_array_chunks_mut_zip() { - let v1: &mut [i32] = &mut [0, 1, 2, 3, 4]; - let v2: &[i32] = &[6, 7, 8, 9, 10]; - - for (a, b) in v1.array_chunks_mut::<2>().zip(v2.array_chunks::<2>()) { - let sum = b.iter().sum::(); - for v in a { - *v += sum; - } - } - assert_eq!(v1, [13, 14, 19, 20, 4]); -} - #[test] fn test_array_windows_infer() { let v: &[i32] = &[0, 1, 0, 1]; diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 323742a75b055..77301d7228ead 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -324,13 +324,13 @@ // // Library features (core): // tidy-alphabetical-start -#![feature(array_chunks)] #![feature(bstr)] #![feature(bstr_internals)] #![feature(char_internals)] #![feature(clone_to_uninit)] #![feature(core_intrinsics)] #![feature(core_io_borrowed_buf)] +#![feature(drop_guard)] #![feature(duration_constants)] #![feature(error_generic_member_access)] #![feature(error_iter)] diff --git a/library/std/src/sys/random/sgx.rs b/library/std/src/sys/random/sgx.rs index c3647a8df220e..462b19003fad2 100644 --- a/library/std/src/sys/random/sgx.rs +++ b/library/std/src/sys/random/sgx.rs @@ -46,22 +46,22 @@ fn rdrand16() -> u16 { } pub fn fill_bytes(bytes: &mut [u8]) { - let mut chunks = bytes.array_chunks_mut(); - for chunk in &mut chunks { + let (chunks, remainder) = bytes.as_chunks_mut(); + for chunk in chunks { *chunk = rdrand64().to_ne_bytes(); } - let mut chunks = chunks.into_remainder().array_chunks_mut(); - for chunk in &mut chunks { + let (chunks, remainder) = remainder.as_chunks_mut(); + for chunk in chunks { *chunk = rdrand32().to_ne_bytes(); } - let mut chunks = chunks.into_remainder().array_chunks_mut(); - for chunk in &mut chunks { + let (chunks, remainder) = remainder.as_chunks_mut(); + for chunk in chunks { *chunk = rdrand16().to_ne_bytes(); } - if let [byte] = chunks.into_remainder() { + if let [byte] = remainder { *byte = rdrand16() as u8; } } diff --git a/library/std/src/sys/random/uefi.rs b/library/std/src/sys/random/uefi.rs index 
5f001f0f532a0..4a71d32fffeb4 100644 --- a/library/std/src/sys/random/uefi.rs +++ b/library/std/src/sys/random/uefi.rs @@ -138,12 +138,11 @@ mod rdrand { } unsafe fn rdrand_exact(dest: &mut [u8]) -> Option<()> { - let mut chunks = dest.array_chunks_mut(); - for chunk in &mut chunks { + let (chunks, tail) = dest.as_chunks_mut(); + for chunk in chunks { *chunk = unsafe { rdrand() }?.to_ne_bytes(); } - let tail = chunks.into_remainder(); let n = tail.len(); if n > 0 { let src = unsafe { rdrand() }?.to_ne_bytes(); diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index cfe090b22dc32..b4232409ba83d 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -556,3 +556,9 @@ tool_check_step!(Compiletest { allow_features: COMPILETEST_ALLOW_FEATURES, default: false, }); + +tool_check_step!(Linkchecker { + path: "src/tools/linkchecker", + mode: |_builder| Mode::ToolBootstrap, + default: false +}); diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 4abfe1843ebeb..b5b021c6e028f 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -19,7 +19,7 @@ use serde_derive::Deserialize; use tracing::{instrument, span}; use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; -use crate::core::build_steps::tool::{SourceType, copy_lld_artifacts}; +use crate::core::build_steps::tool::{RustcPrivateCompilers, SourceType, copy_lld_artifacts}; use crate::core::build_steps::{dist, llvm}; use crate::core::builder; use crate::core::builder::{ @@ -1128,7 +1128,7 @@ impl Step for Rustc { cargo.env("RUSTC_BOLT_LINK_FLAGS", "1"); } - let _guard = builder.msg_sysroot_tool( + let _guard = builder.msg_rustc_tool( Kind::Build, build_compiler.stage, format_args!("compiler artifacts{}", crate_description(&self.crates)), @@ -1541,9 +1541,8 @@ impl Step for RustcLink { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CodegenBackend { - pub target: TargetSelection, - pub compiler: Compiler, - pub backend: String, + compilers: RustcPrivateCompilers, + backend: String, } fn needs_codegen_config(run: &RunConfig<'_>) -> bool { @@ -1607,8 +1606,11 @@ impl Step for CodegenBackend { } run.builder.ensure(CodegenBackend { - target: run.target, - compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), + compilers: RustcPrivateCompilers::new( + run.builder, + run.builder.top_stage, + run.target, + ), backend: backend.clone(), }); } @@ -1621,20 +1623,17 @@ impl Step for CodegenBackend { name = "CodegenBackend::run", skip_all, fields( - compiler = ?self.compiler, - target = ?self.target, - backend = ?self.target, + compilers = ?self.compilers, + backend = ?self.backend, ), ), )] fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; let backend = self.backend; + let target = self.compilers.target(); + let build_compiler = self.compilers.build_compiler(); - builder.ensure(Rustc::new(compiler, target)); - - if builder.config.keep_stage.contains(&compiler.stage) { + if builder.config.keep_stage.contains(&build_compiler.stage) { trace!("`keep-stage` requested"); builder.info( "WARNING: Using a potentially old codegen backend. 
\ @@ -1645,17 +1644,11 @@ impl Step for CodegenBackend { return; } - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - if compiler_to_use != compiler { - builder.ensure(CodegenBackend { compiler: compiler_to_use, target, backend }); - return; - } - - let out_dir = builder.cargo_out(compiler, Mode::Codegen, target); + let out_dir = builder.cargo_out(build_compiler, Mode::Codegen, target); let mut cargo = builder::Cargo::new( builder, - compiler, + build_compiler, Mode::Codegen, SourceType::InTree, target, @@ -1676,7 +1669,13 @@ impl Step for CodegenBackend { let tmp_stamp = BuildStamp::new(&out_dir).with_prefix("tmp"); - let _guard = builder.msg_build(compiler, format_args!("codegen backend {backend}"), target); + let _guard = builder.msg_rustc_tool( + Kind::Build, + build_compiler.stage, + format_args!("codegen backend {backend}"), + build_compiler.host, + target, + ); let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false); if builder.config.dry_run() { return; @@ -1696,10 +1695,20 @@ impl Step for CodegenBackend { f.display() ); } - let stamp = build_stamp::codegen_backend_stamp(builder, compiler, target, &backend); + let stamp = build_stamp::codegen_backend_stamp(builder, build_compiler, target, &backend); let codegen_backend = codegen_backend.to_str().unwrap(); t!(stamp.add_stamp(codegen_backend).write()); } + + fn metadata(&self) -> Option { + Some( + StepMetadata::build( + &format!("rustc_codegen_{}", self.backend), + self.compilers.target(), + ) + .built_by(self.compilers.build_compiler()), + ) + } } /// Creates the `codegen-backends` folder for a compiler that's about to be @@ -2186,8 +2195,10 @@ impl Step for Assemble { continue; } builder.ensure(CodegenBackend { - compiler: build_compiler, - target: target_compiler.host, + compilers: RustcPrivateCompilers::from_build_and_target_compiler( + build_compiler, + target_compiler, + ), backend: backend.clone(), }); } diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index c8a54ad250cb3..e8ed7be8158ba 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -20,7 +20,7 @@ use object::read::archive::ArchiveFile; use tracing::instrument; use crate::core::build_steps::doc::DocumentationFormat; -use crate::core::build_steps::tool::{self, Tool}; +use crate::core::build_steps::tool::{self, RustcPrivateCompilers, Tool}; use crate::core::build_steps::vendor::{VENDOR_DIR, Vendor}; use crate::core::build_steps::{compile, llvm}; use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata}; @@ -425,19 +425,20 @@ impl Step for Rustc { .as_ref() .is_none_or(|tools| tools.iter().any(|tool| tool == "rustdoc")) { - let rustdoc = builder.rustdoc(compiler); + let rustdoc = builder.rustdoc_for_compiler(compiler); builder.install(&rustdoc, &image.join("bin"), FileType::Executable); } let ra_proc_macro_srv_compiler = builder.compiler_for(compiler.stage, builder.config.host_target, compiler.host); - builder.ensure(compile::Rustc::new(ra_proc_macro_srv_compiler, compiler.host)); + let compilers = RustcPrivateCompilers::from_build_compiler( + builder, + ra_proc_macro_srv_compiler, + compiler.host, + ); if let Some(ra_proc_macro_srv) = builder.ensure_if_default( - tool::RustAnalyzerProcMacroSrv { - compiler: ra_proc_macro_srv_compiler, - target: compiler.host, - }, + tool::RustAnalyzerProcMacroSrv::from_compilers(compilers), builder.kind, ) { let dst = image.join("libexec"); @@ 
-1228,7 +1229,7 @@ impl Step for Cargo { #[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { - pub compiler: Compiler, + pub build_compiler: Compiler, pub target: TargetSelection, } @@ -1244,7 +1245,7 @@ impl Step for RustAnalyzer { fn make_run(run: RunConfig<'_>) { run.builder.ensure(RustAnalyzer { - compiler: run.builder.compiler_for( + build_compiler: run.builder.compiler_for( run.builder.top_stage, run.builder.config.host_target, run.target, @@ -1254,12 +1255,11 @@ impl Step for RustAnalyzer { } fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; let target = self.target; + let compilers = + RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target); - builder.ensure(compile::Rustc::new(compiler, target)); - - let rust_analyzer = builder.ensure(tool::RustAnalyzer { compiler, target }); + let rust_analyzer = builder.ensure(tool::RustAnalyzer::from_compilers(compilers)); let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple); tarball.set_overlay(OverlayKind::RustAnalyzer); @@ -1270,9 +1270,9 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Clippy { - pub compiler: Compiler, + pub build_compiler: Compiler, pub target: TargetSelection, } @@ -1288,7 +1288,7 @@ impl Step for Clippy { fn make_run(run: RunConfig<'_>) { run.builder.ensure(Clippy { - compiler: run.builder.compiler_for( + build_compiler: run.builder.compiler_for( run.builder.top_stage, run.builder.config.host_target, run.target, @@ -1298,16 +1298,15 @@ impl Step for Clippy { } fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; let target = self.target; - - builder.ensure(compile::Rustc::new(compiler, target)); + let compilers = + RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, target); // Prepare the image directory // We expect clippy to build, because we've exited this step above if tool // state for clippy isn't testing. 
- let clippy = builder.ensure(tool::Clippy { compiler, target }); - let cargoclippy = builder.ensure(tool::CargoClippy { compiler, target }); + let clippy = builder.ensure(tool::Clippy::from_compilers(compilers)); + let cargoclippy = builder.ensure(tool::CargoClippy::from_compilers(compilers)); let mut tarball = Tarball::new(builder, "clippy", &target.triple); tarball.set_overlay(OverlayKind::Clippy); @@ -1319,9 +1318,9 @@ impl Step for Clippy { } } -#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Miri { - pub compiler: Compiler, + pub build_compiler: Compiler, pub target: TargetSelection, } @@ -1337,7 +1336,7 @@ impl Step for Miri { fn make_run(run: RunConfig<'_>) { run.builder.ensure(Miri { - compiler: run.builder.compiler_for( + build_compiler: run.builder.compiler_for( run.builder.top_stage, run.builder.config.host_target, run.target, @@ -1354,15 +1353,12 @@ impl Step for Miri { return None; } - let compiler = self.compiler; - let target = self.target; - - builder.ensure(compile::Rustc::new(compiler, target)); - - let miri = builder.ensure(tool::Miri { compiler, target }); - let cargomiri = builder.ensure(tool::CargoMiri { compiler, target }); + let compilers = + RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target); + let miri = builder.ensure(tool::Miri::from_compilers(compilers)); + let cargomiri = builder.ensure(tool::CargoMiri::from_compilers(compilers)); - let mut tarball = Tarball::new(builder, "miri", &target.triple); + let mut tarball = Tarball::new(builder, "miri", &self.target.triple); tarball.set_overlay(OverlayKind::Miri); tarball.is_preview(true); tarball.add_file(&miri.tool_path, "bin", FileType::Executable); @@ -1464,9 +1460,9 @@ impl Step for CodegenBackend { } } -#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Rustfmt { - pub compiler: Compiler, + pub build_compiler: Compiler, pub target: TargetSelection, } @@ -1482,7 +1478,7 @@ impl Step for Rustfmt { fn make_run(run: RunConfig<'_>) { run.builder.ensure(Rustfmt { - compiler: run.builder.compiler_for( + build_compiler: run.builder.compiler_for( run.builder.top_stage, run.builder.config.host_target, run.target, @@ -1492,14 +1488,13 @@ impl Step for Rustfmt { } fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; - let target = self.target; + let compilers = + RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target); - builder.ensure(compile::Rustc::new(compiler, target)); + let rustfmt = builder.ensure(tool::Rustfmt::from_compilers(compilers)); + let cargofmt = builder.ensure(tool::Cargofmt::from_compilers(compilers)); - let rustfmt = builder.ensure(tool::Rustfmt { compiler, target }); - let cargofmt = builder.ensure(tool::Cargofmt { compiler, target }); - let mut tarball = Tarball::new(builder, "rustfmt", &target.triple); + let mut tarball = Tarball::new(builder, "rustfmt", &self.target.triple); tarball.set_overlay(OverlayKind::Rustfmt); tarball.is_preview(true); tarball.add_file(&rustfmt.tool_path, "bin", FileType::Executable); @@ -1546,7 +1541,7 @@ impl Step for Extended { let mut built_tools = HashSet::new(); macro_rules! 
add_component { ($name:expr => $step:expr) => { - if let Some(tarball) = builder.ensure_if_default($step, Kind::Dist) { + if let Some(Some(tarball)) = builder.ensure_if_default($step, Kind::Dist) { tarballs.push(tarball); built_tools.insert($name); } @@ -1567,11 +1562,11 @@ impl Step for Extended { add_component!("rust-docs" => Docs { host: target }); add_component!("rust-json-docs" => JsonDocs { host: target }); add_component!("cargo" => Cargo { compiler, target }); - add_component!("rustfmt" => Rustfmt { compiler, target }); - add_component!("rust-analyzer" => RustAnalyzer { compiler, target }); + add_component!("rustfmt" => Rustfmt { build_compiler: compiler, target }); + add_component!("rust-analyzer" => RustAnalyzer { build_compiler: compiler, target }); add_component!("llvm-components" => LlvmTools { target }); - add_component!("clippy" => Clippy { compiler, target }); - add_component!("miri" => Miri { compiler, target }); + add_component!("clippy" => Clippy { build_compiler: compiler, target }); + add_component!("miri" => Miri { build_compiler: compiler, target }); add_component!("analysis" => Analysis { compiler, target }); add_component!("rustc-codegen-cranelift" => CodegenBackend { compiler: builder.compiler(stage, target), diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index 37418f640aca6..d4539a0eb34d1 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -149,7 +149,7 @@ impl Step for RustbookSrc

{ let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); if let Some(compiler) = self.rustdoc_compiler { - let mut rustdoc = builder.rustdoc(compiler); + let mut rustdoc = builder.rustdoc_for_compiler(compiler); rustdoc.pop(); let old_path = env::var_os("PATH").unwrap_or_default(); let new_path = @@ -365,7 +365,7 @@ impl Step for Standalone { } let html = out.join(filename).with_extension("html"); - let rustdoc = builder.rustdoc(compiler); + let rustdoc = builder.rustdoc_for_compiler(compiler); if up_to_date(&path, &html) && up_to_date(&footer, &html) && up_to_date(&favicon, &html) @@ -463,7 +463,7 @@ impl Step for Releases { let html = out.join("releases.html"); let tmppath = out.join("releases.md"); let inpath = builder.src.join("RELEASES.md"); - let rustdoc = builder.rustdoc(compiler); + let rustdoc = builder.rustdoc_for_compiler(compiler); if !up_to_date(&inpath, &html) || !up_to_date(&footer, &html) || !up_to_date(&favicon, &html) @@ -811,7 +811,7 @@ impl Step for Rustc { let compiler = builder.compiler(stage, builder.config.host_target); builder.std(compiler, builder.config.host_target); - let _guard = builder.msg_sysroot_tool( + let _guard = builder.msg_rustc_tool( Kind::Doc, stage, format!("compiler{}", crate_description(&self.crates)), @@ -901,6 +901,10 @@ impl Step for Rustc { builder.open_in_browser(index); } } + + fn metadata(&self) -> Option { + Some(StepMetadata::doc("rustc", self.target).stage(self.stage)) + } } macro_rules! tool_doc { @@ -1018,6 +1022,10 @@ macro_rules! tool_doc { })? } } + + fn metadata(&self) -> Option { + Some(StepMetadata::doc(stringify!($tool), self.target)) + } } } } diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs index 4156b49a8b337..f50ddce18a271 100644 --- a/src/bootstrap/src/core/build_steps/install.rs +++ b/src/bootstrap/src/core/build_steps/install.rs @@ -221,7 +221,7 @@ install!((self, builder, _config), }; RustAnalyzer, alias = "rust-analyzer", Self::should_build(_config), only_hosts: true, { if let Some(tarball) = - builder.ensure(dist::RustAnalyzer { compiler: self.compiler, target: self.target }) + builder.ensure(dist::RustAnalyzer { build_compiler: self.compiler, target: self.target }) { install_sh(builder, "rust-analyzer", self.compiler.stage, Some(self.target), &tarball); } else { @@ -232,12 +232,12 @@ install!((self, builder, _config), }; Clippy, alias = "clippy", Self::should_build(_config), only_hosts: true, { let tarball = builder - .ensure(dist::Clippy { compiler: self.compiler, target: self.target }) + .ensure(dist::Clippy { build_compiler: self.compiler, target: self.target }) .expect("missing clippy"); install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball); }; Miri, alias = "miri", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) { + if let Some(tarball) = builder.ensure(dist::Miri { build_compiler: self.compiler, target: self.target }) { install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball); } else { // Miri is only available on nightly @@ -257,7 +257,7 @@ install!((self, builder, _config), }; Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, { if let Some(tarball) = builder.ensure(dist::Rustfmt { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target }) { install_sh(builder, "rustfmt", self.compiler.stage, Some(self.target), &tarball); diff --git 
a/src/bootstrap/src/core/build_steps/perf.rs b/src/bootstrap/src/core/build_steps/perf.rs index 4d61b38c876d0..108b7f90c149e 100644 --- a/src/bootstrap/src/core/build_steps/perf.rs +++ b/src/bootstrap/src/core/build_steps/perf.rs @@ -157,7 +157,7 @@ Consider setting `rust.debuginfo-level = 1` in `bootstrap.toml`."#); if let Some(opts) = args.cmd.shared_opts() && opts.profiles.contains(&Profile::Doc) { - builder.ensure(Rustdoc { compiler }); + builder.ensure(Rustdoc { target_compiler: compiler }); } let sysroot = builder.ensure(Sysroot::new(compiler)); diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index b2293fdd9b523..962dd372849d2 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -9,7 +9,7 @@ use clap_complete::{Generator, shells}; use crate::core::build_steps::dist::distdir; use crate::core::build_steps::test; -use crate::core::build_steps::tool::{self, SourceType, Tool}; +use crate::core::build_steps::tool::{self, RustcPrivateCompilers, SourceType, Tool}; use crate::core::build_steps::vendor::{Vendor, default_paths_to_vendor}; use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step}; use crate::core::config::TargetSelection; @@ -135,13 +135,13 @@ impl Step for Miri { } // This compiler runs on the host, we'll just use it for the target. - let target_compiler = builder.compiler(stage, target); - let miri_build = builder.ensure(tool::Miri { compiler: target_compiler, target }); - // Rustc tools are off by one stage, so use the build compiler to run miri. + let compilers = RustcPrivateCompilers::new(builder, stage, target); + let miri_build = builder.ensure(tool::Miri::from_compilers(compilers)); let host_compiler = miri_build.build_compiler; // Get a target sysroot for Miri. - let miri_sysroot = test::Miri::build_miri_sysroot(builder, target_compiler, target); + let miri_sysroot = + test::Miri::build_miri_sysroot(builder, compilers.target_compiler(), target); // # Run miri. // Running it via `cargo run` as that figures out the right dylib path. 
@@ -465,8 +465,8 @@ impl Step for Rustfmt { std::process::exit(1); } - let compiler = builder.compiler(stage, host); - let rustfmt_build = builder.ensure(tool::Rustfmt { compiler, target: host }); + let compilers = RustcPrivateCompilers::new(builder, stage, host); + let rustfmt_build = builder.ensure(tool::Rustfmt::from_compilers(compilers)); let mut rustfmt = tool::prepare_tool_cargo( builder, diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 0f9268097d7b7..14feef5855192 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -14,7 +14,9 @@ use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; use crate::core::build_steps::llvm::get_llvm_version; use crate::core::build_steps::run::get_completion_paths; use crate::core::build_steps::synthetic_targets::MirOptPanicAbortSyntheticTarget; -use crate::core::build_steps::tool::{self, COMPILETEST_ALLOW_FEATURES, SourceType, Tool}; +use crate::core::build_steps::tool::{ + self, COMPILETEST_ALLOW_FEATURES, RustcPrivateCompilers, SourceType, Tool, +}; use crate::core::build_steps::toolstate::ToolState; use crate::core::build_steps::{compile, dist, llvm}; use crate::core::builder::{ @@ -260,7 +262,7 @@ impl Step for Cargotest { .arg(&out_dir) .args(builder.config.test_args()) .env("RUSTC", builder.rustc(compiler)) - .env("RUSTDOC", builder.rustdoc(compiler)); + .env("RUSTDOC", builder.rustdoc_for_compiler(compiler)); add_rustdoc_cargo_linker_args( &mut cmd, builder, @@ -361,8 +363,7 @@ impl Step for Cargo { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustAnalyzer { - stage: u32, - host: TargetSelection, + compilers: RustcPrivateCompilers, } impl Step for RustAnalyzer { @@ -375,19 +376,18 @@ impl Step for RustAnalyzer { } fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Self { stage: run.builder.top_stage, host: run.target }); + run.builder.ensure(Self { + compilers: RustcPrivateCompilers::new( + run.builder, + run.builder.top_stage, + run.builder.host_target, + ), + }); } /// Runs `cargo test` for rust-analyzer fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - let compiler = tool::get_tool_rustc_compiler(builder, compiler); - - // We don't need to build the whole Rust Analyzer for the proc-macro-srv test suite, - // but we do need the standard library to be present. - builder.ensure(compile::Rustc::new(compiler, host)); + let host = self.compilers.target(); let workspace_path = "src/tools/rust-analyzer"; // until the whole RA test suite runs on `i686`, we only run @@ -395,7 +395,7 @@ impl Step for RustAnalyzer { let crate_path = "src/tools/rust-analyzer/crates/proc-macro-srv"; let mut cargo = tool::prepare_tool_cargo( builder, - compiler, + self.compilers.build_compiler(), Mode::ToolRustc, host, Kind::Test, @@ -422,8 +422,7 @@ impl Step for RustAnalyzer { /// Runs `cargo test` for rustfmt. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Rustfmt { - stage: u32, - host: TargetSelection, + compilers: RustcPrivateCompilers, } impl Step for Rustfmt { @@ -435,36 +434,39 @@ impl Step for Rustfmt { } fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rustfmt { stage: run.builder.top_stage, host: run.target }); + run.builder.ensure(Rustfmt { + compilers: RustcPrivateCompilers::new( + run.builder, + run.builder.top_stage, + run.builder.host_target, + ), + }); } /// Runs `cargo test` for rustfmt. 
fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - let tool_result = builder.ensure(tool::Rustfmt { compiler, target: self.host }); - let compiler = tool_result.build_compiler; + let tool_result = builder.ensure(tool::Rustfmt::from_compilers(self.compilers)); + let build_compiler = tool_result.build_compiler; + let target = self.compilers.target(); let mut cargo = tool::prepare_tool_cargo( builder, - compiler, + build_compiler, Mode::ToolRustc, - host, + target, Kind::Test, "src/tools/rustfmt", SourceType::InTree, &[], ); - let dir = testdir(builder, compiler.host); + let dir = testdir(builder, target); t!(fs::create_dir_all(&dir)); cargo.env("RUSTFMT_TEST_DIR", dir); cargo.add_rustc_lib_path(builder); - run_cargo_test(cargo, &[], &[], "rustfmt", host, builder); + run_cargo_test(cargo, &[], &[], "rustfmt", target, builder); } } @@ -539,12 +541,14 @@ impl Step for Miri { } // This compiler runs on the host, we'll just use it for the target. - let target_compiler = builder.compiler(stage, host); + let compilers = RustcPrivateCompilers::new(builder, stage, host); // Build our tools. - let miri = builder.ensure(tool::Miri { compiler: target_compiler, target: host }); + let miri = builder.ensure(tool::Miri::from_compilers(compilers)); // the ui tests also assume cargo-miri has been built - builder.ensure(tool::CargoMiri { compiler: target_compiler, target: host }); + builder.ensure(tool::CargoMiri::from_compilers(compilers)); + + let target_compiler = compilers.target_compiler(); // We also need sysroots, for Miri and for the host (the latter for build scripts). // This is for the tests so everything is done with the target compiler. @@ -596,7 +600,7 @@ impl Step for Miri { cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg()); { - let _guard = builder.msg_sysroot_tool(Kind::Test, stage, "miri", host, target); + let _guard = builder.msg_rustc_tool(Kind::Test, stage, "miri", host, target); let _time = helpers::timeit(builder); cargo.run(builder); } @@ -612,7 +616,7 @@ impl Step for Miri { cargo.args(["tests/pass", "tests/panic"]); { - let _guard = builder.msg_sysroot_tool( + let _guard = builder.msg_rustc_tool( Kind::Test, stage, "miri (mir-opt-level 4)", @@ -689,7 +693,7 @@ impl Step for CargoMiri { // Finally, run everything. let mut cargo = BootstrapCommand::from(cargo); { - let _guard = builder.msg_sysroot_tool(Kind::Test, stage, "cargo-miri", host, target); + let _guard = builder.msg_rustc_tool(Kind::Test, stage, "cargo-miri", host, target); let _time = helpers::timeit(builder); cargo.run(builder); } @@ -739,7 +743,7 @@ impl Step for CompiletestTest { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Clippy { - host: TargetSelection, + compilers: RustcPrivateCompilers, } impl Step for Clippy { @@ -752,23 +756,30 @@ impl Step for Clippy { } fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Clippy { host: run.target }); + run.builder.ensure(Clippy { + compilers: RustcPrivateCompilers::new( + run.builder, + run.builder.top_stage, + run.builder.host_target, + ), + }); } /// Runs `cargo test` for clippy. fn run(self, builder: &Builder<'_>) { - let stage = builder.top_stage; - let host = self.host; + let host = self.compilers.target(); + // We need to carefully distinguish the compiler that builds clippy, and the compiler // that is linked into the clippy being tested. `target_compiler` is the latter, // and it must also be used by clippy's test runner to build tests and their dependencies. 
- let target_compiler = builder.compiler(stage, host); + let compilers = self.compilers; + let target_compiler = compilers.target_compiler(); - let tool_result = builder.ensure(tool::Clippy { compiler: target_compiler, target: host }); - let tool_compiler = tool_result.build_compiler; + let tool_result = builder.ensure(tool::Clippy::from_compilers(compilers)); + let build_compiler = tool_result.build_compiler; let mut cargo = tool::prepare_tool_cargo( builder, - tool_compiler, + build_compiler, Mode::ToolRustc, host, Kind::Test, @@ -777,9 +788,10 @@ impl Step for Clippy { &[], ); - cargo.env("RUSTC_TEST_SUITE", builder.rustc(tool_compiler)); - cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(tool_compiler)); - let host_libs = builder.stage_out(tool_compiler, Mode::ToolRustc).join(builder.cargo_dir()); + cargo.env("RUSTC_TEST_SUITE", builder.rustc(build_compiler)); + cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(build_compiler)); + let host_libs = + builder.stage_out(build_compiler, Mode::ToolRustc).join(builder.cargo_dir()); cargo.env("HOST_LIBS", host_libs); // Build the standard library that the tests can use. @@ -808,8 +820,7 @@ impl Step for Clippy { cargo.add_rustc_lib_path(builder); let cargo = prepare_cargo_test(cargo, &[], &[], host, builder); - let _guard = - builder.msg_sysroot_tool(Kind::Test, tool_compiler.stage, "clippy", host, host); + let _guard = builder.msg_rustc_tool(Kind::Test, build_compiler.stage, "clippy", host, host); // Clippy reports errors if it blessed the outputs if cargo.allow_failure().run(builder) { @@ -861,7 +872,7 @@ impl Step for RustdocTheme { .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) .env("RUSTDOC_LIBDIR", builder.sysroot_target_libdir(self.compiler, self.compiler.host)) .env("CFG_RELEASE_CHANNEL", &builder.config.channel) - .env("RUSTDOC_REAL", builder.rustdoc(self.compiler)) + .env("RUSTDOC_REAL", builder.rustdoc_for_compiler(self.compiler)) .env("RUSTC_BOOTSTRAP", "1"); cmd.args(linker_args(builder, self.compiler.host, LldThreads::No, self.compiler.stage)); @@ -1020,7 +1031,11 @@ impl Step for RustdocGUI { let mut cmd = builder.tool_cmd(Tool::RustdocGUITest); let out_dir = builder.test_out(self.target).join("rustdoc-gui"); - build_stamp::clear_if_dirty(builder, &out_dir, &builder.rustdoc(self.compiler)); + build_stamp::clear_if_dirty( + builder, + &out_dir, + &builder.rustdoc_for_compiler(self.compiler), + ); if let Some(src) = builder.config.src.to_str() { cmd.arg("--rust-src").arg(src); @@ -1036,7 +1051,7 @@ impl Step for RustdocGUI { cmd.arg("--jobs").arg(builder.jobs().to_string()); - cmd.env("RUSTDOC", builder.rustdoc(self.compiler)) + cmd.env("RUSTDOC", builder.rustdoc_for_compiler(self.compiler)) .env("RUSTC", builder.rustc(self.compiler)); add_rustdoc_cargo_linker_args( @@ -1072,7 +1087,7 @@ impl Step for RustdocGUI { } let _time = helpers::timeit(builder); - let _guard = builder.msg_sysroot_tool( + let _guard = builder.msg_rustc_tool( Kind::Test, self.compiler.stage, "rustdoc-gui", @@ -1726,7 +1741,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the || mode == "rustdoc-json" || suite == "coverage-run-rustdoc" { - cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler)); + cmd.arg("--rustdoc-path").arg(builder.rustdoc_for_compiler(compiler)); } if mode == "rustdoc-json" { @@ -2240,7 +2255,7 @@ impl BookTest { // mdbook just executes a binary named "rustdoc", so we need to update // PATH so that it points to our rustdoc. 
- let mut rustdoc_path = builder.rustdoc(compiler); + let mut rustdoc_path = builder.rustdoc_for_compiler(compiler); rustdoc_path.pop(); let old_path = env::var_os("PATH").unwrap_or_default(); let new_path = env::join_paths(iter::once(rustdoc_path).chain(env::split_paths(&old_path))) @@ -2563,7 +2578,7 @@ fn run_cargo_test<'a>( let mut cargo = prepare_cargo_test(cargo, libtest_args, crates, target, builder); let _time = helpers::timeit(builder); let _group = description.into().and_then(|what| { - builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target) + builder.msg_rustc_tool(Kind::Test, compiler.stage, what, compiler.host, target) }); #[cfg(feature = "build-metrics")] diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index f5fa33b98f3bf..6204476456ceb 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -71,13 +71,9 @@ impl Builder<'_> { ) -> Option { match mode { // depends on compiler stage, different to host compiler - Mode::ToolRustc => self.msg_sysroot_tool( - kind, - build_stage, - format_args!("tool {tool}"), - *host, - *target, - ), + Mode::ToolRustc => { + self.msg_rustc_tool(kind, build_stage, format_args!("tool {tool}"), *host, *target) + } // doesn't depend on compiler, same as host compiler _ => self.msg(kind, build_stage, format_args!("tool {tool}"), *host, *target), } @@ -90,11 +86,8 @@ impl Builder<'_> { pub struct ToolBuildResult { /// Artifact path of the corresponding tool that was built. pub tool_path: PathBuf, - /// Compiler used to build the tool. For non-`ToolRustc` tools this is equal to `target_compiler`. - /// For `ToolRustc` this is one stage before of the `target_compiler`. + /// Compiler used to build the tool. pub build_compiler: Compiler, - /// Target compiler passed to `Step`. - pub target_compiler: Compiler, } impl Step for ToolBuild { @@ -108,22 +101,15 @@ impl Step for ToolBuild { /// /// This will build the specified tool with the specified `host` compiler in /// `stage` into the normal cargo output directory. - fn run(mut self, builder: &Builder<'_>) -> ToolBuildResult { + fn run(self, builder: &Builder<'_>) -> ToolBuildResult { let target = self.target; let mut tool = self.tool; let path = self.path; - let target_compiler = self.build_compiler; - self.build_compiler = if self.mode == Mode::ToolRustc { - get_tool_rustc_compiler(builder, self.build_compiler) - } else { - self.build_compiler - }; - match self.mode { Mode::ToolRustc => { - // If compiler was forced, its artifacts should have been prepared earlier. - if !self.build_compiler.is_forced_compiler() { + // FIXME: remove this, it's only needed for download-rustc... + if !self.build_compiler.is_forced_compiler() && builder.download_rustc() { builder.std(self.build_compiler, self.build_compiler.host); builder.ensure(compile::Rustc::new(self.build_compiler, target)); } @@ -184,8 +170,7 @@ impl Step for ToolBuild { Kind::Build, self.mode, self.tool, - // A stage N tool is built with the stage N-1 compiler. - self.build_compiler.stage + 1, + self.build_compiler.stage, &self.build_compiler.host, &self.target, ); @@ -216,7 +201,7 @@ impl Step for ToolBuild { .join(format!("lib{tool}.rlib")), }; - ToolBuildResult { tool_path, build_compiler: self.build_compiler, target_compiler } + ToolBuildResult { tool_path, build_compiler: self.build_compiler } } } } @@ -346,27 +331,6 @@ pub fn prepare_tool_cargo( cargo } -/// Handle stage-off logic for `ToolRustc` tools when necessary. 
-pub(crate) fn get_tool_rustc_compiler( - builder: &Builder<'_>, - target_compiler: Compiler, -) -> Compiler { - if target_compiler.is_forced_compiler() { - return target_compiler; - } - - if builder.download_rustc() && target_compiler.stage == 1 { - // We shouldn't drop to stage0 compiler when using CI rustc. - return builder.compiler(1, builder.config.host_target); - } - - // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise - // we'd have stageN/bin/rustc and stageN/bin/$rustc_tool be effectively different stage - // compilers, which isn't what we want. Rustc tools should be linked in the same way as the - // compiler it's paired with, so it must be built with the previous stage compiler. - builder.compiler(target_compiler.stage.saturating_sub(1), builder.config.host_target) -} - /// Determines how to build a `ToolTarget`, i.e. which compiler should be used to compile it. /// The compiler stage is automatically bumped if we need to cross-compile a stage 1 tool. pub enum ToolTargetBuildMode { @@ -719,15 +683,21 @@ impl Step for RemoteTestServer { } } -#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +/// Represents `Rustdoc` that either comes from the external stage0 sysroot or that is built +/// locally. +/// Rustdoc is special, because it essentially corresponds both to a `Compiler` (that can be +/// externally provided) and to a `ToolRustc` tool. +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Rustdoc { - /// This should only ever be 0 or 2. - /// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here. - pub compiler: Compiler, + /// If the stage of `target_compiler` is `0`, then rustdoc is externally provided. + /// Otherwise it is built locally. + pub target_compiler: Compiler, } impl Step for Rustdoc { - type Output = ToolBuildResult; + /// Path to the built rustdoc binary. + type Output = PathBuf; + const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; @@ -736,26 +706,25 @@ impl Step for Rustdoc { } fn make_run(run: RunConfig<'_>) { - run.builder - .ensure(Rustdoc { compiler: run.builder.compiler(run.builder.top_stage, run.target) }); + run.builder.ensure(Rustdoc { + target_compiler: run.builder.compiler(run.builder.top_stage, run.target), + }); } - fn run(self, builder: &Builder<'_>) -> ToolBuildResult { - let target_compiler = self.compiler; + fn run(self, builder: &Builder<'_>) -> Self::Output { + let target_compiler = self.target_compiler; let target = target_compiler.host; + // If stage is 0, we use a prebuilt rustdoc from stage0 if target_compiler.stage == 0 { if !target_compiler.is_snapshot(builder) { panic!("rustdoc in stage 0 must be snapshot rustdoc"); } - return ToolBuildResult { - tool_path: builder.initial_rustdoc.clone(), - build_compiler: target_compiler, - target_compiler, - }; + return builder.initial_rustdoc.clone(); } + // If stage is higher, we build rustdoc instead let bin_rustdoc = || { let sysroot = builder.sysroot(target_compiler); let bindir = sysroot.join("bin"); @@ -767,10 +736,7 @@ impl Step for Rustdoc { // If CI rustc is enabled and we haven't modified the rustdoc sources, // use the precompiled rustdoc from CI rustc's sysroot to speed up bootstrapping.
- if builder.download_rustc() - && target_compiler.stage > 0 - && builder.rust_info().is_managed_git_subrepository() - { + if builder.download_rustc() && builder.rust_info().is_managed_git_subrepository() { let files_to_track = &["src/librustdoc", "src/tools/rustdoc", "src/rustdoc-json-types"]; // Check if unchanged @@ -783,12 +749,7 @@ impl Step for Rustdoc { let bin_rustdoc = bin_rustdoc(); builder.copy_link(&precompiled_rustdoc, &bin_rustdoc, FileType::Executable); - - return ToolBuildResult { - tool_path: bin_rustdoc, - build_compiler: target_compiler, - target_compiler, - }; + return bin_rustdoc; } } @@ -804,9 +765,10 @@ impl Step for Rustdoc { extra_features.push("jemalloc".to_string()); } - let ToolBuildResult { tool_path, build_compiler, target_compiler } = - builder.ensure(ToolBuild { - build_compiler: target_compiler, + let compilers = RustcPrivateCompilers::from_target_compiler(builder, target_compiler); + let tool_path = builder + .ensure(ToolBuild { + build_compiler: compilers.build_compiler, target, // Cargo adds a number of paths to the dylib search path on windows, which results in // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" @@ -819,30 +781,23 @@ impl Step for Rustdoc { allow_features: "", cargo_args: Vec::new(), artifact_kind: ToolArtifactKind::Binary, - }); - - // don't create a stage0-sysroot/bin directory. - if target_compiler.stage > 0 { - if builder.config.rust_debuginfo_level_tools == DebuginfoLevel::None { - // Due to LTO a lot of debug info from C++ dependencies such as jemalloc can make it into - // our final binaries - compile::strip_debug(builder, target, &tool_path); - } - let bin_rustdoc = bin_rustdoc(); - builder.copy_link(&tool_path, &bin_rustdoc, FileType::Executable); - ToolBuildResult { tool_path: bin_rustdoc, build_compiler, target_compiler } - } else { - ToolBuildResult { tool_path, build_compiler, target_compiler } + }) + .tool_path; + + if builder.config.rust_debuginfo_level_tools == DebuginfoLevel::None { + // Due to LTO a lot of debug info from C++ dependencies such as jemalloc can make it into + // our final binaries + compile::strip_debug(builder, target, &tool_path); } + let bin_rustdoc = bin_rustdoc(); + builder.copy_link(&tool_path, &bin_rustdoc, FileType::Executable); + bin_rustdoc } fn metadata(&self) -> Option { Some( - StepMetadata::build("rustdoc", self.compiler.host) - // rustdoc is ToolRustc, so stage N rustdoc is built by stage N-1 rustc - // FIXME: make this stage deduction automatic somehow - // FIXME: log the compiler that actually built ToolRustc steps - .stage(self.compiler.stage.saturating_sub(1)), + StepMetadata::build("rustdoc", self.target_compiler.host) + .stage(self.target_compiler.stage), ) } } @@ -886,6 +841,14 @@ impl Step for Cargo { artifact_kind: ToolArtifactKind::Binary, }) } + + fn metadata(&self) -> Option { + // FIXME: fix staging logic + Some( + StepMetadata::build("cargo", self.target) + .built_by(self.compiler.with_stage(self.compiler.stage - 1)), + ) + } } /// Represents a built LldWrapper, the `lld-wrapper` tool itself, and a directory @@ -1066,8 +1029,13 @@ impl Step for WasmComponentLd { #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { - pub compiler: Compiler, - pub target: TargetSelection, + compilers: RustcPrivateCompilers, +} + +impl RustAnalyzer { + pub fn from_compilers(compilers: RustcPrivateCompilers) -> Self { + Self { compilers } + } } impl RustAnalyzer { @@ -1086,15 +1054,16 @@ impl Step for RustAnalyzer { fn make_run(run: 
RunConfig<'_>) { run.builder.ensure(RustAnalyzer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.host_target), - target: run.target, + compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target), }); } fn run(self, builder: &Builder<'_>) -> ToolBuildResult { + let build_compiler = self.compilers.build_compiler; + let target = self.compilers.target(); builder.ensure(ToolBuild { - build_compiler: self.compiler, - target: self.target, + build_compiler, + target, tool: "rust-analyzer", mode: Mode::ToolRustc, path: "src/tools/rust-analyzer", @@ -1105,16 +1074,29 @@ impl Step for RustAnalyzer { artifact_kind: ToolArtifactKind::Binary, }) } + + fn metadata(&self) -> Option { + Some( + StepMetadata::build("rust-analyzer", self.compilers.target()) + .built_by(self.compilers.build_compiler), + ) + } } #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzerProcMacroSrv { - pub compiler: Compiler, - pub target: TargetSelection, + compilers: RustcPrivateCompilers, +} + +impl RustAnalyzerProcMacroSrv { + pub fn from_compilers(compilers: RustcPrivateCompilers) -> Self { + Self { compilers } + } } impl Step for RustAnalyzerProcMacroSrv { - type Output = Option; + type Output = ToolBuildResult; + const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; @@ -1131,15 +1113,14 @@ impl Step for RustAnalyzerProcMacroSrv { fn make_run(run: RunConfig<'_>) { run.builder.ensure(RustAnalyzerProcMacroSrv { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.host_target), - target: run.target, + compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target), }); } - fn run(self, builder: &Builder<'_>) -> Option { + fn run(self, builder: &Builder<'_>) -> Self::Output { let tool_result = builder.ensure(ToolBuild { - build_compiler: self.compiler, - target: self.target, + build_compiler: self.compilers.build_compiler, + target: self.compilers.target(), tool: "rust-analyzer-proc-macro-srv", mode: Mode::ToolRustc, path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli", @@ -1152,7 +1133,7 @@ impl Step for RustAnalyzerProcMacroSrv { // Copy `rust-analyzer-proc-macro-srv` to `/libexec/` // so that r-a can use it. - let libexec_path = builder.sysroot(self.compiler).join("libexec"); + let libexec_path = builder.sysroot(self.compilers.target_compiler).join("libexec"); t!(fs::create_dir_all(&libexec_path)); builder.copy_link( &tool_result.tool_path, @@ -1160,7 +1141,14 @@ impl Step for RustAnalyzerProcMacroSrv { FileType::Executable, ); - Some(tool_result) + tool_result + } + + fn metadata(&self) -> Option { + Some( + StepMetadata::build("rust-analyzer-proc-macro-srv", self.compilers.target()) + .built_by(self.compilers.build_compiler), + ) } } @@ -1302,7 +1290,92 @@ impl Step for LibcxxVersionTool { } } -macro_rules! tool_extended { +/// Represents which compilers are involved in the compilation of a tool +/// that depends on compiler internals (`rustc_private`). +/// Their compilation looks like this: +/// +/// - `build_compiler` (stage N-1) builds `target_compiler` (stage N) to produce .rlibs +/// - These .rlibs are copied into the sysroot of `build_compiler` +/// - `build_compiler` (stage N-1) builds `` (stage N) +/// - `` links to .rlibs from `target_compiler` +/// +/// Eventually, this could also be used for .rmetas and check builds, but so far we only deal with +/// normal builds here. 
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct RustcPrivateCompilers { + /// Compiler that builds the tool and that builds `target_compiler`. + build_compiler: Compiler, + /// Compiler to which .rlib artifacts the tool links to. + /// The host target of this compiler corresponds to the target of the tool. + target_compiler: Compiler, +} + +impl RustcPrivateCompilers { + /// Create compilers for a `rustc_private` tool with the given `stage` and for the given + /// `target`. + pub fn new(builder: &Builder<'_>, stage: u32, target: TargetSelection) -> Self { + let build_compiler = Self::build_compiler_from_stage(builder, stage); + + // This is the compiler we'll link to + // FIXME: make 100% sure that `target_compiler` was indeed built with `build_compiler`... + let target_compiler = builder.compiler(build_compiler.stage + 1, target); + + Self { build_compiler, target_compiler } + } + + pub fn from_build_and_target_compiler( + build_compiler: Compiler, + target_compiler: Compiler, + ) -> Self { + Self { build_compiler, target_compiler } + } + + /// Create rustc tool compilers from the build compiler. + pub fn from_build_compiler( + builder: &Builder<'_>, + build_compiler: Compiler, + target: TargetSelection, + ) -> Self { + let target_compiler = builder.compiler(build_compiler.stage + 1, target); + Self { build_compiler, target_compiler } + } + + /// Create rustc tool compilers from the target compiler. + pub fn from_target_compiler(builder: &Builder<'_>, target_compiler: Compiler) -> Self { + Self { + build_compiler: Self::build_compiler_from_stage(builder, target_compiler.stage), + target_compiler, + } + } + + fn build_compiler_from_stage(builder: &Builder<'_>, stage: u32) -> Compiler { + assert!(stage > 0); + + if builder.download_rustc() && stage == 1 { + // We shouldn't drop to stage0 compiler when using CI rustc. + builder.compiler(1, builder.config.host_target) + } else { + builder.compiler(stage - 1, builder.config.host_target) + } + } + + pub fn build_compiler(&self) -> Compiler { + self.build_compiler + } + + pub fn target_compiler(&self) -> Compiler { + self.target_compiler + } + + /// Target of the tool being compiled + pub fn target(&self) -> TargetSelection { + self.target_compiler.host + } +} + +/// Creates a step that builds an extended `Mode::ToolRustc` tool +/// and installs it into the sysroot of a corresponding compiler. +macro_rules! tool_rustc_extended { ( $name:ident { path: $path:expr, @@ -1316,8 +1389,15 @@ macro_rules! tool_extended { ) => { #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { - pub compiler: Compiler, - pub target: TargetSelection, + compilers: RustcPrivateCompilers, + } + + impl $name { + pub fn from_compilers(compilers: RustcPrivateCompilers) -> Self { + Self { + compilers, + } + } } impl Step for $name { @@ -1326,7 +1406,7 @@ macro_rules! tool_extended { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - should_run_tool_build_step( + should_run_extended_rustc_tool( run, $tool_name, $path, @@ -1336,17 +1416,15 @@ macro_rules! 
tool_extended { fn make_run(run: RunConfig<'_>) { run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.host_target), - target: run.target, + compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target), }); } fn run(self, builder: &Builder<'_>) -> ToolBuildResult { - let Self { compiler, target } = self; - run_tool_build_step( + let Self { compilers } = self; + build_extended_rustc_tool( builder, - compiler, - target, + compilers, $tool_name, $path, None $( .or(Some(&$add_bins_to_sysroot)) )?, @@ -1356,18 +1434,16 @@ macro_rules! tool_extended { } fn metadata(&self) -> Option { - // FIXME: refactor extended tool steps to make the build_compiler explicit, - // it is offset by one now for rustc tools Some( - StepMetadata::build($tool_name, self.target) - .built_by(self.compiler.with_stage(self.compiler.stage.saturating_sub(1))) + StepMetadata::build($tool_name, self.compilers.target()) + .built_by(self.compilers.build_compiler) ) } } } } -fn should_run_tool_build_step<'a>( +fn should_run_extended_rustc_tool<'a>( run: ShouldRun<'a>, tool_name: &'static str, path: &'static str, @@ -1391,39 +1467,38 @@ fn should_run_tool_build_step<'a>( ) } -#[expect(clippy::too_many_arguments)] // silence overeager clippy lint -fn run_tool_build_step( +fn build_extended_rustc_tool( builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, + compilers: RustcPrivateCompilers, tool_name: &'static str, path: &'static str, add_bins_to_sysroot: Option<&[&str]>, add_features: Option, TargetSelection, &mut Vec)>, cargo_args: Option<&[&'static str]>, ) -> ToolBuildResult { + let target = compilers.target(); let mut extra_features = Vec::new(); if let Some(func) = add_features { func(builder, target, &mut extra_features); } - let ToolBuildResult { tool_path, build_compiler, target_compiler } = - builder.ensure(ToolBuild { - build_compiler: compiler, - target, - tool: tool_name, - mode: Mode::ToolRustc, - path, - extra_features, - source_type: SourceType::InTree, - allow_features: "", - cargo_args: cargo_args.unwrap_or_default().iter().map(|s| String::from(*s)).collect(), - artifact_kind: ToolArtifactKind::Binary, - }); - + let build_compiler = compilers.build_compiler; + let ToolBuildResult { tool_path, .. } = builder.ensure(ToolBuild { + build_compiler, + target, + tool: tool_name, + mode: Mode::ToolRustc, + path, + extra_features, + source_type: SourceType::InTree, + allow_features: "", + cargo_args: cargo_args.unwrap_or_default().iter().map(|s| String::from(*s)).collect(), + artifact_kind: ToolArtifactKind::Binary, + }); + + let target_compiler = compilers.target_compiler; if let Some(add_bins_to_sysroot) = add_bins_to_sysroot && !add_bins_to_sysroot.is_empty() - && target_compiler.stage > 0 { let bindir = builder.sysroot(target_compiler).join("bin"); t!(fs::create_dir_all(&bindir)); @@ -1435,25 +1510,25 @@ fn run_tool_build_step( // Return a path into the bin dir. 
let path = bindir.join(exe(tool_name, target_compiler.host)); - ToolBuildResult { tool_path: path, build_compiler, target_compiler } + ToolBuildResult { tool_path: path, build_compiler } } else { - ToolBuildResult { tool_path, build_compiler, target_compiler } + ToolBuildResult { tool_path, build_compiler } } } -tool_extended!(Cargofmt { +tool_rustc_extended!(Cargofmt { path: "src/tools/rustfmt", tool_name: "cargo-fmt", stable: true, add_bins_to_sysroot: ["cargo-fmt"] }); -tool_extended!(CargoClippy { +tool_rustc_extended!(CargoClippy { path: "src/tools/clippy", tool_name: "cargo-clippy", stable: true, add_bins_to_sysroot: ["cargo-clippy"] }); -tool_extended!(Clippy { +tool_rustc_extended!(Clippy { path: "src/tools/clippy", tool_name: "clippy-driver", stable: true, @@ -1464,7 +1539,7 @@ tool_extended!(Clippy { } } }); -tool_extended!(Miri { +tool_rustc_extended!(Miri { path: "src/tools/miri", tool_name: "miri", stable: false, @@ -1472,13 +1547,13 @@ tool_extended!(Miri { // Always compile also tests when building miri. Otherwise feature unification can cause rebuilds between building and testing miri. cargo_args: &["--all-targets"], }); -tool_extended!(CargoMiri { +tool_rustc_extended!(CargoMiri { path: "src/tools/miri/cargo-miri", tool_name: "cargo-miri", stable: false, add_bins_to_sysroot: ["cargo-miri"] }); -tool_extended!(Rustfmt { +tool_rustc_extended!(Rustfmt { path: "src/tools/rustfmt", tool_name: "rustfmt", stable: true, diff --git a/src/bootstrap/src/core/builder/cargo.rs b/src/bootstrap/src/core/builder/cargo.rs index badd5f24dba7e..d676cfecd6813 100644 --- a/src/bootstrap/src/core/builder/cargo.rs +++ b/src/bootstrap/src/core/builder/cargo.rs @@ -508,7 +508,7 @@ impl Builder<'_> { } _ => panic!("doc mode {mode:?} not expected"), }; - let rustdoc = self.rustdoc(compiler); + let rustdoc = self.rustdoc_for_compiler(compiler); build_stamp::clear_if_dirty(self, &my_out, &rustdoc); } @@ -822,7 +822,7 @@ impl Builder<'_> { } let rustdoc_path = match cmd_kind { - Kind::Doc | Kind::Test | Kind::MiriTest => self.rustdoc(compiler), + Kind::Doc | Kind::Test | Kind::MiriTest => self.rustdoc_for_compiler(compiler), _ => PathBuf::from("/path/to/nowhere/rustdoc/not/required"), }; diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index 923c3a9a93501..98deb66030267 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -16,6 +16,7 @@ use tracing::instrument; pub use self::cargo::{Cargo, cargo_profile_var}; pub use crate::Compiler; use crate::core::build_steps::compile::{Std, StdLink}; +use crate::core::build_steps::tool::RustcPrivateCompilers; use crate::core::build_steps::{ check, clean, clippy, compile, dist, doc, gcc, install, llvm, run, setup, test, tool, vendor, }; @@ -141,7 +142,7 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { #[allow(unused)] #[derive(Debug, PartialEq, Eq)] pub struct StepMetadata { - name: &'static str, + name: String, kind: Kind, target: TargetSelection, built_by: Option, @@ -151,28 +152,28 @@ pub struct StepMetadata { } impl StepMetadata { - pub fn build(name: &'static str, target: TargetSelection) -> Self { + pub fn build(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Build) } - pub fn check(name: &'static str, target: TargetSelection) -> Self { + pub fn check(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Check) } - pub fn doc(name: &'static str, target: TargetSelection) -> Self { + pub fn doc(name: &str, 
target: TargetSelection) -> Self { Self::new(name, target, Kind::Doc) } - pub fn dist(name: &'static str, target: TargetSelection) -> Self { + pub fn dist(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Dist) } - pub fn test(name: &'static str, target: TargetSelection) -> Self { + pub fn test(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Test) } - fn new(name: &'static str, target: TargetSelection, kind: Kind) -> Self { - Self { name, kind, target, built_by: None, stage: None, metadata: None } + fn new(name: &str, target: TargetSelection, kind: Kind) -> Self { + Self { name: name.to_string(), kind, target, built_by: None, stage: None, metadata: None } } pub fn built_by(mut self, compiler: Compiler) -> Self { @@ -1033,6 +1034,7 @@ impl<'a> Builder<'a> { check::Compiletest, check::FeaturesStatusDump, check::CoverageDump, + check::Linkchecker, // This has special staging logic, it may run on stage 1 while others run on stage 0. // It takes quite some time to build stage 1, so put this at the end. // @@ -1534,8 +1536,11 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s .map(|entry| entry.path()) } - pub fn rustdoc(&self, compiler: Compiler) -> PathBuf { - self.ensure(tool::Rustdoc { compiler }).tool_path + /// Returns a path to `Rustdoc` that "belongs" to the `target_compiler`. + /// It can be either a stage0 rustdoc or a locally built rustdoc that *links* to + /// `target_compiler`. + pub fn rustdoc_for_compiler(&self, target_compiler: Compiler) -> PathBuf { + self.ensure(tool::Rustdoc { target_compiler }) } pub fn cargo_clippy_cmd(&self, run_compiler: Compiler) -> BootstrapCommand { @@ -1551,10 +1556,13 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s return cmd; } - let _ = - self.ensure(tool::Clippy { compiler: run_compiler, target: self.build.host_target }); - let cargo_clippy = self - .ensure(tool::CargoClippy { compiler: run_compiler, target: self.build.host_target }); + // FIXME: double check that `run_compiler`'s stage is what we want to use + let compilers = + RustcPrivateCompilers::new(self, run_compiler.stage, self.build.host_target); + assert_eq!(run_compiler, compilers.target_compiler()); + + let _ = self.ensure(tool::Clippy::from_compilers(compilers)); + let cargo_clippy = self.ensure(tool::CargoClippy::from_compilers(compilers)); let mut dylib_path = helpers::dylib_path(); dylib_path.insert(0, self.sysroot(run_compiler).join("lib")); @@ -1566,11 +1574,14 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s pub fn cargo_miri_cmd(&self, run_compiler: Compiler) -> BootstrapCommand { assert!(run_compiler.stage > 0, "miri can not be invoked at stage 0"); + + let compilers = + RustcPrivateCompilers::new(self, run_compiler.stage, self.build.host_target); + assert_eq!(run_compiler, compilers.target_compiler()); + // Prepare the tools - let miri = - self.ensure(tool::Miri { compiler: run_compiler, target: self.build.host_target }); - let cargo_miri = - self.ensure(tool::CargoMiri { compiler: run_compiler, target: self.build.host_target }); + let miri = self.ensure(tool::Miri::from_compilers(compilers)); + let cargo_miri = self.ensure(tool::CargoMiri::from_compilers(compilers)); // Invoke cargo-miri, make sure it can find miri and cargo. 
let mut cmd = command(cargo_miri.tool_path); cmd.env("MIRI", &miri.tool_path); @@ -1595,7 +1606,7 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s // equivalently to rustc. .env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler)) .env("CFG_RELEASE_CHANNEL", &self.config.channel) - .env("RUSTDOC_REAL", self.rustdoc(compiler)) + .env("RUSTDOC_REAL", self.rustdoc_for_compiler(compiler)) .env("RUSTC_BOOTSTRAP", "1"); cmd.arg("-Wrustdoc::invalid_codeblock_attributes"); @@ -1704,11 +1715,11 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s /// Ensure that a given step is built *only if it's supposed to be built by default*, returning /// its output. This will cache the step, so it's safe (and good!) to call this as often as /// needed to ensure that all dependencies are build. - pub(crate) fn ensure_if_default>>( + pub(crate) fn ensure_if_default>( &'a self, step: S, kind: Kind, - ) -> S::Output { + ) -> Option { let desc = StepDescription::from::(kind); let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind)); @@ -1720,7 +1731,7 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s } // Only execute if it's supposed to run as default - if desc.default && should_run.is_really_default() { self.ensure(step) } else { None } + if desc.default && should_run.is_really_default() { Some(self.ensure(step)) } else { None } } /// Checks if any of the "should_run" paths is in the `Builder` paths. diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index 6ea5d4e655328..86d8e9afe8714 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -279,13 +279,6 @@ mod defaults { first(cache.all::()), &[tool::ErrorIndex { compiler: Compiler::new(1, a) }] ); - // docs should be built with the stage0 compiler, not with the stage0 artifacts. - // recall that rustdoc is off-by-one: `stage` is the compiler rustdoc is _linked_ to, - // not the one it was built by. - assert_eq!( - first(cache.all::()), - &[tool::Rustdoc { compiler: Compiler::new(1, a) },] - ); } } @@ -337,12 +330,6 @@ mod dist { first(builder.cache.all::()), &[tool::ErrorIndex { compiler: Compiler::new(1, a) }] ); - // This is actually stage 1, but Rustdoc::run swaps out the compiler with - // stage minus 1 if --stage is not 0. Very confusing! 
- assert_eq!( - first(builder.cache.all::()), - &[tool::Rustdoc { compiler: Compiler::new(2, a) },] - ); } } @@ -569,36 +556,6 @@ fn test_is_builder_target() { } } -#[test] -fn test_get_tool_rustc_compiler() { - let mut config = configure("build", &[], &[]); - config.download_rustc_commit = None; - let build = Build::new(config); - let builder = Builder::new(&build); - - let target_triple_1 = TargetSelection::from_user(TEST_TRIPLE_1); - - let compiler = Compiler::new(2, target_triple_1); - let expected = Compiler::new(1, target_triple_1); - let actual = tool::get_tool_rustc_compiler(&builder, compiler); - assert_eq!(expected, actual); - - let compiler = Compiler::new(1, target_triple_1); - let expected = Compiler::new(0, target_triple_1); - let actual = tool::get_tool_rustc_compiler(&builder, compiler); - assert_eq!(expected, actual); - - let mut config = configure("build", &[], &[]); - config.download_rustc_commit = Some("".to_owned()); - let build = Build::new(config); - let builder = Builder::new(&build); - - let compiler = Compiler::new(1, target_triple_1); - let expected = Compiler::new(1, target_triple_1); - let actual = tool::get_tool_rustc_compiler(&builder, compiler); - assert_eq!(expected, actual); -} - /// When bootstrap detects a step dependency cycle (which is a bug), its panic /// message should show the actual steps on the stack, not just several copies /// of `Any { .. }`. @@ -657,7 +614,7 @@ mod snapshot { [build] llvm [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 - [build] rustdoc 0 + [build] rustdoc 1 "); } @@ -680,10 +637,10 @@ mod snapshot { [build] rustc 2 -> std 2 [build] rustc 1 -> std 1 [build] rustc 2 -> std 2 - [build] rustdoc 1 + [build] rustdoc 2 [build] llvm [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 "); } @@ -762,6 +719,23 @@ mod snapshot { "); } + #[test] + fn build_compiler_codegen_backend() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx + .config("build") + .args(&["--set", "rust.codegen-backends=['llvm', 'cranelift']"]) + .render_steps(), @r" + [build] llvm + [build] rustc 0 -> rustc 1 + [build] rustc 0 -> rustc_codegen_cranelift 1 + [build] rustc 1 -> std 1 + [build] rustdoc 1 + " + ); + } + #[test] fn build_compiler_tools() { let ctx = TestCtx::new(); @@ -780,7 +754,7 @@ mod snapshot { [build] rustc 1 -> LldWrapper 2 [build] rustc 1 -> LlvmBitcodeLinker 2 [build] rustc 2 -> std 2 - [build] rustdoc 1 + [build] rustdoc 2 " ); } @@ -809,7 +783,7 @@ mod snapshot { [build] rustc 1 -> rustc 2 [build] rustc 1 -> LldWrapper 2 [build] rustc 1 -> LlvmBitcodeLinker 2 - [build] rustdoc 1 + [build] rustdoc 2 " ); } @@ -998,7 +972,7 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] "); } @@ -1047,7 +1021,7 @@ mod snapshot { [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 [build] rustc 0 -> LintDocs 1 @@ -1089,7 +1063,7 @@ mod snapshot { [build] rustc 1 -> LldWrapper 2 [build] rustc 1 -> WasmComponentLd 2 [build] rustc 1 -> LlvmBitcodeLinker 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 
2 [build] rustc 0 -> LintDocs 1 @@ -1097,16 +1071,19 @@ mod snapshot { [dist] docs [doc] std 2 crates=[] [dist] mingw + [build] rustc 1 -> rust-analyzer-proc-macro-srv 2 [build] rustc 0 -> GenerateCopyright 1 [dist] rustc [dist] rustc 1 -> std 1 [dist] src <> - [build] rustc 0 -> rustfmt 1 - [build] rustc 0 -> cargo-fmt 1 - [build] rustc 0 -> clippy-driver 1 - [build] rustc 0 -> cargo-clippy 1 - [build] rustc 0 -> miri 1 - [build] rustc 0 -> cargo-miri 1 + [build] rustc 0 -> cargo 1 + [build] rustc 1 -> rust-analyzer 2 + [build] rustc 1 -> rustfmt 2 + [build] rustc 1 -> cargo-fmt 2 + [build] rustc 1 -> clippy-driver 2 + [build] rustc 1 -> cargo-clippy 2 + [build] rustc 1 -> miri 2 + [build] rustc 1 -> cargo-miri 2 "); } @@ -1125,7 +1102,7 @@ mod snapshot { [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 @@ -1162,7 +1139,7 @@ mod snapshot { [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 [build] rustc 0 -> LintDocs 1 @@ -1176,7 +1153,7 @@ mod snapshot { [dist] rustc [build] llvm [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [dist] rustc [dist] rustc 1 -> std 1 [dist] src <> @@ -1199,7 +1176,7 @@ mod snapshot { [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 @@ -1217,7 +1194,7 @@ mod snapshot { [dist] rustc [build] llvm [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [dist] rustc [dist] rustc 1 -> std 1 [dist] rustc 1 -> std 1 @@ -1241,7 +1218,7 @@ mod snapshot { [build] rustc 0 -> rustc 1 [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 [build] rustc 0 -> RustInstaller 1 @@ -1273,7 +1250,7 @@ mod snapshot { [build] rustc 1 -> std 1 [build] rustc 1 -> rustc 2 [build] rustc 1 -> WasmComponentLd 2 - [build] rustdoc 1 + [build] rustdoc 2 [doc] std 2 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] [build] rustc 2 -> std 2 [build] rustc 1 -> std 1 @@ -1286,17 +1263,20 @@ mod snapshot { [build] llvm [build] rustc 1 -> rustc 2 [build] rustc 1 -> WasmComponentLd 2 - [build] rustdoc 1 + [build] rustdoc 2 + [build] rustc 1 -> rust-analyzer-proc-macro-srv 2 [build] rustc 0 -> GenerateCopyright 1 [dist] rustc [dist] rustc 1 -> std 1 [dist] src <> - [build] rustc 0 -> rustfmt 1 - [build] rustc 0 -> cargo-fmt 1 - [build] rustc 0 -> clippy-driver 1 - [build] rustc 0 -> cargo-clippy 1 - [build] rustc 0 -> miri 1 - [build] rustc 0 -> cargo-miri 1 + [build] rustc 0 -> cargo 1 + [build] rustc 1 -> rust-analyzer 2 + [build] rustc 1 -> rustfmt 2 + 
[build] rustc 1 -> cargo-fmt 2 + [build] rustc 1 -> clippy-driver 2 + [build] rustc 1 -> cargo-clippy 2 + [build] rustc 1 -> miri 2 + [build] rustc 1 -> cargo-miri 2 [build] rustc 1 -> LlvmBitcodeLinker 2 "); } @@ -1619,7 +1599,7 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,std_detect,sysroot,test,unwind] "); } @@ -1633,7 +1613,7 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[core] "); } @@ -1648,7 +1628,7 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[core] "); } @@ -1678,7 +1658,7 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[alloc,core] "); } @@ -1694,10 +1674,105 @@ mod snapshot { .render_steps(), @r" [build] llvm [build] rustc 0 -> rustc 1 - [build] rustdoc 0 + [build] rustdoc 1 [doc] std 1 crates=[alloc,core] "); } + + #[test] + fn doc_compiler_stage_0() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("compiler") + .stage(0) + .render_steps(), @r" + [build] rustdoc 0 + [build] llvm + [doc] rustc 0 + "); + } + + #[test] + fn doc_compiler_stage_1() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("compiler") + .stage(1) + .render_steps(), @r" + [build] llvm + [build] rustc 0 -> rustc 1 + [build] rustc 1 -> std 1 + [build] rustdoc 1 + [doc] rustc 1 + "); + } + + #[test] + fn doc_compiler_stage_2() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("compiler") + .stage(2) + .render_steps(), @r" + [build] llvm + [build] rustc 0 -> rustc 1 + [build] rustc 1 -> std 1 + [build] rustc 1 -> rustc 2 + [build] rustc 2 -> std 2 + [build] rustdoc 2 + [doc] rustc 2 + "); + } + + #[test] + fn doc_compiletest_stage_0() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("src/tools/compiletest") + .stage(0) + .render_steps(), @r" + [build] rustdoc 0 + [doc] Compiletest + "); + } + + #[test] + fn doc_compiletest_stage_1() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("src/tools/compiletest") + .stage(1) + .render_steps(), @r" + [build] llvm + [build] rustc 0 -> rustc 1 + [build] rustc 1 -> std 1 + [build] rustdoc 1 + [doc] Compiletest + "); + } + + #[test] + fn doc_compiletest_stage_2() { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("doc") + .path("src/tools/compiletest") + .stage(2) + .render_steps(), @r" + [build] llvm + [build] rustc 0 -> rustc 1 + [build] rustc 1 -> std 1 + [build] rustc 1 -> rustc 2 + [build] rustc 2 -> std 2 + [build] rustdoc 2 + [doc] Compiletest + "); + } } struct ExecutedSteps { diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 51a84ad5272c9..9eb82cd2e55b5 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -1060,17 +1060,6 @@ impl Build { self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into()) } - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_build( - &self, - compiler: Compiler, - what: impl Display, - target: impl Into>, - ) -> Option { - self.msg(Kind::Build, compiler.stage, what, compiler.host, target) - } - /// Return a `Group` 
guard for a [`Step`] that is built for each `--stage`. /// /// [`Step`]: crate::core::builder::Step @@ -1117,20 +1106,21 @@ impl Build { #[must_use = "Groups should not be dropped until the Step finishes running"] #[track_caller] - fn msg_sysroot_tool( + fn msg_rustc_tool( &self, action: impl Into, - stage: u32, + build_stage: u32, what: impl Display, host: TargetSelection, target: TargetSelection, ) -> Option { let action = action.into().description(); let msg = |fmt| format!("{action} {what} {fmt}"); + let msg = if host == target { - msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1)) + msg(format_args!("(stage{build_stage} -> stage{}, {target})", build_stage + 1)) } else { - msg(format_args!("(stage{stage}:{host} -> stage{}:{target})", stage + 1)) + msg(format_args!("(stage{build_stage}:{host} -> stage{}:{target})", build_stage + 1)) }; self.group(&msg) } diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index ce9f984e637b5..b631041b6bfae 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -efd420c770bb179537c01063e98cb6990c439654 +2b5e239c6b86cde974b0ef0f8e23754fb08ff3c5 diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md index 651e2925ad504..e3c0d50fcc737 100644 --- a/src/doc/rustc-dev-guide/src/SUMMARY.md +++ b/src/doc/rustc-dev-guide/src/SUMMARY.md @@ -53,7 +53,8 @@ - [Walkthrough: a typical contribution](./walkthrough.md) - [Implementing new language features](./implementing_new_features.md) - [Stability attributes](./stability.md) -- [Stabilizing Features](./stabilization_guide.md) +- [Stabilizing language features](./stabilization_guide.md) + - [Stabilization report template](./stabilization_report_template.md) - [Feature Gates](./feature-gates.md) - [Coding conventions](./conventions.md) - [Procedures for breaking changes](./bug-fix-procedure.md) diff --git a/src/doc/rustc-dev-guide/src/implementing_new_features.md b/src/doc/rustc-dev-guide/src/implementing_new_features.md index 5d0e875cbc18c..76cf2386c826a 100644 --- a/src/doc/rustc-dev-guide/src/implementing_new_features.md +++ b/src/doc/rustc-dev-guide/src/implementing_new_features.md @@ -2,145 +2,91 @@ -When you want to implement a new significant feature in the compiler, -you need to go through this process to make sure everything goes -smoothly. +When you want to implement a new significant feature in the compiler, you need to go through this process to make sure everything goes smoothly. -**NOTE: this section is for *language* features, not *library* features, -which use [a different process].** +**NOTE: This section is for *language* features, not *library* features, which use [a different process].** -See also [the Rust Language Design Team's procedures][lang-propose] for -proposing changes to the language. +See also [the Rust Language Design Team's procedures][lang-propose] for proposing changes to the language. [a different process]: ./stability.md [lang-propose]: https://lang-team.rust-lang.org/how_to/propose.html ## The @rfcbot FCP process -When the change is small and uncontroversial, then it can be done -with just writing a PR and getting an r+ from someone who knows that -part of the code. However, if the change is potentially controversial, -it would be a bad idea to push it without consensus from the rest -of the team (both in the "distributed system" sense to make sure -you don't break anything you don't know about, and in the social -sense to avoid PR fights). 
- -If such a change seems to be too small to require a full formal RFC process -(e.g., a small standard library addition, a big refactoring of the code, a -"technically-breaking" change, or a "big bugfix" that basically amounts to a -small feature) but is still too controversial or big to get by with a single r+, -you can propose a final comment period (FCP). Or, if you're not on the relevant -team (and thus don't have @rfcbot permissions), ask someone who is to start one; -unless they have a concern themselves, they should. - -Again, the FCP process is only needed if you need consensus – if you -don't think anyone would have a problem with your change, it's OK to -get by with only an r+. For example, it is OK to add or modify -unstable command-line flags or attributes without an FCP for -compiler development or standard library use, as long as you don't -expect them to be in wide use in the nightly ecosystem. -Some teams have lighter weight processes that they use in scenarios -like this; for example, the compiler team recommends -filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to -garner support and feedback without requiring full consensus. +When the change is small, uncontroversial, non-breaking, and does not affect the stable language in any user-observable ways or add any new unstable features, then it can be done with just writing a PR and getting an r+ from someone who knows that part of the code. However, if not, more must be done. Even for compiler-internal work, it would be a bad idea to push a controversial change without consensus from the rest of the team (both in the "distributed system" sense to make sure you don't break anything you don't know about, and in the social sense to avoid PR fights). + +For changes that need the consensus of a team, we use the process of proposing a final comment period (FCP). If you're not on the relevant team (and thus don't have @rfcbot permissions), ask someone who is to start one; unless they have a concern themselves, they should. + +The FCP process is only needed if you need consensus – if no processes require consensus for your change and you don't think anyone would have a problem with it, it's OK to rely on only an r+. For example, it is OK to add or modify unstable command-line flags or attributes in the reserved compiler-internal `rustc_` namespace without an FCP for compiler development or standard library use, as long as you don't expect them to be in wide use in the nightly ecosystem. Some teams have lighter weight processes that they use in scenarios like this; for example, the compiler team recommends filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to garner support and feedback without requiring full consensus. [mcp]: https://forge.rust-lang.org/compiler/proposals-and-stabilization.html#how-do-i-submit-an-mcp -You don't need to have the implementation fully ready for r+ to propose an FCP, -but it is generally a good idea to have at least a proof -of concept so that people can see what you are talking about. +You don't need to have the implementation fully ready for r+ to propose an FCP, but it is generally a good idea to have at least a proof of concept so that people can see what you are talking about. -When an FCP is proposed, it requires all members of the team to sign off the -FCP. After they all do so, there's a 10-day-long "final comment period" (hence -the name) where everybody can comment, and if no concerns are raised, the -PR/issue gets FCP approval.
+When an FCP is proposed, it requires all members of the team to sign off on the FCP. After they all do so, there's a 10-day-long "final comment period" (hence the name) where everybody can comment, and if no concerns are raised, the PR/issue gets FCP approval. ## The logistics of writing features -There are a few "logistic" hoops you might need to go through in -order to implement a feature in a working way. +There are a few "logistical" hoops you might need to go through in order to implement a feature in a working way. ### Warning Cycles -In some cases, a feature or bugfix might break some existing programs -in some edge cases. In that case, you might want to do a crater run -to assess the impact and possibly add a future-compatibility lint, -similar to those used for -[edition-gated lints](diagnostics.md#edition-gated-lints). +In some cases, a feature or bugfix might break some existing programs in some edge cases. In that case, you'll want to do a crater run to assess the impact and possibly add a future-compatibility lint, similar to those used for [edition-gated lints](diagnostics.md#edition-gated-lints). ### Stability -We [value the stability of Rust]. Code that works and runs on stable -should (mostly) not break. Because of that, we don't want to release -a feature to the world with only team consensus and code review - -we want to gain real-world experience on using that feature on nightly, -and we might want to change the feature based on that experience. - -To allow for that, we must make sure users don't accidentally depend -on that new feature - otherwise, especially if experimentation takes -time or is delayed and the feature takes the trains to stable, -it would end up de facto stable and we'll not be able to make changes -in it without breaking people's code. - -The way we do that is that we make sure all new features are feature -gated - they can't be used without enabling a feature gate -(`#[feature(foo)]`), which can't be done in a stable/beta compiler. -See the [stability in code] section for the technical details. +We [value the stability of Rust]. Code that works and runs on stable should (mostly) not break. Because of that, we don't want to release a feature to the world with only team consensus and code review - we want to gain real-world experience on using that feature on nightly, and we might want to change the feature based on that experience. + +To allow for that, we must make sure users don't accidentally depend on that new feature - otherwise, especially if experimentation takes time or is delayed and the feature takes the trains to stable, it would end up de facto stable and we'll not be able to make changes in it without breaking people's code. + +The way we do that is that we make sure all new features are feature gated - they can't be used without enabling a feature gate (`#![feature(foo)]`), which can't be done in a stable/beta compiler. See the [stability in code] section for the technical details.
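As a small illustration of what that gating looks like from a user's point of view (the gate name `my_feature` here is made up purely for the example), a nightly crate has to opt in explicitly, and the same opt-in is rejected by a stable or beta compiler:

```rust,ignore
// Crate-level opt-in to a hypothetical unstable feature. Only a nightly
// compiler accepts this attribute; on stable or beta the `#![feature]`
// attribute itself is an error, which is what keeps unstable features
// from leaking into code that must keep compiling on stable.
#![feature(my_feature)]

fn main() {
    // ... code that uses the hypothetical unstable feature ...
}
```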
+ +Eventually, after we gain enough experience using the feature, make the necessary changes, and are satisfied, we expose it to the world using the stabilization process described [here]. Until then, the feature is not set in stone: every part of the feature can be changed, or the feature might be completely rewritten or removed. Features do not gain tenure by being unstable and unchanged for long periods of time. ### Tracking Issues -To keep track of the status of an unstable feature, the -experience we get while using it on nightly, and of the -concerns that block its stabilization, every feature-gate -needs a tracking issue. General discussions about the feature should be done on the tracking issue. +To keep track of the status of an unstable feature, the experience we get while using it on +nightly, and of the concerns that block its stabilization, every feature-gate needs a tracking +issue. When creating issues and PRs related to the feature, reference this tracking issue, and when there are updates about the feature's progress, post those to the tracking issue. -For features that have an RFC, you should use the RFC's -tracking issue for the feature. +For features that are part of an accepted RFC or approved lang experiment, use the tracking issue for that. -For other features, you'll have to make a tracking issue -for that feature. The issue title should be "Tracking issue -for YOUR FEATURE". Use the ["Tracking Issue" issue template][template]. +For other features, create a tracking issue for that feature. The issue title should be "Tracking issue for YOUR FEATURE". Use the ["Tracking Issue" issue template][template]. [template]: https://github.com/rust-lang/rust/issues/new?template=tracking_issue.md +### Lang experiments + +To land in the compiler, features that have user-visible effects on the language (even unstable ones) must either be part of an accepted RFC or an approved [lang experiment]. + +To propose a new lang experiment, open an issue in `rust-lang/rust` that describes the motivation and the intended solution. If it's accepted, this issue will become the tracking issue for the experiment, so use the tracking issue [template] while also including these other details. Nominate the issue for the lang team and CC `@rust-lang/lang` and `@rust-lang/lang-advisors`. When the experiment is approved, the tracking issue will be marked as `B-experimental`. + +Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature. + +[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html + ## Stability in code -The below steps needs to be followed in order to implement -a new unstable feature: +The below steps need to be followed in order to implement a new unstable feature: -1. Open a [tracking issue] - - if you have an RFC, you can use the tracking issue for the RFC. +1. Open or identify the [tracking issue]. For features that are part of an accepted RFC or approved lang experiment, use the tracking issue for that. - The tracking issue should be labeled with at least `C-tracking-issue`. - For a language feature, a label `F-feature_name` should be added as well. + Label the tracking issue with `C-tracking-issue` and the relevant `F-feature_name` label (adding that label if needed). -1. Pick a name for the feature gate (for RFCs, use the name - in the RFC). +1. Pick a name for the feature gate (for RFCs, use the name in the RFC). 1. Add the feature name to `rustc_span/src/symbol.rs` in the `Symbols {...}` block.
Note that this block must be in alphabetical order. -1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable - `declare_features` block. +1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable `declare_features` block. ```rust ignore /// description of feature (unstable, $feature_name, "CURRENT_RUSTC_VERSION", Some($tracking_issue_number)) ``` - If you haven't yet - opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely - to be accepted), you can temporarily use `None` - but make sure to update it before the PR is - merged! + If you haven't yet opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely to be accepted), you can temporarily use `None` - but make sure to update it before the PR is merged! For example: @@ -149,9 +95,7 @@ a new unstable feature: (unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None), ``` - Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` - lint] - by setting their type to `incomplete`: + Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` lint] by setting their type to `incomplete`: [`incomplete_features` lint]: https://doc.rust-lang.org/rustc/lints/listing/warn-by-default.html#incomplete-features @@ -160,42 +104,27 @@ a new unstable feature: (incomplete, deref_patterns, "CURRENT_RUSTC_VERSION", Some(87121), None), ``` - To avoid [semantic merge conflicts], please use `CURRENT_RUSTC_VERSION` instead of `1.70` or - another explicit version number. + Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature. + + To avoid [semantic merge conflicts], use `CURRENT_RUSTC_VERSION` instead of `1.70` or another explicit version number. [semantic merge conflicts]: https://bors.tech/essay/2017/02/02/pitch/ -1. Prevent usage of the new feature unless the feature gate is set. - You can check it in most places in the compiler using the - expression `tcx.features().$feature_name()` +1. Prevent usage of the new feature unless the feature gate is set. You can check it in most places in the compiler using the expression `tcx.features().$feature_name()`. + + If the feature gate is not set, you should either maintain the pre-feature behavior or raise an error, depending on what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. For an example of adding an error, see [#81015]. - If the feature gate is not set, you should either maintain - the pre-feature behavior or raise an error, depending on - what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. - For an example of adding an error, see [#81015]. + For features introducing new syntax, pre-expansion gating should be used instead. During parsing, when the new syntax is parsed, the symbol must be inserted to the current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - For features introducing new syntax, pre-expansion gating should be used instead. - During parsing, when the new syntax is parsed, the symbol must be inserted to the - current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - - After being inserted to the gated spans, the span must be checked in the - [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies - features. 
Exactly how it is gated depends on the exact type of feature, but most - likely will use the `gate_all!()` macro. + After being inserted to the gated spans, the span must be checked in the [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies features. Exactly how it is gated depends on the exact type of feature, but most likely will use the `gate_all!()` macro. -1. Add a test to ensure the feature cannot be used without - a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. - You can generate the corresponding `.stderr` file by running `./x test -tests/ui/feature-gates/ --bless`. +1. Add a test to ensure the feature cannot be used without a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. You can generate the corresponding `.stderr` file by running `./x test tests/ui/feature-gates/ --bless`. -1. Add a section to the unstable book, in - `src/doc/unstable-book/src/language-features/$feature_name.md`. +1. Add a section to the unstable book, in `src/doc/unstable-book/src/language-features/$feature_name.md`. -1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. - PRs without tests will not be accepted! +1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. PRs without tests will not be accepted! -1. Get your PR reviewed and land it. You have now successfully - implemented a feature in Rust! +1. Get your PR reviewed and land it. You have now successfully implemented a feature in Rust! [`GatedSpans`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_session/parse/struct.GatedSpans.html [#81015]: https://github.com/rust-lang/rust/pull/81015 @@ -206,3 +135,42 @@ tests/ui/feature-gates/ --bless`. [here]: ./stabilization_guide.md [tracking issue]: #tracking-issues [add-feature-gate]: ./feature-gates.md#adding-a-feature-gate + +## Call for testing + +Once the implementation is complete, the feature will be available to nightly users but not yet part of stable Rust. This is a good time to write a blog post on [the main Rust blog][rust-blog] and issue a "call for testing". + +Some earlier such blog posts include: + +1. [The push for GATs stabilization](https://blog.rust-lang.org/2021/08/03/GATs-stabilization-push/) +2. [Changes to `impl Trait` in Rust 2024](https://blog.rust-lang.org/2024/09/05/impl-trait-capture-rules.html) +3. [Async Closures MVP: Call for Testing!](https://blog.rust-lang.org/inside-rust/2024/08/09/async-closures-call-for-testing/) + +Alternatively, [*This Week in Rust*][twir] has a [section][twir-cft] for this. One example of this having been used is: + +- [Call for testing on boolean literals as cfg predicates](https://github.com/rust-lang/rust/issues/131204#issuecomment-2569314526) + +Which option to choose might depend on how significant the language change is, though note that the [*This Week in Rust*][twir] section might be less visible than a dedicated post on the main Rust blog. + +## Polishing + +Giving users a polished experience means more than just implementing the feature in rustc. We need to think about all of the tools and resources that we ship. This work includes: + +- Documenting the language feature in the [Rust Reference][reference]. +- Extending [`rustfmt`] to format any new syntax (if applicable). +- Extending [`rust-analyzer`] (if applicable). The extent of this work can depend on the nature of the language feature, as some features don't need to be blocked on *full* support. 
+ - When a language feature degrades the user experience simply by existing before support is implemented in [`rust-analyzer`], that may lead the lang team to raise a blocking concern. + - Examples of such might include new syntax that [`rust-analyzer`] can't parse or type inference changes it doesn't understand when those lead to bogus diagnostics. + +## Stabilization + +The final step in the feature lifecycle is [stabilization][stab], which is when the feature becomes available to all Rust users. At this point, backward incompatible changes are generally no longer permitted (see the lang team's [defined semver policies](https://rust-lang.github.io/rfcs/1122-language-semver.html) for details). To learn more about stabilization, see the [stabilization guide][stab]. + + +[stab]: ./stabilization_guide.md +[rust-blog]: https://github.com/rust-lang/blog.rust-lang.org/ +[twir]: https://github.com/rust-lang/this-week-in-rust +[twir-cft]: https://this-week-in-rust.org/blog/2025/01/22/this-week-in-rust-583/#calls-for-testing +[`rustfmt`]: https://github.com/rust-lang/rustfmt +[`rust-analyzer`]: https://github.com/rust-lang/rust-analyzer +[reference]: https://github.com/rust-lang/reference diff --git a/src/doc/rustc-dev-guide/src/stabilization_guide.md b/src/doc/rustc-dev-guide/src/stabilization_guide.md index f875c68745f63..f155272e5a2c6 100644 --- a/src/doc/rustc-dev-guide/src/stabilization_guide.md +++ b/src/doc/rustc-dev-guide/src/stabilization_guide.md @@ -1,120 +1,66 @@ # Request for stabilization -**NOTE**: this page is about stabilizing *language* features. -For stabilizing *library* features, see [Stabilizing a library feature]. +**NOTE**: This page is about stabilizing *language* features. For stabilizing *library* features, see [Stabilizing a library feature]. [Stabilizing a library feature]: ./stability.md#stabilizing-a-library-feature -Once an unstable feature has been well-tested with no outstanding -concern, anyone may push for its stabilization. It involves the -following steps: +Once an unstable feature has been well-tested with no outstanding concerns, anyone may push for its stabilization, though involving the people who have worked on it is prudent. Follow these steps: -## Documentation PRs +## Write an RFC, if needed - +If the feature was part of a [lang experiment], the lang team generally will want to first accept an RFC before stabilization. -If any documentation for this feature exists, it should be -in the [`Unstable Book`], located at [`src/doc/unstable-book`]. -If it exists, the page for the feature gate should be removed. +[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html + +## Documentation PRs -If there was documentation there, integrating it into the -existing documentation is needed. + -If there wasn't documentation there, it needs to be added. +The feature might be documented in the [`Unstable Book`], located at [`src/doc/unstable-book`]. Remove the page for the feature gate if it exists. Integrate any useful parts of that documentation in other places. -Places that may need updated documentation: +Places that may need updated documentation include: -- [The Reference]: This must be updated, in full detail. -- [The Book]: This may or may not need updating, depends. - If you're not sure, please open an issue on this repository - and it can be discussed. -- standard library documentation: As needed. 
Language features - often don't need this, but if it's a feature that changes - how good examples are written, such as when `?` was added - to the language, updating examples is important. -- [Rust by Example]: As needed. +- [The Reference]: This must be updated, in full detail, and a member of the lang-docs team must review and approve the PR before the stabilization can be merged. +- [The Book]: This is updated as needed. If you're not sure, please open an issue on this repository and it can be discussed. +- Standard library documentation: This is updated as needed. Language features often don't need this, but if it's a feature that changes how idiomatic examples are written, such as when `?` was added to the language, updating these in the library documentation is important. Review also the keyword documentation and ABI documentation in the standard library, as these sometimes need updates for language changes. +- [Rust by Example]: This is updated as needed. -Prepare PRs to update documentation involving this new feature -for repositories mentioned above. Maintainers of these repositories -will keep these PRs open until the whole stabilization process -has completed. Meanwhile, we can proceed to the next step. +Prepare PRs to update documentation involving this new feature for the repositories mentioned above. Maintainers of these repositories will keep these PRs open until the whole stabilization process has completed. Meanwhile, we can proceed to the next step. ## Write a stabilization report -Find the tracking issue of the feature, and create a short -stabilization report. Essentially this would be a brief summary -of the feature plus some links to test cases showing it works -as expected, along with a list of edge cases that came up -and were considered. This is a minimal "due diligence" that -we do before stabilizing. - -The report should contain: +Author a stabilization report using the [template found in this repository][srt]. -- A summary, showing examples (e.g. code snippets) what is - enabled by this feature. -- Links to test cases in our test suite regarding this feature - and describe the feature's behavior on encountering edge cases. -- Links to the documentations (the PRs we have made in the - previous steps). -- Any other relevant information. -- The resolutions of any unresolved questions if the stabilization - is for an RFC. +The stabilization report summarizes: -Examples of stabilization reports can be found in -[rust-lang/rust#44494][report1] and [rust-lang/rust#28237][report2] (these links -will bring you directly to the comment containing the stabilization report). +- The main design decisions and deviations since the RFC was accepted, including both decisions that were FCP'd or otherwise accepted by the language team as well as those being presented to the lang team for the first time. + - Often, the final stabilized language feature has significant design deviations from the original RFC. That's OK, but these deviations must be highlighted and explained carefully. +- The work that has been done since the RFC was accepted, acknowledging the main contributors that helped drive the language feature forward. -[report1]: https://github.com/rust-lang/rust/issues/44494#issuecomment-360191474 -[report2]: https://github.com/rust-lang/rust/issues/28237#issuecomment-363374130 +The [*Stabilization Template*][srt] includes a series of questions that aim to surface connections between this feature and lang's subteams (e.g. types, opsem, lang-docs, etc.)
and to identify items that are commonly overlooked. -## FCP +[srt]: ./stabilization_report_template.md -If any member of the team responsible for tracking this -feature agrees with stabilizing this feature, they will -start the FCP (final-comment-period) process by commenting - -```text -@rfcbot fcp merge -``` - -The rest of the team members will review the proposal. If the final -decision is to stabilize, we proceed to do the actual code modification. +The stabilization report is typically posted as the main comment on the stabilization PR (see the next section). ## Stabilization PR -*This is for stabilizing language features. If you are stabilizing a library -feature, see [the stabilization chapter of the std dev guide][std-guide-stabilization] instead.* - -Once we have decided to stabilize a feature, we need to have -a PR that actually makes that stabilization happen. These kinds -of PRs are a great way to get involved in Rust, as they take -you on a little tour through the source code. +Every feature is different, and some may require steps beyond what this guide discusses. -Here is a general guide to how to stabilize a feature -- -every feature is different, of course, so some features may -require steps beyond what this guide talks about. - -Note: Before we stabilize any feature, it's the rule that it -should appear in the documentation. +Before the stabilization will be considered by the lang team, there must be a complete PR to the Reference describing the feature, and before the stabilization PR will be merged, this PR must have been reviewed and approved by the lang-docs team. ### Updating the feature-gate listing -There is a central listing of unstable feature-gates in -[`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` -macro. There should be an entry for the feature you are aiming -to stabilize, something like (this example is taken from -[rust-lang/rust#32409]: +There is a central listing of unstable feature-gates in [`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` macro. There should be an entry for the feature you are aiming to stabilize, something like (this example is taken from [rust-lang/rust#32409]: ```rust,ignore // pub(restricted) visibilities (RFC 1422) (unstable, pub_restricted, "CURRENT_RUSTC_VERSION", Some(32409)), ``` -The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. -Entries in the `declare_features!` call are sorted, so find the correct place. -When it is done, it should look like: +The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. Entries in the `declare_features!` call are sorted, so find the correct place. When it is done, it should look like: ```rust,ignore // pub(restricted) visibilities (RFC 1422) @@ -122,54 +68,31 @@ When it is done, it should look like: // note that we changed this ``` -(Even though you will encounter version numbers in the file of past changes, -you should not put the rustc version you expect your stabilization to happen in, -but instead `CURRENT_RUSTC_VERSION`) +(Even though you will encounter version numbers in the file of past changes, you should not put the rustc version you expect your stabilization to happen in, but instead use `CURRENT_RUSTC_VERSION`.) ### Removing existing uses of the feature-gate -Next search for the feature string (in this case, `pub_restricted`) -in the codebase to find where it appears. 
Change uses of -`#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders -under `library/` and `compiler/` but not the toplevel `tests/` one) to be -`#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate -only for stage0, which is built using the current beta (this is -needed because the feature is still unstable in the current beta). +Next, search for the feature string (in this case, `pub_restricted`) in the codebase to find where it appears. Change uses of `#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders under `library/` and `compiler/` but not the toplevel `tests/` one) to be `#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate only for stage0, which is built using the current beta (this is needed because the feature is still unstable in the current beta). -Also, remove those strings from any tests (e.g. under `tests/`). If there are tests -specifically targeting the feature-gate (i.e., testing that the -feature-gate is required to use the feature, but nothing else), -simply remove the test. +Also, remove those strings from any tests (e.g. under `tests/`). If there are tests specifically targeting the feature-gate (i.e., testing that the feature-gate is required to use the feature, but nothing else), simply remove the test. ### Do not require the feature-gate to use the feature -Most importantly, remove the code which flags an error if the -feature-gate is not present (since the feature is now considered -stable). If the feature can be detected because it employs some -new syntax, then a common place for that code to be is in the -same `compiler/rustc_ast_passes/src/feature_gate.rs`. -For example, you might see code like this: +Most importantly, remove the code which flags an error if the feature-gate is not present (since the feature is now considered stable). If the feature can be detected because it employs some new syntax, then a common place for that code to be is in `compiler/rustc_ast_passes/src/feature_gate.rs`. For example, you might see code like this: ```rust,ignore -gate_feature_post!(&self, pub_restricted, span, - "`pub(restricted)` syntax is experimental"); +gate_all!(pub_restricted, "`pub(restricted)` syntax is experimental"); ``` -This `gate_feature_post!` macro prints an error if the -`pub_restricted` feature is not enabled. It is not needed -now that `#[pub_restricted]` is stable. +This `gate_all!` macro call emits an error if the `pub_restricted` feature is not enabled. It is not needed now that `pub(restricted)` is stable. For more subtle features, you may find code like this: ```rust,ignore -if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } +if self.tcx.features().async_fn_in_dyn_trait() { /* XXX */ } ``` -This `pub_restricted` field (obviously named after the feature) -would ordinarily be false if the feature flag is not present -and true if it is. So transform the code to assume that the field -is true. In this case, that would mean removing the `if` and -leaving just the `/* XXX */`. +This `async_fn_in_dyn_trait()` method (named after the feature) would ordinarily return false if the feature flag is not present and true if it is. So transform the code to assume that it always returns true. In this case, that would mean removing the `if` and leaving just the `/* XXX */`.
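As a sketch of that transformation (reusing the made-up `my_feature` gate from earlier and a hypothetical helper function, not code from the compiler), a stabilization commit typically turns the guarded block into unconditional code:

```rust,ignore
// Before stabilization: the new behavior is applied only when the gate is on.
if tcx.features().my_feature() {
    apply_new_behavior();
}

// After stabilization: the check is removed and only the body is kept,
// since the feature is now unconditionally enabled.
apply_new_behavior();
```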
```rust,ignore if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } @@ -194,3 +117,40 @@ if something { /* XXX */ } [Rust by Example]: https://github.com/rust-lang/rust-by-example [`Unstable Book`]: https://doc.rust-lang.org/unstable-book/index.html [`src/doc/unstable-book`]: https://github.com/rust-lang/rust/tree/master/src/doc/unstable-book + +## Team nominations + +When opening the stabilization PR, CC the lang team and its advisors (`@rust-lang/lang @rust-lang/lang-advisors`) and any other teams to whom the feature is relevant, e.g.: + +- `@rust-lang/types`, for type system interactions. +- `@rust-lang/opsem`, for interactions with unsafe code. +- `@rust-lang/compiler`, for implementation robustness. +- `@rust-lang/libs-api`, for changes to the standard library API or its guarantees. +- `@rust-lang/lang-docs`, for questions about how this should be documented in the Reference. + +After the stabilization PR is opened with the stabilization report, wait a bit for any immediate comments. When such comments "simmer down" and you feel the PR is ready for consideration by the lang team, [nominate the PR](https://lang-team.rust-lang.org/how_to/nominate.html) to get it on the agenda for consideration in an upcoming lang meeting. + +If you are not a `rust-lang` organization member, you can ask your assigned reviewer to CC the relevant teams on your behalf. + +## Propose FCP on the PR + +After the lang team and other relevant teams review the stabilization, and after you have answered any questions they may have had, a member of one of the teams may propose to accept the stabilization by commenting: + +```text +@rfcbot fcp merge +``` + +Once enough team members have reviewed, the PR will move into a "final comment period" (FCP). If no new concerns are raised, this period will complete and the PR can be merged after implementation review in the usual way. + +## Reviewing and merging stabilizations + +On a stabilization, before giving it the `r+`, ensure that the PR: + +- Matches what the team proposed for stabilization and what is documented in the Reference PR. +- Includes any changes the team decided to request along the way in order to resolve or avoid concerns. +- Is otherwise exactly what is described in the stabilization report and in any relevant RFCs or prior lang FCPs. +- Does not expose on stable behaviors other than those specified, accepted for stabilization, and documented in the Reference. +- Has sufficient tests to convincingly demonstrate these things. +- Is accompanied by a PR to the Reference that has been reviewed and approved by a member of lang-docs. + +In particular, when reviewing the PR, keep an eye out for any user-visible details that the lang team failed to consider and specify. If you find one, describe it and nominate the PR for the lang team. diff --git a/src/doc/rustc-dev-guide/src/stabilization_report_template.md b/src/doc/rustc-dev-guide/src/stabilization_report_template.md new file mode 100644 index 0000000000000..793f7d7e45cff --- /dev/null +++ b/src/doc/rustc-dev-guide/src/stabilization_report_template.md @@ -0,0 +1,277 @@ +# Stabilization report template + +## What is this? + +This is a template for [stabilization reports](./stabilization_guide.md) of **language features**. The questions aim to solicit the details most often needed. These details help reviewers to identify potential problems upfront. Not all parts of the template will apply to every stabilization. If a question doesn't apply, explain briefly why.
+ +Copy everything after the separator and edit it as Markdown. Replace each *TODO* with your answer. + +--- + +# Stabilization report + +## Summary + +> Remind us what this feature is and what value it provides. Tell the story of what led up to this stabilization. +> +> E.g., see: +> +> - [Stabilize AFIT/RPITIT](https://web.archive.org/web/20250329190642/https://github.com/rust-lang/rust/pull/115822) +> - [Stabilize RTN](https://web.archive.org/web/20250321214601/https://github.com/rust-lang/rust/pull/138424) +> - [Stabilize ATPIT](https://web.archive.org/web/20250124214256/https://github.com/rust-lang/rust/pull/120700) +> - [Stabilize opaque type precise capturing](https://web.archive.org/web/20250312173538/https://github.com/rust-lang/rust/pull/127672) + +*TODO* + +Tracking: + +- *TODO* (Link to tracking issue.) + +Reference PRs: + +- *TODO* (Link to Reference PRs.) + +cc @rust-lang/lang @rust-lang/lang-advisors + +### What is stabilized + +> Describe each behavior being stabilized and give a short example of code that will now be accepted. + +```rust +todo!() +``` + +### What isn't stabilized + +> Describe any parts of the feature not being stabilized. Talk about what we might want to do later and what doors are being left open for that. If what we're not stabilizing might lead to surprises for users, talk about that in particular. + +## Design + +### Reference + +> What updates are needed to the Reference? Link to each PR. If the Reference is missing content needed for describing this feature, discuss that. + +- *TODO* + +### RFC history + +> What RFCs have been accepted for this feature? + +- *TODO* + +### Answers to unresolved questions + +> What questions were left unresolved by the RFC? How have they been answered? Link to any relevant lang decisions. + +*TODO* + +### Post-RFC changes + +> What other user-visible changes have occurred since the RFC was accepted? Describe both changes that the lang team accepted (and link to those decisions) as well as changes that are being presented to the team for the first time in this stabilization report. + +*TODO* + +### Key points + +> What decisions have been most difficult and what behaviors to be stabilized have proved most contentious? Summarize the major arguments on all sides and link to earlier documents and discussions. + +*TODO* + +### Nightly extensions + +> Are there extensions to this feature that remain unstable? How do we know that we are not accidentally committing to those? + +*TODO* + +### Doors closed + +> What doors does this stabilization close for later changes to the language? E.g., does this stabilization make any other RFCs, lang experiments, or known in-flight proposals more difficult or impossible to do later? + +## Feedback + +### Call for testing + +> Has a "call for testing" been done? If so, what feedback was received? + +*TODO* + +### Nightly use + +> Do any known nightly users use this feature? Counting instances of `#![feature(FEATURE_NAME)]` on GitHub with grep might be informative. + +*TODO* + +## Implementation + +### Major parts + +> Summarize the major parts of the implementation and provide links into the code and to relevant PRs. +> +> See, e.g., this breakdown of the major parts of async closures: +> +> - + +*TODO* + +### Coverage + +> Summarize the test coverage of this feature. +> +> Consider what the "edges" of this feature are. We're particularly interested in seeing tests that assure us about exactly what nearby things we're not stabilizing. 
Tests should of course comprehensively demonstrate that the feature works. Think too about demonstrating the diagnostics seen when common mistakes are made and the feature is used incorrectly. +> +> Within each test, include a comment at the top describing the purpose of the test and what set of invariants it intends to demonstrate. This is a great help to our review. +> +> Describe any known or intentional gaps in test coverage. +> +> Contextualize and link to test folders and individual tests. + +*TODO* + +### Outstanding bugs + +> What outstanding bugs involve this feature? List them. Should any block the stabilization? Discuss why or why not. + +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Outstanding FIXMEs + +> What FIXMEs are still in the code for that feature and why is it OK to leave them there? + +*TODO* + +### Tool changes + +> What changes must be made to our other tools to support this feature. Has this work been done? Link to any relevant PRs and issues. + +- [ ] rustfmt + - *TODO* +- [ ] rust-analyzer + - *TODO* +- [ ] rustdoc (both JSON and HTML) + - *TODO* +- [ ] cargo + - *TODO* +- [ ] clippy + - *TODO* +- [ ] rustup + - *TODO* +- [ ] docs.rs + - *TODO* + +*TODO* + +### Breaking changes + +> If this stabilization represents a known breaking change, link to the crater report, the analysis of the crater report, and to all PRs we've made to ecosystem projects affected by this breakage. Discuss any limitations of what we're able to know about or to fix. + +*TODO* + +Crater report: + +- *TODO* + +Crater analysis: + +- *TODO* + +PRs to affected crates: + +- *TODO* +- *TODO* +- *TODO* + +## Type system, opsem + +### Compile-time checks + +> What compilation-time checks are done that are needed to prevent undefined behavior? +> +> Link to tests demonstrating that these checks are being done. + +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Type system rules + +> What type system rules are enforced for this feature and what is the purpose of each? + +*TODO* + +### Sound by default? + +> Does the feature's implementation need specific checks to prevent UB, or is it sound by default and need specific opt-in to perform the dangerous/unsafe operations? If it is not sound by default, what is the rationale? + +*TODO* + +### Breaks the AM? + +> Can users use this feature to introduce undefined behavior, or use this feature to break the abstraction of Rust and expose the underlying assembly-level implementation? Describe this if so. + +*TODO* + +## Common interactions + +### Temporaries + +> Does this feature introduce new expressions that can produce temporaries? What are the scopes of those temporaries? + +*TODO* + +### Drop order + +> Does this feature raise questions about the order in which we should drop values? Talk about the decisions made here and how they're consistent with our earlier decisions. + +*TODO* + +### Pre-expansion / post-expansion + +> Does this feature raise questions about what should be accepted pre-expansion (e.g. in code covered by `#[cfg(false)]`) versus what should be accepted post-expansion? What decisions were made about this? + +*TODO* + +### Edition hygiene + +> If this feature is gated on an edition, how do we decide, in the context of the edition hygiene of tokens, whether to accept or reject code. E.g., what token do we use to decide? + +*TODO* + +### SemVer implications + +> Does this feature create any new ways in which library authors must take care to prevent breaking downstreams when making minor-version releases? Describe these. 
Are these new hazards "major" or "minor" according to [RFC 1105](https://rust-lang.github.io/rfcs/1105-api-evolution.html)? + +*TODO* + +### Exposing other features + +> Are there any other unstable features whose behavior may be exposed by this feature in any way? What features present the highest risk of that? + +*TODO* + +## History + +> List issues and PRs that are important for understanding how we got here. + +- *TODO* +- *TODO* +- *TODO* + +## Acknowledgments + +> Summarize contributors to the feature by name for recognition and so that those people are notified about the stabilization. Does anyone who worked on this *not* think it should be stabilized right now? We'd like to hear about that if so. + +*TODO* + +## Open items + +> List any known items that have not yet been completed and that should be before this is stabilized. + +- [ ] *TODO* +- [ ] *TODO* +- [ ] *TODO* diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index b1feef9ed0cc8..782f78d76148e 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -309,8 +309,9 @@ fn main((ؼ Use `//~?` to match an error without line information. `//~?` is precise and will not match errors if their line information is available. -For tests wishing to match against compiler diagnostics, error annotations should -be preferred over //@ error-pattern, //@ error-pattern is imprecise and non-exhaustive. +It should be preferred over `//@ error-pattern` +for tests wishing to match against compiler diagnostics, +due to `//@ error-pattern` being imprecise and non-exhaustive. ```rust,ignore //@ compile-flags: --print yyyy @@ -320,8 +321,8 @@ be preferred over //@ error-pattern, //@ error-pattern is imprecise and non-exha ### `error-pattern` -The `error-pattern` [directive](directives.md) can be used for runtime messages, which don't -have a specific span, or in exceptional cases, for compile time messages. +The `error-pattern` [directive](directives.md) can be used for runtime messages which don't +have a specific span, or, in exceptional cases, for compile time messages. Let's think about this test: diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml index 7123d43eb564c..fb5bff3fe63ff 100644 --- a/src/tools/linkchecker/Cargo.toml +++ b/src/tools/linkchecker/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "linkchecker" version = "0.1.0" -edition = "2021" +edition = "2024" [[bin]] name = "linkchecker" diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 84cba3f8c4473..1dc45728c90cd 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -17,12 +17,13 @@ //! should catch the majority of "broken link" cases. use std::cell::{Cell, RefCell}; +use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; -use std::io::ErrorKind; +use std::fs; +use std::iter::once; use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use std::time::Instant; -use std::{env, fs}; use html5ever::tendril::ByteTendril; use html5ever::tokenizer::{ @@ -110,10 +111,25 @@ macro_rules! 
t { }; } +struct Cli { + docs: PathBuf, + link_targets_dirs: Vec, +} + fn main() { - let docs = env::args_os().nth(1).expect("doc path should be first argument"); - let docs = env::current_dir().unwrap().join(docs); - let mut checker = Checker { root: docs.clone(), cache: HashMap::new() }; + let cli = match parse_cli() { + Ok(cli) => cli, + Err(err) => { + eprintln!("error: {err}"); + usage_and_exit(1); + } + }; + + let mut checker = Checker { + root: cli.docs.clone(), + link_targets_dirs: cli.link_targets_dirs, + cache: HashMap::new(), + }; let mut report = Report { errors: 0, start: Instant::now(), @@ -125,7 +141,7 @@ fn main() { intra_doc_exceptions: 0, has_broken_urls: false, }; - checker.walk(&docs, &mut report); + checker.walk(&cli.docs, &mut report); report.report(); if report.errors != 0 { println!("found some broken links"); @@ -133,8 +149,50 @@ fn main() { } } +fn parse_cli() -> Result { + fn to_absolute_path(arg: &str) -> Result { + std::path::absolute(arg).map_err(|e| format!("could not convert to absolute {arg}: {e}")) + } + + let mut verbatim = false; + let mut docs = None; + let mut link_targets_dirs = Vec::new(); + + let mut args = std::env::args().skip(1); + while let Some(arg) = args.next() { + if !verbatim && arg == "--" { + verbatim = true; + } else if !verbatim && (arg == "-h" || arg == "--help") { + usage_and_exit(0) + } else if !verbatim && arg == "--link-targets-dir" { + link_targets_dirs.push(to_absolute_path( + &args.next().ok_or("missing value for --link-targets-dir")?, + )?); + } else if !verbatim && let Some(value) = arg.strip_prefix("--link-targets-dir=") { + link_targets_dirs.push(to_absolute_path(value)?); + } else if !verbatim && arg.starts_with('-') { + return Err(format!("unknown flag: {arg}")); + } else if docs.is_none() { + docs = Some(arg); + } else { + return Err("too many positional arguments".into()); + } + } + + Ok(Cli { + docs: to_absolute_path(&docs.ok_or("missing first positional argument")?)?, + link_targets_dirs, + }) +} + +fn usage_and_exit(code: i32) -> ! { + eprintln!("usage: linkchecker PATH [--link-targets-dir=PATH ...]"); + std::process::exit(code) +} + struct Checker { root: PathBuf, + link_targets_dirs: Vec, cache: Cache, } @@ -420,37 +478,34 @@ impl Checker { /// Load a file from disk, or from the cache if available. fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry) { - // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - #[cfg(windows)] - const ERROR_INVALID_NAME: i32 = 123; - let pretty_path = file.strip_prefix(&self.root).unwrap_or(file).to_str().unwrap().to_string(); - let entry = - self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) { + for base in once(&self.root).chain(self.link_targets_dirs.iter()) { + let entry = self.cache.entry(pretty_path.clone()); + if let Entry::Occupied(e) = &entry + && !matches!(e.get(), FileEntry::Missing) + { + break; + } + + let file = base.join(&pretty_path); + entry.insert_entry(match fs::metadata(&file) { Ok(metadata) if metadata.is_dir() => FileEntry::Dir, Ok(_) => { if file.extension().and_then(|s| s.to_str()) != Some("html") { FileEntry::OtherFile } else { report.html_files += 1; - load_html_file(file, report) + load_html_file(&file, report) } } - Err(e) if e.kind() == ErrorKind::NotFound => FileEntry::Missing, - Err(e) => { - // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` rather than `NotFound`. 
- // Explicitly check for that so that the broken link can be allowed in `LINKCHECK_EXCEPTIONS`. - #[cfg(windows)] - if e.raw_os_error() == Some(ERROR_INVALID_NAME) - && file.as_os_str().to_str().map_or(false, |s| s.contains("::")) - { - return FileEntry::Missing; - } - panic!("unexpected read error for {}: {}", file.display(), e); - } + Err(e) if is_not_found_error(&file, &e) => FileEntry::Missing, + Err(e) => panic!("unexpected read error for {}: {}", file.display(), e), }); + } + + let entry = self.cache.get(&pretty_path).unwrap(); (pretty_path, entry) } } @@ -629,3 +684,16 @@ fn parse_ids(ids: &mut HashSet, file: &str, source: &str, report: &mut R ids.insert(encoded); } } + +fn is_not_found_error(path: &Path, error: &std::io::Error) -> bool { + // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + const WINDOWS_ERROR_INVALID_NAME: i32 = 123; + + error.kind() == std::io::ErrorKind::NotFound + // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` + // rather than `NotFound`. Explicitly check for that so that the broken link can be allowed + // in `LINKCHECK_EXCEPTIONS`. + || (cfg!(windows) + && error.raw_os_error() == Some(WINDOWS_ERROR_INVALID_NAME) + && path.as_os_str().to_str().map_or(false, |s| s.contains("::"))) +} diff --git a/tests/codegen/diverging-function-call-debuginfo.rs b/tests/codegen/diverging-function-call-debuginfo.rs new file mode 100644 index 0000000000000..1a80fe1643dea --- /dev/null +++ b/tests/codegen/diverging-function-call-debuginfo.rs @@ -0,0 +1,38 @@ +/// Make sure that line debuginfo is correct for diverging calls under certain +/// conditions. In particular we want to ensure that the line number is never +/// 0, but we check the absence of 0 by looking for the expected exact line +/// numbers. Regression test for . + +//@ compile-flags: -g -Clto -Copt-level=0 +//@ no-prefer-dynamic + +// First find the scope of both diverge() calls, namely this main() function. +// CHECK-DAG: [[MAIN_SCOPE:![0-9]+]] = distinct !DISubprogram(name: "main", linkageName: {{.*}}diverging_function_call_debuginfo{{.*}}main{{.*}} +fn main() { + if True == False { + // unreachable + // Then find the DILocation with the correct line number for this call ... + // CHECK-DAG: [[UNREACHABLE_CALL_DBG:![0-9]+]] = !DILocation(line: [[@LINE+1]], {{.*}}scope: [[MAIN_SCOPE]] + diverge(); + } + + // ... and this call. + // CHECK-DAG: [[LAST_CALL_DBG:![0-9]+]] = !DILocation(line: [[@LINE+1]], {{.*}}scope: [[MAIN_SCOPE]] + diverge(); +} + +#[derive(PartialEq)] +pub enum MyBool { + True, + False, +} + +use MyBool::*; + +fn diverge() -> ! { + panic!(); +} + +// Finally make sure both DILocations belong to each the respective diverge() call. +// CHECK-DAG: call void {{.*}}diverging_function_call_debuginfo{{.*}}diverge{{.*}} !dbg [[LAST_CALL_DBG]] +// CHECK-DAG: call void {{.*}}diverging_function_call_debuginfo{{.*}}diverge{{.*}} !dbg [[UNREACHABLE_CALL_DBG]]