Auto merge of #154535 - JonathanBrouwer:rollup-jU7CiZw, r=JonathanBrouwer

Rollup of 4 pull requests

Successful merges:

 - rust-lang/rust#153632 (Fix Vec::const_make_global for 0 capacity and ZST's)
 - rust-lang/rust#154190 (Don't fuse in `MapWindows`)
 - rust-lang/rust#154512 (Constify comparisons and `Clone` for `core::mem::Alignment`)
 - rust-lang/rust#154520 (Add doc links to `ExtractIf` of `BTree{Set,Map}` and `LinkedList`)
This commit is contained in:
bors
2026-03-29 07:00:59 +00:00
9 changed files with 103 additions and 46 deletions
+3 -1
View File
@@ -2102,7 +2102,9 @@ fn default() -> Self {
}
}
/// An iterator produced by calling `extract_if` on BTreeMap.
/// This `struct` is created by the [`extract_if`] method on [`BTreeMap`].
///
/// [`extract_if`]: BTreeMap::extract_if
#[stable(feature = "btree_extract_if", since = "1.91.0")]
#[must_use = "iterators are lazy and do nothing unless consumed; \
use `retain` or `extract_if().for_each(drop)` to remove and discard elements"]
+3 -1
View File
@@ -1547,7 +1547,9 @@ fn into_iter(self) -> Iter<'a, T> {
}
}
/// An iterator produced by calling `extract_if` on BTreeSet.
/// This `struct` is created by the [`extract_if`] method on [`BTreeSet`].
///
/// [`extract_if`]: BTreeSet::extract_if
#[stable(feature = "btree_extract_if", since = "1.91.0")]
#[must_use = "iterators are lazy and do nothing unless consumed; \
use `retain` or `extract_if().for_each(drop)` to remove and discard elements"]
+3 -1
View File
@@ -1942,7 +1942,9 @@ pub fn back_mut(&mut self) -> Option<&mut T> {
}
}
/// An iterator produced by calling `extract_if` on LinkedList.
/// This `struct` is created by the [`extract_if`] method on [`LinkedList`].
///
/// [`extract_if`]: LinkedList::extract_if
#[stable(feature = "extract_if", since = "1.87.0")]
#[must_use = "iterators are lazy and do nothing unless consumed; \
use `extract_if().for_each(drop)` to remove and discard elements"]
+11 -3
View File
@@ -898,9 +898,17 @@ pub const fn const_make_global(mut self) -> &'static [T]
where
T: Freeze,
{
unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
let me = ManuallyDrop::new(self);
unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
// `const_make_global` requires the pointer to point to the beginning of a heap allocation,
// which is not the case when `self.capacity()` is 0, or if `T::IS_ZST`,
// which is why we instead return a new slice in this case.
if self.capacity() == 0 || T::IS_ZST {
let me = ManuallyDrop::new(self);
unsafe { slice::from_raw_parts(NonNull::<T>::dangling().as_ptr(), me.len) }
} else {
unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
let me = ManuallyDrop::new(self);
unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
}
}
}
+22
View File
@@ -2764,3 +2764,25 @@ fn const_heap() {
assert_eq!([1, 2, 4, 8, 16, 32], X);
}
// regression test for issue #153158. `const_make_global` previously assumed `Vec<T>`'s buf
// always has a heap allocation, which led to compilation errors.
#[test]
fn const_make_global_empty_or_zst_regression() {
const EMPTY_SLICE: &'static [i32] = {
let empty_vec: Vec<i32> = Vec::new();
empty_vec.const_make_global()
};
assert_eq!(EMPTY_SLICE, &[]);
const ZST_SLICE: &'static [()] = {
let mut zst_vec: Vec<()> = Vec::new();
zst_vec.push(());
zst_vec.push(());
zst_vec.push(());
zst_vec.const_make_global()
};
assert_eq!(ZST_SLICE, &[(), (), ()]);
}
+7 -16
View File
@@ -14,10 +14,7 @@ pub struct MapWindows<I: Iterator, F, const N: usize> {
}
struct MapWindowsInner<I: Iterator, const N: usize> {
// We fuse the inner iterator because there shouldn't be "holes" in
// the sliding window. Once the iterator returns a `None`, we make
// our `MapWindows` iterator return `None` forever.
iter: Option<I>,
iter: I,
// Since iterators are assumed lazy, i.e. it only yields an item when
// `Iterator::next()` is called, and `MapWindows` is not an exception.
//
@@ -26,7 +23,7 @@ struct MapWindowsInner<I: Iterator, const N: usize> {
// we collect the first `N` items yielded from the inner iterator and
// put it into the buffer.
//
// When the inner iterator has returned a `None` (i.e. fused), we take
// When the inner iterator has returned a `None`, we take
// away this `buffer` and leave it `None` to reclaim its resources.
//
// FIXME: should we shrink the size of `buffer` using niche optimization?
@@ -64,19 +61,16 @@ pub(in crate::iter) fn new(iter: I, f: F) -> Self {
impl<I: Iterator, const N: usize> MapWindowsInner<I, N> {
#[inline]
fn new(iter: I) -> Self {
Self { iter: Some(iter), buffer: None }
Self { iter, buffer: None }
}
fn next_window(&mut self) -> Option<&[I::Item; N]> {
let iter = self.iter.as_mut()?;
match self.buffer {
// It is the first time to advance. We collect
// the first `N` items from `self.iter` to initialize `self.buffer`.
None => self.buffer = Buffer::try_from_iter(iter),
Some(ref mut buffer) => match iter.next() {
None => self.buffer = Buffer::try_from_iter(&mut self.iter),
Some(ref mut buffer) => match self.iter.next() {
None => {
// Fuse the inner iterator since it yields a `None`.
self.iter.take();
self.buffer.take();
}
// Advance the iterator. We first call `next` before changing our buffer
@@ -89,8 +83,7 @@ fn new(iter: I) -> Self {
}
fn size_hint(&self) -> (usize, Option<usize>) {
let Some(ref iter) = self.iter else { return (0, Some(0)) };
let (lo, hi) = iter.size_hint();
let (lo, hi) = self.iter.size_hint();
if self.buffer.is_some() {
// If the first `N` items are already yielded by the inner iterator,
// the size hint is then equal to that of the inner iterator.
@@ -253,12 +246,10 @@ fn size_hint(&self) -> (usize, Option<usize>) {
}
}
// Note that even if the inner iterator is not fused, the `MapWindows` is still fused,
// because we don't allow "holes" in the mapping window.
#[unstable(feature = "iter_map_windows", issue = "87155")]
impl<I, F, R, const N: usize> FusedIterator for MapWindows<I, F, N>
where
I: Iterator,
I: FusedIterator,
F: FnMut(&[I::Item; N]) -> R,
{
}
+16 -18
View File
@@ -1657,11 +1657,6 @@ fn flatten(self) -> Flatten<Self>
/// items yielded by `self`). If 𝑘 is less than `N`, this method yields an
/// empty iterator.
///
/// The returned iterator implements [`FusedIterator`], because once `self`
/// returns `None`, even if it returns a `Some(T)` again in the next iterations,
/// we cannot put it into a contiguous array buffer, and thus the returned iterator
/// should be fused.
///
/// [`slice::windows()`]: slice::windows
/// [`FusedIterator`]: crate::iter::FusedIterator
///
@@ -1722,7 +1717,7 @@ fn flatten(self) -> Flatten<Self>
/// assert_eq!(it.next(), None);
/// ```
///
/// For non-fused iterators, they are fused after `map_windows`.
/// For non-fused iterators, the window is reset after `None` is yielded.
///
/// ```
/// #![feature(iter_map_windows)]
@@ -1739,11 +1734,11 @@ fn flatten(self) -> Flatten<Self>
/// let val = self.state;
/// self.state = self.state + 1;
///
/// // yields `0..5` first, then only even numbers since `6..`.
/// if val < 5 || val % 2 == 0 {
/// Some(val)
/// } else {
/// // Skip every 5th number
/// if (val + 1) % 5 == 0 {
/// None
/// } else {
/// Some(val)
/// }
/// }
/// }
@@ -1751,32 +1746,35 @@ fn flatten(self) -> Flatten<Self>
///
/// let mut iter = NonFusedIterator::default();
///
/// // yields 0..5 first.
/// assert_eq!(iter.next(), Some(0));
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next(), Some(2));
/// assert_eq!(iter.next(), Some(3));
/// assert_eq!(iter.next(), Some(4));
/// // then we can see our iterator going back and forth
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), Some(5));
/// assert_eq!(iter.next(), Some(6));
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), Some(7));
/// assert_eq!(iter.next(), Some(8));
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), Some(10));
/// assert_eq!(iter.next(), Some(11));
///
/// // however, with `.map_windows()`, it is fused.
/// let mut iter = NonFusedIterator::default()
/// .map_windows(|arr: &[_; 2]| *arr);
///
/// assert_eq!(iter.next(), Some([0, 1]));
/// assert_eq!(iter.next(), Some([1, 2]));
/// assert_eq!(iter.next(), Some([2, 3]));
/// assert_eq!(iter.next(), Some([3, 4]));
/// assert_eq!(iter.next(), None);
///
/// // it will always return `None` after the first time.
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), Some([5, 6]));
/// assert_eq!(iter.next(), Some([6, 7]));
/// assert_eq!(iter.next(), Some([7, 8]));
/// assert_eq!(iter.next(), None);
///
/// assert_eq!(iter.next(), Some([10, 11]));
/// assert_eq!(iter.next(), Some([11, 12]));
/// assert_eq!(iter.next(), Some([12, 13]));
/// assert_eq!(iter.next(), None);
/// ```
#[inline]
+12 -6
View File
@@ -11,7 +11,8 @@
/// Note that particularly large alignments, while representable in this type,
/// are likely not to be supported by actual allocators and linkers.
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(Copy)]
#[derive_const(Clone, PartialEq, Eq)]
#[repr(transparent)]
pub struct Alignment {
// This field is never used directly (nor is the enum),
@@ -303,7 +304,8 @@ fn from(align: Alignment) -> usize {
}
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
impl cmp::Ord for Alignment {
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl const cmp::Ord for Alignment {
#[inline]
fn cmp(&self, other: &Self) -> cmp::Ordering {
self.as_nonzero_usize().cmp(&other.as_nonzero_usize())
@@ -311,7 +313,8 @@ fn cmp(&self, other: &Self) -> cmp::Ordering {
}
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
impl cmp::PartialOrd for Alignment {
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl const cmp::PartialOrd for Alignment {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
Some(self.cmp(other))
@@ -336,7 +339,8 @@ fn default() -> Alignment {
}
#[cfg(target_pointer_width = "16")]
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(Copy)]
#[derive_const(Clone, PartialEq, Eq)]
#[repr(usize)]
enum AlignmentEnum {
_Align1Shl0 = 1 << 0,
@@ -358,7 +362,8 @@ enum AlignmentEnum {
}
#[cfg(target_pointer_width = "32")]
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(Copy)]
#[derive_const(Clone, PartialEq, Eq)]
#[repr(usize)]
enum AlignmentEnum {
_Align1Shl0 = 1 << 0,
@@ -396,7 +401,8 @@ enum AlignmentEnum {
}
#[cfg(target_pointer_width = "64")]
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(Copy)]
#[derive_const(Clone, PartialEq, Eq)]
#[repr(usize)]
enum AlignmentEnum {
_Align1Shl0 = 1 << 0,
@@ -284,3 +284,29 @@ fn check_size_hint<const N: usize>(
check_size_hint::<5>((5, Some(5)), (1, Some(1)));
check_size_hint::<5>((5, Some(10)), (1, Some(6)));
}
#[test]
fn test_unfused() {
#[derive(Default)]
struct UnfusedIter(usize);
impl Iterator for UnfusedIter {
type Item = usize;
fn next(&mut self) -> Option<usize> {
let curr = self.0;
self.0 += 1;
if curr % 7 == 0 { None } else { Some(curr) }
}
}
let mut iter = UnfusedIter(1).map_windows(|a: &[_; 3]| *a);
assert_eq!(iter.by_ref().collect::<Vec<_>>(), vec![[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]]);
assert_eq!(
iter.by_ref().collect::<Vec<_>>(),
vec![[8, 9, 10], [9, 10, 11], [10, 11, 12], [11, 12, 13]]
);
assert_eq!(
iter.by_ref().collect::<Vec<_>>(),
vec![[15, 16, 17], [16, 17, 18], [17, 18, 19], [18, 19, 20]]
);
}