// core/slice/cmp.rs
1//! Comparison traits for `[T]`.
2
3use super::{from_raw_parts, memchr};
4use crate::ascii;
5use crate::cmp::{self, BytewiseEq, Ordering};
6use crate::convert::Infallible;
7use crate::intrinsics::compare_bytes;
8use crate::marker::Destruct;
9use crate::mem::SizedTypeProperties;
10use crate::num::NonZero;
11use crate::ops::ControlFlow;
12
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    /// Slices are equal iff they have the same length and every pair of
    /// corresponding elements compares equal.
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        let len = self.len();
        if len == other.len() {
            // Delegate to `SlicePartialEq` so bytewise-comparable element
            // types can specialize this to a single `compare_bytes` call.
            // SAFETY: Just checked that they're the same length, and the pointers
            // come from references-to-slices so they're guaranteed readable.
            unsafe { SlicePartialEq::equal_same_length(self.as_ptr(), other.as_ptr(), len) }
        } else {
            false
        }
    }
}
31
// `Eq` is a marker on top of `PartialEq`; the element-wise equality above is
// already total when `T: Eq`, so there is nothing to implement here.
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Eq> const Eq for [T] {}
35
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Ord> const Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        // Delegate to `SliceOrd`, which specializes unsigned-bytewise element
        // types to a `compare_bytes` (memcmp-style) fast path.
        SliceOrd::compare(self, other)
    }
}
44
/// Reads out the single-byte representation of a `ControlFlow<bool>`.
///
/// Used by the slice `PartialOrd` methods below so they can test the chaining
/// result without an explicit discriminant check (see the comment in `lt`).
#[inline]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
55
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] PartialOrd> const PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        // `1` is the byte for `Break(true)`, i.e. "strictly less".
        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        // Anything but `0` (the byte for `Break(false)`) means `self <= other`.
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    // The `__chaining_*` methods forward to `SliceChain`, which specializes
    // unsigned-bytewise element types to a single bytewise comparison.
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
105
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    /// Compares two equal-length runs of elements for equality.
    ///
    /// # Safety
    /// `lhs` and `rhs` are both readable for `len` elements
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool;
}
114
// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] PartialEq<B>,
{
    // It's not worth trying to inline the loops underneath here *in MIR*,
    // and preventing it encourages more useful inlining upstream,
    // such as in `<str as PartialEq>::eq`.
    // The codegen backend can still inline it later if needed.
    #[rustc_no_mir_inline]
    default unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..len` loop.
        let mut idx = 0;
        while idx < len {
            // SAFETY: idx < len, so both are in-bounds and readable
            if unsafe { *lhs.add(idx) != *rhs.add(idx) } {
                // Short-circuit on the first mismatching pair.
                return false;
            }
            idx += 1;
        }

        // All pairs compared equal (vacuously true for `len == 0`).
        true
    }
}
143
// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] BytewiseEq<B>,
{
    #[inline]
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // SAFETY: by our precondition, `lhs` and `rhs` are guaranteed to be valid
        // for reading `len` values, which also means the size is guaranteed
        // not to overflow because it exists in memory;
        unsafe {
            let size = crate::intrinsics::unchecked_mul(len, Self::SIZE);
            // `compare_bytes` is memcmp-like: zero means both byte ranges match.
            compare_bytes(lhs as _, rhs as _, size) == 0
        }
    }
}
162
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd
const trait SlicePartialOrd: Sized {
    /// Lexicographically compares `left` and `right`; returns `None` only when
    /// some pair of elements is itself unordered.
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
169
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd chaining methods
const trait SliceChain: Sized {
    // Each method mirrors the corresponding element-level `__chaining_*`
    // method of `PartialOrd`, lifted from single elements to whole slices.
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}
179
180type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;
181
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd> const SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        // Per-element step: keep scanning while elements compare equal,
        // otherwise break out with the (possibly `None`) element ordering.
        // FIXME(const-hack): revert this to a const closure once possible
        #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
        const fn elem_chain<A: [const] PartialOrd>(a: &A, b: &A) -> ControlFlow<Option<Ordering>> {
            match PartialOrd::partial_cmp(a, b) {
                Some(Ordering::Equal) => ControlFlow::Continue(()),
                non_eq => ControlFlow::Break(non_eq),
            }
        }

        // Length step: the common prefix was all-equal, so comparing the two
        // lengths decides the final answer.
        // FIXME(const-hack): revert this to a const closure once possible
        #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
        const fn len_chain(a: &usize, b: &usize) -> ControlFlow<Option<Ordering>, Infallible> {
            ControlFlow::Break(usize::partial_cmp(a, b))
        }

        // `AlwaysBreak`'s `Continue` type is uninhabited, making this pattern
        // irrefutable.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
204
// Forward each chaining method to the shared `chaining_impl` loop: compare
// elements with the element-level `__chaining_*` method, then finish with the
// same comparison applied to the two lengths.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd> const SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}
220
/// Shared loop behind all the lexicographic slice comparisons.
///
/// Runs `elem_chain` over the common prefix of `left` and `right`,
/// short-circuiting (via `?`) as soon as one pair of elements decides the
/// outcome, then lets `len_chain` produce the final result from the lengths.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[inline]
const fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl [const] Fn(&'l A, &'r A) -> ControlFlow<B> + [const] Destruct,
    len_chain: impl for<'a> [const] FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C> + [const] Destruct,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bound check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    // FIXME(const-hack): revert this to `for i in 0..l` once `impl const Iterator for Range<T>`
    let mut i: usize = 0;
    while i < l {
        elem_chain(&lhs[i], &rhs[i])?;
        i += 1;
    }

    len_chain(&left.len(), &right.len())
}
245
246// This is the impl that we would like to have. Unfortunately it's not sound.
247// See `partial_ord_slice.rs`.
248/*
249impl<A> SlicePartialOrd for A
250where
251    A: Ord,
252{
253    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
254        Some(SliceOrd::compare(left, right))
255    }
256}
257*/
258
// For types whose total order is always applicable (see `AlwaysApplicableOrd`
// below), the partial comparison can simply wrap the specialized `Ord` path.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
265
// Marker trait used as a specialization bound: implemented only for types
// where the `Ord`-based shortcut above is sound, i.e. whose `partial_cmp`
// always agrees with `Some(cmp(..))` (see the commented-out impl above for
// why this can't be done for all `A: Ord`).
#[rustc_specialization_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}
269
// Bulk-implements `AlwaysApplicableOrd`: each `[generics] Type,` entry expands
// to `impl<generics> AlwaysApplicableOrd for Type {}`.
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}
275
// Primitive integers, `bool`, `char`, raw pointers, and references/`Option`s
// of always-applicable types all get the total-order fast path.
always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
285
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's Ord
const trait SliceOrd: Sized {
    /// Lexicographically compares `left` and `right`.
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
292
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord> const SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Per-element step: continue while elements are equal, otherwise break
        // with the deciding ordering.
        // FIXME(const-hack): revert this to a const closure once possible
        #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
        const fn elem_chain<A: [const] Ord>(a: &A, b: &A) -> ControlFlow<Ordering> {
            match Ord::cmp(a, b) {
                Ordering::Equal => ControlFlow::Continue(()),
                non_eq => ControlFlow::Break(non_eq),
            }
        }

        // Length step: with an all-equal common prefix, the shorter slice
        // orders first.
        // FIXME(const-hack): revert this to a const closure once possible
        #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
        const fn len_chain(a: &usize, b: &usize) -> ControlFlow<Ordering, Infallible> {
            ControlFlow::Break(usize::cmp(a, b))
        }

        // `AlwaysBreak`'s `Continue` type is uninhabited, so this is irrefutable.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
315
316/// Marks that a type should be treated as an unsigned byte for comparisons.
317///
318/// # Safety
319/// * The type must be readable as an `u8`, meaning it has to have the same
320///   layout as `u8` and always be initialized.
321/// * For every `x` and `y` of this type, `Ord(x, y)` must return the same
322///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
323#[rustc_specialization_trait]
324const unsafe trait UnsignedBytewiseOrd: [const] Ord {}
325
// SAFETY (each impl below): these types satisfy `UnsignedBytewiseOrd`'s
// contract — a one-byte, always-initialized layout whose `Ord` matches the
// order of the underlying `u8` value.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}
336
// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            // Identical common prefix: the length difference decides, with the
            // shorter slice ordering first.
            order = diff;
        }
        // Map the sign of `order` onto `Less`/`Equal`/`Greater`.
        order.cmp(&0)
    }
}
363
364// Don't generate our own chaining loops for `memcmp`-able things either.
365
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
    // Each method performs one full bytewise comparison, then translates the
    // three-way result into the chaining protocol: `Equal` keeps the chain
    // going; anything else breaks with the boolean answer.
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}
397
// Specialization hook for element-membership tests on slices — presumably
// backing `<[T]>::contains` in the parent module (caller not visible here).
pub(super) trait SliceContains: Sized {
    /// Returns `true` if `self` occurs anywhere in `x`.
    fn slice_contains(&self, x: &[Self]) -> bool;
}
401
402impl<T> SliceContains for T
403where
404    T: PartialEq,
405{
406    default fn slice_contains(&self, x: &[Self]) -> bool {
407        x.iter().any(|y| *y == *self)
408    }
409}
410
411impl SliceContains for u8 {
412    #[inline]
413    fn slice_contains(&self, x: &[Self]) -> bool {
414        memchr::memchr(*self, x).is_some()
415    }
416}
417
418impl SliceContains for i8 {
419    #[inline]
420    fn slice_contains(&self, x: &[Self]) -> bool {
421        let byte = *self as u8;
422        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
423        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
424        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
425        // than `isize::MAX`. The returned slice is never mutated.
426        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
427        memchr::memchr(byte, bytes).is_some()
428    }
429}
430
// Unrolled, auto-vectorizable `slice_contains` for the wider primitive types.
macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD: `fold` with `|` (rather than a short-circuiting
                    // `any`) keeps the per-chunk check branch-free, which is
                    // what allows the vectorization aimed for above.
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);