core/slice/cmp.rs

//! Comparison traits for `[T]`.

use super::{from_raw_parts, memchr};
use crate::ascii;
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::mem::SizedTypeProperties;
use crate::num::NonZero;
use crate::ops::ControlFlow;

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        let len = self.len();
        if len == other.len() {
            // SAFETY: Just checked that they're the same length, and the pointers
            // come from references-to-slices so they're guaranteed readable.
            unsafe { SlicePartialEq::equal_same_length(self.as_ptr(), other.as_ptr(), len) }
        } else {
            false
        }
    }
}
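
// Because the impl above is generic over two element types, slices whose
// elements are merely comparable to each other can be compared directly.
// An illustrative example (not part of this file):
//
//     let owned: &[String] = &[String::from("a")];
//     let borrowed: &[&str] = &["a"];
//     assert!(owned == borrowed); // goes through `String: PartialEq<&str>`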

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Eq> const Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

#[inline]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
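
// A sketch of the mapping the transmute above relies on (an assumption about
// the niche encoding, not a stable layout guarantee): `Break(false)` becomes
// `0`, `Break(true)` becomes `1`, and `Continue(())` takes the remaining niche
// value (e.g. `2`). The callers below thus read `== 1` as "broke with `true`"
// and `!= 0` as "broke with `true`, or all comparisons continued (fully equal)".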

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
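
// Lexicographic comparison in action (illustrative, not part of this file):
//
//     use core::cmp::Ordering;
//     let a: &[i32] = &[1, 2, 2];
//     let b: &[i32] = &[1, 3];
//     assert_eq!(a.partial_cmp(b), Some(Ordering::Less)); // decided at index 1: 2 < 3
//     let c: &[i32] = &[1, 2];
//     assert_eq!(c.partial_cmp(a), Some(Ordering::Less)); // equal prefix, shorter sorts first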

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    /// # Safety
    /// `lhs` and `rhs` are both readable for `len` elements
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool;
}

// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] PartialEq<B>,
{
    // It's not worth trying to inline the loops underneath here *in MIR*,
    // and preventing it encourages more useful inlining upstream,
    // such as in `<str as PartialEq>::eq`.
    // The codegen backend can still inline it later if needed.
    #[rustc_no_mir_inline]
    default unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..len` loop once
        // `for` loops (which desugar to the non-`const` `Iterator` machinery)
        // are usable in `const fn`.
        let mut idx = 0;
        while idx < len {
            // SAFETY: idx < len, so both are in-bounds and readable
            if unsafe { *lhs.add(idx) != *rhs.add(idx) } {
                return false;
            }
            idx += 1;
        }

        true
    }
}

// When each element can be compared byte-wise, we can compare the bytes of
// the whole slice in a single call to the `compare_bytes` intrinsic.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] BytewiseEq<B>,
{
    #[inline]
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // SAFETY: by our precondition, `lhs` and `rhs` are guaranteed to be valid
        // for reading `len` values, which also means the size is guaranteed
        // not to overflow because it exists in memory.
        unsafe {
            let size = crate::intrinsics::unchecked_mul(len, Self::SIZE);
            compare_bytes(lhs as _, rhs as _, size) == 0
        }
    }
}
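
// For `BytewiseEq` element types, the impl above turns the whole comparison
// into a single `compare_bytes` (`memcmp`-style) call over
// `len * size_of::<Self>()` bytes instead of an element-by-element loop; for
// example, comparing two equal-length `&[u8]` values is one such call.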

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd
const trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd chaining methods
const trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}

type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}
#[inline]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bounds-check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
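
// A worked instance of the control flow above (illustrative): for
// `left = [1, 2, 3]` and `right = [1, 2, 4]`, the loop reaches
// `elem_chain(&3, &4)` at `i == 2`, which returns `Break(..)` and makes the
// `?` short-circuit, so `len_chain` is never reached. For `[1, 2]` vs
// `[1, 2, 4]`, every element of the common prefix continues and the result
// comes from `len_chain(&2, &3)`.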

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}

macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
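
// Note that `f32` and `f64` are absent from the list above: they implement
// `PartialOrd` but not `Ord` (NaN breaks totality), so they cannot be
// `AlwaysApplicableOrd` and instead go through the generic `SlicePartialOrd`
// impl.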

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's Ord
const trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as a `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord::cmp(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
const unsafe trait UnsignedBytewiseOrd: [const] Ord {}

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}
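
// A sketch of why `Option<NonZero<u8>>` qualifies: its guaranteed niche
// layout represents `None` as the byte `0` and `Some(n)` as the nonzero byte
// `n`, and `Ord` on `Option` sorts `None` before every `Some`, which agrees
// with `0 < n` on the underlying bytes.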

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
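
// A worked instance of the trick above (illustrative): for `left = [1u8, 2]`
// and `right = [1u8, 2, 3]`, `diff = -1` and `len = 2`; `compare_bytes` over
// the common prefix returns `0`, so `order` falls back to `diff` and the
// result is `Ordering::Less`, as expected for a shorter slice with an equal
// prefix.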

// Don't generate our own chaining loops for `memcmp`-able things either.

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}

pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}
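
// `memchr::memchr` is core's own byte-search routine, which scans a word at a
// time rather than byte by byte, so `u8` containment checks skip the generic
// element-by-element fallback above.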

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}

macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
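
// A worked instance of the `LANE_COUNT` arithmetic in the macro above
// (illustrative): for `u16`, `LANE_COUNT = 4 * (128 / 16) = 32` elements per
// chunk; for `u64`, `4 * (128 / 64) = 8`. Folding with `|` instead of using
// `any` avoids short-circuiting inside a chunk, which keeps the loop body
// branch-free and gives the compiler a chance to auto-vectorize it.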