// Source file: library/core/src/slice/cmp.rs (core/slice/cmp.rs)
1//! Comparison traits for `[T]`.
2
3use super::{from_raw_parts, memchr};
4use crate::ascii;
5use crate::cmp::{self, BytewiseEq, Ordering};
6use crate::intrinsics::compare_bytes;
7use crate::marker::Destruct;
8use crate::mem::SizedTypeProperties;
9use crate::num::NonZero;
10use crate::ops::ControlFlow;
11
12#[stable(feature = "rust1", since = "1.0.0")]
13#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
14impl<T, U> const PartialEq<[U]> for [T]
15where
16    T: [const] PartialEq<U>,
17{
18    #[inline]
19    fn eq(&self, other: &[U]) -> bool {
20        let len = self.len();
21        if len == other.len() {
22            // SAFETY: Just checked that they're the same length, and the pointers
23            // come from references-to-slices so they're guaranteed readable.
24            unsafe { SlicePartialEq::equal_same_length(self.as_ptr(), other.as_ptr(), len) }
25        } else {
26            false
27        }
28    }
29}
30
// `Eq` has no methods; this just records that slice equality is an
// equivalence relation whenever element equality is (`T: Eq`).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Eq> const Eq for [T] {}
34
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Ord> const Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        // Dispatches through `SliceOrd` so that `memcmp`-able element types
        // (see `UnsignedBytewiseOrd` below) get the bytewise specialization.
        SliceOrd::compare(self, other)
    }
}
43
/// Reinterprets a `ControlFlow<bool>` as its one-byte representation.
///
/// Used by the `PartialOrd for [T]` methods below to test for
/// `Break(false)`/`Break(true)` without a discriminant `match` that LLVM
/// handles poorly (see the comment in `lt`).
#[inline]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
54
/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] PartialOrd> const PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.
        //
        // Byte encoding (see `as_underlying`): `0` is `Break(false)`, `1` is
        // `Break(true)`; `Continue(())` is some other niche value. So `== 1`
        // is "chain decided true", while `!= 0` (used for `le`/`ge`) also
        // accepts `Continue`, i.e. the slices compared fully equal.
        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
104
#[doc(hidden)]
// Intermediate trait for specialization of slice's `PartialEq`: the generic
// element-by-element impl below is overridden for `BytewiseEq` types, which
// can compare all bytes at once.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    /// # Safety
    /// `lhs` and `rhs` are both readable for `len` elements
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool;
}
113
// Generic slice equality: compare element pairs until a mismatch is found.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] PartialEq<B>,
{
    // It's not worth trying to inline the loops underneath here *in MIR*,
    // and preventing it encourages more useful inlining upstream,
    // such as in `<str as PartialEq>::eq`.
    // The codegen backend can still inline it later if needed.
    #[rustc_no_mir_inline]
    default unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..len` loop.
        let mut idx = 0;
        while idx < len {
            // SAFETY: idx < len, so both are in-bounds and readable
            // (guaranteed by this method's safety precondition).
            if unsafe { *lhs.add(idx) != *rhs.add(idx) } {
                return false;
            }
            idx += 1;
        }

        true
    }
}
142
// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] BytewiseEq<B>,
{
    #[inline]
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // SAFETY: by our precondition, `lhs` and `rhs` are guaranteed to be valid
        // for reading `len` values, which also means the total byte size
        // `len * size_of::<Self>()` is guaranteed not to overflow, because an
        // object of that size exists in memory.
        unsafe {
            let size = crate::intrinsics::unchecked_mul(len, Self::SIZE);
            compare_bytes(lhs as _, rhs as _, size) == 0
        }
    }
}
161
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// Intermediate trait for specialization of slice's `PartialOrd`; overridden
// below for `AlwaysApplicableOrd` types, whose partial order is total.
const trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
168
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// Intermediate trait for specialization of slice's `PartialOrd` chaining
// methods (`__chaining_lt` and friends): `Continue(())` means "equal so far,
// keep chaining", `Break(b)` means the comparison decided the answer `b`.
const trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}
178
// A `ControlFlow` whose `Continue` type is uninhabited: `Break` is the only
// possible value, so callers can destructure it irrefutably with `let`.
type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;
180
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd> const SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        // Break on the first element pair that doesn't compare `Equal`
        // (including incomparable pairs, which break with `None`).
        let elem_chain = const |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };

        // Whole common prefix was equal: the lengths decide the ordering.
        let len_chain = const |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));

        // Irrefutable pattern: `AlwaysBreak`'s `Continue` type is uninhabited.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
195
// Generic fallback: drive the shared chaining loop with the corresponding
// `__chaining_*` primitive for the elements and for the lengths.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd> const SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}
211
/// Shared lexicographic-comparison driver for the slice `PartialOrd`/`Ord`
/// impls above.
///
/// Runs `elem_chain` over corresponding elements of the common prefix,
/// short-circuiting (via `?`) on the first `Break`; if the whole prefix
/// continues, the result is `len_chain` applied to the two lengths.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[inline]
const fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl [const] Fn(&'l A, &'r A) -> ControlFlow<B> + [const] Destruct,
    len_chain: impl for<'a> [const] FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C> + [const] Destruct,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bound check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    // FIXME(const-hack): revert this to `for i in 0..l` once `impl const Iterator for Range<T>`
    let mut i: usize = 0;
    while i < l {
        elem_chain(&lhs[i], &rhs[i])?;
        i += 1;
    }

    len_chain(&left.len(), &right.len())
}
236
237// This is the impl that we would like to have. Unfortunately it's not sound.
238// See `partial_ord_slice.rs`.
239/*
240impl<A> SlicePartialOrd for A
241where
242    A: Ord,
243{
244    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
245        Some(SliceOrd::compare(left, right))
246    }
247}
248*/
249
// For totally-ordered element types the partial comparison can defer to the
// (possibly `memcmp`-specialized) total comparison and is never `None`.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
256
// Marker trait for types where specializing `partial_compare` to the total
// `Ord`-based comparison is known to be sound (a blanket `A: Ord` bound
// would not be; see the commented-out impl above and `partial_ord_slice.rs`).
#[rustc_specialization_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}
260
// Expands to `AlwaysApplicableOrd` impls; each `[$($p)*]` bracket is the
// generic parameter list for the type that follows it.
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
276
#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// Intermediate trait for specialization of slice's `Ord`; the generic
// element-loop impl below is overridden for `UnsignedBytewiseOrd` types.
const trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
283
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord> const SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Break on the first element pair that doesn't compare `Equal`.
        let elem_chain = const |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };

        // Whole common prefix was equal: the lengths decide the ordering.
        let len_chain = const |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));

        // Irrefutable pattern: `AlwaysBreak`'s `Continue` type is uninhabited.
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}
298
/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as an `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
const unsafe trait UnsignedBytewiseOrd: [const] Ord {}

// SAFETY (for all impls below): each of these is a single-byte,
// always-initialized type whose `Ord` matches the ordering of its byte
// representation, as required by the trait's safety contract above.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}
319
// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        // Common prefix equal: fall back to the length difference, matching
        // lexicographic ordering (shorter slice orders first).
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
346
// Don't generate our own chaining loops for `memcmp`-able things either:
// do one bytewise total comparison and translate the resulting `Ordering`
// into the chaining protocol (`Continue` on equality, `Break` otherwise).

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}
380
// Specialization helper for slice membership tests: `x` is the haystack and
// `self` the needle. Presumably backs `[T]::contains` in the parent module —
// NOTE(review): caller not visible in this file; confirm against `super`.
pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
384
385impl<T> SliceContains for T
386where
387    T: PartialEq,
388{
389    default fn slice_contains(&self, x: &[Self]) -> bool {
390        x.iter().any(|y| *y == *self)
391    }
392}
393
impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        // Byte needles can use the optimized `memchr` search directly.
        memchr::memchr(*self, x).is_some()
    }
}
400
impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        // Reuse the `u8` fast path: `i8 -> u8` casts are value-preserving
        // bitwise, so byte equality is unaffected.
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}
413
// Generates chunked `SliceContains` impls for fixed-width scalar types so the
// inner comparison loop can be auto-vectorized.
macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD-friendly main loop: `fold` with `|` (rather than a
                    // short-circuiting `any`) keeps each chunk branch-free.
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder: fewer than LANE_COUNT elements left.
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);