diff --git a/Cargo.toml b/Cargo.toml index b773030b4cab4..776cafab70e59 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -93,3 +93,8 @@ codegen-units = 1 # FIXME: LTO cannot be enabled for binaries in a workspace # # lto = true + +[profile.release.package.rustc_mir_build] +opt-level = 1 +[profile.release.package.rustc_driver] +opt-level = 1 diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 16ead1b978543..8eb0b9e619946 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -809,6 +809,10 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { // Otherwise, it's really misleading to call something "conditionally" // const when it's very obviously not conditionally const. if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) { + if tcx.is_lang_item(trait_did, LangItem::PatternConstEq) { + return; + } + // Trait calls are always conditionally-const. self.check_op(ops::ConditionallyConstCall { callee, diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 02bc069fc5f27..e4c32034edafb 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -417,6 +417,11 @@ language_item_table! { String, sym::String, string, Target::Struct, GenericRequirement::None; CStr, sym::CStr, c_str, Target::Struct, GenericRequirement::None; + + // FIXME(xacrimon): Used for lowering of match/if let statements. Will be made obsolete by const PartialEq. 
+ PatternConstEq, sym::PatternConstEq, pattern_const_eq, Target::Trait, GenericRequirement::None; + AggregateRawPtr, sym::aggregate_raw_ptr, aggregate_raw_ptr, Target::Fn, GenericRequirement::None; + Offset, sym::offset, offset, Target::Fn, GenericRequirement::None; } pub enum GenericRequirement { diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 750531b638e4d..871e9f3d38a71 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -114,6 +114,7 @@ macro_rules! arena_types { [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph, [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls, [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>, + [] thir_pat: rustc_middle::thir::Pat<'tcx>, ]); ) } diff --git a/compiler/rustc_middle/src/mir/consts.rs b/compiler/rustc_middle/src/mir/consts.rs index 66d97fda43332..772086d8d559a 100644 --- a/compiler/rustc_middle/src/mir/consts.rs +++ b/compiler/rustc_middle/src/mir/consts.rs @@ -448,6 +448,7 @@ impl<'tcx> Const<'tcx> { Self::Val(cv, ty) } + #[inline] pub fn from_usize(tcx: TyCtxt<'tcx>, n: u64) -> Self { let ty = tcx.types.usize; let typing_env = ty::TypingEnv::fully_monomorphized(); diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 7d5e5c2e82370..fe864d4bc06ac 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -1555,6 +1555,13 @@ impl<'tcx> Ty<'tcx> { ty } + pub fn pointee(self) -> Ty<'tcx> { + match *self.kind() { + ty::RawPtr(ty, _) => ty, + _ => bug!("pointee called on non-pointer type: {:?}", self), + } + } + // FIXME(compiler-errors): Think about removing this. 
#[inline] pub fn outer_exclusive_binder(self) -> ty::DebruijnIndex { diff --git a/compiler/rustc_mir_build/src/builder/matches/match_pair.rs b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs index 9d59ffc88ba23..b40d2209a63b3 100644 --- a/compiler/rustc_mir_build/src/builder/matches/match_pair.rs +++ b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs @@ -1,9 +1,13 @@ +use std::ops; + +use either::Either; use rustc_middle::mir::*; use rustc_middle::thir::{self, *}; use rustc_middle::ty::{self, Ty, TypeVisitableExt}; use crate::builder::Builder; use crate::builder::expr::as_place::{PlaceBase, PlaceBuilder}; +use crate::builder::matches::util::Range; use crate::builder::matches::{FlatPat, MatchPairTree, TestCase}; impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -33,6 +37,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// Used internally by [`MatchPairTree::for_pattern`]. fn prefix_slice_suffix<'pat>( &mut self, + top_pattern: &'pat Pat<'tcx>, match_pairs: &mut Vec>, place: &PlaceBuilder<'tcx>, prefix: &'pat [Box>], @@ -54,11 +59,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ((prefix.len() + suffix.len()).try_into().unwrap(), false) }; - match_pairs.extend(prefix.iter().enumerate().map(|(idx, subpattern)| { - let elem = - ProjectionElem::ConstantIndex { offset: idx as u64, min_length, from_end: false }; - MatchPairTree::for_pattern(place.clone_project(elem), subpattern, self) - })); + if !prefix.is_empty() { + let bounds = Range::from_start(0..prefix.len() as u64); + let subpattern = bounds.apply(prefix); + for pair in self.build_slice_branch(bounds, place, top_pattern, subpattern) { + match_pairs.push(pair); + } + } if let Some(subslice_pat) = opt_slice { let suffix_len = suffix.len() as u64; @@ -70,16 +77,155 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { match_pairs.push(MatchPairTree::for_pattern(subslice, subslice_pat, self)); } - match_pairs.extend(suffix.iter().rev().enumerate().map(|(idx, subpattern)| { - let end_offset = (idx + 1) as u64; - let elem = 
ProjectionElem::ConstantIndex { - offset: if exact_size { min_length - end_offset } else { end_offset }, - min_length, - from_end: !exact_size, + if !suffix.is_empty() { + let bounds = Range::from_end(0..suffix.len() as u64); + let subpattern = bounds.apply(suffix); + for pair in self.build_slice_branch(bounds, place, top_pattern, subpattern) { + match_pairs.push(pair); + } + } + } + + fn build_slice_branch<'pat, 'b>( + &'b mut self, + bounds: Range, + place: &'b PlaceBuilder<'tcx>, + top_pattern: &'pat Pat<'tcx>, + pattern: &'pat [Box>], + ) -> impl Iterator> + use<'a, 'tcx, 'pat, 'b> { + let entries = self.find_const_groups(pattern); + + entries.into_iter().map(move |entry| { + let mut build_single = |idx| { + let subpattern = &pattern[idx as usize]; + let place = place.clone_project(ProjectionElem::ConstantIndex { + offset: bounds.shift_idx(idx), + min_length: pattern.len() as u64, + from_end: bounds.from_end, + }); + + MatchPairTree::for_pattern(place, subpattern, self) }; - let place = place.clone_project(elem); - MatchPairTree::for_pattern(place, subpattern, self) - })); + + match entry { + Either::Right(range) if range.end - range.start > 1 => { + assert!( + (range.start..range.end) + .all(|idx| self.is_constant_pattern(&pattern[idx as usize])) + ); + + let subpattern = &pattern[range.start as usize..range.end as usize]; + let elem_ty = subpattern[0].ty; + + let valtree = self.simplify_const_pattern_slice_into_valtree(subpattern); + self.valtree_to_match_pair( + top_pattern, + valtree, + place.clone(), + elem_ty, + bounds.shift_range(range), + true, // TODO: set false if only branch and only entry + ) + } + Either::Right(range) => { + let tree = build_single(range.start); + assert!(self.is_constant_pattern(&pattern[range.start as usize])); + tree + } + Either::Left(idx) => build_single(idx), + } + }) + } + + fn find_const_groups(&self, pattern: &[Box>]) -> Vec>> { + let mut entries = Vec::new(); + let mut current_seq_start = None; + + for (idx, pat) in 
pattern.iter().enumerate() { + if self.is_constant_pattern(pat) { + if current_seq_start.is_none() { + current_seq_start = Some(idx as u64); + } else { + continue; + } + } else { + if let Some(start) = current_seq_start { + entries.push(Either::Right(start..idx as u64)); + current_seq_start = None; + } + entries.push(Either::Left(idx as u64)); + } + } + + if let Some(start) = current_seq_start { + entries.push(Either::Right(start..pattern.len() as u64)); + } + + entries + } + + fn is_constant_pattern(&self, pat: &Pat<'tcx>) -> bool { + if let PatKind::Constant { value } = pat.kind + && let Const::Ty(_, const_) = value + && let ty::ConstKind::Value(_, valtree) = const_.kind() + && let ty::ValTree::Leaf(_) = valtree + { + true + } else { + false + } + } + + fn extract_leaf(&self, pat: &Pat<'tcx>) -> ty::ValTree<'tcx> { + if let PatKind::Constant { value } = pat.kind + && let Const::Ty(_, const_) = value + && let ty::ConstKind::Value(_, valtree) = const_.kind() + && matches!(valtree, ty::ValTree::Leaf(_)) + { + valtree + } else { + unreachable!() + } + } + + fn simplify_const_pattern_slice_into_valtree( + &self, + subslice: &[Box>], + ) -> ty::ValTree<'tcx> { + let leaves = subslice.iter().map(|p| self.extract_leaf(p)); + let interned = self.tcx.arena.alloc_from_iter(leaves); + ty::ValTree::Branch(interned) + } + + fn valtree_to_match_pair<'pat>( + &mut self, + source_pattern: &'pat Pat<'tcx>, + valtree: ty::ValTree<'tcx>, + place: PlaceBuilder<'tcx>, + elem_ty: Ty<'tcx>, + range: Range, + do_slice: bool, + ) -> MatchPairTree<'pat, 'tcx> { + let tcx = self.tcx; + let const_ty = + Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, Ty::new_array(tcx, elem_ty, range.len())); + + let pat_ty = if do_slice { Ty::new_slice(tcx, elem_ty) } else { source_pattern.ty }; + let ty_const = ty::Const::new(tcx, ty::ConstKind::Value(const_ty, valtree)); + let value = Const::Ty(const_ty, ty_const); + let test_case = TestCase::Constant { value, range: do_slice.then_some(range) }; + let 
pattern = tcx.arena.alloc(Pat { + ty: pat_ty, + span: source_pattern.span, + kind: PatKind::Constant { value }, + }); + + MatchPairTree { + place: Some(place.to_place(self)), + test_case, + subpairs: Vec::new(), + pattern, + } } } @@ -129,7 +275,7 @@ impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> { } } - PatKind::Constant { value } => TestCase::Constant { value }, + PatKind::Constant { value } => TestCase::Constant { value, range: None }, PatKind::AscribeUserType { ascription: thir::Ascription { ref annotation, variance }, @@ -192,11 +338,25 @@ impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> { } PatKind::Array { ref prefix, ref slice, ref suffix } => { - cx.prefix_slice_suffix(&mut subpairs, &place_builder, prefix, slice, suffix); + cx.prefix_slice_suffix( + pattern, + &mut subpairs, + &place_builder, + prefix, + slice, + suffix, + ); default_irrefutable() } PatKind::Slice { ref prefix, ref slice, ref suffix } => { - cx.prefix_slice_suffix(&mut subpairs, &place_builder, prefix, slice, suffix); + cx.prefix_slice_suffix( + pattern, + &mut subpairs, + &place_builder, + prefix, + slice, + suffix, + ); if prefix.is_empty() && slice.is_some() && suffix.is_empty() { default_irrefutable() diff --git a/compiler/rustc_mir_build/src/builder/matches/mod.rs b/compiler/rustc_mir_build/src/builder/matches/mod.rs index b21ec8f3083b3..ef2b07d231140 100644 --- a/compiler/rustc_mir_build/src/builder/matches/mod.rs +++ b/compiler/rustc_mir_build/src/builder/matches/mod.rs @@ -19,6 +19,7 @@ use tracing::{debug, instrument}; use crate::builder::ForGuard::{self, OutsideGuard, RefWithinGuard}; use crate::builder::expr::as_place::PlaceBuilder; +use crate::builder::matches::util::Range; use crate::builder::scope::DropKind; use crate::builder::{ BlockAnd, BlockAndExtension, Builder, GuardFrame, GuardFrameLocal, LocalsForNode, @@ -1237,7 +1238,7 @@ struct Ascription<'tcx> { enum TestCase<'pat, 'tcx> { Irrefutable { binding: Option>, ascription: Option> }, Variant { adt_def: ty::AdtDef<'tcx>, 
variant_index: VariantIdx }, - Constant { value: mir::Const<'tcx> }, + Constant { value: mir::Const<'tcx>, range: Option }, Range(&'pat PatRange<'tcx>), Slice { len: usize, variable_length: bool }, Deref { temp: Place<'tcx>, mutability: Mutability }, @@ -1313,6 +1314,7 @@ enum TestKind<'tcx> { /// `ty`, Eq { value: Const<'tcx>, + range: Option, // Integer types are handled by `SwitchInt`, and constants with ADT // types are converted back into patterns, so this can only be `&str`, // `&[T]`, `f32` or `f64`. diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index afe6b4475be3c..d985c27fa4eb5 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -17,10 +17,13 @@ use rustc_middle::{bug, span_bug}; use rustc_span::def_id::DefId; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span, Symbol, sym}; +use rustc_trait_selection::infer::InferCtxtExt; use tracing::{debug, instrument}; -use crate::builder::Builder; +use crate::builder::matches::util::Range; use crate::builder::matches::{Candidate, MatchPairTree, Test, TestBranch, TestCase, TestKind}; +use crate::builder::misc::SpannedCallOperandsExt; +use crate::builder::{BlockAnd, BlockAndExtension, Builder, PlaceBuilder}; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Identifies what test is needed to decide if `match_pair` is applicable. @@ -35,7 +38,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TestCase::Constant { .. } if match_pair.pattern.ty.is_bool() => TestKind::If, TestCase::Constant { .. 
} if is_switch_ty(match_pair.pattern.ty) => TestKind::SwitchInt, - TestCase::Constant { value } => TestKind::Eq { value, ty: match_pair.pattern.ty }, + TestCase::Constant { value, range } => { + TestKind::Eq { value, ty: match_pair.pattern.ty, range } + } TestCase::Range(range) => { assert_eq!(range.ty, match_pair.pattern.ty); @@ -141,7 +146,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.cfg.terminate(block, self.source_info(match_start_span), terminator); } - TestKind::Eq { value, mut ty } => { + TestKind::Eq { value, range, mut ty } => { let tcx = self.tcx; let success_block = target_block(TestBranch::Success); let fail_block = target_block(TestBranch::Failure); @@ -184,6 +189,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } if !ty.is_scalar() { + if let Some(range) = range { + place = unpack!( + block = self.subslice_sized_range( + block, + place, + place_ty.ty, + ty.sequence_element_type(tcx), + range, + test.span, + ) + ); + } + // Use `PartialEq::eq` instead of `BinOp::Eq` // (the binop can only handle primitives) self.non_scalar_compare( @@ -193,7 +211,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info, expect, expect_ty, - Operand::Copy(place), + place, ty, ); } else { @@ -294,6 +312,91 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } + fn subslice_sized_range( + &mut self, + mut block: BasicBlock, + input: Place<'tcx>, + input_ty: Ty<'tcx>, + elem_ty: Ty<'tcx>, + range: Range, + span: Span, + ) -> BlockAnd> { + let tcx = self.tcx; + let source_info = self.source_info(span); + + let (ptr_offset, slice_len) = { + if !range.from_end { + let start = self.literal_operand(span, Const::from_usize(tcx, range.start)); + let len = Const::from_usize(tcx, range.len()); + (start, len) + } else { + let source_len = self.temp(tcx.types.usize, span); + self.cfg.push_assign(block, source_info, source_len, Rvalue::Len(input)); + let start = self.temp(tcx.types.usize, span); + let neg_offset = self.literal_operand(span, Const::from_usize(tcx, range.start)); + + self.cfg.push_assign( + 
block, + source_info, + start, + Rvalue::BinaryOp(BinOp::Sub, Box::new((Operand::Copy(source_len), neg_offset))), + ); + + let len = Const::from_usize(tcx, range.len()); + (Operand::Copy(start), len) + } + }; + + let temp_source_ptr = self.temp(Ty::new_ptr(tcx, input_ty, Mutability::Not), span); + self.cfg.push_assign( + block, + source_info, + temp_source_ptr, + Rvalue::RawPtr(RawPtrKind::Const, input), + ); + + let elem_ptr_ty = Ty::new_ptr(tcx, elem_ty, Mutability::Not); + + let temp_elem_ptr = self.temp(elem_ptr_ty, span); + self.cfg.push_assign( + block, + source_info, + temp_elem_ptr, + Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(temp_source_ptr), elem_ptr_ty), + ); + + let updated_ptr = self.temp(elem_ptr_ty, span); + + unpack!( + block = self.call_intrinsic( + block, + span, + LangItem::Offset, + &[elem_ptr_ty, tcx.types.usize], + span.args([Operand::Copy(temp_elem_ptr), ptr_offset]), + updated_ptr + ) + ); + + let slice_len = self.literal_operand(span, slice_len); + let slice_ptr_ty = Ty::new_imm_ptr(tcx, Ty::new_slice(tcx, elem_ty)); + let subslice_ptr = self.temp(slice_ptr_ty, span); + + unpack!( + block = self.call_intrinsic( + block, + span, + LangItem::AggregateRawPtr, + &[slice_ptr_ty, elem_ptr_ty, tcx.types.usize], + span.args([Operand::Copy(updated_ptr), slice_len]), + subslice_ptr + ) + ); + + let out = PlaceBuilder::from(subslice_ptr).project(PlaceElem::Deref).to_place(self); + block.and(out) + } + /// Perform `let temp = ::deref(&place)`. /// or `let temp = ::deref_mut(&mut place)`. pub(super) fn call_deref( @@ -367,7 +470,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); } - /// Compare two values using `::eq`. + /// Compare two values using `::eq` + /// or `::eq` if they depending on exposed const-capability. /// If the values are already references, just call it directly, otherwise /// take a reference to the values first and then call it. 
fn non_scalar_compare( @@ -376,89 +480,114 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { success_block: BasicBlock, fail_block: BasicBlock, source_info: SourceInfo, - mut expect: Operand<'tcx>, - expect_ty: Ty<'tcx>, - mut val: Operand<'tcx>, + expect_op: Operand<'tcx>, + mut expect_ty: Ty<'tcx>, + mut val: Place<'tcx>, mut ty: Ty<'tcx>, ) { - // If we're using `b"..."` as a pattern, we need to insert an - // unsizing coercion, as the byte string has the type `&[u8; N]`. - // - // We want to do this even when the scrutinee is a reference to an - // array, so we can call `<[u8]>::eq` rather than having to find an - // `<[u8; N]>::eq`. - let unsize = |ty: Ty<'tcx>| match ty.kind() { - ty::Ref(region, rty, _) => match rty.kind() { - ty::Array(inner_ty, n) => Some((region, inner_ty, n)), - _ => None, - }, - _ => None, - }; - let opt_ref_ty = unsize(ty); - let opt_ref_test_ty = unsize(expect_ty); - match (opt_ref_ty, opt_ref_test_ty) { - // nothing to do, neither is an array - (None, None) => {} - (Some((region, elem_ty, _)), _) | (None, Some((region, elem_ty, _))) => { - let tcx = self.tcx; - // make both a slice - ty = Ty::new_imm_ref(tcx, *region, Ty::new_slice(tcx, *elem_ty)); - if opt_ref_ty.is_some() { - let temp = self.temp(ty, source_info.span); - self.cfg.push_assign( - block, - source_info, - temp, - Rvalue::Cast( - CastKind::PointerCoercion( - PointerCoercion::Unsize, - CoercionSource::Implicit, - ), - val, - ty, - ), - ); - val = Operand::Copy(temp); - } - if opt_ref_test_ty.is_some() { - let slice = self.temp(ty, source_info.span); - self.cfg.push_assign( - block, - source_info, - slice, - Rvalue::Cast( - CastKind::PointerCoercion( - PointerCoercion::Unsize, - CoercionSource::Implicit, - ), - expect, - ty, - ), - ); - expect = Operand::Move(slice); + let mut expect = self.temp(expect_ty, source_info.span); + self.cfg.push_assign(block, source_info, expect, Rvalue::Use(expect_op)); + + let mut normalize_depth = |mut ty: Ty<'tcx>, mut val: Place<'tcx>| { + if 
!ty.is_ref() { + ty = Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, ty); + let temp = self.temp(ty, source_info.span); + self.cfg.push_assign( + block, + source_info, + temp, + Rvalue::Ref(self.tcx.lifetimes.re_erased, BorrowKind::Shared, val), + ); + val = temp; + return (ty, val); + } + + loop { + match ty.kind() { + ty::Ref(_, inner_ty @ _, _) if inner_ty.is_ref() => { + ty = *inner_ty; + let temp = self.temp(ty, source_info.span); + self.cfg.push_assign( + block, + source_info, + temp, + Rvalue::Use(Operand::Copy(val)), + ); + val = temp; + } + _ => break, } } + + (ty, val) + }; + + (ty, val) = normalize_depth(ty, val); + (expect_ty, expect) = normalize_depth(expect_ty, expect); + + let mut coerce = |mut ty: Ty<'tcx>, mut place: Place<'tcx>, elem_ty: Ty<'tcx>| { + assert!(ty.is_ref() && ty.peel_refs().is_array()); + let ref_ty = Ty::new_imm_ref( + self.tcx, + self.tcx.lifetimes.re_erased, + Ty::new_slice(self.tcx, elem_ty), + ); + + let ref_val = self.temp(ref_ty, source_info.span); + self.cfg.push_assign( + block, + source_info, + ref_val, + Rvalue::Cast( + CastKind::PointerCoercion(PointerCoercion::Unsize, CoercionSource::Implicit), + Operand::Copy(place), + ref_ty, + ), + ); + + ty = ref_ty; + place = ref_val; + (ty, place) + }; + + if let ty::Array(elem_ty, _) = ty.peel_refs().kind() { + (ty, val) = coerce(ty, val, *elem_ty); } - // Figure out the type on which we are calling `PartialEq`. This involves an extra wrapping + if let ty::Array(elem_ty, _) = expect_ty.peel_refs().kind() { + (_, expect) = coerce(expect_ty, expect, *elem_ty); + } + + // Figure out the type we are searching for trait impls against. This involves an extra wrapping // reference: we can only compare two `&T`, and then compare_ty will be `T`. // Make sure that we do *not* call any user-defined code here. 
- // The only types that can end up here are string and byte literals, + // The only types that can end up here are str and ScalarInt slices, // which have their comparison defined in `core`. // (Interestingly this means that exhaustiveness analysis relies, for soundness, - // on the `PartialEq` impls for `str` and `[u8]` to b correct!) + // on the `PatternConstEq` and `PartialEq` impls for `str` and `[T]` to be correct!) let compare_ty = match *ty.kind() { - ty::Ref(_, deref_ty, _) - if deref_ty == self.tcx.types.str_ || deref_ty != self.tcx.types.u8 => - { + ty::Ref(_, deref_ty, _) if deref_ty == self.tcx.types.str_ || deref_ty.is_slice() => { deref_ty } _ => span_bug!(source_info.span, "invalid type for non-scalar compare: {}", ty), }; - let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, Some(source_info.span)); - let method = trait_method(self.tcx, eq_def_id, sym::eq, [compare_ty, compare_ty]); + let const_cmp_trait_def = + self.tcx.require_lang_item(LangItem::PatternConstEq, Some(source_info.span)); + let fallback_cmp_trait_def = + self.tcx.require_lang_item(LangItem::PartialEq, Some(source_info.span)); + + let cmp_trait_def = if self + .infcx + .type_implements_trait(const_cmp_trait_def, [compare_ty, compare_ty], self.param_env) + .must_apply_modulo_regions() + { + const_cmp_trait_def + } else { + fallback_cmp_trait_def + }; + let method = trait_method(self.tcx, cmp_trait_def, sym::eq, [compare_ty, compare_ty]); let bool_ty = self.tcx.types.bool; let eq_result = self.temp(bool_ty, source_info.span); let eq_block = self.cfg.start_new_block(); @@ -474,8 +603,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { const_: method, })), - args: [Spanned { node: val, span: DUMMY_SP }, Spanned { node: expect, span: DUMMY_SP }] - .into(), + args: [Spanned { node: Operand::Copy(val), span: DUMMY_SP }, Spanned { + node: Operand::Copy(expect), + span: DUMMY_SP, + }] + .into(), destination: eq_result, target: Some(eq_block), unwind: UnwindAction::Continue, @@ -557,7 +689,7 
@@ impl<'a, 'tcx> Builder<'a, 'tcx> { // // FIXME(#29623) we could use PatKind::Range to rule // things out here, in some cases. - (TestKind::SwitchInt, &TestCase::Constant { value }) + (TestKind::SwitchInt, &TestCase::Constant { value, .. }) if is_switch_ty(match_pair.pattern.ty) => { // An important invariant of candidate sorting is that a candidate @@ -611,7 +743,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }) } - (TestKind::If, TestCase::Constant { value }) => { + (TestKind::If, TestCase::Constant { value, .. }) => { fully_matched = true; let value = value.try_eval_bool(self.tcx, self.typing_env()).unwrap_or_else(|| { span_bug!(test.span, "expected boolean value but got {value:?}") @@ -700,7 +832,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } - (TestKind::Range(range), &TestCase::Constant { value }) => { + (TestKind::Range(range), &TestCase::Constant { value, .. }) => { fully_matched = false; if !range.contains(value, self.tcx, self.typing_env())? { // `value` is not contained in the testing range, @@ -711,7 +843,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - (TestKind::Eq { value: test_val, .. }, TestCase::Constant { value: case_val }) => { + (TestKind::Eq { value: test_val, .. }, TestCase::Constant { value: case_val, .. 
}) => { if test_val == case_val { fully_matched = true; Some(TestBranch::Success) diff --git a/compiler/rustc_mir_build/src/builder/matches/util.rs b/compiler/rustc_mir_build/src/builder/matches/util.rs index 1bd399e511b39..8ba3c46682183 100644 --- a/compiler/rustc_mir_build/src/builder/matches/util.rs +++ b/compiler/rustc_mir_build/src/builder/matches/util.rs @@ -1,3 +1,5 @@ +use std::ops; + use rustc_data_structures::fx::FxIndexMap; use rustc_middle::mir::*; use rustc_middle::ty::Ty; @@ -229,3 +231,46 @@ pub(crate) fn ref_pat_borrow_kind(ref_mutability: Mutability) -> BorrowKind { Mutability::Not => BorrowKind::Shared, } } + +#[derive(Copy, Clone, PartialEq, Debug)] +pub(super) struct Range { + pub(crate) start: u64, + pub(crate) end: u64, + pub(crate) from_end: bool, +} + +impl Range { + pub(crate) fn from_start(range: ops::Range) -> Self { + Range { start: range.start, end: range.end, from_end: false } + } + + pub(crate) fn from_end(range: ops::Range) -> Self { + Range { start: range.end, end: range.start, from_end: true } + } + + pub(crate) fn len(self) -> u64 { + if !self.from_end { self.end - self.start } else { self.start - self.end } + } + + pub(crate) fn apply(self, slice: &[T]) -> &[T] { + if !self.from_end { + &slice[self.start as usize..self.end as usize] + } else { + &slice[..self.start as usize - self.end as usize] + } + } + + pub(crate) fn shift_idx(self, idx: u64) -> u64 { + if !self.from_end { self.start + idx } else { self.start - idx } + } + + pub(crate) fn shift_range(self, range_within: ops::Range) -> Self { + if !self.from_end { + Self::from_start(self.start + range_within.start..self.start + range_within.end) + } else { + let range_within_start = range_within.end; + let range_within_end = range_within.start; + Self::from_end(self.start - range_within_start..self.start - range_within_end) + } + } +} diff --git a/compiler/rustc_mir_build/src/builder/misc.rs b/compiler/rustc_mir_build/src/builder/misc.rs index 9ea56a9574fde..aab36c8e45dbd 
100644 --- a/compiler/rustc_mir_build/src/builder/misc.rs +++ b/compiler/rustc_mir_build/src/builder/misc.rs @@ -1,12 +1,15 @@ //! Miscellaneous builder routines that are not specific to building any particular //! kind of thing. +use rustc_hir::LangItem; use rustc_middle::mir::*; use rustc_middle::ty::{self, Ty}; use rustc_span::Span; +use rustc_span::source_map::Spanned; use rustc_trait_selection::infer::InferCtxtExt; use tracing::debug; +use super::{BlockAnd, BlockAndExtension}; use crate::builder::Builder; impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -62,4 +65,46 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Operand::Move(place) } } + + pub(crate) fn call_intrinsic( + &mut self, + block: BasicBlock, + span: Span, + intrinsic: LangItem, + type_args: &[Ty<'tcx>], + args: Box<[Spanned>]>, + output: Place<'tcx>, + ) -> BlockAnd<()> { + let tcx = self.tcx; + let source_info = self.source_info(span); + let func = Operand::function_handle( + tcx, + tcx.require_lang_item(intrinsic, Some(span)), + type_args.iter().copied().map(Into::into), + span, + ); + + let next_block = self.cfg.start_new_block(); + self.cfg.terminate(block, source_info, TerminatorKind::Call { + func, + args, + destination: output, + target: Some(next_block), + unwind: UnwindAction::Continue, + call_source: CallSource::Misc, + fn_span: span, + }); + + next_block.unit() + } +} + +pub(crate) trait SpannedCallOperandsExt<'tcx> { + fn args(&self, list: impl IntoIterator>) -> Box<[Spanned>]>; +} + +impl<'tcx> SpannedCallOperandsExt<'tcx> for Span { + fn args(&self, list: impl IntoIterator>) -> Box<[Spanned>]> { + list.into_iter().map(move |arg| Spanned { node: arg, span: *self }).collect() + } } diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs index 8e786733ee03d..c17468e88471b 100644 --- a/compiler/rustc_mir_build/src/lib.rs +++ b/compiler/rustc_mir_build/src/lib.rs @@ -1,11 +1,13 @@ //! Construction of MIR from HIR. 
// tidy-alphabetical-start +#![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(if_let_guard)] +#![feature(lang_items)] #![feature(let_chains)] #![feature(try_blocks)] #![warn(unreachable_pub)] diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 6f1d3a74a8165..399629b559944 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -286,6 +286,7 @@ symbols! { PartialOrd, Path, PathBuf, + PatternConstEq, Pending, PinCoerceUnsized, Pointer, diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 41b2ffad6680d..f83fd6cbebbe8 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -1965,6 +1965,7 @@ pub const fn needs_drop() -> bool { /// of bounds or arithmetic overflow occurs then this operation is undefined behavior. /// /// The stabilized version of this intrinsic is [`pointer::offset`]. +#[cfg_attr(not(bootstrap), lang = "offset")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_intrinsic_const_stable_indirect] #[rustc_nounwind] @@ -4202,6 +4203,7 @@ pub const fn type_id() -> u128 { /// This is used to implement functions like `slice::from_raw_parts_mut` and /// `ptr::from_raw_parts` in a way compatible with the compiler being able to /// change the possible layouts of pointers. 
+#[cfg_attr(not(bootstrap), lang = "aggregate_raw_ptr")] #[rustc_nounwind] #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_intrinsic_const_stable_indirect] diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index c18e0405f7293..e41bc1069ae45 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -252,6 +252,9 @@ pub use crate::macros::cfg_match; #[macro_use] mod internal_macros; +#[cfg(not(bootstrap))] +mod match_internals; + #[path = "num/shells/int_macros.rs"] #[macro_use] mod int_macros; diff --git a/library/core/src/match_internals.rs b/library/core/src/match_internals.rs new file mode 100644 index 0000000000000..1bfa51869c9a2 --- /dev/null +++ b/library/core/src/match_internals.rs @@ -0,0 +1,77 @@ +use crate::cmp::BytewiseEq; +use crate::{intrinsics, mem}; + +#[lang = "PatternConstEq"] +#[const_trait] +trait PatternConstEq +where + Rhs: ?Sized, +{ + #[allow(dead_code)] + fn eq(&self, other: &Rhs) -> bool; +} + +macro_rules ! impl_for_primitive { + ($($t:ty),*) => { + $( + impl const PatternConstEq for $t { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + *self == *other + } + } + )* + }; +} + +impl_for_primitive! { + bool, char, + u8, u16, u32, u64, u128, usize, + i8, i16, i32, i64, i128, isize, + f32, f64 +} + +impl const PatternConstEq for [T] +where + T: ~const PatternConstEq, +{ + #[inline(always)] + default fn eq(&self, other: &Self) -> bool { + if self.len() != other.len() { + return false; + } + + let mut i = 0; + + while i < self.len() { + if ::eq(&self[i], &other[i]) == false { + return false; + } + + i += 1; + } + + true + } +} + +#[rustc_const_unstable(feature = "core_intrinsics", issue = "none")] +impl const PatternConstEq for [T] +where + T: ~const PatternConstEq + BytewiseEq, +{ + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + if self.len() != other.len() { + return false; + } + + // SAFETY: `self` and `other` are references and are thus guaranteed to be valid. 
+ // The two slices have been checked to have the same size above. + unsafe { + let size = mem::size_of_val(self); + intrinsics::compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) + == 0 + } + } +} diff --git a/tests/ui/traits/const-traits/auxiliary/minicore.rs b/tests/ui/traits/const-traits/auxiliary/minicore.rs index 08d7817548d7c..2212cf219c104 100644 --- a/tests/ui/traits/const-traits/auxiliary/minicore.rs +++ b/tests/ui/traits/const-traits/auxiliary/minicore.rs @@ -329,6 +329,16 @@ impl PartialEq for str { } } +#[lang = "PatternConstEq"] +#[const_trait] +trait PatternConstEq +where + Rhs: ?Sized, +{ + #[allow(dead_code)] + fn eq(&self, other: &Rhs) -> bool; +} + #[lang = "not"] #[const_trait] pub trait Not {