Commit efbb806

Simplify the aggregate-as-OperandRef path
No need to build `ArrayVec`s; just put everything exactly where it goes.
1 parent ee9901e commit efbb806
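
As context for "put everything exactly where it goes": the new builder API added to operand.rs below starts from a layout whose scalar slots are marked unfilled (Err(Scalar)), writes each field into the slot its offset selects, and only then unwraps the slots. A hedged sketch of a hypothetical caller (the helper and its names are illustrative, not part of this commit):

fn build_aggregate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    dest_layout: TyAndLayout<'tcx>,
    fields: &[OperandRef<'tcx, Bx::Value>],
) -> OperandRef<'tcx, Bx::Value> {
    // Every scalar slot starts as Err(its target scalar), i.e. "not yet filled".
    let mut builder = OperandRef::builder(dest_layout);
    for (i, field) in fields.iter().enumerate() {
        // insert_field transmutes the field's immediate(s) to the slot's scalar
        // repr and stores them where the field's offset says they belong.
        builder.insert_field(bx, FieldIdx::from_usize(i), *field);
    }
    // All slots must now be Ok(_); unwrap them back into a plain OperandRef.
    builder.finalize()
}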

File tree: 4 files changed (+199, -176 lines)

compiler/rustc_codegen_ssa/Cargo.toml

Lines changed: 0 additions & 2 deletions
@@ -6,13 +6,11 @@ edition = "2024"
 [dependencies]
 # tidy-alphabetical-start
 ar_archive_writer = "0.4.2"
-arrayvec = { version = "0.7", default-features = false }
 bitflags = "2.4.1"
 bstr = "1.11.3"
 # Pinned so `cargo update` bumps don't cause breakage. Please also update the
 # `cc` in `rustc_llvm` if you update the `cc` here.
 cc = "=1.2.16"
-either = "1.5.0"
 itertools = "0.12"
 pathdiff = "0.2.0"
 regex = "1.4"

compiler/rustc_codegen_ssa/src/mir/operand.rs

Lines changed: 168 additions & 28 deletions
@@ -1,9 +1,9 @@
 use std::fmt;
 
-use arrayvec::ArrayVec;
-use either::Either;
 use rustc_abi as abi;
-use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};
+use rustc_abi::{
+    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, Variants,
+};
 use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
 use rustc_middle::mir::{self, ConstValue};
 use rustc_middle::ty::Ty;
@@ -63,31 +63,6 @@ pub enum OperandValue<V> {
 }
 
 impl<V: CodegenObject> OperandValue<V> {
-    /// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.
-    /// If this is Ref, return the place.
-    #[inline]
-    pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> {
-        match self {
-            OperandValue::ZeroSized => Either::Left(ArrayVec::new()),
-            OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),
-            OperandValue::Pair(a, b) => Either::Left([a, b].into()),
-            OperandValue::Ref(p) => Either::Right(p),
-        }
-    }
-
-    /// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair.
-    #[inline]
-    pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {
-        let mut it = immediates.into_iter();
-        let Some(a) = it.next() else {
-            return OperandValue::ZeroSized;
-        };
-        let Some(b) = it.next() else {
-            return OperandValue::Immediate(a);
-        };
-        OperandValue::Pair(a, b)
-    }
-
     /// Treat this value as a pointer and return the data pointer and
     /// optional metadata as backend values.
     ///
@@ -589,6 +564,81 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
         }
     }
+
+    pub(crate) fn builder(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, Result<V, abi::Scalar>> {
+        let val = match layout.backend_repr {
+            BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
+            BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
+            BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),
+            _ => bug!("Cannot use type in operand builder: {layout:?}"),
+        };
+        OperandRef { val, layout }
+    }
+}
+
+impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
+    pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+        &mut self,
+        bx: &mut Bx,
+        f: FieldIdx,
+        operand: OperandRef<'tcx, V>,
+    ) {
+        let field_layout = self.layout.field(bx.cx(), f.as_usize());
+        let field_offset = self.layout.fields.offset(f.as_usize());
+
+        let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {
+            let from_bty = bx.cx().type_from_scalar(from_scalar);
+            let to_scalar = tgt.unwrap_err();
+            let to_bty = bx.cx().type_from_scalar(to_scalar);
+            let v = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);
+            *tgt = Ok(v);
+        };
+
+        match (operand.val, operand.layout.backend_repr) {
+            (OperandValue::ZeroSized, _) => {
+                debug_assert_eq!(field_layout.size, Size::ZERO);
+            }
+            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
+                OperandValue::Immediate(val @ Err(_)) => {
+                    debug_assert_eq!(field_offset, Size::ZERO);
+                    update(val, v, from_scalar);
+                    //*val = Ok(v);
+                }
+                OperandValue::Pair(fst @ Err(_), _) if field_offset == Size::ZERO => {
+                    update(fst, v, from_scalar);
+                    //*fst = Ok(v);
+                }
+                OperandValue::Pair(_, snd @ Err(_)) if field_offset != Size::ZERO => {
+                    update(snd, v, from_scalar);
+                    //*snd = Ok(v);
+                }
+                _ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
+            },
+            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
+                match &mut self.val {
+                    OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {
+                        update(fst, a, from_sa);
+                        //*fst = Ok(a);
+                        update(snd, b, from_sb);
+                        //*snd = Ok(b);
+                    }
+                    _ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
+                }
+            }
+            _ => bug!("Unsupported operand {operand:?} inserting into field {f:?} of {self:?}"),
+        }
+    }
+
+    pub fn finalize(self) -> OperandRef<'tcx, V> {
+        let OperandRef { val, layout } = self;
+        let val = match val {
+            OperandValue::ZeroSized => OperandValue::ZeroSized,
+            OperandValue::Immediate(v) => OperandValue::Immediate(v.unwrap()),
+            OperandValue::Pair(a, b) => OperandValue::Pair(a.unwrap(), b.unwrap()),
+            OperandValue::Ref(_) => bug!(),
+        };
+        OperandRef { val, layout }
+    }
 }
 
 impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
@@ -838,3 +888,93 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }
     }
 }
+
+/// Transmutes one of the immediates from an [`OperandValue::Immediate`]
+/// or an [`OperandValue::Pair`] to an immediate of the target type.
+///
+/// `to_backend_ty` must be the *non*-immediate backend type (so it will be
+/// `i8`, not `i1`, for `bool`-like types.)
+pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
+    bx: &mut Bx,
+    mut imm: Bx::Value,
+    from_scalar: abi::Scalar,
+    from_backend_ty: Bx::Type,
+    to_scalar: abi::Scalar,
+    to_backend_ty: Bx::Type,
+) -> Bx::Value {
+    assert_eq!(from_scalar.size(bx.cx()), to_scalar.size(bx.cx()));
+
+    // While optimizations will remove no-op transmutes, they might still be
+    // there in debug or things that aren't no-op in MIR because they change
+    // the Rust type but not the underlying layout/niche.
+    if from_scalar == to_scalar && from_backend_ty == to_backend_ty {
+        return imm;
+    }
+
+    use abi::Primitive::*;
+    imm = bx.from_immediate(imm);
+
+    // If we have a scalar, we must already know its range. Either
+    //
+    // 1) It's a parameter with `range` parameter metadata,
+    // 2) It's something we `load`ed with `!range` metadata, or
+    // 3) After a transmute we `assume`d the range (see below).
+    //
+    // That said, last time we tried removing this, it didn't actually help
+    // the rustc-perf results, so might as well keep doing it
+    // <https://github.com/rust-lang/rust/pull/135610#issuecomment-2599275182>
+    assume_scalar_range(bx, imm, from_scalar, from_backend_ty);
+
+    imm = match (from_scalar.primitive(), to_scalar.primitive()) {
+        (Int(..) | Float(_), Int(..) | Float(_)) => bx.bitcast(imm, to_backend_ty),
+        (Pointer(..), Pointer(..)) => bx.pointercast(imm, to_backend_ty),
+        (Int(..), Pointer(..)) => bx.ptradd(bx.const_null(bx.type_ptr()), imm),
+        (Pointer(..), Int(..)) => {
+            // FIXME: this exposes the provenance, which shouldn't be necessary.
+            bx.ptrtoint(imm, to_backend_ty)
+        }
+        (Float(_), Pointer(..)) => {
+            let int_imm = bx.bitcast(imm, bx.cx().type_isize());
+            bx.ptradd(bx.const_null(bx.type_ptr()), int_imm)
+        }
+        (Pointer(..), Float(_)) => {
+            // FIXME: this exposes the provenance, which shouldn't be necessary.
+            let int_imm = bx.ptrtoint(imm, bx.cx().type_isize());
+            bx.bitcast(int_imm, to_backend_ty)
+        }
+    };
+
+    // This `assume` remains important for cases like (a conceptual)
+    //    transmute::<u32, NonZeroU32>(x) == 0
+    // since it's never passed to something with parameter metadata (especially
+    // after MIR inlining) so the only way to tell the backend about the
+    // constraint that the `transmute` introduced is to `assume` it.
+    assume_scalar_range(bx, imm, to_scalar, to_backend_ty);
+
+    imm = bx.to_immediate_scalar(imm, to_scalar);
+    imm
+}
+
+pub(super) fn assume_scalar_range<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
+    bx: &mut Bx,
+    imm: Bx::Value,
+    scalar: abi::Scalar,
+    backend_ty: Bx::Type,
+) {
+    if matches!(bx.cx().sess().opts.optimize, OptLevel::No) || scalar.is_always_valid(bx.cx()) {
+        return;
+    }
+
+    match scalar.primitive() {
+        abi::Primitive::Int(..) => {
+            let range = scalar.valid_range(bx.cx());
+            bx.assume_integer_range(imm, backend_ty, range);
+        }
+        abi::Primitive::Pointer(abi::AddressSpace::DATA)
+            if !scalar.valid_range(bx.cx()).contains(0) =>
+        {
+            bx.assume_nonnull(imm);
+        }
+        abi::Primitive::Pointer(..) | abi::Primitive::Float(..) => {}
+    }
+}
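
For intuition on the second assume_scalar_range call in transmute_immediate: nothing downstream carries the tightened range as parameter metadata, so the assume is the only channel for it. An illustrative, hypothetical snippet (not part of this commit; undefined behaviour if called with x == 0):

use std::num::NonZeroU32;

// With the range `assume` emitted after the transmute, the backend can fold
// `n.get() == 0` to `false`; without it, nothing says the value is non-zero.
pub unsafe fn transmute_then_compare(x: u32) -> bool {
    let n: NonZeroU32 = unsafe { std::mem::transmute::<u32, NonZeroU32>(x) };
    n.get() == 0
}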

0 commit comments