Skip to content

Commit fbb321e

Browse files
committed
Simplify the aggregate-as-OperandRef path
No need to build `ArrayVec`s; just put everything exactly where it goes.
1 parent 8ce2287 commit fbb321e

File tree

5 files changed

+199
-178
lines changed

5 files changed

+199
-178
lines changed

Cargo.lock

Lines changed: 0 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -3469,11 +3469,9 @@ name = "rustc_codegen_ssa"
34693469
version = "0.0.0"
34703470
dependencies = [
34713471
"ar_archive_writer",
3472-
"arrayvec",
34733472
"bitflags",
34743473
"bstr",
34753474
"cc",
3476-
"either",
34773475
"itertools",
34783476
"libc",
34793477
"object 0.37.0",

compiler/rustc_codegen_ssa/Cargo.toml

Lines changed: 0 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -6,13 +6,11 @@ edition = "2024"
66
[dependencies]
77
# tidy-alphabetical-start
88
ar_archive_writer = "0.4.2"
9-
arrayvec = { version = "0.7", default-features = false }
109
bitflags = "2.4.1"
1110
bstr = "1.11.3"
1211
# Pinned so `cargo update` bumps don't cause breakage. Please also update the
1312
# `cc` in `rustc_llvm` if you update the `cc` here.
1413
cc = "=1.2.16"
15-
either = "1.5.0"
1614
itertools = "0.12"
1715
pathdiff = "0.2.0"
1816
regex = "1.4"

compiler/rustc_codegen_ssa/src/mir/operand.rs

Lines changed: 168 additions & 28 deletions
Original file line number · Diff line number · Diff line change
@@ -1,9 +1,9 @@
11
use std::fmt;
22

3-
use arrayvec::ArrayVec;
4-
use either::Either;
53
use rustc_abi as abi;
6-
use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};
4+
use rustc_abi::{
5+
Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, Variants,
6+
};
77
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
88
use rustc_middle::mir::{self, ConstValue};
99
use rustc_middle::ty::Ty;
@@ -69,31 +69,6 @@ pub enum OperandValue<V> {
6969
}
7070

7171
impl<V: CodegenObject> OperandValue<V> {
72-
/// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.
73-
/// If this is Ref, return the place.
74-
#[inline]
75-
pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> {
76-
match self {
77-
OperandValue::ZeroSized => Either::Left(ArrayVec::new()),
78-
OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),
79-
OperandValue::Pair(a, b) => Either::Left([a, b].into()),
80-
OperandValue::Ref(p) => Either::Right(p),
81-
}
82-
}
83-
84-
/// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair.
85-
#[inline]
86-
pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {
87-
let mut it = immediates.into_iter();
88-
let Some(a) = it.next() else {
89-
return OperandValue::ZeroSized;
90-
};
91-
let Some(b) = it.next() else {
92-
return OperandValue::Immediate(a);
93-
};
94-
OperandValue::Pair(a, b)
95-
}
96-
9772
/// Treat this value as a pointer and return the data pointer and
9873
/// optional metadata as backend values.
9974
///
@@ -595,6 +570,81 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
595570
}
596571
}
597572
}
573+
574+
pub(crate) fn builder(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, Result<V, abi::Scalar>> {
575+
let val = match layout.backend_repr {
576+
BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
577+
BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
578+
BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),
579+
_ => bug!("Cannot use type in operand builder: {layout:?}"),
580+
};
581+
OperandRef { val, layout }
582+
}
583+
}
584+
585+
impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
586+
pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
587+
&mut self,
588+
bx: &mut Bx,
589+
f: FieldIdx,
590+
operand: OperandRef<'tcx, V>,
591+
) {
592+
let field_layout = self.layout.field(bx.cx(), f.as_usize());
593+
let field_offset = self.layout.fields.offset(f.as_usize());
594+
595+
let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {
596+
let from_bty = bx.cx().type_from_scalar(from_scalar);
597+
let to_scalar = tgt.unwrap_err();
598+
let to_bty = bx.cx().type_from_scalar(to_scalar);
599+
let v = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);
600+
*tgt = Ok(v);
601+
};
602+
603+
match (operand.val, operand.layout.backend_repr) {
604+
(OperandValue::ZeroSized, _) => {
605+
debug_assert_eq!(field_layout.size, Size::ZERO);
606+
}
607+
(OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
608+
OperandValue::Immediate(val @ Err(_)) => {
609+
debug_assert_eq!(field_offset, Size::ZERO);
610+
update(val, v, from_scalar);
611+
//*val = Ok(v);
612+
}
613+
OperandValue::Pair(fst @ Err(_), _) if field_offset == Size::ZERO => {
614+
update(fst, v, from_scalar);
615+
//*fst = Ok(v);
616+
}
617+
OperandValue::Pair(_, snd @ Err(_)) if field_offset != Size::ZERO => {
618+
update(snd, v, from_scalar);
619+
//*snd = Ok(v);
620+
}
621+
_ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
622+
},
623+
(OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
624+
match &mut self.val {
625+
OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {
626+
update(fst, a, from_sa);
627+
//*fst = Ok(a);
628+
update(snd, b, from_sb);
629+
//*snd = Ok(b);
630+
}
631+
_ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
632+
}
633+
}
634+
_ => bug!("Unsupported operand {operand:?} inserting into field {f:?} of {self:?}"),
635+
}
636+
}
637+
638+
pub fn finalize(self) -> OperandRef<'tcx, V> {
639+
let OperandRef { val, layout } = self;
640+
let val = match val {
641+
OperandValue::ZeroSized => OperandValue::ZeroSized,
642+
OperandValue::Immediate(v) => OperandValue::Immediate(v.unwrap()),
643+
OperandValue::Pair(a, b) => OperandValue::Pair(a.unwrap(), b.unwrap()),
644+
OperandValue::Ref(_) => bug!(),
645+
};
646+
OperandRef { val, layout }
647+
}
598648
}
599649

600650
impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
@@ -844,3 +894,93 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
844894
}
845895
}
846896
}
897+
898+
/// Transmutes one of the immediates from an [`OperandValue::Immediate`]
899+
/// or an [`OperandValue::Pair`] to an immediate of the target type.
900+
///
901+
/// `to_backend_ty` must be the *non*-immediate backend type (so it will be
902+
/// `i8`, not `i1`, for `bool`-like types.)
903+
pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
904+
bx: &mut Bx,
905+
mut imm: Bx::Value,
906+
from_scalar: abi::Scalar,
907+
from_backend_ty: Bx::Type,
908+
to_scalar: abi::Scalar,
909+
to_backend_ty: Bx::Type,
910+
) -> Bx::Value {
911+
assert_eq!(from_scalar.size(bx.cx()), to_scalar.size(bx.cx()));
912+
913+
// While optimizations will remove no-op transmutes, they might still be
914+
// there in debug or things that aren't no-op in MIR because they change
915+
// the Rust type but not the underlying layout/niche.
916+
if from_scalar == to_scalar && from_backend_ty == to_backend_ty {
917+
return imm;
918+
}
919+
920+
use abi::Primitive::*;
921+
imm = bx.from_immediate(imm);
922+
923+
// If we have a scalar, we must already know its range. Either
924+
//
925+
// 1) It's a parameter with `range` parameter metadata,
926+
// 2) It's something we `load`ed with `!range` metadata, or
927+
// 3) After a transmute we `assume`d the range (see below).
928+
//
929+
// That said, last time we tried removing this, it didn't actually help
930+
// the rustc-perf results, so might as well keep doing it
931+
// <https://github.com/rust-lang/rust/pull/135610#issuecomment-2599275182>
932+
assume_scalar_range(bx, imm, from_scalar, from_backend_ty);
933+
934+
imm = match (from_scalar.primitive(), to_scalar.primitive()) {
935+
(Int(..) | Float(_), Int(..) | Float(_)) => bx.bitcast(imm, to_backend_ty),
936+
(Pointer(..), Pointer(..)) => bx.pointercast(imm, to_backend_ty),
937+
(Int(..), Pointer(..)) => bx.ptradd(bx.const_null(bx.type_ptr()), imm),
938+
(Pointer(..), Int(..)) => {
939+
// FIXME: this exposes the provenance, which shouldn't be necessary.
940+
bx.ptrtoint(imm, to_backend_ty)
941+
}
942+
(Float(_), Pointer(..)) => {
943+
let int_imm = bx.bitcast(imm, bx.cx().type_isize());
944+
bx.ptradd(bx.const_null(bx.type_ptr()), int_imm)
945+
}
946+
(Pointer(..), Float(_)) => {
947+
// FIXME: this exposes the provenance, which shouldn't be necessary.
948+
let int_imm = bx.ptrtoint(imm, bx.cx().type_isize());
949+
bx.bitcast(int_imm, to_backend_ty)
950+
}
951+
};
952+
953+
// This `assume` remains important for cases like (a conceptual)
954+
// transmute::<u32, NonZeroU32>(x) == 0
955+
// since it's never passed to something with parameter metadata (especially
956+
// after MIR inlining) so the only way to tell the backend about the
957+
// constraint that the `transmute` introduced is to `assume` it.
958+
assume_scalar_range(bx, imm, to_scalar, to_backend_ty);
959+
960+
imm = bx.to_immediate_scalar(imm, to_scalar);
961+
imm
962+
}
963+
964+
pub(super) fn assume_scalar_range<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
965+
bx: &mut Bx,
966+
imm: Bx::Value,
967+
scalar: abi::Scalar,
968+
backend_ty: Bx::Type,
969+
) {
970+
if matches!(bx.cx().sess().opts.optimize, OptLevel::No) || scalar.is_always_valid(bx.cx()) {
971+
return;
972+
}
973+
974+
match scalar.primitive() {
975+
abi::Primitive::Int(..) => {
976+
let range = scalar.valid_range(bx.cx());
977+
bx.assume_integer_range(imm, backend_ty, range);
978+
}
979+
abi::Primitive::Pointer(abi::AddressSpace::DATA)
980+
if !scalar.valid_range(bx.cx()).contains(0) =>
981+
{
982+
bx.assume_nonnull(imm);
983+
}
984+
abi::Primitive::Pointer(..) | abi::Primitive::Float(..) => {}
985+
}
986+
}

0 commit comments

Comments
 (0)