Commit 5d13765

Simplify the aggregate-as-OperandRef path
No need to build `ArrayVec`s; just put everything exactly where it goes.
1 parent a7fc463 commit 5d13765
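
To illustrate the new shape of the code, here is a minimal, self-contained sketch of the slot-filling idea; the `Value`, `Scalar`, `Slots`, and `Builder` names below are illustrative stand-ins, not the rustc_codegen_ssa types. Each scalar slot of the destination operand starts out as `Err(expected scalar)` and is overwritten in place with `Ok(value)` as fields are inserted, so no intermediate `ArrayVec` is needed; `finalize` then unwraps the filled slots.

// Hypothetical stand-in types; the real code is generic over the backend's
// value type and uses rustc_abi::Scalar for the "not yet filled" state.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Value {
    Imm(u64),
}

#[derive(Debug, Clone, Copy)]
struct Scalar; // placeholder for the ABI scalar expected in a slot

// Mirror of the ZeroSized/Immediate/Pair shapes an operand can have.
#[derive(Debug)]
enum Slots<V> {
    ZeroSized,
    Immediate(V),
    Pair(V, V),
}

struct Builder {
    // Each slot is `Err(Scalar)` until a field's immediate is written into it.
    slots: Slots<Result<Value, Scalar>>,
}

impl Builder {
    // Like `OperandRef::builder`: allocate the final shape up front.
    fn new_pair() -> Self {
        Builder { slots: Slots::Pair(Err(Scalar), Err(Scalar)) }
    }

    // Like `insert_field`: write the value directly into its final slot,
    // instead of pushing it onto an ArrayVec and converting at the end.
    fn insert(&mut self, offset_is_zero: bool, v: Value) {
        match &mut self.slots {
            Slots::Pair(fst @ Err(_), _) if offset_is_zero => *fst = Ok(v),
            Slots::Pair(_, snd @ Err(_)) if !offset_is_zero => *snd = Ok(v),
            _ => panic!("slot already filled or wrong shape"),
        }
    }

    // Like `finalize`: every slot must have been filled by now.
    fn finalize(self) -> Slots<Value> {
        match self.slots {
            Slots::ZeroSized => Slots::ZeroSized,
            Slots::Immediate(v) => Slots::Immediate(v.unwrap()),
            Slots::Pair(a, b) => Slots::Pair(a.unwrap(), b.unwrap()),
        }
    }
}

fn main() {
    let mut b = Builder::new_pair();
    b.insert(false, Value::Imm(2)); // fields can arrive in any order
    b.insert(true, Value::Imm(1));
    let done = b.finalize();
    assert!(matches!(done, Slots::Pair(Value::Imm(1), Value::Imm(2))));
}

In the commit itself the slots live directly in an `OperandRef<'tcx, Result<V, abi::Scalar>>` (see the `operand.rs` diff below), and `insert_field` additionally transmutes each field's immediate to the scalar type expected by the enclosing layout.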

4 files changed, 200 insertions(+), 176 deletions(-)

compiler/rustc_codegen_ssa/Cargo.toml

Lines changed: 0 additions & 2 deletions
@@ -6,13 +6,11 @@ edition = "2024"
 [dependencies]
 # tidy-alphabetical-start
 ar_archive_writer = "0.4.2"
-arrayvec = { version = "0.7", default-features = false }
 bitflags = "2.4.1"
 bstr = "1.11.3"
 # Pinned so `cargo update` bumps don't cause breakage. Please also update the
 # `cc` in `rustc_llvm` if you update the `cc` here.
 cc = "=1.2.16"
-either = "1.5.0"
 itertools = "0.12"
 pathdiff = "0.2.0"
 regex = "1.4"

compiler/rustc_codegen_ssa/src/mir/operand.rs

Lines changed: 169 additions & 28 deletions
@@ -1,14 +1,15 @@
 use std::fmt;
 
-use arrayvec::ArrayVec;
-use either::Either;
 use rustc_abi as abi;
-use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};
+use rustc_abi::{
+    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, Variants,
+};
 use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
 use rustc_middle::mir::{self, ConstValue};
 use rustc_middle::ty::Ty;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::{bug, span_bug};
+use rustc_session::config::OptLevel;
 use tracing::{debug, instrument};
 
 use super::place::{PlaceRef, PlaceValue};
@@ -62,31 +63,6 @@ pub enum OperandValue<V> {
 }
 
 impl<V: CodegenObject> OperandValue<V> {
-    /// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.
-    /// If this is Ref, return the place.
-    #[inline]
-    pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> {
-        match self {
-            OperandValue::ZeroSized => Either::Left(ArrayVec::new()),
-            OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),
-            OperandValue::Pair(a, b) => Either::Left([a, b].into()),
-            OperandValue::Ref(p) => Either::Right(p),
-        }
-    }
-
-    /// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair.
-    #[inline]
-    pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {
-        let mut it = immediates.into_iter();
-        let Some(a) = it.next() else {
-            return OperandValue::ZeroSized;
-        };
-        let Some(b) = it.next() else {
-            return OperandValue::Immediate(a);
-        };
-        OperandValue::Pair(a, b)
-    }
-
     /// Treat this value as a pointer and return the data pointer and
     /// optional metadata as backend values.
     ///
@@ -559,6 +535,81 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
         }
     }
+
+    pub(crate) fn builder(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, Result<V, abi::Scalar>> {
+        let val = match layout.backend_repr {
+            BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
+            BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
+            BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),
+            _ => bug!("Cannot use type in operand builder: {layout:?}"),
+        };
+        OperandRef { val, layout }
+    }
+}
+
+impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
+    pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+        &mut self,
+        bx: &mut Bx,
+        f: FieldIdx,
+        operand: OperandRef<'tcx, V>,
+    ) {
+        let field_layout = self.layout.field(bx.cx(), f.as_usize());
+        let field_offset = self.layout.fields.offset(f.as_usize());
+
+        let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {
+            let from_bty = bx.cx().type_from_scalar(from_scalar);
+            let to_scalar = tgt.unwrap_err();
+            let to_bty = bx.cx().type_from_scalar(to_scalar);
+            let v = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);
+            *tgt = Ok(v);
+        };
+
+        match (operand.val, operand.layout.backend_repr) {
+            (OperandValue::ZeroSized, _) => {
+                debug_assert_eq!(field_layout.size, Size::ZERO);
+            }
+            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
+                OperandValue::Immediate(val @ Err(_)) => {
+                    debug_assert_eq!(field_offset, Size::ZERO);
+                    update(val, v, from_scalar);
+                    //*val = Ok(v);
+                }
+                OperandValue::Pair(fst @ Err(_), _) if field_offset == Size::ZERO => {
+                    update(fst, v, from_scalar);
+                    //*fst = Ok(v);
+                }
+                OperandValue::Pair(_, snd @ Err(_)) if field_offset != Size::ZERO => {
+                    update(snd, v, from_scalar);
+                    //*snd = Ok(v);
+                }
+                _ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
+            },
+            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
+                match &mut self.val {
+                    OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {
+                        update(fst, a, from_sa);
+                        //*fst = Ok(a);
+                        update(snd, b, from_sb);
+                        //*snd = Ok(b);
+                    }
+                    _ => bug!("Tried to insert {operand:?} into field {f:?} of {self:?}"),
+                }
+            }
+            _ => bug!("Unsupported operand {operand:?} inserting into field {f:?} of {self:?}"),
+        }
+    }
+
+    pub fn finalize(self) -> OperandRef<'tcx, V> {
+        let OperandRef { val, layout } = self;
+        let val = match val {
+            OperandValue::ZeroSized => OperandValue::ZeroSized,
+            OperandValue::Immediate(v) => OperandValue::Immediate(v.unwrap()),
+            OperandValue::Pair(a, b) => OperandValue::Pair(a.unwrap(), b.unwrap()),
+            OperandValue::Ref(_) => bug!(),
+        };
+        OperandRef { val, layout }
+    }
 }
 
 impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
@@ -808,3 +859,93 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }
     }
 }
+
+/// Transmutes one of the immediates from an [`OperandValue::Immediate`]
+/// or an [`OperandValue::Pair`] to an immediate of the target type.
+///
+/// `to_backend_ty` must be the *non*-immediate backend type (so it will be
+/// `i8`, not `i1`, for `bool`-like types.)
+pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
+    bx: &mut Bx,
+    mut imm: Bx::Value,
+    from_scalar: abi::Scalar,
+    from_backend_ty: Bx::Type,
+    to_scalar: abi::Scalar,
+    to_backend_ty: Bx::Type,
+) -> Bx::Value {
+    assert_eq!(from_scalar.size(bx.cx()), to_scalar.size(bx.cx()));
+
+    // While optimizations will remove no-op transmutes, they might still be
+    // there in debug or things that aren't no-op in MIR because they change
+    // the Rust type but not the underlying layout/niche.
+    if from_scalar == to_scalar && from_backend_ty == to_backend_ty {
+        return imm;
+    }
+
+    use abi::Primitive::*;
+    imm = bx.from_immediate(imm);
+
+    // If we have a scalar, we must already know its range. Either
+    //
+    // 1) It's a parameter with `range` parameter metadata,
+    // 2) It's something we `load`ed with `!range` metadata, or
+    // 3) After a transmute we `assume`d the range (see below).
+    //
+    // That said, last time we tried removing this, it didn't actually help
+    // the rustc-perf results, so might as well keep doing it
+    // <https://github.com/rust-lang/rust/pull/135610#issuecomment-2599275182>
+    assume_scalar_range(bx, imm, from_scalar, from_backend_ty);
+
+    imm = match (from_scalar.primitive(), to_scalar.primitive()) {
+        (Int(..) | Float(_), Int(..) | Float(_)) => bx.bitcast(imm, to_backend_ty),
+        (Pointer(..), Pointer(..)) => bx.pointercast(imm, to_backend_ty),
+        (Int(..), Pointer(..)) => bx.ptradd(bx.const_null(bx.type_ptr()), imm),
+        (Pointer(..), Int(..)) => {
+            // FIXME: this exposes the provenance, which shouldn't be necessary.
+            bx.ptrtoint(imm, to_backend_ty)
+        }
+        (Float(_), Pointer(..)) => {
+            let int_imm = bx.bitcast(imm, bx.cx().type_isize());
+            bx.ptradd(bx.const_null(bx.type_ptr()), int_imm)
+        }
+        (Pointer(..), Float(_)) => {
+            // FIXME: this exposes the provenance, which shouldn't be necessary.
+            let int_imm = bx.ptrtoint(imm, bx.cx().type_isize());
+            bx.bitcast(int_imm, to_backend_ty)
+        }
+    };
+
+    // This `assume` remains important for cases like (a conceptual)
+    //     transmute::<u32, NonZeroU32>(x) == 0
+    // since it's never passed to something with parameter metadata (especially
+    // after MIR inlining) so the only way to tell the backend about the
+    // constraint that the `transmute` introduced is to `assume` it.
+    assume_scalar_range(bx, imm, to_scalar, to_backend_ty);
+
+    imm = bx.to_immediate_scalar(imm, to_scalar);
+    imm
+}
+
+pub(super) fn assume_scalar_range<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
+    bx: &mut Bx,
+    imm: Bx::Value,
+    scalar: abi::Scalar,
+    backend_ty: Bx::Type,
+) {
+    if matches!(bx.cx().sess().opts.optimize, OptLevel::No) || scalar.is_always_valid(bx.cx()) {
+        return;
+    }
+
+    match scalar.primitive() {
+        abi::Primitive::Int(..) => {
+            let range = scalar.valid_range(bx.cx());
+            bx.assume_integer_range(imm, backend_ty, range);
+        }
+        abi::Primitive::Pointer(abi::AddressSpace::DATA)
+            if !scalar.valid_range(bx.cx()).contains(0) =>
+        {
+            bx.assume_nonnull(imm);
+        }
+        abi::Primitive::Pointer(..) | abi::Primitive::Float(..) => {}
+    }
+}
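
As a side note on the comment above the second `assume_scalar_range` call in `transmute_immediate`: a minimal standalone sketch of the kind of code it describes, where only the emitted `assume` carries the `NonZeroU32` range constraint to the backend. This example is hypothetical and not part of this commit.

use std::num::NonZeroU32;

fn check(x: u32) -> bool {
    if x == 0 {
        return true;
    }
    // SAFETY: `x` was just checked to be nonzero, matching NonZeroU32's
    // validity invariant.
    let nz = unsafe { std::mem::transmute::<u32, NonZeroU32>(x) };
    // After MIR inlining there may be no parameter range metadata on `nz`,
    // so the range `assume` emitted by `transmute_immediate` is what tells
    // the backend that `nz.get()` lies in 1..=u32::MAX, letting it fold
    // this comparison to `false`.
    nz.get() == 0
}

fn main() {
    assert!(!check(7));
    assert!(check(0));
}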
