From 6385982bfdd35db037782ebb0ca297dfff55ec1b Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Thu, 23 Mar 2023 18:50:57 -0700
Subject: [PATCH] Pass in the layout to `codegen_rvalue_operand`/`rvalue_creates_operand`

It turns out that every caller either had the layout easily available or
had already calculated it and then thrown it away. I started doing this
because I wanted to look at the layout in order to do better transmute
codegen -- which I left out of this PR -- but hopefully it'll be a good
change in general too.
---
 compiler/rustc_codegen_ssa/src/mir/analyze.rs  | 33 ++++---
 compiler/rustc_codegen_ssa/src/mir/block.rs    | 13 +--
 .../rustc_codegen_ssa/src/mir/debuginfo.rs     |  8 +-
 compiler/rustc_codegen_ssa/src/mir/mod.rs      | 12 ++-
 compiler/rustc_codegen_ssa/src/mir/operand.rs  |  4 +-
 compiler/rustc_codegen_ssa/src/mir/place.rs    |  2 +-
 compiler/rustc_codegen_ssa/src/mir/rvalue.rs   | 85 +++++++++----------
 .../rustc_codegen_ssa/src/mir/statement.rs     | 10 +--
 8 files changed, 84 insertions(+), 83 deletions(-)

diff --git a/compiler/rustc_codegen_ssa/src/mir/analyze.rs b/compiler/rustc_codegen_ssa/src/mir/analyze.rs
index 0ce395e912db3..fbe5336070458 100644
--- a/compiler/rustc_codegen_ssa/src/mir/analyze.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/analyze.rs
@@ -9,7 +9,7 @@ use rustc_index::vec::IndexVec;
 use rustc_middle::mir::traversal;
 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::{self, Location, TerminatorKind};
-use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
+use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
 
 pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     fx: &FunctionCx<'a, 'tcx, Bx>,
@@ -22,13 +22,14 @@ pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         .map(|decl| {
             let ty = fx.monomorphize(decl.ty);
             let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
-            if layout.is_zst() {
+            let kind = if layout.is_zst() {
                 LocalKind::ZST
             } else if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                 LocalKind::Unused
             } else {
                 LocalKind::Memory
-            }
+            };
+            (kind, layout)
         })
         .collect();
 
@@ -48,7 +49,7 @@ pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let mut non_ssa_locals = BitSet::new_empty(analyzer.locals.len());
     for (local, kind) in analyzer.locals.iter_enumerated() {
-        if matches!(kind, LocalKind::Memory) {
+        if matches!(kind.0, LocalKind::Memory) {
             non_ssa_locals.insert(local);
         }
     }
 
@@ -85,12 +86,12 @@ impl DefLocation {
 struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     fx: &'mir FunctionCx<'a, 'tcx, Bx>,
     dominators: Dominators<mir::BasicBlock>,
-    locals: IndexVec<mir::Local, LocalKind>,
+    locals: IndexVec<mir::Local, (LocalKind, TyAndLayout<'tcx>)>,
 }
 
 impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
     fn assign(&mut self, local: mir::Local, location: DefLocation) {
-        let kind = &mut self.locals[local];
+        let kind = &mut self.locals[local].0;
         match *kind {
             LocalKind::ZST => {}
             LocalKind::Memory => {}
@@ -178,10 +179,9 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
         if let Some(local) = place.as_local() {
             self.assign(local, DefLocation::Body(location));
-            if self.locals[local] != LocalKind::Memory {
-                let decl_span = self.fx.mir.local_decls[local].source_info.span;
-                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
-                    self.locals[local] = LocalKind::Memory;
+            if self.locals[local].0 != LocalKind::Memory {
+                if !self.fx.rvalue_creates_operand(rvalue, self.locals[local].1) {
+                    self.locals[local].0 = LocalKind::Memory;
                 }
             }
         } else {
@@ -207,7 +207,7 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
             PlaceContext::NonMutatingUse(
                 NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
-            ) => match &mut self.locals[local] {
+            ) => match &mut self.locals[local].0 {
                 LocalKind::ZST => {}
                 LocalKind::Memory => {}
                 LocalKind::SSA(def) if def.dominates(location, &self.dominators) => {}
@@ -237,17 +237,16 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
                 | NonMutatingUseContext::AddressOf
                 | NonMutatingUseContext::Projection,
             ) => {
-                self.locals[local] = LocalKind::Memory;
+                self.locals[local].0 = LocalKind::Memory;
             }
 
             PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
-                let kind = &mut self.locals[local];
-                if *kind != LocalKind::Memory {
-                    let ty = self.fx.mir.local_decls[local].ty;
-                    let ty = self.fx.monomorphize(ty);
+                let kind_and_ty = &mut self.locals[local];
+                if kind_and_ty.0 != LocalKind::Memory {
+                    let ty = kind_and_ty.1.ty;
                     if self.fx.cx.type_needs_drop(ty) {
                         // Only need the place if we're actually dropping it.
-                        *kind = LocalKind::Memory;
+                        kind_and_ty.0 = LocalKind::Memory;
                     }
                 }
             }
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index 5da0e826c5640..4d7b5393ed11a 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -397,8 +397,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             PassMode::Cast(cast_ty, _) => {
                 let op = match self.locals[mir::RETURN_PLACE] {
-                    LocalRef::Operand(Some(op)) => op,
-                    LocalRef::Operand(None) => bug!("use of return before def"),
+                    LocalRef::Operand(op) => op,
+                    LocalRef::PendingOperand(_) => bug!("use of return before def"),
                     LocalRef::Place(cg_place) => OperandRef {
                         val: Ref(cg_place.llval, None, cg_place.align),
                         layout: cg_place.layout,
@@ -1673,7 +1673,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         match self.locals[index] {
             LocalRef::Place(dest) => dest,
             LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
-            LocalRef::Operand(None) => {
+            LocalRef::PendingOperand(op_layout) => {
+                debug_assert_eq!(op_layout, fn_ret.layout);
                 // Handle temporary places, specifically `Operand` ones, as
                 // they don't have `alloca`s.
                 return if fn_ret.is_indirect() {
@@ -1694,7 +1695,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     ReturnDest::DirectOperand(index)
                 };
             }
-            LocalRef::Operand(Some(_)) => {
+            LocalRef::Operand(_) => {
                 bug!("place local already assigned to");
             }
         }
@@ -1737,7 +1738,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             IndirectOperand(tmp, index) => {
                 let op = bx.load_operand(tmp);
                 tmp.storage_dead(bx);
-                self.locals[index] = LocalRef::Operand(Some(op));
+                self.locals[index] = LocalRef::Operand(op);
                 self.debug_introduce_local(bx, index);
             }
             DirectOperand(index) => {
@@ -1752,7 +1753,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 } else {
                     OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                 };
-                self.locals[index] = LocalRef::Operand(Some(op));
+                self.locals[index] = LocalRef::Operand(op);
                 self.debug_introduce_local(bx, index);
             }
         }
diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
index 6e32c28a42c65..9033c1725b3de 100644
--- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -312,7 +312,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             LocalRef::Place(place) | LocalRef::UnsizedPlace(place) => {
                 bx.set_var_name(place.llval, name);
             }
-            LocalRef::Operand(Some(operand)) => match operand.val {
+            LocalRef::Operand(operand) => match operand.val {
                 OperandValue::Ref(x, ..) | OperandValue::Immediate(x) => {
                     bx.set_var_name(x, name);
                 }
@@ -323,7 +323,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     bx.set_var_name(b, &(name.clone() + ".1"));
                 }
             },
-            LocalRef::Operand(None) => {}
+            LocalRef::PendingOperand(_) => {}
         }
     }
@@ -332,9 +332,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }
 
         let base = match local_ref {
-            LocalRef::Operand(None) => return,
+            LocalRef::PendingOperand(_) => return,
 
-            LocalRef::Operand(Some(operand)) => {
+            LocalRef::Operand(operand) => {
                 // Don't spill operands onto the stack in naked functions.
                 // See: https://github.com/rust-lang/rust/issues/42779
                 let attrs = bx.tcx().codegen_fn_attrs(self.instance.def_id());
diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs
index 5cffca5230a8f..53d0289213ab5 100644
--- a/compiler/rustc_codegen_ssa/src/mir/mod.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs
@@ -123,7 +123,11 @@ enum LocalRef<'tcx, V> {
     /// Every time it is initialized, we have to reallocate the place
     /// and update the fat pointer. That's the reason why it is indirect.
     UnsizedPlace(PlaceRef<'tcx, V>),
-    Operand(Option<OperandRef<'tcx, V>>),
+    /// `Operand(o)`: `o` has the backend [`OperandValue`] already generated.
+    Operand(OperandRef<'tcx, V>),
+    /// `PendingOperand(l)`: codegen has not yet run for this local.
+    /// `l` is the expected layout, to save recomputing it later.
+    PendingOperand(TyAndLayout<'tcx>),
 }
 
 impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
@@ -135,9 +139,9 @@ impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
             // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
-            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
+            LocalRef::Operand(OperandRef::new_zst(bx, layout))
         } else {
-            LocalRef::Operand(None)
+            LocalRef::PendingOperand(layout)
         }
     }
@@ -337,7 +341,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             // We don't have to cast or keep the argument in the alloca.
             // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
             // of putting everything in allocas just so we can use llvm.dbg.declare.
-            let local = |op| LocalRef::Operand(Some(op));
+            let local = |op| LocalRef::Operand(op);
             match arg.mode {
                 PassMode::Ignore => {
                     return local(OperandRef::new_zst(bx, arg.layout));
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index 34a5b638d7eba..1a6c9ce85a1a7 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -370,7 +370,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);
 
         match self.locals[place_ref.local] {
-            LocalRef::Operand(Some(mut o)) => {
+            LocalRef::Operand(mut o) => {
                 // Moves out of scalar and scalar pair fields are trivial.
                 for elem in place_ref.projection.iter() {
                     match elem {
@@ -395,7 +395,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
                 Some(o)
             }
-            LocalRef::Operand(None) => {
+            LocalRef::PendingOperand(_) => {
                 bug!("use of {:?} before def", place_ref);
             }
             LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index cf02f59f67b97..ad4ae9cc173c7 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -546,7 +546,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         let mut cg_base = match self.locals[place_ref.local] {
             LocalRef::Place(place) => place,
             LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
-            LocalRef::Operand(..) => {
+            LocalRef::Operand(..) | LocalRef::PendingOperand(..) => {
                 if place_ref.has_deref() {
                     base = 1;
                     let cg_base = self.codegen_consume(
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 72d41d8c32c21..f356f454e125d 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -10,9 +10,8 @@ use crate::MemFlags;
 use rustc_middle::mir;
 use rustc_middle::mir::Operand;
 use rustc_middle::ty::cast::{CastTy, IntTy};
-use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
+use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, Ty, TyCtxt};
-use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_target::abi::{self, VariantIdx};
 
 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
@@ -37,7 +36,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 if bx.cx().is_backend_scalar_pair(dest.layout) {
                     // Into-coerce of a thin pointer to a fat pointer -- just
                    // use the operand path.
-                    let temp = self.codegen_rvalue_operand(bx, rvalue);
+                    let temp = self.codegen_rvalue_operand(bx, rvalue, dest.layout);
                     temp.val.store(bx, dest);
                     return;
                 }
@@ -141,8 +140,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             }
 
             _ => {
-                assert!(self.rvalue_creates_operand(rvalue, DUMMY_SP));
-                let temp = self.codegen_rvalue_operand(bx, rvalue);
+                let temp = self.codegen_rvalue_operand(bx, rvalue, dest.layout);
                 temp.val.store(bx, dest);
             }
         }
@@ -219,18 +217,29 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         &mut self,
         bx: &mut Bx,
         rvalue: &mir::Rvalue<'tcx>,
+        output_ty: TyAndLayout<'tcx>,
     ) -> OperandRef<'tcx, Bx::Value> {
         assert!(
-            self.rvalue_creates_operand(rvalue, DUMMY_SP),
+            self.rvalue_creates_operand(rvalue, output_ty),
             "cannot codegen {:?} to operand",
             rvalue,
         );
 
+        // Our callers reliably know what the `TyAndLayout` of the eventual
+        // `OperandRef` will be, thanks to the local or place reference.
+        // But very strange things would happen in codegen were it ever wrong
+        // somehow, so double-check them in debug-assertions CI just in case.
+        debug_assert_eq!(
+            output_ty,
+            bx.cx().layout_of(self.monomorphize(rvalue.ty(self.mir, bx.tcx()))),
+        );
+
         match *rvalue {
             mir::Rvalue::Cast(ref kind, ref source, mir_cast_ty) => {
                 let operand = self.codegen_operand(bx, source);
                 debug!("cast operand is {:?}", operand);
-                let cast = bx.cx().layout_of(self.monomorphize(mir_cast_ty));
+                let cast = output_ty;
+                debug_assert_eq!(cast, bx.cx().layout_of(self.monomorphize(mir_cast_ty)));
 
                 let val = match *kind {
                     mir::CastKind::PointerExposeAddress => {
@@ -422,10 +431,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
             mir::Rvalue::Len(place) => {
                 let size = self.evaluate_array_len(bx, place);
-                OperandRef {
-                    val: OperandValue::Immediate(size),
-                    layout: bx.cx().layout_of(bx.tcx().types.usize),
-                }
+                OperandRef { val: OperandValue::Immediate(size), layout: output_ty }
             }
 
             mir::Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
@@ -451,10 +457,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     _ => bug!(),
                 };
-                OperandRef {
-                    val: OperandValue::Immediate(llresult),
-                    layout: bx.cx().layout_of(op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty)),
-                }
+
+                OperandRef { val: OperandValue::Immediate(llresult), layout: output_ty }
             }
             mir::Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
                 let lhs = self.codegen_operand(bx, lhs);
@@ -466,13 +470,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     rhs.immediate(),
                     lhs.layout.ty,
                 );
-                let val_ty = op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty);
-                let operand_ty = bx.tcx().mk_tup(&[val_ty, bx.tcx().types.bool]);
-                OperandRef { val: result, layout: bx.cx().layout_of(operand_ty) }
+                OperandRef { val: result, layout: output_ty }
             }
 
             mir::Rvalue::UnaryOp(op, ref operand) => {
                 let operand = self.codegen_operand(bx, operand);
+                debug_assert_eq!(operand.layout, output_ty);
                 let lloperand = operand.immediate();
                 let is_float = operand.layout.ty.is_floating_point();
                 let llval = match op {
@@ -485,17 +488,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         }
                     }
                 };
-                OperandRef { val: OperandValue::Immediate(llval), layout: operand.layout }
+                OperandRef { val: OperandValue::Immediate(llval), layout: output_ty }
             }
 
             mir::Rvalue::Discriminant(ref place) => {
-                let discr_ty = rvalue.ty(self.mir, bx.tcx());
-                let discr_ty = self.monomorphize(discr_ty);
-                let discr = self.codegen_place(bx, place.as_ref()).codegen_get_discr(bx, discr_ty);
-                OperandRef {
-                    val: OperandValue::Immediate(discr),
-                    layout: self.cx.layout_of(discr_ty),
-                }
+                let place_ref = self.codegen_place(bx, place.as_ref());
+                let discr = place_ref.codegen_get_discr(bx, output_ty.ty);
+                OperandRef { val: OperandValue::Immediate(discr), layout: output_ty }
             }
 
             mir::Rvalue::NullaryOp(null_op, ty) => {
@@ -507,36 +506,32 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     mir::NullOp::AlignOf => layout.align.abi.bytes(),
                 };
                 let val = bx.cx().const_usize(val);
-                let tcx = self.cx.tcx();
-                OperandRef {
-                    val: OperandValue::Immediate(val),
-                    layout: self.cx.layout_of(tcx.types.usize),
-                }
+                OperandRef { val: OperandValue::Immediate(val), layout: output_ty }
             }
 
             mir::Rvalue::ThreadLocalRef(def_id) => {
                 assert!(bx.cx().tcx().is_static(def_id));
                 let static_ = bx.get_static(def_id);
-                let layout = bx.layout_of(bx.cx().tcx().static_ptr_ty(def_id));
-                OperandRef { val: OperandValue::Immediate(static_), layout }
+                OperandRef { val: OperandValue::Immediate(static_), layout: output_ty }
             }
             mir::Rvalue::Use(ref operand) => self.codegen_operand(bx, operand),
             mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) => {
                 // According to `rvalue_creates_operand`, only ZST
                 // aggregate rvalues are allowed to be operands.
-                let ty = rvalue.ty(self.mir, self.cx.tcx());
-                OperandRef::new_zst(bx, self.cx.layout_of(self.monomorphize(ty)))
+                OperandRef::new_zst(bx, output_ty)
             }
             mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
                 let operand = self.codegen_operand(bx, operand);
                 let lloperand = operand.immediate();
-                let content_ty = self.monomorphize(content_ty);
-                let box_layout = bx.cx().layout_of(bx.tcx().mk_box(content_ty));
-                let llty_ptr = bx.cx().backend_type(box_layout);
+                debug_assert_eq!(output_ty, {
+                    let content_ty = self.monomorphize(content_ty);
+                    bx.cx().layout_of(bx.tcx().mk_box(content_ty))
+                });
+                let llty_ptr = bx.cx().backend_type(output_ty);
                 let val = bx.pointercast(lloperand, llty_ptr);
-                OperandRef { val: OperandValue::Immediate(val), layout: box_layout }
+                OperandRef { val: OperandValue::Immediate(val), layout: output_ty }
             }
         }
     }
@@ -545,7 +540,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // ZST are passed as operands and require special handling
         // because codegen_place() panics if Local is operand.
         if let Some(index) = place.as_local() {
-            if let LocalRef::Operand(Some(op)) = self.locals[index] {
+            if let LocalRef::Operand(op) = self.locals[index] {
                 if let ty::Array(_, n) = op.layout.ty.kind() {
                     let n = n.eval_target_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all());
                     return bx.cx().const_usize(n);
@@ -725,7 +720,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 }
 
 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
-    pub fn rvalue_creates_operand(&self, rvalue: &mir::Rvalue<'tcx>, span: Span) -> bool {
+    pub fn rvalue_creates_operand(
+        &self,
+        rvalue: &mir::Rvalue<'tcx>,
+        output_ty: TyAndLayout<'tcx>,
+    ) -> bool {
         match *rvalue {
             mir::Rvalue::Cast(mir::CastKind::Transmute, ..) =>
                 // FIXME: Now that transmute is an Rvalue, it would be nice if
@@ -747,9 +746,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 true,
             mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) => {
-                let ty = rvalue.ty(self.mir, self.cx.tcx());
-                let ty = self.monomorphize(ty);
-                self.cx.spanned_layout_of(ty, span).is_zst()
+                output_ty.is_zst()
             }
         }
diff --git a/compiler/rustc_codegen_ssa/src/mir/statement.rs b/compiler/rustc_codegen_ssa/src/mir/statement.rs
index 41f585f7fccc1..6b93f52ae3c36 100644
--- a/compiler/rustc_codegen_ssa/src/mir/statement.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/statement.rs
@@ -18,12 +18,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         LocalRef::UnsizedPlace(cg_indirect_dest) => {
                             self.codegen_rvalue_unsized(bx, cg_indirect_dest, rvalue)
                         }
-                        LocalRef::Operand(None) => {
-                            let operand = self.codegen_rvalue_operand(bx, rvalue);
-                            self.locals[index] = LocalRef::Operand(Some(operand));
+                        LocalRef::PendingOperand(op_layout) => {
+                            let operand = self.codegen_rvalue_operand(bx, rvalue, op_layout);
+                            self.locals[index] = LocalRef::Operand(operand);
                             self.debug_introduce_local(bx, index);
                         }
-                        LocalRef::Operand(Some(op)) => {
+                        LocalRef::Operand(op) => {
                             if !op.layout.is_zst() {
                                 span_bug!(
                                     statement.source_info.span,
@@ -34,7 +34,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
                             // If the type is zero-sized, it's already been set here,
                             // but we still need to make sure we codegen the operand
-                            self.codegen_rvalue_operand(bx, rvalue);
+                            self.codegen_rvalue_operand(bx, rvalue, op.layout);
                         }
                     }
                 } else {