From 90e363a59a227647875980032db486728df08ffc Mon Sep 17 00:00:00 2001
From: Matthew Maurer
Date: Fri, 15 Mar 2024 19:45:46 +0000
Subject: [PATCH] CFI: Use Instance at callsites

We already use `Instance` at declaration sites when available to glean
additional information about possible abstractions of the type in use.
This does the same when possible at callsites as well. The primary
purpose of this change is to allow CFI to alter how it generates type
information for indirect calls through `Virtual` instances.
---
 compiler/rustc_codegen_gcc/src/asm.rs         |  2 +-
 compiler/rustc_codegen_gcc/src/builder.rs     |  6 ++-
 .../rustc_codegen_gcc/src/intrinsic/mod.rs    | 11 ++---
 compiler/rustc_codegen_llvm/src/asm.rs        |  6 +--
 compiler/rustc_codegen_llvm/src/builder.rs    | 41 +++++++++++++------
 compiler/rustc_codegen_llvm/src/intrinsic.rs  | 39 ++++++++++--------
 compiler/rustc_codegen_ssa/src/base.rs        | 19 ++++++---
 compiler/rustc_codegen_ssa/src/mir/block.rs   | 19 ++++++---
 compiler/rustc_codegen_ssa/src/mir/rvalue.rs  |  2 +-
 compiler/rustc_codegen_ssa/src/size_of_val.rs | 10 ++++-
 .../rustc_codegen_ssa/src/traits/builder.rs   |  4 +-
 11 files changed, 105 insertions(+), 54 deletions(-)

diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs
index 9b679019e96ce..06b14a1f118a3 100644
--- a/compiler/rustc_codegen_gcc/src/asm.rs
+++ b/compiler/rustc_codegen_gcc/src/asm.rs
@@ -541,7 +541,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
             let builtin_unreachable: RValue<'gcc> =
                 unsafe { std::mem::transmute(builtin_unreachable) };
-            self.call(self.type_void(), None, None, builtin_unreachable, &[], None);
+            self.call(self.type_void(), None, None, builtin_unreachable, &[], None, None);
         }
 
         // Write results to outputs.
diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs
index f5cda81f6ab86..43cc46cfe682f 100644
--- a/compiler/rustc_codegen_gcc/src/builder.rs
+++ b/compiler/rustc_codegen_gcc/src/builder.rs
@@ -25,7 +25,7 @@ use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError, LayoutOfHelpers,
     TyAndLayout,
 };
-use rustc_middle::ty::{ParamEnv, Ty, TyCtxt};
+use rustc_middle::ty::{ParamEnv, Ty, TyCtxt, Instance};
 use rustc_span::def_id::DefId;
 use rustc_span::Span;
 use rustc_target::abi::{
@@ -592,12 +592,13 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
         then: Block<'gcc>,
         catch: Block<'gcc>,
         _funclet: Option<&Funclet>,
+        instance: Option<Instance<'tcx>>,
     ) -> RValue<'gcc> {
         let try_block = self.current_func().new_block("try");
 
         let current_block = self.block.clone();
         self.block = try_block;
 
-        let call = self.call(typ, fn_attrs, None, func, args, None); // TODO(antoyo): use funclet here?
+        let call = self.call(typ, fn_attrs, None, func, args, None, instance); // TODO(antoyo): use funclet here?
 
         self.block = current_block;
 
         let return_value =
@@ -1667,6 +1668,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
         func: RValue<'gcc>,
         args: &[RValue<'gcc>],
         funclet: Option<&Funclet>,
+        _instance: Option<Instance<'tcx>>,
     ) -> RValue<'gcc> {
         // FIXME(antoyo): remove when having a proper API.
         let gcc_func = unsafe { std::mem::transmute(func) };
diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
index a6c8b72e851b2..cebd45c09aa39 100644
--- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
+++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
@@ -133,6 +133,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                         func,
                         &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
                         None,
+                        None,
                     )
                 }
                 sym::likely => self.expect(args[0].immediate(), true),
@@ -401,7 +402,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
     fn abort(&mut self) {
         let func = self.context.get_builtin_function("abort");
         let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
-        self.call(self.type_void(), None, None, func, &[], None);
+        self.call(self.type_void(), None, None, func, &[], None, None);
     }
 
     fn assume(&mut self, value: Self::Value) {
@@ -1103,7 +1104,7 @@ fn try_intrinsic<'a, 'b, 'gcc, 'tcx>(
     dest: RValue<'gcc>,
 ) {
     if bx.sess().panic_strategy() == PanicStrategy::Abort {
-        bx.call(bx.type_void(), None, None, try_func, &[data], None);
+        bx.call(bx.type_void(), None, None, try_func, &[data], None, None);
         // Return 0 unconditionally from the intrinsic call;
         // we can never unwind.
         let ret_align = bx.tcx.data_layout.i32_align.abi;
@@ -1177,21 +1178,21 @@ fn codegen_gnu_try<'gcc>(
         let zero = bx.cx.context.new_rvalue_zero(bx.int_type);
         let ptr = bx.cx.context.new_call(None, eh_pointer_builtin, &[zero]);
         let catch_ty = bx.type_func(&[bx.type_i8p(), bx.type_i8p()], bx.type_void());
-        bx.call(catch_ty, None, None, catch_func, &[data, ptr], None);
+        bx.call(catch_ty, None, None, catch_func, &[data, ptr], None, None);
         bx.ret(bx.const_i32(1));
 
         // NOTE: the blocks must be filled before adding the try/catch, otherwise gcc will not
         // generate a try/catch.
         // FIXME(antoyo): add a check in the libgccjit API to prevent this.
         bx.switch_to_block(current_block);
-        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
     });
 
     let func = unsafe { std::mem::transmute(func) };
 
     // Note that no invoke is used here because by definition this function
     // can't panic (that's what it's catching).
-    let ret = bx.call(llty, None, None, func, &[try_func, data, catch_func], None);
+    let ret = bx.call(llty, None, None, func, &[try_func, data, catch_func], None, None);
     let i32_align = bx.tcx().data_layout.i32_align.abi;
     bx.store(ret, dest, i32_align);
 }
diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs
index 74539d4d49570..500904ce18805 100644
--- a/compiler/rustc_codegen_llvm/src/asm.rs
+++ b/compiler/rustc_codegen_llvm/src/asm.rs
@@ -466,11 +466,11 @@ pub(crate) fn inline_asm_call<'ll>(
         let call = if !labels.is_empty() {
             assert!(catch_funclet.is_none());
-            bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None)
+            bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None, None)
         } else if let Some((catch, funclet)) = catch_funclet {
-            bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet)
+            bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet, None)
         } else {
-            bx.call(fty, None, None, v, inputs, None)
+            bx.call(fty, None, None, v, inputs, None, None)
         };
 
         // Store mark in a metadata node so we can map LLVM errors
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 63e59ea13fc35..a5a5ae73d77a1 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -19,9 +19,12 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers, TyAndLayout,
 };
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_span::Span;
-use rustc_symbol_mangling::typeid::{kcfi_typeid_for_fnabi, typeid_for_fnabi, TypeIdOptions};
+use rustc_symbol_mangling::typeid::{
+    kcfi_typeid_for_fnabi, kcfi_typeid_for_instance, typeid_for_fnabi, typeid_for_instance,
+    TypeIdOptions,
+};
 use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange};
 use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
 use smallvec::SmallVec;
@@ -221,6 +224,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         then: &'ll BasicBlock,
         catch: &'ll BasicBlock,
         funclet: Option<&Funclet<'ll>>,
+        instance: Option<Instance<'tcx>>,
     ) -> &'ll Value {
         debug!("invoke {:?} with args ({:?})", llfn, args);
 
@@ -233,10 +237,10 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         }
 
         // Emit CFI pointer type membership test
-        self.cfi_type_test(fn_attrs, fn_abi, llfn);
+        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
 
         // Emit KCFI operand bundle
-        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
         let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
         if let Some(kcfi_bundle) = kcfi_bundle {
             bundles.push(kcfi_bundle);
@@ -1231,6 +1235,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         llfn: &'ll Value,
         args: &[&'ll Value],
         funclet: Option<&Funclet<'ll>>,
+        instance: Option<Instance<'tcx>>,
     ) -> &'ll Value {
         debug!("call {:?} with args ({:?})", llfn, args);
 
@@ -1243,10 +1248,10 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         }
 
         // Emit CFI pointer type membership test
-        self.cfi_type_test(fn_attrs, fn_abi, llfn);
+        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
 
         // Emit KCFI operand bundle
-        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
         let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
         if let Some(kcfi_bundle) = kcfi_bundle {
             bundles.push(kcfi_bundle);
@@ -1468,7 +1473,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
     pub(crate) fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value {
         let (ty, f) = self.cx.get_intrinsic(intrinsic);
-        self.call(ty, None, None, f, args, None)
+        self.call(ty, None, None, f, args, None, None)
     }
 
     fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
@@ -1526,7 +1531,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
             format!("llvm.{instr}.sat.i{int_width}.f{float_width}")
         };
         let f = self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty));
-        self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None)
+        self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None, None)
     }
 
     pub(crate) fn landing_pad(
@@ -1554,6 +1559,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         default_dest: &'ll BasicBlock,
         indirect_dest: &[&'ll BasicBlock],
         funclet: Option<&Funclet<'ll>>,
+        instance: Option<Instance<'tcx>>,
     ) -> &'ll Value {
         debug!("invoke {:?} with args ({:?})", llfn, args);
 
@@ -1566,10 +1572,10 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         }
 
         // Emit CFI pointer type membership test
-        self.cfi_type_test(fn_attrs, fn_abi, llfn);
+        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
 
         // Emit KCFI operand bundle
-        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
         let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
         if let Some(kcfi_bundle) = kcfi_bundle {
             bundles.push(kcfi_bundle);
@@ -1601,6 +1607,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         &mut self,
         fn_attrs: Option<&CodegenFnAttrs>,
         fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
+        instance: Option<Instance<'tcx>>,
         llfn: &'ll Value,
     ) {
         let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
@@ -1622,7 +1629,11 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
                 options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
             }
 
-            let typeid = typeid_for_fnabi(self.tcx, fn_abi, options);
+            let typeid = if let Some(instance) = instance {
+                typeid_for_instance(self.tcx, &instance, options)
+            } else {
+                typeid_for_fnabi(self.tcx, fn_abi, options)
+            };
             let typeid_metadata = self.cx.typeid_metadata(typeid).unwrap();
 
             // Test whether the function pointer is associated with the type identifier.
@@ -1644,6 +1655,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         &mut self,
         fn_attrs: Option<&CodegenFnAttrs>,
         fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
+        instance: Option<Instance<'tcx>>,
         llfn: &'ll Value,
     ) -> Option<llvm::OperandBundleDef<'ll>> {
         let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
@@ -1665,7 +1677,12 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
                 options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
             }
 
-            let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi, options);
+            let kcfi_typeid = if let Some(instance) = instance {
+                kcfi_typeid_for_instance(self.tcx, &instance, options)
+            } else {
+                kcfi_typeid_for_fnabi(self.tcx, fn_abi, options)
+            };
+
             Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)]))
         } else {
             None
diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs
index 71b69a94e99e7..78ed31f9d9aca 100644
--- a/compiler/rustc_codegen_llvm/src/intrinsic.rs
+++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -181,6 +181,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                     simple_fn,
                     &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
                     None,
+                    Some(instance),
                 )
             }
             sym::likely => {
@@ -539,7 +540,7 @@ fn catch_unwind_intrinsic<'ll>(
 ) {
     if bx.sess().panic_strategy() == PanicStrategy::Abort {
         let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-        bx.call(try_func_ty, None, None, try_func, &[data], None);
+        bx.call(try_func_ty, None, None, try_func, &[data], None, None);
         // Return 0 unconditionally from the intrinsic call;
         // we can never unwind.
         let ret_align = bx.tcx().data_layout.i32_align.abi;
@@ -640,7 +641,7 @@ fn codegen_msvc_try<'ll>(
         let ptr_align = bx.tcx().data_layout.pointer_align.abi;
         let slot = bx.alloca(bx.type_ptr(), ptr_align);
         let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-        bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None);
+        bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None, None);
 
         bx.switch_to_block(normal);
         bx.ret(bx.const_i32(0));
@@ -684,7 +685,7 @@ fn codegen_msvc_try<'ll>(
         let funclet = bx.catch_pad(cs, &[tydesc, flags, slot]);
         let ptr = bx.load(bx.type_ptr(), slot, ptr_align);
         let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-        bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet));
+        bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet), None);
         bx.catch_ret(&funclet, caught);
 
         // The flag value of 64 indicates a "catch-all".
@@ -692,7 +693,7 @@ fn codegen_msvc_try<'ll>(
         let flags = bx.const_i32(64);
         let null = bx.const_null(bx.type_ptr());
         let funclet = bx.catch_pad(cs, &[null, flags, null]);
-        bx.call(catch_ty, None, None, catch_func, &[data, null], Some(&funclet));
+        bx.call(catch_ty, None, None, catch_func, &[data, null], Some(&funclet), None);
         bx.catch_ret(&funclet, caught);
 
         bx.switch_to_block(caught);
@@ -701,7 +702,7 @@ fn codegen_msvc_try<'ll>(
     // Note that no invoke is used here because by definition this function
     // can't panic (that's what it's catching).
-    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
     let i32_align = bx.tcx().data_layout.i32_align.abi;
     bx.store(ret, dest, i32_align);
 }
@@ -750,7 +751,7 @@ fn codegen_wasm_try<'ll>(
         // }
         //
         let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-        bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None);
+        bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None, None);
 
         bx.switch_to_block(normal);
         bx.ret(bx.const_i32(0));
@@ -766,7 +767,7 @@ fn codegen_wasm_try<'ll>(
         let _sel = bx.call_intrinsic("llvm.wasm.get.ehselector", &[funclet.cleanuppad()]);
 
         let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-        bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet));
+        bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet), None);
         bx.catch_ret(&funclet, caught);
 
         bx.switch_to_block(caught);
@@ -775,7 +776,7 @@ fn codegen_wasm_try<'ll>(
     // Note that no invoke is used here because by definition this function
    // can't panic (that's what it's catching).
-    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
     let i32_align = bx.tcx().data_layout.i32_align.abi;
     bx.store(ret, dest, i32_align);
 }
@@ -818,7 +819,7 @@ fn codegen_gnu_try<'ll>(
         let data = llvm::get_param(bx.llfn(), 1);
         let catch_func = llvm::get_param(bx.llfn(), 2);
         let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
 
         bx.switch_to_block(then);
         bx.ret(bx.const_i32(0));
@@ -836,13 +837,13 @@ fn codegen_gnu_try<'ll>(
         bx.add_clause(vals, tydesc);
         let ptr = bx.extract_value(vals, 0);
         let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-        bx.call(catch_ty, None, None, catch_func, &[data, ptr], None);
+        bx.call(catch_ty, None, None, catch_func, &[data, ptr], None, None);
         bx.ret(bx.const_i32(1));
     });
 
     // Note that no invoke is used here because by definition this function
     // can't panic (that's what it's catching).
-    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
     let i32_align = bx.tcx().data_layout.i32_align.abi;
     bx.store(ret, dest, i32_align);
 }
@@ -882,7 +883,7 @@ fn codegen_emcc_try<'ll>(
         let data = llvm::get_param(bx.llfn(), 1);
         let catch_func = llvm::get_param(bx.llfn(), 2);
         let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+        bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
 
         bx.switch_to_block(then);
         bx.ret(bx.const_i32(0));
@@ -920,13 +921,13 @@ fn codegen_emcc_try<'ll>(
         bx.store(is_rust_panic, catch_data_1, i8_align);
 
         let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-        bx.call(catch_ty, None, None, catch_func, &[data, catch_data], None);
+        bx.call(catch_ty, None, None, catch_func, &[data, catch_data], None, None);
         bx.ret(bx.const_i32(1));
     });
 
     // Note that no invoke is used here because by definition this function
     // can't panic (that's what it's catching).
-    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+    let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
     let i32_align = bx.tcx().data_layout.i32_align.abi;
     bx.store(ret, dest, i32_align);
 }
@@ -1439,6 +1440,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             f,
             &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
             None,
+            None,
         );
         Ok(c)
     }
@@ -1607,6 +1609,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             f,
             &[args[1].immediate(), alignment, mask, args[0].immediate()],
             None,
+            None,
         );
         return Ok(v);
     }
@@ -1706,6 +1709,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             f,
             &[args[1].immediate(), alignment, mask, args[2].immediate()],
             None,
+            None,
         );
         return Ok(v);
     }
@@ -1799,6 +1803,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             f,
             &[args[2].immediate(), args[1].immediate(), alignment, mask],
             None,
+            None,
         );
         return Ok(v);
     }
@@ -1904,6 +1909,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             f,
             &[args[0].immediate(), args[1].immediate(), alignment, mask],
             None,
+            None,
         );
         return Ok(v);
     }
@@ -2352,11 +2358,12 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
                 f,
                 &[args[0].immediate(), bx.const_int(bx.type_i1(), 0)],
                 None,
+                None,
             ))
         } else {
             let fn_ty = bx.type_func(&[vec_ty], vec_ty);
             let f = bx.declare_cfn(llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
-            Ok(bx.call(fn_ty, None, None, f, &[args[0].immediate()], None))
+            Ok(bx.call(fn_ty, None, None, f, &[args[0].immediate()], None, None))
         };
     }
@@ -2409,7 +2416,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
         let fn_ty = bx.type_func(&[vec_ty, vec_ty], vec_ty);
         let f = bx.declare_cfn(llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
-        let v = bx.call(fn_ty, None, None, f, &[lhs, rhs], None);
+        let v = bx.call(fn_ty, None, None, f, &[lhs, rhs], None, None);
         return Ok(v);
     }
diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs
index c316d19e04119..51006839a9b69 100644
--- a/compiler/rustc_codegen_ssa/src/base.rs
+++ b/compiler/rustc_codegen_ssa/src/base.rs
@@ -462,27 +462,34 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let ptr_ty = cx.type_ptr();
     let (arg_argc, arg_argv) = get_argc_argv(cx, &mut bx);
 
-    let (start_fn, start_ty, args) = if let EntryFnType::Main { sigpipe } = entry_type {
+    let (start_fn, start_ty, args, instance) = if let EntryFnType::Main { sigpipe } = entry_type
+    {
         let start_def_id = cx.tcx().require_lang_item(LangItem::Start, None);
-        let start_fn = cx.get_fn_addr(ty::Instance::expect_resolve(
+        let start_instance = ty::Instance::expect_resolve(
             cx.tcx(),
             ty::ParamEnv::reveal_all(),
             start_def_id,
             cx.tcx().mk_args(&[main_ret_ty.into()]),
-        ));
+        );
+        let start_fn = cx.get_fn_addr(start_instance);
 
         let i8_ty = cx.type_i8();
         let arg_sigpipe = bx.const_u8(sigpipe);
 
         let start_ty = cx.type_func(&[cx.val_ty(rust_main), isize_ty, ptr_ty, i8_ty], isize_ty);
-        (start_fn, start_ty, vec![rust_main, arg_argc, arg_argv, arg_sigpipe])
+        (
+            start_fn,
+            start_ty,
+            vec![rust_main, arg_argc, arg_argv, arg_sigpipe],
+            Some(start_instance),
+        )
     } else {
         debug!("using user-defined start fn");
         let start_ty = cx.type_func(&[isize_ty, ptr_ty], isize_ty);
-        (rust_main, start_ty, vec![arg_argc, arg_argv])
+        (rust_main, start_ty, vec![arg_argc, arg_argv], None)
     };
 
-    let result = bx.call(start_ty, None, None, start_fn, &args, None);
+    let result = bx.call(start_ty, None, None, start_fn, &args, None, instance);
     if cx.sess().target.os.contains("uefi") {
         bx.ret(result);
     } else {
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index dcc27a4f0e568..8c668597a43b3 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -232,6 +232,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
                 ret_llbb,
                 unwind_block,
                 self.funclet(fx),
+                instance,
             );
             if fx.mir[self.bb].is_cleanup {
                 bx.apply_attrs_to_cleanup_callsite(invokeret);
@@ -247,7 +248,8 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
             }
             MergingSucc::False
         } else {
-            let llret = bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx));
+            let llret =
+                bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx), instance);
             if fx.mir[self.bb].is_cleanup {
                 bx.apply_attrs_to_cleanup_callsite(llret);
             }
@@ -502,7 +504,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         let ty = location.ty(self.mir, bx.tcx()).ty;
         let ty = self.monomorphize(ty);
         let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
-        let instance = drop_fn.clone();
 
         if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
             // we don't actually need to drop anything.
@@ -518,7 +519,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             args1 = [place.llval];
             &args1[..]
         };
-        let (drop_fn, fn_abi) =
+        let (drop_fn, fn_abi, drop_instance) =
             match ty.kind() {
                 // FIXME(eddyb) perhaps move some of this logic into
                 // `Instance::resolve_drop_in_place`?
@@ -550,6 +551,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
                             .get_fn(bx, vtable, ty, fn_abi),
                         fn_abi,
+                        virtual_drop,
                     )
                 }
                 ty::Dynamic(_, _, ty::DynStar) => {
@@ -592,9 +594,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
                             .get_fn(bx, meta.immediate(), ty, fn_abi),
                         fn_abi,
+                        virtual_drop,
                     )
                 }
-                _ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
+                _ => (
+                    bx.get_fn_addr(drop_fn),
+                    bx.fn_abi_of_instance(drop_fn, ty::List::empty()),
+                    drop_fn,
+                ),
             };
         helper.do_call(
             self,
@@ -605,7 +612,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             Some((ReturnDest::Nothing, target)),
             unwind,
             &[],
-            Some(instance),
+            Some(drop_instance),
             mergeable_succ,
         )
     }
@@ -1699,7 +1706,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         } else {
             let fn_ty = bx.fn_decl_backend_type(fn_abi);
-            let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref());
+            let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref(), None);
             bx.apply_attrs_to_cleanup_callsite(llret);
         }
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 8159f76b421b3..96732b711d0ea 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -715,7 +715,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             } else {
                 None
             };
-            bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, &[], None)
+            bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, &[], None, Some(instance))
         } else {
             bx.get_static(def_id)
         };
diff --git a/compiler/rustc_codegen_ssa/src/size_of_val.rs b/compiler/rustc_codegen_ssa/src/size_of_val.rs
index e2e95cede60a5..c250cc2682323 100644
--- a/compiler/rustc_codegen_ssa/src/size_of_val.rs
+++ b/compiler/rustc_codegen_ssa/src/size_of_val.rs
@@ -70,7 +70,15 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     // (But we are in good company, this code is duplicated plenty of times.)
     let fn_ty = bx.fn_decl_backend_type(fn_abi);
 
-    bx.call(fn_ty, /* fn_attrs */ None, Some(fn_abi), llfn, &[msg.0, msg.1], None);
+    bx.call(
+        fn_ty,
+        /* fn_attrs */ None,
+        Some(fn_abi),
+        llfn,
+        &[msg.0, msg.1],
+        None,
+        None,
+    );
 
     // This function does not return so we can now return whatever we want.
     let size = bx.const_usize(layout.size.bytes());
diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs
index 36f37e3791bc5..70eecf4f189f6 100644
--- a/compiler/rustc_codegen_ssa/src/traits/builder.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -16,7 +16,7 @@ use crate::MemFlags;
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::ty::layout::{HasParamEnv, TyAndLayout};
-use rustc_middle::ty::Ty;
+use rustc_middle::ty::{Instance, Ty};
 use rustc_span::Span;
 use rustc_target::abi::call::FnAbi;
 use rustc_target::abi::{Abi, Align, Scalar, Size, WrappingRange};
@@ -80,6 +80,7 @@ pub trait BuilderMethods<'a, 'tcx>:
         then: Self::BasicBlock,
         catch: Self::BasicBlock,
         funclet: Option<&Self::Funclet>,
+        instance: Option<Instance<'tcx>>,
     ) -> Self::Value;
 
     fn unreachable(&mut self);
@@ -339,6 +340,7 @@ pub trait BuilderMethods<'a, 'tcx>:
         llfn: Self::Value,
         args: &[Self::Value],
         funclet: Option<&Self::Funclet>,
+        instance: Option<Instance<'tcx>>,
     ) -> Self::Value;
 
     fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
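
For illustration only (not part of the patch): the sketch below is a minimal, self-contained model of the dispatch this change threads through `call`/`invoke`. When a callsite can supply the monomorphized `Instance`, the CFI/KCFI type id is derived from it, which lets a `Virtual` instance carry information (modeled here as a vtable slot) that a signature-only, `FnAbi`-based id cannot; otherwise the code falls back to the `FnAbi` path, mirroring the `typeid_for_instance` / `typeid_for_fnabi` split added to `cfi_type_test` and `kcfi_operand_bundle`. The types and functions below (`Instance`, `FnAbi`, `callsite_type_id`, ...) are made-up stand-ins, not rustc APIs.

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Illustrative stand-ins for rustc's Instance / FnAbi; not the real compiler types.
    #[derive(Clone, Copy)]
    enum InstanceKind {
        Item,
        Virtual { vtable_slot: usize },
    }

    struct Instance {
        kind: InstanceKind,
    }

    struct FnAbi {
        signature: &'static str,
    }

    fn hash_str(s: &str) -> u64 {
        let mut h = DefaultHasher::new();
        s.hash(&mut h);
        h.finish()
    }

    // FnAbi-based id: every callsite with the same signature shares one id.
    fn type_id_for_fnabi(abi: &FnAbi) -> u64 {
        hash_str(abi.signature)
    }

    // Instance-based id: a virtual call can fold in extra information (here,
    // the vtable slot) that the signature alone cannot express.
    fn type_id_for_instance(instance: &Instance, abi: &FnAbi) -> u64 {
        match instance.kind {
            InstanceKind::Virtual { vtable_slot } => {
                hash_str(&format!("{}#vslot{}", abi.signature, vtable_slot))
            }
            InstanceKind::Item => type_id_for_fnabi(abi),
        }
    }

    // Mirrors the `if let Some(instance) = instance { .. } else { .. }` dispatch
    // added in the patch: prefer the Instance when the caller supplies one, and
    // fall back to the FnAbi otherwise.
    fn callsite_type_id(abi: &FnAbi, instance: Option<&Instance>) -> u64 {
        match instance {
            Some(instance) => type_id_for_instance(instance, abi),
            None => type_id_for_fnabi(abi),
        }
    }

    fn main() {
        let abi = FnAbi { signature: "fn(&dyn Trait, usize)" };
        let direct = Instance { kind: InstanceKind::Item };
        let virt = Instance { kind: InstanceKind::Virtual { vtable_slot: 3 } };
        println!("fnabi-based id:     {:016x}", callsite_type_id(&abi, None));
        println!("direct instance id: {:016x}", callsite_type_id(&abi, Some(&direct)));
        println!("virtual instance id:{:016x}", callsite_type_id(&abi, Some(&virt)));
    }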