
Commit 81a4b3e

Implement asm_const_ptr for global_asm and naked_asm
1 parent 363fd49 commit 81a4b3e
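
For context, a rough sketch of what this feature enables on the user side. This is an assumption-heavy illustration, not code from the commit: the exact surface syntax accepted by the unstable `asm_const_ptr` feature may differ, and `TABLE`/`table_ptr` are invented names. The backend changes below expand such an operand to a symbol reference, plus a `+offset` suffix when the pointer does not point at the start of the allocation.

```rust
// Hypothetical usage sketch (assumes the unstable `asm_const_ptr` feature
// accepts pointer-typed `const` operands in `global_asm!`; names invented).
#![feature(asm_const_ptr)]
use core::arch::global_asm;

static TABLE: [u32; 4] = [1, 2, 3, 4];

global_asm!(
    ".globl table_ptr",
    "table_ptr:",
    // x86_64 assumed; the placeholder is expected to expand to a symbol
    // reference (optionally with a "+offset" suffix).
    ".quad {}",
    const &raw const TABLE,
);
```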

14 files changed: +345 −96 lines

compiler/rustc_codegen_cranelift/src/global_asm.rs

Lines changed: 8 additions & 1 deletion

@@ -8,10 +8,10 @@ use std::sync::Arc;
 
 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef};
-use rustc_middle::ty::TyCtxt;
 use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers,
 };
+use rustc_middle::ty::{Instance, TyCtxt};
 use rustc_session::config::{OutputFilenames, OutputType};
 use rustc_target::asm::InlineAsmArch;
 
@@ -29,6 +29,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for GlobalAsmContext<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        _instance: Instance<'_>,
     ) {
         codegen_global_asm_inner(self.tcx, self.global_asm, template, operands, options);
     }
@@ -132,6 +133,12 @@ fn codegen_global_asm_inner<'tcx>(
                 let symbol = tcx.symbol_name(instance);
                 global_asm.push_str(symbol.name);
             }
+            GlobalAsmOperandRef::ConstPointer { value: _ } => {
+                tcx.dcx().span_err(
+                    span,
+                    "asm! and global_asm! const pointer operands are not yet supported",
+                );
+            }
         }
     }
 }

compiler/rustc_codegen_gcc/src/asm.rs

Lines changed: 96 additions & 29 deletions

@@ -2,16 +2,16 @@
 
 use std::borrow::Cow;
 
-use gccjit::{LValue, RValue, ToRValue, Type};
+use gccjit::{GlobalKind, LValue, RValue, ToRValue, Type};
 use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::mir::operand::OperandValue;
 use rustc_codegen_ssa::mir::place::PlaceRef;
 use rustc_codegen_ssa::traits::{
     AsmBuilderMethods, AsmCodegenMethods, BaseTypeCodegenMethods, BuilderMethods,
     GlobalAsmOperandRef, InlineAsmOperandRef,
 };
-use rustc_middle::bug;
 use rustc_middle::ty::Instance;
+use rustc_middle::{bug, mir};
 use rustc_span::Span;
 use rustc_target::asm::*;
 
@@ -858,13 +858,106 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();
 
         // Default to Intel syntax on x86
         let att_dialect = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && options.contains(InlineAsmOptions::ATT_SYNTAX);
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.prov_and_relative_offset();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let symbol = 'sym: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    let function = get_fn(self, instance);
+                                    self.add_used_function(function);
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O)
+                                    // or byte count suffixes (x86 Windows).
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    // TODO(antoyo): set the global variable as used.
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O).
+                                    let instance = Instance::mono(self.tcx, def_id);
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_gcc_uncached(self, alloc);
+                            let alloc = alloc.inner();
+                            let typ = self.val_ty(init).get_aligned(alloc.align.bytes());
+
+                            let global = self.declare_global_with_linkage(
+                                &sym_name,
+                                typ,
+                                GlobalKind::Exported,
+                            );
+                            global.global_set_initializer_rvalue(init);
+                            // TODO(nbdd0121): set unnamed address.
+                            // TODO(nbdd0121): set the global variable as used.
+
+                            sym_name
+                        };
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let function = get_fn(self, instance);
+                        self.add_used_function(function);
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O)
+                        // or byte count suffixes (x86 Windows).
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        // TODO(antoyo): set the global variable as used.
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O).
+                        let instance = Instance::mono(self.tcx, def_id);
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = ".pushsection .text\n".to_owned();
         if att_dialect {
@@ -888,33 +981,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                     }
                 }
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape %
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let function = get_fn(self, instance);
-                            self.add_used_function(function);
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O)
-                            // or byte count suffixes (x86 Windows).
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            // TODO(antoyo): set the global variable as used.
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O).
-                            let instance = Instance::mono(self.tcx, def_id);
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx]);
                 }
             }
         }
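
Both the GCC change above and the LLVM change below share the same shape: operands are no longer resolved inside the placeholder loop; instead every operand is converted to a plain string up front, and placeholders simply index into that vector. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the compiler's own:

```rust
// Simplified stand-ins for InlineAsmTemplatePiece and the converted operands;
// this only illustrates the "convert first, substitute by index" refactor.
enum Piece {
    String(String),
    Placeholder { operand_idx: usize },
}

fn render_template(template: &[Piece], converted_operands: &[String]) -> String {
    let mut out = String::new();
    for piece in template {
        match piece {
            Piece::String(s) => out.push_str(s),
            // Every operand kind (Interpolate, ConstPointer, SymFn, SymStatic)
            // was already turned into a string, so substitution is uniform here.
            Piece::Placeholder { operand_idx } => {
                out.push_str(&converted_operands[*operand_idx])
            }
        }
    }
    out
}

fn main() {
    let template = [
        Piece::String(".quad ".to_owned()),
        Piece::Placeholder { operand_idx: 0 },
        Piece::String("\n".to_owned()),
    ];
    // e.g. a ConstPointer operand that resolved to an internal global plus offset
    let operands = ["sym.0+8".to_owned()];
    assert_eq!(render_template(&template, &operands), ".quad sym.0+8\n");
}
```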

compiler/rustc_codegen_llvm/src/asm.rs

Lines changed: 108 additions & 32 deletions

@@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::Instance;
 use rustc_middle::ty::layout::TyAndLayout;
-use rustc_middle::{bug, span_bug};
+use rustc_middle::{bug, mir, span_bug};
 use rustc_span::{Pos, Span, Symbol, sym};
 use rustc_target::asm::*;
 use smallvec::SmallVec;
@@ -389,13 +389,119 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();
 
         // Default to Intel syntax on x86
         let intel_syntax = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && !options.contains(InlineAsmOptions::ATT_SYNTAX);
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.prov_and_relative_offset();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let llval = 'llval: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    break 'llval self.get_fn(instance);
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    break 'llval self
+                                        .renamed_statics
+                                        .borrow()
+                                        .get(&def_id)
+                                        .copied()
+                                        .unwrap_or_else(|| self.get_static(def_id));
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_llvm(
+                                self, alloc, /*static*/ false,
+                            );
+                            let alloc = alloc.inner();
+                            let g = self.static_addr_of_mut(init, alloc.align, None);
+                            if alloc.mutability.is_not() {
+                                // NB: we can't use `static_addr_of_impl` here to avoid sharing
+                                // the global, as we need to set name and linkage.
+                                unsafe { llvm::LLVMSetGlobalConstant(g, llvm::True) };
+                            }
+
+                            llvm::set_value_name(g, sym_name.as_bytes());
+
+                            // `static_addr_of_mut` gives us a private global which can't be
+                            // used by global asm. Update it to a hidden internal global instead.
+                            llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
+                            llvm::set_visibility(g, llvm::Visibility::Hidden);
+                            g
+                        };
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let llval = self.get_fn(instance);
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        let llval = self
+                            .renamed_statics
+                            .borrow()
+                            .get(&def_id)
+                            .copied()
+                            .unwrap_or_else(|| self.get_static(def_id));
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = String::new();
         if intel_syntax {
@@ -405,37 +511,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
             match *piece {
                 InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape $
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let llval = self.get_fn(instance);
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            let llval = self
-                                .renamed_statics
-                                .borrow()
-                                .get(&def_id)
-                                .copied()
-                                .unwrap_or_else(|| self.get_static(def_id));
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx])
                 }
             }
         }
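
One detail shared by the GCC and LLVM paths is the zero-sized-allocation shortcut: a ZST has no bytes to emit, so the operand expands to a plain integer equal to the allocation's alignment instead of a symbol (the usual Rust convention of a dangling but well-aligned pointer). A small sketch of that decision in isolation, with plain values standing in for interpreter allocations:

```rust
// Sketch of the operand-string logic from the backends above, pulled out of
// context; the real code works on `Allocation`s and mangled symbols.
fn const_pointer_operand_string(alloc_len: usize, align_bytes: u64, offset: u64, symbol: &str) -> String {
    if alloc_len == 0 {
        // Zero-sized allocation: nothing to emit, so use the alignment itself
        // as a well-aligned (dangling) address.
        assert_eq!(offset, 0);
        return format!("{align_bytes}");
    }
    // Otherwise reference the emitted global, adding "+offset" when the
    // pointer does not point at the start of the allocation.
    if offset != 0 { format!("{symbol}+{offset}") } else { symbol.to_owned() }
}

fn main() {
    assert_eq!(const_pointer_operand_string(0, 8, 0, ""), "8");
    assert_eq!(const_pointer_operand_string(16, 4, 4, "sym.0"), "sym.0+4");
    assert_eq!(const_pointer_operand_string(16, 4, 0, "sym.0"), "sym.0");
}
```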
