Commit 8da54fc

Implement asm_const_ptr for global_asm and naked_asm
1 parent d7235e6 commit 8da54fc
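For orientation, a rough sketch of the user-facing pattern this is building toward: passing a pointer as a `const` operand of `global_asm!` (or `naked_asm!`) and having the backend splice the resulting symbol, plus an optional byte offset, into the template as text. The surface syntax, the `.quad {}` directive, and the section directives are illustrative assumptions based on the commit title, not taken from this diff.

```rust
// Hypothetical usage sketch; assumes the unstable asm_const_ptr feature
// accepts pointer-typed `const` operands in global_asm!.
#![feature(asm_const_ptr)]

use core::arch::global_asm;

static MESSAGE: [u8; 5] = *b"hello";

global_asm!(
    ".pushsection .rodata",
    "message_ptr:",
    // The backend replaces {} with the symbol (and offset, if any) that the
    // const pointer operand resolves to.
    "    .quad {}",
    ".popsection",
    const &raw const MESSAGE,
);
```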

File tree

14 files changed: +353 -96 lines changed


compiler/rustc_codegen_cranelift/src/global_asm.rs

Lines changed: 8 additions & 1 deletion
@@ -8,10 +8,10 @@ use std::sync::Arc;
 
 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef};
-use rustc_middle::ty::TyCtxt;
 use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers,
 };
+use rustc_middle::ty::{Instance, TyCtxt};
 use rustc_session::config::{OutputFilenames, OutputType};
 use rustc_target::asm::InlineAsmArch;
 

@@ -29,6 +29,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for GlobalAsmContext<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        _instance: Instance<'_>,
     ) {
         codegen_global_asm_inner(self.tcx, self.global_asm, template, operands, options);
     }

@@ -132,6 +133,12 @@ fn codegen_global_asm_inner<'tcx>(
                     let symbol = tcx.symbol_name(instance);
                     global_asm.push_str(symbol.name);
                 }
+                GlobalAsmOperandRef::ConstPointer { value: _ } => {
+                    tcx.dcx().span_err(
+                        span,
+                        "asm! and global_asm! const pointer operands are not yet supported",
+                    );
+                }
             }
         }
     }
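All three backends dispatch on the same operand enum from rustc_codegen_ssa; this commit adds the `ConstPointer` variant, which the Cranelift backend above simply rejects for now. The diff never shows the enum itself, so the following is only a shape sketch implied by the matches in this commit, with stand-in types rather than the real rustc definitions.

```rust
// Shape sketch only; stand-in types, not the verbatim rustc_codegen_ssa definition.
#![allow(dead_code)]

struct Instance; // stand-in for rustc_middle::ty::Instance<'tcx>
struct DefId;    // stand-in for the id of a static item
struct Pointer;  // stand-in for the const-eval pointer (provenance + offset)

enum GlobalAsmOperandRef {
    /// Const operand already rendered to text; spliced into the template verbatim.
    Interpolate { string: String },
    /// Function symbol operand.
    SymFn { instance: Instance },
    /// Static symbol operand.
    SymStatic { def_id: DefId },
    /// New in this commit: a const pointer into a global allocation; the
    /// backends recover the allocation and byte offset via
    /// `value.prov_and_relative_offset()`.
    ConstPointer { value: Pointer },
}
```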

compiler/rustc_codegen_gcc/src/asm.rs

Lines changed: 100 additions & 29 deletions
@@ -2,16 +2,16 @@
 
 use std::borrow::Cow;
 
-use gccjit::{LValue, RValue, ToRValue, Type};
+use gccjit::{GlobalKind, LValue, RValue, ToRValue, Type};
 use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::mir::operand::OperandValue;
 use rustc_codegen_ssa::mir::place::PlaceRef;
 use rustc_codegen_ssa::traits::{
     AsmBuilderMethods, AsmCodegenMethods, BaseTypeCodegenMethods, BuilderMethods,
     GlobalAsmOperandRef, InlineAsmOperandRef,
 };
-use rustc_middle::bug;
 use rustc_middle::ty::Instance;
+use rustc_middle::{bug, mir};
 use rustc_span::Span;
 use rustc_target::asm::*;
 

@@ -858,13 +858,110 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();
 
         // Default to Intel syntax on x86
         let att_dialect = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && options.contains(InlineAsmOptions::ATT_SYNTAX);
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.prov_and_relative_offset();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let symbol = 'sym: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    let function = get_fn(self, instance);
+                                    self.add_used_function(function);
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O)
+                                    // or byte count suffixes (x86 Windows).
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    // TODO(antoyo): set the global variable as used.
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O).
+                                    let instance = Instance::mono(self.tcx, def_id);
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                                mir::interpret::GlobalAlloc::TypeId { .. } => {
+                                    // This is not an actual allocation, just return the offset.
+                                    return format!("{}", offset.bytes());
+                                }
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_gcc_uncached(self, alloc);
+                            let alloc = alloc.inner();
+                            let typ = self.val_ty(init).get_aligned(alloc.align.bytes());
+
+                            let global = self.declare_global_with_linkage(
+                                &sym_name,
+                                typ,
+                                GlobalKind::Exported,
+                            );
+                            global.global_set_initializer_rvalue(init);
+                            // TODO(nbdd0121): set unnamed address.
+                            // TODO(nbdd0121): set the global variable as used.
+
+                            sym_name
+                        };
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let function = get_fn(self, instance);
+                        self.add_used_function(function);
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O)
+                        // or byte count suffixes (x86 Windows).
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        // TODO(antoyo): set the global variable as used.
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O).
+                        let instance = Instance::mono(self.tcx, def_id);
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = ".pushsection .text\n".to_owned();
         if att_dialect {
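A small standalone illustration of the naming scheme the hunk above (and the LLVM change below) uses for anonymous const-pointer allocations: the mangled symbol of the item containing the asm, plus the operand index. The helper name and the example symbol are hypothetical; only the `{asm_name}.{operand_idx}` format string comes from the diff.

```rust
// Hypothetical illustration of the `format!("{asm_name}.{operand_idx}")`
// naming used for anonymous const-pointer globals.
fn anon_const_symbol(asm_item_symbol: &str, operand_idx: usize) -> String {
    format!("{asm_item_symbol}.{operand_idx}")
}

fn main() {
    // e.g. the second operand of an asm item with a (made-up) mangled symbol
    assert_eq!(
        anon_const_symbol("_ZN3foo9asm_block17h0123456789abcdefE", 1),
        "_ZN3foo9asm_block17h0123456789abcdefE.1"
    );
}
```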
@@ -888,33 +985,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                     }
                 }
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape %
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let function = get_fn(self, instance);
-                            self.add_used_function(function);
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O)
-                            // or byte count suffixes (x86 Windows).
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            // TODO(antoyo): set the global variable as used.
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O).
-                            let instance = Instance::mono(self.tcx, def_id);
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx]);
                 }
             }
         }
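The GCC and LLVM changes share the same two-phase shape: every operand is first rendered to a plain string (interpolated constant, symbol name, or symbol plus offset), and the template loop then splices those strings in by index. A minimal, self-contained sketch of that substitution step, using stand-in types rather than rustc's own:

```rust
// Stand-ins for rustc's InlineAsmTemplatePiece and converted operand list;
// only the substitution logic mirrors the commit.
enum TemplatePiece {
    String(String),
    Placeholder { operand_idx: usize },
}

fn build_template(pieces: &[TemplatePiece], converted_operands: &[String]) -> String {
    let mut template_str = String::new();
    for piece in pieces {
        match piece {
            TemplatePiece::String(s) => template_str.push_str(s),
            // Every placeholder was already rendered to text in the first
            // phase, so substitution is a simple indexed lookup.
            TemplatePiece::Placeholder { operand_idx } => {
                template_str.push_str(&converted_operands[*operand_idx]);
            }
        }
    }
    template_str
}

fn main() {
    let pieces = vec![
        TemplatePiece::String(".quad ".to_owned()),
        TemplatePiece::Placeholder { operand_idx: 0 },
        TemplatePiece::String("\n".to_owned()),
    ];
    // e.g. a const pointer operand rendered as "symbol+offset"
    let ops = vec!["my_static.0+8".to_owned()];
    assert_eq!(build_template(&pieces, &ops), ".quad my_static.0+8\n");
}
```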

compiler/rustc_codegen_llvm/src/asm.rs

Lines changed: 112 additions & 32 deletions
@@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::Instance;
 use rustc_middle::ty::layout::TyAndLayout;
-use rustc_middle::{bug, span_bug};
+use rustc_middle::{bug, mir, span_bug};
 use rustc_span::{Pos, Span, Symbol, sym};
 use rustc_target::asm::*;
 use smallvec::SmallVec;
@@ -389,9 +389,119 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.prov_and_relative_offset();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let llval = 'llval: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    break 'llval self.get_fn(instance);
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    break 'llval self
+                                        .renamed_statics
+                                        .borrow()
+                                        .get(&def_id)
+                                        .copied()
+                                        .unwrap_or_else(|| self.get_static(def_id));
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                                mir::interpret::GlobalAlloc::TypeId { .. } => {
+                                    // This is not an actual allocation, just return the offset.
+                                    return format!("{}", offset.bytes());
+                                }
+                            };
+                            let alloc = alloc.inner();
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_llvm(
+                                self, alloc, /*static*/ false,
+                            );
+                            let g = self.static_addr_of_mut(init, alloc.align, None);
+                            if alloc.mutability.is_not() {
+                                // NB: we can't use `static_addr_of_impl` here to avoid sharing
+                                // the global, as we need to set name and linkage.
+                                unsafe { llvm::LLVMSetGlobalConstant(g, llvm::True) };
+                            }
+
+                            llvm::set_value_name(g, sym_name.as_bytes());
+
+                            // `static_addr_of_mut` gives us a private global which can't be
+                            // used by global asm. Update it to a hidden internal global instead.
+                            llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
+                            llvm::set_visibility(g, llvm::Visibility::Hidden);
+                            g
+                        };
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let llval = self.get_fn(instance);
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        let llval = self
+                            .renamed_statics
+                            .borrow()
+                            .get(&def_id)
+                            .copied()
+                            .unwrap_or_else(|| self.get_static(def_id));
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = String::new();
 
@@ -409,37 +519,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
             match *piece {
                 InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape $
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let llval = self.get_fn(instance);
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            let llval = self
-                                .renamed_statics
-                                .borrow()
-                                .get(&def_id)
-                                .copied()
-                                .unwrap_or_else(|| self.get_static(def_id));
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx])
                 }
             }
         }
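The `ConstPointer` arms in both backends end the same way: a zero-sized allocation is rendered as a bare aligned address, and anything else as the symbol name with an optional `+offset` suffix. A small standalone sketch of that rendering rule; the function and parameter names are mine, not rustc's.

```rust
// Hypothetical helper mirroring how the commit renders a const pointer
// operand to assembly text; names are illustrative only.
fn render_const_pointer(symbol: &str, alloc_len: u64, align: u64, offset: u64) -> String {
    // For ZSTs there is no real allocation to name, so the backends emit
    // a plain aligned address instead of a symbol.
    if alloc_len == 0 {
        assert_eq!(offset, 0);
        return format!("{align}");
    }
    // Otherwise: the symbol, plus "+offset" when the pointer is interior.
    if offset != 0 { format!("{symbol}+{offset}") } else { symbol.to_owned() }
}

fn main() {
    assert_eq!(render_const_pointer("anon.0", 16, 8, 0), "anon.0");
    assert_eq!(render_const_pointer("anon.0", 16, 8, 4), "anon.0+4");
    assert_eq!(render_const_pointer("zst", 0, 4, 0), "4");
}
```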
