@@ -127,7 +127,7 @@ pub(crate) fn codegen_inline_asm_terminator<'tcx>(
             }
             InlineAsmOperand::SymStatic { def_id } => {
                 assert!(fx.tcx.is_static(def_id));
-                let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
+                let instance = Instance::mono(fx.tcx, def_id);
                 CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance).name.to_owned() }
             }
             InlineAsmOperand::Label { .. } => {
@@ -169,6 +169,7 @@ pub(crate) fn codegen_inline_asm_inner<'tcx>(
         stack_slots_input: Vec::new(),
         stack_slots_output: Vec::new(),
         stack_slot_size: Size::from_bytes(0),
+        is_naked: false,
     };
     asm_gen.allocate_registers();
     asm_gen.allocate_stack_slots();
@@ -209,6 +210,121 @@ pub(crate) fn codegen_inline_asm_inner<'tcx>(
     call_inline_asm(fx, &asm_name, asm_gen.stack_slot_size, inputs, outputs);
 }
 
+pub(crate) fn codegen_naked_asm<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    cx: &mut crate::CodegenCx,
+    module: &mut dyn Module,
+    instance: Instance<'tcx>,
+    span: Span,
+    symbol_name: &str,
+    template: &[InlineAsmTemplatePiece],
+    operands: &[InlineAsmOperand<'tcx>],
+    options: InlineAsmOptions,
+) {
+    // FIXME add .eh_frame unwind info directives
+
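+    // Lower the operands to `CInlineAsmOperand`s. Only const and sym operands
+    // are legal in naked functions, so a register operand here is a bug.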
+    let operands = operands
+        .iter()
+        .map(|operand| match *operand {
+            InlineAsmOperand::In { .. }
+            | InlineAsmOperand::Out { .. }
+            | InlineAsmOperand::InOut { .. } => {
+                span_bug!(span, "invalid operand type for naked asm")
+            }
+            InlineAsmOperand::Const { ref value } => {
+                let cv = instance.instantiate_mir_and_normalize_erasing_regions(
+                    tcx,
+                    ty::ParamEnv::reveal_all(),
+                    ty::EarlyBinder::bind(value.const_),
+                );
+                let const_value = cv
+                    .eval(tcx, ty::ParamEnv::reveal_all(), value.span)
+                    .expect("erroneous constant missed by mono item collection");
+
+                let value = rustc_codegen_ssa::common::asm_const_to_str(
+                    tcx,
+                    span,
+                    const_value,
+                    RevealAllLayoutCx(tcx).layout_of(cv.ty()),
+                );
+                CInlineAsmOperand::Const { value }
+            }
+            InlineAsmOperand::SymFn { ref value } => {
+                if cfg!(not(feature = "inline_asm_sym")) {
+                    tcx.dcx()
+                        .span_err(span, "asm! and global_asm! sym operands are not yet supported");
+                }
+
+                let const_ = instance.instantiate_mir_and_normalize_erasing_regions(
+                    tcx,
+                    ty::ParamEnv::reveal_all(),
+                    ty::EarlyBinder::bind(value.const_),
+                );
+                if let ty::FnDef(def_id, args) = *const_.ty().kind() {
+                    let instance = ty::Instance::resolve_for_fn_ptr(
+                        tcx,
+                        ty::ParamEnv::reveal_all(),
+                        def_id,
+                        args,
+                    )
+                    .unwrap();
+                    let symbol = tcx.symbol_name(instance);
+
+                    // Pass a wrapper rather than the function itself as the function itself may not
+                    // be exported from the main codegen unit and may thus be unreachable from the
+                    // object file created by an external assembler.
+                    let inline_asm_index = cx.inline_asm_index.get();
+                    cx.inline_asm_index.set(inline_asm_index + 1);
+                    let wrapper_name = format!(
+                        "__inline_asm_{}_wrapper_n{}",
+                        cx.cgu_name.as_str().replace('.', "__").replace('-', "_"),
+                        inline_asm_index
+                    );
+                    let sig =
+                        get_function_sig(tcx, module.target_config().default_call_conv, instance);
+                    create_wrapper_function(
+                        module,
+                        &mut cx.unwind_context,
+                        sig,
+                        &wrapper_name,
+                        symbol.name,
+                    );
+
+                    CInlineAsmOperand::Symbol { symbol: wrapper_name }
+                } else {
+                    span_bug!(span, "invalid type for asm sym (fn)");
+                }
+            }
+            InlineAsmOperand::SymStatic { def_id } => {
+                assert!(tcx.is_static(def_id));
+                let instance = Instance::mono(tcx, def_id);
+                CInlineAsmOperand::Symbol { symbol: tcx.symbol_name(instance).name.to_owned() }
+            }
+            InlineAsmOperand::Label { .. } => {
+                span_bug!(span, "asm! label operands are not yet supported");
+            }
+        })
+        .collect::<Vec<_>>();
+
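+    // `is_naked: true` tells the generator to skip register allocation, stack
+    // slots, and prologue/epilogue emission; the template is used verbatim.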
+    let asm_gen = InlineAssemblyGenerator {
+        tcx,
+        arch: tcx.sess.asm_arch.unwrap(),
+        enclosing_def_id: instance.def_id(),
+        template,
+        operands: &operands,
+        options,
+        registers: Vec::new(),
+        stack_slots_clobber: Vec::new(),
+        stack_slots_input: Vec::new(),
+        stack_slots_output: Vec::new(),
+        stack_slot_size: Size::from_bytes(0),
+        is_naked: true,
+    };
+
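+    // The wrapper becomes global asm, so the symbol is defined by the
+    // assembler rather than by a Cranelift-compiled function.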
+    let generated_asm = asm_gen.generate_asm_wrapper(symbol_name);
+    cx.global_asm.push_str(&generated_asm);
+}
+
 struct InlineAssemblyGenerator<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     arch: InlineAsmArch,
@@ -221,10 +337,13 @@ struct InlineAssemblyGenerator<'a, 'tcx> {
     stack_slots_input: Vec<Option<Size>>,
     stack_slots_output: Vec<Option<Size>>,
     stack_slot_size: Size,
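+    /// Whether this is the body of a naked function, in which case no
+    /// prologue/epilogue or register save/restore code may be emitted.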
+    is_naked: bool,
 }
 
 impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> {
     fn allocate_registers(&mut self) {
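+        // Never reached for naked asm (see `codegen_naked_asm`).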
+        assert!(!self.is_naked);
+
         let sess = self.tcx.sess;
         let map = allocatable_registers(
             self.arch,
@@ -348,6 +467,8 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> {
     }
 
     fn allocate_stack_slots(&mut self) {
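+        // Also never reached for naked asm, which must not touch the stack
+        // behind the template's back.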
+        assert!(!self.is_naked);
+
         let mut slot_size = Size::from_bytes(0);
         let mut slots_clobber = vec![None; self.operands.len()];
         let mut slots_input = vec![None; self.operands.len()];
@@ -468,30 +589,32 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> {
         if is_x86 {
             generated_asm.push_str(".intel_syntax noprefix\n");
         }
-        Self::prologue(&mut generated_asm, self.arch);
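+        // Naked functions get no prologue and no register spills; the asm
+        // template is the entire function body.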
+        if !self.is_naked {
+            Self::prologue(&mut generated_asm, self.arch);
+
+            // Save clobbered registers
+            if !self.options.contains(InlineAsmOptions::NORETURN) {
+                for (reg, slot) in self
+                    .registers
+                    .iter()
+                    .zip(self.stack_slots_clobber.iter().copied())
+                    .filter_map(|(r, s)| r.zip(s))
+                {
+                    Self::save_register(&mut generated_asm, self.arch, reg, slot);
+                }
+            }
 
-        // Save clobbered registers
-        if !self.options.contains(InlineAsmOptions::NORETURN) {
+            // Write input registers
             for (reg, slot) in self
                 .registers
                 .iter()
-                .zip(self.stack_slots_clobber.iter().copied())
+                .zip(self.stack_slots_input.iter().copied())
                 .filter_map(|(r, s)| r.zip(s))
             {
-                Self::save_register(&mut generated_asm, self.arch, reg, slot);
+                Self::restore_register(&mut generated_asm, self.arch, reg, slot);
             }
         }
 
-        // Write input registers
-        for (reg, slot) in self
-            .registers
-            .iter()
-            .zip(self.stack_slots_input.iter().copied())
-            .filter_map(|(r, s)| r.zip(s))
-        {
-            Self::restore_register(&mut generated_asm, self.arch, reg, slot);
-        }
-
         if is_x86 && self.options.contains(InlineAsmOptions::ATT_SYNTAX) {
             generated_asm.push_str(".att_syntax\n");
         }
@@ -553,30 +676,32 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> {
             generated_asm.push_str(".intel_syntax noprefix\n");
         }
 
-        if !self.options.contains(InlineAsmOptions::NORETURN) {
-            // Read output registers
-            for (reg, slot) in self
-                .registers
-                .iter()
-                .zip(self.stack_slots_output.iter().copied())
-                .filter_map(|(r, s)| r.zip(s))
-            {
-                Self::save_register(&mut generated_asm, self.arch, reg, slot);
-            }
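+        // Likewise on the way out: no restores and no epilogue for naked
+        // functions; the template has to end in its own return (or diverge).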
+        if !self.is_naked {
+            if !self.options.contains(InlineAsmOptions::NORETURN) {
+                // Read output registers
+                for (reg, slot) in self
+                    .registers
+                    .iter()
+                    .zip(self.stack_slots_output.iter().copied())
+                    .filter_map(|(r, s)| r.zip(s))
+                {
+                    Self::save_register(&mut generated_asm, self.arch, reg, slot);
+                }
 
-            // Restore clobbered registers
-            for (reg, slot) in self
-                .registers
-                .iter()
-                .zip(self.stack_slots_clobber.iter().copied())
-                .filter_map(|(r, s)| r.zip(s))
-            {
-                Self::restore_register(&mut generated_asm, self.arch, reg, slot);
-            }
+                // Restore clobbered registers
+                for (reg, slot) in self
+                    .registers
+                    .iter()
+                    .zip(self.stack_slots_clobber.iter().copied())
+                    .filter_map(|(r, s)| r.zip(s))
+                {
+                    Self::restore_register(&mut generated_asm, self.arch, reg, slot);
+                }
 
-            Self::epilogue(&mut generated_asm, self.arch);
-        } else {
-            Self::epilogue_noreturn(&mut generated_asm, self.arch);
+                Self::epilogue(&mut generated_asm, self.arch);
+            } else {
+                Self::epilogue_noreturn(&mut generated_asm, self.arch);
+            }
         }
 
         if is_x86 {