Make librustc_codegen_llvm aware of LLVM address spaces. #51576

Closed

20 changes: 8 additions & 12 deletions src/librustc_codegen_llvm/abi.rs
@@ -11,7 +11,7 @@ use rustc_target::abi::call::ArgType;

use rustc_codegen_ssa::traits::*;

use rustc_target::abi::{HasDataLayout, LayoutOf, Size, TyLayout, Abi as LayoutAbi};
use rustc_target::abi::{LayoutOf, Size, TyLayout, Abi as LayoutAbi};
use rustc::ty::{self, Ty, Instance};
use rustc::ty::layout;

@@ -311,7 +311,6 @@ pub trait FnTypeExt<'tcx> {
cx: &CodegenCx<'ll, 'tcx>,
abi: Abi);
fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
fn llvm_cconv(&self) -> llvm::CallConv;
fn apply_attrs_llfn(&self, llfn: &'ll Value);
fn apply_attrs_callsite(&self, bx: &mut Builder<'a, 'll, 'tcx>, callsite: &'ll Value);
@@ -649,10 +648,11 @@ impl<'tcx> FnTypeExt<'tcx> for FnType<'tcx, Ty<'tcx>> {
PassMode::Ignore => cx.type_void(),
PassMode::Direct(_) | PassMode::Pair(..) => {
self.ret.layout.immediate_llvm_type(cx)
.copy_addr_space(cx.flat_addr_space())
}
PassMode::Cast(cast) => cast.llvm_type(cx),
PassMode::Indirect(..) => {
llargument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx)));
llargument_tys.push(cx.type_ptr_to_alloca(self.ret.memory_ty(cx)));
cx.type_void()
}
};
@@ -665,8 +665,11 @@ impl<'tcx> FnTypeExt<'tcx> for FnType<'tcx, Ty<'tcx>> {

let llarg_ty = match arg.mode {
PassMode::Ignore => continue,
PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx),
PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx)
.copy_addr_space(cx.flat_addr_space()),
PassMode::Pair(..) => {
// Keep the argument type address space given by
// `scalar_pair_element_llvm_type`.
llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
continue;
@@ -679,7 +682,7 @@ impl<'tcx> FnTypeExt<'tcx> for FnType<'tcx, Ty<'tcx>> {
continue;
}
PassMode::Cast(cast) => cast.llvm_type(cx),
PassMode::Indirect(_, None) => cx.type_ptr_to(arg.memory_ty(cx)),
PassMode::Indirect(_, None) => cx.type_ptr_to_alloca(arg.memory_ty(cx)),
};
llargument_tys.push(llarg_ty);
}
@@ -691,13 +694,6 @@ impl<'tcx> FnTypeExt<'tcx> for FnType<'tcx, Ty<'tcx>> {
}
}

fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
unsafe {
llvm::LLVMPointerType(self.llvm_type(cx),
cx.data_layout().instruction_address_space as c_uint)
}
}

fn llvm_cconv(&self) -> llvm::CallConv {
match self.conv {
Conv::C => llvm::CCallConv,
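
Taken together, the abi.rs changes stop hard-coding pointer address spaces in the function ABI: direct (`PassMode::Direct`/`Pair`) values are moved into the flat address space with `copy_addr_space(cx.flat_addr_space())`, indirect arguments become pointers into the alloca address space via `type_ptr_to_alloca`, and the old `ptr_to_llvm_type`, which baked in `instruction_address_space`, is dropped. As a rough illustration of the underlying idea, here is a minimal, self-contained sketch (hypothetical `Type`/`AddrSpaceIdx` stand-ins, not the rustc or LLVM types) in which the address space is just a rewritable component of a pointer type:

```rust
// Self-contained sketch (hypothetical types, not the rustc/LLVM ones) of the
// idea used above: the address space is part of a pointer type, and helpers
// like `copy_addr_space` rewrite only that index, leaving the pointee alone.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct AddrSpaceIdx(u32);

#[derive(Clone, Debug, PartialEq, Eq)]
enum Type {
    I8,
    Ptr { pointee: Box<Type>, addr_space: AddrSpaceIdx },
}

impl Type {
    /// Return the same type, but with a pointer moved into `space`.
    fn copy_addr_space(&self, space: AddrSpaceIdx) -> Type {
        match self {
            Type::Ptr { pointee, .. } => Type::Ptr {
                pointee: pointee.clone(),
                addr_space: space,
            },
            other => other.clone(),
        }
    }
}

fn main() {
    let flat = AddrSpaceIdx(0);   // assumed flat/generic address space
    let alloca = AddrSpaceIdx(5); // assumed alloca/private address space
    let p = Type::Ptr { pointee: Box::new(Type::I8), addr_space: alloca };
    // A direct value would be rewritten into the flat space:
    assert_eq!(
        p.copy_addr_space(flat),
        Type::Ptr { pointee: Box::new(Type::I8), addr_space: flat }
    );
}
```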
78 changes: 73 additions & 5 deletions src/librustc_codegen_llvm/builder.rs
@@ -2,7 +2,7 @@ use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{self, False, BasicBlock};
use rustc_codegen_ssa::common::{IntPredicate, TypeKind, RealPredicate};
use rustc_codegen_ssa::{self, MemFlags};
use common::Funclet;
use common::{Funclet, val_addr_space, val_addr_space_opt};
use context::CodegenCx;
use type_::Type;
use type_of::LayoutLlvmExt;
@@ -18,6 +18,7 @@ use syntax;
use rustc_codegen_ssa::base::to_immediate;
use rustc_codegen_ssa::mir::operand::{OperandValue, OperandRef};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_target::spec::AddrSpaceIdx;
use std::borrow::Cow;
use std::ffi::CStr;
use std::ops::{Deref, Range};
@@ -846,26 +847,59 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {

fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
self.count_insn("ptrtoint");
let val = self.flat_addr_cast(val);
unsafe {
llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
}
}

fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
self.count_insn("inttoptr");
let dest_ty = dest_ty.copy_addr_space(self.cx().flat_addr_space());
unsafe {
llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
}
}

fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
self.count_insn("bitcast");
let dest_ty = dest_ty.copy_addr_space(val_addr_space(val));
unsafe {
llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
}
}

/// Address-space casts `val` into `addr_space`, then pointer-casts to `dest_ty` without changing the address space.
fn as_ptr_cast(&mut self, val: &'ll Value,
addr_space: AddrSpaceIdx,
dest_ty: &'ll Type) -> &'ll Value
{
let val = self.addrspace_cast(val, addr_space);
self.pointercast(val, dest_ty.copy_addr_space(addr_space))
}
fn flat_as_ptr_cast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
self.as_ptr_cast(val, self.cx().flat_addr_space(), dest_ty)
}

fn addrspace_cast(&mut self, val: &'ll Value, dest: AddrSpaceIdx) -> &'ll Value {
// LLVM considers no-op address space casts to be invalid.
let src_ty = self.cx.val_ty(val);
if src_ty.is_ptr() && src_ty.address_space() != dest {
let dest_ty = src_ty.copy_addr_space(dest);
self.cx().check_addr_space_cast(val, dest_ty);
self.count_insn("addrspacecast");
unsafe {
llvm::LLVMBuildAddrSpaceCast(self.llbuilder, val,
dest_ty, noname())
}
} else {
val
}
}

fn flat_addr_cast(&mut self, val: &'ll Value) -> &'ll Value {
self.addrspace_cast(val, self.cx().flat_addr_space())
}
fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
self.count_insn("intcast");
unsafe {
@@ -875,6 +909,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {

fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
self.count_insn("pointercast");
let dest_ty = dest_ty.copy_addr_space(val_addr_space(val));
unsafe {
llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
}
@@ -883,7 +918,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
/* Comparisons */
fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
self.count_insn("icmp");

let op = llvm::IntPredicate::from_generic(op);

match (val_addr_space_opt(lhs), val_addr_space_opt(rhs)) {
(Some(l), Some(r)) if l == r => {},
(Some(l), Some(r)) if l != r => {
bug!("tried to cmp ptrs of different addr spaces: lhs {:?} rhs {:?}",
lhs, rhs);
},
_ => {},
}

unsafe {
llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
}
@@ -1004,7 +1050,8 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
flags: MemFlags,
) {
let ptr_width = &self.sess().target.target.target_pointer_width;
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let addr_space = self.val_ty(ptr).address_space();
let intrinsic_key = format!("llvm.memset.p{}i8.i{}", addr_space, ptr_width);
let llintrinsicfn = self.get_intrinsic(&intrinsic_key);
let ptr = self.pointercast(ptr, self.type_i8p());
let align = self.const_u32(align.bytes() as u32);
@@ -1352,7 +1399,8 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
ptr: &'ll Value) -> &'ll Value {
let dest_ptr_ty = self.cx.val_ty(ptr);
let stored_ty = self.cx.val_ty(val);
let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);
let stored_ptr_ty = self.cx.type_as_ptr_to(stored_ty,
dest_ptr_ty.address_space());

assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

@@ -1398,7 +1446,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
debug!("Type mismatch in function call of {:?}. \
Expected {:?} for param {}, got {:?}; injecting bitcast",
llfn, expected_ty, i, actual_ty);
self.bitcast(actual_val, expected_ty)
if expected_ty.is_ptr() && actual_ty.is_ptr() {
let actual_val = self.addrspace_cast(actual_val,
expected_ty.address_space());
self.pointercast(actual_val, expected_ty)
} else {
let actual_val = if actual_ty.is_ptr() {
self.flat_addr_cast(actual_val)
} else {
actual_val
};
self.bitcast(actual_val, expected_ty)
}
} else {
actual_val
}
@@ -1488,7 +1547,16 @@ impl Builder<'a, 'll, 'tcx> {
return;
}

let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);
let addr_space = self.cx.val_ty(ptr).address_space();
// Old LLVMs don't have the address space specific intrinsics.
// So as a semi-crude workaround, don't specialize if in the
// default address space.
let lifetime_intrinsic = if let AddrSpaceIdx(0) = addr_space {
self.cx.get_intrinsic(intrinsic)
} else {
let intrinsic = format!("{}.p{}i8", intrinsic, addr_space);
self.cx.get_intrinsic(&intrinsic)
};

let ptr = self.pointercast(ptr, self.cx.type_i8p());
self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
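
The builder changes also have to pick the right overload of LLVM's memory intrinsics: `llvm.memset` and the lifetime intrinsics are mangled by the address space of their pointer operand, so the intrinsic name is built from the operand's address space, with the unsuffixed lifetime intrinsic kept as a fallback for address space 0 on older LLVMs. A small sketch of that naming scheme, using standalone helper functions assumed purely for illustration:

```rust
// Minimal sketch of the intrinsic-name specialization used above. The format
// strings mirror the ones in the diff; the address-space-0 fallback for the
// lifetime intrinsics matches the "old LLVM" workaround.
fn memset_intrinsic(addr_space: u32, pointer_width: u32) -> String {
    format!("llvm.memset.p{}i8.i{}", addr_space, pointer_width)
}

fn lifetime_intrinsic(base: &str, addr_space: u32) -> String {
    if addr_space == 0 {
        // Older LLVMs only know the unsuffixed name in the default space.
        base.to_string()
    } else {
        format!("{}.p{}i8", base, addr_space)
    }
}

fn main() {
    assert_eq!(memset_intrinsic(0, 64), "llvm.memset.p0i8.i64");
    assert_eq!(memset_intrinsic(3, 64), "llvm.memset.p3i8.i64");
    assert_eq!(lifetime_intrinsic("llvm.lifetime.start", 5),
               "llvm.lifetime.start.p5i8");
}
```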
39 changes: 32 additions & 7 deletions src/librustc_codegen_llvm/common.rs
@@ -13,7 +13,9 @@ use rustc_codegen_ssa::traits::*;
use rustc::ty::layout::{HasDataLayout, LayoutOf, self, TyLayout, Size};
use rustc::mir::interpret::{Scalar, AllocKind, Allocation};
use consts::const_alloc_to_llvm;
use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_target::spec::AddrSpaceIdx;

use libc::{c_uint, c_char};

@@ -170,9 +172,9 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
s.len() as c_uint,
!null_terminated as Bool);
let sym = self.generate_local_symbol_name("str");
let g = self.define_global(&sym[..], self.val_ty(sc)).unwrap_or_else(||{
bug!("symbol `{}` is already defined", sym);
});
let addr_space = self.const_addr_space();
let g = self.define_global(&sym[..], self.val_ty(sc), addr_space)
.unwrap_or_else(|| bug!("symbol `{}` is already defined", sym) );
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
llvm::LLVMRustSetLinkage(g, llvm::Linkage::InternalLinkage);
@@ -284,6 +286,10 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}
}

fn const_as_cast(&self, val: &'ll Value, addr_space: AddrSpaceIdx) -> &'ll Value {
self.const_addrcast(val, addr_space)
}

fn scalar_to_backend(
&self,
cv: Scalar,
@@ -299,10 +305,16 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, layout.value.size(self).bytes());
let llval = self.const_uint_big(self.type_ix(bitsize), bits);
if layout.value == layout::Pointer {
unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
let flat_llty = llty.copy_addr_space(self.flat_addr_space());
let llval = if layout.value == layout::Pointer {
unsafe { llvm::LLVMConstIntToPtr(llval, flat_llty) }
} else {
self.const_bitcast(llval, llty)
self.const_bitcast(llval, flat_llty)
};
if llty.is_ptr() {
self.const_as_cast(llval, llty.address_space())
} else {
llval
}
},
Scalar::Ptr(ptr) => {
@@ -311,7 +323,8 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
Some(AllocKind::Memory(alloc)) => {
let init = const_alloc_to_llvm(self, alloc);
if alloc.mutability == Mutability::Mutable {
self.static_addr_of_mut(init, alloc.align, None)
self.static_addr_of_mut(init, alloc.align, None,
self.mutable_addr_space())
} else {
self.static_addr_of(init, alloc.align, None)
}
@@ -330,6 +343,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
&self.const_usize(ptr.offset.bytes()),
1,
) };
let llval = self.const_flat_as_cast(llval);
if layout.value != layout::Pointer {
unsafe { llvm::LLVMConstPtrToInt(llval, llty) }
} else {
@@ -367,6 +381,17 @@ pub fn val_ty(v: &'ll Value) -> &'ll Type {
llvm::LLVMTypeOf(v)
}
}
pub fn val_addr_space_opt(v: &'ll Value) -> Option<AddrSpaceIdx> {
let ty = val_ty(v);
if ty.kind() == TypeKind::Pointer {
Some(ty.address_space())
} else {
None
}
}
pub fn val_addr_space(v: &'ll Value) -> AddrSpaceIdx {
val_addr_space_opt(v).unwrap_or_default()
}

pub fn bytes_in_context(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value {
unsafe {
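
The `val_addr_space_opt`/`val_addr_space` helpers added above encode a small but recurring rule: only pointer values carry an address space, and anything else is treated as the default space (index 0). A self-contained sketch of the same pattern, using hypothetical stand-in types rather than LLVM's `Value`/`Type`:

```rust
// Sketch of the query pattern behind `val_addr_space_opt`/`val_addr_space`:
// only pointer values carry an address space; everything else falls back to
// the default space via `unwrap_or_default()`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct AddrSpaceIdx(u32);

#[derive(Clone, Copy)]
enum ValueTy {
    Pointer(AddrSpaceIdx),
    Int,
}

fn val_addr_space_opt(ty: ValueTy) -> Option<AddrSpaceIdx> {
    match ty {
        ValueTy::Pointer(idx) => Some(idx),
        ValueTy::Int => None,
    }
}

fn val_addr_space(ty: ValueTy) -> AddrSpaceIdx {
    val_addr_space_opt(ty).unwrap_or_default()
}

fn main() {
    assert_eq!(val_addr_space(ValueTy::Pointer(AddrSpaceIdx(3))), AddrSpaceIdx(3));
    // Non-pointer values fall back to the default address space (index 0).
    assert_eq!(val_addr_space(ValueTy::Int), AddrSpaceIdx(0));
}
```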