Skip to content

abi: readjust FnAbis to remove unsupported PassModes, via query hooks. #766

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Oct 20, 2021
216 changes: 97 additions & 119 deletions crates/rustc_codegen_spirv/src/abi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,24 +8,95 @@ use rspirv::spirv::{StorageClass, Word};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::ErrorReported;
use rustc_index::vec::Idx;
use rustc_middle::bug;
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{
self, Const, FloatTy, GeneratorSubsts, IntTy, ParamEnv, PolyFnSig, Ty, TyKind, TypeAndMut,
UintTy,
self, Const, FloatTy, GeneratorSubsts, IntTy, ParamEnv, PolyFnSig, Ty, TyCtxt, TyKind,
TypeAndMut, UintTy,
};
use rustc_middle::{bug, span_bug};
use rustc_span::def_id::DefId;
use rustc_span::Span;
use rustc_span::DUMMY_SP;
use rustc_target::abi::call::{CastTarget, FnAbi, PassMode, Reg, RegKind};
use rustc_target::abi::call::{ArgAbi, ArgAttributes, FnAbi, PassMode};
use rustc_target::abi::{Abi, Align, FieldsShape, Primitive, Scalar, Size, VariantIdx, Variants};
use rustc_target::spec::abi::Abi as SpecAbi;
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::fmt;

use num_traits::cast::FromPrimitive;

pub(crate) fn provide(providers: &mut Providers) {
    // We obviously don't support C ABIs at all. However, libcore declares a
    // handful of `extern "C"` functions, e.g.:
    // https://github.com/rust-lang/rust/blob/5fae56971d8487088c0099c82c0a5ce1638b5f62/library/core/src/slice/cmp.rs#L119
    // Those are ultimately implemented by compiler-builtins:
    // https://github.com/rust-lang/rust/blob/5fae56971d8487088c0099c82c0a5ce1638b5f62/library/core/src/lib.rs#L23-L27
    // Leaving them as C should theoretically be fine, but there is no backend
    // hook for `FnAbi::adjust_for_cabi`, which therefore panics:
    // https://github.com/rust-lang/rust/blob/5fae56971d8487088c0099c82c0a5ce1638b5f62/compiler/rustc_target/src/abi/call/mod.rs#L603
    // So rewrite every `extern "C"` signature to `extern "unadjusted"`, which
    // is enough to let libcore compile with arch=spirv.
    providers.fn_sig = |tcx, def_id| {
        // Query providers are plain `fn`s, not `Fn` closures, so the previous
        // provider can't be captured; fortunately the defaults are exposed
        // (thanks rustdoc), so delegate to the default provider instead.
        let sig = (rustc_interface::DEFAULT_QUERY_PROVIDERS.fn_sig)(tcx, def_id);
        sig.map_bound(|mut inner| {
            if let SpecAbi::C { .. } = inner.abi {
                inner.abi = SpecAbi::Unadjusted;
            }
            inner
        })
    };

    // For the Rust ABI, `FnAbi` adjustments are backend-agnostic, but they will
    // use features like `PassMode::Cast`, that are incompatible with SPIR-V.
    // By hooking the queries computing `FnAbi`s, we can recompute the `FnAbi`
    // from the return/args layouts, to e.g. prefer using `PassMode::Direct`.
    fn readjust_fn_abi<'tcx>(
        tcx: TyCtxt<'tcx>,
        fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
    ) -> &'tcx FnAbi<'tcx, Ty<'tcx>> {
        // Rebuild each `ArgAbi` from its layout alone, with no target-specific
        // attributes, so only SPIR-V-compatible `PassMode`s are produced.
        let readjust_arg_abi = |arg: &ArgAbi<'tcx, Ty<'tcx>>| {
            let mut arg = ArgAbi::new(&tcx, arg.layout, |_, _, _| ArgAttributes::new());

            // Avoid pointlessly passing ZSTs, just like the official Rust ABI.
            if arg.layout.is_zst() {
                arg.mode = PassMode::Ignore;
            }

            arg
        };
        tcx.arena.alloc(FnAbi {
            args: fn_abi.args.iter().map(readjust_arg_abi).collect(),
            ret: readjust_arg_abi(&fn_abi.ret),

            // FIXME(eddyb) validate some of these, and report errors - however,
            // we can't just emit errors from here, since we have no `Span`, so
            // we should have instead a check on MIR for e.g. C variadic calls.
            c_variadic: fn_abi.c_variadic,
            fixed_count: fn_abi.fixed_count,
            conv: fn_abi.conv,
            can_unwind: fn_abi.can_unwind,
        })
    }
    providers.fn_abi_of_fn_ptr = |tcx, key| {
        let default = (rustc_interface::DEFAULT_QUERY_PROVIDERS.fn_abi_of_fn_ptr)(tcx, key);
        default.map(|fn_abi| readjust_fn_abi(tcx, fn_abi))
    };
    providers.fn_abi_of_instance = |tcx, key| {
        let default = (rustc_interface::DEFAULT_QUERY_PROVIDERS.fn_abi_of_instance)(tcx, key);
        default.map(|fn_abi| readjust_fn_abi(tcx, fn_abi))
    };
}

pub(crate) fn provide_extern(providers: &mut Providers) {
    // Restore the default extern provider for queries overridden in `provide`:
    // for cross-crate items, `fn_sig` must still go through the
    // `rustc_metadata::rmeta` decoding, as opposed to being locally computed.
    providers.fn_sig = rustc_interface::DEFAULT_EXTERN_QUERY_PROVIDERS.fn_sig;
}

/// If a struct contains a pointer to itself, even indirectly, then doing a naive recursive walk
/// of the fields will result in an infinite loop. Because pointers are the only thing that are
/// allowed to be recursive, keep track of what pointers we've translated, or are currently in the
Expand Down Expand Up @@ -81,7 +152,9 @@ impl<'tcx> RecursivePointeeCache<'tcx> {
) -> Word {
match self.map.borrow_mut().entry(pointee) {
// We should have hit begin() on this type already, which always inserts an entry.
Entry::Vacant(_) => bug!("RecursivePointeeCache::end should always have entry"),
Entry::Vacant(_) => {
span_bug!(span, "RecursivePointeeCache::end should always have entry")
}
Entry::Occupied(mut entry) => match *entry.get() {
// State: There have been no recursive references to this type while defining it, and so no
// OpTypeForwardPointer has been emitted. This is the most common case.
Expand All @@ -103,7 +176,7 @@ impl<'tcx> RecursivePointeeCache<'tcx> {
.def_with_id(cx, span, id)
}
PointeeDefState::Defined(_) => {
bug!("RecursivePointeeCache::end defined pointer twice")
span_bug!(span, "RecursivePointeeCache::end defined pointer twice")
}
},
}
Expand Down Expand Up @@ -163,87 +236,6 @@ impl<'tcx> ConvSpirvType<'tcx> for PointeeTy<'tcx> {
}
}

impl<'tcx> ConvSpirvType<'tcx> for Reg {
    /// Lower an ABI register class to the SPIR-V type of the same width.
    fn spirv_type(&self, span: Span, cx: &CodegenCx<'tcx>) -> Word {
        let bits = self.size.bits() as u32;
        match self.kind {
            RegKind::Integer => SpirvType::Integer(bits, false).def(span, cx),
            RegKind::Float => SpirvType::Float(bits).def(span, cx),
            // Vector registers are modeled as a vector of `u8`s, one per byte.
            RegKind::Vector => {
                let byte = SpirvType::Integer(8, false).def(span, cx);
                SpirvType::Vector {
                    element: byte,
                    count: self.size.bytes() as u32,
                }
                .def(span, cx)
            }
        }
    }
}

impl<'tcx> ConvSpirvType<'tcx> for CastTarget {
    /// Lowers a `CastTarget` (an optional prefix of registers followed by a
    /// "rest" area made of repeated `rest.unit` chunks) to a SPIR-V type.
    ///
    /// Simple shapes collapse to a single unit type or an array; anything else
    /// becomes an anonymous `SpirvType::Adt` struct mirroring the cast layout.
    fn spirv_type(&self, span: Span, cx: &CodegenCx<'tcx>) -> Word {
        let rest_ll_unit = self.rest.unit.spirv_type(span, cx);
        // How many whole `rest.unit` chunks fit in the rest area, plus any
        // leftover bytes that don't fill a full chunk. Guard against a
        // zero-sized unit to avoid dividing by zero.
        let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
            (0, 0)
        } else {
            (
                self.rest.total.bytes() / self.rest.unit.size.bytes(),
                self.rest.total.bytes() % self.rest.unit.size.bytes(),
            )
        };

        if self.prefix.iter().all(|x| x.is_none()) {
            // Simplify to a single unit when there is no prefix and size <= unit size
            if self.rest.total <= self.rest.unit.size {
                return rest_ll_unit;
            }

            // Simplify to array when all chunks are the same size and type
            if rem_bytes == 0 {
                return SpirvType::Array {
                    element: rest_ll_unit,
                    count: cx.constant_u32(span, rest_count as u32),
                }
                .def(span, cx);
            }
        }

        // Create list of fields in the main structure
        let mut args: Vec<_> = self
            .prefix
            .iter()
            .flatten()
            .map(|&kind| {
                Reg {
                    kind,
                    size: self.prefix_chunk_size,
                }
                .spirv_type(span, cx)
            })
            .chain((0..rest_count).map(|_| rest_ll_unit))
            .collect();

        // Append final integer
        if rem_bytes != 0 {
            // Only integers can be really split further.
            assert_eq!(self.rest.unit.kind, RegKind::Integer);
            args.push(SpirvType::Integer(rem_bytes as u32 * 8, false).def(span, cx));
        }

        // Sanity-check: the struct layout we compute field-by-field must match
        // the size/align the `CastTarget` itself reports for the target.
        let size = Some(self.size(cx));
        let align = self.align(cx);
        let (field_offsets, computed_size, computed_align) = auto_struct_layout(cx, &args);
        assert_eq!(size, computed_size, "{:#?}", self);
        assert_eq!(align, computed_align, "{:#?}", self);
        SpirvType::Adt {
            def_id: None,
            size,
            align,
            field_types: args,
            field_offsets,
            field_names: None,
        }
        .def(span, cx)
    }
}

impl<'tcx> ConvSpirvType<'tcx> for FnAbi<'tcx, Ty<'tcx>> {
fn spirv_type(&self, span: Span, cx: &CodegenCx<'tcx>) -> Word {
let mut argument_types = Vec::new();
Expand All @@ -253,14 +245,11 @@ impl<'tcx> ConvSpirvType<'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Direct(_) | PassMode::Pair(..) => {
self.ret.layout.spirv_type_immediate(span, cx)
}
PassMode::Cast(cast_target) => cast_target.spirv_type(span, cx),
PassMode::Indirect { .. } => {
let pointee = self.ret.layout.spirv_type(span, cx);
let pointer = SpirvType::Pointer { pointee }.def(span, cx);
// Important: the return pointer comes *first*, not last.
argument_types.push(pointer);
SpirvType::Void.def(span, cx)
}
PassMode::Cast(_) | PassMode::Indirect { .. } => span_bug!(
span,
"query hooks should've made this `PassMode` impossible: {:#?}",
self.ret
),
};

for arg in &self.args {
Expand All @@ -276,27 +265,11 @@ impl<'tcx> ConvSpirvType<'tcx> for FnAbi<'tcx, Ty<'tcx>> {
));
continue;
}
PassMode::Cast(cast_target) => cast_target.spirv_type(span, cx),
PassMode::Indirect {
extra_attrs: Some(_),
..
} => {
let ptr_ty = cx.tcx.mk_mut_ptr(arg.layout.ty);
let ptr_layout = cx.layout_of(ptr_ty);
argument_types.push(scalar_pair_element_backend_type(
cx, span, ptr_layout, 0, true,
));
argument_types.push(scalar_pair_element_backend_type(
cx, span, ptr_layout, 1, true,
));
continue;
}
PassMode::Indirect {
extra_attrs: None, ..
} => {
let pointee = arg.layout.spirv_type(span, cx);
SpirvType::Pointer { pointee }.def(span, cx)
}
PassMode::Cast(_) | PassMode::Indirect { .. } => span_bug!(
span,
"query hooks should've made this `PassMode` impossible: {:#?}",
arg
),
};
argument_types.push(arg_type);
}
Expand Down Expand Up @@ -395,7 +368,11 @@ pub fn scalar_pair_element_backend_type<'tcx>(
) -> Word {
let [a, b] = match &ty.layout.abi {
Abi::ScalarPair(a, b) => [a, b],
other => bug!("scalar_pair_element_backend_type invalid abi: {:?}", other),
other => span_bug!(
span,
"scalar_pair_element_backend_type invalid abi: {:?}",
other
),
};
let offset = match index {
0 => Size::ZERO,
Expand Down Expand Up @@ -524,7 +501,8 @@ fn dig_scalar_pointee<'tcx>(

fn trans_aggregate<'tcx>(cx: &CodegenCx<'tcx>, span: Span, ty: TyAndLayout<'tcx>) -> Word {
match ty.fields {
FieldsShape::Primitive => bug!(
FieldsShape::Primitive => span_bug!(
span,
"trans_aggregate called for FieldsShape::Primitive layout {:#?}",
ty
),
Expand Down Expand Up @@ -629,7 +607,7 @@ fn trans_struct<'tcx>(cx: &CodegenCx<'tcx>, span: Span, ty: TyAndLayout<'tcx>) -
} else {
if let TyKind::Adt(_, _) = ty.ty.kind() {
} else {
bug!("Variants::Multiple not TyKind::Adt");
span_bug!(span, "Variants::Multiple not TyKind::Adt");
}
if i == 0 {
field_names.push("discriminant".to_string());
Expand Down
25 changes: 8 additions & 17 deletions crates/rustc_codegen_spirv/src/builder/intrinsics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ use rustc_middle::ty::{FnDef, Instance, ParamEnv, Ty, TyKind};
use rustc_span::source_map::Span;
use rustc_span::sym;
use rustc_target::abi::call::{FnAbi, PassMode};
use std::assert_matches::assert_matches;

fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_>) -> Option<(u64, bool)> {
match ty.kind() {
Expand Down Expand Up @@ -100,16 +101,9 @@ impl<'a, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'tcx> {

sym::volatile_load | sym::unaligned_volatile_load => {
let ptr = args[0].immediate();
if let PassMode::Cast(ty) = fn_abi.ret.mode {
let pointee = ty.spirv_type(self.span(), self);
let pointer = SpirvType::Pointer { pointee }.def(self.span(), self);
let ptr = self.pointercast(ptr, pointer);
self.volatile_load(pointee, ptr)
} else {
let layout = self.layout_of(substs.type_at(0));
let load = self.volatile_load(layout.spirv_type(self.span(), self), ptr);
self.to_immediate(load, layout)
}
let layout = self.layout_of(substs.type_at(0));
let load = self.volatile_load(layout.spirv_type(self.span(), self), ptr);
self.to_immediate(load, layout)
}

sym::prefetch_read_data
Expand Down Expand Up @@ -330,13 +324,10 @@ impl<'a, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'tcx> {
};

if !fn_abi.ret.is_ignore() {
if let PassMode::Cast(_ty) = fn_abi.ret.mode {
self.fatal("TODO: PassMode::Cast not implemented yet in intrinsics");
} else {
OperandRef::from_immediate_or_packed_pair(self, value, result.layout)
.val
.store(self, result);
}
assert_matches!(fn_abi.ret.mode, PassMode::Direct(_) | PassMode::Pair(..));
OperandRef::from_immediate_or_packed_pair(self, value, result.layout)
.val
.store(self, result);
}
}

Expand Down
Loading