Rollup merge of #52391 - Amanieu:volatile_unaligned, r=alexcrichton

Add unaligned volatile intrinsics

Surprisingly enough, it turns out that unaligned volatile loads are actually useful for certain (very niche) types of lock-free code. I included unaligned volatile stores for completeness, but I currently do not know of any use cases for them.

These are only exposed as intrinsics for now. If they turn out to be useful in practice, we can work towards stabilizing them.
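
For context, a minimal nightly-only sketch of how the new intrinsics can be called (the byte buffer and offset here are made up purely for illustration; the only new APIs assumed are the two intrinsics added by this PR):

#![feature(core_intrinsics)]

use std::intrinsics::{unaligned_volatile_load, unaligned_volatile_store};

fn main() {
    // A u32 "slot" at offset 1 of a byte buffer, i.e. deliberately misaligned.
    let mut buf = [0u8; 8];
    unsafe {
        let p = buf.as_mut_ptr().add(1) as *mut u32;
        // The existing volatile_load/volatile_store would be UB here because
        // `p` is not suitably aligned for u32; the unaligned variants allow it.
        unaligned_volatile_store(p, 0xdead_beef);
        assert_eq!(unaligned_volatile_load(p), 0xdead_beef);
    }
}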

r? @alexcrichton
Mark Rousskov 2018-07-24 16:43:44 -06:00 committed by GitHub
commit 06ba69d043
6 changed files with 44 additions and 8 deletions


@@ -1085,6 +1085,15 @@ extern "rust-intrinsic" {
     /// [`std::ptr::write_volatile`](../../std/ptr/fn.write_volatile.html).
     pub fn volatile_store<T>(dst: *mut T, val: T);
+    /// Perform a volatile load from the `src` pointer
+    /// The pointer is not required to be aligned.
+    #[cfg(not(stage0))]
+    pub fn unaligned_volatile_load<T>(src: *const T) -> T;
+    /// Perform a volatile store to the `dst` pointer.
+    /// The pointer is not required to be aligned.
+    #[cfg(not(stage0))]
+    pub fn unaligned_volatile_store<T>(dst: *mut T, val: T);
     /// Returns the square root of an `f32`
     pub fn sqrtf32(x: f32) -> f32;
     /// Returns the square root of an `f64`


@@ -54,6 +54,7 @@ bitflags! {
     pub struct MemFlags: u8 {
         const VOLATILE = 1 << 0;
         const NONTEMPORAL = 1 << 1;
+        const UNALIGNED = 1 << 2;
     }
 }
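
Aside: MemFlags is generated by the `bitflags!` macro, so flags compose with `|` and are tested with `contains()`; that is how the new `UNALIGNED` bit gets combined with `VOLATILE` by `OperandValue::unaligned_volatile_store` further down. A minimal standalone sketch of that behaviour, assuming only the `bitflags` crate:

#[macro_use]
extern crate bitflags;

bitflags! {
    pub struct MemFlags: u8 {
        const VOLATILE = 1 << 0;
        const NONTEMPORAL = 1 << 1;
        const UNALIGNED = 1 << 2;
    }
}

fn main() {
    // The combination that unaligned_volatile_store passes to store_with_flags.
    let flags = MemFlags::VOLATILE | MemFlags::UNALIGNED;
    assert!(flags.contains(MemFlags::VOLATILE));
    assert!(flags.contains(MemFlags::UNALIGNED));
    assert!(!flags.contains(MemFlags::NONTEMPORAL));
}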
@@ -602,7 +603,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         let ptr = self.check_store(val, ptr);
         unsafe {
             let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
-            llvm::LLVMSetAlignment(store, align.abi() as c_uint);
+            let align = if flags.contains(MemFlags::UNALIGNED) {
+                1
+            } else {
+                align.abi() as c_uint
+            };
+            llvm::LLVMSetAlignment(store, align);
             if flags.contains(MemFlags::VOLATILE) {
                 llvm::LLVMSetVolatile(store, llvm::True);
             }


@@ -234,15 +234,20 @@ pub fn codegen_intrinsic_call<'a, 'tcx>(bx: &Builder<'a, 'tcx>,
             memset_intrinsic(bx, true, substs.type_at(0),
                              args[0].immediate(), args[1].immediate(), args[2].immediate())
         }
-        "volatile_load" => {
+        "volatile_load" | "unaligned_volatile_load" => {
             let tp_ty = substs.type_at(0);
             let mut ptr = args[0].immediate();
             if let PassMode::Cast(ty) = fn_ty.ret.mode {
                 ptr = bx.pointercast(ptr, ty.llvm_type(cx).ptr_to());
             }
             let load = bx.volatile_load(ptr);
+            let align = if name == "unaligned_volatile_load" {
+                1
+            } else {
+                cx.align_of(tp_ty).abi() as u32
+            };
             unsafe {
-                llvm::LLVMSetAlignment(load, cx.align_of(tp_ty).abi() as u32);
+                llvm::LLVMSetAlignment(load, align);
             }
             to_immediate(bx, load, cx.layout_of(tp_ty))
         },
@@ -251,6 +256,11 @@ pub fn codegen_intrinsic_call<'a, 'tcx>(bx: &Builder<'a, 'tcx>,
             args[1].val.volatile_store(bx, dst);
             return;
         },
+        "unaligned_volatile_store" => {
+            let dst = args[0].deref(bx.cx);
+            args[1].val.unaligned_volatile_store(bx, dst);
+            return;
+        },
         "prefetch_read_data" | "prefetch_write_data" |
         "prefetch_read_instruction" | "prefetch_write_instruction" => {
             let expect = cx.get_intrinsic(&("llvm.prefetch"));


@@ -276,6 +276,10 @@ impl<'a, 'tcx> OperandValue {
         self.store_with_flags(bx, dest, MemFlags::VOLATILE);
     }
+    pub fn unaligned_volatile_store(self, bx: &Builder<'a, 'tcx>, dest: PlaceRef<'tcx>) {
+        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
+    }
     pub fn nontemporal_store(self, bx: &Builder<'a, 'tcx>, dest: PlaceRef<'tcx>) {
         self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
     }


@@ -270,9 +270,9 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             "roundf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32),
             "roundf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64),
-            "volatile_load" =>
+            "volatile_load" | "unaligned_volatile_load" =>
                 (1, vec![ tcx.mk_imm_ptr(param(0)) ], param(0)),
-            "volatile_store" =>
+            "volatile_store" | "unaligned_volatile_store" =>
                 (1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_nil()),
             "ctpop" | "ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" |


@@ -10,17 +10,24 @@
 #![feature(core_intrinsics, volatile)]
-use std::intrinsics::{volatile_load, volatile_store};
+use std::intrinsics::{
+    unaligned_volatile_load, unaligned_volatile_store, volatile_load, volatile_store,
+};
 use std::ptr::{read_volatile, write_volatile};
 pub fn main() {
     unsafe {
-        let mut i : isize = 1;
+        let mut i: isize = 1;
         volatile_store(&mut i, 2);
         assert_eq!(volatile_load(&i), 2);
     }
     unsafe {
-        let mut i : isize = 1;
+        let mut i: isize = 1;
+        unaligned_volatile_store(&mut i, 2);
+        assert_eq!(unaligned_volatile_load(&i), 2);
+    }
+    unsafe {
+        let mut i: isize = 1;
         write_volatile(&mut i, 2);
         assert_eq!(read_volatile(&i), 2);
     }
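
Note that the test stores through `&mut i`, which is always suitably aligned, so it exercises the new codegen path but not an actually misaligned pointer. For contrast, a sketch (not part of this PR) of what stable Rust already offers: `ptr::read_unaligned`/`write_unaligned` tolerate misalignment but are not volatile, while `read_volatile`/`write_volatile` are volatile but require alignment; the new intrinsics cover the missing combination.

use std::ptr;

fn main() {
    let mut buf = [0u8; 8];
    unsafe {
        // A deliberately misaligned *mut u32 into the buffer (offset 1).
        let p = buf.as_mut_ptr().add(1) as *mut u32;

        // Stable today: handles misalignment, but the access is not volatile.
        ptr::write_unaligned(p, 0x0102_0304);
        assert_eq!(ptr::read_unaligned(p), 0x0102_0304);

        // Stable today: volatile, but calling it through the misaligned `p`
        // would be undefined behaviour, so it is left commented out:
        // ptr::write_volatile(p, 0x0102_0304);
    }
}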