Skip to content

Implement volatile_load and volatile_store intrinsics #11173

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jan 1, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/librustc/lib/llvm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -787,6 +787,10 @@ pub mod llvm {
pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool;
pub fn LLVMSetTailCall(CallInst: ValueRef, IsTailCall: Bool);

/* Operations on load/store instructions (only) */
/// Returns True if the given load/store instruction is marked volatile.
pub fn LLVMGetVolatile(MemoryAccessInst: ValueRef) -> Bool;
/// Sets or clears the volatile flag on the given load/store instruction.
pub fn LLVMSetVolatile(MemoryAccessInst: ValueRef, volatile: Bool);

/* Operations on phi nodes */
pub fn LLVMAddIncoming(PhiNode: ValueRef,
IncomingValues: *ValueRef,
Expand Down
12 changes: 12 additions & 0 deletions src/librustc/middle/trans/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -349,6 +349,13 @@ pub fn Load(cx: &Block, PointerVal: ValueRef) -> ValueRef {
}
}

pub fn VolatileLoad(cx: &Block, PointerVal: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is returning undef of type nil here actually safe? It seems to be, but I don't understand the details of this operation nor any way to exercise this code path.

B(cx).volatile_load(PointerVal)
}
}

pub fn AtomicLoad(cx: &Block, PointerVal: ValueRef, order: AtomicOrdering) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
Expand Down Expand Up @@ -383,6 +390,11 @@ pub fn Store(cx: &Block, Val: ValueRef, Ptr: ValueRef) {
B(cx).store(Val, Ptr)
}

/// Emits a volatile store of `Val` into `Ptr` in block `cx`.
pub fn VolatileStore(cx: &Block, Val: ValueRef, Ptr: ValueRef) {
    // Nothing to emit once the block is known to be unreachable.
    if !cx.unreachable.get() {
        B(cx).volatile_store(Val, Ptr)
    }
}

pub fn AtomicStore(cx: &Block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) {
if cx.unreachable.get() { return; }
B(cx).atomic_store(Val, Ptr, order)
Expand Down
21 changes: 21 additions & 0 deletions src/librustc/middle/trans/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -449,6 +449,15 @@ impl Builder {
}
}

pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
    // Build an ordinary load, then flag the instruction as volatile so
    // LLVM will not elide it or reorder it across other volatile accesses.
    self.count_insn("load.volatile");
    unsafe {
        let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
        llvm::LLVMSetVolatile(load, lib::llvm::True);
        load
    }
}

pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
self.count_insn("load.atomic");
unsafe {
Expand Down Expand Up @@ -488,6 +497,18 @@ impl Builder {
}
}

pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) {
    debug!("Store {} -> {}",
           self.ccx.tn.val_to_str(val),
           self.ccx.tn.val_to_str(ptr));
    assert!(is_not_null(self.llbuilder));
    self.count_insn("store.volatile");
    // Build an ordinary store, then flag the instruction as volatile so
    // LLVM will not elide it or reorder it across other volatile accesses.
    unsafe {
        let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
        llvm::LLVMSetVolatile(store, lib::llvm::True);
    }
}

pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
debug!("Store {} -> {}",
self.ccx.tn.val_to_str(val),
Expand Down
20 changes: 20 additions & 0 deletions src/librustc/middle/trans/intrinsic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,23 @@ pub fn trans_intrinsic(ccx: @CrateContext,
}
}

// Translates the `volatile_load` intrinsic: load through the pointer
// argument volatilely and return the loaded value.
fn volatile_load_intrinsic(bcx: @Block) {
    // The sole real argument is the source pointer.
    let src = get_param(bcx.fcx.llfn, bcx.fcx.arg_pos(0u));
    Ret(bcx, VolatileLoad(bcx, src));
}

// Translates the `volatile_store` intrinsic: store the value argument
// through the destination pointer volatilely; returns nothing.
fn volatile_store_intrinsic(bcx: @Block) {
    // Real arguments are (dst, val), in that order.
    let arg0 = bcx.fcx.arg_pos(0u);
    let dst = get_param(bcx.fcx.llfn, arg0);
    let val = get_param(bcx.fcx.llfn, arg0 + 1);

    VolatileStore(bcx, val, dst);
    RetVoid(bcx);
}

fn copy_intrinsic(bcx: @Block, allow_overlap: bool, tp_ty: ty::t) {
let ccx = bcx.ccx();
let lltp_ty = type_of::type_of(ccx, tp_ty);
Expand Down Expand Up @@ -480,6 +497,9 @@ pub fn trans_intrinsic(ccx: @CrateContext,
"bswap32" => simple_llvm_intrinsic(bcx, "llvm.bswap.i32", 1),
"bswap64" => simple_llvm_intrinsic(bcx, "llvm.bswap.i64", 1),

"volatile_load" => volatile_load_intrinsic(bcx),
"volatile_store" => volatile_store_intrinsic(bcx),

"i8_add_with_overflow" =>
with_overflow_instrinsic(bcx, "llvm.sadd.with.overflow.i8", output_type),
"i16_add_with_overflow" =>
Expand Down
5 changes: 5 additions & 0 deletions src/librustc/middle/typeck/check/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4226,6 +4226,11 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: @ast::foreign_item) {
"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),

// volatile_load<T>(src: *T) -> T: one type parameter, reads through an
// immutable raw pointer.
"volatile_load" =>
(1, ~[ ty::mk_imm_ptr(tcx, param(ccx, 0)) ], param(ccx, 0)),
// volatile_store<T>(dst: *mut T, val: T): writes through a mutable raw
// pointer and returns nil.
"volatile_store" =>
(1, ~[ ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ], ty::mk_nil()),

"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
(0, ~[ty::mk_i8(), ty::mk_i8()],
ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])),
Expand Down
12 changes: 12 additions & 0 deletions src/libstd/unstable/intrinsics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,15 @@

The corresponding definitions are in librustc/middle/trans/foreign.rs.

# Volatiles

The volatile intrinsics provide operations intended to act on I/O
memory, which are guaranteed not to be reordered by the compiler
across other volatile intrinsics. See the LLVM documentation on
[volatile].

[volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses

# Atomics

The atomic intrinsics provide common atomic operations on machine
Expand Down Expand Up @@ -179,6 +188,9 @@ extern "rust-intrinsic" {
/// Execute a breakpoint trap, for inspection by a debugger.
pub fn breakpoint();

/// Performs a volatile load of the value stored at `src`.
#[cfg(not(stage0))] pub fn volatile_load<T>(src: *T) -> T;
/// Performs a volatile store of `val` into `dst`.
#[cfg(not(stage0))] pub fn volatile_store<T>(dst: *mut T, val: T);

/// Atomic compare and exchange, sequentially consistent.
pub fn atomic_cxchg(dst: &mut int, old: int, src: int) -> int;
/// Atomic compare and exchange, acquire ordering.
Expand Down
10 changes: 10 additions & 0 deletions src/test/run-make/volatile-intrinsics/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
-include ../tools.mk

# Exercise the volatile intrinsics end to end: the program must run
# correctly, and the emitted LLVM IR must still contain the volatile
# load/store instructions (i.e. the optimizer did not remove them).
all:
# The tests must pass...
$(RUSTC) main.rs
$(call RUN,main)
# ... and the loads/stores must not be optimized out.
$(RUSTC) main.rs --emit-llvm -S
grep "load volatile" $(TMPDIR)/main.ll
grep "store volatile" $(TMPDIR)/main.ll
19 changes: 19 additions & 0 deletions src/test/run-make/volatile-intrinsics/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::unstable::intrinsics::{volatile_load, volatile_store};

pub fn main() {
    unsafe {
        // Write through the volatile store intrinsic, then read back
        // through the volatile load intrinsic; the value must round-trip.
        let mut i: int = 1;
        volatile_store(&mut i, 2);
        let readback = volatile_load(&i);
        assert_eq!(readback, 2);
    }
}