Skip to content

Commit 1dcc986

Browse files
committed
auto merge of #11173 : whitequark/rust/master, r=thestinger
This PR adds `std::unstable::intrinsics::{volatile_load,volatile_store}`, which map to LLVM's `load volatile` and `store volatile` operations, respectively. This would fix #11172. I have addressed several uncertainties with this PR in the line comments.
2 parents bd52e6a + f98f83a commit 1dcc986

File tree

8 files changed

+103
-0
lines changed

8 files changed

+103
-0
lines changed

src/librustc/lib/llvm.rs

+4
Original file line numberDiff line numberDiff line change
@@ -787,6 +787,10 @@ pub mod llvm {
787787
pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool;
788788
pub fn LLVMSetTailCall(CallInst: ValueRef, IsTailCall: Bool);
789789

790+
/* Operations on load/store instructions (only) */
791+
pub fn LLVMGetVolatile(MemoryAccessInst: ValueRef) -> Bool;
792+
pub fn LLVMSetVolatile(MemoryAccessInst: ValueRef, volatile: Bool);
793+
790794
/* Operations on phi nodes */
791795
pub fn LLVMAddIncoming(PhiNode: ValueRef,
792796
IncomingValues: *ValueRef,

src/librustc/middle/trans/build.rs

+12
Original file line numberDiff line numberDiff line change
@@ -349,6 +349,13 @@ pub fn Load(cx: &Block, PointerVal: ValueRef) -> ValueRef {
349349
}
350350
}
351351

352+
pub fn VolatileLoad(cx: &Block, PointerVal: ValueRef) -> ValueRef {
353+
unsafe {
354+
if cx.unreachable.get() { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
355+
B(cx).volatile_load(PointerVal)
356+
}
357+
}
358+
352359
pub fn AtomicLoad(cx: &Block, PointerVal: ValueRef, order: AtomicOrdering) -> ValueRef {
353360
unsafe {
354361
let ccx = cx.fcx.ccx;
@@ -383,6 +390,11 @@ pub fn Store(cx: &Block, Val: ValueRef, Ptr: ValueRef) {
383390
B(cx).store(Val, Ptr)
384391
}
385392

393+
pub fn VolatileStore(cx: &Block, Val: ValueRef, Ptr: ValueRef) {
394+
if cx.unreachable.get() { return; }
395+
B(cx).volatile_store(Val, Ptr)
396+
}
397+
386398
pub fn AtomicStore(cx: &Block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) {
387399
if cx.unreachable.get() { return; }
388400
B(cx).atomic_store(Val, Ptr, order)

src/librustc/middle/trans/builder.rs

+21
Original file line numberDiff line numberDiff line change
@@ -449,6 +449,15 @@ impl Builder {
449449
}
450450
}
451451

452+
pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
453+
self.count_insn("load.volatile");
454+
unsafe {
455+
let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
456+
llvm::LLVMSetVolatile(insn, lib::llvm::True);
457+
insn
458+
}
459+
}
460+
452461
pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
453462
self.count_insn("load.atomic");
454463
unsafe {
@@ -488,6 +497,18 @@ impl Builder {
488497
}
489498
}
490499

500+
pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) {
501+
debug!("Store {} -> {}",
502+
self.ccx.tn.val_to_str(val),
503+
self.ccx.tn.val_to_str(ptr));
504+
assert!(is_not_null(self.llbuilder));
505+
self.count_insn("store.volatile");
506+
unsafe {
507+
let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
508+
llvm::LLVMSetVolatile(insn, lib::llvm::True);
509+
}
510+
}
511+
491512
pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
492513
debug!("Store {} -> {}",
493514
self.ccx.tn.val_to_str(val),

src/librustc/middle/trans/intrinsic.rs

+20
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,23 @@ pub fn trans_intrinsic(ccx: @CrateContext,
7373
}
7474
}
7575

76+
/// Translate the `volatile_load` intrinsic: volatile-load through the
/// single pointer argument and return the loaded value.
fn volatile_load_intrinsic(bcx: @Block) {
    let arg_idx = bcx.fcx.arg_pos(0u);
    let src = get_param(bcx.fcx.llfn, arg_idx);
    Ret(bcx, VolatileLoad(bcx, src));
}
83+
84+
/// Translate the `volatile_store` intrinsic: volatile-store the value
/// (second argument) through the destination pointer (first argument).
fn volatile_store_intrinsic(bcx: @Block) {
    let arg_idx = bcx.fcx.arg_pos(0u);
    let dst = get_param(bcx.fcx.llfn, arg_idx);
    let val = get_param(bcx.fcx.llfn, arg_idx + 1);
    VolatileStore(bcx, val, dst);
    RetVoid(bcx);
}
92+
7693
fn copy_intrinsic(bcx: @Block, allow_overlap: bool, tp_ty: ty::t) {
7794
let ccx = bcx.ccx();
7895
let lltp_ty = type_of::type_of(ccx, tp_ty);
@@ -480,6 +497,9 @@ pub fn trans_intrinsic(ccx: @CrateContext,
480497
"bswap32" => simple_llvm_intrinsic(bcx, "llvm.bswap.i32", 1),
481498
"bswap64" => simple_llvm_intrinsic(bcx, "llvm.bswap.i64", 1),
482499

500+
"volatile_load" => volatile_load_intrinsic(bcx),
501+
"volatile_store" => volatile_store_intrinsic(bcx),
502+
483503
"i8_add_with_overflow" =>
484504
with_overflow_instrinsic(bcx, "llvm.sadd.with.overflow.i8", output_type),
485505
"i16_add_with_overflow" =>

src/librustc/middle/typeck/check/mod.rs

+5
Original file line numberDiff line numberDiff line change
@@ -4226,6 +4226,11 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: @ast::foreign_item) {
42264226
"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
42274227
"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
42284228

4229+
"volatile_load" =>
4230+
(1, ~[ ty::mk_imm_ptr(tcx, param(ccx, 0)) ], param(ccx, 0)),
4231+
"volatile_store" =>
4232+
(1, ~[ ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ], ty::mk_nil()),
4233+
42294234
"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
42304235
(0, ~[ty::mk_i8(), ty::mk_i8()],
42314236
ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])),

src/libstd/unstable/intrinsics.rs

+12
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,15 @@
1212
1313
The corresponding definitions are in librustc/middle/trans/foreign.rs.
1414
15+
# Volatiles
16+
17+
The volatile intrinsics provide operations intended to act on I/O
18+
memory, which are guaranteed to not be reordered by the compiler
19+
across other volatile intrinsics. See the LLVM documentation on
20+
[volatile].
21+
22+
[volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses
23+
1524
# Atomics
1625
1726
The atomic intrinsics provide common atomic operations on machine
@@ -179,6 +188,9 @@ extern "rust-intrinsic" {
179188
/// Execute a breakpoint trap, for inspection by a debugger.
180189
pub fn breakpoint();
181190

191+
#[cfg(not(stage0))] pub fn volatile_load<T>(src: *T) -> T;
192+
#[cfg(not(stage0))] pub fn volatile_store<T>(dst: *mut T, val: T);
193+
182194
/// Atomic compare and exchange, sequentially consistent.
183195
pub fn atomic_cxchg(dst: &mut int, old: int, src: int) -> int;
184196
/// Atomic compare and exchange, acquire ordering.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# Run-make test for the volatile_load/volatile_store intrinsics:
# the program must both run correctly AND keep its volatile memory
# accesses visible in the emitted LLVM IR.
-include ../tools.mk

all:
	# The tests must pass...
	$(RUSTC) main.rs
	$(call RUN,main)
	# ... and the loads/stores must not be optimized out.
	$(RUSTC) main.rs --emit-llvm -S
	grep "load volatile" $(TMPDIR)/main.ll
	grep "store volatile" $(TMPDIR)/main.ll
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2+
// file at the top-level directory of this distribution and at
3+
// http://rust-lang.org/COPYRIGHT.
4+
//
5+
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6+
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7+
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8+
// option. This file may not be copied, modified, or distributed
9+
// except according to those terms.
10+
11+
use std::unstable::intrinsics::{volatile_load, volatile_store};
12+
13+
pub fn main() {
14+
unsafe {
15+
let mut i : int = 1;
16+
volatile_store(&mut i, 2);
17+
assert_eq!(volatile_load(&i), 2);
18+
}
19+
}

0 commit comments

Comments
 (0)