Commit c4c08aa (1 parent: 54529ed)

[msan] Avoid extra origin address realignment.

Do not realign origin address if the corresponding application address is at
least 4-byte-aligned. Saves 2.5% code size in track-origins mode.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@223464 91177308-0d34-0410-b5e6-96231b3b80d8
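For context on why the masking can be dropped: the origin address is derived as OriginAddr = (ShadowAddr + OriginOffset) & ~3ULL, and the shadow address keeps the low bits of the application address. When the application access is at least 4-byte aligned (and OriginOffset is itself 4-aligned), the sum is already 4-aligned, so the trailing AND is dead weight. Below is a minimal standalone sketch of that arithmetic, using made-up mapping constants purely for illustration; the real ShadowMask and OriginOffset values are target-specific and live in the pass.

#include <cstdint>
#include <cstdio>

// Illustrative constants only; these are NOT the real MSan mapping values.
constexpr uint64_t kShadowMask         = 0x400000000000ULL;
constexpr uint64_t kOriginOffset       = 0x200000000000ULL; // assumed 4-aligned
constexpr uint64_t kMinOriginAlignment = 4;

// Origin address computation with the realignment step made explicit.
uint64_t originAddr(uint64_t appAddr, unsigned alignment) {
  uint64_t shadow = appAddr & ~kShadowMask;   // shadow keeps appAddr's low bits
  uint64_t origin = shadow + kOriginOffset;
  if (alignment < kMinOriginAlignment)        // realign only when it can matter
    origin &= ~(kMinOriginAlignment - 1);
  return origin;
}

int main() {
  uint64_t aligned4 = 0x700000001234ULL;      // 4-aligned application address
  uint64_t aligned1 = 0x700000001235ULL;      // 1-aligned application address
  // For the 4-aligned address the AND would not change the result, so the
  // instrumentation can omit it; for the 1-aligned one it still rounds down.
  std::printf("%llx\n", (unsigned long long)originAddr(aligned4, 4));
  std::printf("%llx\n", (unsigned long long)originAddr(aligned1, 1));
  return 0;
}

Skipping the AND for the 4- and 8-aligned cases is where the quoted 2.5% code-size saving in track-origins mode comes from.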

File tree: 2 files changed (+97 -21 lines)

lib/Transforms/Instrumentation/MemorySanitizer.cpp (+24 -21)
@@ -522,9 +522,11 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
 
   void storeOrigin(IRBuilder<> &IRB, Value *Addr, Value *Shadow, Value *Origin,
                    unsigned Alignment, bool AsCall) {
+    unsigned OriginAlignment = std::max(kMinOriginAlignment, Alignment);
     if (isa<StructType>(Shadow->getType())) {
-      IRB.CreateAlignedStore(updateOrigin(Origin, IRB), getOriginPtr(Addr, IRB),
-                             Alignment);
+      IRB.CreateAlignedStore(updateOrigin(Origin, IRB),
+                             getOriginPtr(Addr, IRB, Alignment),
+                             OriginAlignment);
     } else {
       Value *ConvertedShadow = convertToShadowTyNoVec(Shadow, IRB);
       // TODO(eugenis): handle non-zero constant shadow by inserting an
@@ -549,7 +551,8 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
             Cmp, IRB.GetInsertPoint(), false, MS.OriginStoreWeights);
         IRBuilder<> IRBNew(CheckTerm);
         IRBNew.CreateAlignedStore(updateOrigin(Origin, IRBNew),
-                                  getOriginPtr(Addr, IRBNew), Alignment);
+                                  getOriginPtr(Addr, IRBNew, Alignment),
+                                  OriginAlignment);
       }
     }
   }
@@ -573,11 +576,9 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
 
     if (SI.isAtomic()) SI.setOrdering(addReleaseOrdering(SI.getOrdering()));
 
-    if (MS.TrackOrigins) {
-      unsigned Alignment = std::max(kMinOriginAlignment, SI.getAlignment());
-      storeOrigin(IRB, Addr, Shadow, getOrigin(Val), Alignment,
+    if (MS.TrackOrigins)
+      storeOrigin(IRB, Addr, Shadow, getOrigin(Val), SI.getAlignment(),
                   InstrumentWithCalls);
-    }
   }
 }
 
@@ -739,16 +740,17 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
   /// address.
   ///
   /// OriginAddr = (ShadowAddr + OriginOffset) & ~3ULL
-  Value *getOriginPtr(Value *Addr, IRBuilder<> &IRB) {
+  Value *getOriginPtr(Value *Addr, IRBuilder<> &IRB, unsigned Alignment) {
     Value *ShadowLong =
-      IRB.CreateAnd(IRB.CreatePointerCast(Addr, MS.IntptrTy),
-                    ConstantInt::get(MS.IntptrTy, ~MS.ShadowMask));
-    Value *Add =
-      IRB.CreateAdd(ShadowLong,
-                    ConstantInt::get(MS.IntptrTy, MS.OriginOffset));
-    Value *SecondAnd =
-      IRB.CreateAnd(Add, ConstantInt::get(MS.IntptrTy, ~3ULL));
-    return IRB.CreateIntToPtr(SecondAnd, PointerType::get(IRB.getInt32Ty(), 0));
+        IRB.CreateAnd(IRB.CreatePointerCast(Addr, MS.IntptrTy),
+                      ConstantInt::get(MS.IntptrTy, ~MS.ShadowMask));
+    Value *Origin = IRB.CreateAdd(
+        ShadowLong, ConstantInt::get(MS.IntptrTy, MS.OriginOffset));
+    if (Alignment < kMinOriginAlignment) {
+      uint64_t Mask = kMinOriginAlignment - 1;
+      Origin = IRB.CreateAnd(Origin, ConstantInt::get(MS.IntptrTy, ~Mask));
+    }
+    return IRB.CreateIntToPtr(Origin, PointerType::get(IRB.getInt32Ty(), 0));
   }
 
   /// \brief Compute the shadow address for a given function argument.
@@ -1052,9 +1054,10 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
 
     if (MS.TrackOrigins) {
       if (PropagateShadow) {
-        unsigned Alignment = std::max(kMinOriginAlignment, I.getAlignment());
-        setOrigin(&I,
-                  IRB.CreateAlignedLoad(getOriginPtr(Addr, IRB), Alignment));
+        unsigned Alignment = I.getAlignment();
+        unsigned OriginAlignment = std::max(kMinOriginAlignment, Alignment);
+        setOrigin(&I, IRB.CreateAlignedLoad(getOriginPtr(Addr, IRB, Alignment),
+                                            OriginAlignment));
       } else {
         setOrigin(&I, getCleanOrigin());
       }
@@ -1706,7 +1709,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     // FIXME: use ClStoreCleanOrigin
     // FIXME: factor out common code from materializeStores
     if (MS.TrackOrigins)
-      IRB.CreateStore(getOrigin(&I, 1), getOriginPtr(Addr, IRB));
+      IRB.CreateStore(getOrigin(&I, 1), getOriginPtr(Addr, IRB, 1));
     return true;
   }
 
@@ -1733,7 +1736,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
 
     if (MS.TrackOrigins) {
       if (PropagateShadow)
-        setOrigin(&I, IRB.CreateLoad(getOriginPtr(Addr, IRB)));
+        setOrigin(&I, IRB.CreateLoad(getOriginPtr(Addr, IRB, 1)));
       else
         setOrigin(&I, getCleanOrigin());
     }
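
Taken together, the MemorySanitizer.cpp changes route the application alignment into getOriginPtr, which now emits the realigning AND only when that alignment is below kMinOriginAlignment (4). The origin load or store itself is still tagged with max(kMinOriginAlignment, Alignment), and the two handlers in the last hunks, which do not track an alignment, simply pass 1 to keep the old always-realign behaviour. A small illustrative summary of that decision, with a hypothetical helper name that is not part of the pass:

#include <algorithm>
#include <cstdio>

constexpr unsigned kMinOriginAlignment = 4;

// Hypothetical summary helper, for illustration only: given the alignment of
// the application access, report the alignment used for the origin access and
// whether getOriginPtr must still realign the origin pointer.
struct OriginPlan {
  unsigned originAlignment; // alignment placed on the origin load/store
  bool needsRealign;        // is the "& ~3" realignment still emitted?
};

OriginPlan planForAccess(unsigned appAlignment) {
  return {std::max(kMinOriginAlignment, appAlignment),
          appAlignment < kMinOriginAlignment};
}

int main() {
  // Mirrors the Store8/Store4/Store2/Store1 cases in the new test below.
  const unsigned appAligns[] = {8, 4, 2, 1};
  for (unsigned a : appAligns) {
    OriginPlan p = planForAccess(a);
    std::printf("app align %u -> origin align %u, realign=%d\n", a,
                p.originAlignment, p.needsRealign);
  }
  return 0;
}

The new test below exercises exactly these four alignments and checks that the extra and (... i64 -4) pattern only appears for the 2- and 1-aligned stores.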
New test file (+73 -0)
@@ -0,0 +1,73 @@
+; RUN: opt < %s -msan -msan-check-access-address=0 -msan-track-origins=1 -S | FileCheck -check-prefix=CHECK -check-prefix=CHECK-ORIGINS1 %s
+; RUN: opt < %s -msan -msan-check-access-address=0 -msan-track-origins=2 -S | FileCheck -check-prefix=CHECK -check-prefix=CHECK-ORIGINS2 %s
+
+target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
+target triple = "x86_64-unknown-linux-gnu"
+
+
+; Check origin instrumentation of stores.
+; Check that debug info for origin propagation code is set correctly.
+
+@a8 = global i8 0, align 8
+@a4 = global i8 0, align 4
+@a2 = global i8 0, align 2
+@a1 = global i8 0, align 1
+
+; 8-aligned store => 8-aligned origin store, origin address is not realigned
+define void @Store8(i8 %x) sanitize_memory {
+entry:
+  store i8 %x, i8* @a8, align 8
+  ret void
+}
+
+; CHECK-LABEL: @Store8
+; CHECK-ORIGINS1: [[ORIGIN:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN0:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN:%[01-9a-z]+]] = call i32 @__msan_chain_origin(i32 [[ORIGIN0]])
+; CHECK: store i32 [[ORIGIN]], i32* inttoptr (i64 add (i64 and (i64 ptrtoint {{.*}} to i32*), align 8
+; CHECK: ret void
+
+
+; 4-aligned store => 4-aligned origin store, origin address is not realigned
+define void @Store4(i8 %x) sanitize_memory {
+entry:
+  store i8 %x, i8* @a4, align 4
+  ret void
+}
+
+; CHECK-LABEL: @Store4
+; CHECK-ORIGINS1: [[ORIGIN:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN0:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN:%[01-9a-z]+]] = call i32 @__msan_chain_origin(i32 [[ORIGIN0]])
+; CHECK: store i32 [[ORIGIN]], i32* inttoptr (i64 add (i64 and (i64 ptrtoint {{.*}} to i32*), align 4
+; CHECK: ret void
+
+
+; 2-aligned store => 4-aligned origin store, origin address is realigned
+define void @Store2(i8 %x) sanitize_memory {
+entry:
+  store i8 %x, i8* @a2, align 2
+  ret void
+}
+
+; CHECK-LABEL: @Store2
+; CHECK-ORIGINS1: [[ORIGIN:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN0:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN:%[01-9a-z]+]] = call i32 @__msan_chain_origin(i32 [[ORIGIN0]])
+; CHECK: store i32 [[ORIGIN]], i32* inttoptr (i64 and (i64 add (i64 and (i64 ptrtoint {{.*}} i64 -4) to i32*), align 4
+; CHECK: ret void
+
+
+; 1-aligned store => 4-aligned origin store, origin address is realigned
+define void @Store1(i8 %x) sanitize_memory {
+entry:
+  store i8 %x, i8* @a1, align 1
+  ret void
+}
+
+; CHECK-LABEL: @Store1
+; CHECK-ORIGINS1: [[ORIGIN:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN0:%[01-9a-z]+]] = load {{.*}} @__msan_param_origin_tls
+; CHECK-ORIGINS2: [[ORIGIN:%[01-9a-z]+]] = call i32 @__msan_chain_origin(i32 [[ORIGIN0]])
+; CHECK: store i32 [[ORIGIN]], i32* inttoptr (i64 and (i64 add (i64 and (i64 ptrtoint {{.*}} i64 -4) to i32*), align 4
+; CHECK: ret void
