author    Evgeniy Stepanov <eugeni.stepanov@gmail.com>  2016-08-02 23:21:30 +0000
committer Evgeniy Stepanov <eugeni.stepanov@gmail.com>  2016-08-02 23:21:30 +0000
commit    d99f80b48e1f3502357e21d071531432be333b8e (patch)
tree      b7858ff8897d3b754b433c3e91e3c448fe9751ad
parent    39bf39f35c208109f6d5907708ee53dee2878bed (diff)
download  bcm5719-llvm-d99f80b48e1f3502357e21d071531432be333b8e.tar.gz
          bcm5719-llvm-d99f80b48e1f3502357e21d071531432be333b8e.zip
[safestack] Layout large allocas first to reduce fragmentation.
llvm-svn: 277544
-rw-r--r--  llvm/lib/CodeGen/SafeStackLayout.cpp            8
-rw-r--r--  llvm/test/Transforms/SafeStack/layout-frag.ll  39
2 files changed, 47 insertions, 0 deletions
diff --git a/llvm/lib/CodeGen/SafeStackLayout.cpp b/llvm/lib/CodeGen/SafeStackLayout.cpp
index fb433c1856a..7d4dbd13abf 100644
--- a/llvm/lib/CodeGen/SafeStackLayout.cpp
+++ b/llvm/lib/CodeGen/SafeStackLayout.cpp
@@ -132,6 +132,14 @@ void StackLayout::computeLayout() {
// If this is replaced with something smarter, it must preserve the property
// that the first object is always at the offset 0 in the stack frame (for
// StackProtectorSlot), or handle stack protector in some other way.
+
+ // Sort objects by size (largest first) to reduce fragmentation.
+ if (StackObjects.size() > 2)
+ std::stable_sort(StackObjects.begin() + 1, StackObjects.end(),
+ [](const StackObject &a, const StackObject &b) {
+ return a.Size > b.Size;
+ });
+
for (auto &Obj : StackObjects)
layoutObject(Obj);
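
For readers skimming the hunk above: the added code keeps the first stack object in place (it must remain at offset 0 for the stack protector slot) and stable-sorts the remaining objects by descending size before they are laid out. A minimal standalone sketch of the same ordering, using a toy StackObject struct rather than the pass's real type (which also carries the value, alignment, and live range), looks like this:

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

// Toy stand-in for the pass's stack object record, for illustration only.
struct StackObject {
  const char *Name;
  uint64_t Size;
};

// Same ordering as the hunk above: leave element 0 untouched and
// stable-sort the rest largest-first.
void sortLargestFirst(std::vector<StackObject> &Objects) {
  if (Objects.size() > 2)
    std::stable_sort(Objects.begin() + 1, Objects.end(),
                     [](const StackObject &A, const StackObject &B) {
                       return A.Size > B.Size;
                     });
}
```

With objects {guard(8), a(1), b(8), c(4)}, the resulting order is guard, b, c, a; equal-sized objects keep their relative order because the sort is stable.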
diff --git a/llvm/test/Transforms/SafeStack/layout-frag.ll b/llvm/test/Transforms/SafeStack/layout-frag.ll
new file mode 100644
index 00000000000..125eb0f8be9
--- /dev/null
+++ b/llvm/test/Transforms/SafeStack/layout-frag.ll
@@ -0,0 +1,39 @@
+; Test that safestack layout reuses a region w/o fragmentation.
+; RUN: opt -safe-stack -S -mtriple=x86_64-pc-linux-gnu < %s -o - | FileCheck %s
+
+define void @f() safestack {
+; CHECK-LABEL: define void @f
+entry:
+; CHECK: %[[USP:.*]] = load i8*, i8** @__safestack_unsafe_stack_ptr
+; CHECK: getelementptr i8, i8* %[[USP]], i32 -16
+
+ %x0 = alloca i64, align 8
+ %x1 = alloca i8, align 1
+ %x2 = alloca i64, align 8
+
+ %x0a = bitcast i64* %x0 to i8*
+ %x2a = bitcast i64* %x2 to i8*
+
+ call void @llvm.lifetime.start(i64 4, i8* %x0a)
+ call void @capture64(i64* %x0)
+ call void @llvm.lifetime.end(i64 4, i8* %x0a)
+
+ call void @llvm.lifetime.start(i64 4, i8* %x1)
+ call void @llvm.lifetime.start(i64 4, i8* %x2a)
+ call void @capture8(i8* %x1)
+ call void @capture64(i64* %x2)
+ call void @llvm.lifetime.end(i64 4, i8* %x1)
+ call void @llvm.lifetime.end(i64 4, i8* %x2a)
+
+; Test that i64 allocas share space.
+; CHECK: getelementptr i8, i8* %unsafe_stack_ptr, i32 -8
+; CHECK: getelementptr i8, i8* %unsafe_stack_ptr, i32 -9
+; CHECK: getelementptr i8, i8* %unsafe_stack_ptr, i32 -8
+
+ ret void
+}
+
+declare void @llvm.lifetime.start(i64, i8* nocapture)
+declare void @llvm.lifetime.end(i64, i8* nocapture)
+declare void @capture8(i8*)
+declare void @capture64(i64*)
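
The CHECK lines above expect both i64 allocas to land at the same offset (-8), with the i8 alloca at -9: x0 and x2 are never live at the same time, and the largest-first order places x2 before x1, so x2 can reuse x0's region. The sketch below is a rough, self-contained model of a first-fit layout with lifetime-based reuse; it is not the pass's actual StackLayout code, and the Obj struct and lifetime numbers are made up for illustration. Run on the three allocas from this test, it puts x0 and x2 in the same 8-byte region and x1 right after it:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Rough model of first-fit frame layout with lifetime-based reuse.
struct Obj {
  const char *Name;
  uint64_t Size, Align;
  int LiveBegin, LiveEnd; // half-open lifetime interval
  uint64_t Offset = 0;    // assigned frame offset
};

static bool livesOverlap(const Obj &A, const Obj &B) {
  return A.LiveBegin < B.LiveEnd && B.LiveBegin < A.LiveEnd;
}

static void layout(std::vector<Obj> &Objs) {
  std::vector<const Obj *> Placed;
  for (Obj &O : Objs) {
    uint64_t Start = 0;
    // First fit: bump past any already-placed object that overlaps
    // the candidate byte range and is live at the same time.
    bool Moved = true;
    while (Moved) {
      Moved = false;
      Start = (Start + O.Align - 1) / O.Align * O.Align;
      for (const Obj *P : Placed)
        if (livesOverlap(O, *P) && Start < P->Offset + P->Size &&
            P->Offset < Start + O.Size) {
          Start = P->Offset + P->Size;
          Moved = true;
        }
    }
    O.Offset = Start;
    Placed.push_back(&O);
  }
}

int main() {
  // The three allocas from the test, already in largest-first order
  // (x0, x2, x1); x0 and x2 have disjoint lifetimes.
  std::vector<Obj> Objs = {{"x0", 8, 8, 0, 1},
                           {"x2", 8, 8, 1, 2},
                           {"x1", 1, 1, 1, 2}};
  layout(Objs);
  for (const Obj &O : Objs)
    std::printf("%s -> [%llu, %llu)\n", O.Name,
                (unsigned long long)O.Offset,
                (unsigned long long)(O.Offset + O.Size));
  // Prints: x0 and x2 both at [0, 8); x1 at [8, 9).
}
```

With the unsorted order (x1 laid out before x2), x1 takes the first byte of the frame in this model and x2 has to move past it to the next 8-byte boundary, so the two i64s no longer share space; that is the fragmentation the commit avoids.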