author     Evgeniy Stepanov <eugeni.stepanov@gmail.com>    2014-02-14 11:55:53 +0000
committer  Evgeniy Stepanov <eugeni.stepanov@gmail.com>    2014-02-14 11:55:53 +0000
commit     f335bd9c69e1f42d1fc3844866622838c7fd3de6 (patch)
tree       bbc14e1658d626b336eaf128c7fadf51d92fbfe8
parent     ba869e79a050ada6a3a3fbeeb275313ab8fdf438 (diff)
[asan] Added assembly functions for x86/amd64 asan.
These are runtime support functions for inline assembly instrumentation.
Patch by Yuri Gorshenin.
llvm-svn: 201402
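
Each helper takes the address of the guarded memory access as its only argument (in %rdi on x86-64, pushed on the stack on i386), loads the corresponding shadow byte at the fixed shadow offset (0x7fff8000 on x86-64, 0x20000000 on i386), and calls the matching __asan_report_(load|store)N routine if the access is poisoned; the registers and flags touched by the check are saved and restored. A minimal sketch of how instrumented inline assembly invokes one of these helpers on x86-64, mirroring the test macros added to asan_test.cc below (the wrapper name instrumented_store4 is illustrative only):

  // Sketch: an instrumented 4-byte store on x86-64. The target address is
  // materialized in %rdi, the runtime check is called, then the real store
  // is performed.
  static void instrumented_store4(unsigned *ptr, unsigned val) {
    __asm__(
        "leaq (%[ptr]), %%rdi             \n\t"
        "call __sanitizer_sanitize_store4 \n\t"
        "movl %[val], (%[ptr])            \n\t"
        :
        : [ptr] "r" (ptr), [val] "r" (val)
        : "memory", "rdi");
  }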
-rw-r--r--  compiler-rt/lib/asan/CMakeLists.txt                       |   4
-rw-r--r--  compiler-rt/lib/asan/asan_asm_instrumentation.S           | 451
-rwxr-xr-x  compiler-rt/lib/asan/scripts/gen_asm_instrumentation.sh  | 215
-rw-r--r--  compiler-rt/lib/asan/tests/asan_test.cc                   | 199
4 files changed, 869 insertions, 0 deletions
diff --git a/compiler-rt/lib/asan/CMakeLists.txt b/compiler-rt/lib/asan/CMakeLists.txt
index 52eb8efe6ec..8f329531d59 100644
--- a/compiler-rt/lib/asan/CMakeLists.txt
+++ b/compiler-rt/lib/asan/CMakeLists.txt
@@ -50,6 +50,10 @@ endif()
 
 filter_available_targets(ASAN_SUPPORTED_ARCH x86_64 i386 powerpc64)
 
+set(ASAN_ASM_SOURCES asan_asm_instrumentation.S)
+set_source_files_properties(${ASAN_ASM_SOURCES} PROPERTIES LANGUAGE C)
+list(APPEND ASAN_SOURCES ${ASAN_ASM_SOURCES})
+
 # Compile ASan sources into an object library.
 if(APPLE)
   foreach(os ${SANITIZER_COMMON_SUPPORTED_DARWIN_OS})
diff --git a/compiler-rt/lib/asan/asan_asm_instrumentation.S b/compiler-rt/lib/asan/asan_asm_instrumentation.S
new file mode 100644
index 00000000000..7de45825c84
--- /dev/null
+++ b/compiler-rt/lib/asan/asan_asm_instrumentation.S
@@ -0,0 +1,451 @@
+// This file was generated by gen_asm_instrumentation.sh. Please, do not edit
+// manually.
+.section .text
+#if defined(__i386__)
+.globl __sanitizer_sanitize_store1
+__sanitizer_sanitize_store1:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_store1_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_store1_done
+ pushl %eax
+ call __asan_report_store1
+.sanitize_store1_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_load1
+__sanitizer_sanitize_load1:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_load1_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_load1_done
+ pushl %eax
+ call __asan_report_load1
+.sanitize_load1_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_store2
+__sanitizer_sanitize_store2:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_store2_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ incl %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_store2_done
+ pushl %eax
+ call __asan_report_store2
+.sanitize_store2_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_load2
+__sanitizer_sanitize_load2:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_load2_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ incl %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_load2_done
+ pushl %eax
+ call __asan_report_load2
+.sanitize_load2_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_store4
+__sanitizer_sanitize_store4:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_store4_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ addl $0x3, %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_store4_done
+ pushl %eax
+ call __asan_report_store4
+.sanitize_store4_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_load4
+__sanitizer_sanitize_load4:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je .sanitize_load4_done
+ movl %eax, %edx
+ andl $0x7, %edx
+ addl $0x3, %edx
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl .sanitize_load4_done
+ pushl %eax
+ call __asan_report_load4
+.sanitize_load4_done:
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_store8
+__sanitizer_sanitize_store8:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ cmpb $0x0, 0x20000000(%ecx)
+ je .sanitize_store8_done
+ pushl %eax
+ call __asan_report_store8
+.sanitize_store8_done:
+ popfl
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_load8
+__sanitizer_sanitize_load8:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ cmpb $0x0, 0x20000000(%ecx)
+ je .sanitize_load8_done
+ pushl %eax
+ call __asan_report_load8
+.sanitize_load8_done:
+ popfl
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_store16
+__sanitizer_sanitize_store16:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ cmpw $0x0, 0x20000000(%ecx)
+ je .sanitize_store16_done
+ pushl %eax
+ call __asan_report_store16
+.sanitize_store16_done:
+ popfl
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+.globl __sanitizer_sanitize_load16
+__sanitizer_sanitize_load16:
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl $0x3, %ecx
+ cmpw $0x0, 0x20000000(%ecx)
+ je .sanitize_load16_done
+ pushl %eax
+ call __asan_report_load16
+.sanitize_load16_done:
+ popfl
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+#endif // defined(__i386__)
+#if defined(__x86_64__)
+.globl __sanitizer_sanitize_store1
+__sanitizer_sanitize_store1:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_store1_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_store1_done
+ call __asan_report_store1
+.sanitize_store1_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_load1
+__sanitizer_sanitize_load1:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_load1_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_load1_done
+ call __asan_report_load1
+.sanitize_load1_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_store2
+__sanitizer_sanitize_store2:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_store2_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ incl %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_store2_done
+ call __asan_report_store2
+.sanitize_store2_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_load2
+__sanitizer_sanitize_load2:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_load2_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ incl %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_load2_done
+ call __asan_report_load2
+.sanitize_load2_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_store4
+__sanitizer_sanitize_store4:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_store4_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ addl $0x3, %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_store4_done
+ call __asan_report_store4
+.sanitize_store4_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_load4
+__sanitizer_sanitize_load4:
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je .sanitize_load4_done
+ movl %edi, %ecx
+ andl $0x7, %ecx
+ addl $0x3, %ecx
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl .sanitize_load4_done
+ call __asan_report_load4
+.sanitize_load4_done:
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_store8
+__sanitizer_sanitize_store8:
+ pushq %rax
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ cmpb $0x0, 0x7fff8000(%rax)
+ je .sanitize_store8_done
+ call __asan_report_store8
+.sanitize_store8_done:
+ popfq
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_load8
+__sanitizer_sanitize_load8:
+ pushq %rax
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ cmpb $0x0, 0x7fff8000(%rax)
+ je .sanitize_load8_done
+ call __asan_report_load8
+.sanitize_load8_done:
+ popfq
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_store16
+__sanitizer_sanitize_store16:
+ pushq %rax
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ cmpw $0x0, 0x7fff8000(%rax)
+ je .sanitize_store16_done
+ call __asan_report_store16
+.sanitize_store16_done:
+ popfq
+ popq %rax
+ ret
+.globl __sanitizer_sanitize_load16
+__sanitizer_sanitize_load16:
+ pushq %rax
+ pushfq
+ movq %rdi, %rax
+ shrq $0x3, %rax
+ cmpw $0x0, 0x7fff8000(%rax)
+ je .sanitize_load16_done
+ call __asan_report_load16
+.sanitize_load16_done:
+ popfq
+ popq %rax
+ ret
+#endif // defined(__x86_64__)
diff --git a/compiler-rt/lib/asan/scripts/gen_asm_instrumentation.sh b/compiler-rt/lib/asan/scripts/gen_asm_instrumentation.sh
new file mode 100755
index 00000000000..e423a53d81e
--- /dev/null
+++ b/compiler-rt/lib/asan/scripts/gen_asm_instrumentation.sh
@@ -0,0 +1,215 @@
+#!/bin/bash
+
+#===- lib/asan/scripts/gen_asm_instrumentation.sh -------------------------===#
+#
+# The LLVM Compiler Infrastructure
+#
+# This file is distributed under the University of Illinois Open Source
+# License. See LICENSE.TXT for details.
+#
+# Emit x86 instrumentation functions for asan.
+#
+#===-----------------------------------------------------------------------===#
+
+
+check() {
+  test $# -eq 2 || (echo "Incorrect number of arguments: $#" 1>&2 && exit 1)
+  case "$1" in
+    store) ;;
+    load) ;;
+    *) echo "Incorrect first argument: $1" 1>&2 && exit 1 ;;
+  esac
+  case "$2" in
+    [0-9]*) ;;
+    *) echo "Incorrect second argument: $2" 1>&2 && exit 1 ;;
+  esac
+}
+
+fname() {
+  check $1 $2
+  echo "__sanitizer_sanitize_$1$2"
+}
+
+flabel() {
+  check $1 $2
+  echo ".sanitize_$1$2_done"
+}
+
+freport() {
+  check $1 $2
+  echo "__asan_report_$1$2"
+}
+
+cat <<EOF
+// This file was generated by $(basename $0). Please, do not edit
+// manually.
+EOF
+
+echo ".section .text"
+
+echo "#if defined(__i386__)"
+
+# Functions for i386 1-, 2- and 4-byte accesses.
+for as in 1 2 4
+do
+  for at in store load
+  do
+cat <<EOF
+.globl $(fname $at $as)
+$(fname $at $as):
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushl %edx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl \$0x3, %ecx
+ movb 0x20000000(%ecx), %cl
+ testb %cl, %cl
+ je $(flabel $at $as)
+ movl %eax, %edx
+ andl \$0x7, %edx
+EOF
+
+  case $as in
+    1) ;;
+    2) echo ' incl %edx' ;;
+    4) echo ' addl $0x3, %edx' ;;
+    *) echo "Incorrect access size: $as" 1>&2; exit 1 ;;
+  esac
+
+cat <<EOF
+ movsbl %cl, %ecx
+ cmpl %ecx, %edx
+ jl $(flabel $at $as)
+ pushl %eax
+ call $(freport $at $as)
+$(flabel $at $as):
+ popfl
+ popl %edx
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+EOF
+  done
+done
+
+# Functions for i386 8- and 16-byte accesses.
+for as in 8 16
+do
+  for at in store load
+  do
+cat <<EOF
+.globl $(fname $at $as)
+$(fname $at $as):
+ pushl %ebp
+ movl %esp, %ebp
+ pushl %eax
+ pushl %ecx
+ pushfl
+ movl 8(%ebp), %eax
+ movl %eax, %ecx
+ shrl \$0x3, %ecx
+EOF
+
+  case ${as} in
+    8) echo ' cmpb $0x0, 0x20000000(%ecx)' ;;
+    16) echo ' cmpw $0x0, 0x20000000(%ecx)' ;;
+    *) echo "Incorrect access size: ${as}" 1>&2; exit 1 ;;
+  esac
+
+cat <<EOF
+ je $(flabel $at $as)
+ pushl %eax
+ call $(freport $at $as)
+$(flabel $at $as):
+ popfl
+ popl %ecx
+ popl %eax
+ movl %ebp, %esp
+ popl %ebp
+ ret
+EOF
+  done
+done
+
+echo "#endif // defined(__i386__)"
+
+echo "#if defined(__x86_64__)"
+
+# Functions for x86-64 1-, 2- and 4-byte accesses.
+for as in 1 2 4
+do
+  for at in store load
+  do
+cat <<EOF
+.globl $(fname $at $as)
+$(fname $at $as):
+ pushq %rax
+ pushq %rcx
+ pushfq
+ movq %rdi, %rax
+ shrq \$0x3, %rax
+ movb 0x7fff8000(%rax), %al
+ test %al, %al
+ je $(flabel $at $as)
+ movl %edi, %ecx
+ andl \$0x7, %ecx
+EOF
+
+  case ${as} in
+    1) ;;
+    2) echo ' incl %ecx' ;;
+    4) echo ' addl $0x3, %ecx' ;;
+    *) echo "Incorrect access size: ${as}" 1>&2; exit 1 ;;
+  esac
+
+cat <<EOF
+ movsbl %al, %eax
+ cmpl %eax, %ecx
+ jl $(flabel $at $as)
+ call $(freport $at $as)
+$(flabel $at $as):
+ popfq
+ popq %rcx
+ popq %rax
+ ret
+EOF
+  done
+done
+
+# Functions for x86-64 8- and 16-byte accesses.
+for as in 8 16
+do
+  for at in store load
+  do
+cat <<EOF
+.globl $(fname $at $as)
+$(fname $at $as):
+ pushq %rax
+ pushfq
+ movq %rdi, %rax
+ shrq \$0x3, %rax
+EOF
+
+  case ${as} in
+    8) echo ' cmpb $0x0, 0x7fff8000(%rax)' ;;
+    16) echo ' cmpw $0x0, 0x7fff8000(%rax)' ;;
+    *) echo "Incorrect access size: ${as}" 1>&2; exit 1 ;;
+  esac
+
+cat <<EOF
+ je $(flabel $at $as)
+ call $(freport $at $as)
+$(flabel $at $as):
+ popfq
+ popq %rax
+ ret
+EOF
+  done
+done
+echo "#endif // defined(__x86_64__)"
diff --git a/compiler-rt/lib/asan/tests/asan_test.cc b/compiler-rt/lib/asan/tests/asan_test.cc
index af7c471883b..587d85bea74 100644
--- a/compiler-rt/lib/asan/tests/asan_test.cc
+++ b/compiler-rt/lib/asan/tests/asan_test.cc
@@ -1244,3 +1244,202 @@ TEST(AddressSanitizer, pthread_getschedparam) {
   int res = pthread_getschedparam(pthread_self(), &policy, &param);
   ASSERT_EQ(0, res);
 }
+
+// Tests for __sanitizer_sanitize_(store|load)N functions in compiler-rt.
+
+#if defined(__x86_64__) || (defined(__i386__) && defined(__SSE2__))
+
+#include <emmintrin.h>
+
+namespace {
+
+template<typename T> void asm_write(T *ptr, T val);
+template<typename T> T asm_read(T *ptr);
+
+} // End of anonymous namespace
+
+#endif // defined(__x86_64__) || (defined(__i386__) && defined(__SSE2__))
+
+#if defined(__x86_64__)
+
+namespace {
+
+#define DECLARE_ASM_WRITE(Type, Size, Mov, Reg) \
+template<> void asm_write<Type>(Type *ptr, Type val) { \
+  __asm__( \
+    "leaq (%[ptr]), %%rdi \n\t" \
+    "call __sanitizer_sanitize_store" Size " \n\t" \
+    Mov " %[val], (%[ptr]) \n\t" \
+    : \
+    : [ptr] "r" (ptr), [val] Reg (val) \
+    : "memory", "rdi" \
+  ); \
+}
+
+#define DECLARE_ASM_READ(Type, Size, Mov, Reg) \
+template<> Type asm_read<Type>(Type *ptr) { \
+  Type res; \
+  __asm__( \
+    "leaq (%[ptr]), %%rdi \n\t" \
+    "call __sanitizer_sanitize_load" Size " \n\t" \
+    Mov " (%[ptr]), %[res] \n\t" \
+    : [res] Reg (res) \
+    : [ptr] "r" (ptr) \
+    : "memory", "rdi" \
+  ); \
+  return res; \
+}
+
+DECLARE_ASM_WRITE(U8, "8", "movq", "r");
+DECLARE_ASM_READ(U8, "8", "movq", "=r");
+
+} // End of anonymous namespace
+
+#endif // defined(__x86_64__)
+
+#if defined(__i386__) && defined(__SSE2__)
+
+namespace {
+
+#define DECLARE_ASM_WRITE(Type, Size, Mov, Reg) \
+template<> void asm_write<Type>(Type *ptr, Type val) { \
+  __asm__( \
+    "leal (%[ptr]), %%eax \n\t" \
+    "pushl %%eax \n\t" \
+    "call __sanitizer_sanitize_store" Size " \n\t" \
+    "popl %%eax \n\t" \
+    Mov " %[val], (%[ptr]) \n\t" \
+    : \
+    : [ptr] "r" (ptr), [val] Reg (val) \
+    : "memory", "eax", "esp" \
+  ); \
+}
+
+#define DECLARE_ASM_READ(Type, Size, Mov, Reg) \
+template<> Type asm_read<Type>(Type *ptr) { \
+  Type res; \
+  __asm__( \
+    "leal (%[ptr]), %%eax \n\t" \
+    "pushl %%eax \n\t" \
+    "call __sanitizer_sanitize_load" Size " \n\t" \
+    "popl %%eax \n\t" \
+    Mov " (%[ptr]), %[res] \n\t" \
+    : [res] Reg (res) \
+    : [ptr] "r" (ptr) \
+    : "memory", "eax", "esp" \
+  ); \
+  return res; \
+}
+
+template<> void asm_write<U8>(U8 *ptr, U8 val) {
+  __asm__(
+    "leal (%[ptr]), %%eax \n\t"
+    "pushl %%eax \n\t"
+    "call __sanitizer_sanitize_store8 \n\t"
+    "popl %%eax \n\t"
+    "movl (%[val]), %%eax \n\t"
+    "movl %%eax, (%[ptr]) \n\t"
+    "movl 0x4(%[val]), %%eax \n\t"
+    "movl %%eax, 0x4(%[ptr]) \n\t"
+    :
+    : [ptr] "r" (ptr), [val] "r" (&val)
+    : "memory", "eax", "esp"
+  );
+}
+
+template<> U8 asm_read(U8 *ptr) {
+  U8 res;
+  __asm__(
+    "leal (%[ptr]), %%eax \n\t"
+    "pushl %%eax \n\t"
+    "call __sanitizer_sanitize_load8 \n\t"
+    "popl %%eax \n\t"
+    "movl (%[ptr]), %%eax \n\t"
+    "movl %%eax, (%[res]) \n\t"
+    "movl 0x4(%[ptr]), %%eax \n\t"
+    "movl %%eax, 0x4(%[res]) \n\t"
+    :
+    : [ptr] "r" (ptr), [res] "r" (&res)
+    : "memory", "eax", "esp"
+  );
+  return res;
+}
+
+} // End of anonymous namespace
+
+#endif // defined(__i386__) && defined(__SSE2__)
+
+#if defined(__x86_64__) || (defined(__i386__) && defined(__SSE2__))
+
+namespace {
+
+DECLARE_ASM_WRITE(U1, "1", "movb", "r");
+DECLARE_ASM_WRITE(U2, "2", "movw", "r");
+DECLARE_ASM_WRITE(U4, "4", "movl", "r");
+DECLARE_ASM_WRITE(__m128i, "16", "movaps", "x");
+
+DECLARE_ASM_READ(U1, "1", "movb", "=r");
+DECLARE_ASM_READ(U2, "2", "movw", "=r");
+DECLARE_ASM_READ(U4, "4", "movl", "=r");
+DECLARE_ASM_READ(__m128i, "16", "movaps", "=x");
+
+template<typename T> void TestAsmWrite(const char *DeathPattern) {
+  T *buf = new T;
+  EXPECT_DEATH(asm_write(&buf[1], static_cast<T>(0)), DeathPattern);
+  T var = 0x12;
+  asm_write(&var, static_cast<T>(0x21));
+  ASSERT_EQ(static_cast<T>(0x21), var);
+  delete buf;
+}
+
+template<> void TestAsmWrite<__m128i>(const char *DeathPattern) {
+  char *buf = new char[16];
+  char *p = buf + 16;
+  if (((uintptr_t) p % 16) != 0)
+    p = buf + 8;
+  assert(((uintptr_t) p % 16) == 0);
+  __m128i val = _mm_set1_epi16(0x1234);
+  EXPECT_DEATH(asm_write<__m128i>((__m128i*) p, val), DeathPattern);
+  __m128i var = _mm_set1_epi16(0x4321);
+  asm_write(&var, val);
+  ASSERT_EQ(0x1234, _mm_extract_epi16(var, 0));
+  delete [] buf;
+}
+
+template<typename T> void TestAsmRead(const char *DeathPattern) {
+  T *buf = new T;
+  EXPECT_DEATH(asm_read(&buf[1]), DeathPattern);
+  T var = 0x12;
+  ASSERT_EQ(static_cast<T>(0x12), asm_read(&var));
+  delete buf;
+}
+
+template<> void TestAsmRead<__m128i>(const char *DeathPattern) {
+  char *buf = new char[16];
+  char *p = buf + 16;
+  if (((uintptr_t) p % 16) != 0)
+    p = buf + 8;
+  assert(((uintptr_t) p % 16) == 0);
+  EXPECT_DEATH(asm_read<__m128i>((__m128i*) p), DeathPattern);
+  __m128i val = _mm_set1_epi16(0x1234);
+  ASSERT_EQ(0x1234, _mm_extract_epi16(asm_read(&val), 0));
+  delete [] buf;
+}
+
+} // End of anonymous namespace
+
+TEST(AddressSanitizer, asm_rw) {
+  TestAsmWrite<U1>("WRITE of size 1");
+  TestAsmWrite<U2>("WRITE of size 2");
+  TestAsmWrite<U4>("WRITE of size 4");
+  TestAsmWrite<U8>("WRITE of size 8");
+  TestAsmWrite<__m128i>("WRITE of size 16");
+
+  TestAsmRead<U1>("READ of size 1");
+  TestAsmRead<U2>("READ of size 2");
+  TestAsmRead<U4>("READ of size 4");
+  TestAsmRead<U8>("READ of size 8");
+  TestAsmRead<__m128i>("READ of size 16");
+}
+
+#endif // defined(__x86_64__) || (defined(__i386__) && defined(__SSE2__))