; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -enable-ipra --mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc -enable-ipra --mtriple=i386-- < %s | FileCheck --check-prefix=X86 %s

; This test ensures that rbp/rbx/ebp/esi are correctly saved and restored before being clobbered in the callee when IPRA is enabled.

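; The inline asm clobbers ebp; the CHECK lines verify that the callee still emits a
; push/pop of rbp (ebp on i386) around the clobber.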
define internal void @callee_clobber_rbp() nounwind norecurse {
; X64-LABEL: callee_clobber_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebp, %ebp
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: callee_clobber_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    #APP
; X86-NEXT:    xorl %ebp, %ebp
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void asm sideeffect "xor %ebp, %ebp", "~{ebp}"()
  ret void
}

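; Same check for rbx on x86-64: the clobbered register must be saved and restored
; with a push/pop pair in the callee.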
define internal void @callee_clobber_rbx(ptr %addr) nounwind norecurse {
; X64-LABEL: callee_clobber_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebx, %ebx
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbx
; X64-NEXT:    retq
  call void asm sideeffect "xor %ebx, %ebx", "~{ebx}"()
  ret void
}

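; Same check for esi on i386.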
define internal void @callee_clobber_esi(ptr %addr) nounwind norecurse {
; X86-LABEL: callee_clobber_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    #APP
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
  call void asm sideeffect "xor %esi, %esi", "~{esi}"()
  ret void
}

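; The caller forces a frame pointer ("frame-pointer"="all") and stores to a local
; through rbp/ebp after the call, which is only safe because the callee above
; preserves the frame pointer.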
define void @caller_use_rbp() "frame-pointer"="all" nounwind {
; X64-LABEL: caller_use_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    subq $16, %rsp
; X64-NEXT:    callq callee_clobber_rbp
; X64-NEXT:    movl $5, -4(%rbp)
; X64-NEXT:    addq $16, %rsp
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: caller_use_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_rbp
; X86-NEXT:    movl $5, -4(%ebp)
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void @callee_clobber_rbp()
  %addr = alloca i32, align 4
  store i32 5, ptr %addr, align 4
  ret void
}

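; The over-aligned alloca plus the variable-sized alloca force stack realignment,
; so rbx is used as the base pointer on x86-64; reloading the stack guard from
; 32(%rbx) after the call relies on the callee preserving rbx.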
define void @caller_use_rbx(i32 %X) nounwind ssp {
; X64-LABEL: caller_use_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    pushq %rbx
; X64-NEXT:    andq $-32, %rsp
; X64-NEXT:    subq $64, %rsp
; X64-NEXT:    movq %rsp, %rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    movq %rax, 32(%rbx)
; X64-NEXT:    movq %rsp, %rax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    leaq 15(,%rcx,4), %rcx
; X64-NEXT:    andq $-16, %rcx
; X64-NEXT:    subq %rcx, %rax
; X64-NEXT:    movq %rax, %rsp
; X64-NEXT:    movq %rbx, %rdi
; X64-NEXT:    callq callee_clobber_rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    cmpq 32(%rbx), %rax
; X64-NEXT:    jne .LBB4_2
; X64-NEXT:  # %bb.1:
; X64-NEXT:    leaq -8(%rbp), %rsp
; X64-NEXT:    popq %rbx
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
; X64-NEXT:  .LBB4_2:
; X64-NEXT:    callq __stack_chk_fail@PLT
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_rbx(ptr %realign)
  ret void
}

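; Same scenario on i386, where esi is the base pointer; the stack-guard compare
; against 16(%esi) after the call relies on the callee preserving esi.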
define void @caller_use_esi(i32 %X) nounwind ssp {
; X86-LABEL: caller_use_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %esi
; X86-NEXT:    andl $-32, %esp
; X86-NEXT:    subl $32, %esp
; X86-NEXT:    movl %esp, %esi
; X86-NEXT:    movl 8(%ebp), %eax
; X86-NEXT:    movl __stack_chk_guard, %ecx
; X86-NEXT:    movl %ecx, 16(%esi)
; X86-NEXT:    movl %esp, %ecx
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    subl %eax, %ecx
; X86-NEXT:    movl %ecx, %esp
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_esi
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    movl __stack_chk_guard, %eax
; X86-NEXT:    cmpl 16(%esi), %eax
; X86-NEXT:    jne .LBB5_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    leal -4(%ebp), %esp
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
; X86-NEXT:  .LBB5_2:
; X86-NEXT:    calll __stack_chk_fail
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_esi(ptr %realign)
  ret void
}