; RUN: llc -mtriple=aarch64-linux-gnu -verify-machineinstrs -o - %s | FileCheck %s
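
; A 64-bit "LDR xN, [xM, #:lo12:sym]" uses an unsigned 12-bit immediate that is
; scaled by 8, so the low 12 bits of the symbol's address must be a multiple of
; 8 for the :lo12: offset to be folded into the load. These tests check that
; llc only folds the offset when the global's alignment guarantees this, and
; otherwise materialises the full address with a separate ADD.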

@var32 = global [3 x i32] zeroinitializer
@var64 = global [3 x i64] zeroinitializer
@var32_align64 = global [3 x i32] zeroinitializer, align 8
@alias = alias [3 x i32], [3 x i32]* @var32_align64
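; @var32 gets only i32's natural 4-byte alignment, while @var64 and
; @var32_align64 are 8-byte aligned; @alias inherits the alignment of its
; aliasee @var32_align64.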
define i64 @test_align32() {
; CHECK-LABEL: test_align32:
  %addr = bitcast [3 x i32]* @var32 to i64*

; Since @var32 is only guaranteed to be 32-bit aligned, it's invalid to emit an
; "LDR x0, [x0, #:lo12:var32]" instruction to implement this load.
  %val = load i64, i64* %addr
; CHECK: adrp [[HIBITS:x[0-9]+]], var32
; CHECK: add x[[ADDR:[0-9]+]], [[HIBITS]], {{#?}}:lo12:var32
; CHECK: ldr x0, [x[[ADDR]]]

  ret i64 %val
}

define i64 @test_align64() {
; CHECK-LABEL: test_align64:
  %addr = bitcast [3 x i64]* @var64 to i64*

; However, @var64 *is* known to be 64-bit aligned, so a separate adrp/add/ldr
; sequence would be needlessly inefficient.
  %val = load i64, i64* %addr
; CHECK: adrp x[[HIBITS:[0-9]+]], var64
; CHECK-NOT: add x[[HIBITS]]
; CHECK: ldr x0, [x[[HIBITS]], {{#?}}:lo12:var64]

  ret i64 %val
}

define i64 @test_var32_align64() {
; CHECK-LABEL: test_var32_align64:
  %addr = bitcast [3 x i32]* @var32_align64 to i64*

; @var32_align64 is explicitly 64-bit aligned, so the :lo12: offset can be
; folded straight into the load.
  %val = load i64, i64* %addr
; CHECK: adrp x[[HIBITS:[0-9]+]], var32_align64
; CHECK-NOT: add x[[HIBITS]]
; CHECK: ldr x0, [x[[HIBITS]], {{#?}}:lo12:var32_align64]

  ret i64 %val
}

define i64 @test_var32_alias() {
; CHECK-LABEL: test_var32_alias:
  %addr = bitcast [3 x i32]* @alias to i64*

; Test that we can find the alignment for aliases: @alias resolves to
; @var32_align64, which is 64-bit aligned, so the load can be folded.
  %val = load i64, i64* %addr
; CHECK: adrp x[[HIBITS:[0-9]+]], alias
; CHECK-NOT: add x[[HIBITS]]
; CHECK: ldr x0, [x[[HIBITS]], {{#?}}:lo12:alias]

  ret i64 %val
}

@yet_another_var = external global {i32, i32}

define i64 @test_yet_another_var() {
; CHECK-LABEL: test_yet_another_var:

; @yet_another_var has a preferred alignment of 8, but that's not enough if
; we're linking against other objects: its ABI alignment is only 4, so the low
; 12 bits of its address aren't known to be a multiple of 8 and we can't fold
; the load.
  %val = load i64, i64* bitcast({i32, i32}* @yet_another_var to i64*)
; CHECK: adrp [[HIBITS:x[0-9]+]], yet_another_var
; CHECK: add x[[ADDR:[0-9]+]], [[HIBITS]], {{#?}}:lo12:yet_another_var
; CHECK: ldr x0, [x[[ADDR]]]
  ret i64 %val
}

define i64()* @test_functions() {
; CHECK-LABEL: test_functions:
; Here we only take the function's address; with no memory access there is
; nothing to fold, so a plain adrp/add pair is expected.
  ret i64()* @test_yet_another_var
; CHECK: adrp [[HIBITS:x[0-9]+]], test_yet_another_var
; CHECK: add x0, [[HIBITS]], {{#?}}:lo12:test_yet_another_var
}