[AMDGPU] Change constant addr space to 4

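Update the load-constant-i64.ll tests so kernels take their constant-buffer
pointers in addrspace(4) instead of addrspace(2), matching the new constant
address space number. As a minimal sketch only (the @example kernel below is
hypothetical and not part of the patch), a kernel reading constant memory now
looks like:

  define amdgpu_kernel void @example(i32 addrspace(1)* %out, i32 addrspace(4)* %in) {
    %v = load i32, i32 addrspace(4)* %in    ; constant address space is now 4
    store i32 %v, i32 addrspace(1)* %out    ; global address space stays 1
    ret void
  }
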
Differential Revision: https://reviews.llvm.org/D43170

llvm-svn: 325030
diff --git a/llvm/test/CodeGen/AMDGPU/load-constant-i64.ll b/llvm/test/CodeGen/AMDGPU/load-constant-i64.ll
index 14e50ea..6a49353 100644
--- a/llvm/test/CodeGen/AMDGPU/load-constant-i64.ll
+++ b/llvm/test/CodeGen/AMDGPU/load-constant-i64.ll
@@ -7,8 +7,8 @@
 ; FUNC-LABEL: {{^}}constant_load_i64:
 ; GCN: s_load_dwordx2 {{s\[[0-9]+:[0-9]+\]}}, {{s\[[0-9]+:[0-9]+\]}}, 0x0{{$}}
 ; EG: VTX_READ_64
-define amdgpu_kernel void @constant_load_i64(i64 addrspace(1)* %out, i64 addrspace(2)* %in) #0 {
-  %ld = load i64, i64 addrspace(2)* %in
+define amdgpu_kernel void @constant_load_i64(i64 addrspace(1)* %out, i64 addrspace(4)* %in) #0 {
+  %ld = load i64, i64 addrspace(4)* %in
   store i64 %ld, i64 addrspace(1)* %out
   ret void
 }
@@ -17,9 +17,9 @@
 ; GCN: s_load_dwordx4
 
 ; EG: VTX_READ_128
-define amdgpu_kernel void @constant_load_v2i64(<2 x i64> addrspace(1)* %out, <2 x i64> addrspace(2)* %in) #0 {
+define amdgpu_kernel void @constant_load_v2i64(<2 x i64> addrspace(1)* %out, <2 x i64> addrspace(4)* %in) #0 {
 entry:
-  %ld = load <2 x i64>, <2 x i64> addrspace(2)* %in
+  %ld = load <2 x i64>, <2 x i64> addrspace(4)* %in
   store <2 x i64> %ld, <2 x i64> addrspace(1)* %out
   ret void
 }
@@ -29,9 +29,9 @@
 
 ; EG-DAG: VTX_READ_128
 ; EG-DAG: VTX_READ_128
-define amdgpu_kernel void @constant_load_v3i64(<3 x i64> addrspace(1)* %out, <3 x i64> addrspace(2)* %in) #0 {
+define amdgpu_kernel void @constant_load_v3i64(<3 x i64> addrspace(1)* %out, <3 x i64> addrspace(4)* %in) #0 {
 entry:
-  %ld = load <3 x i64>, <3 x i64> addrspace(2)* %in
+  %ld = load <3 x i64>, <3 x i64> addrspace(4)* %in
   store <3 x i64> %ld, <3 x i64> addrspace(1)* %out
   ret void
 }
@@ -41,9 +41,9 @@
 
 ; EG: VTX_READ_128
 ; EG: VTX_READ_128
-define amdgpu_kernel void @constant_load_v4i64(<4 x i64> addrspace(1)* %out, <4 x i64> addrspace(2)* %in) #0 {
+define amdgpu_kernel void @constant_load_v4i64(<4 x i64> addrspace(1)* %out, <4 x i64> addrspace(4)* %in) #0 {
 entry:
-  %ld = load <4 x i64>, <4 x i64> addrspace(2)* %in
+  %ld = load <4 x i64>, <4 x i64> addrspace(4)* %in
   store <4 x i64> %ld, <4 x i64> addrspace(1)* %out
   ret void
 }
@@ -55,9 +55,9 @@
 ; EG: VTX_READ_128
 ; EG: VTX_READ_128
 ; EG: VTX_READ_128
-define amdgpu_kernel void @constant_load_v8i64(<8 x i64> addrspace(1)* %out, <8 x i64> addrspace(2)* %in) #0 {
+define amdgpu_kernel void @constant_load_v8i64(<8 x i64> addrspace(1)* %out, <8 x i64> addrspace(4)* %in) #0 {
 entry:
-  %ld = load <8 x i64>, <8 x i64> addrspace(2)* %in
+  %ld = load <8 x i64>, <8 x i64> addrspace(4)* %in
   store <8 x i64> %ld, <8 x i64> addrspace(1)* %out
   ret void
 }
@@ -74,9 +74,9 @@
 ; EG: VTX_READ_128
 ; EG: VTX_READ_128
 ; EG: VTX_READ_128
-define amdgpu_kernel void @constant_load_v16i64(<16 x i64> addrspace(1)* %out, <16 x i64> addrspace(2)* %in) #0 {
+define amdgpu_kernel void @constant_load_v16i64(<16 x i64> addrspace(1)* %out, <16 x i64> addrspace(4)* %in) #0 {
 entry:
-  %ld = load <16 x i64>, <16 x i64> addrspace(2)* %in
+  %ld = load <16 x i64>, <16 x i64> addrspace(4)* %in
   store <16 x i64> %ld, <16 x i64> addrspace(1)* %out
   ret void
 }