/* NG4memcpy.S — (git web-view blame/blob header removed during cleanup) */
David S. Millerae2c6ca2012-09-26 21:11:01 -07001/* NG4memcpy.S: Niagara-4 optimized memcpy.
2 *
3 * Copyright (C) 2012 David S. Miller (davem@davemloft.net)
4 */
5
6#ifdef __KERNEL__
David S. Miller957077042016-10-24 18:58:05 -07007#include <linux/linkage.h>
David S. Millerae2c6ca2012-09-26 21:11:01 -07008#include <asm/visasm.h>
9#include <asm/asi.h>
10#define GLOBAL_SPARE %g7
11#else
12#define ASI_BLK_INIT_QUAD_LDD_P 0xe2
13#define FPRS_FEF 0x04
14
15/* On T4 it is very expensive to access ASRs like %fprs and
16 * %asi, avoiding a read or a write can save ~50 cycles.
17 */
18#define FPU_ENTER \
19 rd %fprs, %o5; \
20 andcc %o5, FPRS_FEF, %g0; \
21 be,a,pn %icc, 999f; \
22 wr %g0, FPRS_FEF, %fprs; \
23 999:
24
25#ifdef MEMCPY_DEBUG
26#define VISEntryHalf FPU_ENTER; \
27 clr %g1; clr %g2; clr %g3; clr %g5; subcc %g0, %g0, %g0;
28#define VISExitHalf and %o5, FPRS_FEF, %o5; wr %o5, 0x0, %fprs
29#else
30#define VISEntryHalf FPU_ENTER
31#define VISExitHalf and %o5, FPRS_FEF, %o5; wr %o5, 0x0, %fprs
32#endif
33
34#define GLOBAL_SPARE %g5
35#endif
36
37#ifndef STORE_ASI
38#ifndef SIMULATE_NIAGARA_ON_NON_NIAGARA
39#define STORE_ASI ASI_BLK_INIT_QUAD_LDD_P
40#else
41#define STORE_ASI 0x80 /* ASI_P */
42#endif
43#endif
44
David S. Millerf4da3622014-10-14 19:37:58 -070045#if !defined(EX_LD) && !defined(EX_ST)
46#define NON_USER_COPY
47#endif
48
David S. Millerae2c6ca2012-09-26 21:11:01 -070049#ifndef EX_LD
David S. Miller957077042016-10-24 18:58:05 -070050#define EX_LD(x,y) x
David S. Millerae2c6ca2012-09-26 21:11:01 -070051#endif
Rob Gardnera7c57242015-12-22 23:24:49 -070052#ifndef EX_LD_FP
David S. Miller957077042016-10-24 18:58:05 -070053#define EX_LD_FP(x,y) x
Rob Gardnera7c57242015-12-22 23:24:49 -070054#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -070055
56#ifndef EX_ST
David S. Miller957077042016-10-24 18:58:05 -070057#define EX_ST(x,y) x
David S. Millerae2c6ca2012-09-26 21:11:01 -070058#endif
Rob Gardnera7c57242015-12-22 23:24:49 -070059#ifndef EX_ST_FP
David S. Miller957077042016-10-24 18:58:05 -070060#define EX_ST_FP(x,y) x
Rob Gardnera7c57242015-12-22 23:24:49 -070061#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -070062
David S. Millerae2c6ca2012-09-26 21:11:01 -070063
64#ifndef LOAD
65#define LOAD(type,addr,dest) type [addr], dest
66#endif
67
68#ifndef STORE
69#ifndef MEMCPY_DEBUG
70#define STORE(type,src,addr) type src, [addr]
71#else
72#define STORE(type,src,addr) type##a src, [addr] %asi
73#endif
74#endif
75
76#ifndef STORE_INIT
77#define STORE_INIT(src,addr) stxa src, [addr] STORE_ASI
78#endif
79
80#ifndef FUNC_NAME
81#define FUNC_NAME NG4memcpy
82#endif
83#ifndef PREAMBLE
84#define PREAMBLE
85#endif
86
87#ifndef XCC
88#define XCC xcc
89#endif
90
91 .register %g2,#scratch
92 .register %g3,#scratch
93
94 .text
David S. Miller957077042016-10-24 18:58:05 -070095#ifndef EX_RETVAL
96#define EX_RETVAL(x) x
97__restore_asi_fp:
98 VISExitHalf
99__restore_asi:
100 retl
101 wr %g0, ASI_AIUS, %asi
102
103ENTRY(NG4_retl_o2)
104 ba,pt %xcc, __restore_asi
105 mov %o2, %o0
106ENDPROC(NG4_retl_o2)
107ENTRY(NG4_retl_o2_plus_1)
108 ba,pt %xcc, __restore_asi
109 add %o2, 1, %o0
110ENDPROC(NG4_retl_o2_plus_1)
111ENTRY(NG4_retl_o2_plus_4)
112 ba,pt %xcc, __restore_asi
113 add %o2, 4, %o0
114ENDPROC(NG4_retl_o2_plus_4)
115ENTRY(NG4_retl_o2_plus_o5)
116 ba,pt %xcc, __restore_asi
117 add %o2, %o5, %o0
118ENDPROC(NG4_retl_o2_plus_o5)
119ENTRY(NG4_retl_o2_plus_o5_plus_4)
120 add %o5, 4, %o5
121 ba,pt %xcc, __restore_asi
122 add %o2, %o5, %o0
123ENDPROC(NG4_retl_o2_plus_o5_plus_4)
124ENTRY(NG4_retl_o2_plus_o5_plus_8)
125 add %o5, 8, %o5
126 ba,pt %xcc, __restore_asi
127 add %o2, %o5, %o0
128ENDPROC(NG4_retl_o2_plus_o5_plus_8)
129ENTRY(NG4_retl_o2_plus_o5_plus_16)
130 add %o5, 16, %o5
131 ba,pt %xcc, __restore_asi
132 add %o2, %o5, %o0
133ENDPROC(NG4_retl_o2_plus_o5_plus_16)
134ENTRY(NG4_retl_o2_plus_o5_plus_24)
135 add %o5, 24, %o5
136 ba,pt %xcc, __restore_asi
137 add %o2, %o5, %o0
138ENDPROC(NG4_retl_o2_plus_o5_plus_24)
139ENTRY(NG4_retl_o2_plus_o5_plus_32)
140 add %o5, 32, %o5
141 ba,pt %xcc, __restore_asi
142 add %o2, %o5, %o0
143ENDPROC(NG4_retl_o2_plus_o5_plus_32)
144ENTRY(NG4_retl_o2_plus_g1)
145 ba,pt %xcc, __restore_asi
146 add %o2, %g1, %o0
147ENDPROC(NG4_retl_o2_plus_g1)
148ENTRY(NG4_retl_o2_plus_g1_plus_1)
149 add %g1, 1, %g1
150 ba,pt %xcc, __restore_asi
151 add %o2, %g1, %o0
152ENDPROC(NG4_retl_o2_plus_g1_plus_1)
153ENTRY(NG4_retl_o2_plus_g1_plus_8)
154 add %g1, 8, %g1
155 ba,pt %xcc, __restore_asi
156 add %o2, %g1, %o0
157ENDPROC(NG4_retl_o2_plus_g1_plus_8)
158ENTRY(NG4_retl_o2_plus_o4)
159 ba,pt %xcc, __restore_asi
160 add %o2, %o4, %o0
161ENDPROC(NG4_retl_o2_plus_o4)
162ENTRY(NG4_retl_o2_plus_o4_plus_8)
163 add %o4, 8, %o4
164 ba,pt %xcc, __restore_asi
165 add %o2, %o4, %o0
166ENDPROC(NG4_retl_o2_plus_o4_plus_8)
167ENTRY(NG4_retl_o2_plus_o4_plus_16)
168 add %o4, 16, %o4
169 ba,pt %xcc, __restore_asi
170 add %o2, %o4, %o0
171ENDPROC(NG4_retl_o2_plus_o4_plus_16)
172ENTRY(NG4_retl_o2_plus_o4_plus_24)
173 add %o4, 24, %o4
174 ba,pt %xcc, __restore_asi
175 add %o2, %o4, %o0
176ENDPROC(NG4_retl_o2_plus_o4_plus_24)
177ENTRY(NG4_retl_o2_plus_o4_plus_32)
178 add %o4, 32, %o4
179 ba,pt %xcc, __restore_asi
180 add %o2, %o4, %o0
181ENDPROC(NG4_retl_o2_plus_o4_plus_32)
182ENTRY(NG4_retl_o2_plus_o4_plus_40)
183 add %o4, 40, %o4
184 ba,pt %xcc, __restore_asi
185 add %o2, %o4, %o0
186ENDPROC(NG4_retl_o2_plus_o4_plus_40)
187ENTRY(NG4_retl_o2_plus_o4_plus_48)
188 add %o4, 48, %o4
189 ba,pt %xcc, __restore_asi
190 add %o2, %o4, %o0
191ENDPROC(NG4_retl_o2_plus_o4_plus_48)
192ENTRY(NG4_retl_o2_plus_o4_plus_56)
193 add %o4, 56, %o4
194 ba,pt %xcc, __restore_asi
195 add %o2, %o4, %o0
196ENDPROC(NG4_retl_o2_plus_o4_plus_56)
197ENTRY(NG4_retl_o2_plus_o4_plus_64)
198 add %o4, 64, %o4
199 ba,pt %xcc, __restore_asi
200 add %o2, %o4, %o0
201ENDPROC(NG4_retl_o2_plus_o4_plus_64)
202ENTRY(NG4_retl_o2_plus_o4_fp)
203 ba,pt %xcc, __restore_asi_fp
204 add %o2, %o4, %o0
205ENDPROC(NG4_retl_o2_plus_o4_fp)
206ENTRY(NG4_retl_o2_plus_o4_plus_8_fp)
207 add %o4, 8, %o4
208 ba,pt %xcc, __restore_asi_fp
209 add %o2, %o4, %o0
210ENDPROC(NG4_retl_o2_plus_o4_plus_8_fp)
211ENTRY(NG4_retl_o2_plus_o4_plus_16_fp)
212 add %o4, 16, %o4
213 ba,pt %xcc, __restore_asi_fp
214 add %o2, %o4, %o0
215ENDPROC(NG4_retl_o2_plus_o4_plus_16_fp)
216ENTRY(NG4_retl_o2_plus_o4_plus_24_fp)
217 add %o4, 24, %o4
218 ba,pt %xcc, __restore_asi_fp
219 add %o2, %o4, %o0
220ENDPROC(NG4_retl_o2_plus_o4_plus_24_fp)
221ENTRY(NG4_retl_o2_plus_o4_plus_32_fp)
222 add %o4, 32, %o4
223 ba,pt %xcc, __restore_asi_fp
224 add %o2, %o4, %o0
225ENDPROC(NG4_retl_o2_plus_o4_plus_32_fp)
226ENTRY(NG4_retl_o2_plus_o4_plus_40_fp)
227 add %o4, 40, %o4
228 ba,pt %xcc, __restore_asi_fp
229 add %o2, %o4, %o0
230ENDPROC(NG4_retl_o2_plus_o4_plus_40_fp)
231ENTRY(NG4_retl_o2_plus_o4_plus_48_fp)
232 add %o4, 48, %o4
233 ba,pt %xcc, __restore_asi_fp
234 add %o2, %o4, %o0
235ENDPROC(NG4_retl_o2_plus_o4_plus_48_fp)
236ENTRY(NG4_retl_o2_plus_o4_plus_56_fp)
237 add %o4, 56, %o4
238 ba,pt %xcc, __restore_asi_fp
239 add %o2, %o4, %o0
240ENDPROC(NG4_retl_o2_plus_o4_plus_56_fp)
241ENTRY(NG4_retl_o2_plus_o4_plus_64_fp)
242 add %o4, 64, %o4
243 ba,pt %xcc, __restore_asi_fp
244 add %o2, %o4, %o0
245ENDPROC(NG4_retl_o2_plus_o4_plus_64_fp)
246#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -0700247 .align 64
248
249 .globl FUNC_NAME
250 .type FUNC_NAME,#function
251FUNC_NAME: /* %o0=dst, %o1=src, %o2=len */
252#ifdef MEMCPY_DEBUG
253 wr %g0, 0x80, %asi
254#endif
255 srlx %o2, 31, %g2
256 cmp %g2, 0
257 tne %XCC, 5
258 PREAMBLE
259 mov %o0, %o3
260 brz,pn %o2, .Lexit
261 cmp %o2, 3
262 ble,pn %icc, .Ltiny
263 cmp %o2, 19
264 ble,pn %icc, .Lsmall
265 or %o0, %o1, %g2
266 cmp %o2, 128
267 bl,pn %icc, .Lmedium
268 nop
269
270.Llarge:/* len >= 0x80 */
271 /* First get dest 8 byte aligned. */
272 sub %g0, %o0, %g1
273 and %g1, 0x7, %g1
274 brz,pt %g1, 51f
275 sub %o2, %g1, %o2
David S. Miller42a41722012-09-28 13:08:22 -0700276
David S. Miller957077042016-10-24 18:58:05 -0700277
2781: EX_LD(LOAD(ldub, %o1 + 0x00, %g2), NG4_retl_o2_plus_g1)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700279 add %o1, 1, %o1
280 subcc %g1, 1, %g1
281 add %o0, 1, %o0
282 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700283 EX_ST(STORE(stb, %g2, %o0 - 0x01), NG4_retl_o2_plus_g1_plus_1)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700284
28551: LOAD(prefetch, %o1 + 0x040, #n_reads_strong)
286 LOAD(prefetch, %o1 + 0x080, #n_reads_strong)
287 LOAD(prefetch, %o1 + 0x0c0, #n_reads_strong)
288 LOAD(prefetch, %o1 + 0x100, #n_reads_strong)
289 LOAD(prefetch, %o1 + 0x140, #n_reads_strong)
290 LOAD(prefetch, %o1 + 0x180, #n_reads_strong)
291 LOAD(prefetch, %o1 + 0x1c0, #n_reads_strong)
292 LOAD(prefetch, %o1 + 0x200, #n_reads_strong)
293
294 /* Check if we can use the straight fully aligned
295 * loop, or we require the alignaddr/faligndata variant.
296 */
297 andcc %o1, 0x7, %o5
298 bne,pn %icc, .Llarge_src_unaligned
299 sub %g0, %o0, %g1
300
301 /* Legitimize the use of initializing stores by getting dest
302 * to be 64-byte aligned.
303 */
304 and %g1, 0x3f, %g1
305 brz,pt %g1, .Llarge_aligned
306 sub %o2, %g1, %o2
David S. Miller42a41722012-09-28 13:08:22 -0700307
David S. Miller957077042016-10-24 18:58:05 -07003081: EX_LD(LOAD(ldx, %o1 + 0x00, %g2), NG4_retl_o2_plus_g1)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700309 add %o1, 8, %o1
310 subcc %g1, 8, %g1
311 add %o0, 8, %o0
312 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700313 EX_ST(STORE(stx, %g2, %o0 - 0x08), NG4_retl_o2_plus_g1_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700314
315.Llarge_aligned:
316 /* len >= 0x80 && src 8-byte aligned && dest 8-byte aligned */
317 andn %o2, 0x3f, %o4
318 sub %o2, %o4, %o2
319
David S. Miller957077042016-10-24 18:58:05 -07003201: EX_LD(LOAD(ldx, %o1 + 0x00, %g1), NG4_retl_o2_plus_o4)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700321 add %o1, 0x40, %o1
David S. Miller957077042016-10-24 18:58:05 -0700322 EX_LD(LOAD(ldx, %o1 - 0x38, %g2), NG4_retl_o2_plus_o4)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700323 subcc %o4, 0x40, %o4
David S. Miller957077042016-10-24 18:58:05 -0700324 EX_LD(LOAD(ldx, %o1 - 0x30, %g3), NG4_retl_o2_plus_o4_plus_64)
325 EX_LD(LOAD(ldx, %o1 - 0x28, GLOBAL_SPARE), NG4_retl_o2_plus_o4_plus_64)
326 EX_LD(LOAD(ldx, %o1 - 0x20, %o5), NG4_retl_o2_plus_o4_plus_64)
327 EX_ST(STORE_INIT(%g1, %o0), NG4_retl_o2_plus_o4_plus_64)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700328 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700329 EX_ST(STORE_INIT(%g2, %o0), NG4_retl_o2_plus_o4_plus_56)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700330 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700331 EX_LD(LOAD(ldx, %o1 - 0x18, %g2), NG4_retl_o2_plus_o4_plus_48)
332 EX_ST(STORE_INIT(%g3, %o0), NG4_retl_o2_plus_o4_plus_48)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700333 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700334 EX_LD(LOAD(ldx, %o1 - 0x10, %g3), NG4_retl_o2_plus_o4_plus_40)
335 EX_ST(STORE_INIT(GLOBAL_SPARE, %o0), NG4_retl_o2_plus_o4_plus_40)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700336 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700337 EX_LD(LOAD(ldx, %o1 - 0x08, GLOBAL_SPARE), NG4_retl_o2_plus_o4_plus_32)
338 EX_ST(STORE_INIT(%o5, %o0), NG4_retl_o2_plus_o4_plus_32)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700339 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700340 EX_ST(STORE_INIT(%g2, %o0), NG4_retl_o2_plus_o4_plus_24)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700341 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700342 EX_ST(STORE_INIT(%g3, %o0), NG4_retl_o2_plus_o4_plus_16)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700343 add %o0, 0x08, %o0
David S. Miller957077042016-10-24 18:58:05 -0700344 EX_ST(STORE_INIT(GLOBAL_SPARE, %o0), NG4_retl_o2_plus_o4_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700345 add %o0, 0x08, %o0
346 bne,pt %icc, 1b
347 LOAD(prefetch, %o1 + 0x200, #n_reads_strong)
348
349 membar #StoreLoad | #StoreStore
350
351 brz,pn %o2, .Lexit
352 cmp %o2, 19
353 ble,pn %icc, .Lsmall_unaligned
354 nop
355 ba,a,pt %icc, .Lmedium_noprefetch
356
357.Lexit: retl
358 mov EX_RETVAL(%o3), %o0
359
360.Llarge_src_unaligned:
David S. Millerf4da3622014-10-14 19:37:58 -0700361#ifdef NON_USER_COPY
362 VISEntryHalfFast(.Lmedium_vis_entry_fail)
363#else
364 VISEntryHalf
365#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -0700366 andn %o2, 0x3f, %o4
367 sub %o2, %o4, %o2
David S. Millerae2c6ca2012-09-26 21:11:01 -0700368 alignaddr %o1, %g0, %g1
369 add %o1, %o4, %o1
David S. Miller957077042016-10-24 18:58:05 -0700370 EX_LD_FP(LOAD(ldd, %g1 + 0x00, %f0), NG4_retl_o2_plus_o4)
3711: EX_LD_FP(LOAD(ldd, %g1 + 0x08, %f2), NG4_retl_o2_plus_o4)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700372 subcc %o4, 0x40, %o4
David S. Miller957077042016-10-24 18:58:05 -0700373 EX_LD_FP(LOAD(ldd, %g1 + 0x10, %f4), NG4_retl_o2_plus_o4_plus_64)
374 EX_LD_FP(LOAD(ldd, %g1 + 0x18, %f6), NG4_retl_o2_plus_o4_plus_64)
375 EX_LD_FP(LOAD(ldd, %g1 + 0x20, %f8), NG4_retl_o2_plus_o4_plus_64)
376 EX_LD_FP(LOAD(ldd, %g1 + 0x28, %f10), NG4_retl_o2_plus_o4_plus_64)
377 EX_LD_FP(LOAD(ldd, %g1 + 0x30, %f12), NG4_retl_o2_plus_o4_plus_64)
378 EX_LD_FP(LOAD(ldd, %g1 + 0x38, %f14), NG4_retl_o2_plus_o4_plus_64)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700379 faligndata %f0, %f2, %f16
David S. Miller957077042016-10-24 18:58:05 -0700380 EX_LD_FP(LOAD(ldd, %g1 + 0x40, %f0), NG4_retl_o2_plus_o4_plus_64)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700381 faligndata %f2, %f4, %f18
382 add %g1, 0x40, %g1
383 faligndata %f4, %f6, %f20
384 faligndata %f6, %f8, %f22
385 faligndata %f8, %f10, %f24
386 faligndata %f10, %f12, %f26
387 faligndata %f12, %f14, %f28
388 faligndata %f14, %f0, %f30
David S. Miller957077042016-10-24 18:58:05 -0700389 EX_ST_FP(STORE(std, %f16, %o0 + 0x00), NG4_retl_o2_plus_o4_plus_64)
390 EX_ST_FP(STORE(std, %f18, %o0 + 0x08), NG4_retl_o2_plus_o4_plus_56)
391 EX_ST_FP(STORE(std, %f20, %o0 + 0x10), NG4_retl_o2_plus_o4_plus_48)
392 EX_ST_FP(STORE(std, %f22, %o0 + 0x18), NG4_retl_o2_plus_o4_plus_40)
393 EX_ST_FP(STORE(std, %f24, %o0 + 0x20), NG4_retl_o2_plus_o4_plus_32)
394 EX_ST_FP(STORE(std, %f26, %o0 + 0x28), NG4_retl_o2_plus_o4_plus_24)
395 EX_ST_FP(STORE(std, %f28, %o0 + 0x30), NG4_retl_o2_plus_o4_plus_16)
396 EX_ST_FP(STORE(std, %f30, %o0 + 0x38), NG4_retl_o2_plus_o4_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700397 add %o0, 0x40, %o0
398 bne,pt %icc, 1b
399 LOAD(prefetch, %g1 + 0x200, #n_reads_strong)
David S. Miller44922152015-08-06 19:13:25 -0700400#ifdef NON_USER_COPY
401 VISExitHalfFast
402#else
David S. Millerae2c6ca2012-09-26 21:11:01 -0700403 VISExitHalf
David S. Miller44922152015-08-06 19:13:25 -0700404#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -0700405 brz,pn %o2, .Lexit
406 cmp %o2, 19
407 ble,pn %icc, .Lsmall_unaligned
408 nop
409 ba,a,pt %icc, .Lmedium_unaligned
410
David S. Millerf4da3622014-10-14 19:37:58 -0700411#ifdef NON_USER_COPY
412.Lmedium_vis_entry_fail:
413 or %o0, %o1, %g2
414#endif
David S. Millerae2c6ca2012-09-26 21:11:01 -0700415.Lmedium:
416 LOAD(prefetch, %o1 + 0x40, #n_reads_strong)
417 andcc %g2, 0x7, %g0
418 bne,pn %icc, .Lmedium_unaligned
419 nop
420.Lmedium_noprefetch:
421 andncc %o2, 0x20 - 1, %o5
422 be,pn %icc, 2f
423 sub %o2, %o5, %o2
David S. Miller957077042016-10-24 18:58:05 -07004241: EX_LD(LOAD(ldx, %o1 + 0x00, %g1), NG4_retl_o2_plus_o5)
425 EX_LD(LOAD(ldx, %o1 + 0x08, %g2), NG4_retl_o2_plus_o5)
426 EX_LD(LOAD(ldx, %o1 + 0x10, GLOBAL_SPARE), NG4_retl_o2_plus_o5)
427 EX_LD(LOAD(ldx, %o1 + 0x18, %o4), NG4_retl_o2_plus_o5)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700428 add %o1, 0x20, %o1
429 subcc %o5, 0x20, %o5
David S. Miller957077042016-10-24 18:58:05 -0700430 EX_ST(STORE(stx, %g1, %o0 + 0x00), NG4_retl_o2_plus_o5_plus_32)
431 EX_ST(STORE(stx, %g2, %o0 + 0x08), NG4_retl_o2_plus_o5_plus_24)
432 EX_ST(STORE(stx, GLOBAL_SPARE, %o0 + 0x10), NG4_retl_o2_plus_o5_plus_24)
433 EX_ST(STORE(stx, %o4, %o0 + 0x18), NG4_retl_o2_plus_o5_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700434 bne,pt %icc, 1b
435 add %o0, 0x20, %o0
4362: andcc %o2, 0x18, %o5
437 be,pt %icc, 3f
438 sub %o2, %o5, %o2
David S. Miller957077042016-10-24 18:58:05 -0700439
4401: EX_LD(LOAD(ldx, %o1 + 0x00, %g1), NG4_retl_o2_plus_o5)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700441 add %o1, 0x08, %o1
442 add %o0, 0x08, %o0
443 subcc %o5, 0x08, %o5
444 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700445 EX_ST(STORE(stx, %g1, %o0 - 0x08), NG4_retl_o2_plus_o5_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -07004463: brz,pt %o2, .Lexit
447 cmp %o2, 0x04
448 bl,pn %icc, .Ltiny
449 nop
David S. Miller957077042016-10-24 18:58:05 -0700450 EX_LD(LOAD(lduw, %o1 + 0x00, %g1), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700451 add %o1, 0x04, %o1
452 add %o0, 0x04, %o0
453 subcc %o2, 0x04, %o2
454 bne,pn %icc, .Ltiny
David S. Miller957077042016-10-24 18:58:05 -0700455 EX_ST(STORE(stw, %g1, %o0 - 0x04), NG4_retl_o2_plus_4)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700456 ba,a,pt %icc, .Lexit
457.Lmedium_unaligned:
458 /* First get dest 8 byte aligned. */
459 sub %g0, %o0, %g1
460 and %g1, 0x7, %g1
461 brz,pt %g1, 2f
462 sub %o2, %g1, %o2
David S. Miller42a41722012-09-28 13:08:22 -0700463
David S. Miller957077042016-10-24 18:58:05 -07004641: EX_LD(LOAD(ldub, %o1 + 0x00, %g2), NG4_retl_o2_plus_g1)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700465 add %o1, 1, %o1
466 subcc %g1, 1, %g1
467 add %o0, 1, %o0
468 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700469 EX_ST(STORE(stb, %g2, %o0 - 0x01), NG4_retl_o2_plus_g1_plus_1)
David S. Millerae2c6ca2012-09-26 21:11:01 -07004702:
471 and %o1, 0x7, %g1
472 brz,pn %g1, .Lmedium_noprefetch
473 sll %g1, 3, %g1
474 mov 64, %g2
475 sub %g2, %g1, %g2
476 andn %o1, 0x7, %o1
David S. Miller957077042016-10-24 18:58:05 -0700477 EX_LD(LOAD(ldx, %o1 + 0x00, %o4), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700478 sllx %o4, %g1, %o4
479 andn %o2, 0x08 - 1, %o5
480 sub %o2, %o5, %o2
David S. Miller957077042016-10-24 18:58:05 -07004811: EX_LD(LOAD(ldx, %o1 + 0x08, %g3), NG4_retl_o2_plus_o5)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700482 add %o1, 0x08, %o1
483 subcc %o5, 0x08, %o5
484 srlx %g3, %g2, GLOBAL_SPARE
485 or GLOBAL_SPARE, %o4, GLOBAL_SPARE
David S. Miller957077042016-10-24 18:58:05 -0700486 EX_ST(STORE(stx, GLOBAL_SPARE, %o0 + 0x00), NG4_retl_o2_plus_o5_plus_8)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700487 add %o0, 0x08, %o0
488 bne,pt %icc, 1b
489 sllx %g3, %g1, %o4
490 srl %g1, 3, %g1
491 add %o1, %g1, %o1
492 brz,pn %o2, .Lexit
493 nop
494 ba,pt %icc, .Lsmall_unaligned
495
496.Ltiny:
David S. Miller957077042016-10-24 18:58:05 -0700497 EX_LD(LOAD(ldub, %o1 + 0x00, %g1), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700498 subcc %o2, 1, %o2
499 be,pn %icc, .Lexit
David S. Miller957077042016-10-24 18:58:05 -0700500 EX_ST(STORE(stb, %g1, %o0 + 0x00), NG4_retl_o2_plus_1)
501 EX_LD(LOAD(ldub, %o1 + 0x01, %g1), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700502 subcc %o2, 1, %o2
503 be,pn %icc, .Lexit
David S. Miller957077042016-10-24 18:58:05 -0700504 EX_ST(STORE(stb, %g1, %o0 + 0x01), NG4_retl_o2_plus_1)
505 EX_LD(LOAD(ldub, %o1 + 0x02, %g1), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700506 ba,pt %icc, .Lexit
David S. Miller957077042016-10-24 18:58:05 -0700507 EX_ST(STORE(stb, %g1, %o0 + 0x02), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700508
509.Lsmall:
510 andcc %g2, 0x3, %g0
511 bne,pn %icc, .Lsmall_unaligned
512 andn %o2, 0x4 - 1, %o5
513 sub %o2, %o5, %o2
5141:
David S. Miller957077042016-10-24 18:58:05 -0700515 EX_LD(LOAD(lduw, %o1 + 0x00, %g1), NG4_retl_o2_plus_o5)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700516 add %o1, 0x04, %o1
517 subcc %o5, 0x04, %o5
518 add %o0, 0x04, %o0
519 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700520 EX_ST(STORE(stw, %g1, %o0 - 0x04), NG4_retl_o2_plus_o5_plus_4)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700521 brz,pt %o2, .Lexit
522 nop
523 ba,a,pt %icc, .Ltiny
524
525.Lsmall_unaligned:
David S. Miller957077042016-10-24 18:58:05 -07005261: EX_LD(LOAD(ldub, %o1 + 0x00, %g1), NG4_retl_o2)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700527 add %o1, 1, %o1
528 add %o0, 1, %o0
529 subcc %o2, 1, %o2
530 bne,pt %icc, 1b
David S. Miller957077042016-10-24 18:58:05 -0700531 EX_ST(STORE(stb, %g1, %o0 - 0x01), NG4_retl_o2_plus_1)
David S. Millerae2c6ca2012-09-26 21:11:01 -0700532 ba,a,pt %icc, .Lexit
Babu Moger0ae2d262017-03-17 14:52:21 -0600533 nop
David S. Millerae2c6ca2012-09-26 21:11:01 -0700534 .size FUNC_NAME, .-FUNC_NAME