#include <linux/linkage.h>
#include <asm/visasm.h>

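/* The AES_* macros below hand-assemble the SPARC AES opcodes as raw .word
 * values, so building this file does not depend on assembler support for
 * those instructions.  F3F() packs the op, op3 and opf fields of a
 * format-3 FPU instruction, FPD_ENCODE() folds a double-precision register
 * number into the 5-bit register fields (%f32 and above encode with the
 * high bit moved into bit 0), and RS1/RS2/RS3/RD/IMM5 place the encoded
 * operands in the instruction word.
 */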
#define F3F(x,y,z)	(((x)<<30)|((y)<<19)|((z)<<5))

#define FPD_ENCODE(x)	(((x) >> 5) | ((x) & ~(0x20)))

#define RS1(x)		(FPD_ENCODE(x) << 14)
#define RS2(x)		(FPD_ENCODE(x) << 0)
#define RS3(x)		(FPD_ENCODE(x) << 9)
#define RD(x)		(FPD_ENCODE(x) << 25)
#define IMM5(x)		((x) << 9)

#define AES_EROUND01(a,b,c,d) \
	.word	(F3F(2, 0x19, 0)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_EROUND23(a,b,c,d) \
	.word	(F3F(2, 0x19, 1)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_DROUND01(a,b,c,d) \
	.word	(F3F(2, 0x19, 2)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_DROUND23(a,b,c,d) \
	.word	(F3F(2, 0x19, 3)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_EROUND01_L(a,b,c,d) \
	.word	(F3F(2, 0x19, 4)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_EROUND23_L(a,b,c,d) \
	.word	(F3F(2, 0x19, 5)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_DROUND01_L(a,b,c,d) \
	.word	(F3F(2, 0x19, 6)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_DROUND23_L(a,b,c,d) \
	.word	(F3F(2, 0x19, 7)|RS1(a)|RS2(b)|RS3(c)|RD(d));
#define AES_KEXPAND1(a,b,c,d) \
	.word	(F3F(2, 0x19, 8)|RS1(a)|RS2(b)|IMM5(c)|RD(d));
#define AES_KEXPAND0(a,b,c) \
	.word	(F3F(2, 0x36, 0x130)|RS1(a)|RS2(b)|RD(c));
#define AES_KEXPAND2(a,b,c) \
	.word	(F3F(2, 0x36, 0x131)|RS1(a)|RS2(b)|RD(c));

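/* The MOVXTOD_* macros are likewise pre-assembled instruction words: each
 * is a "movxtod" that copies a 64-bit integer register (%g3, %g7, %o0 or
 * %o5, per the macro name) into the named double-precision FPU register.
 * They are used below to get the integer xor results into the FPU for the
 * AES round instructions.
 */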
#define MOVXTOD_G3_F4 \
	.word	0x89b02303;
#define MOVXTOD_G7_F6 \
	.word	0x8db02307;
#define MOVXTOD_G3_F0 \
	.word	0x81b02303;
#define MOVXTOD_G7_F2 \
	.word	0x85b02307;
#define MOVXTOD_O0_F0 \
	.word	0x81b02308;
#define MOVXTOD_O5_F0 \
	.word	0x81b0230d;
#define MOVXTOD_O5_F2 \
	.word	0x85b0230d;
#define MOVXTOD_G3_F60 \
	.word	0xbbb02303;
#define MOVXTOD_G7_F62 \
	.word	0xbfb02307;

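/* ENCRYPT_TWO_ROUNDS runs two AES encryption rounds on the 128-bit state
 * held in the double-register pair I0/I1, using the four round-key
 * register pairs starting at KEY_BASE and T0/T1 as temporaries; the _LAST
 * variants finish with the final-round (_L) opcodes.  The *_2 variants
 * interleave the same key schedule over two independent blocks (I0/I1 and
 * I2/I3) so the bulk routines can hide instruction latency.  The DECRYPT_*
 * macros further down mirror all of this for the inverse rounds.
 */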
#define ENCRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
	AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
	AES_EROUND23(KEY_BASE + 6, T0, T1, I1)

#define ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE + 0, I2, I3, T2) \
	AES_EROUND23(KEY_BASE + 2, I2, I3, T3) \
	AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
	AES_EROUND23(KEY_BASE + 6, T0, T1, I1) \
	AES_EROUND01(KEY_BASE + 4, T2, T3, I2) \
	AES_EROUND23(KEY_BASE + 6, T2, T3, I3)

#define ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
	AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
	AES_EROUND01_L(KEY_BASE + 4, T0, T1, I0) \
	AES_EROUND23_L(KEY_BASE + 6, T0, T1, I1)

#define ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE + 0, I2, I3, T2) \
	AES_EROUND23(KEY_BASE + 2, I2, I3, T3) \
	AES_EROUND01_L(KEY_BASE + 4, T0, T1, I0) \
	AES_EROUND23_L(KEY_BASE + 6, T0, T1, I1) \
	AES_EROUND01_L(KEY_BASE + 4, T2, T3, I2) \
	AES_EROUND23_L(KEY_BASE + 6, T2, T3, I3)

	/* 10 rounds */
#define ENCRYPT_128(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)

#define ENCRYPT_128_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 12 rounds */
#define ENCRYPT_192(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)

#define ENCRYPT_192_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 40, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 14 rounds */
#define ENCRYPT_256(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)

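/* For 256-bit keys the full schedule does not fit in the FPU register file
 * alongside two interleaved blocks, so ENCRYPT_256_2 borrows round-key
 * register pairs as its temporaries (TMP_BASE) and reloads the clobbered
 * keys with the ldd instructions embedded in the macro; it therefore
 * requires the expanded key pointer to still be in %o0 when it is used.
 */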
#define ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, TMP_BASE) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, \
			     TMP_BASE + 0, TMP_BASE + 2, TMP_BASE + 4, TMP_BASE + 6)

#define ENCRYPT_256_2(KEY_BASE, I0, I1, I2, I3) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, KEY_BASE + 48) \
	ldd	[%o0 + 0xd0], %f56; \
	ldd	[%o0 + 0xd8], %f58; \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, KEY_BASE + 0) \
	ldd	[%o0 + 0xe0], %f60; \
	ldd	[%o0 + 0xe8], %f62; \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, KEY_BASE + 0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, KEY_BASE + 0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, KEY_BASE + 0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 40, I0, I1, I2, I3, KEY_BASE + 0) \
	AES_EROUND01(KEY_BASE + 48, I0, I1, KEY_BASE + 0) \
	AES_EROUND23(KEY_BASE + 50, I0, I1, KEY_BASE + 2) \
	AES_EROUND01(KEY_BASE + 48, I2, I3, KEY_BASE + 4) \
	AES_EROUND23(KEY_BASE + 50, I2, I3, KEY_BASE + 6) \
	AES_EROUND01_L(KEY_BASE + 52, KEY_BASE + 0, KEY_BASE + 2, I0) \
	AES_EROUND23_L(KEY_BASE + 54, KEY_BASE + 0, KEY_BASE + 2, I1) \
	ldd	[%o0 + 0x10], %f8; \
	ldd	[%o0 + 0x18], %f10; \
	AES_EROUND01_L(KEY_BASE + 52, KEY_BASE + 4, KEY_BASE + 6, I2) \
	AES_EROUND23_L(KEY_BASE + 54, KEY_BASE + 4, KEY_BASE + 6, I3) \
	ldd	[%o0 + 0x20], %f12; \
	ldd	[%o0 + 0x28], %f14;

#define DECRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
	AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
	AES_DROUND23(KEY_BASE + 4, T0, T1, I1) \
	AES_DROUND01(KEY_BASE + 6, T0, T1, I0)

#define DECRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
	AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
	AES_DROUND23_L(KEY_BASE + 4, T0, T1, I1) \
	AES_DROUND01_L(KEY_BASE + 6, T0, T1, I0)

	/* 10 rounds */
#define DECRYPT_128(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)

	/* 12 rounds */
#define DECRYPT_192(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)

	/* 14 rounds */
#define DECRYPT_256(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)

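/* aes_sparc64_key_expand() stores the user-supplied key first and then
 * generates the remaining round keys with the AES_KEXPAND opcodes, so the
 * output buffer holds 0xb0, 0xd0 or 0xf0 bytes (11, 13 or 15 round keys)
 * for 128-, 192- and 256-bit keys respectively.  The byte offsets used by
 * the ldd instructions in the routines below index into that layout.
 */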
	.align	32
ENTRY(aes_sparc64_key_expand)
	/* %o0=input_key, %o1=output_key, %o2=key_len */
	VISEntry
	ld	[%o0 + 0x00], %f0
	ld	[%o0 + 0x04], %f1
	ld	[%o0 + 0x08], %f2
	ld	[%o0 + 0x0c], %f3

	std	%f0, [%o1 + 0x00]
	std	%f2, [%o1 + 0x08]
	add	%o1, 0x10, %o1

	cmp	%o2, 24
	bl	2f
	 nop

	be	1f
	 nop

	/* 256-bit key expansion */
	ld	[%o0 + 0x10], %f4
	ld	[%o0 + 0x14], %f5
	ld	[%o0 + 0x18], %f6
	ld	[%o0 + 0x1c], %f7

	std	%f4, [%o1 + 0x00]
	std	%f6, [%o1 + 0x08]
	add	%o1, 0x10, %o1

	AES_KEXPAND1(0, 6, 0x0, 8)
	AES_KEXPAND2(2, 8, 10)
	AES_KEXPAND0(4, 10, 12)
	AES_KEXPAND2(6, 12, 14)
	AES_KEXPAND1(8, 14, 0x1, 16)
	AES_KEXPAND2(10, 16, 18)
	AES_KEXPAND0(12, 18, 20)
	AES_KEXPAND2(14, 20, 22)
	AES_KEXPAND1(16, 22, 0x2, 24)
	AES_KEXPAND2(18, 24, 26)
	AES_KEXPAND0(20, 26, 28)
	AES_KEXPAND2(22, 28, 30)
	AES_KEXPAND1(24, 30, 0x3, 32)
	AES_KEXPAND2(26, 32, 34)
	AES_KEXPAND0(28, 34, 36)
	AES_KEXPAND2(30, 36, 38)
	AES_KEXPAND1(32, 38, 0x4, 40)
	AES_KEXPAND2(34, 40, 42)
	AES_KEXPAND0(36, 42, 44)
	AES_KEXPAND2(38, 44, 46)
	AES_KEXPAND1(40, 46, 0x5, 48)
	AES_KEXPAND2(42, 48, 50)
	AES_KEXPAND0(44, 50, 52)
	AES_KEXPAND2(46, 52, 54)
	AES_KEXPAND1(48, 54, 0x6, 56)
	AES_KEXPAND2(50, 56, 58)

	std	%f8, [%o1 + 0x00]
	std	%f10, [%o1 + 0x08]
	std	%f12, [%o1 + 0x10]
	std	%f14, [%o1 + 0x18]
	std	%f16, [%o1 + 0x20]
	std	%f18, [%o1 + 0x28]
	std	%f20, [%o1 + 0x30]
	std	%f22, [%o1 + 0x38]
	std	%f24, [%o1 + 0x40]
	std	%f26, [%o1 + 0x48]
	std	%f28, [%o1 + 0x50]
	std	%f30, [%o1 + 0x58]
	std	%f32, [%o1 + 0x60]
	std	%f34, [%o1 + 0x68]
	std	%f36, [%o1 + 0x70]
	std	%f38, [%o1 + 0x78]
	std	%f40, [%o1 + 0x80]
	std	%f42, [%o1 + 0x88]
	std	%f44, [%o1 + 0x90]
	std	%f46, [%o1 + 0x98]
	std	%f48, [%o1 + 0xa0]
	std	%f50, [%o1 + 0xa8]
	std	%f52, [%o1 + 0xb0]
	std	%f54, [%o1 + 0xb8]
	std	%f56, [%o1 + 0xc0]
	ba,pt	%xcc, 80f
	 std	%f58, [%o1 + 0xc8]

1:
	/* 192-bit key expansion */
	ld	[%o0 + 0x10], %f4
	ld	[%o0 + 0x14], %f5

	std	%f4, [%o1 + 0x00]
	add	%o1, 0x08, %o1

	AES_KEXPAND1(0, 4, 0x0, 6)
	AES_KEXPAND2(2, 6, 8)
	AES_KEXPAND2(4, 8, 10)
	AES_KEXPAND1(6, 10, 0x1, 12)
	AES_KEXPAND2(8, 12, 14)
	AES_KEXPAND2(10, 14, 16)
	AES_KEXPAND1(12, 16, 0x2, 18)
	AES_KEXPAND2(14, 18, 20)
	AES_KEXPAND2(16, 20, 22)
	AES_KEXPAND1(18, 22, 0x3, 24)
	AES_KEXPAND2(20, 24, 26)
	AES_KEXPAND2(22, 26, 28)
	AES_KEXPAND1(24, 28, 0x4, 30)
	AES_KEXPAND2(26, 30, 32)
	AES_KEXPAND2(28, 32, 34)
	AES_KEXPAND1(30, 34, 0x5, 36)
	AES_KEXPAND2(32, 36, 38)
	AES_KEXPAND2(34, 38, 40)
	AES_KEXPAND1(36, 40, 0x6, 42)
	AES_KEXPAND2(38, 42, 44)
	AES_KEXPAND2(40, 44, 46)
	AES_KEXPAND1(42, 46, 0x7, 48)
	AES_KEXPAND2(44, 48, 50)

	std	%f6, [%o1 + 0x00]
	std	%f8, [%o1 + 0x08]
	std	%f10, [%o1 + 0x10]
	std	%f12, [%o1 + 0x18]
	std	%f14, [%o1 + 0x20]
	std	%f16, [%o1 + 0x28]
	std	%f18, [%o1 + 0x30]
	std	%f20, [%o1 + 0x38]
	std	%f22, [%o1 + 0x40]
	std	%f24, [%o1 + 0x48]
	std	%f26, [%o1 + 0x50]
	std	%f28, [%o1 + 0x58]
	std	%f30, [%o1 + 0x60]
	std	%f32, [%o1 + 0x68]
	std	%f34, [%o1 + 0x70]
	std	%f36, [%o1 + 0x78]
	std	%f38, [%o1 + 0x80]
	std	%f40, [%o1 + 0x88]
	std	%f42, [%o1 + 0x90]
	std	%f44, [%o1 + 0x98]
	std	%f46, [%o1 + 0xa0]
	std	%f48, [%o1 + 0xa8]
	ba,pt	%xcc, 80f
	 std	%f50, [%o1 + 0xb0]

2:
	/* 128-bit key expansion */
	AES_KEXPAND1(0, 2, 0x0, 4)
	AES_KEXPAND2(2, 4, 6)
	AES_KEXPAND1(4, 6, 0x1, 8)
	AES_KEXPAND2(6, 8, 10)
	AES_KEXPAND1(8, 10, 0x2, 12)
	AES_KEXPAND2(10, 12, 14)
	AES_KEXPAND1(12, 14, 0x3, 16)
	AES_KEXPAND2(14, 16, 18)
	AES_KEXPAND1(16, 18, 0x4, 20)
	AES_KEXPAND2(18, 20, 22)
	AES_KEXPAND1(20, 22, 0x5, 24)
	AES_KEXPAND2(22, 24, 26)
	AES_KEXPAND1(24, 26, 0x6, 28)
	AES_KEXPAND2(26, 28, 30)
	AES_KEXPAND1(28, 30, 0x7, 32)
	AES_KEXPAND2(30, 32, 34)
	AES_KEXPAND1(32, 34, 0x8, 36)
	AES_KEXPAND2(34, 36, 38)
	AES_KEXPAND1(36, 38, 0x9, 40)
	AES_KEXPAND2(38, 40, 42)

	std	%f4, [%o1 + 0x00]
	std	%f6, [%o1 + 0x08]
	std	%f8, [%o1 + 0x10]
	std	%f10, [%o1 + 0x18]
	std	%f12, [%o1 + 0x20]
	std	%f14, [%o1 + 0x28]
	std	%f16, [%o1 + 0x30]
	std	%f18, [%o1 + 0x38]
	std	%f20, [%o1 + 0x40]
	std	%f22, [%o1 + 0x48]
	std	%f24, [%o1 + 0x50]
	std	%f26, [%o1 + 0x58]
	std	%f28, [%o1 + 0x60]
	std	%f30, [%o1 + 0x68]
	std	%f32, [%o1 + 0x70]
	std	%f34, [%o1 + 0x78]
	std	%f36, [%o1 + 0x80]
	std	%f38, [%o1 + 0x88]
	std	%f40, [%o1 + 0x90]
	std	%f42, [%o1 + 0x98]
80:
	retl
	 VISExit
ENDPROC(aes_sparc64_key_expand)

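/* The single-block entry points below each handle one 16-byte block per
 * call: they load the whole expanded schedule into the FPU, xor the first
 * (for decrypt, the last) round key into the input with fxor, and run the
 * round macros above.  Going by the register comments, a C caller would
 * declare them roughly as follows (an assumption for illustration, not a
 * prototype stated in this file):
 *
 *	void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
 *				     u32 *output);
 */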
David S. Miller0bdcaf72012-08-29 12:50:16 -0700387 .align 32
388ENTRY(aes_sparc64_encrypt_128)
389 /* %o0=key, %o1=input, %o2=output */
390 VISEntry
391 ld [%o1 + 0x00], %f4
392 ld [%o1 + 0x04], %f5
393 ld [%o1 + 0x08], %f6
394 ld [%o1 + 0x0c], %f7
395 ldd [%o0 + 0x00], %f8
396 ldd [%o0 + 0x08], %f10
397 ldd [%o0 + 0x10], %f12
398 ldd [%o0 + 0x18], %f14
399 ldd [%o0 + 0x20], %f16
400 ldd [%o0 + 0x28], %f18
401 ldd [%o0 + 0x30], %f20
402 ldd [%o0 + 0x38], %f22
403 ldd [%o0 + 0x40], %f24
404 ldd [%o0 + 0x48], %f26
405 ldd [%o0 + 0x50], %f28
406 ldd [%o0 + 0x58], %f30
407 ldd [%o0 + 0x60], %f32
408 ldd [%o0 + 0x68], %f34
409 ldd [%o0 + 0x70], %f36
410 ldd [%o0 + 0x78], %f38
411 ldd [%o0 + 0x80], %f40
412 ldd [%o0 + 0x88], %f42
413 ldd [%o0 + 0x90], %f44
414 ldd [%o0 + 0x98], %f46
415 ldd [%o0 + 0xa0], %f48
416 ldd [%o0 + 0xa8], %f50
417 fxor %f8, %f4, %f4
418 fxor %f10, %f6, %f6
419 ENCRYPT_128(12, 4, 6, 0, 2)
420 st %f4, [%o2 + 0x00]
421 st %f5, [%o2 + 0x04]
422 st %f6, [%o2 + 0x08]
423 st %f7, [%o2 + 0x0c]
424 retl
425 VISExit
426ENDPROC(aes_sparc64_encrypt_128)

	.align	32
ENTRY(aes_sparc64_encrypt_192)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld	[%o1 + 0x00], %f4
	ld	[%o1 + 0x04], %f5
	ld	[%o1 + 0x08], %f6
	ld	[%o1 + 0x0c], %f7

	ldd	[%o0 + 0x00], %f8
	ldd	[%o0 + 0x08], %f10

	fxor	%f8, %f4, %f4
	fxor	%f10, %f6, %f6

	ldd	[%o0 + 0x10], %f8
	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	add	%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd	[%o0 + 0x10], %f12
	ldd	[%o0 + 0x18], %f14
	ldd	[%o0 + 0x20], %f16
	ldd	[%o0 + 0x28], %f18
	ldd	[%o0 + 0x30], %f20
	ldd	[%o0 + 0x38], %f22
	ldd	[%o0 + 0x40], %f24
	ldd	[%o0 + 0x48], %f26
	ldd	[%o0 + 0x50], %f28
	ldd	[%o0 + 0x58], %f30
	ldd	[%o0 + 0x60], %f32
	ldd	[%o0 + 0x68], %f34
	ldd	[%o0 + 0x70], %f36
	ldd	[%o0 + 0x78], %f38
	ldd	[%o0 + 0x80], %f40
	ldd	[%o0 + 0x88], %f42
	ldd	[%o0 + 0x90], %f44
	ldd	[%o0 + 0x98], %f46
	ldd	[%o0 + 0xa0], %f48
	ldd	[%o0 + 0xa8], %f50


	ENCRYPT_128(12, 4, 6, 0, 2)

	st	%f4, [%o2 + 0x00]
	st	%f5, [%o2 + 0x04]
	st	%f6, [%o2 + 0x08]
	st	%f7, [%o2 + 0x0c]

	retl
	 VISExit
ENDPROC(aes_sparc64_encrypt_192)

	.align	32
ENTRY(aes_sparc64_encrypt_256)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld	[%o1 + 0x00], %f4
	ld	[%o1 + 0x04], %f5
	ld	[%o1 + 0x08], %f6
	ld	[%o1 + 0x0c], %f7

	ldd	[%o0 + 0x00], %f8
	ldd	[%o0 + 0x08], %f10

	fxor	%f8, %f4, %f4
	fxor	%f10, %f6, %f6

	ldd	[%o0 + 0x10], %f8

	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	add	%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd	[%o0 + 0x10], %f8

	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	add	%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd	[%o0 + 0x10], %f12
	ldd	[%o0 + 0x18], %f14
	ldd	[%o0 + 0x20], %f16
	ldd	[%o0 + 0x28], %f18
	ldd	[%o0 + 0x30], %f20
	ldd	[%o0 + 0x38], %f22
	ldd	[%o0 + 0x40], %f24
	ldd	[%o0 + 0x48], %f26
	ldd	[%o0 + 0x50], %f28
	ldd	[%o0 + 0x58], %f30
	ldd	[%o0 + 0x60], %f32
	ldd	[%o0 + 0x68], %f34
	ldd	[%o0 + 0x70], %f36
	ldd	[%o0 + 0x78], %f38
	ldd	[%o0 + 0x80], %f40
	ldd	[%o0 + 0x88], %f42
	ldd	[%o0 + 0x90], %f44
	ldd	[%o0 + 0x98], %f46
	ldd	[%o0 + 0xa0], %f48
	ldd	[%o0 + 0xa8], %f50

	ENCRYPT_128(12, 4, 6, 0, 2)

	st	%f4, [%o2 + 0x00]
	st	%f5, [%o2 + 0x04]
	st	%f6, [%o2 + 0x08]
	st	%f7, [%o2 + 0x0c]

	retl
	 VISExit
ENDPROC(aes_sparc64_encrypt_256)

	.align	32
ENTRY(aes_sparc64_decrypt_128)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld	[%o1 + 0x00], %f4
	ld	[%o1 + 0x04], %f5
	ld	[%o1 + 0x08], %f6
	ld	[%o1 + 0x0c], %f7
	ldd	[%o0 + 0xa0], %f8
	ldd	[%o0 + 0xa8], %f10
	ldd	[%o0 + 0x98], %f12
	ldd	[%o0 + 0x90], %f14
	ldd	[%o0 + 0x88], %f16
	ldd	[%o0 + 0x80], %f18
	ldd	[%o0 + 0x78], %f20
	ldd	[%o0 + 0x70], %f22
	ldd	[%o0 + 0x68], %f24
	ldd	[%o0 + 0x60], %f26
	ldd	[%o0 + 0x58], %f28
	ldd	[%o0 + 0x50], %f30
	ldd	[%o0 + 0x48], %f32
	ldd	[%o0 + 0x40], %f34
	ldd	[%o0 + 0x38], %f36
	ldd	[%o0 + 0x30], %f38
	ldd	[%o0 + 0x28], %f40
	ldd	[%o0 + 0x20], %f42
	ldd	[%o0 + 0x18], %f44
	ldd	[%o0 + 0x10], %f46
	ldd	[%o0 + 0x08], %f48
	ldd	[%o0 + 0x00], %f50
	fxor	%f8, %f4, %f4
	fxor	%f10, %f6, %f6
	DECRYPT_128(12, 4, 6, 0, 2)
	st	%f4, [%o2 + 0x00]
	st	%f5, [%o2 + 0x04]
	st	%f6, [%o2 + 0x08]
	st	%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_128)

	.align	32
ENTRY(aes_sparc64_decrypt_192)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld	[%o1 + 0x00], %f4
	ld	[%o1 + 0x04], %f5
	ld	[%o1 + 0x08], %f6
	ld	[%o1 + 0x0c], %f7
	ldd	[%o0 + 0xc0], %f8
	ldd	[%o0 + 0xc8], %f10
	ldd	[%o0 + 0xb8], %f12
	ldd	[%o0 + 0xb0], %f14
	ldd	[%o0 + 0xa8], %f16
	ldd	[%o0 + 0xa0], %f18
	fxor	%f8, %f4, %f4
	fxor	%f10, %f6, %f6
	ldd	[%o0 + 0x98], %f20
	ldd	[%o0 + 0x90], %f22
	ldd	[%o0 + 0x88], %f24
	ldd	[%o0 + 0x80], %f26
	DECRYPT_TWO_ROUNDS(12, 4, 6, 0, 2)
	ldd	[%o0 + 0x78], %f28
	ldd	[%o0 + 0x70], %f30
	ldd	[%o0 + 0x68], %f32
	ldd	[%o0 + 0x60], %f34
	ldd	[%o0 + 0x58], %f36
	ldd	[%o0 + 0x50], %f38
	ldd	[%o0 + 0x48], %f40
	ldd	[%o0 + 0x40], %f42
	ldd	[%o0 + 0x38], %f44
	ldd	[%o0 + 0x30], %f46
	ldd	[%o0 + 0x28], %f48
	ldd	[%o0 + 0x20], %f50
	ldd	[%o0 + 0x18], %f52
	ldd	[%o0 + 0x10], %f54
	ldd	[%o0 + 0x08], %f56
	ldd	[%o0 + 0x00], %f58
	DECRYPT_128(20, 4, 6, 0, 2)
	st	%f4, [%o2 + 0x00]
	st	%f5, [%o2 + 0x04]
	st	%f6, [%o2 + 0x08]
	st	%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_192)

	.align	32
ENTRY(aes_sparc64_decrypt_256)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld	[%o1 + 0x00], %f4
	ld	[%o1 + 0x04], %f5
	ld	[%o1 + 0x08], %f6
	ld	[%o1 + 0x0c], %f7
	ldd	[%o0 + 0xe0], %f8
	ldd	[%o0 + 0xe8], %f10
	ldd	[%o0 + 0xd8], %f12
	ldd	[%o0 + 0xd0], %f14
	ldd	[%o0 + 0xc8], %f16
	fxor	%f8, %f4, %f4
	ldd	[%o0 + 0xc0], %f18
	fxor	%f10, %f6, %f6
	ldd	[%o0 + 0xb8], %f20
	AES_DROUND23(12, 4, 6, 2)
	ldd	[%o0 + 0xb0], %f22
	AES_DROUND01(14, 4, 6, 0)
	ldd	[%o0 + 0xa8], %f24
	AES_DROUND23(16, 0, 2, 6)
	ldd	[%o0 + 0xa0], %f26
	AES_DROUND01(18, 0, 2, 4)
	ldd	[%o0 + 0x98], %f12
	AES_DROUND23(20, 4, 6, 2)
	ldd	[%o0 + 0x90], %f14
	AES_DROUND01(22, 4, 6, 0)
	ldd	[%o0 + 0x88], %f16
	AES_DROUND23(24, 0, 2, 6)
	ldd	[%o0 + 0x80], %f18
	AES_DROUND01(26, 0, 2, 4)
	ldd	[%o0 + 0x78], %f20
	AES_DROUND23(12, 4, 6, 2)
	ldd	[%o0 + 0x70], %f22
	AES_DROUND01(14, 4, 6, 0)
	ldd	[%o0 + 0x68], %f24
	AES_DROUND23(16, 0, 2, 6)
	ldd	[%o0 + 0x60], %f26
	AES_DROUND01(18, 0, 2, 4)
	ldd	[%o0 + 0x58], %f28
	AES_DROUND23(20, 4, 6, 2)
	ldd	[%o0 + 0x50], %f30
	AES_DROUND01(22, 4, 6, 0)
	ldd	[%o0 + 0x48], %f32
	AES_DROUND23(24, 0, 2, 6)
	ldd	[%o0 + 0x40], %f34
	AES_DROUND01(26, 0, 2, 4)
	ldd	[%o0 + 0x38], %f36
	AES_DROUND23(28, 4, 6, 2)
	ldd	[%o0 + 0x30], %f38
	AES_DROUND01(30, 4, 6, 0)
	ldd	[%o0 + 0x28], %f40
	AES_DROUND23(32, 0, 2, 6)
	ldd	[%o0 + 0x20], %f42
	AES_DROUND01(34, 0, 2, 4)
	ldd	[%o0 + 0x18], %f44
	AES_DROUND23(36, 4, 6, 2)
	ldd	[%o0 + 0x10], %f46
	AES_DROUND01(38, 4, 6, 0)
	ldd	[%o0 + 0x08], %f48
	AES_DROUND23(40, 0, 2, 6)
	ldd	[%o0 + 0x00], %f50
	AES_DROUND01(42, 0, 2, 4)
	AES_DROUND23(44, 4, 6, 2)
	AES_DROUND01(46, 4, 6, 0)
	AES_DROUND23_L(48, 0, 2, 6)
	AES_DROUND01_L(50, 0, 2, 4)
	st	%f4, [%o2 + 0x00]
	st	%f5, [%o2 + 0x04]
	st	%f6, [%o2 + 0x08]
	st	%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_256)

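/* The aes_sparc64_load_{encrypt,decrypt}_keys_* helpers preload the round
 * keys into %f8 onwards for the bulk ECB/CBC/CTR loops that follow.  The
 * encrypt variants skip the first 16 bytes and the decrypt variants skip
 * the last 16 bytes, since the bulk loops keep that round key in %g1/%g2
 * and apply it with integer xor; the decrypt variants also load in reverse
 * order so both directions can walk the registers the same way.  Note they
 * call VISEntry but not VISExit, leaving the keys live in the FPU for the
 * bulk routine.
 */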
	.align	32
ENTRY(aes_sparc64_load_encrypt_keys_128)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0x10], %f8
	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	ldd	[%o0 + 0x30], %f16
	ldd	[%o0 + 0x38], %f18
	ldd	[%o0 + 0x40], %f20
	ldd	[%o0 + 0x48], %f22
	ldd	[%o0 + 0x50], %f24
	ldd	[%o0 + 0x58], %f26
	ldd	[%o0 + 0x60], %f28
	ldd	[%o0 + 0x68], %f30
	ldd	[%o0 + 0x70], %f32
	ldd	[%o0 + 0x78], %f34
	ldd	[%o0 + 0x80], %f36
	ldd	[%o0 + 0x88], %f38
	ldd	[%o0 + 0x90], %f40
	ldd	[%o0 + 0x98], %f42
	ldd	[%o0 + 0xa0], %f44
	retl
	 ldd	[%o0 + 0xa8], %f46
ENDPROC(aes_sparc64_load_encrypt_keys_128)

	.align	32
ENTRY(aes_sparc64_load_encrypt_keys_192)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0x10], %f8
	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	ldd	[%o0 + 0x30], %f16
	ldd	[%o0 + 0x38], %f18
	ldd	[%o0 + 0x40], %f20
	ldd	[%o0 + 0x48], %f22
	ldd	[%o0 + 0x50], %f24
	ldd	[%o0 + 0x58], %f26
	ldd	[%o0 + 0x60], %f28
	ldd	[%o0 + 0x68], %f30
	ldd	[%o0 + 0x70], %f32
	ldd	[%o0 + 0x78], %f34
	ldd	[%o0 + 0x80], %f36
	ldd	[%o0 + 0x88], %f38
	ldd	[%o0 + 0x90], %f40
	ldd	[%o0 + 0x98], %f42
	ldd	[%o0 + 0xa0], %f44
	ldd	[%o0 + 0xa8], %f46
	ldd	[%o0 + 0xb0], %f48
	ldd	[%o0 + 0xb8], %f50
	ldd	[%o0 + 0xc0], %f52
	retl
	 ldd	[%o0 + 0xc8], %f54
ENDPROC(aes_sparc64_load_encrypt_keys_192)

	.align	32
ENTRY(aes_sparc64_load_encrypt_keys_256)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0x10], %f8
	ldd	[%o0 + 0x18], %f10
	ldd	[%o0 + 0x20], %f12
	ldd	[%o0 + 0x28], %f14
	ldd	[%o0 + 0x30], %f16
	ldd	[%o0 + 0x38], %f18
	ldd	[%o0 + 0x40], %f20
	ldd	[%o0 + 0x48], %f22
	ldd	[%o0 + 0x50], %f24
	ldd	[%o0 + 0x58], %f26
	ldd	[%o0 + 0x60], %f28
	ldd	[%o0 + 0x68], %f30
	ldd	[%o0 + 0x70], %f32
	ldd	[%o0 + 0x78], %f34
	ldd	[%o0 + 0x80], %f36
	ldd	[%o0 + 0x88], %f38
	ldd	[%o0 + 0x90], %f40
	ldd	[%o0 + 0x98], %f42
	ldd	[%o0 + 0xa0], %f44
	ldd	[%o0 + 0xa8], %f46
	ldd	[%o0 + 0xb0], %f48
	ldd	[%o0 + 0xb8], %f50
	ldd	[%o0 + 0xc0], %f52
	ldd	[%o0 + 0xc8], %f54
	ldd	[%o0 + 0xd0], %f56
	ldd	[%o0 + 0xd8], %f58
	ldd	[%o0 + 0xe0], %f60
	retl
	 ldd	[%o0 + 0xe8], %f62
ENDPROC(aes_sparc64_load_encrypt_keys_256)

	.align	32
ENTRY(aes_sparc64_load_decrypt_keys_128)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0x98], %f8
	ldd	[%o0 + 0x90], %f10
	ldd	[%o0 + 0x88], %f12
	ldd	[%o0 + 0x80], %f14
	ldd	[%o0 + 0x78], %f16
	ldd	[%o0 + 0x70], %f18
	ldd	[%o0 + 0x68], %f20
	ldd	[%o0 + 0x60], %f22
	ldd	[%o0 + 0x58], %f24
	ldd	[%o0 + 0x50], %f26
	ldd	[%o0 + 0x48], %f28
	ldd	[%o0 + 0x40], %f30
	ldd	[%o0 + 0x38], %f32
	ldd	[%o0 + 0x30], %f34
	ldd	[%o0 + 0x28], %f36
	ldd	[%o0 + 0x20], %f38
	ldd	[%o0 + 0x18], %f40
	ldd	[%o0 + 0x10], %f42
	ldd	[%o0 + 0x08], %f44
	retl
	 ldd	[%o0 + 0x00], %f46
ENDPROC(aes_sparc64_load_decrypt_keys_128)

	.align	32
ENTRY(aes_sparc64_load_decrypt_keys_192)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0xb8], %f8
	ldd	[%o0 + 0xb0], %f10
	ldd	[%o0 + 0xa8], %f12
	ldd	[%o0 + 0xa0], %f14
	ldd	[%o0 + 0x98], %f16
	ldd	[%o0 + 0x90], %f18
	ldd	[%o0 + 0x88], %f20
	ldd	[%o0 + 0x80], %f22
	ldd	[%o0 + 0x78], %f24
	ldd	[%o0 + 0x70], %f26
	ldd	[%o0 + 0x68], %f28
	ldd	[%o0 + 0x60], %f30
	ldd	[%o0 + 0x58], %f32
	ldd	[%o0 + 0x50], %f34
	ldd	[%o0 + 0x48], %f36
	ldd	[%o0 + 0x40], %f38
	ldd	[%o0 + 0x38], %f40
	ldd	[%o0 + 0x30], %f42
	ldd	[%o0 + 0x28], %f44
	ldd	[%o0 + 0x20], %f46
	ldd	[%o0 + 0x18], %f48
	ldd	[%o0 + 0x10], %f50
	ldd	[%o0 + 0x08], %f52
	retl
	 ldd	[%o0 + 0x00], %f54
ENDPROC(aes_sparc64_load_decrypt_keys_192)

	.align	32
ENTRY(aes_sparc64_load_decrypt_keys_256)
	/* %o0=key */
	VISEntry
	ldd	[%o0 + 0xd8], %f8
	ldd	[%o0 + 0xd0], %f10
	ldd	[%o0 + 0xc8], %f12
	ldd	[%o0 + 0xc0], %f14
	ldd	[%o0 + 0xb8], %f16
	ldd	[%o0 + 0xb0], %f18
	ldd	[%o0 + 0xa8], %f20
	ldd	[%o0 + 0xa0], %f22
	ldd	[%o0 + 0x98], %f24
	ldd	[%o0 + 0x90], %f26
	ldd	[%o0 + 0x88], %f28
	ldd	[%o0 + 0x80], %f30
	ldd	[%o0 + 0x78], %f32
	ldd	[%o0 + 0x70], %f34
	ldd	[%o0 + 0x68], %f36
	ldd	[%o0 + 0x60], %f38
	ldd	[%o0 + 0x58], %f40
	ldd	[%o0 + 0x50], %f42
	ldd	[%o0 + 0x48], %f44
	ldd	[%o0 + 0x40], %f46
	ldd	[%o0 + 0x38], %f48
	ldd	[%o0 + 0x30], %f50
	ldd	[%o0 + 0x28], %f52
	ldd	[%o0 + 0x20], %f54
	ldd	[%o0 + 0x18], %f56
	ldd	[%o0 + 0x10], %f58
	ldd	[%o0 + 0x08], %f60
	retl
	 ldd	[%o0 + 0x00], %f62
ENDPROC(aes_sparc64_load_decrypt_keys_256)

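/* The bulk ECB routines expect the round keys to have been preloaded by
 * the matching load_*_keys call and assume len is a non-zero multiple of
 * 16 bytes.  The encrypt versions keep the first round key in %g1/%g2 and
 * process 32 bytes per iteration with the interleaved *_2 macros, falling
 * back to the single-block tail at 10: for a trailing 16-byte block; the
 * decrypt versions take %o0 pointing just past the schedule, keep the last
 * round key in %g1/%g2, and work one block at a time.
 */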
	.align	32
ENTRY(aes_sparc64_ecb_encrypt_128)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx	[%o0 + 0x00], %g1
	subcc	%o3, 0x10, %o3
	be	10f
	 ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	ldx	[%o1 + 0x10], %o4
	ldx	[%o1 + 0x18], %o5
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor	%g1, %o4, %g3
	xor	%g2, %o5, %g7
	MOVXTOD_G3_F60
	MOVXTOD_G7_F62
	ENCRYPT_128_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	std	%f60, [%o2 + 0x10]
	std	%f62, [%o2 + 0x18]
	sub	%o3, 0x20, %o3
	add	%o1, 0x20, %o1
	brgz	%o3, 1b
	 add	%o2, 0x20, %o2
	brlz,pt	%o3, 11f
	 nop
10:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_128(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_128)

	.align	32
ENTRY(aes_sparc64_ecb_encrypt_192)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx	[%o0 + 0x00], %g1
	subcc	%o3, 0x10, %o3
	be	10f
	 ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	ldx	[%o1 + 0x10], %o4
	ldx	[%o1 + 0x18], %o5
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor	%g1, %o4, %g3
	xor	%g2, %o5, %g7
	MOVXTOD_G3_F60
	MOVXTOD_G7_F62
	ENCRYPT_192_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	std	%f60, [%o2 + 0x10]
	std	%f62, [%o2 + 0x18]
	sub	%o3, 0x20, %o3
	add	%o1, 0x20, %o1
	brgz	%o3, 1b
	 add	%o2, 0x20, %o2
	brlz,pt	%o3, 11f
	 nop
10:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_192(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_192)

	.align	32
ENTRY(aes_sparc64_ecb_encrypt_256)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx	[%o0 + 0x00], %g1
	subcc	%o3, 0x10, %o3
	be	10f
	 ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	ldx	[%o1 + 0x10], %o4
	ldx	[%o1 + 0x18], %o5
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor	%g1, %o4, %g3
	xor	%g2, %o5, %g7
	MOVXTOD_G3_F0
	MOVXTOD_G7_F2
	ENCRYPT_256_2(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	std	%f0, [%o2 + 0x10]
	std	%f2, [%o2 + 0x18]
	sub	%o3, 0x20, %o3
	add	%o1, 0x20, %o1
	brgz	%o3, 1b
	 add	%o2, 0x20, %o2
	brlz,pt	%o3, 11f
	 nop
10:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_256(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_256)

	.align	32
ENTRY(aes_sparc64_ecb_decrypt_128)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_128(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	retl
	 nop
ENDPROC(aes_sparc64_ecb_decrypt_128)

	.align	32
ENTRY(aes_sparc64_ecb_decrypt_192)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_192(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	retl
	 nop
ENDPROC(aes_sparc64_ecb_decrypt_192)

	.align	32
ENTRY(aes_sparc64_ecb_decrypt_256)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_256(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	retl
	 nop
ENDPROC(aes_sparc64_ecb_decrypt_256)

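/* CBC encryption is inherently serial, so the cbc_encrypt_* loops keep the
 * running IV in %f4/%f6, xor each plaintext block into it, encrypt, and
 * finally store the last ciphertext block back to the IV buffer at %o4.
 * The cbc_decrypt_* loops keep the previous ciphertext block in %o0/%o5
 * (reusing the key pointer register, which is not needed once %g1/%g2 hold
 * the last round key) and xor it into the decrypted block before storing.
 */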
	.align	32
ENTRY(aes_sparc64_cbc_encrypt_128)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldd	[%o4 + 0x00], %f4
	ldd	[%o4 + 0x08], %f6
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F0
	MOVXTOD_G7_F2
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	ENCRYPT_128(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	std	%f4, [%o4 + 0x00]
	std	%f6, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_encrypt_128)

	.align	32
ENTRY(aes_sparc64_cbc_encrypt_192)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldd	[%o4 + 0x00], %f4
	ldd	[%o4 + 0x08], %f6
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F0
	MOVXTOD_G7_F2
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	ENCRYPT_192(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	std	%f4, [%o4 + 0x00]
	std	%f6, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_encrypt_192)

	.align	32
ENTRY(aes_sparc64_cbc_encrypt_256)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldd	[%o4 + 0x00], %f4
	ldd	[%o4 + 0x08], %f6
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F0
	MOVXTOD_G7_F2
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	ENCRYPT_256(8, 4, 6, 0, 2)
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	std	%f4, [%o4 + 0x00]
	std	%f6, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_encrypt_256)

	.align	32
ENTRY(aes_sparc64_cbc_decrypt_128)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
	ldx	[%o4 + 0x00], %o0
	ldx	[%o4 + 0x08], %o5
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_128(8, 4, 6, 0, 2)
	MOVXTOD_O0_F0
	MOVXTOD_O5_F2
	xor	%g1, %g3, %o0
	xor	%g2, %g7, %o5
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%o0, [%o4 + 0x00]
	stx	%o5, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_decrypt_128)

	.align	32
ENTRY(aes_sparc64_cbc_decrypt_192)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
	ldx	[%o4 + 0x00], %o0
	ldx	[%o4 + 0x08], %o5
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_192(8, 4, 6, 0, 2)
	MOVXTOD_O0_F0
	MOVXTOD_O5_F2
	xor	%g1, %g3, %o0
	xor	%g2, %g7, %o5
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%o0, [%o4 + 0x00]
	stx	%o5, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_decrypt_192)

	.align	32
ENTRY(aes_sparc64_cbc_decrypt_256)
	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
	ldx	[%o0 - 0x10], %g1
	ldx	[%o0 - 0x08], %g2
	ldx	[%o4 + 0x00], %o0
	ldx	[%o4 + 0x08], %o5
1:	ldx	[%o1 + 0x00], %g3
	ldx	[%o1 + 0x08], %g7
	add	%o1, 0x10, %o1
	xor	%g1, %g3, %g3
	xor	%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	DECRYPT_256(8, 4, 6, 0, 2)
	MOVXTOD_O0_F0
	MOVXTOD_O5_F2
	xor	%g1, %g3, %o0
	xor	%g2, %g7, %o5
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%o0, [%o4 + 0x00]
	stx	%o5, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_cbc_decrypt_256)

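/* The CTR routines treat the IV as a 128-bit big-endian counter in %g3
 * (high word) and %g7 (low word): the low word is incremented every block
 * and movrz propagates the carry into the high word when it wraps to zero.
 * Each counter value is encrypted and xored with the input to produce the
 * output, and the updated counter is written back to the IV buffer.  Like
 * the other bulk routines, these handle whole 16-byte blocks only.
 */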
	.align	32
ENTRY(aes_sparc64_ctr_crypt_128)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldx	[%o4 + 0x00], %g3
	ldx	[%o4 + 0x08], %g7
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	xor	%g1, %g3, %o5
	MOVXTOD_O5_F0
	xor	%g2, %g7, %o5
	MOVXTOD_O5_F2
	add	%g7, 1, %g7
	add	%g3, 1, %o5
	movrz	%g7, %o5, %g3
	ENCRYPT_128(8, 0, 2, 4, 6)
	ldd	[%o1 + 0x00], %f4
	ldd	[%o1 + 0x08], %f6
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	add	%o1, 0x10, %o1
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%g3, [%o4 + 0x00]
	stx	%g7, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_ctr_crypt_128)

	.align	32
ENTRY(aes_sparc64_ctr_crypt_192)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldx	[%o4 + 0x00], %g3
	ldx	[%o4 + 0x08], %g7
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	xor	%g1, %g3, %o5
	MOVXTOD_O5_F0
	xor	%g2, %g7, %o5
	MOVXTOD_O5_F2
	add	%g7, 1, %g7
	add	%g3, 1, %o5
	movrz	%g7, %o5, %g3
	ENCRYPT_192(8, 0, 2, 4, 6)
	ldd	[%o1 + 0x00], %f4
	ldd	[%o1 + 0x08], %f6
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	add	%o1, 0x10, %o1
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%g3, [%o4 + 0x00]
	stx	%g7, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_ctr_crypt_192)

	.align	32
ENTRY(aes_sparc64_ctr_crypt_256)
	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
	ldx	[%o4 + 0x00], %g3
	ldx	[%o4 + 0x08], %g7
	ldx	[%o0 + 0x00], %g1
	ldx	[%o0 + 0x08], %g2
1:	xor	%g1, %g3, %o5
	MOVXTOD_O5_F0
	xor	%g2, %g7, %o5
	MOVXTOD_O5_F2
	add	%g7, 1, %g7
	add	%g3, 1, %o5
	movrz	%g7, %o5, %g3
	ENCRYPT_256(8, 0, 2, 4, 6)
	ldd	[%o1 + 0x00], %f4
	ldd	[%o1 + 0x08], %f6
	fxor	%f4, %f0, %f4
	fxor	%f6, %f2, %f6
	std	%f4, [%o2 + 0x00]
	std	%f6, [%o2 + 0x08]
	subcc	%o3, 0x10, %o3
	add	%o1, 0x10, %o1
	bne,pt	%xcc, 1b
	 add	%o2, 0x10, %o2
	stx	%g3, [%o4 + 0x00]
	stx	%g7, [%o4 + 0x08]
	retl
	 nop
ENDPROC(aes_sparc64_ctr_crypt_256)