1#include <linux/linkage.h>
2#include <asm/visasm.h>
3
4#define F3F(x,y,z) (((x)<<30)|((y)<<19)|((z)<<5))
5
6#define FPD_ENCODE(x) (((x) >> 5) | ((x) & ~(0x20)))
7
8#define RS1(x) (FPD_ENCODE(x) << 14)
9#define RS2(x) (FPD_ENCODE(x) << 0)
10#define RS3(x) (FPD_ENCODE(x) << 9)
11#define RD(x) (FPD_ENCODE(x) << 25)
12#define IMM5(x) ((x) << 9)
13
14#define AES_EROUND01(a,b,c,d) \
15 .word (F3F(2, 0x19, 0)|RS1(a)|RS2(b)|RS3(c)|RD(d));
16#define AES_EROUND23(a,b,c,d) \
17 .word (F3F(2, 0x19, 1)|RS1(a)|RS2(b)|RS3(c)|RD(d));
18#define AES_DROUND01(a,b,c,d) \
19 .word (F3F(2, 0x19, 2)|RS1(a)|RS2(b)|RS3(c)|RD(d));
20#define AES_DROUND23(a,b,c,d) \
21 .word (F3F(2, 0x19, 3)|RS1(a)|RS2(b)|RS3(c)|RD(d));
22#define AES_EROUND01_L(a,b,c,d) \
23 .word (F3F(2, 0x19, 4)|RS1(a)|RS2(b)|RS3(c)|RD(d));
24#define AES_EROUND23_L(a,b,c,d) \
25 .word (F3F(2, 0x19, 5)|RS1(a)|RS2(b)|RS3(c)|RD(d));
26#define AES_DROUND01_L(a,b,c,d) \
27 .word (F3F(2, 0x19, 6)|RS1(a)|RS2(b)|RS3(c)|RD(d));
28#define AES_DROUND23_L(a,b,c,d) \
29 .word (F3F(2, 0x19, 7)|RS1(a)|RS2(b)|RS3(c)|RD(d));
30#define AES_KEXPAND1(a,b,c,d) \
31 .word (F3F(2, 0x19, 8)|RS1(a)|RS2(b)|IMM5(c)|RD(d));
32#define AES_KEXPAND0(a,b,c) \
33 .word (F3F(2, 0x36, 0x130)|RS1(a)|RS2(b)|RD(c));
34#define AES_KEXPAND2(a,b,c) \
35 .word (F3F(2, 0x36, 0x131)|RS1(a)|RS2(b)|RD(c));
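/*
 * The macros above hand-assemble the SPARC T4 AES opcodes as raw .word
 * values so the file builds even with assemblers that do not know the
 * mnemonics.  A minimal host-side C sketch of the same field packing
 * (helper names are illustrative, not part of this file):
 *
 *	#include <stdint.h>
 *
 *	static uint32_t f3f(uint32_t op, uint32_t op3, uint32_t opf)
 *	{
 *		return (op << 30) | (op3 << 19) | (opf << 5);
 *	}
 *
 *	static uint32_t fpd(uint32_t r)	// double-precision %fN number
 *	{
 *		return (r >> 5) | (r & ~0x20u);
 *	}
 *
 *	// First instruction emitted by ENCRYPT_128(12, 4, 6, 0, 2),
 *	// i.e. AES_EROUND01(12, 4, 6, 0):
 *	uint32_t insn = f3f(2, 0x19, 0) |
 *			(fpd(12) << 14) |	// RS1
 *			(fpd(4)  <<  0) |	// RS2
 *			(fpd(6)  <<  9) |	// RS3
 *			(fpd(0)  << 25);	// RD
 */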
36
37#define MOVXTOD_G3_F4 \
38 .word 0x89b02303;
39#define MOVXTOD_G7_F6 \
40 .word 0x8db02307;
41#define MOVXTOD_G3_F0 \
42 .word 0x81b02303;
43#define MOVXTOD_G7_F2 \
44 .word 0x85b02307;
45#define MOVXTOD_O0_F0 \
46 .word 0x81b02308;
47#define MOVXTOD_O5_F0 \
48 .word 0x81b0230d;
49#define MOVXTOD_O5_F2 \
50 .word 0x85b0230d;
51#define MOVXTOD_O5_F4 \
52 .word 0x89b0230d;
53#define MOVXTOD_O5_F6 \
54 .word 0x8db0230d;
55#define MOVXTOD_G3_F60 \
56 .word 0xbbb02303;
57#define MOVXTOD_G7_F62 \
58 .word 0xbfb02307;
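/*
 * The MOVXTOD_* words above are pre-assembled VIS3
 * "movxtod %<int reg>, %f<N>" instructions (op3 0x36, opf 0x118) in which
 * only the integer source (RS2) and FP destination (RD) vary.  Reusing the
 * illustrative C helpers sketched earlier:
 *
 *	uint32_t movxtod_g3_f4 = f3f(2, 0x36, 0x118)
 *				 | 3			// RS2 = %g3
 *				 | (fpd(4) << 25);	// RD  = %f4
 *	// == 0x89b02303, matching MOVXTOD_G3_F4
 */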
59
60#define ENCRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
61 AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
62 AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
63 AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
64 AES_EROUND23(KEY_BASE + 6, T0, T1, I1)
65
66#define ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
67 AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
68 AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
69 AES_EROUND01(KEY_BASE + 0, I2, I3, T2) \
70 AES_EROUND23(KEY_BASE + 2, I2, I3, T3) \
71 AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
72 AES_EROUND23(KEY_BASE + 6, T0, T1, I1) \
73 AES_EROUND01(KEY_BASE + 4, T2, T3, I2) \
74 AES_EROUND23(KEY_BASE + 6, T2, T3, I3)
75
76#define ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
77 AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
78 AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
79 AES_EROUND01_L(KEY_BASE + 4, T0, T1, I0) \
80 AES_EROUND23_L(KEY_BASE + 6, T0, T1, I1)
81
82#define ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
83 AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
84 AES_EROUND23(KEY_BASE + 2, I0, I1, T1) \
85 AES_EROUND01(KEY_BASE + 0, I2, I3, T2) \
86 AES_EROUND23(KEY_BASE + 2, I2, I3, T3) \
87 AES_EROUND01_L(KEY_BASE + 4, T0, T1, I0) \
88 AES_EROUND23_L(KEY_BASE + 6, T0, T1, I1) \
89 AES_EROUND01_L(KEY_BASE + 4, T2, T3, I2) \
90 AES_EROUND23_L(KEY_BASE + 6, T2, T3, I3)
91
92 /* 10 rounds */
93#define ENCRYPT_128(KEY_BASE, I0, I1, T0, T1) \
94 ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
95 ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
96 ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
97 ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
98 ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)
99
100#define ENCRYPT_128_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
101 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
102 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
103 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
104 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
105 ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3)
106
107 /* 12 rounds */
108#define ENCRYPT_192(KEY_BASE, I0, I1, T0, T1) \
109 ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
110 ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
111 ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
112 ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
113 ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
114 ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)
115
116#define ENCRYPT_192_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
117 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
118 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
119 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
120 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
121 ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3) \
122 ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 40, I0, I1, I2, I3, T0, T1, T2, T3)
123
124 /* 14 rounds */
125#define ENCRYPT_256(KEY_BASE, I0, I1, T0, T1) \
126 ENCRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
127 ENCRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
128 ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
129 ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
130 ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
131 ENCRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
132 ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)
133
134#define ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, TMP_BASE) \
135 ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, \
136 TMP_BASE + 0, TMP_BASE + 2, TMP_BASE + 4, TMP_BASE + 6)
137
138#define ENCRYPT_256_2(KEY_BASE, I0, I1, I2, I3) \
139 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, KEY_BASE + 48) \
140 ldd [%o0 + 0xd0], %f56; \
141 ldd [%o0 + 0xd8], %f58; \
142 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, KEY_BASE + 0) \
143 ldd [%o0 + 0xe0], %f60; \
144 ldd [%o0 + 0xe8], %f62; \
145 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, KEY_BASE + 0) \
146 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, KEY_BASE + 0) \
147 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, KEY_BASE + 0) \
148 ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 40, I0, I1, I2, I3, KEY_BASE + 0) \
149 AES_EROUND01(KEY_BASE + 48, I0, I1, KEY_BASE + 0) \
150 AES_EROUND23(KEY_BASE + 50, I0, I1, KEY_BASE + 2) \
151 AES_EROUND01(KEY_BASE + 48, I2, I3, KEY_BASE + 4) \
152 AES_EROUND23(KEY_BASE + 50, I2, I3, KEY_BASE + 6) \
153 AES_EROUND01_L(KEY_BASE + 52, KEY_BASE + 0, KEY_BASE + 2, I0) \
154 AES_EROUND23_L(KEY_BASE + 54, KEY_BASE + 0, KEY_BASE + 2, I1) \
155 ldd [%o0 + 0x10], %f8; \
156 ldd [%o0 + 0x18], %f10; \
157 AES_EROUND01_L(KEY_BASE + 52, KEY_BASE + 4, KEY_BASE + 6, I2) \
158 AES_EROUND23_L(KEY_BASE + 54, KEY_BASE + 4, KEY_BASE + 6, I3) \
159 ldd [%o0 + 0x20], %f12; \
160 ldd [%o0 + 0x28], %f14;
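/*
 * Round and key-schedule bookkeeping behind the offsets used throughout
 * this file (standard AES, not specific to this code): AES-128 runs 10
 * rounds, AES-192 12, AES-256 14, and the expanded schedule occupies
 * (rounds + 1) * 16 bytes, i.e. 0xb0, 0xd0 and 0xf0 bytes respectively.
 * That is why the last 128-bit round key sits at offsets 0xa0/0xa8, the
 * last 192-bit one at 0xc0/0xc8, and the last 256-bit one at 0xe0/0xe8.
 * In C:
 *
 *	static unsigned int expanded_key_bytes(unsigned int rounds)
 *	{
 *		return (rounds + 1) * 16;	// 0xb0 / 0xd0 / 0xf0
 *	}
 */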
161
162#define DECRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
163 AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
164 AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
165 AES_DROUND23(KEY_BASE + 4, T0, T1, I1) \
166 AES_DROUND01(KEY_BASE + 6, T0, T1, I0)
167
168#define DECRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
169 AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
170 AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
171 AES_DROUND23(KEY_BASE + 0, I2, I3, T3) \
172 AES_DROUND01(KEY_BASE + 2, I2, I3, T2) \
173 AES_DROUND23(KEY_BASE + 4, T0, T1, I1) \
174 AES_DROUND01(KEY_BASE + 6, T0, T1, I0) \
175 AES_DROUND23(KEY_BASE + 4, T2, T3, I3) \
176 AES_DROUND01(KEY_BASE + 6, T2, T3, I2)
177
178#define DECRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
179 AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
180 AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
181 AES_DROUND23_L(KEY_BASE + 4, T0, T1, I1) \
182 AES_DROUND01_L(KEY_BASE + 6, T0, T1, I0)
183
184#define DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
185 AES_DROUND23(KEY_BASE + 0, I0, I1, T1) \
186 AES_DROUND01(KEY_BASE + 2, I0, I1, T0) \
187 AES_DROUND23(KEY_BASE + 0, I2, I3, T3) \
188 AES_DROUND01(KEY_BASE + 2, I2, I3, T2) \
189 AES_DROUND23_L(KEY_BASE + 4, T0, T1, I1) \
190 AES_DROUND01_L(KEY_BASE + 6, T0, T1, I0) \
191 AES_DROUND23_L(KEY_BASE + 4, T2, T3, I3) \
192 AES_DROUND01_L(KEY_BASE + 6, T2, T3, I2)
193
194 /* 10 rounds */
195#define DECRYPT_128(KEY_BASE, I0, I1, T0, T1) \
196 DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
197 DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
198 DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
199 DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
200 DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)
201
202#define DECRYPT_128_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
203 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
204 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
205 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
206 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
207 DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3)
208
209 /* 12 rounds */
210#define DECRYPT_192(KEY_BASE, I0, I1, T0, T1) \
211 DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
212 DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
213 DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
214 DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
215 DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
216 DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)
217
218#define DECRYPT_192_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
219 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, T0, T1, T2, T3) \
220 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, T0, T1, T2, T3) \
221 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
222 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
223 DECRYPT_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3) \
224 DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 40, I0, I1, I2, I3, T0, T1, T2, T3)
225
226 /* 14 rounds */
227#define DECRYPT_256(KEY_BASE, I0, I1, T0, T1) \
228 DECRYPT_TWO_ROUNDS(KEY_BASE + 0, I0, I1, T0, T1) \
229 DECRYPT_TWO_ROUNDS(KEY_BASE + 8, I0, I1, T0, T1) \
230 DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
231 DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
232 DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
233 DECRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
234 DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)
235
236#define DECRYPT_256_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, TMP_BASE) \
237 DECRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, \
238 TMP_BASE + 0, TMP_BASE + 2, TMP_BASE + 4, TMP_BASE + 6)
239
240#define DECRYPT_256_2(KEY_BASE, I0, I1, I2, I3) \
241 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 0, I0, I1, I2, I3, KEY_BASE + 48) \
242 ldd [%o0 + 0x18], %f56; \
243 ldd [%o0 + 0x10], %f58; \
244 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 8, I0, I1, I2, I3, KEY_BASE + 0) \
245 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, KEY_BASE + 0) \
246 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, KEY_BASE + 0) \
247 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, KEY_BASE + 0) \
248 DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 40, I0, I1, I2, I3, KEY_BASE + 0) \
249 AES_DROUND23(KEY_BASE + 48, I0, I1, KEY_BASE + 2) \
250 AES_DROUND01(KEY_BASE + 50, I0, I1, KEY_BASE + 0) \
251 AES_DROUND23(KEY_BASE + 48, I2, I3, KEY_BASE + 6) \
252 AES_DROUND01(KEY_BASE + 50, I2, I3, KEY_BASE + 4) \
253 AES_DROUND23_L(KEY_BASE + 52, KEY_BASE + 0, KEY_BASE + 2, I1) \
254 AES_DROUND01_L(KEY_BASE + 54, KEY_BASE + 0, KEY_BASE + 2, I0) \
255 ldd [%o0 + 0xd8], %f8; \
256 ldd [%o0 + 0xd0], %f10; \
257 AES_DROUND23_L(KEY_BASE + 52, KEY_BASE + 4, KEY_BASE + 6, I3) \
258 AES_DROUND01_L(KEY_BASE + 54, KEY_BASE + 4, KEY_BASE + 6, I2) \
259 ldd [%o0 + 0xc8], %f12; \
260 ldd [%o0 + 0xc0], %f14;
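/*
 * Decryption walks the schedule backwards: the DECRYPT_* entry points
 * below receive &key[key_len] and index with negative offsets, and the
 * inverse rounds issue DROUND23 before DROUND01.  A C sketch of the
 * pointer setup (names are not from this file):
 *
 *	const uint64_t *end = (const void *)(key_bytes + expanded_len);
 *	uint64_t k_hi = end[-2];	// ldx [%o0 - 0x10], %g1
 *	uint64_t k_lo = end[-1];	// ldx [%o0 - 0x08], %g2
 */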
261
262 .align 32
263ENTRY(aes_sparc64_key_expand)
264 /* %o0=input_key, %o1=output_key, %o2=key_len */
265 VISEntry
266 ld [%o0 + 0x00], %f0
267 ld [%o0 + 0x04], %f1
268 ld [%o0 + 0x08], %f2
269 ld [%o0 + 0x0c], %f3
270
271 std %f0, [%o1 + 0x00]
272 std %f2, [%o1 + 0x08]
273 add %o1, 0x10, %o1
274
275 cmp %o2, 24
276 bl 2f
277 nop
278
279 be 1f
280 nop
281
282 /* 256-bit key expansion */
283 ld [%o0 + 0x10], %f4
284 ld [%o0 + 0x14], %f5
285 ld [%o0 + 0x18], %f6
286 ld [%o0 + 0x1c], %f7
287
288 std %f4, [%o1 + 0x00]
289 std %f6, [%o1 + 0x08]
290 add %o1, 0x10, %o1
291
292 AES_KEXPAND1(0, 6, 0x0, 8)
293 AES_KEXPAND2(2, 8, 10)
294 AES_KEXPAND0(4, 10, 12)
295 AES_KEXPAND2(6, 12, 14)
296 AES_KEXPAND1(8, 14, 0x1, 16)
297 AES_KEXPAND2(10, 16, 18)
298 AES_KEXPAND0(12, 18, 20)
299 AES_KEXPAND2(14, 20, 22)
300 AES_KEXPAND1(16, 22, 0x2, 24)
301 AES_KEXPAND2(18, 24, 26)
302 AES_KEXPAND0(20, 26, 28)
303 AES_KEXPAND2(22, 28, 30)
304 AES_KEXPAND1(24, 30, 0x3, 32)
305 AES_KEXPAND2(26, 32, 34)
306 AES_KEXPAND0(28, 34, 36)
307 AES_KEXPAND2(30, 36, 38)
308 AES_KEXPAND1(32, 38, 0x4, 40)
309 AES_KEXPAND2(34, 40, 42)
310 AES_KEXPAND0(36, 42, 44)
311 AES_KEXPAND2(38, 44, 46)
312 AES_KEXPAND1(40, 46, 0x5, 48)
313 AES_KEXPAND2(42, 48, 50)
314 AES_KEXPAND0(44, 50, 52)
315 AES_KEXPAND2(46, 52, 54)
316 AES_KEXPAND1(48, 54, 0x6, 56)
317 AES_KEXPAND2(50, 56, 58)
318
319 std %f8, [%o1 + 0x00]
320 std %f10, [%o1 + 0x08]
321 std %f12, [%o1 + 0x10]
322 std %f14, [%o1 + 0x18]
323 std %f16, [%o1 + 0x20]
324 std %f18, [%o1 + 0x28]
325 std %f20, [%o1 + 0x30]
326 std %f22, [%o1 + 0x38]
327 std %f24, [%o1 + 0x40]
328 std %f26, [%o1 + 0x48]
329 std %f28, [%o1 + 0x50]
330 std %f30, [%o1 + 0x58]
331 std %f32, [%o1 + 0x60]
332 std %f34, [%o1 + 0x68]
333 std %f36, [%o1 + 0x70]
334 std %f38, [%o1 + 0x78]
335 std %f40, [%o1 + 0x80]
336 std %f42, [%o1 + 0x88]
337 std %f44, [%o1 + 0x90]
338 std %f46, [%o1 + 0x98]
339 std %f48, [%o1 + 0xa0]
340 std %f50, [%o1 + 0xa8]
341 std %f52, [%o1 + 0xb0]
342 std %f54, [%o1 + 0xb8]
343 std %f56, [%o1 + 0xc0]
344 ba,pt %xcc, 80f
345 std %f58, [%o1 + 0xc8]
346
3471:
348 /* 192-bit key expansion */
349 ld [%o0 + 0x10], %f4
350 ld [%o0 + 0x14], %f5
351
352 std %f4, [%o1 + 0x00]
353 add %o1, 0x08, %o1
354
355 AES_KEXPAND1(0, 4, 0x0, 6)
356 AES_KEXPAND2(2, 6, 8)
357 AES_KEXPAND2(4, 8, 10)
358 AES_KEXPAND1(6, 10, 0x1, 12)
359 AES_KEXPAND2(8, 12, 14)
360 AES_KEXPAND2(10, 14, 16)
361 AES_KEXPAND1(12, 16, 0x2, 18)
362 AES_KEXPAND2(14, 18, 20)
363 AES_KEXPAND2(16, 20, 22)
364 AES_KEXPAND1(18, 22, 0x3, 24)
365 AES_KEXPAND2(20, 24, 26)
366 AES_KEXPAND2(22, 26, 28)
367 AES_KEXPAND1(24, 28, 0x4, 30)
368 AES_KEXPAND2(26, 30, 32)
369 AES_KEXPAND2(28, 32, 34)
370 AES_KEXPAND1(30, 34, 0x5, 36)
371 AES_KEXPAND2(32, 36, 38)
372 AES_KEXPAND2(34, 38, 40)
373 AES_KEXPAND1(36, 40, 0x6, 42)
374 AES_KEXPAND2(38, 42, 44)
375 AES_KEXPAND2(40, 44, 46)
376 AES_KEXPAND1(42, 46, 0x7, 48)
377 AES_KEXPAND2(44, 48, 50)
378
379 std %f6, [%o1 + 0x00]
380 std %f8, [%o1 + 0x08]
381 std %f10, [%o1 + 0x10]
382 std %f12, [%o1 + 0x18]
383 std %f14, [%o1 + 0x20]
384 std %f16, [%o1 + 0x28]
385 std %f18, [%o1 + 0x30]
386 std %f20, [%o1 + 0x38]
387 std %f22, [%o1 + 0x40]
388 std %f24, [%o1 + 0x48]
389 std %f26, [%o1 + 0x50]
390 std %f28, [%o1 + 0x58]
391 std %f30, [%o1 + 0x60]
392 std %f32, [%o1 + 0x68]
393 std %f34, [%o1 + 0x70]
394 std %f36, [%o1 + 0x78]
395 std %f38, [%o1 + 0x80]
396 std %f40, [%o1 + 0x88]
397 std %f42, [%o1 + 0x90]
398 std %f44, [%o1 + 0x98]
399 std %f46, [%o1 + 0xa0]
400 std %f48, [%o1 + 0xa8]
401 ba,pt %xcc, 80f
402 std %f50, [%o1 + 0xb0]
403
4042:
405 /* 128-bit key expansion */
406 AES_KEXPAND1(0, 2, 0x0, 4)
407 AES_KEXPAND2(2, 4, 6)
408 AES_KEXPAND1(4, 6, 0x1, 8)
409 AES_KEXPAND2(6, 8, 10)
410 AES_KEXPAND1(8, 10, 0x2, 12)
411 AES_KEXPAND2(10, 12, 14)
412 AES_KEXPAND1(12, 14, 0x3, 16)
413 AES_KEXPAND2(14, 16, 18)
414 AES_KEXPAND1(16, 18, 0x4, 20)
415 AES_KEXPAND2(18, 20, 22)
416 AES_KEXPAND1(20, 22, 0x5, 24)
417 AES_KEXPAND2(22, 24, 26)
418 AES_KEXPAND1(24, 26, 0x6, 28)
419 AES_KEXPAND2(26, 28, 30)
420 AES_KEXPAND1(28, 30, 0x7, 32)
421 AES_KEXPAND2(30, 32, 34)
422 AES_KEXPAND1(32, 34, 0x8, 36)
423 AES_KEXPAND2(34, 36, 38)
424 AES_KEXPAND1(36, 38, 0x9, 40)
425 AES_KEXPAND2(38, 40, 42)
426
427 std %f4, [%o1 + 0x00]
428 std %f6, [%o1 + 0x08]
429 std %f8, [%o1 + 0x10]
430 std %f10, [%o1 + 0x18]
431 std %f12, [%o1 + 0x20]
432 std %f14, [%o1 + 0x28]
433 std %f16, [%o1 + 0x30]
434 std %f18, [%o1 + 0x38]
435 std %f20, [%o1 + 0x40]
436 std %f22, [%o1 + 0x48]
437 std %f24, [%o1 + 0x50]
438 std %f26, [%o1 + 0x58]
439 std %f28, [%o1 + 0x60]
440 std %f30, [%o1 + 0x68]
441 std %f32, [%o1 + 0x70]
442 std %f34, [%o1 + 0x78]
443 std %f36, [%o1 + 0x80]
444 std %f38, [%o1 + 0x88]
445 std %f40, [%o1 + 0x90]
446 std %f42, [%o1 + 0x98]
44780:
448 retl
449 VISExit
450ENDPROC(aes_sparc64_key_expand)
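/*
 * The key-size dispatch above compares %o2 (key length in bytes) against
 * 24: "bl" takes the 128-bit path at 2:, "be" the 192-bit path at 1:, and
 * the fall-through expands a 256-bit key.  The same dispatch as a C sketch
 * (function names are illustrative):
 *
 *	if (key_len < 24)
 *		expand_key_128(in_key, out_key);
 *	else if (key_len == 24)
 *		expand_key_192(in_key, out_key);
 *	else
 *		expand_key_256(in_key, out_key);
 */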
451
452 .align 32
453ENTRY(aes_sparc64_encrypt_128)
454 /* %o0=key, %o1=input, %o2=output */
455 VISEntry
456 ld [%o1 + 0x00], %f4
457 ld [%o1 + 0x04], %f5
458 ld [%o1 + 0x08], %f6
459 ld [%o1 + 0x0c], %f7
460 ldd [%o0 + 0x00], %f8
461 ldd [%o0 + 0x08], %f10
462 ldd [%o0 + 0x10], %f12
463 ldd [%o0 + 0x18], %f14
464 ldd [%o0 + 0x20], %f16
465 ldd [%o0 + 0x28], %f18
466 ldd [%o0 + 0x30], %f20
467 ldd [%o0 + 0x38], %f22
468 ldd [%o0 + 0x40], %f24
469 ldd [%o0 + 0x48], %f26
470 ldd [%o0 + 0x50], %f28
471 ldd [%o0 + 0x58], %f30
472 ldd [%o0 + 0x60], %f32
473 ldd [%o0 + 0x68], %f34
474 ldd [%o0 + 0x70], %f36
475 ldd [%o0 + 0x78], %f38
476 ldd [%o0 + 0x80], %f40
477 ldd [%o0 + 0x88], %f42
478 ldd [%o0 + 0x90], %f44
479 ldd [%o0 + 0x98], %f46
480 ldd [%o0 + 0xa0], %f48
481 ldd [%o0 + 0xa8], %f50
482 fxor %f8, %f4, %f4
483 fxor %f10, %f6, %f6
484 ENCRYPT_128(12, 4, 6, 0, 2)
485 st %f4, [%o2 + 0x00]
486 st %f5, [%o2 + 0x04]
487 st %f6, [%o2 + 0x08]
488 st %f7, [%o2 + 0x0c]
489 retl
490 VISExit
491ENDPROC(aes_sparc64_encrypt_128)
492
493 .align 32
494ENTRY(aes_sparc64_encrypt_192)
495 /* %o0=key, %o1=input, %o2=output */
496 VISEntry
497 ld [%o1 + 0x00], %f4
498 ld [%o1 + 0x04], %f5
499 ld [%o1 + 0x08], %f6
500 ld [%o1 + 0x0c], %f7
501
502 ldd [%o0 + 0x00], %f8
503 ldd [%o0 + 0x08], %f10
504
505 fxor %f8, %f4, %f4
506 fxor %f10, %f6, %f6
507
508 ldd [%o0 + 0x10], %f8
509 ldd [%o0 + 0x18], %f10
510 ldd [%o0 + 0x20], %f12
511 ldd [%o0 + 0x28], %f14
512 add %o0, 0x20, %o0
513
514 ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)
515
516 ldd [%o0 + 0x10], %f12
517 ldd [%o0 + 0x18], %f14
518 ldd [%o0 + 0x20], %f16
519 ldd [%o0 + 0x28], %f18
520 ldd [%o0 + 0x30], %f20
521 ldd [%o0 + 0x38], %f22
522 ldd [%o0 + 0x40], %f24
523 ldd [%o0 + 0x48], %f26
524 ldd [%o0 + 0x50], %f28
525 ldd [%o0 + 0x58], %f30
526 ldd [%o0 + 0x60], %f32
527 ldd [%o0 + 0x68], %f34
528 ldd [%o0 + 0x70], %f36
529 ldd [%o0 + 0x78], %f38
530 ldd [%o0 + 0x80], %f40
531 ldd [%o0 + 0x88], %f42
532 ldd [%o0 + 0x90], %f44
533 ldd [%o0 + 0x98], %f46
534 ldd [%o0 + 0xa0], %f48
535 ldd [%o0 + 0xa8], %f50
536
537
538 ENCRYPT_128(12, 4, 6, 0, 2)
539
540 st %f4, [%o2 + 0x00]
541 st %f5, [%o2 + 0x04]
542 st %f6, [%o2 + 0x08]
543 st %f7, [%o2 + 0x0c]
544
545 retl
546 VISExit
547ENDPROC(aes_sparc64_encrypt_192)
548
549 .align 32
550ENTRY(aes_sparc64_encrypt_256)
551 /* %o0=key, %o1=input, %o2=output */
552 VISEntry
553 ld [%o1 + 0x00], %f4
554 ld [%o1 + 0x04], %f5
555 ld [%o1 + 0x08], %f6
556 ld [%o1 + 0x0c], %f7
557
558 ldd [%o0 + 0x00], %f8
559 ldd [%o0 + 0x08], %f10
560
561 fxor %f8, %f4, %f4
562 fxor %f10, %f6, %f6
563
564 ldd [%o0 + 0x10], %f8
565
566 ldd [%o0 + 0x18], %f10
567 ldd [%o0 + 0x20], %f12
568 ldd [%o0 + 0x28], %f14
569 add %o0, 0x20, %o0
570
571 ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)
572
573 ldd [%o0 + 0x10], %f8
574
575 ldd [%o0 + 0x18], %f10
576 ldd [%o0 + 0x20], %f12
577 ldd [%o0 + 0x28], %f14
578 add %o0, 0x20, %o0
579
580 ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)
581
582 ldd [%o0 + 0x10], %f12
583 ldd [%o0 + 0x18], %f14
584 ldd [%o0 + 0x20], %f16
585 ldd [%o0 + 0x28], %f18
586 ldd [%o0 + 0x30], %f20
587 ldd [%o0 + 0x38], %f22
588 ldd [%o0 + 0x40], %f24
589 ldd [%o0 + 0x48], %f26
590 ldd [%o0 + 0x50], %f28
591 ldd [%o0 + 0x58], %f30
592 ldd [%o0 + 0x60], %f32
593 ldd [%o0 + 0x68], %f34
594 ldd [%o0 + 0x70], %f36
595 ldd [%o0 + 0x78], %f38
596 ldd [%o0 + 0x80], %f40
597 ldd [%o0 + 0x88], %f42
598 ldd [%o0 + 0x90], %f44
599 ldd [%o0 + 0x98], %f46
600 ldd [%o0 + 0xa0], %f48
601 ldd [%o0 + 0xa8], %f50
602
603 ENCRYPT_128(12, 4, 6, 0, 2)
604
605 st %f4, [%o2 + 0x00]
606 st %f5, [%o2 + 0x04]
607 st %f6, [%o2 + 0x08]
608 st %f7, [%o2 + 0x0c]
609
610 retl
611 VISExit
612ENDPROC(aes_sparc64_encrypt_256)
613
614 .align 32
615ENTRY(aes_sparc64_decrypt_128)
616 /* %o0=key, %o1=input, %o2=output */
617 VISEntry
618 ld [%o1 + 0x00], %f4
619 ld [%o1 + 0x04], %f5
620 ld [%o1 + 0x08], %f6
621 ld [%o1 + 0x0c], %f7
622 ldd [%o0 + 0xa0], %f8
623 ldd [%o0 + 0xa8], %f10
624 ldd [%o0 + 0x98], %f12
625 ldd [%o0 + 0x90], %f14
626 ldd [%o0 + 0x88], %f16
627 ldd [%o0 + 0x80], %f18
628 ldd [%o0 + 0x78], %f20
629 ldd [%o0 + 0x70], %f22
630 ldd [%o0 + 0x68], %f24
631 ldd [%o0 + 0x60], %f26
632 ldd [%o0 + 0x58], %f28
633 ldd [%o0 + 0x50], %f30
634 ldd [%o0 + 0x48], %f32
635 ldd [%o0 + 0x40], %f34
636 ldd [%o0 + 0x38], %f36
637 ldd [%o0 + 0x30], %f38
638 ldd [%o0 + 0x28], %f40
639 ldd [%o0 + 0x20], %f42
640 ldd [%o0 + 0x18], %f44
641 ldd [%o0 + 0x10], %f46
642 ldd [%o0 + 0x08], %f48
643 ldd [%o0 + 0x00], %f50
644 fxor %f8, %f4, %f4
645 fxor %f10, %f6, %f6
646 DECRYPT_128(12, 4, 6, 0, 2)
647 st %f4, [%o2 + 0x00]
648 st %f5, [%o2 + 0x04]
649 st %f6, [%o2 + 0x08]
650 st %f7, [%o2 + 0x0c]
651 retl
652 VISExit
653ENDPROC(aes_sparc64_decrypt_128)
654
655 .align 32
656ENTRY(aes_sparc64_decrypt_192)
657 /* %o0=key, %o1=input, %o2=output */
658 VISEntry
659 ld [%o1 + 0x00], %f4
660 ld [%o1 + 0x04], %f5
661 ld [%o1 + 0x08], %f6
662 ld [%o1 + 0x0c], %f7
663 ldd [%o0 + 0xc0], %f8
664 ldd [%o0 + 0xc8], %f10
665 ldd [%o0 + 0xb8], %f12
666 ldd [%o0 + 0xb0], %f14
667 ldd [%o0 + 0xa8], %f16
668 ldd [%o0 + 0xa0], %f18
669 fxor %f8, %f4, %f4
670 fxor %f10, %f6, %f6
671 ldd [%o0 + 0x98], %f20
672 ldd [%o0 + 0x90], %f22
673 ldd [%o0 + 0x88], %f24
674 ldd [%o0 + 0x80], %f26
675 DECRYPT_TWO_ROUNDS(12, 4, 6, 0, 2)
676 ldd [%o0 + 0x78], %f28
677 ldd [%o0 + 0x70], %f30
678 ldd [%o0 + 0x68], %f32
679 ldd [%o0 + 0x60], %f34
680 ldd [%o0 + 0x58], %f36
681 ldd [%o0 + 0x50], %f38
682 ldd [%o0 + 0x48], %f40
683 ldd [%o0 + 0x40], %f42
684 ldd [%o0 + 0x38], %f44
685 ldd [%o0 + 0x30], %f46
686 ldd [%o0 + 0x28], %f48
687 ldd [%o0 + 0x20], %f50
688 ldd [%o0 + 0x18], %f52
689 ldd [%o0 + 0x10], %f54
690 ldd [%o0 + 0x08], %f56
691 ldd [%o0 + 0x00], %f58
692 DECRYPT_128(20, 4, 6, 0, 2)
693 st %f4, [%o2 + 0x00]
694 st %f5, [%o2 + 0x04]
695 st %f6, [%o2 + 0x08]
696 st %f7, [%o2 + 0x0c]
697 retl
698 VISExit
699ENDPROC(aes_sparc64_decrypt_192)
700
701 .align 32
702ENTRY(aes_sparc64_decrypt_256)
703 /* %o0=key, %o1=input, %o2=output */
704 VISEntry
705 ld [%o1 + 0x00], %f4
706 ld [%o1 + 0x04], %f5
707 ld [%o1 + 0x08], %f6
708 ld [%o1 + 0x0c], %f7
709 ldd [%o0 + 0xe0], %f8
710 ldd [%o0 + 0xe8], %f10
711 ldd [%o0 + 0xd8], %f12
712 ldd [%o0 + 0xd0], %f14
713 ldd [%o0 + 0xc8], %f16
714 fxor %f8, %f4, %f4
715 ldd [%o0 + 0xc0], %f18
716 fxor %f10, %f6, %f6
717 ldd [%o0 + 0xb8], %f20
718 AES_DROUND23(12, 4, 6, 2)
719 ldd [%o0 + 0xb0], %f22
720 AES_DROUND01(14, 4, 6, 0)
721 ldd [%o0 + 0xa8], %f24
722 AES_DROUND23(16, 0, 2, 6)
723 ldd [%o0 + 0xa0], %f26
724 AES_DROUND01(18, 0, 2, 4)
725 ldd [%o0 + 0x98], %f12
726 AES_DROUND23(20, 4, 6, 2)
727 ldd [%o0 + 0x90], %f14
728 AES_DROUND01(22, 4, 6, 0)
729 ldd [%o0 + 0x88], %f16
730 AES_DROUND23(24, 0, 2, 6)
731 ldd [%o0 + 0x80], %f18
732 AES_DROUND01(26, 0, 2, 4)
733 ldd [%o0 + 0x78], %f20
734 AES_DROUND23(12, 4, 6, 2)
735 ldd [%o0 + 0x70], %f22
736 AES_DROUND01(14, 4, 6, 0)
737 ldd [%o0 + 0x68], %f24
738 AES_DROUND23(16, 0, 2, 6)
739 ldd [%o0 + 0x60], %f26
740 AES_DROUND01(18, 0, 2, 4)
741 ldd [%o0 + 0x58], %f28
742 AES_DROUND23(20, 4, 6, 2)
743 ldd [%o0 + 0x50], %f30
744 AES_DROUND01(22, 4, 6, 0)
745 ldd [%o0 + 0x48], %f32
746 AES_DROUND23(24, 0, 2, 6)
747 ldd [%o0 + 0x40], %f34
748 AES_DROUND01(26, 0, 2, 4)
749 ldd [%o0 + 0x38], %f36
750 AES_DROUND23(28, 4, 6, 2)
751 ldd [%o0 + 0x30], %f38
752 AES_DROUND01(30, 4, 6, 0)
753 ldd [%o0 + 0x28], %f40
754 AES_DROUND23(32, 0, 2, 6)
755 ldd [%o0 + 0x20], %f42
756 AES_DROUND01(34, 0, 2, 4)
757 ldd [%o0 + 0x18], %f44
758 AES_DROUND23(36, 4, 6, 2)
759 ldd [%o0 + 0x10], %f46
760 AES_DROUND01(38, 4, 6, 0)
761 ldd [%o0 + 0x08], %f48
762 AES_DROUND23(40, 0, 2, 6)
763 ldd [%o0 + 0x00], %f50
764 AES_DROUND01(42, 0, 2, 4)
765 AES_DROUND23(44, 4, 6, 2)
766 AES_DROUND01(46, 4, 6, 0)
767 AES_DROUND23_L(48, 0, 2, 6)
768 AES_DROUND01_L(50, 0, 2, 4)
769 st %f4, [%o2 + 0x00]
770 st %f5, [%o2 + 0x04]
771 st %f6, [%o2 + 0x08]
772 st %f7, [%o2 + 0x0c]
773 retl
774 VISExit
775ENDPROC(aes_sparc64_decrypt_256)
776
777 .align 32
778ENTRY(aes_sparc64_load_encrypt_keys_128)
779 /* %o0=key */
780 VISEntry
781 ldd [%o0 + 0x10], %f8
782 ldd [%o0 + 0x18], %f10
783 ldd [%o0 + 0x20], %f12
784 ldd [%o0 + 0x28], %f14
785 ldd [%o0 + 0x30], %f16
786 ldd [%o0 + 0x38], %f18
787 ldd [%o0 + 0x40], %f20
788 ldd [%o0 + 0x48], %f22
789 ldd [%o0 + 0x50], %f24
790 ldd [%o0 + 0x58], %f26
791 ldd [%o0 + 0x60], %f28
792 ldd [%o0 + 0x68], %f30
793 ldd [%o0 + 0x70], %f32
794 ldd [%o0 + 0x78], %f34
795 ldd [%o0 + 0x80], %f36
796 ldd [%o0 + 0x88], %f38
797 ldd [%o0 + 0x90], %f40
798 ldd [%o0 + 0x98], %f42
799 ldd [%o0 + 0xa0], %f44
800 retl
801 ldd [%o0 + 0xa8], %f46
802ENDPROC(aes_sparc64_load_encrypt_keys_128)
803
804 .align 32
805ENTRY(aes_sparc64_load_encrypt_keys_192)
806 /* %o0=key */
807 VISEntry
808 ldd [%o0 + 0x10], %f8
809 ldd [%o0 + 0x18], %f10
810 ldd [%o0 + 0x20], %f12
811 ldd [%o0 + 0x28], %f14
812 ldd [%o0 + 0x30], %f16
813 ldd [%o0 + 0x38], %f18
814 ldd [%o0 + 0x40], %f20
815 ldd [%o0 + 0x48], %f22
816 ldd [%o0 + 0x50], %f24
817 ldd [%o0 + 0x58], %f26
818 ldd [%o0 + 0x60], %f28
819 ldd [%o0 + 0x68], %f30
820 ldd [%o0 + 0x70], %f32
821 ldd [%o0 + 0x78], %f34
822 ldd [%o0 + 0x80], %f36
823 ldd [%o0 + 0x88], %f38
824 ldd [%o0 + 0x90], %f40
825 ldd [%o0 + 0x98], %f42
826 ldd [%o0 + 0xa0], %f44
827 ldd [%o0 + 0xa8], %f46
828 ldd [%o0 + 0xb0], %f48
829 ldd [%o0 + 0xb8], %f50
830 ldd [%o0 + 0xc0], %f52
831 retl
832 ldd [%o0 + 0xc8], %f54
833ENDPROC(aes_sparc64_load_encrypt_keys_192)
834
835 .align 32
836ENTRY(aes_sparc64_load_encrypt_keys_256)
837 /* %o0=key */
838 VISEntry
839 ldd [%o0 + 0x10], %f8
840 ldd [%o0 + 0x18], %f10
841 ldd [%o0 + 0x20], %f12
842 ldd [%o0 + 0x28], %f14
843 ldd [%o0 + 0x30], %f16
844 ldd [%o0 + 0x38], %f18
845 ldd [%o0 + 0x40], %f20
846 ldd [%o0 + 0x48], %f22
847 ldd [%o0 + 0x50], %f24
848 ldd [%o0 + 0x58], %f26
849 ldd [%o0 + 0x60], %f28
850 ldd [%o0 + 0x68], %f30
851 ldd [%o0 + 0x70], %f32
852 ldd [%o0 + 0x78], %f34
853 ldd [%o0 + 0x80], %f36
854 ldd [%o0 + 0x88], %f38
855 ldd [%o0 + 0x90], %f40
856 ldd [%o0 + 0x98], %f42
857 ldd [%o0 + 0xa0], %f44
858 ldd [%o0 + 0xa8], %f46
859 ldd [%o0 + 0xb0], %f48
860 ldd [%o0 + 0xb8], %f50
861 ldd [%o0 + 0xc0], %f52
862 ldd [%o0 + 0xc8], %f54
863 ldd [%o0 + 0xd0], %f56
864 ldd [%o0 + 0xd8], %f58
865 ldd [%o0 + 0xe0], %f60
866 retl
867 ldd [%o0 + 0xe8], %f62
868ENDPROC(aes_sparc64_load_encrypt_keys_256)
869
870 .align 32
871ENTRY(aes_sparc64_load_decrypt_keys_128)
872 /* %o0=key */
873 VISEntry
874 ldd [%o0 + 0x98], %f8
875 ldd [%o0 + 0x90], %f10
876 ldd [%o0 + 0x88], %f12
877 ldd [%o0 + 0x80], %f14
878 ldd [%o0 + 0x78], %f16
879 ldd [%o0 + 0x70], %f18
880 ldd [%o0 + 0x68], %f20
881 ldd [%o0 + 0x60], %f22
882 ldd [%o0 + 0x58], %f24
883 ldd [%o0 + 0x50], %f26
884 ldd [%o0 + 0x48], %f28
885 ldd [%o0 + 0x40], %f30
886 ldd [%o0 + 0x38], %f32
887 ldd [%o0 + 0x30], %f34
888 ldd [%o0 + 0x28], %f36
889 ldd [%o0 + 0x20], %f38
890 ldd [%o0 + 0x18], %f40
891 ldd [%o0 + 0x10], %f42
892 ldd [%o0 + 0x08], %f44
893 retl
894 ldd [%o0 + 0x00], %f46
895ENDPROC(aes_sparc64_load_decrypt_keys_128)
896
897 .align 32
898ENTRY(aes_sparc64_load_decrypt_keys_192)
899 /* %o0=key */
900 VISEntry
901 ldd [%o0 + 0xb8], %f8
902 ldd [%o0 + 0xb0], %f10
903 ldd [%o0 + 0xa8], %f12
904 ldd [%o0 + 0xa0], %f14
905 ldd [%o0 + 0x98], %f16
906 ldd [%o0 + 0x90], %f18
907 ldd [%o0 + 0x88], %f20
908 ldd [%o0 + 0x80], %f22
909 ldd [%o0 + 0x78], %f24
910 ldd [%o0 + 0x70], %f26
911 ldd [%o0 + 0x68], %f28
912 ldd [%o0 + 0x60], %f30
913 ldd [%o0 + 0x58], %f32
914 ldd [%o0 + 0x50], %f34
915 ldd [%o0 + 0x48], %f36
916 ldd [%o0 + 0x40], %f38
917 ldd [%o0 + 0x38], %f40
918 ldd [%o0 + 0x30], %f42
919 ldd [%o0 + 0x28], %f44
920 ldd [%o0 + 0x20], %f46
921 ldd [%o0 + 0x18], %f48
922 ldd [%o0 + 0x10], %f50
923 ldd [%o0 + 0x08], %f52
924 retl
925 ldd [%o0 + 0x00], %f54
926ENDPROC(aes_sparc64_load_decrypt_keys_192)
927
928 .align 32
929ENTRY(aes_sparc64_load_decrypt_keys_256)
930 /* %o0=key */
931 VISEntry
932 ldd [%o0 + 0xd8], %f8
933 ldd [%o0 + 0xd0], %f10
934 ldd [%o0 + 0xc8], %f12
935 ldd [%o0 + 0xc0], %f14
936 ldd [%o0 + 0xb8], %f16
937 ldd [%o0 + 0xb0], %f18
938 ldd [%o0 + 0xa8], %f20
939 ldd [%o0 + 0xa0], %f22
940 ldd [%o0 + 0x98], %f24
941 ldd [%o0 + 0x90], %f26
942 ldd [%o0 + 0x88], %f28
943 ldd [%o0 + 0x80], %f30
944 ldd [%o0 + 0x78], %f32
945 ldd [%o0 + 0x70], %f34
946 ldd [%o0 + 0x68], %f36
947 ldd [%o0 + 0x60], %f38
948 ldd [%o0 + 0x58], %f40
949 ldd [%o0 + 0x50], %f42
950 ldd [%o0 + 0x48], %f44
951 ldd [%o0 + 0x40], %f46
952 ldd [%o0 + 0x38], %f48
953 ldd [%o0 + 0x30], %f50
954 ldd [%o0 + 0x28], %f52
955 ldd [%o0 + 0x20], %f54
956 ldd [%o0 + 0x18], %f56
957 ldd [%o0 + 0x10], %f58
958 ldd [%o0 + 0x08], %f60
959 retl
960 ldd [%o0 + 0x00], %f62
961ENDPROC(aes_sparc64_load_decrypt_keys_256)
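/*
 * The load_*_keys_* helpers above pin the whole round-key schedule in the
 * FP registers from %f8 upward (up to %f62 for 256-bit keys), so the bulk
 * ECB/CBC/CTR routines below never reload round keys per block.  Expected
 * call sequence from the C glue (a sketch; argument meanings follow the
 * register comments in this file, the glue itself lives elsewhere):
 *
 *	aes_sparc64_load_encrypt_keys_128(expanded_key);
 *	aes_sparc64_ecb_encrypt_128(expanded_key, src, dst, len);
 */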
962
963 .align 32
964ENTRY(aes_sparc64_ecb_encrypt_128)
965 /* %o0=key, %o1=input, %o2=output, %o3=len */
966 ldx [%o0 + 0x00], %g1
967 subcc %o3, 0x10, %o3
968 be 10f
969 ldx [%o0 + 0x08], %g2
9701: ldx [%o1 + 0x00], %g3
971 ldx [%o1 + 0x08], %g7
972 ldx [%o1 + 0x10], %o4
973 ldx [%o1 + 0x18], %o5
974 xor %g1, %g3, %g3
975 xor %g2, %g7, %g7
976 MOVXTOD_G3_F4
977 MOVXTOD_G7_F6
978 xor %g1, %o4, %g3
979 xor %g2, %o5, %g7
980 MOVXTOD_G3_F60
981 MOVXTOD_G7_F62
982 ENCRYPT_128_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
983 std %f4, [%o2 + 0x00]
984 std %f6, [%o2 + 0x08]
985 std %f60, [%o2 + 0x10]
986 std %f62, [%o2 + 0x18]
987 sub %o3, 0x20, %o3
988 add %o1, 0x20, %o1
989 brgz %o3, 1b
990 add %o2, 0x20, %o2
991 brlz,pt %o3, 11f
992 nop
99310: ldx [%o1 + 0x00], %g3
994 ldx [%o1 + 0x08], %g7
995 xor %g1, %g3, %g3
996 xor %g2, %g7, %g7
997 MOVXTOD_G3_F4
998 MOVXTOD_G7_F6
David S. Miller9bf48522012-08-21 03:58:13 -0700999 ENCRYPT_128(8, 4, 6, 0, 2)
David S. Miller9bf48522012-08-21 03:58:13 -07001000 std %f4, [%o2 + 0x00]
1001 std %f6, [%o2 + 0x08]
100211: retl
1003 nop
1004ENDPROC(aes_sparc64_ecb_encrypt_128)
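/*
 * Loop shape shared by the ECB (and CTR) routines: %o3 is a multiple of
 * 16; the main loop handles two 16-byte blocks per iteration with the *_2
 * macro variants, and a single trailing block falls through to label 10.
 * The control flow as a C sketch (names are illustrative):
 *
 *	long rem = len - 16;
 *	if (rem != 0) {
 *		do {
 *			crypt_two_blocks(in, out);
 *			in += 32; out += 32; rem -= 32;
 *		} while (rem > 0);
 *		if (rem < 0)
 *			goto done;	// even number of blocks
 *	}
 *	crypt_one_block(in, out);	// label 10:
 *	done: ;
 */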
1005
1006 .align 32
1007ENTRY(aes_sparc64_ecb_encrypt_192)
1008 /* %o0=key, %o1=input, %o2=output, %o3=len */
1009 ldx [%o0 + 0x00], %g1
1010 subcc %o3, 0x10, %o3
1011 be 10f
1012 ldx [%o0 + 0x08], %g2
10131: ldx [%o1 + 0x00], %g3
1014 ldx [%o1 + 0x08], %g7
1015 ldx [%o1 + 0x10], %o4
1016 ldx [%o1 + 0x18], %o5
1017 xor %g1, %g3, %g3
1018 xor %g2, %g7, %g7
1019 MOVXTOD_G3_F4
1020 MOVXTOD_G7_F6
1021 xor %g1, %o4, %g3
1022 xor %g2, %o5, %g7
1023 MOVXTOD_G3_F60
1024 MOVXTOD_G7_F62
1025 ENCRYPT_192_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
1026 std %f4, [%o2 + 0x00]
1027 std %f6, [%o2 + 0x08]
1028 std %f60, [%o2 + 0x10]
1029 std %f62, [%o2 + 0x18]
1030 sub %o3, 0x20, %o3
1031 add %o1, 0x20, %o1
1032 brgz %o3, 1b
1033 add %o2, 0x20, %o2
1034 brlz,pt %o3, 11f
1035 nop
103610: ldx [%o1 + 0x00], %g3
1037 ldx [%o1 + 0x08], %g7
1038 xor %g1, %g3, %g3
1039 xor %g2, %g7, %g7
1040 MOVXTOD_G3_F4
1041 MOVXTOD_G7_F6
1042 ENCRYPT_192(8, 4, 6, 0, 2)
1043 std %f4, [%o2 + 0x00]
1044 std %f6, [%o2 + 0x08]
104511: retl
1046 nop
1047ENDPROC(aes_sparc64_ecb_encrypt_192)
1048
1049 .align 32
1050ENTRY(aes_sparc64_ecb_encrypt_256)
1051 /* %o0=key, %o1=input, %o2=output, %o3=len */
1052 ldx [%o0 + 0x00], %g1
David S. Miller03d168a2012-08-30 07:51:32 -07001053 subcc %o3, 0x10, %o3
1054 be 10f
1055 ldx [%o0 + 0x08], %g2
10561: ldx [%o1 + 0x00], %g3
1057 ldx [%o1 + 0x08], %g7
1058 ldx [%o1 + 0x10], %o4
1059 ldx [%o1 + 0x18], %o5
1060 xor %g1, %g3, %g3
1061 xor %g2, %g7, %g7
1062 MOVXTOD_G3_F4
1063 MOVXTOD_G7_F6
1064 xor %g1, %o4, %g3
1065 xor %g2, %o5, %g7
1066 MOVXTOD_G3_F0
1067 MOVXTOD_G7_F2
1068 ENCRYPT_256_2(8, 4, 6, 0, 2)
1069 std %f4, [%o2 + 0x00]
1070 std %f6, [%o2 + 0x08]
1071 std %f0, [%o2 + 0x10]
1072 std %f2, [%o2 + 0x18]
1073 sub %o3, 0x20, %o3
1074 add %o1, 0x20, %o1
1075 brgz %o3, 1b
1076 add %o2, 0x20, %o2
1077 brlz,pt %o3, 11f
1078 nop
107910: ldx [%o1 + 0x00], %g3
1080 ldx [%o1 + 0x08], %g7
1081 xor %g1, %g3, %g3
1082 xor %g2, %g7, %g7
1083 MOVXTOD_G3_F4
1084 MOVXTOD_G7_F6
1085 ENCRYPT_256(8, 4, 6, 0, 2)
1086 std %f4, [%o2 + 0x00]
1087 std %f6, [%o2 + 0x08]
108811: retl
1089 nop
1090ENDPROC(aes_sparc64_ecb_encrypt_256)
1091
1092 .align 32
1093ENTRY(aes_sparc64_ecb_decrypt_128)
1094 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
1095 ldx [%o0 - 0x10], %g1
1096 subcc %o3, 0x10, %o3
1097 be 10f
1098 ldx [%o0 - 0x08], %g2
10991: ldx [%o1 + 0x00], %g3
1100 ldx [%o1 + 0x08], %g7
1101 ldx [%o1 + 0x10], %o4
1102 ldx [%o1 + 0x18], %o5
1103 xor %g1, %g3, %g3
1104 xor %g2, %g7, %g7
1105 MOVXTOD_G3_F4
1106 MOVXTOD_G7_F6
1107 xor %g1, %o4, %g3
1108 xor %g2, %o5, %g7
1109 MOVXTOD_G3_F60
1110 MOVXTOD_G7_F62
1111 DECRYPT_128_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
1112 std %f4, [%o2 + 0x00]
1113 std %f6, [%o2 + 0x08]
1114 std %f60, [%o2 + 0x10]
1115 std %f62, [%o2 + 0x18]
1116 sub %o3, 0x20, %o3
1117 add %o1, 0x20, %o1
1118 brgz,pt %o3, 1b
1119 add %o2, 0x20, %o2
1120 brlz,pt %o3, 11f
1121 nop
112210: ldx [%o1 + 0x00], %g3
1123 ldx [%o1 + 0x08], %g7
1124 xor %g1, %g3, %g3
1125 xor %g2, %g7, %g7
1126 MOVXTOD_G3_F4
1127 MOVXTOD_G7_F6
1128 DECRYPT_128(8, 4, 6, 0, 2)
1129 std %f4, [%o2 + 0x00]
1130 std %f6, [%o2 + 0x08]
113111: retl
1132 nop
1133ENDPROC(aes_sparc64_ecb_decrypt_128)
1134
1135 .align 32
1136ENTRY(aes_sparc64_ecb_decrypt_192)
1137 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
1138 ldx [%o0 - 0x10], %g1
1139 subcc %o3, 0x10, %o3
1140 be 10f
1141 ldx [%o0 - 0x08], %g2
11421: ldx [%o1 + 0x00], %g3
1143 ldx [%o1 + 0x08], %g7
1144 ldx [%o1 + 0x10], %o4
1145 ldx [%o1 + 0x18], %o5
1146 xor %g1, %g3, %g3
1147 xor %g2, %g7, %g7
1148 MOVXTOD_G3_F4
1149 MOVXTOD_G7_F6
1150 xor %g1, %o4, %g3
1151 xor %g2, %o5, %g7
1152 MOVXTOD_G3_F60
1153 MOVXTOD_G7_F62
1154 DECRYPT_192_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
1155 std %f4, [%o2 + 0x00]
1156 std %f6, [%o2 + 0x08]
1157 std %f60, [%o2 + 0x10]
1158 std %f62, [%o2 + 0x18]
1159 sub %o3, 0x20, %o3
1160 add %o1, 0x20, %o1
1161 brgz,pt %o3, 1b
1162 add %o2, 0x20, %o2
1163 brlz,pt %o3, 11f
1164 nop
116510: ldx [%o1 + 0x00], %g3
1166 ldx [%o1 + 0x08], %g7
1167 xor %g1, %g3, %g3
1168 xor %g2, %g7, %g7
1169 MOVXTOD_G3_F4
1170 MOVXTOD_G7_F6
1171 DECRYPT_192(8, 4, 6, 0, 2)
1172 std %f4, [%o2 + 0x00]
1173 std %f6, [%o2 + 0x08]
117411: retl
1175 nop
1176ENDPROC(aes_sparc64_ecb_decrypt_192)
1177
1178 .align 32
1179ENTRY(aes_sparc64_ecb_decrypt_256)
1180 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
1181 ldx [%o0 - 0x10], %g1
1182 subcc %o3, 0x10, %o3
1183 be 10f
1184 ldx [%o0 - 0x08], %g2
1185 sub %o0, 0xf0, %o0
11861: ldx [%o1 + 0x00], %g3
1187 ldx [%o1 + 0x08], %g7
1188 ldx [%o1 + 0x10], %o4
1189 ldx [%o1 + 0x18], %o5
1190 xor %g1, %g3, %g3
1191 xor %g2, %g7, %g7
1192 MOVXTOD_G3_F4
1193 MOVXTOD_G7_F6
1194 xor %g1, %o4, %g3
1195 xor %g2, %o5, %g7
1196 MOVXTOD_G3_F0
1197 MOVXTOD_G7_F2
1198 DECRYPT_256_2(8, 4, 6, 0, 2)
1199 std %f4, [%o2 + 0x00]
1200 std %f6, [%o2 + 0x08]
1201 std %f60, [%o2 + 0x10]
1202 std %f62, [%o2 + 0x18]
1203 sub %o3, 0x20, %o3
1204 add %o1, 0x20, %o1
1205 brgz,pt %o3, 1b
1206 add %o2, 0x20, %o2
1207 brlz,pt %o3, 11f
1208 nop
120910: ldx [%o1 + 0x00], %g3
1210 ldx [%o1 + 0x08], %g7
1211 xor %g1, %g3, %g3
1212 xor %g2, %g7, %g7
1213 MOVXTOD_G3_F4
1214 MOVXTOD_G7_F6
1215 DECRYPT_256(8, 4, 6, 0, 2)
1216 std %f4, [%o2 + 0x00]
1217 std %f6, [%o2 + 0x08]
121811: retl
1219 nop
1220ENDPROC(aes_sparc64_ecb_decrypt_256)
1221
1222 .align 32
1223ENTRY(aes_sparc64_cbc_encrypt_128)
1224 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1225 ldd [%o4 + 0x00], %f4
1226 ldd [%o4 + 0x08], %f6
1227 ldx [%o0 + 0x00], %g1
1228 ldx [%o0 + 0x08], %g2
12291: ldx [%o1 + 0x00], %g3
1230 ldx [%o1 + 0x08], %g7
1231 add %o1, 0x10, %o1
1232 xor %g1, %g3, %g3
1233 xor %g2, %g7, %g7
1234 MOVXTOD_G3_F0
1235 MOVXTOD_G7_F2
1236 fxor %f4, %f0, %f4
1237 fxor %f6, %f2, %f6
1238 ENCRYPT_128(8, 4, 6, 0, 2)
1239 std %f4, [%o2 + 0x00]
1240 std %f6, [%o2 + 0x08]
1241 subcc %o3, 0x10, %o3
1242 bne,pt %xcc, 1b
1243 add %o2, 0x10, %o2
1244 std %f4, [%o4 + 0x00]
1245 std %f6, [%o4 + 0x08]
1246 retl
1247 nop
1248ENDPROC(aes_sparc64_cbc_encrypt_128)
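/*
 * The CBC encrypt routines keep the running IV in %f4/%f6: each plaintext
 * block is XORed with round key 0 in the integer unit, moved to the FPU,
 * XORed with the previous ciphertext via fxor, run through the remaining
 * rounds, and the final ciphertext block is written back to the IV
 * buffer.  Semantically (a sketch, helper names illustrative):
 *
 *	memcpy(prev, iv, 16);
 *	for (; len; len -= 16, in += 16, out += 16) {
 *		xor_block(tmp, in, prev);
 *		aes_encrypt_block(round_keys, tmp, prev);
 *		memcpy(out, prev, 16);
 *	}
 *	memcpy(iv, prev, 16);
 */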
1249
1250 .align 32
1251ENTRY(aes_sparc64_cbc_encrypt_192)
1252 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1253 ldd [%o4 + 0x00], %f4
1254 ldd [%o4 + 0x08], %f6
1255 ldx [%o0 + 0x00], %g1
1256 ldx [%o0 + 0x08], %g2
12571: ldx [%o1 + 0x00], %g3
1258 ldx [%o1 + 0x08], %g7
1259 add %o1, 0x10, %o1
1260 xor %g1, %g3, %g3
1261 xor %g2, %g7, %g7
1262 MOVXTOD_G3_F0
1263 MOVXTOD_G7_F2
1264 fxor %f4, %f0, %f4
1265 fxor %f6, %f2, %f6
1266 ENCRYPT_192(8, 4, 6, 0, 2)
1267 std %f4, [%o2 + 0x00]
1268 std %f6, [%o2 + 0x08]
1269 subcc %o3, 0x10, %o3
1270 bne,pt %xcc, 1b
1271 add %o2, 0x10, %o2
1272 std %f4, [%o4 + 0x00]
1273 std %f6, [%o4 + 0x08]
1274 retl
1275 nop
1276ENDPROC(aes_sparc64_cbc_encrypt_192)
1277
1278 .align 32
1279ENTRY(aes_sparc64_cbc_encrypt_256)
1280 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1281 ldd [%o4 + 0x00], %f4
1282 ldd [%o4 + 0x08], %f6
1283 ldx [%o0 + 0x00], %g1
1284 ldx [%o0 + 0x08], %g2
12851: ldx [%o1 + 0x00], %g3
1286 ldx [%o1 + 0x08], %g7
1287 add %o1, 0x10, %o1
1288 xor %g1, %g3, %g3
1289 xor %g2, %g7, %g7
1290 MOVXTOD_G3_F0
1291 MOVXTOD_G7_F2
1292 fxor %f4, %f0, %f4
1293 fxor %f6, %f2, %f6
1294 ENCRYPT_256(8, 4, 6, 0, 2)
1295 std %f4, [%o2 + 0x00]
1296 std %f6, [%o2 + 0x08]
1297 subcc %o3, 0x10, %o3
1298 bne,pt %xcc, 1b
1299 add %o2, 0x10, %o2
1300 std %f4, [%o4 + 0x00]
1301 std %f6, [%o4 + 0x08]
1302 retl
1303 nop
1304ENDPROC(aes_sparc64_cbc_encrypt_256)
1305
1306 .align 32
1307ENTRY(aes_sparc64_cbc_decrypt_128)
1308 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
1309 ldx [%o0 - 0x10], %g1
1310 ldx [%o0 - 0x08], %g2
1311 ldx [%o4 + 0x00], %o0
1312 ldx [%o4 + 0x08], %o5
13131: ldx [%o1 + 0x00], %g3
1314 ldx [%o1 + 0x08], %g7
1315 add %o1, 0x10, %o1
1316 xor %g1, %g3, %g3
1317 xor %g2, %g7, %g7
1318 MOVXTOD_G3_F4
1319 MOVXTOD_G7_F6
1320 DECRYPT_128(8, 4, 6, 0, 2)
1321 MOVXTOD_O0_F0
1322 MOVXTOD_O5_F2
1323 xor %g1, %g3, %o0
1324 xor %g2, %g7, %o5
1325 fxor %f4, %f0, %f4
1326 fxor %f6, %f2, %f6
1327 std %f4, [%o2 + 0x00]
1328 std %f6, [%o2 + 0x08]
1329 subcc %o3, 0x10, %o3
1330 bne,pt %xcc, 1b
1331 add %o2, 0x10, %o2
1332 stx %o0, [%o4 + 0x00]
1333 stx %o5, [%o4 + 0x08]
1334 retl
1335 nop
1336ENDPROC(aes_sparc64_cbc_decrypt_128)
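/*
 * CBC decryption holds the previous ciphertext block as raw 64-bit words
 * in %o0/%o5 so it can be XORed into the output after the block cipher
 * runs, and the last ciphertext block is stored back as the new IV.
 * Sketch (helper names illustrative):
 *
 *	load_block(prev, iv);
 *	for (; len; len -= 16, in += 16, out += 16) {
 *		aes_decrypt_block(round_keys, in, tmp);
 *		xor_block(out, tmp, prev);
 *		copy_block(prev, in);
 *	}
 *	store_block(iv, prev);
 */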
1337
1338 .align 32
1339ENTRY(aes_sparc64_cbc_decrypt_192)
1340 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
1341 ldx [%o0 - 0x10], %g1
1342 ldx [%o0 - 0x08], %g2
1343 ldx [%o4 + 0x00], %o0
1344 ldx [%o4 + 0x08], %o5
13451: ldx [%o1 + 0x00], %g3
1346 ldx [%o1 + 0x08], %g7
1347 add %o1, 0x10, %o1
1348 xor %g1, %g3, %g3
1349 xor %g2, %g7, %g7
1350 MOVXTOD_G3_F4
1351 MOVXTOD_G7_F6
1352 DECRYPT_192(8, 4, 6, 0, 2)
1353 MOVXTOD_O0_F0
1354 MOVXTOD_O5_F2
1355 xor %g1, %g3, %o0
1356 xor %g2, %g7, %o5
1357 fxor %f4, %f0, %f4
1358 fxor %f6, %f2, %f6
1359 std %f4, [%o2 + 0x00]
1360 std %f6, [%o2 + 0x08]
1361 subcc %o3, 0x10, %o3
1362 bne,pt %xcc, 1b
1363 add %o2, 0x10, %o2
1364 stx %o0, [%o4 + 0x00]
1365 stx %o5, [%o4 + 0x08]
1366 retl
1367 nop
1368ENDPROC(aes_sparc64_cbc_decrypt_192)
1369
1370 .align 32
1371ENTRY(aes_sparc64_cbc_decrypt_256)
1372 /* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=iv */
1373 ldx [%o0 - 0x10], %g1
1374 ldx [%o0 - 0x08], %g2
1375 ldx [%o4 + 0x00], %o0
1376 ldx [%o4 + 0x08], %o5
13771: ldx [%o1 + 0x00], %g3
1378 ldx [%o1 + 0x08], %g7
1379 add %o1, 0x10, %o1
1380 xor %g1, %g3, %g3
1381 xor %g2, %g7, %g7
1382 MOVXTOD_G3_F4
1383 MOVXTOD_G7_F6
1384 DECRYPT_256(8, 4, 6, 0, 2)
1385 MOVXTOD_O0_F0
1386 MOVXTOD_O5_F2
1387 xor %g1, %g3, %o0
1388 xor %g2, %g7, %o5
1389 fxor %f4, %f0, %f4
1390 fxor %f6, %f2, %f6
1391 std %f4, [%o2 + 0x00]
1392 std %f6, [%o2 + 0x08]
1393 subcc %o3, 0x10, %o3
1394 bne,pt %xcc, 1b
1395 add %o2, 0x10, %o2
1396 stx %o0, [%o4 + 0x00]
1397 stx %o5, [%o4 + 0x08]
1398 retl
1399 nop
1400ENDPROC(aes_sparc64_cbc_decrypt_256)
1401
1402 .align 32
1403ENTRY(aes_sparc64_ctr_crypt_128)
1404 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1405 ldx [%o4 + 0x00], %g3
1406 ldx [%o4 + 0x08], %g7
1407 subcc %o3, 0x10, %o3
1408 ldx [%o0 + 0x00], %g1
1409 be 10f
1410 ldx [%o0 + 0x08], %g2
14111: xor %g1, %g3, %o5
1412 MOVXTOD_O5_F0
1413 xor %g2, %g7, %o5
1414 MOVXTOD_O5_F2
1415 add %g7, 1, %g7
1416 add %g3, 1, %o5
1417 movrz %g7, %o5, %g3
1418 xor %g1, %g3, %o5
1419 MOVXTOD_O5_F4
1420 xor %g2, %g7, %o5
1421 MOVXTOD_O5_F6
1422 add %g7, 1, %g7
1423 add %g3, 1, %o5
1424 movrz %g7, %o5, %g3
1425 ENCRYPT_128_2(8, 0, 2, 4, 6, 56, 58, 60, 62)
1426 ldd [%o1 + 0x00], %f56
1427 ldd [%o1 + 0x08], %f58
1428 ldd [%o1 + 0x10], %f60
1429 ldd [%o1 + 0x18], %f62
1430 fxor %f56, %f0, %f56
1431 fxor %f58, %f2, %f58
1432 fxor %f60, %f4, %f60
1433 fxor %f62, %f6, %f62
1434 std %f56, [%o2 + 0x00]
1435 std %f58, [%o2 + 0x08]
1436 std %f60, [%o2 + 0x10]
1437 std %f62, [%o2 + 0x18]
1438 subcc %o3, 0x20, %o3
1439 add %o1, 0x20, %o1
1440 brgz %o3, 1b
1441 add %o2, 0x20, %o2
1442 brlz,pt %o3, 11f
1443 nop
144410: xor %g1, %g3, %o5
1445 MOVXTOD_O5_F0
1446 xor %g2, %g7, %o5
1447 MOVXTOD_O5_F2
1448 add %g7, 1, %g7
1449 add %g3, 1, %o5
1450 movrz %g7, %o5, %g3
1451 ENCRYPT_128(8, 0, 2, 4, 6)
1452 ldd [%o1 + 0x00], %f4
1453 ldd [%o1 + 0x08], %f6
1454 fxor %f4, %f0, %f4
1455 fxor %f6, %f2, %f6
1456 std %f4, [%o2 + 0x00]
1457 std %f6, [%o2 + 0x08]
145811: stx %g3, [%o4 + 0x00]
1459 retl
1460 stx %g7, [%o4 + 0x08]
1461ENDPROC(aes_sparc64_ctr_crypt_128)
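/*
 * The CTR routines treat the IV as a 128-bit big-endian counter split
 * across %g3 (upper 64 bits) and %g7 (lower 64 bits).  The "add; add;
 * movrz" sequence above is a branchless increment with carry: the low
 * half is bumped, and the high half only takes its incremented value when
 * the low half wrapped to zero.  In C:
 *
 *	lo = lo + 1;
 *	hi = (lo == 0) ? hi + 1 : hi;
 */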
1462
1463 .align 32
1464ENTRY(aes_sparc64_ctr_crypt_192)
1465 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1466 ldx [%o4 + 0x00], %g3
1467 ldx [%o4 + 0x08], %g7
1468 subcc %o3, 0x10, %o3
1469 ldx [%o0 + 0x00], %g1
1470 be 10f
1471 ldx [%o0 + 0x08], %g2
14721: xor %g1, %g3, %o5
1473 MOVXTOD_O5_F0
1474 xor %g2, %g7, %o5
1475 MOVXTOD_O5_F2
1476 add %g7, 1, %g7
1477 add %g3, 1, %o5
1478 movrz %g7, %o5, %g3
1479 xor %g1, %g3, %o5
1480 MOVXTOD_O5_F4
1481 xor %g2, %g7, %o5
1482 MOVXTOD_O5_F6
1483 add %g7, 1, %g7
1484 add %g3, 1, %o5
1485 movrz %g7, %o5, %g3
1486 ENCRYPT_192_2(8, 0, 2, 4, 6, 56, 58, 60, 62)
1487 ldd [%o1 + 0x00], %f56
1488 ldd [%o1 + 0x08], %f58
1489 ldd [%o1 + 0x10], %f60
1490 ldd [%o1 + 0x18], %f62
1491 fxor %f56, %f0, %f56
1492 fxor %f58, %f2, %f58
1493 fxor %f60, %f4, %f60
1494 fxor %f62, %f6, %f62
1495 std %f56, [%o2 + 0x00]
1496 std %f58, [%o2 + 0x08]
1497 std %f60, [%o2 + 0x10]
1498 std %f62, [%o2 + 0x18]
1499 subcc %o3, 0x20, %o3
1500 add %o1, 0x20, %o1
1501 brgz %o3, 1b
1502 add %o2, 0x20, %o2
1503 brlz,pt %o3, 11f
1504 nop
150510: xor %g1, %g3, %o5
1506 MOVXTOD_O5_F0
1507 xor %g2, %g7, %o5
1508 MOVXTOD_O5_F2
1509 add %g7, 1, %g7
1510 add %g3, 1, %o5
1511 movrz %g7, %o5, %g3
1512 ENCRYPT_192(8, 0, 2, 4, 6)
1513 ldd [%o1 + 0x00], %f4
1514 ldd [%o1 + 0x08], %f6
1515 fxor %f4, %f0, %f4
1516 fxor %f6, %f2, %f6
1517 std %f4, [%o2 + 0x00]
1518 std %f6, [%o2 + 0x08]
151911: stx %g3, [%o4 + 0x00]
1520 retl
1521 stx %g7, [%o4 + 0x08]
1522ENDPROC(aes_sparc64_ctr_crypt_192)
1523
1524 .align 32
1525ENTRY(aes_sparc64_ctr_crypt_256)
1526 /* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
1527 ldx [%o4 + 0x00], %g3
1528 ldx [%o4 + 0x08], %g7
1529 subcc %o3, 0x10, %o3
1530 ldx [%o0 + 0x00], %g1
1531 be 10f
1532 ldx [%o0 + 0x08], %g2
15331: xor %g1, %g3, %o5
1534 MOVXTOD_O5_F0
1535 xor %g2, %g7, %o5
1536 MOVXTOD_O5_F2
1537 add %g7, 1, %g7
1538 add %g3, 1, %o5
1539 movrz %g7, %o5, %g3
1540 xor %g1, %g3, %o5
1541 MOVXTOD_O5_F4
1542 xor %g2, %g7, %o5
1543 MOVXTOD_O5_F6
1544 add %g7, 1, %g7
1545 add %g3, 1, %o5
1546 movrz %g7, %o5, %g3
1547 ENCRYPT_256_2(8, 0, 2, 4, 6)
1548 ldd [%o1 + 0x00], %f56
1549 ldd [%o1 + 0x08], %f58
1550 ldd [%o1 + 0x10], %f60
1551 ldd [%o1 + 0x18], %f62
1552 fxor %f56, %f0, %f56
1553 fxor %f58, %f2, %f58
1554 fxor %f60, %f4, %f60
1555 fxor %f62, %f6, %f62
1556 std %f56, [%o2 + 0x00]
1557 std %f58, [%o2 + 0x08]
1558 std %f60, [%o2 + 0x10]
1559 std %f62, [%o2 + 0x18]
1560 subcc %o3, 0x20, %o3
1561 add %o1, 0x20, %o1
1562 brgz %o3, 1b
1563 add %o2, 0x20, %o2
1564 brlz,pt %o3, 11f
1565 nop
156610: xor %g1, %g3, %o5
1567 MOVXTOD_O5_F0
1568 xor %g2, %g7, %o5
1569 MOVXTOD_O5_F2
1570 add %g7, 1, %g7
1571 add %g3, 1, %o5
1572 movrz %g7, %o5, %g3
1573 ENCRYPT_256(8, 0, 2, 4, 6)
1574 ldd [%o1 + 0x00], %f4
1575 ldd [%o1 + 0x08], %f6
1576 fxor %f4, %f0, %f4
1577 fxor %f6, %f2, %f6
1578 std %f4, [%o2 + 0x00]
1579 std %f6, [%o2 + 0x08]
158011: stx %g3, [%o4 + 0x00]
1581 retl
1582 stx %g7, [%o4 + 0x08]
1583ENDPROC(aes_sparc64_ctr_crypt_256)