| 1 | # This file is generated from a similarly-named Perl script in the BoringSSL
| 2 | # source tree. Do not edit by hand. |
| 3 | |
| 4 | #if defined(__has_feature) |
| 5 | #if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM) |
| 6 | #define OPENSSL_NO_ASM |
| 7 | #endif |
| 8 | #endif |
| 9 | |
| 10 | #if defined(__x86_64__) && !defined(OPENSSL_NO_ASM) |
| 11 | .text |
| 12 | |
| 13 | .type _aesni_ctr32_ghash_6x,@function |
| 14 | .align 32 |
| 15 | _aesni_ctr32_ghash_6x: |
| 16 | .cfi_startproc |
| 17 | vmovdqu 32(%r11),%xmm2 |
| 18 | subq $6,%rdx |
| 19 | vpxor %xmm4,%xmm4,%xmm4 |
| 20 | vmovdqu 0-128(%rcx),%xmm15 |
| 21 | vpaddb %xmm2,%xmm1,%xmm10 |
| 22 | vpaddb %xmm2,%xmm10,%xmm11 |
| 23 | vpaddb %xmm2,%xmm11,%xmm12 |
| 24 | vpaddb %xmm2,%xmm12,%xmm13 |
| 25 | vpaddb %xmm2,%xmm13,%xmm14 |
| 26 | vpxor %xmm15,%xmm1,%xmm9 |
| 27 | vmovdqu %xmm4,16+8(%rsp) |
| 28 | jmp .Loop6x |
| 29 | |
| 30 | .align 32 |
| 31 | .Loop6x: |
| 32 | addl $100663296,%ebx |
| 33 | jc .Lhandle_ctr32 |
| 34 | vmovdqu 0-32(%r9),%xmm3 |
| 35 | vpaddb %xmm2,%xmm14,%xmm1 |
| 36 | vpxor %xmm15,%xmm10,%xmm10 |
| 37 | vpxor %xmm15,%xmm11,%xmm11 |
| 38 | |
| 39 | .Lresume_ctr32: |
| 40 | vmovdqu %xmm1,(%r8) |
| 41 | vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5 |
| 42 | vpxor %xmm15,%xmm12,%xmm12 |
| 43 | vmovups 16-128(%rcx),%xmm2 |
| 44 | vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6 |
| 45 | |
| 46 | |
| 47 | |
| 48 | |
| 49 | |
| 50 | |
| 51 | |
| 52 | |
| 53 | |
| 54 | |
| 55 | |
| 56 | |
| 57 | |
| 58 | |
| 59 | |
| 60 | |
| 61 | |
| 62 | xorq %r12,%r12 |
| 63 | cmpq %r14,%r15 |
| 64 | |
| 65 | vaesenc %xmm2,%xmm9,%xmm9 |
| 66 | vmovdqu 48+8(%rsp),%xmm0 |
| 67 | vpxor %xmm15,%xmm13,%xmm13 |
| 68 | vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1 |
| 69 | vaesenc %xmm2,%xmm10,%xmm10 |
| 70 | vpxor %xmm15,%xmm14,%xmm14 |
| 71 | setnc %r12b |
| 72 | vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 |
| 73 | vaesenc %xmm2,%xmm11,%xmm11 |
| 74 | vmovdqu 16-32(%r9),%xmm3 |
| 75 | negq %r12 |
| 76 | vaesenc %xmm2,%xmm12,%xmm12 |
| 77 | vpxor %xmm5,%xmm6,%xmm6 |
| 78 | vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5 |
| 79 | vpxor %xmm4,%xmm8,%xmm8 |
| 80 | vaesenc %xmm2,%xmm13,%xmm13 |
| 81 | vpxor %xmm5,%xmm1,%xmm4 |
| 82 | andq $0x60,%r12 |
| 83 | vmovups 32-128(%rcx),%xmm15 |
| 84 | vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1 |
| 85 | vaesenc %xmm2,%xmm14,%xmm14 |
| 86 | |
| 87 | vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2 |
| 88 | leaq (%r14,%r12,1),%r14 |
| 89 | vaesenc %xmm15,%xmm9,%xmm9 |
| 90 | vpxor 16+8(%rsp),%xmm8,%xmm8 |
| 91 | vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3 |
| 92 | vmovdqu 64+8(%rsp),%xmm0 |
| 93 | vaesenc %xmm15,%xmm10,%xmm10 |
| 94 | movbeq 88(%r14),%r13 |
| 95 | vaesenc %xmm15,%xmm11,%xmm11 |
| 96 | movbeq 80(%r14),%r12 |
| 97 | vaesenc %xmm15,%xmm12,%xmm12 |
| 98 | movq %r13,32+8(%rsp) |
| 99 | vaesenc %xmm15,%xmm13,%xmm13 |
| 100 | movq %r12,40+8(%rsp) |
| 101 | vmovdqu 48-32(%r9),%xmm5 |
| 102 | vaesenc %xmm15,%xmm14,%xmm14 |
| 103 | |
| 104 | vmovups 48-128(%rcx),%xmm15 |
| 105 | vpxor %xmm1,%xmm6,%xmm6 |
| 106 | vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1 |
| 107 | vaesenc %xmm15,%xmm9,%xmm9 |
| 108 | vpxor %xmm2,%xmm6,%xmm6 |
| 109 | vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2 |
| 110 | vaesenc %xmm15,%xmm10,%xmm10 |
| 111 | vpxor %xmm3,%xmm7,%xmm7 |
| 112 | vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3 |
| 113 | vaesenc %xmm15,%xmm11,%xmm11 |
| 114 | vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5 |
| 115 | vmovdqu 80+8(%rsp),%xmm0 |
| 116 | vaesenc %xmm15,%xmm12,%xmm12 |
| 117 | vaesenc %xmm15,%xmm13,%xmm13 |
| 118 | vpxor %xmm1,%xmm4,%xmm4 |
| 119 | vmovdqu 64-32(%r9),%xmm1 |
| 120 | vaesenc %xmm15,%xmm14,%xmm14 |
| 121 | |
| 122 | vmovups 64-128(%rcx),%xmm15 |
| 123 | vpxor %xmm2,%xmm6,%xmm6 |
| 124 | vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2 |
| 125 | vaesenc %xmm15,%xmm9,%xmm9 |
| 126 | vpxor %xmm3,%xmm6,%xmm6 |
| 127 | vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3 |
| 128 | vaesenc %xmm15,%xmm10,%xmm10 |
| 129 | movbeq 72(%r14),%r13 |
| 130 | vpxor %xmm5,%xmm7,%xmm7 |
| 131 | vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5 |
| 132 | vaesenc %xmm15,%xmm11,%xmm11 |
| 133 | movbeq 64(%r14),%r12 |
| 134 | vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1 |
| 135 | vmovdqu 96+8(%rsp),%xmm0 |
| 136 | vaesenc %xmm15,%xmm12,%xmm12 |
| 137 | movq %r13,48+8(%rsp) |
| 138 | vaesenc %xmm15,%xmm13,%xmm13 |
| 139 | movq %r12,56+8(%rsp) |
| 140 | vpxor %xmm2,%xmm4,%xmm4 |
| 141 | vmovdqu 96-32(%r9),%xmm2 |
| 142 | vaesenc %xmm15,%xmm14,%xmm14 |
| 143 | |
| 144 | vmovups 80-128(%rcx),%xmm15 |
| 145 | vpxor %xmm3,%xmm6,%xmm6 |
| 146 | vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3 |
| 147 | vaesenc %xmm15,%xmm9,%xmm9 |
| 148 | vpxor %xmm5,%xmm6,%xmm6 |
| 149 | vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5 |
| 150 | vaesenc %xmm15,%xmm10,%xmm10 |
| 151 | movbeq 56(%r14),%r13 |
| 152 | vpxor %xmm1,%xmm7,%xmm7 |
| 153 | vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1 |
| 154 | vpxor 112+8(%rsp),%xmm8,%xmm8 |
| 155 | vaesenc %xmm15,%xmm11,%xmm11 |
| 156 | movbeq 48(%r14),%r12 |
| 157 | vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2 |
| 158 | vaesenc %xmm15,%xmm12,%xmm12 |
| 159 | movq %r13,64+8(%rsp) |
| 160 | vaesenc %xmm15,%xmm13,%xmm13 |
| 161 | movq %r12,72+8(%rsp) |
| 162 | vpxor %xmm3,%xmm4,%xmm4 |
| 163 | vmovdqu 112-32(%r9),%xmm3 |
| 164 | vaesenc %xmm15,%xmm14,%xmm14 |
| 165 | |
| 166 | vmovups 96-128(%rcx),%xmm15 |
| 167 | vpxor %xmm5,%xmm6,%xmm6 |
| 168 | vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5 |
| 169 | vaesenc %xmm15,%xmm9,%xmm9 |
| 170 | vpxor %xmm1,%xmm6,%xmm6 |
| 171 | vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1 |
| 172 | vaesenc %xmm15,%xmm10,%xmm10 |
| 173 | movbeq 40(%r14),%r13 |
| 174 | vpxor %xmm2,%xmm7,%xmm7 |
| 175 | vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2 |
| 176 | vaesenc %xmm15,%xmm11,%xmm11 |
| 177 | movbeq 32(%r14),%r12 |
| 178 | vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8 |
| 179 | vaesenc %xmm15,%xmm12,%xmm12 |
| 180 | movq %r13,80+8(%rsp) |
| 181 | vaesenc %xmm15,%xmm13,%xmm13 |
| 182 | movq %r12,88+8(%rsp) |
| 183 | vpxor %xmm5,%xmm6,%xmm6 |
| 184 | vaesenc %xmm15,%xmm14,%xmm14 |
| 185 | vpxor %xmm1,%xmm6,%xmm6 |
| 186 | |
| 187 | vmovups 112-128(%rcx),%xmm15 |
| 188 | vpslldq $8,%xmm6,%xmm5 |
| 189 | vpxor %xmm2,%xmm4,%xmm4 |
| 190 | vmovdqu 16(%r11),%xmm3 |
| 191 | |
| 192 | vaesenc %xmm15,%xmm9,%xmm9 |
| 193 | vpxor %xmm8,%xmm7,%xmm7 |
| 194 | vaesenc %xmm15,%xmm10,%xmm10 |
| 195 | vpxor %xmm5,%xmm4,%xmm4 |
| 196 | movbeq 24(%r14),%r13 |
| 197 | vaesenc %xmm15,%xmm11,%xmm11 |
| 198 | movbeq 16(%r14),%r12 |
| 199 | vpalignr $8,%xmm4,%xmm4,%xmm0 |
| 200 | vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 |
| 201 | movq %r13,96+8(%rsp) |
| 202 | vaesenc %xmm15,%xmm12,%xmm12 |
| 203 | movq %r12,104+8(%rsp) |
| 204 | vaesenc %xmm15,%xmm13,%xmm13 |
| 205 | vmovups 128-128(%rcx),%xmm1 |
| 206 | vaesenc %xmm15,%xmm14,%xmm14 |
| 207 | |
| 208 | vaesenc %xmm1,%xmm9,%xmm9 |
| 209 | vmovups 144-128(%rcx),%xmm15 |
| 210 | vaesenc %xmm1,%xmm10,%xmm10 |
| 211 | vpsrldq $8,%xmm6,%xmm6 |
| 212 | vaesenc %xmm1,%xmm11,%xmm11 |
| 213 | vpxor %xmm6,%xmm7,%xmm7 |
| 214 | vaesenc %xmm1,%xmm12,%xmm12 |
| 215 | vpxor %xmm0,%xmm4,%xmm4 |
| 216 | movbeq 8(%r14),%r13 |
| 217 | vaesenc %xmm1,%xmm13,%xmm13 |
| 218 | movbeq 0(%r14),%r12 |
| 219 | vaesenc %xmm1,%xmm14,%xmm14 |
| 220 | vmovups 160-128(%rcx),%xmm1 |
| 221 | cmpl $11,%ebp |
| 222 | jb .Lenc_tail |
| 223 | |
| 224 | vaesenc %xmm15,%xmm9,%xmm9 |
| 225 | vaesenc %xmm15,%xmm10,%xmm10 |
| 226 | vaesenc %xmm15,%xmm11,%xmm11 |
| 227 | vaesenc %xmm15,%xmm12,%xmm12 |
| 228 | vaesenc %xmm15,%xmm13,%xmm13 |
| 229 | vaesenc %xmm15,%xmm14,%xmm14 |
| 230 | |
| 231 | vaesenc %xmm1,%xmm9,%xmm9 |
| 232 | vaesenc %xmm1,%xmm10,%xmm10 |
| 233 | vaesenc %xmm1,%xmm11,%xmm11 |
| 234 | vaesenc %xmm1,%xmm12,%xmm12 |
| 235 | vaesenc %xmm1,%xmm13,%xmm13 |
| 236 | vmovups 176-128(%rcx),%xmm15 |
| 237 | vaesenc %xmm1,%xmm14,%xmm14 |
| 238 | vmovups 192-128(%rcx),%xmm1 |
| 239 | |
| 240 | |
| 241 | vaesenc %xmm15,%xmm9,%xmm9 |
| 242 | vaesenc %xmm15,%xmm10,%xmm10 |
| 243 | vaesenc %xmm15,%xmm11,%xmm11 |
| 244 | vaesenc %xmm15,%xmm12,%xmm12 |
| 245 | vaesenc %xmm15,%xmm13,%xmm13 |
| 246 | vaesenc %xmm15,%xmm14,%xmm14 |
| 247 | |
| 248 | vaesenc %xmm1,%xmm9,%xmm9 |
| 249 | vaesenc %xmm1,%xmm10,%xmm10 |
| 250 | vaesenc %xmm1,%xmm11,%xmm11 |
| 251 | vaesenc %xmm1,%xmm12,%xmm12 |
| 252 | vaesenc %xmm1,%xmm13,%xmm13 |
| 253 | vmovups 208-128(%rcx),%xmm15 |
| 254 | vaesenc %xmm1,%xmm14,%xmm14 |
| 255 | vmovups 224-128(%rcx),%xmm1 |
| 256 | jmp .Lenc_tail |
| 257 | |
| 258 | .align 32 |
| 259 | .Lhandle_ctr32: |
| 260 | vmovdqu (%r11),%xmm0 |
| 261 | vpshufb %xmm0,%xmm1,%xmm6 |
| 262 | vmovdqu 48(%r11),%xmm5 |
| 263 | vpaddd 64(%r11),%xmm6,%xmm10 |
| 264 | vpaddd %xmm5,%xmm6,%xmm11 |
| 265 | vmovdqu 0-32(%r9),%xmm3 |
| 266 | vpaddd %xmm5,%xmm10,%xmm12 |
| 267 | vpshufb %xmm0,%xmm10,%xmm10 |
| 268 | vpaddd %xmm5,%xmm11,%xmm13 |
| 269 | vpshufb %xmm0,%xmm11,%xmm11 |
| 270 | vpxor %xmm15,%xmm10,%xmm10 |
| 271 | vpaddd %xmm5,%xmm12,%xmm14 |
| 272 | vpshufb %xmm0,%xmm12,%xmm12 |
| 273 | vpxor %xmm15,%xmm11,%xmm11 |
| 274 | vpaddd %xmm5,%xmm13,%xmm1 |
| 275 | vpshufb %xmm0,%xmm13,%xmm13 |
| 276 | vpshufb %xmm0,%xmm14,%xmm14 |
| 277 | vpshufb %xmm0,%xmm1,%xmm1 |
| 278 | jmp .Lresume_ctr32 |
| 279 | |
| 280 | .align 32 |
| 281 | .Lenc_tail: |
| 282 | vaesenc %xmm15,%xmm9,%xmm9 |
| 283 | vmovdqu %xmm7,16+8(%rsp) |
| 284 | vpalignr $8,%xmm4,%xmm4,%xmm8 |
| 285 | vaesenc %xmm15,%xmm10,%xmm10 |
| 286 | vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4 |
| 287 | vpxor 0(%rdi),%xmm1,%xmm2 |
| 288 | vaesenc %xmm15,%xmm11,%xmm11 |
| 289 | vpxor 16(%rdi),%xmm1,%xmm0 |
| 290 | vaesenc %xmm15,%xmm12,%xmm12 |
| 291 | vpxor 32(%rdi),%xmm1,%xmm5 |
| 292 | vaesenc %xmm15,%xmm13,%xmm13 |
| 293 | vpxor 48(%rdi),%xmm1,%xmm6 |
| 294 | vaesenc %xmm15,%xmm14,%xmm14 |
| 295 | vpxor 64(%rdi),%xmm1,%xmm7 |
| 296 | vpxor 80(%rdi),%xmm1,%xmm3 |
| 297 | vmovdqu (%r8),%xmm1 |
| 298 | |
| 299 | vaesenclast %xmm2,%xmm9,%xmm9 |
| 300 | vmovdqu 32(%r11),%xmm2 |
| 301 | vaesenclast %xmm0,%xmm10,%xmm10 |
| 302 | vpaddb %xmm2,%xmm1,%xmm0 |
| 303 | movq %r13,112+8(%rsp) |
| 304 | leaq 96(%rdi),%rdi |
| 305 | vaesenclast %xmm5,%xmm11,%xmm11 |
| 306 | vpaddb %xmm2,%xmm0,%xmm5 |
| 307 | movq %r12,120+8(%rsp) |
| 308 | leaq 96(%rsi),%rsi |
| 309 | vmovdqu 0-128(%rcx),%xmm15 |
| 310 | vaesenclast %xmm6,%xmm12,%xmm12 |
| 311 | vpaddb %xmm2,%xmm5,%xmm6 |
| 312 | vaesenclast %xmm7,%xmm13,%xmm13 |
| 313 | vpaddb %xmm2,%xmm6,%xmm7 |
| 314 | vaesenclast %xmm3,%xmm14,%xmm14 |
| 315 | vpaddb %xmm2,%xmm7,%xmm3 |
| 316 | |
| 317 | addq $0x60,%r10 |
| 318 | subq $0x6,%rdx |
| 319 | jc .L6x_done |
| 320 | |
| 321 | vmovups %xmm9,-96(%rsi) |
| 322 | vpxor %xmm15,%xmm1,%xmm9 |
| 323 | vmovups %xmm10,-80(%rsi) |
| 324 | vmovdqa %xmm0,%xmm10 |
| 325 | vmovups %xmm11,-64(%rsi) |
| 326 | vmovdqa %xmm5,%xmm11 |
| 327 | vmovups %xmm12,-48(%rsi) |
| 328 | vmovdqa %xmm6,%xmm12 |
| 329 | vmovups %xmm13,-32(%rsi) |
| 330 | vmovdqa %xmm7,%xmm13 |
| 331 | vmovups %xmm14,-16(%rsi) |
| 332 | vmovdqa %xmm3,%xmm14 |
| 333 | vmovdqu 32+8(%rsp),%xmm7 |
| 334 | jmp .Loop6x |
| 335 | |
| 336 | .L6x_done: |
| 337 | vpxor 16+8(%rsp),%xmm8,%xmm8 |
| 338 | vpxor %xmm4,%xmm8,%xmm8 |
| 339 | |
| 340 | .byte 0xf3,0xc3 |
| 341 | .cfi_endproc |
| 342 | .size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x |
| 343 | .globl GFp_aesni_gcm_decrypt |
| 344 | .hidden GFp_aesni_gcm_decrypt |
| 345 | .type GFp_aesni_gcm_decrypt,@function |
| 346 | .align 32 |
| 347 | GFp_aesni_gcm_decrypt: |
| 348 | .cfi_startproc |
| 349 | xorq %r10,%r10 |
| 350 | |
| 351 | |
| 352 | |
| 353 | cmpq $0x60,%rdx |
| 354 | jb .Lgcm_dec_abort |
| 355 | |
| 356 | leaq (%rsp),%rax |
| 357 | .cfi_def_cfa_register %rax |
| 358 | pushq %rbx |
| 359 | .cfi_offset %rbx,-16 |
| 360 | pushq %rbp |
| 361 | .cfi_offset %rbp,-24 |
| 362 | pushq %r12 |
| 363 | .cfi_offset %r12,-32 |
| 364 | pushq %r13 |
| 365 | .cfi_offset %r13,-40 |
| 366 | pushq %r14 |
| 367 | .cfi_offset %r14,-48 |
| 368 | pushq %r15 |
| 369 | .cfi_offset %r15,-56 |
| 370 | vzeroupper |
| 371 | |
| 372 | vmovdqu (%r8),%xmm1 |
| 373 | addq $-128,%rsp |
| 374 | movl 12(%r8),%ebx |
| 375 | leaq .Lbswap_mask(%rip),%r11 |
| 376 | leaq -128(%rcx),%r14 |
| 377 | movq $0xf80,%r15 |
| 378 | vmovdqu (%r9),%xmm8 |
| 379 | andq $-128,%rsp |
| 380 | vmovdqu (%r11),%xmm0 |
| 381 | leaq 128(%rcx),%rcx |
| 382 | leaq 32+32(%r9),%r9 |
| 383 | movl 240-128(%rcx),%ebp |
| 384 | vpshufb %xmm0,%xmm8,%xmm8 |
| 385 | |
| 386 | andq %r15,%r14 |
| 387 | andq %rsp,%r15 |
| 388 | subq %r14,%r15 |
| 389 | jc .Ldec_no_key_aliasing |
| 390 | cmpq $768,%r15 |
| 391 | jnc .Ldec_no_key_aliasing |
| 392 | subq %r15,%rsp |
| 393 | .Ldec_no_key_aliasing: |
| 394 | |
| 395 | vmovdqu 80(%rdi),%xmm7 |
| 396 | leaq (%rdi),%r14 |
| 397 | vmovdqu 64(%rdi),%xmm4 |
| 398 | |
| 399 | |
| 400 | |
| 401 | |
| 402 | |
| 403 | |
| 404 | |
| 405 | leaq -192(%rdi,%rdx,1),%r15 |
| 406 | |
| 407 | vmovdqu 48(%rdi),%xmm5 |
| 408 | shrq $4,%rdx |
| 409 | xorq %r10,%r10 |
| 410 | vmovdqu 32(%rdi),%xmm6 |
| 411 | vpshufb %xmm0,%xmm7,%xmm7 |
| 412 | vmovdqu 16(%rdi),%xmm2 |
| 413 | vpshufb %xmm0,%xmm4,%xmm4 |
| 414 | vmovdqu (%rdi),%xmm3 |
| 415 | vpshufb %xmm0,%xmm5,%xmm5 |
| 416 | vmovdqu %xmm4,48(%rsp) |
| 417 | vpshufb %xmm0,%xmm6,%xmm6 |
| 418 | vmovdqu %xmm5,64(%rsp) |
| 419 | vpshufb %xmm0,%xmm2,%xmm2 |
| 420 | vmovdqu %xmm6,80(%rsp) |
| 421 | vpshufb %xmm0,%xmm3,%xmm3 |
| 422 | vmovdqu %xmm2,96(%rsp) |
| 423 | vmovdqu %xmm3,112(%rsp) |
| 424 | |
| 425 | call _aesni_ctr32_ghash_6x |
| 426 | |
| 427 | vmovups %xmm9,-96(%rsi) |
| 428 | vmovups %xmm10,-80(%rsi) |
| 429 | vmovups %xmm11,-64(%rsi) |
| 430 | vmovups %xmm12,-48(%rsi) |
| 431 | vmovups %xmm13,-32(%rsi) |
| 432 | vmovups %xmm14,-16(%rsi) |
| 433 | |
| 434 | vpshufb (%r11),%xmm8,%xmm8 |
| 435 | vmovdqu %xmm8,-64(%r9) |
| 436 | |
| 437 | vzeroupper |
| 438 | movq -48(%rax),%r15 |
| 439 | .cfi_restore %r15 |
| 440 | movq -40(%rax),%r14 |
| 441 | .cfi_restore %r14 |
| 442 | movq -32(%rax),%r13 |
| 443 | .cfi_restore %r13 |
| 444 | movq -24(%rax),%r12 |
| 445 | .cfi_restore %r12 |
| 446 | movq -16(%rax),%rbp |
| 447 | .cfi_restore %rbp |
| 448 | movq -8(%rax),%rbx |
| 449 | .cfi_restore %rbx |
| 450 | leaq (%rax),%rsp |
| 451 | .cfi_def_cfa_register %rsp |
| 452 | .Lgcm_dec_abort: |
| 453 | movq %r10,%rax |
| 454 | .byte 0xf3,0xc3 |
| 455 | .cfi_endproc |
| 456 | .size GFp_aesni_gcm_decrypt,.-GFp_aesni_gcm_decrypt |
| 457 | .type _aesni_ctr32_6x,@function |
| 458 | .align 32 |
| 459 | _aesni_ctr32_6x: |
| 460 | .cfi_startproc |
| 461 | vmovdqu 0-128(%rcx),%xmm4 |
| 462 | vmovdqu 32(%r11),%xmm2 |
| 463 | leaq -1(%rbp),%r13 |
| 464 | vmovups 16-128(%rcx),%xmm15 |
| 465 | leaq 32-128(%rcx),%r12 |
| 466 | vpxor %xmm4,%xmm1,%xmm9 |
| 467 | addl $100663296,%ebx |
| 468 | jc .Lhandle_ctr32_2 |
| 469 | vpaddb %xmm2,%xmm1,%xmm10 |
| 470 | vpaddb %xmm2,%xmm10,%xmm11 |
| 471 | vpxor %xmm4,%xmm10,%xmm10 |
| 472 | vpaddb %xmm2,%xmm11,%xmm12 |
| 473 | vpxor %xmm4,%xmm11,%xmm11 |
| 474 | vpaddb %xmm2,%xmm12,%xmm13 |
| 475 | vpxor %xmm4,%xmm12,%xmm12 |
| 476 | vpaddb %xmm2,%xmm13,%xmm14 |
| 477 | vpxor %xmm4,%xmm13,%xmm13 |
| 478 | vpaddb %xmm2,%xmm14,%xmm1 |
| 479 | vpxor %xmm4,%xmm14,%xmm14 |
| 480 | jmp .Loop_ctr32 |
| 481 | |
| 482 | .align 16 |
| 483 | .Loop_ctr32: |
| 484 | vaesenc %xmm15,%xmm9,%xmm9 |
| 485 | vaesenc %xmm15,%xmm10,%xmm10 |
| 486 | vaesenc %xmm15,%xmm11,%xmm11 |
| 487 | vaesenc %xmm15,%xmm12,%xmm12 |
| 488 | vaesenc %xmm15,%xmm13,%xmm13 |
| 489 | vaesenc %xmm15,%xmm14,%xmm14 |
| 490 | vmovups (%r12),%xmm15 |
| 491 | leaq 16(%r12),%r12 |
| 492 | decl %r13d |
| 493 | jnz .Loop_ctr32 |
| 494 | |
| 495 | vmovdqu (%r12),%xmm3 |
| 496 | vaesenc %xmm15,%xmm9,%xmm9 |
| 497 | vpxor 0(%rdi),%xmm3,%xmm4 |
| 498 | vaesenc %xmm15,%xmm10,%xmm10 |
| 499 | vpxor 16(%rdi),%xmm3,%xmm5 |
| 500 | vaesenc %xmm15,%xmm11,%xmm11 |
| 501 | vpxor 32(%rdi),%xmm3,%xmm6 |
| 502 | vaesenc %xmm15,%xmm12,%xmm12 |
| 503 | vpxor 48(%rdi),%xmm3,%xmm8 |
| 504 | vaesenc %xmm15,%xmm13,%xmm13 |
| 505 | vpxor 64(%rdi),%xmm3,%xmm2 |
| 506 | vaesenc %xmm15,%xmm14,%xmm14 |
| 507 | vpxor 80(%rdi),%xmm3,%xmm3 |
| 508 | leaq 96(%rdi),%rdi |
| 509 | |
| 510 | vaesenclast %xmm4,%xmm9,%xmm9 |
| 511 | vaesenclast %xmm5,%xmm10,%xmm10 |
| 512 | vaesenclast %xmm6,%xmm11,%xmm11 |
| 513 | vaesenclast %xmm8,%xmm12,%xmm12 |
| 514 | vaesenclast %xmm2,%xmm13,%xmm13 |
| 515 | vaesenclast %xmm3,%xmm14,%xmm14 |
| 516 | vmovups %xmm9,0(%rsi) |
| 517 | vmovups %xmm10,16(%rsi) |
| 518 | vmovups %xmm11,32(%rsi) |
| 519 | vmovups %xmm12,48(%rsi) |
| 520 | vmovups %xmm13,64(%rsi) |
| 521 | vmovups %xmm14,80(%rsi) |
| 522 | leaq 96(%rsi),%rsi |
| 523 | |
| 524 | .byte 0xf3,0xc3 |
| 525 | .align 32 |
| 526 | .Lhandle_ctr32_2: |
| 527 | vpshufb %xmm0,%xmm1,%xmm6 |
| 528 | vmovdqu 48(%r11),%xmm5 |
| 529 | vpaddd 64(%r11),%xmm6,%xmm10 |
| 530 | vpaddd %xmm5,%xmm6,%xmm11 |
| 531 | vpaddd %xmm5,%xmm10,%xmm12 |
| 532 | vpshufb %xmm0,%xmm10,%xmm10 |
| 533 | vpaddd %xmm5,%xmm11,%xmm13 |
| 534 | vpshufb %xmm0,%xmm11,%xmm11 |
| 535 | vpxor %xmm4,%xmm10,%xmm10 |
| 536 | vpaddd %xmm5,%xmm12,%xmm14 |
| 537 | vpshufb %xmm0,%xmm12,%xmm12 |
| 538 | vpxor %xmm4,%xmm11,%xmm11 |
| 539 | vpaddd %xmm5,%xmm13,%xmm1 |
| 540 | vpshufb %xmm0,%xmm13,%xmm13 |
| 541 | vpxor %xmm4,%xmm12,%xmm12 |
| 542 | vpshufb %xmm0,%xmm14,%xmm14 |
| 543 | vpxor %xmm4,%xmm13,%xmm13 |
| 544 | vpshufb %xmm0,%xmm1,%xmm1 |
| 545 | vpxor %xmm4,%xmm14,%xmm14 |
| 546 | jmp .Loop_ctr32 |
| 547 | .cfi_endproc |
| 548 | .size _aesni_ctr32_6x,.-_aesni_ctr32_6x |
| 549 | |
| 550 | .globl GFp_aesni_gcm_encrypt |
| 551 | .hidden GFp_aesni_gcm_encrypt |
| 552 | .type GFp_aesni_gcm_encrypt,@function |
| 553 | .align 32 |
| 554 | GFp_aesni_gcm_encrypt: |
| 555 | .cfi_startproc |
| 556 | xorq %r10,%r10 |
| 557 | |
| 558 | |
| 559 | |
| 560 | |
| 561 | cmpq $288,%rdx |
| 562 | jb .Lgcm_enc_abort |
| 563 | |
| 564 | leaq (%rsp),%rax |
| 565 | .cfi_def_cfa_register %rax |
| 566 | pushq %rbx |
| 567 | .cfi_offset %rbx,-16 |
| 568 | pushq %rbp |
| 569 | .cfi_offset %rbp,-24 |
| 570 | pushq %r12 |
| 571 | .cfi_offset %r12,-32 |
| 572 | pushq %r13 |
| 573 | .cfi_offset %r13,-40 |
| 574 | pushq %r14 |
| 575 | .cfi_offset %r14,-48 |
| 576 | pushq %r15 |
| 577 | .cfi_offset %r15,-56 |
| 578 | vzeroupper |
| 579 | |
| 580 | vmovdqu (%r8),%xmm1 |
| 581 | addq $-128,%rsp |
| 582 | movl 12(%r8),%ebx |
| 583 | leaq .Lbswap_mask(%rip),%r11 |
| 584 | leaq -128(%rcx),%r14 |
| 585 | movq $0xf80,%r15 |
| 586 | leaq 128(%rcx),%rcx |
| 587 | vmovdqu (%r11),%xmm0 |
| 588 | andq $-128,%rsp |
| 589 | movl 240-128(%rcx),%ebp |
| 590 | |
| 591 | andq %r15,%r14 |
| 592 | andq %rsp,%r15 |
| 593 | subq %r14,%r15 |
| 594 | jc .Lenc_no_key_aliasing |
| 595 | cmpq $768,%r15 |
| 596 | jnc .Lenc_no_key_aliasing |
| 597 | subq %r15,%rsp |
| 598 | .Lenc_no_key_aliasing: |
| 599 | |
| 600 | leaq (%rsi),%r14 |
| 601 | |
| 602 | |
| 603 | |
| 604 | |
| 605 | |
| 606 | |
| 607 | |
| 608 | |
| 609 | leaq -192(%rsi,%rdx,1),%r15 |
| 610 | |
| 611 | shrq $4,%rdx |
| 612 | |
| 613 | call _aesni_ctr32_6x |
| 614 | vpshufb %xmm0,%xmm9,%xmm8 |
| 615 | vpshufb %xmm0,%xmm10,%xmm2 |
| 616 | vmovdqu %xmm8,112(%rsp) |
| 617 | vpshufb %xmm0,%xmm11,%xmm4 |
| 618 | vmovdqu %xmm2,96(%rsp) |
| 619 | vpshufb %xmm0,%xmm12,%xmm5 |
| 620 | vmovdqu %xmm4,80(%rsp) |
| 621 | vpshufb %xmm0,%xmm13,%xmm6 |
| 622 | vmovdqu %xmm5,64(%rsp) |
| 623 | vpshufb %xmm0,%xmm14,%xmm7 |
| 624 | vmovdqu %xmm6,48(%rsp) |
| 625 | |
| 626 | call _aesni_ctr32_6x |
| 627 | |
| 628 | vmovdqu (%r9),%xmm8 |
| 629 | leaq 32+32(%r9),%r9 |
| 630 | subq $12,%rdx |
| 631 | movq $192,%r10 |
| 632 | vpshufb %xmm0,%xmm8,%xmm8 |
| 633 | |
| 634 | call _aesni_ctr32_ghash_6x |
| 635 | vmovdqu 32(%rsp),%xmm7 |
| 636 | vmovdqu (%r11),%xmm0 |
| 637 | vmovdqu 0-32(%r9),%xmm3 |
| 638 | vpunpckhqdq %xmm7,%xmm7,%xmm1 |
| 639 | vmovdqu 32-32(%r9),%xmm15 |
| 640 | vmovups %xmm9,-96(%rsi) |
| 641 | vpshufb %xmm0,%xmm9,%xmm9 |
| 642 | vpxor %xmm7,%xmm1,%xmm1 |
| 643 | vmovups %xmm10,-80(%rsi) |
| 644 | vpshufb %xmm0,%xmm10,%xmm10 |
| 645 | vmovups %xmm11,-64(%rsi) |
| 646 | vpshufb %xmm0,%xmm11,%xmm11 |
| 647 | vmovups %xmm12,-48(%rsi) |
| 648 | vpshufb %xmm0,%xmm12,%xmm12 |
| 649 | vmovups %xmm13,-32(%rsi) |
| 650 | vpshufb %xmm0,%xmm13,%xmm13 |
| 651 | vmovups %xmm14,-16(%rsi) |
| 652 | vpshufb %xmm0,%xmm14,%xmm14 |
| 653 | vmovdqu %xmm9,16(%rsp) |
| 654 | vmovdqu 48(%rsp),%xmm6 |
| 655 | vmovdqu 16-32(%r9),%xmm0 |
| 656 | vpunpckhqdq %xmm6,%xmm6,%xmm2 |
| 657 | vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5 |
| 658 | vpxor %xmm6,%xmm2,%xmm2 |
| 659 | vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7 |
| 660 | vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 |
| 661 | |
| 662 | vmovdqu 64(%rsp),%xmm9 |
| 663 | vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4 |
| 664 | vmovdqu 48-32(%r9),%xmm3 |
| 665 | vpxor %xmm5,%xmm4,%xmm4 |
| 666 | vpunpckhqdq %xmm9,%xmm9,%xmm5 |
| 667 | vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6 |
| 668 | vpxor %xmm9,%xmm5,%xmm5 |
| 669 | vpxor %xmm7,%xmm6,%xmm6 |
| 670 | vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 |
| 671 | vmovdqu 80-32(%r9),%xmm15 |
| 672 | vpxor %xmm1,%xmm2,%xmm2 |
| 673 | |
| 674 | vmovdqu 80(%rsp),%xmm1 |
| 675 | vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7 |
| 676 | vmovdqu 64-32(%r9),%xmm0 |
| 677 | vpxor %xmm4,%xmm7,%xmm7 |
| 678 | vpunpckhqdq %xmm1,%xmm1,%xmm4 |
| 679 | vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9 |
| 680 | vpxor %xmm1,%xmm4,%xmm4 |
| 681 | vpxor %xmm6,%xmm9,%xmm9 |
| 682 | vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5 |
| 683 | vpxor %xmm2,%xmm5,%xmm5 |
| 684 | |
| 685 | vmovdqu 96(%rsp),%xmm2 |
| 686 | vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6 |
| 687 | vmovdqu 96-32(%r9),%xmm3 |
| 688 | vpxor %xmm7,%xmm6,%xmm6 |
| 689 | vpunpckhqdq %xmm2,%xmm2,%xmm7 |
| 690 | vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1 |
| 691 | vpxor %xmm2,%xmm7,%xmm7 |
| 692 | vpxor %xmm9,%xmm1,%xmm1 |
| 693 | vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4 |
| 694 | vmovdqu 128-32(%r9),%xmm15 |
| 695 | vpxor %xmm5,%xmm4,%xmm4 |
| 696 | |
| 697 | vpxor 112(%rsp),%xmm8,%xmm8 |
| 698 | vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5 |
| 699 | vmovdqu 112-32(%r9),%xmm0 |
| 700 | vpunpckhqdq %xmm8,%xmm8,%xmm9 |
| 701 | vpxor %xmm6,%xmm5,%xmm5 |
| 702 | vpclmulqdq $0x11,%xmm3,%xmm2,%xmm2 |
| 703 | vpxor %xmm8,%xmm9,%xmm9 |
| 704 | vpxor %xmm1,%xmm2,%xmm2 |
| 705 | vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7 |
| 706 | vpxor %xmm4,%xmm7,%xmm4 |
| 707 | |
| 708 | vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6 |
| 709 | vmovdqu 0-32(%r9),%xmm3 |
| 710 | vpunpckhqdq %xmm14,%xmm14,%xmm1 |
| 711 | vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8 |
| 712 | vpxor %xmm14,%xmm1,%xmm1 |
| 713 | vpxor %xmm5,%xmm6,%xmm5 |
| 714 | vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9 |
| 715 | vmovdqu 32-32(%r9),%xmm15 |
| 716 | vpxor %xmm2,%xmm8,%xmm7 |
| 717 | vpxor %xmm4,%xmm9,%xmm6 |
| 718 | |
| 719 | vmovdqu 16-32(%r9),%xmm0 |
| 720 | vpxor %xmm5,%xmm7,%xmm9 |
| 721 | vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4 |
| 722 | vpxor %xmm9,%xmm6,%xmm6 |
| 723 | vpunpckhqdq %xmm13,%xmm13,%xmm2 |
| 724 | vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14 |
| 725 | vpxor %xmm13,%xmm2,%xmm2 |
| 726 | vpslldq $8,%xmm6,%xmm9 |
| 727 | vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1 |
| 728 | vpxor %xmm9,%xmm5,%xmm8 |
| 729 | vpsrldq $8,%xmm6,%xmm6 |
| 730 | vpxor %xmm6,%xmm7,%xmm7 |
| 731 | |
| 732 | vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5 |
| 733 | vmovdqu 48-32(%r9),%xmm3 |
| 734 | vpxor %xmm4,%xmm5,%xmm5 |
| 735 | vpunpckhqdq %xmm12,%xmm12,%xmm9 |
| 736 | vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13 |
| 737 | vpxor %xmm12,%xmm9,%xmm9 |
| 738 | vpxor %xmm14,%xmm13,%xmm13 |
| 739 | vpalignr $8,%xmm8,%xmm8,%xmm14 |
| 740 | vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2 |
| 741 | vmovdqu 80-32(%r9),%xmm15 |
| 742 | vpxor %xmm1,%xmm2,%xmm2 |
| 743 | |
| 744 | vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4 |
| 745 | vmovdqu 64-32(%r9),%xmm0 |
| 746 | vpxor %xmm5,%xmm4,%xmm4 |
| 747 | vpunpckhqdq %xmm11,%xmm11,%xmm1 |
| 748 | vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12 |
| 749 | vpxor %xmm11,%xmm1,%xmm1 |
| 750 | vpxor %xmm13,%xmm12,%xmm12 |
| 751 | vxorps 16(%rsp),%xmm7,%xmm7 |
| 752 | vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9 |
| 753 | vpxor %xmm2,%xmm9,%xmm9 |
| 754 | |
| 755 | vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 |
| 756 | vxorps %xmm14,%xmm8,%xmm8 |
| 757 | |
| 758 | vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5 |
| 759 | vmovdqu 96-32(%r9),%xmm3 |
| 760 | vpxor %xmm4,%xmm5,%xmm5 |
| 761 | vpunpckhqdq %xmm10,%xmm10,%xmm2 |
| 762 | vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11 |
| 763 | vpxor %xmm10,%xmm2,%xmm2 |
| 764 | vpalignr $8,%xmm8,%xmm8,%xmm14 |
| 765 | vpxor %xmm12,%xmm11,%xmm11 |
| 766 | vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1 |
| 767 | vmovdqu 128-32(%r9),%xmm15 |
| 768 | vpxor %xmm9,%xmm1,%xmm1 |
| 769 | |
| 770 | vxorps %xmm7,%xmm14,%xmm14 |
| 771 | vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8 |
| 772 | vxorps %xmm14,%xmm8,%xmm8 |
| 773 | |
| 774 | vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4 |
| 775 | vmovdqu 112-32(%r9),%xmm0 |
| 776 | vpxor %xmm5,%xmm4,%xmm4 |
| 777 | vpunpckhqdq %xmm8,%xmm8,%xmm9 |
| 778 | vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10 |
| 779 | vpxor %xmm8,%xmm9,%xmm9 |
| 780 | vpxor %xmm11,%xmm10,%xmm10 |
| 781 | vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2 |
| 782 | vpxor %xmm1,%xmm2,%xmm2 |
| 783 | |
| 784 | vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5 |
| 785 | vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7 |
| 786 | vpxor %xmm4,%xmm5,%xmm5 |
| 787 | vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6 |
| 788 | vpxor %xmm10,%xmm7,%xmm7 |
| 789 | vpxor %xmm2,%xmm6,%xmm6 |
| 790 | |
| 791 | vpxor %xmm5,%xmm7,%xmm4 |
| 792 | vpxor %xmm4,%xmm6,%xmm6 |
| 793 | vpslldq $8,%xmm6,%xmm1 |
| 794 | vmovdqu 16(%r11),%xmm3 |
| 795 | vpsrldq $8,%xmm6,%xmm6 |
| 796 | vpxor %xmm1,%xmm5,%xmm8 |
| 797 | vpxor %xmm6,%xmm7,%xmm7 |
| 798 | |
| 799 | vpalignr $8,%xmm8,%xmm8,%xmm2 |
| 800 | vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8 |
| 801 | vpxor %xmm2,%xmm8,%xmm8 |
| 802 | |
| 803 | vpalignr $8,%xmm8,%xmm8,%xmm2 |
| 804 | vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8 |
| 805 | vpxor %xmm7,%xmm2,%xmm2 |
| 806 | vpxor %xmm2,%xmm8,%xmm8 |
| 807 | vpshufb (%r11),%xmm8,%xmm8 |
| 808 | vmovdqu %xmm8,-64(%r9) |
| 809 | |
| 810 | vzeroupper |
| 811 | movq -48(%rax),%r15 |
| 812 | .cfi_restore %r15 |
| 813 | movq -40(%rax),%r14 |
| 814 | .cfi_restore %r14 |
| 815 | movq -32(%rax),%r13 |
| 816 | .cfi_restore %r13 |
| 817 | movq -24(%rax),%r12 |
| 818 | .cfi_restore %r12 |
| 819 | movq -16(%rax),%rbp |
| 820 | .cfi_restore %rbp |
| 821 | movq -8(%rax),%rbx |
| 822 | .cfi_restore %rbx |
| 823 | leaq (%rax),%rsp |
| 824 | .cfi_def_cfa_register %rsp |
| 825 | .Lgcm_enc_abort: |
| 826 | movq %r10,%rax |
| 827 | .byte 0xf3,0xc3 |
| 828 | .cfi_endproc |
| 829 | .size GFp_aesni_gcm_encrypt,.-GFp_aesni_gcm_encrypt |
| 830 | .align 64 |
| 831 | .Lbswap_mask: |
| 832 | .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 |
| 833 | .Lpoly: |
| 834 | .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2 |
| 835 | .Lone_msb: |
| 836 | .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 |
| 837 | .Ltwo_lsb: |
| 838 | .byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 |
| 839 | .Lone_lsb: |
| 840 | .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 |
| 841 | .byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 |
| 842 | .align 64 |
| 843 | #endif |
| 844 | .section .note.GNU-stack,"",@progbits |