/*
 * blockops.S: Common block zero optimized routines.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
| 6 | |
David S. Miller | 8695c37 | 2012-05-11 20:33:22 -0700 | [diff] [blame] | 7 | #include <linux/linkage.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 8 | #include <asm/page.h> |
| 9 | |
/* Zero out 64 bytes of memory at (buf + offset).
 * Assumes %g1 contains zero.
 *
 * Why %g1 matters: "std %g0" is a doubleword store of the
 * even/odd register pair %g0:%g1.  %g0 is hardwired to zero
 * on SPARC, but %g1 is not, so the caller must clear %g1
 * for each std to write a full 8 zero bytes.
 * Eight 8-byte stores, issued from the highest offset down.
 */
#define BLAST_BLOCK(buf, offset) \
	std	%g0, [buf + offset + 0x38]; \
	std	%g0, [buf + offset + 0x30]; \
	std	%g0, [buf + offset + 0x28]; \
	std	%g0, [buf + offset + 0x20]; \
	std	%g0, [buf + offset + 0x18]; \
	std	%g0, [buf + offset + 0x10]; \
	std	%g0, [buf + offset + 0x08]; \
	std	%g0, [buf + offset + 0x00];
| 22 | |
/* Copy 32 bytes of memory at (src + offset) to
 * (dst + offset).
 *
 * t0..t7 name four even/odd register pairs (t0:t1, t2:t3,
 * t4:t5, t6:t7); each ldd/std moves 8 bytes through a pair.
 * All four loads are issued before any store, which also
 * makes the copy safe only for non-overlapping pages.
 */
#define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd	[src + offset + 0x18], t0; \
	ldd	[src + offset + 0x10], t2; \
	ldd	[src + offset + 0x08], t4; \
	ldd	[src + offset + 0x00], t6; \
	std	t0, [dst + offset + 0x18]; \
	std	t2, [dst + offset + 0x10]; \
	std	t4, [dst + offset + 0x08]; \
	std	t6, [dst + offset + 0x00];
| 35 | |
/* Profiling evidence indicates that memset() is
 * commonly called for blocks of size PAGE_SIZE,
 * and (2 * PAGE_SIZE) (for kernel stacks)
 * and with a second arg of zero.  We assume in
 * all of these cases that the buffer is aligned
 * on at least an 8 byte boundary.
 *
 * Therefore we special case them to make them
 * as fast as possible.
 */
| 46 | |
	.text
ENTRY(bzero_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
	/* %o0 = buf
	 *
	 * Zero one PAGE_SIZE-byte, 8-byte-aligned page at buf.
	 * Clobbers: %g1, %g2, %o0, %o1.
	 */
	or	%g0, %g0, %g1			/* %g1 = 0: precondition of BLAST_BLOCK (std %g0 stores pair %g0:%g1) */
	or	%o0, %g0, %o1			/* NOTE(review): %o1 = buf is never read below; presumably preserved
						 * for the hypersparc.S insn-count/patch dependency — confirm */
	or	%g0, (PAGE_SIZE >> 8), %g2	/* loop counter: one iteration per 256 bytes of page */
1:
	BLAST_BLOCK(%o0, 0x00)			/* 4 x 64 = 256 zero bytes per loop pass */
	BLAST_BLOCK(%o0, 0x40)
	BLAST_BLOCK(%o0, 0x80)
	BLAST_BLOCK(%o0, 0xc0)
	subcc	%g2, 1, %g2			/* decrement, setting condition codes for bne */
	bne	1b
	add	%o0, 0x100, %o0			/* delay slot: advance to next 256-byte chunk */

	retl
	nop					/* delay slot */
ENDPROC(bzero_1page)
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 67 | |
David S. Miller | 8695c37 | 2012-05-11 20:33:22 -0700 | [diff] [blame] | 68 | ENTRY(__copy_1page) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 69 | /* NOTE: If you change the number of insns of this routine, please check |
| 70 | * arch/sparc/mm/hypersparc.S */ |
| 71 | /* %o0 = dst, %o1 = src */ |
| 72 | or %g0, (PAGE_SIZE >> 8), %g1 |
| 73 | 1: |
| 74 | MIRROR_BLOCK(%o0, %o1, 0x00, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 75 | MIRROR_BLOCK(%o0, %o1, 0x20, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 76 | MIRROR_BLOCK(%o0, %o1, 0x40, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 77 | MIRROR_BLOCK(%o0, %o1, 0x60, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 78 | MIRROR_BLOCK(%o0, %o1, 0x80, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 79 | MIRROR_BLOCK(%o0, %o1, 0xa0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 80 | MIRROR_BLOCK(%o0, %o1, 0xc0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 81 | MIRROR_BLOCK(%o0, %o1, 0xe0, %o2, %o3, %o4, %o5, %g2, %g3, %g4, %g5) |
| 82 | subcc %g1, 1, %g1 |
| 83 | add %o0, 0x100, %o0 |
| 84 | bne 1b |
| 85 | add %o1, 0x100, %o1 |
| 86 | |
| 87 | retl |
| 88 | nop |
David S. Miller | 8695c37 | 2012-05-11 20:33:22 -0700 | [diff] [blame] | 89 | ENDPROC(__copy_1page) |