/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2006 Cavium Networks
 * Cache error handler
 */

#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/stackframe.h>

/*
 * Handle cache error. Indicate to the second level handler whether
 * the exception is recoverable.
 */
	LEAF(except_vec2_octeon)

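	/* This first-level handler runs from the 0x80-byte cache error
	 * vector, so it only records the error state and decides whether
	 * the fault is recoverable before jumping to handle_cache_err. */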
	.set	push
	.set	mips64r2
	.set	noreorder
	.set	noat

	/* due to an erratum we need to read the COP0 CacheErr (Dcache)
	 * register before any cache/DRAM access */

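	/* rdhwr $0 reads the CPUNum hardware register; each core gets an
	 * 8-byte slot in cache_err_dcache[], hence the shift left by 3. */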
	rdhwr	k0, $0		/* get core_id */
	PTR_LA	k1, cache_err_dcache
	sll	k0, k0, 3
	PTR_ADDU k1, k0, k1	/* k1 = &cache_err_dcache[core_id] */

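	/* Save the Dcache error state for the C handler, then clear the
	 * CP0 CacheErr (Dcache) register by writing zero to it. */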
	dmfc0	k0, CP0_CACHEERR, 1
	sd	k0, (k1)
	dmtc0	$0, CP0_CACHEERR, 1

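	/* Status.EXL set means the error was taken while another exception
	 * was already being handled; that context cannot be safely
	 * restored, so the fault is treated as non-recoverable. */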
	/* check whether this is a nested exception */
	mfc0	k1, CP0_STATUS
	andi	k1, k1, ST0_EXL
	beqz	k1, 1f
	 nop
	j	cache_parity_error_octeon_non_recoverable
	 nop

	/* exception is recoverable */
1:	j	handle_cache_err
	 nop

	.set	pop
	END(except_vec2_octeon)

	/* We need to jump to handle_cache_err so that the first-level
	 * handler above fits within the 0x80-byte exception vector.  The
	 * jump also moves us from the uncached 0xFFFFFFFFAXXXXXXX space,
	 * where the vector runs so it does not depend on the possibly
	 * faulty caches, to the cached 0xFFFFFFFF8XXXXXXX space. */
	LEAF(handle_cache_err)
	.set	push
	.set	noreorder
	.set	noat

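	/* SAVE_ALL and KMODE come from <asm/stackframe.h>: SAVE_ALL lays
	 * down the full register frame on the kernel stack and KMODE puts
	 * the CPU into kernel mode so the C handler can run normally. */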
	SAVE_ALL
	KMODE
	jal	cache_parity_error_octeon_recoverable
	nop
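	/* The C handler has dealt with the recoverable error; leave
	 * through the generic exception return path, which restores the
	 * registers saved by SAVE_ALL. */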
	j	ret_from_exception
	nop

	.set	pop
	END(handle_cache_err)