Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 1 | /* |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 2 | * spu hypervisor abstraction for direct hardware access. |
| 3 | * |
| 4 | * (C) Copyright IBM Deutschland Entwicklung GmbH 2005 |
| 5 | * Copyright 2006 Sony Corp. |
| 6 | * |
| 7 | * This program is free software; you can redistribute it and/or modify |
| 8 | * it under the terms of the GNU General Public License as published by |
| 9 | * the Free Software Foundation; version 2 of the License. |
| 10 | * |
| 11 | * This program is distributed in the hope that it will be useful, |
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 14 | * GNU General Public License for more details. |
| 15 | * |
| 16 | * You should have received a copy of the GNU General Public License |
| 17 | * along with this program; if not, write to the Free Software |
| 18 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 19 | */ |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 20 | |
Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 21 | #include <linux/interrupt.h> |
| 22 | #include <linux/list.h> |
Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 23 | #include <linux/ptrace.h> |
Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 24 | #include <linux/wait.h> |
| 25 | #include <linux/mm.h> |
| 26 | #include <linux/io.h> |
| 27 | #include <linux/mutex.h> |
| 28 | #include <linux/device.h> |
Luke Browning | 7a21420 | 2008-04-28 14:32:34 +1000 | [diff] [blame] | 29 | #include <linux/sched.h> |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 30 | |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 31 | #include <asm/spu.h> |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 32 | #include <asm/spu_priv1.h> |
Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 33 | #include <asm/firmware.h> |
| 34 | #include <asm/prom.h> |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 35 | |
Geoff Levand | a91942a | 2006-06-19 20:33:30 +0200 | [diff] [blame] | 36 | #include "interrupt.h" |
Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 37 | #include "spu_priv1_mmio.h" |
| 38 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 39 | static void int_mask_and(struct spu *spu, int class, u64 mask) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 40 | { |
| 41 | u64 old_mask; |
| 42 | |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 43 | old_mask = in_be64(&spu->priv1->int_mask_RW[class]); |
| 44 | out_be64(&spu->priv1->int_mask_RW[class], old_mask & mask); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 45 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 46 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 47 | static void int_mask_or(struct spu *spu, int class, u64 mask) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 48 | { |
| 49 | u64 old_mask; |
| 50 | |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 51 | old_mask = in_be64(&spu->priv1->int_mask_RW[class]); |
| 52 | out_be64(&spu->priv1->int_mask_RW[class], old_mask | mask); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 53 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 54 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 55 | static void int_mask_set(struct spu *spu, int class, u64 mask) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 56 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 57 | out_be64(&spu->priv1->int_mask_RW[class], mask); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 58 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 59 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 60 | static u64 int_mask_get(struct spu *spu, int class) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 61 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 62 | return in_be64(&spu->priv1->int_mask_RW[class]); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 63 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 64 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 65 | static void int_stat_clear(struct spu *spu, int class, u64 stat) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 66 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 67 | out_be64(&spu->priv1->int_stat_RW[class], stat); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 68 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 69 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 70 | static u64 int_stat_get(struct spu *spu, int class) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 71 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 72 | return in_be64(&spu->priv1->int_stat_RW[class]); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 73 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 74 | |
Geoff Levand | a91942a | 2006-06-19 20:33:30 +0200 | [diff] [blame] | 75 | static void cpu_affinity_set(struct spu *spu, int cpu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 76 | { |
Luke Browning | 7a21420 | 2008-04-28 14:32:34 +1000 | [diff] [blame] | 77 | u64 target; |
| 78 | u64 route; |
| 79 | |
| 80 | if (nr_cpus_node(spu->node)) { |
Rusty Russell | 86c6f27 | 2008-12-26 22:23:39 +1030 | [diff] [blame] | 81 | const struct cpumask *spumask = cpumask_of_node(spu->node), |
| 82 | *cpumask = cpumask_of_node(cpu_to_node(cpu)); |
Luke Browning | 7a21420 | 2008-04-28 14:32:34 +1000 | [diff] [blame] | 83 | |
Rusty Russell | 86c6f27 | 2008-12-26 22:23:39 +1030 | [diff] [blame] | 84 | if (!cpumask_intersects(spumask, cpumask)) |
Luke Browning | 7a21420 | 2008-04-28 14:32:34 +1000 | [diff] [blame] | 85 | return; |
| 86 | } |
| 87 | |
| 88 | target = iic_get_target_id(cpu); |
| 89 | route = target << 48 | target << 32 | target << 16; |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 90 | out_be64(&spu->priv1->int_route_RW, route); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 91 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 92 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 93 | static u64 mfc_dar_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 94 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 95 | return in_be64(&spu->priv1->mfc_dar_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 96 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 97 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 98 | static u64 mfc_dsisr_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 99 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 100 | return in_be64(&spu->priv1->mfc_dsisr_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 101 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 102 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 103 | static void mfc_dsisr_set(struct spu *spu, u64 dsisr) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 104 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 105 | out_be64(&spu->priv1->mfc_dsisr_RW, dsisr); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 106 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 107 | |
Masato Noguchi | 24f43b3 | 2006-10-24 18:31:14 +0200 | [diff] [blame] | 108 | static void mfc_sdr_setup(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 109 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 110 | out_be64(&spu->priv1->mfc_sdr_RW, mfspr(SPRN_SDR1)); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 111 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 112 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 113 | static void mfc_sr1_set(struct spu *spu, u64 sr1) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 114 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 115 | out_be64(&spu->priv1->mfc_sr1_RW, sr1); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 116 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 117 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 118 | static u64 mfc_sr1_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 119 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 120 | return in_be64(&spu->priv1->mfc_sr1_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 121 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 122 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 123 | static void mfc_tclass_id_set(struct spu *spu, u64 tclass_id) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 124 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 125 | out_be64(&spu->priv1->mfc_tclass_id_RW, tclass_id); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 126 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 127 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 128 | static u64 mfc_tclass_id_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 129 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 130 | return in_be64(&spu->priv1->mfc_tclass_id_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 131 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 132 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 133 | static void tlb_invalidate(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 134 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 135 | out_be64(&spu->priv1->tlb_invalidate_entry_W, 0ul); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 136 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 137 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 138 | static void resource_allocation_groupID_set(struct spu *spu, u64 id) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 139 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 140 | out_be64(&spu->priv1->resource_allocation_groupID_RW, id); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 141 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 142 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 143 | static u64 resource_allocation_groupID_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 144 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 145 | return in_be64(&spu->priv1->resource_allocation_groupID_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 146 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 147 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 148 | static void resource_allocation_enable_set(struct spu *spu, u64 enable) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 149 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 150 | out_be64(&spu->priv1->resource_allocation_enable_RW, enable); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 151 | } |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 152 | |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 153 | static u64 resource_allocation_enable_get(struct spu *spu) |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 154 | { |
Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 155 | return in_be64(&spu->priv1->resource_allocation_enable_RW); |
Arnd Bergmann | f0831ac | 2006-01-04 20:31:30 +0100 | [diff] [blame] | 156 | } |
Geoff Levand | 540270d | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 157 | |
/*
 * spu_priv1_ops instance for direct hardware access: every hook is a
 * thin in_be64/out_be64 wrapper on a register in the spu->priv1 block.
 */
const struct spu_priv1_ops spu_priv1_mmio_ops =
{
	.int_mask_and = int_mask_and,
	.int_mask_or = int_mask_or,
	.int_mask_set = int_mask_set,
	.int_mask_get = int_mask_get,
	.int_stat_clear = int_stat_clear,
	.int_stat_get = int_stat_get,
	.cpu_affinity_set = cpu_affinity_set,
	.mfc_dar_get = mfc_dar_get,
	.mfc_dsisr_get = mfc_dsisr_get,
	.mfc_dsisr_set = mfc_dsisr_set,
	.mfc_sdr_setup = mfc_sdr_setup,
	.mfc_sr1_set = mfc_sr1_set,
	.mfc_sr1_get = mfc_sr1_get,
	.mfc_tclass_id_set = mfc_tclass_id_set,
	.mfc_tclass_id_get = mfc_tclass_id_get,
	.tlb_invalidate = tlb_invalidate,
	.resource_allocation_groupID_set = resource_allocation_groupID_set,
	.resource_allocation_groupID_get = resource_allocation_groupID_get,
	.resource_allocation_enable_set = resource_allocation_enable_set,
	.resource_allocation_enable_get = resource_allocation_enable_get,
};