/* SuperH Ethernet device driver
 *
 * Copyright (C) 2014 Renesas Electronics Corporation
 * Copyright (C) 2006-2012 Nobuhiro Iwamatsu
 * Copyright (C) 2008-2014 Renesas Solutions Corp.
 * Copyright (C) 2013-2017 Cogent Embedded, Inc.
 * Copyright (C) 2014 Codethink Limited
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * The full GNU General Public License is included in this distribution in
 * the file called "COPYING".
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/dma-mapping.h>
#include <linux/etherdevice.h>
#include <linux/delay.h>
#include <linux/platform_device.h>
#include <linux/mdio-bitbang.h>
#include <linux/netdevice.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_irq.h>
#include <linux/of_net.h>
#include <linux/phy.h>
#include <linux/cache.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <linux/ethtool.h>
#include <linux/if_vlan.h>
#include <linux/clk.h>
#include <linux/sh_eth.h>
#include <linux/of_mdio.h>

#include "sh_eth.h"

#define SH_ETH_DEF_MSG_ENABLE \
		(NETIF_MSG_LINK	| \
		NETIF_MSG_TIMER	| \
		NETIF_MSG_RX_ERR| \
		NETIF_MSG_TX_ERR)

#define SH_ETH_OFFSET_INVALID	((u16)~0)

#define SH_ETH_OFFSET_DEFAULTS			\
	[0 ... SH_ETH_MAX_REGISTER_OFFSET - 1] = SH_ETH_OFFSET_INVALID

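/* Per-SoC register offset tables.
 *
 * Each table maps the common register index enum (see sh_eth.h) to the
 * MMIO offset used by that SoC family.  Registers that a given SoC does
 * not implement keep the SH_ETH_OFFSET_INVALID value supplied by
 * SH_ETH_OFFSET_DEFAULTS, and sh_eth_read()/sh_eth_write() below warn
 * and bail out if such an entry is ever accessed.
 */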
static const u16 sh_eth_offset_gigabit[SH_ETH_MAX_REGISTER_OFFSET] = {
	SH_ETH_OFFSET_DEFAULTS,

	[EDSR]		= 0x0000,
	[EDMR]		= 0x0400,
	[EDTRR]		= 0x0408,
	[EDRRR]		= 0x0410,
	[EESR]		= 0x0428,
	[EESIPR]	= 0x0430,
	[TDLAR]		= 0x0010,
	[TDFAR]		= 0x0014,
	[TDFXR]		= 0x0018,
	[TDFFR]		= 0x001c,
	[RDLAR]		= 0x0030,
	[RDFAR]		= 0x0034,
	[RDFXR]		= 0x0038,
	[RDFFR]		= 0x003c,
	[TRSCER]	= 0x0438,
	[RMFCR]		= 0x0440,
	[TFTR]		= 0x0448,
	[FDR]		= 0x0450,
	[RMCR]		= 0x0458,
	[RPADIR]	= 0x0460,
	[FCFTR]		= 0x0468,
	[CSMR]		= 0x04E4,

	[ECMR]		= 0x0500,
	[ECSR]		= 0x0510,
	[ECSIPR]	= 0x0518,
	[PIR]		= 0x0520,
	[PSR]		= 0x0528,
	[PIPR]		= 0x052c,
	[RFLR]		= 0x0508,
	[APR]		= 0x0554,
	[MPR]		= 0x0558,
	[PFTCR]		= 0x055c,
	[PFRCR]		= 0x0560,
	[TPAUSER]	= 0x0564,
	[GECMR]		= 0x05b0,
	[BCULR]		= 0x05b4,
	[MAHR]		= 0x05c0,
	[MALR]		= 0x05c8,
	[TROCR]		= 0x0700,
	[CDCR]		= 0x0708,
	[LCCR]		= 0x0710,
	[CEFCR]		= 0x0740,
	[FRECR]		= 0x0748,
	[TSFRCR]	= 0x0750,
	[TLFRCR]	= 0x0758,
	[RFCR]		= 0x0760,
	[CERCR]		= 0x0768,
	[CEECR]		= 0x0770,
	[MAFCR]		= 0x0778,
	[RMII_MII]	= 0x0790,

	[ARSTR]		= 0x0000,
	[TSU_CTRST]	= 0x0004,
	[TSU_FWEN0]	= 0x0010,
	[TSU_FWEN1]	= 0x0014,
	[TSU_FCM]	= 0x0018,
	[TSU_BSYSL0]	= 0x0020,
	[TSU_BSYSL1]	= 0x0024,
	[TSU_PRISL0]	= 0x0028,
	[TSU_PRISL1]	= 0x002c,
	[TSU_FWSL0]	= 0x0030,
	[TSU_FWSL1]	= 0x0034,
	[TSU_FWSLC]	= 0x0038,
	[TSU_QTAG0]	= 0x0040,
	[TSU_QTAG1]	= 0x0044,
	[TSU_FWSR]	= 0x0050,
	[TSU_FWINMK]	= 0x0054,
	[TSU_ADQT0]	= 0x0048,
	[TSU_ADQT1]	= 0x004c,
	[TSU_VTAG0]	= 0x0058,
	[TSU_VTAG1]	= 0x005c,
	[TSU_ADSBSY]	= 0x0060,
	[TSU_TEN]	= 0x0064,
	[TSU_POST1]	= 0x0070,
	[TSU_POST2]	= 0x0074,
	[TSU_POST3]	= 0x0078,
	[TSU_POST4]	= 0x007c,
	[TSU_ADRH0]	= 0x0100,

	[TXNLCR0]	= 0x0080,
	[TXALCR0]	= 0x0084,
	[RXNLCR0]	= 0x0088,
	[RXALCR0]	= 0x008c,
	[FWNLCR0]	= 0x0090,
	[FWALCR0]	= 0x0094,
	[TXNLCR1]	= 0x00a0,
	[TXALCR1]	= 0x00a0,
	[RXNLCR1]	= 0x00a8,
	[RXALCR1]	= 0x00ac,
	[FWNLCR1]	= 0x00b0,
	[FWALCR1]	= 0x00b4,
};

static const u16 sh_eth_offset_fast_rz[SH_ETH_MAX_REGISTER_OFFSET] = {
	SH_ETH_OFFSET_DEFAULTS,

	[EDSR]		= 0x0000,
	[EDMR]		= 0x0400,
	[EDTRR]		= 0x0408,
	[EDRRR]		= 0x0410,
	[EESR]		= 0x0428,
	[EESIPR]	= 0x0430,
	[TDLAR]		= 0x0010,
	[TDFAR]		= 0x0014,
	[TDFXR]		= 0x0018,
	[TDFFR]		= 0x001c,
	[RDLAR]		= 0x0030,
	[RDFAR]		= 0x0034,
	[RDFXR]		= 0x0038,
	[RDFFR]		= 0x003c,
	[TRSCER]	= 0x0438,
	[RMFCR]		= 0x0440,
	[TFTR]		= 0x0448,
	[FDR]		= 0x0450,
	[RMCR]		= 0x0458,
	[RPADIR]	= 0x0460,
	[FCFTR]		= 0x0468,
	[CSMR]		= 0x04E4,

	[ECMR]		= 0x0500,
	[RFLR]		= 0x0508,
	[ECSR]		= 0x0510,
	[ECSIPR]	= 0x0518,
	[PIR]		= 0x0520,
	[APR]		= 0x0554,
	[MPR]		= 0x0558,
	[PFTCR]		= 0x055c,
	[PFRCR]		= 0x0560,
	[TPAUSER]	= 0x0564,
	[MAHR]		= 0x05c0,
	[MALR]		= 0x05c8,
	[CEFCR]		= 0x0740,
	[FRECR]		= 0x0748,
	[TSFRCR]	= 0x0750,
	[TLFRCR]	= 0x0758,
	[RFCR]		= 0x0760,
	[MAFCR]		= 0x0778,

	[ARSTR]		= 0x0000,
	[TSU_CTRST]	= 0x0004,
	[TSU_FWSLC]	= 0x0038,
	[TSU_VTAG0]	= 0x0058,
	[TSU_ADSBSY]	= 0x0060,
	[TSU_TEN]	= 0x0064,
	[TSU_POST1]	= 0x0070,
	[TSU_POST2]	= 0x0074,
	[TSU_POST3]	= 0x0078,
	[TSU_POST4]	= 0x007c,
	[TSU_ADRH0]	= 0x0100,

	[TXNLCR0]	= 0x0080,
	[TXALCR0]	= 0x0084,
	[RXNLCR0]	= 0x0088,
	[RXALCR0]	= 0x008C,
};

static const u16 sh_eth_offset_fast_rcar[SH_ETH_MAX_REGISTER_OFFSET] = {
	SH_ETH_OFFSET_DEFAULTS,

	[ECMR]		= 0x0300,
	[RFLR]		= 0x0308,
	[ECSR]		= 0x0310,
	[ECSIPR]	= 0x0318,
	[PIR]		= 0x0320,
	[PSR]		= 0x0328,
	[RDMLR]		= 0x0340,
	[IPGR]		= 0x0350,
	[APR]		= 0x0354,
	[MPR]		= 0x0358,
	[RFCF]		= 0x0360,
	[TPAUSER]	= 0x0364,
	[TPAUSECR]	= 0x0368,
	[MAHR]		= 0x03c0,
	[MALR]		= 0x03c8,
	[TROCR]		= 0x03d0,
	[CDCR]		= 0x03d4,
	[LCCR]		= 0x03d8,
	[CNDCR]		= 0x03dc,
	[CEFCR]		= 0x03e4,
	[FRECR]		= 0x03e8,
	[TSFRCR]	= 0x03ec,
	[TLFRCR]	= 0x03f0,
	[RFCR]		= 0x03f4,
	[MAFCR]		= 0x03f8,

	[EDMR]		= 0x0200,
	[EDTRR]		= 0x0208,
	[EDRRR]		= 0x0210,
	[TDLAR]		= 0x0218,
	[RDLAR]		= 0x0220,
	[EESR]		= 0x0228,
	[EESIPR]	= 0x0230,
	[TRSCER]	= 0x0238,
	[RMFCR]		= 0x0240,
	[TFTR]		= 0x0248,
	[FDR]		= 0x0250,
	[RMCR]		= 0x0258,
	[TFUCR]		= 0x0264,
	[RFOCR]		= 0x0268,
	[RMIIMODE]	= 0x026c,
	[FCFTR]		= 0x0270,
	[TRIMD]		= 0x027c,
};

static const u16 sh_eth_offset_fast_sh4[SH_ETH_MAX_REGISTER_OFFSET] = {
	SH_ETH_OFFSET_DEFAULTS,

	[ECMR]		= 0x0100,
	[RFLR]		= 0x0108,
	[ECSR]		= 0x0110,
	[ECSIPR]	= 0x0118,
	[PIR]		= 0x0120,
	[PSR]		= 0x0128,
	[RDMLR]		= 0x0140,
	[IPGR]		= 0x0150,
	[APR]		= 0x0154,
	[MPR]		= 0x0158,
	[TPAUSER]	= 0x0164,
	[RFCF]		= 0x0160,
	[TPAUSECR]	= 0x0168,
	[BCFRR]		= 0x016c,
	[MAHR]		= 0x01c0,
	[MALR]		= 0x01c8,
	[TROCR]		= 0x01d0,
	[CDCR]		= 0x01d4,
	[LCCR]		= 0x01d8,
	[CNDCR]		= 0x01dc,
	[CEFCR]		= 0x01e4,
	[FRECR]		= 0x01e8,
	[TSFRCR]	= 0x01ec,
	[TLFRCR]	= 0x01f0,
	[RFCR]		= 0x01f4,
	[MAFCR]		= 0x01f8,
	[RTRATE]	= 0x01fc,

	[EDMR]		= 0x0000,
	[EDTRR]		= 0x0008,
	[EDRRR]		= 0x0010,
	[TDLAR]		= 0x0018,
	[RDLAR]		= 0x0020,
	[EESR]		= 0x0028,
	[EESIPR]	= 0x0030,
	[TRSCER]	= 0x0038,
	[RMFCR]		= 0x0040,
	[TFTR]		= 0x0048,
	[FDR]		= 0x0050,
	[RMCR]		= 0x0058,
	[TFUCR]		= 0x0064,
	[RFOCR]		= 0x0068,
	[FCFTR]		= 0x0070,
	[RPADIR]	= 0x0078,
	[TRIMD]		= 0x007c,
	[RBWAR]		= 0x00c8,
	[RDFAR]		= 0x00cc,
	[TBRAR]		= 0x00d4,
	[TDFAR]		= 0x00d8,
};

static const u16 sh_eth_offset_fast_sh3_sh2[SH_ETH_MAX_REGISTER_OFFSET] = {
	SH_ETH_OFFSET_DEFAULTS,

	[EDMR]		= 0x0000,
	[EDTRR]		= 0x0004,
	[EDRRR]		= 0x0008,
	[TDLAR]		= 0x000c,
	[RDLAR]		= 0x0010,
	[EESR]		= 0x0014,
	[EESIPR]	= 0x0018,
	[TRSCER]	= 0x001c,
	[RMFCR]		= 0x0020,
	[TFTR]		= 0x0024,
	[FDR]		= 0x0028,
	[RMCR]		= 0x002c,
	[EDOCR]		= 0x0030,
	[FCFTR]		= 0x0034,
	[RPADIR]	= 0x0038,
	[TRIMD]		= 0x003c,
	[RBWAR]		= 0x0040,
	[RDFAR]		= 0x0044,
	[TBRAR]		= 0x004c,
	[TDFAR]		= 0x0050,

	[ECMR]		= 0x0160,
	[ECSR]		= 0x0164,
	[ECSIPR]	= 0x0168,
	[PIR]		= 0x016c,
	[MAHR]		= 0x0170,
	[MALR]		= 0x0174,
	[RFLR]		= 0x0178,
	[PSR]		= 0x017c,
	[TROCR]		= 0x0180,
	[CDCR]		= 0x0184,
	[LCCR]		= 0x0188,
	[CNDCR]		= 0x018c,
	[CEFCR]		= 0x0194,
	[FRECR]		= 0x0198,
	[TSFRCR]	= 0x019c,
	[TLFRCR]	= 0x01a0,
	[RFCR]		= 0x01a4,
	[MAFCR]		= 0x01a8,
	[IPGR]		= 0x01b4,
	[APR]		= 0x01b8,
	[MPR]		= 0x01bc,
	[TPAUSER]	= 0x01c4,
	[BCFR]		= 0x01cc,

	[ARSTR]		= 0x0000,
	[TSU_CTRST]	= 0x0004,
	[TSU_FWEN0]	= 0x0010,
	[TSU_FWEN1]	= 0x0014,
	[TSU_FCM]	= 0x0018,
	[TSU_BSYSL0]	= 0x0020,
	[TSU_BSYSL1]	= 0x0024,
	[TSU_PRISL0]	= 0x0028,
	[TSU_PRISL1]	= 0x002c,
	[TSU_FWSL0]	= 0x0030,
	[TSU_FWSL1]	= 0x0034,
	[TSU_FWSLC]	= 0x0038,
	[TSU_QTAGM0]	= 0x0040,
	[TSU_QTAGM1]	= 0x0044,
	[TSU_ADQT0]	= 0x0048,
	[TSU_ADQT1]	= 0x004c,
	[TSU_FWSR]	= 0x0050,
	[TSU_FWINMK]	= 0x0054,
	[TSU_ADSBSY]	= 0x0060,
	[TSU_TEN]	= 0x0064,
	[TSU_POST1]	= 0x0070,
	[TSU_POST2]	= 0x0074,
	[TSU_POST3]	= 0x0078,
	[TSU_POST4]	= 0x007c,

	[TXNLCR0]	= 0x0080,
	[TXALCR0]	= 0x0084,
	[RXNLCR0]	= 0x0088,
	[RXALCR0]	= 0x008c,
	[FWNLCR0]	= 0x0090,
	[FWALCR0]	= 0x0094,
	[TXNLCR1]	= 0x00a0,
	[TXALCR1]	= 0x00a0,
	[RXNLCR1]	= 0x00a8,
	[RXALCR1]	= 0x00ac,
	[FWNLCR1]	= 0x00b0,
	[FWALCR1]	= 0x00b4,

	[TSU_ADRH0]	= 0x0100,
};

static void sh_eth_rcv_snd_disable(struct net_device *ndev);
static struct net_device_stats *sh_eth_get_stats(struct net_device *ndev);

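/* Generic register accessors: the register index is translated through
 * the per-SoC offset table before the MMIO window at mdp->addr is
 * accessed.
 */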
static void sh_eth_write(struct net_device *ndev, u32 data, int enum_index)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);
	u16 offset = mdp->reg_offset[enum_index];

	if (WARN_ON(offset == SH_ETH_OFFSET_INVALID))
		return;

	iowrite32(data, mdp->addr + offset);
}

static u32 sh_eth_read(struct net_device *ndev, int enum_index)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);
	u16 offset = mdp->reg_offset[enum_index];

	if (WARN_ON(offset == SH_ETH_OFFSET_INVALID))
		return ~0U;

	return ioread32(mdp->addr + offset);
}

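/* Read-modify-write helper: clear the bits in @clear, then set the bits
 * in @set.
 */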
static void sh_eth_modify(struct net_device *ndev, int enum_index, u32 clear,
			  u32 set)
{
	sh_eth_write(ndev, (sh_eth_read(ndev, enum_index) & ~clear) | set,
		     enum_index);
}

static bool sh_eth_is_gether(struct sh_eth_private *mdp)
{
	return mdp->reg_offset == sh_eth_offset_gigabit;
}

static bool sh_eth_is_rz_fast_ether(struct sh_eth_private *mdp)
{
	return mdp->reg_offset == sh_eth_offset_fast_rz;
}

static void sh_eth_select_mii(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);
	u32 value;

	switch (mdp->phy_interface) {
	case PHY_INTERFACE_MODE_GMII:
		value = 0x2;
		break;
	case PHY_INTERFACE_MODE_MII:
		value = 0x1;
		break;
	case PHY_INTERFACE_MODE_RMII:
		value = 0x0;
		break;
	default:
		netdev_warn(ndev,
			    "PHY interface mode was not setup. Set to MII.\n");
		value = 0x1;
		break;
	}

	sh_eth_write(ndev, value, RMII_MII);
}

static void sh_eth_set_duplex(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	sh_eth_modify(ndev, ECMR, ECMR_DM, mdp->duplex ? ECMR_DM : 0);
}

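/* Assert the software reset bit in ARSTR (in the TSU register block) and
 * give the controller 1 ms to settle.
 */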
static void sh_eth_chip_reset(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	/* reset device */
	sh_eth_tsu_write(mdp, ARSTR_ARST, ARSTR);
	mdelay(1);
}

static void sh_eth_set_rate_gether(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	switch (mdp->speed) {
	case 10: /* 10BASE */
		sh_eth_write(ndev, GECMR_10, GECMR);
		break;
	case 100: /* 100BASE */
		sh_eth_write(ndev, GECMR_100, GECMR);
		break;
	case 1000: /* 1000BASE */
		sh_eth_write(ndev, GECMR_1000, GECMR);
		break;
	}
}

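/* Per-SoC capability/quirk descriptors.
 *
 * struct sh_eth_cpu_data records which features a controller variant
 * implements (register layout, interrupt masks, padding, TSU, checksum
 * offload, etc.).  Fields left at zero are later filled in with safe
 * defaults by sh_eth_set_default_cpu_data().
 */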
#ifdef CONFIG_OF
/* R7S72100 */
static struct sh_eth_cpu_data r7s72100_data = {
	.chip_reset	= sh_eth_chip_reset,
	.set_duplex	= sh_eth_set_duplex,

	.register_type	= SH_ETH_REG_FAST_RZ,

	.ecsr_value	= ECSR_ICD,
	.ecsipr_value	= ECSIPR_ICDIP,
	.eesipr_value	= 0xe77f009f,

	.tx_check	= EESR_TC1 | EESR_FTC,
	.eesr_err_check	= EESR_TWB1 | EESR_TWB | EESR_TABT | EESR_RABT |
			  EESR_RFE | EESR_RDE | EESR_RFRMER | EESR_TFE |
			  EESR_TDE,
	.fdr_value	= 0x0000070f,

	.no_psr		= 1,
	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
	.rpadir		= 1,
	.rpadir_value	= 2 << 16,
	.no_trimd	= 1,
	.no_ade		= 1,
	.hw_checksum	= 1,
	.tsu		= 1,
};

static void sh_eth_chip_reset_r8a7740(struct net_device *ndev)
{
	sh_eth_chip_reset(ndev);

	sh_eth_select_mii(ndev);
}

/* R8A7740 */
static struct sh_eth_cpu_data r8a7740_data = {
	.chip_reset	= sh_eth_chip_reset_r8a7740,
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_gether,

	.register_type	= SH_ETH_REG_GIGABIT,

	.ecsr_value	= ECSR_ICD | ECSR_MPD,
	.ecsipr_value	= ECSIPR_LCHNGIP | ECSIPR_ICDIP | ECSIPR_MPDIP,
	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003fffff,

	.tx_check	= EESR_TC1 | EESR_FTC,
	.eesr_err_check	= EESR_TWB1 | EESR_TWB | EESR_TABT | EESR_RABT |
			  EESR_RFE | EESR_RDE | EESR_RFRMER | EESR_TFE |
			  EESR_TDE,
	.fdr_value	= 0x0000070f,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.bculr		= 1,
	.hw_swap	= 1,
	.rpadir		= 1,
	.rpadir_value	= 2 << 16,
	.no_trimd	= 1,
	.no_ade		= 1,
	.hw_checksum	= 1,
	.tsu		= 1,
	.select_mii	= 1,
	.magic		= 1,
};

/* There is CPU dependent code */
static void sh_eth_set_rate_r8a777x(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	switch (mdp->speed) {
	case 10: /* 10BASE */
		sh_eth_modify(ndev, ECMR, ECMR_ELB, 0);
		break;
	case 100: /* 100BASE */
		sh_eth_modify(ndev, ECMR, ECMR_ELB, ECMR_ELB);
		break;
	}
}

/* R8A7778/9 */
static struct sh_eth_cpu_data r8a777x_data = {
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_r8a777x,

	.register_type	= SH_ETH_REG_FAST_RCAR,

	.ecsr_value	= ECSR_PSRTO | ECSR_LCHNG | ECSR_ICD,
	.ecsipr_value	= ECSIPR_PSRTOIP | ECSIPR_LCHNGIP | ECSIPR_ICDIP,
	.eesipr_value	= 0x01ff009f,

	.tx_check	= EESR_FTC | EESR_CND | EESR_DLC | EESR_CD | EESR_RTO,
	.eesr_err_check	= EESR_TWB | EESR_TABT | EESR_RABT | EESR_RFE |
			  EESR_RDE | EESR_RFRMER | EESR_TFE | EESR_TDE,
	.fdr_value	= 0x00000f0f,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
};

/* R8A7790/1 */
static struct sh_eth_cpu_data r8a779x_data = {
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_r8a777x,

	.register_type	= SH_ETH_REG_FAST_RCAR,

	.ecsr_value	= ECSR_PSRTO | ECSR_LCHNG | ECSR_ICD | ECSR_MPD,
	.ecsipr_value	= ECSIPR_PSRTOIP | ECSIPR_LCHNGIP | ECSIPR_ICDIP |
			  ECSIPR_MPDIP,
	.eesipr_value	= 0x01ff009f,

	.tx_check	= EESR_FTC | EESR_CND | EESR_DLC | EESR_CD | EESR_RTO,
	.eesr_err_check	= EESR_TWB | EESR_TABT | EESR_RABT | EESR_RFE |
			  EESR_RDE | EESR_RFRMER | EESR_TFE | EESR_TDE,
	.fdr_value	= 0x00000f0f,

	.trscer_err_mask = DESC_I_RINT8,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
	.rmiimode	= 1,
	.magic		= 1,
};
#endif /* CONFIG_OF */

static void sh_eth_set_rate_sh7724(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	switch (mdp->speed) {
	case 10: /* 10BASE */
		sh_eth_modify(ndev, ECMR, ECMR_RTM, 0);
		break;
	case 100: /* 100BASE */
		sh_eth_modify(ndev, ECMR, ECMR_RTM, ECMR_RTM);
		break;
	}
}

/* SH7724 */
static struct sh_eth_cpu_data sh7724_data = {
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_sh7724,

	.register_type	= SH_ETH_REG_FAST_SH4,

	.ecsr_value	= ECSR_PSRTO | ECSR_LCHNG | ECSR_ICD,
	.ecsipr_value	= ECSIPR_PSRTOIP | ECSIPR_LCHNGIP | ECSIPR_ICDIP,
	.eesipr_value	= 0x01ff009f,

	.tx_check	= EESR_FTC | EESR_CND | EESR_DLC | EESR_CD | EESR_RTO,
	.eesr_err_check	= EESR_TWB | EESR_TABT | EESR_RABT | EESR_RFE |
			  EESR_RDE | EESR_RFRMER | EESR_TFE | EESR_TDE,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
	.rpadir		= 1,
	.rpadir_value	= 0x00020000, /* NET_IP_ALIGN assumed to be 2 */
};

static void sh_eth_set_rate_sh7757(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	switch (mdp->speed) {
	case 10: /* 10BASE */
		sh_eth_write(ndev, 0, RTRATE);
		break;
	case 100: /* 100BASE */
		sh_eth_write(ndev, 1, RTRATE);
		break;
	}
}

/* SH7757 */
static struct sh_eth_cpu_data sh7757_data = {
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_sh7757,

	.register_type	= SH_ETH_REG_FAST_SH4,

	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003fffff,

	.tx_check	= EESR_FTC | EESR_CND | EESR_DLC | EESR_CD | EESR_RTO,
	.eesr_err_check	= EESR_TWB | EESR_TABT | EESR_RABT | EESR_RFE |
			  EESR_RDE | EESR_RFRMER | EESR_TFE | EESR_TDE,

	.irq_flags	= IRQF_SHARED,
	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
	.no_ade		= 1,
	.rpadir		= 1,
	.rpadir_value	= 2 << 16,
	.rtrate		= 1,
};

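/* SH7757 GEther: the chip reset helper below preserves the MAHR/MALR
 * MAC address registers of both ports across the ARSTR reset, accessing
 * them through fixed physical addresses.
 */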
#define SH_GIGA_ETH_BASE	0xfee00000UL
#define GIGA_MALR(port)		(SH_GIGA_ETH_BASE + 0x800 * (port) + 0x05c8)
#define GIGA_MAHR(port)		(SH_GIGA_ETH_BASE + 0x800 * (port) + 0x05c0)
static void sh_eth_chip_reset_giga(struct net_device *ndev)
{
	u32 mahr[2], malr[2];
	int i;

	/* save MAHR and MALR */
	for (i = 0; i < 2; i++) {
		malr[i] = ioread32((void *)GIGA_MALR(i));
		mahr[i] = ioread32((void *)GIGA_MAHR(i));
	}

	sh_eth_chip_reset(ndev);

	/* restore MAHR and MALR */
	for (i = 0; i < 2; i++) {
		iowrite32(malr[i], (void *)GIGA_MALR(i));
		iowrite32(mahr[i], (void *)GIGA_MAHR(i));
	}
}

static void sh_eth_set_rate_giga(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);

	switch (mdp->speed) {
	case 10: /* 10BASE */
		sh_eth_write(ndev, 0x00000000, GECMR);
		break;
	case 100: /* 100BASE */
		sh_eth_write(ndev, 0x00000010, GECMR);
		break;
	case 1000: /* 1000BASE */
		sh_eth_write(ndev, 0x00000020, GECMR);
		break;
	}
}

/* SH7757(GETHERC) */
static struct sh_eth_cpu_data sh7757_data_giga = {
	.chip_reset	= sh_eth_chip_reset_giga,
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_giga,

	.register_type	= SH_ETH_REG_GIGABIT,

	.ecsr_value	= ECSR_ICD | ECSR_MPD,
	.ecsipr_value	= ECSIPR_LCHNGIP | ECSIPR_ICDIP | ECSIPR_MPDIP,
	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003fffff,

	.tx_check	= EESR_TC1 | EESR_FTC,
	.eesr_err_check	= EESR_TWB1 | EESR_TWB | EESR_TABT | EESR_RABT |
			  EESR_RFE | EESR_RDE | EESR_RFRMER | EESR_TFE |
			  EESR_TDE,
	.fdr_value	= 0x0000072f,

	.irq_flags	= IRQF_SHARED,
	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.bculr		= 1,
	.hw_swap	= 1,
	.rpadir		= 1,
	.rpadir_value	= 2 << 16,
	.no_trimd	= 1,
	.no_ade		= 1,
	.tsu		= 1,
};

/* SH7734 */
static struct sh_eth_cpu_data sh7734_data = {
	.chip_reset	= sh_eth_chip_reset,
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_gether,

	.register_type	= SH_ETH_REG_GIGABIT,

	.ecsr_value	= ECSR_ICD | ECSR_MPD,
	.ecsipr_value	= ECSIPR_LCHNGIP | ECSIPR_ICDIP | ECSIPR_MPDIP,
	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003f07ff,

	.tx_check	= EESR_TC1 | EESR_FTC,
	.eesr_err_check	= EESR_TWB1 | EESR_TWB | EESR_TABT | EESR_RABT |
			  EESR_RFE | EESR_RDE | EESR_RFRMER | EESR_TFE |
			  EESR_TDE,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.bculr		= 1,
	.hw_swap	= 1,
	.no_trimd	= 1,
	.no_ade		= 1,
	.tsu		= 1,
	.hw_checksum	= 1,
	.select_mii	= 1,
};

/* SH7763 */
static struct sh_eth_cpu_data sh7763_data = {
	.chip_reset	= sh_eth_chip_reset,
	.set_duplex	= sh_eth_set_duplex,
	.set_rate	= sh_eth_set_rate_gether,

	.register_type	= SH_ETH_REG_GIGABIT,

	.ecsr_value	= ECSR_ICD | ECSR_MPD,
	.ecsipr_value	= ECSIPR_LCHNGIP | ECSIPR_ICDIP | ECSIPR_MPDIP,
	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003f07ff,

	.tx_check	= EESR_TC1 | EESR_FTC,
	.eesr_err_check	= EESR_TWB1 | EESR_TWB | EESR_TABT | EESR_RABT |
			  EESR_RDE | EESR_RFRMER | EESR_TFE | EESR_TDE,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.bculr		= 1,
	.hw_swap	= 1,
	.no_trimd	= 1,
	.no_ade		= 1,
	.tsu		= 1,
	.irq_flags	= IRQF_SHARED,
};

static struct sh_eth_cpu_data sh7619_data = {
	.register_type	= SH_ETH_REG_FAST_SH3_SH2,

	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003fffff,

	.apr		= 1,
	.mpr		= 1,
	.tpauser	= 1,
	.hw_swap	= 1,
};

static struct sh_eth_cpu_data sh771x_data = {
	.register_type	= SH_ETH_REG_FAST_SH3_SH2,

	.eesipr_value	= DMAC_M_RFRMER | DMAC_M_ECI | 0x003fffff,
	.tsu		= 1,
};

static void sh_eth_set_default_cpu_data(struct sh_eth_cpu_data *cd)
{
	if (!cd->ecsr_value)
		cd->ecsr_value = DEFAULT_ECSR_INIT;

	if (!cd->ecsipr_value)
		cd->ecsipr_value = DEFAULT_ECSIPR_INIT;

	if (!cd->fcftr_value)
		cd->fcftr_value = DEFAULT_FIFO_F_D_RFF |
				  DEFAULT_FIFO_F_D_RFD;

	if (!cd->fdr_value)
		cd->fdr_value = DEFAULT_FDR_INIT;

	if (!cd->tx_check)
		cd->tx_check = DEFAULT_TX_CHECK;

	if (!cd->eesr_err_check)
		cd->eesr_err_check = DEFAULT_EESR_ERR_CHECK;

	if (!cd->trscer_err_mask)
		cd->trscer_err_mask = DEFAULT_TRSCER_ERR_MASK;
}

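/* Poll for completion of an EDMR software reset; give up after ~100 ms. */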
static int sh_eth_check_reset(struct net_device *ndev)
{
	int ret = 0;
	int cnt = 100;

	while (cnt > 0) {
		if (!(sh_eth_read(ndev, EDMR) & EDMR_SRST_GETHER))
			break;
		mdelay(1);
		cnt--;
	}
	if (cnt <= 0) {
		netdev_err(ndev, "Device reset failed\n");
		ret = -ETIMEDOUT;
	}
	return ret;
}

static int sh_eth_reset(struct net_device *ndev)
{
	struct sh_eth_private *mdp = netdev_priv(ndev);
	int ret = 0;

	if (sh_eth_is_gether(mdp) || sh_eth_is_rz_fast_ether(mdp)) {
		sh_eth_write(ndev, EDSR_ENALL, EDSR);
		sh_eth_modify(ndev, EDMR, EDMR_SRST_GETHER, EDMR_SRST_GETHER);

		ret = sh_eth_check_reset(ndev);
		if (ret)
			return ret;

		/* Table Init */
		sh_eth_write(ndev, 0x0, TDLAR);
		sh_eth_write(ndev, 0x0, TDFAR);
		sh_eth_write(ndev, 0x0, TDFXR);
		sh_eth_write(ndev, 0x0, TDFFR);
		sh_eth_write(ndev, 0x0, RDLAR);
		sh_eth_write(ndev, 0x0, RDFAR);
		sh_eth_write(ndev, 0x0, RDFXR);
		sh_eth_write(ndev, 0x0, RDFFR);

		/* Reset HW CRC register */
		if (mdp->cd->hw_checksum)
			sh_eth_write(ndev, 0x0, CSMR);

		/* Select MII mode */
		if (mdp->cd->select_mii)
			sh_eth_select_mii(ndev);
	} else {
		sh_eth_modify(ndev, EDMR, EDMR_SRST_ETHER, EDMR_SRST_ETHER);
		mdelay(3);
		sh_eth_modify(ndev, EDMR, EDMR_SRST_ETHER, 0);
	}

	return ret;
}

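/* Reserve headroom so that the RX buffer start is aligned to
 * SH_ETH_RX_ALIGN bytes.
 */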
static void sh_eth_set_receive_align(struct sk_buff *skb)
{
	uintptr_t reserve = (uintptr_t)skb->data & (SH_ETH_RX_ALIGN - 1);

	if (reserve)
		skb_reserve(skb, SH_ETH_RX_ALIGN - reserve);
}

/* Program the hardware MAC address from dev->dev_addr. */
static void update_mac_address(struct net_device *ndev)
{
	sh_eth_write(ndev,
		     (ndev->dev_addr[0] << 24) | (ndev->dev_addr[1] << 16) |
		     (ndev->dev_addr[2] << 8) | (ndev->dev_addr[3]), MAHR);
	sh_eth_write(ndev,
		     (ndev->dev_addr[4] << 8) | (ndev->dev_addr[5]), MALR);
}

/* Get the MAC address from the SuperH MAC address registers.
 *
 * The SuperH Ethernet controller has no ROM to hold a MAC address.
 * This driver reads back the MAC address that was programmed by the
 * bootloader (U-Boot or sh-ipl+g). To use this device, the bootloader
 * must set up a MAC address beforehand.
 */
static void read_mac_address(struct net_device *ndev, unsigned char *mac)
{
	if (mac[0] || mac[1] || mac[2] || mac[3] || mac[4] || mac[5]) {
		memcpy(ndev->dev_addr, mac, ETH_ALEN);
	} else {
		u32 mahr = sh_eth_read(ndev, MAHR);
		u32 malr = sh_eth_read(ndev, MALR);

		ndev->dev_addr[0] = (mahr >> 24) & 0xFF;
		ndev->dev_addr[1] = (mahr >> 16) & 0xFF;
		ndev->dev_addr[2] = (mahr >> 8) & 0xFF;
		ndev->dev_addr[3] = (mahr >> 0) & 0xFF;
		ndev->dev_addr[4] = (malr >> 8) & 0xFF;
		ndev->dev_addr[5] = (malr >> 0) & 0xFF;
	}
}

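/* The "start transmission" value written to EDTRR differs between the
 * GEther-class and Ether-class controllers.
 */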
static u32 sh_eth_get_edtrr_trns(struct sh_eth_private *mdp)
{
	if (sh_eth_is_gether(mdp) || sh_eth_is_rz_fast_ether(mdp))
		return EDTRR_TRNS_GETHER;
	else
		return EDTRR_TRNS_ETHER;
}

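/* Bit-banged MDIO support.
 *
 * The MII management interface is driven by toggling bits in the PIR
 * register through the generic mdio-bitbang framework.  An optional
 * set_gate() callback is invoked before each PIR access.
 */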
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 999 | struct bb_info { |
Yoshihiro Shimoda | ae70644 | 2011-09-27 21:48:58 +0000 | [diff] [blame] | 1000 | void (*set_gate)(void *addr); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1001 | struct mdiobb_ctrl ctrl; |
Yoshihiro Shimoda | ae70644 | 2011-09-27 21:48:58 +0000 | [diff] [blame] | 1002 | void *addr; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1003 | }; |
| 1004 | |
Sergei Shtylyov | 39b4b06 | 2015-12-08 00:40:57 +0300 | [diff] [blame] | 1005 | static void sh_mdio_ctrl(struct mdiobb_ctrl *ctrl, u32 mask, int set) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1006 | { |
| 1007 | struct bb_info *bitbang = container_of(ctrl, struct bb_info, ctrl); |
Sergei Shtylyov | 78fa3c5 | 2015-12-08 00:41:43 +0300 | [diff] [blame] | 1008 | u32 pir; |
Yoshihiro Shimoda | b3017e6 | 2011-03-07 21:59:55 +0000 | [diff] [blame] | 1009 | |
| 1010 | if (bitbang->set_gate) |
| 1011 | bitbang->set_gate(bitbang->addr); |
| 1012 | |
Sergei Shtylyov | 78fa3c5 | 2015-12-08 00:41:43 +0300 | [diff] [blame] | 1013 | pir = ioread32(bitbang->addr); |
Sergei Shtylyov | 39b4b06 | 2015-12-08 00:40:57 +0300 | [diff] [blame] | 1014 | if (set) |
Sergei Shtylyov | 78fa3c5 | 2015-12-08 00:41:43 +0300 | [diff] [blame] | 1015 | pir |= mask; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1016 | else |
Sergei Shtylyov | 78fa3c5 | 2015-12-08 00:41:43 +0300 | [diff] [blame] | 1017 | pir &= ~mask; |
| 1018 | iowrite32(pir, bitbang->addr); |
Sergei Shtylyov | 39b4b06 | 2015-12-08 00:40:57 +0300 | [diff] [blame] | 1019 | } |
| 1020 | |
| 1021 | /* Data I/O pin control */ |
| 1022 | static void sh_mmd_ctrl(struct mdiobb_ctrl *ctrl, int bit) |
| 1023 | { |
| 1024 | sh_mdio_ctrl(ctrl, PIR_MMD, bit); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1025 | } |
| 1026 | |
| 1027 | /* Set bit data*/ |
| 1028 | static void sh_set_mdio(struct mdiobb_ctrl *ctrl, int bit) |
| 1029 | { |
Sergei Shtylyov | 39b4b06 | 2015-12-08 00:40:57 +0300 | [diff] [blame] | 1030 | sh_mdio_ctrl(ctrl, PIR_MDO, bit); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1031 | } |
| 1032 | |
| 1033 | /* Get bit data*/ |
| 1034 | static int sh_get_mdio(struct mdiobb_ctrl *ctrl) |
| 1035 | { |
| 1036 | struct bb_info *bitbang = container_of(ctrl, struct bb_info, ctrl); |
Yoshihiro Shimoda | b3017e6 | 2011-03-07 21:59:55 +0000 | [diff] [blame] | 1037 | |
| 1038 | if (bitbang->set_gate) |
| 1039 | bitbang->set_gate(bitbang->addr); |
| 1040 | |
Sergei Shtylyov | 78fa3c5 | 2015-12-08 00:41:43 +0300 | [diff] [blame] | 1041 | return (ioread32(bitbang->addr) & PIR_MDI) != 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1042 | } |
| 1043 | |
| 1044 | /* MDC pin control */ |
| 1045 | static void sh_mdc_ctrl(struct mdiobb_ctrl *ctrl, int bit) |
| 1046 | { |
Sergei Shtylyov | 39b4b06 | 2015-12-08 00:40:57 +0300 | [diff] [blame] | 1047 | sh_mdio_ctrl(ctrl, PIR_MDC, bit); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1048 | } |
| 1049 | |
| 1050 | /* mdio bus control struct */ |
| 1051 | static struct mdiobb_ops bb_ops = { |
| 1052 | .owner = THIS_MODULE, |
| 1053 | .set_mdc = sh_mdc_ctrl, |
| 1054 | .set_mdio_dir = sh_mmd_ctrl, |
| 1055 | .set_mdio_data = sh_set_mdio, |
| 1056 | .get_mdio_data = sh_get_mdio, |
| 1057 | }; |
| 1058 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1059 | /* free skb and descriptor buffer */ |
| 1060 | static void sh_eth_ring_free(struct net_device *ndev) |
| 1061 | { |
| 1062 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1063 | int ringsize, i; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1064 | |
| 1065 | /* Free Rx skb ringbuffer */ |
| 1066 | if (mdp->rx_skbuff) { |
Sergei Shtylyov | 179d80a | 2014-06-28 04:10:00 +0400 | [diff] [blame] | 1067 | for (i = 0; i < mdp->num_rx_ring; i++) |
| 1068 | dev_kfree_skb(mdp->rx_skbuff[i]); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1069 | } |
| 1070 | kfree(mdp->rx_skbuff); |
Yoshihiro Shimoda | 91c7755 | 2012-06-26 20:00:01 +0000 | [diff] [blame] | 1071 | mdp->rx_skbuff = NULL; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1072 | |
| 1073 | /* Free Tx skb ringbuffer */ |
| 1074 | if (mdp->tx_skbuff) { |
Sergei Shtylyov | 179d80a | 2014-06-28 04:10:00 +0400 | [diff] [blame] | 1075 | for (i = 0; i < mdp->num_tx_ring; i++) |
| 1076 | dev_kfree_skb(mdp->tx_skbuff[i]); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1077 | } |
| 1078 | kfree(mdp->tx_skbuff); |
Yoshihiro Shimoda | 91c7755 | 2012-06-26 20:00:01 +0000 | [diff] [blame] | 1079 | mdp->tx_skbuff = NULL; |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1080 | |
| 1081 | if (mdp->rx_ring) { |
| 1082 | ringsize = sizeof(struct sh_eth_rxdesc) * mdp->num_rx_ring; |
| 1083 | dma_free_coherent(NULL, ringsize, mdp->rx_ring, |
| 1084 | mdp->rx_desc_dma); |
| 1085 | mdp->rx_ring = NULL; |
| 1086 | } |
| 1087 | |
| 1088 | if (mdp->tx_ring) { |
| 1089 | ringsize = sizeof(struct sh_eth_txdesc) * mdp->num_tx_ring; |
| 1090 | dma_free_coherent(NULL, ringsize, mdp->tx_ring, |
| 1091 | mdp->tx_desc_dma); |
| 1092 | mdp->tx_ring = NULL; |
| 1093 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1094 | } |
| 1095 | |
| 1096 | /* format skb and descriptor buffer */ |
| 1097 | static void sh_eth_ring_format(struct net_device *ndev) |
| 1098 | { |
| 1099 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1100 | int i; |
| 1101 | struct sk_buff *skb; |
| 1102 | struct sh_eth_rxdesc *rxdesc = NULL; |
| 1103 | struct sh_eth_txdesc *txdesc = NULL; |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1104 | int rx_ringsize = sizeof(*rxdesc) * mdp->num_rx_ring; |
| 1105 | int tx_ringsize = sizeof(*txdesc) * mdp->num_tx_ring; |
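| |	/* skbuff_size below includes headroom for aligning skb->data
| |	 * (SH_ETH_RX_ALIGN) plus up to 31 extra bytes so the DMA-mapped
| |	 * length can be rounded up to a 32-byte multiple.
| |	 */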
Sergei Shtylyov | cb36859 | 2015-10-24 00:46:40 +0300 | [diff] [blame] | 1106 | int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1; |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1107 | dma_addr_t dma_addr; |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1108 | u32 buf_len; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1109 | |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1110 | mdp->cur_rx = 0; |
| 1111 | mdp->cur_tx = 0; |
| 1112 | mdp->dirty_rx = 0; |
| 1113 | mdp->dirty_tx = 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1114 | |
| 1115 | memset(mdp->rx_ring, 0, rx_ringsize); |
| 1116 | |
| 1117 | /* build Rx ring buffer */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1118 | for (i = 0; i < mdp->num_rx_ring; i++) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1119 | /* skb */ |
| 1120 | mdp->rx_skbuff[i] = NULL; |
Mitsuhiro Kimura | 4d6a949 | 2014-11-27 20:34:00 +0900 | [diff] [blame] | 1121 | skb = netdev_alloc_skb(ndev, skbuff_size); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1122 | if (skb == NULL) |
| 1123 | break; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1124 | sh_eth_set_receive_align(skb); |
| 1125 | |
Sergei Shtylyov | ab85791 | 2015-10-24 00:46:03 +0300 | [diff] [blame] | 1126 | /* The size of the buffer is a multiple of 32 bytes. */ |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1127 | buf_len = ALIGN(mdp->rx_buf_sz, 32); |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1128 | dma_addr = dma_map_single(&ndev->dev, skb->data, buf_len, |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1129 | DMA_FROM_DEVICE); |
| 1130 | if (dma_mapping_error(&ndev->dev, dma_addr)) { |
| 1131 | kfree_skb(skb); |
| 1132 | break; |
| 1133 | } |
| 1134 | mdp->rx_skbuff[i] = skb; |
Sergei Shtylyov | d0ba913 | 2016-03-08 01:37:09 +0300 | [diff] [blame] | 1135 | |
| 1136 | /* RX descriptor */ |
| 1137 | rxdesc = &mdp->rx_ring[i]; |
| 1138 | rxdesc->len = cpu_to_le32(buf_len << 16); |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1139 | rxdesc->addr = cpu_to_le32(dma_addr); |
| 1140 | rxdesc->status = cpu_to_le32(RD_RACT | RD_RFP); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1141 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1142 | /* Rx descriptor address set */ |
| 1143 | if (i == 0) { |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1144 | sh_eth_write(ndev, mdp->rx_desc_dma, RDLAR); |
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 1145 | if (sh_eth_is_gether(mdp) || |
| 1146 | sh_eth_is_rz_fast_ether(mdp)) |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 1147 | sh_eth_write(ndev, mdp->rx_desc_dma, RDFAR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1148 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1149 | } |
| 1150 | |
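| |	/* If any skb allocation above failed, i < num_rx_ring here; the
| |	 * unsigned wraparound makes cur_rx - dirty_rx positive, so the
| |	 * refill loop in sh_eth_rx() will retry the missing entries later.
| |	 */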
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1151 | mdp->dirty_rx = (u32) (i - mdp->num_rx_ring); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1152 | |
| 1153 | /* Mark the last entry as wrapping the ring. */ |
Sergei Shtylyov | c1b7fca | 2016-03-08 01:36:28 +0300 | [diff] [blame] | 1154 | if (rxdesc) |
| 1155 | rxdesc->status |= cpu_to_le32(RD_RDLE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1156 | |
| 1157 | memset(mdp->tx_ring, 0, tx_ringsize); |
| 1158 | |
| 1159 | /* build Tx ring buffer */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1160 | for (i = 0; i < mdp->num_tx_ring; i++) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1161 | mdp->tx_skbuff[i] = NULL; |
| 1162 | txdesc = &mdp->tx_ring[i]; |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1163 | txdesc->status = cpu_to_le32(TD_TFP); |
| 1164 | txdesc->len = cpu_to_le32(0); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1165 | if (i == 0) { |
Yoshinori Sato | 71557a3 | 2008-08-06 19:49:00 -0400 | [diff] [blame] | 1166 | /* Tx descriptor address set */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1167 | sh_eth_write(ndev, mdp->tx_desc_dma, TDLAR); |
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 1168 | if (sh_eth_is_gether(mdp) || |
| 1169 | sh_eth_is_rz_fast_ether(mdp)) |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 1170 | sh_eth_write(ndev, mdp->tx_desc_dma, TDFAR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1171 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1172 | } |
| 1173 | |
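| |	/* Mark the last Tx entry as wrapping the ring. */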
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1174 | txdesc->status |= cpu_to_le32(TD_TDLE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1175 | } |
| 1176 | |
| 1177 | /* Allocate skb rings and descriptor buffers */
| 1178 | static int sh_eth_ring_init(struct net_device *ndev) |
| 1179 | { |
| 1180 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1181 | int rx_ringsize, tx_ringsize; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1182 | |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1183 | /* +26 gets the maximum ethernet encapsulation, +7 & ~7 because the |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1184 | * card needs room to do 8 byte alignment, +2 so we can reserve |
| 1185 | * the first 2 bytes, and +16 gets room for the status word from the |
| 1186 | * card. |
| 1187 | */ |
| 1188 | mdp->rx_buf_sz = (ndev->mtu <= 1492 ? PKT_BUF_SZ : |
| 1189 | (((ndev->mtu + 26 + 7) & ~7) + 2 + 16)); |
Magnus Damm | 503914c | 2009-12-15 21:16:55 -0800 | [diff] [blame] | 1190 | if (mdp->cd->rpadir) |
| 1191 | mdp->rx_buf_sz += NET_IP_ALIGN; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1192 | |
| 1193 | /* Allocate RX and TX skb rings */ |
Sergei Shtylyov | 2c94e85 | 2015-10-31 02:05:56 +0300 | [diff] [blame] | 1194 | mdp->rx_skbuff = kcalloc(mdp->num_rx_ring, sizeof(*mdp->rx_skbuff), |
| 1195 | GFP_KERNEL); |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1196 | if (!mdp->rx_skbuff) |
| 1197 | return -ENOMEM; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1198 | |
Sergei Shtylyov | 2c94e85 | 2015-10-31 02:05:56 +0300 | [diff] [blame] | 1199 | mdp->tx_skbuff = kcalloc(mdp->num_tx_ring, sizeof(*mdp->tx_skbuff), |
| 1200 | GFP_KERNEL); |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1201 | if (!mdp->tx_skbuff) |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1202 | goto ring_free; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1203 | |
| 1204 | /* Allocate all Rx descriptors. */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1205 | rx_ringsize = sizeof(struct sh_eth_rxdesc) * mdp->num_rx_ring; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1206 | mdp->rx_ring = dma_alloc_coherent(NULL, rx_ringsize, &mdp->rx_desc_dma, |
Joe Perches | d0320f7 | 2013-03-14 13:07:21 +0000 | [diff] [blame] | 1207 | GFP_KERNEL); |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1208 | if (!mdp->rx_ring) |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1209 | goto ring_free; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1210 | |
| 1211 | mdp->dirty_rx = 0; |
| 1212 | |
| 1213 | /* Allocate all Tx descriptors. */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1214 | tx_ringsize = sizeof(struct sh_eth_txdesc) * mdp->num_tx_ring; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1215 | mdp->tx_ring = dma_alloc_coherent(NULL, tx_ringsize, &mdp->tx_desc_dma, |
Joe Perches | d0320f7 | 2013-03-14 13:07:21 +0000 | [diff] [blame] | 1216 | GFP_KERNEL); |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1217 | if (!mdp->tx_ring) |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1218 | goto ring_free; |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1219 | return 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1220 | |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 1221 | ring_free: |
| 1222 | /* Free Rx and Tx skb ring buffer and DMA buffer */ |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1223 | sh_eth_ring_free(ndev); |
| 1224 | |
Sergei Shtylyov | 91d8068 | 2015-11-04 00:17:08 +0300 | [diff] [blame] | 1225 | return -ENOMEM; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1226 | } |
| 1227 | |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 1228 | static int sh_eth_dev_init(struct net_device *ndev) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1229 | { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1230 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 1231 | int ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1232 | |
| 1233 | /* Soft Reset */ |
Nobuhiro Iwamatsu | 5cee1d3 | 2012-06-25 17:35:12 +0000 | [diff] [blame] | 1234 | ret = sh_eth_reset(ndev); |
| 1235 | if (ret) |
Laurent Pinchart | f738a13 | 2014-03-20 15:00:35 +0100 | [diff] [blame] | 1236 | return ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1237 | |
Simon Horman | 55754f1 | 2013-07-23 10:18:04 +0900 | [diff] [blame] | 1238 | if (mdp->cd->rmiimode) |
| 1239 | sh_eth_write(ndev, 0x1, RMIIMODE); |
| 1240 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1241 | /* Descriptor format */ |
| 1242 | sh_eth_ring_format(ndev); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1243 | if (mdp->cd->rpadir) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1244 | sh_eth_write(ndev, mdp->cd->rpadir_value, RPADIR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1245 | |
| 1246 | /* Mask all sh_eth interrupts */
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1247 | sh_eth_write(ndev, 0, EESIPR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1248 | |
Yoshihiro Shimoda | 10b9194 | 2012-03-29 19:32:08 +0000 | [diff] [blame] | 1249 | #if defined(__LITTLE_ENDIAN) |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1250 | if (mdp->cd->hw_swap) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1251 | sh_eth_write(ndev, EDMR_EL, EDMR); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1252 | else |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1253 | #endif |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1254 | sh_eth_write(ndev, 0, EDMR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1255 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1256 | /* FIFO size set */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1257 | sh_eth_write(ndev, mdp->cd->fdr_value, FDR); |
| 1258 | sh_eth_write(ndev, 0, TFTR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1259 | |
Ben Dooks | 530aa2d | 2014-06-03 12:21:13 +0100 | [diff] [blame] | 1260 | /* Frame receive control (enable handling of multiple packets per Rx IRQ) */
| 1261 | sh_eth_write(ndev, RMCR_RNC, RMCR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1262 | |
Nobuhiro Iwamatsu | b284fbe | 2015-01-08 15:25:07 +0900 | [diff] [blame] | 1263 | sh_eth_write(ndev, mdp->cd->trscer_err_mask, TRSCER); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1264 | |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1265 | if (mdp->cd->bculr) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1266 | sh_eth_write(ndev, 0x800, BCULR); /* Burst cycle set */
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1267 | |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1268 | sh_eth_write(ndev, mdp->cd->fcftr_value, FCFTR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1269 | |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1270 | if (!mdp->cd->no_trimd) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1271 | sh_eth_write(ndev, 0, TRIMD); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1272 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1273 | /* Recv frame limit set register */ |
Yoshihiro Shimoda | fdb37a7 | 2012-02-06 23:55:15 +0000 | [diff] [blame] | 1274 | sh_eth_write(ndev, ndev->mtu + ETH_HLEN + VLAN_HLEN + ETH_FCS_LEN, |
| 1275 | RFLR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1276 | |
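| |	/* EESR bits are write-1-to-clear, so writing back the value just
| |	 * read acknowledges any E-DMAC interrupt status still pending.
| |	 */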
Sergei Shtylyov | b2b14d2 | 2016-02-10 01:38:28 +0300 | [diff] [blame] | 1277 | sh_eth_modify(ndev, EESR, 0, 0); |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 1278 | mdp->irq_enabled = true; |
| 1279 | sh_eth_write(ndev, mdp->cd->eesipr_value, EESIPR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1280 | |
| 1281 | /* PAUSE Prohibition */ |
Sergei Shtylyov | bffa731 | 2016-01-11 00:28:14 +0300 | [diff] [blame] | 1282 | sh_eth_write(ndev, ECMR_ZPF | (mdp->duplex ? ECMR_DM : 0) | |
| 1283 | ECMR_TE | ECMR_RE, ECMR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1284 | |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1285 | if (mdp->cd->set_rate) |
| 1286 | mdp->cd->set_rate(ndev); |
| 1287 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1288 | /* E-MAC Status Register clear */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1289 | sh_eth_write(ndev, mdp->cd->ecsr_value, ECSR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1290 | |
| 1291 | /* E-MAC Interrupt Enable register */ |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 1292 | sh_eth_write(ndev, mdp->cd->ecsipr_value, ECSIPR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1293 | |
| 1294 | /* Set MAC address */ |
| 1295 | update_mac_address(ndev); |
| 1296 | |
| 1297 | /* mask reset */ |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1298 | if (mdp->cd->apr) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1299 | sh_eth_write(ndev, APR_AP, APR); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1300 | if (mdp->cd->mpr) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1301 | sh_eth_write(ndev, MPR_MP, MPR); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1302 | if (mdp->cd->tpauser) |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1303 | sh_eth_write(ndev, TPAUSER_UNLIMITED, TPAUSER); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1304 | |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 1305 | /* Setting the Rx mode will start the Rx process. */ |
| 1306 | sh_eth_write(ndev, EDRRR_R, EDRRR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1307 | |
| 1308 | return ret; |
| 1309 | } |
| 1310 | |
Ben Hutchings | 740c7f3 | 2015-01-27 00:49:32 +0000 | [diff] [blame] | 1311 | static void sh_eth_dev_exit(struct net_device *ndev) |
| 1312 | { |
| 1313 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1314 | int i; |
| 1315 | |
| 1316 | /* Deactivate all TX descriptors, so DMA should stop at next |
| 1317 | * packet boundary if it's currently running |
| 1318 | */ |
| 1319 | for (i = 0; i < mdp->num_tx_ring; i++) |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1320 | mdp->tx_ring[i].status &= ~cpu_to_le32(TD_TACT); |
Ben Hutchings | 740c7f3 | 2015-01-27 00:49:32 +0000 | [diff] [blame] | 1321 | |
| 1322 | /* Disable TX FIFO egress to MAC */ |
| 1323 | sh_eth_rcv_snd_disable(ndev); |
| 1324 | |
| 1325 | /* Stop RX DMA at next packet boundary */ |
| 1326 | sh_eth_write(ndev, 0, EDRRR); |
| 1327 | |
| 1328 | /* Aside from TX DMA, we can't tell when the hardware is |
| 1329 | * really stopped, so we need to reset to make sure. |
| 1330 | * Before doing that, wait for long enough to *probably* |
| 1331 | * finish transmitting the last packet and poll stats. |
| 1332 | */ |
| 1333 | msleep(2); /* max frame time at 10 Mbps < 1250 us */ |
| 1334 | sh_eth_get_stats(ndev); |
| 1335 | sh_eth_reset(ndev); |
Geert Uytterhoeven | a14c7d1 | 2015-02-27 17:16:26 +0100 | [diff] [blame] | 1336 | |
| 1337 | /* Set MAC address again */ |
| 1338 | update_mac_address(ndev); |
Ben Hutchings | 740c7f3 | 2015-01-27 00:49:32 +0000 | [diff] [blame] | 1339 | } |
| 1340 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1341 | /* free Tx skb function */ |
| 1342 | static int sh_eth_txfree(struct net_device *ndev) |
| 1343 | { |
| 1344 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1345 | struct sh_eth_txdesc *txdesc; |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1346 | int free_num = 0; |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 1347 | int entry; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1348 | |
| 1349 | for (; mdp->cur_tx - mdp->dirty_tx > 0; mdp->dirty_tx++) { |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1350 | entry = mdp->dirty_tx % mdp->num_tx_ring; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1351 | txdesc = &mdp->tx_ring[entry]; |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1352 | if (txdesc->status & cpu_to_le32(TD_TACT)) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1353 | break; |
Ben Hutchings | 7d7355f | 2015-03-03 00:52:00 +0000 | [diff] [blame] | 1354 | /* TACT bit must be checked before all the following reads */ |
Sergei Shtylyov | f32bfb9 | 2015-11-03 22:36:04 +0300 | [diff] [blame] | 1355 | dma_rmb(); |
Ben Hutchings | e5fd13f | 2015-02-26 20:34:46 +0000 | [diff] [blame] | 1356 | netif_info(mdp, tx_done, ndev, |
| 1357 | "tx entry %d status 0x%08x\n", |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1358 | entry, le32_to_cpu(txdesc->status)); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1359 | /* Free the original skb. */ |
| 1360 | if (mdp->tx_skbuff[entry]) { |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1361 | dma_unmap_single(&ndev->dev, le32_to_cpu(txdesc->addr), |
| 1362 | le32_to_cpu(txdesc->len) >> 16, |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1363 | DMA_TO_DEVICE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1364 | dev_kfree_skb_irq(mdp->tx_skbuff[entry]); |
| 1365 | mdp->tx_skbuff[entry] = NULL; |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1366 | free_num++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1367 | } |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1368 | txdesc->status = cpu_to_le32(TD_TFP); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1369 | if (entry >= mdp->num_tx_ring - 1) |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1370 | txdesc->status |= cpu_to_le32(TD_TDLE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1371 | |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1372 | ndev->stats.tx_packets++; |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1373 | ndev->stats.tx_bytes += le32_to_cpu(txdesc->len) >> 16; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1374 | } |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1375 | return free_num; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1376 | } |
| 1377 | |
| 1378 | /* Packet receive function */ |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1379 | static int sh_eth_rx(struct net_device *ndev, u32 intr_status, int *quota) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1380 | { |
| 1381 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1382 | struct sh_eth_rxdesc *rxdesc; |
| 1383 | |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1384 | int entry = mdp->cur_rx % mdp->num_rx_ring; |
| 1385 | int boguscnt = (mdp->dirty_rx + mdp->num_rx_ring) - mdp->cur_rx; |
Mitsuhiro Kimura | 319cd52 | 2014-12-09 21:23:42 +0900 | [diff] [blame] | 1386 | int limit; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1387 | struct sk_buff *skb; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1388 | u32 desc_status; |
Sergei Shtylyov | cb36859 | 2015-10-24 00:46:40 +0300 | [diff] [blame] | 1389 | int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1; |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1390 | dma_addr_t dma_addr; |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 1391 | u16 pkt_len; |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1392 | u32 buf_len; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1393 | |
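| |	/* Bound this pass by both the number of ring entries available to
| |	 * process and the remaining NAPI quota.
| |	 */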
Mitsuhiro Kimura | 319cd52 | 2014-12-09 21:23:42 +0900 | [diff] [blame] | 1394 | boguscnt = min(boguscnt, *quota); |
| 1395 | limit = boguscnt; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1396 | rxdesc = &mdp->rx_ring[entry]; |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1397 | while (!(rxdesc->status & cpu_to_le32(RD_RACT))) { |
Ben Hutchings | 7d7355f | 2015-03-03 00:52:00 +0000 | [diff] [blame] | 1398 | /* RACT bit must be checked before all the following reads */ |
Sergei Shtylyov | f32bfb9 | 2015-11-03 22:36:04 +0300 | [diff] [blame] | 1399 | dma_rmb(); |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1400 | desc_status = le32_to_cpu(rxdesc->status); |
| 1401 | pkt_len = le32_to_cpu(rxdesc->len) & RD_RFL; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1402 | |
| 1403 | if (--boguscnt < 0) |
| 1404 | break; |
| 1405 | |
Ben Hutchings | e5fd13f | 2015-02-26 20:34:46 +0000 | [diff] [blame] | 1406 | netif_info(mdp, rx_status, ndev, |
| 1407 | "rx entry %d status 0x%08x len %d\n", |
| 1408 | entry, desc_status, pkt_len); |
| 1409 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1410 | if (!(desc_status & RDFEND)) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1411 | ndev->stats.rx_length_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1412 | |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1413 | /* On almost all GETHER/ETHER controllers, the Receive Frame State
Yoshihiro Shimoda | dd01989 | 2013-06-13 10:15:45 +0900 | [diff] [blame] | 1414 | * (RFS) bits in Receive Descriptor 0 occupy bits 9 to 0.
Ben Hutchings | 9b4a636 | 2015-03-03 00:52:39 +0000 | [diff] [blame] | 1415 | * However, on the R8A7740 and R7S72100 the RFS bits
| 1416 | * occupy bits 25 to 16, so the
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 1417 | * driver needs to shift them right by 16.
Yoshihiro Shimoda | dd01989 | 2013-06-13 10:15:45 +0900 | [diff] [blame] | 1418 | */
Sergei Shtylyov | 62e04b7 | 2017-01-07 00:03:37 +0300 | [diff] [blame] | 1419 | if (mdp->cd->hw_checksum) |
Sergei Shtylyov | ac8025a | 2013-06-13 22:12:45 +0400 | [diff] [blame] | 1420 | desc_status >>= 16; |
Yoshihiro Shimoda | dd01989 | 2013-06-13 10:15:45 +0900 | [diff] [blame] | 1421 | |
Sergei Shtylyov | 248be83 | 2015-12-04 01:45:40 +0300 | [diff] [blame] | 1422 | skb = mdp->rx_skbuff[entry]; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1423 | if (desc_status & (RD_RFS1 | RD_RFS2 | RD_RFS3 | RD_RFS4 | |
| 1424 | RD_RFS5 | RD_RFS6 | RD_RFS10)) { |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1425 | ndev->stats.rx_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1426 | if (desc_status & RD_RFS1) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1427 | ndev->stats.rx_crc_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1428 | if (desc_status & RD_RFS2) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1429 | ndev->stats.rx_frame_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1430 | if (desc_status & RD_RFS3) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1431 | ndev->stats.rx_length_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1432 | if (desc_status & RD_RFS4) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1433 | ndev->stats.rx_length_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1434 | if (desc_status & RD_RFS6) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1435 | ndev->stats.rx_missed_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1436 | if (desc_status & RD_RFS10) |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1437 | ndev->stats.rx_over_errors++; |
Sergei Shtylyov | 248be83 | 2015-12-04 01:45:40 +0300 | [diff] [blame] | 1438 | } else if (skb) { |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1439 | dma_addr = le32_to_cpu(rxdesc->addr); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1440 | if (!mdp->cd->hw_swap) |
| 1441 | sh_eth_soft_swap( |
Sergei Shtylyov | 1299653 | 2015-12-13 23:05:07 +0300 | [diff] [blame] | 1442 | phys_to_virt(ALIGN(dma_addr, 4)), |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1443 | pkt_len + 2); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1444 | mdp->rx_skbuff[entry] = NULL; |
Magnus Damm | 503914c | 2009-12-15 21:16:55 -0800 | [diff] [blame] | 1445 | if (mdp->cd->rpadir) |
| 1446 | skb_reserve(skb, NET_IP_ALIGN); |
Sergei Shtylyov | 1299653 | 2015-12-13 23:05:07 +0300 | [diff] [blame] | 1447 | dma_unmap_single(&ndev->dev, dma_addr, |
Sergei Shtylyov | ab85791 | 2015-10-24 00:46:03 +0300 | [diff] [blame] | 1448 | ALIGN(mdp->rx_buf_sz, 32), |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1449 | DMA_FROM_DEVICE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1450 | skb_put(skb, pkt_len); |
| 1451 | skb->protocol = eth_type_trans(skb, ndev); |
Sergei Shtylyov | a8e9fd0 | 2013-09-03 03:03:10 +0400 | [diff] [blame] | 1452 | netif_receive_skb(skb); |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1453 | ndev->stats.rx_packets++; |
| 1454 | ndev->stats.rx_bytes += pkt_len; |
Ben Hutchings | 25b77ad | 2015-02-26 20:33:30 +0000 | [diff] [blame] | 1455 | if (desc_status & RD_RFS8) |
| 1456 | ndev->stats.multicast++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1457 | } |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1458 | entry = (++mdp->cur_rx) % mdp->num_rx_ring; |
Yoshihiro Shimoda | 862df49 | 2009-05-24 23:53:40 +0000 | [diff] [blame] | 1459 | rxdesc = &mdp->rx_ring[entry]; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1460 | } |
| 1461 | |
| 1462 | /* Refill the Rx ring buffers. */ |
| 1463 | for (; mdp->cur_rx - mdp->dirty_rx > 0; mdp->dirty_rx++) { |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1464 | entry = mdp->dirty_rx % mdp->num_rx_ring; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1465 | rxdesc = &mdp->rx_ring[entry]; |
Sergei Shtylyov | ab85791 | 2015-10-24 00:46:03 +0300 | [diff] [blame] | 1466 | /* The size of the buffer is a multiple of 32 bytes. */
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1467 | buf_len = ALIGN(mdp->rx_buf_sz, 32); |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1468 | rxdesc->len = cpu_to_le32(buf_len << 16); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1469 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1470 | if (mdp->rx_skbuff[entry] == NULL) { |
Mitsuhiro Kimura | 4d6a949 | 2014-11-27 20:34:00 +0900 | [diff] [blame] | 1471 | skb = netdev_alloc_skb(ndev, skbuff_size); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1472 | if (skb == NULL) |
| 1473 | break; /* Better luck next round. */ |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1474 | sh_eth_set_receive_align(skb); |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1475 | dma_addr = dma_map_single(&ndev->dev, skb->data, |
Sergei Shtylyov | 5cbf20c | 2015-12-20 01:48:04 +0300 | [diff] [blame] | 1476 | buf_len, DMA_FROM_DEVICE); |
Ben Hutchings | 52b9fa3 | 2015-01-27 00:50:24 +0000 | [diff] [blame] | 1477 | if (dma_mapping_error(&ndev->dev, dma_addr)) { |
| 1478 | kfree_skb(skb); |
| 1479 | break; |
| 1480 | } |
| 1481 | mdp->rx_skbuff[entry] = skb; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1482 | |
Eric Dumazet | bc8acf2 | 2010-09-02 13:07:41 -0700 | [diff] [blame] | 1483 | skb_checksum_none_assert(skb); |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1484 | rxdesc->addr = cpu_to_le32(dma_addr); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1485 | } |
Sergei Shtylyov | f32bfb9 | 2015-11-03 22:36:04 +0300 | [diff] [blame] | 1486 | dma_wmb(); /* RACT bit must be set after all the above writes */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 1487 | if (entry >= mdp->num_rx_ring - 1) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1488 | rxdesc->status |= |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1489 | cpu_to_le32(RD_RACT | RD_RFP | RD_RDLE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1490 | else |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 1491 | rxdesc->status |= cpu_to_le32(RD_RACT | RD_RFP); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1492 | } |
| 1493 | |
| 1494 | /* Restart Rx engine if stopped. */ |
| 1495 | /* If we don't need to check status, don't. -KDU */ |
Yoshihiro Shimoda | 79fba9f | 2012-05-28 23:07:55 +0000 | [diff] [blame] | 1496 | if (!(sh_eth_read(ndev, EDRRR) & EDRRR_R)) { |
Yoshihiro Shimoda | a18e08b | 2012-06-20 15:26:34 +0000 | [diff] [blame] | 1497 | /* fix the ring indices for the next reception if RDE is set */
Ben Hutchings | 3365711 | 2015-02-26 20:34:14 +0000 | [diff] [blame] | 1498 | if (intr_status & EESR_RDE && |
| 1499 | mdp->reg_offset[RDFAR] != SH_ETH_OFFSET_INVALID) { |
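| |			/* RDFAR holds the address of the descriptor the DMAC
| |			 * stopped at; each descriptor is 16 bytes here, so the
| |			 * difference from RDLAR shifted right by 4 is its index.
| |			 */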
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 1500 | u32 count = (sh_eth_read(ndev, RDFAR) - |
| 1501 | sh_eth_read(ndev, RDLAR)) >> 4; |
| 1502 | |
| 1503 | mdp->cur_rx = count; |
| 1504 | mdp->dirty_rx = count; |
| 1505 | } |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1506 | sh_eth_write(ndev, EDRRR_R, EDRRR); |
Yoshihiro Shimoda | 79fba9f | 2012-05-28 23:07:55 +0000 | [diff] [blame] | 1507 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1508 | |
Mitsuhiro Kimura | 319cd52 | 2014-12-09 21:23:42 +0900 | [diff] [blame] | 1509 | *quota -= limit - boguscnt - 1; |
| 1510 | |
Yoshihiro Shimoda | 4f809ce | 2014-06-10 09:40:14 +0900 | [diff] [blame] | 1511 | return *quota <= 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1512 | } |
| 1513 | |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1514 | static void sh_eth_rcv_snd_disable(struct net_device *ndev) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1515 | { |
| 1516 | /* disable tx and rx */ |
Sergei Shtylyov | b2b14d2 | 2016-02-10 01:38:28 +0300 | [diff] [blame] | 1517 | sh_eth_modify(ndev, ECMR, ECMR_RE | ECMR_TE, 0); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1518 | } |
| 1519 | |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1520 | static void sh_eth_rcv_snd_enable(struct net_device *ndev) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1521 | { |
| 1522 | /* enable tx and rx */ |
Sergei Shtylyov | b2b14d2 | 2016-02-10 01:38:28 +0300 | [diff] [blame] | 1523 | sh_eth_modify(ndev, ECMR, ECMR_RE | ECMR_TE, ECMR_RE | ECMR_TE); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1524 | } |
| 1525 | |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1526 | /* E-MAC interrupt handler */ |
| 1527 | static void sh_eth_emac_interrupt(struct net_device *ndev) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1528 | { |
| 1529 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1530 | u32 felic_stat; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1531 | u32 link_stat; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1532 | |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1533 | felic_stat = sh_eth_read(ndev, ECSR) & sh_eth_read(ndev, ECSIPR); |
| 1534 | sh_eth_write(ndev, felic_stat, ECSR); /* clear int */ |
| 1535 | if (felic_stat & ECSR_ICD) |
| 1536 | ndev->stats.tx_carrier_errors++; |
| 1537 | if (felic_stat & ECSR_LCHNG) { |
| 1538 | /* Link Changed */ |
| 1539 | if (mdp->cd->no_psr || mdp->no_ether_link) |
| 1540 | return; |
| 1541 | link_stat = sh_eth_read(ndev, PSR); |
| 1542 | if (mdp->ether_link_active_low) |
| 1543 | link_stat = ~link_stat; |
| 1544 | if (!(link_stat & PHY_ST_LINK)) { |
| 1545 | sh_eth_rcv_snd_disable(ndev); |
| 1546 | } else { |
| 1547 | /* Link Up */ |
| 1548 | sh_eth_modify(ndev, EESIPR, DMAC_M_ECI, 0); |
| 1549 | /* clear int */ |
| 1550 | sh_eth_modify(ndev, ECSR, 0, 0); |
| 1551 | sh_eth_modify(ndev, EESIPR, DMAC_M_ECI, DMAC_M_ECI); |
| 1552 | /* enable tx and rx */ |
| 1553 | sh_eth_rcv_snd_enable(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1554 | } |
| 1555 | } |
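| |	/* Magic Packet detected: report a wakeup event (Wake-on-LAN) */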
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 1556 | if (felic_stat & ECSR_MPD) |
| 1557 | pm_wakeup_event(&mdp->pdev->dev, 0); |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1558 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1559 | |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1560 | /* error control function */ |
| 1561 | static void sh_eth_error(struct net_device *ndev, u32 intr_status) |
| 1562 | { |
| 1563 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1564 | u32 mask; |
| 1565 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1566 | if (intr_status & EESR_TWB) { |
Sergei Shtylyov | 4eb313a | 2013-06-21 01:13:42 +0400 | [diff] [blame] | 1567 | /* Unused write back interrupt */ |
| 1568 | if (intr_status & EESR_TABT) { /* Transmit Abort int */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1569 | ndev->stats.tx_aborted_errors++; |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 1570 | netif_err(mdp, tx_err, ndev, "Transmit Abort\n"); |
Sergei Shtylyov | 4eb313a | 2013-06-21 01:13:42 +0400 | [diff] [blame] | 1571 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1572 | } |
| 1573 | |
| 1574 | if (intr_status & EESR_RABT) { |
| 1575 | /* Receive Abort int */ |
| 1576 | if (intr_status & EESR_RFRMER) { |
| 1577 | /* Receive Frame Overflow int */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1578 | ndev->stats.rx_frame_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1579 | } |
| 1580 | } |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1581 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1582 | if (intr_status & EESR_TDE) { |
| 1583 | /* Transmit Descriptor Empty int */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1584 | ndev->stats.tx_fifo_errors++; |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 1585 | netif_err(mdp, tx_err, ndev, "Transmit Descriptor Empty\n"); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1586 | } |
| 1587 | |
| 1588 | if (intr_status & EESR_TFE) { |
| 1589 | /* Transmit FIFO underflow */
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1590 | ndev->stats.tx_fifo_errors++; |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 1591 | netif_err(mdp, tx_err, ndev, "Transmit FIFO Under flow\n"); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1592 | } |
| 1593 | |
| 1594 | if (intr_status & EESR_RDE) { |
| 1595 | /* Receive Descriptor Empty int */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1596 | ndev->stats.rx_over_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1597 | } |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1598 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1599 | if (intr_status & EESR_RFE) { |
| 1600 | /* Receive FIFO Overflow int */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1601 | ndev->stats.rx_fifo_errors++; |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1602 | } |
| 1603 | |
| 1604 | if (!mdp->cd->no_ade && (intr_status & EESR_ADE)) { |
| 1605 | /* Address Error */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 1606 | ndev->stats.tx_fifo_errors++; |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 1607 | netif_err(mdp, tx_err, ndev, "Address Error\n"); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1608 | } |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1609 | |
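| |	/* Any of these Tx error sources requires recovering the Tx path:
| |	 * free the dirty Tx buffers, restart transmit DMA if it stopped,
| |	 * and wake the queue.
| |	 */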
| 1610 | mask = EESR_TWB | EESR_TABT | EESR_ADE | EESR_TDE | EESR_TFE; |
| 1611 | if (mdp->cd->no_ade) |
| 1612 | mask &= ~EESR_ADE; |
| 1613 | if (intr_status & mask) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1614 | /* Tx error */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1615 | u32 edtrr = sh_eth_read(ndev, EDTRR); |
Sergei Shtylyov | 090d560 | 2014-01-11 02:41:49 +0300 | [diff] [blame] | 1616 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1617 | /* dmesg */ |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 1618 | netdev_err(ndev, "TX error. status=%8.8x cur_tx=%8.8x dirty_tx=%8.8x state=%8.8x EDTRR=%8.8x.\n", |
| 1619 | intr_status, mdp->cur_tx, mdp->dirty_tx, |
| 1620 | (u32)ndev->state, edtrr); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1621 | /* dirty buffer free */ |
| 1622 | sh_eth_txfree(ndev); |
| 1623 | |
| 1624 | /* SH7712 BUG */ |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 1625 | if (edtrr ^ sh_eth_get_edtrr_trns(mdp)) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1626 | /* tx dma start */ |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 1627 | sh_eth_write(ndev, sh_eth_get_edtrr_trns(mdp), EDTRR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1628 | } |
| 1629 | /* wakeup */ |
| 1630 | netif_wake_queue(ndev); |
| 1631 | } |
| 1632 | } |
| 1633 | |
| 1634 | static irqreturn_t sh_eth_interrupt(int irq, void *netdev) |
| 1635 | { |
| 1636 | struct net_device *ndev = netdev; |
| 1637 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1638 | struct sh_eth_cpu_data *cd = mdp->cd; |
Nobuhiro Iwamatsu | 0e0fde3 | 2009-03-16 19:50:57 +0000 | [diff] [blame] | 1639 | irqreturn_t ret = IRQ_NONE; |
Geert Uytterhoeven | 0799c2d | 2015-01-15 11:54:28 +0100 | [diff] [blame] | 1640 | u32 intr_status, intr_enable; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1641 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1642 | spin_lock(&mdp->lock); |
| 1643 | |
Sergei Shtylyov | 3893b27345 | 2013-03-31 09:54:20 +0000 | [diff] [blame] | 1644 | /* Get interrupt status */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1645 | intr_status = sh_eth_read(ndev, EESR); |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1646 | /* Mask it with the interrupt mask, forcing ECI interrupt to be always |
| 1647 | * enabled since it's the one that comes through regardless of the mask,
| 1648 | * and we need to fully handle it in sh_eth_emac_interrupt() in order |
| 1649 | * to quench it as it doesn't get cleared by just writing 1 to the ECI |
| 1650 | * bit... |
Sergei Shtylyov | 3893b27345 | 2013-03-31 09:54:20 +0000 | [diff] [blame] | 1651 | */ |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1652 | intr_enable = sh_eth_read(ndev, EESIPR); |
| 1653 | intr_status &= intr_enable | DMAC_M_ECI; |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1654 | if (intr_status & (EESR_RX_CHECK | cd->tx_check | EESR_ECI | |
| 1655 | cd->eesr_err_check)) |
Nobuhiro Iwamatsu | 0e0fde3 | 2009-03-16 19:50:57 +0000 | [diff] [blame] | 1656 | ret = IRQ_HANDLED; |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1657 | else |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 1658 | goto out; |
| 1659 | |
Sergei Shtylyov | 2344ef3 | 2016-12-30 00:07:38 +0300 | [diff] [blame] | 1660 | if (unlikely(!mdp->irq_enabled)) { |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 1661 | sh_eth_write(ndev, 0, EESIPR); |
| 1662 | goto out; |
| 1663 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1664 | |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1665 | if (intr_status & EESR_RX_CHECK) { |
| 1666 | if (napi_schedule_prep(&mdp->napi)) { |
| 1667 | /* Mask Rx interrupts */ |
| 1668 | sh_eth_write(ndev, intr_enable & ~EESR_RX_CHECK, |
| 1669 | EESIPR); |
| 1670 | __napi_schedule(&mdp->napi); |
| 1671 | } else { |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 1672 | netdev_warn(ndev, |
Geert Uytterhoeven | 0799c2d | 2015-01-15 11:54:28 +0100 | [diff] [blame] | 1673 | "ignoring interrupt, status 0x%08x, mask 0x%08x.\n", |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 1674 | intr_status, intr_enable); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1675 | } |
| 1676 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1677 | |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 1678 | /* Tx Check */ |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1679 | if (intr_status & cd->tx_check) { |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1680 | /* Clear Tx interrupts */ |
| 1681 | sh_eth_write(ndev, intr_status & cd->tx_check, EESR); |
| 1682 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1683 | sh_eth_txfree(ndev); |
| 1684 | netif_wake_queue(ndev); |
| 1685 | } |
| 1686 | |
Sergei Shtylyov | 9b39f05 | 2017-01-04 15:11:21 +0300 | [diff] [blame] | 1687 | /* E-MAC interrupt */ |
| 1688 | if (intr_status & EESR_ECI) |
| 1689 | sh_eth_emac_interrupt(ndev); |
| 1690 | |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1691 | if (intr_status & cd->eesr_err_check) { |
| 1692 | /* Clear error interrupts */ |
| 1693 | sh_eth_write(ndev, intr_status & cd->eesr_err_check, EESR); |
| 1694 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1695 | sh_eth_error(ndev, intr_status); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1696 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1697 | |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 1698 | out: |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1699 | spin_unlock(&mdp->lock); |
| 1700 | |
Nobuhiro Iwamatsu | 0e0fde3 | 2009-03-16 19:50:57 +0000 | [diff] [blame] | 1701 | return ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1702 | } |
| 1703 | |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1704 | static int sh_eth_poll(struct napi_struct *napi, int budget) |
| 1705 | { |
| 1706 | struct sh_eth_private *mdp = container_of(napi, struct sh_eth_private, |
| 1707 | napi); |
| 1708 | struct net_device *ndev = napi->dev; |
| 1709 | int quota = budget; |
Geert Uytterhoeven | 0799c2d | 2015-01-15 11:54:28 +0100 | [diff] [blame] | 1710 | u32 intr_status; |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1711 | |
| 1712 | for (;;) { |
| 1713 | intr_status = sh_eth_read(ndev, EESR); |
| 1714 | if (!(intr_status & EESR_RX_CHECK)) |
| 1715 | break; |
| 1716 | /* Clear Rx interrupts */ |
| 1717 | sh_eth_write(ndev, intr_status & EESR_RX_CHECK, EESR); |
| 1718 | |
| 1719 | if (sh_eth_rx(ndev, intr_status, "a)) |
| 1720 | goto out; |
| 1721 | } |
| 1722 | |
| 1723 | napi_complete(napi); |
| 1724 | |
| 1725 | /* Reenable Rx interrupts */ |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 1726 | if (mdp->irq_enabled) |
| 1727 | sh_eth_write(ndev, mdp->cd->eesipr_value, EESIPR); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 1728 | out: |
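| |	/* Report how much of the budget was consumed; quota tracks what
| |	 * remains after sh_eth_rx() has run.
| |	 */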
| 1729 | return budget - quota; |
| 1730 | } |
| 1731 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1732 | /* PHY state control function */ |
| 1733 | static void sh_eth_adjust_link(struct net_device *ndev) |
| 1734 | { |
| 1735 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 1736 | struct phy_device *phydev = ndev->phydev; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1737 | int new_state = 0; |
| 1738 | |
Sergei Shtylyov | 3340d2a | 2013-03-31 10:11:04 +0000 | [diff] [blame] | 1739 | if (phydev->link) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1740 | if (phydev->duplex != mdp->duplex) { |
| 1741 | new_state = 1; |
| 1742 | mdp->duplex = phydev->duplex; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1743 | if (mdp->cd->set_duplex) |
| 1744 | mdp->cd->set_duplex(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1745 | } |
| 1746 | |
| 1747 | if (phydev->speed != mdp->speed) { |
| 1748 | new_state = 1; |
| 1749 | mdp->speed = phydev->speed; |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1750 | if (mdp->cd->set_rate) |
| 1751 | mdp->cd->set_rate(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1752 | } |
Sergei Shtylyov | 3340d2a | 2013-03-31 10:11:04 +0000 | [diff] [blame] | 1753 | if (!mdp->link) { |
Sergei Shtylyov | b2b14d2 | 2016-02-10 01:38:28 +0300 | [diff] [blame] | 1754 | sh_eth_modify(ndev, ECMR, ECMR_TXF, 0); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1755 | new_state = 1; |
| 1756 | mdp->link = phydev->link; |
Sergei Shtylyov | 1e1b812 | 2013-03-31 09:50:07 +0000 | [diff] [blame] | 1757 | if (mdp->cd->no_psr || mdp->no_ether_link) |
| 1758 | sh_eth_rcv_snd_enable(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1759 | } |
| 1760 | } else if (mdp->link) { |
| 1761 | new_state = 1; |
Sergei Shtylyov | 3340d2a | 2013-03-31 10:11:04 +0000 | [diff] [blame] | 1762 | mdp->link = 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1763 | mdp->speed = 0; |
| 1764 | mdp->duplex = -1; |
Sergei Shtylyov | 1e1b812 | 2013-03-31 09:50:07 +0000 | [diff] [blame] | 1765 | if (mdp->cd->no_psr || mdp->no_ether_link) |
| 1766 | sh_eth_rcv_snd_disable(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1767 | } |
| 1768 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1769 | if (new_state && netif_msg_link(mdp)) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1770 | phy_print_status(phydev); |
| 1771 | } |
| 1772 | |
| 1773 | /* PHY init function */ |
| 1774 | static int sh_eth_phy_init(struct net_device *ndev) |
| 1775 | { |
Ben Dooks | 702eca0 | 2014-03-12 17:47:40 +0000 | [diff] [blame] | 1776 | struct device_node *np = ndev->dev.parent->of_node; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1777 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 1778 | struct phy_device *phydev; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1779 | |
Sergei Shtylyov | 3340d2a | 2013-03-31 10:11:04 +0000 | [diff] [blame] | 1780 | mdp->link = 0; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1781 | mdp->speed = 0; |
| 1782 | mdp->duplex = -1; |
| 1783 | |
| 1784 | /* Try connect to PHY */ |
Ben Dooks | 702eca0 | 2014-03-12 17:47:40 +0000 | [diff] [blame] | 1785 | if (np) { |
| 1786 | struct device_node *pn; |
| 1787 | |
| 1788 | pn = of_parse_phandle(np, "phy-handle", 0); |
| 1789 | phydev = of_phy_connect(ndev, pn, |
| 1790 | sh_eth_adjust_link, 0, |
| 1791 | mdp->phy_interface); |
| 1792 | |
Peter Chen | 8da703d | 2016-08-01 15:02:40 +0800 | [diff] [blame] | 1793 | of_node_put(pn); |
Ben Dooks | 702eca0 | 2014-03-12 17:47:40 +0000 | [diff] [blame] | 1794 | if (!phydev) |
| 1795 | phydev = ERR_PTR(-ENOENT); |
| 1796 | } else { |
| 1797 | char phy_id[MII_BUS_ID_SIZE + 3]; |
| 1798 | |
| 1799 | snprintf(phy_id, sizeof(phy_id), PHY_ID_FMT, |
| 1800 | mdp->mii_bus->id, mdp->phy_id); |
| 1801 | |
| 1802 | phydev = phy_connect(ndev, phy_id, sh_eth_adjust_link, |
| 1803 | mdp->phy_interface); |
| 1804 | } |
| 1805 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1806 | if (IS_ERR(phydev)) { |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 1807 | netdev_err(ndev, "failed to connect PHY\n"); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1808 | return PTR_ERR(phydev); |
| 1809 | } |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 1810 | |
Andrew Lunn | 2220943 | 2016-01-06 20:11:13 +0100 | [diff] [blame] | 1811 | phy_attached_info(phydev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1812 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1813 | return 0; |
| 1814 | } |
| 1815 | |
| 1816 | /* PHY control start function */ |
| 1817 | static int sh_eth_phy_start(struct net_device *ndev) |
| 1818 | { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1819 | int ret; |
| 1820 | |
| 1821 | ret = sh_eth_phy_init(ndev); |
| 1822 | if (ret) |
| 1823 | return ret; |
| 1824 | |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 1825 | phy_start(ndev->phydev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 1826 | |
| 1827 | return 0; |
| 1828 | } |
| 1829 | |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 1830 | static int sh_eth_get_link_ksettings(struct net_device *ndev, |
| 1831 | struct ethtool_link_ksettings *cmd) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1832 | { |
| 1833 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1834 | unsigned long flags; |
| 1835 | int ret; |
| 1836 | |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 1837 | if (!ndev->phydev) |
Ben Hutchings | 4f9dce23 | 2015-01-16 17:51:25 +0000 | [diff] [blame] | 1838 | return -ENODEV; |
| 1839 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1840 | spin_lock_irqsave(&mdp->lock, flags); |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 1841 | ret = phy_ethtool_ksettings_get(ndev->phydev, cmd); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1842 | spin_unlock_irqrestore(&mdp->lock, flags); |
| 1843 | |
| 1844 | return ret; |
| 1845 | } |
| 1846 | |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 1847 | static int sh_eth_set_link_ksettings(struct net_device *ndev, |
| 1848 | const struct ethtool_link_ksettings *cmd) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1849 | { |
| 1850 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1851 | unsigned long flags; |
| 1852 | int ret; |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1853 | |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 1854 | if (!ndev->phydev) |
Ben Hutchings | 4f9dce23 | 2015-01-16 17:51:25 +0000 | [diff] [blame] | 1855 | return -ENODEV; |
| 1856 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1857 | spin_lock_irqsave(&mdp->lock, flags); |
| 1858 | |
| 1859 | /* disable tx and rx */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1860 | sh_eth_rcv_snd_disable(ndev); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1861 | |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 1862 | ret = phy_ethtool_ksettings_set(ndev->phydev, cmd); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1863 | if (ret) |
| 1864 | goto error_exit; |
| 1865 | |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 1866 | if (cmd->base.duplex == DUPLEX_FULL) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1867 | mdp->duplex = 1; |
| 1868 | else |
| 1869 | mdp->duplex = 0; |
| 1870 | |
| 1871 | if (mdp->cd->set_duplex) |
| 1872 | mdp->cd->set_duplex(ndev); |
| 1873 | |
| 1874 | error_exit: |
| 1875 | mdelay(1); |
| 1876 | |
| 1877 | /* enable tx and rx */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 1878 | sh_eth_rcv_snd_enable(ndev); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 1879 | |
| 1880 | spin_unlock_irqrestore(&mdp->lock, flags); |
| 1881 | |
| 1882 | return ret; |
| 1883 | } |
| 1884 | |
Ben Hutchings | 6b4b4fe | 2015-02-26 20:34:35 +0000 | [diff] [blame] | 1885 | /* If it is ever necessary to increase SH_ETH_REG_DUMP_MAX_REGS, the |
| 1886 | * version must be bumped as well. Just adding registers up to that |
| 1887 | * limit is fine, as long as the existing register indices don't |
| 1888 | * change. |
| 1889 | */ |
| 1890 | #define SH_ETH_REG_DUMP_VERSION 1 |
| 1891 | #define SH_ETH_REG_DUMP_MAX_REGS 256 |
| 1892 | |
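| | /* A sketch of the dump layout implied by the code below:
| |  *
| |  *	u32 valid_map[8];   bit (reg % 32) of word (reg / 32) is set for
| |  *	                    every register included in the dump
| |  *	u32 values[];       one word per set bit, in the order of the
| |  *	                    add_reg()/add_tsu_reg() calls
| |  *
| |  * __sh_eth_get_regs() returns the total size in bytes; called with a
| |  * NULL buffer it only counts, which is how sh_eth_get_regs_len()
| |  * sizes the ethtool buffer.
| |  */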
| 1893 | static size_t __sh_eth_get_regs(struct net_device *ndev, u32 *buf) |
| 1894 | { |
| 1895 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 1896 | struct sh_eth_cpu_data *cd = mdp->cd; |
| 1897 | u32 *valid_map; |
| 1898 | size_t len; |
| 1899 | |
| 1900 | BUILD_BUG_ON(SH_ETH_MAX_REGISTER_OFFSET > SH_ETH_REG_DUMP_MAX_REGS); |
| 1901 | |
| 1902 | /* Dump starts with a bitmap that tells ethtool which |
| 1903 | * registers are defined for this chip. |
| 1904 | */ |
| 1905 | len = DIV_ROUND_UP(SH_ETH_REG_DUMP_MAX_REGS, 32); |
| 1906 | if (buf) { |
| 1907 | valid_map = buf; |
| 1908 | buf += len; |
| 1909 | } else { |
| 1910 | valid_map = NULL; |
| 1911 | } |
| 1912 | |
| 1913 | /* Add a register to the dump, if it has a defined offset. |
| 1914 | * This automatically skips most undefined registers, but for |
| 1915 | * some it is also necessary to check a capability flag in |
| 1916 | * struct sh_eth_cpu_data. |
| 1917 | */ |
| 1918 | #define mark_reg_valid(reg) valid_map[reg / 32] |= 1U << (reg % 32) |
| 1919 | #define add_reg_from(reg, read_expr) do { \ |
| 1920 | if (mdp->reg_offset[reg] != SH_ETH_OFFSET_INVALID) { \ |
| 1921 | if (buf) { \ |
| 1922 | mark_reg_valid(reg); \ |
| 1923 | *buf++ = read_expr; \ |
| 1924 | } \ |
| 1925 | ++len; \ |
| 1926 | } \ |
| 1927 | } while (0) |
| 1928 | #define add_reg(reg) add_reg_from(reg, sh_eth_read(ndev, reg)) |
| 1929 | #define add_tsu_reg(reg) add_reg_from(reg, sh_eth_tsu_read(mdp, reg)) |
| 1930 | |
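| | /* For example, add_reg(EDSR) expands to roughly:
| |  *
| |  *	if (mdp->reg_offset[EDSR] != SH_ETH_OFFSET_INVALID) {
| |  *		if (buf) {
| |  *			mark_reg_valid(EDSR);
| |  *			*buf++ = sh_eth_read(ndev, EDSR);
| |  *		}
| |  *		++len;
| |  *	}
| |  */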
| 1931 | add_reg(EDSR); |
| 1932 | add_reg(EDMR); |
| 1933 | add_reg(EDTRR); |
| 1934 | add_reg(EDRRR); |
| 1935 | add_reg(EESR); |
| 1936 | add_reg(EESIPR); |
| 1937 | add_reg(TDLAR); |
| 1938 | add_reg(TDFAR); |
| 1939 | add_reg(TDFXR); |
| 1940 | add_reg(TDFFR); |
| 1941 | add_reg(RDLAR); |
| 1942 | add_reg(RDFAR); |
| 1943 | add_reg(RDFXR); |
| 1944 | add_reg(RDFFR); |
| 1945 | add_reg(TRSCER); |
| 1946 | add_reg(RMFCR); |
| 1947 | add_reg(TFTR); |
| 1948 | add_reg(FDR); |
| 1949 | add_reg(RMCR); |
| 1950 | add_reg(TFUCR); |
| 1951 | add_reg(RFOCR); |
| 1952 | if (cd->rmiimode) |
| 1953 | add_reg(RMIIMODE); |
| 1954 | add_reg(FCFTR); |
| 1955 | if (cd->rpadir) |
| 1956 | add_reg(RPADIR); |
| 1957 | if (!cd->no_trimd) |
| 1958 | add_reg(TRIMD); |
| 1959 | add_reg(ECMR); |
| 1960 | add_reg(ECSR); |
| 1961 | add_reg(ECSIPR); |
| 1962 | add_reg(PIR); |
| 1963 | if (!cd->no_psr) |
| 1964 | add_reg(PSR); |
| 1965 | add_reg(RDMLR); |
| 1966 | add_reg(RFLR); |
| 1967 | add_reg(IPGR); |
| 1968 | if (cd->apr) |
| 1969 | add_reg(APR); |
| 1970 | if (cd->mpr) |
| 1971 | add_reg(MPR); |
| 1972 | add_reg(RFCR); |
| 1973 | add_reg(RFCF); |
| 1974 | if (cd->tpauser) |
| 1975 | add_reg(TPAUSER); |
| 1976 | add_reg(TPAUSECR); |
| 1977 | add_reg(GECMR); |
| 1978 | if (cd->bculr) |
| 1979 | add_reg(BCULR); |
| 1980 | add_reg(MAHR); |
| 1981 | add_reg(MALR); |
| 1982 | add_reg(TROCR); |
| 1983 | add_reg(CDCR); |
| 1984 | add_reg(LCCR); |
| 1985 | add_reg(CNDCR); |
| 1986 | add_reg(CEFCR); |
| 1987 | add_reg(FRECR); |
| 1988 | add_reg(TSFRCR); |
| 1989 | add_reg(TLFRCR); |
| 1990 | add_reg(CERCR); |
| 1991 | add_reg(CEECR); |
| 1992 | add_reg(MAFCR); |
| 1993 | if (cd->rtrate) |
| 1994 | add_reg(RTRATE); |
Sergei Shtylyov | 62e04b7 | 2017-01-07 00:03:37 +0300 | [diff] [blame] | 1995 | if (cd->hw_checksum) |
Ben Hutchings | 6b4b4fe | 2015-02-26 20:34:35 +0000 | [diff] [blame] | 1996 | add_reg(CSMR); |
| 1997 | if (cd->select_mii) |
| 1998 | add_reg(RMII_MII); |
| 1999 | add_reg(ARSTR); |
| 2000 | if (cd->tsu) { |
| 2001 | add_tsu_reg(TSU_CTRST); |
| 2002 | add_tsu_reg(TSU_FWEN0); |
| 2003 | add_tsu_reg(TSU_FWEN1); |
| 2004 | add_tsu_reg(TSU_FCM); |
| 2005 | add_tsu_reg(TSU_BSYSL0); |
| 2006 | add_tsu_reg(TSU_BSYSL1); |
| 2007 | add_tsu_reg(TSU_PRISL0); |
| 2008 | add_tsu_reg(TSU_PRISL1); |
| 2009 | add_tsu_reg(TSU_FWSL0); |
| 2010 | add_tsu_reg(TSU_FWSL1); |
| 2011 | add_tsu_reg(TSU_FWSLC); |
| 2012 | add_tsu_reg(TSU_QTAG0); |
| 2013 | add_tsu_reg(TSU_QTAG1); |
| 2014 | add_tsu_reg(TSU_QTAGM0); |
| 2015 | add_tsu_reg(TSU_QTAGM1); |
| 2016 | add_tsu_reg(TSU_FWSR); |
| 2017 | add_tsu_reg(TSU_FWINMK); |
| 2018 | add_tsu_reg(TSU_ADQT0); |
| 2019 | add_tsu_reg(TSU_ADQT1); |
| 2020 | add_tsu_reg(TSU_VTAG0); |
| 2021 | add_tsu_reg(TSU_VTAG1); |
| 2022 | add_tsu_reg(TSU_ADSBSY); |
| 2023 | add_tsu_reg(TSU_TEN); |
| 2024 | add_tsu_reg(TSU_POST1); |
| 2025 | add_tsu_reg(TSU_POST2); |
| 2026 | add_tsu_reg(TSU_POST3); |
| 2027 | add_tsu_reg(TSU_POST4); |
| 2028 | if (mdp->reg_offset[TSU_ADRH0] != SH_ETH_OFFSET_INVALID) { |
| 2029 | /* This is the start of a table, not just a single |
| 2030 | * register. |
| 2031 | */ |
| 2032 | if (buf) { |
| 2033 | unsigned int i; |
| 2034 | |
| 2035 | mark_reg_valid(TSU_ADRH0); |
| 2036 | for (i = 0; i < SH_ETH_TSU_CAM_ENTRIES * 2; i++) |
| 2037 | *buf++ = ioread32( |
| 2038 | mdp->tsu_addr + |
| 2039 | mdp->reg_offset[TSU_ADRH0] + |
| 2040 | i * 4); |
| 2041 | } |
| 2042 | len += SH_ETH_TSU_CAM_ENTRIES * 2; |
| 2043 | } |
| 2044 | } |
| 2045 | |
| 2046 | #undef mark_reg_valid |
| 2047 | #undef add_reg_from |
| 2048 | #undef add_reg |
| 2049 | #undef add_tsu_reg |
| 2050 | |
| 2051 | return len * 4; |
| 2052 | } |
| 2053 | |
| 2054 | static int sh_eth_get_regs_len(struct net_device *ndev) |
| 2055 | { |
| 2056 | return __sh_eth_get_regs(ndev, NULL); |
| 2057 | } |
| 2058 | |
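| | /* Fills the buffer that ethtool sized via sh_eth_get_regs_len() above;
| |  * `ethtool -d <iface>` (interface name up to the user) is the usual
| |  * way to reach this pair of callbacks.
| |  */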
| 2059 | static void sh_eth_get_regs(struct net_device *ndev, struct ethtool_regs *regs, |
| 2060 | void *buf) |
| 2061 | { |
| 2062 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2063 | |
| 2064 | regs->version = SH_ETH_REG_DUMP_VERSION; |
| 2065 | |
| 2066 | pm_runtime_get_sync(&mdp->pdev->dev); |
| 2067 | __sh_eth_get_regs(ndev, buf); |
| 2068 | pm_runtime_put_sync(&mdp->pdev->dev); |
| 2069 | } |
| 2070 | |
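| | /* Restart autonegotiation; typically reached via `ethtool -r <iface>` */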
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2071 | static int sh_eth_nway_reset(struct net_device *ndev) |
| 2072 | { |
| 2073 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2074 | unsigned long flags; |
| 2075 | int ret; |
| 2076 | |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 2077 | if (!ndev->phydev) |
Ben Hutchings | 4f9dce23 | 2015-01-16 17:51:25 +0000 | [diff] [blame] | 2078 | return -ENODEV; |
| 2079 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2080 | spin_lock_irqsave(&mdp->lock, flags); |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 2081 | ret = phy_start_aneg(ndev->phydev); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2082 | spin_unlock_irqrestore(&mdp->lock, flags); |
| 2083 | |
| 2084 | return ret; |
| 2085 | } |
| 2086 | |
| 2087 | static u32 sh_eth_get_msglevel(struct net_device *ndev) |
| 2088 | { |
| 2089 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2090 | return mdp->msg_enable; |
| 2091 | } |
| 2092 | |
| 2093 | static void sh_eth_set_msglevel(struct net_device *ndev, u32 value) |
| 2094 | { |
| 2095 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2096 | mdp->msg_enable = value; |
| 2097 | } |
| 2098 | |
| 2099 | static const char sh_eth_gstrings_stats[][ETH_GSTRING_LEN] = { |
| 2100 | "rx_current", "tx_current", |
| 2101 | "rx_dirty", "tx_dirty", |
| 2102 | }; |
| 2103 | #define SH_ETH_STATS_LEN ARRAY_SIZE(sh_eth_gstrings_stats) |
| 2104 | |
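| | /* The three callbacks below serve `ethtool -S <iface>`: ethtool asks
| |  * for the count, then the names, then the values, so the string array
| |  * above and the fill order in sh_eth_get_ethtool_stats() must match.
| |  */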
| 2105 | static int sh_eth_get_sset_count(struct net_device *netdev, int sset) |
| 2106 | { |
| 2107 | switch (sset) { |
| 2108 | case ETH_SS_STATS: |
| 2109 | return SH_ETH_STATS_LEN; |
| 2110 | default: |
| 2111 | return -EOPNOTSUPP; |
| 2112 | } |
| 2113 | } |
| 2114 | |
| 2115 | static void sh_eth_get_ethtool_stats(struct net_device *ndev, |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 2116 | struct ethtool_stats *stats, u64 *data) |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2117 | { |
| 2118 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2119 | int i = 0; |
| 2120 | |
| 2121 | /* device-specific stats */ |
| 2122 | data[i++] = mdp->cur_rx; |
| 2123 | data[i++] = mdp->cur_tx; |
| 2124 | data[i++] = mdp->dirty_rx; |
| 2125 | data[i++] = mdp->dirty_tx; |
| 2126 | } |
| 2127 | |
| 2128 | static void sh_eth_get_strings(struct net_device *ndev, u32 stringset, u8 *data) |
| 2129 | { |
| 2130 | switch (stringset) { |
| 2131 | case ETH_SS_STATS: |
| 2132 | memcpy(data, *sh_eth_gstrings_stats, |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 2133 | sizeof(sh_eth_gstrings_stats)); |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2134 | break; |
| 2135 | } |
| 2136 | } |
| 2137 | |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2138 | static void sh_eth_get_ringparam(struct net_device *ndev, |
| 2139 | struct ethtool_ringparam *ring) |
| 2140 | { |
| 2141 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2142 | |
| 2143 | ring->rx_max_pending = RX_RING_MAX; |
| 2144 | ring->tx_max_pending = TX_RING_MAX; |
| 2145 | ring->rx_pending = mdp->num_rx_ring; |
| 2146 | ring->tx_pending = mdp->num_tx_ring; |
| 2147 | } |
| 2148 | |
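| | /* Typically reached via `ethtool -G <iface> rx N tx N`. Requests
| |  * outside [RX_RING_MIN, RX_RING_MAX] / [TX_RING_MIN, TX_RING_MAX],
| |  * or any mini/jumbo ring size, are rejected with -EINVAL rather than
| |  * clamped.
| |  */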
| 2149 | static int sh_eth_set_ringparam(struct net_device *ndev, |
| 2150 | struct ethtool_ringparam *ring) |
| 2151 | { |
| 2152 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2153 | int ret; |
| 2154 | |
| 2155 | if (ring->tx_pending > TX_RING_MAX || |
| 2156 | ring->rx_pending > RX_RING_MAX || |
| 2157 | ring->tx_pending < TX_RING_MIN || |
| 2158 | ring->rx_pending < RX_RING_MIN) |
| 2159 | return -EINVAL; |
| 2160 | if (ring->rx_mini_pending || ring->rx_jumbo_pending) |
| 2161 | return -EINVAL; |
| 2162 | |
| 2163 | if (netif_running(ndev)) { |
Ben Hutchings | bd88891 | 2015-01-22 12:40:25 +0000 | [diff] [blame] | 2164 | netif_device_detach(ndev); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2165 | netif_tx_disable(ndev); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2166 | |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 2167 | /* Serialise with the interrupt handler and NAPI, then |
| 2168 | * disable interrupts. We have to clear the |
| 2169 | * irq_enabled flag first to ensure that interrupts |
| 2170 | * won't be re-enabled. |
| 2171 | */ |
| 2172 | mdp->irq_enabled = false; |
| 2173 | synchronize_irq(ndev->irq); |
| 2174 | napi_synchronize(&mdp->napi); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2175 | sh_eth_write(ndev, 0x0000, EESIPR); |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 2176 | |
Ben Hutchings | 740c7f3 | 2015-01-27 00:49:32 +0000 | [diff] [blame] | 2177 | sh_eth_dev_exit(ndev); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2178 | |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 2179 | /* Free all the skbuffs in the Rx queue and the DMA buffers. */ |
Ben Hutchings | 084236d | 2015-01-22 12:41:34 +0000 | [diff] [blame] | 2180 | sh_eth_ring_free(ndev); |
Ben Hutchings | 084236d | 2015-01-22 12:41:34 +0000 | [diff] [blame] | 2181 | } |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2182 | |
| 2183 | /* Set new parameters */ |
| 2184 | mdp->num_rx_ring = ring->rx_pending; |
| 2185 | mdp->num_tx_ring = ring->tx_pending; |
| 2186 | |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2187 | if (netif_running(ndev)) { |
Ben Hutchings | 084236d | 2015-01-22 12:41:34 +0000 | [diff] [blame] | 2188 | ret = sh_eth_ring_init(ndev); |
| 2189 | if (ret < 0) { |
| 2190 | netdev_err(ndev, "%s: sh_eth_ring_init failed.\n", |
| 2191 | __func__); |
| 2192 | return ret; |
| 2193 | } |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 2194 | ret = sh_eth_dev_init(ndev); |
Ben Hutchings | 084236d | 2015-01-22 12:41:34 +0000 | [diff] [blame] | 2195 | if (ret < 0) { |
| 2196 | netdev_err(ndev, "%s: sh_eth_dev_init failed.\n", |
| 2197 | __func__); |
| 2198 | return ret; |
| 2199 | } |
| 2200 | |
Ben Hutchings | bd88891 | 2015-01-22 12:40:25 +0000 | [diff] [blame] | 2201 | netif_device_attach(ndev); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2202 | } |
| 2203 | |
| 2204 | return 0; |
| 2205 | } |
| 2206 | |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 2207 | static void sh_eth_get_wol(struct net_device *ndev, struct ethtool_wolinfo *wol) |
| 2208 | { |
| 2209 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2210 | |
| 2211 | wol->supported = 0; |
| 2212 | wol->wolopts = 0; |
| 2213 | |
| 2214 | if (mdp->cd->magic && mdp->clk) { |
| 2215 | wol->supported = WAKE_MAGIC; |
| 2216 | wol->wolopts = mdp->wol_enabled ? WAKE_MAGIC : 0; |
| 2217 | } |
| 2218 | } |
| 2219 | |
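| | /* Usage sketch (interface name hypothetical):
| |  *
| |  *	ethtool -s eth0 wol g	enable magic-packet wakeup
| |  *	ethtool -s eth0 wol d	disable it again
| |  *
| |  * Anything beyond WAKE_MAGIC is refused with -EOPNOTSUPP below.
| |  */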
| 2220 | static int sh_eth_set_wol(struct net_device *ndev, struct ethtool_wolinfo *wol) |
| 2221 | { |
| 2222 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2223 | |
| 2224 | if (!mdp->cd->magic || !mdp->clk || wol->wolopts & ~WAKE_MAGIC) |
| 2225 | return -EOPNOTSUPP; |
| 2226 | |
| 2227 | mdp->wol_enabled = !!(wol->wolopts & WAKE_MAGIC); |
| 2228 | |
| 2229 | device_set_wakeup_enable(&mdp->pdev->dev, mdp->wol_enabled); |
| 2230 | |
| 2231 | return 0; |
| 2232 | } |
| 2233 | |
stephen hemminger | 9b07be4 | 2012-01-04 12:59:49 +0000 | [diff] [blame] | 2234 | static const struct ethtool_ops sh_eth_ethtool_ops = { |
Ben Hutchings | 6b4b4fe | 2015-02-26 20:34:35 +0000 | [diff] [blame] | 2235 | .get_regs_len = sh_eth_get_regs_len, |
| 2236 | .get_regs = sh_eth_get_regs, |
stephen hemminger | 9b07be4 | 2012-01-04 12:59:49 +0000 | [diff] [blame] | 2237 | .nway_reset = sh_eth_nway_reset, |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2238 | .get_msglevel = sh_eth_get_msglevel, |
| 2239 | .set_msglevel = sh_eth_set_msglevel, |
stephen hemminger | 9b07be4 | 2012-01-04 12:59:49 +0000 | [diff] [blame] | 2240 | .get_link = ethtool_op_get_link, |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2241 | .get_strings = sh_eth_get_strings, |
| 2242 | .get_ethtool_stats = sh_eth_get_ethtool_stats, |
| 2243 | .get_sset_count = sh_eth_get_sset_count, |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2244 | .get_ringparam = sh_eth_get_ringparam, |
| 2245 | .set_ringparam = sh_eth_set_ringparam, |
Philippe Reynes | f08aff4 | 2016-08-10 00:04:49 +0200 | [diff] [blame] | 2246 | .get_link_ksettings = sh_eth_get_link_ksettings, |
| 2247 | .set_link_ksettings = sh_eth_set_link_ksettings, |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 2248 | .get_wol = sh_eth_get_wol, |
| 2249 | .set_wol = sh_eth_set_wol, |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 2250 | }; |
| 2251 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2252 | /* network device open function */ |
| 2253 | static int sh_eth_open(struct net_device *ndev) |
| 2254 | { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2255 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 2256 | int ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2257 | |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 2258 | pm_runtime_get_sync(&mdp->pdev->dev); |
| 2259 | |
Sergei Shtylyov | d2779e9 | 2013-09-04 02:41:27 +0400 | [diff] [blame] | 2260 | napi_enable(&mdp->napi); |
| 2261 | |
Joe Perches | a0607fd | 2009-11-18 23:29:17 -0800 | [diff] [blame] | 2262 | ret = request_irq(ndev->irq, sh_eth_interrupt, |
Nobuhiro Iwamatsu | 5b3dfd1 | 2013-06-06 09:49:30 +0000 | [diff] [blame] | 2263 | mdp->cd->irq_flags, ndev->name, ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2264 | if (ret) { |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 2265 | netdev_err(ndev, "Cannot assign IRQ number\n");
Sergei Shtylyov | d2779e9 | 2013-09-04 02:41:27 +0400 | [diff] [blame] | 2266 | goto out_napi_off; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2267 | } |
| 2268 | |
| 2269 | /* Descriptor set */ |
| 2270 | ret = sh_eth_ring_init(ndev); |
| 2271 | if (ret) |
| 2272 | goto out_free_irq; |
| 2273 | |
| 2274 | /* device init */ |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 2275 | ret = sh_eth_dev_init(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2276 | if (ret) |
| 2277 | goto out_free_irq; |
| 2278 | |
| 2279 | /* PHY control start */
| 2280 | ret = sh_eth_phy_start(ndev); |
| 2281 | if (ret) |
| 2282 | goto out_free_irq; |
| 2283 | |
Sergei Shtylyov | ad846aa | 2016-03-14 01:09:53 +0300 | [diff] [blame] | 2284 | netif_start_queue(ndev); |
| 2285 | |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2286 | mdp->is_opened = 1; |
| 2287 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2288 | return ret; |
| 2289 | |
| 2290 | out_free_irq: |
| 2291 | free_irq(ndev->irq, ndev); |
Sergei Shtylyov | d2779e9 | 2013-09-04 02:41:27 +0400 | [diff] [blame] | 2292 | out_napi_off: |
| 2293 | napi_disable(&mdp->napi); |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 2294 | pm_runtime_put_sync(&mdp->pdev->dev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2295 | return ret; |
| 2296 | } |
| 2297 | |
| 2298 | /* Timeout function */ |
| 2299 | static void sh_eth_tx_timeout(struct net_device *ndev) |
| 2300 | { |
| 2301 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2302 | struct sh_eth_rxdesc *rxdesc; |
| 2303 | int i; |
| 2304 | |
| 2305 | netif_stop_queue(ndev); |
| 2306 | |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 2307 | netif_err(mdp, timer, ndev, |
| 2308 | "transmit timed out, status %8.8x, resetting...\n", |
Geert Uytterhoeven | 0799c2d | 2015-01-15 11:54:28 +0100 | [diff] [blame] | 2309 | sh_eth_read(ndev, EESR)); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2310 | |
| 2311 | /* tx_errors count up */ |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 2312 | ndev->stats.tx_errors++; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2313 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2314 | /* Free all the skbuffs in the Rx queue. */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2315 | for (i = 0; i < mdp->num_rx_ring; i++) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2316 | rxdesc = &mdp->rx_ring[i]; |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 2317 | rxdesc->status = cpu_to_le32(0); |
| 2318 | rxdesc->addr = cpu_to_le32(0xBADF00D0); |
Sergei Shtylyov | 179d80a | 2014-06-28 04:10:00 +0400 | [diff] [blame] | 2319 | dev_kfree_skb(mdp->rx_skbuff[i]); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2320 | mdp->rx_skbuff[i] = NULL; |
| 2321 | } |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2322 | for (i = 0; i < mdp->num_tx_ring; i++) { |
Sergei Shtylyov | 179d80a | 2014-06-28 04:10:00 +0400 | [diff] [blame] | 2323 | dev_kfree_skb(mdp->tx_skbuff[i]); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2324 | mdp->tx_skbuff[i] = NULL; |
| 2325 | } |
| 2326 | |
| 2327 | /* device init */ |
Sergei Shtylyov | f796721 | 2016-04-24 19:11:07 +0300 | [diff] [blame] | 2328 | sh_eth_dev_init(ndev); |
Sergei Shtylyov | ad846aa | 2016-03-14 01:09:53 +0300 | [diff] [blame] | 2329 | |
| 2330 | netif_start_queue(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2331 | } |
| 2332 | |
| 2333 | /* Packet transmit function */ |
| 2334 | static int sh_eth_start_xmit(struct sk_buff *skb, struct net_device *ndev) |
| 2335 | { |
| 2336 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2337 | struct sh_eth_txdesc *txdesc; |
Sergei Shtylyov | 1299653 | 2015-12-13 23:05:07 +0300 | [diff] [blame] | 2338 | dma_addr_t dma_addr; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2339 | u32 entry; |
Nobuhiro Iwamatsu | fb5e2f9 | 2008-11-17 20:29:58 +0000 | [diff] [blame] | 2340 | unsigned long flags; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2341 | |
| 2342 | spin_lock_irqsave(&mdp->lock, flags); |
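| | /* Stop the queue if four or fewer Tx descriptors are left free */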
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2343 | if ((mdp->cur_tx - mdp->dirty_tx) >= (mdp->num_tx_ring - 4)) { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2344 | if (!sh_eth_txfree(ndev)) { |
Sergei Shtylyov | 8d5009f | 2014-03-15 03:30:59 +0300 | [diff] [blame] | 2345 | netif_warn(mdp, tx_queued, ndev, "TxFD exhausted.\n"); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2346 | netif_stop_queue(ndev); |
| 2347 | spin_unlock_irqrestore(&mdp->lock, flags); |
Patrick McHardy | 5b54814 | 2009-06-12 06:22:29 +0000 | [diff] [blame] | 2348 | return NETDEV_TX_BUSY; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2349 | } |
| 2350 | } |
| 2351 | spin_unlock_irqrestore(&mdp->lock, flags); |
| 2352 | |
Ben Hutchings | dacc73e | 2015-03-03 00:53:08 +0000 | [diff] [blame] | 2353 | if (skb_put_padto(skb, ETH_ZLEN)) |
Ben Hutchings | eebfb64 | 2015-01-22 12:40:13 +0000 | [diff] [blame] | 2354 | return NETDEV_TX_OK; |
| 2355 | |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2356 | entry = mdp->cur_tx % mdp->num_tx_ring; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2357 | mdp->tx_skbuff[entry] = skb; |
| 2358 | txdesc = &mdp->tx_ring[entry]; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2359 | /* soft swap. */ |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 2360 | if (!mdp->cd->hw_swap) |
Sergei Shtylyov | 3e23099 | 2015-12-13 21:27:04 +0300 | [diff] [blame] | 2361 | sh_eth_soft_swap(PTR_ALIGN(skb->data, 4), skb->len + 2); |
Sergei Shtylyov | 1299653 | 2015-12-13 23:05:07 +0300 | [diff] [blame] | 2362 | dma_addr = dma_map_single(&ndev->dev, skb->data, skb->len, |
| 2363 | DMA_TO_DEVICE); |
| 2364 | if (dma_mapping_error(&ndev->dev, dma_addr)) { |
Ben Hutchings | aa3933b | 2015-01-27 00:49:47 +0000 | [diff] [blame] | 2365 | kfree_skb(skb); |
| 2366 | return NETDEV_TX_OK; |
| 2367 | } |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 2368 | txdesc->addr = cpu_to_le32(dma_addr); |
| 2369 | txdesc->len = cpu_to_le32(skb->len << 16); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2370 | |
Sergei Shtylyov | f32bfb9 | 2015-11-03 22:36:04 +0300 | [diff] [blame] | 2371 | dma_wmb(); /* TACT bit must be set after all the above writes */ |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 2372 | if (entry >= mdp->num_tx_ring - 1) |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 2373 | txdesc->status |= cpu_to_le32(TD_TACT | TD_TDLE); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2374 | else |
Sergei Shtylyov | 7cf7247 | 2015-12-28 02:10:47 +0300 | [diff] [blame] | 2375 | txdesc->status |= cpu_to_le32(TD_TACT); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2376 | |
| 2377 | mdp->cur_tx++; |
| 2378 | |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 2379 | if (!(sh_eth_read(ndev, EDTRR) & sh_eth_get_edtrr_trns(mdp))) |
| 2380 | sh_eth_write(ndev, sh_eth_get_edtrr_trns(mdp), EDTRR); |
Nobuhiro Iwamatsu | b0ca2a2 | 2008-06-30 11:08:17 +0900 | [diff] [blame] | 2381 | |
Patrick McHardy | 6ed1065 | 2009-06-23 06:03:08 +0000 | [diff] [blame] | 2382 | return NETDEV_TX_OK; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2383 | } |
| 2384 | |
Ben Hutchings | 4398f9c | 2015-02-26 20:35:05 +0000 | [diff] [blame] | 2385 | /* The statistics registers have write-clear behaviour, which means we |
| 2386 | * will lose any increment between the read and write. We mitigate |
| 2387 | * this by only clearing when we read a non-zero value, so we will |
| 2388 | * never falsely report a total of zero. |
| 2389 | */ |
| 2390 | static void |
| 2391 | sh_eth_update_stat(struct net_device *ndev, unsigned long *stat, int reg) |
| 2392 | { |
| 2393 | u32 delta = sh_eth_read(ndev, reg); |
| 2394 | |
| 2395 | if (delta) { |
| 2396 | *stat += delta; |
| 2397 | sh_eth_write(ndev, 0, reg); |
| 2398 | } |
| 2399 | } |
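| | /* For example, a TROCR readout of 5 adds 5 to tx_dropped and clears
| |  * the register; an increment landing between the read and the write
| |  * is still lost, but a zero readout never triggers a clearing write.
| |  */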
| 2400 | |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2401 | static struct net_device_stats *sh_eth_get_stats(struct net_device *ndev) |
| 2402 | { |
| 2403 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2404 | |
| 2405 | if (sh_eth_is_rz_fast_ether(mdp)) |
| 2406 | return &ndev->stats; |
| 2407 | |
| 2408 | if (!mdp->is_opened) |
| 2409 | return &ndev->stats; |
| 2410 | |
Ben Hutchings | 4398f9c | 2015-02-26 20:35:05 +0000 | [diff] [blame] | 2411 | sh_eth_update_stat(ndev, &ndev->stats.tx_dropped, TROCR); |
| 2412 | sh_eth_update_stat(ndev, &ndev->stats.collisions, CDCR); |
| 2413 | sh_eth_update_stat(ndev, &ndev->stats.tx_carrier_errors, LCCR); |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2414 | |
| 2415 | if (sh_eth_is_gether(mdp)) { |
Ben Hutchings | 4398f9c | 2015-02-26 20:35:05 +0000 | [diff] [blame] | 2416 | sh_eth_update_stat(ndev, &ndev->stats.tx_carrier_errors, |
| 2417 | CERCR); |
| 2418 | sh_eth_update_stat(ndev, &ndev->stats.tx_carrier_errors, |
| 2419 | CEECR); |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2420 | } else { |
Ben Hutchings | 4398f9c | 2015-02-26 20:35:05 +0000 | [diff] [blame] | 2421 | sh_eth_update_stat(ndev, &ndev->stats.tx_carrier_errors, |
| 2422 | CNDCR); |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2423 | } |
| 2424 | |
| 2425 | return &ndev->stats; |
| 2426 | } |
| 2427 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2428 | /* device close function */ |
| 2429 | static int sh_eth_close(struct net_device *ndev) |
| 2430 | { |
| 2431 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2432 | |
| 2433 | netif_stop_queue(ndev); |
| 2434 | |
Ben Hutchings | 283e38d | 2015-01-22 12:44:08 +0000 | [diff] [blame] | 2435 | /* Serialise with the interrupt handler and NAPI, then disable |
| 2436 | * interrupts. We have to clear the irq_enabled flag first to |
| 2437 | * ensure that interrupts won't be re-enabled. |
| 2438 | */ |
| 2439 | mdp->irq_enabled = false; |
| 2440 | synchronize_irq(ndev->irq); |
| 2441 | napi_disable(&mdp->napi); |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2442 | sh_eth_write(ndev, 0x0000, EESIPR); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2443 | |
Ben Hutchings | 740c7f3 | 2015-01-27 00:49:32 +0000 | [diff] [blame] | 2444 | sh_eth_dev_exit(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2445 | |
| 2446 | /* PHY Disconnect */ |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 2447 | if (ndev->phydev) { |
| 2448 | phy_stop(ndev->phydev); |
| 2449 | phy_disconnect(ndev->phydev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2450 | } |
| 2451 | |
| 2452 | free_irq(ndev->irq, ndev); |
| 2453 | |
Sergei Shtylyov | 8e03a5e | 2015-11-04 00:55:13 +0300 | [diff] [blame] | 2454 | /* Free all the skbuffs in the Rx queue and the DMA buffer. */ |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2455 | sh_eth_ring_free(ndev); |
| 2456 | |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 2457 | pm_runtime_put_sync(&mdp->pdev->dev); |
| 2458 | |
Mitsuhiro Kimura | 7fa2955 | 2014-11-28 10:04:15 +0900 | [diff] [blame] | 2459 | mdp->is_opened = 0; |
| 2460 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2461 | return 0; |
| 2462 | } |
| 2463 | |
Eric Dumazet | bb7d92e | 2012-02-06 22:17:21 +0000 | [diff] [blame] | 2464 | /* Device ioctl function */
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 2465 | static int sh_eth_do_ioctl(struct net_device *ndev, struct ifreq *rq, int cmd) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2466 | { |
Philippe Reynes | 9fd0375 | 2016-08-10 00:04:48 +0200 | [diff] [blame] | 2467 | struct phy_device *phydev = ndev->phydev; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2468 | |
| 2469 | if (!netif_running(ndev)) |
| 2470 | return -EINVAL; |
| 2471 | |
| 2472 | if (!phydev) |
| 2473 | return -ENODEV; |
| 2474 | |
Richard Cochran | 28b0411 | 2010-07-17 08:48:55 +0000 | [diff] [blame] | 2475 | return phy_mii_ioctl(phydev, rq, cmd); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2476 | } |
| 2477 | |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2478 | /* For TSU_POSTn. Please refer to the manual for these (strange) bitfields */
| 2479 | static void *sh_eth_tsu_get_post_reg_offset(struct sh_eth_private *mdp, |
| 2480 | int entry) |
| 2481 | { |
| 2482 | return sh_eth_tsu_get_offset(mdp, TSU_POST1) + (entry / 8 * 4); |
| 2483 | } |
| 2484 | |
| 2485 | static u32 sh_eth_tsu_get_post_mask(int entry) |
| 2486 | { |
| 2487 | return 0x0f << (28 - ((entry % 8) * 4)); |
| 2488 | } |
| 2489 | |
| 2490 | static u32 sh_eth_tsu_get_post_bit(struct sh_eth_private *mdp, int entry) |
| 2491 | { |
| 2492 | return (0x08 >> (mdp->port << 1)) << (28 - ((entry % 8) * 4)); |
| 2493 | } |
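| | /* A sketch of the TSU_POSTn layout implied by the helpers above:
| |  * each 32-bit register covers eight CAM entries, four bits apiece,
| |  * entry (n % 8 == 0) in bits 31..28 down to entry (n % 8 == 7) in
| |  * bits 3..0. Within a nibble, bit 3 enables the entry for port 0 and
| |  * bit 1 for port 1; see the manual for the remaining bits.
| |  */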
| 2494 | |
| 2495 | static void sh_eth_tsu_enable_cam_entry_post(struct net_device *ndev, |
| 2496 | int entry) |
| 2497 | { |
| 2498 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2499 | u32 tmp; |
| 2500 | void *reg_offset; |
| 2501 | |
| 2502 | reg_offset = sh_eth_tsu_get_post_reg_offset(mdp, entry); |
| 2503 | tmp = ioread32(reg_offset); |
| 2504 | iowrite32(tmp | sh_eth_tsu_get_post_bit(mdp, entry), reg_offset); |
| 2505 | } |
| 2506 | |
| 2507 | static bool sh_eth_tsu_disable_cam_entry_post(struct net_device *ndev, |
| 2508 | int entry) |
| 2509 | { |
| 2510 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2511 | u32 post_mask, ref_mask, tmp; |
| 2512 | void *reg_offset; |
| 2513 | |
| 2514 | reg_offset = sh_eth_tsu_get_post_reg_offset(mdp, entry); |
| 2515 | post_mask = sh_eth_tsu_get_post_mask(entry); |
| 2516 | ref_mask = sh_eth_tsu_get_post_bit(mdp, entry) & ~post_mask; |
| 2517 | |
| 2518 | tmp = ioread32(reg_offset); |
| 2519 | iowrite32(tmp & ~post_mask, reg_offset); |
| 2520 | |
| 2521 | /* Return "true" if the entry is still enabled for the other port */
| 2522 | return tmp & ref_mask; |
| 2523 | } |
| 2524 | |
| 2525 | static int sh_eth_tsu_busy(struct net_device *ndev) |
| 2526 | { |
| 2527 | int timeout = SH_ETH_TSU_TIMEOUT_MS * 100; |
| 2528 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2529 | |
| 2530 | while ((sh_eth_tsu_read(mdp, TSU_ADSBSY) & TSU_ADSBSY_0)) { |
| 2531 | udelay(10); |
| 2532 | timeout--; |
| 2533 | if (timeout <= 0) { |
Sergei Shtylyov | da24685 | 2014-03-15 03:29:14 +0300 | [diff] [blame] | 2534 | netdev_err(ndev, "%s: timeout\n", __func__); |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2535 | return -ETIMEDOUT; |
| 2536 | } |
| 2537 | } |
| 2538 | |
| 2539 | return 0; |
| 2540 | } |
| 2541 | |
| 2542 | static int sh_eth_tsu_write_entry(struct net_device *ndev, void *reg, |
| 2543 | const u8 *addr) |
| 2544 | { |
| 2545 | u32 val; |
| 2546 | |
| 2547 | val = addr[0] << 24 | addr[1] << 16 | addr[2] << 8 | addr[3]; |
| 2548 | iowrite32(val, reg); |
| 2549 | if (sh_eth_tsu_busy(ndev) < 0) |
| 2550 | return -EBUSY; |
| 2551 | |
| 2552 | val = addr[4] << 8 | addr[5]; |
| 2553 | iowrite32(val, reg + 4); |
| 2554 | if (sh_eth_tsu_busy(ndev) < 0) |
| 2555 | return -EBUSY; |
| 2556 | |
| 2557 | return 0; |
| 2558 | } |
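| | /* For example, 00:11:22:33:44:55 is written as 0x00112233 to reg and
| |  * 0x00004455 to reg + 4, with a TSU_ADSBSY busy-wait after each word.
| |  */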
| 2559 | |
| 2560 | static void sh_eth_tsu_read_entry(void *reg, u8 *addr) |
| 2561 | { |
| 2562 | u32 val; |
| 2563 | |
| 2564 | val = ioread32(reg); |
| 2565 | addr[0] = (val >> 24) & 0xff; |
| 2566 | addr[1] = (val >> 16) & 0xff; |
| 2567 | addr[2] = (val >> 8) & 0xff; |
| 2568 | addr[3] = val & 0xff; |
| 2569 | val = ioread32(reg + 4); |
| 2570 | addr[4] = (val >> 8) & 0xff; |
| 2571 | addr[5] = val & 0xff; |
| 2572 | } |
| 2573 |
| 2575 | static int sh_eth_tsu_find_entry(struct net_device *ndev, const u8 *addr) |
| 2576 | { |
| 2577 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2578 | void *reg_offset = sh_eth_tsu_get_offset(mdp, TSU_ADRH0); |
| 2579 | int i; |
| 2580 | u8 c_addr[ETH_ALEN]; |
| 2581 | |
| 2582 | for (i = 0; i < SH_ETH_TSU_CAM_ENTRIES; i++, reg_offset += 8) { |
| 2583 | sh_eth_tsu_read_entry(reg_offset, c_addr); |
dingtianhong | c4bde29 | 2013-12-30 15:41:17 +0800 | [diff] [blame] | 2584 | if (ether_addr_equal(addr, c_addr)) |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2585 | return i; |
| 2586 | } |
| 2587 | |
| 2588 | return -ENOENT; |
| 2589 | } |
| 2590 | |
| 2591 | static int sh_eth_tsu_find_empty(struct net_device *ndev) |
| 2592 | { |
| 2593 | u8 blank[ETH_ALEN]; |
| 2594 | int entry; |
| 2595 | |
| 2596 | memset(blank, 0, sizeof(blank)); |
| 2597 | entry = sh_eth_tsu_find_entry(ndev, blank); |
| 2598 | return (entry < 0) ? -ENOMEM : entry; |
| 2599 | } |
| 2600 | |
| 2601 | static int sh_eth_tsu_disable_cam_entry_table(struct net_device *ndev, |
| 2602 | int entry) |
| 2603 | { |
| 2604 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2605 | void *reg_offset = sh_eth_tsu_get_offset(mdp, TSU_ADRH0); |
| 2606 | int ret; |
| 2607 | u8 blank[ETH_ALEN]; |
| 2608 | |
| 2609 | sh_eth_tsu_write(mdp, sh_eth_tsu_read(mdp, TSU_TEN) & |
| 2610 | ~(1 << (31 - entry)), TSU_TEN); |
| 2611 | |
| 2612 | memset(blank, 0, sizeof(blank)); |
| 2613 | ret = sh_eth_tsu_write_entry(ndev, reg_offset + entry * 8, blank); |
| 2614 | if (ret < 0) |
| 2615 | return ret; |
| 2616 | return 0; |
| 2617 | } |
| 2618 | |
| 2619 | static int sh_eth_tsu_add_entry(struct net_device *ndev, const u8 *addr) |
| 2620 | { |
| 2621 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2622 | void *reg_offset = sh_eth_tsu_get_offset(mdp, TSU_ADRH0); |
| 2623 | int i, ret; |
| 2624 | |
| 2625 | if (!mdp->cd->tsu) |
| 2626 | return 0; |
| 2627 | |
| 2628 | i = sh_eth_tsu_find_entry(ndev, addr); |
| 2629 | if (i < 0) { |
| 2630 | /* No entry found, create one */ |
| 2631 | i = sh_eth_tsu_find_empty(ndev); |
| 2632 | if (i < 0) |
| 2633 | return -ENOMEM; |
| 2634 | ret = sh_eth_tsu_write_entry(ndev, reg_offset + i * 8, addr); |
| 2635 | if (ret < 0) |
| 2636 | return ret; |
| 2637 | |
| 2638 | /* Enable the entry */ |
| 2639 | sh_eth_tsu_write(mdp, sh_eth_tsu_read(mdp, TSU_TEN) | |
| 2640 | (1 << (31 - i)), TSU_TEN); |
| 2641 | } |
| 2642 | |
| 2643 | /* Entry found or created, enable POST */ |
| 2644 | sh_eth_tsu_enable_cam_entry_post(ndev, i); |
| 2645 | |
| 2646 | return 0; |
| 2647 | } |
| 2648 | |
| 2649 | static int sh_eth_tsu_del_entry(struct net_device *ndev, const u8 *addr) |
| 2650 | { |
| 2651 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2652 | int i, ret; |
| 2653 | |
| 2654 | if (!mdp->cd->tsu) |
| 2655 | return 0; |
| 2656 | |
| 2657 | i = sh_eth_tsu_find_entry(ndev, addr); |
| 2658 | if (i >= 0) {
| 2659 | /* Entry found */ |
| 2660 | if (sh_eth_tsu_disable_cam_entry_post(ndev, i)) |
| 2661 | goto done; |
| 2662 | |
| 2663 | /* Disable the entry if both ports have disabled it */
| 2664 | ret = sh_eth_tsu_disable_cam_entry_table(ndev, i); |
| 2665 | if (ret < 0) |
| 2666 | return ret; |
| 2667 | } |
| 2668 | done: |
| 2669 | return 0; |
| 2670 | } |
| 2671 | |
| 2672 | static int sh_eth_tsu_purge_all(struct net_device *ndev) |
| 2673 | { |
| 2674 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2675 | int i, ret; |
| 2676 | |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2677 | if (!mdp->cd->tsu) |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2678 | return 0; |
| 2679 | |
| 2680 | for (i = 0; i < SH_ETH_TSU_CAM_ENTRIES; i++) { |
| 2681 | if (sh_eth_tsu_disable_cam_entry_post(ndev, i)) |
| 2682 | continue; |
| 2683 | |
| 2684 | /* Disable the entry if both ports have disabled it */
| 2685 | ret = sh_eth_tsu_disable_cam_entry_table(ndev, i); |
| 2686 | if (ret < 0) |
| 2687 | return ret; |
| 2688 | } |
| 2689 | |
| 2690 | return 0; |
| 2691 | } |
| 2692 | |
| 2693 | static void sh_eth_tsu_purge_mcast(struct net_device *ndev) |
| 2694 | { |
| 2695 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2696 | u8 addr[ETH_ALEN]; |
| 2697 | void *reg_offset = sh_eth_tsu_get_offset(mdp, TSU_ADRH0); |
| 2698 | int i; |
| 2699 | |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2700 | if (!mdp->cd->tsu) |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2701 | return; |
| 2702 | |
| 2703 | for (i = 0; i < SH_ETH_TSU_CAM_ENTRIES; i++, reg_offset += 8) { |
| 2704 | sh_eth_tsu_read_entry(reg_offset, addr); |
| 2705 | if (is_multicast_ether_addr(addr)) |
| 2706 | sh_eth_tsu_del_entry(ndev, addr); |
| 2707 | } |
| 2708 | } |
| 2709 | |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2710 | /* Update promiscuous flag and multicast filter */ |
| 2711 | static void sh_eth_set_rx_mode(struct net_device *ndev) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2712 | { |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2713 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2714 | u32 ecmr_bits; |
| 2715 | int mcast_all = 0; |
| 2716 | unsigned long flags; |
| 2717 | |
| 2718 | spin_lock_irqsave(&mdp->lock, flags); |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 2719 | /* Initial condition is MCT = 1, PRM = 0. |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2720 | * Depending on ndev->flags, set PRM or clear MCT |
| 2721 | */ |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2722 | ecmr_bits = sh_eth_read(ndev, ECMR) & ~ECMR_PRM; |
| 2723 | if (mdp->cd->tsu) |
| 2724 | ecmr_bits |= ECMR_MCT; |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2725 | |
| 2726 | if (!(ndev->flags & IFF_MULTICAST)) { |
| 2727 | sh_eth_tsu_purge_mcast(ndev); |
| 2728 | mcast_all = 1; |
| 2729 | } |
| 2730 | if (ndev->flags & IFF_ALLMULTI) { |
| 2731 | sh_eth_tsu_purge_mcast(ndev); |
| 2732 | ecmr_bits &= ~ECMR_MCT; |
| 2733 | mcast_all = 1; |
| 2734 | } |
| 2735 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2736 | if (ndev->flags & IFF_PROMISC) { |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2737 | sh_eth_tsu_purge_all(ndev); |
| 2738 | ecmr_bits = (ecmr_bits & ~ECMR_MCT) | ECMR_PRM; |
| 2739 | } else if (mdp->cd->tsu) { |
| 2740 | struct netdev_hw_addr *ha; |
| 2741 | netdev_for_each_mc_addr(ha, ndev) { |
| 2742 | if (mcast_all && is_multicast_ether_addr(ha->addr)) |
| 2743 | continue; |
| 2744 | |
| 2745 | if (sh_eth_tsu_add_entry(ndev, ha->addr) < 0) { |
| 2746 | if (!mcast_all) { |
| 2747 | sh_eth_tsu_purge_mcast(ndev); |
| 2748 | ecmr_bits &= ~ECMR_MCT; |
| 2749 | mcast_all = 1; |
| 2750 | } |
| 2751 | } |
| 2752 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2753 | } |
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 2754 | |
| 2755 | /* update the ethernet mode */ |
| 2756 | sh_eth_write(ndev, ecmr_bits, ECMR); |
| 2757 | |
| 2758 | spin_unlock_irqrestore(&mdp->lock, flags); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2759 | } |
Yoshihiro Shimoda | 71cc7c3 | 2012-02-15 17:55:06 +0000 | [diff] [blame] | 2760 | |
| 2761 | static int sh_eth_get_vtag_index(struct sh_eth_private *mdp) |
| 2762 | { |
| 2763 | if (!mdp->port) |
| 2764 | return TSU_VTAG0; |
| 2765 | else |
| 2766 | return TSU_VTAG1; |
| 2767 | } |
| 2768 | |
Patrick McHardy | 80d5c36 | 2013-04-19 02:04:28 +0000 | [diff] [blame] | 2769 | static int sh_eth_vlan_rx_add_vid(struct net_device *ndev, |
| 2770 | __be16 proto, u16 vid) |
Yoshihiro Shimoda | 71cc7c3 | 2012-02-15 17:55:06 +0000 | [diff] [blame] | 2771 | { |
| 2772 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2773 | int vtag_reg_index = sh_eth_get_vtag_index(mdp); |
| 2774 | |
| 2775 | if (unlikely(!mdp->cd->tsu)) |
| 2776 | return -EPERM; |
| 2777 | |
| 2778 | /* No filtering if vid = 0 */ |
| 2779 | if (!vid) |
| 2780 | return 0; |
| 2781 | |
| 2782 | mdp->vlan_num_ids++; |
| 2783 | |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 2784 | /* The controller has only one VLAN tag HW filter. So, if a second VID
Yoshihiro Shimoda | 71cc7c3 | 2012-02-15 17:55:06 +0000 | [diff] [blame] | 2785 | * is added, the driver disables the filter and all VLAN-tagged frames
 | 2786 | * then pass through unfiltered. */
| 2787 | if (mdp->vlan_num_ids > 1) { |
| 2788 | /* disable VLAN filter */ |
| 2789 | sh_eth_tsu_write(mdp, 0, vtag_reg_index); |
| 2790 | return 0; |
| 2791 | } |
| 2792 | |
| 2793 | sh_eth_tsu_write(mdp, TSU_VTAG_ENABLE | (vid & TSU_VTAG_VID_MASK), |
| 2794 | vtag_reg_index); |
| 2795 | |
| 2796 | return 0; |
| 2797 | } |
| 2798 | |
Patrick McHardy | 80d5c36 | 2013-04-19 02:04:28 +0000 | [diff] [blame] | 2799 | static int sh_eth_vlan_rx_kill_vid(struct net_device *ndev, |
| 2800 | __be16 proto, u16 vid) |
Yoshihiro Shimoda | 71cc7c3 | 2012-02-15 17:55:06 +0000 | [diff] [blame] | 2801 | { |
| 2802 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 2803 | int vtag_reg_index = sh_eth_get_vtag_index(mdp); |
| 2804 | |
| 2805 | if (unlikely(!mdp->cd->tsu)) |
| 2806 | return -EPERM; |
| 2807 | |
| 2808 | /* No filtering if vid = 0 */ |
| 2809 | if (!vid) |
| 2810 | return 0; |
| 2811 | |
| 2812 | mdp->vlan_num_ids--; |
| 2813 | sh_eth_tsu_write(mdp, 0, vtag_reg_index); |
| 2814 | |
| 2815 | return 0; |
| 2816 | } |
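| | /* Usage sketch (device names hypothetical): creating a VLAN device,
| |  * e.g. `ip link add link eth0 name eth0.100 type vlan id 100`,
| |  * typically reaches the add callback and installs the single HW
| |  * filter; a second VID disables the filter again, and deleting VLAN
| |  * devices funnels through the kill callback above.
| |  */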
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2817 | |
| 2818 | /* SuperH's TSU register init function */ |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2819 | static void sh_eth_tsu_init(struct sh_eth_private *mdp) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2820 | { |
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 2821 | if (sh_eth_is_rz_fast_ether(mdp)) { |
| 2822 | sh_eth_tsu_write(mdp, 0, TSU_TEN); /* Disable all CAM entry */ |
Chris Brandt | e148788 | 2016-09-07 14:57:09 -0400 | [diff] [blame] | 2823 | sh_eth_tsu_write(mdp, TSU_FWSLC_POSTENU | TSU_FWSLC_POSTENL, |
| 2824 | TSU_FWSLC); /* Enable POST registers */ |
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 2825 | return; |
| 2826 | } |
| 2827 | |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2828 | sh_eth_tsu_write(mdp, 0, TSU_FWEN0); /* Disable forward(0->1) */ |
| 2829 | sh_eth_tsu_write(mdp, 0, TSU_FWEN1); /* Disable forward(1->0) */ |
| 2830 | sh_eth_tsu_write(mdp, 0, TSU_FCM); /* forward fifo 3k-3k */ |
| 2831 | sh_eth_tsu_write(mdp, 0xc, TSU_BSYSL0); |
| 2832 | sh_eth_tsu_write(mdp, 0xc, TSU_BSYSL1); |
| 2833 | sh_eth_tsu_write(mdp, 0, TSU_PRISL0); |
| 2834 | sh_eth_tsu_write(mdp, 0, TSU_PRISL1); |
| 2835 | sh_eth_tsu_write(mdp, 0, TSU_FWSL0); |
| 2836 | sh_eth_tsu_write(mdp, 0, TSU_FWSL1); |
| 2837 | sh_eth_tsu_write(mdp, TSU_FWSLC_POSTENU | TSU_FWSLC_POSTENL, TSU_FWSLC); |
Yoshihiro Shimoda | c5ed536 | 2011-03-07 21:59:38 +0000 | [diff] [blame] | 2838 | if (sh_eth_is_gether(mdp)) { |
| 2839 | sh_eth_tsu_write(mdp, 0, TSU_QTAG0); /* Disable QTAG(0->1) */ |
| 2840 | sh_eth_tsu_write(mdp, 0, TSU_QTAG1); /* Disable QTAG(1->0) */ |
| 2841 | } else { |
| 2842 | sh_eth_tsu_write(mdp, 0, TSU_QTAGM0); /* Disable QTAG(0->1) */ |
| 2843 | sh_eth_tsu_write(mdp, 0, TSU_QTAGM1); /* Disable QTAG(1->0) */ |
| 2844 | } |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2845 | sh_eth_tsu_write(mdp, 0, TSU_FWSR); /* all interrupt status clear */ |
| 2846 | sh_eth_tsu_write(mdp, 0, TSU_FWINMK); /* Disable all interrupt */ |
| 2847 | sh_eth_tsu_write(mdp, 0, TSU_TEN); /* Disable all CAM entry */ |
| 2848 | sh_eth_tsu_write(mdp, 0, TSU_POST1); /* Disable CAM entry [ 0- 7] */ |
| 2849 | sh_eth_tsu_write(mdp, 0, TSU_POST2); /* Disable CAM entry [ 8-15] */ |
| 2850 | sh_eth_tsu_write(mdp, 0, TSU_POST3); /* Disable CAM entry [16-23] */ |
| 2851 | sh_eth_tsu_write(mdp, 0, TSU_POST4); /* Disable CAM entry [24-31] */ |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2852 | } |
| 2853 | |
| 2854 | /* MDIO bus release function */ |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2855 | static int sh_mdio_release(struct sh_eth_private *mdp) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2856 | { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2857 | /* unregister mdio bus */ |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2858 | mdiobus_unregister(mdp->mii_bus); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2859 | |
| 2860 | /* free bitbang info */ |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2861 | free_mdio_bitbang(mdp->mii_bus); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2862 | |
| 2863 | return 0; |
| 2864 | } |
| 2865 | |
| 2866 | /* MDIO bus init function */ |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2867 | static int sh_mdio_init(struct sh_eth_private *mdp, |
Yoshihiro Shimoda | b3017e6 | 2011-03-07 21:59:55 +0000 | [diff] [blame] | 2868 | struct sh_eth_plat_data *pd) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2869 | { |
Andrew Lunn | e7f4dc3 | 2016-01-06 20:11:15 +0100 | [diff] [blame] | 2870 | int ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2871 | struct bb_info *bitbang; |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2872 | struct platform_device *pdev = mdp->pdev; |
Laurent Pinchart | aa8d422 | 2014-03-20 15:00:31 +0100 | [diff] [blame] | 2873 | struct device *dev = &mdp->pdev->dev; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2874 | |
| 2875 | /* create bit control struct for PHY */ |
Laurent Pinchart | aa8d422 | 2014-03-20 15:00:31 +0100 | [diff] [blame] | 2876 | bitbang = devm_kzalloc(dev, sizeof(struct bb_info), GFP_KERNEL); |
Laurent Pinchart | f738a13 | 2014-03-20 15:00:35 +0100 | [diff] [blame] | 2877 | if (!bitbang) |
| 2878 | return -ENOMEM; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2879 | |
| 2880 | /* bitbang init */ |
Yoshihiro Shimoda | ae70644 | 2011-09-27 21:48:58 +0000 | [diff] [blame] | 2881 | bitbang->addr = mdp->addr + mdp->reg_offset[PIR]; |
Yoshihiro Shimoda | b3017e6 | 2011-03-07 21:59:55 +0000 | [diff] [blame] | 2882 | bitbang->set_gate = pd->set_mdio_gate; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2883 | bitbang->ctrl.ops = &bb_ops; |
| 2884 | |
Stefan Weil | c2e07b3 | 2010-08-03 19:44:52 +0200 | [diff] [blame] | 2885 | /* MII controller setting */ |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2886 | mdp->mii_bus = alloc_mdio_bitbang(&bitbang->ctrl); |
Laurent Pinchart | f738a13 | 2014-03-20 15:00:35 +0100 | [diff] [blame] | 2887 | if (!mdp->mii_bus) |
| 2888 | return -ENOMEM; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2889 | |
| 2890 | /* Hook up MII support for ethtool */ |
| 2891 | mdp->mii_bus->name = "sh_mii"; |
Laurent Pinchart | a5bd6060 | 2014-03-20 15:00:32 +0100 | [diff] [blame] | 2892 | mdp->mii_bus->parent = dev; |
Florian Fainelli | 5278fb5 | 2012-01-09 23:59:17 +0000 | [diff] [blame] | 2893 | snprintf(mdp->mii_bus->id, MII_BUS_ID_SIZE, "%s-%x", |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2894 | pdev->name, pdev->id); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2895 | |
Laurent Pinchart | bd920ff | 2014-03-20 15:00:33 +0100 | [diff] [blame] | 2896 | /* register MDIO bus */ |
| 2897 | if (dev->of_node) { |
| 2898 | ret = of_mdiobus_register(mdp->mii_bus, dev->of_node); |
Ben Dooks | 702eca0 | 2014-03-12 17:47:40 +0000 | [diff] [blame] | 2899 | } else { |
Ben Dooks | 702eca0 | 2014-03-12 17:47:40 +0000 | [diff] [blame] | 2900 | if (pd->phy_irq > 0) |
| 2901 | mdp->mii_bus->irq[pd->phy] = pd->phy_irq; |
| 2902 | |
| 2903 | ret = mdiobus_register(mdp->mii_bus); |
| 2904 | } |
| 2905 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2906 | if (ret) |
Sergei Shtylyov | d5e07e6 | 2013-03-21 10:41:11 +0000 | [diff] [blame] | 2907 | goto out_free_bus; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2908 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2909 | return 0; |
| 2910 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2911 | out_free_bus: |
Lennert Buytenhek | 298cf9b | 2008-10-08 16:29:57 -0700 | [diff] [blame] | 2912 | free_mdio_bitbang(mdp->mii_bus); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 2913 | return ret; |
| 2914 | } |
| 2915 | |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2916 | static const u16 *sh_eth_get_register_offset(int register_type) |
| 2917 | { |
| 2918 | const u16 *reg_offset = NULL; |
| 2919 | |
| 2920 | switch (register_type) { |
| 2921 | case SH_ETH_REG_GIGABIT: |
| 2922 | reg_offset = sh_eth_offset_gigabit; |
| 2923 | break; |
Simon Horman | db89347 | 2014-01-17 09:22:28 +0900 | [diff] [blame] | 2924 | case SH_ETH_REG_FAST_RZ: |
| 2925 | reg_offset = sh_eth_offset_fast_rz; |
| 2926 | break; |
Sergei Shtylyov | a3f109b | 2013-03-28 11:51:31 +0000 | [diff] [blame] | 2927 | case SH_ETH_REG_FAST_RCAR: |
| 2928 | reg_offset = sh_eth_offset_fast_rcar; |
| 2929 | break; |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2930 | case SH_ETH_REG_FAST_SH4: |
| 2931 | reg_offset = sh_eth_offset_fast_sh4; |
| 2932 | break; |
| 2933 | case SH_ETH_REG_FAST_SH3_SH2: |
| 2934 | reg_offset = sh_eth_offset_fast_sh3_sh2; |
| 2935 | break; |
Yoshihiro Shimoda | 4a55530 | 2011-03-07 21:59:26 +0000 | [diff] [blame] | 2936 | } |
| 2937 | |
| 2938 | return reg_offset; |
| 2939 | } |
| 2940 | |
Sergei Shtylyov | 8f728d7 | 2013-06-13 00:55:34 +0400 | [diff] [blame] | 2941 | static const struct net_device_ops sh_eth_netdev_ops = { |
Alexander Beregalov | ebf84ea | 2009-04-11 07:40:49 +0000 | [diff] [blame] | 2942 | .ndo_open = sh_eth_open, |
| 2943 | .ndo_stop = sh_eth_close, |
| 2944 | .ndo_start_xmit = sh_eth_start_xmit, |
| 2945 | .ndo_get_stats = sh_eth_get_stats, |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2946 | .ndo_set_rx_mode = sh_eth_set_rx_mode, |
Alexander Beregalov | ebf84ea | 2009-04-11 07:40:49 +0000 | [diff] [blame] | 2947 | .ndo_tx_timeout = sh_eth_tx_timeout, |
| 2948 | .ndo_do_ioctl = sh_eth_do_ioctl, |
| 2949 | .ndo_validate_addr = eth_validate_addr, |
| 2950 | .ndo_set_mac_address = eth_mac_addr, |
Alexander Beregalov | ebf84ea | 2009-04-11 07:40:49 +0000 | [diff] [blame] | 2951 | }; |
| 2952 | |
Sergei Shtylyov | 8f728d7 | 2013-06-13 00:55:34 +0400 | [diff] [blame] | 2953 | static const struct net_device_ops sh_eth_netdev_ops_tsu = { |
| 2954 | .ndo_open = sh_eth_open, |
| 2955 | .ndo_stop = sh_eth_close, |
| 2956 | .ndo_start_xmit = sh_eth_start_xmit, |
| 2957 | .ndo_get_stats = sh_eth_get_stats, |
Ben Hutchings | b37feed | 2015-01-16 17:51:12 +0000 | [diff] [blame] | 2958 | .ndo_set_rx_mode = sh_eth_set_rx_mode, |
Sergei Shtylyov | 8f728d7 | 2013-06-13 00:55:34 +0400 | [diff] [blame] | 2959 | .ndo_vlan_rx_add_vid = sh_eth_vlan_rx_add_vid, |
| 2960 | .ndo_vlan_rx_kill_vid = sh_eth_vlan_rx_kill_vid, |
| 2961 | .ndo_tx_timeout = sh_eth_tx_timeout, |
| 2962 | .ndo_do_ioctl = sh_eth_do_ioctl, |
| 2963 | .ndo_validate_addr = eth_validate_addr, |
| 2964 | .ndo_set_mac_address = eth_mac_addr, |
Sergei Shtylyov | 8f728d7 | 2013-06-13 00:55:34 +0400 | [diff] [blame] | 2965 | }; |
| 2966 | |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 2967 | #ifdef CONFIG_OF |
| 2968 | static struct sh_eth_plat_data *sh_eth_parse_dt(struct device *dev) |
| 2969 | { |
| 2970 | struct device_node *np = dev->of_node; |
| 2971 | struct sh_eth_plat_data *pdata; |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 2972 | const char *mac_addr; |
| 2973 | |
| 2974 | pdata = devm_kzalloc(dev, sizeof(*pdata), GFP_KERNEL); |
| 2975 | if (!pdata) |
| 2976 | return NULL; |
| 2977 | |
| 2978 | pdata->phy_interface = of_get_phy_mode(np); |
| 2979 | |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 2980 | mac_addr = of_get_mac_address(np); |
| 2981 | if (mac_addr) |
| 2982 | memcpy(pdata->mac_addr, mac_addr, ETH_ALEN); |
| 2983 | |
| 2984 | pdata->no_ether_link = |
| 2985 | of_property_read_bool(np, "renesas,no-ether-link"); |
| 2986 | pdata->ether_link_active_low = |
| 2987 | of_property_read_bool(np, "renesas,ether-link-active-low"); |
| 2988 | |
| 2989 | return pdata; |
| 2990 | } |
| 2991 | |
| 2992 | static const struct of_device_id sh_eth_match_table[] = { |
| 2993 | { .compatible = "renesas,gether-r8a7740", .data = &r8a7740_data }, |
Sergei Shtylyov | c099ff3 | 2016-09-27 01:23:26 +0300 | [diff] [blame] | 2994 | { .compatible = "renesas,ether-r8a7743", .data = &r8a779x_data }, |
| 2995 | { .compatible = "renesas,ether-r8a7745", .data = &r8a779x_data }, |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 2996 | { .compatible = "renesas,ether-r8a7778", .data = &r8a777x_data }, |
| 2997 | { .compatible = "renesas,ether-r8a7779", .data = &r8a777x_data }, |
| 2998 | { .compatible = "renesas,ether-r8a7790", .data = &r8a779x_data }, |
| 2999 | { .compatible = "renesas,ether-r8a7791", .data = &r8a779x_data }, |
Hisashi Nakamura | 9488e1e | 2014-11-13 15:59:07 +0900 | [diff] [blame] | 3000 | { .compatible = "renesas,ether-r8a7793", .data = &r8a779x_data }, |
Hisashi Nakamura | 0f76b9d | 2014-08-01 17:03:00 +0200 | [diff] [blame] | 3001 | { .compatible = "renesas,ether-r8a7794", .data = &r8a779x_data }, |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 3002 | { .compatible = "renesas,ether-r7s72100", .data = &r7s72100_data }, |
| 3003 | { } |
| 3004 | }; |
| 3005 | MODULE_DEVICE_TABLE(of, sh_eth_match_table); |
| 3006 | #else |
| 3007 | static inline struct sh_eth_plat_data *sh_eth_parse_dt(struct device *dev) |
| 3008 | { |
| 3009 | return NULL; |
| 3010 | } |
| 3011 | #endif |
| 3012 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3013 | static int sh_eth_drv_probe(struct platform_device *pdev) |
| 3014 | { |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3015 | struct resource *res; |
Jingoo Han | 0b76b86 | 2013-08-30 14:00:11 +0900 | [diff] [blame] | 3016 | struct sh_eth_plat_data *pd = dev_get_platdata(&pdev->dev); |
Sergei Shtylyov | afe391a | 2013-06-07 13:54:02 +0000 | [diff] [blame] | 3017 | const struct platform_device_id *id = platform_get_device_id(pdev); |
Sergei Shtylyov | 4fa8c3c | 2016-03-13 01:29:45 +0300 | [diff] [blame] | 3018 | struct sh_eth_private *mdp; |
| 3019 | struct net_device *ndev; |
| 3020 | int ret, devno; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3021 | |
| 3022 | /* get base addr */ |
| 3023 | res = platform_get_resource(pdev, IORESOURCE_MEM, 0); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3024 | |
| 3025 | ndev = alloc_etherdev(sizeof(struct sh_eth_private)); |
Laurent Pinchart | f738a13 | 2014-03-20 15:00:35 +0100 | [diff] [blame] | 3026 | if (!ndev) |
| 3027 | return -ENOMEM; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3028 | |
Ben Dooks | b5893a0 | 2014-03-21 12:09:14 +0100 | [diff] [blame] | 3029 | pm_runtime_enable(&pdev->dev); |
| 3030 | pm_runtime_get_sync(&pdev->dev); |
| 3031 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3032 | devno = pdev->id; |
| 3033 | if (devno < 0) |
| 3034 | devno = 0; |
| 3035 | |
roel kluin | cc3c080 | 2008-09-10 19:22:44 +0200 | [diff] [blame] | 3036 | ret = platform_get_irq(pdev, 0); |
Sergei Shtylyov | 7a468ac | 2015-08-28 16:56:01 +0300 | [diff] [blame] | 3037 | if (ret < 0) |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3038 | goto out_release; |
roel kluin | cc3c080 | 2008-09-10 19:22:44 +0200 | [diff] [blame] | 3039 | ndev->irq = ret; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3040 | |
| 3041 | SET_NETDEV_DEV(ndev, &pdev->dev); |
| 3042 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3043 | mdp = netdev_priv(ndev); |
Yoshihiro Shimoda | 525b807 | 2012-06-26 20:00:03 +0000 | [diff] [blame] | 3044 | mdp->num_tx_ring = TX_RING_SIZE; |
| 3045 | mdp->num_rx_ring = RX_RING_SIZE; |
Sergei Shtylyov | d5e07e6 | 2013-03-21 10:41:11 +0000 | [diff] [blame] | 3046 | mdp->addr = devm_ioremap_resource(&pdev->dev, res); |
| 3047 | if (IS_ERR(mdp->addr)) { |
| 3048 | ret = PTR_ERR(mdp->addr); |
Yoshihiro Shimoda | ae70644 | 2011-09-27 21:48:58 +0000 | [diff] [blame] | 3049 | goto out_release; |
| 3050 | } |
| 3051 | |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3052 | 	/* Get clock; if not found that's OK, but Wake-on-LAN is unavailable */ |
| 3053 | mdp->clk = devm_clk_get(&pdev->dev, NULL); |
| 3054 | if (IS_ERR(mdp->clk)) |
| 3055 | mdp->clk = NULL; |
| 3056 | |
Varka Bhadram | c960804 | 2014-10-24 07:42:09 +0530 | [diff] [blame] | 3057 | ndev->base_addr = res->start; |
| 3058 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3059 | spin_lock_init(&mdp->lock); |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3060 | mdp->pdev = pdev; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3061 | |
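| | 	/* When probed from a device tree, build the platform data from the |
| | 	 * node's properties instead of expecting board code to supply it. |
| | 	 */ |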
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 3062 | if (pdev->dev.of_node) |
| 3063 | pd = sh_eth_parse_dt(&pdev->dev); |
Sergei Shtylyov | 3b4c5cb | 2013-10-30 23:30:19 +0300 | [diff] [blame] | 3064 | if (!pd) { |
| 3065 | dev_err(&pdev->dev, "no platform data\n"); |
| 3066 | ret = -EINVAL; |
| 3067 | goto out_release; |
| 3068 | } |
| 3069 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3070 | 	/* get PHY ID and link properties from platform data */ |
Yoshinori Sato | 71557a3 | 2008-08-06 19:49:00 -0400 | [diff] [blame] | 3071 | mdp->phy_id = pd->phy; |
Yoshihiro Shimoda | e47c905 | 2011-03-07 21:59:45 +0000 | [diff] [blame] | 3072 | mdp->phy_interface = pd->phy_interface; |
Yoshihiro Shimoda | 4923576 | 2009-08-27 23:25:03 +0000 | [diff] [blame] | 3073 | mdp->no_ether_link = pd->no_ether_link; |
| 3074 | mdp->ether_link_active_low = pd->ether_link_active_low; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3075 | |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 3076 | /* set cpu data */ |
Wolfram Sang | 42a67c9 | 2016-03-01 17:37:59 +0100 | [diff] [blame] | 3077 | if (id) |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 3078 | mdp->cd = (struct sh_eth_cpu_data *)id->driver_data; |
Wolfram Sang | 42a67c9 | 2016-03-01 17:37:59 +0100 | [diff] [blame] | 3079 | else |
| 3080 | mdp->cd = (struct sh_eth_cpu_data *)of_device_get_match_data(&pdev->dev); |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 3081 | |
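| | 	/* Map the controller's register layout onto the offset table used |
| | 	 * by the register accessors; an unknown layout is a fatal error. |
| | 	 */ |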
Sergei Shtylyov | a3153d8 | 2013-08-18 03:11:28 +0400 | [diff] [blame] | 3082 | mdp->reg_offset = sh_eth_get_register_offset(mdp->cd->register_type); |
Sergei Shtylyov | 264be2f | 2014-03-15 03:11:24 +0300 | [diff] [blame] | 3083 | if (!mdp->reg_offset) { |
| 3084 | dev_err(&pdev->dev, "Unknown register type (%d)\n", |
| 3085 | mdp->cd->register_type); |
| 3086 | ret = -EINVAL; |
| 3087 | goto out_release; |
| 3088 | } |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 3089 | sh_eth_set_default_cpu_data(mdp->cd); |
| 3090 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3091 | 	/* set netdev and ethtool operations */ |
Sergei Shtylyov | 8f728d7 | 2013-06-13 00:55:34 +0400 | [diff] [blame] | 3092 | if (mdp->cd->tsu) |
| 3093 | ndev->netdev_ops = &sh_eth_netdev_ops_tsu; |
| 3094 | else |
| 3095 | ndev->netdev_ops = &sh_eth_netdev_ops; |
Wilfried Klaebe | 7ad24ea | 2014-05-11 00:12:32 +0000 | [diff] [blame] | 3096 | ndev->ethtool_ops = &sh_eth_ethtool_ops; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3097 | ndev->watchdog_timeo = TX_TIMEOUT; |
| 3098 | |
Nobuhiro Iwamatsu | dc19e4e | 2011-02-15 21:17:32 +0000 | [diff] [blame] | 3099 | /* debug message level */ |
| 3100 | mdp->msg_enable = SH_ETH_DEF_MSG_ENABLE; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3101 | |
| 3102 | /* read and set MAC address */ |
Magnus Damm | 748031f | 2009-10-09 00:17:14 +0000 | [diff] [blame] | 3103 | read_mac_address(ndev, pd->mac_addr); |
Sergei Shtylyov | ff6e722 | 2013-04-29 09:49:42 +0000 | [diff] [blame] | 3104 | if (!is_valid_ether_addr(ndev->dev_addr)) { |
| 3105 | dev_warn(&pdev->dev, |
| 3106 | "no valid MAC address supplied, using a random one.\n"); |
| 3107 | eth_hw_addr_random(ndev); |
| 3108 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3109 | |
Yoshihiro Shimoda | 6ba8802 | 2012-02-15 17:55:01 +0000 | [diff] [blame] | 3110 | /* ioremap the TSU registers */ |
| 3111 | if (mdp->cd->tsu) { |
| 3112 | struct resource *rtsu; |
| 3113 | rtsu = platform_get_resource(pdev, IORESOURCE_MEM, 1); |
Sergei Shtylyov | d5e07e6 | 2013-03-21 10:41:11 +0000 | [diff] [blame] | 3114 | mdp->tsu_addr = devm_ioremap_resource(&pdev->dev, rtsu); |
| 3115 | if (IS_ERR(mdp->tsu_addr)) { |
| 3116 | ret = PTR_ERR(mdp->tsu_addr); |
Sergei Shtylyov | fc0c090 | 2013-03-19 13:41:32 +0000 | [diff] [blame] | 3117 | goto out_release; |
| 3118 | } |
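| | 		/* The TSU block is shared by two ports; an even/odd device |
| | 		 * number selects which half of it this instance uses. |
| | 		 */ |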
Yoshihiro Shimoda | 6743fe6 | 2012-02-15 17:55:03 +0000 | [diff] [blame] | 3119 | mdp->port = devno % 2; |
Patrick McHardy | f646968 | 2013-04-19 02:04:27 +0000 | [diff] [blame] | 3120 | ndev->features = NETIF_F_HW_VLAN_CTAG_FILTER; |
Yoshihiro Shimoda | 6ba8802 | 2012-02-15 17:55:01 +0000 | [diff] [blame] | 3121 | } |
| 3122 | |
Yoshihiro Shimoda | 150647f | 2012-02-15 17:54:56 +0000 | [diff] [blame] | 3123 | 	/* initialize the first device, or any device whose platform data sets needs_init */ |
| 3124 | if (!devno || pd->needs_init) { |
Yoshihiro Shimoda | 380af9e | 2009-05-24 23:54:21 +0000 | [diff] [blame] | 3125 | if (mdp->cd->chip_reset) |
| 3126 | mdp->cd->chip_reset(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3127 | |
Yoshihiro Shimoda | 4986b99 | 2011-03-07 21:59:34 +0000 | [diff] [blame] | 3128 | if (mdp->cd->tsu) { |
| 3129 | 			/* TSU init (Init only) */ |
| 3130 | sh_eth_tsu_init(mdp); |
| 3131 | } |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3132 | } |
| 3133 | |
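| | 	/* Controllers that have an RMIIMODE register need it set before |
| | 	 * the MAC can be used with an RMII PHY. |
| | 	 */ |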
Hisashi Nakamura | 966d6db | 2014-11-13 15:54:05 +0900 | [diff] [blame] | 3134 | if (mdp->cd->rmiimode) |
| 3135 | sh_eth_write(ndev, 0x1, RMIIMODE); |
| 3136 | |
Laurent Pinchart | daacf03 | 2014-03-20 15:00:34 +0100 | [diff] [blame] | 3137 | /* MDIO bus init */ |
| 3138 | ret = sh_mdio_init(mdp, pd); |
| 3139 | if (ret) { |
| 3140 | dev_err(&ndev->dev, "failed to initialise MDIO\n"); |
| 3141 | goto out_release; |
| 3142 | } |
| 3143 | |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3144 | netif_napi_add(ndev, &mdp->napi, sh_eth_poll, 64); |
| 3145 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3146 | /* network device register */ |
| 3147 | ret = register_netdev(ndev); |
| 3148 | if (ret) |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3149 | goto out_napi_del; |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3150 | |
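| | 	/* MagicPacket Wake-on-LAN is only advertised if the controller |
| | 	 * supports it and a clock was found above, since the clock must |
| | 	 * stay enabled while waiting for the wakeup frame. |
| | 	 */ |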
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3151 | if (mdp->cd->magic && mdp->clk) |
| 3152 | device_set_wakeup_capable(&pdev->dev, 1); |
| 3153 | |
Lucas De Marchi | 25985ed | 2011-03-30 22:57:33 -0300 | [diff] [blame] | 3154 | /* print device information */ |
Sergei Shtylyov | f75f14e | 2014-03-15 03:27:54 +0300 | [diff] [blame] | 3155 | netdev_info(ndev, "Base address at 0x%x, %pM, IRQ %d.\n", |
| 3156 | (u32)ndev->base_addr, ndev->dev_addr, ndev->irq); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3157 | |
Ben Dooks | b5893a0 | 2014-03-21 12:09:14 +0100 | [diff] [blame] | 3158 | pm_runtime_put(&pdev->dev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3159 | platform_set_drvdata(pdev, ndev); |
| 3160 | |
| 3161 | return ret; |
| 3162 | |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3163 | out_napi_del: |
| 3164 | netif_napi_del(&mdp->napi); |
Laurent Pinchart | daacf03 | 2014-03-20 15:00:34 +0100 | [diff] [blame] | 3165 | sh_mdio_release(mdp); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3166 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3167 | out_release: |
| 3168 | 	/* free the net_device */ |
| 3169 | if (ndev) |
| 3170 | free_netdev(ndev); |
| 3171 | |
Ben Dooks | b5893a0 | 2014-03-21 12:09:14 +0100 | [diff] [blame] | 3172 | pm_runtime_put(&pdev->dev); |
| 3173 | pm_runtime_disable(&pdev->dev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3174 | return ret; |
| 3175 | } |
| 3176 | |
| 3177 | static int sh_eth_drv_remove(struct platform_device *pdev) |
| 3178 | { |
| 3179 | struct net_device *ndev = platform_get_drvdata(pdev); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3180 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3181 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3182 | unregister_netdev(ndev); |
Sergei Shtylyov | 3719109 | 2013-06-19 23:30:23 +0400 | [diff] [blame] | 3183 | netif_napi_del(&mdp->napi); |
Laurent Pinchart | daacf03 | 2014-03-20 15:00:34 +0100 | [diff] [blame] | 3184 | sh_mdio_release(mdp); |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3185 | pm_runtime_disable(&pdev->dev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3186 | free_netdev(ndev); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3187 | |
| 3188 | return 0; |
| 3189 | } |
| 3190 | |
Nobuhiro Iwamatsu | 540ad1b | 2013-06-06 09:52:37 +0000 | [diff] [blame] | 3191 | #ifdef CONFIG_PM |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3192 | #ifdef CONFIG_PM_SLEEP |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3193 | static int sh_eth_wol_setup(struct net_device *ndev) |
| 3194 | { |
| 3195 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 3196 | |
| 3197 | /* Only allow ECI interrupts */ |
| 3198 | synchronize_irq(ndev->irq); |
| 3199 | napi_disable(&mdp->napi); |
| 3200 | sh_eth_write(ndev, DMAC_M_ECI, EESIPR); |
| 3201 | |
| 3202 | /* Enable MagicPacket */ |
| 3203 | sh_eth_modify(ndev, ECMR, 0, ECMR_MPDE); |
| 3204 | |
| 3205 | 	/* Increase the clock usage count so the device won't be suspended */ |
| 3206 | clk_enable(mdp->clk); |
| 3207 | |
| 3208 | return enable_irq_wake(ndev->irq); |
| 3209 | } |
| 3210 | |
| 3211 | static int sh_eth_wol_restore(struct net_device *ndev) |
| 3212 | { |
| 3213 | struct sh_eth_private *mdp = netdev_priv(ndev); |
| 3214 | int ret; |
| 3215 | |
| 3216 | napi_enable(&mdp->napi); |
| 3217 | |
| 3218 | /* Disable MagicPacket */ |
| 3219 | sh_eth_modify(ndev, ECMR, ECMR_MPDE, 0); |
| 3220 | |
| 3221 | /* The device needs to be reset to restore MagicPacket logic |
| 3222 | * for next wakeup. If we close and open the device it will |
| 3223 | * both be reset and all registers restored. This is what |
| 3224 | * happens during suspend and resume without WoL enabled. |
| 3225 | */ |
| 3226 | ret = sh_eth_close(ndev); |
| 3227 | if (ret < 0) |
| 3228 | return ret; |
| 3229 | ret = sh_eth_open(ndev); |
| 3230 | if (ret < 0) |
| 3231 | return ret; |
| 3232 | |
| 3233 | /* Restore clock usage count */ |
| 3234 | clk_disable(mdp->clk); |
| 3235 | |
| 3236 | return disable_irq_wake(ndev->irq); |
| 3237 | } |
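| | /* Userspace arms MagicPacket wakeup with, e.g., "ethtool -s eth0 wol g"; |
| |  * that request is handled via the driver's ethtool WoL support, which |
| |  * sets mdp->wol_enabled so that the suspend path below calls |
| |  * sh_eth_wol_setup() instead of closing the device. |
| |  */ |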
| 3238 | |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3239 | static int sh_eth_suspend(struct device *dev) |
| 3240 | { |
| 3241 | struct net_device *ndev = dev_get_drvdata(dev); |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3242 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3243 | int ret = 0; |
| 3244 | |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3245 | if (!netif_running(ndev)) |
| 3246 | return 0; |
| 3247 | |
| 3248 | netif_device_detach(ndev); |
| 3249 | |
| 3250 | if (mdp->wol_enabled) |
| 3251 | ret = sh_eth_wol_setup(ndev); |
| 3252 | else |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3253 | ret = sh_eth_close(ndev); |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3254 | |
| 3255 | return ret; |
| 3256 | } |
| 3257 | |
| 3258 | static int sh_eth_resume(struct device *dev) |
| 3259 | { |
| 3260 | struct net_device *ndev = dev_get_drvdata(dev); |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3261 | struct sh_eth_private *mdp = netdev_priv(ndev); |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3262 | int ret = 0; |
| 3263 | |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3264 | if (!netif_running(ndev)) |
| 3265 | return 0; |
| 3266 | |
| 3267 | if (mdp->wol_enabled) |
| 3268 | ret = sh_eth_wol_restore(ndev); |
| 3269 | else |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3270 | ret = sh_eth_open(ndev); |
Niklas Söderlund | d8981d0 | 2017-01-09 16:34:05 +0100 | [diff] [blame] | 3271 | |
| 3272 | if (ret < 0) |
| 3273 | return ret; |
| 3274 | |
| 3275 | netif_device_attach(ndev); |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3276 | |
| 3277 | return ret; |
| 3278 | } |
| 3279 | #endif |
| 3280 | |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3281 | static int sh_eth_runtime_nop(struct device *dev) |
| 3282 | { |
Sergei Shtylyov | 128296f | 2014-01-03 15:52:22 +0300 | [diff] [blame] | 3283 | /* Runtime PM callback shared between ->runtime_suspend() |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3284 | * and ->runtime_resume(). Simply returns success. |
| 3285 | * |
| 3286 | * This driver re-initializes all registers after |
| 3287 | * pm_runtime_get_sync() anyway so there is no need |
| 3288 | * to save and restore registers here. |
| 3289 | */ |
| 3290 | return 0; |
| 3291 | } |
| 3292 | |
Nobuhiro Iwamatsu | 540ad1b | 2013-06-06 09:52:37 +0000 | [diff] [blame] | 3293 | static const struct dev_pm_ops sh_eth_dev_pm_ops = { |
Mikhail Ulyanov | b71af04 | 2015-01-22 01:19:48 +0300 | [diff] [blame] | 3294 | SET_SYSTEM_SLEEP_PM_OPS(sh_eth_suspend, sh_eth_resume) |
Mikhail Ulyanov | e7d7e89 | 2015-01-22 01:18:44 +0300 | [diff] [blame] | 3295 | SET_RUNTIME_PM_OPS(sh_eth_runtime_nop, sh_eth_runtime_nop, NULL) |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3296 | }; |
Nobuhiro Iwamatsu | 540ad1b | 2013-06-06 09:52:37 +0000 | [diff] [blame] | 3297 | #define SH_ETH_PM_OPS (&sh_eth_dev_pm_ops) |
| 3298 | #else |
| 3299 | #define SH_ETH_PM_OPS NULL |
| 3300 | #endif |
Magnus Damm | bcd5149 | 2009-10-09 00:20:04 +0000 | [diff] [blame] | 3301 | |
Sergei Shtylyov | afe391a | 2013-06-07 13:54:02 +0000 | [diff] [blame] | 3302 | static struct platform_device_id sh_eth_id_table[] = { |
Sergei Shtylyov | c18a79a | 2013-06-07 13:56:05 +0000 | [diff] [blame] | 3303 | { "sh7619-ether", (kernel_ulong_t)&sh7619_data }, |
Sergei Shtylyov | 7bbe150 | 2013-06-07 13:55:08 +0000 | [diff] [blame] | 3304 | { "sh771x-ether", (kernel_ulong_t)&sh771x_data }, |
Sergei Shtylyov | 9c3beaa | 2013-06-07 14:03:37 +0000 | [diff] [blame] | 3305 | { "sh7724-ether", (kernel_ulong_t)&sh7724_data }, |
Sergei Shtylyov | f5d1276 | 2013-06-07 13:58:18 +0000 | [diff] [blame] | 3306 | { "sh7734-gether", (kernel_ulong_t)&sh7734_data }, |
Sergei Shtylyov | 24549e2 | 2013-06-07 13:59:21 +0000 | [diff] [blame] | 3307 | { "sh7757-ether", (kernel_ulong_t)&sh7757_data }, |
| 3308 | { "sh7757-gether", (kernel_ulong_t)&sh7757_data_giga }, |
Sergei Shtylyov | f5d1276 | 2013-06-07 13:58:18 +0000 | [diff] [blame] | 3309 | { "sh7763-gether", (kernel_ulong_t)&sh7763_data }, |
Sergei Shtylyov | afe391a | 2013-06-07 13:54:02 +0000 | [diff] [blame] | 3310 | { } |
| 3311 | }; |
| 3312 | MODULE_DEVICE_TABLE(platform, sh_eth_id_table); |
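| | /* For reference, a minimal sketch of how a non-DT board might bind to one |
| |  * of the IDs above.  The device name choice, PHY address and interface |
| |  * mode are hypothetical; the fields shown are the ones sh_eth_drv_probe() |
| |  * reads from platform data: |
| |  * |
| |  *	static struct sh_eth_plat_data sh7757_eth_pdata = { |
| |  *		.phy		= 0x01, |
| |  *		.phy_interface	= PHY_INTERFACE_MODE_MII, |
| |  *	}; |
| |  * |
| |  * registered as a "sh7757-ether" platform device together with its MMIO |
| |  * and IRQ resources. |
| |  */ |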
| 3313 | |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3314 | static struct platform_driver sh_eth_driver = { |
| 3315 | .probe = sh_eth_drv_probe, |
| 3316 | .remove = sh_eth_drv_remove, |
Sergei Shtylyov | afe391a | 2013-06-07 13:54:02 +0000 | [diff] [blame] | 3317 | .id_table = sh_eth_id_table, |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3318 | .driver = { |
| 3319 | .name = CARDNAME, |
Nobuhiro Iwamatsu | 540ad1b | 2013-06-06 09:52:37 +0000 | [diff] [blame] | 3320 | .pm = SH_ETH_PM_OPS, |
Sergei Shtylyov | b356e97 | 2014-02-18 03:12:43 +0300 | [diff] [blame] | 3321 | .of_match_table = of_match_ptr(sh_eth_match_table), |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3322 | }, |
| 3323 | }; |
| 3324 | |
Axel Lin | db62f68 | 2011-11-27 16:44:17 +0000 | [diff] [blame] | 3325 | module_platform_driver(sh_eth_driver); |
Nobuhiro Iwamatsu | 86a74ff | 2008-06-09 16:33:56 -0700 | [diff] [blame] | 3326 | |
| 3327 | MODULE_AUTHOR("Nobuhiro Iwamatsu, Yoshihiro Shimoda"); |
| 3328 | MODULE_DESCRIPTION("Renesas SuperH Ethernet driver"); |
| 3329 | MODULE_LICENSE("GPL v2"); |