Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 1 | /* |
| 2 | * drivers/mtd/devices/goldfish_nand.c |
| 3 | * |
| 4 | * Copyright (C) 2007 Google, Inc. |
| 5 | * Copyright (C) 2012 Intel, Inc. |
| 6 | * Copyright (C) 2013 Intel, Inc. |
| 7 | * |
| 8 | * This software is licensed under the terms of the GNU General Public |
| 9 | * License version 2, as published by the Free Software Foundation, and |
| 10 | * may be copied, distributed, and modified under those terms. |
| 11 | * |
| 12 | * This program is distributed in the hope that it will be useful, |
| 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 15 | * GNU General Public License for more details. |
| 16 | * |
| 17 | */ |
| 18 | |
| 19 | #include <linux/io.h> |
| 20 | #include <linux/device.h> |
| 21 | #include <linux/module.h> |
| 22 | #include <linux/slab.h> |
| 23 | #include <linux/ioport.h> |
| 24 | #include <linux/vmalloc.h> |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 25 | #include <linux/mtd/mtd.h> |
| 26 | #include <linux/platform_device.h> |
Kristina Martšenko | 67c20cf | 2014-03-25 01:45:09 +0200 | [diff] [blame] | 27 | #include <linux/mutex.h> |
Alan | f627971 | 2014-05-12 16:56:56 +0100 | [diff] [blame] | 28 | #include <linux/goldfish.h> |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 29 | #include <asm/div64.h> |
Shraddha Barke | 3e2fbc7 | 2016-01-21 02:38:53 +0530 | [diff] [blame] | 30 | #include <linux/dma-mapping.h> |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 31 | |
| 32 | #include "goldfish_nand_reg.h" |
| 33 | |
| 34 | struct goldfish_nand { |
Loic Pefferkorn | 2c50741 | 2014-09-03 22:23:14 +0200 | [diff] [blame] | 35 | /* lock protects access to the device registers */ |
Kristina Martšenko | 67c20cf | 2014-03-25 01:45:09 +0200 | [diff] [blame] | 36 | struct mutex lock; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 37 | unsigned char __iomem *base; |
| 38 | struct cmd_params *cmd_params; |
| 39 | size_t mtd_count; |
| 40 | struct mtd_info mtd[0]; |
| 41 | }; |
| 42 | |
/*
 * Issue a NAND command via the shared cmd_params block instead of
 * programming each address/size register individually.
 *
 * Returns 0 on success with the device's result register value in *rv,
 * or (u32)-1 when no parameter block is available or the command has no
 * *_WITH_PARAMS variant, in which case the caller falls back to the
 * register-by-register path.  Called with nand->lock held (the only
 * caller, goldfish_nand_cmd, takes it).
 */
static u32 goldfish_nand_cmd_with_params(struct mtd_info *mtd,
					 enum nand_cmd cmd, u64 addr, u32 len,
					 void *ptr, u32 *rv)
{
	u32 cmdp;
	struct goldfish_nand *nand = mtd->priv;
	struct cmd_params *cps = nand->cmd_params;
	unsigned char __iomem *base = nand->base;

	/*
	 * cmd_params is only set up when the device advertises
	 * NAND_DEV_FLAG_CMD_PARAMS_CAP and the DMA allocation succeeded.
	 */
	if (!cps)
		return -1;

	switch (cmd) {
	case NAND_CMD_ERASE:
		cmdp = NAND_CMD_ERASE_WITH_PARAMS;
		break;
	case NAND_CMD_READ:
		cmdp = NAND_CMD_READ_WITH_PARAMS;
		break;
	case NAND_CMD_WRITE:
		cmdp = NAND_CMD_WRITE_WITH_PARAMS;
		break;
	default:
		/* Other commands have no parameter-block variant. */
		return -1;
	}
	/* Fill the shared parameter block that the emulated device reads. */
	cps->dev = mtd - nand->mtd;	/* device index within nand->mtd[] */
	cps->addr_high = (u32)(addr >> 32);
	cps->addr_low = (u32)addr;
	cps->transfer_size = len;
	cps->data = (unsigned long)ptr;
	/* A single command-register write triggers the whole transfer. */
	writel(cmdp, base + NAND_COMMAND);
	/* The device writes its completion status back into the block. */
	*rv = cps->result;
	return 0;
}
| 77 | |
/*
 * Execute a NAND command on the emulated device, serialized by nand->lock.
 * Prefers the cmd_params fast path; when that is unavailable it programs
 * the individual device/address/size/data registers and triggers the
 * command.  Returns the device's result register value (typically the
 * byte count transferred).
 */
static u32 goldfish_nand_cmd(struct mtd_info *mtd, enum nand_cmd cmd,
			     u64 addr, u32 len, void *ptr)
{
	struct goldfish_nand *nand = mtd->priv;
	u32 rv;
	unsigned char __iomem *base = nand->base;

	mutex_lock(&nand->lock);
	if (goldfish_nand_cmd_with_params(mtd, cmd, addr, len, ptr, &rv)) {
		/* No parameter block: program the registers one by one. */
		writel(mtd - nand->mtd, base + NAND_DEV);
		writel((u32)(addr >> 32), base + NAND_ADDR_HIGH);
		writel((u32)addr, base + NAND_ADDR_LOW);
		writel(len, base + NAND_TRANSFER_SIZE);
		gf_write_ptr(ptr, base + NAND_DATA, base + NAND_DATA_HIGH);
		/* Writing the command register starts the operation. */
		writel(cmd, base + NAND_COMMAND);
		rv = readl(base + NAND_RESULT);
	}
	mutex_unlock(&nand->lock);
	return rv;
}
| 98 | |
| 99 | static int goldfish_nand_erase(struct mtd_info *mtd, struct erase_info *instr) |
| 100 | { |
| 101 | loff_t ofs = instr->addr; |
| 102 | u32 len = instr->len; |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 103 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 104 | |
| 105 | if (ofs + len > mtd->size) |
| 106 | goto invalid_arg; |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 107 | ofs = div_s64_rem(ofs, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 108 | if (rem) |
| 109 | goto invalid_arg; |
| 110 | ofs *= (mtd->writesize + mtd->oobsize); |
| 111 | |
| 112 | if (len % mtd->writesize) |
| 113 | goto invalid_arg; |
| 114 | len = len / mtd->writesize * (mtd->writesize + mtd->oobsize); |
| 115 | |
| 116 | if (goldfish_nand_cmd(mtd, NAND_CMD_ERASE, ofs, len, NULL) != len) { |
| 117 | pr_err("goldfish_nand_erase: erase failed, start %llx, len %x, dev_size %llx, erase_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 118 | ofs, len, mtd->size, mtd->erasesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 119 | return -EIO; |
| 120 | } |
| 121 | |
| 122 | instr->state = MTD_ERASE_DONE; |
| 123 | mtd_erase_callback(instr); |
| 124 | |
| 125 | return 0; |
| 126 | |
| 127 | invalid_arg: |
| 128 | pr_err("goldfish_nand_erase: invalid erase, start %llx, len %x, dev_size %llx, erase_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 129 | ofs, len, mtd->size, mtd->erasesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 130 | return -EINVAL; |
| 131 | } |
| 132 | |
| 133 | static int goldfish_nand_read_oob(struct mtd_info *mtd, loff_t ofs, |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 134 | struct mtd_oob_ops *ops) |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 135 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 136 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 137 | |
| 138 | if (ofs + ops->len > mtd->size) |
| 139 | goto invalid_arg; |
| 140 | if (ops->datbuf && ops->len && ops->len != mtd->writesize) |
| 141 | goto invalid_arg; |
| 142 | if (ops->ooblen + ops->ooboffs > mtd->oobsize) |
| 143 | goto invalid_arg; |
| 144 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 145 | ofs = div_s64_rem(ofs, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 146 | if (rem) |
| 147 | goto invalid_arg; |
| 148 | ofs *= (mtd->writesize + mtd->oobsize); |
| 149 | |
| 150 | if (ops->datbuf) |
| 151 | ops->retlen = goldfish_nand_cmd(mtd, NAND_CMD_READ, ofs, |
| 152 | ops->len, ops->datbuf); |
| 153 | ofs += mtd->writesize + ops->ooboffs; |
| 154 | if (ops->oobbuf) |
| 155 | ops->oobretlen = goldfish_nand_cmd(mtd, NAND_CMD_READ, ofs, |
| 156 | ops->ooblen, ops->oobbuf); |
| 157 | return 0; |
| 158 | |
| 159 | invalid_arg: |
Peter Huewe | b4fcf48 | 2013-02-07 23:57:07 +0100 | [diff] [blame] | 160 | pr_err("goldfish_nand_read_oob: invalid read, start %llx, len %zx, ooblen %zx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 161 | ofs, ops->len, ops->ooblen, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 162 | return -EINVAL; |
| 163 | } |
| 164 | |
| 165 | static int goldfish_nand_write_oob(struct mtd_info *mtd, loff_t ofs, |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 166 | struct mtd_oob_ops *ops) |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 167 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 168 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 169 | |
| 170 | if (ofs + ops->len > mtd->size) |
| 171 | goto invalid_arg; |
| 172 | if (ops->len && ops->len != mtd->writesize) |
| 173 | goto invalid_arg; |
| 174 | if (ops->ooblen + ops->ooboffs > mtd->oobsize) |
| 175 | goto invalid_arg; |
| 176 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 177 | ofs = div_s64_rem(ofs, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 178 | if (rem) |
| 179 | goto invalid_arg; |
| 180 | ofs *= (mtd->writesize + mtd->oobsize); |
| 181 | |
| 182 | if (ops->datbuf) |
| 183 | ops->retlen = goldfish_nand_cmd(mtd, NAND_CMD_WRITE, ofs, |
| 184 | ops->len, ops->datbuf); |
| 185 | ofs += mtd->writesize + ops->ooboffs; |
| 186 | if (ops->oobbuf) |
| 187 | ops->oobretlen = goldfish_nand_cmd(mtd, NAND_CMD_WRITE, ofs, |
| 188 | ops->ooblen, ops->oobbuf); |
| 189 | return 0; |
| 190 | |
| 191 | invalid_arg: |
Peter Huewe | b4fcf48 | 2013-02-07 23:57:07 +0100 | [diff] [blame] | 192 | pr_err("goldfish_nand_write_oob: invalid write, start %llx, len %zx, ooblen %zx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 193 | ofs, ops->len, ops->ooblen, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 194 | return -EINVAL; |
| 195 | } |
| 196 | |
| 197 | static int goldfish_nand_read(struct mtd_info *mtd, loff_t from, size_t len, |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 198 | size_t *retlen, u_char *buf) |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 199 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 200 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 201 | |
| 202 | if (from + len > mtd->size) |
| 203 | goto invalid_arg; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 204 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 205 | from = div_s64_rem(from, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 206 | if (rem) |
| 207 | goto invalid_arg; |
| 208 | from *= (mtd->writesize + mtd->oobsize); |
| 209 | |
| 210 | *retlen = goldfish_nand_cmd(mtd, NAND_CMD_READ, from, len, buf); |
| 211 | return 0; |
| 212 | |
| 213 | invalid_arg: |
Peter Huewe | b4fcf48 | 2013-02-07 23:57:07 +0100 | [diff] [blame] | 214 | pr_err("goldfish_nand_read: invalid read, start %llx, len %zx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 215 | from, len, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 216 | return -EINVAL; |
| 217 | } |
| 218 | |
| 219 | static int goldfish_nand_write(struct mtd_info *mtd, loff_t to, size_t len, |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 220 | size_t *retlen, const u_char *buf) |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 221 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 222 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 223 | |
| 224 | if (to + len > mtd->size) |
| 225 | goto invalid_arg; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 226 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 227 | to = div_s64_rem(to, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 228 | if (rem) |
| 229 | goto invalid_arg; |
| 230 | to *= (mtd->writesize + mtd->oobsize); |
| 231 | |
| 232 | *retlen = goldfish_nand_cmd(mtd, NAND_CMD_WRITE, to, len, (void *)buf); |
| 233 | return 0; |
| 234 | |
| 235 | invalid_arg: |
Peter Huewe | b4fcf48 | 2013-02-07 23:57:07 +0100 | [diff] [blame] | 236 | pr_err("goldfish_nand_write: invalid write, start %llx, len %zx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 237 | to, len, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 238 | return -EINVAL; |
| 239 | } |
| 240 | |
| 241 | static int goldfish_nand_block_isbad(struct mtd_info *mtd, loff_t ofs) |
| 242 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 243 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 244 | |
| 245 | if (ofs >= mtd->size) |
| 246 | goto invalid_arg; |
| 247 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 248 | ofs = div_s64_rem(ofs, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 249 | if (rem) |
| 250 | goto invalid_arg; |
| 251 | ofs *= mtd->erasesize / mtd->writesize; |
| 252 | ofs *= (mtd->writesize + mtd->oobsize); |
| 253 | |
| 254 | return goldfish_nand_cmd(mtd, NAND_CMD_BLOCK_BAD_GET, ofs, 0, NULL); |
| 255 | |
| 256 | invalid_arg: |
| 257 | pr_err("goldfish_nand_block_isbad: invalid arg, ofs %llx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 258 | ofs, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 259 | return -EINVAL; |
| 260 | } |
| 261 | |
| 262 | static int goldfish_nand_block_markbad(struct mtd_info *mtd, loff_t ofs) |
| 263 | { |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 264 | s32 rem; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 265 | |
| 266 | if (ofs >= mtd->size) |
| 267 | goto invalid_arg; |
| 268 | |
Arnd Bergmann | b0e302b | 2016-02-01 11:33:00 +0100 | [diff] [blame] | 269 | ofs = div_s64_rem(ofs, mtd->writesize, &rem); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 270 | if (rem) |
| 271 | goto invalid_arg; |
| 272 | ofs *= mtd->erasesize / mtd->writesize; |
| 273 | ofs *= (mtd->writesize + mtd->oobsize); |
| 274 | |
| 275 | if (goldfish_nand_cmd(mtd, NAND_CMD_BLOCK_BAD_SET, ofs, 0, NULL) != 1) |
| 276 | return -EIO; |
| 277 | return 0; |
| 278 | |
| 279 | invalid_arg: |
| 280 | pr_err("goldfish_nand_block_markbad: invalid arg, ofs %llx, dev_size %llx, write_size %x\n", |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 281 | ofs, mtd->size, mtd->writesize); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 282 | return -EINVAL; |
| 283 | } |
| 284 | |
| 285 | static int nand_setup_cmd_params(struct platform_device *pdev, |
Loic Pefferkorn | 8f52e26 | 2014-09-03 22:23:13 +0200 | [diff] [blame] | 286 | struct goldfish_nand *nand) |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 287 | { |
Shraddha Barke | 3e2fbc7 | 2016-01-21 02:38:53 +0530 | [diff] [blame] | 288 | dma_addr_t dma_handle; |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 289 | unsigned char __iomem *base = nand->base; |
| 290 | |
Shraddha Barke | 3e2fbc7 | 2016-01-21 02:38:53 +0530 | [diff] [blame] | 291 | nand->cmd_params = dmam_alloc_coherent(&pdev->dev, |
| 292 | sizeof(struct cmd_params), |
| 293 | &dma_handle, GFP_KERNEL); |
| 294 | if (!nand->cmd_params) { |
| 295 | dev_err(&pdev->dev, "allocate buffer failed\n"); |
| 296 | return -ENOMEM; |
| 297 | } |
| 298 | writel((u32)((u64)dma_handle >> 32), base + NAND_CMD_PARAMS_ADDR_HIGH); |
| 299 | writel((u32)dma_handle, base + NAND_CMD_PARAMS_ADDR_LOW); |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 300 | return 0; |
| 301 | } |
| 302 | |
/*
 * Initialize NAND device 'id': read its geometry and name from the device
 * registers, fill in the corresponding mtd_info, and register it with the
 * MTD core.  Returns 0 on success or a negative errno.
 */
static int goldfish_nand_init_device(struct platform_device *pdev,
				     struct goldfish_nand *nand, int id)
{
	u32 name_len;
	u32 result;
	u32 flags;
	unsigned char __iomem *base = nand->base;
	struct mtd_info *mtd = &nand->mtd[id];
	char *name;

	/* Registers are shared across devices: select dev 'id' under lock. */
	mutex_lock(&nand->lock);
	writel(id, base + NAND_DEV);
	flags = readl(base + NAND_DEV_FLAGS);
	name_len = readl(base + NAND_DEV_NAME_LEN);
	mtd->writesize = readl(base + NAND_DEV_PAGE_SIZE);
	mtd->size = readl(base + NAND_DEV_SIZE_LOW);
	mtd->size |= (u64)readl(base + NAND_DEV_SIZE_HIGH) << 32;
	mtd->oobsize = readl(base + NAND_DEV_EXTRA_SIZE);
	mtd->oobavail = mtd->oobsize;
	/*
	 * The device reports raw sizes that include the per-page OOB area;
	 * convert both erasesize and size to data-only byte counts.
	 */
	mtd->erasesize = readl(base + NAND_DEV_ERASE_SIZE) /
			(mtd->writesize + mtd->oobsize) * mtd->writesize;
	mtd->size = div_s64(mtd->size, mtd->writesize + mtd->oobsize);
	mtd->size *= mtd->writesize;
	dev_dbg(&pdev->dev,
		"goldfish nand dev%d: size %llx, page %d, extra %d, erase %d\n",
		id, mtd->size, mtd->writesize,
		mtd->oobsize, mtd->erasesize);
	mutex_unlock(&nand->lock);

	mtd->priv = nand;

	/* +1 for the NUL terminator appended after the name is fetched. */
	name = devm_kzalloc(&pdev->dev, name_len + 1, GFP_KERNEL);
	if (!name)
		return -ENOMEM;
	mtd->name = name;

	result = goldfish_nand_cmd(mtd, NAND_CMD_GET_DEV_NAME, 0, name_len,
				   name);
	if (result != name_len) {
		dev_err(&pdev->dev,
			"goldfish_nand_init_device failed to get dev name %d != %d\n",
			result, name_len);
		return -ENODEV;
	}
	((char *)mtd->name)[name_len] = '\0';

	/* Setup the MTD structure */
	mtd->type = MTD_NANDFLASH;
	mtd->flags = MTD_CAP_NANDFLASH;
	if (flags & NAND_DEV_FLAG_READ_ONLY)
		mtd->flags &= ~MTD_WRITEABLE;
	/*
	 * Best-effort: if this fails, cmd_params stays NULL and every
	 * command falls back to per-register programming, so the return
	 * value is deliberately ignored.
	 */
	if (flags & NAND_DEV_FLAG_CMD_PARAMS_CAP)
		nand_setup_cmd_params(pdev, nand);

	mtd->owner = THIS_MODULE;
	mtd->_erase = goldfish_nand_erase;
	mtd->_read = goldfish_nand_read;
	mtd->_write = goldfish_nand_write;
	mtd->_read_oob = goldfish_nand_read_oob;
	mtd->_write_oob = goldfish_nand_write_oob;
	mtd->_block_isbad = goldfish_nand_block_isbad;
	mtd->_block_markbad = goldfish_nand_block_markbad;

	if (mtd_device_register(mtd, NULL, 0))
		return -EIO;

	return 0;
}
| 371 | |
/*
 * Platform probe: map the controller registers, verify the device
 * protocol version, then initialize one MTD per reported NAND device.
 * The probe succeeds if at least one device initializes.
 */
static int goldfish_nand_probe(struct platform_device *pdev)
{
	u32 num_dev;
	int i;
	int err;
	u32 num_dev_working;
	u32 version;
	struct resource *r;
	struct goldfish_nand *nand;
	unsigned char __iomem *base;

	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!r)
		return -ENODEV;

	/* The register window fits in a single page. */
	base = devm_ioremap(&pdev->dev, r->start, PAGE_SIZE);
	if (!base)
		return -ENOMEM;

	version = readl(base + NAND_VERSION);
	if (version != NAND_VERSION_CURRENT) {
		dev_err(&pdev->dev,
			"goldfish_nand_init: version mismatch, got %d, expected %d\n",
			version, NAND_VERSION_CURRENT);
		return -ENODEV;
	}
	num_dev = readl(base + NAND_NUM_DEV);
	if (num_dev == 0)
		return -ENODEV;

	/* Single allocation: struct plus one trailing mtd_info per device. */
	nand = devm_kzalloc(&pdev->dev, sizeof(*nand) +
			    sizeof(struct mtd_info) * num_dev, GFP_KERNEL);
	if (!nand)
		return -ENOMEM;

	mutex_init(&nand->lock);
	nand->base = base;
	nand->mtd_count = num_dev;
	platform_set_drvdata(pdev, nand);

	num_dev_working = 0;
	for (i = 0; i < num_dev; i++) {
		err = goldfish_nand_init_device(pdev, nand, i);
		if (err == 0)
			num_dev_working++;
	}
	/* Fail only if no device came up at all. */
	if (num_dev_working == 0)
		return -ENODEV;
	return 0;
}
| 422 | |
| 423 | static int goldfish_nand_remove(struct platform_device *pdev) |
| 424 | { |
| 425 | struct goldfish_nand *nand = platform_get_drvdata(pdev); |
| 426 | int i; |
Garret Kelly | ef32381 | 2014-04-06 23:47:31 -0400 | [diff] [blame] | 427 | |
Arve Hjønnevåg | 8e404ff | 2013-01-24 17:50:00 +0000 | [diff] [blame] | 428 | for (i = 0; i < nand->mtd_count; i++) { |
| 429 | if (nand->mtd[i].name) |
| 430 | mtd_device_unregister(&nand->mtd[i]); |
| 431 | } |
| 432 | return 0; |
| 433 | } |
| 434 | |
/* Binds to the "goldfish_nand" platform device created by the emulator. */
static struct platform_driver goldfish_nand_driver = {
	.probe = goldfish_nand_probe,
	.remove = goldfish_nand_remove,
	.driver = {
		.name = "goldfish_nand"
	}
};

module_platform_driver(goldfish_nand_driver);
MODULE_LICENSE("GPL");