ramips: move mt7621_nand driver to files
target/linux/ramips/files/drivers/mtd/nand/raw/mt7621_nand.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * MediaTek MT7621 NAND Flash Controller driver
4 *
5 * Copyright (C) 2020 MediaTek Inc. All Rights Reserved.
6 *
7 * Author: Weijie Gao <weijie.gao@mediatek.com>
8 */
9
10 #include <linux/io.h>
11 #include <linux/clk.h>
12 #include <linux/init.h>
13 #include <linux/errno.h>
14 #include <linux/sizes.h>
15 #include <linux/iopoll.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/mtd/mtd.h>
19 #include <linux/mtd/rawnand.h>
20 #include <linux/mtd/partitions.h>
21 #include <linux/platform_device.h>
22 #include <asm/addrspace.h>
23
24 /* NFI core registers */
25 #define NFI_CNFG 0x000
26 #define CNFG_OP_MODE_S 12
27 #define CNFG_OP_MODE_M GENMASK(14, 12)
28 #define CNFG_OP_CUSTOM 6
29 #define CNFG_AUTO_FMT_EN BIT(9)
30 #define CNFG_HW_ECC_EN BIT(8)
31 #define CNFG_BYTE_RW BIT(6)
32 #define CNFG_READ_MODE BIT(1)
33
34 #define NFI_PAGEFMT 0x004
35 #define PAGEFMT_FDM_ECC_S 12
36 #define PAGEFMT_FDM_ECC_M GENMASK(15, 12)
37 #define PAGEFMT_FDM_S 8
38 #define PAGEFMT_FDM_M GENMASK(11, 8)
39 #define PAGEFMT_SPARE_S 4
40 #define PAGEFMT_SPARE_M GENMASK(5, 4)
41 #define PAGEFMT_PAGE_S 0
42 #define PAGEFMT_PAGE_M GENMASK(1, 0)
43
44 #define NFI_CON 0x008
45 #define CON_NFI_SEC_S 12
46 #define CON_NFI_SEC_M GENMASK(15, 12)
47 #define CON_NFI_BWR BIT(9)
48 #define CON_NFI_BRD BIT(8)
49 #define CON_NFI_RST BIT(1)
50 #define CON_FIFO_FLUSH BIT(0)
51
52 #define NFI_ACCCON 0x00c
53 #define ACCCON_POECS_S 28
54 #define ACCCON_POECS_MAX 0x0f
55 #define ACCCON_POECS_DEF 3
56 #define ACCCON_PRECS_S 22
57 #define ACCCON_PRECS_MAX 0x3f
58 #define ACCCON_PRECS_DEF 3
59 #define ACCCON_C2R_S 16
60 #define ACCCON_C2R_MAX 0x3f
61 #define ACCCON_C2R_DEF 7
62 #define ACCCON_W2R_S 12
63 #define ACCCON_W2R_MAX 0x0f
64 #define ACCCON_W2R_DEF 7
65 #define ACCCON_WH_S 8
66 #define ACCCON_WH_MAX 0x0f
67 #define ACCCON_WH_DEF 15
68 #define ACCCON_WST_S 4
69 #define ACCCON_WST_MAX 0x0f
70 #define ACCCON_WST_DEF 15
71 #define ACCCON_WST_MIN 3
72 #define ACCCON_RLT_S 0
73 #define ACCCON_RLT_MAX 0x0f
74 #define ACCCON_RLT_DEF 15
75 #define ACCCON_RLT_MIN 3
76
77 #define NFI_CMD 0x020
78
79 #define NFI_ADDRNOB 0x030
80 #define ADDR_ROW_NOB_S 4
81 #define ADDR_ROW_NOB_M GENMASK(6, 4)
82 #define ADDR_COL_NOB_S 0
83 #define ADDR_COL_NOB_M GENMASK(2, 0)
84
85 #define NFI_COLADDR 0x034
86 #define NFI_ROWADDR 0x038
87
88 #define NFI_STRDATA 0x040
89 #define STR_DATA BIT(0)
90
91 #define NFI_CNRNB 0x044
92 #define CB2R_TIME_S 4
93 #define CB2R_TIME_M GENMASK(7, 4)
94 #define STR_CNRNB BIT(0)
95
96 #define NFI_DATAW 0x050
97 #define NFI_DATAR 0x054
98
99 #define NFI_PIO_DIRDY 0x058
100 #define PIO_DIRDY BIT(0)
101
102 #define NFI_STA 0x060
103 #define STA_NFI_FSM_S 16
104 #define STA_NFI_FSM_M GENMASK(19, 16)
105 #define STA_FSM_CUSTOM_DATA 14
106 #define STA_BUSY BIT(8)
107 #define STA_ADDR BIT(1)
108 #define STA_CMD BIT(0)
109
110 #define NFI_ADDRCNTR 0x070
111 #define SEC_CNTR_S 12
112 #define SEC_CNTR_M GENMASK(15, 12)
113 #define SEC_ADDR_S 0
114 #define SEC_ADDR_M GENMASK(9, 0)
115
116 #define NFI_CSEL 0x090
117 #define CSEL_S 0
118 #define CSEL_M GENMASK(1, 0)
119
120 #define NFI_FDM0L 0x0a0
121 #define NFI_FDML(n) (0x0a0 + ((n) << 3))
122
123 #define NFI_FDM0M 0x0a4
124 #define NFI_FDMM(n) (0x0a4 + ((n) << 3))
125
126 #define NFI_MASTER_STA 0x210
127 #define MAS_ADDR GENMASK(11, 9)
128 #define MAS_RD GENMASK(8, 6)
129 #define MAS_WR GENMASK(5, 3)
130 #define MAS_RDDLY GENMASK(2, 0)
131
132 /* ECC engine registers */
133 #define ECC_ENCCON 0x000
134 #define ENC_EN BIT(0)
135
136 #define ECC_ENCCNFG 0x004
137 #define ENC_CNFG_MSG_S 16
138 #define ENC_CNFG_MSG_M GENMASK(28, 16)
139 #define ENC_MODE_S 4
140 #define ENC_MODE_M GENMASK(5, 4)
141 #define ENC_MODE_NFI 1
142 #define ENC_TNUM_S 0
143 #define ENC_TNUM_M GENMASK(2, 0)
144
145 #define ECC_ENCIDLE 0x00c
146 #define ENC_IDLE BIT(0)
147
148 #define ECC_DECCON 0x100
149 #define DEC_EN BIT(0)
150
151 #define ECC_DECCNFG 0x104
152 #define DEC_EMPTY_EN BIT(31)
153 #define DEC_CS_S 16
154 #define DEC_CS_M GENMASK(28, 16)
155 #define DEC_CON_S 12
156 #define DEC_CON_M GENMASK(13, 12)
157 #define DEC_CON_EL 2
158 #define DEC_MODE_S 4
159 #define DEC_MODE_M GENMASK(5, 4)
160 #define DEC_MODE_NFI 1
161 #define DEC_TNUM_S 0
162 #define DEC_TNUM_M GENMASK(2, 0)
163
164 #define ECC_DECIDLE 0x10c
165 #define DEC_IDLE BIT(1)
166
167 #define ECC_DECENUM 0x114
168 #define ERRNUM_S 2
169 #define ERRNUM_M GENMASK(3, 0)
170
171 #define ECC_DECDONE 0x118
172 #define DEC_DONE7 BIT(7)
173 #define DEC_DONE6 BIT(6)
174 #define DEC_DONE5 BIT(5)
175 #define DEC_DONE4 BIT(4)
176 #define DEC_DONE3 BIT(3)
177 #define DEC_DONE2 BIT(2)
178 #define DEC_DONE1 BIT(1)
179 #define DEC_DONE0 BIT(0)
180
181 #define ECC_DECEL(n) (0x11c + (n) * 4)
182 #define DEC_EL_ODD_S 16
183 #define DEC_EL_EVEN_S 0
184 #define DEC_EL_M 0x1fff
185 #define DEC_EL_BYTE_POS_S 3
186 #define DEC_EL_BIT_POS_M GENMASK(2, 0)
187
188 #define ECC_FDMADDR 0x13c
189
190 /* ENCIDLE and DECIDLE */
191 #define ECC_IDLE BIT(0)
192
193 #define ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt) \
194 ((tpoecs) << ACCCON_POECS_S | (tprecs) << ACCCON_PRECS_S | \
195 (tc2r) << ACCCON_C2R_S | (tw2r) << ACCCON_W2R_S | \
196 (twh) << ACCCON_WH_S | (twst) << ACCCON_WST_S | (trlt))
197
198 #define MASTER_STA_MASK (MAS_ADDR | MAS_RD | MAS_WR | \
199 MAS_RDDLY)
200 #define NFI_RESET_TIMEOUT 1000000
201 #define NFI_CORE_TIMEOUT 500000
202 #define ECC_ENGINE_TIMEOUT 500000
203
204 #define ECC_SECTOR_SIZE 512
205 #define ECC_PARITY_BITS 13
206
207 #define NFI_FDM_SIZE 8
208
209 #define MT7621_NFC_NAME "mt7621-nand"
210
211 struct mt7621_nfc {
212 struct nand_controller controller;
213 struct nand_chip nand;
214 struct clk *nfi_clk;
215 struct device *dev;
216
217 void __iomem *nfi_regs;
218 void __iomem *ecc_regs;
219
220 u32 spare_per_sector;
221 };
222
223 static const u16 mt7621_nfi_page_size[] = { SZ_512, SZ_2K, SZ_4K };
224 static const u8 mt7621_nfi_spare_size[] = { 16, 26, 27, 28 };
225 static const u8 mt7621_ecc_strength[] = { 4, 6, 8, 10, 12 };
226
227 static inline u32 nfi_read32(struct mt7621_nfc *nfc, u32 reg)
228 {
229 return readl(nfc->nfi_regs + reg);
230 }
231
232 static inline void nfi_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
233 {
234 writel(val, nfc->nfi_regs + reg);
235 }
236
237 static inline u16 nfi_read16(struct mt7621_nfc *nfc, u32 reg)
238 {
239 return readw(nfc->nfi_regs + reg);
240 }
241
242 static inline void nfi_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
243 {
244 writew(val, nfc->nfi_regs + reg);
245 }
246
247 static inline void ecc_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
248 {
249 writew(val, nfc->ecc_regs + reg);
250 }
251
252 static inline u32 ecc_read32(struct mt7621_nfc *nfc, u32 reg)
253 {
254 return readl(nfc->ecc_regs + reg);
255 }
256
257 static inline void ecc_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
258 {
259 	writel(val, nfc->ecc_regs + reg);
260 }
261
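/*
 * OOB buffer layout used by this driver: the first ecc.steps * NFI_FDM_SIZE
 * bytes of oob_poi hold the per-sector FDM (free) bytes, followed by the
 * per-sector ECC parity area (spare_per_sector - NFI_FDM_SIZE bytes each).
 */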
262 static inline u8 *oob_fdm_ptr(struct nand_chip *nand, int sect)
263 {
264 return nand->oob_poi + sect * NFI_FDM_SIZE;
265 }
266
267 static inline u8 *oob_ecc_ptr(struct mt7621_nfc *nfc, int sect)
268 {
269 struct nand_chip *nand = &nfc->nand;
270
271 return nand->oob_poi + nand->ecc.steps * NFI_FDM_SIZE +
272 sect * (nfc->spare_per_sector - NFI_FDM_SIZE);
273 }
274
275 static inline u8 *page_data_ptr(struct nand_chip *nand, const u8 *buf,
276 int sect)
277 {
278 return (u8 *)buf + sect * nand->ecc.size;
279 }
280
281 static int mt7621_ecc_wait_idle(struct mt7621_nfc *nfc, u32 reg)
282 {
283 struct device *dev = nfc->dev;
284 u32 val;
285 int ret;
286
287 ret = readw_poll_timeout_atomic(nfc->ecc_regs + reg, val,
288 val & ECC_IDLE, 10,
289 ECC_ENGINE_TIMEOUT);
290 if (ret) {
291 dev_warn(dev, "ECC engine timed out entering idle mode\n");
292 return -EIO;
293 }
294
295 return 0;
296 }
297
298 static int mt7621_ecc_decoder_wait_done(struct mt7621_nfc *nfc, u32 sect)
299 {
300 struct device *dev = nfc->dev;
301 u32 val;
302 int ret;
303
304 ret = readw_poll_timeout_atomic(nfc->ecc_regs + ECC_DECDONE, val,
305 val & (1 << sect), 10,
306 ECC_ENGINE_TIMEOUT);
307
308 if (ret) {
309 dev_warn(dev, "ECC decoder for sector %d timed out\n",
310 sect);
311 return -ETIMEDOUT;
312 }
313
314 return 0;
315 }
316
317 static void mt7621_ecc_encoder_op(struct mt7621_nfc *nfc, bool enable)
318 {
319 mt7621_ecc_wait_idle(nfc, ECC_ENCIDLE);
320 ecc_write16(nfc, ECC_ENCCON, enable ? ENC_EN : 0);
321 }
322
323 static void mt7621_ecc_decoder_op(struct mt7621_nfc *nfc, bool enable)
324 {
325 mt7621_ecc_wait_idle(nfc, ECC_DECIDLE);
326 ecc_write16(nfc, ECC_DECCON, enable ? DEC_EN : 0);
327 }
328
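/*
 * Read the per-sector bitflip count from ECC_DECENUM and the error locations
 * from the ECC_DECEL registers, then flip the affected bits in the sector
 * data and FDM buffers. Returns the number of corrected bitflips, or -1 if
 * the error counter saturates (uncorrectable sector).
 */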
329 static int mt7621_ecc_correct_check(struct mt7621_nfc *nfc, u8 *sector_buf,
330 u8 *fdm_buf, u32 sect)
331 {
332 struct nand_chip *nand = &nfc->nand;
333 u32 decnum, num_error_bits, fdm_end_bits;
334 u32 error_locations, error_bit_loc;
335 u32 error_byte_pos, error_bit_pos;
336 int bitflips = 0;
337 u32 i;
338
339 decnum = ecc_read32(nfc, ECC_DECENUM);
340 num_error_bits = (decnum >> (sect << ERRNUM_S)) & ERRNUM_M;
341 fdm_end_bits = (nand->ecc.size + NFI_FDM_SIZE) << 3;
342
343 if (!num_error_bits)
344 return 0;
345
346 if (num_error_bits == ERRNUM_M)
347 return -1;
348
349 for (i = 0; i < num_error_bits; i++) {
350 error_locations = ecc_read32(nfc, ECC_DECEL(i / 2));
351 error_bit_loc = (error_locations >> ((i % 2) * DEC_EL_ODD_S)) &
352 DEC_EL_M;
353 error_byte_pos = error_bit_loc >> DEC_EL_BYTE_POS_S;
354 error_bit_pos = error_bit_loc & DEC_EL_BIT_POS_M;
355
356 if (error_bit_loc < (nand->ecc.size << 3)) {
357 if (sector_buf) {
358 sector_buf[error_byte_pos] ^=
359 (1 << error_bit_pos);
360 }
361 } else if (error_bit_loc < fdm_end_bits) {
362 if (fdm_buf) {
363 fdm_buf[error_byte_pos - nand->ecc.size] ^=
364 (1 << error_bit_pos);
365 }
366 }
367
368 bitflips++;
369 }
370
371 return bitflips;
372 }
373
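/*
 * Poll NFI_ADDRCNTR until the sector counter shows that all ECC steps of the
 * current page have been transferred by the NFI core.
 */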
374 static int mt7621_nfc_wait_write_completion(struct mt7621_nfc *nfc,
375 struct nand_chip *nand)
376 {
377 struct device *dev = nfc->dev;
378 u16 val;
379 int ret;
380
381 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_ADDRCNTR, val,
382 ((val & SEC_CNTR_M) >> SEC_CNTR_S) >= nand->ecc.steps, 10,
383 NFI_CORE_TIMEOUT);
384
385 if (ret) {
386 dev_warn(dev, "NFI core write operation timed out\n");
387 return -ETIMEDOUT;
388 }
389
390 return ret;
391 }
392
393 static void mt7621_nfc_hw_reset(struct mt7621_nfc *nfc)
394 {
395 u32 val;
396 int ret;
397
398 /* reset all registers and force the NFI master to terminate */
399 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
400
401 /* wait for the master to finish the last transaction */
402 ret = readw_poll_timeout(nfc->nfi_regs + NFI_MASTER_STA, val,
403 !(val & MASTER_STA_MASK), 50,
404 NFI_RESET_TIMEOUT);
405 if (ret) {
406 		dev_warn(nfc->dev, "Failed to reset NFI master in %dus\n",
407 NFI_RESET_TIMEOUT);
408 }
409
410 /* ensure any status register affected by the NFI master is reset */
411 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
412 nfi_write16(nfc, NFI_STRDATA, 0);
413 }
414
415 static inline void mt7621_nfc_hw_init(struct mt7621_nfc *nfc)
416 {
417 u32 acccon;
418
419 /*
420 * CNRNB: nand ready/busy register
421 * -------------------------------
422 * 7:4: timeout register for polling the NAND busy/ready signal
423 * 0 : poll the status of the busy/ready signal after [7:4]*16 cycles.
424 */
425 nfi_write16(nfc, NFI_CNRNB, CB2R_TIME_M | STR_CNRNB);
426
427 mt7621_nfc_hw_reset(nfc);
428
429 /* Apply default access timing */
430 acccon = ACCTIMING(ACCCON_POECS_DEF, ACCCON_PRECS_DEF, ACCCON_C2R_DEF,
431 ACCCON_W2R_DEF, ACCCON_WH_DEF, ACCCON_WST_DEF,
432 ACCCON_RLT_DEF);
433
434 nfi_write32(nfc, NFI_ACCCON, acccon);
435 }
436
437 static int mt7621_nfc_send_command(struct mt7621_nfc *nfc, u8 command)
438 {
439 struct device *dev = nfc->dev;
440 u32 val;
441 int ret;
442
443 nfi_write32(nfc, NFI_CMD, command);
444
445 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
446 !(val & STA_CMD), 10,
447 NFI_CORE_TIMEOUT);
448 if (ret) {
449 dev_warn(dev, "NFI core timed out entering command mode\n");
450 return -EIO;
451 }
452
453 return 0;
454 }
455
456 static int mt7621_nfc_send_address_byte(struct mt7621_nfc *nfc, int addr)
457 {
458 struct device *dev = nfc->dev;
459 u32 val;
460 int ret;
461
462 nfi_write32(nfc, NFI_COLADDR, addr);
463 nfi_write32(nfc, NFI_ROWADDR, 0);
464 nfi_write16(nfc, NFI_ADDRNOB, 1);
465
466 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
467 !(val & STA_ADDR), 10,
468 NFI_CORE_TIMEOUT);
469 if (ret) {
470 dev_warn(dev, "NFI core timed out entering address mode\n");
471 return -EIO;
472 }
473
474 return 0;
475 }
476
477 static int mt7621_nfc_send_address(struct mt7621_nfc *nfc, const u8 *addr,
478 unsigned int naddrs)
479 {
480 int ret;
481
482 while (naddrs) {
483 ret = mt7621_nfc_send_address_byte(nfc, *addr);
484 if (ret)
485 return ret;
486
487 addr++;
488 naddrs--;
489 }
490
491 return 0;
492 }
493
494 static void mt7621_nfc_wait_pio_ready(struct mt7621_nfc *nfc)
495 {
496 struct device *dev = nfc->dev;
497 int ret;
498 u16 val;
499
500 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_PIO_DIRDY, val,
501 val & PIO_DIRDY, 10,
502 NFI_CORE_TIMEOUT);
503 if (ret < 0)
504 dev_err(dev, "NFI core PIO mode not ready\n");
505 }
506
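/*
 * Read a single unit (one byte when @br is true, otherwise one 32-bit word)
 * from the NFI data FIFO. If the controller FSM is not already in the
 * custom-data state, program byte/word access mode and trigger a read burst
 * first.
 */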
507 static u32 mt7621_nfc_pio_read(struct mt7621_nfc *nfc, bool br)
508 {
509 u32 reg;
510
511 /* after each byte read, the NFI_STA reg is reset by the hardware */
512 reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
513 if (reg != STA_FSM_CUSTOM_DATA) {
514 reg = nfi_read16(nfc, NFI_CNFG);
515 reg |= CNFG_READ_MODE | CNFG_BYTE_RW;
516 if (!br)
517 reg &= ~CNFG_BYTE_RW;
518 nfi_write16(nfc, NFI_CNFG, reg);
519
520 /*
521 * set to max sector to allow the HW to continue reading over
522 * unaligned accesses
523 */
524 nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BRD);
525
526 /* trigger to fetch data */
527 nfi_write16(nfc, NFI_STRDATA, STR_DATA);
528 }
529
530 mt7621_nfc_wait_pio_ready(nfc);
531
532 return nfi_read32(nfc, NFI_DATAR);
533 }
534
535 static void mt7621_nfc_read_data(struct mt7621_nfc *nfc, u8 *buf, u32 len)
536 {
537 while (((uintptr_t)buf & 3) && len) {
538 *buf = mt7621_nfc_pio_read(nfc, true);
539 buf++;
540 len--;
541 }
542
543 while (len >= 4) {
544 *(u32 *)buf = mt7621_nfc_pio_read(nfc, false);
545 buf += 4;
546 len -= 4;
547 }
548
549 while (len) {
550 *buf = mt7621_nfc_pio_read(nfc, true);
551 buf++;
552 len--;
553 }
554 }
555
556 static void mt7621_nfc_read_data_discard(struct mt7621_nfc *nfc, u32 len)
557 {
558 while (len >= 4) {
559 mt7621_nfc_pio_read(nfc, false);
560 len -= 4;
561 }
562
563 while (len) {
564 mt7621_nfc_pio_read(nfc, true);
565 len--;
566 }
567 }
568
569 static void mt7621_nfc_pio_write(struct mt7621_nfc *nfc, u32 val, bool bw)
570 {
571 u32 reg;
572
573 reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
574 if (reg != STA_FSM_CUSTOM_DATA) {
575 reg = nfi_read16(nfc, NFI_CNFG);
576 reg &= ~(CNFG_READ_MODE | CNFG_BYTE_RW);
577 if (bw)
578 reg |= CNFG_BYTE_RW;
579 nfi_write16(nfc, NFI_CNFG, reg);
580
581 nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BWR);
582 nfi_write16(nfc, NFI_STRDATA, STR_DATA);
583 }
584
585 mt7621_nfc_wait_pio_ready(nfc);
586 nfi_write32(nfc, NFI_DATAW, val);
587 }
588
589 static void mt7621_nfc_write_data(struct mt7621_nfc *nfc, const u8 *buf,
590 u32 len)
591 {
592 while (((uintptr_t)buf & 3) && len) {
593 mt7621_nfc_pio_write(nfc, *buf, true);
594 buf++;
595 len--;
596 }
597
598 while (len >= 4) {
599 mt7621_nfc_pio_write(nfc, *(const u32 *)buf, false);
600 buf += 4;
601 len -= 4;
602 }
603
604 while (len) {
605 mt7621_nfc_pio_write(nfc, *buf, true);
606 buf++;
607 len--;
608 }
609 }
610
611 static void mt7621_nfc_write_data_empty(struct mt7621_nfc *nfc, u32 len)
612 {
613 while (len >= 4) {
614 mt7621_nfc_pio_write(nfc, 0xffffffff, false);
615 len -= 4;
616 }
617
618 while (len) {
619 mt7621_nfc_pio_write(nfc, 0xff, true);
620 len--;
621 }
622 }
623
624 static int mt7621_nfc_dev_ready(struct mt7621_nfc *nfc,
625 unsigned int timeout_ms)
626 {
627 u32 val;
628
629 return readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
630 !(val & STA_BUSY), 10,
631 timeout_ms * 1000);
632 }
633
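/*
 * Execute a single instruction from the generic NAND operation parser by
 * driving the NFI custom command/address/data registers in PIO mode.
 */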
634 static int mt7621_nfc_exec_instr(struct nand_chip *nand,
635 const struct nand_op_instr *instr)
636 {
637 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
638
639 switch (instr->type) {
640 case NAND_OP_CMD_INSTR:
641 mt7621_nfc_hw_reset(nfc);
642 nfi_write16(nfc, NFI_CNFG, CNFG_OP_CUSTOM << CNFG_OP_MODE_S);
643 return mt7621_nfc_send_command(nfc, instr->ctx.cmd.opcode);
644 case NAND_OP_ADDR_INSTR:
645 return mt7621_nfc_send_address(nfc, instr->ctx.addr.addrs,
646 instr->ctx.addr.naddrs);
647 case NAND_OP_DATA_IN_INSTR:
648 mt7621_nfc_read_data(nfc, instr->ctx.data.buf.in,
649 instr->ctx.data.len);
650 return 0;
651 case NAND_OP_DATA_OUT_INSTR:
652 mt7621_nfc_write_data(nfc, instr->ctx.data.buf.out,
653 instr->ctx.data.len);
654 return 0;
655 case NAND_OP_WAITRDY_INSTR:
656 return mt7621_nfc_dev_ready(nfc,
657 instr->ctx.waitrdy.timeout_ms);
658 default:
659 WARN_ONCE(1, "unsupported NAND instruction type: %d\n",
660 instr->type);
661
662 return -EINVAL;
663 }
664 }
665
666 static int mt7621_nfc_exec_op(struct nand_chip *nand,
667 const struct nand_operation *op, bool check_only)
668 {
669 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
670 int i, ret;
671
672 if (check_only)
673 return 0;
674
675 /* Only CS0 available */
676 nfi_write16(nfc, NFI_CSEL, 0);
677
678 for (i = 0; i < op->ninstrs; i++) {
679 ret = mt7621_nfc_exec_instr(nand, &op->instrs[i]);
680 if (ret)
681 return ret;
682 }
683
684 return 0;
685 }
686
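/*
 * Convert the SDR timings requested by the NAND core into NFI access-timing
 * register fields, expressed in NFI clock cycles (the rate is taken from
 * nfi_clk), clamping each value to its field maximum.
 */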
687 static int mt7621_nfc_setup_interface(struct nand_chip *nand, int csline,
688 const struct nand_interface_config *conf)
689 {
690 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
691 const struct nand_sdr_timings *timings;
692 u32 acccon, temp, rate, tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt;
693
694 if (!nfc->nfi_clk)
695 return -ENOTSUPP;
696
697 timings = nand_get_sdr_timings(conf);
698 if (IS_ERR(timings))
699 return -ENOTSUPP;
700
701 rate = clk_get_rate(nfc->nfi_clk);
702
703 	/* turn clock rate into kHz */
704 rate /= 1000;
705
706 tpoecs = max(timings->tALH_min, timings->tCLH_min) / 1000;
707 tpoecs = DIV_ROUND_UP(tpoecs * rate, 1000000);
708 tpoecs = min_t(u32, tpoecs, ACCCON_POECS_MAX);
709
710 tprecs = max(timings->tCLS_min, timings->tALS_min) / 1000;
711 tprecs = DIV_ROUND_UP(tprecs * rate, 1000000);
712 tprecs = min_t(u32, tprecs, ACCCON_PRECS_MAX);
713
714 /* sdr interface has no tCR which means CE# low to RE# low */
715 tc2r = 0;
716
717 tw2r = timings->tWHR_min / 1000;
718 tw2r = DIV_ROUND_UP(tw2r * rate, 1000000);
719 tw2r = DIV_ROUND_UP(tw2r - 1, 2);
720 tw2r = min_t(u32, tw2r, ACCCON_W2R_MAX);
721
722 twh = max(timings->tREH_min, timings->tWH_min) / 1000;
723 twh = DIV_ROUND_UP(twh * rate, 1000000) - 1;
724 twh = min_t(u32, twh, ACCCON_WH_MAX);
725
726 /* Calculate real WE#/RE# hold time in nanosecond */
727 temp = (twh + 1) * 1000000 / rate;
728 /* nanosecond to picosecond */
729 temp *= 1000;
730
731 /*
732 	 * WE# low level time should be expanded to meet WE# pulse time
733 * and WE# cycle time at the same time.
734 */
735 if (temp < timings->tWC_min)
736 twst = timings->tWC_min - temp;
737 else
738 twst = 0;
739 twst = max(timings->tWP_min, twst) / 1000;
740 twst = DIV_ROUND_UP(twst * rate, 1000000) - 1;
741 twst = min_t(u32, twst, ACCCON_WST_MAX);
742
743 /*
744 	 * RE# low level time should be expanded to meet RE# pulse time
745 * and RE# cycle time at the same time.
746 */
747 if (temp < timings->tRC_min)
748 trlt = timings->tRC_min - temp;
749 else
750 trlt = 0;
751 trlt = max(trlt, timings->tRP_min) / 1000;
752 trlt = DIV_ROUND_UP(trlt * rate, 1000000) - 1;
753 trlt = min_t(u32, trlt, ACCCON_RLT_MAX);
754
755 if (csline == NAND_DATA_IFACE_CHECK_ONLY) {
756 if (twst < ACCCON_WST_MIN || trlt < ACCCON_RLT_MIN)
757 return -ENOTSUPP;
758 }
759
760 acccon = ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt);
761
762 dev_info(nfc->dev, "Using programmed access timing: %08x\n", acccon);
763
764 nfi_write32(nfc, NFI_ACCCON, acccon);
765
766 return 0;
767 }
768
769 static int mt7621_nfc_calc_ecc_strength(struct mt7621_nfc *nfc,
770 u32 avail_ecc_bytes)
771 {
772 struct nand_chip *nand = &nfc->nand;
773 struct mtd_info *mtd = nand_to_mtd(nand);
774 u32 strength;
775 int i;
776
777 strength = avail_ecc_bytes * 8 / ECC_PARITY_BITS;
778
779 /* Find the closest supported ecc strength */
780 for (i = ARRAY_SIZE(mt7621_ecc_strength) - 1; i >= 0; i--) {
781 if (mt7621_ecc_strength[i] <= strength)
782 break;
783 }
784
785 if (unlikely(i < 0)) {
786 dev_err(nfc->dev, "OOB size (%u) is not supported\n",
787 mtd->oobsize);
788 return -EINVAL;
789 }
790
791 nand->ecc.strength = mt7621_ecc_strength[i];
792 nand->ecc.bytes =
793 DIV_ROUND_UP(nand->ecc.strength * ECC_PARITY_BITS, 8);
794
795 dev_info(nfc->dev, "ECC strength adjusted to %u bits\n",
796 nand->ecc.strength);
797
798 return i;
799 }
800
801 static int mt7621_nfc_set_spare_per_sector(struct mt7621_nfc *nfc)
802 {
803 struct nand_chip *nand = &nfc->nand;
804 struct mtd_info *mtd = nand_to_mtd(nand);
805 u32 size;
806 int i;
807
808 size = nand->ecc.bytes + NFI_FDM_SIZE;
809
810 /* Find the closest supported spare size */
811 for (i = 0; i < ARRAY_SIZE(mt7621_nfi_spare_size); i++) {
812 if (mt7621_nfi_spare_size[i] >= size)
813 break;
814 }
815
816 if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_spare_size))) {
817 dev_err(nfc->dev, "OOB size (%u) is not supported\n",
818 mtd->oobsize);
819 return -EINVAL;
820 }
821
822 nfc->spare_per_sector = mt7621_nfi_spare_size[i];
823
824 return i;
825 }
826
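/*
 * Configure the ECC engine for 512-byte sectors: pick the strongest supported
 * ECC strength that fits in the spare bytes left after the 8-byte FDM area,
 * then program the encoder and decoder configurations accordingly.
 */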
827 static int mt7621_nfc_ecc_init(struct mt7621_nfc *nfc)
828 {
829 struct nand_chip *nand = &nfc->nand;
830 struct mtd_info *mtd = nand_to_mtd(nand);
831 u32 spare_per_sector, encode_block_size, decode_block_size;
832 u32 ecc_enccfg, ecc_deccfg;
833 int ecc_cap;
834
835 /* Only hardware ECC mode is supported */
836 if (nand->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) {
837 dev_err(nfc->dev, "Only hardware ECC mode is supported\n");
838 return -EINVAL;
839 }
840
841 nand->ecc.size = ECC_SECTOR_SIZE;
842 nand->ecc.steps = mtd->writesize / nand->ecc.size;
843
844 spare_per_sector = mtd->oobsize / nand->ecc.steps;
845
846 ecc_cap = mt7621_nfc_calc_ecc_strength(nfc,
847 spare_per_sector - NFI_FDM_SIZE);
848 if (ecc_cap < 0)
849 return ecc_cap;
850
851 /* Sector + FDM */
852 encode_block_size = (nand->ecc.size + NFI_FDM_SIZE) * 8;
853 ecc_enccfg = ecc_cap | (ENC_MODE_NFI << ENC_MODE_S) |
854 (encode_block_size << ENC_CNFG_MSG_S);
855
856 /* Sector + FDM + ECC parity bits */
857 decode_block_size = ((nand->ecc.size + NFI_FDM_SIZE) * 8) +
858 nand->ecc.strength * ECC_PARITY_BITS;
859 ecc_deccfg = ecc_cap | (DEC_MODE_NFI << DEC_MODE_S) |
860 (decode_block_size << DEC_CS_S) |
861 (DEC_CON_EL << DEC_CON_S) | DEC_EMPTY_EN;
862
863 mt7621_ecc_encoder_op(nfc, false);
864 ecc_write32(nfc, ECC_ENCCNFG, ecc_enccfg);
865
866 mt7621_ecc_decoder_op(nfc, false);
867 ecc_write32(nfc, ECC_DECCNFG, ecc_deccfg);
868
869 return 0;
870 }
871
872 static int mt7621_nfc_set_page_format(struct mt7621_nfc *nfc)
873 {
874 struct nand_chip *nand = &nfc->nand;
875 struct mtd_info *mtd = nand_to_mtd(nand);
876 int i, spare_size;
877 u32 pagefmt;
878
879 spare_size = mt7621_nfc_set_spare_per_sector(nfc);
880 if (spare_size < 0)
881 return spare_size;
882
883 for (i = 0; i < ARRAY_SIZE(mt7621_nfi_page_size); i++) {
884 if (mt7621_nfi_page_size[i] == mtd->writesize)
885 break;
886 }
887
888 if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_page_size))) {
889 dev_err(nfc->dev, "Page size (%u) is not supported\n",
890 mtd->writesize);
891 return -EINVAL;
892 }
893
894 pagefmt = i | (spare_size << PAGEFMT_SPARE_S) |
895 (NFI_FDM_SIZE << PAGEFMT_FDM_S) |
896 (NFI_FDM_SIZE << PAGEFMT_FDM_ECC_S);
897
898 nfi_write16(nfc, NFI_PAGEFMT, pagefmt);
899
900 return 0;
901 }
902
903 static int mt7621_nfc_attach_chip(struct nand_chip *nand)
904 {
905 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
906 int ret;
907
908 if (nand->options & NAND_BUSWIDTH_16) {
909 		dev_err(nfc->dev, "16-bit buswidth is not supported\n");
910 return -EINVAL;
911 }
912
913 ret = mt7621_nfc_ecc_init(nfc);
914 if (ret)
915 return ret;
916
917 return mt7621_nfc_set_page_format(nfc);
918 }
919
920 static const struct nand_controller_ops mt7621_nfc_controller_ops = {
921 .attach_chip = mt7621_nfc_attach_chip,
922 .exec_op = mt7621_nfc_exec_op,
923 .setup_interface = mt7621_nfc_setup_interface,
924 };
925
926 static int mt7621_nfc_ooblayout_free(struct mtd_info *mtd, int section,
927 struct mtd_oob_region *oob_region)
928 {
929 struct nand_chip *nand = mtd_to_nand(mtd);
930
931 if (section >= nand->ecc.steps)
932 return -ERANGE;
933
934 oob_region->length = NFI_FDM_SIZE - 1;
935 oob_region->offset = section * NFI_FDM_SIZE + 1;
936
937 return 0;
938 }
939
940 static int mt7621_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
941 struct mtd_oob_region *oob_region)
942 {
943 struct nand_chip *nand = mtd_to_nand(mtd);
944
945 if (section)
946 return -ERANGE;
947
948 oob_region->offset = NFI_FDM_SIZE * nand->ecc.steps;
949 oob_region->length = mtd->oobsize - oob_region->offset;
950
951 return 0;
952 }
953
954 static const struct mtd_ooblayout_ops mt7621_nfc_ooblayout_ops = {
955 .free = mt7621_nfc_ooblayout_free,
956 .ecc = mt7621_nfc_ooblayout_ecc,
957 };
958
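/*
 * Load the per-sector FDM bytes from oob_poi into the NFI_FDML/NFI_FDMM
 * registers so the controller stores them together with the data during a
 * hardware-ECC page program.
 */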
959 static void mt7621_nfc_write_fdm(struct mt7621_nfc *nfc)
960 {
961 struct nand_chip *nand = &nfc->nand;
962 u32 vall, valm;
963 u8 *oobptr;
964 int i, j;
965
966 for (i = 0; i < nand->ecc.steps; i++) {
967 vall = 0;
968 valm = 0;
969 oobptr = oob_fdm_ptr(nand, i);
970
971 for (j = 0; j < 4; j++)
972 vall |= (u32)oobptr[j] << (j * 8);
973
974 for (j = 0; j < 4; j++)
975 valm |= (u32)oobptr[j + 4] << (j * 8);
976
977 nfi_write32(nfc, NFI_FDML(i), vall);
978 nfi_write32(nfc, NFI_FDMM(i), valm);
979 }
980 }
981
982 static void mt7621_nfc_read_sector_fdm(struct mt7621_nfc *nfc, u32 sect)
983 {
984 struct nand_chip *nand = &nfc->nand;
985 u32 vall, valm;
986 u8 *oobptr;
987 int i;
988
989 vall = nfi_read32(nfc, NFI_FDML(sect));
990 valm = nfi_read32(nfc, NFI_FDMM(sect));
991 oobptr = oob_fdm_ptr(nand, sect);
992
993 for (i = 0; i < 4; i++)
994 oobptr[i] = (vall >> (i * 8)) & 0xff;
995
996 for (i = 0; i < 4; i++)
997 oobptr[i + 4] = (valm >> (i * 8)) & 0xff;
998 }
999
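/*
 * Read a full page in PIO mode with the hardware ECC decoder enabled: for
 * each sector, read the data, wait for the decoder, fetch the FDM bytes from
 * the controller registers and apply bitflip corrections. Returns the
 * accumulated number of corrected bitflips, or a negative error if the
 * decoder times out or a sector is uncorrectable.
 */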
1000 static int mt7621_nfc_read_page_hwecc(struct nand_chip *nand, uint8_t *buf,
1001 int oob_required, int page)
1002 {
1003 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1004 struct mtd_info *mtd = nand_to_mtd(nand);
1005 int bitflips = 0;
1006 int rc, i;
1007
1008 nand_read_page_op(nand, page, 0, NULL, 0);
1009
1010 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1011 CNFG_READ_MODE | CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
1012
1013 mt7621_ecc_decoder_op(nfc, true);
1014
1015 nfi_write16(nfc, NFI_CON,
1016 CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));
1017
1018 for (i = 0; i < nand->ecc.steps; i++) {
1019 if (buf)
1020 mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
1021 nand->ecc.size);
1022 else
1023 mt7621_nfc_read_data_discard(nfc, nand->ecc.size);
1024
1025 rc = mt7621_ecc_decoder_wait_done(nfc, i);
1026
1027 mt7621_nfc_read_sector_fdm(nfc, i);
1028
1029 if (rc < 0) {
1030 bitflips = -EIO;
1031 continue;
1032 }
1033
1034 rc = mt7621_ecc_correct_check(nfc,
1035 buf ? page_data_ptr(nand, buf, i) : NULL,
1036 oob_fdm_ptr(nand, i), i);
1037
1038 if (rc < 0) {
1039 dev_dbg(nfc->dev,
1040 "Uncorrectable ECC error at page %d.%d\n",
1041 page, i);
1042 bitflips = -EBADMSG;
1043 mtd->ecc_stats.failed++;
1044 } else if (bitflips >= 0) {
1045 bitflips += rc;
1046 mtd->ecc_stats.corrected += rc;
1047 }
1048 }
1049
1050 mt7621_ecc_decoder_op(nfc, false);
1051
1052 nfi_write16(nfc, NFI_CON, 0);
1053
1054 return bitflips;
1055 }
1056
1057 static int mt7621_nfc_read_page_raw(struct nand_chip *nand, uint8_t *buf,
1058 int oob_required, int page)
1059 {
1060 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1061 int i;
1062
1063 nand_read_page_op(nand, page, 0, NULL, 0);
1064
1065 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1066 CNFG_READ_MODE);
1067
1068 nfi_write16(nfc, NFI_CON,
1069 CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));
1070
1071 for (i = 0; i < nand->ecc.steps; i++) {
1072 /* Read data */
1073 if (buf)
1074 mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
1075 nand->ecc.size);
1076 else
1077 mt7621_nfc_read_data_discard(nfc, nand->ecc.size);
1078
1079 /* Read FDM */
1080 mt7621_nfc_read_data(nfc, oob_fdm_ptr(nand, i), NFI_FDM_SIZE);
1081
1082 /* Read ECC parity data */
1083 mt7621_nfc_read_data(nfc, oob_ecc_ptr(nfc, i),
1084 nfc->spare_per_sector - NFI_FDM_SIZE);
1085 }
1086
1087 nfi_write16(nfc, NFI_CON, 0);
1088
1089 return 0;
1090 }
1091
1092 static int mt7621_nfc_read_oob_hwecc(struct nand_chip *nand, int page)
1093 {
1094 return mt7621_nfc_read_page_hwecc(nand, NULL, 1, page);
1095 }
1096
1097 static int mt7621_nfc_read_oob_raw(struct nand_chip *nand, int page)
1098 {
1099 return mt7621_nfc_read_page_raw(nand, NULL, 1, page);
1100 }
1101
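/*
 * Return 1 if both the data buffer and all FDM bytes are 0xff, i.e. the page
 * is empty. Used by the write path to skip programming such pages (see the
 * comment in mt7621_nfc_write_page_hwecc()).
 */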
1102 static int mt7621_nfc_check_empty_page(struct nand_chip *nand, const u8 *buf)
1103 {
1104 struct mtd_info *mtd = nand_to_mtd(nand);
1105 uint32_t i, j;
1106 u8 *oobptr;
1107
1108 if (buf) {
1109 for (i = 0; i < mtd->writesize; i++)
1110 if (buf[i] != 0xff)
1111 return 0;
1112 }
1113
1114 for (i = 0; i < nand->ecc.steps; i++) {
1115 oobptr = oob_fdm_ptr(nand, i);
1116 for (j = 0; j < NFI_FDM_SIZE; j++)
1117 if (oobptr[j] != 0xff)
1118 return 0;
1119 }
1120
1121 return 1;
1122 }
1123
1124 static int mt7621_nfc_write_page_hwecc(struct nand_chip *nand,
1125 const uint8_t *buf, int oob_required,
1126 int page)
1127 {
1128 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1129 struct mtd_info *mtd = nand_to_mtd(nand);
1130
1131 if (mt7621_nfc_check_empty_page(nand, buf)) {
1132 /*
1133 * MT7621 ECC engine always generates parity code for input
1134 * pages, even for empty pages. Doing so will write back ECC
1135 * parity code to the oob region, which means such pages will
1136 * no longer be empty pages.
1137 *
1138 * To avoid this, stop write operation if current page is an
1139 * empty page.
1140 */
1141 return 0;
1142 }
1143
1144 nand_prog_page_begin_op(nand, page, 0, NULL, 0);
1145
1146 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1147 CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
1148
1149 mt7621_ecc_encoder_op(nfc, true);
1150
1151 mt7621_nfc_write_fdm(nfc);
1152
1153 nfi_write16(nfc, NFI_CON,
1154 CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));
1155
1156 if (buf)
1157 mt7621_nfc_write_data(nfc, buf, mtd->writesize);
1158 else
1159 mt7621_nfc_write_data_empty(nfc, mtd->writesize);
1160
1161 mt7621_nfc_wait_write_completion(nfc, nand);
1162
1163 mt7621_ecc_encoder_op(nfc, false);
1164
1165 nfi_write16(nfc, NFI_CON, 0);
1166
1167 return nand_prog_page_end_op(nand);
1168 }
1169
1170 static int mt7621_nfc_write_page_raw(struct nand_chip *nand,
1171 const uint8_t *buf, int oob_required,
1172 int page)
1173 {
1174 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1175 int i;
1176
1177 nand_prog_page_begin_op(nand, page, 0, NULL, 0);
1178
1179 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S));
1180
1181 nfi_write16(nfc, NFI_CON,
1182 CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));
1183
1184 for (i = 0; i < nand->ecc.steps; i++) {
1185 /* Write data */
1186 if (buf)
1187 mt7621_nfc_write_data(nfc, page_data_ptr(nand, buf, i),
1188 nand->ecc.size);
1189 else
1190 mt7621_nfc_write_data_empty(nfc, nand->ecc.size);
1191
1192 /* Write FDM */
1193 mt7621_nfc_write_data(nfc, oob_fdm_ptr(nand, i),
1194 NFI_FDM_SIZE);
1195
1196 /* Write dummy ECC parity data */
1197 mt7621_nfc_write_data_empty(nfc, nfc->spare_per_sector -
1198 NFI_FDM_SIZE);
1199 }
1200
1201 mt7621_nfc_wait_write_completion(nfc, nand);
1202
1203 nfi_write16(nfc, NFI_CON, 0);
1204
1205 return nand_prog_page_end_op(nand);
1206 }
1207
1208 static int mt7621_nfc_write_oob_hwecc(struct nand_chip *nand, int page)
1209 {
1210 return mt7621_nfc_write_page_hwecc(nand, NULL, 1, page);
1211 }
1212
1213 static int mt7621_nfc_write_oob_raw(struct nand_chip *nand, int page)
1214 {
1215 return mt7621_nfc_write_page_raw(nand, NULL, 1, page);
1216 }
1217
1218 static int mt7621_nfc_init_chip(struct mt7621_nfc *nfc)
1219 {
1220 struct nand_chip *nand = &nfc->nand;
1221 struct mtd_info *mtd;
1222 int ret;
1223
1224 nand->controller = &nfc->controller;
1225 nand_set_controller_data(nand, (void *)nfc);
1226 nand_set_flash_node(nand, nfc->dev->of_node);
1227
1228 nand->options |= NAND_USES_DMA | NAND_NO_SUBPAGE_WRITE;
1229 if (!nfc->nfi_clk)
1230 nand->options |= NAND_KEEP_TIMINGS;
1231
1232 nand->ecc.engine_type = NAND_ECC_ENGINE_TYPE_ON_HOST;
1233 nand->ecc.read_page = mt7621_nfc_read_page_hwecc;
1234 nand->ecc.read_page_raw = mt7621_nfc_read_page_raw;
1235 nand->ecc.write_page = mt7621_nfc_write_page_hwecc;
1236 nand->ecc.write_page_raw = mt7621_nfc_write_page_raw;
1237 nand->ecc.read_oob = mt7621_nfc_read_oob_hwecc;
1238 nand->ecc.read_oob_raw = mt7621_nfc_read_oob_raw;
1239 nand->ecc.write_oob = mt7621_nfc_write_oob_hwecc;
1240 nand->ecc.write_oob_raw = mt7621_nfc_write_oob_raw;
1241
1242 mtd = nand_to_mtd(nand);
1243 mtd->owner = THIS_MODULE;
1244 mtd->dev.parent = nfc->dev;
1245 mtd->name = MT7621_NFC_NAME;
1246 mtd_set_ooblayout(mtd, &mt7621_nfc_ooblayout_ops);
1247
1248 mt7621_nfc_hw_init(nfc);
1249
1250 ret = nand_scan(nand, 1);
1251 if (ret)
1252 return ret;
1253
1254 ret = mtd_device_register(mtd, NULL, 0);
1255 if (ret) {
1256 dev_err(nfc->dev, "Failed to register MTD: %d\n", ret);
1257 nand_cleanup(nand);
1258 return ret;
1259 }
1260
1261 return 0;
1262 }
1263
1264 static int mt7621_nfc_probe(struct platform_device *pdev)
1265 {
1266 struct device *dev = &pdev->dev;
1267 struct mt7621_nfc *nfc;
1268 struct resource *res;
1269 int ret;
1270
1271 nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
1272 if (!nfc)
1273 return -ENOMEM;
1274
1275 nand_controller_init(&nfc->controller);
1276 nfc->controller.ops = &mt7621_nfc_controller_ops;
1277 nfc->dev = dev;
1278
1279 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "nfi");
1280 nfc->nfi_regs = devm_ioremap_resource(dev, res);
1281 if (IS_ERR(nfc->nfi_regs)) {
1282 ret = PTR_ERR(nfc->nfi_regs);
1283 return ret;
1284 }
1285
1286 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ecc");
1287 nfc->ecc_regs = devm_ioremap_resource(dev, res);
1288 if (IS_ERR(nfc->ecc_regs)) {
1289 ret = PTR_ERR(nfc->ecc_regs);
1290 return ret;
1291 }
1292
1293 nfc->nfi_clk = devm_clk_get(dev, "nfi_clk");
1294 if (IS_ERR(nfc->nfi_clk)) {
1295 dev_warn(dev, "nfi clk not provided\n");
1296 nfc->nfi_clk = NULL;
1297 } else {
1298 ret = clk_prepare_enable(nfc->nfi_clk);
1299 if (ret) {
1300 dev_err(dev, "Failed to enable nfi core clock\n");
1301 return ret;
1302 }
1303 }
1304
1305 platform_set_drvdata(pdev, nfc);
1306
1307 ret = mt7621_nfc_init_chip(nfc);
1308 if (ret) {
1309 dev_err(dev, "Failed to initialize nand chip\n");
1310 goto clk_disable;
1311 }
1312
1313 return 0;
1314
1315 clk_disable:
1316 clk_disable_unprepare(nfc->nfi_clk);
1317
1318 return ret;
1319 }
1320
1321 static int mt7621_nfc_remove(struct platform_device *pdev)
1322 {
1323 struct mt7621_nfc *nfc = platform_get_drvdata(pdev);
1324 struct nand_chip *nand = &nfc->nand;
1325 struct mtd_info *mtd = nand_to_mtd(nand);
1326
1327 mtd_device_unregister(mtd);
1328 nand_cleanup(nand);
1329 clk_disable_unprepare(nfc->nfi_clk);
1330
1331 return 0;
1332 }
1333
1334 static const struct of_device_id mt7621_nfc_id_table[] = {
1335 { .compatible = "mediatek,mt7621-nfc" },
1336 { },
1337 };
1338 MODULE_DEVICE_TABLE(of, mt7621_nfc_id_table);
1339
1340 static struct platform_driver mt7621_nfc_driver = {
1341 .probe = mt7621_nfc_probe,
1342 .remove = mt7621_nfc_remove,
1343 .driver = {
1344 .name = MT7621_NFC_NAME,
1345 .owner = THIS_MODULE,
1346 .of_match_table = mt7621_nfc_id_table,
1347 },
1348 };
1349 module_platform_driver(mt7621_nfc_driver);
1350
1351 MODULE_LICENSE("GPL");
1352 MODULE_AUTHOR("Weijie Gao <weijie.gao@mediatek.com>");
1353 MODULE_DESCRIPTION("MediaTek MT7621 NAND Flash Controller driver");