ramips: skip bbt scan on mt7621
[openwrt/staging/mkresin.git] / target / linux / ramips / files / drivers / mtd / nand / raw / mt7621_nand.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * MediaTek MT7621 NAND Flash Controller driver
4 *
5 * Copyright (C) 2020 MediaTek Inc. All Rights Reserved.
6 *
7 * Author: Weijie Gao <weijie.gao@mediatek.com>
8 */
9
10 #include <linux/io.h>
11 #include <linux/clk.h>
12 #include <linux/init.h>
13 #include <linux/errno.h>
14 #include <linux/sizes.h>
15 #include <linux/iopoll.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/mtd/mtd.h>
19 #include <linux/mtd/rawnand.h>
20 #include <linux/mtd/partitions.h>
21 #include <linux/mtd/mtk_bmt.h>
22 #include <linux/platform_device.h>
23 #include <asm/addrspace.h>
24
25 /* NFI core registers */
26 #define NFI_CNFG 0x000
27 #define CNFG_OP_MODE_S 12
28 #define CNFG_OP_MODE_M GENMASK(14, 12)
29 #define CNFG_OP_CUSTOM 6
30 #define CNFG_AUTO_FMT_EN BIT(9)
31 #define CNFG_HW_ECC_EN BIT(8)
32 #define CNFG_BYTE_RW BIT(6)
33 #define CNFG_READ_MODE BIT(1)
34
35 #define NFI_PAGEFMT 0x004
36 #define PAGEFMT_FDM_ECC_S 12
37 #define PAGEFMT_FDM_ECC_M GENMASK(15, 12)
38 #define PAGEFMT_FDM_S 8
39 #define PAGEFMT_FDM_M GENMASK(11, 8)
40 #define PAGEFMT_SPARE_S 4
41 #define PAGEFMT_SPARE_M GENMASK(5, 4)
42 #define PAGEFMT_PAGE_S 0
43 #define PAGEFMT_PAGE_M GENMASK(1, 0)
44
45 #define NFI_CON 0x008
46 #define CON_NFI_SEC_S 12
47 #define CON_NFI_SEC_M GENMASK(15, 12)
48 #define CON_NFI_BWR BIT(9)
49 #define CON_NFI_BRD BIT(8)
50 #define CON_NFI_RST BIT(1)
51 #define CON_FIFO_FLUSH BIT(0)
52
53 #define NFI_ACCCON 0x00c
54 #define ACCCON_POECS_S 28
55 #define ACCCON_POECS_MAX 0x0f
56 #define ACCCON_POECS_DEF 3
57 #define ACCCON_PRECS_S 22
58 #define ACCCON_PRECS_MAX 0x3f
59 #define ACCCON_PRECS_DEF 3
60 #define ACCCON_C2R_S 16
61 #define ACCCON_C2R_MAX 0x3f
62 #define ACCCON_C2R_DEF 7
63 #define ACCCON_W2R_S 12
64 #define ACCCON_W2R_MAX 0x0f
65 #define ACCCON_W2R_DEF 7
66 #define ACCCON_WH_S 8
67 #define ACCCON_WH_MAX 0x0f
68 #define ACCCON_WH_DEF 15
69 #define ACCCON_WST_S 4
70 #define ACCCON_WST_MAX 0x0f
71 #define ACCCON_WST_DEF 15
72 #define ACCCON_WST_MIN 3
73 #define ACCCON_RLT_S 0
74 #define ACCCON_RLT_MAX 0x0f
75 #define ACCCON_RLT_DEF 15
76 #define ACCCON_RLT_MIN 3
77
78 #define NFI_CMD 0x020
79
80 #define NFI_ADDRNOB 0x030
81 #define ADDR_ROW_NOB_S 4
82 #define ADDR_ROW_NOB_M GENMASK(6, 4)
83 #define ADDR_COL_NOB_S 0
84 #define ADDR_COL_NOB_M GENMASK(2, 0)
85
86 #define NFI_COLADDR 0x034
87 #define NFI_ROWADDR 0x038
88
89 #define NFI_STRDATA 0x040
90 #define STR_DATA BIT(0)
91
92 #define NFI_CNRNB 0x044
93 #define CB2R_TIME_S 4
94 #define CB2R_TIME_M GENMASK(7, 4)
95 #define STR_CNRNB BIT(0)
96
97 #define NFI_DATAW 0x050
98 #define NFI_DATAR 0x054
99
100 #define NFI_PIO_DIRDY 0x058
101 #define PIO_DIRDY BIT(0)
102
103 #define NFI_STA 0x060
104 #define STA_NFI_FSM_S 16
105 #define STA_NFI_FSM_M GENMASK(19, 16)
106 #define STA_FSM_CUSTOM_DATA 14
107 #define STA_BUSY BIT(8)
108 #define STA_ADDR BIT(1)
109 #define STA_CMD BIT(0)
110
111 #define NFI_ADDRCNTR 0x070
112 #define SEC_CNTR_S 12
113 #define SEC_CNTR_M GENMASK(15, 12)
114 #define SEC_ADDR_S 0
115 #define SEC_ADDR_M GENMASK(9, 0)
116
117 #define NFI_CSEL 0x090
118 #define CSEL_S 0
119 #define CSEL_M GENMASK(1, 0)
120
121 #define NFI_FDM0L 0x0a0
122 #define NFI_FDML(n) (0x0a0 + ((n) << 3))
123
124 #define NFI_FDM0M 0x0a4
125 #define NFI_FDMM(n) (0x0a4 + ((n) << 3))
126
127 #define NFI_MASTER_STA 0x210
128 #define MAS_ADDR GENMASK(11, 9)
129 #define MAS_RD GENMASK(8, 6)
130 #define MAS_WR GENMASK(5, 3)
131 #define MAS_RDDLY GENMASK(2, 0)
132
133 /* ECC engine registers */
134 #define ECC_ENCCON 0x000
135 #define ENC_EN BIT(0)
136
137 #define ECC_ENCCNFG 0x004
138 #define ENC_CNFG_MSG_S 16
139 #define ENC_CNFG_MSG_M GENMASK(28, 16)
140 #define ENC_MODE_S 4
141 #define ENC_MODE_M GENMASK(5, 4)
142 #define ENC_MODE_NFI 1
143 #define ENC_TNUM_S 0
144 #define ENC_TNUM_M GENMASK(2, 0)
145
146 #define ECC_ENCIDLE 0x00c
147 #define ENC_IDLE BIT(0)
148
149 #define ECC_DECCON 0x100
150 #define DEC_EN BIT(0)
151
152 #define ECC_DECCNFG 0x104
153 #define DEC_EMPTY_EN BIT(31)
154 #define DEC_CS_S 16
155 #define DEC_CS_M GENMASK(28, 16)
156 #define DEC_CON_S 12
157 #define DEC_CON_M GENMASK(13, 12)
158 #define DEC_CON_EL 2
159 #define DEC_MODE_S 4
160 #define DEC_MODE_M GENMASK(5, 4)
161 #define DEC_MODE_NFI 1
162 #define DEC_TNUM_S 0
163 #define DEC_TNUM_M GENMASK(2, 0)
164
165 #define ECC_DECIDLE 0x10c
166 #define DEC_IDLE BIT(1)
167
168 #define ECC_DECENUM 0x114
169 #define ERRNUM_S 2
170 #define ERRNUM_M GENMASK(3, 0)
171
172 #define ECC_DECDONE 0x118
173 #define DEC_DONE7 BIT(7)
174 #define DEC_DONE6 BIT(6)
175 #define DEC_DONE5 BIT(5)
176 #define DEC_DONE4 BIT(4)
177 #define DEC_DONE3 BIT(3)
178 #define DEC_DONE2 BIT(2)
179 #define DEC_DONE1 BIT(1)
180 #define DEC_DONE0 BIT(0)
181
182 #define ECC_DECEL(n) (0x11c + (n) * 4)
183 #define DEC_EL_ODD_S 16
184 #define DEC_EL_EVEN_S 0
185 #define DEC_EL_M 0x1fff
186 #define DEC_EL_BYTE_POS_S 3
187 #define DEC_EL_BIT_POS_M GENMASK(2, 0)
188
189 #define ECC_FDMADDR 0x13c
190
191 /* ENCIDLE and DECIDLE */
192 #define ECC_IDLE BIT(0)
193
194 #define ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt) \
195 ((tpoecs) << ACCCON_POECS_S | (tprecs) << ACCCON_PRECS_S | \
196 (tc2r) << ACCCON_C2R_S | (tw2r) << ACCCON_W2R_S | \
197 (twh) << ACCCON_WH_S | (twst) << ACCCON_WST_S | (trlt))
198
199 #define MASTER_STA_MASK (MAS_ADDR | MAS_RD | MAS_WR | \
200 MAS_RDDLY)
201 #define NFI_RESET_TIMEOUT 1000000
202 #define NFI_CORE_TIMEOUT 500000
203 #define ECC_ENGINE_TIMEOUT 500000
204
205 #define ECC_SECTOR_SIZE 512
206 #define ECC_PARITY_BITS 13
207
208 #define NFI_FDM_SIZE 8
209
210 #define MT7621_NFC_NAME "mt7621-nand"
211
/**
 * struct mt7621_nfc - per-controller driver state
 * @controller: generic NAND controller instance (op queueing/locking)
 * @nand:       the single attached NAND chip (only CS0 is wired up)
 * @nfi_clk:    NFI core clock; may be NULL, in which case default/kept
 *              timings are used (see mt7621_nfc_setup_interface)
 * @dev:        backing platform device, used for logging and OF node
 * @nfi_base:   physical base of the NFI block; programmed into ECC_FDMADDR
 *              so the ECC engine can DMA the FDM registers
 * @nfi_regs:   mapped NFI core register window
 * @ecc_regs:   mapped ECC engine register window
 * @spare_per_sector: OOB bytes per 512-byte sector actually used by the
 *              controller page format (FDM + ECC parity)
 */
struct mt7621_nfc {
	struct nand_controller controller;
	struct nand_chip nand;
	struct clk *nfi_clk;
	struct device *dev;

	u32 nfi_base;
	void __iomem *nfi_regs;
	void __iomem *ecc_regs;

	u32 spare_per_sector;
};
224
225 static const u16 mt7621_nfi_page_size[] = { SZ_512, SZ_2K, SZ_4K };
226 static const u8 mt7621_nfi_spare_size[] = { 16, 26, 27, 28 };
227 static const u8 mt7621_ecc_strength[] = { 4, 6, 8, 10, 12 };
228
229 static inline u32 nfi_read32(struct mt7621_nfc *nfc, u32 reg)
230 {
231 return readl(nfc->nfi_regs + reg);
232 }
233
234 static inline void nfi_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
235 {
236 writel(val, nfc->nfi_regs + reg);
237 }
238
239 static inline u16 nfi_read16(struct mt7621_nfc *nfc, u32 reg)
240 {
241 return readw(nfc->nfi_regs + reg);
242 }
243
244 static inline void nfi_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
245 {
246 writew(val, nfc->nfi_regs + reg);
247 }
248
249 static inline void ecc_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
250 {
251 writew(val, nfc->ecc_regs + reg);
252 }
253
254 static inline u32 ecc_read32(struct mt7621_nfc *nfc, u32 reg)
255 {
256 return readl(nfc->ecc_regs + reg);
257 }
258
259 static inline void ecc_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
260 {
261 return writel(val, nfc->ecc_regs + reg);
262 }
263
/* Start of sector @sect's FDM (free OOB) bytes inside nand->oob_poi. */
static inline u8 *oob_fdm_ptr(struct nand_chip *nand, int sect)
{
	return nand->oob_poi + sect * NFI_FDM_SIZE;
}

/*
 * Start of sector @sect's ECC parity bytes inside nand->oob_poi.
 * OOB layout is: all FDM regions first, then all ECC parity regions.
 */
static inline u8 *oob_ecc_ptr(struct mt7621_nfc *nfc, int sect)
{
	struct nand_chip *nand = &nfc->nand;

	return nand->oob_poi + nand->ecc.steps * NFI_FDM_SIZE +
	       sect * (nfc->spare_per_sector - NFI_FDM_SIZE);
}

/* Start of sector @sect's data inside a full-page buffer @buf. */
static inline u8 *page_data_ptr(struct nand_chip *nand, const u8 *buf,
				int sect)
{
	return (u8 *)buf + sect * nand->ecc.size;
}
282
283 static int mt7621_ecc_wait_idle(struct mt7621_nfc *nfc, u32 reg)
284 {
285 struct device *dev = nfc->dev;
286 u32 val;
287 int ret;
288
289 ret = readw_poll_timeout_atomic(nfc->ecc_regs + reg, val,
290 val & ECC_IDLE, 10,
291 ECC_ENGINE_TIMEOUT);
292 if (ret) {
293 dev_warn(dev, "ECC engine timed out entering idle mode\n");
294 return -EIO;
295 }
296
297 return 0;
298 }
299
300 static int mt7621_ecc_decoder_wait_done(struct mt7621_nfc *nfc, u32 sect)
301 {
302 struct device *dev = nfc->dev;
303 u32 val;
304 int ret;
305
306 ret = readw_poll_timeout_atomic(nfc->ecc_regs + ECC_DECDONE, val,
307 val & (1 << sect), 10,
308 ECC_ENGINE_TIMEOUT);
309
310 if (ret) {
311 dev_warn(dev, "ECC decoder for sector %d timed out\n",
312 sect);
313 return -ETIMEDOUT;
314 }
315
316 return 0;
317 }
318
/* Enable/disable the ECC encoder, waiting for the engine to go idle first. */
static void mt7621_ecc_encoder_op(struct mt7621_nfc *nfc, bool enable)
{
	mt7621_ecc_wait_idle(nfc, ECC_ENCIDLE);
	ecc_write16(nfc, ECC_ENCCON, enable ? ENC_EN : 0);
}

/* Enable/disable the ECC decoder, waiting for the engine to go idle first. */
static void mt7621_ecc_decoder_op(struct mt7621_nfc *nfc, bool enable)
{
	mt7621_ecc_wait_idle(nfc, ECC_DECIDLE);
	ecc_write16(nfc, ECC_DECCON, enable ? DEC_EN : 0);
}
330
/*
 * Read the decoder's error report for sector @sect and flip the reported
 * error bits back in @sector_buf (data area) and/or @fdm_buf (FDM area).
 * Either buffer may be NULL; the error positions are still consumed.
 *
 * Returns the number of bitflips corrected, 0 if the sector was clean, or
 * -1 if the error counter saturated (uncorrectable sector).
 */
static int mt7621_ecc_correct_check(struct mt7621_nfc *nfc, u8 *sector_buf,
				    u8 *fdm_buf, u32 sect)
{
	struct nand_chip *nand = &nfc->nand;
	u32 decnum, num_error_bits, fdm_end_bits;
	u32 error_locations, error_bit_loc;
	u32 error_byte_pos, error_bit_pos;
	int bitflips = 0;
	u32 i;

	/*
	 * DECENUM packs one 4-bit error count per sector; (sect << ERRNUM_S)
	 * selects the sector's nibble (ERRNUM_S doubles as log2 of the field
	 * width here).
	 */
	decnum = ecc_read32(nfc, ECC_DECENUM);
	num_error_bits = (decnum >> (sect << ERRNUM_S)) & ERRNUM_M;
	/* bit offset where the FDM region ends (data + FDM, in bits) */
	fdm_end_bits = (nand->ecc.size + NFI_FDM_SIZE) << 3;

	if (!num_error_bits)
		return 0;

	/* an all-ones count means the decoder could not correct the sector */
	if (num_error_bits == ERRNUM_M)
		return -1;

	for (i = 0; i < num_error_bits; i++) {
		/* two 13-bit error locations are packed per DECEL register */
		error_locations = ecc_read32(nfc, ECC_DECEL(i / 2));
		error_bit_loc = (error_locations >> ((i % 2) * DEC_EL_ODD_S)) &
				DEC_EL_M;
		error_byte_pos = error_bit_loc >> DEC_EL_BYTE_POS_S;
		error_bit_pos = error_bit_loc & DEC_EL_BIT_POS_M;

		if (error_bit_loc < (nand->ecc.size << 3)) {
			/* flip falls inside the sector data */
			if (sector_buf) {
				sector_buf[error_byte_pos] ^=
					(1 << error_bit_pos);
			}
		} else if (error_bit_loc < fdm_end_bits) {
			/* flip falls inside the FDM bytes */
			if (fdm_buf) {
				fdm_buf[error_byte_pos - nand->ecc.size] ^=
					(1 << error_bit_pos);
			}
		}

		bitflips++;
	}

	return bitflips;
}
375
376 static int mt7621_nfc_wait_write_completion(struct mt7621_nfc *nfc,
377 struct nand_chip *nand)
378 {
379 struct device *dev = nfc->dev;
380 u16 val;
381 int ret;
382
383 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_ADDRCNTR, val,
384 ((val & SEC_CNTR_M) >> SEC_CNTR_S) >= nand->ecc.steps, 10,
385 NFI_CORE_TIMEOUT);
386
387 if (ret) {
388 dev_warn(dev, "NFI core write operation timed out\n");
389 return -ETIMEDOUT;
390 }
391
392 return ret;
393 }
394
395 static void mt7621_nfc_hw_reset(struct mt7621_nfc *nfc)
396 {
397 u32 val;
398 int ret;
399
400 /* reset all registers and force the NFI master to terminate */
401 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
402
403 /* wait for the master to finish the last transaction */
404 ret = readw_poll_timeout(nfc->nfi_regs + NFI_MASTER_STA, val,
405 !(val & MASTER_STA_MASK), 50,
406 NFI_RESET_TIMEOUT);
407 if (ret) {
408 dev_warn(nfc->dev, "Failed to reset NFI master in %dms\n",
409 NFI_RESET_TIMEOUT);
410 }
411
412 /* ensure any status register affected by the NFI master is reset */
413 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
414 nfi_write16(nfc, NFI_STRDATA, 0);
415 }
416
/*
 * One-time controller bring-up: program R/B# polling, reset the core and
 * load the default (conservative) access timing into ACCCON.
 */
static inline void mt7621_nfc_hw_init(struct mt7621_nfc *nfc)
{
	u32 acccon;

	/*
	 * CNRNB: nand ready/busy register
	 * -------------------------------
	 * 7:4: timeout register for polling the NAND busy/ready signal
	 * 0  : poll the status of the busy/ready signal after [7:4]*16 cycles.
	 */
	nfi_write16(nfc, NFI_CNRNB, CB2R_TIME_M | STR_CNRNB);

	mt7621_nfc_hw_reset(nfc);

	/* Apply default access timing */
	acccon = ACCTIMING(ACCCON_POECS_DEF, ACCCON_PRECS_DEF, ACCCON_C2R_DEF,
			   ACCCON_W2R_DEF, ACCCON_WH_DEF, ACCCON_WST_DEF,
			   ACCCON_RLT_DEF);

	nfi_write32(nfc, NFI_ACCCON, acccon);
}
438
439 static int mt7621_nfc_send_command(struct mt7621_nfc *nfc, u8 command)
440 {
441 struct device *dev = nfc->dev;
442 u32 val;
443 int ret;
444
445 nfi_write32(nfc, NFI_CMD, command);
446
447 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
448 !(val & STA_CMD), 10,
449 NFI_CORE_TIMEOUT);
450 if (ret) {
451 dev_warn(dev, "NFI core timed out entering command mode\n");
452 return -EIO;
453 }
454
455 return 0;
456 }
457
458 static int mt7621_nfc_send_address_byte(struct mt7621_nfc *nfc, int addr)
459 {
460 struct device *dev = nfc->dev;
461 u32 val;
462 int ret;
463
464 nfi_write32(nfc, NFI_COLADDR, addr);
465 nfi_write32(nfc, NFI_ROWADDR, 0);
466 nfi_write16(nfc, NFI_ADDRNOB, 1);
467
468 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
469 !(val & STA_ADDR), 10,
470 NFI_CORE_TIMEOUT);
471 if (ret) {
472 dev_warn(dev, "NFI core timed out entering address mode\n");
473 return -EIO;
474 }
475
476 return 0;
477 }
478
479 static int mt7621_nfc_send_address(struct mt7621_nfc *nfc, const u8 *addr,
480 unsigned int naddrs)
481 {
482 int ret;
483
484 while (naddrs) {
485 ret = mt7621_nfc_send_address_byte(nfc, *addr);
486 if (ret)
487 return ret;
488
489 addr++;
490 naddrs--;
491 }
492
493 return 0;
494 }
495
496 static void mt7621_nfc_wait_pio_ready(struct mt7621_nfc *nfc)
497 {
498 struct device *dev = nfc->dev;
499 int ret;
500 u16 val;
501
502 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_PIO_DIRDY, val,
503 val & PIO_DIRDY, 10,
504 NFI_CORE_TIMEOUT);
505 if (ret < 0)
506 dev_err(dev, "NFI core PIO mode not ready\n");
507 }
508
/*
 * Read one unit from the NFI data FIFO in PIO mode: a single byte when
 * @br is true, a 32-bit word otherwise. If the NFI state machine is not
 * already in the custom-data state, (re)program CNFG/CON and trigger a
 * new burst read first.
 */
static u32 mt7621_nfc_pio_read(struct mt7621_nfc *nfc, bool br)
{
	u32 reg;

	/* after each byte read, the NFI_STA reg is reset by the hardware */
	reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
	if (reg != STA_FSM_CUSTOM_DATA) {
		/* select byte- or word-wide PIO reads */
		reg = nfi_read16(nfc, NFI_CNFG);
		reg |= CNFG_READ_MODE | CNFG_BYTE_RW;
		if (!br)
			reg &= ~CNFG_BYTE_RW;
		nfi_write16(nfc, NFI_CNFG, reg);

		/*
		 * set to max sector to allow the HW to continue reading over
		 * unaligned accesses
		 */
		nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BRD);

		/* trigger to fetch data */
		nfi_write16(nfc, NFI_STRDATA, STR_DATA);
	}

	mt7621_nfc_wait_pio_ready(nfc);

	return nfi_read32(nfc, NFI_DATAR);
}
536
/*
 * PIO-read @len bytes into @buf: byte reads until @buf is 4-byte aligned,
 * then 32-bit word reads, then byte reads for the tail.
 */
static void mt7621_nfc_read_data(struct mt7621_nfc *nfc, u8 *buf, u32 len)
{
	/* head: bring the destination pointer to word alignment */
	while (((uintptr_t)buf & 3) && len) {
		*buf = mt7621_nfc_pio_read(nfc, true);
		buf++;
		len--;
	}

	/* bulk: aligned 32-bit transfers */
	while (len >= 4) {
		*(u32 *)buf = mt7621_nfc_pio_read(nfc, false);
		buf += 4;
		len -= 4;
	}

	/* tail: remaining bytes */
	while (len) {
		*buf = mt7621_nfc_pio_read(nfc, true);
		buf++;
		len--;
	}
}
557
558 static void mt7621_nfc_read_data_discard(struct mt7621_nfc *nfc, u32 len)
559 {
560 while (len >= 4) {
561 mt7621_nfc_pio_read(nfc, false);
562 len -= 4;
563 }
564
565 while (len) {
566 mt7621_nfc_pio_read(nfc, true);
567 len--;
568 }
569 }
570
/*
 * Write one unit to the NFI data FIFO in PIO mode: a single byte when
 * @bw is true, a 32-bit word otherwise. If the NFI state machine is not
 * already in the custom-data state, (re)program CNFG/CON and trigger a
 * new burst write first.
 */
static void mt7621_nfc_pio_write(struct mt7621_nfc *nfc, u32 val, bool bw)
{
	u32 reg;

	reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
	if (reg != STA_FSM_CUSTOM_DATA) {
		/* select byte- or word-wide PIO writes */
		reg = nfi_read16(nfc, NFI_CNFG);
		reg &= ~(CNFG_READ_MODE | CNFG_BYTE_RW);
		if (bw)
			reg |= CNFG_BYTE_RW;
		nfi_write16(nfc, NFI_CNFG, reg);

		/* max sector count, then trigger the burst write */
		nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BWR);
		nfi_write16(nfc, NFI_STRDATA, STR_DATA);
	}

	mt7621_nfc_wait_pio_ready(nfc);
	nfi_write32(nfc, NFI_DATAW, val);
}
590
/*
 * PIO-write @len bytes from @buf: byte writes until @buf is 4-byte
 * aligned, then 32-bit word writes, then byte writes for the tail.
 */
static void mt7621_nfc_write_data(struct mt7621_nfc *nfc, const u8 *buf,
				  u32 len)
{
	/* head: bring the source pointer to word alignment */
	while (((uintptr_t)buf & 3) && len) {
		mt7621_nfc_pio_write(nfc, *buf, true);
		buf++;
		len--;
	}

	/* bulk: aligned 32-bit transfers */
	while (len >= 4) {
		mt7621_nfc_pio_write(nfc, *(const u32 *)buf, false);
		buf += 4;
		len -= 4;
	}

	/* tail: remaining bytes */
	while (len) {
		mt7621_nfc_pio_write(nfc, *buf, true);
		buf++;
		len--;
	}
}
612
613 static void mt7621_nfc_write_data_empty(struct mt7621_nfc *nfc, u32 len)
614 {
615 while (len >= 4) {
616 mt7621_nfc_pio_write(nfc, 0xffffffff, false);
617 len -= 4;
618 }
619
620 while (len) {
621 mt7621_nfc_pio_write(nfc, 0xff, true);
622 len--;
623 }
624 }
625
626 static int mt7621_nfc_dev_ready(struct mt7621_nfc *nfc,
627 unsigned int timeout_ms)
628 {
629 u32 val;
630
631 return readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
632 !(val & STA_BUSY), 10,
633 timeout_ms * 1000);
634 }
635
/*
 * Execute a single parsed NAND instruction (command, address cycles,
 * data in/out, wait-ready) on the NFI core. Helper for
 * mt7621_nfc_exec_op().
 */
static int mt7621_nfc_exec_instr(struct nand_chip *nand,
				 const struct nand_op_instr *instr)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);

	switch (instr->type) {
	case NAND_OP_CMD_INSTR:
		/* every command cycle starts from a clean controller state */
		mt7621_nfc_hw_reset(nfc);
		nfi_write16(nfc, NFI_CNFG, CNFG_OP_CUSTOM << CNFG_OP_MODE_S);
		return mt7621_nfc_send_command(nfc, instr->ctx.cmd.opcode);
	case NAND_OP_ADDR_INSTR:
		return mt7621_nfc_send_address(nfc, instr->ctx.addr.addrs,
					       instr->ctx.addr.naddrs);
	case NAND_OP_DATA_IN_INSTR:
		mt7621_nfc_read_data(nfc, instr->ctx.data.buf.in,
				     instr->ctx.data.len);
		return 0;
	case NAND_OP_DATA_OUT_INSTR:
		mt7621_nfc_write_data(nfc, instr->ctx.data.buf.out,
				      instr->ctx.data.len);
		return 0;
	case NAND_OP_WAITRDY_INSTR:
		return mt7621_nfc_dev_ready(nfc,
					    instr->ctx.waitrdy.timeout_ms);
	default:
		WARN_ONCE(1, "unsupported NAND instruction type: %d\n",
			  instr->type);

		return -EINVAL;
	}
}
667
668 static int mt7621_nfc_exec_op(struct nand_chip *nand,
669 const struct nand_operation *op, bool check_only)
670 {
671 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
672 int i, ret;
673
674 if (check_only)
675 return 0;
676
677 /* Only CS0 available */
678 nfi_write16(nfc, NFI_CSEL, 0);
679
680 for (i = 0; i < op->ninstrs; i++) {
681 ret = mt7621_nfc_exec_instr(nand, &op->instrs[i]);
682 if (ret)
683 return ret;
684 }
685
686 return 0;
687 }
688
/*
 * ->setup_interface() hook: translate the SDR timings requested by the
 * NAND core into ACCCON timing fields based on the real NFI clock rate.
 * Each field is computed in clock cycles (timings are in ps, rate in kHz,
 * so cycles = DIV_ROUND_UP(t_ns * rate_khz, 1e6)) and clamped to its
 * maximum. Returns -ENOTSUPP if no clock is available, the interface is
 * not SDR, or (check-only) the WE#/RE# low times fall below the HW minimum.
 */
static int mt7621_nfc_setup_interface(struct nand_chip *nand, int csline,
				      const struct nand_interface_config *conf)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	const struct nand_sdr_timings *timings;
	u32 acccon, temp, rate, tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt;

	if (!nfc->nfi_clk)
		return -ENOTSUPP;

	timings = nand_get_sdr_timings(conf);
	if (IS_ERR(timings))
		return -ENOTSUPP;

	rate = clk_get_rate(nfc->nfi_clk);

	/* turn clock rate into KHZ */
	rate /= 1000;

	tpoecs = max(timings->tALH_min, timings->tCLH_min) / 1000;
	tpoecs = DIV_ROUND_UP(tpoecs * rate, 1000000);
	tpoecs = min_t(u32, tpoecs, ACCCON_POECS_MAX);

	tprecs = max(timings->tCLS_min, timings->tALS_min) / 1000;
	tprecs = DIV_ROUND_UP(tprecs * rate, 1000000);
	tprecs = min_t(u32, tprecs, ACCCON_PRECS_MAX);

	/* sdr interface has no tCR which means CE# low to RE# low */
	tc2r = 0;

	tw2r = timings->tWHR_min / 1000;
	tw2r = DIV_ROUND_UP(tw2r * rate, 1000000);
	/* W2R field counts in units of 2 cycles */
	tw2r = DIV_ROUND_UP(tw2r - 1, 2);
	tw2r = min_t(u32, tw2r, ACCCON_W2R_MAX);

	twh = max(timings->tREH_min, timings->tWH_min) / 1000;
	twh = DIV_ROUND_UP(twh * rate, 1000000) - 1;
	twh = min_t(u32, twh, ACCCON_WH_MAX);

	/* Calculate real WE#/RE# hold time in nanosecond */
	temp = (twh + 1) * 1000000 / rate;
	/* nanosecond to picosecond */
	temp *= 1000;

	/*
	 * WE# low level time should be expanded to meet WE# pulse time
	 * and WE# cycle time at the same time.
	 */
	if (temp < timings->tWC_min)
		twst = timings->tWC_min - temp;
	else
		twst = 0;
	twst = max(timings->tWP_min, twst) / 1000;
	twst = DIV_ROUND_UP(twst * rate, 1000000) - 1;
	twst = min_t(u32, twst, ACCCON_WST_MAX);

	/*
	 * RE# low level time should be expanded to meet RE# pulse time
	 * and RE# cycle time at the same time.
	 */
	if (temp < timings->tRC_min)
		trlt = timings->tRC_min - temp;
	else
		trlt = 0;
	trlt = max(trlt, timings->tRP_min) / 1000;
	trlt = DIV_ROUND_UP(trlt * rate, 1000000) - 1;
	trlt = min_t(u32, trlt, ACCCON_RLT_MAX);

	if (csline == NAND_DATA_IFACE_CHECK_ONLY) {
		if (twst < ACCCON_WST_MIN || trlt < ACCCON_RLT_MIN)
			return -ENOTSUPP;
	}

	acccon = ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt);

	dev_dbg(nfc->dev, "Using programmed access timing: %08x\n", acccon);

	nfi_write32(nfc, NFI_ACCCON, acccon);

	return 0;
}
770
/*
 * Pick the strongest supported ECC strength whose parity data fits in
 * @avail_ecc_bytes (spare bytes per sector minus the FDM region).
 *
 * On success, updates nand->ecc.strength and nand->ecc.bytes and returns
 * the index into mt7621_ecc_strength[] — callers use this index as the
 * value for the ENC/DEC TNUM register fields. Returns -EINVAL when even
 * the weakest strength does not fit.
 */
static int mt7621_nfc_calc_ecc_strength(struct mt7621_nfc *nfc,
					u32 avail_ecc_bytes)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	u32 strength;
	int i;

	/* maximum correctable bits the available parity bytes allow */
	strength = avail_ecc_bytes * 8 / ECC_PARITY_BITS;

	/* Find the closest supported ecc strength */
	for (i = ARRAY_SIZE(mt7621_ecc_strength) - 1; i >= 0; i--) {
		if (mt7621_ecc_strength[i] <= strength)
			break;
	}

	if (unlikely(i < 0)) {
		dev_err(nfc->dev, "OOB size (%u) is not supported\n",
			mtd->oobsize);
		return -EINVAL;
	}

	nand->ecc.strength = mt7621_ecc_strength[i];
	nand->ecc.bytes =
		DIV_ROUND_UP(nand->ecc.strength * ECC_PARITY_BITS, 8);

	dev_info(nfc->dev, "ECC strength adjusted to %u bits\n",
		 nand->ecc.strength);

	return i;
}
802
803 static int mt7621_nfc_set_spare_per_sector(struct mt7621_nfc *nfc)
804 {
805 struct nand_chip *nand = &nfc->nand;
806 struct mtd_info *mtd = nand_to_mtd(nand);
807 u32 size;
808 int i;
809
810 size = nand->ecc.bytes + NFI_FDM_SIZE;
811
812 /* Find the closest supported spare size */
813 for (i = 0; i < ARRAY_SIZE(mt7621_nfi_spare_size); i++) {
814 if (mt7621_nfi_spare_size[i] >= size)
815 break;
816 }
817
818 if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_spare_size))) {
819 dev_err(nfc->dev, "OOB size (%u) is not supported\n",
820 mtd->oobsize);
821 return -EINVAL;
822 }
823
824 nfc->spare_per_sector = mt7621_nfi_spare_size[i];
825
826 return i;
827 }
828
/*
 * Configure the HW ECC engine for the attached chip: fixed 512-byte
 * sectors, strength chosen from the spare area left after the FDM bytes.
 * Programs ENCCNFG/DECCNFG and points ECC_FDMADDR at the NFI FDM
 * registers so the engine can protect/correct the FDM bytes too.
 */
static int mt7621_nfc_ecc_init(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	u32 spare_per_sector, encode_block_size, decode_block_size;
	u32 ecc_enccfg, ecc_deccfg;
	int ecc_cap;

	/* Only hardware ECC mode is supported */
	if (nand->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) {
		dev_err(nfc->dev, "Only hardware ECC mode is supported\n");
		return -EINVAL;
	}

	nand->ecc.size = ECC_SECTOR_SIZE;
	nand->ecc.steps = mtd->writesize / nand->ecc.size;

	spare_per_sector = mtd->oobsize / nand->ecc.steps;

	/* ecc_cap is the TNUM index for both encoder and decoder */
	ecc_cap = mt7621_nfc_calc_ecc_strength(nfc,
					       spare_per_sector - NFI_FDM_SIZE);
	if (ecc_cap < 0)
		return ecc_cap;

	/* Sector + FDM */
	encode_block_size = (nand->ecc.size + NFI_FDM_SIZE) * 8;
	ecc_enccfg = ecc_cap | (ENC_MODE_NFI << ENC_MODE_S) |
		     (encode_block_size << ENC_CNFG_MSG_S);

	/* Sector + FDM + ECC parity bits */
	decode_block_size = ((nand->ecc.size + NFI_FDM_SIZE) * 8) +
			    nand->ecc.strength * ECC_PARITY_BITS;
	ecc_deccfg = ecc_cap | (DEC_MODE_NFI << DEC_MODE_S) |
		     (decode_block_size << DEC_CS_S) |
		     (DEC_CON_EL << DEC_CON_S) | DEC_EMPTY_EN;

	/* let the ECC engine access the FDM registers directly */
	ecc_write32(nfc, ECC_FDMADDR, nfc->nfi_base + NFI_FDML(0));

	mt7621_ecc_encoder_op(nfc, false);
	ecc_write32(nfc, ECC_ENCCNFG, ecc_enccfg);

	mt7621_ecc_decoder_op(nfc, false);
	ecc_write32(nfc, ECC_DECCNFG, ecc_deccfg);

	return 0;
}
875
/*
 * Program NFI_PAGEFMT with the page size, spare size per sector and FDM
 * sizes matching the attached chip. Returns 0 on success, -EINVAL if the
 * spare or page size is not supported by the controller.
 */
static int mt7621_nfc_set_page_format(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);
	int i, spare_size;
	u32 pagefmt;

	spare_size = mt7621_nfc_set_spare_per_sector(nfc);
	if (spare_size < 0)
		return spare_size;

	/* index into mt7621_nfi_page_size[] doubles as the PAGE field */
	for (i = 0; i < ARRAY_SIZE(mt7621_nfi_page_size); i++) {
		if (mt7621_nfi_page_size[i] == mtd->writesize)
			break;
	}

	if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_page_size))) {
		dev_err(nfc->dev, "Page size (%u) is not supported\n",
			mtd->writesize);
		return -EINVAL;
	}

	pagefmt = i | (spare_size << PAGEFMT_SPARE_S) |
		  (NFI_FDM_SIZE << PAGEFMT_FDM_S) |
		  (NFI_FDM_SIZE << PAGEFMT_FDM_ECC_S);

	nfi_write16(nfc, NFI_PAGEFMT, pagefmt);

	return 0;
}
906
907 static int mt7621_nfc_attach_chip(struct nand_chip *nand)
908 {
909 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
910 int ret;
911
912 if (nand->options & NAND_BUSWIDTH_16) {
913 dev_err(nfc->dev, "16-bit buswidth is not supported");
914 return -EINVAL;
915 }
916
917 ret = mt7621_nfc_ecc_init(nfc);
918 if (ret)
919 return ret;
920
921 return mt7621_nfc_set_page_format(nfc);
922 }
923
/* Controller hooks handed to the raw NAND core. */
static const struct nand_controller_ops mt7621_nfc_controller_ops = {
	.attach_chip = mt7621_nfc_attach_chip,
	.exec_op = mt7621_nfc_exec_op,
	.setup_interface = mt7621_nfc_setup_interface,
};
929
930 static int mt7621_nfc_ooblayout_free(struct mtd_info *mtd, int section,
931 struct mtd_oob_region *oob_region)
932 {
933 struct nand_chip *nand = mtd_to_nand(mtd);
934
935 if (section >= nand->ecc.steps)
936 return -ERANGE;
937
938 oob_region->length = NFI_FDM_SIZE - 1;
939 oob_region->offset = section * NFI_FDM_SIZE + 1;
940
941 return 0;
942 }
943
944 static int mt7621_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
945 struct mtd_oob_region *oob_region)
946 {
947 struct nand_chip *nand = mtd_to_nand(mtd);
948
949 if (section)
950 return -ERANGE;
951
952 oob_region->offset = NFI_FDM_SIZE * nand->ecc.steps;
953 oob_region->length = mtd->oobsize - oob_region->offset;
954
955 return 0;
956 }
957
/* OOB layout callbacks registered on the mtd device. */
static const struct mtd_ooblayout_ops mt7621_nfc_ooblayout_ops = {
	.free = mt7621_nfc_ooblayout_free,
	.ecc = mt7621_nfc_ooblayout_ecc,
};
962
963 static void mt7621_nfc_write_fdm(struct mt7621_nfc *nfc)
964 {
965 struct nand_chip *nand = &nfc->nand;
966 u32 vall, valm;
967 u8 *oobptr;
968 int i, j;
969
970 for (i = 0; i < nand->ecc.steps; i++) {
971 vall = 0;
972 valm = 0;
973 oobptr = oob_fdm_ptr(nand, i);
974
975 for (j = 0; j < 4; j++)
976 vall |= (u32)oobptr[j] << (j * 8);
977
978 for (j = 0; j < 4; j++)
979 valm |= (u32)oobptr[j + 4] << (j * 8);
980
981 nfi_write32(nfc, NFI_FDML(i), vall);
982 nfi_write32(nfc, NFI_FDMM(i), valm);
983 }
984 }
985
986 static void mt7621_nfc_read_sector_fdm(struct mt7621_nfc *nfc, u32 sect)
987 {
988 struct nand_chip *nand = &nfc->nand;
989 u32 vall, valm;
990 u8 *oobptr;
991 int i;
992
993 vall = nfi_read32(nfc, NFI_FDML(sect));
994 valm = nfi_read32(nfc, NFI_FDMM(sect));
995 oobptr = oob_fdm_ptr(nand, sect);
996
997 for (i = 0; i < 4; i++)
998 oobptr[i] = (vall >> (i * 8)) & 0xff;
999
1000 for (i = 0; i < 4; i++)
1001 oobptr[i + 4] = (valm >> (i * 8)) & 0xff;
1002 }
1003
/*
 * Page read with on-the-fly HW ECC correction. All sectors are read even
 * after a failure so that FDM bytes and mtd ecc_stats stay complete.
 * Returns the accumulated corrected-bitflip count (NOTE(review): this is
 * the sum over sectors, not the per-step maximum — verify against the
 * mtd core's expectations), -EIO on decoder timeout, or -EBADMSG on an
 * uncorrectable sector. @buf may be NULL to fetch only the OOB/FDM data.
 */
static int mt7621_nfc_read_page_hwecc(struct nand_chip *nand, uint8_t *buf,
				      int oob_required, int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	struct mtd_info *mtd = nand_to_mtd(nand);
	int bitflips = 0;
	int rc, i;

	nand_read_page_op(nand, page, 0, NULL, 0);

	/* auto-format + HW ECC: controller strips spare and decodes */
	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_READ_MODE | CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);

	mt7621_ecc_decoder_op(nfc, true);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		if (buf)
			mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
					     nand->ecc.size);
		else
			mt7621_nfc_read_data_discard(nfc, nand->ecc.size);

		rc = mt7621_ecc_decoder_wait_done(nfc, i);

		/* FDM registers are only valid once the decoder is done */
		mt7621_nfc_read_sector_fdm(nfc, i);

		if (rc < 0) {
			bitflips = -EIO;
			continue;
		}

		rc = mt7621_ecc_correct_check(nfc,
			buf ? page_data_ptr(nand, buf, i) : NULL,
			oob_fdm_ptr(nand, i), i);

		if (rc < 0) {
			dev_dbg(nfc->dev,
				"Uncorrectable ECC error at page %d.%d\n",
				page, i);
			bitflips = -EBADMSG;
			mtd->ecc_stats.failed++;
		} else if (bitflips >= 0) {
			/* only accumulate while no sector has failed */
			bitflips += rc;
			mtd->ecc_stats.corrected += rc;
		}
	}

	mt7621_ecc_decoder_op(nfc, false);

	nfi_write16(nfc, NFI_CON, 0);

	return bitflips;
}
1060
/*
 * Raw page read: no ECC, no auto-format. For each sector, read the data,
 * the FDM bytes and the ECC parity bytes straight into @buf/oob_poi in
 * the controller's on-flash order. @buf may be NULL to skip the data.
 */
static int mt7621_nfc_read_page_raw(struct nand_chip *nand, uint8_t *buf,
				    int oob_required, int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int i;

	nand_read_page_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_READ_MODE);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		/* Read data */
		if (buf)
			mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
					     nand->ecc.size);
		else
			mt7621_nfc_read_data_discard(nfc, nand->ecc.size);

		/* Read FDM */
		mt7621_nfc_read_data(nfc, oob_fdm_ptr(nand, i), NFI_FDM_SIZE);

		/* Read ECC parity data */
		mt7621_nfc_read_data(nfc, oob_ecc_ptr(nfc, i),
				     nfc->spare_per_sector - NFI_FDM_SIZE);
	}

	nfi_write16(nfc, NFI_CON, 0);

	return 0;
}
1095
/* OOB read with ECC: full page read, data area discarded (buf == NULL). */
static int mt7621_nfc_read_oob_hwecc(struct nand_chip *nand, int page)
{
	return mt7621_nfc_read_page_hwecc(nand, NULL, 1, page);
}
1100
/* Raw OOB read: full raw page read, data area discarded (buf == NULL). */
static int mt7621_nfc_read_oob_raw(struct nand_chip *nand, int page)
{
	return mt7621_nfc_read_page_raw(nand, NULL, 1, page);
}
1105
1106 static int mt7621_nfc_check_empty_page(struct nand_chip *nand, const u8 *buf)
1107 {
1108 struct mtd_info *mtd = nand_to_mtd(nand);
1109 uint32_t i, j;
1110 u8 *oobptr;
1111
1112 if (buf) {
1113 for (i = 0; i < mtd->writesize; i++)
1114 if (buf[i] != 0xff)
1115 return 0;
1116 }
1117
1118 for (i = 0; i < nand->ecc.steps; i++) {
1119 oobptr = oob_fdm_ptr(nand, i);
1120 for (j = 0; j < NFI_FDM_SIZE; j++)
1121 if (oobptr[j] != 0xff)
1122 return 0;
1123 }
1124
1125 return 1;
1126 }
1127
/*
 * Page write with HW ECC. Erased pages are deliberately skipped (see
 * comment below). The FDM bytes come from nand->oob_poi via the FDM
 * registers; @buf may be NULL, in which case the data area is written
 * as 0xff filler.
 */
static int mt7621_nfc_write_page_hwecc(struct nand_chip *nand,
				       const uint8_t *buf, int oob_required,
				       int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	struct mtd_info *mtd = nand_to_mtd(nand);

	if (mt7621_nfc_check_empty_page(nand, buf)) {
		/*
		 * MT7621 ECC engine always generates parity code for input
		 * pages, even for empty pages. Doing so will write back ECC
		 * parity code to the oob region, which means such pages will
		 * no longer be empty pages.
		 *
		 * To avoid this, stop write operation if current page is an
		 * empty page.
		 */
		return 0;
	}

	nand_prog_page_begin_op(nand, page, 0, NULL, 0);

	/* auto-format + HW ECC: controller appends FDM and parity */
	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
		    CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);

	mt7621_ecc_encoder_op(nfc, true);

	/* FDM registers must hold the OOB bytes before the burst starts */
	mt7621_nfc_write_fdm(nfc);

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));

	if (buf)
		mt7621_nfc_write_data(nfc, buf, mtd->writesize);
	else
		mt7621_nfc_write_data_empty(nfc, mtd->writesize);

	mt7621_nfc_wait_write_completion(nfc, nand);

	mt7621_ecc_encoder_op(nfc, false);

	nfi_write16(nfc, NFI_CON, 0);

	return nand_prog_page_end_op(nand);
}
1173
/*
 * Raw page write: no ECC, no auto-format. Per sector, write the data
 * (or 0xff filler when @buf is NULL), the FDM bytes from oob_poi and
 * 0xff in place of the ECC parity bytes.
 */
static int mt7621_nfc_write_page_raw(struct nand_chip *nand,
				     const uint8_t *buf, int oob_required,
				     int page)
{
	struct mt7621_nfc *nfc = nand_get_controller_data(nand);
	int i;

	nand_prog_page_begin_op(nand, page, 0, NULL, 0);

	nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S));

	nfi_write16(nfc, NFI_CON,
		    CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));

	for (i = 0; i < nand->ecc.steps; i++) {
		/* Write data */
		if (buf)
			mt7621_nfc_write_data(nfc, page_data_ptr(nand, buf, i),
					      nand->ecc.size);
		else
			mt7621_nfc_write_data_empty(nfc, nand->ecc.size);

		/* Write FDM */
		mt7621_nfc_write_data(nfc, oob_fdm_ptr(nand, i),
				      NFI_FDM_SIZE);

		/* Write dummy ECC parity data */
		mt7621_nfc_write_data_empty(nfc, nfc->spare_per_sector -
					    NFI_FDM_SIZE);
	}

	mt7621_nfc_wait_write_completion(nfc, nand);

	nfi_write16(nfc, NFI_CON, 0);

	return nand_prog_page_end_op(nand);
}
1211
/*
 * OOB-only write with hardware ECC: reuses the full-page path with a
 * NULL data buffer (main area is written as 0xFF, FDM from oob_poi).
 */
static int mt7621_nfc_write_oob_hwecc(struct nand_chip *nand, int page)
{
	return mt7621_nfc_write_page_hwecc(nand, NULL, 1, page);
}
1216
/*
 * OOB-only write in raw mode: reuses the raw full-page path with a NULL
 * data buffer (main area is written as 0xFF, FDM from oob_poi).
 */
static int mt7621_nfc_write_oob_raw(struct nand_chip *nand, int page)
{
	return mt7621_nfc_write_page_raw(nand, NULL, 1, page);
}
1221
/*
 * Wire up the single NAND chip behind the controller, scan it, attach
 * the BMT (bad-block management table) layer, and register the MTD.
 *
 * Returns 0 on success or a negative errno; on failure after nand_scan()
 * the chip and BMT state are torn down again.
 */
static int mt7621_nfc_init_chip(struct mt7621_nfc *nfc)
{
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd;
	int ret;

	nand->controller = &nfc->controller;
	nand_set_controller_data(nand, (void *)nfc);
	nand_set_flash_node(nand, nfc->dev->of_node);

	/*
	 * NAND_SKIP_BBTSCAN: skip the in-flash bad-block-table scan;
	 * bad-block handling is presumably taken over by the mtk_bmt
	 * layer attached below — NOTE(review): confirm against mtk_bmt.
	 */
	nand->options |= NAND_USES_DMA | NAND_NO_SUBPAGE_WRITE | NAND_SKIP_BBTSCAN;
	/* Without a controllable NFI clock, keep the bootloader timings. */
	if (!nfc->nfi_clk)
		nand->options |= NAND_KEEP_TIMINGS;

	nand->ecc.engine_type = NAND_ECC_ENGINE_TYPE_ON_HOST;
	nand->ecc.read_page = mt7621_nfc_read_page_hwecc;
	nand->ecc.read_page_raw = mt7621_nfc_read_page_raw;
	nand->ecc.write_page = mt7621_nfc_write_page_hwecc;
	nand->ecc.write_page_raw = mt7621_nfc_write_page_raw;
	nand->ecc.read_oob = mt7621_nfc_read_oob_hwecc;
	nand->ecc.read_oob_raw = mt7621_nfc_read_oob_raw;
	nand->ecc.write_oob = mt7621_nfc_write_oob_hwecc;
	nand->ecc.write_oob_raw = mt7621_nfc_write_oob_raw;

	mtd = nand_to_mtd(nand);
	mtd->owner = THIS_MODULE;
	mtd->dev.parent = nfc->dev;
	mtd->name = MT7621_NFC_NAME;
	mtd_set_ooblayout(mtd, &mt7621_nfc_ooblayout_ops);

	mt7621_nfc_hw_init(nfc);

	/* Single chip-select controller: scan exactly one chip. */
	ret = nand_scan(nand, 1);
	if (ret)
		return ret;

	mtk_bmt_attach(mtd);

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret) {
		dev_err(nfc->dev, "Failed to register MTD: %d\n", ret);
		mtk_bmt_detach(mtd);
		nand_cleanup(nand);
		return ret;
	}

	return 0;
}
1270
1271 static int mt7621_nfc_probe(struct platform_device *pdev)
1272 {
1273 struct device *dev = &pdev->dev;
1274 struct mt7621_nfc *nfc;
1275 struct resource *res;
1276 int ret;
1277
1278 nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
1279 if (!nfc)
1280 return -ENOMEM;
1281
1282 nand_controller_init(&nfc->controller);
1283 nfc->controller.ops = &mt7621_nfc_controller_ops;
1284 nfc->dev = dev;
1285
1286 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "nfi");
1287 nfc->nfi_base = res->start;
1288 nfc->nfi_regs = devm_ioremap_resource(dev, res);
1289 if (IS_ERR(nfc->nfi_regs)) {
1290 ret = PTR_ERR(nfc->nfi_regs);
1291 return ret;
1292 }
1293
1294 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ecc");
1295 nfc->ecc_regs = devm_ioremap_resource(dev, res);
1296 if (IS_ERR(nfc->ecc_regs)) {
1297 ret = PTR_ERR(nfc->ecc_regs);
1298 return ret;
1299 }
1300
1301 nfc->nfi_clk = devm_clk_get(dev, "nfi_clk");
1302 if (IS_ERR(nfc->nfi_clk)) {
1303 dev_warn(dev, "nfi clk not provided\n");
1304 nfc->nfi_clk = NULL;
1305 } else {
1306 ret = clk_prepare_enable(nfc->nfi_clk);
1307 if (ret) {
1308 dev_err(dev, "Failed to enable nfi core clock\n");
1309 return ret;
1310 }
1311 }
1312
1313 platform_set_drvdata(pdev, nfc);
1314
1315 ret = mt7621_nfc_init_chip(nfc);
1316 if (ret) {
1317 dev_err(dev, "Failed to initialize nand chip\n");
1318 goto clk_disable;
1319 }
1320
1321 return 0;
1322
1323 clk_disable:
1324 clk_disable_unprepare(nfc->nfi_clk);
1325
1326 return ret;
1327 }
1328
/*
 * Tear down the controller on device removal: detach the BMT layer,
 * unregister the MTD, release the NAND chip, and stop the NFI clock.
 *
 * NOTE(review): mtk_bmt_detach() runs before mtd_device_unregister(),
 * the reverse of the attach order in init_chip — presumably required by
 * the BMT layer; confirm against mtk_bmt.
 */
static int mt7621_nfc_remove(struct platform_device *pdev)
{
	struct mt7621_nfc *nfc = platform_get_drvdata(pdev);
	struct nand_chip *nand = &nfc->nand;
	struct mtd_info *mtd = nand_to_mtd(nand);

	mtk_bmt_detach(mtd);
	mtd_device_unregister(mtd);
	nand_cleanup(nand);
	/* No-op if the optional clock was absent (nfi_clk == NULL). */
	clk_disable_unprepare(nfc->nfi_clk);

	return 0;
}
1342
1343 static const struct of_device_id mt7621_nfc_id_table[] = {
1344 { .compatible = "mediatek,mt7621-nfc" },
1345 { },
1346 };
1347 MODULE_DEVICE_TABLE(of, match);
1348
1349 static struct platform_driver mt7621_nfc_driver = {
1350 .probe = mt7621_nfc_probe,
1351 .remove = mt7621_nfc_remove,
1352 .driver = {
1353 .name = MT7621_NFC_NAME,
1354 .owner = THIS_MODULE,
1355 .of_match_table = mt7621_nfc_id_table,
1356 },
1357 };
1358 module_platform_driver(mt7621_nfc_driver);
1359
1360 MODULE_LICENSE("GPL");
1361 MODULE_AUTHOR("Weijie Gao <weijie.gao@mediatek.com>");
1362 MODULE_DESCRIPTION("MediaTek MT7621 NAND Flash Controller driver");