ramips: mt7621_nand: initialize ECC_FDMADDR
openwrt/openwrt.git: target/linux/ramips/files/drivers/mtd/nand/raw/mt7621_nand.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * MediaTek MT7621 NAND Flash Controller driver
4 *
5 * Copyright (C) 2020 MediaTek Inc. All Rights Reserved.
6 *
7 * Author: Weijie Gao <weijie.gao@mediatek.com>
8 */
9
10 #include <linux/io.h>
11 #include <linux/clk.h>
12 #include <linux/init.h>
13 #include <linux/errno.h>
14 #include <linux/sizes.h>
15 #include <linux/iopoll.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/mtd/mtd.h>
19 #include <linux/mtd/rawnand.h>
20 #include <linux/mtd/partitions.h>
21 #include <linux/platform_device.h>
22 #include <asm/addrspace.h>
23
24 /* NFI core registers */
25 #define NFI_CNFG 0x000
26 #define CNFG_OP_MODE_S 12
27 #define CNFG_OP_MODE_M GENMASK(14, 12)
28 #define CNFG_OP_CUSTOM 6
29 #define CNFG_AUTO_FMT_EN BIT(9)
30 #define CNFG_HW_ECC_EN BIT(8)
31 #define CNFG_BYTE_RW BIT(6)
32 #define CNFG_READ_MODE BIT(1)
33
34 #define NFI_PAGEFMT 0x004
35 #define PAGEFMT_FDM_ECC_S 12
36 #define PAGEFMT_FDM_ECC_M GENMASK(15, 12)
37 #define PAGEFMT_FDM_S 8
38 #define PAGEFMT_FDM_M GENMASK(11, 8)
39 #define PAGEFMT_SPARE_S 4
40 #define PAGEFMT_SPARE_M GENMASK(5, 4)
41 #define PAGEFMT_PAGE_S 0
42 #define PAGEFMT_PAGE_M GENMASK(1, 0)
43
44 #define NFI_CON 0x008
45 #define CON_NFI_SEC_S 12
46 #define CON_NFI_SEC_M GENMASK(15, 12)
47 #define CON_NFI_BWR BIT(9)
48 #define CON_NFI_BRD BIT(8)
49 #define CON_NFI_RST BIT(1)
50 #define CON_FIFO_FLUSH BIT(0)
51
52 #define NFI_ACCCON 0x00c
53 #define ACCCON_POECS_S 28
54 #define ACCCON_POECS_MAX 0x0f
55 #define ACCCON_POECS_DEF 3
56 #define ACCCON_PRECS_S 22
57 #define ACCCON_PRECS_MAX 0x3f
58 #define ACCCON_PRECS_DEF 3
59 #define ACCCON_C2R_S 16
60 #define ACCCON_C2R_MAX 0x3f
61 #define ACCCON_C2R_DEF 7
62 #define ACCCON_W2R_S 12
63 #define ACCCON_W2R_MAX 0x0f
64 #define ACCCON_W2R_DEF 7
65 #define ACCCON_WH_S 8
66 #define ACCCON_WH_MAX 0x0f
67 #define ACCCON_WH_DEF 15
68 #define ACCCON_WST_S 4
69 #define ACCCON_WST_MAX 0x0f
70 #define ACCCON_WST_DEF 15
71 #define ACCCON_WST_MIN 3
72 #define ACCCON_RLT_S 0
73 #define ACCCON_RLT_MAX 0x0f
74 #define ACCCON_RLT_DEF 15
75 #define ACCCON_RLT_MIN 3
76
77 #define NFI_CMD 0x020
78
79 #define NFI_ADDRNOB 0x030
80 #define ADDR_ROW_NOB_S 4
81 #define ADDR_ROW_NOB_M GENMASK(6, 4)
82 #define ADDR_COL_NOB_S 0
83 #define ADDR_COL_NOB_M GENMASK(2, 0)
84
85 #define NFI_COLADDR 0x034
86 #define NFI_ROWADDR 0x038
87
88 #define NFI_STRDATA 0x040
89 #define STR_DATA BIT(0)
90
91 #define NFI_CNRNB 0x044
92 #define CB2R_TIME_S 4
93 #define CB2R_TIME_M GENMASK(7, 4)
94 #define STR_CNRNB BIT(0)
95
96 #define NFI_DATAW 0x050
97 #define NFI_DATAR 0x054
98
99 #define NFI_PIO_DIRDY 0x058
100 #define PIO_DIRDY BIT(0)
101
102 #define NFI_STA 0x060
103 #define STA_NFI_FSM_S 16
104 #define STA_NFI_FSM_M GENMASK(19, 16)
105 #define STA_FSM_CUSTOM_DATA 14
106 #define STA_BUSY BIT(8)
107 #define STA_ADDR BIT(1)
108 #define STA_CMD BIT(0)
109
110 #define NFI_ADDRCNTR 0x070
111 #define SEC_CNTR_S 12
112 #define SEC_CNTR_M GENMASK(15, 12)
113 #define SEC_ADDR_S 0
114 #define SEC_ADDR_M GENMASK(9, 0)
115
116 #define NFI_CSEL 0x090
117 #define CSEL_S 0
118 #define CSEL_M GENMASK(1, 0)
119
120 #define NFI_FDM0L 0x0a0
121 #define NFI_FDML(n) (0x0a0 + ((n) << 3))
122
123 #define NFI_FDM0M 0x0a4
124 #define NFI_FDMM(n) (0x0a4 + ((n) << 3))
125
126 #define NFI_MASTER_STA 0x210
127 #define MAS_ADDR GENMASK(11, 9)
128 #define MAS_RD GENMASK(8, 6)
129 #define MAS_WR GENMASK(5, 3)
130 #define MAS_RDDLY GENMASK(2, 0)
131
132 /* ECC engine registers */
133 #define ECC_ENCCON 0x000
134 #define ENC_EN BIT(0)
135
136 #define ECC_ENCCNFG 0x004
137 #define ENC_CNFG_MSG_S 16
138 #define ENC_CNFG_MSG_M GENMASK(28, 16)
139 #define ENC_MODE_S 4
140 #define ENC_MODE_M GENMASK(5, 4)
141 #define ENC_MODE_NFI 1
142 #define ENC_TNUM_S 0
143 #define ENC_TNUM_M GENMASK(2, 0)
144
145 #define ECC_ENCIDLE 0x00c
146 #define ENC_IDLE BIT(0)
147
148 #define ECC_DECCON 0x100
149 #define DEC_EN BIT(0)
150
151 #define ECC_DECCNFG 0x104
152 #define DEC_EMPTY_EN BIT(31)
153 #define DEC_CS_S 16
154 #define DEC_CS_M GENMASK(28, 16)
155 #define DEC_CON_S 12
156 #define DEC_CON_M GENMASK(13, 12)
157 #define DEC_CON_EL 2
158 #define DEC_MODE_S 4
159 #define DEC_MODE_M GENMASK(5, 4)
160 #define DEC_MODE_NFI 1
161 #define DEC_TNUM_S 0
162 #define DEC_TNUM_M GENMASK(2, 0)
163
164 #define ECC_DECIDLE 0x10c
165 #define DEC_IDLE BIT(1)
166
167 #define ECC_DECENUM 0x114
168 #define ERRNUM_S 2
169 #define ERRNUM_M GENMASK(3, 0)
170
171 #define ECC_DECDONE 0x118
172 #define DEC_DONE7 BIT(7)
173 #define DEC_DONE6 BIT(6)
174 #define DEC_DONE5 BIT(5)
175 #define DEC_DONE4 BIT(4)
176 #define DEC_DONE3 BIT(3)
177 #define DEC_DONE2 BIT(2)
178 #define DEC_DONE1 BIT(1)
179 #define DEC_DONE0 BIT(0)
180
181 #define ECC_DECEL(n) (0x11c + (n) * 4)
182 #define DEC_EL_ODD_S 16
183 #define DEC_EL_EVEN_S 0
184 #define DEC_EL_M 0x1fff
185 #define DEC_EL_BYTE_POS_S 3
186 #define DEC_EL_BIT_POS_M GENMASK(2, 0)
187
188 #define ECC_FDMADDR 0x13c
189
190 /* ENCIDLE and DECIDLE */
191 #define ECC_IDLE BIT(0)
192
193 #define ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt) \
194 ((tpoecs) << ACCCON_POECS_S | (tprecs) << ACCCON_PRECS_S | \
195 (tc2r) << ACCCON_C2R_S | (tw2r) << ACCCON_W2R_S | \
196 (twh) << ACCCON_WH_S | (twst) << ACCCON_WST_S | (trlt))
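/*
 * Worked example (values follow directly from the *_DEF constants above):
 * the reset timing programmed by mt7621_nfc_hw_init() is
 *   ACCTIMING(3, 3, 7, 7, 15, 15, 15)
 *     = (3 << 28) | (3 << 22) | (7 << 16) | (7 << 12) |
 *       (15 << 8) | (15 << 4) | 15
 *     = 0x30c77fff
 */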
197
198 #define MASTER_STA_MASK (MAS_ADDR | MAS_RD | MAS_WR | \
199 MAS_RDDLY)
200 #define NFI_RESET_TIMEOUT 1000000
201 #define NFI_CORE_TIMEOUT 500000
202 #define ECC_ENGINE_TIMEOUT 500000
203
204 #define ECC_SECTOR_SIZE 512
205 #define ECC_PARITY_BITS 13
206
207 #define NFI_FDM_SIZE 8
208
209 #define MT7621_NFC_NAME "mt7621-nand"
210
211 struct mt7621_nfc {
212 struct nand_controller controller;
213 struct nand_chip nand;
214 struct clk *nfi_clk;
215 struct device *dev;
216
217 u32 nfi_base;
218 void __iomem *nfi_regs;
219 void __iomem *ecc_regs;
220
221 u32 spare_per_sector;
222 };
223
224 static const u16 mt7621_nfi_page_size[] = { SZ_512, SZ_2K, SZ_4K };
225 static const u8 mt7621_nfi_spare_size[] = { 16, 26, 27, 28 };
226 static const u8 mt7621_ecc_strength[] = { 4, 6, 8, 10, 12 };
227
228 static inline u32 nfi_read32(struct mt7621_nfc *nfc, u32 reg)
229 {
230 return readl(nfc->nfi_regs + reg);
231 }
232
233 static inline void nfi_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
234 {
235 writel(val, nfc->nfi_regs + reg);
236 }
237
238 static inline u16 nfi_read16(struct mt7621_nfc *nfc, u32 reg)
239 {
240 return readw(nfc->nfi_regs + reg);
241 }
242
243 static inline void nfi_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
244 {
245 writew(val, nfc->nfi_regs + reg);
246 }
247
248 static inline void ecc_write16(struct mt7621_nfc *nfc, u32 reg, u16 val)
249 {
250 writew(val, nfc->ecc_regs + reg);
251 }
252
253 static inline u32 ecc_read32(struct mt7621_nfc *nfc, u32 reg)
254 {
255 return readl(nfc->ecc_regs + reg);
256 }
257
258 static inline void ecc_write32(struct mt7621_nfc *nfc, u32 reg, u32 val)
259 {
260 writel(val, nfc->ecc_regs + reg);
261 }
262
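/*
 * In-memory OOB layout used by this driver: oob_poi starts with the FDM
 * (free) bytes of every sector packed back to back (ecc.steps * NFI_FDM_SIZE
 * bytes), followed by each sector's ECC parity bytes
 * (spare_per_sector - NFI_FDM_SIZE bytes each).  The helpers below return
 * pointers into those two regions.
 */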
263 static inline u8 *oob_fdm_ptr(struct nand_chip *nand, int sect)
264 {
265 return nand->oob_poi + sect * NFI_FDM_SIZE;
266 }
267
268 static inline u8 *oob_ecc_ptr(struct mt7621_nfc *nfc, int sect)
269 {
270 struct nand_chip *nand = &nfc->nand;
271
272 return nand->oob_poi + nand->ecc.steps * NFI_FDM_SIZE +
273 sect * (nfc->spare_per_sector - NFI_FDM_SIZE);
274 }
275
276 static inline u8 *page_data_ptr(struct nand_chip *nand, const u8 *buf,
277 int sect)
278 {
279 return (u8 *)buf + sect * nand->ecc.size;
280 }
281
282 static int mt7621_ecc_wait_idle(struct mt7621_nfc *nfc, u32 reg)
283 {
284 struct device *dev = nfc->dev;
285 u32 val;
286 int ret;
287
288 ret = readw_poll_timeout_atomic(nfc->ecc_regs + reg, val,
289 val & ECC_IDLE, 10,
290 ECC_ENGINE_TIMEOUT);
291 if (ret) {
292 dev_warn(dev, "ECC engine timed out entering idle mode\n");
293 return -EIO;
294 }
295
296 return 0;
297 }
298
299 static int mt7621_ecc_decoder_wait_done(struct mt7621_nfc *nfc, u32 sect)
300 {
301 struct device *dev = nfc->dev;
302 u32 val;
303 int ret;
304
305 ret = readw_poll_timeout_atomic(nfc->ecc_regs + ECC_DECDONE, val,
306 val & (1 << sect), 10,
307 ECC_ENGINE_TIMEOUT);
308
309 if (ret) {
310 dev_warn(dev, "ECC decoder for sector %d timed out\n",
311 sect);
312 return -ETIMEDOUT;
313 }
314
315 return 0;
316 }
317
318 static void mt7621_ecc_encoder_op(struct mt7621_nfc *nfc, bool enable)
319 {
320 mt7621_ecc_wait_idle(nfc, ECC_ENCIDLE);
321 ecc_write16(nfc, ECC_ENCCON, enable ? ENC_EN : 0);
322 }
323
324 static void mt7621_ecc_decoder_op(struct mt7621_nfc *nfc, bool enable)
325 {
326 mt7621_ecc_wait_idle(nfc, ECC_DECIDLE);
327 ecc_write16(nfc, ECC_DECCON, enable ? DEC_EN : 0);
328 }
329
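/*
 * Each ECC_DECEL word packs two 13-bit error locations: the even-indexed
 * entry in bits [12:0] and the odd-indexed entry in bits [28:16].  A
 * location counts bits from the start of the sector + FDM block: bits
 * [12:3] select the byte and bits [2:0] the bit within that byte.
 * mt7621_ecc_correct_check() below walks these entries and flips the
 * faulty bits in either the sector buffer or the FDM buffer.
 */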
330 static int mt7621_ecc_correct_check(struct mt7621_nfc *nfc, u8 *sector_buf,
331 u8 *fdm_buf, u32 sect)
332 {
333 struct nand_chip *nand = &nfc->nand;
334 u32 decnum, num_error_bits, fdm_end_bits;
335 u32 error_locations, error_bit_loc;
336 u32 error_byte_pos, error_bit_pos;
337 int bitflips = 0;
338 u32 i;
339
340 decnum = ecc_read32(nfc, ECC_DECENUM);
341 num_error_bits = (decnum >> (sect << ERRNUM_S)) & ERRNUM_M;
342 fdm_end_bits = (nand->ecc.size + NFI_FDM_SIZE) << 3;
343
344 if (!num_error_bits)
345 return 0;
346
347 if (num_error_bits == ERRNUM_M)
348 return -1;
349
350 for (i = 0; i < num_error_bits; i++) {
351 error_locations = ecc_read32(nfc, ECC_DECEL(i / 2));
352 error_bit_loc = (error_locations >> ((i % 2) * DEC_EL_ODD_S)) &
353 DEC_EL_M;
354 error_byte_pos = error_bit_loc >> DEC_EL_BYTE_POS_S;
355 error_bit_pos = error_bit_loc & DEC_EL_BIT_POS_M;
356
357 if (error_bit_loc < (nand->ecc.size << 3)) {
358 if (sector_buf) {
359 sector_buf[error_byte_pos] ^=
360 (1 << error_bit_pos);
361 }
362 } else if (error_bit_loc < fdm_end_bits) {
363 if (fdm_buf) {
364 fdm_buf[error_byte_pos - nand->ecc.size] ^=
365 (1 << error_bit_pos);
366 }
367 }
368
369 bitflips++;
370 }
371
372 return bitflips;
373 }
374
375 static int mt7621_nfc_wait_write_completion(struct mt7621_nfc *nfc,
376 struct nand_chip *nand)
377 {
378 struct device *dev = nfc->dev;
379 u16 val;
380 int ret;
381
382 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_ADDRCNTR, val,
383 ((val & SEC_CNTR_M) >> SEC_CNTR_S) >= nand->ecc.steps, 10,
384 NFI_CORE_TIMEOUT);
385
386 if (ret) {
387 dev_warn(dev, "NFI core write operation timed out\n");
388 return -ETIMEDOUT;
389 }
390
391 return ret;
392 }
393
394 static void mt7621_nfc_hw_reset(struct mt7621_nfc *nfc)
395 {
396 u32 val;
397 int ret;
398
399 /* reset all registers and force the NFI master to terminate */
400 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
401
402 /* wait for the master to finish the last transaction */
403 ret = readw_poll_timeout(nfc->nfi_regs + NFI_MASTER_STA, val,
404 !(val & MASTER_STA_MASK), 50,
405 NFI_RESET_TIMEOUT);
406 if (ret) {
407 dev_warn(nfc->dev, "Failed to reset NFI master in %dus\n",
408 NFI_RESET_TIMEOUT);
409 }
410
411 /* ensure any status register affected by the NFI master is reset */
412 nfi_write16(nfc, NFI_CON, CON_FIFO_FLUSH | CON_NFI_RST);
413 nfi_write16(nfc, NFI_STRDATA, 0);
414 }
415
416 static inline void mt7621_nfc_hw_init(struct mt7621_nfc *nfc)
417 {
418 u32 acccon;
419
420 /*
421 * CNRNB: NAND ready/busy register
422 * -------------------------------
423 * 7:4: timeout register for polling the NAND busy/ready signal
424 * 0 : poll the status of the busy/ready signal after [7:4]*16 cycles.
425 */
426 nfi_write16(nfc, NFI_CNRNB, CB2R_TIME_M | STR_CNRNB);
427
428 mt7621_nfc_hw_reset(nfc);
429
430 /* Apply default access timing */
431 acccon = ACCTIMING(ACCCON_POECS_DEF, ACCCON_PRECS_DEF, ACCCON_C2R_DEF,
432 ACCCON_W2R_DEF, ACCCON_WH_DEF, ACCCON_WST_DEF,
433 ACCCON_RLT_DEF);
434
435 nfi_write32(nfc, NFI_ACCCON, acccon);
436 }
437
438 static int mt7621_nfc_send_command(struct mt7621_nfc *nfc, u8 command)
439 {
440 struct device *dev = nfc->dev;
441 u32 val;
442 int ret;
443
444 nfi_write32(nfc, NFI_CMD, command);
445
446 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
447 !(val & STA_CMD), 10,
448 NFI_CORE_TIMEOUT);
449 if (ret) {
450 dev_warn(dev, "NFI core timed out entering command mode\n");
451 return -EIO;
452 }
453
454 return 0;
455 }
456
457 static int mt7621_nfc_send_address_byte(struct mt7621_nfc *nfc, int addr)
458 {
459 struct device *dev = nfc->dev;
460 u32 val;
461 int ret;
462
463 nfi_write32(nfc, NFI_COLADDR, addr);
464 nfi_write32(nfc, NFI_ROWADDR, 0);
465 nfi_write16(nfc, NFI_ADDRNOB, 1);
466
467 ret = readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
468 !(val & STA_ADDR), 10,
469 NFI_CORE_TIMEOUT);
470 if (ret) {
471 dev_warn(dev, "NFI core timed out entering address mode\n");
472 return -EIO;
473 }
474
475 return 0;
476 }
477
478 static int mt7621_nfc_send_address(struct mt7621_nfc *nfc, const u8 *addr,
479 unsigned int naddrs)
480 {
481 int ret;
482
483 while (naddrs) {
484 ret = mt7621_nfc_send_address_byte(nfc, *addr);
485 if (ret)
486 return ret;
487
488 addr++;
489 naddrs--;
490 }
491
492 return 0;
493 }
494
495 static void mt7621_nfc_wait_pio_ready(struct mt7621_nfc *nfc)
496 {
497 struct device *dev = nfc->dev;
498 int ret;
499 u16 val;
500
501 ret = readw_poll_timeout_atomic(nfc->nfi_regs + NFI_PIO_DIRDY, val,
502 val & PIO_DIRDY, 10,
503 NFI_CORE_TIMEOUT);
504 if (ret < 0)
505 dev_err(dev, "NFI core PIO mode not ready\n");
506 }
507
508 static u32 mt7621_nfc_pio_read(struct mt7621_nfc *nfc, bool br)
509 {
510 u32 reg;
511
512 /* after each byte read, the NFI_STA reg is reset by the hardware */
513 reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
514 if (reg != STA_FSM_CUSTOM_DATA) {
515 reg = nfi_read16(nfc, NFI_CNFG);
516 reg |= CNFG_READ_MODE | CNFG_BYTE_RW;
517 if (!br)
518 reg &= ~CNFG_BYTE_RW;
519 nfi_write16(nfc, NFI_CNFG, reg);
520
521 /*
522 * set to max sector to allow the HW to continue reading over
523 * unaligned accesses
524 */
525 nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BRD);
526
527 /* trigger to fetch data */
528 nfi_write16(nfc, NFI_STRDATA, STR_DATA);
529 }
530
531 mt7621_nfc_wait_pio_ready(nfc);
532
533 return nfi_read32(nfc, NFI_DATAR);
534 }
535
536 static void mt7621_nfc_read_data(struct mt7621_nfc *nfc, u8 *buf, u32 len)
537 {
538 while (((uintptr_t)buf & 3) && len) {
539 *buf = mt7621_nfc_pio_read(nfc, true);
540 buf++;
541 len--;
542 }
543
544 while (len >= 4) {
545 *(u32 *)buf = mt7621_nfc_pio_read(nfc, false);
546 buf += 4;
547 len -= 4;
548 }
549
550 while (len) {
551 *buf = mt7621_nfc_pio_read(nfc, true);
552 buf++;
553 len--;
554 }
555 }
556
557 static void mt7621_nfc_read_data_discard(struct mt7621_nfc *nfc, u32 len)
558 {
559 while (len >= 4) {
560 mt7621_nfc_pio_read(nfc, false);
561 len -= 4;
562 }
563
564 while (len) {
565 mt7621_nfc_pio_read(nfc, true);
566 len--;
567 }
568 }
569
570 static void mt7621_nfc_pio_write(struct mt7621_nfc *nfc, u32 val, bool bw)
571 {
572 u32 reg;
573
574 reg = (nfi_read32(nfc, NFI_STA) & STA_NFI_FSM_M) >> STA_NFI_FSM_S;
575 if (reg != STA_FSM_CUSTOM_DATA) {
576 reg = nfi_read16(nfc, NFI_CNFG);
577 reg &= ~(CNFG_READ_MODE | CNFG_BYTE_RW);
578 if (bw)
579 reg |= CNFG_BYTE_RW;
580 nfi_write16(nfc, NFI_CNFG, reg);
581
582 nfi_write16(nfc, NFI_CON, CON_NFI_SEC_M | CON_NFI_BWR);
583 nfi_write16(nfc, NFI_STRDATA, STR_DATA);
584 }
585
586 mt7621_nfc_wait_pio_ready(nfc);
587 nfi_write32(nfc, NFI_DATAW, val);
588 }
589
590 static void mt7621_nfc_write_data(struct mt7621_nfc *nfc, const u8 *buf,
591 u32 len)
592 {
593 while (((uintptr_t)buf & 3) && len) {
594 mt7621_nfc_pio_write(nfc, *buf, true);
595 buf++;
596 len--;
597 }
598
599 while (len >= 4) {
600 mt7621_nfc_pio_write(nfc, *(const u32 *)buf, false);
601 buf += 4;
602 len -= 4;
603 }
604
605 while (len) {
606 mt7621_nfc_pio_write(nfc, *buf, true);
607 buf++;
608 len--;
609 }
610 }
611
612 static void mt7621_nfc_write_data_empty(struct mt7621_nfc *nfc, u32 len)
613 {
614 while (len >= 4) {
615 mt7621_nfc_pio_write(nfc, 0xffffffff, false);
616 len -= 4;
617 }
618
619 while (len) {
620 mt7621_nfc_pio_write(nfc, 0xff, true);
621 len--;
622 }
623 }
624
625 static int mt7621_nfc_dev_ready(struct mt7621_nfc *nfc,
626 unsigned int timeout_ms)
627 {
628 u32 val;
629
630 return readl_poll_timeout_atomic(nfc->nfi_regs + NFI_STA, val,
631 !(val & STA_BUSY), 10,
632 timeout_ms * 1000);
633 }
634
635 static int mt7621_nfc_exec_instr(struct nand_chip *nand,
636 const struct nand_op_instr *instr)
637 {
638 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
639
640 switch (instr->type) {
641 case NAND_OP_CMD_INSTR:
642 mt7621_nfc_hw_reset(nfc);
643 nfi_write16(nfc, NFI_CNFG, CNFG_OP_CUSTOM << CNFG_OP_MODE_S);
644 return mt7621_nfc_send_command(nfc, instr->ctx.cmd.opcode);
645 case NAND_OP_ADDR_INSTR:
646 return mt7621_nfc_send_address(nfc, instr->ctx.addr.addrs,
647 instr->ctx.addr.naddrs);
648 case NAND_OP_DATA_IN_INSTR:
649 mt7621_nfc_read_data(nfc, instr->ctx.data.buf.in,
650 instr->ctx.data.len);
651 return 0;
652 case NAND_OP_DATA_OUT_INSTR:
653 mt7621_nfc_write_data(nfc, instr->ctx.data.buf.out,
654 instr->ctx.data.len);
655 return 0;
656 case NAND_OP_WAITRDY_INSTR:
657 return mt7621_nfc_dev_ready(nfc,
658 instr->ctx.waitrdy.timeout_ms);
659 default:
660 WARN_ONCE(1, "unsupported NAND instruction type: %d\n",
661 instr->type);
662
663 return -EINVAL;
664 }
665 }
666
667 static int mt7621_nfc_exec_op(struct nand_chip *nand,
668 const struct nand_operation *op, bool check_only)
669 {
670 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
671 int i, ret;
672
673 if (check_only)
674 return 0;
675
676 /* Only CS0 available */
677 nfi_write16(nfc, NFI_CSEL, 0);
678
679 for (i = 0; i < op->ninstrs; i++) {
680 ret = mt7621_nfc_exec_instr(nand, &op->instrs[i]);
681 if (ret)
682 return ret;
683 }
684
685 return 0;
686 }
687
688 static int mt7621_nfc_setup_interface(struct nand_chip *nand, int csline,
689 const struct nand_interface_config *conf)
690 {
691 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
692 const struct nand_sdr_timings *timings;
693 u32 acccon, temp, rate, tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt;
694
695 if (!nfc->nfi_clk)
696 return -ENOTSUPP;
697
698 timings = nand_get_sdr_timings(conf);
699 if (IS_ERR(timings))
700 return -ENOTSUPP;
701
702 rate = clk_get_rate(nfc->nfi_clk);
703
704 /* convert the clock rate to kHz */
705 rate /= 1000;
706
707 tpoecs = max(timings->tALH_min, timings->tCLH_min) / 1000;
708 tpoecs = DIV_ROUND_UP(tpoecs * rate, 1000000);
709 tpoecs = min_t(u32, tpoecs, ACCCON_POECS_MAX);
710
711 tprecs = max(timings->tCLS_min, timings->tALS_min) / 1000;
712 tprecs = DIV_ROUND_UP(tprecs * rate, 1000000);
713 tprecs = min_t(u32, tprecs, ACCCON_PRECS_MAX);
714
715 /* The SDR interface has no tCR (CE# low to RE# low time) */
716 tc2r = 0;
717
718 tw2r = timings->tWHR_min / 1000;
719 tw2r = DIV_ROUND_UP(tw2r * rate, 1000000);
720 tw2r = DIV_ROUND_UP(tw2r - 1, 2);
721 tw2r = min_t(u32, tw2r, ACCCON_W2R_MAX);
722
723 twh = max(timings->tREH_min, timings->tWH_min) / 1000;
724 twh = DIV_ROUND_UP(twh * rate, 1000000) - 1;
725 twh = min_t(u32, twh, ACCCON_WH_MAX);
726
727 /* Calculate the real WE#/RE# hold time in nanoseconds */
728 temp = (twh + 1) * 1000000 / rate;
729 /* convert nanoseconds to picoseconds */
730 temp *= 1000;
731
732 /*
733 * The WE# low level time should be extended to satisfy both the
734 * WE# pulse time and the WE# cycle time.
735 */
736 if (temp < timings->tWC_min)
737 twst = timings->tWC_min - temp;
738 else
739 twst = 0;
740 twst = max(timings->tWP_min, twst) / 1000;
741 twst = DIV_ROUND_UP(twst * rate, 1000000) - 1;
742 twst = min_t(u32, twst, ACCCON_WST_MAX);
743
744 /*
745 * The RE# low level time should be extended to satisfy both the
746 * RE# pulse time and the RE# cycle time.
747 */
748 if (temp < timings->tRC_min)
749 trlt = timings->tRC_min - temp;
750 else
751 trlt = 0;
752 trlt = max(trlt, timings->tRP_min) / 1000;
753 trlt = DIV_ROUND_UP(trlt * rate, 1000000) - 1;
754 trlt = min_t(u32, trlt, ACCCON_RLT_MAX);
755
756 if (csline == NAND_DATA_IFACE_CHECK_ONLY) {
757 if (twst < ACCCON_WST_MIN || trlt < ACCCON_RLT_MIN)
758 return -ENOTSUPP;
759 }
760
761 acccon = ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt);
762
763 dev_dbg(nfc->dev, "Using programmed access timing: %08x\n", acccon);
764
765 nfi_write32(nfc, NFI_ACCCON, acccon);
766
767 return 0;
768 }
769
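/*
 * Worked example: for a 2048+64 page device, ecc.steps = 4 and
 * spare_per_sector = 16, so avail_ecc_bytes = 16 - 8 = 8.  That gives a
 * target strength of 8 * 8 / 13 = 4, which matches the smallest entry of
 * mt7621_ecc_strength[], and ecc.bytes = DIV_ROUND_UP(4 * 13, 8) = 7.
 */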
770 static int mt7621_nfc_calc_ecc_strength(struct mt7621_nfc *nfc,
771 u32 avail_ecc_bytes)
772 {
773 struct nand_chip *nand = &nfc->nand;
774 struct mtd_info *mtd = nand_to_mtd(nand);
775 u32 strength;
776 int i;
777
778 strength = avail_ecc_bytes * 8 / ECC_PARITY_BITS;
779
780 /* Find the largest supported ECC strength not exceeding the target */
781 for (i = ARRAY_SIZE(mt7621_ecc_strength) - 1; i >= 0; i--) {
782 if (mt7621_ecc_strength[i] <= strength)
783 break;
784 }
785
786 if (unlikely(i < 0)) {
787 dev_err(nfc->dev, "OOB size (%u) is not supported\n",
788 mtd->oobsize);
789 return -EINVAL;
790 }
791
792 nand->ecc.strength = mt7621_ecc_strength[i];
793 nand->ecc.bytes =
794 DIV_ROUND_UP(nand->ecc.strength * ECC_PARITY_BITS, 8);
795
796 dev_info(nfc->dev, "ECC strength adjusted to %u bits\n",
797 nand->ecc.strength);
798
799 return i;
800 }
801
802 static int mt7621_nfc_set_spare_per_sector(struct mt7621_nfc *nfc)
803 {
804 struct nand_chip *nand = &nfc->nand;
805 struct mtd_info *mtd = nand_to_mtd(nand);
806 u32 size;
807 int i;
808
809 size = nand->ecc.bytes + NFI_FDM_SIZE;
810
811 /* Find the smallest supported spare size that fits ECC parity + FDM */
812 for (i = 0; i < ARRAY_SIZE(mt7621_nfi_spare_size); i++) {
813 if (mt7621_nfi_spare_size[i] >= size)
814 break;
815 }
816
817 if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_spare_size))) {
818 dev_err(nfc->dev, "OOB size (%u) is not supported\n",
819 mtd->oobsize);
820 return -EINVAL;
821 }
822
823 nfc->spare_per_sector = mt7621_nfi_spare_size[i];
824
825 return i;
826 }
827
828 static int mt7621_nfc_ecc_init(struct mt7621_nfc *nfc)
829 {
830 struct nand_chip *nand = &nfc->nand;
831 struct mtd_info *mtd = nand_to_mtd(nand);
832 u32 spare_per_sector, encode_block_size, decode_block_size;
833 u32 ecc_enccfg, ecc_deccfg;
834 int ecc_cap;
835
836 /* Only hardware ECC mode is supported */
837 if (nand->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) {
838 dev_err(nfc->dev, "Only hardware ECC mode is supported\n");
839 return -EINVAL;
840 }
841
842 nand->ecc.size = ECC_SECTOR_SIZE;
843 nand->ecc.steps = mtd->writesize / nand->ecc.size;
844
845 spare_per_sector = mtd->oobsize / nand->ecc.steps;
846
847 ecc_cap = mt7621_nfc_calc_ecc_strength(nfc,
848 spare_per_sector - NFI_FDM_SIZE);
849 if (ecc_cap < 0)
850 return ecc_cap;
851
852 /* Sector + FDM */
853 encode_block_size = (nand->ecc.size + NFI_FDM_SIZE) * 8;
854 ecc_enccfg = ecc_cap | (ENC_MODE_NFI << ENC_MODE_S) |
855 (encode_block_size << ENC_CNFG_MSG_S);
856
857 /* Sector + FDM + ECC parity bits */
858 decode_block_size = ((nand->ecc.size + NFI_FDM_SIZE) * 8) +
859 nand->ecc.strength * ECC_PARITY_BITS;
860 ecc_deccfg = ecc_cap | (DEC_MODE_NFI << DEC_MODE_S) |
861 (decode_block_size << DEC_CS_S) |
862 (DEC_CON_EL << DEC_CON_S) | DEC_EMPTY_EN;
863
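/*
 * Point the ECC engine at the physical address of the NFI FDM register
 * block (NFI_FDM0L) so the engine can fetch and update the per-sector
 * FDM bytes through NFI_FDML()/NFI_FDMM() during encode/decode.
 */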
864 ecc_write32(nfc, ECC_FDMADDR, nfc->nfi_base + NFI_FDML(0));
865
866 mt7621_ecc_encoder_op(nfc, false);
867 ecc_write32(nfc, ECC_ENCCNFG, ecc_enccfg);
868
869 mt7621_ecc_decoder_op(nfc, false);
870 ecc_write32(nfc, ECC_DECCNFG, ecc_deccfg);
871
872 return 0;
873 }
874
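/*
 * Worked example: a 2048-byte page with 16 spare bytes per sector uses
 * page-size index 1 (SZ_2K) and spare-size index 0 (16 bytes), so
 * NFI_PAGEFMT = 1 | (0 << PAGEFMT_SPARE_S) | (8 << PAGEFMT_FDM_S) |
 * (8 << PAGEFMT_FDM_ECC_S) = 0x8801.
 */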
875 static int mt7621_nfc_set_page_format(struct mt7621_nfc *nfc)
876 {
877 struct nand_chip *nand = &nfc->nand;
878 struct mtd_info *mtd = nand_to_mtd(nand);
879 int i, spare_size;
880 u32 pagefmt;
881
882 spare_size = mt7621_nfc_set_spare_per_sector(nfc);
883 if (spare_size < 0)
884 return spare_size;
885
886 for (i = 0; i < ARRAY_SIZE(mt7621_nfi_page_size); i++) {
887 if (mt7621_nfi_page_size[i] == mtd->writesize)
888 break;
889 }
890
891 if (unlikely(i >= ARRAY_SIZE(mt7621_nfi_page_size))) {
892 dev_err(nfc->dev, "Page size (%u) is not supported\n",
893 mtd->writesize);
894 return -EINVAL;
895 }
896
897 pagefmt = i | (spare_size << PAGEFMT_SPARE_S) |
898 (NFI_FDM_SIZE << PAGEFMT_FDM_S) |
899 (NFI_FDM_SIZE << PAGEFMT_FDM_ECC_S);
900
901 nfi_write16(nfc, NFI_PAGEFMT, pagefmt);
902
903 return 0;
904 }
905
906 static int mt7621_nfc_attach_chip(struct nand_chip *nand)
907 {
908 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
909 int ret;
910
911 if (nand->options & NAND_BUSWIDTH_16) {
912 dev_err(nfc->dev, "16-bit buswidth is not supported\n");
913 return -EINVAL;
914 }
915
916 ret = mt7621_nfc_ecc_init(nfc);
917 if (ret)
918 return ret;
919
920 return mt7621_nfc_set_page_format(nfc);
921 }
922
923 static const struct nand_controller_ops mt7621_nfc_controller_ops = {
924 .attach_chip = mt7621_nfc_attach_chip,
925 .exec_op = mt7621_nfc_exec_op,
926 .setup_interface = mt7621_nfc_setup_interface,
927 };
928
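/*
 * Example layout for a 2048+64 page (4 sectors): the first 32 OOB bytes
 * hold the FDM data; byte 0 of each 8-byte FDM slot is excluded from the
 * free area (byte 0 of sector 0 is typically where the bad block marker
 * lives), so the free regions are 7 bytes at offsets 1, 9, 17 and 25.
 * The remaining bytes (offset 32..63) are reported as the ECC region.
 */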
929 static int mt7621_nfc_ooblayout_free(struct mtd_info *mtd, int section,
930 struct mtd_oob_region *oob_region)
931 {
932 struct nand_chip *nand = mtd_to_nand(mtd);
933
934 if (section >= nand->ecc.steps)
935 return -ERANGE;
936
937 oob_region->length = NFI_FDM_SIZE - 1;
938 oob_region->offset = section * NFI_FDM_SIZE + 1;
939
940 return 0;
941 }
942
943 static int mt7621_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
944 struct mtd_oob_region *oob_region)
945 {
946 struct nand_chip *nand = mtd_to_nand(mtd);
947
948 if (section)
949 return -ERANGE;
950
951 oob_region->offset = NFI_FDM_SIZE * nand->ecc.steps;
952 oob_region->length = mtd->oobsize - oob_region->offset;
953
954 return 0;
955 }
956
957 static const struct mtd_ooblayout_ops mt7621_nfc_ooblayout_ops = {
958 .free = mt7621_nfc_ooblayout_free,
959 .ecc = mt7621_nfc_ooblayout_ecc,
960 };
961
962 static void mt7621_nfc_write_fdm(struct mt7621_nfc *nfc)
963 {
964 struct nand_chip *nand = &nfc->nand;
965 u32 vall, valm;
966 u8 *oobptr;
967 int i, j;
968
969 for (i = 0; i < nand->ecc.steps; i++) {
970 vall = 0;
971 valm = 0;
972 oobptr = oob_fdm_ptr(nand, i);
973
974 for (j = 0; j < 4; j++)
975 vall |= (u32)oobptr[j] << (j * 8);
976
977 for (j = 0; j < 4; j++)
978 valm |= (u32)oobptr[j + 4] << (j * 8);
979
980 nfi_write32(nfc, NFI_FDML(i), vall);
981 nfi_write32(nfc, NFI_FDMM(i), valm);
982 }
983 }
984
985 static void mt7621_nfc_read_sector_fdm(struct mt7621_nfc *nfc, u32 sect)
986 {
987 struct nand_chip *nand = &nfc->nand;
988 u32 vall, valm;
989 u8 *oobptr;
990 int i;
991
992 vall = nfi_read32(nfc, NFI_FDML(sect));
993 valm = nfi_read32(nfc, NFI_FDMM(sect));
994 oobptr = oob_fdm_ptr(nand, sect);
995
996 for (i = 0; i < 4; i++)
997 oobptr[i] = (vall >> (i * 8)) & 0xff;
998
999 for (i = 0; i < 4; i++)
1000 oobptr[i + 4] = (valm >> (i * 8)) & 0xff;
1001 }
1002
1003 static int mt7621_nfc_read_page_hwecc(struct nand_chip *nand, uint8_t *buf,
1004 int oob_required, int page)
1005 {
1006 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1007 struct mtd_info *mtd = nand_to_mtd(nand);
1008 int bitflips = 0;
1009 int rc, i;
1010
1011 nand_read_page_op(nand, page, 0, NULL, 0);
1012
1013 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1014 CNFG_READ_MODE | CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
1015
1016 mt7621_ecc_decoder_op(nfc, true);
1017
1018 nfi_write16(nfc, NFI_CON,
1019 CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));
1020
1021 for (i = 0; i < nand->ecc.steps; i++) {
1022 if (buf)
1023 mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
1024 nand->ecc.size);
1025 else
1026 mt7621_nfc_read_data_discard(nfc, nand->ecc.size);
1027
1028 rc = mt7621_ecc_decoder_wait_done(nfc, i);
1029
1030 mt7621_nfc_read_sector_fdm(nfc, i);
1031
1032 if (rc < 0) {
1033 bitflips = -EIO;
1034 continue;
1035 }
1036
1037 rc = mt7621_ecc_correct_check(nfc,
1038 buf ? page_data_ptr(nand, buf, i) : NULL,
1039 oob_fdm_ptr(nand, i), i);
1040
1041 if (rc < 0) {
1042 dev_dbg(nfc->dev,
1043 "Uncorrectable ECC error at page %d.%d\n",
1044 page, i);
1045 bitflips = -EBADMSG;
1046 mtd->ecc_stats.failed++;
1047 } else if (bitflips >= 0) {
1048 bitflips += rc;
1049 mtd->ecc_stats.corrected += rc;
1050 }
1051 }
1052
1053 mt7621_ecc_decoder_op(nfc, false);
1054
1055 nfi_write16(nfc, NFI_CON, 0);
1056
1057 return bitflips;
1058 }
1059
1060 static int mt7621_nfc_read_page_raw(struct nand_chip *nand, uint8_t *buf,
1061 int oob_required, int page)
1062 {
1063 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1064 int i;
1065
1066 nand_read_page_op(nand, page, 0, NULL, 0);
1067
1068 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1069 CNFG_READ_MODE);
1070
1071 nfi_write16(nfc, NFI_CON,
1072 CON_NFI_BRD | (nand->ecc.steps << CON_NFI_SEC_S));
1073
1074 for (i = 0; i < nand->ecc.steps; i++) {
1075 /* Read data */
1076 if (buf)
1077 mt7621_nfc_read_data(nfc, page_data_ptr(nand, buf, i),
1078 nand->ecc.size);
1079 else
1080 mt7621_nfc_read_data_discard(nfc, nand->ecc.size);
1081
1082 /* Read FDM */
1083 mt7621_nfc_read_data(nfc, oob_fdm_ptr(nand, i), NFI_FDM_SIZE);
1084
1085 /* Read ECC parity data */
1086 mt7621_nfc_read_data(nfc, oob_ecc_ptr(nfc, i),
1087 nfc->spare_per_sector - NFI_FDM_SIZE);
1088 }
1089
1090 nfi_write16(nfc, NFI_CON, 0);
1091
1092 return 0;
1093 }
1094
1095 static int mt7621_nfc_read_oob_hwecc(struct nand_chip *nand, int page)
1096 {
1097 return mt7621_nfc_read_page_hwecc(nand, NULL, 1, page);
1098 }
1099
1100 static int mt7621_nfc_read_oob_raw(struct nand_chip *nand, int page)
1101 {
1102 return mt7621_nfc_read_page_raw(nand, NULL, 1, page);
1103 }
1104
1105 static int mt7621_nfc_check_empty_page(struct nand_chip *nand, const u8 *buf)
1106 {
1107 struct mtd_info *mtd = nand_to_mtd(nand);
1108 uint32_t i, j;
1109 u8 *oobptr;
1110
1111 if (buf) {
1112 for (i = 0; i < mtd->writesize; i++)
1113 if (buf[i] != 0xff)
1114 return 0;
1115 }
1116
1117 for (i = 0; i < nand->ecc.steps; i++) {
1118 oobptr = oob_fdm_ptr(nand, i);
1119 for (j = 0; j < NFI_FDM_SIZE; j++)
1120 if (oobptr[j] != 0xff)
1121 return 0;
1122 }
1123
1124 return 1;
1125 }
1126
1127 static int mt7621_nfc_write_page_hwecc(struct nand_chip *nand,
1128 const uint8_t *buf, int oob_required,
1129 int page)
1130 {
1131 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1132 struct mtd_info *mtd = nand_to_mtd(nand);
1133
1134 if (mt7621_nfc_check_empty_page(nand, buf)) {
1135 /*
1136 * MT7621 ECC engine always generates parity code for input
1137 * pages, even for empty pages. Doing so will write back ECC
1138 * parity code to the oob region, which means such pages will
1139 * no longer be empty pages.
1140 *
1141 * To avoid this, skip the write operation if the current page is
1142 * empty.
1143 */
1144 return 0;
1145 }
1146
1147 nand_prog_page_begin_op(nand, page, 0, NULL, 0);
1148
1149 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S) |
1150 CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
1151
1152 mt7621_ecc_encoder_op(nfc, true);
1153
1154 mt7621_nfc_write_fdm(nfc);
1155
1156 nfi_write16(nfc, NFI_CON,
1157 CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));
1158
1159 if (buf)
1160 mt7621_nfc_write_data(nfc, buf, mtd->writesize);
1161 else
1162 mt7621_nfc_write_data_empty(nfc, mtd->writesize);
1163
1164 mt7621_nfc_wait_write_completion(nfc, nand);
1165
1166 mt7621_ecc_encoder_op(nfc, false);
1167
1168 nfi_write16(nfc, NFI_CON, 0);
1169
1170 return nand_prog_page_end_op(nand);
1171 }
1172
1173 static int mt7621_nfc_write_page_raw(struct nand_chip *nand,
1174 const uint8_t *buf, int oob_required,
1175 int page)
1176 {
1177 struct mt7621_nfc *nfc = nand_get_controller_data(nand);
1178 int i;
1179
1180 nand_prog_page_begin_op(nand, page, 0, NULL, 0);
1181
1182 nfi_write16(nfc, NFI_CNFG, (CNFG_OP_CUSTOM << CNFG_OP_MODE_S));
1183
1184 nfi_write16(nfc, NFI_CON,
1185 CON_NFI_BWR | (nand->ecc.steps << CON_NFI_SEC_S));
1186
1187 for (i = 0; i < nand->ecc.steps; i++) {
1188 /* Write data */
1189 if (buf)
1190 mt7621_nfc_write_data(nfc, page_data_ptr(nand, buf, i),
1191 nand->ecc.size);
1192 else
1193 mt7621_nfc_write_data_empty(nfc, nand->ecc.size);
1194
1195 /* Write FDM */
1196 mt7621_nfc_write_data(nfc, oob_fdm_ptr(nand, i),
1197 NFI_FDM_SIZE);
1198
1199 /* Write dummy ECC parity data */
1200 mt7621_nfc_write_data_empty(nfc, nfc->spare_per_sector -
1201 NFI_FDM_SIZE);
1202 }
1203
1204 mt7621_nfc_wait_write_completion(nfc, nand);
1205
1206 nfi_write16(nfc, NFI_CON, 0);
1207
1208 return nand_prog_page_end_op(nand);
1209 }
1210
1211 static int mt7621_nfc_write_oob_hwecc(struct nand_chip *nand, int page)
1212 {
1213 return mt7621_nfc_write_page_hwecc(nand, NULL, 1, page);
1214 }
1215
1216 static int mt7621_nfc_write_oob_raw(struct nand_chip *nand, int page)
1217 {
1218 return mt7621_nfc_write_page_raw(nand, NULL, 1, page);
1219 }
1220
1221 static int mt7621_nfc_init_chip(struct mt7621_nfc *nfc)
1222 {
1223 struct nand_chip *nand = &nfc->nand;
1224 struct mtd_info *mtd;
1225 int ret;
1226
1227 nand->controller = &nfc->controller;
1228 nand_set_controller_data(nand, (void *)nfc);
1229 nand_set_flash_node(nand, nfc->dev->of_node);
1230
1231 nand->options |= NAND_USES_DMA | NAND_NO_SUBPAGE_WRITE;
1232 if (!nfc->nfi_clk)
1233 nand->options |= NAND_KEEP_TIMINGS;
1234
1235 nand->ecc.engine_type = NAND_ECC_ENGINE_TYPE_ON_HOST;
1236 nand->ecc.read_page = mt7621_nfc_read_page_hwecc;
1237 nand->ecc.read_page_raw = mt7621_nfc_read_page_raw;
1238 nand->ecc.write_page = mt7621_nfc_write_page_hwecc;
1239 nand->ecc.write_page_raw = mt7621_nfc_write_page_raw;
1240 nand->ecc.read_oob = mt7621_nfc_read_oob_hwecc;
1241 nand->ecc.read_oob_raw = mt7621_nfc_read_oob_raw;
1242 nand->ecc.write_oob = mt7621_nfc_write_oob_hwecc;
1243 nand->ecc.write_oob_raw = mt7621_nfc_write_oob_raw;
1244
1245 mtd = nand_to_mtd(nand);
1246 mtd->owner = THIS_MODULE;
1247 mtd->dev.parent = nfc->dev;
1248 mtd->name = MT7621_NFC_NAME;
1249 mtd_set_ooblayout(mtd, &mt7621_nfc_ooblayout_ops);
1250
1251 mt7621_nfc_hw_init(nfc);
1252
1253 ret = nand_scan(nand, 1);
1254 if (ret)
1255 return ret;
1256
1257 ret = mtd_device_register(mtd, NULL, 0);
1258 if (ret) {
1259 dev_err(nfc->dev, "Failed to register MTD: %d\n", ret);
1260 nand_cleanup(nand);
1261 return ret;
1262 }
1263
1264 return 0;
1265 }
1266
1267 static int mt7621_nfc_probe(struct platform_device *pdev)
1268 {
1269 struct device *dev = &pdev->dev;
1270 struct mt7621_nfc *nfc;
1271 struct resource *res;
1272 int ret;
1273
1274 nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
1275 if (!nfc)
1276 return -ENOMEM;
1277
1278 nand_controller_init(&nfc->controller);
1279 nfc->controller.ops = &mt7621_nfc_controller_ops;
1280 nfc->dev = dev;
1281
1282 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "nfi");
1283 nfc->nfi_regs = devm_ioremap_resource(dev, res);
1284 if (IS_ERR(nfc->nfi_regs)) {
1285 ret = PTR_ERR(nfc->nfi_regs);
1286 return ret;
1287 }
1288 nfc->nfi_base = res->start;
1289
1290 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ecc");
1291 nfc->ecc_regs = devm_ioremap_resource(dev, res);
1292 if (IS_ERR(nfc->ecc_regs)) {
1293 ret = PTR_ERR(nfc->ecc_regs);
1294 return ret;
1295 }
1296
1297 nfc->nfi_clk = devm_clk_get(dev, "nfi_clk");
1298 if (IS_ERR(nfc->nfi_clk)) {
1299 dev_warn(dev, "nfi clk not provided\n");
1300 nfc->nfi_clk = NULL;
1301 } else {
1302 ret = clk_prepare_enable(nfc->nfi_clk);
1303 if (ret) {
1304 dev_err(dev, "Failed to enable nfi core clock\n");
1305 return ret;
1306 }
1307 }
1308
1309 platform_set_drvdata(pdev, nfc);
1310
1311 ret = mt7621_nfc_init_chip(nfc);
1312 if (ret) {
1313 dev_err(dev, "Failed to initialize the NAND chip\n");
1314 goto clk_disable;
1315 }
1316
1317 return 0;
1318
1319 clk_disable:
1320 clk_disable_unprepare(nfc->nfi_clk);
1321
1322 return ret;
1323 }
1324
1325 static int mt7621_nfc_remove(struct platform_device *pdev)
1326 {
1327 struct mt7621_nfc *nfc = platform_get_drvdata(pdev);
1328 struct nand_chip *nand = &nfc->nand;
1329 struct mtd_info *mtd = nand_to_mtd(nand);
1330
1331 mtd_device_unregister(mtd);
1332 nand_cleanup(nand);
1333 clk_disable_unprepare(nfc->nfi_clk);
1334
1335 return 0;
1336 }
1337
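/*
 * Illustrative device tree node.  The unit address, reg values and the
 * clock specifier are examples only; the compatible string, the reg-names
 * ("nfi", "ecc") and the clock name ("nfi_clk") are what this driver
 * actually requests.
 *
 *	nand@1e003000 {
 *		compatible = "mediatek,mt7621-nfc";
 *		reg = <0x1e003000 0x800>, <0x1e003800 0x800>;
 *		reg-names = "nfi", "ecc";
 *		clocks = <&sysc MT7621_CLK_NAND>;
 *		clock-names = "nfi_clk";
 *	};
 */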
1338 static const struct of_device_id mt7621_nfc_id_table[] = {
1339 { .compatible = "mediatek,mt7621-nfc" },
1340 { },
1341 };
1342 MODULE_DEVICE_TABLE(of, mt7621_nfc_id_table);
1343
1344 static struct platform_driver mt7621_nfc_driver = {
1345 .probe = mt7621_nfc_probe,
1346 .remove = mt7621_nfc_remove,
1347 .driver = {
1348 .name = MT7621_NFC_NAME,
1349 .owner = THIS_MODULE,
1350 .of_match_table = mt7621_nfc_id_table,
1351 },
1352 };
1353 module_platform_driver(mt7621_nfc_driver);
1354
1355 MODULE_LICENSE("GPL");
1356 MODULE_AUTHOR("Weijie Gao <weijie.gao@mediatek.com>");
1357 MODULE_DESCRIPTION("MediaTek MT7621 NAND Flash Controller driver");