// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * MTK NAND Flash controller driver.
 * Copyright (C) 2016 MediaTek Inc.
 * Authors:	Xiaolei Li		<xiaolei.li@mediatek.com>
 *		Jorge Ramirez-Ortiz	<jorge.ramirez-ortiz@linaro.org>
 */

#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/delay.h>
#include <linux/clk.h>
#include <linux/mtd/rawnand.h>
#include <linux/mtd/mtd.h>
#include <linux/module.h>
#include <linux/iopoll.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/mtd/nand-ecc-mtk.h>	/* mtk_ecc_* helpers and struct mtk_ecc_config */
/* NAND controller register definition */
#define NFI_CNFG		(0x00)
#define		CNFG_AHB		BIT(0)
#define		CNFG_READ_EN		BIT(1)
#define		CNFG_DMA_BURST_EN	BIT(2)
#define		CNFG_BYTE_RW		BIT(6)
#define		CNFG_HW_ECC_EN		BIT(8)
#define		CNFG_AUTO_FMT_EN	BIT(9)
#define		CNFG_OP_CUST		(6 << 12)
#define NFI_PAGEFMT		(0x04)
#define		PAGEFMT_FDM_ECC_SHIFT	(12)
#define		PAGEFMT_FDM_SHIFT	(8)
#define		PAGEFMT_SEC_SEL_512	BIT(2)
#define		PAGEFMT_512_2K		(0)
#define		PAGEFMT_2K_4K		(1)
#define		PAGEFMT_4K_8K		(2)
#define		PAGEFMT_8K_16K		(3)

#define NFI_CON			(0x08)
#define		CON_FIFO_FLUSH		BIT(0)
#define		CON_NFI_RST		BIT(1)
#define		CON_BRD			BIT(8)	/* burst read */
#define		CON_BWR			BIT(9)	/* burst write */
#define		CON_SEC_SHIFT		(12)
/* Timing control register */
#define NFI_ACCCON		(0x0C)
#define NFI_INTR_EN		(0x10)
#define		INTR_AHB_DONE_EN	BIT(6)
#define NFI_INTR_STA		(0x14)
#define NFI_CMD			(0x20)
#define NFI_ADDRNOB		(0x30)
#define NFI_COLADDR		(0x34)
#define NFI_ROWADDR		(0x38)
#define NFI_STRDATA		(0x40)
#define		STAR_EN			(1)
#define		STAR_DE			(0)
#define NFI_CNRNB		(0x44)
#define NFI_DATAW		(0x50)
#define NFI_DATAR		(0x54)
#define NFI_PIO_DIRDY		(0x58)
#define		PIO_DI_RDY		(0x01)
#define NFI_STA			(0x60)
#define		STA_CMD			BIT(0)
#define		STA_ADDR		BIT(1)
#define		STA_BUSY		BIT(8)
#define		STA_EMP_PAGE		BIT(12)
#define		NFI_FSM_CUSTDATA	(0xe << 16)
#define		NFI_FSM_MASK		(0xf << 16)
#define NFI_ADDRCNTR		(0x70)
#define		CNTR_MASK		GENMASK(16, 12)
#define		ADDRCNTR_SEC_SHIFT	(12)
#define		ADDRCNTR_SEC(val) \
		(((val) & CNTR_MASK) >> ADDRCNTR_SEC_SHIFT)
#define NFI_STRADDR		(0x80)
#define NFI_BYTELEN		(0x84)
#define NFI_CSEL		(0x90)
#define NFI_FDML(x)		(0xA0 + (x) * sizeof(u32) * 2)
#define NFI_FDMM(x)		(0xA4 + (x) * sizeof(u32) * 2)
#define NFI_FDM_MAX_SIZE	(8)
#define NFI_FDM_MIN_SIZE	(1)
#define NFI_DEBUG_CON1		(0x220)
#define		STROBE_MASK		GENMASK(4, 3)
#define		STROBE_SHIFT		(3)
#define		MAX_STROBE_DLY		(3)
#define NFI_MASTER_STA		(0x224)
#define		MASTER_STA_MASK		(0x0FFF)
#define NFI_EMPTY_THRESH	(0x23C)

#define MTK_NAME		"mtk-nand"
#define KB(x)			((x) * 1024UL)
#define MB(x)			(KB(x) * 1024UL)

#define MTK_TIMEOUT		(500000)
#define MTK_RESET_TIMEOUT	(1000000)
#define MTK_NAND_MAX_NSELS	(2)
#define MTK_NFC_MIN_SPARE	(16)
#define ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt) \
	((tpoecs) << 28 | (tprecs) << 22 | (tc2r) << 16 | \
	(tw2r) << 12 | (twh) << 8 | (twst) << 4 | (trlt))
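/*
 * ACCTIMING() packs the seven access-timing fields into the NFI_ACCCON
 * register layout described in mtk_nfc_setup_interface(): tpoecs in bits
 * 31:28, tprecs in 27:22, tc2r in 21:16, tw2r in 15:12, twh in 11:8,
 * twst in 7:4 and trlt in 3:0.
 */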
struct mtk_nfc_caps {
	const u8 *spare_size;
	u8 num_spare_size;
	u8 pageformat_spare_shift;
	u8 nfi_clk_div;
	u8 max_sector;
	u32 max_sector_size;
};

struct mtk_nfc_bad_mark_ctl {
	void (*bm_swap)(struct mtd_info *, u8 *buf, int raw);
	u32 sec;
	u32 pos;
};

/*
 * FDM: region used to store free OOB data
 */
struct mtk_nfc_fdm {
	u32 reg_size;
	u32 ecc_size;
};

struct mtk_nfc_nand_chip {
	struct list_head node;
	struct nand_chip nand;

	struct mtk_nfc_bad_mark_ctl bad_mark;
	struct mtk_nfc_fdm fdm;
	u32 spare_per_sector;

	int nsels;
	u8 sels[];
	/* nothing after this field */
};

struct mtk_nfc_clk {
	struct clk *nfi_clk;
	struct clk *pad_clk;
};

struct mtk_nfc {
	struct nand_controller controller;
	struct mtk_ecc_config ecc_cfg;
	struct mtk_nfc_clk clk;
	struct mtk_ecc *ecc;

	struct device *dev;
	const struct mtk_nfc_caps *caps;
	void __iomem *regs;

	struct completion done;
	struct list_head chips;

	u8 *buffer;
	unsigned long assigned_cs;
};

/*
 * supported spare size of each IP.
 * order should be the same with the spare size bitfield definition of
 * register NFI_PAGEFMT.
 */
static const u8 spare_size_mt2701[] = {
	16, 26, 27, 28, 32, 36, 40, 44, 48, 49, 50, 51, 52, 62, 63, 64
};

static const u8 spare_size_mt2712[] = {
	16, 26, 27, 28, 32, 36, 40, 44, 48, 49, 50, 51, 52, 62, 61, 63, 64, 67,
	74
};

static const u8 spare_size_mt7622[] = {
	16, 26, 27, 28
};
static inline struct mtk_nfc_nand_chip *to_mtk_nand(struct nand_chip *nand)
{
	return container_of(nand, struct mtk_nfc_nand_chip, nand);
}

static inline u8 *data_ptr(struct nand_chip *chip, const u8 *p, int i)
{
	return (u8 *)p + i * chip->ecc.size;
}

static inline u8 *oob_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u8 *poi;

	/* map the sector's FDM data to free oob:
	 * the beginning of the oob area stores the FDM data of bad mark sectors
	 */

	if (i < mtk_nand->bad_mark.sec)
		poi = chip->oob_poi + (i + 1) * mtk_nand->fdm.reg_size;
	else if (i == mtk_nand->bad_mark.sec)
		poi = chip->oob_poi;
	else
		poi = chip->oob_poi + i * mtk_nand->fdm.reg_size;

	return poi;
}
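/*
 * Illustrative example of the mapping above (numbers are an example only):
 * with fdm.reg_size = 8 and bad_mark.sec = 1, sector 1's FDM bytes land at
 * oob_poi[0..7], sector 0's at oob_poi[8..15], and sectors 2, 3, ... keep
 * their natural slots at oob_poi[16..23], oob_poi[24..31], ...
 */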
static inline int mtk_data_len(struct nand_chip *chip)
{
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);

	return chip->ecc.size + mtk_nand->spare_per_sector;
}

static inline u8 *mtk_data_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	return nfc->buffer + i * mtk_data_len(chip);
}

static inline u8 *mtk_oob_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	return nfc->buffer + i * mtk_data_len(chip) + chip->ecc.size;
}

static inline void nfi_writel(struct mtk_nfc *nfc, u32 val, u32 reg)
{
	writel(val, nfc->regs + reg);
}

static inline void nfi_writew(struct mtk_nfc *nfc, u16 val, u32 reg)
{
	writew(val, nfc->regs + reg);
}

static inline void nfi_writeb(struct mtk_nfc *nfc, u8 val, u32 reg)
{
	writeb(val, nfc->regs + reg);
}

static inline u32 nfi_readl(struct mtk_nfc *nfc, u32 reg)
{
	return readl_relaxed(nfc->regs + reg);
}

static inline u16 nfi_readw(struct mtk_nfc *nfc, u32 reg)
{
	return readw_relaxed(nfc->regs + reg);
}

static inline u8 nfi_readb(struct mtk_nfc *nfc, u32 reg)
{
	return readb_relaxed(nfc->regs + reg);
}
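/*
 * Layout of nfc->buffer as addressed by the two mtk_*_ptr() helpers above,
 * one raw sector after another:
 *
 *   | sector i data (ecc.size bytes) | sector i spare (FDM + ECC parity) | ...
 *
 * mtk_data_ptr() points at the data part of sector i, mtk_oob_ptr() at the
 * beginning of its spare area.
 */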
static void mtk_nfc_hw_reset(struct mtk_nfc *nfc)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	/* reset all registers and force the NFI master to terminate */
	nfi_writel(nfc, CON_FIFO_FLUSH | CON_NFI_RST, NFI_CON);

	/* wait for the master to finish the last transaction */
	ret = readl_poll_timeout(nfc->regs + NFI_MASTER_STA, val,
				 !(val & MASTER_STA_MASK), 50,
				 MTK_RESET_TIMEOUT);
	if (ret)
		dev_warn(dev, "master active in reset [0x%x] = 0x%x\n",
			 NFI_MASTER_STA, val);

	/* ensure any status register affected by the NFI master is reset */
	nfi_writel(nfc, CON_FIFO_FLUSH | CON_NFI_RST, NFI_CON);
	nfi_writew(nfc, STAR_DE, NFI_STRDATA);
}

static int mtk_nfc_send_command(struct mtk_nfc *nfc, u8 command)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_writel(nfc, command, NFI_CMD);

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_STA, val,
					!(val & STA_CMD), 10, MTK_TIMEOUT);
	if (ret) {
		dev_warn(dev, "nfi core timed out entering command mode\n");
		return -EIO;
	}

	return 0;
}

static int mtk_nfc_send_address(struct mtk_nfc *nfc, int addr)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_writel(nfc, addr, NFI_COLADDR);
	nfi_writel(nfc, 0, NFI_ROWADDR);
	nfi_writew(nfc, 1, NFI_ADDRNOB);

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_STA, val,
					!(val & STA_ADDR), 10, MTK_TIMEOUT);
	if (ret) {
		dev_warn(dev, "nfi core timed out entering address mode\n");
		return -EIO;
	}

	return 0;
}
static int mtk_nfc_hw_runtime_config(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	u32 fmt, spare, i;

	if (!mtd->writesize)
		return 0;

	spare = mtk_nand->spare_per_sector;

	switch (mtd->writesize) {
	case 512:
		fmt = PAGEFMT_512_2K | PAGEFMT_SEC_SEL_512;
		break;
	case KB(2):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_2K_4K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_512_2K;
		break;
	case KB(4):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_4K_8K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_2K_4K;
		break;
	case KB(8):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_8K_16K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_4K_8K;
		break;
	case KB(16):
		fmt = PAGEFMT_8K_16K;
		break;
	default:
		dev_err(nfc->dev, "invalid page len: %d\n", mtd->writesize);
		return -EINVAL;
	}

	/*
	 * the hardware will double the value for this eccsize, so we need to
	 * halve it
	 */
	if (chip->ecc.size == 1024)
		spare >>= 1;

	for (i = 0; i < nfc->caps->num_spare_size; i++) {
		if (nfc->caps->spare_size[i] == spare)
			break;
	}

	if (i == nfc->caps->num_spare_size) {
		dev_err(nfc->dev, "invalid spare size %d\n", spare);
		return -EINVAL;
	}

	fmt |= i << nfc->caps->pageformat_spare_shift;

	fmt |= mtk_nand->fdm.reg_size << PAGEFMT_FDM_SHIFT;
	fmt |= mtk_nand->fdm.ecc_size << PAGEFMT_FDM_ECC_SHIFT;
	nfi_writel(nfc, fmt, NFI_PAGEFMT);

	nfc->ecc_cfg.strength = chip->ecc.strength;
	nfc->ecc_cfg.len = chip->ecc.size + mtk_nand->fdm.ecc_size;

	return 0;
}
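/*
 * Illustrative example for the switch above: a 2 KiB page used with 1 KiB
 * ECC sectors keeps PAGEFMT_SEC_SEL_512 cleared and programs the
 * PAGEFMT_512_2K page-size code, while the same page with 512-byte sectors
 * sets PAGEFMT_SEC_SEL_512 and uses PAGEFMT_2K_4K instead.
 */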
static inline void mtk_nfc_wait_ioready(struct mtk_nfc *nfc)
{
	int rc;
	u8 val;

	rc = readb_poll_timeout_atomic(nfc->regs + NFI_PIO_DIRDY, val,
				       val & PIO_DI_RDY, 10, MTK_TIMEOUT);
	if (rc < 0)
		dev_err(nfc->dev, "data not ready\n");
}

static inline u8 mtk_nfc_read_byte(struct nand_chip *chip)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	u32 reg;

	/* after each byte read, the NFI_STA reg is reset by the hardware */
	reg = nfi_readl(nfc, NFI_STA) & NFI_FSM_MASK;
	if (reg != NFI_FSM_CUSTDATA) {
		reg = nfi_readw(nfc, NFI_CNFG);
		reg |= CNFG_BYTE_RW | CNFG_READ_EN;
		nfi_writew(nfc, reg, NFI_CNFG);

		/*
		 * set to max sector to allow the HW to continue reading over
		 * unaligned accesses
		 */
		reg = (nfc->caps->max_sector << CON_SEC_SHIFT) | CON_BRD;
		nfi_writel(nfc, reg, NFI_CON);

		/* trigger to fetch data */
		nfi_writew(nfc, STAR_EN, NFI_STRDATA);
	}

	mtk_nfc_wait_ioready(nfc);

	return nfi_readb(nfc, NFI_DATAR);
}

static void mtk_nfc_read_buf(struct nand_chip *chip, u8 *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		buf[i] = mtk_nfc_read_byte(chip);
}
static void mtk_nfc_write_byte(struct nand_chip *chip, u8 byte)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	u32 reg;

	reg = nfi_readl(nfc, NFI_STA) & NFI_FSM_MASK;

	if (reg != NFI_FSM_CUSTDATA) {
		reg = nfi_readw(nfc, NFI_CNFG) | CNFG_BYTE_RW;
		nfi_writew(nfc, reg, NFI_CNFG);

		reg = nfc->caps->max_sector << CON_SEC_SHIFT | CON_BWR;
		nfi_writel(nfc, reg, NFI_CON);

		nfi_writew(nfc, STAR_EN, NFI_STRDATA);
	}

	mtk_nfc_wait_ioready(nfc);
	nfi_writeb(nfc, byte, NFI_DATAW);
}

static void mtk_nfc_write_buf(struct nand_chip *chip, const u8 *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		mtk_nfc_write_byte(chip, buf[i]);
}
static int mtk_nfc_exec_instr(struct nand_chip *chip,
			      const struct nand_op_instr *instr)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	unsigned int i;
	u32 status;

	switch (instr->type) {
	case NAND_OP_CMD_INSTR:
		mtk_nfc_send_command(nfc, instr->ctx.cmd.opcode);
		return 0;
	case NAND_OP_ADDR_INSTR:
		for (i = 0; i < instr->ctx.addr.naddrs; i++)
			mtk_nfc_send_address(nfc, instr->ctx.addr.addrs[i]);
		return 0;
	case NAND_OP_DATA_IN_INSTR:
		mtk_nfc_read_buf(chip, instr->ctx.data.buf.in,
				 instr->ctx.data.len);
		return 0;
	case NAND_OP_DATA_OUT_INSTR:
		mtk_nfc_write_buf(chip, instr->ctx.data.buf.out,
				  instr->ctx.data.len);
		return 0;
	case NAND_OP_WAITRDY_INSTR:
		/* wait for the busy bit to clear, timeout given in ms */
		return readl_poll_timeout(nfc->regs + NFI_STA, status,
					  !(status & STA_BUSY), 20,
					  instr->ctx.waitrdy.timeout_ms * 1000);
	default:
		break;
	}

	return -EINVAL;
}

static void mtk_nfc_select_target(struct nand_chip *nand, unsigned int cs)
{
	struct mtk_nfc *nfc = nand_get_controller_data(nand);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(nand);

	mtk_nfc_hw_runtime_config(nand_to_mtd(nand));

	nfi_writel(nfc, mtk_nand->sels[cs], NFI_CSEL);
}

static int mtk_nfc_exec_op(struct nand_chip *chip,
			   const struct nand_operation *op,
			   bool check_only)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	unsigned int i;
	int ret = 0;

	if (check_only)
		return 0;

	mtk_nfc_hw_reset(nfc);
	nfi_writew(nfc, CNFG_OP_CUST, NFI_CNFG);
	mtk_nfc_select_target(chip, op->cs);

	for (i = 0; i < op->ninstrs; i++) {
		ret = mtk_nfc_exec_instr(chip, &op->instrs[i]);
		if (ret)
			break;
	}

	return ret;
}
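/*
 * For reference, a regular page read issued by the raw NAND core arrives
 * here as a sequence of the instructions handled above, roughly:
 * CMD(0x00), ADDR(column, row), CMD(0x30), WAITRDY, DATA_IN(page), each one
 * dispatched in order by mtk_nfc_exec_op().
 */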
static int mtk_nfc_setup_interface(struct nand_chip *chip, int csline,
				   const struct nand_interface_config *conf)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	const struct nand_sdr_timings *timings;
	u32 rate, tpoecs, tprecs, tc2r, tw2r, twh, twst = 0, trlt = 0;
	u32 temp, tsel = 0;

	timings = nand_get_sdr_timings(conf);
	if (IS_ERR(timings))
		return -ENOTSUPP;

	if (csline == NAND_DATA_IFACE_CHECK_ONLY)
		return 0;

	rate = clk_get_rate(nfc->clk.nfi_clk);
	/* There is a frequency divider in some IPs */
	rate /= nfc->caps->nfi_clk_div;

	/* turn clock rate into KHZ */
	rate /= 1000;

	tpoecs = max(timings->tALH_min, timings->tCLH_min) / 1000;
	tpoecs = DIV_ROUND_UP(tpoecs * rate, 1000000);
	tpoecs &= 0xf;

	tprecs = max(timings->tCLS_min, timings->tALS_min) / 1000;
	tprecs = DIV_ROUND_UP(tprecs * rate, 1000000);
	tprecs &= 0x3f;

	/* sdr interface has no tCR which means CE# low to RE# low */
	tc2r = 0;

	tw2r = timings->tWHR_min / 1000;
	tw2r = DIV_ROUND_UP(tw2r * rate, 1000000);
	tw2r = DIV_ROUND_UP(tw2r - 1, 2);
	tw2r &= 0xf;

	twh = max(timings->tREH_min, timings->tWH_min) / 1000;
	twh = DIV_ROUND_UP(twh * rate, 1000000) - 1;
	twh &= 0xf;

	/* Calculate real WE#/RE# hold time in nanosecond */
	temp = (twh + 1) * 1000000 / rate;
	/* nanosecond to picosecond */
	temp *= 1000;

	/*
	 * WE# low level time should be expanded to meet WE# pulse time
	 * and WE# cycle time at the same time.
	 */
	if (temp < timings->tWC_min)
		twst = timings->tWC_min - temp;
	twst = max(timings->tWP_min, twst) / 1000;
	twst = DIV_ROUND_UP(twst * rate, 1000000) - 1;
	twst &= 0xf;

	/*
	 * RE# low level time should be expanded to meet RE# pulse time
	 * and RE# cycle time at the same time.
	 */
	if (temp < timings->tRC_min)
		trlt = timings->tRC_min - temp;
	trlt = max(trlt, timings->tRP_min) / 1000;
	trlt = DIV_ROUND_UP(trlt * rate, 1000000) - 1;
	trlt &= 0xf;

	/* Calculate RE# pulse time in nanosecond. */
	temp = (trlt + 1) * 1000000 / rate;
	/* nanosecond to picosecond */
	temp *= 1000;
	/*
	 * If RE# access time is bigger than RE# pulse time,
	 * delay sampling data timing.
	 */
	if (temp < timings->tREA_max) {
		tsel = timings->tREA_max / 1000;
		tsel = DIV_ROUND_UP(tsel * rate, 1000000);
		tsel -= (trlt + 1);
		if (tsel > MAX_STROBE_DLY) {
			trlt += tsel - MAX_STROBE_DLY;
			tsel = MAX_STROBE_DLY;
		}
	}
	temp = nfi_readl(nfc, NFI_DEBUG_CON1);
	temp &= ~STROBE_MASK;
	temp |= tsel << STROBE_SHIFT;
	nfi_writel(nfc, temp, NFI_DEBUG_CON1);

	/*
	 * ACCON: access timing control register
	 * -------------------------------------
	 * 31:28: tpoecs, minimum required time for CS post pulling down after
	 *        accessing the device
	 * 27:22: tprecs, minimum required time for CS pre pulling down before
	 *        accessing the device
	 * 21:16: tc2r, minimum required time from NCEB low to NREB low
	 * 15:12: tw2r, minimum required time from NWEB high to NREB low.
	 * 11:08: twh, write enable hold time
	 * 07:04: twst, write wait states
	 * 03:00: trlt, read wait states
	 */
	trlt = ACCTIMING(tpoecs, tprecs, tc2r, tw2r, twh, twst, trlt);
	nfi_writel(nfc, trlt, NFI_ACCCON);

	return 0;
}
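/*
 * Worked example for the conversions above (illustrative numbers): with a
 * 66 MHz NFI clock (rate = 66000 kHz) and tWH_min = 10000 ps, the code
 * computes twh = DIV_ROUND_UP(10 * 66000, 1000000) - 1 = 0, i.e. a 10 ns
 * hold time fits in a single ~15 ns clock cycle and the register field
 * stores "cycles - 1".
 */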
static int mtk_nfc_sector_encode(struct nand_chip *chip, u8 *data)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	int size = chip->ecc.size + mtk_nand->fdm.reg_size;

	nfc->ecc_cfg.mode = ECC_DMA_MODE;
	nfc->ecc_cfg.op = ECC_ENCODE;

	return mtk_ecc_encode(nfc->ecc, &nfc->ecc_cfg, data, size);
}

static void mtk_nfc_no_bad_mark_swap(struct mtd_info *a, u8 *b, int c)
{
	/* nop */
}

static void mtk_nfc_bad_mark_swap(struct mtd_info *mtd, u8 *buf, int raw)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *nand = to_mtk_nand(chip);
	u32 bad_pos = nand->bad_mark.pos;

	if (raw)
		bad_pos += nand->bad_mark.sec * mtk_data_len(chip);
	else
		bad_pos += nand->bad_mark.sec * chip->ecc.size;

	swap(chip->oob_poi[0], buf[bad_pos]);
}
static int mtk_nfc_format_subpage(struct mtd_info *mtd, u32 offset,
				  u32 len, const u8 *buf)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 start, end;
	int i, ret;

	start = offset / chip->ecc.size;
	end = DIV_ROUND_UP(offset + len, chip->ecc.size);

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	for (i = 0; i < chip->ecc.steps; i++) {
		memcpy(mtk_data_ptr(chip, i), data_ptr(chip, buf, i),
		       chip->ecc.size);

		if (start > i || i >= end)
			continue;

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		memcpy(mtk_oob_ptr(chip, i), oob_ptr(chip, i), fdm->reg_size);

		/* program the CRC back to the OOB */
		ret = mtk_nfc_sector_encode(chip, mtk_data_ptr(chip, i));
		if (ret < 0)
			return ret;
	}

	return 0;
}

static void mtk_nfc_format_page(struct mtd_info *mtd, const u8 *buf)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 i;

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	for (i = 0; i < chip->ecc.steps; i++) {
		if (buf)
			memcpy(mtk_data_ptr(chip, i), data_ptr(chip, buf, i),
			       chip->ecc.size);

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		memcpy(mtk_oob_ptr(chip, i), oob_ptr(chip, i), fdm->reg_size);
	}
}
static inline void mtk_nfc_read_fdm(struct nand_chip *chip, u32 start,
				    u32 sectors)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 vall, valm;
	u8 *oobptr;
	int i, j;

	for (i = 0; i < sectors; i++) {
		oobptr = oob_ptr(chip, start + i);
		vall = nfi_readl(nfc, NFI_FDML(i));
		valm = nfi_readl(nfc, NFI_FDMM(i));

		for (j = 0; j < fdm->reg_size; j++)
			oobptr[j] = (j >= 4 ? valm : vall) >> ((j % 4) * 8);
	}
}

static inline void mtk_nfc_write_fdm(struct nand_chip *chip)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 vall, valm;
	u8 *oobptr;
	int i, j;

	for (i = 0; i < chip->ecc.steps; i++) {
		oobptr = oob_ptr(chip, i);
		vall = 0;
		valm = 0;
		for (j = 0; j < 8; j++) {
			if (j < 4)
				vall |= (j < fdm->reg_size ? oobptr[j] : 0xff)
						<< (j * 8);
			else
				valm |= (j < fdm->reg_size ? oobptr[j] : 0xff)
						<< ((j - 4) * 8);
		}
		nfi_writel(nfc, vall, NFI_FDML(i));
		nfi_writel(nfc, valm, NFI_FDMM(i));
	}
}
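/*
 * Each sector i owns one FDM register pair: bytes 0-3 of its FDM data live
 * in NFI_FDML(i) and bytes 4-7 in NFI_FDMM(i), packed least-significant
 * byte first, which is exactly how the two helpers above (de)serialize the
 * free OOB bytes.
 */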
static int mtk_nfc_do_write_page(struct mtd_info *mtd, struct nand_chip *chip,
				 const u8 *buf, int page, int len)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct device *dev = nfc->dev;
	dma_addr_t addr;
	u32 reg;
	int ret;

	addr = dma_map_single(dev, (void *)buf, len, DMA_TO_DEVICE);
	ret = dma_mapping_error(nfc->dev, addr);
	if (ret) {
		dev_err(nfc->dev, "dma mapping error\n");
		return -EINVAL;
	}

	reg = nfi_readw(nfc, NFI_CNFG) | CNFG_AHB | CNFG_DMA_BURST_EN;
	nfi_writew(nfc, reg, NFI_CNFG);

	nfi_writel(nfc, chip->ecc.steps << CON_SEC_SHIFT, NFI_CON);
	nfi_writel(nfc, lower_32_bits(addr), NFI_STRADDR);
	nfi_writew(nfc, INTR_AHB_DONE_EN, NFI_INTR_EN);

	init_completion(&nfc->done);

	reg = nfi_readl(nfc, NFI_CON) | CON_BWR;
	nfi_writel(nfc, reg, NFI_CON);
	nfi_writew(nfc, STAR_EN, NFI_STRDATA);

	ret = wait_for_completion_timeout(&nfc->done, msecs_to_jiffies(500));
	if (!ret) {
		dev_err(dev, "program ahb done timeout\n");
		nfi_writew(nfc, 0, NFI_INTR_EN);
		ret = -ETIMEDOUT;
		goto timeout;
	}

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_ADDRCNTR, reg,
					ADDRCNTR_SEC(reg) >= chip->ecc.steps,
					10, MTK_TIMEOUT);
	if (ret)
		dev_err(dev, "hwecc write timeout\n");

timeout:

	dma_unmap_single(nfc->dev, addr, len, DMA_TO_DEVICE);
	nfi_writel(nfc, 0, NFI_CON);

	return ret;
}
static int mtk_nfc_write_page(struct mtd_info *mtd, struct nand_chip *chip,
			      const u8 *buf, int page, int raw)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	size_t len;
	const u8 *bufpoi;
	u32 reg;
	int ret;

	mtk_nfc_select_target(chip, chip->cur_cs);
	nand_prog_page_begin_op(chip, page, 0, NULL, 0);

	if (!raw) {
		/* OOB => FDM: from register, ECC: from HW */
		reg = nfi_readw(nfc, NFI_CNFG) | CNFG_AUTO_FMT_EN;
		nfi_writew(nfc, reg | CNFG_HW_ECC_EN, NFI_CNFG);

		nfc->ecc_cfg.op = ECC_ENCODE;
		nfc->ecc_cfg.mode = ECC_NFI_MODE;
		ret = mtk_ecc_enable(nfc->ecc, &nfc->ecc_cfg);
		if (ret) {
			/* clear NFI config */
			reg = nfi_readw(nfc, NFI_CNFG);
			reg &= ~(CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
			nfi_writew(nfc, reg, NFI_CNFG);

			return ret;
		}

		memcpy(nfc->buffer, buf, mtd->writesize);
		mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, raw);
		bufpoi = nfc->buffer;

		/* write OOB into the FDM registers (OOB area in MTK NAND) */
		mtk_nfc_write_fdm(chip);
	} else {
		bufpoi = buf;
	}

	len = mtd->writesize + (raw ? mtd->oobsize : 0);
	ret = mtk_nfc_do_write_page(mtd, chip, bufpoi, page, len);

	if (!raw)
		mtk_ecc_disable(nfc->ecc);

	if (ret < 0)
		return ret;

	return nand_prog_page_end_op(chip);
}
static int mtk_nfc_write_page_hwecc(struct nand_chip *chip, const u8 *buf,
				    int oob_on, int page)
{
	return mtk_nfc_write_page(nand_to_mtd(chip), chip, buf, page, 0);
}

static int mtk_nfc_write_page_raw(struct nand_chip *chip, const u8 *buf,
				  int oob_on, int pg)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	mtk_nfc_format_page(mtd, buf);
	return mtk_nfc_write_page(mtd, chip, nfc->buffer, pg, 1);
}

static int mtk_nfc_write_subpage_hwecc(struct nand_chip *chip, u32 offset,
				       u32 data_len, const u8 *buf,
				       int oob_on, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	int ret;

	ret = mtk_nfc_format_subpage(mtd, offset, data_len, buf);
	if (ret < 0)
		return ret;

	/* use the data in the private buffer (now with FDM and CRC) */
	return mtk_nfc_write_page(mtd, chip, nfc->buffer, page, 1);
}

static int mtk_nfc_write_oob_std(struct nand_chip *chip, int page)
{
	return mtk_nfc_write_page_raw(chip, NULL, 1, page);
}
static int mtk_nfc_update_ecc_stats(struct mtd_info *mtd, u8 *buf, u32 start,
				    u32 sectors)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_ecc_stats stats;
	u32 reg_size = mtk_nand->fdm.reg_size;
	int rc, i;

	rc = nfi_readl(nfc, NFI_STA) & STA_EMP_PAGE;
	if (rc) {
		memset(buf, 0xff, sectors * chip->ecc.size);
		for (i = 0; i < sectors; i++)
			memset(oob_ptr(chip, start + i), 0xff, reg_size);
		return 0;
	}

	mtk_ecc_get_stats(nfc->ecc, &stats, sectors);
	mtd->ecc_stats.corrected += stats.corrected;
	mtd->ecc_stats.failed += stats.failed;

	return stats.bitflips;
}
static int mtk_nfc_read_subpage(struct mtd_info *mtd, struct nand_chip *chip,
				u32 data_offs, u32 readlen,
				u8 *bufpoi, int page, int raw)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u32 spare = mtk_nand->spare_per_sector;
	u32 column, sectors, start, end, reg;
	dma_addr_t addr;
	int bitflips = 0;
	size_t len;
	u8 *buf;
	int rc;

	mtk_nfc_select_target(chip, chip->cur_cs);
	start = data_offs / chip->ecc.size;
	end = DIV_ROUND_UP(data_offs + readlen, chip->ecc.size);

	sectors = end - start;
	column = start * (chip->ecc.size + spare);

	len = sectors * chip->ecc.size + (raw ? sectors * spare : 0);
	buf = bufpoi + start * chip->ecc.size;

	nand_read_page_op(chip, page, column, NULL, 0);

	addr = dma_map_single(nfc->dev, buf, len, DMA_FROM_DEVICE);
	rc = dma_mapping_error(nfc->dev, addr);
	if (rc) {
		dev_err(nfc->dev, "dma mapping error\n");

		return -EINVAL;
	}

	reg = nfi_readw(nfc, NFI_CNFG);
	reg |= CNFG_READ_EN | CNFG_DMA_BURST_EN | CNFG_AHB;
	if (!raw) {
		reg |= CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN;
		nfi_writew(nfc, reg, NFI_CNFG);

		nfc->ecc_cfg.mode = ECC_NFI_MODE;
		nfc->ecc_cfg.sectors = sectors;
		nfc->ecc_cfg.op = ECC_DECODE;
		rc = mtk_ecc_enable(nfc->ecc, &nfc->ecc_cfg);
		if (rc) {
			dev_err(nfc->dev, "ecc enable\n");
			/* clear NFI_CNFG */
			reg &= ~(CNFG_DMA_BURST_EN | CNFG_AHB | CNFG_READ_EN |
				CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
			nfi_writew(nfc, reg, NFI_CNFG);
			dma_unmap_single(nfc->dev, addr, len, DMA_FROM_DEVICE);

			return rc;
		}
	} else {
		nfi_writew(nfc, reg, NFI_CNFG);
	}

	nfi_writel(nfc, sectors << CON_SEC_SHIFT, NFI_CON);
	nfi_writew(nfc, INTR_AHB_DONE_EN, NFI_INTR_EN);
	nfi_writel(nfc, lower_32_bits(addr), NFI_STRADDR);

	init_completion(&nfc->done);
	reg = nfi_readl(nfc, NFI_CON) | CON_BRD;
	nfi_writel(nfc, reg, NFI_CON);
	nfi_writew(nfc, STAR_EN, NFI_STRDATA);

	rc = wait_for_completion_timeout(&nfc->done, msecs_to_jiffies(500));
	if (!rc)
		dev_warn(nfc->dev, "read ahb/dma done timeout\n");

	rc = readl_poll_timeout_atomic(nfc->regs + NFI_BYTELEN, reg,
				       ADDRCNTR_SEC(reg) >= sectors, 10,
				       MTK_TIMEOUT);
	if (rc < 0) {
		dev_err(nfc->dev, "subpage done timeout\n");
		bitflips = -EIO;
	} else if (!raw) {
		rc = mtk_ecc_wait_done(nfc->ecc, ECC_DECODE);
		bitflips = rc < 0 ? -ETIMEDOUT :
			mtk_nfc_update_ecc_stats(mtd, buf, start, sectors);
		mtk_nfc_read_fdm(chip, start, sectors);
	}

	dma_unmap_single(nfc->dev, addr, len, DMA_FROM_DEVICE);

	if (raw)
		goto done;

	mtk_ecc_disable(nfc->ecc);

	if (clamp(mtk_nand->bad_mark.sec, start, end) == mtk_nand->bad_mark.sec)
		mtk_nand->bad_mark.bm_swap(mtd, bufpoi, raw);
done:
	nfi_writel(nfc, 0, NFI_CON);

	return bitflips;
}
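/*
 * Illustrative example of the sector math above: reading 512 bytes at
 * data_offs = 2048 from a page with 1 KiB ECC sectors and 64 bytes of spare
 * per sector gives start = 2, end = 3, so a single sector is transferred and
 * the read starts at column = 2 * (1024 + 64) = 2176 of the raw page.
 */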
static int mtk_nfc_read_subpage_hwecc(struct nand_chip *chip, u32 off,
				      u32 len, u8 *p, int pg)
{
	return mtk_nfc_read_subpage(nand_to_mtd(chip), chip, off, len, p, pg,
				    0);
}

static int mtk_nfc_read_page_hwecc(struct nand_chip *chip, u8 *p, int oob_on,
				   int pg)
{
	struct mtd_info *mtd = nand_to_mtd(chip);

	return mtk_nfc_read_subpage(mtd, chip, 0, mtd->writesize, p, pg, 0);
}

static int mtk_nfc_read_page_raw(struct nand_chip *chip, u8 *buf, int oob_on,
				 int pg)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	int i, ret;

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	ret = mtk_nfc_read_subpage(mtd, chip, 0, mtd->writesize, nfc->buffer,
				   pg, 1);
	if (ret < 0)
		return ret;

	for (i = 0; i < chip->ecc.steps; i++) {
		memcpy(oob_ptr(chip, i), mtk_oob_ptr(chip, i), fdm->reg_size);

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		if (buf)
			memcpy(data_ptr(chip, buf, i), mtk_data_ptr(chip, i),
			       chip->ecc.size);
	}

	return ret;
}

static int mtk_nfc_read_oob_std(struct nand_chip *chip, int page)
{
	return mtk_nfc_read_page_raw(chip, NULL, 1, page);
}
static inline void mtk_nfc_hw_init(struct mtk_nfc *nfc)
{
	/*
	 * CNRNB: nand ready/busy register
	 * -------------------------------
	 * 7:4: timeout register for polling the NAND busy/ready signal
	 * 0  : poll the status of the busy/ready signal after [7:4]*16 cycles.
	 */
	nfi_writew(nfc, 0xf1, NFI_CNRNB);
	nfi_writel(nfc, PAGEFMT_8K_16K, NFI_PAGEFMT);

	mtk_nfc_hw_reset(nfc);

	nfi_readl(nfc, NFI_INTR_STA);
	nfi_writel(nfc, 0, NFI_INTR_EN);
}

static irqreturn_t mtk_nfc_irq(int irq, void *id)
{
	struct mtk_nfc *nfc = id;
	u16 sta, ien;

	sta = nfi_readw(nfc, NFI_INTR_STA);
	ien = nfi_readw(nfc, NFI_INTR_EN);

	if (!(sta & ien))
		return IRQ_NONE;

	nfi_writew(nfc, ~sta & ien, NFI_INTR_EN);
	complete(&nfc->done);

	return IRQ_HANDLED;
}
static int mtk_nfc_enable_clk(struct device *dev, struct mtk_nfc_clk *clk)
{
	int ret;

	ret = clk_prepare_enable(clk->nfi_clk);
	if (ret) {
		dev_err(dev, "failed to enable nfi clk\n");
		return ret;
	}

	ret = clk_prepare_enable(clk->pad_clk);
	if (ret) {
		dev_err(dev, "failed to enable pad clk\n");
		clk_disable_unprepare(clk->nfi_clk);
		return ret;
	}

	return 0;
}

static void mtk_nfc_disable_clk(struct mtk_nfc_clk *clk)
{
	clk_disable_unprepare(clk->nfi_clk);
	clk_disable_unprepare(clk->pad_clk);
}
static int mtk_nfc_ooblayout_free(struct mtd_info *mtd, int section,
				  struct mtd_oob_region *oob_region)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 eccsteps;

	eccsteps = mtd->writesize / chip->ecc.size;

	if (section >= eccsteps)
		return -ERANGE;

	oob_region->length = fdm->reg_size - fdm->ecc_size;
	oob_region->offset = section * fdm->reg_size + fdm->ecc_size;

	return 0;
}

static int mtk_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
				 struct mtd_oob_region *oob_region)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u32 eccsteps;

	if (section)
		return -ERANGE;

	eccsteps = mtd->writesize / chip->ecc.size;
	oob_region->offset = mtk_nand->fdm.reg_size * eccsteps;
	oob_region->length = mtd->oobsize - oob_region->offset;

	return 0;
}

static const struct mtd_ooblayout_ops mtk_nfc_ooblayout_ops = {
	.free = mtk_nfc_ooblayout_free,
	.ecc = mtk_nfc_ooblayout_ecc,
};
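/*
 * Resulting OOB layout, as reported to MTD (illustrative numbers, assuming
 * fdm.reg_size = 8 and fdm.ecc_size = 1): each ECC step exposes 7 free bytes
 * at offset section * 8 + 1, and the ECC region covers everything from
 * offset eccsteps * 8 up to the end of the OOB area.
 */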
static void mtk_nfc_set_fdm(struct mtk_nfc_fdm *fdm, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *chip = to_mtk_nand(nand);
	struct mtk_nfc *nfc = nand_get_controller_data(nand);
	u32 ecc_bytes;

	ecc_bytes = DIV_ROUND_UP(nand->ecc.strength *
				 mtk_ecc_get_parity_bits(nfc->ecc), 8);

	fdm->reg_size = chip->spare_per_sector - ecc_bytes;
	if (fdm->reg_size > NFI_FDM_MAX_SIZE)
		fdm->reg_size = NFI_FDM_MAX_SIZE;

	/* bad block mark storage */
	fdm->ecc_size = 1;
}

static void mtk_nfc_set_bad_mark_ctl(struct mtk_nfc_bad_mark_ctl *bm_ctl,
				     struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);

	if (mtd->writesize == 512) {
		bm_ctl->bm_swap = mtk_nfc_no_bad_mark_swap;
	} else {
		bm_ctl->bm_swap = mtk_nfc_bad_mark_swap;
		bm_ctl->sec = mtd->writesize / mtk_data_len(nand);
		bm_ctl->pos = mtd->writesize % mtk_data_len(nand);
	}
}
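/*
 * Illustrative example for the bad-mark bookkeeping above: with a 2 KiB
 * page, 1 KiB ECC sectors and spare_per_sector = 64, mtk_data_len() is 1088,
 * so bad_mark.sec = 2048 / 1088 = 1 and bad_mark.pos = 2048 % 1088 = 960:
 * the factory bad-block marker (the first OOB byte of the raw page) falls
 * 960 bytes into sector 1, and bm_swap() exchanges it with oob_poi[0].
 */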
static int mtk_nfc_set_spare_per_sector(u32 *sps, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	struct mtk_nfc *nfc = nand_get_controller_data(nand);
	const u8 *spare = nfc->caps->spare_size;
	u32 eccsteps, i, closest_spare = 0;

	eccsteps = mtd->writesize / nand->ecc.size;
	*sps = mtd->oobsize / eccsteps;

	if (nand->ecc.size == 1024)
		*sps >>= 1;

	if (*sps < MTK_NFC_MIN_SPARE)
		return -EINVAL;

	for (i = 0; i < nfc->caps->num_spare_size; i++) {
		if (*sps >= spare[i] && spare[i] >= spare[closest_spare]) {
			closest_spare = i;
			if (*sps == spare[i])
				break;
		}
	}

	*sps = spare[closest_spare];

	if (nand->ecc.size == 1024)
		*sps <<= 1;

	return 0;
}
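/*
 * Illustrative example of the selection above: a 4 KiB page with 256 bytes
 * of OOB and 1 KiB ECC sectors yields 256 / 4 = 64 OOB bytes per step,
 * halved to 32 because of the doubled sector size, matched against the
 * per-SoC spare table (32 is an exact hit), then doubled back to 64.
 */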
static int mtk_nfc_ecc_init(struct device *dev, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	const struct nand_ecc_props *requirements =
		nanddev_get_ecc_requirements(&nand->base);
	struct mtk_nfc *nfc = nand_get_controller_data(nand);
	u32 spare;
	int free, ret;

	/* support only ecc hw mode */
	if (nand->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) {
		dev_err(dev, "ecc.engine_type not supported\n");
		return -EINVAL;
	}

	/* if optional dt settings not present */
	if (!nand->ecc.size || !nand->ecc.strength) {
		/* use datasheet requirements */
		nand->ecc.strength = requirements->strength;
		nand->ecc.size = requirements->step_size;

		/*
		 * align eccstrength and eccsize
		 * this controller only supports 512 and 1024 sizes
		 */
		if (nand->ecc.size < 1024) {
			if (mtd->writesize > 512 &&
			    nfc->caps->max_sector_size > 512) {
				nand->ecc.size = 1024;
				nand->ecc.strength <<= 1;
			} else {
				nand->ecc.size = 512;
			}
		} else {
			nand->ecc.size = 1024;
		}

		ret = mtk_nfc_set_spare_per_sector(&spare, mtd);
		if (ret)
			return ret;

		/* calculate oob bytes except ecc parity data */
		free = (nand->ecc.strength * mtk_ecc_get_parity_bits(nfc->ecc)
			+ 7) >> 3;
		free = spare - free;

		/*
		 * enhance ecc strength if oob left is bigger than max FDM size
		 * or reduce ecc strength if oob size is not enough for ecc
		 * parity data.
		 */
		if (free > NFI_FDM_MAX_SIZE) {
			spare -= NFI_FDM_MAX_SIZE;
			nand->ecc.strength = (spare << 3) /
					     mtk_ecc_get_parity_bits(nfc->ecc);
		} else if (free < 0) {
			spare -= NFI_FDM_MIN_SIZE;
			nand->ecc.strength = (spare << 3) /
					     mtk_ecc_get_parity_bits(nfc->ecc);
		}
	}

	mtk_ecc_adjust_strength(nfc->ecc, &nand->ecc.strength);

	dev_info(dev, "eccsize %d eccstrength %d\n",
		 nand->ecc.size, nand->ecc.strength);

	return 0;
}
static int mtk_nfc_attach_chip(struct nand_chip *chip)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct device *dev = mtd->dev.parent;
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	int len;
	int ret;

	if (chip->options & NAND_BUSWIDTH_16) {
		dev_err(dev, "16bits buswidth not supported");
		return -EINVAL;
	}

	/* store bbt magic in page, cause OOB is not protected */
	if (chip->bbt_options & NAND_BBT_USE_FLASH)
		chip->bbt_options |= NAND_BBT_NO_OOB;

	ret = mtk_nfc_ecc_init(dev, mtd);
	if (ret)
		return ret;

	ret = mtk_nfc_set_spare_per_sector(&mtk_nand->spare_per_sector, mtd);
	if (ret)
		return ret;

	mtk_nfc_set_fdm(&mtk_nand->fdm, mtd);
	mtk_nfc_set_bad_mark_ctl(&mtk_nand->bad_mark, mtd);

	len = mtd->writesize + mtd->oobsize;
	nfc->buffer = devm_kzalloc(dev, len, GFP_KERNEL);
	if (!nfc->buffer)
		return -ENOMEM;

	return 0;
}

static const struct nand_controller_ops mtk_nfc_controller_ops = {
	.attach_chip = mtk_nfc_attach_chip,
	.setup_interface = mtk_nfc_setup_interface,
	.exec_op = mtk_nfc_exec_op,
};
static int mtk_nfc_nand_chip_init(struct device *dev, struct mtk_nfc *nfc,
				  struct device_node *np)
{
	struct mtk_nfc_nand_chip *chip;
	struct nand_chip *nand;
	struct mtd_info *mtd;
	int nsels;
	u32 tmp;
	int ret;
	int i;

	if (!of_get_property(np, "reg", &nsels))
		return -ENODEV;

	nsels /= sizeof(u32);
	if (!nsels || nsels > MTK_NAND_MAX_NSELS) {
		dev_err(dev, "invalid reg property size %d\n", nsels);
		return -EINVAL;
	}

	chip = devm_kzalloc(dev, sizeof(*chip) + nsels * sizeof(u8),
			    GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	chip->nsels = nsels;
	for (i = 0; i < nsels; i++) {
		ret = of_property_read_u32_index(np, "reg", i, &tmp);
		if (ret) {
			dev_err(dev, "reg property failure : %d\n", ret);
			return ret;
		}

		if (tmp >= MTK_NAND_MAX_NSELS) {
			dev_err(dev, "invalid CS: %u\n", tmp);
			return -EINVAL;
		}

		if (test_and_set_bit(tmp, &nfc->assigned_cs)) {
			dev_err(dev, "CS %u already assigned\n", tmp);
			return -EINVAL;
		}

		chip->sels[i] = tmp;
	}

	nand = &chip->nand;
	nand->controller = &nfc->controller;

	nand_set_flash_node(nand, np);
	nand_set_controller_data(nand, nfc);

	nand->options |= NAND_USES_DMA | NAND_SUBPAGE_READ;

	/* set default mode in case dt entry is missing */
	nand->ecc.engine_type = NAND_ECC_ENGINE_TYPE_ON_HOST;

	nand->ecc.write_subpage = mtk_nfc_write_subpage_hwecc;
	nand->ecc.write_page_raw = mtk_nfc_write_page_raw;
	nand->ecc.write_page = mtk_nfc_write_page_hwecc;
	nand->ecc.write_oob_raw = mtk_nfc_write_oob_std;
	nand->ecc.write_oob = mtk_nfc_write_oob_std;

	nand->ecc.read_subpage = mtk_nfc_read_subpage_hwecc;
	nand->ecc.read_page_raw = mtk_nfc_read_page_raw;
	nand->ecc.read_page = mtk_nfc_read_page_hwecc;
	nand->ecc.read_oob_raw = mtk_nfc_read_oob_std;
	nand->ecc.read_oob = mtk_nfc_read_oob_std;

	mtd = nand_to_mtd(nand);
	mtd->owner = THIS_MODULE;
	mtd->dev.parent = dev;
	mtd->name = MTK_NAME;
	mtd_set_ooblayout(mtd, &mtk_nfc_ooblayout_ops);

	mtk_nfc_hw_init(nfc);

	ret = nand_scan(nand, nsels);
	if (ret)
		return ret;

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret) {
		dev_err(dev, "mtd parse partition error\n");
		nand_cleanup(nand);
		return ret;
	}

	list_add_tail(&chip->node, &nfc->chips);

	return 0;
}
static int mtk_nfc_nand_chips_init(struct device *dev, struct mtk_nfc *nfc)
{
	struct device_node *np = dev->of_node;
	struct device_node *nand_np;
	int ret;

	for_each_child_of_node(np, nand_np) {
		ret = mtk_nfc_nand_chip_init(dev, nfc, nand_np);
		if (ret) {
			of_node_put(nand_np);
			return ret;
		}
	}

	return 0;
}
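/*
 * Illustrative device-tree child node consumed by mtk_nfc_nand_chip_init()
 * (an example only, not a binding excerpt):
 *
 *	nand@0 {
 *		reg = <0>;	// chip select, must be < MTK_NAND_MAX_NSELS
 *	};
 */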
static const struct mtk_nfc_caps mtk_nfc_caps_mt2701 = {
	.spare_size = spare_size_mt2701,
	.num_spare_size = 16,
	.pageformat_spare_shift = 4,
	.nfi_clk_div = 1,
	.max_sector = 16,
	.max_sector_size = 1024,
};

static const struct mtk_nfc_caps mtk_nfc_caps_mt2712 = {
	.spare_size = spare_size_mt2712,
	.num_spare_size = 19,
	.pageformat_spare_shift = 16,
	.nfi_clk_div = 2,
	.max_sector = 16,
	.max_sector_size = 1024,
};

static const struct mtk_nfc_caps mtk_nfc_caps_mt7622 = {
	.spare_size = spare_size_mt7622,
	.num_spare_size = 4,
	.pageformat_spare_shift = 4,
	.nfi_clk_div = 1,
	.max_sector = 8,
	.max_sector_size = 512,
};

static const struct of_device_id mtk_nfc_id_table[] = {
	{
		.compatible = "mediatek,mt2701-nfc",
		.data = &mtk_nfc_caps_mt2701,
	}, {
		.compatible = "mediatek,mt2712-nfc",
		.data = &mtk_nfc_caps_mt2712,
	}, {
		.compatible = "mediatek,mt7622-nfc",
		.data = &mtk_nfc_caps_mt7622,
	},
	{}
};
MODULE_DEVICE_TABLE(of, mtk_nfc_id_table);
static int mtk_nfc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct device_node *np = dev->of_node;
	struct mtk_nfc *nfc;
	struct resource *res;
	int ret, irq;

	nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
	if (!nfc)
		return -ENOMEM;

	nand_controller_init(&nfc->controller);
	INIT_LIST_HEAD(&nfc->chips);
	nfc->controller.ops = &mtk_nfc_controller_ops;

	/* probe defer if not ready */
	nfc->ecc = of_mtk_ecc_get(np);
	if (IS_ERR(nfc->ecc))
		return PTR_ERR(nfc->ecc);
	else if (!nfc->ecc)
		return -ENODEV;

	nfc->caps = of_device_get_match_data(dev);
	nfc->dev = dev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	nfc->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(nfc->regs)) {
		ret = PTR_ERR(nfc->regs);
		goto release_ecc;
	}

	nfc->clk.nfi_clk = devm_clk_get(dev, "nfi_clk");
	if (IS_ERR(nfc->clk.nfi_clk)) {
		dev_err(dev, "no clk\n");
		ret = PTR_ERR(nfc->clk.nfi_clk);
		goto release_ecc;
	}

	nfc->clk.pad_clk = devm_clk_get(dev, "pad_clk");
	if (IS_ERR(nfc->clk.pad_clk)) {
		dev_err(dev, "no pad clk\n");
		ret = PTR_ERR(nfc->clk.pad_clk);
		goto release_ecc;
	}

	ret = mtk_nfc_enable_clk(dev, &nfc->clk);
	if (ret)
		goto release_ecc;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		ret = -EINVAL;
		goto clk_disable;
	}

	ret = devm_request_irq(dev, irq, mtk_nfc_irq, 0x0, "mtk-nand", nfc);
	if (ret) {
		dev_err(dev, "failed to request nfi irq\n");
		goto clk_disable;
	}

	ret = dma_set_mask(dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(dev, "failed to set dma mask\n");
		goto clk_disable;
	}

	platform_set_drvdata(pdev, nfc);

	ret = mtk_nfc_nand_chips_init(dev, nfc);
	if (ret) {
		dev_err(dev, "failed to init nand chips\n");
		goto clk_disable;
	}

	return 0;

clk_disable:
	mtk_nfc_disable_clk(&nfc->clk);

release_ecc:
	mtk_ecc_release(nfc->ecc);

	return ret;
}
static int mtk_nfc_remove(struct platform_device *pdev)
{
	struct mtk_nfc *nfc = platform_get_drvdata(pdev);
	struct mtk_nfc_nand_chip *mtk_chip;
	struct nand_chip *chip;
	int ret;

	while (!list_empty(&nfc->chips)) {
		mtk_chip = list_first_entry(&nfc->chips,
					    struct mtk_nfc_nand_chip, node);
		chip = &mtk_chip->nand;
		ret = mtd_device_unregister(nand_to_mtd(chip));
		WARN_ON(ret);
		nand_cleanup(chip);
		list_del(&mtk_chip->node);
	}

	mtk_ecc_release(nfc->ecc);
	mtk_nfc_disable_clk(&nfc->clk);

	return 0;
}
#ifdef CONFIG_PM_SLEEP
static int mtk_nfc_suspend(struct device *dev)
{
	struct mtk_nfc *nfc = dev_get_drvdata(dev);

	mtk_nfc_disable_clk(&nfc->clk);

	return 0;
}

static int mtk_nfc_resume(struct device *dev)
{
	struct mtk_nfc *nfc = dev_get_drvdata(dev);
	struct mtk_nfc_nand_chip *chip;
	struct nand_chip *nand;
	int ret;
	u32 i;

	udelay(200);

	ret = mtk_nfc_enable_clk(dev, &nfc->clk);
	if (ret)
		return ret;

	/* reset NAND chip if VCC was powered off */
	list_for_each_entry(chip, &nfc->chips, node) {
		nand = &chip->nand;
		for (i = 0; i < chip->nsels; i++)
			nand_reset(nand, i);
	}

	return 0;
}

static SIMPLE_DEV_PM_OPS(mtk_nfc_pm_ops, mtk_nfc_suspend, mtk_nfc_resume);
#endif

static struct platform_driver mtk_nfc_driver = {
	.probe = mtk_nfc_probe,
	.remove = mtk_nfc_remove,
	.driver = {
		.name = MTK_NAME,
		.of_match_table = mtk_nfc_id_table,
#ifdef CONFIG_PM_SLEEP
		.pm = &mtk_nfc_pm_ops,
#endif
	},
};

module_platform_driver(mtk_nfc_driver);

MODULE_LICENSE("Dual MIT/GPL");
MODULE_AUTHOR("Xiaolei Li <xiaolei.li@mediatek.com>");
MODULE_DESCRIPTION("MTK Nand Flash Controller Driver");