/*
 * MTK NAND Flash controller driver.
 * Copyright (C) 2016 MediaTek Inc.
 * Authors:	Xiaolei Li		<xiaolei.li@mediatek.com>
 *		Jorge Ramirez-Ortiz	<jorge.ramirez-ortiz@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/delay.h>
#include <linux/clk.h>
#include <linux/mtd/nand.h>
#include <linux/mtd/mtd.h>
#include <linux/module.h>
#include <linux/iopoll.h>
#include <linux/of.h>

#include "mtk_ecc.h"
/* NAND controller register definition */
#define NFI_CNFG		(0x00)
#define		CNFG_AHB		BIT(0)
#define		CNFG_READ_EN		BIT(1)
#define		CNFG_DMA_BURST_EN	BIT(2)
#define		CNFG_BYTE_RW		BIT(6)
#define		CNFG_HW_ECC_EN		BIT(8)
#define		CNFG_AUTO_FMT_EN	BIT(9)
#define		CNFG_OP_CUST		(6 << 12)
#define NFI_PAGEFMT		(0x04)
#define		PAGEFMT_FDM_ECC_SHIFT	(12)
#define		PAGEFMT_FDM_SHIFT	(8)
#define		PAGEFMT_SPARE_16	(0)
#define		PAGEFMT_SPARE_26	(1)
#define		PAGEFMT_SPARE_27	(2)
#define		PAGEFMT_SPARE_28	(3)
#define		PAGEFMT_SPARE_32	(4)
#define		PAGEFMT_SPARE_36	(5)
#define		PAGEFMT_SPARE_40	(6)
#define		PAGEFMT_SPARE_44	(7)
#define		PAGEFMT_SPARE_48	(8)
#define		PAGEFMT_SPARE_49	(9)
#define		PAGEFMT_SPARE_50	(0xa)
#define		PAGEFMT_SPARE_51	(0xb)
#define		PAGEFMT_SPARE_52	(0xc)
#define		PAGEFMT_SPARE_62	(0xd)
#define		PAGEFMT_SPARE_63	(0xe)
#define		PAGEFMT_SPARE_64	(0xf)
#define		PAGEFMT_SPARE_SHIFT	(4)
#define		PAGEFMT_SEC_SEL_512	BIT(2)
#define		PAGEFMT_512_2K		(0)
#define		PAGEFMT_2K_4K		(1)
#define		PAGEFMT_4K_8K		(2)
#define		PAGEFMT_8K_16K		(3)

#define NFI_CON			(0x08)
#define		CON_FIFO_FLUSH		BIT(0)
#define		CON_NFI_RST		BIT(1)
#define		CON_BRD			BIT(8)  /* burst read */
#define		CON_BWR			BIT(9)	/* burst write */
#define		CON_SEC_SHIFT		(12)
/* Timing control register */
#define NFI_ACCCON		(0x0C)
#define NFI_INTR_EN		(0x10)
#define		INTR_AHB_DONE_EN	BIT(6)
#define NFI_INTR_STA		(0x14)
#define NFI_CMD			(0x20)
#define NFI_ADDRNOB		(0x30)
#define NFI_COLADDR		(0x34)
#define NFI_ROWADDR		(0x38)
#define NFI_STRDATA		(0x40)
#define		STAR_EN			(1)
#define		STAR_DE			(0)
#define NFI_CNRNB		(0x44)
#define NFI_DATAW		(0x50)
#define NFI_DATAR		(0x54)
#define NFI_PIO_DIRDY		(0x58)
#define		PIO_DI_RDY		(0x01)
#define NFI_STA			(0x60)
#define		STA_CMD			BIT(0)
#define		STA_ADDR		BIT(1)
#define		STA_BUSY		BIT(8)
#define		STA_EMP_PAGE		BIT(12)
#define		NFI_FSM_CUSTDATA	(0xe << 16)
#define		NFI_FSM_MASK		(0xf << 16)
#define NFI_ADDRCNTR		(0x70)
#define		CNTR_MASK		GENMASK(16, 12)
#define NFI_STRADDR		(0x80)
#define NFI_BYTELEN		(0x84)
#define NFI_CSEL		(0x90)
#define NFI_FDML(x)		(0xA0 + (x) * sizeof(u32) * 2)
#define NFI_FDMM(x)		(0xA4 + (x) * sizeof(u32) * 2)
#define NFI_FDM_MAX_SIZE	(8)
#define NFI_FDM_MIN_SIZE	(1)
#define NFI_MASTER_STA		(0x224)
#define		MASTER_STA_MASK		(0x0FFF)
#define NFI_EMPTY_THRESH	(0x23C)

#define MTK_NAME		"mtk-nand"
#define KB(x)			((x) * 1024UL)
#define MB(x)			(KB(x) * 1024UL)

#define MTK_TIMEOUT		(500000)
#define MTK_RESET_TIMEOUT	(1000000)
#define MTK_MAX_SECTOR		(16)
#define MTK_NAND_MAX_NSELS	(2)
struct mtk_nfc_bad_mark_ctl {
	void (*bm_swap)(struct mtd_info *, u8 *buf, int raw);
	u32 sec;
	u32 pos;
};

/*
 * FDM: region used to store free OOB data
 */
struct mtk_nfc_fdm {
	u32 reg_size;
	u32 ecc_size;
};

struct mtk_nfc_nand_chip {
	struct list_head node;
	struct nand_chip nand;

	struct mtk_nfc_bad_mark_ctl bad_mark;
	struct mtk_nfc_fdm fdm;
	u32 spare_per_sector;

	int nsels;
	u8 sels[0];
	/* nothing after this field */
};

struct mtk_nfc_clk {
	struct clk *nfi_clk;
	struct clk *pad_clk;
};

struct mtk_nfc {
	struct nand_hw_control controller;
	struct mtk_ecc_config ecc_cfg;
	struct mtk_nfc_clk clk;
	struct mtk_ecc *ecc;

	struct device *dev;
	void __iomem *regs;

	struct completion done;
	struct list_head chips;

	u8 *buffer;
};
static inline struct mtk_nfc_nand_chip *to_mtk_nand(struct nand_chip *nand)
{
	return container_of(nand, struct mtk_nfc_nand_chip, nand);
}

static inline u8 *data_ptr(struct nand_chip *chip, const u8 *p, int i)
{
	return (u8 *)p + i * chip->ecc.size;
}

static inline u8 *oob_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u8 *poi;

	/* map the sector's FDM data to free oob:
	 * the beginning of the oob area stores the FDM data of bad mark sectors
	 */

	if (i < mtk_nand->bad_mark.sec)
		poi = chip->oob_poi + (i + 1) * mtk_nand->fdm.reg_size;
	else if (i == mtk_nand->bad_mark.sec)
		poi = chip->oob_poi;
	else
		poi = chip->oob_poi + i * mtk_nand->fdm.reg_size;

	return poi;
}

static inline int mtk_data_len(struct nand_chip *chip)
{
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);

	return chip->ecc.size + mtk_nand->spare_per_sector;
}

static inline u8 *mtk_data_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	return nfc->buffer + i * mtk_data_len(chip);
}

static inline u8 *mtk_oob_ptr(struct nand_chip *chip, int i)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	return nfc->buffer + i * mtk_data_len(chip) + chip->ecc.size;
}
static inline void nfi_writel(struct mtk_nfc *nfc, u32 val, u32 reg)
{
	writel(val, nfc->regs + reg);
}

static inline void nfi_writew(struct mtk_nfc *nfc, u16 val, u32 reg)
{
	writew(val, nfc->regs + reg);
}

static inline void nfi_writeb(struct mtk_nfc *nfc, u8 val, u32 reg)
{
	writeb(val, nfc->regs + reg);
}

static inline u32 nfi_readl(struct mtk_nfc *nfc, u32 reg)
{
	return readl_relaxed(nfc->regs + reg);
}

static inline u16 nfi_readw(struct mtk_nfc *nfc, u32 reg)
{
	return readw_relaxed(nfc->regs + reg);
}

static inline u8 nfi_readb(struct mtk_nfc *nfc, u32 reg)
{
	return readb_relaxed(nfc->regs + reg);
}
static void mtk_nfc_hw_reset(struct mtk_nfc *nfc)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	/* reset all registers and force the NFI master to terminate */
	nfi_writel(nfc, CON_FIFO_FLUSH | CON_NFI_RST, NFI_CON);

	/* wait for the master to finish the last transaction */
	ret = readl_poll_timeout(nfc->regs + NFI_MASTER_STA, val,
				 !(val & MASTER_STA_MASK), 50,
				 MTK_RESET_TIMEOUT);
	if (ret)
		dev_warn(dev, "master active in reset [0x%x] = 0x%x\n",
			 NFI_MASTER_STA, val);

	/* ensure any status register affected by the NFI master is reset */
	nfi_writel(nfc, CON_FIFO_FLUSH | CON_NFI_RST, NFI_CON);
	nfi_writew(nfc, STAR_DE, NFI_STRDATA);
}
static int mtk_nfc_send_command(struct mtk_nfc *nfc, u8 command)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_writel(nfc, command, NFI_CMD);

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_STA, val,
					!(val & STA_CMD), 10, MTK_TIMEOUT);
	if (ret) {
		dev_warn(dev, "nfi core timed out entering command mode\n");
		return -EIO;
	}

	return 0;
}
static int mtk_nfc_send_address(struct mtk_nfc *nfc, int addr)
{
	struct device *dev = nfc->dev;
	u32 val;
	int ret;

	nfi_writel(nfc, addr, NFI_COLADDR);
	nfi_writel(nfc, 0, NFI_ROWADDR);
	nfi_writew(nfc, 1, NFI_ADDRNOB);

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_STA, val,
					!(val & STA_ADDR), 10, MTK_TIMEOUT);
	if (ret) {
		dev_warn(dev, "nfi core timed out entering address mode\n");
		return -EIO;
	}

	return 0;
}
static int mtk_nfc_hw_runtime_config(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	u32 fmt, spare;

	if (!mtd->writesize)
		return 0;

	spare = mtk_nand->spare_per_sector;

	switch (mtd->writesize) {
	case 512:
		fmt = PAGEFMT_512_2K | PAGEFMT_SEC_SEL_512;
		break;
	case KB(2):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_2K_4K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_512_2K;
		break;
	case KB(4):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_4K_8K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_2K_4K;
		break;
	case KB(8):
		if (chip->ecc.size == 512)
			fmt = PAGEFMT_8K_16K | PAGEFMT_SEC_SEL_512;
		else
			fmt = PAGEFMT_4K_8K;
		break;
	case KB(16):
		fmt = PAGEFMT_8K_16K;
		break;
	default:
		dev_err(nfc->dev, "invalid page len: %d\n", mtd->writesize);
		return -EINVAL;
	}

	/*
	 * the hardware will double the value for this eccsize, so we need to
	 * halve it
	 */
	if (chip->ecc.size == 1024)
		spare >>= 1;

	switch (spare) {
	case 16:
		fmt |= (PAGEFMT_SPARE_16 << PAGEFMT_SPARE_SHIFT);
		break;
	case 26:
		fmt |= (PAGEFMT_SPARE_26 << PAGEFMT_SPARE_SHIFT);
		break;
	case 27:
		fmt |= (PAGEFMT_SPARE_27 << PAGEFMT_SPARE_SHIFT);
		break;
	case 28:
		fmt |= (PAGEFMT_SPARE_28 << PAGEFMT_SPARE_SHIFT);
		break;
	case 32:
		fmt |= (PAGEFMT_SPARE_32 << PAGEFMT_SPARE_SHIFT);
		break;
	case 36:
		fmt |= (PAGEFMT_SPARE_36 << PAGEFMT_SPARE_SHIFT);
		break;
	case 40:
		fmt |= (PAGEFMT_SPARE_40 << PAGEFMT_SPARE_SHIFT);
		break;
	case 44:
		fmt |= (PAGEFMT_SPARE_44 << PAGEFMT_SPARE_SHIFT);
		break;
	case 48:
		fmt |= (PAGEFMT_SPARE_48 << PAGEFMT_SPARE_SHIFT);
		break;
	case 49:
		fmt |= (PAGEFMT_SPARE_49 << PAGEFMT_SPARE_SHIFT);
		break;
	case 50:
		fmt |= (PAGEFMT_SPARE_50 << PAGEFMT_SPARE_SHIFT);
		break;
	case 51:
		fmt |= (PAGEFMT_SPARE_51 << PAGEFMT_SPARE_SHIFT);
		break;
	case 52:
		fmt |= (PAGEFMT_SPARE_52 << PAGEFMT_SPARE_SHIFT);
		break;
	case 62:
		fmt |= (PAGEFMT_SPARE_62 << PAGEFMT_SPARE_SHIFT);
		break;
	case 63:
		fmt |= (PAGEFMT_SPARE_63 << PAGEFMT_SPARE_SHIFT);
		break;
	case 64:
		fmt |= (PAGEFMT_SPARE_64 << PAGEFMT_SPARE_SHIFT);
		break;
	default:
		dev_err(nfc->dev, "invalid spare per sector %d\n", spare);
		return -EINVAL;
	}

	fmt |= mtk_nand->fdm.reg_size << PAGEFMT_FDM_SHIFT;
	fmt |= mtk_nand->fdm.ecc_size << PAGEFMT_FDM_ECC_SHIFT;
	nfi_writew(nfc, fmt, NFI_PAGEFMT);

	nfc->ecc_cfg.strength = chip->ecc.strength;
	nfc->ecc_cfg.len = chip->ecc.size + mtk_nand->fdm.ecc_size;

	return 0;
}
static void mtk_nfc_select_chip(struct mtd_info *mtd, int chip)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	struct mtk_nfc *nfc = nand_get_controller_data(nand);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(nand);

	if (chip < 0)
		return;

	mtk_nfc_hw_runtime_config(mtd);

	nfi_writel(nfc, mtk_nand->sels[chip], NFI_CSEL);
}
static int mtk_nfc_dev_ready(struct mtd_info *mtd)
{
	struct mtk_nfc *nfc = nand_get_controller_data(mtd_to_nand(mtd));

	if (nfi_readl(nfc, NFI_STA) & STA_BUSY)
		return 0;

	return 1;
}
static void mtk_nfc_cmd_ctrl(struct mtd_info *mtd, int dat, unsigned int ctrl)
{
	struct mtk_nfc *nfc = nand_get_controller_data(mtd_to_nand(mtd));

	if (ctrl & NAND_ALE) {
		mtk_nfc_send_address(nfc, dat);
	} else if (ctrl & NAND_CLE) {
		mtk_nfc_hw_reset(nfc);

		nfi_writew(nfc, CNFG_OP_CUST, NFI_CNFG);
		mtk_nfc_send_command(nfc, dat);
	}
}
static inline void mtk_nfc_wait_ioready(struct mtk_nfc *nfc)
{
	int rc;
	u8 val;

	rc = readb_poll_timeout_atomic(nfc->regs + NFI_PIO_DIRDY, val,
				       val & PIO_DI_RDY, 10, MTK_TIMEOUT);
	if (rc < 0)
		dev_err(nfc->dev, "data not ready\n");
}
static inline u8 mtk_nfc_read_byte(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	u32 reg;

	/* after each byte read, the NFI_STA reg is reset by the hardware */
	reg = nfi_readl(nfc, NFI_STA) & NFI_FSM_MASK;
	if (reg != NFI_FSM_CUSTDATA) {
		reg = nfi_readw(nfc, NFI_CNFG);
		reg |= CNFG_BYTE_RW | CNFG_READ_EN;
		nfi_writew(nfc, reg, NFI_CNFG);

		/*
		 * set to max sector to allow the HW to continue reading over
		 * unaligned accesses
		 */
		reg = (MTK_MAX_SECTOR << CON_SEC_SHIFT) | CON_BRD;
		nfi_writel(nfc, reg, NFI_CON);

		/* trigger to fetch data */
		nfi_writew(nfc, STAR_EN, NFI_STRDATA);
	}

	mtk_nfc_wait_ioready(nfc);

	return nfi_readb(nfc, NFI_DATAR);
}
static void mtk_nfc_read_buf(struct mtd_info *mtd, u8 *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		buf[i] = mtk_nfc_read_byte(mtd);
}
static void mtk_nfc_write_byte(struct mtd_info *mtd, u8 byte)
{
	struct mtk_nfc *nfc = nand_get_controller_data(mtd_to_nand(mtd));
	u32 reg;

	reg = nfi_readl(nfc, NFI_STA) & NFI_FSM_MASK;

	if (reg != NFI_FSM_CUSTDATA) {
		reg = nfi_readw(nfc, NFI_CNFG) | CNFG_BYTE_RW;
		nfi_writew(nfc, reg, NFI_CNFG);

		reg = MTK_MAX_SECTOR << CON_SEC_SHIFT | CON_BWR;
		nfi_writel(nfc, reg, NFI_CON);

		nfi_writew(nfc, STAR_EN, NFI_STRDATA);
	}

	mtk_nfc_wait_ioready(nfc);
	nfi_writeb(nfc, byte, NFI_DATAW);
}
static void mtk_nfc_write_buf(struct mtd_info *mtd, const u8 *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		mtk_nfc_write_byte(mtd, buf[i]);
}
static int mtk_nfc_sector_encode(struct nand_chip *chip, u8 *data)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	int size = chip->ecc.size + mtk_nand->fdm.reg_size;

	nfc->ecc_cfg.mode = ECC_DMA_MODE;
	nfc->ecc_cfg.op = ECC_ENCODE;

	return mtk_ecc_encode(nfc->ecc, &nfc->ecc_cfg, data, size);
}
static void mtk_nfc_no_bad_mark_swap(struct mtd_info *a, u8 *b, int c)
{
	/* nop */
}
static void mtk_nfc_bad_mark_swap(struct mtd_info *mtd, u8 *buf, int raw)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *nand = to_mtk_nand(chip);
	u32 bad_pos = nand->bad_mark.pos;

	if (raw)
		bad_pos += nand->bad_mark.sec * mtk_data_len(chip);
	else
		bad_pos += nand->bad_mark.sec * chip->ecc.size;

	swap(chip->oob_poi[0], buf[bad_pos]);
}
static int mtk_nfc_format_subpage(struct mtd_info *mtd, u32 offset,
				  u32 len, const u8 *buf)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 start, end;
	int i, ret;

	start = offset / chip->ecc.size;
	end = DIV_ROUND_UP(offset + len, chip->ecc.size);

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	for (i = 0; i < chip->ecc.steps; i++) {
		memcpy(mtk_data_ptr(chip, i), data_ptr(chip, buf, i),
		       chip->ecc.size);

		if (start > i || i >= end)
			continue;

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		memcpy(mtk_oob_ptr(chip, i), oob_ptr(chip, i), fdm->reg_size);

		/* program the CRC back to the OOB */
		ret = mtk_nfc_sector_encode(chip, mtk_data_ptr(chip, i));
		if (ret < 0)
			return ret;
	}

	return 0;
}
static void mtk_nfc_format_page(struct mtd_info *mtd, const u8 *buf)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 i;

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	for (i = 0; i < chip->ecc.steps; i++) {
		if (buf)
			memcpy(mtk_data_ptr(chip, i), data_ptr(chip, buf, i),
			       chip->ecc.size);

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		memcpy(mtk_oob_ptr(chip, i), oob_ptr(chip, i), fdm->reg_size);
	}
}
static inline void mtk_nfc_read_fdm(struct nand_chip *chip, u32 start,
				    u32 sectors)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 vall, valm;
	u8 *oobptr;
	int i, j;

	for (i = 0; i < sectors; i++) {
		oobptr = oob_ptr(chip, start + i);
		vall = nfi_readl(nfc, NFI_FDML(i));
		valm = nfi_readl(nfc, NFI_FDMM(i));

		for (j = 0; j < fdm->reg_size; j++)
			oobptr[j] = (j >= 4 ? valm : vall) >> ((j % 4) * 8);
	}
}
static inline void mtk_nfc_write_fdm(struct nand_chip *chip)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 vall, valm;
	u8 *oobptr;
	int i, j;

	for (i = 0; i < chip->ecc.steps; i++) {
		oobptr = oob_ptr(chip, i);
		vall = 0;
		valm = 0;
		for (j = 0; j < 8; j++) {
			if (j < 4)
				vall |= (j < fdm->reg_size ? oobptr[j] : 0xff)
						<< (j * 8);
			else
				valm |= (j < fdm->reg_size ? oobptr[j] : 0xff)
						<< ((j - 4) * 8);
		}
		nfi_writel(nfc, vall, NFI_FDML(i));
		nfi_writel(nfc, valm, NFI_FDMM(i));
	}
}
static int mtk_nfc_do_write_page(struct mtd_info *mtd, struct nand_chip *chip,
				 const u8 *buf, int page, int len)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct device *dev = nfc->dev;
	dma_addr_t addr;
	u32 reg;
	int ret;

	addr = dma_map_single(dev, (void *)buf, len, DMA_TO_DEVICE);
	ret = dma_mapping_error(nfc->dev, addr);
	if (ret) {
		dev_err(nfc->dev, "dma mapping error\n");
		return -EINVAL;
	}

	reg = nfi_readw(nfc, NFI_CNFG) | CNFG_AHB | CNFG_DMA_BURST_EN;
	nfi_writew(nfc, reg, NFI_CNFG);

	nfi_writel(nfc, chip->ecc.steps << CON_SEC_SHIFT, NFI_CON);
	nfi_writel(nfc, lower_32_bits(addr), NFI_STRADDR);
	nfi_writew(nfc, INTR_AHB_DONE_EN, NFI_INTR_EN);

	init_completion(&nfc->done);

	reg = nfi_readl(nfc, NFI_CON) | CON_BWR;
	nfi_writel(nfc, reg, NFI_CON);
	nfi_writew(nfc, STAR_EN, NFI_STRDATA);

	ret = wait_for_completion_timeout(&nfc->done, msecs_to_jiffies(500));
	if (!ret) {
		dev_err(dev, "program ahb done timeout\n");
		nfi_writew(nfc, 0, NFI_INTR_EN);
		ret = -ETIMEDOUT;
		goto timeout;
	}

	ret = readl_poll_timeout_atomic(nfc->regs + NFI_ADDRCNTR, reg,
					(reg & CNTR_MASK) >= chip->ecc.steps,
					10, MTK_TIMEOUT);
	if (ret)
		dev_err(dev, "hwecc write timeout\n");

timeout:

	dma_unmap_single(nfc->dev, addr, len, DMA_TO_DEVICE);
	nfi_writel(nfc, 0, NFI_CON);

	return ret;
}
static int mtk_nfc_write_page(struct mtd_info *mtd, struct nand_chip *chip,
			      const u8 *buf, int page, int raw)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	const u8 *bufpoi;
	size_t len;
	u32 reg;
	int ret;

	if (!raw) {
		/* OOB => FDM: from register, ECC: from HW */
		reg = nfi_readw(nfc, NFI_CNFG) | CNFG_AUTO_FMT_EN;
		nfi_writew(nfc, reg | CNFG_HW_ECC_EN, NFI_CNFG);

		nfc->ecc_cfg.op = ECC_ENCODE;
		nfc->ecc_cfg.mode = ECC_NFI_MODE;
		ret = mtk_ecc_enable(nfc->ecc, &nfc->ecc_cfg);
		if (ret) {
			/* clear NFI config */
			reg = nfi_readw(nfc, NFI_CNFG);
			reg &= ~(CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
			nfi_writew(nfc, reg, NFI_CNFG);

			return ret;
		}

		memcpy(nfc->buffer, buf, mtd->writesize);
		mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, raw);
		bufpoi = nfc->buffer;

		/* write OOB into the FDM registers (OOB area in MTK NAND) */
		mtk_nfc_write_fdm(chip);
	} else {
		bufpoi = buf;
	}

	len = mtd->writesize + (raw ? mtd->oobsize : 0);
	ret = mtk_nfc_do_write_page(mtd, chip, bufpoi, page, len);

	if (!raw)
		mtk_ecc_disable(nfc->ecc);

	return ret;
}
static int mtk_nfc_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip, const u8 *buf,
				    int oob_on, int page)
{
	return mtk_nfc_write_page(mtd, chip, buf, page, 0);
}
static int mtk_nfc_write_page_raw(struct mtd_info *mtd, struct nand_chip *chip,
				  const u8 *buf, int oob_on, int pg)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);

	mtk_nfc_format_page(mtd, buf);
	return mtk_nfc_write_page(mtd, chip, nfc->buffer, pg, 1);
}
static int mtk_nfc_write_subpage_hwecc(struct mtd_info *mtd,
				       struct nand_chip *chip, u32 offset,
				       u32 data_len, const u8 *buf,
				       int oob_on, int page)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	int ret;

	ret = mtk_nfc_format_subpage(mtd, offset, data_len, buf);
	if (ret < 0)
		return ret;

	/* use the data in the private buffer (now with FDM and CRC) */
	return mtk_nfc_write_page(mtd, chip, nfc->buffer, page, 1);
}
static int mtk_nfc_write_oob_std(struct mtd_info *mtd, struct nand_chip *chip,
				 int page)
{
	int ret;

	chip->cmdfunc(mtd, NAND_CMD_SEQIN, 0x00, page);

	ret = mtk_nfc_write_page_raw(mtd, chip, NULL, 1, page);
	if (ret < 0)
		return -EIO;

	chip->cmdfunc(mtd, NAND_CMD_PAGEPROG, -1, -1);
	ret = chip->waitfunc(mtd, chip);

	return ret & NAND_STATUS_FAIL ? -EIO : 0;
}
static int mtk_nfc_update_ecc_stats(struct mtd_info *mtd, u8 *buf, u32 sectors)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_ecc_stats stats;
	int rc, i;

	rc = nfi_readl(nfc, NFI_STA) & STA_EMP_PAGE;
	if (rc) {
		memset(buf, 0xff, sectors * chip->ecc.size);
		for (i = 0; i < sectors; i++)
			memset(oob_ptr(chip, i), 0xff, mtk_nand->fdm.reg_size);
		return 0;
	}

	mtk_ecc_get_stats(nfc->ecc, &stats, sectors);
	mtd->ecc_stats.corrected += stats.corrected;
	mtd->ecc_stats.failed += stats.failed;

	return stats.bitflips;
}
static int mtk_nfc_read_subpage(struct mtd_info *mtd, struct nand_chip *chip,
				u32 data_offs, u32 readlen,
				u8 *bufpoi, int page, int raw)
{
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u32 spare = mtk_nand->spare_per_sector;
	u32 column, sectors, start, end, reg;
	dma_addr_t addr;
	int bitflips;
	size_t len;
	u8 *buf;
	int rc;

	start = data_offs / chip->ecc.size;
	end = DIV_ROUND_UP(data_offs + readlen, chip->ecc.size);

	sectors = end - start;
	column = start * (chip->ecc.size + spare);

	len = sectors * chip->ecc.size + (raw ? sectors * spare : 0);
	buf = bufpoi + start * chip->ecc.size;

	if (column != 0)
		chip->cmdfunc(mtd, NAND_CMD_RNDOUT, column, -1);

	addr = dma_map_single(nfc->dev, buf, len, DMA_FROM_DEVICE);
	rc = dma_mapping_error(nfc->dev, addr);
	if (rc) {
		dev_err(nfc->dev, "dma mapping error\n");

		return -EINVAL;
	}

	reg = nfi_readw(nfc, NFI_CNFG);
	reg |= CNFG_READ_EN | CNFG_DMA_BURST_EN | CNFG_AHB;
	if (!raw) {
		reg |= CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN;
		nfi_writew(nfc, reg, NFI_CNFG);

		nfc->ecc_cfg.mode = ECC_NFI_MODE;
		nfc->ecc_cfg.sectors = sectors;
		nfc->ecc_cfg.op = ECC_DECODE;
		rc = mtk_ecc_enable(nfc->ecc, &nfc->ecc_cfg);
		if (rc) {
			dev_err(nfc->dev, "ecc enable\n");
			/* clear NFI_CNFG */
			reg &= ~(CNFG_DMA_BURST_EN | CNFG_AHB | CNFG_READ_EN |
				CNFG_AUTO_FMT_EN | CNFG_HW_ECC_EN);
			nfi_writew(nfc, reg, NFI_CNFG);
			dma_unmap_single(nfc->dev, addr, len, DMA_FROM_DEVICE);

			return rc;
		}
	} else {
		nfi_writew(nfc, reg, NFI_CNFG);
	}

	nfi_writel(nfc, sectors << CON_SEC_SHIFT, NFI_CON);
	nfi_writew(nfc, INTR_AHB_DONE_EN, NFI_INTR_EN);
	nfi_writel(nfc, lower_32_bits(addr), NFI_STRADDR);

	init_completion(&nfc->done);
	reg = nfi_readl(nfc, NFI_CON) | CON_BRD;
	nfi_writel(nfc, reg, NFI_CON);
	nfi_writew(nfc, STAR_EN, NFI_STRDATA);

	rc = wait_for_completion_timeout(&nfc->done, msecs_to_jiffies(500));
	if (!rc)
		dev_warn(nfc->dev, "read ahb/dma done timeout\n");

	rc = readl_poll_timeout_atomic(nfc->regs + NFI_BYTELEN, reg,
				       (reg & CNTR_MASK) >= sectors, 10,
				       MTK_TIMEOUT);
	if (rc < 0) {
		dev_err(nfc->dev, "subpage done timeout\n");
		bitflips = -EIO;
	} else {
		bitflips = 0;
		if (!raw) {
			rc = mtk_ecc_wait_done(nfc->ecc, ECC_DECODE);
			bitflips = rc < 0 ? -ETIMEDOUT :
				mtk_nfc_update_ecc_stats(mtd, buf, sectors);
			mtk_nfc_read_fdm(chip, start, sectors);
		}
	}

	dma_unmap_single(nfc->dev, addr, len, DMA_FROM_DEVICE);

	if (raw)
		goto done;

	mtk_ecc_disable(nfc->ecc);

	if (clamp(mtk_nand->bad_mark.sec, start, end) == mtk_nand->bad_mark.sec)
		mtk_nand->bad_mark.bm_swap(mtd, bufpoi, raw);
done:
	nfi_writel(nfc, 0, NFI_CON);

	return bitflips;
}
static int mtk_nfc_read_subpage_hwecc(struct mtd_info *mtd,
				      struct nand_chip *chip, u32 off,
				      u32 len, u8 *p, int pg)
{
	return mtk_nfc_read_subpage(mtd, chip, off, len, p, pg, 0);
}
static int mtk_nfc_read_page_hwecc(struct mtd_info *mtd,
				   struct nand_chip *chip, u8 *p,
				   int oob_on, int pg)
{
	return mtk_nfc_read_subpage(mtd, chip, 0, mtd->writesize, p, pg, 0);
}
static int mtk_nfc_read_page_raw(struct mtd_info *mtd, struct nand_chip *chip,
				 u8 *buf, int oob_on, int page)
{
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc *nfc = nand_get_controller_data(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	int i, ret;

	memset(nfc->buffer, 0xff, mtd->writesize + mtd->oobsize);
	ret = mtk_nfc_read_subpage(mtd, chip, 0, mtd->writesize, nfc->buffer,
				   page, 1);
	if (ret < 0)
		return ret;

	for (i = 0; i < chip->ecc.steps; i++) {
		memcpy(oob_ptr(chip, i), mtk_oob_ptr(chip, i), fdm->reg_size);

		if (i == mtk_nand->bad_mark.sec)
			mtk_nand->bad_mark.bm_swap(mtd, nfc->buffer, 1);

		if (buf)
			memcpy(data_ptr(chip, buf, i), mtk_data_ptr(chip, i),
			       chip->ecc.size);
	}

	return ret;
}
static int mtk_nfc_read_oob_std(struct mtd_info *mtd, struct nand_chip *chip,
				int page)
{
	chip->cmdfunc(mtd, NAND_CMD_READ0, 0, page);

	return mtk_nfc_read_page_raw(mtd, chip, NULL, 1, page);
}
static inline void mtk_nfc_hw_init(struct mtk_nfc *nfc)
{
	/*
	 * ACCON: access timing control register
	 * -------------------------------------
	 * 31:28: minimum required time for CS post pulling down after accessing
	 *	the device
	 * 27:22: minimum required time for CS pre pulling down before accessing
	 *	the device
	 * 21:16: minimum required time from NCEB low to NREB low
	 * 15:12: minimum required time from NWEB high to NREB low.
	 * 11:08: write enable hold time
	 * 07:04: write wait states
	 * 03:00: read wait states
	 */
	nfi_writel(nfc, 0x10804211, NFI_ACCCON);

	/*
	 * CNRNB: nand ready/busy register
	 * -------------------------------
	 * 7:4: timeout register for polling the NAND busy/ready signal
	 * 0  : poll the status of the busy/ready signal after [7:4]*16 cycles.
	 */
	nfi_writew(nfc, 0xf1, NFI_CNRNB);
	nfi_writew(nfc, PAGEFMT_8K_16K, NFI_PAGEFMT);

	mtk_nfc_hw_reset(nfc);

	nfi_readl(nfc, NFI_INTR_STA);
	nfi_writel(nfc, 0, NFI_INTR_EN);
}
static irqreturn_t mtk_nfc_irq(int irq, void *id)
{
	struct mtk_nfc *nfc = id;
	u16 sta, ien;

	sta = nfi_readw(nfc, NFI_INTR_STA);
	ien = nfi_readw(nfc, NFI_INTR_EN);

	if (!(sta & ien))
		return IRQ_NONE;

	nfi_writew(nfc, ~sta & ien, NFI_INTR_EN);
	complete(&nfc->done);

	return IRQ_HANDLED;
}
static int mtk_nfc_enable_clk(struct device *dev, struct mtk_nfc_clk *clk)
{
	int ret;

	ret = clk_prepare_enable(clk->nfi_clk);
	if (ret) {
		dev_err(dev, "failed to enable nfi clk\n");
		return ret;
	}

	ret = clk_prepare_enable(clk->pad_clk);
	if (ret) {
		dev_err(dev, "failed to enable pad clk\n");
		clk_disable_unprepare(clk->nfi_clk);
		return ret;
	}

	return 0;
}
static void mtk_nfc_disable_clk(struct mtk_nfc_clk *clk)
{
	clk_disable_unprepare(clk->nfi_clk);
	clk_disable_unprepare(clk->pad_clk);
}
static int mtk_nfc_ooblayout_free(struct mtd_info *mtd, int section,
				  struct mtd_oob_region *oob_region)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	struct mtk_nfc_fdm *fdm = &mtk_nand->fdm;
	u32 eccsteps;

	eccsteps = mtd->writesize / chip->ecc.size;

	if (section >= eccsteps)
		return -ERANGE;

	oob_region->length = fdm->reg_size - fdm->ecc_size;
	oob_region->offset = section * fdm->reg_size + fdm->ecc_size;

	return 0;
}
static int mtk_nfc_ooblayout_ecc(struct mtd_info *mtd, int section,
				 struct mtd_oob_region *oob_region)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *mtk_nand = to_mtk_nand(chip);
	u32 eccsteps;

	if (section)
		return -ERANGE;

	eccsteps = mtd->writesize / chip->ecc.size;
	oob_region->offset = mtk_nand->fdm.reg_size * eccsteps;
	oob_region->length = mtd->oobsize - oob_region->offset;

	return 0;
}
static const struct mtd_ooblayout_ops mtk_nfc_ooblayout_ops = {
	.free = mtk_nfc_ooblayout_free,
	.ecc = mtk_nfc_ooblayout_ecc,
};
static void mtk_nfc_set_fdm(struct mtk_nfc_fdm *fdm, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	struct mtk_nfc_nand_chip *chip = to_mtk_nand(nand);
	u32 ecc_bytes;

	ecc_bytes = DIV_ROUND_UP(nand->ecc.strength * ECC_PARITY_BITS, 8);

	fdm->reg_size = chip->spare_per_sector - ecc_bytes;
	if (fdm->reg_size > NFI_FDM_MAX_SIZE)
		fdm->reg_size = NFI_FDM_MAX_SIZE;

	/* bad block mark storage */
	fdm->ecc_size = 1;
}
static void mtk_nfc_set_bad_mark_ctl(struct mtk_nfc_bad_mark_ctl *bm_ctl,
				     struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);

	if (mtd->writesize == 512) {
		bm_ctl->bm_swap = mtk_nfc_no_bad_mark_swap;
	} else {
		bm_ctl->bm_swap = mtk_nfc_bad_mark_swap;
		bm_ctl->sec = mtd->writesize / mtk_data_len(nand);
		bm_ctl->pos = mtd->writesize % mtk_data_len(nand);
	}
}
static void mtk_nfc_set_spare_per_sector(u32 *sps, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	u32 spare[] = {16, 26, 27, 28, 32, 36, 40, 44,
			48, 49, 50, 51, 52, 62, 63, 64};
	u32 eccsteps, i;

	eccsteps = mtd->writesize / nand->ecc.size;
	*sps = mtd->oobsize / eccsteps;

	if (nand->ecc.size == 1024)
		*sps >>= 1;

	for (i = 0; i < ARRAY_SIZE(spare); i++) {
		if (*sps <= spare[i]) {
			if (!i)
				*sps = spare[i];
			else if (*sps != spare[i])
				*sps = spare[i - 1];
			break;
		}
	}

	if (i >= ARRAY_SIZE(spare))
		*sps = spare[ARRAY_SIZE(spare) - 1];

	if (nand->ecc.size == 1024)
		*sps <<= 1;
}
static int mtk_nfc_ecc_init(struct device *dev, struct mtd_info *mtd)
{
	struct nand_chip *nand = mtd_to_nand(mtd);
	u32 spare;
	int free;

	/* support only ecc hw mode */
	if (nand->ecc.mode != NAND_ECC_HW) {
		dev_err(dev, "ecc.mode not supported\n");
		return -EINVAL;
	}

	/* if optional dt settings not present */
	if (!nand->ecc.size || !nand->ecc.strength) {
		/* use datasheet requirements */
		nand->ecc.strength = nand->ecc_strength_ds;
		nand->ecc.size = nand->ecc_step_ds;

		/*
		 * align eccstrength and eccsize
		 * this controller only supports 512 and 1024 sizes
		 */
		if (nand->ecc.size < 1024) {
			if (mtd->writesize > 512) {
				nand->ecc.size = 1024;
				nand->ecc.strength <<= 1;
			} else {
				nand->ecc.size = 512;
			}
		} else {
			nand->ecc.size = 1024;
		}

		mtk_nfc_set_spare_per_sector(&spare, mtd);

		/* calculate oob bytes except ecc parity data */
		free = ((nand->ecc.strength * ECC_PARITY_BITS) + 7) >> 3;
		free = spare - free;

		/*
		 * enhance ecc strength if oob left is bigger than max FDM size
		 * or reduce ecc strength if oob size is not enough for ecc
		 * parity data.
		 */
		if (free > NFI_FDM_MAX_SIZE) {
			spare -= NFI_FDM_MAX_SIZE;
			nand->ecc.strength = (spare << 3) / ECC_PARITY_BITS;
		} else if (free < 0) {
			spare -= NFI_FDM_MIN_SIZE;
			nand->ecc.strength = (spare << 3) / ECC_PARITY_BITS;
		}
	}

	mtk_ecc_adjust_strength(&nand->ecc.strength);

	dev_info(dev, "eccsize %d eccstrength %d\n",
		 nand->ecc.size, nand->ecc.strength);

	return 0;
}
static int mtk_nfc_nand_chip_init(struct device *dev, struct mtk_nfc *nfc,
				  struct device_node *np)
{
	struct mtk_nfc_nand_chip *chip;
	struct nand_chip *nand;
	struct mtd_info *mtd;
	int nsels, len;
	u32 tmp;
	int ret;
	int i;

	if (!of_get_property(np, "reg", &nsels))
		return -ENODEV;

	nsels /= sizeof(u32);
	if (!nsels || nsels > MTK_NAND_MAX_NSELS) {
		dev_err(dev, "invalid reg property size %d\n", nsels);
		return -EINVAL;
	}

	chip = devm_kzalloc(dev, sizeof(*chip) + nsels * sizeof(u8),
			    GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	chip->nsels = nsels;
	for (i = 0; i < nsels; i++) {
		ret = of_property_read_u32_index(np, "reg", i, &tmp);
		if (ret) {
			dev_err(dev, "reg property failure : %d\n", ret);
			return ret;
		}
		chip->sels[i] = tmp;
	}

	nand = &chip->nand;
	nand->controller = &nfc->controller;

	nand_set_flash_node(nand, np);
	nand_set_controller_data(nand, nfc);

	nand->options |= NAND_USE_BOUNCE_BUFFER | NAND_SUBPAGE_READ;
	nand->dev_ready = mtk_nfc_dev_ready;
	nand->select_chip = mtk_nfc_select_chip;
	nand->write_byte = mtk_nfc_write_byte;
	nand->write_buf = mtk_nfc_write_buf;
	nand->read_byte = mtk_nfc_read_byte;
	nand->read_buf = mtk_nfc_read_buf;
	nand->cmd_ctrl = mtk_nfc_cmd_ctrl;

	/* set default mode in case dt entry is missing */
	nand->ecc.mode = NAND_ECC_HW;

	nand->ecc.write_subpage = mtk_nfc_write_subpage_hwecc;
	nand->ecc.write_page_raw = mtk_nfc_write_page_raw;
	nand->ecc.write_page = mtk_nfc_write_page_hwecc;
	nand->ecc.write_oob_raw = mtk_nfc_write_oob_std;
	nand->ecc.write_oob = mtk_nfc_write_oob_std;

	nand->ecc.read_subpage = mtk_nfc_read_subpage_hwecc;
	nand->ecc.read_page_raw = mtk_nfc_read_page_raw;
	nand->ecc.read_page = mtk_nfc_read_page_hwecc;
	nand->ecc.read_oob_raw = mtk_nfc_read_oob_std;
	nand->ecc.read_oob = mtk_nfc_read_oob_std;

	mtd = nand_to_mtd(nand);
	mtd->owner = THIS_MODULE;
	mtd->dev.parent = dev;
	mtd->name = MTK_NAME;
	mtd_set_ooblayout(mtd, &mtk_nfc_ooblayout_ops);

	mtk_nfc_hw_init(nfc);

	ret = nand_scan_ident(mtd, nsels, NULL);
	if (ret)
		return -ENODEV;

	/* store bbt magic in page, cause OOB is not protected */
	if (nand->bbt_options & NAND_BBT_USE_FLASH)
		nand->bbt_options |= NAND_BBT_NO_OOB;

	ret = mtk_nfc_ecc_init(dev, mtd);
	if (ret)
		return -EINVAL;

	if (nand->options & NAND_BUSWIDTH_16) {
		dev_err(dev, "16bits buswidth not supported");
		return -EINVAL;
	}

	mtk_nfc_set_spare_per_sector(&chip->spare_per_sector, mtd);
	mtk_nfc_set_fdm(&chip->fdm, mtd);
	mtk_nfc_set_bad_mark_ctl(&chip->bad_mark, mtd);

	len = mtd->writesize + mtd->oobsize;
	nfc->buffer = devm_kzalloc(dev, len, GFP_KERNEL);
	if (!nfc->buffer)
		return -ENOMEM;

	ret = nand_scan_tail(mtd);
	if (ret)
		return -ENODEV;

	ret = mtd_device_parse_register(mtd, NULL, NULL, NULL, 0);
	if (ret) {
		dev_err(dev, "mtd parse partition error\n");
		nand_release(mtd);
		return ret;
	}

	list_add_tail(&chip->node, &nfc->chips);

	return 0;
}
static int mtk_nfc_nand_chips_init(struct device *dev, struct mtk_nfc *nfc)
{
	struct device_node *np = dev->of_node;
	struct device_node *nand_np;
	int ret;

	for_each_child_of_node(np, nand_np) {
		ret = mtk_nfc_nand_chip_init(dev, nfc, nand_np);
		if (ret) {
			of_node_put(nand_np);
			return ret;
		}
	}

	return 0;
}
static int mtk_nfc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct device_node *np = dev->of_node;
	struct mtk_nfc *nfc;
	struct resource *res;
	int ret, irq;

	nfc = devm_kzalloc(dev, sizeof(*nfc), GFP_KERNEL);
	if (!nfc)
		return -ENOMEM;

	spin_lock_init(&nfc->controller.lock);
	init_waitqueue_head(&nfc->controller.wq);
	INIT_LIST_HEAD(&nfc->chips);

	/* probe defer if not ready */
	nfc->ecc = of_mtk_ecc_get(np);
	if (IS_ERR(nfc->ecc))
		return PTR_ERR(nfc->ecc);
	else if (!nfc->ecc)
		return -ENODEV;

	nfc->dev = dev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	nfc->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(nfc->regs)) {
		ret = PTR_ERR(nfc->regs);
		dev_err(dev, "no nfi base\n");
		goto release_ecc;
	}

	nfc->clk.nfi_clk = devm_clk_get(dev, "nfi_clk");
	if (IS_ERR(nfc->clk.nfi_clk)) {
		dev_err(dev, "no clk\n");
		ret = PTR_ERR(nfc->clk.nfi_clk);
		goto release_ecc;
	}

	nfc->clk.pad_clk = devm_clk_get(dev, "pad_clk");
	if (IS_ERR(nfc->clk.pad_clk)) {
		dev_err(dev, "no pad clk\n");
		ret = PTR_ERR(nfc->clk.pad_clk);
		goto release_ecc;
	}

	ret = mtk_nfc_enable_clk(dev, &nfc->clk);
	if (ret)
		goto release_ecc;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "no nfi irq resource\n");
		ret = -EINVAL;
		goto clk_disable;
	}

	ret = devm_request_irq(dev, irq, mtk_nfc_irq, 0x0, "mtk-nand", nfc);
	if (ret) {
		dev_err(dev, "failed to request nfi irq\n");
		goto clk_disable;
	}

	ret = dma_set_mask(dev, DMA_BIT_MASK(32));
	if (ret) {
		dev_err(dev, "failed to set dma mask\n");
		goto clk_disable;
	}

	platform_set_drvdata(pdev, nfc);

	ret = mtk_nfc_nand_chips_init(dev, nfc);
	if (ret) {
		dev_err(dev, "failed to init nand chips\n");
		goto clk_disable;
	}

	return 0;

clk_disable:
	mtk_nfc_disable_clk(&nfc->clk);

release_ecc:
	mtk_ecc_release(nfc->ecc);

	return ret;
}
static int mtk_nfc_remove(struct platform_device *pdev)
{
	struct mtk_nfc *nfc = platform_get_drvdata(pdev);
	struct mtk_nfc_nand_chip *chip;

	while (!list_empty(&nfc->chips)) {
		chip = list_first_entry(&nfc->chips, struct mtk_nfc_nand_chip,
					node);
		nand_release(nand_to_mtd(&chip->nand));
		list_del(&chip->node);
	}

	mtk_ecc_release(nfc->ecc);
	mtk_nfc_disable_clk(&nfc->clk);

	return 0;
}
#ifdef CONFIG_PM_SLEEP
static int mtk_nfc_suspend(struct device *dev)
{
	struct mtk_nfc *nfc = dev_get_drvdata(dev);

	mtk_nfc_disable_clk(&nfc->clk);

	return 0;
}
static int mtk_nfc_resume(struct device *dev)
{
	struct mtk_nfc *nfc = dev_get_drvdata(dev);
	struct mtk_nfc_nand_chip *chip;
	struct nand_chip *nand;
	struct mtd_info *mtd;
	int ret;
	u32 i;

	udelay(200);

	ret = mtk_nfc_enable_clk(dev, &nfc->clk);
	if (ret)
		return ret;

	mtk_nfc_hw_init(nfc);

	/* reset NAND chip if VCC was powered off */
	list_for_each_entry(chip, &nfc->chips, node) {
		nand = &chip->nand;
		mtd = nand_to_mtd(nand);
		for (i = 0; i < chip->nsels; i++) {
			nand->select_chip(mtd, i);
			nand->cmdfunc(mtd, NAND_CMD_RESET, -1, -1);
		}
	}

	return 0;
}

static SIMPLE_DEV_PM_OPS(mtk_nfc_pm_ops, mtk_nfc_suspend, mtk_nfc_resume);
#endif
static const struct of_device_id mtk_nfc_id_table[] = {
	{ .compatible = "mediatek,mt2701-nfc" },
	{}
};
MODULE_DEVICE_TABLE(of, mtk_nfc_id_table);

static struct platform_driver mtk_nfc_driver = {
	.probe  = mtk_nfc_probe,
	.remove = mtk_nfc_remove,
	.driver = {
		.name  = MTK_NAME,
		.of_match_table = mtk_nfc_id_table,
#ifdef CONFIG_PM_SLEEP
		.pm = &mtk_nfc_pm_ops,
#endif
	},
};

module_platform_driver(mtk_nfc_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Xiaolei Li <xiaolei.li@mediatek.com>");
MODULE_DESCRIPTION("MTK Nand Flash Controller Driver");