/*
 * drivers/mtd/nand/pxa3xx_nand.c
 *
 * Copyright © 2005 Intel Corporation
 * Copyright © 2006 Marvell International Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/clk.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/nand.h>
#include <linux/mtd/partitions.h>
#include <linux/io.h>
#include <linux/irq.h>

#include <mach/dma.h>
#include <plat/pxa3xx_nand.h>
#define CHIP_DELAY_TIMEOUT	(2 * HZ/10)
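/*
 * Note on the value above: 2 * HZ / 10 is one fifth of a second worth of
 * jiffies, i.e. roughly a 200 ms timeout regardless of the configured HZ.
 * It is used below as the timeout for wait_for_completion_timeout() while
 * waiting for a command to finish.
 */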
/* registers and bit definitions */
#define NDCR		(0x00) /* Control register */
#define NDTR0CS0	(0x04) /* Timing Parameter 0 for CS0 */
#define NDTR1CS0	(0x0C) /* Timing Parameter 1 for CS0 */
#define NDSR		(0x14) /* Status Register */
#define NDPCR		(0x18) /* Page Count Register */
#define NDBDR0		(0x1C) /* Bad Block Register 0 */
#define NDBDR1		(0x20) /* Bad Block Register 1 */
#define NDDB		(0x40) /* Data Buffer */
#define NDCB0		(0x48) /* Command Buffer0 */
#define NDCB1		(0x4C) /* Command Buffer1 */
#define NDCB2		(0x50) /* Command Buffer2 */
#define NDCR_SPARE_EN		(0x1 << 31)
#define NDCR_ECC_EN		(0x1 << 30)
#define NDCR_DMA_EN		(0x1 << 29)
#define NDCR_ND_RUN		(0x1 << 28)
#define NDCR_DWIDTH_C		(0x1 << 27)
#define NDCR_DWIDTH_M		(0x1 << 26)
#define NDCR_PAGE_SZ		(0x1 << 24)
#define NDCR_NCSX		(0x1 << 23)
#define NDCR_ND_MODE		(0x3 << 21)
#define NDCR_NAND_MODE		(0x0)
#define NDCR_CLR_PG_CNT		(0x1 << 20)
#define NDCR_CLR_ECC		(0x1 << 19)
#define NDCR_RD_ID_CNT_MASK	(0x7 << 16)
#define NDCR_RD_ID_CNT(x)	(((x) << 16) & NDCR_RD_ID_CNT_MASK)

#define NDCR_RA_START		(0x1 << 15)
#define NDCR_PG_PER_BLK		(0x1 << 14)
#define NDCR_ND_ARB_EN		(0x1 << 12)
#define NDSR_MASK		(0xfff)
#define NDSR_RDY		(0x1 << 11)
#define NDSR_CS0_PAGED		(0x1 << 10)
#define NDSR_CS1_PAGED		(0x1 << 9)
#define NDSR_CS0_CMDD		(0x1 << 8)
#define NDSR_CS1_CMDD		(0x1 << 7)
#define NDSR_CS0_BBD		(0x1 << 6)
#define NDSR_CS1_BBD		(0x1 << 5)
#define NDSR_DBERR		(0x1 << 4)
#define NDSR_SBERR		(0x1 << 3)
#define NDSR_WRDREQ		(0x1 << 2)
#define NDSR_RDDREQ		(0x1 << 1)
#define NDSR_WRCMDREQ		(0x1)
#define NDCB0_AUTO_RS		(0x1 << 25)
#define NDCB0_CSEL		(0x1 << 24)
#define NDCB0_CMD_TYPE_MASK	(0x7 << 21)
#define NDCB0_CMD_TYPE(x)	(((x) << 21) & NDCB0_CMD_TYPE_MASK)
#define NDCB0_NC		(0x1 << 20)
#define NDCB0_DBC		(0x1 << 19)
#define NDCB0_ADDR_CYC_MASK	(0x7 << 16)
#define NDCB0_ADDR_CYC(x)	(((x) << 16) & NDCB0_ADDR_CYC_MASK)
#define NDCB0_CMD2_MASK		(0xff << 8)
#define NDCB0_CMD1_MASK		(0xff)
#define NDCB0_ADDR_CYC_SHIFT	(16)
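/*
 * Example of how these bits are combined (taken from the prepare_* helpers
 * further down): an erase command is encoded as
 *
 *	ndcb0 = cmd | NDCB0_CMD_TYPE(2) | NDCB0_AUTO_RS | NDCB0_ADDR_CYC(3);
 *
 * i.e. the low 16 bits carry the two NAND command bytes (NDCB0_CMD1_MASK /
 * NDCB0_CMD2_MASK) while the upper bits select the command type, the number
 * of address cycles and, presumably, an automatic status read (NDCB0_AUTO_RS).
 */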
/* macros for registers read/write */
#define nand_writel(info, off, val)	\
	__raw_writel((val), (info)->mmio_base + (off))

#define nand_readl(info, off)		\
	__raw_readl((info)->mmio_base + (off))
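/*
 * Typical usage, as seen throughout this file: nand_writel(info, NDSR,
 * NDSR_MASK) writes to the status register (used below to clear the status
 * bits), while nand_readl(info, NDCR) returns the current control register
 * value.
 */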
/* error code and state */
enum {
	ERR_NONE	= 0,
	ERR_DMABUSERR	= -1,
	ERR_SENDCMD	= -2,
	ERR_DBERR	= -3,
	ERR_BBERR	= -4,
	ERR_SBERR	= -5,
};

enum {
	STATE_READY	= 0,
	STATE_CMD_HANDLE,
	STATE_DMA_READING,
	STATE_DMA_WRITING,
	STATE_DMA_DONE,
	STATE_PIO_READING,
	STATE_PIO_WRITING,
};

struct pxa3xx_nand_info {
	struct nand_chip	nand_chip;

	struct platform_device	*pdev;
	const struct pxa3xx_nand_flash *flash_info;

	struct clk		*clk;
	void __iomem		*mmio_base;
	unsigned long		mmio_phys;

	unsigned int		buf_start;
	unsigned int		buf_count;

	/* DMA information */
	int			drcmr_dat;
	int			drcmr_cmd;

	unsigned char		*data_buff;
	dma_addr_t		data_buff_phys;
	size_t			data_buff_size;
	int			data_dma_ch;
	struct pxa_dma_desc	*data_desc;
	dma_addr_t		data_desc_addr;

	uint32_t		reg_ndcr;

	/* saved column/page_addr during CMD_SEQIN */
	int			seqin_column;
	int			seqin_page_addr;

	/* relate to the command */
	unsigned int		state;

	int			use_ecc;	/* use HW ECC ? */
	int			use_dma;	/* use DMA ? */

	size_t			data_size;	/* data size in FIFO */
	int			retcode;
	struct completion	cmd_complete;

	/* generated NDCBx register values */
	uint32_t		ndcb0;
	uint32_t		ndcb1;
	uint32_t		ndcb2;

	/* calculated from pxa3xx_nand_flash data */
	size_t			oob_size;
	size_t			read_id_bytes;

	unsigned int		col_addr_cycles;
	unsigned int		row_addr_cycles;
};
static int use_dma = 1;
module_param(use_dma, bool, 0444);
MODULE_PARM_DESC(use_dma,
	"enable DMA for data transferring to/from NAND HW");
/*
 * Default NAND flash controller configuration setup by the
 * bootloader. This configuration is used only when pdata->keep_config is set.
 */
static struct pxa3xx_nand_timing default_timing;
static struct pxa3xx_nand_flash default_flash;
static struct pxa3xx_nand_cmdset smallpage_cmdset = {
	.read_status	= 0x0070,
	.lock_status	= 0x007A,
};

static struct pxa3xx_nand_cmdset largepage_cmdset = {
	.read_status	= 0x0070,
	.lock_status	= 0x007A,
};
#ifdef CONFIG_MTD_NAND_PXA3xx_BUILTIN
static struct pxa3xx_nand_timing samsung512MbX16_timing = {
};

static struct pxa3xx_nand_flash samsung512MbX16 = {
	.timing		= &samsung512MbX16_timing,
	.cmdset		= &smallpage_cmdset,
	.page_per_block	= 32,
};

static struct pxa3xx_nand_flash samsung2GbX8 = {
	.timing		= &samsung512MbX16_timing,
	.cmdset		= &smallpage_cmdset,
	.page_per_block	= 64,
};

static struct pxa3xx_nand_flash samsung32GbX8 = {
	.timing		= &samsung512MbX16_timing,
	.cmdset		= &smallpage_cmdset,
	.page_per_block	= 128,
};

static struct pxa3xx_nand_timing micron_timing = {
};

static struct pxa3xx_nand_flash micron1GbX8 = {
	.timing		= &micron_timing,
	.cmdset		= &largepage_cmdset,
	.page_per_block	= 64,
};

static struct pxa3xx_nand_flash micron1GbX16 = {
	.timing		= &micron_timing,
	.cmdset		= &largepage_cmdset,
	.page_per_block	= 64,
};

static struct pxa3xx_nand_flash micron4GbX8 = {
	.timing		= &micron_timing,
	.cmdset		= &largepage_cmdset,
	.page_per_block	= 64,
};

static struct pxa3xx_nand_flash micron4GbX16 = {
	.timing		= &micron_timing,
	.cmdset		= &largepage_cmdset,
	.page_per_block	= 64,
};

static struct pxa3xx_nand_timing stm2GbX16_timing = {
};

static struct pxa3xx_nand_flash stm2GbX16 = {
	.timing		= &stm2GbX16_timing,
	.cmdset		= &largepage_cmdset,
	.page_per_block	= 64,
};
static struct pxa3xx_nand_flash *builtin_flash_types[] = {
	&samsung512MbX16,
	&samsung2GbX8,
	&samsung32GbX8,
	&micron1GbX8,
	&micron1GbX16,
	&micron4GbX8,
	&micron4GbX16,
	&stm2GbX16,
};
#endif /* CONFIG_MTD_NAND_PXA3xx_BUILTIN */
#define NDTR0_tCH(c)	(min((c), 7) << 19)
#define NDTR0_tCS(c)	(min((c), 7) << 16)
#define NDTR0_tWH(c)	(min((c), 7) << 11)
#define NDTR0_tWP(c)	(min((c), 7) << 8)
#define NDTR0_tRH(c)	(min((c), 7) << 3)
#define NDTR0_tRP(c)	(min((c), 7) << 0)

#define NDTR1_tR(c)	(min((c), 65535) << 16)
#define NDTR1_tWHR(c)	(min((c), 15) << 4)
#define NDTR1_tAR(c)	(min((c), 15) << 0)

#define tCH_NDTR0(r)	(((r) >> 19) & 0x7)
#define tCS_NDTR0(r)	(((r) >> 16) & 0x7)
#define tWH_NDTR0(r)	(((r) >> 11) & 0x7)
#define tWP_NDTR0(r)	(((r) >> 8) & 0x7)
#define tRH_NDTR0(r)	(((r) >> 3) & 0x7)
#define tRP_NDTR0(r)	(((r) >> 0) & 0x7)

#define tR_NDTR1(r)	(((r) >> 16) & 0xffff)
#define tWHR_NDTR1(r)	(((r) >> 4) & 0xf)
#define tAR_NDTR1(r)	(((r) >> 0) & 0xf)
/* convert nano-seconds to nand flash controller clock cycles */
#define ns2cycle(ns, clk)	(int)(((ns) * (clk / 1000000) / 1000) - 1)

/* convert nand flash controller clock cycles to nano-seconds */
#define cycle2ns(c, clk)	((((c) + 1) * 1000000 + clk / 500) / (clk / 1000))
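/*
 * Worked example (clock value assumed purely for illustration): with a
 * 156 MHz controller clock, clk / 1000000 = 156, so
 *
 *	ns2cycle(25, 156000000) = (25 * 156 / 1000) - 1 = 3 - 1 = 2
 *	cycle2ns(2, 156000000)  = (3 * 1000000 + 312000) / 156000 = 21
 *
 * i.e. the two macros are approximate inverses of each other; the "- 1" /
 * "+ 1" pair suggests the timing register fields hold (cycles - 1).
 */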
static void pxa3xx_nand_set_timing(struct pxa3xx_nand_info *info,
				   const struct pxa3xx_nand_timing *t)
{
	unsigned long nand_clk = clk_get_rate(info->clk);
	uint32_t ndtr0, ndtr1;

	ndtr0 = NDTR0_tCH(ns2cycle(t->tCH, nand_clk)) |
		NDTR0_tCS(ns2cycle(t->tCS, nand_clk)) |
		NDTR0_tWH(ns2cycle(t->tWH, nand_clk)) |
		NDTR0_tWP(ns2cycle(t->tWP, nand_clk)) |
		NDTR0_tRH(ns2cycle(t->tRH, nand_clk)) |
		NDTR0_tRP(ns2cycle(t->tRP, nand_clk));

	ndtr1 = NDTR1_tR(ns2cycle(t->tR, nand_clk)) |
		NDTR1_tWHR(ns2cycle(t->tWHR, nand_clk)) |
		NDTR1_tAR(ns2cycle(t->tAR, nand_clk));

	nand_writel(info, NDTR0CS0, ndtr0);
	nand_writel(info, NDTR1CS0, ndtr1);
}
#define WAIT_EVENT_TIMEOUT	10
static int wait_for_event(struct pxa3xx_nand_info *info, uint32_t event)
{
	int timeout = WAIT_EVENT_TIMEOUT;
	uint32_t ndsr;

	while (timeout--) {
		ndsr = nand_readl(info, NDSR) & NDSR_MASK;
		if (ndsr & event) {
			nand_writel(info, NDSR, ndsr);
			return 0;
		}

		msleep(10);
	}

	return -ETIMEDOUT;
}
static int prepare_read_prog_cmd(struct pxa3xx_nand_info *info,
			uint16_t cmd, int column, int page_addr)
{
	const struct pxa3xx_nand_flash *f = info->flash_info;
	const struct pxa3xx_nand_cmdset *cmdset = f->cmdset;

	/* calculate data size */
	switch (f->page_size) {
	case 2048:
		info->data_size = (info->use_ecc) ? 2088 : 2112;
		break;
	case 512:
		info->data_size = (info->use_ecc) ? 520 : 528;
		break;
	default:
		return -EINVAL;
	}

	/* generate values for NDCBx registers */
	info->ndcb0 = cmd | ((cmd & 0xff00) ? NDCB0_DBC : 0);
	info->ndcb1 = 0;
	info->ndcb2 = 0;
	info->ndcb0 |= NDCB0_ADDR_CYC(info->row_addr_cycles + info->col_addr_cycles);

	if (info->col_addr_cycles == 2) {
		/* large block, 2 cycles for column address
		 * row address starts from 3rd cycle
		 */
		info->ndcb1 |= page_addr << 16;
		if (info->row_addr_cycles == 3)
			info->ndcb2 = (page_addr >> 16) & 0xff;
	} else
		/* small block, 1 cycle for column address
		 * row address starts from 2nd cycle
		 */
		info->ndcb1 = page_addr << 8;

	if (cmd == cmdset->program)
		info->ndcb0 |= NDCB0_CMD_TYPE(1) | NDCB0_AUTO_RS;

	return 0;
}
static int prepare_erase_cmd(struct pxa3xx_nand_info *info,
			uint16_t cmd, int page_addr)
{
	info->ndcb0 = cmd | ((cmd & 0xff00) ? NDCB0_DBC : 0);
	info->ndcb0 |= NDCB0_CMD_TYPE(2) | NDCB0_AUTO_RS | NDCB0_ADDR_CYC(3);
	info->ndcb1 = page_addr;
	info->ndcb2 = 0;

	return 0;
}
static int prepare_other_cmd(struct pxa3xx_nand_info *info, uint16_t cmd)
{
	const struct pxa3xx_nand_cmdset *cmdset = info->flash_info->cmdset;

	info->ndcb0 = cmd | ((cmd & 0xff00) ? NDCB0_DBC : 0);
	info->ndcb1 = 0;
	info->ndcb2 = 0;

	if (cmd == cmdset->read_id) {
		info->ndcb0 |= NDCB0_CMD_TYPE(3);
		info->data_size = 8;
	} else if (cmd == cmdset->read_status) {
		info->ndcb0 |= NDCB0_CMD_TYPE(4);
		info->data_size = 8;
	} else if (cmd == cmdset->reset || cmd == cmdset->lock ||
		   cmd == cmdset->unlock) {
		info->ndcb0 |= NDCB0_CMD_TYPE(5);
	} else
		return -EINVAL;

	return 0;
}
static void enable_int(struct pxa3xx_nand_info *info, uint32_t int_mask)
{
	uint32_t ndcr;

	ndcr = nand_readl(info, NDCR);
	nand_writel(info, NDCR, ndcr & ~int_mask);
}

static void disable_int(struct pxa3xx_nand_info *info, uint32_t int_mask)
{
	uint32_t ndcr;

	ndcr = nand_readl(info, NDCR);
	nand_writel(info, NDCR, ndcr | int_mask);
}
/* NOTE: ND_RUN must be set first and only then may the command buffer be
 * written; otherwise the controller does not work.
 */
static int write_cmd(struct pxa3xx_nand_info *info)
{
	uint32_t ndcr;

	/* clear status bits and run */
	nand_writel(info, NDSR, NDSR_MASK);

	ndcr = info->reg_ndcr;

	ndcr |= info->use_ecc ? NDCR_ECC_EN : 0;
	ndcr |= info->use_dma ? NDCR_DMA_EN : 0;
	ndcr |= NDCR_ND_RUN;

	nand_writel(info, NDCR, ndcr);

	if (wait_for_event(info, NDSR_WRCMDREQ)) {
		printk(KERN_ERR "timed out writing command\n");
		return -ETIMEDOUT;
	}

	/* the three command buffer words are pushed via consecutive writes
	 * to the NDCB0 offset
	 */
	nand_writel(info, NDCB0, info->ndcb0);
	nand_writel(info, NDCB0, info->ndcb1);
	nand_writel(info, NDCB0, info->ndcb2);

	return 0;
}
static int handle_data_pio(struct pxa3xx_nand_info *info)
{
	int ret, timeout = CHIP_DELAY_TIMEOUT;

	switch (info->state) {
	case STATE_PIO_WRITING:
		__raw_writesl(info->mmio_base + NDDB, info->data_buff,
				DIV_ROUND_UP(info->data_size, 4));

		enable_int(info, NDSR_CS0_BBD | NDSR_CS0_CMDD);

		ret = wait_for_completion_timeout(&info->cmd_complete, timeout);
		if (!ret) {
			printk(KERN_ERR "program command timed out\n");
			return -1;
		}
		break;
	case STATE_PIO_READING:
		__raw_readsl(info->mmio_base + NDDB, info->data_buff,
				DIV_ROUND_UP(info->data_size, 4));
		break;
	default:
		printk(KERN_ERR "%s: invalid state %d\n", __func__,
				info->state);
		return -EINVAL;
	}

	info->state = STATE_READY;
	return 0;
}
static void start_data_dma(struct pxa3xx_nand_info *info, int dir_out)
{
	struct pxa_dma_desc *desc = info->data_desc;
	int dma_len = ALIGN(info->data_size, 32);

	desc->ddadr = DDADR_STOP;
	desc->dcmd = DCMD_ENDIRQEN | DCMD_WIDTH4 | DCMD_BURST32 | dma_len;

	if (dir_out) {
		desc->dsadr = info->data_buff_phys;
		desc->dtadr = info->mmio_phys + NDDB;
		desc->dcmd |= DCMD_INCSRCADDR | DCMD_FLOWTRG;
	} else {
		desc->dtadr = info->data_buff_phys;
		desc->dsadr = info->mmio_phys + NDDB;
		desc->dcmd |= DCMD_INCTRGADDR | DCMD_FLOWSRC;
	}

	DRCMR(info->drcmr_dat) = DRCMR_MAPVLD | info->data_dma_ch;
	DDADR(info->data_dma_ch) = info->data_desc_addr;
	DCSR(info->data_dma_ch) |= DCSR_RUN;
}
static void pxa3xx_nand_data_dma_irq(int channel, void *data)
{
	struct pxa3xx_nand_info *info = data;
	uint32_t dcsr;

	dcsr = DCSR(channel);
	DCSR(channel) = dcsr;

	if (dcsr & DCSR_BUSERR) {
		info->retcode = ERR_DMABUSERR;
		complete(&info->cmd_complete);
	}

	if (info->state == STATE_DMA_WRITING) {
		info->state = STATE_DMA_DONE;
		enable_int(info, NDSR_CS0_BBD | NDSR_CS0_CMDD);
	} else {
		info->state = STATE_READY;
		complete(&info->cmd_complete);
	}
}
static irqreturn_t pxa3xx_nand_irq(int irq, void *devid)
{
	struct pxa3xx_nand_info *info = devid;
	unsigned int status;

	status = nand_readl(info, NDSR);

	if (status & (NDSR_RDDREQ | NDSR_DBERR | NDSR_SBERR)) {
		if (status & NDSR_DBERR)
			info->retcode = ERR_DBERR;
		else if (status & NDSR_SBERR)
			info->retcode = ERR_SBERR;

		disable_int(info, NDSR_RDDREQ | NDSR_DBERR | NDSR_SBERR);

		if (info->use_dma) {
			info->state = STATE_DMA_READING;
			start_data_dma(info, 0);
		} else {
			info->state = STATE_PIO_READING;
			complete(&info->cmd_complete);
		}
	} else if (status & NDSR_WRDREQ) {
		disable_int(info, NDSR_WRDREQ);
		if (info->use_dma) {
			info->state = STATE_DMA_WRITING;
			start_data_dma(info, 1);
		} else {
			info->state = STATE_PIO_WRITING;
			complete(&info->cmd_complete);
		}
	} else if (status & (NDSR_CS0_BBD | NDSR_CS0_CMDD)) {
		if (status & NDSR_CS0_BBD)
			info->retcode = ERR_BBERR;

		disable_int(info, NDSR_CS0_BBD | NDSR_CS0_CMDD);
		info->state = STATE_READY;
		complete(&info->cmd_complete);
	}
	nand_writel(info, NDSR, status);
	return IRQ_HANDLED;
}
static int pxa3xx_nand_do_cmd(struct pxa3xx_nand_info *info, uint32_t event)
{
	uint32_t ndcr;
	int ret, timeout = CHIP_DELAY_TIMEOUT;

	if (write_cmd(info)) {
		info->retcode = ERR_SENDCMD;
		goto fail_stop;
	}

	info->state = STATE_CMD_HANDLE;

	enable_int(info, event);

	ret = wait_for_completion_timeout(&info->cmd_complete, timeout);
	if (!ret) {
		printk(KERN_ERR "command execution timed out\n");
		info->retcode = ERR_SENDCMD;
		goto fail_stop;
	}

	if (info->use_dma == 0 && info->data_size > 0)
		if (handle_data_pio(info))
			goto fail_stop;

	return 0;

fail_stop:
	ndcr = nand_readl(info, NDCR);
	nand_writel(info, NDCR, ndcr & ~NDCR_ND_RUN);
	udelay(10);
	return -ETIMEDOUT;
}
static int pxa3xx_nand_dev_ready(struct mtd_info *mtd)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	return (nand_readl(info, NDSR) & NDSR_RDY) ? 1 : 0;
}
static inline int is_buf_blank(uint8_t *buf, size_t len)
{
	for (; len > 0; len--)
		if (*buf++ != 0xff)
			return 0;
	return 1;
}
static void pxa3xx_nand_cmdfunc(struct mtd_info *mtd, unsigned command,
				int column, int page_addr)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	const struct pxa3xx_nand_flash *flash_info = info->flash_info;
	const struct pxa3xx_nand_cmdset *cmdset = flash_info->cmdset;
	int ret;

	info->use_dma = (use_dma) ? 1 : 0;
	info->use_ecc = 0;
	info->data_size = 0;
	info->state = STATE_READY;

	init_completion(&info->cmd_complete);

	switch (command) {
	case NAND_CMD_READOOB:
		/* disable HW ECC to get all the OOB data */
		info->buf_count = mtd->writesize + mtd->oobsize;
		info->buf_start = mtd->writesize + column;
		memset(info->data_buff, 0xFF, info->buf_count);

		if (prepare_read_prog_cmd(info, cmdset->read1, column, page_addr))
			break;

		pxa3xx_nand_do_cmd(info, NDSR_RDDREQ | NDSR_DBERR | NDSR_SBERR);

		/* We only read OOB here, so a data error does not matter */
		if (info->retcode == ERR_DBERR)
			info->retcode = ERR_NONE;
		break;

	case NAND_CMD_READ0:
		info->use_ecc = 1;
		info->retcode = ERR_NONE;
		info->buf_start = column;
		info->buf_count = mtd->writesize + mtd->oobsize;
		memset(info->data_buff, 0xFF, info->buf_count);

		if (prepare_read_prog_cmd(info, cmdset->read1, column, page_addr))
			break;

		pxa3xx_nand_do_cmd(info, NDSR_RDDREQ | NDSR_DBERR | NDSR_SBERR);

		if (info->retcode == ERR_DBERR) {
			/* for a blank page (all 0xff), the HW calculates its
			 * ECC as 0, which differs from the ECC information
			 * within the OOB; ignore such double bit errors
			 */
			if (is_buf_blank(info->data_buff, mtd->writesize))
				info->retcode = ERR_NONE;
		}
		break;

	case NAND_CMD_SEQIN:
		info->buf_start = column;
		info->buf_count = mtd->writesize + mtd->oobsize;
		memset(info->data_buff, 0xff, info->buf_count);

		/* save column/page_addr for next CMD_PAGEPROG */
		info->seqin_column = column;
		info->seqin_page_addr = page_addr;
		break;

	case NAND_CMD_PAGEPROG:
		info->use_ecc = (info->seqin_column >= mtd->writesize) ? 0 : 1;

		if (prepare_read_prog_cmd(info, cmdset->program,
				info->seqin_column, info->seqin_page_addr))
			break;

		pxa3xx_nand_do_cmd(info, NDSR_WRDREQ);
		break;

	case NAND_CMD_ERASE1:
		if (prepare_erase_cmd(info, cmdset->erase, page_addr))
			break;

		pxa3xx_nand_do_cmd(info, NDSR_CS0_BBD | NDSR_CS0_CMDD);
		break;

	case NAND_CMD_ERASE2:
		break;

	case NAND_CMD_READID:
	case NAND_CMD_STATUS:
		info->use_dma = 0;	/* force PIO read */
		info->buf_start = 0;
		info->buf_count = (command == NAND_CMD_READID) ?
				info->read_id_bytes : 1;

		if (prepare_other_cmd(info, (command == NAND_CMD_READID) ?
				cmdset->read_id : cmdset->read_status))
			break;

		pxa3xx_nand_do_cmd(info, NDSR_RDDREQ);
		break;

	case NAND_CMD_RESET:
		if (prepare_other_cmd(info, cmdset->reset))
			break;

		ret = pxa3xx_nand_do_cmd(info, NDSR_CS0_CMDD);
		if (ret == 0) {
			int timeout = 2;
			uint32_t ndcr;

			while (timeout--) {
				if (nand_readl(info, NDSR) & NDSR_RDY)
					break;
				msleep(10);
			}

			ndcr = nand_readl(info, NDCR);
			nand_writel(info, NDCR, ndcr & ~NDCR_ND_RUN);
		}
		break;

	default:
		printk(KERN_ERR "non-supported command.\n");
		break;
	}

	if (info->retcode == ERR_DBERR) {
		printk(KERN_ERR "double bit error @ page %08x\n", page_addr);
		info->retcode = ERR_NONE;
	}
}
static uint8_t pxa3xx_nand_read_byte(struct mtd_info *mtd)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	uint8_t retval = 0xFF;

	if (info->buf_start < info->buf_count)
		/* Has a new command just been sent? */
		retval = info->data_buff[info->buf_start++];

	return retval;
}
static u16 pxa3xx_nand_read_word(struct mtd_info *mtd)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	u16 retval = 0xFFFF;

	if (!(info->buf_start & 0x01) && info->buf_start < info->buf_count) {
		retval = *((u16 *)(info->data_buff + info->buf_start));
		info->buf_start += 2;
	}
	return retval;
}
static void pxa3xx_nand_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	int real_len = min_t(size_t, len, info->buf_count - info->buf_start);

	memcpy(buf, info->data_buff + info->buf_start, real_len);
	info->buf_start += real_len;
}

static void pxa3xx_nand_write_buf(struct mtd_info *mtd,
		const uint8_t *buf, int len)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	int real_len = min_t(size_t, len, info->buf_count - info->buf_start);

	memcpy(info->data_buff + info->buf_start, buf, real_len);
	info->buf_start += real_len;
}
static int pxa3xx_nand_verify_buf(struct mtd_info *mtd,
		const uint8_t *buf, int len)
{
	return 0;
}

static void pxa3xx_nand_select_chip(struct mtd_info *mtd, int chip)
{
	return;
}

static int pxa3xx_nand_waitfunc(struct mtd_info *mtd, struct nand_chip *this)
{
	struct pxa3xx_nand_info *info = mtd->priv;

	/* pxa3xx_nand_send_command has waited for command complete */
	if (this->state == FL_WRITING || this->state == FL_ERASING) {
		if (info->retcode == ERR_NONE)
			return 0;
		else {
			/*
			 * any error makes it return 0x01, which tells the
			 * caller that the erase or write failed
			 */
			return 0x01;
		}
	}

	return 0;
}
static void pxa3xx_nand_ecc_hwctl(struct mtd_info *mtd, int mode)
{
	return;
}

static int pxa3xx_nand_ecc_calculate(struct mtd_info *mtd,
		const uint8_t *dat, uint8_t *ecc_code)
{
	return 0;
}

static int pxa3xx_nand_ecc_correct(struct mtd_info *mtd,
		uint8_t *dat, uint8_t *read_ecc, uint8_t *calc_ecc)
{
	struct pxa3xx_nand_info *info = mtd->priv;
	/*
	 * Any error, including ERR_SENDCMD, ERR_DBERR and ERR_DMABUSERR, is
	 * treated as an ECC error, which tells the caller that the read
	 * failed. We have distinguished all the errors, but nand_read_ecc()
	 * only checks the return value of this function.
	 *
	 * Corrected (single-bit) errors must also be noted.
	 */
	if (info->retcode == ERR_SBERR)
		return 1;
	else if (info->retcode != ERR_NONE)
		return -1;

	return 0;
}
static int __readid(struct pxa3xx_nand_info *info, uint32_t *id)
{
	const struct pxa3xx_nand_flash *f = info->flash_info;
	const struct pxa3xx_nand_cmdset *cmdset = f->cmdset;
	uint32_t ndcr;
	uint8_t  id_buff[8];

	if (prepare_other_cmd(info, cmdset->read_id)) {
		printk(KERN_ERR "failed to prepare command\n");
		return -EINVAL;
	}

	/* Send command */
	if (write_cmd(info))
		goto fail_timeout;

	/* Wait for CMDDM (command done successfully) */
	if (wait_for_event(info, NDSR_RDDREQ))
		goto fail_timeout;

	__raw_readsl(info->mmio_base + NDDB, id_buff, 2);
	*id = id_buff[0] | (id_buff[1] << 8);
	return 0;

fail_timeout:
	ndcr = nand_readl(info, NDCR);
	nand_writel(info, NDCR, ndcr & ~NDCR_ND_RUN);
	udelay(10);
	return -ETIMEDOUT;
}
static int pxa3xx_nand_config_flash(struct pxa3xx_nand_info *info,
				    const struct pxa3xx_nand_flash *f)
{
	struct platform_device *pdev = info->pdev;
	struct pxa3xx_nand_platform_data *pdata = pdev->dev.platform_data;
	uint32_t ndcr = 0x00000FFF; /* disable all interrupts */

	if (f->page_size != 2048 && f->page_size != 512)
		return -EINVAL;

	if (f->flash_width != 16 && f->flash_width != 8)
		return -EINVAL;

	/* calculate flash information */
	info->oob_size = (f->page_size == 2048) ? 64 : 16;
	info->read_id_bytes = (f->page_size == 2048) ? 4 : 2;

	/* calculate addressing information */
	info->col_addr_cycles = (f->page_size == 2048) ? 2 : 1;

	if (f->num_blocks * f->page_per_block > 65536)
		info->row_addr_cycles = 3;
	else
		info->row_addr_cycles = 2;

	ndcr |= (pdata->enable_arbiter) ? NDCR_ND_ARB_EN : 0;
	ndcr |= (info->col_addr_cycles == 2) ? NDCR_RA_START : 0;
	ndcr |= (f->page_per_block == 64) ? NDCR_PG_PER_BLK : 0;
	ndcr |= (f->page_size == 2048) ? NDCR_PAGE_SZ : 0;
	ndcr |= (f->flash_width == 16) ? NDCR_DWIDTH_M : 0;
	ndcr |= (f->dfc_width == 16) ? NDCR_DWIDTH_C : 0;

	ndcr |= NDCR_RD_ID_CNT(info->read_id_bytes);
	ndcr |= NDCR_SPARE_EN; /* enable spare by default */

	info->reg_ndcr = ndcr;

	pxa3xx_nand_set_timing(info, f->timing);
	info->flash_info = f;
	return 0;
}
static void pxa3xx_nand_detect_timing(struct pxa3xx_nand_info *info,
				      struct pxa3xx_nand_timing *t)
{
	unsigned long nand_clk = clk_get_rate(info->clk);
	uint32_t ndtr0 = nand_readl(info, NDTR0CS0);
	uint32_t ndtr1 = nand_readl(info, NDTR1CS0);

	t->tCH = cycle2ns(tCH_NDTR0(ndtr0), nand_clk);
	t->tCS = cycle2ns(tCS_NDTR0(ndtr0), nand_clk);
	t->tWH = cycle2ns(tWH_NDTR0(ndtr0), nand_clk);
	t->tWP = cycle2ns(tWP_NDTR0(ndtr0), nand_clk);
	t->tRH = cycle2ns(tRH_NDTR0(ndtr0), nand_clk);
	t->tRP = cycle2ns(tRP_NDTR0(ndtr0), nand_clk);

	t->tR = cycle2ns(tR_NDTR1(ndtr1), nand_clk);
	t->tWHR = cycle2ns(tWHR_NDTR1(ndtr1), nand_clk);
	t->tAR = cycle2ns(tAR_NDTR1(ndtr1), nand_clk);
}
static int pxa3xx_nand_detect_config(struct pxa3xx_nand_info *info)
{
	uint32_t ndcr = nand_readl(info, NDCR);
	struct nand_flash_dev *type = NULL;
	uint32_t id;
	int i;

	default_flash.page_per_block = ndcr & NDCR_PG_PER_BLK ? 64 : 32;
	default_flash.page_size = ndcr & NDCR_PAGE_SZ ? 2048 : 512;
	default_flash.flash_width = ndcr & NDCR_DWIDTH_M ? 16 : 8;
	default_flash.dfc_width = ndcr & NDCR_DWIDTH_C ? 16 : 8;

	if (default_flash.page_size == 2048)
		default_flash.cmdset = &largepage_cmdset;
	else
		default_flash.cmdset = &smallpage_cmdset;

	/* set the info fields needed by __readid */
	info->flash_info = &default_flash;
	info->read_id_bytes = (default_flash.page_size == 2048) ? 4 : 2;
	info->reg_ndcr = ndcr;

	if (__readid(info, &id))
		return -ENODEV;

	/* Lookup the flash id */
	id = (id >> 8) & 0xff;		/* device id is byte 2 */
	for (i = 0; nand_flash_ids[i].name != NULL; i++) {
		if (id == nand_flash_ids[i].id) {
			type = &nand_flash_ids[i];
			break;
		}
	}

	if (type == NULL)
		return -ENODEV;

	/* fill the missing flash information */
	i = __ffs(default_flash.page_per_block * default_flash.page_size);
	default_flash.num_blocks = type->chipsize << (20 - i);

	info->oob_size = (default_flash.page_size == 2048) ? 64 : 16;

	/* calculate addressing information */
	info->col_addr_cycles = (default_flash.page_size == 2048) ? 2 : 1;

	if (default_flash.num_blocks * default_flash.page_per_block > 65536)
		info->row_addr_cycles = 3;
	else
		info->row_addr_cycles = 2;

	pxa3xx_nand_detect_timing(info, &default_timing);
	default_flash.timing = &default_timing;

	return 0;
}
static int pxa3xx_nand_detect_flash(struct pxa3xx_nand_info *info,
				    const struct pxa3xx_nand_platform_data *pdata)
{
	const struct pxa3xx_nand_flash *f;
	uint32_t id;
	int i;

	if (pdata->keep_config)
		if (pxa3xx_nand_detect_config(info) == 0)
			return 0;

	for (i = 0; i < pdata->num_flash; ++i) {
		f = pdata->flash + i;

		if (pxa3xx_nand_config_flash(info, f))
			continue;

		if (__readid(info, &id))
			continue;

		if (id == f->chip_id)
			return 0;
	}

#ifdef CONFIG_MTD_NAND_PXA3xx_BUILTIN
	for (i = 0; i < ARRAY_SIZE(builtin_flash_types); i++) {

		f = builtin_flash_types[i];

		if (pxa3xx_nand_config_flash(info, f))
			continue;

		if (__readid(info, &id))
			continue;

		if (id == f->chip_id)
			return 0;
	}
#endif

	dev_warn(&info->pdev->dev,
		 "failed to detect configured nand flash; found %04x instead of\n",
		 id);

	return -ENODEV;
}
/* the maximum possible buffer size for large page with OOB data
 * is: 2048 + 64 = 2112 bytes, allocate a page here for both the
 * data buffer and the DMA descriptor
 */
#define MAX_BUFF_SIZE	PAGE_SIZE
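/*
 * Rough layout of that single page (see pxa3xx_nand_init_buff() below): the
 * data buffer starts at offset 0 and the pxa_dma_desc used for the data DMA
 * is placed at the very end, at offset
 * MAX_BUFF_SIZE - sizeof(struct pxa_dma_desc).
 */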
static int pxa3xx_nand_init_buff(struct pxa3xx_nand_info *info)
{
	struct platform_device *pdev = info->pdev;
	int data_desc_offset = MAX_BUFF_SIZE - sizeof(struct pxa_dma_desc);

	if (use_dma == 0) {
		info->data_buff = kmalloc(MAX_BUFF_SIZE, GFP_KERNEL);
		if (info->data_buff == NULL)
			return -ENOMEM;
		return 0;
	}

	info->data_buff = dma_alloc_coherent(&pdev->dev, MAX_BUFF_SIZE,
				&info->data_buff_phys, GFP_KERNEL);
	if (info->data_buff == NULL) {
		dev_err(&pdev->dev, "failed to allocate dma buffer\n");
		return -ENOMEM;
	}

	info->data_buff_size = MAX_BUFF_SIZE;
	info->data_desc = (void *)info->data_buff + data_desc_offset;
	info->data_desc_addr = info->data_buff_phys + data_desc_offset;

	info->data_dma_ch = pxa_request_dma("nand-data", DMA_PRIO_LOW,
				pxa3xx_nand_data_dma_irq, info);
	if (info->data_dma_ch < 0) {
		dev_err(&pdev->dev, "failed to request data dma\n");
		dma_free_coherent(&pdev->dev, info->data_buff_size,
				info->data_buff, info->data_buff_phys);
		return info->data_dma_ch;
	}

	return 0;
}
static struct nand_ecclayout hw_smallpage_ecclayout = {
	.eccbytes = 6,
	.eccpos = {8, 9, 10, 11, 12, 13 },
	.oobfree = { {2, 6} }
};

static struct nand_ecclayout hw_largepage_ecclayout = {
	.eccbytes = 24,
	.eccpos = {
		40, 41, 42, 43, 44, 45, 46, 47,
		48, 49, 50, 51, 52, 53, 54, 55,
		56, 57, 58, 59, 60, 61, 62, 63},
	.oobfree = { {2, 38} }
};
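/*
 * Reading of the layouts above: for small (512 byte) pages the 6 ECC bytes
 * live at OOB offsets 8..13 with OOB bytes 2..7 left free; for large
 * (2048 byte) pages the 24 ECC bytes occupy OOB offsets 40..63 with bytes
 * 2..39 left free.
 */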
static void pxa3xx_nand_init_mtd(struct mtd_info *mtd,
				 struct pxa3xx_nand_info *info)
{
	const struct pxa3xx_nand_flash *f = info->flash_info;
	struct nand_chip *this = &info->nand_chip;

	this->options = (f->flash_width == 16) ? NAND_BUSWIDTH_16 : 0;

	this->waitfunc		= pxa3xx_nand_waitfunc;
	this->select_chip	= pxa3xx_nand_select_chip;
	this->dev_ready		= pxa3xx_nand_dev_ready;
	this->cmdfunc		= pxa3xx_nand_cmdfunc;
	this->read_word		= pxa3xx_nand_read_word;
	this->read_byte		= pxa3xx_nand_read_byte;
	this->read_buf		= pxa3xx_nand_read_buf;
	this->write_buf		= pxa3xx_nand_write_buf;
	this->verify_buf	= pxa3xx_nand_verify_buf;

	this->ecc.mode		= NAND_ECC_HW;
	this->ecc.hwctl		= pxa3xx_nand_ecc_hwctl;
	this->ecc.calculate	= pxa3xx_nand_ecc_calculate;
	this->ecc.correct	= pxa3xx_nand_ecc_correct;
	this->ecc.size		= f->page_size;

	if (f->page_size == 2048)
		this->ecc.layout = &hw_largepage_ecclayout;
	else
		this->ecc.layout = &hw_smallpage_ecclayout;

	this->chip_delay = 25;
}
static int pxa3xx_nand_probe(struct platform_device *pdev)
{
	struct pxa3xx_nand_platform_data *pdata;
	struct pxa3xx_nand_info *info;
	struct nand_chip *this;
	struct mtd_info *mtd;
	struct resource *r;
	int ret = 0, irq;

	pdata = pdev->dev.platform_data;
	if (pdata == NULL) {
		dev_err(&pdev->dev, "no platform data defined\n");
		return -ENODEV;
	}

	mtd = kzalloc(sizeof(struct mtd_info) + sizeof(struct pxa3xx_nand_info),
			GFP_KERNEL);
	if (!mtd) {
		dev_err(&pdev->dev, "failed to allocate memory\n");
		return -ENOMEM;
	}

	info = (struct pxa3xx_nand_info *)(&mtd[1]);
	info->pdev = pdev;

	this = &info->nand_chip;
	mtd->priv = info;
	mtd->owner = THIS_MODULE;

	info->clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(info->clk)) {
		dev_err(&pdev->dev, "failed to get nand clock\n");
		ret = PTR_ERR(info->clk);
		goto fail_free_mtd;
	}
	clk_enable(info->clk);

	r = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (r == NULL) {
		dev_err(&pdev->dev, "no resource defined for data DMA\n");
		ret = -ENXIO;
		goto fail_put_clk;
	}
	info->drcmr_dat = r->start;

	r = platform_get_resource(pdev, IORESOURCE_DMA, 1);
	if (r == NULL) {
		dev_err(&pdev->dev, "no resource defined for command DMA\n");
		ret = -ENXIO;
		goto fail_put_clk;
	}
	info->drcmr_cmd = r->start;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(&pdev->dev, "no IRQ resource defined\n");
		ret = -ENXIO;
		goto fail_put_clk;
	}

	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (r == NULL) {
		dev_err(&pdev->dev, "no IO memory resource defined\n");
		ret = -ENODEV;
		goto fail_put_clk;
	}

	r = request_mem_region(r->start, resource_size(r), pdev->name);
	if (r == NULL) {
		dev_err(&pdev->dev, "failed to request memory resource\n");
		ret = -EBUSY;
		goto fail_put_clk;
	}

	info->mmio_base = ioremap(r->start, resource_size(r));
	if (info->mmio_base == NULL) {
		dev_err(&pdev->dev, "ioremap() failed\n");
		ret = -ENODEV;
		goto fail_free_res;
	}
	info->mmio_phys = r->start;

	ret = pxa3xx_nand_init_buff(info);
	if (ret)
		goto fail_free_io;

	/* initialize all interrupts to be disabled */
	disable_int(info, NDSR_MASK);

	ret = request_irq(irq, pxa3xx_nand_irq, IRQF_DISABLED,
			  pdev->name, info);
	if (ret < 0) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto fail_free_buf;
	}

	ret = pxa3xx_nand_detect_flash(info, pdata);
	if (ret) {
		dev_err(&pdev->dev, "failed to detect flash\n");
		ret = -ENODEV;
		goto fail_free_irq;
	}

	pxa3xx_nand_init_mtd(mtd, info);

	platform_set_drvdata(pdev, mtd);

	if (nand_scan(mtd, 1)) {
		dev_err(&pdev->dev, "failed to scan nand\n");
		ret = -ENXIO;
		goto fail_free_irq;
	}

	return add_mtd_partitions(mtd, pdata->parts, pdata->nr_parts);

fail_free_irq:
	free_irq(irq, info);
fail_free_buf:
	if (use_dma) {
		pxa_free_dma(info->data_dma_ch);
		dma_free_coherent(&pdev->dev, info->data_buff_size,
				info->data_buff, info->data_buff_phys);
	} else
		kfree(info->data_buff);
fail_free_io:
	iounmap(info->mmio_base);
fail_free_res:
	release_mem_region(r->start, resource_size(r));
fail_put_clk:
	clk_disable(info->clk);
	clk_put(info->clk);
fail_free_mtd:
	kfree(mtd);
	return ret;
}
static int pxa3xx_nand_remove(struct platform_device *pdev)
{
	struct mtd_info *mtd = platform_get_drvdata(pdev);
	struct pxa3xx_nand_info *info = mtd->priv;
	struct resource *r;
	int irq;

	platform_set_drvdata(pdev, NULL);

	del_mtd_device(mtd);
	del_mtd_partitions(mtd);

	irq = platform_get_irq(pdev, 0);
	if (irq >= 0)
		free_irq(irq, info);

	if (use_dma) {
		pxa_free_dma(info->data_dma_ch);
		dma_free_writecombine(&pdev->dev, info->data_buff_size,
				info->data_buff, info->data_buff_phys);
	} else
		kfree(info->data_buff);

	iounmap(info->mmio_base);
	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	release_mem_region(r->start, resource_size(r));

	clk_disable(info->clk);
	clk_put(info->clk);

	kfree(mtd);
	return 0;
}
#ifdef CONFIG_PM
static int pxa3xx_nand_suspend(struct platform_device *pdev, pm_message_t state)
{
	struct mtd_info *mtd = (struct mtd_info *)platform_get_drvdata(pdev);
	struct pxa3xx_nand_info *info = mtd->priv;

	if (info->state != STATE_READY) {
		dev_err(&pdev->dev, "driver busy, state = %d\n", info->state);
		return -EAGAIN;
	}

	return 0;
}

static int pxa3xx_nand_resume(struct platform_device *pdev)
{
	struct mtd_info *mtd = (struct mtd_info *)platform_get_drvdata(pdev);
	struct pxa3xx_nand_info *info = mtd->priv;

	clk_enable(info->clk);

	return pxa3xx_nand_config_flash(info, info->flash_info);
}
#else
#define pxa3xx_nand_suspend	NULL
#define pxa3xx_nand_resume	NULL
#endif
static struct platform_driver pxa3xx_nand_driver = {
	.driver = {
		.name	= "pxa3xx-nand",
	},
	.probe		= pxa3xx_nand_probe,
	.remove		= pxa3xx_nand_remove,
	.suspend	= pxa3xx_nand_suspend,
	.resume		= pxa3xx_nand_resume,
};

static int __init pxa3xx_nand_init(void)
{
	return platform_driver_register(&pxa3xx_nand_driver);
}
module_init(pxa3xx_nand_init);

static void __exit pxa3xx_nand_exit(void)
{
	platform_driver_unregister(&pxa3xx_nand_driver);
}
module_exit(pxa3xx_nand_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PXA3xx NAND controller driver");