// SPDX-License-Identifier: GPL-2.0+
/* Copyright (c) 2020 Intel Corporation. */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <linux/mtd/mtd.h>
#include <linux/mtd/rawnand.h>
#include <linux/mtd/nand.h>

#include <linux/platform_device.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <asm/unaligned.h>

#define EBU_CLC			0x000
#define EBU_CLC_RST		0x00000000u

#define EBU_ADDR_SEL(n)		(0x020 + (n) * 4)
/* 5 bits 26:22 included for comparison in the ADDR_SELx */
#define EBU_ADDR_MASK(x)	((x) << 4)
#define EBU_ADDR_SEL_REGEN	0x1

#define EBU_BUSCON(n)		(0x060 + (n) * 4)
#define EBU_BUSCON_CMULT_V4	0x1
#define EBU_BUSCON_RECOVC(n)	((n) << 2)
#define EBU_BUSCON_HOLDC(n)	((n) << 4)
#define EBU_BUSCON_WAITRDC(n)	((n) << 6)
#define EBU_BUSCON_WAITWRC(n)	((n) << 8)
#define EBU_BUSCON_BCGEN_CS	0x0
#define EBU_BUSCON_SETUP_EN	BIT(22)
#define EBU_BUSCON_ALEC		0xC000

#define EBU_CON			0x0B0
#define EBU_CON_NANDM_EN	BIT(0)
#define EBU_CON_NANDM_DIS	0x0
#define EBU_CON_CSMUX_E_EN	BIT(1)
#define EBU_CON_ALE_P_LOW	BIT(2)
#define EBU_CON_CLE_P_LOW	BIT(3)
#define EBU_CON_CS_P_LOW	BIT(4)
#define EBU_CON_SE_P_LOW	BIT(5)
#define EBU_CON_WP_P_LOW	BIT(6)
#define EBU_CON_PRE_P_LOW	BIT(7)
#define EBU_CON_IN_CS_S(n)	((n) << 8)
#define EBU_CON_OUT_CS_S(n)	((n) << 10)
#define EBU_CON_LAT_EN_CS_P	((0x3D) << 18)

#define EBU_WAIT		0x0B4
#define EBU_WAIT_RDBY		BIT(0)
#define EBU_WAIT_WR_C		BIT(3)

#define HSNAND_CTL1		0x110
#define HSNAND_CTL1_ADDR_SHIFT	24

#define HSNAND_CTL2		0x114
#define HSNAND_CTL2_ADDR_SHIFT	8
#define HSNAND_CTL2_CYC_N_V5	(0x2 << 16)

#define HSNAND_INT_MSK_CTL	0x124
#define HSNAND_INT_MSK_CTL_WR_C	BIT(4)

#define HSNAND_INT_STA		0x128
#define HSNAND_INT_STA_WR_C	BIT(4)

#define HSNAND_CTL		0x130
#define HSNAND_CTL_ENABLE_ECC	BIT(0)
#define HSNAND_CTL_GO		BIT(2)
#define HSNAND_CTL_CE_SEL_CS(n)	BIT(3 + (n))
#define HSNAND_CTL_RW_READ	0x0
#define HSNAND_CTL_RW_WRITE	BIT(10)
#define HSNAND_CTL_ECC_OFF_V8TH	BIT(11)
#define HSNAND_CTL_CKFF_EN	0x0
#define HSNAND_CTL_MSG_EN	BIT(17)

#define HSNAND_PARA0		0x13c
#define HSNAND_PARA0_PAGE_V8192	0x3
#define HSNAND_PARA0_PIB_V256	(0x3 << 4)
#define HSNAND_PARA0_BYP_EN_NP	0x0
#define HSNAND_PARA0_BYP_DEC_NP	0x0
#define HSNAND_PARA0_TYPE_ONFI	BIT(18)
#define HSNAND_PARA0_ADEP_EN	BIT(21)

#define HSNAND_CMSG_0		0x150
#define HSNAND_CMSG_1		0x154

#define HSNAND_ALE_OFFS		BIT(2)
#define HSNAND_CLE_OFFS		BIT(3)
#define HSNAND_CS_OFFS		BIT(4)

#define HSNAND_ECC_OFFSET	0x008

#define NAND_DATA_IFACE_CHECK_ONLY	-1

#define MAX_CS	2

#define HZ_PER_MHZ	1000000L
#define USEC_PER_SEC	1000000L
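
/*
 * The hardware is split across two register blocks: the EBU block drives
 * the external bus (chip-select setup, bus timings, ready/busy status),
 * while the HSNAND block moves whole pages with hardware BCH ECC. Each
 * chip select additionally exposes a small memory window (chipaddr)
 * through which command, address and data bytes are issued in PIO mode.
 */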
struct ebu_nand_cs {
	void __iomem *chipaddr;
	dma_addr_t nand_pa;
	u32 addr_sel;
};

struct ebu_nand_controller {
	struct nand_controller controller;
	struct nand_chip chip;
	struct device *dev;
	void __iomem *ebu;
	void __iomem *hsnand;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct completion dma_access_complete;
	unsigned long clk_rate;
	struct clk *clk;
	u32 nd_para0;
	u8 cs_num;
	struct ebu_nand_cs cs[MAX_CS];
};

static inline struct ebu_nand_controller *nand_to_ebu(struct nand_chip *chip)
{
	return container_of(chip, struct ebu_nand_controller, chip);
}
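
/*
 * Poll EBU_WAIT until the chip reports ready (RDBY) or the pending write
 * cycle has completed (WR_C). The last argument is handed to
 * readl_poll_timeout() as a microsecond budget; ebu_nand_exec_op()
 * converts the instruction's millisecond timeout accordingly.
 */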
static int ebu_nand_waitrdy(struct nand_chip *chip, int timeout_ms)
{
	struct ebu_nand_controller *ctrl = nand_to_ebu(chip);
	u32 status;

	return readl_poll_timeout(ctrl->ebu + EBU_WAIT, status,
				  (status & EBU_WAIT_RDBY) ||
				  (status & EBU_WAIT_WR_C), 20, timeout_ms);
}

static u8 ebu_nand_readb(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;
	u8 val;

	val = readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS);
	ebu_nand_waitrdy(chip, 1000);

	return val;
}

static void ebu_nand_writeb(struct nand_chip *chip, u32 offset, u8 value)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;

	writeb(value, ebu_host->cs[cs_num].chipaddr + offset);
	ebu_nand_waitrdy(chip, 1000);
}
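
/*
 * PIO transfer helpers: data moves one byte at a time through the
 * chip-select window. The offset passed to ebu_nand_writeb() selects
 * which latch (ALE/CLE/CS) is asserted for the access.
 */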
static void ebu_read_buf(struct nand_chip *chip, u_char *buf, unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len; i++)
		buf[i] = ebu_nand_readb(chip);
}

static void ebu_write_buf(struct nand_chip *chip, const u_char *buf, int len)
{
	int i;

	for (i = 0; i < len; i++)
		ebu_nand_writeb(chip, HSNAND_CS_OFFS, buf[i]);
}

static void ebu_nand_disable(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);

	writel(0, ebu_host->ebu + EBU_CON);
}

static void ebu_select_chip(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	void __iomem *nand_con = ebu_host->ebu + EBU_CON;
	u32 cs = ebu_host->cs_num;

	writel(EBU_CON_NANDM_EN | EBU_CON_CSMUX_E_EN | EBU_CON_CS_P_LOW |
	       EBU_CON_SE_P_LOW | EBU_CON_WP_P_LOW | EBU_CON_PRE_P_LOW |
	       EBU_CON_IN_CS_S(cs) | EBU_CON_OUT_CS_S(cs) |
	       EBU_CON_LAT_EN_CS_P, nand_con);
}
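
/*
 * Convert the generic SDR timing set into EBU_BUSCON cycle counts. The
 * clock period is computed in picoseconds (matching the units of struct
 * nand_sdr_timings), and each field takes the worst case of the two
 * constraints it must cover, rounded up to whole cycles.
 */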
static int ebu_nand_set_timings(struct nand_chip *chip, int csline,
				const struct nand_interface_config *conf)
{
	struct ebu_nand_controller *ctrl = nand_to_ebu(chip);
	unsigned int rate = clk_get_rate(ctrl->clk) / HZ_PER_MHZ;
	unsigned int period = DIV_ROUND_UP(USEC_PER_SEC, rate);
	const struct nand_sdr_timings *timings;
	u32 trecov, thold, twrwait, trdwait;
	u32 reg = 0;

	timings = nand_get_sdr_timings(conf);
	if (IS_ERR(timings))
		return PTR_ERR(timings);

	if (csline == NAND_DATA_IFACE_CHECK_ONLY)
		return 0;

	trecov = DIV_ROUND_UP(max(timings->tREA_max, timings->tREH_min),
			      period);
	reg |= EBU_BUSCON_RECOVC(trecov);

	thold = DIV_ROUND_UP(max(timings->tDH_min, timings->tDS_min), period);
	reg |= EBU_BUSCON_HOLDC(thold);

	trdwait = DIV_ROUND_UP(max(timings->tRC_min, timings->tREH_min),
			       period);
	reg |= EBU_BUSCON_WAITRDC(trdwait);

	twrwait = DIV_ROUND_UP(max(timings->tWC_min, timings->tWH_min), period);
	reg |= EBU_BUSCON_WAITWRC(twrwait);

	reg |= EBU_BUSCON_CMULT_V4 | EBU_BUSCON_BCGEN_CS | EBU_BUSCON_ALEC |
	       EBU_BUSCON_SETUP_EN;

	writel(reg, ctrl->ebu + EBU_BUSCON(ctrl->cs_num));

	return 0;
}

static int ebu_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
				  struct mtd_oob_region *oobregion)
{
	struct nand_chip *chip = mtd_to_nand(mtd);

	if (section)
		return -ERANGE;

	oobregion->offset = HSNAND_ECC_OFFSET;
	oobregion->length = chip->ecc.total;

	return 0;
}

static int ebu_nand_ooblayout_free(struct mtd_info *mtd, int section,
				   struct mtd_oob_region *oobregion)
{
	struct nand_chip *chip = mtd_to_nand(mtd);

	if (section)
		return -ERANGE;

	oobregion->offset = chip->ecc.total + HSNAND_ECC_OFFSET;
	oobregion->length = mtd->oobsize - oobregion->offset;

	return 0;
}

static const struct mtd_ooblayout_ops ebu_nand_ooblayout_ops = {
	.ecc = ebu_nand_ooblayout_ecc,
	.free = ebu_nand_ooblayout_free,
};
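
/*
 * DMA completion callbacks: tear down the channel asynchronously and wake
 * the waiter in ebu_dma_start().
 */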
static void ebu_dma_rx_callback(void *cookie)
{
	struct ebu_nand_controller *ebu_host = cookie;

	dmaengine_terminate_async(ebu_host->dma_rx);

	complete(&ebu_host->dma_access_complete);
}

static void ebu_dma_tx_callback(void *cookie)
{
	struct ebu_nand_controller *ebu_host = cookie;

	dmaengine_terminate_async(ebu_host->dma_tx);

	complete(&ebu_host->dma_access_complete);
}
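
/*
 * Map the page buffer, queue a single slave transfer on the rx or tx
 * channel (selected by dir), and wait up to one second for the completion
 * callback. The buffer is unmapped on both the success and error paths.
 */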
static int ebu_dma_start(struct ebu_nand_controller *ebu_host, u32 dir,
			 const u8 *buf, u32 len)
{
	struct dma_async_tx_descriptor *tx;
	struct completion *dma_completion;
	dma_async_tx_callback callback;
	struct dma_chan *chan;
	dma_cookie_t cookie;
	unsigned long flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
	dma_addr_t buf_dma;
	int ret;
	u32 timeout;

	if (dir == DMA_DEV_TO_MEM) {
		chan = ebu_host->dma_rx;
		dma_completion = &ebu_host->dma_access_complete;
		callback = ebu_dma_rx_callback;
	} else {
		chan = ebu_host->dma_tx;
		dma_completion = &ebu_host->dma_access_complete;
		callback = ebu_dma_tx_callback;
	}

	buf_dma = dma_map_single(chan->device->dev, (void *)buf, len, dir);
	if (dma_mapping_error(chan->device->dev, buf_dma)) {
		dev_err(ebu_host->dev, "Failed to map DMA buffer\n");
		return -EIO;
	}

	tx = dmaengine_prep_slave_single(chan, buf_dma, len, dir, flags);
	if (!tx) {
		ret = -ENXIO;
		goto out_unmap;
	}

	tx->callback = callback;
	tx->callback_param = ebu_host;
	cookie = tx->tx_submit(tx);

	ret = dma_submit_error(cookie);
	if (ret) {
		dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie);
		ret = -EIO;
		goto out_unmap;
	}

	init_completion(dma_completion);
	dma_async_issue_pending(chan);

	/* Wait for DMA to finish the data transfer. */
	timeout = wait_for_completion_timeout(dma_completion,
					      msecs_to_jiffies(1000));
	if (!timeout) {
		dev_err(ebu_host->dev, "I/O Error in DMA RX (status %d)\n",
			dmaengine_tx_status(chan, cookie, NULL));
		dmaengine_terminate_sync(chan);
		ret = -ETIMEDOUT;
	}

out_unmap:
	/* Unmap with the device that mapped the buffer (the DMA engine). */
	dma_unmap_single(chan->device->dev, buf_dma, len, dir);

	return ret;
}
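
/*
 * Arm the HSNAND engine for a full-page transfer: the command opcode and
 * page address go into CTL1/CTL2, PARA0 carries the geometry computed in
 * ebu_nand_attach_chip(), and setting HSNAND_CTL_GO starts the state
 * machine with ECC enabled.
 */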
static void ebu_nand_trigger(struct ebu_nand_controller *ebu_host,
			     int page, u32 cmd)
{
	unsigned int val;

	val = cmd | (page & 0xFF) << HSNAND_CTL1_ADDR_SHIFT;
	writel(val, ebu_host->hsnand + HSNAND_CTL1);
	val = (page & 0xFFFF00) >> 8 | HSNAND_CTL2_CYC_N_V5;
	writel(val, ebu_host->hsnand + HSNAND_CTL2);

	writel(ebu_host->nd_para0, ebu_host->hsnand + HSNAND_PARA0);

	/* clear first, will update later */
	writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_0);
	writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_1);

	writel(HSNAND_INT_MSK_CTL_WR_C,
	       ebu_host->hsnand + HSNAND_INT_MSK_CTL);

	if (cmd == NAND_CMD_READ0)
		val = HSNAND_CTL_RW_READ;
	else
		val = HSNAND_CTL_RW_WRITE;

	writel(HSNAND_CTL_MSG_EN | HSNAND_CTL_CKFF_EN |
	       HSNAND_CTL_ECC_OFF_V8TH | HSNAND_CTL_CE_SEL_CS(ebu_host->cs_num) |
	       HSNAND_CTL_ENABLE_ECC | HSNAND_CTL_GO | val,
	       ebu_host->hsnand + HSNAND_CTL);
}
static int ebu_nand_read_page_hwecc(struct nand_chip *chip, u8 *buf,
				    int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	int ret, reg_data;

	ebu_nand_trigger(ebu_host, page, NAND_CMD_READ0);

	ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize);
	if (ret)
		return ret;

	if (oob_required)
		chip->ecc.read_oob(chip, page);

	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);

	return 0;
}
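
/*
 * Hardware-ECC page write: DMA the main area to the device, latch the
 * first eight OOB bytes into the CMSG registers for the engine to
 * program, then poll INT_STA until the write-cycle status bit deasserts.
 */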
static int ebu_nand_write_page_hwecc(struct nand_chip *chip, const u8 *buf,
				     int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	void __iomem *int_sta = ebu_host->hsnand + HSNAND_INT_STA;
	int reg_data, ret, val;
	u32 reg;

	ebu_nand_trigger(ebu_host, page, NAND_CMD_SEQIN);

	ret = ebu_dma_start(ebu_host, DMA_MEM_TO_DEV, buf, mtd->writesize);
	if (ret)
		return ret;

	if (oob_required) {
		reg = get_unaligned_le32(chip->oob_poi);
		writel(reg, ebu_host->hsnand + HSNAND_CMSG_0);

		reg = get_unaligned_le32(chip->oob_poi + 4);
		writel(reg, ebu_host->hsnand + HSNAND_CMSG_1);
	}

	ret = readl_poll_timeout_atomic(int_sta, val,
					!(val & HSNAND_INT_STA_WR_C), 10, 1000);
	if (ret)
		return ret;

	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);

	return 0;
}

static const u8 ecc_strength[] = { 1, 1, 4, 8, 24, 32, 40, 60, };
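
/*
 * Select the BCH configuration and precompute the PARA0 geometry word
 * (val << 29 encodes the chosen entry of ecc_strength[]). For example, a
 * 2KiB page with 512-byte steps and 4-bit strength needs
 * DIV_ROUND_UP(4 * fls(8 * 512), 8) = 7 ECC bytes per step, 28 bytes in
 * total, which fits a 64-byte OOB alongside the 8 spare bytes reserved at
 * the start of the OOB area.
 */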
static int ebu_nand_attach_chip(struct nand_chip *chip)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u32 ecc_steps, ecc_bytes, ecc_total, pagesize, pg_per_blk;
	u32 ecc_strength_ds = chip->ecc.strength;
	u32 writesize = mtd->writesize;
	u32 blocksize = mtd->erasesize;
	int bch_algo, start, val;
	u32 ecc_size;

	/* Default to an ECC size of 512 */
	if (!chip->ecc.size)
		chip->ecc.size = 512;
	ecc_size = chip->ecc.size;

	switch (ecc_size) {
	case 512:
		start = 1;
		if (!ecc_strength_ds)
			ecc_strength_ds = 4;
		break;
	case 1024:
		start = 4;
		if (!ecc_strength_ds)
			ecc_strength_ds = 32;
		break;
	default:
		return -EINVAL;
	}

	/* BCH ECC algorithm Settings for number of bits per 512B/1024B */
	bch_algo = round_up(start + 1, 4);
	for (val = start; val < bch_algo; val++) {
		if (ecc_strength_ds == ecc_strength[val])
			break;
	}
	if (val == bch_algo)
		return -EINVAL;

	if (ecc_strength_ds == 8)
		ecc_bytes = 14;
	else
		ecc_bytes = DIV_ROUND_UP(ecc_strength_ds * fls(8 * ecc_size), 8);

	ecc_steps = writesize / ecc_size;
	ecc_total = ecc_steps * ecc_bytes;
	if ((ecc_total + 8) > mtd->oobsize)
		return -ERANGE;

	chip->ecc.total = ecc_total;
	pagesize = fls(writesize >> 11);
	if (pagesize > HSNAND_PARA0_PAGE_V8192)
		return -ERANGE;

	pg_per_blk = fls((blocksize / writesize) >> 6) / 8;
	if (pg_per_blk > HSNAND_PARA0_PIB_V256)
		return -ERANGE;

	ebu_host->nd_para0 = pagesize | pg_per_blk | HSNAND_PARA0_BYP_EN_NP |
			     HSNAND_PARA0_BYP_DEC_NP | HSNAND_PARA0_ADEP_EN |
			     HSNAND_PARA0_TYPE_ONFI | (val << 29);

	mtd_set_ooblayout(mtd, &ebu_nand_ooblayout_ops);
	chip->ecc.read_page = ebu_nand_read_page_hwecc;
	chip->ecc.write_page = ebu_nand_write_page_hwecc;

	return 0;
}
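
/*
 * ->exec_op() implementation: replay the generic NAND instruction stream
 * as byte accesses through the chip-select window, asserting CLE for
 * command bytes and ALE for address cycles.
 */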
static int ebu_nand_exec_op(struct nand_chip *chip,
			    const struct nand_operation *op, bool check_only)
{
	const struct nand_op_instr *instr = NULL;
	unsigned int op_id;
	int i, timeout_ms, ret = 0;

	if (check_only)
		return 0;

	ebu_select_chip(chip);
	for (op_id = 0; op_id < op->ninstrs; op_id++) {
		instr = &op->instrs[op_id];

		switch (instr->type) {
		case NAND_OP_CMD_INSTR:
			ebu_nand_writeb(chip, HSNAND_CLE_OFFS | HSNAND_CS_OFFS,
					instr->ctx.cmd.opcode);
			break;

		case NAND_OP_ADDR_INSTR:
			for (i = 0; i < instr->ctx.addr.naddrs; i++)
				ebu_nand_writeb(chip,
						HSNAND_ALE_OFFS | HSNAND_CS_OFFS,
						instr->ctx.addr.addrs[i]);
			break;

		case NAND_OP_DATA_IN_INSTR:
			ebu_read_buf(chip, instr->ctx.data.buf.in,
				     instr->ctx.data.len);
			break;

		case NAND_OP_DATA_OUT_INSTR:
			ebu_write_buf(chip, instr->ctx.data.buf.out,
				      instr->ctx.data.len);
			break;

		case NAND_OP_WAITRDY_INSTR:
			timeout_ms = instr->ctx.waitrdy.timeout_ms * 1000;
			ret = ebu_nand_waitrdy(chip, timeout_ms);
			break;
		}
	}

	return ret;
}

static const struct nand_controller_ops ebu_nand_controller_ops = {
	.attach_chip = ebu_nand_attach_chip,
	.setup_interface = ebu_nand_set_timings,
	.exec_op = ebu_nand_exec_op,
};

static void ebu_dma_cleanup(struct ebu_nand_controller *ebu_host)
{
	if (ebu_host->dma_rx)
		dma_release_channel(ebu_host->dma_rx);

	if (ebu_host->dma_tx)
		dma_release_channel(ebu_host->dma_tx);
}
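
/*
 * Probe relies on the resource names used below ("ebunand", "hsnand",
 * "nand_cs%d", "addr_sel%d") and on "tx"/"rx" DMA channel names; the
 * chip-select number comes from a "reg" u32 property. An illustrative
 * sketch of a matching node (the dt-bindings document is authoritative):
 *
 *	nand-controller {
 *		compatible = "intel,lgm-ebunand";
 *		reg = <...>, <...>, <...>, <...>;
 *		reg-names = "ebunand", "hsnand", "nand_cs0", "addr_sel0";
 *		dmas = <...>, <...>;
 *		dma-names = "tx", "rx";
 *	};
 */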
static int ebu_nand_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct ebu_nand_controller *ebu_host;
	struct nand_chip *nand;
	struct mtd_info *mtd;
	struct resource *res;
	char *resname;
	int ret;
	u32 cs;

	ebu_host = devm_kzalloc(dev, sizeof(*ebu_host), GFP_KERNEL);
	if (!ebu_host)
		return -ENOMEM;

	ebu_host->dev = dev;
	nand_controller_init(&ebu_host->controller);

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "ebunand");
	ebu_host->ebu = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ebu_host->ebu))
		return PTR_ERR(ebu_host->ebu);

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "hsnand");
	ebu_host->hsnand = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ebu_host->hsnand))
		return PTR_ERR(ebu_host->hsnand);

	ret = device_property_read_u32(dev, "reg", &cs);
	if (ret) {
		dev_err(dev, "failed to get chip select: %d\n", ret);
		return ret;
	}
	if (cs >= MAX_CS) {
		dev_err(dev, "got invalid chip select: %u\n", cs);
		return -EINVAL;
	}
	ebu_host->cs_num = cs;

	resname = devm_kasprintf(dev, GFP_KERNEL, "nand_cs%d", cs);
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, resname);
	ebu_host->cs[cs].chipaddr = devm_ioremap_resource(dev, res);
	if (IS_ERR(ebu_host->cs[cs].chipaddr))
		return PTR_ERR(ebu_host->cs[cs].chipaddr);
	ebu_host->cs[cs].nand_pa = res->start;

	ebu_host->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(ebu_host->clk))
		return dev_err_probe(dev, PTR_ERR(ebu_host->clk),
				     "failed to get clock\n");

	ret = clk_prepare_enable(ebu_host->clk);
	if (ret) {
		dev_err(dev, "failed to enable clock: %d\n", ret);
		return ret;
	}
	ebu_host->clk_rate = clk_get_rate(ebu_host->clk);

	ebu_host->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(ebu_host->dma_tx)) {
		ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx),
				    "failed to request DMA tx chan!\n");
		goto err_disable_unprepare_clk;
	}

	ebu_host->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(ebu_host->dma_rx)) {
		ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_rx),
				    "failed to request DMA rx chan!\n");
		ebu_host->dma_rx = NULL;
		goto err_cleanup_dma;
	}

	resname = devm_kasprintf(dev, GFP_KERNEL, "addr_sel%d", cs);
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, resname);
	if (!res) {
		ret = -EINVAL;
		goto err_cleanup_dma;
	}
	ebu_host->cs[cs].addr_sel = res->start;
	writel(ebu_host->cs[cs].addr_sel | EBU_ADDR_MASK(5) | EBU_ADDR_SEL_REGEN,
	       ebu_host->ebu + EBU_ADDR_SEL(cs));

	nand_set_flash_node(&ebu_host->chip, dev->of_node);

	mtd = nand_to_mtd(&ebu_host->chip);
	if (!mtd->name) {
		dev_err(ebu_host->dev, "NAND label property is mandatory\n");
		ret = -EINVAL;
		goto err_cleanup_dma;
	}

	mtd->dev.parent = dev;

	platform_set_drvdata(pdev, ebu_host);
	nand_set_controller_data(&ebu_host->chip, ebu_host);

	nand = &ebu_host->chip;
	nand->controller = &ebu_host->controller;
	nand->controller->ops = &ebu_nand_controller_ops;

	/* Scan to find existence of the device */
	ret = nand_scan(&ebu_host->chip, 1);
	if (ret)
		goto err_cleanup_dma;

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret)
		goto err_clean_nand;

	return 0;

err_clean_nand:
	nand_cleanup(&ebu_host->chip);
err_cleanup_dma:
	ebu_dma_cleanup(ebu_host);
err_disable_unprepare_clk:
	clk_disable_unprepare(ebu_host->clk);

	return ret;
}

static int ebu_nand_remove(struct platform_device *pdev)
{
	struct ebu_nand_controller *ebu_host = platform_get_drvdata(pdev);
	int ret;

	ret = mtd_device_unregister(nand_to_mtd(&ebu_host->chip));
	WARN_ON(ret);
	nand_cleanup(&ebu_host->chip);
	ebu_nand_disable(&ebu_host->chip);
	ebu_dma_cleanup(ebu_host);
	clk_disable_unprepare(ebu_host->clk);

	return 0;
}

static const struct of_device_id ebu_nand_match[] = {
	{ .compatible = "intel,nand-controller" },
	{ .compatible = "intel,lgm-ebunand" },
	{}
};
MODULE_DEVICE_TABLE(of, ebu_nand_match);

static struct platform_driver ebu_nand_driver = {
	.probe = ebu_nand_probe,
	.remove = ebu_nand_remove,
	.driver = {
		.name = "intel-nand-controller",
		.of_match_table = ebu_nand_match,
	},
};
module_platform_driver(ebu_nand_driver);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Vadivel Murugan R <vadivel.muruganx.ramuthevar@intel.com>");
MODULE_DESCRIPTION("Intel's LGM External Bus NAND Controller driver");