// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
/* Transmit operation:                                                      */
/*                                                                          */
/* 0 byte transmit                                                          */
/* BUS:     S     A8     ACK   P(*)                                         */
/*                                                                          */
/* 1 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT         WAIT                                       */
/*                                                                          */
/* 2 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT         WAIT          WAIT                         */
/* ICDR:      A8    D8(1)  D8(2)                                            */
/*                                                                          */
/* 3 bytes or more, +---------+ gets repeated                               */
/*                                                                          */
/* Receive operation:                                                       */
/*                                                                          */
/* 0 byte receive - not supported since slave may hold SDA low              */
/*                                                                          */
/* 1 byte receive       [TX] | [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT     |   WAIT     DTE                               */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |   D8(1)                                      */
/*                                                                          */
/* 2 byte receive       [TX] | [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT     DTE                 */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |   D8(1)    D8(2)                             */
/*                                                                          */
/* 3 byte receive       [TX] | [RX]                                     (*) */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK    P */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT          WAIT     DTE   */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |   D8(1)    D8(2)     D8(3)                   */
/*                                                                          */
/* 4 bytes or more, this part is repeated    +---------+                    */
/*                                                                          */
/* Interrupt order and BUSY flag                                            */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/                 */
/* SCL      \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/                   */
/*                                                                          */
/*        S   D7  D6  D5  D4  D3  D2  D1  D0              P(*)              */
/*                                                                          */
/* WAIT IRQ ________________________________/            \___________       */
/* TACK IRQ ____________________________________/   \_______                */
/* DTE  IRQ __________________________________________/  \_                 */
/* AL   IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX                */
/*          _______________________________________________                 */
/*                                                                          */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
/* only cleared after the STOP condition, so, between messages we have to   */
/* poll for the DTE bit.                                                    */
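
/*
 * Note on how the handlers below walk these diagrams: progress is tracked
 * in pd->pos, starting at -1 before the address byte goes out.  For receive
 * transfers the TX (address) to RX (data) switch adds two extra interrupts,
 * so the receive handler indexes the buffer with real_pos = pos - 2 and
 * reports completion once pos reaches msg->len + 2.
 */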
enum sh_mobile_i2c_op {
        OP_START = 0,
        OP_TX_FIRST,
        OP_TX,
        OP_TX_STOP,
        OP_TX_TO_RX,
        OP_RX,
        OP_RX_STOP,
        OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
        struct device *dev;
        void __iomem *reg;
        struct i2c_adapter adap;
        unsigned long bus_speed;
        unsigned int clks_per_count;
        struct clk *clk;
        u_int8_t icic;
        u_int8_t flags;
        u16 iccl;
        u16 icch;

        spinlock_t lock;
        wait_queue_head_t wait;
        struct i2c_msg *msg;
        int pos;
        int sr;
        bool send_stop;
        bool stop_after_dma;

        struct resource *res;
        struct dma_chan *dma_tx;
        struct dma_chan *dma_rx;
        struct scatterlist sg;
        enum dma_data_direction dma_direction;
        u8 *dma_buf;
};

struct sh_mobile_dt_config {
        int clks_per_count;
        int (*setup)(struct sh_mobile_i2c_data *pd);
};
#define IIC_FLAG_HAS_ICIC67	(1 << 0)

/* Register offsets */
#define ICDR			0x00
#define ICCR			0x04
#define ICSR			0x08
#define ICIC			0x0c
#define ICCL			0x10
#define ICCH			0x14
#define ICSTART			0x70

#define ICCR_ICE		0x80
#define ICCR_RACK		0x40
#define ICCR_TRS		0x10
#define ICCR_BBSY		0x04
#define ICCR_SCP		0x01

#define ICSR_SCLM		0x80
#define ICSR_SDAM		0x40
#define SW_DONE			0x20
#define ICSR_BUSY		0x10
#define ICSR_AL			0x08
#define ICSR_TACK		0x04
#define ICSR_WAIT		0x02
#define ICSR_DTE		0x01

#define ICIC_ICCLB8		0x80
#define ICIC_ICCHB8		0x40
#define ICIC_TDMAE		0x20
#define ICIC_RDMAE		0x10
#define ICIC_ALE		0x08
#define ICIC_TACKE		0x04
#define ICIC_WAITE		0x02
#define ICIC_DTEE		0x01

#define ICSTART_ICSTART		0x10
static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
        if (offs == ICIC)
                data |= pd->icic;

        iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
        return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
                        unsigned char set, unsigned char clr)
{
        iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}
static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
        /*
         * Conditional expression:
         *   ICCL >= COUNT_CLK * (tLOW + tf)
         *
         * SH-Mobile IIC hardware starts counting the LOW period of
         * the SCL signal (tLOW) as soon as it pulls the SCL line.
         * In order to meet the tLOW timing spec, we need to take into
         * account the fall time of SCL signal (tf).  Default tf value
         * should be 0.3 us, for safety.
         */
        return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}
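
/*
 * Worked example (illustrative figures, not taken from a datasheet): with
 * an effective IIC clock of 5000 kHz and Standard-mode values tLOW = 47
 * (4.7 us) and tf = 3 (0.3 us), both in units of 0.1 us, the formula gives
 * (5000 * 50 + 5000) / 10000 = 25, i.e. an SCL low period of 25 clock
 * counts = 5.0 us, which satisfies ICCL >= COUNT_CLK * (tLOW + tf).
 */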
static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
        /*
         * Conditional expression:
         *   ICCH >= COUNT_CLK * (tHIGH + tf)
         *
         * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
         * and can ignore it.  SH-Mobile IIC controller starts counting
         * the HIGH period of the SCL signal (tHIGH) after the SCL input
         * voltage increases at VIH.
         *
         * Afterward it turned out calculating ICCH using only tHIGH spec
         * will result in violation of the tHD;STA timing spec.  We need
         * to take into account the fall time of SDA signal (tf) at START
         * condition, in order to meet both tHIGH and tHD;STA specs.
         */
        return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}
static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
        u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

        if (pd->iccl > max_val || pd->icch > max_val) {
                dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
                        pd->iccl, pd->icch);
                return -EINVAL;
        }

        /* one more bit of ICCL in ICIC */
        if (pd->iccl & 0x100)
                pd->icic |= ICIC_ICCLB8;
        else
                pd->icic &= ~ICIC_ICCLB8;

        /* one more bit of ICCH in ICIC */
        if (pd->icch & 0x100)
                pd->icic |= ICIC_ICCHB8;
        else
                pd->icic &= ~ICIC_ICCHB8;

        dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
        return 0;
}
static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
        unsigned long i2c_clk_khz;
        u32 tHIGH, tLOW, tf;

        i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

        if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
                tLOW  = 47;	/* tLOW = 4.7 us */
                tHIGH = 40;	/* tHD;STA = tHIGH = 4.0 us */
                tf    = 3;	/* tf = 0.3 us */
        } else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
                tLOW  = 13;	/* tLOW = 1.3 us */
                tHIGH = 6;	/* tHD;STA = tHIGH = 0.6 us */
                tf    = 3;	/* tf = 0.3 us */
        } else {
                dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
                        pd->bus_speed);
                return -EINVAL;
        }

        pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
        pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

        return sh_mobile_i2c_check_timing(pd);
}
static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
        unsigned long clks_per_cycle;

        /* L = 5, H = 4, L + H = 9 */
        clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
        pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
        pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

        return sh_mobile_i2c_check_timing(pd);
}
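
/*
 * Worked example (illustrative figures only): with a 100 MHz module clock
 * and a 400 kHz bus, clks_per_cycle = 250.  Splitting the cycle 5/9 low and
 * 4/9 high, and assuming clks_per_count = 2, gives
 * iccl = DIV_ROUND_UP(250 * 5 / 9 - 1, 2) = 69 and
 * icch = DIV_ROUND_UP(250 * 4 / 9 - 5, 2) = 53.
 */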
static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
        unsigned char ret = 0;
        unsigned long flags;

        dev_dbg(pd->dev, "op %d\n", op);

        spin_lock_irqsave(&pd->lock, flags);

        switch (op) {
        case OP_START: /* issue start and trigger DTE interrupt */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
                break;
        case OP_TX_FIRST: /* disable DTE interrupt and write client address */
                iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
                break;
        case OP_TX: /* write data */
                iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
                break;
        case OP_TX_STOP: /* issue a stop (or rep_start) */
                iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
                                               : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
                break;
        case OP_TX_TO_RX: /* select read mode */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
                break;
        case OP_RX: /* just read data */
                ret = iic_rd(pd, ICDR);
                break;
        case OP_RX_STOP: /* enable DTE interrupt, issue stop */
                iic_wr(pd, ICIC,
                       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
                break;
        case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
                iic_wr(pd, ICIC,
                       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                ret = iic_rd(pd, ICDR);
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
                break;
        }

        spin_unlock_irqrestore(&pd->lock, flags);

        dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);

        return ret;
}
static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
        if (pd->pos == pd->msg->len) {
                i2c_op(pd, OP_TX_STOP);
                return 1;
        }

        if (pd->pos == -1)
                i2c_op(pd, OP_TX_FIRST);
        else
                i2c_op(pd, OP_TX);

        pd->pos++;
        return 0;
}
static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
        unsigned char data;
        int real_pos;

        /* switch from TX (address) to RX (data) adds two interrupts */
        real_pos = pd->pos - 2;

        if (pd->pos == -1) {
                i2c_op(pd, OP_TX_FIRST);
        } else if (pd->pos == 0) {
                i2c_op(pd, OP_TX_TO_RX);
        } else if (pd->pos == pd->msg->len) {
                if (pd->stop_after_dma) {
                        /* Simulate PIO end condition after DMA transfer */
                        i2c_op(pd, OP_RX_STOP);
                        pd->pos++;
                        goto done;
                }

                if (real_pos < 0)
                        i2c_op(pd, OP_RX_STOP);
                else
                        data = i2c_op(pd, OP_RX_STOP_DATA);
        } else if (real_pos >= 0) {
                data = i2c_op(pd, OP_RX);
        }

        if (real_pos >= 0)
                pd->msg->buf[real_pos] = data;
 done:
        pd->pos++;
        return pd->pos == (pd->msg->len + 2);
}
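
/*
 * Illustration (derived from the diagrams at the top of this file): for a
 * single-byte read, pd->pos advances -1 -> 0 -> 1 -> 2 -> 3 across four
 * interrupts, i.e. OP_TX_FIRST (slave address), OP_TX_TO_RX (switch to
 * read), OP_RX_STOP (request the stop), and finally OP_RX picking up D8(1)
 * once DTE fires; completion is reported when pos reaches msg->len + 2.
 */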
static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
        struct sh_mobile_i2c_data *pd = dev_id;
        unsigned char sr;
        int wakeup = 0;

        sr = iic_rd(pd, ICSR);
        pd->sr |= sr; /* remember state */

        dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
                (pd->msg->flags & I2C_M_RD) ? "read" : "write",
                pd->pos, pd->msg->len);

        /* Kick off TxDMA after preface was done */
        if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
                iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
        else if (sr & (ICSR_AL | ICSR_TACK))
                /* don't interrupt transaction - continue to issue stop */
                iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
        else if (pd->msg->flags & I2C_M_RD)
                wakeup = sh_mobile_i2c_isr_rx(pd);
        else
                wakeup = sh_mobile_i2c_isr_tx(pd);

        /* Kick off RxDMA after preface was done */
        if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
                iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

        if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
                iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

        if (wakeup) {
                pd->sr |= SW_DONE;
                wake_up(&pd->wait);
        }

        /* defeat write posting to avoid spurious WAIT interrupts */
        iic_rd(pd, ICSR);

        return IRQ_HANDLED;
}
static void sh_mobile_i2c_dma_unmap(struct sh_mobile_i2c_data *pd)
{
        struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
                                ? pd->dma_rx : pd->dma_tx;

        dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
                         pd->msg->len, pd->dma_direction);

        pd->dma_direction = DMA_NONE;
}
static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd)
{
        if (pd->dma_direction == DMA_NONE)
                return;
        else if (pd->dma_direction == DMA_FROM_DEVICE)
                dmaengine_terminate_all(pd->dma_rx);
        else if (pd->dma_direction == DMA_TO_DEVICE)
                dmaengine_terminate_all(pd->dma_tx);

        sh_mobile_i2c_dma_unmap(pd);
}
static void sh_mobile_i2c_dma_callback(void *data)
{
        struct sh_mobile_i2c_data *pd = data;

        sh_mobile_i2c_dma_unmap(pd);
        pd->pos = pd->msg->len;
        pd->stop_after_dma = true;

        iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}
static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
                                enum dma_transfer_direction dir, dma_addr_t port_addr)
{
        struct dma_chan *chan;
        struct dma_slave_config cfg;
        char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
        int ret;

        chan = dma_request_chan(dev, chan_name);
        if (IS_ERR(chan)) {
                dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
                        PTR_ERR(chan));
                return chan;
        }

        memset(&cfg, 0, sizeof(cfg));
        cfg.direction = dir;
        if (dir == DMA_MEM_TO_DEV) {
                cfg.dst_addr = port_addr;
                cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        } else {
                cfg.src_addr = port_addr;
                cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        }

        ret = dmaengine_slave_config(chan, &cfg);
        if (ret) {
                dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
                dma_release_channel(chan);
                return ERR_PTR(ret);
        }

        dev_dbg(dev, "got DMA channel for %s\n", chan_name);
        return chan;
}
static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
        bool read = pd->msg->flags & I2C_M_RD;
        enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
        struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
        struct dma_async_tx_descriptor *txdesc;
        dma_addr_t dma_addr;
        dma_cookie_t cookie;

        if (PTR_ERR(chan) == -EPROBE_DEFER) {
                if (read)
                        chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
                                                                           pd->res->start + ICDR);
                else
                        chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
                                                                           pd->res->start + ICDR);
        }

        if (IS_ERR(chan))
                return;

        dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
        if (dma_mapping_error(chan->device->dev, dma_addr)) {
                dev_dbg(pd->dev, "dma map failed, using PIO\n");
                return;
        }

        sg_dma_len(&pd->sg) = pd->msg->len;
        sg_dma_address(&pd->sg) = dma_addr;

        pd->dma_direction = dir;

        txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
                                         read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txdesc) {
                dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
                sh_mobile_i2c_cleanup_dma(pd);
                return;
        }

        txdesc->callback = sh_mobile_i2c_dma_callback;
        txdesc->callback_param = pd;

        cookie = dmaengine_submit(txdesc);
        if (dma_submit_error(cookie)) {
                dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
                sh_mobile_i2c_cleanup_dma(pd);
                return;
        }

        dma_async_issue_pending(chan);
}
static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
                     bool do_init)
{
        if (do_init) {
                /* Initialize channel registers */
                iic_wr(pd, ICCR, ICCR_SCP);

                /* Enable channel and configure rx ack */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

                /* Set the clock */
                iic_wr(pd, ICCL, pd->iccl & 0xff);
                iic_wr(pd, ICCH, pd->icch & 0xff);
        }

        pd->msg = usr_msg;
        pd->pos = -1;
        pd->sr = 0;

        pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
        if (pd->dma_buf)
                sh_mobile_i2c_xfer_dma(pd);

        /* Enable all interrupts to begin with */
        iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}
static int poll_dte(struct sh_mobile_i2c_data *pd)
{
        int i;

        for (i = 1000; i; i--) {
                u_int8_t val = iic_rd(pd, ICSR);

                if (val & ICSR_DTE)
                        break;

                if (val & ICSR_TACK)
                        return -ENXIO;

                udelay(10);
        }

        return i ? 0 : -ETIMEDOUT;
}
static int poll_busy(struct sh_mobile_i2c_data *pd)
{
        int i;

        for (i = 1000; i; i--) {
                u_int8_t val = iic_rd(pd, ICSR);

                dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

                /* the interrupt handler may wake us up before the
                 * transfer is finished, so poll the hardware
                 * until we're done.
                 */
                if (!(val & ICSR_BUSY)) {
                        /* handle missing acknowledge and arbitration lost */
                        if (pd->sr & ICSR_TACK)
                                return -ENXIO;
                        if (pd->sr & ICSR_AL)
                                return -EAGAIN;
                        break;
                }

                udelay(10);
        }

        return i ? 0 : -ETIMEDOUT;
}
static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
                              struct i2c_msg *msgs,
                              int num)
{
        struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);
        struct i2c_msg *msg;
        int err = 0;
        int i;
        long timeout;

        /* Wake up device and enable clock */
        pm_runtime_get_sync(pd->dev);

        /* Process all messages */
        for (i = 0; i < num; i++) {
                bool do_start = pd->send_stop || !i;

                msg = &msgs[i];
                pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
                pd->stop_after_dma = false;

                start_ch(pd, msg, do_start);

                if (do_start)
                        i2c_op(pd, OP_START);

                /* The interrupt handler takes care of the rest... */
                timeout = wait_event_timeout(pd->wait,
                                             pd->sr & (ICSR_TACK | SW_DONE),
                                             adapter->timeout);

                /* 'stop_after_dma' tells if DMA transfer was complete */
                i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg, pd->stop_after_dma);

                if (!timeout) {
                        dev_err(pd->dev, "Transfer request timed out\n");
                        if (pd->dma_direction != DMA_NONE)
                                sh_mobile_i2c_cleanup_dma(pd);

                        err = -ETIMEDOUT;
                        break;
                }

                if (pd->send_stop)
                        err = poll_busy(pd);
                else
                        err = poll_dte(pd);
                if (err < 0)
                        break;
        }

        /* Disable channel */
        iic_wr(pd, ICCR, ICCR_SCP);

        /* Disable clock and mark device as idle */
        pm_runtime_put_sync(pd->dev);

        return err ?: num;
}
static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
        return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
        .functionality = sh_mobile_i2c_func,
        .master_xfer = sh_mobile_i2c_xfer,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
        .flags = I2C_AQ_NO_ZERO_LEN_READ,
};
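
/*
 * Note: I2C_AQ_NO_ZERO_LEN_READ matches the "0 byte receive - not supported
 * since slave may hold SDA low" limitation described in the protocol
 * overview at the top of this file.
 */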
/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
        iic_set_clr(pd, ICCR, ICCR_ICE, 0);
        iic_rd(pd, ICCR); /* dummy read */

        iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
        iic_rd(pd, ICSTART); /* dummy read */

        udelay(10);

        iic_wr(pd, ICCR, ICCR_SCP);
        iic_wr(pd, ICSTART, 0);

        udelay(10);

        iic_wr(pd, ICCR, ICCR_TRS);
        udelay(10);
        iic_wr(pd, ICCR, 0);
        udelay(10);
        iic_wr(pd, ICCR, ICCR_TRS);
        udelay(10);

        return sh_mobile_i2c_init(pd);
}
static const struct sh_mobile_dt_config default_dt_config = {
        .clks_per_count = 2,
        .setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
        .clks_per_count = 2,
        .setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
        .clks_per_count = 2,
        .setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
        .clks_per_count = 2,
        .setup = sh_mobile_i2c_r8a7740_workaround,
};
static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
        { .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
        { .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
        { .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
        { .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
        { .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
        {},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);
static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
        if (!IS_ERR(pd->dma_tx)) {
                dma_release_channel(pd->dma_tx);
                pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
        }

        if (!IS_ERR(pd->dma_rx)) {
                dma_release_channel(pd->dma_rx);
                pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
        }
}
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
        struct resource *res;
        resource_size_t n;
        int k = 0, ret;

        while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
                for (n = res->start; n <= res->end; n++) {
                        ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
                                               0, dev_name(&dev->dev), pd);
                        if (ret) {
                                dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
                                return ret;
                        }
                }
                k++;
        }

        return k > 0 ? 0 : -ENOENT;
}
static int sh_mobile_i2c_probe(struct platform_device *dev)
{
        struct sh_mobile_i2c_data *pd;
        struct i2c_adapter *adap;
        struct resource *res;
        const struct sh_mobile_dt_config *config;
        int ret;
        u32 bus_speed;

        pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
        if (!pd)
                return -ENOMEM;

        pd->clk = devm_clk_get(&dev->dev, NULL);
        if (IS_ERR(pd->clk)) {
                dev_err(&dev->dev, "cannot get clock\n");
                return PTR_ERR(pd->clk);
        }

        ret = sh_mobile_i2c_hook_irqs(dev, pd);
        if (ret)
                return ret;

        pd->dev = &dev->dev;
        platform_set_drvdata(dev, pd);

        res = platform_get_resource(dev, IORESOURCE_MEM, 0);

        pd->res = res;
        pd->reg = devm_ioremap_resource(&dev->dev, res);
        if (IS_ERR(pd->reg))
                return PTR_ERR(pd->reg);

        ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
        pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
        pd->clks_per_count = 1;

        /* Newer variants come with two new bits in ICIC */
        if (resource_size(res) > 0x17)
                pd->flags |= IIC_FLAG_HAS_ICIC67;

        pm_runtime_enable(&dev->dev);
        pm_runtime_get_sync(&dev->dev);

        config = of_device_get_match_data(&dev->dev);
        if (config) {
                pd->clks_per_count = config->clks_per_count;
                ret = config->setup(pd);
        } else {
                ret = sh_mobile_i2c_init(pd);
        }

        pm_runtime_put_sync(&dev->dev);
        if (ret)
                return ret;

        /* Init DMA */
        sg_init_table(&pd->sg, 1);
        pd->dma_direction = DMA_NONE;
        pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

        /* setup the private data */
        adap = &pd->adap;
        i2c_set_adapdata(adap, pd);

        adap->owner = THIS_MODULE;
        adap->algo = &sh_mobile_i2c_algorithm;
        adap->quirks = &sh_mobile_i2c_quirks;
        adap->dev.parent = &dev->dev;
        adap->retries = 5;
        adap->nr = dev->id;
        adap->dev.of_node = dev->dev.of_node;

        strlcpy(adap->name, dev->name, sizeof(adap->name));

        spin_lock_init(&pd->lock);
        init_waitqueue_head(&pd->wait);

        ret = i2c_add_numbered_adapter(adap);
        if (ret < 0) {
                sh_mobile_i2c_release_dma(pd);
                return ret;
        }

        dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

        return 0;
}
static int sh_mobile_i2c_remove(struct platform_device *dev)
{
        struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

        i2c_del_adapter(&pd->adap);
        sh_mobile_i2c_release_dma(pd);
        pm_runtime_disable(&dev->dev);
        return 0;
}
static struct platform_driver sh_mobile_i2c_driver = {
        .driver = {
                .name = "i2c-sh_mobile",
                .of_match_table = sh_mobile_i2c_dt_ids,
        },
        .probe = sh_mobile_i2c_probe,
        .remove = sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
        return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
        platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm and Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");