// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

/* Transmit operation:                                              */
/*                                                                  */
/* 0 byte transmit                                                  */
/* BUS:     S     A8     ACK   P(*)                                 */
/*                                                                  */
/* 1 byte transmit                                                  */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*)                   */
/* IRQ:       DTE   WAIT         WAIT                               */
/*                                                                  */
/* 2 byte transmit                                                  */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*)     */
/* IRQ:       DTE   WAIT         WAIT          WAIT                 */
/* ICDR:      A8         D8(1)         D8(2)                        */
/*                                                                  */
/* 3 bytes or more, +---------+ gets repeated                       */

/* Receive operation:                                                       */
/*                                                                          */
/* 0 byte receive - not supported since slave may hold SDA low              */
/*                                                                          */
/* 1 byte receive       [TX] | [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT     |   WAIT     DTE                               */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|   0xc0                                       */
/* ICDR:      A8             |   D8(1)                                      */
/*                                                                          */
/* 2 byte receive        [TX]| [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT     DTE                 */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|   0xc0                                       */
/* ICDR:      A8             |   D8(1)         D8(2)                        */
/*                                                                          */
/* 3 byte receive       [TX] | [RX]                                 (*)     */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK   P  */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT          WAIT     DTE   */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|   0xc0                                       */
/* ICDR:      A8             |   D8(1)         D8(2)         D8(3)          */
/*                                                                          */
/* 4 bytes or more, this part is repeated        +---------+                */

/* Interrupt order and BUSY flag                                            */
/*                                                                          */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/                 */
/* SCL      \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/                   */
/*                                                                          */
/*        S   D7  D6  D5  D4  D3  D2  D1  D0              P(*)              */
/*                                                                          */
/* WAIT IRQ ________________________________/   \___________                */
/* TACK IRQ ____________________________________/   \_______                */
/* DTE  IRQ __________________________________________/   \_                */
/* AL   IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX                */
/*          _______________________________________________                 */
/*                                                                          */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is  */
/* only cleared after the STOP condition, so, between messages we have to    */
/* poll for the DTE bit.                                                     */

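/*
 * Illustration only (not part of the original driver source): a client
 * driver reaches the sequences above through the generic i2c_transfer()
 * API. The chip address 0x50, the register offset and the lengths below
 * are made-up values for the sketch.
 *
 *	u8 reg = 0x10, data[2];
 *	struct i2c_msg msgs[] = {
 *		{ .addr = 0x50, .flags = 0,        .len = 1, .buf = &reg },
 *		{ .addr = 0x50, .flags = I2C_M_RD, .len = 2, .buf = data },
 *	};
 *	int ret = i2c_transfer(adapter, msgs, ARRAY_SIZE(msgs));
 *
 * Per the (*) note above, only the last message is followed by a STOP
 * unless I2C_M_STOP is set, so the two messages are joined by a repeated
 * START on the wire.
 */
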
enum sh_mobile_i2c_op {
	OP_START, OP_TX_FIRST, OP_TX, OP_TX_STOP, OP_TX_TO_RX,
	OP_RX, OP_RX_STOP, OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
	struct device *dev;
	void __iomem *reg;
	struct i2c_adapter adap;
	unsigned long bus_speed;
	unsigned int clks_per_count;
	struct clk *clk;
	u_int8_t icic;
	u_int8_t flags;
	u16 iccl;
	u16 icch;

	spinlock_t lock;
	wait_queue_head_t wait;
	struct i2c_msg *msg;
	int pos;
	int sr;
	bool send_stop;
	bool stop_after_dma;

	struct resource *res;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct scatterlist sg;
	enum dma_data_direction dma_direction;
	u8 *dma_buf;
};

struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};

#define IIC_FLAG_HAS_ICIC67	(1 << 0)

#define STANDARD_MODE		100000
#define FAST_MODE		400000

/* Register offsets */

#define ICCR_ICE	0x80
#define ICCR_RACK	0x40
#define ICCR_TRS	0x10
#define ICCR_BBSY	0x04
#define ICCR_SCP	0x01

#define ICSR_SCLM	0x80
#define ICSR_SDAM	0x40
#define ICSR_BUSY	0x10
#define ICSR_TACK	0x04
#define ICSR_WAIT	0x02
#define ICSR_DTE	0x01

#define ICIC_ICCLB8	0x80
#define ICIC_ICCHB8	0x40
#define ICIC_TDMAE	0x20
#define ICIC_RDMAE	0x10
#define ICIC_ALE	0x08
#define ICIC_TACKE	0x04
#define ICIC_WAITE	0x02
#define ICIC_DTEE	0x01

#define ICSTART_ICSTART	0x10

static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
	/* extra ICIC bits (ICCLB8/ICCHB8) are merged into every ICIC write */
	if (offs == ICIC)
		data |= pd->icic;

	iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}

static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCL >= COUNT_CLK * (tLOW + tf)
	 *
	 * SH-Mobile IIC hardware starts counting the LOW period of
	 * the SCL signal (tLOW) as soon as it pulls the SCL line.
	 * In order to meet the tLOW timing spec, we need to take into
	 * account the fall time of SCL signal (tf).  Default tf value
	 * should be 0.3 us, for safety.
	 */
	return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}

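/*
 * Worked example (illustrative numbers, not tied to any particular SoC):
 * with a 10 MHz internal count clock, count_khz = 10000.  In standard
 * mode tLOW = 47 and tf = 3 (both in tenths of a microsecond), so
 * ICCL = (10000 * (47 + 3) + 5000) / 10000 = 50, i.e. SCL is held low
 * for 50 count clocks = 5.0 us including the fall time.
 */
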
static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCH >= COUNT_CLK * (tHIGH + tf)
	 *
	 * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
	 * and can ignore it.  SH-Mobile IIC controller starts counting
	 * the HIGH period of the SCL signal (tHIGH) after the SCL input
	 * voltage increases at VIH.
	 *
	 * Afterward it turned out calculating ICCH using only tHIGH spec
	 * will result in violation of the tHD;STA timing spec.  We need
	 * to take into account the fall time of SDA signal (tf) at START
	 * condition, in order to meet both tHIGH and tHD;STA specs.
	 */
	return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}

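/*
 * Worked example (illustrative numbers): with the same 10 MHz count
 * clock in fast mode, tHIGH = 6 and tf = 3, so
 * ICCH = (10000 * (6 + 3) + 5000) / 10000 = 9, i.e. roughly 0.9 us of
 * SCL high time, which also covers tHD;STA at the START condition as
 * explained above.
 */
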
static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
	u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

	if (pd->iccl > max_val || pd->icch > max_val) {
		dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
			pd->iccl, pd->icch);
		return -EINVAL;
	}

	/* one more bit of ICCL in ICIC */
	if (pd->iccl & 0x100)
		pd->icic |= ICIC_ICCLB8;
	else
		pd->icic &= ~ICIC_ICCLB8;

	/* one more bit of ICCH in ICIC */
	if (pd->icch & 0x100)
		pd->icic |= ICIC_ICCHB8;
	else
		pd->icic &= ~ICIC_ICCHB8;

	dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
	return 0;
}

static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long i2c_clk_khz;
	u32 tHIGH, tLOW, tf;

	i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

	if (pd->bus_speed == STANDARD_MODE) {
		tLOW	= 47;	/* tLOW = 4.7 us */
		tHIGH	= 40;	/* tHD;STA = tHIGH = 4.0 us */
		tf	= 3;	/* tf = 0.3 us */
	} else if (pd->bus_speed == FAST_MODE) {
		tLOW	= 13;	/* tLOW = 1.3 us */
		tHIGH	= 6;	/* tHD;STA = tHIGH = 0.6 us */
		tf	= 3;	/* tf = 0.3 us */
	} else {
		dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
			pd->bus_speed);
		return -EINVAL;
	}

	pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
	pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

	return sh_mobile_i2c_check_timing(pd);
}

static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long clks_per_cycle;

	/* L = 5, H = 4, L + H = 9 */
	clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
	pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
	pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

	return sh_mobile_i2c_check_timing(pd);
}

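/*
 * Worked example (illustrative numbers): assuming a 50 MHz input clock,
 * clks_per_count = 1 and a 400 kHz bus, clks_per_cycle = 125.  Then
 * iccl = DIV_ROUND_UP(125 * 5 / 9 - 1, 1) = 68 and
 * icch = DIV_ROUND_UP(125 * 4 / 9 - 5, 1) = 50, preserving roughly the
 * 5:4 low/high split noted above.
 */
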
static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
	unsigned char ret = 0;
	unsigned long flags;

	dev_dbg(pd->dev, "op %d\n", op);

	spin_lock_irqsave(&pd->lock, flags);

	switch (op) {
	case OP_START: /* issue start and trigger DTE interrupt */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_FIRST: /* disable DTE interrupt and write client address */
		iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
		break;
	case OP_TX: /* write data */
		iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
		break;
	case OP_TX_STOP: /* issue a stop (or rep_start) */
		iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
					       : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_TO_RX: /* select read mode */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
		break;
	case OP_RX: /* just read data */
		ret = iic_rd(pd, ICDR);
		break;
	case OP_RX_STOP: /* enable DTE interrupt, issue stop */
		iic_wr(pd, ICIC,
		       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
		iic_wr(pd, ICIC,
		       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		ret = iic_rd(pd, ICDR);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	}

	spin_unlock_irqrestore(&pd->lock, flags);

	dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);

	return ret;
}

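/*
 * Reading aid (added note): the ICCR values 0x94, 0x81 and 0xc0 shown in
 * the protocol diagrams at the top of this file decode, using the bit
 * macros above, as:
 *
 *	0x94 = ICCR_ICE | ICCR_TRS | ICCR_BBSY	(OP_START)
 *	0x81 = ICCR_ICE | ICCR_SCP		(OP_TX_TO_RX)
 *	0xc0 = ICCR_ICE | ICCR_RACK		(OP_RX_STOP / OP_RX_STOP_DATA)
 *
 * OP_TX_STOP writes ICCR_ICE | ICCR_TRS (0x90) when a STOP is to be sent.
 */
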
static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
	if (pd->pos == pd->msg->len) {
		i2c_op(pd, OP_TX_STOP);
		return 1;
	}

	if (pd->pos == -1)
		i2c_op(pd, OP_TX_FIRST);
	else
		i2c_op(pd, OP_TX);

	pd->pos++;
	return 0;
}

static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
	unsigned char data;
	int real_pos;

	/* switch from TX (address) to RX (data) adds two interrupts */
	real_pos = pd->pos - 2;

	if (pd->pos == -1) {
		i2c_op(pd, OP_TX_FIRST);
	} else if (pd->pos == 0) {
		i2c_op(pd, OP_TX_TO_RX);
	} else if (pd->pos == pd->msg->len) {
		if (pd->stop_after_dma) {
			/* Simulate PIO end condition after DMA transfer */
			i2c_op(pd, OP_RX_STOP);
			pd->pos++;
			goto done;
		}

		if (real_pos < 0)
			i2c_op(pd, OP_RX_STOP);
		else
			data = i2c_op(pd, OP_RX_STOP_DATA);
	} else if (real_pos >= 0) {
		data = i2c_op(pd, OP_RX);
	}

	if (real_pos >= 0)
		pd->msg->buf[real_pos] = data;
 done:
	pd->pos++;
	return pd->pos == (pd->msg->len + 2);
}

static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
	struct sh_mobile_i2c_data *pd = dev_id;
	unsigned char sr;
	int wakeup = 0;

	sr = iic_rd(pd, ICSR);
	pd->sr |= sr; /* remember state */

	dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
		(pd->msg->flags & I2C_M_RD) ? "read" : "write",
		pd->pos, pd->msg->len);

	/* Kick off TxDMA after preface was done */
	if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
		iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
	else if (sr & (ICSR_AL | ICSR_TACK))
		/* don't interrupt transaction - continue to issue stop */
		iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
	else if (pd->msg->flags & I2C_M_RD)
		wakeup = sh_mobile_i2c_isr_rx(pd);
	else
		wakeup = sh_mobile_i2c_isr_tx(pd);

	/* Kick off RxDMA after preface was done */
	if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
		iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

	if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
		iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

	/* wake the waiting transfer thread once the message is finished */
	if (wakeup) {
		pd->sr |= SW_DONE;
		wake_up(&pd->wait);
	}

	/* defeat write posting to avoid spurious WAIT interrupts */
	iic_rd(pd, ICSR);

	return IRQ_HANDLED;
}

static void sh_mobile_i2c_dma_unmap(struct sh_mobile_i2c_data *pd)
{
	struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
				? pd->dma_rx : pd->dma_tx;

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}

static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd)
{
	if (pd->dma_direction == DMA_NONE)
		return;
	else if (pd->dma_direction == DMA_FROM_DEVICE)
		dmaengine_terminate_all(pd->dma_rx);
	else if (pd->dma_direction == DMA_TO_DEVICE)
		dmaengine_terminate_all(pd->dma_tx);

	sh_mobile_i2c_dma_unmap(pd);
}

static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_dma_unmap(pd);
	pd->pos = pd->msg->len;
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}

static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
				enum dma_transfer_direction dir, dma_addr_t port_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg;
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	memset(&cfg, 0, sizeof(cfg));
	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}

static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd);
		return;
	}

	dma_async_issue_pending(chan);
}

static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_start)
{
	if (do_start) {
		/* Initialize channel registers */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable channel and configure rx ack */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Set the clock */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;
	pd->sr = 0;

	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupts to begin with */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}

static int poll_dte(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		if (val & ICSR_DTE)
			break;

		if (val & ICSR_TACK)
			return -ENXIO;

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int poll_busy(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

		/* the interrupt handler may wake us up before the
		 * transfer is finished, so poll the hardware
		 * until we are done.
		 */
		if (!(val & ICSR_BUSY)) {
			/* handle missing acknowledge and arbitration lost */
			val |= pd->sr;
			if (val & ICSR_TACK)
				return -ENXIO;
			if (val & ICSR_AL)
				return -EAGAIN;
			break;
		}

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);
	struct i2c_msg *msg;
	int err = 0;
	int i;
	long timeout;

	/* Wake up device and enable clock */
	pm_runtime_get_sync(pd->dev);

	/* Process all messages */
	for (i = 0; i < num; i++) {
		bool do_start = pd->send_stop || !i;

		msg = &msgs[i];
		pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
		pd->stop_after_dma = false;

		start_ch(pd, msg, do_start);

		if (do_start)
			i2c_op(pd, OP_START);

		/* The interrupt handler takes care of the rest... */
		timeout = wait_event_timeout(pd->wait,
					     pd->sr & (ICSR_TACK | SW_DONE),
					     adapter->timeout);

		/* 'stop_after_dma' tells if DMA transfer was complete */
		i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg, pd->stop_after_dma);

		if (!timeout) {
			dev_err(pd->dev, "Transfer request timed out\n");
			if (pd->dma_direction != DMA_NONE)
				sh_mobile_i2c_cleanup_dma(pd);

			err = -ETIMEDOUT;
			break;
		}

		if (pd->send_stop)
			err = poll_busy(pd);
		else
			err = poll_dte(pd);
		if (err < 0)
			break;
	}

	/* Disable channel */
	iic_wr(pd, ICCR, ICCR_SCP);

	/* Disable clock and mark device as idle */
	pm_runtime_put_sync(pd->dev);

	return err ?: num;
}

static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality	= sh_mobile_i2c_func,
	.master_xfer	= sh_mobile_i2c_xfer,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};

/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, 0);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_init(pd);
}

static const struct sh_mobile_dt_config default_dt_config = {
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
	.setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);

static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}

	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}

static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct resource *res;
	resource_size_t n;
	int k = 0, ret;

	while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
		for (n = res->start; n <= res->end; n++) {
			ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
				return ret;
			}
		}
		k++;
	}

	return k > 0 ? 0 : -ENOENT;
}

static int sh_mobile_i2c_probe(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd;
	struct i2c_adapter *adap;
	struct resource *res;
	const struct sh_mobile_dt_config *config;
	int ret;
	u32 bus_speed;

	pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->clk = devm_clk_get(&dev->dev, NULL);
	if (IS_ERR(pd->clk)) {
		dev_err(&dev->dev, "cannot get clock\n");
		return PTR_ERR(pd->clk);
	}

	ret = sh_mobile_i2c_hook_irqs(dev, pd);
	if (ret)
		return ret;

	pd->dev = &dev->dev;
	platform_set_drvdata(dev, pd);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);

	pd->res = res;
	pd->reg = devm_ioremap_resource(&dev->dev, res);
	if (IS_ERR(pd->reg))
		return PTR_ERR(pd->reg);

	ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
	pd->bus_speed = (ret || !bus_speed) ? STANDARD_MODE : bus_speed;
	pd->clks_per_count = 1;

	/* Newer variants come with two new bits in ICIC */
	if (resource_size(res) > 0x17)
		pd->flags |= IIC_FLAG_HAS_ICIC67;

	pm_runtime_enable(&dev->dev);
	pm_runtime_get_sync(&dev->dev);

	config = of_device_get_match_data(&dev->dev);
	if (config) {
		pd->clks_per_count = config->clks_per_count;
		ret = config->setup(pd);
	} else {
		ret = sh_mobile_i2c_init(pd);
	}

	pm_runtime_put_sync(&dev->dev);
	if (ret)
		return ret;

	/* Init DMA */
	sg_init_table(&pd->sg, 1);
	pd->dma_direction = DMA_NONE;
	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

	/* setup the private data */
	adap = &pd->adap;
	i2c_set_adapdata(adap, pd);

	adap->owner = THIS_MODULE;
	adap->algo = &sh_mobile_i2c_algorithm;
	adap->quirks = &sh_mobile_i2c_quirks;
	adap->dev.parent = &dev->dev;
	adap->nr = dev->id;
	adap->dev.of_node = dev->dev.of_node;

	strlcpy(adap->name, dev->name, sizeof(adap->name));

	spin_lock_init(&pd->lock);
	init_waitqueue_head(&pd->wait);

	ret = i2c_add_numbered_adapter(adap);
	if (ret < 0) {
		sh_mobile_i2c_release_dma(pd);
		return ret;
	}

	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

	return 0;
}

static int sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
	return 0;
}

static struct platform_driver sh_mobile_i2c_driver = {
	.driver		= {
		.name		= "i2c-sh_mobile",
		.of_match_table = sh_mobile_i2c_dt_ids,
	},
	.probe		= sh_mobile_i2c_probe,
	.remove		= sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm and Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");