// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
/* Transmit operation:                                                      */
/*                                                                          */
/* 0 byte transmit                                                          */
/* BUS:     S     A8     ACK   P(*)                                         */
/*                                                                          */
/* 1 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT         WAIT                                       */
/*                                                                          */
/* 2 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT         WAIT          WAIT                         */
/* ICDR:      A8    D8(1)       D8(2)                                       */
/*                                                                          */
/* 3 bytes or more, +---------+ gets repeated                               */
/*                                                                          */
/* Receive operation:                                                       */
/*                                                                          */
/* 0 byte receive - not supported since slave may hold SDA low              */
/*                                                                          */
/* 1 byte receive       [TX] | [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT     |   WAIT     DTE                               */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |            D8(1)                             */
/*                                                                          */
/* 2 byte receive        [TX]| [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT     DTE                 */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |           D8(1)        D8(2)                 */
/*                                                                          */
/* 3 byte receive       [TX] | [RX]                                  (*)    */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK   P  */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT          WAIT     DTE   */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR: 0x94       0x81     |   0xc0                                       */
/* ICDR:      A8             |           D8(1)        D8(2)        D8(3)    */
/*                                                                          */
/* 4 bytes or more, this part is repeated    +---------+                    */
/*                                                                          */
/* Interrupt order and BUSY flag                                            */
/*                                                                          */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/                 */
/* SCL      \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/                   */
/*                                                                          */
/*          S   D7  D6  D5  D4  D3  D2  D1  D0              P(*)            */
/*                                                                          */
/* WAIT IRQ ________________________________/               \___________    */
/* TACK IRQ ____________________________________/           \_______        */
/* DTE  IRQ __________________________________________/     \_              */
/* AL   IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX                */
/*            _______________________________________________              */
/* BUSY    __/                                               \__            */
/*                                                                          */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
/* only cleared after the STOP condition, so, between messages we have to   */
/* poll for the DTE bit.                                                    */
/*                                                                          */
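/*
 * For reference, a decomposition of the ICCR values used above (derived
 * from the i2c_op() cases and the ICCR_* bit definitions below, not from
 * a datasheet):
 *   0x94 = ICCR_ICE | ICCR_TRS | ICCR_BBSY  - start (or repeated start)
 *   0x81 = ICCR_ICE | ICCR_SCP              - switch to receive mode
 *   0xc0 = ICCR_ICE | ICCR_RACK             - stop after receive
 * The transmit-side stop writes ICCR_ICE | ICCR_TRS (0x90) instead.
 */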
enum sh_mobile_i2c_op {
        OP_START = 0,
        OP_TX_FIRST,
        OP_TX,
        OP_TX_STOP,
        OP_TX_TO_RX,
        OP_RX,
        OP_RX_STOP,
        OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
        struct device *dev;
        void __iomem *reg;
        struct i2c_adapter adap;
        unsigned long bus_speed;
        unsigned int clks_per_count;
        struct clk *clk;
        u8 icic;
        u8 flags;
        u16 iccl;
        u16 icch;

        spinlock_t lock;
        wait_queue_head_t wait;
        struct i2c_msg *msg;
        int pos;
        int sr;
        bool send_stop;
        bool stop_after_dma;

        struct resource *res;
        struct dma_chan *dma_tx;
        struct dma_chan *dma_rx;
        struct scatterlist sg;
        enum dma_data_direction dma_direction;
        u8 *dma_buf;
};

struct sh_mobile_dt_config {
        int clks_per_count;
        int (*setup)(struct sh_mobile_i2c_data *pd);
};
#define IIC_FLAG_HAS_ICIC67     (1 << 0)

/* Register offsets */

#define ICCR_ICE        0x80
#define ICCR_RACK       0x40
#define ICCR_TRS        0x10
#define ICCR_BBSY       0x04
#define ICCR_SCP        0x01

#define ICSR_SCLM       0x80
#define ICSR_SDAM       0x40
#define ICSR_BUSY       0x10
#define ICSR_AL         0x08
#define ICSR_TACK       0x04
#define ICSR_WAIT       0x02
#define ICSR_DTE        0x01

#define ICIC_ICCLB8     0x80
#define ICIC_ICCHB8     0x40
#define ICIC_TDMAE      0x20
#define ICIC_RDMAE      0x10
#define ICIC_ALE        0x08
#define ICIC_TACKE      0x04
#define ICIC_WAITE      0x02
#define ICIC_DTEE       0x01

#define ICSTART_ICSTART 0x10
static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
        if (offs == ICIC)
                data |= pd->icic;

        iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
        return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
                        unsigned char set, unsigned char clr)
{
        iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}
static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
        /*
         * Conditional expression:
         *   ICCL >= COUNT_CLK * (tLOW + tf)
         *
         * SH-Mobile IIC hardware starts counting the LOW period of
         * the SCL signal (tLOW) as soon as it pulls the SCL line.
         * In order to meet the tLOW timing spec, we need to take into
         * account the fall time of SCL signal (tf).  Default tf value
         * should be 0.3 us, for safety.
         */
        return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}
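/*
 * Worked example (hypothetical numbers, not taken from a datasheet):
 * with a 10000 kHz count clock, tLOW = 47 (4.7 us in tenths of us) and
 * tf = 3 (0.3 us), ICCL = (10000 * 50 + 5000) / 10000 = 50 counts,
 * i.e. 5.0 us of SCL low time at 10 MHz.
 */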
static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
        /*
         * Conditional expression:
         *   ICCH >= COUNT_CLK * (tHIGH + tf)
         *
         * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
         * and can ignore it.  SH-Mobile IIC controller starts counting
         * the HIGH period of the SCL signal (tHIGH) after the SCL input
         * voltage increases at VIH.
         *
         * Afterward it turned out calculating ICCH using only tHIGH spec
         * will result in violation of the tHD;STA timing spec.  We need
         * to take into account the fall time of SDA signal (tf) at START
         * condition, in order to meet both tHIGH and tHD;STA specs.
         */
        return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}
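/*
 * Worked example (same hypothetical 10 MHz count clock as above): for
 * standard mode, tHIGH = 40 and tf = 3 give
 * ICCH = (10000 * 43 + 5000) / 10000 = 43 counts, i.e. 4.3 us, which
 * covers both the 4.0 us tHIGH minimum and the SDA fall time at START.
 */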
static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
        u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

        if (pd->iccl > max_val || pd->icch > max_val) {
                dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
                        pd->iccl, pd->icch);
                return -EINVAL;
        }

        /* one more bit of ICCL in ICIC */
        if (pd->iccl & 0x100)
                pd->icic |= ICIC_ICCLB8;
        else
                pd->icic &= ~ICIC_ICCLB8;

        /* one more bit of ICCH in ICIC */
        if (pd->icch & 0x100)
                pd->icic |= ICIC_ICCHB8;
        else
                pd->icic &= ~ICIC_ICCHB8;

        dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
        return 0;
}
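/*
 * Example (hypothetical value): on a variant with IIC_FLAG_HAS_ICIC67,
 * iccl = 0x125 is accepted; bit 8 is carried in ICIC via ICIC_ICCLB8 and
 * only the low byte (0x25) is later written to the ICCL register in
 * start_ch().
 */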
static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
        unsigned long i2c_clk_khz;
        u32 tHIGH, tLOW, tf;

        i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

        if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
                tLOW  = 47;     /* tLOW = 4.7 us */
                tHIGH = 40;     /* tHD;STA = tHIGH = 4.0 us */
                tf    = 3;      /* tf = 0.3 us */
        } else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
                tLOW  = 13;     /* tLOW = 1.3 us */
                tHIGH = 6;      /* tHD;STA = tHIGH = 0.6 us */
                tf    = 3;      /* tf = 0.3 us */
        } else {
                dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
                        pd->bus_speed);
                return -EINVAL;
        }

        pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
        pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

        return sh_mobile_i2c_check_timing(pd);
}
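/*
 * Example (hypothetical clock tree): with a 20 MHz peripheral clock and
 * clks_per_count = 2, the count clock is 10 MHz (i2c_clk_khz = 10000),
 * which plugs straight into the ICCL/ICCH examples above.
 */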
static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
        unsigned long clks_per_cycle;

        /* L = 5, H = 4, L + H = 9 */
        clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
        pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
        pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

        return sh_mobile_i2c_check_timing(pd);
}
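/*
 * Worked example (hypothetical rates): clk = 90 MHz and bus_speed = 400 kHz
 * give clks_per_cycle = 225.  With clks_per_count = 1 this yields
 * iccl = DIV_ROUND_UP(225 * 5 / 9 - 1, 1) = 124 and
 * icch = DIV_ROUND_UP(225 * 4 / 9 - 5, 1) = 95, i.e. roughly a 5:4
 * low/high split of the SCL period minus the fixed hardware offsets.
 */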
static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
        unsigned char ret = 0;
        unsigned long flags;

        dev_dbg(pd->dev, "op %d\n", op);

        spin_lock_irqsave(&pd->lock, flags);

        switch (op) {
        case OP_START: /* issue start and trigger DTE interrupt */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
                break;
        case OP_TX_FIRST: /* disable DTE interrupt and write client address */
                iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
                break;
        case OP_TX: /* write data */
                iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
                break;
        case OP_TX_STOP: /* issue a stop (or rep_start) */
                iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
                                               : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
                break;
        case OP_TX_TO_RX: /* select read mode */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
                break;
        case OP_RX: /* just read data */
                ret = iic_rd(pd, ICDR);
                break;
        case OP_RX_STOP: /* enable DTE interrupt, issue stop */
                iic_wr(pd, ICIC,
                       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
                break;
        case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
                iic_wr(pd, ICIC,
                       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
                ret = iic_rd(pd, ICDR);
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
                break;
        }

        spin_unlock_irqrestore(&pd->lock, flags);

        dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);

        return ret;
}
static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
        if (pd->pos == pd->msg->len) {
                i2c_op(pd, OP_TX_STOP);
                return 1;
        }

        if (pd->pos == -1)
                i2c_op(pd, OP_TX_FIRST);
        else
                i2c_op(pd, OP_TX);

        pd->pos++;
        return 0;
}
static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
        int real_pos;

        /* switch from TX (address) to RX (data) adds two interrupts */
        real_pos = pd->pos - 2;

        if (pd->pos < 0) {
                i2c_op(pd, OP_TX_FIRST);
        } else if (pd->pos == 0) {
                i2c_op(pd, OP_TX_TO_RX);
        } else if (pd->pos == pd->msg->len) {
                if (pd->stop_after_dma) {
                        /* Simulate PIO end condition after DMA transfer */
                        i2c_op(pd, OP_RX_STOP);
                        pd->pos++;
                        goto done;
                }

                if (real_pos < 0)
                        i2c_op(pd, OP_RX_STOP);
                else
                        pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
        } else if (real_pos >= 0) {
                pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
        }

 done:
        pd->pos++;
        return pd->pos == (pd->msg->len + 2);
}
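/*
 * Example trace of a 1-byte read through the handler above (derived from
 * the code, interrupt numbering as in the diagrams): pd->pos starts at -1.
 * IRQ 1 (DTE): OP_TX_FIRST writes the address, pos -> 0.  IRQ 2 (WAIT):
 * OP_TX_TO_RX switches to read mode, pos -> 1.  IRQ 3 (WAIT): pos == len
 * and real_pos < 0, so OP_RX_STOP, pos -> 2.  IRQ 4 (DTE): real_pos == 0,
 * the byte is read via OP_RX, pos -> 3 == len + 2 and the handler reports
 * completion to the interrupt routine.
 */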
static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
        struct sh_mobile_i2c_data *pd = dev_id;
        unsigned char sr;
        int wakeup = 0;

        sr = iic_rd(pd, ICSR);
        pd->sr |= sr; /* remember state */

        dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
                (pd->msg->flags & I2C_M_RD) ? "read" : "write",
                pd->pos, pd->msg->len);

        /* Kick off TxDMA after preface was done */
        if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
                iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
        else if (sr & (ICSR_AL | ICSR_TACK))
                /* don't interrupt transaction - continue to issue stop */
                iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
        else if (pd->msg->flags & I2C_M_RD)
                wakeup = sh_mobile_i2c_isr_rx(pd);
        else
                wakeup = sh_mobile_i2c_isr_tx(pd);

        /* Kick off RxDMA after preface was done */
        if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
                iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

        if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
                iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

        if (wakeup) {
                pd->sr |= SW_DONE;
                wake_up(&pd->wait);
        }

        /* defeat write posting to avoid spurious WAIT interrupts */
        iic_rd(pd, ICSR);

        return IRQ_HANDLED;
}
static void sh_mobile_i2c_dma_unmap(struct sh_mobile_i2c_data *pd)
{
        struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
                                ? pd->dma_rx : pd->dma_tx;

        dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
                         pd->msg->len, pd->dma_direction);

        pd->dma_direction = DMA_NONE;
}
static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd)
{
        if (pd->dma_direction == DMA_NONE)
                return;
        else if (pd->dma_direction == DMA_FROM_DEVICE)
                dmaengine_terminate_all(pd->dma_rx);
        else if (pd->dma_direction == DMA_TO_DEVICE)
                dmaengine_terminate_all(pd->dma_tx);

        sh_mobile_i2c_dma_unmap(pd);
}
static void sh_mobile_i2c_dma_callback(void *data)
{
        struct sh_mobile_i2c_data *pd = data;

        sh_mobile_i2c_dma_unmap(pd);
        pd->pos = pd->msg->len;
        pd->stop_after_dma = true;

        iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}
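/*
 * Note on the DMA/PIO handoff (summarising the callback above and the ISR):
 * the DMA completion callback does not finish the transfer itself.  It
 * marks the message as fully transferred (pos = len, stop_after_dma = true)
 * and disables the DMA request bits; the next WAIT interrupt then runs the
 * normal PIO state machine, which issues the STOP and completes the
 * transfer exactly as in the PIO case.
 */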
static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
                                enum dma_transfer_direction dir, dma_addr_t port_addr)
{
        struct dma_chan *chan;
        struct dma_slave_config cfg;
        char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
        int ret;

        chan = dma_request_chan(dev, chan_name);
        if (IS_ERR(chan)) {
                dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
                        PTR_ERR(chan));
                return chan;
        }

        memset(&cfg, 0, sizeof(cfg));

        if (dir == DMA_MEM_TO_DEV) {
                cfg.dst_addr = port_addr;
                cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        } else {
                cfg.src_addr = port_addr;
                cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        }

        ret = dmaengine_slave_config(chan, &cfg);
        if (ret) {
                dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
                dma_release_channel(chan);
                return ERR_PTR(ret);
        }

        dev_dbg(dev, "got DMA channel for %s\n", chan_name);
        return chan;
}
static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
        bool read = pd->msg->flags & I2C_M_RD;
        enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
        struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
        struct dma_async_tx_descriptor *txdesc;
        dma_addr_t dma_addr;
        dma_cookie_t cookie;

        if (PTR_ERR(chan) == -EPROBE_DEFER) {
                if (read)
                        chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
                                                                           pd->res->start + ICDR);
                else
                        chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
                                                                           pd->res->start + ICDR);
        }

        if (IS_ERR(chan))
                return;

        dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
        if (dma_mapping_error(chan->device->dev, dma_addr)) {
                dev_dbg(pd->dev, "dma map failed, using PIO\n");
                return;
        }

        sg_dma_len(&pd->sg) = pd->msg->len;
        sg_dma_address(&pd->sg) = dma_addr;

        pd->dma_direction = dir;

        txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
                                         read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txdesc) {
                dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
                sh_mobile_i2c_cleanup_dma(pd);
                return;
        }

        txdesc->callback = sh_mobile_i2c_dma_callback;
        txdesc->callback_param = pd;

        cookie = dmaengine_submit(txdesc);
        if (dma_submit_error(cookie)) {
                dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
                sh_mobile_i2c_cleanup_dma(pd);
                return;
        }

        dma_async_issue_pending(chan);
}
static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
                     bool do_init)
{
        if (do_init) {
                /* Initialize channel registers */
                iic_wr(pd, ICCR, ICCR_SCP);

                /* Enable channel and configure rx ack */
                iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

                /* Set the clock */
                iic_wr(pd, ICCL, pd->iccl & 0xff);
                iic_wr(pd, ICCH, pd->icch & 0xff);
        }

        pd->msg = usr_msg;
        pd->pos = -1;
        pd->sr = 0;

        pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
        if (pd->dma_buf)
                sh_mobile_i2c_xfer_dma(pd);

        /* Enable all interrupts to begin with */
        iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}
static int poll_dte(struct sh_mobile_i2c_data *pd)
{
        int i;

        for (i = 1000; i; i--) {
                u_int8_t val = iic_rd(pd, ICSR);

                if (val & ICSR_DTE)
                        break;

                if (val & ICSR_TACK)
                        return -ENXIO;

                udelay(10);
        }

        return i ? 0 : -ETIMEDOUT;
}
static int poll_busy(struct sh_mobile_i2c_data *pd)
{
        int i;

        for (i = 1000; i; i--) {
                u_int8_t val = iic_rd(pd, ICSR);

                dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

                /* the interrupt handler may wake us up before the
                 * transfer is finished, so poll the hardware
                 * until we're done.
                 */
                if (!(val & ICSR_BUSY)) {
                        /* handle missing acknowledge and arbitration lost */
                        val |= pd->sr;
                        if (val & ICSR_TACK)
                                return -ENXIO;
                        if (val & ICSR_AL)
                                return -EAGAIN;
                        break;
                }

                udelay(10);
        }

        return i ? 0 : -ETIMEDOUT;
}
static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
                              struct i2c_msg *msgs,
                              int num)
{
        struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);
        struct i2c_msg *msg;
        int err = 0;
        int i;
        long timeout;

        /* Wake up device and enable clock */
        pm_runtime_get_sync(pd->dev);

        /* Process all messages */
        for (i = 0; i < num; i++) {
                bool do_start = pd->send_stop || !i;

                msg = &msgs[i];
                pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
                pd->stop_after_dma = false;

                start_ch(pd, msg, do_start);

                if (do_start)
                        i2c_op(pd, OP_START);

                /* The interrupt handler takes care of the rest... */
                timeout = wait_event_timeout(pd->wait,
                                             pd->sr & (ICSR_TACK | SW_DONE),
                                             adapter->timeout);

                /* 'stop_after_dma' tells if DMA transfer was complete */
                i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg, pd->stop_after_dma);

                if (!timeout) {
                        dev_err(pd->dev, "Transfer request timed out\n");
                        if (pd->dma_direction != DMA_NONE)
                                sh_mobile_i2c_cleanup_dma(pd);

                        err = -ETIMEDOUT;
                        break;
                }

                if (pd->send_stop)
                        err = poll_busy(pd);
                else
                        err = poll_dte(pd);
                if (err < 0)
                        break;
        }

        /* Disable channel */
        iic_wr(pd, ICCR, ICCR_SCP);

        /* Disable clock and mark device as idle */
        pm_runtime_put_sync(pd->dev);

        return err ?: num;
}
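/*
 * Note on message chaining (as implemented above): do_start is true for the
 * first message and whenever the previous message ended with a STOP, so a
 * multi-message transfer without I2C_M_STOP flags uses repeated START
 * conditions in between and only the last message sends a STOP.
 */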
static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
        return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
        .functionality = sh_mobile_i2c_func,
        .master_xfer = sh_mobile_i2c_xfer,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
        .flags = I2C_AQ_NO_ZERO_LEN_READ,
};
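/*
 * Zero-length reads are rejected via I2C_AQ_NO_ZERO_LEN_READ above; see the
 * "0 byte receive - not supported" note in the protocol description at the
 * top of this file.
 */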
/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
        iic_set_clr(pd, ICCR, ICCR_ICE, 0);
        iic_rd(pd, ICCR); /* dummy read */

        iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
        iic_rd(pd, ICSTART); /* dummy read */

        udelay(10);

        iic_wr(pd, ICCR, ICCR_SCP);
        iic_wr(pd, ICSTART, 0);

        udelay(10);

        iic_wr(pd, ICCR, ICCR_TRS);
        udelay(10);
        iic_wr(pd, ICCR, ICCR_SCP);
        udelay(10);
        iic_wr(pd, ICCR, ICCR_TRS);
        udelay(10);

        return sh_mobile_i2c_init(pd);
}
static const struct sh_mobile_dt_config default_dt_config = {
        .setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
        .setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
        .setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
        .setup = sh_mobile_i2c_r8a7740_workaround,
};
static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
        { .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
        { .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
        { .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
        { .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
        { .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
        { .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
        {},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);
static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
        if (!IS_ERR(pd->dma_tx)) {
                dma_release_channel(pd->dma_tx);
                pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
        }

        if (!IS_ERR(pd->dma_rx)) {
                dma_release_channel(pd->dma_rx);
                pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
        }
}
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
        struct resource *res;
        resource_size_t n;
        int k = 0, ret;

        while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
                for (n = res->start; n <= res->end; n++) {
                        ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
                                               0, dev_name(&dev->dev), pd);
                        if (ret) {
                                dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
                                return ret;
                        }
                }
                k++;
        }

        return k > 0 ? 0 : -ENOENT;
}
static int sh_mobile_i2c_probe(struct platform_device *dev)
{
        struct sh_mobile_i2c_data *pd;
        struct i2c_adapter *adap;
        struct resource *res;
        const struct sh_mobile_dt_config *config;
        int ret;
        u32 bus_speed;

        pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
        if (!pd)
                return -ENOMEM;

        pd->clk = devm_clk_get(&dev->dev, NULL);
        if (IS_ERR(pd->clk)) {
                dev_err(&dev->dev, "cannot get clock\n");
                return PTR_ERR(pd->clk);
        }

        ret = sh_mobile_i2c_hook_irqs(dev, pd);
        if (ret)
                return ret;

        pd->dev = &dev->dev;
        platform_set_drvdata(dev, pd);

        res = platform_get_resource(dev, IORESOURCE_MEM, 0);

        pd->res = res;
        pd->reg = devm_ioremap_resource(&dev->dev, res);
        if (IS_ERR(pd->reg))
                return PTR_ERR(pd->reg);

        ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
        pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
        pd->clks_per_count = 1;

        /* Newer variants come with two new bits in ICIC */
        if (resource_size(res) > 0x17)
                pd->flags |= IIC_FLAG_HAS_ICIC67;

        pm_runtime_enable(&dev->dev);
        pm_runtime_get_sync(&dev->dev);

        config = of_device_get_match_data(&dev->dev);
        if (config) {
                pd->clks_per_count = config->clks_per_count;
                ret = config->setup(pd);
        } else {
                ret = sh_mobile_i2c_init(pd);
        }

        pm_runtime_put_sync(&dev->dev);
        if (ret)
                return ret;

        /* Init DMA */
        sg_init_table(&pd->sg, 1);
        pd->dma_direction = DMA_NONE;
        pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

        /* setup the private data */
        adap = &pd->adap;
        i2c_set_adapdata(adap, pd);

        adap->owner = THIS_MODULE;
        adap->algo = &sh_mobile_i2c_algorithm;
        adap->quirks = &sh_mobile_i2c_quirks;
        adap->dev.parent = &dev->dev;
        adap->retries = 5;
        adap->nr = dev->id;
        adap->dev.of_node = dev->dev.of_node;

        strlcpy(adap->name, dev->name, sizeof(adap->name));

        spin_lock_init(&pd->lock);
        init_waitqueue_head(&pd->wait);

        ret = i2c_add_numbered_adapter(adap);
        if (ret < 0) {
                sh_mobile_i2c_release_dma(pd);
                return ret;
        }

        dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

        return 0;
}
static int sh_mobile_i2c_remove(struct platform_device *dev)
{
        struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

        i2c_del_adapter(&pd->adap);
        sh_mobile_i2c_release_dma(pd);
        pm_runtime_disable(&dev->dev);
        return 0;
}
static struct platform_driver sh_mobile_i2c_driver = {
        .driver         = {
                .name           = "i2c-sh_mobile",
                .of_match_table = sh_mobile_i2c_dt_ids,
        },
        .probe          = sh_mobile_i2c_probe,
        .remove         = sh_mobile_i2c_remove,
};
static int __init sh_mobile_i2c_adap_init(void)
{
        return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
        platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);
MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm and Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");