// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

/* Transmit operation:                                                      */
/*                                                                          */
/* 0 byte transmit                                                          */
/* BUS:     S     A8     ACK   P(*)                                         */
/*                                                                          */
/* 1 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT         WAIT                                       */
/*                                                                          */
/* 2 byte transmit                                                          */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT         WAIT          WAIT                         */
/* ICDR:      A8    D8(1)        D8(2)                                      */
/*                                                                          */
/* 3 bytes or more, +---------+ gets repeated                               */

/* Receive operation:                                                       */
/*                                                                          */
/* 0 byte receive - not supported since slave may hold SDA low              */
/*                                                                          */
/* 1 byte receive       [TX] | [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*)                           */
/* IRQ:       DTE   WAIT     |   WAIT     DTE                               */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|           0xc0                               */
/* ICDR:      A8             | D8(1)                                        */
/*                                                                          */
/* 2 byte receive        [TX]| [RX]                                         */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*)             */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT     DTE                 */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|           0xc0                               */
/* ICDR:      A8             | D8(1)    D8(2)                               */
/*                                                                          */
/* 3 byte receive       [TX] | [RX]                                 (*)     */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK   P  */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT          WAIT     DTE   */
/* ICIC:      -DTE           |   +DTE                                       */
/* ICCR:      0x94       0x81|           0xc0                               */
/* ICDR:      A8             | D8(1)    D8(2)   D8(3)                       */
/*                                                                          */
/* 4 bytes or more, this part is repeated        +---------+                */

/* Interrupt order and BUSY flag                                            */
/*                                                                          */
/* SDA   ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/               */
/* SCL        \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/                 */
/*                                                                          */
/*             S   D7  D6  D5  D4  D3  D2  D1  D0             P(*)          */
/*                                                                          */
/* WAIT IRQ  ________________________________/            \___________     */
/* TACK IRQ  ____________________________________/         \_______        */
/* DTE  IRQ  __________________________________________/    \_             */
/* AL   IRQ  XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX              */
/* BUSY      _______________________________________________               */
/*                                                                          */
/* (*) The STOP condition is only sent by the master at the end of the last */
/*     I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY    */
/*     bit is only cleared after the STOP condition, so, between messages   */
/*     we have to poll for the DTE bit.                                     */

enum sh_mobile_i2c_op {
	OP_START = 0,
	OP_TX_FIRST,
	OP_TX,
	OP_TX_STOP,
	OP_TX_TO_RX,
	OP_RX,
	OP_RX_STOP,
	OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
	struct device *dev;
	void __iomem *reg;
	struct i2c_adapter adap;
	unsigned long bus_speed;
	unsigned int clks_per_count;
	struct clk *clk;
	u_int8_t icic;
	u_int8_t flags;
	u_int16_t iccl;
	u_int16_t icch;

	spinlock_t lock;
	wait_queue_head_t wait;
	struct i2c_msg *msg;
	int pos;
	int sr;
	bool send_stop;
	bool stop_after_dma;
	bool atomic_xfer;

	struct resource *res;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct scatterlist sg;
	enum dma_data_direction dma_direction;
	u8 *dma_buf;
};

struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};

#define IIC_FLAG_HAS_ICIC67	(1 << 0)

/* Register offsets */

#define ICCR_ICE	0x80
#define ICCR_RACK	0x40
#define ICCR_TRS	0x10
#define ICCR_BBSY	0x04
#define ICCR_SCP	0x01

#define ICSR_SCLM	0x80
#define ICSR_SDAM	0x40
#define ICSR_BUSY	0x10
#define ICSR_AL		0x08
#define ICSR_TACK	0x04
#define ICSR_WAIT	0x02
#define ICSR_DTE	0x01
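
/*
 * In addition to the hardware ICSR bits above, the driver keeps a private
 * SW_DONE flag in pd->sr (outside the 8-bit hardware range).  The interrupt
 * handler sets it once a message has been fully handled, so the transfer
 * path can wait on ICSR_TACK | SW_DONE.
 */
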
#define ICIC_ICCLB8	0x80
#define ICIC_ICCHB8	0x40
#define ICIC_TDMAE	0x20
#define ICIC_RDMAE	0x10
#define ICIC_ALE	0x08
#define ICIC_TACKE	0x04
#define ICIC_WAITE	0x02
#define ICIC_DTEE	0x01

#define ICSTART_ICSTART	0x10

static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
	if (offs == ICIC)
		data |= pd->icic;

	iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}

static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCL >= COUNT_CLK * (tLOW + tf)
	 *
	 * SH-Mobile IIC hardware starts counting the LOW period of
	 * the SCL signal (tLOW) as soon as it pulls the SCL line.
	 * In order to meet the tLOW timing spec, we need to take into
	 * account the fall time of SCL signal (tf).  Default tf value
	 * should be 0.3 us, for safety.
	 */
	return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}

static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCH >= COUNT_CLK * (tHIGH + tf)
	 *
	 * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
	 * and can ignore it.  SH-Mobile IIC controller starts counting
	 * the HIGH period of the SCL signal (tHIGH) after the SCL input
	 * voltage increases at VIH.
	 *
	 * Afterward it turned out calculating ICCH using only tHIGH spec
	 * will result in violation of the tHD;STA timing spec.  We need
	 * to take into account the fall time of SDA signal (tf) at START
	 * condition, in order to meet both tHIGH and tHD;STA specs.
	 */
	return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}
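
/*
 * Units in the two helpers above: count_khz is the counting clock in kHz,
 * tLOW/tHIGH/tf are in tenths of a microsecond, so count_khz * t / 10000
 * yields clock ticks and the +5000 rounds to the nearest tick.  As an
 * illustrative example (numbers chosen here, not from a datasheet): with a
 * 10 MHz counting clock (count_khz = 10000) and Standard-mode tLOW = 4.7 us
 * (47) plus tf = 0.3 us (3), ICCL = (10000 * 50 + 5000) / 10000 = 50 ticks.
 */
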
static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
	u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

	if (pd->iccl > max_val || pd->icch > max_val) {
		dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
			pd->iccl, pd->icch);
		return -EINVAL;
	}

	/* one more bit of ICCL in ICIC */
	if (pd->iccl & 0x100)
		pd->icic |= ICIC_ICCLB8;
	else
		pd->icic &= ~ICIC_ICCLB8;

	/* one more bit of ICCH in ICIC */
	if (pd->icch & 0x100)
		pd->icic |= ICIC_ICCHB8;
	else
		pd->icic &= ~ICIC_ICCHB8;

	dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
	return 0;
}

static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long i2c_clk_khz;
	u32 tHIGH, tLOW, tf;

	i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

	if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
		tLOW	= 47;	/* tLOW = 4.7 us */
		tHIGH	= 40;	/* tHD;STA = tHIGH = 4.0 us */
		tf	= 3;	/* tf = 0.3 us */
	} else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
		tLOW	= 13;	/* tLOW = 1.3 us */
		tHIGH	= 6;	/* tHD;STA = tHIGH = 0.6 us */
		tf	= 3;	/* tf = 0.3 us */
	} else {
		dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
			pd->bus_speed);
		return -EINVAL;
	}

	pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
	pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

	return sh_mobile_i2c_check_timing(pd);
}

static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long clks_per_cycle;

	/* L = 5, H = 4, L + H = 9 */
	clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
	pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
	pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

	return sh_mobile_i2c_check_timing(pd);
}
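
/*
 * The v2 calculation splits one SCL cycle 5:4 between the LOW and HIGH
 * phases.  Illustrative example (clock values chosen here): a 50 MHz input
 * clock and a 100 kHz bus give clks_per_cycle = 500, so with clks_per_count
 * of 1 this yields ICCL = 500 * 5 / 9 - 1 = 276 and ICCH = 500 * 4 / 9 - 5
 * = 217, both within the 9-bit range that sh_mobile_i2c_check_timing()
 * permits when IIC_FLAG_HAS_ICIC67 is set.
 */
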
static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
	unsigned char ret = 0;
	unsigned long flags;

	dev_dbg(pd->dev, "op %d\n", op);

	spin_lock_irqsave(&pd->lock, flags);

	switch (op) {
	case OP_START: /* issue start and trigger DTE interrupt */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_FIRST: /* disable DTE interrupt and write client address */
		iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
		break;
	case OP_TX: /* write data */
		iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
		break;
	case OP_TX_STOP: /* issue a stop (or rep_start) */
		iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
					       : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_TO_RX: /* select read mode */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
		break;
	case OP_RX: /* just read data */
		ret = iic_rd(pd, ICDR);
		break;
	case OP_RX_STOP: /* enable DTE interrupt, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		ret = iic_rd(pd, ICDR);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	}

	spin_unlock_irqrestore(&pd->lock, flags);

	dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);

	return ret;
}
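
/*
 * The ICCR values written above map onto the protocol diagrams near the top
 * of this file: ICE | TRS | BBSY = 0x94 (start, transmit mode),
 * ICE | TRS = 0x90 (transmit without BBSY, i.e. stop or repeated start),
 * ICE | SCP = 0x81 (switch to receive mode) and ICE | RACK = 0xc0 (receive
 * path leading up to the stop).
 */
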
static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
	if (pd->pos == pd->msg->len) {
		i2c_op(pd, OP_TX_STOP);
		return 1;
	}

	if (pd->pos == -1)
		i2c_op(pd, OP_TX_FIRST);
	else
		i2c_op(pd, OP_TX);

	pd->pos++;
	return 0;
}

static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
	int real_pos;

	/* switch from TX (address) to RX (data) adds two interrupts */
	real_pos = pd->pos - 2;

	if (pd->pos == -1) {
		i2c_op(pd, OP_TX_FIRST);
	} else if (pd->pos == 0) {
		i2c_op(pd, OP_TX_TO_RX);
	} else if (pd->pos == pd->msg->len) {
		if (pd->stop_after_dma) {
			/* Simulate PIO end condition after DMA transfer */
			i2c_op(pd, OP_RX_STOP);
			pd->pos++;
			goto done;
		}

		if (real_pos < 0)
			i2c_op(pd, OP_RX_STOP);
		else
			pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
	} else if (real_pos >= 0) {
		pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
	}

 done:
	pd->pos++;
	return pd->pos == (pd->msg->len + 2);
}
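
/*
 * Both ISR helpers above return nonzero once the current message has been
 * fully processed; for RX that is pos == len + 2 because the switch from
 * the address phase to data adds two interrupts, as noted above.  The main
 * interrupt handler uses this value to flag completion and wake the
 * waiting transfer thread.
 */
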
static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
	struct sh_mobile_i2c_data *pd = dev_id;
	unsigned char sr;
	int wakeup = 0;

	sr = iic_rd(pd, ICSR);
	pd->sr |= sr; /* remember state */

	dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
		(pd->msg->flags & I2C_M_RD) ? "read" : "write",
		pd->pos, pd->msg->len);

	/* Kick off TxDMA after preface was done */
	if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
		iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
	else if (sr & (ICSR_AL | ICSR_TACK))
		/* don't interrupt transaction - continue to issue stop */
		iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
	else if (pd->msg->flags & I2C_M_RD)
		wakeup = sh_mobile_i2c_isr_rx(pd);
	else
		wakeup = sh_mobile_i2c_isr_tx(pd);

	/* Kick off RxDMA after preface was done */
	if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
		iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

	if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
		iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

	if (wakeup) {
		pd->sr |= SW_DONE;
		if (!pd->atomic_xfer)
			wake_up(&pd->wait);
	}

	/* defeat write posting to avoid spurious WAIT interrupts */
	iic_rd(pd, ICSR);

	return IRQ_HANDLED;
}

static void sh_mobile_i2c_dma_unmap(struct sh_mobile_i2c_data *pd)
{
	struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
				? pd->dma_rx : pd->dma_tx;

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}

static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd)
{
	if (pd->dma_direction == DMA_NONE)
		return;
	else if (pd->dma_direction == DMA_FROM_DEVICE)
		dmaengine_terminate_all(pd->dma_rx);
	else if (pd->dma_direction == DMA_TO_DEVICE)
		dmaengine_terminate_all(pd->dma_tx);

	sh_mobile_i2c_dma_unmap(pd);
}

static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_dma_unmap(pd);
	pd->pos = pd->msg->len;
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}
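
/*
 * When the DMA engine completes, the callback above advances pd->pos to the
 * end of the message and sets stop_after_dma, so the next interrupt takes
 * the end-of-message path in the ISR helpers (which then issue the stop
 * condition), and the DMA request bits are cleared from ICIC.
 */
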
static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
				enum dma_transfer_direction dir, dma_addr_t port_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg;
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	memset(&cfg, 0, sizeof(cfg));
	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}

static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd);
		return;
	}

	dma_async_issue_pending(chan);
}
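
/*
 * Every failure path above simply falls back to interrupt-driven PIO: the
 * function returns without the ICIC DMA enable bits ever being set (or
 * after cleaning up a partially prepared transfer), and the regular ISR
 * state machine then moves the data byte by byte.
 */
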
static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_start)
{
	if (do_start) {
		/* Initialize channel registers */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable channel and configure rx ack */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Set the clock */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;
	pd->sr = 0;

	if (pd->atomic_xfer)
		return;

	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupts to begin with */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}

static int poll_dte(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		if (val & ICSR_DTE)
			break;

		if (val & ICSR_TACK)
			return -ENXIO;

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int poll_busy(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

		/* the interrupt handler may wake us up before the
		 * transfer is finished, so poll the hardware
		 * until we're done.
		 */
		if (!(val & ICSR_BUSY)) {
			/* handle missing acknowledge and arbitration lost */
			val |= pd->sr;
			if (val & ICSR_TACK)
				return -ENXIO;
			if (val & ICSR_AL)
				return -EAGAIN;
			break;
		}

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}
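
/*
 * As noted at the top of the file, the BUSY bit only clears after a STOP
 * condition, so poll_busy() is used once a stop has been requested, while
 * poll_dte() is used between messages, where only the DTE bit can signal
 * that the controller is ready for the next repeated start.
 */
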
static int sh_mobile_xfer(struct sh_mobile_i2c_data *pd,
			  struct i2c_msg *msgs, int num)
{
	struct i2c_msg *msg;
	int err = 0;
	int i;
	long time_left;

	/* Wake up device and enable clock */
	pm_runtime_get_sync(pd->dev);

	/* Process all messages */
	for (i = 0; i < num; i++) {
		bool do_start = pd->send_stop || !i;

		msg = &msgs[i];
		pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
		pd->stop_after_dma = false;

		start_ch(pd, msg, do_start);

		if (do_start)
			i2c_op(pd, OP_START);

		if (pd->atomic_xfer) {
			unsigned long j = jiffies + pd->adap.timeout;

			time_left = time_before_eq(jiffies, j);
			while (time_left &&
			       !(pd->sr & (ICSR_TACK | SW_DONE))) {
				unsigned char sr = iic_rd(pd, ICSR);

				if (sr & (ICSR_AL | ICSR_TACK |
					  ICSR_WAIT | ICSR_DTE)) {
					sh_mobile_i2c_isr(0, pd);
					udelay(150);
				} else {
					cpu_relax();
				}

				time_left = time_before_eq(jiffies, j);
			}
		} else {
			/* The interrupt handler takes care of the rest... */
			time_left = wait_event_timeout(pd->wait,
					pd->sr & (ICSR_TACK | SW_DONE),
					pd->adap.timeout);

			/* 'stop_after_dma' tells if DMA xfer was complete */
			i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg,
						 pd->stop_after_dma);
		}

		if (!time_left) {
			dev_err(pd->dev, "Transfer request timed out\n");
			if (pd->dma_direction != DMA_NONE)
				sh_mobile_i2c_cleanup_dma(pd);

			err = -ETIMEDOUT;
			break;
		}

		if (pd->send_stop)
			err = poll_busy(pd);
		else
			err = poll_dte(pd);
		if (err < 0)
			break;
	}

	/* Disable channel */
	iic_wr(pd, ICCR, ICCR_SCP);

	/* Disable clock and mark device as idle */
	pm_runtime_put_sync(pd->dev);

	return err ?: num;
}
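
/*
 * In the atomic case (used by the I2C core when interrupts may already be
 * disabled, e.g. for a final PMIC write at power-off) the loop above polls
 * the status register and invokes the interrupt handler by hand instead of
 * sleeping on the wait queue; DMA is likewise skipped for atomic transfers.
 */
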
static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = false;
	return sh_mobile_xfer(pd, msgs, num);
}

static int sh_mobile_i2c_xfer_atomic(struct i2c_adapter *adapter,
				     struct i2c_msg *msgs,
				     int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = true;
	return sh_mobile_xfer(pd, msgs, num);
}

static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality		= sh_mobile_i2c_func,
	.master_xfer		= sh_mobile_i2c_xfer,
	.master_xfer_atomic	= sh_mobile_i2c_xfer_atomic,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};

/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_SCP);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_init(pd);
}

static const struct sh_mobile_dt_config default_dt_config = {
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
	.setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);

static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}

	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}

static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct resource *res;
	resource_size_t n;
	int k = 0, ret;

	while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
		for (n = res->start; n <= res->end; n++) {
			ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
				return ret;
			}
		}
		k++;
	}

	return k > 0 ? 0 : -ENOENT;
}

static int sh_mobile_i2c_probe(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd;
	struct i2c_adapter *adap;
	struct resource *res;
	const struct sh_mobile_dt_config *config;
	u32 bus_speed;
	int ret;

	pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->clk = devm_clk_get(&dev->dev, NULL);
	if (IS_ERR(pd->clk)) {
		dev_err(&dev->dev, "cannot get clock\n");
		return PTR_ERR(pd->clk);
	}

	ret = sh_mobile_i2c_hook_irqs(dev, pd);
	if (ret)
		return ret;

	pd->dev = &dev->dev;
	platform_set_drvdata(dev, pd);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);

	pd->res = res;
	pd->reg = devm_ioremap_resource(&dev->dev, res);
	if (IS_ERR(pd->reg))
		return PTR_ERR(pd->reg);

	ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
	pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
	pd->clks_per_count = 1;

	/* Newer variants come with two new bits in ICIC */
	if (resource_size(res) > 0x17)
		pd->flags |= IIC_FLAG_HAS_ICIC67;

	pm_runtime_enable(&dev->dev);
	pm_runtime_get_sync(&dev->dev);

	config = of_device_get_match_data(&dev->dev);
	if (config) {
		pd->clks_per_count = config->clks_per_count;
		ret = config->setup(pd);
	} else {
		ret = sh_mobile_i2c_init(pd);
	}

	pm_runtime_put_sync(&dev->dev);
	if (ret)
		return ret;

	/* Init DMA */
	sg_init_table(&pd->sg, 1);
	pd->dma_direction = DMA_NONE;
	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

	/* setup the private data */
	adap = &pd->adap;
	i2c_set_adapdata(adap, pd);

	adap->owner = THIS_MODULE;
	adap->algo = &sh_mobile_i2c_algorithm;
	adap->quirks = &sh_mobile_i2c_quirks;
	adap->dev.parent = &dev->dev;
	adap->nr = dev->id;
	adap->dev.of_node = dev->dev.of_node;

	strlcpy(adap->name, dev->name, sizeof(adap->name));

	spin_lock_init(&pd->lock);
	init_waitqueue_head(&pd->wait);

	ret = i2c_add_numbered_adapter(adap);
	if (ret < 0) {
		sh_mobile_i2c_release_dma(pd);
		return ret;
	}

	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

	return 0;
}
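
/*
 * Note that the DMA channels are not requested at probe time: they start
 * out as ERR_PTR(-EPROBE_DEFER) and are only looked up lazily on the first
 * DMA-capable transfer, and sh_mobile_i2c_release_dma() resets them to the
 * same deferred state when the adapter is torn down.
 */
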
static int sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int sh_mobile_i2c_suspend(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_suspended(&pd->adap);
	return 0;
}

static int sh_mobile_i2c_resume(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_resumed(&pd->adap);
	return 0;
}

static const struct dev_pm_ops sh_mobile_i2c_pm_ops = {
	SET_NOIRQ_SYSTEM_SLEEP_PM_OPS(sh_mobile_i2c_suspend,
				      sh_mobile_i2c_resume)
};

#define DEV_PM_OPS	(&sh_mobile_i2c_pm_ops)
#else
#define DEV_PM_OPS	NULL
#endif /* CONFIG_PM_SLEEP */

static struct platform_driver sh_mobile_i2c_driver = {
	.driver		= {
		.name		= "i2c-sh_mobile",
		.of_match_table	= sh_mobile_i2c_dt_ids,
		.pm		= DEV_PM_OPS,
	},
	.probe		= sh_mobile_i2c_probe,
	.remove		= sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm");
MODULE_AUTHOR("Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");