// SPDX-License-Identifier: GPL-2.0-only
/*
 * Apple DART (Device Address Resolution Table) IOMMU driver
 *
 * Copyright (C) 2021 The Asahi Linux Contributors
 *
 * Based on arm/arm-smmu/arm-smmu.c and arm/arm-smmu-v3/arm-smmu-v3.c
 *  Copyright (C) 2013 ARM Limited
 *  Copyright (C) 2015 ARM Limited
 * and on exynos-iommu.c
 *  Copyright (c) 2011,2016 Samsung Electronics Co., Ltd.
 */

#include <linux/atomic.h>
#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/dev_printk.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_iommu.h>
#include <linux/of_platform.h>
#include <linux/pci.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/swab.h>
#include <linux/types.h>

#include "dma-iommu.h"

#define DART_MAX_STREAMS 256
#define DART_MAX_TTBR 4
#define MAX_DARTS_PER_DEVICE 2

/* Common registers */

#define DART_PARAMS1 0x00
#define DART_PARAMS1_PAGE_SHIFT GENMASK(27, 24)

#define DART_PARAMS2 0x04
#define DART_PARAMS2_BYPASS_SUPPORT BIT(0)

/* T8020/T6000 registers */

#define DART_T8020_STREAM_COMMAND 0x20
#define DART_T8020_STREAM_COMMAND_BUSY BIT(2)
#define DART_T8020_STREAM_COMMAND_INVALIDATE BIT(20)

#define DART_T8020_STREAM_SELECT 0x34

#define DART_T8020_ERROR 0x40
#define DART_T8020_ERROR_STREAM GENMASK(27, 24)
#define DART_T8020_ERROR_CODE GENMASK(11, 0)
#define DART_T8020_ERROR_FLAG BIT(31)

#define DART_T8020_ERROR_READ_FAULT BIT(4)
#define DART_T8020_ERROR_WRITE_FAULT BIT(3)
#define DART_T8020_ERROR_NO_PTE BIT(2)
#define DART_T8020_ERROR_NO_PMD BIT(1)
#define DART_T8020_ERROR_NO_TTBR BIT(0)

#define DART_T8020_CONFIG 0x60
#define DART_T8020_CONFIG_LOCK BIT(15)

#define DART_STREAM_COMMAND_BUSY_TIMEOUT 100

#define DART_T8020_ERROR_ADDR_HI 0x54
#define DART_T8020_ERROR_ADDR_LO 0x50

#define DART_T8020_STREAMS_ENABLE 0xfc

#define DART_T8020_TCR 0x100
#define DART_T8020_TCR_TRANSLATE_ENABLE BIT(7)
#define DART_T8020_TCR_BYPASS_DART BIT(8)
#define DART_T8020_TCR_BYPASS_DAPF BIT(12)

#define DART_T8020_TTBR 0x200
#define DART_T8020_USB4_TTBR 0x400
#define DART_T8020_TTBR_VALID BIT(31)
#define DART_T8020_TTBR_ADDR_FIELD_SHIFT 0
#define DART_T8020_TTBR_SHIFT 12

/* T8110 registers */

#define DART_T8110_PARAMS3 0x08
#define DART_T8110_PARAMS3_PA_WIDTH GENMASK(29, 24)
#define DART_T8110_PARAMS3_VA_WIDTH GENMASK(21, 16)
#define DART_T8110_PARAMS3_VER_MAJ GENMASK(15, 8)
#define DART_T8110_PARAMS3_VER_MIN GENMASK(7, 0)

#define DART_T8110_PARAMS4 0x0c
#define DART_T8110_PARAMS4_NUM_CLIENTS GENMASK(24, 16)
#define DART_T8110_PARAMS4_NUM_SIDS GENMASK(8, 0)

#define DART_T8110_TLB_CMD 0x80
#define DART_T8110_TLB_CMD_BUSY BIT(31)
#define DART_T8110_TLB_CMD_OP GENMASK(10, 8)
#define DART_T8110_TLB_CMD_OP_FLUSH_ALL 0
#define DART_T8110_TLB_CMD_OP_FLUSH_SID 1
#define DART_T8110_TLB_CMD_STREAM GENMASK(7, 0)

#define DART_T8110_ERROR 0x100
#define DART_T8110_ERROR_STREAM GENMASK(27, 20)
#define DART_T8110_ERROR_CODE GENMASK(14, 0)
#define DART_T8110_ERROR_FLAG BIT(31)

#define DART_T8110_ERROR_MASK 0x104

#define DART_T8110_ERROR_READ_FAULT BIT(5)
#define DART_T8110_ERROR_WRITE_FAULT BIT(4)
#define DART_T8110_ERROR_NO_PTE BIT(3)
#define DART_T8110_ERROR_NO_PMD BIT(2)
#define DART_T8110_ERROR_NO_PGD BIT(1)
#define DART_T8110_ERROR_NO_TTBR BIT(0)

#define DART_T8110_ERROR_ADDR_LO 0x170
#define DART_T8110_ERROR_ADDR_HI 0x174

#define DART_T8110_PROTECT 0x200
#define DART_T8110_UNPROTECT 0x204
#define DART_T8110_PROTECT_LOCK 0x208
#define DART_T8110_PROTECT_TTBR_TCR BIT(0)

#define DART_T8110_ENABLE_STREAMS 0xc00
#define DART_T8110_DISABLE_STREAMS 0xc20

#define DART_T8110_TCR 0x1000
#define DART_T8110_TCR_REMAP GENMASK(11, 8)
#define DART_T8110_TCR_REMAP_EN BIT(7)
#define DART_T8110_TCR_BYPASS_DAPF BIT(2)
#define DART_T8110_TCR_BYPASS_DART BIT(1)
#define DART_T8110_TCR_TRANSLATE_ENABLE BIT(0)

#define DART_T8110_TTBR 0x1400
#define DART_T8110_TTBR_VALID BIT(0)
#define DART_T8110_TTBR_ADDR_FIELD_SHIFT 2
#define DART_T8110_TTBR_SHIFT 14

#define DART_TCR(dart, sid) ((dart)->hw->tcr + ((sid) << 2))
#define DART_TTBR(dart, sid, idx) ((dart)->hw->ttbr + \
                                   (((dart)->hw->ttbr_count * (sid)) << 2) + \
                                   ((idx) << 2))

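/*
 * Worked example (illustrative only; the stream id and ttbr_count are
 * hypothetical and not taken from any specific SoC): with the T8020 layout
 * above and a DART whose hw->ttbr_count is 4, stream 5 has its TCR at
 * DART_T8020_TCR + (5 << 2) = 0x114 and its second TTBR (idx == 1) at
 * DART_T8020_TTBR + ((4 * 5) << 2) + (1 << 2) = 0x254.
 */
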
struct apple_dart_stream_map;

enum dart_type {
        DART_T8020,
        DART_T6000,
        DART_T8110,
};

struct apple_dart_hw {
        enum dart_type type;

        irqreturn_t (*irq_handler)(int irq, void *dev);
        int (*invalidate_tlb)(struct apple_dart_stream_map *stream_map);

        u32 oas;
        enum io_pgtable_fmt fmt;

        int max_sid_count;

        u64 lock;
        u64 lock_bit;

        u64 error;

        u64 enable_streams;

        u64 tcr;
        u64 tcr_enabled;
        u64 tcr_disabled;
        u64 tcr_bypass;

        u64 ttbr;
        u64 ttbr_valid;
        u64 ttbr_addr_field_shift;
        u64 ttbr_shift;
        int ttbr_count;
};

/*
 * Private structure associated with each DART device.
 *
 * @dev: device struct
 * @hw: SoC-specific hardware data
 * @regs: mapped MMIO region
 * @irq: interrupt number, can be shared with other DARTs
 * @clks: clocks associated with this DART
 * @num_clks: number of @clks
 * @lock: lock for hardware operations involving this dart
 * @pgsize: pagesize supported by this DART
 * @supports_bypass: indicates if this DART supports bypass mode
 * @sid2group: maps stream ids to iommu_groups
 * @iommu: iommu core device
 */
struct apple_dart {
        struct device *dev;
        const struct apple_dart_hw *hw;

        void __iomem *regs;

        int irq;
        struct clk_bulk_data *clks;
        int num_clks;

        spinlock_t lock;

        u32 ias;
        u32 oas;
        u32 pgsize;
        u32 num_streams;
        u32 supports_bypass : 1;

        struct iommu_group *sid2group[DART_MAX_STREAMS];
        struct iommu_device iommu;

        u32 save_tcr[DART_MAX_STREAMS];
        u32 save_ttbr[DART_MAX_STREAMS][DART_MAX_TTBR];
};

/*
 * Convenience struct to identify streams.
 *
 * The normal variant is used inside apple_dart_master_cfg which isn't written
 * to concurrently.
 * The atomic variant is used inside apple_dart_domain where we have to guard
 * against races from potential parallel calls to attach/detach_device.
 * Note that even inside the atomic variant the apple_dart pointer is not
 * protected: This pointer is initialized once under the domain init mutex
 * and never changed again afterwards. Devices with different dart pointers
 * cannot be attached to the same domain.
 *
 * @dart dart pointer
 * @sid stream id bitmap
 */
struct apple_dart_stream_map {
        struct apple_dart *dart;
        DECLARE_BITMAP(sidmap, DART_MAX_STREAMS);
};

struct apple_dart_atomic_stream_map {
        struct apple_dart *dart;
        atomic_long_t sidmap[BITS_TO_LONGS(DART_MAX_STREAMS)];
};

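/*
 * Sketch of how the two variants interact (this mirrors
 * apple_dart_domain_flush_tlb() further down; "atomic_map" is just a local
 * name used for illustration): the atomic per-domain bitmap is snapshotted
 * into a plain stream_map before it is handed to the invalidate_tlb callback:
 *
 *	stream_map.dart = atomic_map->dart;
 *	for (j = 0; j < BITS_TO_LONGS(stream_map.dart->num_streams); j++)
 *		stream_map.sidmap[j] = atomic_long_read(&atomic_map->sidmap[j]);
 *	stream_map.dart->hw->invalidate_tlb(&stream_map);
 */
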
/*
 * This structure is attached to each iommu domain handled by a DART.
 *
 * @pgtbl_ops: pagetable ops allocated by io-pgtable
 * @finalized: true if the domain has been completely initialized
 * @init_lock: protects domain initialization
 * @stream_maps: streams attached to this domain (valid for DMA/UNMANAGED only)
 * @domain: core iommu domain pointer
 */
struct apple_dart_domain {
        struct io_pgtable_ops *pgtbl_ops;

        bool finalized;
        struct mutex init_lock;
        struct apple_dart_atomic_stream_map stream_maps[MAX_DARTS_PER_DEVICE];

        struct iommu_domain domain;
};

/*
 * This structure is attached to devices with dev_iommu_priv_set() on of_xlate
 * and contains a list of streams bound to this device.
 * So far the worst case seen is a single device with two streams
 * from different darts, such that this simple static array is enough.
 *
 * @streams: streams for this device
 */
struct apple_dart_master_cfg {
        struct apple_dart_stream_map stream_maps[MAX_DARTS_PER_DEVICE];
};

/*
 * Helper macro to iterate over apple_dart_master_cfg.stream_maps and
 * apple_dart_domain.stream_maps
 *
 * @i int used as loop variable
 * @base pointer to base struct (apple_dart_master_cfg or apple_dart_domain)
 * @stream pointer to the apple_dart_streams struct for each loop iteration
 */
#define for_each_stream_map(i, base, stream_map) \
        for (i = 0, stream_map = &(base)->stream_maps[0]; \
             i < MAX_DARTS_PER_DEVICE && stream_map->dart; \
             stream_map = &(base)->stream_maps[++i])

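/*
 * Typical use, as seen throughout this driver (cfg and stream_map are local
 * variables of the caller):
 *
 *	struct apple_dart_stream_map *stream_map;
 *	int i;
 *
 *	for_each_stream_map(i, cfg, stream_map)
 *		apple_dart_hw_enable_translation(stream_map);
 *
 * The loop stops at the first slot with a NULL dart pointer, so only
 * populated entries are visited.
 */
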
static struct platform_driver apple_dart_driver;
static const struct iommu_ops apple_dart_iommu_ops;

static struct apple_dart_domain *to_dart_domain(struct iommu_domain *dom)
{
        return container_of(dom, struct apple_dart_domain, domain);
}

static void
apple_dart_hw_enable_translation(struct apple_dart_stream_map *stream_map)
{
        struct apple_dart *dart = stream_map->dart;
        int sid;

        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams)
                writel(dart->hw->tcr_enabled, dart->regs + DART_TCR(dart, sid));
}

static void apple_dart_hw_disable_dma(struct apple_dart_stream_map *stream_map)
{
        struct apple_dart *dart = stream_map->dart;
        int sid;

        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams)
                writel(dart->hw->tcr_disabled, dart->regs + DART_TCR(dart, sid));
}

static void
apple_dart_hw_enable_bypass(struct apple_dart_stream_map *stream_map)
{
        struct apple_dart *dart = stream_map->dart;
        int sid;

        WARN_ON(!stream_map->dart->supports_bypass);
        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams)
                writel(dart->hw->tcr_bypass,
                       dart->regs + DART_TCR(dart, sid));
}

static void apple_dart_hw_set_ttbr(struct apple_dart_stream_map *stream_map,
                                   u8 idx, phys_addr_t paddr)
{
        struct apple_dart *dart = stream_map->dart;
        int sid;

        WARN_ON(paddr & ((1 << dart->hw->ttbr_shift) - 1));
        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams)
                writel(dart->hw->ttbr_valid |
                       (paddr >> dart->hw->ttbr_shift) << dart->hw->ttbr_addr_field_shift,
                       dart->regs + DART_TTBR(dart, sid, idx));
}

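/*
 * Illustrative example (the physical address is hypothetical, not taken from
 * real hardware): with the T8020 layout (ttbr_shift == 12,
 * ttbr_addr_field_shift == 0), a page table at physical address 0x8_0000_4000
 * is programmed as DART_T8020_TTBR_VALID | (0x800004000 >> 12) = 0x80800004.
 */
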
static void apple_dart_hw_clear_ttbr(struct apple_dart_stream_map *stream_map,
                                     u8 idx)
{
        struct apple_dart *dart = stream_map->dart;
        int sid;

        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams)
                writel(0, dart->regs + DART_TTBR(dart, sid, idx));
}

static void
apple_dart_hw_clear_all_ttbrs(struct apple_dart_stream_map *stream_map)
{
        int i;

        for (i = 0; i < stream_map->dart->hw->ttbr_count; ++i)
                apple_dart_hw_clear_ttbr(stream_map, i);
}

static int
apple_dart_t8020_hw_stream_command(struct apple_dart_stream_map *stream_map,
                                   u32 command)
{
        unsigned long flags;
        int ret, i;
        u32 command_reg;

        spin_lock_irqsave(&stream_map->dart->lock, flags);

        for (i = 0; i < BITS_TO_U32(stream_map->dart->num_streams); i++)
                writel(stream_map->sidmap[i],
                       stream_map->dart->regs + DART_T8020_STREAM_SELECT + 4 * i);
        writel(command, stream_map->dart->regs + DART_T8020_STREAM_COMMAND);

        ret = readl_poll_timeout_atomic(
                stream_map->dart->regs + DART_T8020_STREAM_COMMAND, command_reg,
                !(command_reg & DART_T8020_STREAM_COMMAND_BUSY), 1,
                DART_STREAM_COMMAND_BUSY_TIMEOUT);

        spin_unlock_irqrestore(&stream_map->dart->lock, flags);

        if (ret) {
                dev_err(stream_map->dart->dev,
                        "busy bit did not clear after command %x for streams %lx\n",
                        command, stream_map->sidmap[0]);
                return ret;
        }

        return 0;
}

static int
apple_dart_t8110_hw_tlb_command(struct apple_dart_stream_map *stream_map,
                                u32 command)
{
        struct apple_dart *dart = stream_map->dart;
        unsigned long flags;
        int ret = 0;
        int sid;

        spin_lock_irqsave(&dart->lock, flags);

        for_each_set_bit(sid, stream_map->sidmap, dart->num_streams) {
                u32 val = FIELD_PREP(DART_T8110_TLB_CMD_OP, command) |
                        FIELD_PREP(DART_T8110_TLB_CMD_STREAM, sid);
                writel(val, dart->regs + DART_T8110_TLB_CMD);

                ret = readl_poll_timeout_atomic(
                        dart->regs + DART_T8110_TLB_CMD, val,
                        !(val & DART_T8110_TLB_CMD_BUSY), 1,
                        DART_STREAM_COMMAND_BUSY_TIMEOUT);

                if (ret)
                        break;
        }

        spin_unlock_irqrestore(&dart->lock, flags);

        if (ret) {
                dev_err(stream_map->dart->dev,
                        "busy bit did not clear after command %x for stream %d\n",
                        command, sid);
                return ret;
        }

        return 0;
}

static int
apple_dart_t8020_hw_invalidate_tlb(struct apple_dart_stream_map *stream_map)
{
        return apple_dart_t8020_hw_stream_command(
                stream_map, DART_T8020_STREAM_COMMAND_INVALIDATE);
}

static int
apple_dart_t8110_hw_invalidate_tlb(struct apple_dart_stream_map *stream_map)
{
        return apple_dart_t8110_hw_tlb_command(
                stream_map, DART_T8110_TLB_CMD_OP_FLUSH_SID);
}

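/*
 * Note on the two invalidation paths above: the T8020 variant first writes
 * the stream bitmap to DART_T8020_STREAM_SELECT and then issues a single
 * INVALIDATE command for all selected streams, while the T8110 variant
 * issues one FLUSH_SID command per stream and polls the busy bit each time.
 */
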
static int apple_dart_hw_reset(struct apple_dart *dart)
{
        u32 config;
        struct apple_dart_stream_map stream_map;
        int i;

        config = readl(dart->regs + dart->hw->lock);
        if (config & dart->hw->lock_bit) {
                dev_err(dart->dev, "DART is locked down until reboot: %08x\n",
                        config);
                return -EINVAL;
        }

        stream_map.dart = dart;
        bitmap_zero(stream_map.sidmap, DART_MAX_STREAMS);
        bitmap_set(stream_map.sidmap, 0, dart->num_streams);
        apple_dart_hw_disable_dma(&stream_map);
        apple_dart_hw_clear_all_ttbrs(&stream_map);

        /* enable all streams globally since TCR is used to control isolation */
        for (i = 0; i < BITS_TO_U32(dart->num_streams); i++)
                writel(U32_MAX, dart->regs + dart->hw->enable_streams + 4 * i);

        /* clear any pending errors before the interrupt is unmasked */
        writel(readl(dart->regs + dart->hw->error), dart->regs + dart->hw->error);

        if (dart->hw->type == DART_T8110)
                writel(0, dart->regs + DART_T8110_ERROR_MASK);

        return dart->hw->invalidate_tlb(&stream_map);
}

static void apple_dart_domain_flush_tlb(struct apple_dart_domain *domain)
{
        int i, j;
        struct apple_dart_atomic_stream_map *domain_stream_map;
        struct apple_dart_stream_map stream_map;

        for_each_stream_map(i, domain, domain_stream_map) {
                stream_map.dart = domain_stream_map->dart;

                for (j = 0; j < BITS_TO_LONGS(stream_map.dart->num_streams); j++)
                        stream_map.sidmap[j] = atomic_long_read(&domain_stream_map->sidmap[j]);

                stream_map.dart->hw->invalidate_tlb(&stream_map);
        }
}

static void apple_dart_flush_iotlb_all(struct iommu_domain *domain)
{
        apple_dart_domain_flush_tlb(to_dart_domain(domain));
}

static void apple_dart_iotlb_sync(struct iommu_domain *domain,
                                  struct iommu_iotlb_gather *gather)
{
        apple_dart_domain_flush_tlb(to_dart_domain(domain));
}

static int apple_dart_iotlb_sync_map(struct iommu_domain *domain,
                                     unsigned long iova, size_t size)
{
        apple_dart_domain_flush_tlb(to_dart_domain(domain));
        return 0;
}

static phys_addr_t apple_dart_iova_to_phys(struct iommu_domain *domain,
                                           dma_addr_t iova)
{
        struct apple_dart_domain *dart_domain = to_dart_domain(domain);
        struct io_pgtable_ops *ops = dart_domain->pgtbl_ops;

        if (!ops)
                return 0;

        return ops->iova_to_phys(ops, iova);
}

static int apple_dart_map_pages(struct iommu_domain *domain, unsigned long iova,
                                phys_addr_t paddr, size_t pgsize,
                                size_t pgcount, int prot, gfp_t gfp,
                                size_t *mapped)
{
        struct apple_dart_domain *dart_domain = to_dart_domain(domain);
        struct io_pgtable_ops *ops = dart_domain->pgtbl_ops;

        if (!ops)
                return -ENODEV;

        return ops->map_pages(ops, iova, paddr, pgsize, pgcount, prot, gfp,
                              mapped);
}

static size_t apple_dart_unmap_pages(struct iommu_domain *domain,
                                     unsigned long iova, size_t pgsize,
                                     size_t pgcount,
                                     struct iommu_iotlb_gather *gather)
{
        struct apple_dart_domain *dart_domain = to_dart_domain(domain);
        struct io_pgtable_ops *ops = dart_domain->pgtbl_ops;

        return ops->unmap_pages(ops, iova, pgsize, pgcount, gather);
}

static void
apple_dart_setup_translation(struct apple_dart_domain *domain,
                             struct apple_dart_stream_map *stream_map)
{
        int i;
        struct io_pgtable_cfg *pgtbl_cfg =
                &io_pgtable_ops_to_pgtable(domain->pgtbl_ops)->cfg;

        for (i = 0; i < pgtbl_cfg->apple_dart_cfg.n_ttbrs; ++i)
                apple_dart_hw_set_ttbr(stream_map, i,
                                       pgtbl_cfg->apple_dart_cfg.ttbr[i]);
        for (; i < stream_map->dart->hw->ttbr_count; ++i)
                apple_dart_hw_clear_ttbr(stream_map, i);

        apple_dart_hw_enable_translation(stream_map);
        stream_map->dart->hw->invalidate_tlb(stream_map);
}

static int apple_dart_finalize_domain(struct apple_dart_domain *dart_domain,
                                      struct apple_dart_master_cfg *cfg)
{
        struct apple_dart *dart = cfg->stream_maps[0].dart;
        struct io_pgtable_cfg pgtbl_cfg;
        int ret = 0;
        int i, j;

        if (dart->pgsize > PAGE_SIZE)
                return -EINVAL;

        mutex_lock(&dart_domain->init_lock);

        if (dart_domain->finalized)
                goto done;

        for (i = 0; i < MAX_DARTS_PER_DEVICE; ++i) {
                dart_domain->stream_maps[i].dart = cfg->stream_maps[i].dart;
                for (j = 0; j < BITS_TO_LONGS(dart->num_streams); j++)
                        atomic_long_set(&dart_domain->stream_maps[i].sidmap[j],
                                        cfg->stream_maps[i].sidmap[j]);
        }

        pgtbl_cfg = (struct io_pgtable_cfg){
                .pgsize_bitmap = dart->pgsize,
                .ias = dart->ias,
                .oas = dart->oas,
                .coherent_walk = 1,
                .iommu_dev = dart->dev,
        };

        dart_domain->pgtbl_ops = alloc_io_pgtable_ops(dart->hw->fmt, &pgtbl_cfg,
                                                      &dart_domain->domain);
        if (!dart_domain->pgtbl_ops) {
                ret = -ENOMEM;
                goto done;
        }

        dart_domain->domain.pgsize_bitmap = pgtbl_cfg.pgsize_bitmap;
        dart_domain->domain.geometry.aperture_start = 0;
        dart_domain->domain.geometry.aperture_end =
                (dma_addr_t)DMA_BIT_MASK(dart->ias);
        dart_domain->domain.geometry.force_aperture = true;

        dart_domain->finalized = true;

done:
        mutex_unlock(&dart_domain->init_lock);

        return ret;
}

static int
apple_dart_mod_streams(struct apple_dart_atomic_stream_map *domain_maps,
                       struct apple_dart_stream_map *master_maps,
                       bool add_streams)
{
        int i, j;

        for (i = 0; i < MAX_DARTS_PER_DEVICE; ++i) {
                if (domain_maps[i].dart != master_maps[i].dart)
                        return -EINVAL;
        }

        for (i = 0; i < MAX_DARTS_PER_DEVICE; ++i) {
                if (!domain_maps[i].dart)
                        break;
                for (j = 0; j < BITS_TO_LONGS(domain_maps[i].dart->num_streams); j++) {
                        if (add_streams)
                                atomic_long_or(master_maps[i].sidmap[j],
                                               &domain_maps[i].sidmap[j]);
                        else
                                atomic_long_and(~master_maps[i].sidmap[j],
                                                &domain_maps[i].sidmap[j]);
                }
        }

        return 0;
}

static int apple_dart_domain_add_streams(struct apple_dart_domain *domain,
                                         struct apple_dart_master_cfg *cfg)
{
        return apple_dart_mod_streams(domain->stream_maps, cfg->stream_maps,
                                      true);
}

static int apple_dart_attach_dev_paging(struct iommu_domain *domain,
                                        struct device *dev)
{
        int ret, i;
        struct apple_dart_stream_map *stream_map;
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct apple_dart_domain *dart_domain = to_dart_domain(domain);

        ret = apple_dart_finalize_domain(dart_domain, cfg);
        if (ret)
                return ret;

        ret = apple_dart_domain_add_streams(dart_domain, cfg);
        if (ret)
                return ret;

        for_each_stream_map(i, cfg, stream_map)
                apple_dart_setup_translation(dart_domain, stream_map);
        return 0;
}

static int apple_dart_attach_dev_identity(struct iommu_domain *domain,
                                          struct device *dev)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct apple_dart_stream_map *stream_map;
        int i;

        if (!cfg->stream_maps[0].dart->supports_bypass)
                return -EINVAL;

        for_each_stream_map(i, cfg, stream_map)
                apple_dart_hw_enable_bypass(stream_map);
        return 0;
}

static const struct iommu_domain_ops apple_dart_identity_ops = {
        .attach_dev = apple_dart_attach_dev_identity,
};

static struct iommu_domain apple_dart_identity_domain = {
        .type = IOMMU_DOMAIN_IDENTITY,
        .ops = &apple_dart_identity_ops,
};

static int apple_dart_attach_dev_blocked(struct iommu_domain *domain,
                                         struct device *dev)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct apple_dart_stream_map *stream_map;
        int i;

        for_each_stream_map(i, cfg, stream_map)
                apple_dart_hw_disable_dma(stream_map);
        return 0;
}

static const struct iommu_domain_ops apple_dart_blocked_ops = {
        .attach_dev = apple_dart_attach_dev_blocked,
};

static struct iommu_domain apple_dart_blocked_domain = {
        .type = IOMMU_DOMAIN_BLOCKED,
        .ops = &apple_dart_blocked_ops,
};

static struct iommu_device *apple_dart_probe_device(struct device *dev)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct apple_dart_stream_map *stream_map;
        int i;

        if (!cfg)
                return ERR_PTR(-ENODEV);

        for_each_stream_map(i, cfg, stream_map)
                device_link_add(
                        dev, stream_map->dart->dev,
                        DL_FLAG_PM_RUNTIME | DL_FLAG_AUTOREMOVE_SUPPLIER);

        return &cfg->stream_maps[0].dart->iommu;
}

static void apple_dart_release_device(struct device *dev)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);

        kfree(cfg);
}

static struct iommu_domain *apple_dart_domain_alloc_paging(struct device *dev)
{
        struct apple_dart_domain *dart_domain;

        dart_domain = kzalloc(sizeof(*dart_domain), GFP_KERNEL);
        if (!dart_domain)
                return NULL;

        mutex_init(&dart_domain->init_lock);

        if (dev) {
                struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
                int ret;

                ret = apple_dart_finalize_domain(dart_domain, cfg);
                if (ret) {
                        kfree(dart_domain);
                        return ERR_PTR(ret);
                }
        }
        return &dart_domain->domain;
}

static void apple_dart_domain_free(struct iommu_domain *domain)
{
        struct apple_dart_domain *dart_domain = to_dart_domain(domain);

        if (dart_domain->pgtbl_ops)
                free_io_pgtable_ops(dart_domain->pgtbl_ops);

        kfree(dart_domain);
}

static int apple_dart_of_xlate(struct device *dev,
                               const struct of_phandle_args *args)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct platform_device *iommu_pdev = of_find_device_by_node(args->np);
        struct apple_dart *dart = platform_get_drvdata(iommu_pdev);
        struct apple_dart *cfg_dart;
        int i, sid;

        if (args->args_count != 1)
                return -EINVAL;
        sid = args->args[0];

        if (!cfg)
                cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
        if (!cfg)
                return -ENOMEM;
        dev_iommu_priv_set(dev, cfg);

        cfg_dart = cfg->stream_maps[0].dart;
        if (cfg_dart) {
                if (cfg_dart->supports_bypass != dart->supports_bypass)
                        return -EINVAL;
                if (cfg_dart->pgsize != dart->pgsize)
                        return -EINVAL;
        }

        for (i = 0; i < MAX_DARTS_PER_DEVICE; ++i) {
                if (cfg->stream_maps[i].dart == dart) {
                        set_bit(sid, cfg->stream_maps[i].sidmap);
                        return 0;
                }
        }
        for (i = 0; i < MAX_DARTS_PER_DEVICE; ++i) {
                if (!cfg->stream_maps[i].dart) {
                        cfg->stream_maps[i].dart = dart;
                        set_bit(sid, cfg->stream_maps[i].sidmap);
                        return 0;
                }
        }

        return -EINVAL;
}

);
828 static void apple_dart_release_group(void *iommu_data
)
831 struct apple_dart_stream_map
*stream_map
;
832 struct apple_dart_master_cfg
*group_master_cfg
= iommu_data
;
834 mutex_lock(&apple_dart_groups_lock
);
836 for_each_stream_map(i
, group_master_cfg
, stream_map
)
837 for_each_set_bit(sid
, stream_map
->sidmap
, stream_map
->dart
->num_streams
)
838 stream_map
->dart
->sid2group
[sid
] = NULL
;
841 mutex_unlock(&apple_dart_groups_lock
);
static int apple_dart_merge_master_cfg(struct apple_dart_master_cfg *dst,
                                       struct apple_dart_master_cfg *src)
{
        /*
         * We know that this function is only called for groups returned from
         * pci_device_group and that all Apple Silicon platforms never spread
         * PCIe devices from the same bus across multiple DARTs such that we can
         * just assume that both src and dst only have the same single DART.
         */
        if (src->stream_maps[1].dart)
                return -EINVAL;
        if (dst->stream_maps[1].dart)
                return -EINVAL;
        if (src->stream_maps[0].dart != dst->stream_maps[0].dart)
                return -EINVAL;

        bitmap_or(dst->stream_maps[0].sidmap,
                  dst->stream_maps[0].sidmap,
                  src->stream_maps[0].sidmap,
                  dst->stream_maps[0].dart->num_streams);

        return 0;
}

static struct iommu_group *apple_dart_device_group(struct device *dev)
{
        int i, sid;
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);
        struct apple_dart_stream_map *stream_map;
        struct apple_dart_master_cfg *group_master_cfg;
        struct iommu_group *group = NULL;
        struct iommu_group *res = ERR_PTR(-EINVAL);

        mutex_lock(&apple_dart_groups_lock);

        for_each_stream_map(i, cfg, stream_map) {
                for_each_set_bit(sid, stream_map->sidmap, stream_map->dart->num_streams) {
                        struct iommu_group *stream_group =
                                stream_map->dart->sid2group[sid];

                        if (group && group != stream_group) {
                                res = ERR_PTR(-EINVAL);
                                goto out;
                        }

                        group = stream_group;
                }
        }

        if (group) {
                res = iommu_group_ref_get(group);
                goto out;
        }

#ifdef CONFIG_PCI
        if (dev_is_pci(dev))
                group = pci_device_group(dev);
        else
#endif
                group = generic_device_group(dev);
        if (!group) {
                res = ERR_PTR(-ENOMEM);
                goto out;
        }

        group_master_cfg = iommu_group_get_iommudata(group);
        if (group_master_cfg) {
                int ret;

                ret = apple_dart_merge_master_cfg(group_master_cfg, cfg);
                if (ret) {
                        dev_err(dev, "Failed to merge DART IOMMU groups.\n");
                        iommu_group_put(group);
                        res = ERR_PTR(ret);
                        goto out;
                }
        } else {
                group_master_cfg = kmemdup(cfg, sizeof(*group_master_cfg),
                                           GFP_KERNEL);
                if (!group_master_cfg) {
                        iommu_group_put(group);
                        res = ERR_PTR(-ENOMEM);
                        goto out;
                }

                iommu_group_set_iommudata(group, group_master_cfg,
                                          apple_dart_release_group);
        }

        for_each_stream_map(i, cfg, stream_map)
                for_each_set_bit(sid, stream_map->sidmap, stream_map->dart->num_streams)
                        stream_map->dart->sid2group[sid] = group;

        res = group;

out:
        mutex_unlock(&apple_dart_groups_lock);
        return res;
}

static int apple_dart_def_domain_type(struct device *dev)
{
        struct apple_dart_master_cfg *cfg = dev_iommu_priv_get(dev);

        if (cfg->stream_maps[0].dart->pgsize > PAGE_SIZE)
                return IOMMU_DOMAIN_IDENTITY;
        if (!cfg->stream_maps[0].dart->supports_bypass)
                return IOMMU_DOMAIN_DMA;

        return 0;
}

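/*
 * Example of the decision above (illustrative): a DART using 16k pages on a
 * kernel built with 4k PAGE_SIZE cannot back the DMA API at kernel page
 * granularity, so its masters are forced into the identity domain; this is
 * the "bypass forced" case reported by apple_dart_probe().
 */
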
#ifndef CONFIG_PCIE_APPLE_MSI_DOORBELL_ADDR
/* Keep things compiling when CONFIG_PCI_APPLE isn't selected */
#define CONFIG_PCIE_APPLE_MSI_DOORBELL_ADDR 0
#endif
#define DOORBELL_ADDR (CONFIG_PCIE_APPLE_MSI_DOORBELL_ADDR & PAGE_MASK)

static void apple_dart_get_resv_regions(struct device *dev,
                                        struct list_head *head)
{
        if (IS_ENABLED(CONFIG_PCIE_APPLE) && dev_is_pci(dev)) {
                struct iommu_resv_region *region;
                int prot = IOMMU_WRITE | IOMMU_NOEXEC | IOMMU_MMIO;

                region = iommu_alloc_resv_region(DOORBELL_ADDR,
                                                 PAGE_SIZE, prot,
                                                 IOMMU_RESV_MSI, GFP_KERNEL);
                if (!region)
                        return;

                list_add_tail(&region->list, head);
        }

        iommu_dma_get_resv_regions(dev, head);
}

static const struct iommu_ops apple_dart_iommu_ops = {
        .identity_domain = &apple_dart_identity_domain,
        .blocked_domain = &apple_dart_blocked_domain,
        .domain_alloc_paging = apple_dart_domain_alloc_paging,
        .probe_device = apple_dart_probe_device,
        .release_device = apple_dart_release_device,
        .device_group = apple_dart_device_group,
        .of_xlate = apple_dart_of_xlate,
        .def_domain_type = apple_dart_def_domain_type,
        .get_resv_regions = apple_dart_get_resv_regions,
        .pgsize_bitmap = -1UL, /* Restricted during dart probe */
        .owner = THIS_MODULE,
        .default_domain_ops = &(const struct iommu_domain_ops) {
                .attach_dev     = apple_dart_attach_dev_paging,
                .map_pages      = apple_dart_map_pages,
                .unmap_pages    = apple_dart_unmap_pages,
                .flush_iotlb_all = apple_dart_flush_iotlb_all,
                .iotlb_sync     = apple_dart_iotlb_sync,
                .iotlb_sync_map = apple_dart_iotlb_sync_map,
                .iova_to_phys   = apple_dart_iova_to_phys,
                .free           = apple_dart_domain_free,
        }
};

static irqreturn_t apple_dart_t8020_irq(int irq, void *dev)
{
        struct apple_dart *dart = dev;
        const char *fault_name = NULL;
        u32 error = readl(dart->regs + DART_T8020_ERROR);
        u32 error_code = FIELD_GET(DART_T8020_ERROR_CODE, error);
        u32 addr_lo = readl(dart->regs + DART_T8020_ERROR_ADDR_LO);
        u32 addr_hi = readl(dart->regs + DART_T8020_ERROR_ADDR_HI);
        u64 addr = addr_lo | (((u64)addr_hi) << 32);
        u8 stream_idx = FIELD_GET(DART_T8020_ERROR_STREAM, error);

        if (!(error & DART_T8020_ERROR_FLAG))
                return IRQ_NONE;

        /* there should only be a single bit set but let's use == to be sure */
        if (error_code == DART_T8020_ERROR_READ_FAULT)
                fault_name = "READ FAULT";
        else if (error_code == DART_T8020_ERROR_WRITE_FAULT)
                fault_name = "WRITE FAULT";
        else if (error_code == DART_T8020_ERROR_NO_PTE)
                fault_name = "NO PTE FOR IOVA";
        else if (error_code == DART_T8020_ERROR_NO_PMD)
                fault_name = "NO PMD FOR IOVA";
        else if (error_code == DART_T8020_ERROR_NO_TTBR)
                fault_name = "NO TTBR FOR IOVA";
        else
                fault_name = "unknown";

        dev_err_ratelimited(
                dart->dev,
                "translation fault: status:0x%x stream:%d code:0x%x (%s) at 0x%llx",
                error, stream_idx, error_code, fault_name, addr);

        writel(error, dart->regs + DART_T8020_ERROR);

        return IRQ_HANDLED;
}

static irqreturn_t apple_dart_t8110_irq(int irq, void *dev)
{
        struct apple_dart *dart = dev;
        const char *fault_name = NULL;
        u32 error = readl(dart->regs + DART_T8110_ERROR);
        u32 error_code = FIELD_GET(DART_T8110_ERROR_CODE, error);
        u32 addr_lo = readl(dart->regs + DART_T8110_ERROR_ADDR_LO);
        u32 addr_hi = readl(dart->regs + DART_T8110_ERROR_ADDR_HI);
        u64 addr = addr_lo | (((u64)addr_hi) << 32);
        u8 stream_idx = FIELD_GET(DART_T8110_ERROR_STREAM, error);

        if (!(error & DART_T8110_ERROR_FLAG))
                return IRQ_NONE;

        /* there should only be a single bit set but let's use == to be sure */
        if (error_code == DART_T8110_ERROR_READ_FAULT)
                fault_name = "READ FAULT";
        else if (error_code == DART_T8110_ERROR_WRITE_FAULT)
                fault_name = "WRITE FAULT";
        else if (error_code == DART_T8110_ERROR_NO_PTE)
                fault_name = "NO PTE FOR IOVA";
        else if (error_code == DART_T8110_ERROR_NO_PMD)
                fault_name = "NO PMD FOR IOVA";
        else if (error_code == DART_T8110_ERROR_NO_PGD)
                fault_name = "NO PGD FOR IOVA";
        else if (error_code == DART_T8110_ERROR_NO_TTBR)
                fault_name = "NO TTBR FOR IOVA";
        else
                fault_name = "unknown";

        dev_err_ratelimited(
                dart->dev,
                "translation fault: status:0x%x stream:%d code:0x%x (%s) at 0x%llx",
                error, stream_idx, error_code, fault_name, addr);

        writel(error, dart->regs + DART_T8110_ERROR);

        return IRQ_HANDLED;
}

static int apple_dart_probe(struct platform_device *pdev)
{
        int ret;
        u32 dart_params[4];
        struct resource *res;
        struct apple_dart *dart;
        struct device *dev = &pdev->dev;

        dart = devm_kzalloc(dev, sizeof(*dart), GFP_KERNEL);
        if (!dart)
                return -ENOMEM;

        dart->dev = dev;
        dart->hw = of_device_get_match_data(dev);
        spin_lock_init(&dart->lock);

        dart->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
        if (IS_ERR(dart->regs))
                return PTR_ERR(dart->regs);

        if (resource_size(res) < 0x4000) {
                dev_err(dev, "MMIO region too small (%pr)\n", res);
                return -EINVAL;
        }

        dart->irq = platform_get_irq(pdev, 0);
        if (dart->irq < 0)
                return -ENODEV;

        ret = devm_clk_bulk_get_all(dev, &dart->clks);
        if (ret < 0)
                return ret;
        dart->num_clks = ret;

        ret = clk_bulk_prepare_enable(dart->num_clks, dart->clks);
        if (ret)
                return ret;

        dart_params[0] = readl(dart->regs + DART_PARAMS1);
        dart_params[1] = readl(dart->regs + DART_PARAMS2);
        dart->pgsize = 1 << FIELD_GET(DART_PARAMS1_PAGE_SHIFT, dart_params[0]);
        dart->supports_bypass = dart_params[1] & DART_PARAMS2_BYPASS_SUPPORT;

        switch (dart->hw->type) {
        case DART_T8020:
        case DART_T6000:
                dart->ias = 32;
                dart->oas = dart->hw->oas;
                dart->num_streams = dart->hw->max_sid_count;
                break;

        case DART_T8110:
                dart_params[2] = readl(dart->regs + DART_T8110_PARAMS3);
                dart_params[3] = readl(dart->regs + DART_T8110_PARAMS4);
                dart->ias = FIELD_GET(DART_T8110_PARAMS3_VA_WIDTH, dart_params[2]);
                dart->oas = FIELD_GET(DART_T8110_PARAMS3_PA_WIDTH, dart_params[2]);
                dart->num_streams = FIELD_GET(DART_T8110_PARAMS4_NUM_SIDS, dart_params[3]);
                break;
        }

        if (dart->num_streams > DART_MAX_STREAMS) {
                dev_err(&pdev->dev, "Too many streams (%d > %d)\n",
                        dart->num_streams, DART_MAX_STREAMS);
                ret = -EINVAL;
                goto err_clk_disable;
        }

        ret = apple_dart_hw_reset(dart);
        if (ret)
                goto err_clk_disable;

        ret = request_irq(dart->irq, dart->hw->irq_handler, IRQF_SHARED,
                          "apple-dart fault handler", dart);
        if (ret)
                goto err_clk_disable;

        platform_set_drvdata(pdev, dart);

        ret = iommu_device_sysfs_add(&dart->iommu, dev, NULL, "apple-dart.%s",
                                     dev_name(&pdev->dev));
        if (ret)
                goto err_free_irq;

        ret = iommu_device_register(&dart->iommu, &apple_dart_iommu_ops, dev);
        if (ret)
                goto err_sysfs_remove;

        dev_info(
                &pdev->dev,
                "DART [pagesize %x, %d streams, bypass support: %d, bypass forced: %d] initialized\n",
                dart->pgsize, dart->num_streams, dart->supports_bypass,
                dart->pgsize > PAGE_SIZE);

        return 0;

err_sysfs_remove:
        iommu_device_sysfs_remove(&dart->iommu);
err_free_irq:
        free_irq(dart->irq, dart);
err_clk_disable:
        clk_bulk_disable_unprepare(dart->num_clks, dart->clks);

        return ret;
}

static void apple_dart_remove(struct platform_device *pdev)
{
        struct apple_dart *dart = platform_get_drvdata(pdev);

        apple_dart_hw_reset(dart);
        free_irq(dart->irq, dart);

        iommu_device_unregister(&dart->iommu);
        iommu_device_sysfs_remove(&dart->iommu);

        clk_bulk_disable_unprepare(dart->num_clks, dart->clks);
}

static const struct apple_dart_hw apple_dart_hw_t8103 = {
        .type = DART_T8020,
        .irq_handler = apple_dart_t8020_irq,
        .invalidate_tlb = apple_dart_t8020_hw_invalidate_tlb,
        .oas = 36,
        .fmt = APPLE_DART,
        .max_sid_count = 16,

        .enable_streams = DART_T8020_STREAMS_ENABLE,
        .lock = DART_T8020_CONFIG,
        .lock_bit = DART_T8020_CONFIG_LOCK,

        .error = DART_T8020_ERROR,

        .tcr = DART_T8020_TCR,
        .tcr_enabled = DART_T8020_TCR_TRANSLATE_ENABLE,
        .tcr_disabled = 0,
        .tcr_bypass = DART_T8020_TCR_BYPASS_DAPF | DART_T8020_TCR_BYPASS_DART,

        .ttbr = DART_T8020_TTBR,
        .ttbr_valid = DART_T8020_TTBR_VALID,
        .ttbr_addr_field_shift = DART_T8020_TTBR_ADDR_FIELD_SHIFT,
        .ttbr_shift = DART_T8020_TTBR_SHIFT,
        .ttbr_count = 4,
};

static const struct apple_dart_hw apple_dart_hw_t8103_usb4 = {
        .type = DART_T8020,
        .irq_handler = apple_dart_t8020_irq,
        .invalidate_tlb = apple_dart_t8020_hw_invalidate_tlb,
        .oas = 36,
        .fmt = APPLE_DART,
        .max_sid_count = 64,

        .enable_streams = DART_T8020_STREAMS_ENABLE,
        .lock = DART_T8020_CONFIG,
        .lock_bit = DART_T8020_CONFIG_LOCK,

        .error = DART_T8020_ERROR,

        .tcr = DART_T8020_TCR,
        .tcr_enabled = DART_T8020_TCR_TRANSLATE_ENABLE,
        .tcr_disabled = 0,
        .tcr_bypass = 0,

        .ttbr = DART_T8020_USB4_TTBR,
        .ttbr_valid = DART_T8020_TTBR_VALID,
        .ttbr_addr_field_shift = DART_T8020_TTBR_ADDR_FIELD_SHIFT,
        .ttbr_shift = DART_T8020_TTBR_SHIFT,
        .ttbr_count = 4,
};

static const struct apple_dart_hw apple_dart_hw_t6000 = {
        .type = DART_T6000,
        .irq_handler = apple_dart_t8020_irq,
        .invalidate_tlb = apple_dart_t8020_hw_invalidate_tlb,
        .oas = 42,
        .fmt = APPLE_DART2,
        .max_sid_count = 16,

        .enable_streams = DART_T8020_STREAMS_ENABLE,
        .lock = DART_T8020_CONFIG,
        .lock_bit = DART_T8020_CONFIG_LOCK,

        .error = DART_T8020_ERROR,

        .tcr = DART_T8020_TCR,
        .tcr_enabled = DART_T8020_TCR_TRANSLATE_ENABLE,
        .tcr_disabled = 0,
        .tcr_bypass = DART_T8020_TCR_BYPASS_DAPF | DART_T8020_TCR_BYPASS_DART,

        .ttbr = DART_T8020_TTBR,
        .ttbr_valid = DART_T8020_TTBR_VALID,
        .ttbr_addr_field_shift = DART_T8020_TTBR_ADDR_FIELD_SHIFT,
        .ttbr_shift = DART_T8020_TTBR_SHIFT,
        .ttbr_count = 4,
};

static const struct apple_dart_hw apple_dart_hw_t8110 = {
        .type = DART_T8110,
        .irq_handler = apple_dart_t8110_irq,
        .invalidate_tlb = apple_dart_t8110_hw_invalidate_tlb,
        .fmt = APPLE_DART2,
        .max_sid_count = 256,

        .enable_streams = DART_T8110_ENABLE_STREAMS,
        .lock = DART_T8110_PROTECT,
        .lock_bit = DART_T8110_PROTECT_TTBR_TCR,

        .error = DART_T8110_ERROR,

        .tcr = DART_T8110_TCR,
        .tcr_enabled = DART_T8110_TCR_TRANSLATE_ENABLE,
        .tcr_disabled = 0,
        .tcr_bypass = DART_T8110_TCR_BYPASS_DAPF | DART_T8110_TCR_BYPASS_DART,

        .ttbr = DART_T8110_TTBR,
        .ttbr_valid = DART_T8110_TTBR_VALID,
        .ttbr_addr_field_shift = DART_T8110_TTBR_ADDR_FIELD_SHIFT,
        .ttbr_shift = DART_T8110_TTBR_SHIFT,
        .ttbr_count = 1,
};

static __maybe_unused int apple_dart_suspend(struct device *dev)
{
        struct apple_dart *dart = dev_get_drvdata(dev);
        unsigned int sid, idx;

        for (sid = 0; sid < dart->num_streams; sid++) {
                dart->save_tcr[sid] = readl(dart->regs + DART_TCR(dart, sid));
                for (idx = 0; idx < dart->hw->ttbr_count; idx++)
                        dart->save_ttbr[sid][idx] =
                                readl(dart->regs + DART_TTBR(dart, sid, idx));
        }

        return 0;
}

static __maybe_unused int apple_dart_resume(struct device *dev)
{
        struct apple_dart *dart = dev_get_drvdata(dev);
        unsigned int sid, idx;
        int ret;

        ret = apple_dart_hw_reset(dart);
        if (ret) {
                dev_err(dev, "Failed to reset DART on resume\n");
                return ret;
        }

        for (sid = 0; sid < dart->num_streams; sid++) {
                for (idx = 0; idx < dart->hw->ttbr_count; idx++)
                        writel(dart->save_ttbr[sid][idx],
                               dart->regs + DART_TTBR(dart, sid, idx));
                writel(dart->save_tcr[sid], dart->regs + DART_TCR(dart, sid));
        }

        return 0;
}

static DEFINE_SIMPLE_DEV_PM_OPS(apple_dart_pm_ops, apple_dart_suspend, apple_dart_resume);

static const struct of_device_id apple_dart_of_match[] = {
        { .compatible = "apple,t8103-dart", .data = &apple_dart_hw_t8103 },
        { .compatible = "apple,t8103-usb4-dart", .data = &apple_dart_hw_t8103_usb4 },
        { .compatible = "apple,t8110-dart", .data = &apple_dart_hw_t8110 },
        { .compatible = "apple,t6000-dart", .data = &apple_dart_hw_t6000 },
        {},
};
MODULE_DEVICE_TABLE(of, apple_dart_of_match);

static struct platform_driver apple_dart_driver = {
        .driver = {
                .name = "apple-dart",
                .of_match_table = apple_dart_of_match,
                .suppress_bind_attrs = true,
                .pm = pm_sleep_ptr(&apple_dart_pm_ops),
        },
        .probe = apple_dart_probe,
        .remove = apple_dart_remove,
};

module_platform_driver(apple_dart_driver);

MODULE_DESCRIPTION("IOMMU API for Apple's DART");
MODULE_AUTHOR("Sven Peter <sven@svenpeter.dev>");
MODULE_LICENSE("GPL v2");