1 #ifndef _X8664_DMA_MAPPING_H
2 #define _X8664_DMA_MAPPING_H 1
5 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
9 #include <linux/config.h>
11 #include <asm/scatterlist.h>
13 #include <asm/swiotlb.h>
15 extern dma_addr_t bad_dma_address
;
16 #define dma_mapping_error(x) \
17 (swiotlb ? swiotlb_dma_mapping_error(x) : ((x) == bad_dma_address))
19 void *dma_alloc_coherent(struct device
*dev
, size_t size
, dma_addr_t
*dma_handle
,
21 void dma_free_coherent(struct device
*dev
, size_t size
, void *vaddr
,
22 dma_addr_t dma_handle
);
24 #ifdef CONFIG_GART_IOMMU
26 extern dma_addr_t
dma_map_single(struct device
*hwdev
, void *ptr
, size_t size
,
28 extern void dma_unmap_single(struct device
*dev
, dma_addr_t addr
,size_t size
,
35 static inline dma_addr_t
dma_map_single(struct device
*hwdev
, void *ptr
,
36 size_t size
, int direction
)
40 if (direction
== DMA_NONE
)
42 addr
= virt_to_bus(ptr
);
44 if ((addr
+size
) & ~*hwdev
->dma_mask
)
49 static inline void dma_unmap_single(struct device
*hwdev
, dma_addr_t dma_addr
,
50 size_t size
, int direction
)
52 if (direction
== DMA_NONE
)
/* Page mappings are just single mappings of the page's kernel address. */
#define dma_map_page(dev,page,offset,size,dir) \
	dma_map_single((dev), page_address(page)+(offset), (size), (dir))
62 static inline void dma_sync_single_for_cpu(struct device
*hwdev
,
63 dma_addr_t dma_handle
,
64 size_t size
, int direction
)
66 if (direction
== DMA_NONE
)
70 return swiotlb_sync_single_for_cpu(hwdev
,dma_handle
,size
,direction
);
72 flush_write_buffers();
75 static inline void dma_sync_single_for_device(struct device
*hwdev
,
76 dma_addr_t dma_handle
,
77 size_t size
, int direction
)
79 if (direction
== DMA_NONE
)
83 return swiotlb_sync_single_for_device(hwdev
,dma_handle
,size
,direction
);
85 flush_write_buffers();
88 static inline void dma_sync_sg_for_cpu(struct device
*hwdev
,
89 struct scatterlist
*sg
,
90 int nelems
, int direction
)
92 if (direction
== DMA_NONE
)
96 return swiotlb_sync_sg_for_cpu(hwdev
,sg
,nelems
,direction
);
98 flush_write_buffers();
101 static inline void dma_sync_sg_for_device(struct device
*hwdev
,
102 struct scatterlist
*sg
,
103 int nelems
, int direction
)
105 if (direction
== DMA_NONE
)
109 return swiotlb_sync_sg_for_device(hwdev
,sg
,nelems
,direction
);
111 flush_write_buffers();
/*
 * Map/unmap a scatter-gather list for streaming DMA.  dma_map_sg()
 * returns the number of entries actually mapped (0 on failure).
 */
extern int dma_map_sg(struct device *hwdev, struct scatterlist *sg,
		      int nents, int direction);
extern void dma_unmap_sg(struct device *hwdev, struct scatterlist *sg,
			 int nents, int direction);
119 #define dma_unmap_page dma_unmap_single
121 extern int dma_supported(struct device
*hwdev
, u64 mask
);
122 extern int dma_get_cache_alignment(void);
123 #define dma_is_consistent(h) 1
125 static inline int dma_set_mask(struct device
*dev
, u64 mask
)
127 if (!dev
->dma_mask
|| !dma_supported(dev
, mask
))
129 *dev
->dma_mask
= mask
;
133 static inline void dma_cache_sync(void *vaddr
, size_t size
, enum dma_data_direction dir
)
135 flush_write_buffers();