IA64: iommu uses sg_next with an invalid sg element
[pv_ops_mirror.git] include/linux/dma-mapping.h (blob 0ebfafbd338ca56fc89f781c2848a9c16b81cb6d)
#ifndef _ASM_LINUX_DMA_MAPPING_H
#define _ASM_LINUX_DMA_MAPPING_H

#include <linux/device.h>
#include <linux/err.h>

/* These definitions mirror those in pci.h, so they can be used
 * interchangeably with their PCI_ counterparts */
enum dma_data_direction {
	DMA_BIDIRECTIONAL = 0,
	DMA_TO_DEVICE = 1,
	DMA_FROM_DEVICE = 2,
	DMA_NONE = 3,
};

/* Convenience masks describing how many address bits a device can drive */
#define DMA_64BIT_MASK	0xffffffffffffffffULL
#define DMA_48BIT_MASK	0x0000ffffffffffffULL
#define DMA_40BIT_MASK	0x000000ffffffffffULL
#define DMA_39BIT_MASK	0x0000007fffffffffULL
#define DMA_32BIT_MASK	0x00000000ffffffffULL
#define DMA_31BIT_MASK	0x000000007fffffffULL
#define DMA_30BIT_MASK	0x000000003fffffffULL
#define DMA_29BIT_MASK	0x000000001fffffffULL
#define DMA_28BIT_MASK	0x000000000fffffffULL
#define DMA_24BIT_MASK	0x0000000000ffffffULL

#define DMA_MASK_NONE	0x0ULL

/* DMA_NONE is not a valid direction for an actual mapping */
static inline int valid_dma_direction(int dma_direction)
{
	return ((dma_direction == DMA_BIDIRECTIONAL) ||
		(dma_direction == DMA_TO_DEVICE) ||
		(dma_direction == DMA_FROM_DEVICE));
}

static inline int is_device_dma_capable(struct device *dev)
{
	return dev->dma_mask != NULL && *dev->dma_mask != DMA_MASK_NONE;
}

#ifdef CONFIG_HAS_DMA
#include <asm/dma-mapping.h>
#else
#include <asm-generic/dma-mapping-broken.h>
#endif

/* Backwards compat, remove in 2.7.x */
#define dma_sync_single		dma_sync_single_for_cpu
#define dma_sync_sg		dma_sync_sg_for_cpu

extern u64 dma_get_required_mask(struct device *dev);

/* flags for the coherent memory api */
#define DMA_MEMORY_MAP			0x01
#define DMA_MEMORY_IO			0x02
#define DMA_MEMORY_INCLUDES_CHILDREN	0x04
#define DMA_MEMORY_EXCLUSIVE		0x08

#ifndef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
/* Stub versions for architectures that cannot declare per-device
 * coherent memory; the declaration simply fails. */
static inline int
dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
			    dma_addr_t device_addr, size_t size, int flags)
{
	return 0;
}

static inline void
dma_release_declared_memory(struct device *dev)
{
}

static inline void *
dma_mark_declared_memory_occupied(struct device *dev,
				  dma_addr_t device_addr, size_t size)
{
	return ERR_PTR(-EBUSY);
}
#endif

/*
 * Managed DMA API
 */
extern void *dmam_alloc_coherent(struct device *dev, size_t size,
				 dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
			       dma_addr_t dma_handle);
extern void *dmam_alloc_noncoherent(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_noncoherent(struct device *dev, size_t size, void *vaddr,
				  dma_addr_t dma_handle);
#ifdef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
extern int dmam_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
					dma_addr_t device_addr, size_t size,
					int flags);
extern void dmam_release_declared_memory(struct device *dev);
#else /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
static inline int dmam_declare_coherent_memory(struct device *dev,
				dma_addr_t bus_addr, dma_addr_t device_addr,
				size_t size, int flags)
{
	return 0;
}

static inline void dmam_release_declared_memory(struct device *dev)
{
}
#endif /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */

#endif
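
For illustration only, here is a minimal sketch (not part of the header above) of how a driver might use the managed DMA API it declares. The function name, buffer size, and error codes are assumptions; the point is that a dmam_* allocation is released automatically by devres when the driver detaches, so no explicit free is needed in the error or remove paths.

/* Hypothetical usage sketch, assuming <linux/dma-mapping.h> and the usual
 * driver-core headers are included; not part of the header above. */
static int example_probe(struct device *dev)
{
	dma_addr_t dma_handle;
	void *buf;

	if (!is_device_dma_capable(dev))
		return -EIO;

	/* Managed allocation: freed automatically on driver detach,
	 * so no dmam_free_coherent() is required on the error paths. */
	buf = dmam_alloc_coherent(dev, PAGE_SIZE, &dma_handle, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	/* ... program dma_handle into the device's DMA registers ... */
	return 0;
}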
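
Similarly, a hedged sketch of declaring a device-local coherent memory region with the DMA_MEMORY_* flags defined above. The addresses and the region size are made-up values; on this kernel vintage the call reports success with a nonzero return and failure with 0, which is what the stub above returns when the architecture lacks support.

/* Hypothetical sketch: reserve 1 MiB of device-local memory for coherent
 * allocations.  Addresses and size are illustrative only. */
static int example_setup_coherent_pool(struct device *dev)
{
	int rc;

	/* bus_addr: address as seen by the CPU; device_addr: address as
	 * seen by the device.  Identity-mapped here for simplicity. */
	rc = dma_declare_coherent_memory(dev, 0xfe000000, 0xfe000000,
					 0x100000,
					 DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE);
	if (!rc)	/* 0 means the declaration failed */
		return -ENOMEM;

	return 0;
}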