1 #ifndef __ASM_SH_DMA_MAPPING_H
2 #define __ASM_SH_DMA_MAPPING_H
4 extern struct dma_map_ops
*dma_ops
;
5 extern void no_iommu_init(void);
7 static inline struct dma_map_ops
*get_dma_ops(struct device
*dev
)
12 #include <asm-generic/dma-coherent.h>
13 #include <asm-generic/dma-mapping-common.h>
15 static inline int dma_supported(struct device
*dev
, u64 mask
)
17 struct dma_map_ops
*ops
= get_dma_ops(dev
);
19 if (ops
->dma_supported
)
20 return ops
->dma_supported(dev
, mask
);
25 static inline int dma_set_mask(struct device
*dev
, u64 mask
)
27 struct dma_map_ops
*ops
= get_dma_ops(dev
);
29 if (!dev
->dma_mask
|| !dma_supported(dev
, mask
))
31 if (ops
->set_dma_mask
)
32 return ops
->set_dma_mask(dev
, mask
);
34 *dev
->dma_mask
= mask
;
39 void dma_cache_sync(struct device
*dev
, void *vaddr
, size_t size
,
40 enum dma_data_direction dir
);
/* sh has no separate non-coherent allocator; alias the coherent one. */
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
/* Memory is DMA-consistent only when the platform is DMA-coherent. */
#ifdef CONFIG_DMA_COHERENT
#define dma_is_consistent(d, h) (1)
#else
#define dma_is_consistent(d, h) (0)
#endif
51 static inline int dma_get_cache_alignment(void)
54 * Each processor family will define its own L1_CACHE_SHIFT,
55 * L1_CACHE_BYTES wraps to this, so this is always safe.
57 return L1_CACHE_BYTES
;
60 static inline int dma_mapping_error(struct device
*dev
, dma_addr_t dma_addr
)
62 struct dma_map_ops
*ops
= get_dma_ops(dev
);
64 if (ops
->mapping_error
)
65 return ops
->mapping_error(dev
, dma_addr
);
70 static inline void *dma_alloc_coherent(struct device
*dev
, size_t size
,
71 dma_addr_t
*dma_handle
, gfp_t gfp
)
73 struct dma_map_ops
*ops
= get_dma_ops(dev
);
76 if (dma_alloc_from_coherent(dev
, size
, dma_handle
, &memory
))
78 if (!ops
->alloc_coherent
)
81 memory
= ops
->alloc_coherent(dev
, size
, dma_handle
, gfp
);
82 debug_dma_alloc_coherent(dev
, size
, *dma_handle
, memory
);
87 static inline void dma_free_coherent(struct device
*dev
, size_t size
,
88 void *vaddr
, dma_addr_t dma_handle
)
90 struct dma_map_ops
*ops
= get_dma_ops(dev
);
92 if (dma_release_from_coherent(dev
, get_order(size
), vaddr
))
95 debug_dma_free_coherent(dev
, size
, vaddr
, dma_handle
);
96 if (ops
->free_coherent
)
97 ops
->free_coherent(dev
, size
, vaddr
, dma_handle
);
100 /* arch/sh/mm/consistent.c */
101 extern void *dma_generic_alloc_coherent(struct device
*dev
, size_t size
,
102 dma_addr_t
*dma_addr
, gfp_t flag
);
103 extern void dma_generic_free_coherent(struct device
*dev
, size_t size
,
104 void *vaddr
, dma_addr_t dma_handle
);
106 #endif /* __ASM_SH_DMA_MAPPING_H */