#ifndef _ASM_S390_DMA_MAPPING_H
#define _ASM_S390_DMA_MAPPING_H

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/scatterlist.h>
#include <linux/dma-attrs.h>
#include <linux/dma-debug.h>

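/* All-ones dma_addr_t marks a failed mapping; see dma_mapping_error() below. */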
#define DMA_ERROR_CODE		(~(dma_addr_t) 0x0)

extern struct dma_map_ops s390_dma_ops;

static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return &s390_dma_ops;
}

extern int dma_set_mask(struct device *dev, u64 mask);

static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction direction)
{
	/* DMA is cache-coherent on s390, so this is a no-op. */
}

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

#include <asm-generic/dma-mapping-common.h>

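/* Report whether the bus DMA ops can satisfy the given addressing mask. */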
static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (dma_ops->dma_supported == NULL)
		return 1;
	return dma_ops->dma_supported(dev, mask);
}

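/* Check that the range [addr, addr + size) fits within the device's DMA mask. */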
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (!dev->dma_mask)
		return false;
	return addr + size - 1 <= *dev->dma_mask;
}

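/* Check a dma_addr_t returned by the mapping ops for failure. */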
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	debug_dma_mapping_error(dev, dma_addr);
	if (dma_ops->mapping_error)
		return dma_ops->mapping_error(dev, dma_addr);
	return dma_addr == DMA_ERROR_CODE;
}

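/* Allocate a coherent buffer through the bus DMA ops and record it for dma-debug. */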
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *ret;

	ret = ops->alloc(dev, size, dma_handle, flag, NULL);
	debug_dma_alloc_coherent(dev, size, *dma_handle, ret);
	return ret;
}

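/* Release a buffer obtained from dma_alloc_coherent(). */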
static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	dma_ops->free(dev, size, cpu_addr, dma_handle, NULL);
}

#endif /* _ASM_S390_DMA_MAPPING_H */