/*
 * DMA Mapping glue for ARC
 *
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
11 #ifndef ASM_ARC_DMA_MAPPING_H
12 #define ASM_ARC_DMA_MAPPING_H
14 #include <asm-generic/dma-coherent.h>
15 #include <asm/cacheflush.h>
17 void *dma_alloc_noncoherent(struct device
*dev
, size_t size
,
18 dma_addr_t
*dma_handle
, gfp_t gfp
);
20 void dma_free_noncoherent(struct device
*dev
, size_t size
, void *vaddr
,
21 dma_addr_t dma_handle
);
23 void *dma_alloc_coherent(struct device
*dev
, size_t size
,
24 dma_addr_t
*dma_handle
, gfp_t gfp
);
26 void dma_free_coherent(struct device
*dev
, size_t size
, void *kvaddr
,
27 dma_addr_t dma_handle
);
29 /* drivers/base/dma-mapping.c */
30 extern int dma_common_mmap(struct device
*dev
, struct vm_area_struct
*vma
,
31 void *cpu_addr
, dma_addr_t dma_addr
, size_t size
);
32 extern int dma_common_get_sgtable(struct device
*dev
, struct sg_table
*sgt
,
33 void *cpu_addr
, dma_addr_t dma_addr
,
36 #define dma_mmap_coherent(d, v, c, h, s) dma_common_mmap(d, v, c, h, s)
37 #define dma_get_sgtable(d, t, v, h, s) dma_common_get_sgtable(d, t, v, h, s)
/*
 * streaming DMA Mapping API...
 * The CPU accesses the page via its normal paddr, so the region must be
 * explicitly made consistent before each use.
 */
45 static inline void __inline_dma_cache_sync(unsigned long paddr
, size_t size
,
46 enum dma_data_direction dir
)
50 dma_cache_inv(paddr
, size
);
53 dma_cache_wback(paddr
, size
);
55 case DMA_BIDIRECTIONAL
:
56 dma_cache_wback_inv(paddr
, size
);
59 pr_err("Invalid DMA dir [%d] for OP @ %lx\n", dir
, paddr
);
63 void __arc_dma_cache_sync(unsigned long paddr
, size_t size
,
64 enum dma_data_direction dir
);
/*
 * Cache-sync dispatcher: if @dir is a compile-time constant, use the
 * inline helper (the switch folds away); otherwise call out of line.
 */
#define _dma_cache_sync(addr, sz, dir)				\
do {								\
	if (__builtin_constant_p(dir))				\
		__inline_dma_cache_sync(addr, sz, dir);		\
	else							\
		__arc_dma_cache_sync(addr, sz, dir);		\
} while (0)
75 static inline dma_addr_t
76 dma_map_single(struct device
*dev
, void *cpu_addr
, size_t size
,
77 enum dma_data_direction dir
)
79 _dma_cache_sync((unsigned long)cpu_addr
, size
, dir
);
80 return (dma_addr_t
)cpu_addr
;
84 dma_unmap_single(struct device
*dev
, dma_addr_t dma_addr
,
85 size_t size
, enum dma_data_direction dir
)
89 static inline dma_addr_t
90 dma_map_page(struct device
*dev
, struct page
*page
,
91 unsigned long offset
, size_t size
,
92 enum dma_data_direction dir
)
94 unsigned long paddr
= page_to_phys(page
) + offset
;
95 return dma_map_single(dev
, (void *)paddr
, size
, dir
);
99 dma_unmap_page(struct device
*dev
, dma_addr_t dma_handle
,
100 size_t size
, enum dma_data_direction dir
)
105 dma_map_sg(struct device
*dev
, struct scatterlist
*sg
,
106 int nents
, enum dma_data_direction dir
)
108 struct scatterlist
*s
;
111 for_each_sg(sg
, s
, nents
, i
)
112 s
->dma_address
= dma_map_page(dev
, sg_page(s
), s
->offset
,
119 dma_unmap_sg(struct device
*dev
, struct scatterlist
*sg
,
120 int nents
, enum dma_data_direction dir
)
122 struct scatterlist
*s
;
125 for_each_sg(sg
, s
, nents
, i
)
126 dma_unmap_page(dev
, sg_dma_address(s
), sg_dma_len(s
), dir
);
130 dma_sync_single_for_cpu(struct device
*dev
, dma_addr_t dma_handle
,
131 size_t size
, enum dma_data_direction dir
)
133 _dma_cache_sync(dma_handle
, size
, DMA_FROM_DEVICE
);
137 dma_sync_single_for_device(struct device
*dev
, dma_addr_t dma_handle
,
138 size_t size
, enum dma_data_direction dir
)
140 _dma_cache_sync(dma_handle
, size
, DMA_TO_DEVICE
);
144 dma_sync_single_range_for_cpu(struct device
*dev
, dma_addr_t dma_handle
,
145 unsigned long offset
, size_t size
,
146 enum dma_data_direction direction
)
148 _dma_cache_sync(dma_handle
+ offset
, size
, DMA_FROM_DEVICE
);
152 dma_sync_single_range_for_device(struct device
*dev
, dma_addr_t dma_handle
,
153 unsigned long offset
, size_t size
,
154 enum dma_data_direction direction
)
156 _dma_cache_sync(dma_handle
+ offset
, size
, DMA_TO_DEVICE
);
160 dma_sync_sg_for_cpu(struct device
*dev
, struct scatterlist
*sglist
, int nelems
,
161 enum dma_data_direction dir
)
164 struct scatterlist
*sg
;
166 for_each_sg(sglist
, sg
, nelems
, i
)
167 _dma_cache_sync((unsigned int)sg_virt(sg
), sg
->length
, dir
);
171 dma_sync_sg_for_device(struct device
*dev
, struct scatterlist
*sglist
,
172 int nelems
, enum dma_data_direction dir
)
175 struct scatterlist
*sg
;
177 for_each_sg(sglist
, sg
, nelems
, i
)
178 _dma_cache_sync((unsigned int)sg_virt(sg
), sg
->length
, dir
);
181 static inline int dma_supported(struct device
*dev
, u64 dma_mask
)
183 /* Support 32 bit DMA mask exclusively */
184 return dma_mask
== DMA_BIT_MASK(32);
187 static inline int dma_mapping_error(struct device
*dev
, dma_addr_t dma_addr
)
192 static inline int dma_set_mask(struct device
*dev
, u64 dma_mask
)
194 if (!dev
->dma_mask
|| !dma_supported(dev
, dma_mask
))
197 *dev
->dma_mask
= dma_mask
;