#ifndef ___ASM_SPARC_DMA_MAPPING_H
#define ___ASM_SPARC_DMA_MAPPING_H

#include <linux/scatterlist.h>
#include <linux/mm.h>
#include <linux/dma-debug.h>

#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

extern int dma_supported(struct device *dev, u64 mask);

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

extern struct dma_map_ops *dma_ops, pci32_dma_ops;
extern struct bus_type pci_bus_type;

/* sparc32 PCI devices get the PCI-specific dma_map_ops; everything else
 * goes through the platform default ops. */
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
#if defined(CONFIG_SPARC32) && defined(CONFIG_PCI)
	if (dev->bus == &pci_bus_type)
		return &pci32_dma_ops;
#endif
	return dma_ops;
}

#include <asm-generic/dma-mapping-common.h>

/* Allocate a coherent buffer through the bus-specific ops and record the
 * allocation with the DMA debug infrastructure. */
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr;

	cpu_addr = ops->alloc_coherent(dev, size, dma_handle, flag);
	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);
	return cpu_addr;
}

static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	ops->free_coherent(dev, size, cpu_addr, dma_handle);
}

/* A mapping has failed if the returned bus address equals DMA_ERROR_CODE. */
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return (dma_addr == DMA_ERROR_CODE);
}

/* Only PCI devices may change their DMA mask; all other buses refuse. */
static inline int dma_set_mask(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
	if (dev->bus == &pci_bus_type) {
		if (!dev->dma_mask || !dma_supported(dev, mask))
			return -EINVAL;
		*dev->dma_mask = mask;
		return 0;
	}
#endif
	return -EINVAL;
}

#endif
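/*
 * The listing below is a later revision of the same header.  It adds a
 * dedicated set of dma_map_ops for LEON systems and switches the coherent
 * alloc/free helpers to the dma_attrs-based ops->alloc()/ops->free()
 * interface.
 */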
#ifndef ___ASM_SPARC_DMA_MAPPING_H
#define ___ASM_SPARC_DMA_MAPPING_H

#include <linux/scatterlist.h>
#include <linux/mm.h>
#include <linux/dma-debug.h>

#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

extern int dma_supported(struct device *dev, u64 mask);

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

extern struct dma_map_ops *dma_ops;
extern struct dma_map_ops *leon_dma_ops;
extern struct dma_map_ops pci32_dma_ops;

extern struct bus_type pci_bus_type;

/* LEON systems use their own dma_map_ops, sparc32 PCI devices the PCI
 * specific ones; everything else goes through the platform default ops. */
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
#if defined(CONFIG_SPARC32) && defined(CONFIG_PCI)
	if (sparc_cpu_model == sparc_leon)
		return leon_dma_ops;
	else if (dev->bus == &pci_bus_type)
		return &pci32_dma_ops;
#endif
	return dma_ops;
}

#include <asm-generic/dma-mapping-common.h>

/* dma_alloc_coherent() is the attrs-less front end for dma_alloc_attrs(). */
#define dma_alloc_coherent(d,s,h,f) dma_alloc_attrs(d,s,h,f,NULL)

static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flag,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr;

	cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs);
	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);
	return cpu_addr;
}

#define dma_free_coherent(d,s,c,h) dma_free_attrs(d,s,c,h,NULL)

static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	ops->free(dev, size, cpu_addr, dma_handle, attrs);
}

/* A mapping has failed if the returned bus address equals DMA_ERROR_CODE. */
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	debug_dma_mapping_error(dev, dma_addr);
	return (dma_addr == DMA_ERROR_CODE);
}

/* Only PCI devices may change their DMA mask; all other buses refuse. */
static inline int dma_set_mask(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
	if (dev->bus == &pci_bus_type) {
		if (!dev->dma_mask || !dma_supported(dev, mask))
			return -EINVAL;
		*dev->dma_mask = mask;
		return 0;
	}
#endif
	return -EINVAL;
}

#endif
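/*
 * Illustrative usage sketch: how a driver built on the helpers above might
 * set its mask, allocate coherent memory and map a streaming buffer.  The
 * names "my_dev", "kbuf", "len" and BUF_SIZE are hypothetical placeholders,
 * not identifiers from this header; error handling is trimmed to the calls
 * shown here.
 *
 *	void *cpu_buf;
 *	dma_addr_t dma_handle, busaddr;
 *
 *	if (dma_set_mask(my_dev, DMA_BIT_MASK(32)))
 *		return -EINVAL;		(non-PCI buses always refuse here)
 *
 *	Coherent buffer, e.g. for a descriptor ring:
 *
 *	cpu_buf = dma_alloc_coherent(my_dev, BUF_SIZE, &dma_handle, GFP_KERNEL);
 *	if (!cpu_buf)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(my_dev, BUF_SIZE, cpu_buf, dma_handle);
 *
 *	Streaming mapping of an existing kernel buffer; failures are reported
 *	through dma_mapping_error():
 *
 *	busaddr = dma_map_single(my_dev, kbuf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(my_dev, busaddr))
 *		return -EIO;
 *	...
 *	dma_unmap_single(my_dev, busaddr, len, DMA_TO_DEVICE);
 *
 * dma_map_single()/dma_unmap_single() come from the included
 * <asm-generic/dma-mapping-common.h>, not from this file itself.
 */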