/*
 * NOTE(review): the text below was extracted from a repository diff viewer
 * (rev 6934 vs rev 7143) — the viewer's navigation chrome is not source code.
 */
1 | #ifndef _ASM_X86_DMA_MAPPING_H |
1 | #ifndef _ASM_X86_DMA_MAPPING_H |
2 | #define _ASM_X86_DMA_MAPPING_H |
2 | #define _ASM_X86_DMA_MAPPING_H |
3 | 3 | ||
4 | /* |
4 | /* |
5 | * IOMMU interface. See Documentation/DMA-API-HOWTO.txt and |
5 | * IOMMU interface. See Documentation/DMA-API-HOWTO.txt and |
6 | * Documentation/DMA-API.txt for documentation. |
6 | * Documentation/DMA-API.txt for documentation. |
7 | */ |
7 | */ |
8 | 8 | ||
9 | #include |
9 | #include |
10 | #include |
10 | #include |
11 | #include |
11 | #include |
12 | #include |
12 | #include |
13 | #include |
13 | #include |
14 | #include |
14 | #include |
15 | #include |
15 | #include |
16 | 16 | ||
#ifdef CONFIG_ISA
/* ISA devices can only address the first 16 MB of memory. */
# define ISA_DMA_BIT_MASK DMA_BIT_MASK(24)
#else
# define ISA_DMA_BIT_MASK DMA_BIT_MASK(32)
#endif

/* Sentinel dma_addr_t value reported for failed mappings on x86. */
#define DMA_ERROR_CODE	0

/* Tunables/globals defined elsewhere in arch DMA code. */
extern int iommu_merge;				/* NOTE(review): presumably controls sg merging — defined elsewhere, confirm */
extern struct device x86_dma_fallback_dev;	/* fallback device object — TODO confirm it stands in for a NULL dev */
extern int panic_on_overflow;			/* NOTE(review): looks like "panic instead of failing the mapping" — confirm */

/* Global DMA operations table; per-device ops may override it (see get_dma_ops). */
extern struct dma_map_ops *dma_ops;
30 | 30 | ||
31 | static inline struct dma_map_ops *get_dma_ops(struct device *dev) |
31 | static inline struct dma_map_ops *get_dma_ops(struct device *dev) |
32 | { |
32 | { |
33 | #ifndef CONFIG_X86_DEV_DMA_OPS |
33 | #ifndef CONFIG_X86_DEV_DMA_OPS |
34 | return dma_ops; |
34 | return dma_ops; |
35 | #else |
35 | #else |
36 | if (unlikely(!dev) || !dev->archdata.dma_ops) |
36 | if (unlikely(!dev) || !dev->archdata.dma_ops) |
37 | return dma_ops; |
37 | return dma_ops; |
38 | else |
38 | else |
39 | return dev->archdata.dma_ops; |
39 | return dev->archdata.dma_ops; |
40 | #endif |
40 | #endif |
41 | } |
41 | } |
42 | 42 | ||
/*
 * Arch hook run before a coherent allocation; may rewrite *dev and *gfp.
 * NOTE(review): the bool return presumably vetoes the allocation when
 * false — semantics defined by the out-of-file implementation, confirm.
 */
bool arch_dma_alloc_attrs(struct device **dev, gfp_t *gfp);
#define arch_dma_alloc_attrs arch_dma_alloc_attrs

/* x86 supplies its own dma_supported() instead of the generic one. */
#define HAVE_ARCH_DMA_SUPPORTED 1
extern int dma_supported(struct device *hwdev, u64 mask);

/* Default (non-IOMMU) coherent alloc/free helpers, defined in arch code. */
extern void *dma_generic_alloc_coherent(struct device *dev, size_t size,
					dma_addr_t *dma_addr, gfp_t flag,
					struct dma_attrs *attrs);

extern void dma_generic_free_coherent(struct device *dev, size_t size,
				      void *vaddr, dma_addr_t dma_addr,
				      struct dma_attrs *attrs);
58 | 56 | ||
#ifdef CONFIG_X86_DMA_REMAP /* Platform code defines bridge-specific code */
extern bool dma_capable(struct device *dev, dma_addr_t addr, size_t size);
extern dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr);
extern phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr);
#else

/*
 * True iff the range [addr, addr + size) fits under the device's DMA
 * mask.  A device with no mask set is treated as incapable.
 */
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (!dev->dma_mask)
		return 0;

	/* "- 1" so that a range ending exactly at the mask limit passes. */
	return addr + size - 1 <= *dev->dma_mask;
}

/* Without remapping, bus (DMA) addresses equal physical addresses. */
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return paddr;
}

/* Inverse of phys_to_dma(); identity for the non-remapped case. */
static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return daddr;
}
#endif /* CONFIG_X86_DMA_REMAP */
83 | 81 | ||
/*
 * Make CPU stores to a streaming buffer visible to the device.  Only the
 * CPU write buffers are flushed here; no cache maintenance is performed
 * (NOTE(review): consistent with x86 DMA being cache-coherent — confirm).
 * @dev, @vaddr, @size and @dir are accepted for API symmetry but unused.
 */
static inline void
dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction dir)
{
	flush_write_buffers();
}
90 | 88 | ||
91 | static inline unsigned long dma_alloc_coherent_mask(struct device *dev, |
89 | static inline unsigned long dma_alloc_coherent_mask(struct device *dev, |
92 | gfp_t gfp) |
90 | gfp_t gfp) |
93 | { |
91 | { |
94 | unsigned long dma_mask = 0; |
92 | unsigned long dma_mask = 0; |
95 | 93 | ||
96 | dma_mask = dev->coherent_dma_mask; |
94 | dma_mask = dev->coherent_dma_mask; |
97 | if (!dma_mask) |
95 | if (!dma_mask) |
98 | dma_mask = (gfp & GFP_DMA) ? DMA_BIT_MASK(24) : DMA_BIT_MASK(32); |
96 | dma_mask = (gfp & GFP_DMA) ? DMA_BIT_MASK(24) : DMA_BIT_MASK(32); |
99 | 97 | ||
100 | return dma_mask; |
98 | return dma_mask; |
101 | } |
99 | } |
102 | 100 | ||
103 | static inline gfp_t dma_alloc_coherent_gfp_flags(struct device *dev, gfp_t gfp) |
101 | static inline gfp_t dma_alloc_coherent_gfp_flags(struct device *dev, gfp_t gfp) |
104 | { |
102 | { |
105 | unsigned long dma_mask = dma_alloc_coherent_mask(dev, gfp); |
103 | unsigned long dma_mask = dma_alloc_coherent_mask(dev, gfp); |
106 | 104 | ||
107 | if (dma_mask <= DMA_BIT_MASK(24)) |
105 | if (dma_mask <= DMA_BIT_MASK(24)) |
108 | gfp |= GFP_DMA; |
106 | gfp |= GFP_DMA; |
109 | #ifdef CONFIG_X86_64 |
107 | #ifdef CONFIG_X86_64 |
110 | if (dma_mask <= DMA_BIT_MASK(32) && !(gfp & GFP_DMA)) |
108 | if (dma_mask <= DMA_BIT_MASK(32) && !(gfp & GFP_DMA)) |
111 | gfp |= GFP_DMA32; |
109 | gfp |= GFP_DMA32; |
112 | #endif |
110 | #endif |
113 | return gfp; |
111 | return gfp; |
114 | } |
112 | } |
115 | 113 | ||
#endif /* _ASM_X86_DMA_MAPPING_H */