/* Copyright (C) 2004 IBM
 *
 * Implements the generic device dma API for ppc64. Handles
 * the pci and vio busses
 */
7 #ifndef _ASM_DMA_MAPPING_H
8 #define _ASM_DMA_MAPPING_H
10 #include <linux/types.h>
11 #include <linux/device.h>
12 #include <linux/cache.h>
13 /* need struct page definitions */
15 #include <asm/scatterlist.h>
18 #define DMA_ERROR_CODE (~(dma_addr_t)0x0)
/* Query whether the platform can support the given DMA addressing mask
 * for this device; non-zero means the mask is usable. */
extern int dma_supported(struct device *dev, u64 mask);
/* Install dma_mask on the device after validating it. */
extern int dma_set_mask(struct device *dev, u64 dma_mask);
/* Allocate a coherent DMA buffer of 'size' bytes; the bus address is
 * returned through *dma_handle, the CPU address as the return value.
 * 'flag' carries the allocation flags (GFP_*). */
extern void *dma_alloc_coherent(struct device *dev, size_t size,
		dma_addr_t *dma_handle, int flag);
/* Release a buffer obtained from dma_alloc_coherent(). */
extern void dma_free_coherent(struct device *dev, size_t size, void *cpu_addr,
		dma_addr_t dma_handle);
/* Map a kernel-virtual region for streaming DMA; returns the bus address
 * (DMA_ERROR_CODE on failure — see dma_mapping_error()). */
extern dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
		size_t size, enum dma_data_direction direction);
/* Tear down a mapping created by dma_map_single(). */
extern void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
		size_t size, enum dma_data_direction direction);
/* Map 'size' bytes starting at 'offset' within a page for streaming DMA. */
extern dma_addr_t dma_map_page(struct device *dev, struct page *page,
		unsigned long offset, size_t size,
		enum dma_data_direction direction);
/* Tear down a mapping created by dma_map_page(). */
extern void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
		size_t size, enum dma_data_direction direction);
/* Map a scatterlist of 'nents' entries; returns the number of DMA
 * segments actually used (may be fewer than nents). */
extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
		enum dma_data_direction direction);
/* Unmap a scatterlist; 'nhwentries' must match what dma_map_sg() mapped. */
extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
		int nhwentries, enum dma_data_direction direction);
41 dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, size_t size,
42 enum dma_data_direction direction)
44 BUG_ON(direction == DMA_NONE);
49 dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle, size_t size,
50 enum dma_data_direction direction)
52 BUG_ON(direction == DMA_NONE);
57 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
58 enum dma_data_direction direction)
60 BUG_ON(direction == DMA_NONE);
65 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems,
66 enum dma_data_direction direction)
68 BUG_ON(direction == DMA_NONE);
72 static inline int dma_mapping_error(dma_addr_t dma_addr)
74 return (dma_addr == DMA_ERROR_CODE);
/* Now for the API extensions over the pci_ one */

/* This platform has no separate non-coherent allocation path, so the
 * non-coherent entry points simply alias the coherent ones. */
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
/* DMA is always consistent here, for every device. */
#define dma_is_consistent(d) (1)
84 dma_get_cache_alignment(void)
86 /* no easy way to get cache size on all processors, so return
87 * the maximum possible, to be safe */
88 return (1 << L1_CACHE_SHIFT_MAX);
92 dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
93 unsigned long offset, size_t size,
94 enum dma_data_direction direction)
96 BUG_ON(direction == DMA_NONE);
101 dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
102 unsigned long offset, size_t size,
103 enum dma_data_direction direction)
105 BUG_ON(direction == DMA_NONE);
110 dma_cache_sync(void *vaddr, size_t size,
111 enum dma_data_direction direction)
113 BUG_ON(direction == DMA_NONE);
117 #endif /* _ASM_DMA_MAPPING_H */