#ifndef __LINUX_SWIOTLB_XEN_H
#define __LINUX_SWIOTLB_XEN_H

#include <linux/dma-direction.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>

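/*
 * Set up the Xen software IO TLB: a pool of machine-contiguous bounce
 * buffers used to stage DMA for pages the device cannot reach directly.
 * (Hedged note, not authoritative kernel-doc: @verbose appears to control
 * boot-time reporting and @early selects the early-boot allocation path.)
 */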
extern int xen_swiotlb_init(int verbose, bool early);

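/*
 * Coherent (consistent) DMA allocations: return a CPU virtual address for
 * @size bytes and report the device-visible bus address via @dma_handle;
 * the free routine undoes a matching allocation.
 */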
extern void
*xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
			    dma_addr_t *dma_handle, gfp_t flags,
			    unsigned long attrs);

extern void
xen_swiotlb_free_coherent(struct device *hwdev, size_t size,
			  void *vaddr, dma_addr_t dma_handle,
			  unsigned long attrs);

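/*
 * Streaming mappings of a single page: map for DMA in direction @dir,
 * bouncing through the swiotlb pool when the page is not directly
 * device-addressable, and unmap (copying back as needed) when the
 * transfer is done.
 */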
extern dma_addr_t xen_swiotlb_map_page(struct device *dev, struct page *page,
				       unsigned long offset, size_t size,
				       enum dma_data_direction dir,
				       unsigned long attrs);

extern void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
				   size_t size, enum dma_data_direction dir,
				   unsigned long attrs);
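
/*
 * Streaming mappings of a scatter-gather list: the map routine returns the
 * number of entries actually mapped (0 on failure); the unmap routine must
 * be called with the same @nelems that was passed when mapping.
 */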
extern int
xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
			 int nelems, enum dma_data_direction dir,
			 unsigned long attrs);

extern void
xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
			   int nelems, enum dma_data_direction dir,
			   unsigned long attrs);

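/*
 * Ownership transfers for streaming mappings: the _for_cpu variants hand a
 * buffer back to the CPU (copying bounce-buffer contents as needed), the
 * _for_device variants hand it back to the device before further DMA.
 */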
extern void
xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
				size_t size, enum dma_data_direction dir);

extern void
xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
			    int nelems, enum dma_data_direction dir);

extern void
xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
				   size_t size, enum dma_data_direction dir);

extern void
xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
			       int nelems, enum dma_data_direction dir);

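/*
 * Report whether DMA to/from this device is possible with the given
 * address @mask.
 */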
extern int
xen_swiotlb_dma_supported(struct device *hwdev, u64 mask);

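/*
 * Validate and install a new DMA addressing mask for @dev.
 */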
extern int
xen_swiotlb_set_dma_mask(struct device *dev, u64 dma_mask);

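/*
 * Map a coherent allocation into user space through @vma.
 */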
extern int
xen_swiotlb_dma_mmap(struct device *dev, struct vm_area_struct *vma,
		     void *cpu_addr, dma_addr_t dma_addr, size_t size,
		     unsigned long attrs);

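/*
 * Build an sg_table describing a coherent allocation.
 */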
extern int
xen_swiotlb_get_sgtable(struct device *dev, struct sg_table *sgt,
			void *cpu_addr, dma_addr_t handle, size_t size,
			unsigned long attrs);
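
/*
 * Hedged usage sketch (illustrative, not part of this header): these entry
 * points are normally wired up by architecture code as a generic DMA ops
 * table rather than called directly by drivers, roughly along the lines of:
 *
 *	static const struct dma_map_ops xen_swiotlb_dma_ops = {
 *		.alloc			= xen_swiotlb_alloc_coherent,
 *		.free			= xen_swiotlb_free_coherent,
 *		.map_page		= xen_swiotlb_map_page,
 *		.unmap_page		= xen_swiotlb_unmap_page,
 *		.map_sg			= xen_swiotlb_map_sg_attrs,
 *		.unmap_sg		= xen_swiotlb_unmap_sg_attrs,
 *		.sync_single_for_cpu	= xen_swiotlb_sync_single_for_cpu,
 *		.sync_single_for_device	= xen_swiotlb_sync_single_for_device,
 *		.sync_sg_for_cpu	= xen_swiotlb_sync_sg_for_cpu,
 *		.sync_sg_for_device	= xen_swiotlb_sync_sg_for_device,
 *		.dma_supported		= xen_swiotlb_dma_supported,
 *		.mmap			= xen_swiotlb_dma_mmap,
 *		.get_sgtable		= xen_swiotlb_get_sgtable,
 *	};
 *
 * Field names follow the generic struct dma_map_ops layout of this era and
 * are given for orientation only.
 */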
#endif /* __LINUX_SWIOTLB_XEN_H */