/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _MM_SWAP_H
#define _MM_SWAP_H

struct mempolicy;

#ifdef CONFIG_SWAP
#include <linux/blk_types.h> /* for bio_end_io_t */

/* linux/mm/page_io.c */
int sio_pool_init(void);
struct swap_iocb;
void swap_read_folio(struct folio *folio, bool do_poll,
		struct swap_iocb **plug);
void __swap_read_unplug(struct swap_iocb *plug);
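/* Submit any swap-in I/O batched on @plug; a no-op when nothing was plugged. */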
static inline void swap_read_unplug(struct swap_iocb *plug)
{
	if (unlikely(plug))
		__swap_read_unplug(plug);
}
void swap_write_unplug(struct swap_iocb *sio);
int swap_writepage(struct page *page, struct writeback_control *wbc);
void __swap_writepage(struct folio *folio, struct writeback_control *wbc);

/* linux/mm/swap_state.c */
/* One swap address space for each 64M swap space */
#define SWAP_ADDRESS_SPACE_SHIFT	14
#define SWAP_ADDRESS_SPACE_PAGES	(1 << SWAP_ADDRESS_SPACE_SHIFT)
extern struct address_space *swapper_spaces[];
#define swap_address_space(entry)			    \
	(&swapper_spaces[swp_type(entry)][swp_offset(entry) \
		>> SWAP_ADDRESS_SPACE_SHIFT])
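/*
 * Worked example (added note, assumes 4 KiB pages): 1 << 14 = 16384 pages
 * per address space, i.e. 16384 * 4 KiB = 64 MiB, matching the comment
 * above. An entry with swp_type() == 1 and swp_offset() == 0x12345 is
 * handled by swapper_spaces[1][0x12345 >> 14], i.e. swapper_spaces[1][4].
 */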

void show_swap_cache_info(void);
bool add_to_swap(struct folio *folio);
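/*
 * Shadow entries preserve workingset information after a folio leaves the
 * swap cache; the shadow/shadowp arguments below carry them.
 */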
void *get_shadow_from_swap_cache(swp_entry_t entry);
int add_to_swap_cache(struct folio *folio, swp_entry_t entry,
		      gfp_t gfp, void **shadowp);
void __delete_from_swap_cache(struct folio *folio,
			      swp_entry_t entry, void *shadow);
void delete_from_swap_cache(struct folio *folio);
void clear_shadow_from_swap_cache(int type, unsigned long begin,
				  unsigned long end);
void swapcache_clear(struct swap_info_struct *si, swp_entry_t entry);
struct folio *swap_cache_get_folio(swp_entry_t entry,
		struct vm_area_struct *vma, unsigned long addr);
struct folio *filemap_get_incore_folio(struct address_space *mapping,
		pgoff_t index);

struct folio *read_swap_cache_async(swp_entry_t entry, gfp_t gfp_mask,
		struct vm_area_struct *vma, unsigned long addr,
		struct swap_iocb **plug);
struct folio *__read_swap_cache_async(swp_entry_t entry, gfp_t gfp_flags,
		struct mempolicy *mpol, pgoff_t ilx, bool *new_page_allocated,
		bool skip_if_exists);
struct folio *swap_cluster_readahead(swp_entry_t entry, gfp_t flag,
		struct mempolicy *mpol, pgoff_t ilx);
struct page *swapin_readahead(swp_entry_t entry, gfp_t flag,
			      struct vm_fault *vmf);

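/* Return the SWP_* flags of the swap device backing @folio. */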
static inline unsigned int folio_swap_flags(struct folio *folio)
{
	return swp_swap_info(folio->swap)->flags;
}
#else /* CONFIG_SWAP */
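/*
 * !CONFIG_SWAP stubs: no-op and NULL/0-returning fallbacks so callers
 * need no #ifdef guards of their own.
 */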
struct swap_iocb;
static inline void swap_read_folio(struct folio *folio, bool do_poll,
		struct swap_iocb **plug)
{
}
static inline void swap_write_unplug(struct swap_iocb *sio)
{
}

static inline struct address_space *swap_address_space(swp_entry_t entry)
{
	return NULL;
}

static inline void show_swap_cache_info(void)
{
}

static inline struct folio *swap_cluster_readahead(swp_entry_t entry,
		gfp_t gfp_mask, struct mempolicy *mpol, pgoff_t ilx)
{
	return NULL;
}

static inline struct page *swapin_readahead(swp_entry_t swp, gfp_t gfp_mask,
			struct vm_fault *vmf)
{
	return NULL;
}

static inline int swap_writepage(struct page *p, struct writeback_control *wbc)
{
	return 0;
}

static inline void swapcache_clear(struct swap_info_struct *si, swp_entry_t entry)
{
}

static inline struct folio *swap_cache_get_folio(swp_entry_t entry,
		struct vm_area_struct *vma, unsigned long addr)
{
	return NULL;
}

static inline
struct folio *filemap_get_incore_folio(struct address_space *mapping,
		pgoff_t index)
{
	return filemap_get_folio(mapping, index);
}

static inline bool add_to_swap(struct folio *folio)
{
	return false;
}

static inline void *get_shadow_from_swap_cache(swp_entry_t entry)
{
	return NULL;
}

static inline int add_to_swap_cache(struct folio *folio, swp_entry_t entry,
					gfp_t gfp_mask, void **shadowp)
{
	return -1;
}

static inline void __delete_from_swap_cache(struct folio *folio,
					swp_entry_t entry, void *shadow)
{
}

static inline void delete_from_swap_cache(struct folio *folio)
{
}

static inline void clear_shadow_from_swap_cache(int type, unsigned long begin,
					unsigned long end)
{
}

static inline unsigned int folio_swap_flags(struct folio *folio)
{
	return 0;
}
#endif /* CONFIG_SWAP */
#endif /* _MM_SWAP_H */