path: root/include/linux/swap.h
author    Barry Song <v-songbaohua@oppo.com>    2024-05-29 20:28:20 +1200
committer Andrew Morton <akpm@linux-foundation.org>    2024-07-03 19:30:01 -0700
commit    54f7a49c20ebb5189980c53e6e66709d22bee572 (patch)
tree      93191cc7827d7a38fc42343a404cc69fd98b1b85 /include/linux/swap.h
parent    ebfba0045176cb013f49cb3e5bd9f0b16eba203c (diff)
mm: remove the implementation of swap_free() and always use swap_free_nr()
To streamline maintenance efforts, we propose removing the implementation
of swap_free().  Instead, we can simply invoke swap_free_nr() with nr set
to 1.  swap_free_nr() is designed with a bitmap consisting of only one
long, resulting in overhead that can be ignored for cases where nr equals 1.

A prime candidate for leveraging swap_free_nr() lies within
kernel/power/swap.c.  Implementing this change facilitates the adoption
of batch processing for hibernation.

Link: https://lkml.kernel.org/r/20240529082824.150954-3-21cnbao@gmail.com
Signed-off-by: Barry Song <v-songbaohua@oppo.com>
Suggested-by: "Huang, Ying" <ying.huang@intel.com>
Reviewed-by: "Huang, Ying" <ying.huang@intel.com>
Acked-by: Chris Li <chrisl@kernel.org>
Reviewed-by: Ryan Roberts <ryan.roberts@arm.com>
Cc: "Rafael J. Wysocki" <rafael@kernel.org>
Cc: Pavel Machek <pavel@ucw.cz>
Cc: Len Brown <len.brown@intel.com>
Cc: Hugh Dickins <hughd@google.com>
Cc: Christoph Hellwig <hch@infradead.org>
Cc: Andreas Larsson <andreas@gaisler.com>
Cc: Baolin Wang <baolin.wang@linux.alibaba.com>
Cc: Chuanhua Han <hanchuanhua@oppo.com>
Cc: David Hildenbrand <david@redhat.com>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Gao Xiang <xiang@kernel.org>
Cc: Johannes Weiner <hannes@cmpxchg.org>
Cc: Kairui Song <kasong@tencent.com>
Cc: Khalid Aziz <khalid.aziz@oracle.com>
Cc: Matthew Wilcox (Oracle) <willy@infradead.org>
Cc: Suren Baghdasaryan <surenb@google.com>
Cc: Yosry Ahmed <yosryahmed@google.com>
Cc: Yu Zhao <yuzhao@google.com>
Cc: Zi Yan <ziy@nvidia.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
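For illustration only, not part of this patch: after this change swap_free() is a thin inline wrapper around swap_free_nr(entry, 1), so a caller that holds a run of nr contiguous swap entries (such as the hibernation path in kernel/power/swap.c once it is converted to batching) can free the whole run with one call.  The helper name below is hypothetical.

/*
 * Hypothetical sketch, not taken from the kernel tree: free 'nr'
 * contiguous swap entries starting at 'first' with a single call to
 * swap_free_nr() instead of invoking swap_free() once per entry.
 */
static inline void example_free_swap_run(swp_entry_t first, int nr)
{
	if (nr == 1)
		swap_free(first);	/* itself now just swap_free_nr(first, 1) */
	else
		swap_free_nr(first, nr);
}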
Diffstat (limited to 'include/linux/swap.h')
-rw-r--r--	include/linux/swap.h	10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/include/linux/swap.h b/include/linux/swap.h
index 0f41fe49c9dc..d33ce740b695 100644
--- a/include/linux/swap.h
+++ b/include/linux/swap.h
@@ -477,7 +477,6 @@ extern int add_swap_count_continuation(swp_entry_t, gfp_t);
extern void swap_shmem_alloc(swp_entry_t);
extern int swap_duplicate(swp_entry_t);
extern int swapcache_prepare(swp_entry_t);
-extern void swap_free(swp_entry_t);
extern void swap_free_nr(swp_entry_t entry, int nr_pages);
extern void swapcache_free_entries(swp_entry_t *entries, int n);
extern void free_swap_and_cache_nr(swp_entry_t entry, int nr);
@@ -556,10 +555,6 @@ static inline int swapcache_prepare(swp_entry_t swp)
return 0;
}
-static inline void swap_free(swp_entry_t swp)
-{
-}
-
static inline void swap_free_nr(swp_entry_t entry, int nr_pages)
{
}
@@ -608,6 +603,11 @@ static inline void free_swap_and_cache(swp_entry_t entry)
free_swap_and_cache_nr(entry, 1);
}
+static inline void swap_free(swp_entry_t entry)
+{
+ swap_free_nr(entry, 1);
+}
+
#ifdef CONFIG_MEMCG
static inline int mem_cgroup_swappiness(struct mem_cgroup *memcg)
{