@@ -105,7 +105,10 @@ void lruvec_add_folio(struct lruvec *lruvec, struct folio *folio)
static __always_inline void add_page_to_lru_list(struct page *page,
struct lruvec *lruvec)
{
- lruvec_add_folio(lruvec, page_folio(page));
+ struct folio *folio = page_folio(page);
+
+ VM_BUG_ON_FOLIO(!folio_matches_lruvec(folio, lruvec), folio);
+ lruvec_add_folio(lruvec, folio);
}
static __always_inline
@@ -121,7 +124,10 @@ void lruvec_add_folio_tail(struct lruvec *lruvec, struct folio *folio)
static __always_inline void add_page_to_lru_list_tail(struct page *page,
struct lruvec *lruvec)
{
- lruvec_add_folio_tail(lruvec, page_folio(page));
+ struct folio *folio = page_folio(page);
+
+ VM_BUG_ON_FOLIO(!folio_matches_lruvec(folio, lruvec), folio);
+ lruvec_add_folio_tail(lruvec, folio);
}
static __always_inline
@@ -135,7 +141,10 @@ void lruvec_del_folio(struct lruvec *lruvec, struct folio *folio)
static __always_inline void del_page_from_lru_list(struct page *page,
struct lruvec *lruvec)
{
- lruvec_del_folio(lruvec, page_folio(page));
+ struct folio *folio = page_folio(page);
+
+ VM_BUG_ON_FOLIO(!folio_matches_lruvec(folio, lruvec), folio);
+ lruvec_del_folio(lruvec, folio);
}
#ifdef CONFIG_ANON_VMA_NAME
@@ -2356,7 +2356,6 @@ static unsigned int move_pages_to_lru(struct list_head *list)
continue;
}
- VM_BUG_ON_PAGE(!folio_matches_lruvec(folio, lruvec), page);
add_page_to_lru_list(page, lruvec);
nr_pages = thp_nr_pages(page);
nr_moved += nr_pages;
We need to make sure that the page is deleted from or added to the
correct lruvec list. So add a VM_BUG_ON_FOLIO() to catch invalid users.

Signed-off-by: Muchun Song <songmuchun@bytedance.com>
---
 include/linux/mm_inline.h | 15 ++++++++++++---
 mm/vmscan.c               |  1 -
 2 files changed, 12 insertions(+), 4 deletions(-)