[RFC,V1,1/3] mm/damon: rename damon_evenly_split_region()

Message ID 537ed6bc00ea35dbd73270477d77707891e97b0c.1647378112.git.xhao@linux.alibaba.com (mailing list archive)
State New
Series mm/damon: Add CMA monitor support

Commit Message

haoxin March 15, 2022, 4:37 p.m. UTC
Rename damon_va_evenly_split_region() to damon_evenly_split_region() so
that it can also be called from the physical address space code, and
move it to the "ops-common.c" file.
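
Example (illustrative only, not part of this series): how a physical
address space user could reuse the shared helper to set up equally
sized initial monitoring regions over one range. The function name
damon_pa_init_evenly_split() and its parameters below are hypothetical:

	/*
	 * Hypothetical sketch, not part of this patch: the helper name
	 * and parameters are illustrative only.
	 */
	static int damon_pa_init_evenly_split(struct damon_target *t,
			unsigned long start, unsigned long end,
			unsigned int nr_pieces)
	{
		struct damon_region *r;

		/* Cover the whole physical address range with one region. */
		r = damon_new_region(start, end);
		if (!r)
			return -ENOMEM;
		damon_add_region(r, t);

		/* Split it into nr_pieces equally sized regions. */
		return damon_evenly_split_region(t, r, nr_pieces);
	}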

Signed-off-by: Xin Hao <xhao@linux.alibaba.com>
---
 mm/damon/ops-common.c | 39 +++++++++++++++++++++++++++++++++++++++
 mm/damon/ops-common.h |  3 +++
 mm/damon/vaddr-test.h |  6 +++---
 mm/damon/vaddr.c      | 41 +----------------------------------------
 4 files changed, 46 insertions(+), 43 deletions(-)

Patch

diff --git a/mm/damon/ops-common.c b/mm/damon/ops-common.c
index e346cc10d143..fd5e98005358 100644
--- a/mm/damon/ops-common.c
+++ b/mm/damon/ops-common.c
@@ -131,3 +131,42 @@  int damon_pageout_score(struct damon_ctx *c, struct damon_region *r,
 	/* Return coldness of the region */
 	return DAMOS_MAX_SCORE - hotness;
 }
+
+/*
+ * Size-evenly split a region into 'nr_pieces' small regions
+ *
+ * Returns 0 on success, or negative error code otherwise.
+ */
+int damon_evenly_split_region(struct damon_target *t,
+		struct damon_region *r, unsigned int nr_pieces)
+{
+	unsigned long sz_orig, sz_piece, orig_end;
+	struct damon_region *n = NULL, *next;
+	unsigned long start;
+
+	if (!r || !nr_pieces)
+		return -EINVAL;
+
+	orig_end = r->ar.end;
+	sz_orig = r->ar.end - r->ar.start;
+	sz_piece = ALIGN_DOWN(sz_orig / nr_pieces, DAMON_MIN_REGION);
+
+	if (!sz_piece)
+		return -EINVAL;
+
+	r->ar.end = r->ar.start + sz_piece;
+	next = damon_next_region(r);
+	for (start = r->ar.end; start + sz_piece <= orig_end;
+			start += sz_piece) {
+		n = damon_new_region(start, start + sz_piece);
+		if (!n)
+			return -ENOMEM;
+		damon_insert_region(n, r, next, t);
+		r = n;
+	}
+	/* complement last region for possible rounding error */
+	if (n)
+		n->ar.end = orig_end;
+
+	return 0;
+}
diff --git a/mm/damon/ops-common.h b/mm/damon/ops-common.h
index e790cb5f8fe0..fd441016a2ae 100644
--- a/mm/damon/ops-common.h
+++ b/mm/damon/ops-common.h
@@ -14,3 +14,6 @@  void damon_pmdp_mkold(pmd_t *pmd, struct mm_struct *mm, unsigned long addr);
 
 int damon_pageout_score(struct damon_ctx *c, struct damon_region *r,
 			struct damos *s);
+
+int damon_evenly_split_region(struct damon_target *t,
+		struct damon_region *r, unsigned int nr_pieces);
diff --git a/mm/damon/vaddr-test.h b/mm/damon/vaddr-test.h
index 1a55bb6c36c3..161906ab66a7 100644
--- a/mm/damon/vaddr-test.h
+++ b/mm/damon/vaddr-test.h
@@ -256,7 +256,7 @@  static void damon_test_split_evenly_fail(struct kunit *test,
 
 	damon_add_region(r, t);
 	KUNIT_EXPECT_EQ(test,
-			damon_va_evenly_split_region(t, r, nr_pieces), -EINVAL);
+			damon_evenly_split_region(t, r, nr_pieces), -EINVAL);
 	KUNIT_EXPECT_EQ(test, damon_nr_regions(t), 1u);
 
 	damon_for_each_region(r, t) {
@@ -277,7 +277,7 @@  static void damon_test_split_evenly_succ(struct kunit *test,
 
 	damon_add_region(r, t);
 	KUNIT_EXPECT_EQ(test,
-			damon_va_evenly_split_region(t, r, nr_pieces), 0);
+			damon_evenly_split_region(t, r, nr_pieces), 0);
 	KUNIT_EXPECT_EQ(test, damon_nr_regions(t), nr_pieces);
 
 	damon_for_each_region(r, t) {
@@ -294,7 +294,7 @@  static void damon_test_split_evenly_succ(struct kunit *test,
 
 static void damon_test_split_evenly(struct kunit *test)
 {
-	KUNIT_EXPECT_EQ(test, damon_va_evenly_split_region(NULL, NULL, 5),
+	KUNIT_EXPECT_EQ(test, damon_evenly_split_region(NULL, NULL, 5),
 			-EINVAL);
 
 	damon_test_split_evenly_fail(test, 0, 100, 0);
diff --git a/mm/damon/vaddr.c b/mm/damon/vaddr.c
index b2ec0aa1ff45..0870e178b1b8 100644
--- a/mm/damon/vaddr.c
+++ b/mm/damon/vaddr.c
@@ -56,45 +56,6 @@  static struct mm_struct *damon_get_mm(struct damon_target *t)
  * Functions for the initial monitoring target regions construction
  */
 
-/*
- * Size-evenly split a region into 'nr_pieces' small regions
- *
- * Returns 0 on success, or negative error code otherwise.
- */
-static int damon_va_evenly_split_region(struct damon_target *t,
-		struct damon_region *r, unsigned int nr_pieces)
-{
-	unsigned long sz_orig, sz_piece, orig_end;
-	struct damon_region *n = NULL, *next;
-	unsigned long start;
-
-	if (!r || !nr_pieces)
-		return -EINVAL;
-
-	orig_end = r->ar.end;
-	sz_orig = r->ar.end - r->ar.start;
-	sz_piece = ALIGN_DOWN(sz_orig / nr_pieces, DAMON_MIN_REGION);
-
-	if (!sz_piece)
-		return -EINVAL;
-
-	r->ar.end = r->ar.start + sz_piece;
-	next = damon_next_region(r);
-	for (start = r->ar.end; start + sz_piece <= orig_end;
-			start += sz_piece) {
-		n = damon_new_region(start, start + sz_piece);
-		if (!n)
-			return -ENOMEM;
-		damon_insert_region(n, r, next, t);
-		r = n;
-	}
-	/* complement last region for possible rounding error */
-	if (n)
-		n->ar.end = orig_end;
-
-	return 0;
-}
-
 static unsigned long sz_range(struct damon_addr_range *r)
 {
 	return r->end - r->start;
@@ -265,7 +226,7 @@  static void __damon_va_init_regions(struct damon_ctx *ctx,
 		damon_add_region(r, t);
 
 		nr_pieces = (regions[i].end - regions[i].start) / sz;
-		damon_va_evenly_split_region(t, r, nr_pieces);
+		damon_evenly_split_region(t, r, nr_pieces);
 	}
 }
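
Note on the splitting arithmetic (unchanged by the move): sz_piece is the
piece size rounded down to a multiple of DAMON_MIN_REGION, and the last
region is extended to absorb any remainder. For example, splitting a
region of 10 * DAMON_MIN_REGION bytes into 3 pieces gives sz_piece =
3 * DAMON_MIN_REGION, so the resulting regions are 3, 3 and
4 * DAMON_MIN_REGION bytes long.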