Message-ID: <20230720173956.3674987-3-glider@google.com>
Date: Thu, 20 Jul 2023 19:39:53 +0200
From: Alexander Potapenko <glider@...gle.com>
To: glider@...gle.com, catalin.marinas@....com, will@...nel.org,
pcc@...gle.com, andreyknvl@...il.com,
andriy.shevchenko@...ux.intel.com, linux@...musvillemoes.dk,
yury.norov@...il.com
Cc: linux-kernel@...r.kernel.org, linux-arm-kernel@...ts.infradead.org,
eugenis@...gle.com, syednwaris@...il.com, william.gray@...aro.org
Subject: [PATCH v4 2/5] lib/test_bitmap: add tests for bitmap_{set,get}_value()
Add basic tests ensuring that values can be written at arbitrary positions
in the bitmap, including those spanning into the adjacent unsigned longs,
and read back correctly.
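
For illustration, a minimal sketch of the cross-word case covered here,
assuming the bitmap_write()/bitmap_read() helpers introduced in patch 1/5
of this series (the sketch itself is not part of the patch):

    DECLARE_BITMAP(map, 2 * BITS_PER_LONG);

    bitmap_zero(map, 2 * BITS_PER_LONG);
    /* Write a 5-bit value straddling the boundary between the two words. */
    bitmap_write(map, 0b10101UL, BITS_PER_LONG - 2, 5);
    /* Reading it back must return the same value despite the split. */
    WARN_ON(bitmap_read(map, BITS_PER_LONG - 2, 5) != 0b10101UL);
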
Signed-off-by: Alexander Potapenko <glider@...gle.com>
Reviewed-by: Andy Shevchenko <andriy.shevchenko@...ux.intel.com>
---
This patch was previously called
"lib/test_bitmap: add tests for bitmap_{set,get}_value_unaligned"
(https://lore.kernel.org/lkml/20230713125706.2884502-3-glider@google.com/)
v4:
- Address comments by Andy Shevchenko: added Reviewed-by: and a link to
the previous discussion
- Address comments by Yury Norov:
- expand the bitmap to catch more corner cases
- add code testing that bitmap_set_value() does not touch adjacent
bits
- add code testing the nbits==0 case
- rename bitmap_{get,set}_value() to bitmap_{read,write}()
v3:
- switch to using bitmap_{set,get}_value()
- change the expected bit pattern in test_set_get_value(),
as the test was incorrectly assuming 0 is the LSB (see the sketch below).
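
A small sketch of the semantics the updated test expects (values are
written LSB first, so bit 'start' receives the least significant bit of
the value, and an nbits==0 write is a no-op), again assuming the patch
1/5 helpers and given here only for reference:

    DECLARE_BITMAP(map, BITS_PER_LONG);

    bitmap_zero(map, BITS_PER_LONG);
    bitmap_write(map, 0b10101UL, 0, 5);
    /* Bit 0 holds the least significant bit of the written value. */
    WARN_ON(!test_bit(0, map) || test_bit(1, map) || !test_bit(2, map));
    /* An nbits==0 write must leave the bitmap untouched. */
    bitmap_write(map, ~0UL, 0, 0);
    WARN_ON(bitmap_read(map, 0, 5) != 0b10101UL);
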
---
lib/test_bitmap.c | 81 +++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 81 insertions(+)
diff --git a/lib/test_bitmap.c b/lib/test_bitmap.c
index 187f5b2db4cf1..601000c7799df 100644
--- a/lib/test_bitmap.c
+++ b/lib/test_bitmap.c
@@ -71,6 +71,17 @@ __check_eq_uint(const char *srcfile, unsigned int line,
return true;
}
+static bool __init
+__check_eq_ulong(const char *srcfile, unsigned int line,
+ const unsigned long exp_ulong, unsigned long x)
+{
+ if (exp_ulong != x) {
+ pr_err("[%s:%u] expected %lu, got %lu\n",
+ srcfile, line, exp_ulong, x);
+ return false;
+ }
+ return true;
+}
static bool __init
__check_eq_bitmap(const char *srcfile, unsigned int line,
@@ -186,6 +197,7 @@ __check_eq_str(const char *srcfile, unsigned int line,
})
#define expect_eq_uint(...) __expect_eq(uint, ##__VA_ARGS__)
+#define expect_eq_ulong(...) __expect_eq(ulong, ##__VA_ARGS__)
#define expect_eq_bitmap(...) __expect_eq(bitmap, ##__VA_ARGS__)
#define expect_eq_pbl(...) __expect_eq(pbl, ##__VA_ARGS__)
#define expect_eq_u32_array(...) __expect_eq(u32_array, ##__VA_ARGS__)
@@ -1222,6 +1234,73 @@ static void __init test_bitmap_const_eval(void)
BUILD_BUG_ON(~var != ~BIT(25));
}
+/*
+ * The test bitmap should be big enough to cover the cases when start is not
+ * in the first word and start+nbits lands in the following word.
+ */
+#define TEST_BIT_LEN (BITS_PER_LONG * 3)
+#define TEST_BYTE_LEN (BITS_TO_LONGS(TEST_BIT_LEN) * sizeof(unsigned long))
+static void __init test_set_get_value(void)
+{
+ DECLARE_BITMAP(bitmap, TEST_BIT_LEN);
+ DECLARE_BITMAP(exp_bitmap, TEST_BIT_LEN);
+ /* Prevent constant folding. */
+ volatile unsigned long zero_bits = 0;
+ unsigned long val, bit;
+ int i;
+
+ /* Setting/getting zero bits should not crash the kernel. */
+ bitmap_write(NULL, 0, 0, zero_bits);
+ val = bitmap_read(NULL, 0, zero_bits);
+ expect_eq_ulong(0, val);
+
+ /*
+ * Ensure that bitmap_read() reads the same value that was previously
+ * written, and two consecutive values are correctly merged.
+ * The resulting bit pattern is asymmetric to rule out possible issues
+ * with bit numeration order.
+ */
+ for (i = 0; i < TEST_BIT_LEN - 7; i++) {
+ bitmap_zero(bitmap, TEST_BIT_LEN);
+ bitmap_write(bitmap, 0b10101UL, i, 5);
+ val = bitmap_read(bitmap, i, 5);
+ expect_eq_ulong(0b10101UL, val);
+ bitmap_write(bitmap, 0b101UL, i + 5, 3);
+ val = bitmap_read(bitmap, i + 5, 3);
+ expect_eq_ulong(0b101UL, val);
+ val = bitmap_read(bitmap, i, 8);
+ expect_eq_ulong(0b10110101UL, val);
+ }
+
+ /*
+ * Check that setting a single bit does not accidentally touch the
+ * adjacent bits.
+ */
+ for (i = 0; i < TEST_BIT_LEN; i++) {
+ /*
+ * A 0b10101010 pattern to catch both 0s flipped to 1s and vice
+ * versa.
+ */
+ memset(bitmap, 0xaa, TEST_BYTE_LEN);
+ memset(exp_bitmap, 0xaa, TEST_BYTE_LEN);
+ for (bit = 0; bit <= 1; bit++) {
+ bitmap_write(bitmap, bit, i, 1);
+ __assign_bit(i, exp_bitmap, bit);
+ expect_eq_bitmap(exp_bitmap, bitmap, TEST_BIT_LEN);
+ }
+ }
+
+ /* Ensure setting 0 bits does not change anything. */
+ memset(bitmap, 0xaa, TEST_BYTE_LEN);
+ memset(exp_bitmap, 0xaa, TEST_BYTE_LEN);
+ for (i = 0; i < TEST_BIT_LEN; i++) {
+ bitmap_write(bitmap, ~0UL, i, 0);
+ expect_eq_bitmap(exp_bitmap, bitmap, TEST_BIT_LEN);
+ }
+}
+#undef TEST_BYTE_LEN
+#undef TEST_BIT_LEN
+
static void __init selftest(void)
{
test_zero_clear();
@@ -1249,6 +1328,8 @@ static void __init selftest(void)
test_for_each_clear_bitrange_from();
test_for_each_set_clump8();
test_for_each_set_bit_wrap();
+
+ test_set_get_value();
}
KSTM_MODULE_LOADERS(test_bitmap);
--
2.41.0.487.g6d72f3e995-goog