
[RFC,net-next,mlxsw,14/14] selftests: forwarding: Add a test for testing lib.sh functionality

Message ID ee9b8ed1d9f3afad6cdd4e8b0ce7da54e40ac153.1711385796.git.petrm@nvidia.com (mailing list archive)
State New
Series selftests: Fixes for kernel CI

Commit Message

Petr Machata March 25, 2024, 5:29 p.m. UTC
Rerunning various scenarios to make sure lib.sh changes do not impact the
observable behavior is no fun. Add a selftest covering at least the bare basics
-- the mechanics of setting RET, retmsg, and EXIT_STATUS.

Since the selftest itself uses lib.sh, it would be possible to break lib.sh
in such a way that invalidates the results of the selftest. But since the
metatest only uses the bare basics (just pass/fail), hopefully such
fundamental breakage would still be noticed.

Signed-off-by: Petr Machata <petrm@nvidia.com>
---
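For reviewers' reference, the mechanics the metatest pokes at boil down to
roughly the following. This is a simplified sketch, paraphrased rather than
quoted from lib.sh (the real helpers also handle xfail/skip, result
precedence, and the ksft_* exit codes):

# Per-test state; each test resets RET (and retmsg) before its checks.
RET=0
retmsg=
# Suite-wide status that the script eventually exits with.
EXIT_STATUS=0

check_err()
{
	local err=$1
	local msg=$2

	# Latch the first failure seen since the test last reset RET.
	if ((RET == 0 && err != 0)); then
		RET=$err
		retmsg=$msg
	fi
}

log_test()
{
	local name=$1

	if ((RET == 0)); then
		printf "TEST: %-60s  [ OK ]\n" "$name"
	else
		printf "TEST: %-60s  [FAIL]\n" "$name"
		[[ -n $retmsg ]] && echo "	$retmsg"
		# A single failed test fails the whole run.
		EXIT_STATUS=1
	fi
}

The simulated checks and tests in the patch below drive this latching
behavior: RET and retmsg within a test, EXIT_STATUS across the whole run.
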
 .../testing/selftests/net/forwarding/Makefile |   1 +
 .../selftests/net/forwarding/lib_sh_test.sh   | 208 ++++++++++++++++++
 2 files changed, 209 insertions(+)
 create mode 100755 tools/testing/selftests/net/forwarding/lib_sh_test.sh

Patch

diff --git a/tools/testing/selftests/net/forwarding/Makefile b/tools/testing/selftests/net/forwarding/Makefile
index 56e3557ba8a6..fa7b59ff4029 100644
--- a/tools/testing/selftests/net/forwarding/Makefile
+++ b/tools/testing/selftests/net/forwarding/Makefile
@@ -37,6 +37,7 @@ TEST_PROGS = bridge_fdb_learning_limit.sh \
 	ipip_hier_gre_key.sh \
 	ipip_hier_gre_keys.sh \
 	ipip_hier_gre.sh \
+	lib_sh_test.sh \
 	local_termination.sh \
 	mirror_gre_bound.sh \
 	mirror_gre_bridge_1d.sh \
diff --git a/tools/testing/selftests/net/forwarding/lib_sh_test.sh b/tools/testing/selftests/net/forwarding/lib_sh_test.sh
new file mode 100755
index 000000000000..ff2accccaf4d
--- /dev/null
+++ b/tools/testing/selftests/net/forwarding/lib_sh_test.sh
@@ -0,0 +1,208 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-2.0
+
+# This tests the operation of lib.sh itself.
+
+ALL_TESTS="
+	test_ret
+	test_exit_status
+"
+NUM_NETIFS=0
+source lib.sh
+
+# Simulated checks.
+
+do_test()
+{
+	local msg=$1; shift
+
+	"$@"
+	check_err $? "$msg"
+}
+
+tpass()
+{
+	do_test "tpass" true
+}
+
+tfail()
+{
+	do_test "tfail" false
+}
+
+txfail()
+{
+	FAIL_TO_XFAIL=yes do_test "txfail" false
+}
+
+# Simulated tests.
+
+pass()
+{
+	RET=0
+	do_test "true" true
+	log_test "true"
+}
+
+fail()
+{
+	RET=0
+	do_test "false" false
+	log_test "false"
+}
+
+xfail()
+{
+	RET=0
+	FAIL_TO_XFAIL=yes do_test "xfalse" false
+	log_test "xfalse"
+}
+
+skip()
+{
+	RET=0
+	log_test_skip "skip"
+}
+
+slow_xfail()
+{
+	RET=0
+	xfail_on_slow do_test "slow_false" false
+	log_test "slow_false"
+}
+
+# lib.sh tests.
+
+ret_tests_run()
+{
+	local t
+
+	RET=0
+	retmsg=
+	for t in "$@"; do
+		$t
+	done
+	echo "$retmsg"
+	return $RET
+}
+
+ret_subtest()
+{
+	local expect_ret=$1; shift
+	local expect_retmsg=$1; shift
+	local -a tests=( "$@" )
+
+	local status_names=(pass fail xfail xpass skip)
+	local ret
+	local out
+
+	RET=0
+
+	# Run this in a subshell, so that our environment is intact.
+	out=$(ret_tests_run "${tests[@]}")
+	ret=$?
+
+	(( ret == expect_ret ))
+	check_err $? "RET=$ret expected $expect_ret"
+
+	[[ $out == $expect_retmsg ]]
+	check_err $? "retmsg=$out expected $expect_retmsg"
+
+	log_test "RET $(echo ${tests[@]}) -> ${status_names[$ret]}"
+}
+
+test_ret()
+{
+	ret_subtest $ksft_pass ""
+
+	ret_subtest $ksft_pass "" tpass
+	ret_subtest $ksft_fail "tfail" tfail
+	ret_subtest $ksft_xfail "txfail" txfail
+
+	ret_subtest $ksft_pass "" tpass tpass
+	ret_subtest $ksft_fail "tfail" tpass tfail
+	ret_subtest $ksft_xfail "txfail" tpass txfail
+
+	ret_subtest $ksft_fail "tfail" tfail tpass
+	ret_subtest $ksft_xfail "txfail" txfail tpass
+
+	ret_subtest $ksft_fail "tfail" tfail tfail
+	ret_subtest $ksft_fail "tfail" tfail txfail
+
+	ret_subtest $ksft_fail "tfail" txfail tfail
+
+	ret_subtest $ksft_xfail "txfail" txfail txfail
+}
+
+exit_status_tests_run()
+{
+	EXIT_STATUS=0
+	tests_run > /dev/null
+	return $EXIT_STATUS
+}
+
+exit_status_subtest()
+{
+	local expect_exit_status=$1; shift
+	local tests=$1; shift
+	local what=$1; shift
+
+	local status_names=(pass fail xfail xpass skip)
+	local exit_status
+	local out
+
+	RET=0
+
+	# Run this in a subshell, so that our environment is intact.
+	out=$(TESTS="$tests" exit_status_tests_run)
+	exit_status=$?
+
+	(( exit_status == expect_exit_status ))
+	check_err $? "EXIT_STATUS=$exit_status, expected $expect_exit_status"
+
+	log_test "EXIT_STATUS $tests$what -> ${status_names[$exit_status]}"
+}
+
+test_exit_status()
+{
+	exit_status_subtest $ksft_pass ":"
+
+	exit_status_subtest $ksft_pass "pass"
+	exit_status_subtest $ksft_fail "fail"
+	exit_status_subtest $ksft_pass "xfail"
+	exit_status_subtest $ksft_skip "skip"
+
+	exit_status_subtest $ksft_pass "pass pass"
+	exit_status_subtest $ksft_fail "pass fail"
+	exit_status_subtest $ksft_pass "pass xfail"
+	exit_status_subtest $ksft_skip "pass skip"
+
+	exit_status_subtest $ksft_fail "fail pass"
+	exit_status_subtest $ksft_pass "xfail pass"
+	exit_status_subtest $ksft_skip "skip pass"
+
+	exit_status_subtest $ksft_fail "fail fail"
+	exit_status_subtest $ksft_fail "fail xfail"
+	exit_status_subtest $ksft_fail "fail skip"
+
+	exit_status_subtest $ksft_fail "xfail fail"
+	exit_status_subtest $ksft_fail "skip fail"
+
+	exit_status_subtest $ksft_pass "xfail xfail"
+	exit_status_subtest $ksft_skip "xfail skip"
+	exit_status_subtest $ksft_skip "skip xfail"
+
+	exit_status_subtest $ksft_skip "skip skip"
+
+	KSFT_MACHINE_SLOW=yes \
+		exit_status_subtest $ksft_pass "slow_xfail" ": slow"
+
+	KSFT_MACHINE_SLOW=no \
+		exit_status_subtest $ksft_fail "slow_xfail" ": fast"
+}
+
+trap pre_cleanup EXIT
+
+tests_run
+
+exit $EXIT_STATUS
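
The metatest needs no test interfaces (NUM_NETIFS=0), so it can be run
directly like any other forwarding selftest, assuming the usual lib.sh
prerequisites are installed:

	$ cd tools/testing/selftests/net/forwarding
	$ ./lib_sh_test.sh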