2 # SPDX-License-Identifier: GPL-2.0-only
4 # ftracetest - Ftrace test shell scripts
6 # Copyright (C) Hitachi Ltd., 2014
7 # Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
# usage errno [message] -- print an optional message plus the option help,
# then exit with the given status.
# NOTE(review): reconstructed from a mangled paste; the options-header line
# (original line 13) and the original closing lines are missing from this
# chunk -- the trailing "exit $1; }" is restored per the "# errno" contract.
usage() { # errno [message]
  [ ! -z "$2" ] && echo $2
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " -h|--help Show help message"
  echo " -k|--keep Keep passed test logs"
  echo " -v|--verbose Increase verbosity of test messages"
  echo " -vv Alias of -v -v (Show all results in stdout)"
  echo " -vvv Alias of -v -v -v (Show all commands immediately)"
  echo " --fail-unsupported Treat UNSUPPORTED as a failure"
  echo " --fail-unresolved Treat UNRESOLVED as a failure"
  echo " -d|--debug Debug mode (trace all shell commands)"
  echo " -l|--logdir <dir> Save logs on the <dir>"
  echo " If <dir> is -, all logs output in console only"
  exit $1
}
# kselftest skip code is 4
# NOTE(review): the assignment recording this skip code (e.g. err_skip=4)
# is missing from this chunk -- verify against upstream.

# cgroup RT scheduling prevents chrt commands from succeeding, which
# induces failures in test wakeup tests. Disable for the duration of
# the tests.
readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us

# Remember the current throttling value so it can be restored afterwards.
sched_rt_runtime_orig=$(cat $sched_rt_runtime)
# NOTE(review): the two writes below look like the bodies of the RT-throttle
# disable/restore helpers; their enclosing function definition lines are
# missing from this chunk. The leading integers are line-number artifacts of
# the mangled paste, not shell code.
# Disable RT runtime throttling (-1 = unlimited) for the test run.
42 echo -1 > $sched_rt_runtime
# Restore the value captured in sched_rt_runtime_orig above.
46 echo $sched_rt_runtime_orig > $sched_rt_runtime
# Ensuring user privilege
# The tests write tracing control files, so refuse to run unless root.
# (closing "fi" restored; it is missing from this chunk)
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi
# absdir file_path -- print the absolute directory containing file_path.
# Runs in a subshell so the caller's cwd is untouched.
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

# abspath file_path -- print the absolute path of file_path.
# NOTE(review): the "abspath() {" definition line is missing from this
# chunk; the name is grounded by the call sites below (e.g. `abspath $1`).
abspath() { # file_path
  echo `absdir $1`/`basename $1`
}
# find_testcases directory -- print all *.tc files under the directory,
# sorted, space-joined on one line (echo collapses find's newlines).
find_testcases() { #directory
  echo `find $1 -name \*.tc | sort`
}
# NOTE(review): whitespace-mangled fragments of the command-line option
# parser (a while/case loop, presumably inside a parse_opts-style function
# whose definition is missing). The 'case "$1" in' line, the per-arm
# "shift"/";;" lines, "esac", "done" and several option arms are missing
# from this chunk. Statements are split across lines and the leading
# integers are line-number artifacts of the paste, not shell code. Kept
# byte-identical pending recovery of the missing lines.
79 while [ ! -z "$1" ]; do
# Arm for --verbose/-v/-vv/-vvv: bump VERBOSE (extra bumps for -vv/-vvv).
88 --verbose|
-v|
-vv|
-vvv)
# VERBOSE == -1 appears to mean console mode, which conflicts with -v.
89 if [ $VERBOSE -eq -1 ]; then
90 usage
"--console can not use with --verbose"
92 VERBOSE
=$
((VERBOSE
+ 1))
93 [ $1 = '-vv' ] && VERBOSE
=$
((VERBOSE
+ 1))
94 [ $1 = '-vvv' ] && VERBOSE
=$
((VERBOSE
+ 2))
# Presumably the --console arm: console mode requires VERBOSE == 0.
98 if [ $VERBOSE -ne 0 ]; then
99 usage
"--console can not use with --verbose"
# *.tc arm: accumulate explicitly-named testcase files (absolute paths).
126 OPT_TEST_CASES
="$OPT_TEST_CASES `abspath $1`"
129 usage
1 "$1 is not a testcase"
# Directory arm: collect every *.tc beneath the given directory.
134 OPT_TEST_DIR
=`abspath $1`
135 OPT_TEST_CASES
="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
# Fallback arm: reject unknown options.
138 usage
1 "Invalid option ($1)"
# Prefer testcases explicitly requested on the command line over the
# default set. (closing "fi" restored; missing from this chunk)
if [ ! -z "$OPT_TEST_CASES" ]; then
  TEST_CASES=$OPT_TEST_CASES
fi
# Locate the ftrace control directory: prefer an already-mounted tracefs,
# fall back to debugfs, and mount one if neither is mounted yet.
# NOTE(review): reconstructed from a mangled paste -- the else/fi
# scaffolding lines were missing from this chunk and were restored from the
# visible if/elif structure; upstream may also set a skip code before the
# errexit calls (those lines are absent here). Verify against upstream.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
  DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
  if [ -z "$DEBUGFS_DIR" ]; then
    # If tracefs exists, then so does /sys/kernel/tracing
    if [ -d "/sys/kernel/tracing" ]; then
      mount -t tracefs nodev /sys/kernel/tracing ||
        errexit "Failed to mount /sys/kernel/tracing"
      TRACING_DIR="/sys/kernel/tracing"
    # If debugfs exists, then so does /sys/kernel/debug
    elif [ -d "/sys/kernel/debug" ]; then
      mount -t debugfs nodev /sys/kernel/debug ||
        errexit "Failed to mount /sys/kernel/debug"
      TRACING_DIR="/sys/kernel/debug/tracing"
    else
      errexit "debugfs and tracefs are not configured in this kernel"
    fi
  else
    TRACING_DIR="$DEBUGFS_DIR/tracing"
  fi
fi
# Bail out when no usable tracing directory exists after all fallbacks.
if [ ! -d "$TRACING_DIR" ]; then
  errexit "ftrace is not configured in this kernel"
fi
# Default testcase location and a timestamped per-run log directory.
# (TOP_DIR is expected to be set earlier in the full script.)
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
186 # Parse command-line options
# NOTE(review): the actual call into the option parser (original line ~187)
# is missing from this chunk. Leading integers are paste artifacts.
189 [ $DEBUG -ne 0 ] && set -x
# Abort unless a usable tracing directory was found above; the closing
# "fi" is missing from this chunk.
192 if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
193 errexit
"No ftrace directory found"
# LOG_DIR of "-" appears to mean console-only logging; the branch that
# handles it (and the else/fi lines) are missing from this chunk.
197 if [ "x$LOG_DIR" = "x-" ]; then
201 LOG_FILE
=$LOG_DIR/ftracetest.log
202 mkdir
-p $LOG_DIR || errexit
"Failed to make a log directory: $LOG_DIR"
# Check available colors on the terminal, if any
# tput failure (no terminfo / no TERM) falls back to 0 colors.
ncolors=`tput colors 2>/dev/null || echo 0`
# If stdout exists and number of colors is eight or more, use them
# (closing "fi" restored; missing from this chunk)
if [ -t 1 -a "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  # NOTE(review): the original also sets color_red here (its line is
  # missing from this chunk); restored with the standard ANSI red code
  # since ${color_red} is used by the FAIL output below -- verify.
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
# strip_esc -- filter: remove ANSI color/erase escape sequences from stdin.
# NOTE(review): the "strip_esc() {" definition line is missing from this
# chunk; the name is grounded by the call sites below.
strip_esc() {
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  sed -E "s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
}
# NOTE(review): fragments of the prlog/catlog logging helpers; the function
# definition lines and the printf/cat statements between these fragments
# are missing from this chunk. Leading integers are paste artifacts of the
# mangled copy, not shell code.
# prlog fragment: "-n" appears to suppress the trailing newline.
228 if [ "$1" = "-n" ] ; then
# Mirror the message into LOG_FILE with escape sequences stripped.
233 [ "$LOG_FILE" ] && printf "$*$newline" | strip_esc
>> $LOG_FILE
# catlog fragment: append a stripped copy of the file to LOG_FILE.
237 [ "$LOG_FILE" ] && cat $1 | strip_esc
>> $LOG_FILE
# Emit the run banner through prlog (stdout plus log file).
prlog "=== Ftrace unit tests ==="
# Testcase management
# Test result codes - Dejagnu extended code
PASS=0 # The test succeeded.
FAIL=1 # The test failed, but was expected to succeed.
UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
UNTESTED=3 # The test was not run, currently just a placeholder.
UNSUPPORTED=4 # The test failed because of lack of feature.
XFAIL=5 # The test failed, and was expected to fail.
# testcase testfile -- announce a testcase: bump the case counter and print
# "[N](instance) description" (no newline) from the file's "# description:"
# header line.
testcase() { # testfile
  # NOTE(review): the CASENO increment line is missing from this chunk;
  # restored because CASENO is used as the per-test counter here and in
  # eval_result -- verify against upstream.
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2- -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}
# checkreq testfile -- extract the "# requires:" header line and hand the
# requirement list to check_requires (defined by the sourced helpers).
# (closing "}" restored; missing from this chunk)
checkreq() { # testfile
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}
# test_on_instance testfile -- succeed (exit 0) iff the testcase's
# "# flags:" header requests running on a tracing instance.
# (closing "}" restored; missing from this chunk)
test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
# eval_result sigval -- record and report one test's outcome; returns
# non-zero when the outcome should count as a failure.
# NOTE(review): reconstructed from a mangled paste -- the "case $1 in"
# line, the arm labels, the ";;" separators and "esac"/"}" were missing
# from this chunk. Arms were inferred from the PASS..XFAIL strings and the
# result codes defined above; arms whose original return statements are
# missing fall through (implicit status 0). Verify against upstream.
eval_result() { # sigval
  case $1 in
    $PASS)
      prlog " [${color_green}PASS${color_reset}]"
      PASSED_CASES="$PASSED_CASES $CASENO"
    ;;
    $FAIL)
      prlog " [${color_red}FAIL${color_reset}]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog " [${color_blue}UNRESOLVED${color_reset}]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog " [${color_blue}UNTESTED${color_reset}]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
    ;;
    $UNSUPPORTED)
      prlog " [${color_blue}UNSUPPORTED${color_reset}]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog " [${color_green}XFAIL${color_reset}]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
    ;;
    *)
      prlog " [${color_blue}UNDEFINED${color_reset}]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
320 # Signal handling for result codes
# NOTE(review): mangled fragments of the signal-based result protocol: each
# result code maps to a realtime signal (SIG_BASE + code), a trap records
# the code in SIG_RESULT, and exit_* helpers signal the harness process.
# The SIG_PID assignment and most exit_* function wrappers around the
# "kill" lines below are missing from this chunk. Statements are split
# across lines; leading integers are paste artifacts, not shell code.
322 SIG_BASE
=36 # Use realtime signals
# SIG_FAIL = SIG_BASE + FAIL.
329 SIG_FAIL
=$
((SIG_BASE
+ FAIL
))
333 trap 'SIG_RESULT=$FAIL' $SIG_FAIL
# SIG_UNRESOLVED = SIG_BASE + UNRESOLVED; the kill below is presumably the
# body of an exit_unresolved helper whose definition line is missing.
335 SIG_UNRESOLVED
=$
((SIG_BASE
+ UNRESOLVED
))
337 kill -s $SIG_UNRESOLVED $SIG_PID
340 trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED
# SIG_UNTESTED = SIG_BASE + UNTESTED; kill presumably inside exit_untested.
342 SIG_UNTESTED
=$
((SIG_BASE
+ UNTESTED
))
344 kill -s $SIG_UNTESTED $SIG_PID
347 trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED
# SIG_UNSUPPORTED = SIG_BASE + UNSUPPORTED; here the exit_unsupported
# definition IS visible, but its closing "}" is missing from this chunk.
349 SIG_UNSUPPORTED
=$
((SIG_BASE
+ UNSUPPORTED
))
350 exit_unsupported
() {
351 kill -s $SIG_UNSUPPORTED $SIG_PID
354 trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED
# SIG_XFAIL = SIG_BASE + XFAIL; kill presumably inside exit_xfail.
356 SIG_XFAIL
=$
((SIG_BASE
+ XFAIL
))
358 kill -s $SIG_XFAIL $SIG_PID
361 trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
# __run_test testfile -- execute one testcase in a subshell: cd into the
# tracing directory, refresh PID (since $$ is stale in subshells), enable
# errexit/xtrace, check requirements, reset ftrace state, then source the
# testcase. A non-zero subshell status is reported to the harness via
# SIG_FAIL. (closing "}" restored; missing from this chunk)
__run_test() { # testfile
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
# NOTE(review): mangled fragments of run_test, the per-testcase driver:
# pick a per-test log target, run __run_test with redirections chosen by
# VERBOSE, then score the outcome via eval_result. The else/fi lines, two
# verbosity branches, the TMPDIR cleanup and the closing "}" are missing
# from this chunk. Statements are split across lines; leading integers are
# paste artifacts, not shell code.
371 run_test
() { # testfile
372 local testname
=`basename $1`
# With a log file configured, each test logs to its own mktemp file;
# otherwise testlog aliases stdout via /proc/self/fd/1.
374 if [ ! -z "$LOG_FILE" ] ; then
375 local testlog
=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
377 local testlog
=/proc
/self
/fd
/1
# Per-test scratch dir exported for the testcase's use.
379 export TMPDIR
=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
380 export FTRACETEST_ROOT
=$TOP_DIR
381 echo "execute$INSTANCE: "$1 > $testlog
# Verbosity dispatch: console mode / no log / -vvv / -vv / default.
383 if [ $VERBOSE -eq -1 ]; then
385 elif [ -z "$LOG_FILE" ]; then
387 elif [ $VERBOSE -ge 3 ]; then
388 __run_test
$1 |
tee -a $testlog 2>&1
389 elif [ $VERBOSE -eq 2 ]; then
390 __run_test
$1 2>> $testlog |
tee -a $testlog
392 __run_test
$1 >> $testlog 2>&1
# Score the signal-reported result.
394 eval_result
$SIG_RESULT
395 if [ $?
-eq 0 ]; then
396 # Remove test log if the test was done as it was expected.
397 [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
399 [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog
$testlog
# load in the helper functions
. $TEST_DIR/functions
# NOTE(review): mangled fragments of the two main test loops (normal run,
# then a second pass on a fresh tracing instance for testcases flagged
# "instance"). The run_test calls, the "exit 1"/"done" lines and the
# instance-directory cleanup are missing from this chunk. Statements are
# split across lines; leading integers are paste artifacts, not shell code.
309 -> first pass over all testcases:
409 for t
in $TEST_CASES; do
# Optional fail-fast behaviour controlled by STOP_FAILURE.
411 if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
412 echo "A failure detected. Stop test."
417 # Test on instance loop
418 INSTANCE
=" (instance) "
419 for t
in $TEST_CASES; do
# Skip testcases that are not flagged for instance testing.
420 test_on_instance
$t ||
continue
# Run inside a throwaway tracing instance, then restore TRACING_DIR.
421 SAVED_TRACING_DIR
=$TRACING_DIR
422 export TRACING_DIR
=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
425 TRACING_DIR
=$SAVED_TRACING_DIR
426 if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
427 echo "A failure detected. Stop test."
431 (cd $TRACING_DIR; initialize_ftrace
) # for cleanup
# Final summary: each *_CASES variable is a space-joined list of case
# numbers, so `wc -w` yields the per-result count.
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`
444 # if no error, return 0