#!/bin/sh

# ftracetest - Ftrace test shell scripts
#
# Copyright (C) Hitachi Ltd., 2014
#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
#
# Released under the terms of the GPL v2.

usage() { # errno [message]
  [ ! -z "$2" ] && echo $2
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " Options:"
  echo "    -h|--help           Show this help message"
  echo "    -k|--keep           Keep passed test logs"
  echo "    -v|--verbose        Increase verbosity of test messages"
  echo "    -vv                 Alias of -v -v (show all results on stdout)"
  echo "    -vvv                Alias of -v -v -v (show all commands immediately)"
  echo "    --fail-unsupported  Treat UNSUPPORTED as a failure"
  echo "    -d|--debug          Debug mode (trace all shell commands)"
  echo "    -l|--logdir <dir>   Save logs in <dir>"
  echo "                        If <dir> is -, all logs are written to the console only"
  exit $1
}
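
# Example invocations (illustrative paths):
#   ./ftracetest                      # run every *.tc testcase under ./test.d
#   ./ftracetest -vv test.d/kprobe/   # run only the kprobe tests, verbosely
#   ./ftracetest -l -                 # write all logs to the console only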

errexit() { # message
  echo "Error: $1" 1>&2
  exit 1
}

# Ensure root privilege
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by the root user"
fi

# Utilities
absdir() { # file_path
  (cd `dirname $1`; pwd)
}

abspath() {
  echo `absdir $1`/`basename $1`
}

find_testcases() { # directory
  echo `find $1 -name \*.tc | sort`
}

parse_opts() { # opts
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ ! -z "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ $1 = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      LOG_DIR=$2
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ ! -z "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}

# Parameters
DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$DEBUGFS_DIR" ]; then
  TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
else
  TRACING_DIR=$DEBUGFS_DIR/tracing
fi
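# Typically this resolves to <debugfs>/tracing (e.g. /sys/kernel/debug/tracing)
# or, when only tracefs is mounted, to its mount point (e.g. /sys/kernel/tracing).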

TOP_DIR=`absdir $0`
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=`find_testcases $TEST_DIR`
LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
UNSUPPORTED_RESULT=0

# Parse command-line options
parse_opts $*

[ $DEBUG -ne 0 ] && set -x

# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Prepare logs
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
fi

prlog() { # messages
  [ -z "$LOG_FILE" ] && echo "$@" || echo "$@" | tee -a $LOG_FILE
}
catlog() { # file
  [ -z "$LOG_FILE" ] && cat $1 || cat $1 | tee -a $LOG_FILE
}
prlog "=== Ftrace unit tests ==="

# Testcase management
# Test result codes - Dejagnu extended codes
PASS=0        # The test succeeded.
FAIL=1        # The test failed, but was expected to succeed.
UNRESOLVED=2  # The test produced indeterminate results (e.g. it was interrupted).
UNTESTED=3    # The test was not run; currently just a placeholder.
UNSUPPORTED=4 # The test failed because of a missing feature.
XFAIL=5       # The test failed, and was expected to fail.
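
# A testcase reports its result by calling one of the exit_* helpers defined
# below (or simply exiting non-zero for FAIL); non-pass results are signalled
# back to this process, recorded by the traps, and tallied by eval_result().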

# Accumulations
PASSED_CASES=
FAILED_CASES=
UNRESOLVED_CASES=
UNTESTED_CASES=
UNSUPPORTED_CASES=
XFAILED_CASES=
UNDEFINED_CASES=
TOTAL_RESULT=0

INSTANCE=
CASENO=0

testcase() { # testfile
  CASENO=$((CASENO+1))
  desc=`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
  prlog -n "[$CASENO]$INSTANCE$desc"
}

test_on_instance() { # testfile
  grep -q "^#[ \t]*flags:.*instance" $1
}
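
# A testcase declares its description and flags in its header comments,
# for example (illustrative):
#   # description: ftrace - function trace on an instance
#   # flags: instance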

eval_result() { # sigval
  case $1 in
    $PASS)
      prlog " [PASS]"
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog " [FAIL]"
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog " [UNRESOLVED]"
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return 1 # this is a kind of bug... something unexpected happened.
    ;;
    $UNTESTED)
      prlog " [UNTESTED]"
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog " [UNSUPPORTED]"
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog " [XFAIL]"
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      prlog " [UNDEFINED]"
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}

# Signal handling for result codes
SIG_RESULT=
SIG_BASE=36  # Use realtime signals
SIG_PID=$$
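
# Each non-pass result code is reported by sending signal (SIG_BASE + code)
# to this process; the traps below record the code in SIG_RESULT, which
# eval_result() inspects after the testcase finishes.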

exit_pass () {
  exit 0
}

SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
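
# Example use inside a testcase (illustrative): report UNSUPPORTED instead of
# FAIL when a required ftrace feature is not available on this kernel:
#   grep -q function_graph available_tracers || exit_unsupported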

__run_test() { # testfile
  # Set up PID for the testcase ($$ is not updated inside the subshell).
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . $1)
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}

# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  testcase $1
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  if [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove the test log if the test finished as expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}

# load in the helper functions
. $TEST_DIR/functions
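# (test.d/functions provides shared helpers such as initialize_ftrace, which
#  __run_test() calls before sourcing each testcase.)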

# Main loop
for t in $TEST_CASES; do
  run_test $t
done

# Test on instance loop
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
done

prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined (test bug): " `echo $UNDEFINED_CASES | wc -w`

# if no error, return 0
exit $TOTAL_RESULT