]> git.proxmox.com Git - mirror_ubuntu-bionic-kernel.git/blob - tools/testing/selftests/ftrace/ftracetest
52e3c4df28d6ff243a70190592d404eba2be23b7
[mirror_ubuntu-bionic-kernel.git] / tools / testing / selftests / ftrace / ftracetest
1 #!/bin/sh
2
3 # ftracetest - Ftrace test shell scripts
4 #
5 # Copyright (C) Hitachi Ltd., 2014
6 # Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
7 #
8 # Released under the terms of the GPL v2.
9
usage() { # errno [message]
  # Print an optional message plus the usage text, then exit with $1.
  # $2 is quoted so a message containing spaces or glob characters is
  # printed verbatim instead of being word-split/expanded.
  [ "$2" ] && echo "$2"
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " Options:"
  echo "  -h|--help  Show help message"
  echo "  -k|--keep  Keep passed test logs"
  echo "  -v|--verbose Increase verbosity of test messages"
  echo "  -vv        Alias of -v -v (Show all results in stdout)"
  echo "  -d|--debug Debug mode (trace all shell commands)"
  exit $1
}
21
errexit() { # message
  # Print a fatal error on stderr and abort the whole script.
  echo "Error: $1" >&2
  exit 1
}
26
# Ensuring user privilege: the tracing directory is root-only, so bail
# out early when not running as uid 0.
if [ "$(id -u)" -ne 0 ]; then
  errexit "this must be run by root user"
fi
31
32 # Utilities
absdir() { # file_path
  # Print the absolute path of the directory containing $1.
  # All expansions are quoted so paths containing spaces work; the
  # subshell keeps the cd from affecting the caller's cwd.
  (cd "$(dirname "$1")" && pwd)
}
36
abspath() {
  # Print the absolute path of file $1 (absolute dir + basename).
  # Quoted expansions keep paths with spaces intact.
  echo "$(absdir "$1")/$(basename "$1")"
}
40
find_testcases() { # directory
  # Print all *.tc testcase files under $1, sorted, as a single
  # space-separated line. The command substitution is deliberately
  # unquoted so the newline-separated find output collapses to spaces.
  echo $(find "$1" -name '*.tc' | sort)
}
44
parse_opts() { # opts
  # Parse command-line arguments. Sets KEEP_LOG / VERBOSE / DEBUG, and
  # when explicit testcase files or directories were given, overrides
  # the global TEST_CASES with just those.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --verbose|-v|-vv)
      VERBOSE=$((VERBOSE + 1))
      # Use '=' (not the bashism '==') so this stays valid under
      # /bin/sh (e.g. dash), and quote $1 against word-splitting.
      [ "$1" = "-vv" ] && VERBOSE=$((VERBOSE + 1))
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    *.tc)
      # A single explicit testcase file.
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES $(abspath "$1")"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
    ;;
    *)
      # A directory: collect every *.tc file beneath it.
      if [ -d "$1" ]; then
        OPT_TEST_DIR=$(abspath "$1")
        OPT_TEST_CASES="$OPT_TEST_CASES $(find_testcases "$OPT_TEST_DIR")"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
90
# Parameters
# Locate the ftrace control directory: prefer tracing/ under a mounted
# debugfs, otherwise fall back to a standalone tracefs mount.
DEBUGFS_DIR=$(grep debugfs /proc/mounts | cut -f2 -d' ' | head -1)
if [ -z "$DEBUGFS_DIR" ]; then
  TRACING_DIR=$(grep tracefs /proc/mounts | cut -f2 -d' ' | head -1)
else
  TRACING_DIR=$DEBUGFS_DIR/tracing
fi

TOP_DIR=$(absdir "$0")
TEST_DIR=$TOP_DIR/test.d
TEST_CASES=$(find_testcases "$TEST_DIR")
LOG_DIR=$TOP_DIR/logs/$(date +%Y%m%d-%H%M%S)/
KEEP_LOG=0
DEBUG=0
VERBOSE=0
# Parse command-line options.
# "$@" (not $*) preserves each argument as one word, so testcase paths
# containing spaces survive intact.
parse_opts "$@"
108
# Trace every shell command when --debug was given.
[ $DEBUG -ne 0 ] && set -x

# Verify parameters
# Two separate [ ] joined by || instead of the obsolescent/ambiguous
# '-o' operator inside a single test.
if [ -z "$TRACING_DIR" ] || [ ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi
115
# Preparing logs
LOG_FILE=$LOG_DIR/ftracetest.log
mkdir -p "$LOG_DIR" || errexit "Failed to make a log directory: $LOG_DIR"
date > "$LOG_FILE"

# Print message(s) to stdout and append them to the log file.
# Keep echo "$@" (not printf): callers rely on passing -n through
# (e.g. "prlog -n ..." in testcase()) to suppress the newline.
prlog() { # messages
  echo "$@" | tee -a "$LOG_FILE"
}

# Dump a file to stdout while appending it to the log file.
# Redirection replaces the original useless 'cat | tee' pipeline.
catlog() { #file
  tee -a "$LOG_FILE" < "$1"
}
prlog "=== Ftrace unit tests ==="
127
128
129 # Testcase management
130 # Test result codes - Dejagnu extended code
131 PASS=0 # The test succeeded.
132 FAIL=1 # The test failed, but was expected to succeed.
133 UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
134 UNTESTED=3 # The test was not run, currently just a placeholder.
135 UNSUPPORTED=4 # The test failed because of lack of feature.
136 XFAIL=5 # The test failed, and was expected to fail.
137
138 # Accumulations
139 PASSED_CASES=
140 FAILED_CASES=
141 UNRESOLVED_CASES=
142 UNTESTED_CASES=
143 UNSUPPORTED_CASES=
144 XFAILED_CASES=
145 UNDEFINED_CASES=
146 TOTAL_RESULT=0
147
148 CASENO=0
testcase() { # testfile
  # Announce the next testcase: bump the global counter and print the
  # case number plus the "# description:" line from the testcase file.
  CASENO=$((CASENO+1))
  desc=$(grep "^#[ \t]*description:" "$1" | cut -f2 -d:)
  # -n: eval_result() later appends the [PASS]/[FAIL]/... tag on the same line.
  prlog -n "[$CASENO]$desc"
}
154
eval_result() { # sigval
  # Translate a testcase result code into its report line, record the
  # case number in the matching accumulator, and return 0 when the
  # result is acceptable or 1 when it must be reported as a problem.
  if [ "$1" = "$PASS" ]; then
    prlog " [PASS]"
    PASSED_CASES="$PASSED_CASES $CASENO"
    return 0
  elif [ "$1" = "$FAIL" ]; then
    prlog " [FAIL]"
    FAILED_CASES="$FAILED_CASES $CASENO"
    return 1 # this is a bug.
  elif [ "$1" = "$UNRESOLVED" ]; then
    prlog " [UNRESOLVED]"
    UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
    return 1 # this is a kind of bug.. something happened.
  elif [ "$1" = "$UNTESTED" ]; then
    prlog " [UNTESTED]"
    UNTESTED_CASES="$UNTESTED_CASES $CASENO"
    return 0
  elif [ "$1" = "$UNSUPPORTED" ]; then
    prlog " [UNSUPPORTED]"
    UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
    return 1 # this is not a bug, but the result should be reported.
  elif [ "$1" = "$XFAIL" ]; then
    prlog " [XFAIL]"
    XFAILED_CASES="$XFAILED_CASES $CASENO"
    return 0
  else
    prlog " [UNDEFINED]"
    UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
    return 1 # this must be a test bug
  fi
}
194
# Signal handling for result codes
# A testcase runs in a subshell and reports a non-FAIL result by
# sending a realtime signal (SIG_BASE + result code) back to this main
# process; the corresponding trap stores the code in SIG_RESULT, which
# run_test() reads after the testcase returns. Trap bodies are single-
# quoted on purpose so $FAIL etc. are expanded when the trap fires.
SIG_RESULT=
SIG_BASE=36 # Use realtime signals
SIG_PID=$$

SIG_FAIL=$((SIG_BASE + FAIL))
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
# Called from inside a testcase: report UNRESOLVED and end the case.
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
# Called from inside a testcase: report UNTESTED and end the case.
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
# Called from inside a testcase: report UNSUPPORTED and end the case.
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
# Called from inside a testcase: report XFAIL (expected failure).
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
230
__run_test() { # testfile
  # Source the testcase in a subshell rooted at the tracing directory,
  # under set -e (abort on error) and set -x (trace commands).
  # setup PID and PPID, $$ is not updated.
  (cd "$TRACING_DIR"; read PID _ < /proc/self/stat; set -e; set -x; initialize_ftrace; . "$1")
  # Any non-zero status from the subshell (including a set -e abort)
  # is reported to the main process as a FAIL.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
236
237 # Run one test case
# Run one test case
run_test() { # testfile
  # Execute a single testcase: capture its output in a per-case log,
  # evaluate the result signalled back via SIG_RESULT, and clean up.
  local testname=$(basename "$1")
  local testlog=$(mktemp "$LOG_DIR/${testname}-log.XXXXXX")
  # Private scratch directory, exported so the testcase can use it too.
  export TMPDIR=$(mktemp -d /tmp/ftracetest-dir.XXXXXX)
  testcase "$1"
  echo "execute: $1" > "$testlog"
  SIG_RESULT=0
  if [ $VERBOSE -ge 2 ]; then
    # -vv: show the test output live as well as logging it.
    __run_test "$1" 2>> "$testlog" | tee -a "$testlog"
  else
    __run_test "$1" >> "$testlog" 2>&1
  fi
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 ] && rm "$testlog"
  else
    [ $VERBOSE -ge 1 ] && catlog "$testlog"
    TOTAL_RESULT=1
  fi
  # Guard against an empty TMPDIR (mktemp failure) before rm -rf.
  [ -d "$TMPDIR" ] && rm -rf "$TMPDIR"
}
260
# load in the helper functions
. $TEST_DIR/functions

# Main loop: run every collected testcase in order.
for t in $TEST_CASES; do
  run_test $t
done

# Summary: count how many case numbers landed in each category.
prlog ""
prlog "# of passed: " $(echo $PASSED_CASES | wc -w)
prlog "# of failed: " $(echo $FAILED_CASES | wc -w)
prlog "# of unresolved: " $(echo $UNRESOLVED_CASES | wc -w)
prlog "# of untested: " $(echo $UNTESTED_CASES | wc -w)
prlog "# of unsupported: " $(echo $UNSUPPORTED_CASES | wc -w)
prlog "# of xfailed: " $(echo $XFAILED_CASES | wc -w)
prlog "# of undefined(test bug): " $(echo $UNDEFINED_CASES | wc -w)

# if no error, return 0
exit $TOTAL_RESULT