#!/usr/bin/env bash
#
# Run tests against multiple shells with the sh_spec framework.
#
# Usage:
#   test/spec-runner.sh <function name>

set -o nounset
set -o pipefail
set -o errexit
shopt -s strict:all 2>/dev/null || true  # dogfood for OSH

REPO_ROOT=$(cd "$(dirname $0)/.."; pwd)

source build/dev-shell.sh
source test/common.sh
source test/spec-common.sh
source test/tsv-lib.sh  # $TAB

#
# Test Runner
#
write-suite-manifests() {
  # This takes ~160 ms, it would be nice not to do it 3 times!
  # I guess we can print (suite, name, tag) with duplicates, and then use 'uniq'
  #
  #test/sh_spec.py --print-table spec/*.test.sh

  local dir=_tmp/spec

  { test/sh_spec.py --print-table spec/*.test.sh | while read suite name; do
      case $suite in
        osh) echo $name >& $osh ;;
        ysh) echo $name >& $ysh ;;
        disabled) ;;  # ignore
        *) die "Invalid suite $suite" ;;
      esac
    done
  } {osh}>$dir/SUITE-osh.txt \
    {ysh}>$dir/SUITE-ysh.txt \
    {needs_terminal}>$dir/SUITE-needs-terminal.txt

  # These are kind of pseudo-suites, not the main 3
  test/sh_spec.py --print-tagged interactive \
    spec/*.test.sh > $dir/SUITE-interactive.txt

  test/sh_spec.py --print-tagged dev-minimal \
    spec/*.test.sh > $dir/SUITE-osh-minimal.txt

  # For spec-compat, remove files that other shells aren't expected to run.
  # Keep SUITE-osh the same for historical comparison.

  # I want errexit-osh to be adopted by other shells, so I'm keeping it
  local remove='strict-options'
  #local remove='errexit-osh|strict-options'

  egrep -v "$remove" $dir/SUITE-osh.txt > $dir/SUITE-compat.txt
}
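
# Not part of the original runner -- a minimal sketch of the bash feature used
# above: a {varname}>file redirection makes the shell allocate a free file
# descriptor, open the file on it, and store the fd number in the variable, so
# the body can route lines with 'echo ... >& $fd_var'.  The function name,
# output path, and list of names here are made up for illustration.
demo-named-fds() {
  local dir=_tmp/spec
  mkdir -p $dir

  { for name in alias append builtin-times; do
      echo $name >& $demo    # write to whichever fd bash assigned to $demo
    done
  } {demo}>$dir/demo-manifest.txt

  cat $dir/demo-manifest.txt
}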

print-manifest() {
  local manifest=$1
  if test -n "${SPEC_EGREP:-}"; then
    egrep "$SPEC_EGREP" $manifest
  else
    head -n $NUM_SPEC_TASKS $manifest
  fi
}

_print-task-file() {
  cat <<'EOF'
#!/usr/bin/env bash
#
# This file is GENERATED -- DO NOT EDIT.
#
# Update it with:
#   test/spec-runner.sh gen-task-file
#
# Usage:
#   test/spec.sh <function name>

: ${LIB_OSH=stdlib/osh}
source $LIB_OSH/bash-strict.sh
source $LIB_OSH/task-five.sh

source build/dev-shell.sh
EOF

  while read spec_name; do
    echo "
$spec_name() {
  test/spec-py.sh run-file $spec_name \"\$@\"
}"
  done

  echo
  echo 'task-five "$@"'
}
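
# Not part of the original -- a small sketch showing what the generated
# wrappers in test/spec.sh look like: one function per manifest entry, each
# delegating to 'test/spec-py.sh run-file'.  The two names piped in here are
# just example spec names.
demo-print-task-file() {
  { echo smoke; echo alias; } | _print-task-file
}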

gen-task-file() {
  # make sorting stable across machines
  LANG=C
  test/sh_spec.py --print-table spec/*.test.sh | while read suite name; do
    echo $name
  done | _print-task-file > test/spec.sh
}

diff-manifest() {
  ### temporary test

  write-suite-manifests
  #return

  # crazy sorting, affects glob
  # doesn't work
  #LANG=C
  #LC_COLLATE=C
  #LC_ALL=C
  #export LANG LC_COLLATE LC_ALL

  for suite in osh ysh interactive osh-minimal; do
    echo
    echo [$suite]
    echo

    diff -u -r <(sort spec2/SUITE-$suite.txt) <(sort _tmp/spec/SUITE-$suite.txt)  #|| true
  done
}

dispatch-one() {
  # Determines which binaries to compare against:
  # compare-py | compare-cpp | spec-compat | osh-only | bash-only | release-alpine
  local compare_mode=${1:-compare-py}
  # Which subdir of _tmp/spec: osh-py ysh-py osh-cpp ysh-cpp smoosh
  local spec_subdir=${2:-osh-py}
  local spec_name=$3
  shift 3  # rest are more flags

  log "__ $spec_name"

  local -a prefix
  case $compare_mode in

    #compare-py) prefix=(test/spec.sh) ;;
    compare-py) prefix=(test/spec-py.sh run-file) ;;

    compare-cpp) prefix=(test/spec-cpp.sh run-file) ;;
    spec-compat) prefix=(test/spec-compat.sh run-file) ;;

    # For interactive comparison
    osh-only) prefix=(test/spec-util.sh run-file-with-osh) ;;
    bash-only) prefix=(test/spec-util.sh run-file-with-bash) ;;

    release-alpine) prefix=(test/spec-alpine.sh run-file) ;;

    *) die "Invalid compare mode $compare_mode" ;;
  esac

  local base_dir=_tmp/spec/$spec_subdir

  # TODO: Could --stats-{file,template} be a separate awk step on .tsv files?
  run-task-with-status \
    $base_dir/${spec_name}.task.txt \
    "${prefix[@]}" $spec_name \
    --format html \
    --stats-file $base_dir/${spec_name}.stats.txt \
    --stats-template \
    '%(num_cases)d %(oils_num_passed)d %(oils_num_failed)d %(oils_failures_allowed)d %(oils_ALT_delta)d' \
    "$@" \
    > $base_dir/${spec_name}.html
}
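
# Not part of the original -- a sketch of invoking the dispatcher directly for
# one spec file, the same way the xargs fan-out in _all-parallel does.  'smoke'
# is just an example spec name; the output subdir has to exist first.
demo-dispatch-one() {
  mkdir -p _tmp/spec/osh-py
  dispatch-one compare-py osh-py smoke
}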


_html-summary() {
  ### Print an HTML summary to stdout and return whether all tests succeeded

  local sh_label=$1  # osh or ysh
  local base_dir=$2  # e.g. _tmp/spec/ysh-cpp
  local totals=$3    # path to print HTML to
  local manifest=$4

  html-head --title "Spec Test Summary" \
    ../../../web/base.css ../../../web/spec-tests.css

  cat <<EOF
  <body class="width50">

<p id="home-link">
  <!-- The release index is two dirs up -->
  <a href="../..">Up</a> |
  <a href="/">oils.pub</a>
</p>

<h1>Spec Test Results Summary</h1>

<table>
  <thead>
    <tr>
      <td>name</td>
      <td># cases</td> <td>$sh_label # passed</td> <td>$sh_label # failed</td>
      <td>$sh_label failures allowed</td>
      <td>$sh_label ALT delta</td>
      <td>Elapsed Seconds</td>
    </tr>
  </thead>
  <!-- TOTALS -->
EOF

  # Awk notes:
  # - "getline" is kind of like bash "read", but it doesn't allow you to
  #   specify variable names.  You have to destructure it yourself.
  # - Lack of string interpolation is very annoying

  print-manifest $manifest | sort | awk -v totals=$totals -v base_dir=$base_dir '
  # Awk problem: getline errors are ignored by default!
  function error(path) {
    print "Error reading line from file: " path > "/dev/stderr"
    exit(1)
  }

  {
    spec_name = $0

    # Read from the task files
    path = ( base_dir "/" spec_name ".task.txt" )
    n = getline < path
    if (n != 1) {
      error(path)
    }
    status = $1
    wall_secs = $2

    path = ( base_dir "/" spec_name ".stats.txt" )
    n = getline < path
    if (n != 1) {
      error(path)
    }
    num_cases = $1
    oils_num_passed = $2
    oils_num_failed = $3
    oils_failures_allowed = $4
    oils_ALT_delta = $5

    sum_status += status
    sum_wall_secs += wall_secs
    sum_num_cases += num_cases
    sum_oils_num_passed += oils_num_passed
    sum_oils_num_failed += oils_num_failed
    sum_oils_failures_allowed += oils_failures_allowed
    sum_oils_ALT_delta += oils_ALT_delta
    num_rows += 1

    # For the console
    if (status == 0) {
      num_passed += 1
    } else {
      num_failed += 1
      print spec_name " failed with status " status > "/dev/stderr"
    }

    if (status != 0) {
      css_class = "failed"
    } else if (oils_num_failed != 0) {
      css_class = "osh-allow-fail"
    } else if (oils_num_passed != 0) {
      css_class = "osh-pass"
    } else {
      css_class = ""
    }
    print "<tr class=" css_class ">"
    print "<td><a href=" spec_name ".html>" spec_name "</a></td>"
    print "<td>" num_cases "</td>"
    print "<td>" oils_num_passed "</td>"
    print "<td>" oils_num_failed "</td>"
    print "<td>" oils_failures_allowed "</td>"
    print "<td>" oils_ALT_delta "</td>"
    printf("<td>%.2f</td>\n", wall_secs);
    print "</tr>"
  }

  END {
    print "<tr class=totals>" >totals
    print "<td>TOTAL (" num_rows " rows) </td>" >totals
    print "<td>" sum_num_cases "</td>" >totals
    print "<td>" sum_oils_num_passed "</td>" >totals
    print "<td>" sum_oils_num_failed "</td>" >totals
    print "<td>" sum_oils_failures_allowed "</td>" >totals
    print "<td>" sum_oils_ALT_delta "</td>" >totals
    printf("<td>%.2f</td>\n", sum_wall_secs) > totals
    print "</tr>" >totals

    print "<tfoot>"
    print "<!-- TOTALS -->"
    print "</tfoot>"

    # For the console
    print "" > "/dev/stderr"
    if (num_failed == 0) {
      print "*** All " num_passed " tests PASSED" > "/dev/stderr"
    } else {
      print "*** " num_failed " tests FAILED" > "/dev/stderr"
      exit(1)  # failure
    }
  }
  '
  all_passed=$?

  cat <<EOF
  </table>

  <h3>Version Information</h3>
  <pre>
EOF

  # TODO: can pass shells here, e.g. for test/spec-cpp.sh
  # Note: $suite isn't a parameter of this function; it's visible from the
  # html-summary caller via bash's dynamic scoping.
  test/spec-version.sh ${suite}-version-text

  cat <<EOF
  </pre>
  </body>
</html>
EOF

  return $all_passed
}
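
# Not part of the original -- a minimal sketch of the awk 'getline < path'
# pattern used above: the line read from the file lands in $0 and is split
# into $1, $2, ..., which the caller destructures by hand.  The file name and
# contents here are made up.
demo-awk-getline() {
  local path=_tmp/spec/demo-task.txt
  mkdir -p _tmp/spec
  echo '0 1.25' > $path  # status, wall_secs

  awk -v path=$path '
  BEGIN {
    n = getline < path
    if (n != 1) {
      print "Error reading line from file: " path > "/dev/stderr"
      exit(1)
    }
    status = $1
    wall_secs = $2
    printf("status=%d wall_secs=%.2f\n", status, wall_secs)
  }
  '
}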

html-summary() {
  local suite=$1
  local base_dir=$2

  local manifest="_tmp/spec/SUITE-$suite.txt"

  local totals=$base_dir/totals-$suite.html
  local tmp=$base_dir/tmp-$suite.html

  local out=$base_dir/index.html

  # TODO: Do we also need $base_dir/{osh,oil}-details-for-toil.json
  # osh failures, and all failures
  # When deploying, if they exist, then copy them outside?
  # I guess toil_web.py can use the zipfile module?
  # To get _tmp/spec/...
  # it can read JSON like:
  # { "task_tsv": "_tmp/toil/INDEX.tsv",
  #   "details_json": [ ... ],
  # }

  set +o errexit
  _html-summary $suite $base_dir $totals $manifest > $tmp
  all_passed=$?
  set -o errexit

  # Total rows are displayed at both the top and bottom.
  awk -v totals="$(cat $totals)" '
  /<!-- TOTALS -->/ {
    print totals
    next
  }
  { print }
  ' < $tmp > $out

  echo
  echo "Results: file://$PWD/$out"

  return $all_passed
}
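
# Not part of the original -- a sketch of the awk splice above, run on a tiny
# made-up document: each <!-- TOTALS --> marker line is replaced by the
# captured totals row, so the totals appear at both the top and the bottom.
demo-totals-splice() {
  printf '%s\n' '<!-- TOTALS -->' '<tr><td>per-file row</td></tr>' '<!-- TOTALS -->' |
    awk -v totals='<tr class=totals><td>TOTAL</td></tr>' '
    /<!-- TOTALS -->/ {
      print totals
      next
    }
    { print }
    '
}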

assert-FOO() {
  # there's a stray 'foo' at the end
  #
  # I bet this is a file descriptor leak from a redirect!
  # Maybe a shell is doing something incorrect?
  # But the manifest shouldn't be open for write?  I guess there could be some
  # swapping
  #
  # Happens with NUM_SPEC_TASKS=100, but not NUM_SPEC_TASKS=50
  # Gah

  if grep foo _tmp/spec/SUITE-osh.txt; then
    echo "BAD FOO"
    exit
  fi
}

_all-parallel() {
  local suite=${1:-osh}
  local compare_mode=${2:-compare-py}
  local spec_subdir=${3:-survey}

  # The rest are more flags
  shift 3

  local manifest="_tmp/spec/SUITE-$suite.txt"
  local output_base_dir="_tmp/spec/$spec_subdir"
  mkdir -p $output_base_dir

  write-suite-manifests

  assert-FOO

  # The exit codes are recorded in files for html-summary to aggregate.
  set +o errexit
  print-manifest $manifest \
    | xargs -I {} -P $MAX_PROCS -- \
      $0 dispatch-one $compare_mode $spec_subdir {} "$@"
  set -o errexit

  assert-FOO

  all-tests-to-html $manifest $output_base_dir

  # note: the HTML links to ../../web/, which is in the repo.
  html-summary $suite $output_base_dir  # returns whether all passed
}

all-parallel() {
  ### Run spec tests in parallel.

  # Note: this function doesn't fail because 'run-file' saves the status to a
  # file.
  time _all-parallel "$@"
}
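
# Not part of the original -- a sketch of a typical invocation: suite, then
# compare mode, then the _tmp/spec subdir that results are written to.  The
# argument values are examples, not a canonical command.
demo-all-parallel() {
  all-parallel osh compare-py osh-py
}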

src-tree-py() {
  PYTHONPATH='.:vendor/' doctools/src_tree.py "$@"
}

all-tests-to-html() {
  local manifest=$1
  local output_base_dir=$2
  # ignore attrs output
  print-manifest $manifest \
    | xargs --verbose -- $0 src-tree-py spec-files $output_base_dir >/dev/null

  #| xargs -n 1 -P $MAX_PROCS -- $0 test-to-html $output_base_dir
  log "done: all-tests-to-html"
}

shell-sanity-check() {
  echo "PWD = $PWD"
  echo "PATH = $PATH"

  for sh in "$@"; do
    # note: shells are in $PATH, but not $OSH_LIST
    if ! $sh -c 'echo -n "hello from $0: "; command -v $0 || true'; then
      echo "ERROR: $sh failed sanity check"
      return 1
    fi
  done
}

filename=$(basename $0)
if test "$filename" = 'spec-runner.sh'; then
  "$@"
fi