OILS / soil / web.sh View on Github | oilshell.org

236 lines, 117 significant
#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

# Absolute path to the repo root (the parent of soil/).  Quoted so a checkout
# path containing spaces doesn't word-split.
REPO_ROOT=$(cd "$(dirname "$0")/.." && pwd)
readonly REPO_ROOT

source "$REPO_ROOT/soil/common.sh"

# Jobs to show and keep. This corresponds to say soil/worker.sh JOB-dummy,
# which means each git COMMIT is more than 15 jobs.
readonly NUM_JOBS=4000
soil-web() {
  ### Run soil/web.py, preferring ~/bin/python2 when it exists.

  # We may be executed by a wwup.cgi on the server, which doesn't have
  # PATH=~/bin, and the shebang is /usr/bin/env python2

  # OpalStack doesn't need this
  # Also it still uses bash 4.2 with the empty array bug!

  local py2=~/bin/python2
  local prefix=''
  if test -f "$py2"; then
    prefix=$py2
  fi

  # $prefix is deliberately UNQUOTED: when empty it must elide to zero words,
  # not become an empty argument.
  PYTHONPATH=$REPO_ROOT $prefix "$REPO_ROOT/soil/web.py" "$@"
}
39
# Bug fix for another race:
# ls *.json has a race: the shell expands files that may no longer exist, and
# then 'ls' fails!
list-json() {
  ### Print the path of every */*.json file under $dir, one per line.
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  # Glob in the shell instead of running 'ls' (see race above).  printf
  # instead of echo so a name like '-n' can't be eaten as an option.
  local name
  for name in "$dir"/*/*.json; do
    printf '%s\n' "$name"
  done
}
50
rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  log "soil-web: Rewriting uuu/${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different systems, I believe 'mv' falls
  # back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists

  # Limit to the last NUM_JOBS jobs. Glob is in alphabetical order and jobs
  # look like 2020-03-20__...
  # NOTE(review): an older comment said "last 100 jobs", but NUM_JOBS is 4000.

  local index_tmp=$dir/$$.index.html      # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html   # only the jobs in this run/commit

  list-json "$dir" \
    | tail -n -"$NUM_JOBS" \
    | soil-web "${prefix}index" "$index_tmp" "$run_index_tmp" "$run_id"

  # Show per-stage exit codes; pipefail is set, so a failure above aborts.
  echo "rewrite index status = ${PIPESTATUS[*]}"

  mv -v "$index_tmp" "$dir/index.html"

  mkdir -v -p "$dir/$run_id"  # this could be a new commit hash, etc.
  mv -v "$run_index_tmp" "$dir/$run_id/index.html"
}
85
cleanup-jobs-index() {
  ### Delete old job files under uuu/${prefix}jobs, keeping the newest NUM_JOBS.
  local prefix=$1
  local dry_run=${2:-true}  # default is a dry run: print, don't delete

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  # Pass it all JSON, and then it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: There's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json "$dir" | soil-web cleanup "$NUM_JOBS" | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json "$dir" | soil-web cleanup "$NUM_JOBS"
      ;;
    *)
      log 'Expected true or false for dry_run'
      ;;
  esac
}
107
test-cleanup() {
  ### Manual test of 'soil-web cleanup' with a keep-limit of 2 runs.

  # the 999 jobs are the oldest, so run 999 should be selected for deletion

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}
122
cleanup-status-api() {
  ### cleanup the files used for maybe-merge

  local dry_run=${1:-true}  # default is a dry run: print, don't delete

  local dir=$SOIL_HOST_DIR/uuu/status-api/github

  pushd "$dir"
  case $dry_run in
    false)
      # delete all but the last 30 entries (names here sort chronologically)
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      ls | head -n -30
      ;;
    *)
      log 'Expected true or false for dry_run'
      ;;
  esac
  popd
}
144
event-job-done() {
  ### "Server side" handler: rebuild the index, then clean up old jobs.

  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # $GITHUB_RUN_NUMBER or git-$hash

  rewrite-jobs-index "$prefix" "$run_id"

  # note: we could speed jobs up by doing this separately?
  cleanup-jobs-index "$prefix" false
}
156
DISABLED-event-job-done() {
  ### Hook for wwup.cgi to execute (disabled; note the early 'return')

  # As long as the CGI script shows output, I don't think we need any wrappers
  # The scripts are written so we don't need to 'cd'
  # NOTE(review): _event-job-done (leading underscore) is not defined in this
  # file; presumably event-job-done was meant — confirm before re-enabling.
  _event-job-done "$@"
  return

  # Everything below the 'return' is unreachable; kept for reference.

  # This is the directory that soil/web-init.sh deploys to, and it's shaped
  # like the Oils repo
  cd ~/soil-web

  # Figure out why exit code is 127
  # Oh probably because it's not started in the home dir?

  # TODO: I guess wwup.cgi can buffer this entire response or something?
  # You POST and you get of status, stdout, stderr back?
  _event-job-done "$@" > ~/event-job-done.$$.log 2>&1
}
176
177#
178# Dev Tools
179#
180
sync-testdata() {
  ### Pull job data from the server into _tmp/github-jobs/ for local testing.

  local dest=_tmp/github-jobs/

  # NOTE(review): $SOIL_HOST appears twice — once as the ssh host and once as
  # the remote directory.  That works only if the remote dir is literally
  # named after the host; confirm it shouldn't be $SOIL_HOST_DIR.
  rsync --archive --verbose \
    "$SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/" "$dest"

  # 2023-04: 3.2 GB of files! Probably can reduce this

  du --si -s "$dest"
}
192
copy-web() {
  ### Copy the static web/ assets under _tmp/ so relative URLs resolve.

  cp --recursive --verbose web/ _tmp/
}
198
local-test() {
  ### Use the sync'd testdata (see sync-testdata) to rebuild indexes locally.
  local dir=${1:-_tmp/github-jobs}

  local index=$dir/index.html

  local run_id=3722  # a run that exists in the synced testdata
  local run_index=$dir/$run_id/index.html

  list-json "$dir" | soil-web github-index "$index" "$run_index" "$run_id"

  echo "Wrote $index and $run_index"
}
212
hello() {
  ### Smoke-test endpoint: echo each argument, then identity info.
  ### An argument of 'FAIL' makes it return 42 (for testing wwup.cgi).

  echo "hi from $0"
  echo

  echo ARGS
  local idx=0
  local a
  for a in "$@"; do
    echo "[$idx] $a"

    # For testing wwup.cgi
    if [[ "$a" == 'FAIL' ]]; then
      echo 'failing early'
      return 42
    fi

    idx=$(( idx + 1 ))
  done
  echo

  whoami
  hostname
}
235
236"$@"