#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

REPO_ROOT=$(cd $(dirname $0)/.. && pwd)
readonly REPO_ROOT

source $REPO_ROOT/soil/common.sh

# Number of jobs to show and keep.  A job corresponds to, say, soil/worker.sh
# JOB-dummy, and each git COMMIT produces more than 15 jobs.
readonly NUM_JOBS=4000

soil-web() {
  # We may be executed by wwup.cgi on the server, which doesn't have
  # PATH=~/bin, and soil/web.py's shebang is /usr/bin/env python2.

  # OpalStack doesn't need this
  # Also it still uses bash 4.2 with the empty array bug!

  local py2=~/bin/python2
  local prefix=''
  if test -f $py2; then
    prefix=$py2
  fi

  # Relies on empty elision of $prefix
  PYTHONPATH=$REPO_ROOT $prefix $REPO_ROOT/soil/web.py "$@"
}
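
# A minimal sketch (not used by CI) of the "empty elision" that soil-web
# relies on: an unquoted, empty $prefix expands to zero words, so the command
# line degrades to running web.py directly, without a wrapper interpreter.
# The function names below are hypothetical, for illustration only.
demo-empty-elision() {
  count-args() { echo "argc = $#"; }

  local prefix=''
  count-args $prefix python2 web.py   # argc = 2: the empty $prefix vanishes

  prefix=~/bin/python2
  count-args $prefix python2 web.py   # argc = 3: $prefix is now a real word
}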

# Bug fix for another race:
# 'ls *.json' has a race: the shell expands the glob to names that may no
# longer exist, and then 'ls' fails!
list-json() {
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  for name in $dir/*/*.json; do
    echo $name
  done
}
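
# Sketch of the race above (illustrative): the shell expands the glob before
# running any command, so if another job deletes a file in between,
#
#     ls $dir/*/*.json
#
# exits non-zero, and under errexit/pipefail that failure aborts the caller.
# The for loop never hands the names to an external command, so a file
# deleted mid-run at worst yields a stale name downstream.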

rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  local prefix=$1
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  log "soil-web: Rewriting uuu/${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different file systems, I believe 'mv'
  # falls back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists

  # Limit to the last $NUM_JOBS jobs.  The glob is in alphabetical order and
  # jobs look like 2020-03-20__...

  local index_tmp=$dir/$$.index.html  # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html  # only the jobs in this run/commit

  list-json $dir \
    | tail -n -$NUM_JOBS \
    | soil-web ${prefix}index $index_tmp $run_index_tmp $run_id

  echo "rewrite index status = ${PIPESTATUS[@]}"

  mv -v $index_tmp $dir/index.html

  mkdir -v -p $dir/$run_id  # this could be a new commit hash, etc.
  mv -v $run_index_tmp $dir/$run_id/index.html
}
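
# Minimal sketch (not used by CI) of the same-file-system rename trick that
# rewrite-jobs-index depends on for bug #1169: write the temp file next to the
# target, then mv.  A rename within one file system is atomic, so readers see
# either the old index.html or the new one, never a partial copy.  The default
# path below is hypothetical, for illustration only.
demo-atomic-write() {
  local target=${1:-_tmp/soil-demo/index.html}

  mkdir -p $(dirname $target)

  local tmp=$(dirname $target)/$$.tmp   # same dir => same file system
  date > $tmp
  mv -v $tmp $target
}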

cleanup-jobs-index() {
  local prefix=$1
  local dry_run=${2:-true}

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  # Pass it all the JSON, and it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: There's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json $dir | soil-web cleanup $NUM_JOBS | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json $dir | soil-web cleanup $NUM_JOBS
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
}
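
# Note on the xargs flags above (illustrative): --no-run-if-empty skips
# invoking 'rm' entirely when the cleanup list is empty, and 'rm -f' tolerates
# names that a concurrently finishing job already deleted.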

test-cleanup() {
  # the 999 jobs are the oldest

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}

cleanup-status-api() {
  ### cleanup the files used for maybe-merge

  local dry_run=${1:-true}

  local dir=$SOIL_HOST_DIR/uuu/status-api/github

  pushd $dir
  case $dry_run in
    false)
      # delete all but the last 30
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      # TODO: fix spew
      #ls | head -n -30
      ls | head
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
  popd
}
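
# Sketch of the retention logic above (illustrative): 'head -n -30' prints
# every line EXCEPT the last 30, so the last 30 entries in ls's sorted output
# survive and everything before them is removed.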

event-job-done() {
  ### "Server side" handler

  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # $GITHUB_RUN_NUMBER or git-$hash

  rewrite-jobs-index $prefix $run_id

  # note: we could speed jobs up by doing this separately?
  cleanup-jobs-index $prefix false
}

DISABLED-event-job-done() {
  ### Hook for wwup.cgi to execute

  # As long as the CGI script shows output, I don't think we need any wrappers
  # The scripts are written so we don't need to 'cd'
  _event-job-done "$@"
  return

  # This is the directory that soil/web-init.sh deploys to, and it's shaped
  # like the Oils repo
  cd ~/soil-web

  # Figure out why exit code is 127
  # Oh probably because it's not started in the home dir?

  # TODO: I guess wwup.cgi can buffer this entire response or something?
  # You POST and you get the status, stdout, stderr back?
  _event-job-done "$@" > ~/event-job-done.$$.log 2>&1
}

#
# Dev Tools
#

sync-testdata() {

  local dest=_tmp/github-jobs/

  rsync --archive --verbose \
    $SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/ $dest

  # 2023-04: 3.2 GB of files!  Probably can reduce this

  du --si -s $dest
}

copy-web() {
  ### for relative URLs to work

  cp -r -v web/ _tmp/
}

local-test() {
  ### Uses the sync'd testdata
  local dir=${1:-_tmp/github-jobs}

  local index=$dir/index.html

  local run_id=3722
  local run_index=$dir/$run_id/index.html

  list-json $dir | soil-web github-index $index $run_index $run_id

  echo "Wrote $index and $run_index"
}

hello() {
  echo "hi from $0"
  echo

  echo ARGS
  local i=0
  for arg in "$@"; do
    echo "[$i] $arg"

    # For testing wwup.cgi
    if test "$arg" = 'FAIL'; then
      echo 'failing early'
      return 42
    fi

    i=$(( i + 1 ))
  done
  echo

  whoami
  hostname
}

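# Dispatch: run the function named by the first argument, e.g.
# 'soil/web.sh hello a b' runs hello() with arguments 'a' and 'b'.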
"$@"