#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

REPO_ROOT=$(cd $(dirname $0)/.. && pwd)
readonly REPO_ROOT

source $REPO_ROOT/soil/common.sh
|
# Number of jobs to show and keep.  A "job" corresponds to e.g. soil/worker.sh
# JOB-dummy, and each git COMMIT produces more than 15 jobs, so 4000 covers at
# most a couple hundred commits.
readonly NUM_JOBS=4000
|
soil-web() {
  # We may be executed by wwup.cgi on the server, which doesn't have
  # PATH=~/bin, and the shebang is /usr/bin/env python2.

  # OpalStack doesn't need this.
  # Also, it still uses bash 4.2, which has the empty array bug!

  local py2=~/bin/python2
  local prefix=''
  if test -f $py2; then
    prefix=$py2
  fi

  # Relies on empty elision of $prefix
  PYTHONPATH=$REPO_ROOT $prefix $REPO_ROOT/soil/web.py "$@"
}
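
# A minimal sketch (not used by the jobs above) of the empty-elision trick
# that soil-web relies on: an unquoted empty variable expands to zero words,
# so the command line is unchanged when no prefix is set.
demo-empty-elision() {
  local prefix=''
  $prefix echo hello  # prefix is empty, so this runs: echo hello

  prefix='env'
  $prefix echo hello  # now it runs: env echo hello
}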
|
# Bug fix for another race: 'ls *.json' has a race.  The shell expands names
# of files that may no longer exist, and then 'ls' fails!
list-json() {
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  for name in $dir/*/*.json; do
    echo $name
  done
}
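
# Quick check of list-json on an empty directory (a hypothetical throwaway
# dir, for illustration).  This is why 'shopt -s nullglob' is set above:
# without it, an unmatched glob expands to the literal pattern
# '$dir/*/*.json' instead of to nothing.
test-list-json-empty() {
  local dir=_tmp/list-json-empty
  mkdir -p $dir
  list-json $dir  # prints nothing, rather than a bogus literal glob
  rm -r -f $dir
}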
|
rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  local prefix=$1
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  log "soil-web: Rewriting uuu/${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different file systems, I believe 'mv'
  # falls back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists

  # Limit to the last $NUM_JOBS jobs.  The glob is in alphabetical order, and
  # jobs look like 2020-03-20__...

  local index_tmp=$dir/$$.index.html  # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html  # only the jobs in this run/commit

  list-json $dir \
    | tail -n -$NUM_JOBS \
    | soil-web ${prefix}index $index_tmp $run_index_tmp $run_id

  echo "rewrite index status = ${PIPESTATUS[@]}"

  mv -v $index_tmp $dir/index.html

  mkdir -v -p $dir/$run_id  # this could be a new commit hash, etc.
  mv -v $run_index_tmp $dir/$run_id/index.html
}
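
# A minimal sketch of the write-then-rename pattern used above: write to a
# temp file in the destination directory, then mv it into place, so readers
# never observe a partially written index.  (Illustrative only; the _tmp
# path here is hypothetical.)
demo-atomic-write() {
  local dir=_tmp/atomic-demo
  mkdir -p $dir

  local tmp=$dir/$$.demo.txt
  echo 'new contents' > $tmp

  # rename(2) within a single file system is atomic
  mv -v $tmp $dir/demo.txt
}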
|
cleanup-jobs-index() {
  local prefix=$1
  local dry_run=${2:-true}

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  # Pass it all the JSON, and it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: there's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json $dir | soil-web cleanup $NUM_JOBS | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json $dir | soil-web cleanup $NUM_JOBS
      ;;
    *)
      log 'Expected true or false for dry_run'
      ;;
  esac
}
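
# Usage sketch (the 'github-' prefix matches the server layout above):
#
#   soil/web.sh cleanup-jobs-index 'github-'        # dry run: list candidates
#   soil/web.sh cleanup-jobs-index 'github-' false  # actually delete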
|
test-cleanup() {
  # The 999 jobs are the oldest

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}
|
cleanup-status-api() {
  ### Clean up the files used for maybe-merge

  local dry_run=${1:-true}

  local dir=$SOIL_HOST_DIR/uuu/status-api/github

  pushd $dir > /dev/null
  case $dry_run in
    false)
      # delete all but the last 30 entries
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      # TODO: fix spew
      #ls | head -n -30
      echo 'Last 10 status entries'
      echo
      ls | tail
      ;;
    *)
      log 'Expected true or false for dry_run'
      ;;
  esac
  popd > /dev/null
}
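
# Sanity check for the "keep the last N" pipeline above, with seq standing in
# for 'ls' output (zero-padded names sort the same way).  Prints 001..070,
# i.e. everything EXCEPT the last 30 of 100 lines.
test-keep-last-n() {
  seq -w 100 | head -n -30
}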
|
event-job-done() {
  ### "Server side" handler

  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # $GITHUB_RUN_NUMBER or git-$hash

  rewrite-jobs-index $prefix $run_id

  # Note: we could speed up jobs by doing this cleanup separately?
  cleanup-jobs-index $prefix false
}
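
# Example invocation, via the function dispatch at the bottom of this script
# (the run id shown is illustrative):
#
#   soil/web.sh event-job-done 'github-' 3722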
|
DISABLED-event-job-done() {
  ### Hook for wwup.cgi to execute

  # As long as the CGI script shows output, I don't think we need any wrappers.
  # The scripts are written so we don't need to 'cd'.
  _event-job-done "$@"
  return

  # This is the directory that soil/web-init.sh deploys to, and it's shaped
  # like the Oils repo.
  cd ~/soil-web

  # Figure out why the exit code is 127.
  # Oh, probably because it's not started in the home dir?

  # TODO: I guess wwup.cgi can buffer this entire response or something?
  # You POST, and you get the status, stdout, and stderr back?
  _event-job-done "$@" > ~/event-job-done.$$.log 2>&1
}
|
#
# Dev Tools
#

sync-testdata() {
  local dest=_tmp/github-jobs/

  rsync --archive --verbose \
    $SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/ $dest

  # 2023-04: 3.2 GB of files!  Probably can reduce this.
  du --si -s $dest
}
|
copy-web() {
  ### Copy web/ to _tmp/, so relative URLs work

  cp -r -v web/ _tmp/
}

local-test() {
  ### Uses the sync'd testdata
  local dir=${1:-_tmp/github-jobs}

  local index=$dir/index.html

  local run_id=3722
  local run_index=$dir/$run_id/index.html

  list-json $dir | soil-web github-index $index $run_index $run_id

  echo "Wrote $index and $run_index"
}
|
hello() {
  echo "hi from $0"
  echo

  echo ARGS
  local i=0
  for arg in "$@"; do
    echo "[$i] $arg"

    # For testing wwup.cgi
    if test "$arg" = 'FAIL'; then
      echo 'failing early'
      return 42
    fi

    i=$(( i + 1 ))
  done
  echo

  whoami
  hostname
}

"$@"
|