#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

# Quoted (bug fix): the unquoted form breaks if the checkout path contains
# spaces or glob characters.
REPO_ROOT=$(cd "$(dirname "$0")/.." && pwd)
readonly REPO_ROOT

source "$REPO_ROOT/soil/common.sh"

# Jobs to keep in the index.  Each soil/worker.sh run has many jobs
# (JOB-dummy, etc.), so each git commit accounts for more than 15 jobs.
readonly JOBS_TO_SHOW=4000

# Fudge factor of 100 for concurrent deleting and listing
readonly JOBS_TO_KEEP=$(( JOBS_TO_SHOW + 100 ))
|
| 25 |
|
soil-web() {
  ### Run soil/web.py, preferring ~/bin/python2 when it exists.
  #
  # wwup.cgi on the server may exec us without PATH=~/bin, and web.py's
  # shebang is /usr/bin/env python2.
  #
  # OpalStack doesn't need this.  Also it still uses bash 4.2 with the
  # empty array bug!

  local py2_path=~/bin/python2
  local interp=''
  if test -f "$py2_path"; then
    interp=$py2_path
  fi

  # $interp is deliberately unquoted: when empty it elides to nothing,
  # so web.py runs via its own shebang.
  PYTHONPATH=$REPO_ROOT $interp $REPO_ROOT/soil/web.py "$@"
}
|
| 42 |
|
list-json() {
  ### Print every */*.json path under $1, one per line.
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  # We glob in the shell rather than running 'ls *.json', because ls has a
  # race: the shell can expand names that a concurrent cleanup already
  # deleted, and then ls fails.  (nullglob, set at the top of this file,
  # makes an empty match expand to nothing.)
  #
  # Also note that 1000/foo.json sorts alphabetically before 999/foo.json,
  # which is not numeric sorting.

  local path
  for path in "$dir"/*/*.json; do
    echo "$path"
  done
}
|
| 57 |
|
rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  # Builds two HTML indexes from the job *.json files, then moves them into
  # place with 'mv', which is atomic within a single file system.
  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  log "soil-web: Rewriting uuu/${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different systems, I believe 'mv' falls
  # back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists

  local index_tmp=$dir/$$.index.html  # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html  # only the jobs in this run/commit

  list-json $dir | soil-web ${prefix}index $index_tmp $run_index_tmp $run_id $JOBS_TO_SHOW

  # NOTE(review): with errexit + pipefail set at the top of this file, we only
  # reach this echo when the pipeline above succeeded, so this normally
  # prints all zeros -- confirm whether that's the intent.
  echo "rewrite index status = ${PIPESTATUS[@]}"

  # Atomically replace the all-jobs index.
  mv -v $index_tmp $dir/index.html

  mkdir -v -p $dir/$run_id  # this could be a new commit hash, etc.
  mv -v $run_index_tmp $dir/$run_id/index.html
}
|
| 87 |
|
cleanup-jobs-index() {
  ### Delete old job files under uuu/${prefix}jobs, keeping the newest ones.
  #
  # $1: 'github-' or 'sourcehut-'
  # $2: dry_run -- 'false' actually deletes; 'true' (default) only prints
  # $3: how many jobs to keep (default JOBS_TO_KEEP)
  local prefix=$1
  local dry_run=${2:-true}
  local max_jobs=${3:-$JOBS_TO_KEEP}

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  # Pass it all JSON, and then it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: There's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json $dir | soil-web cleanup $max_jobs | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json $dir | soil-web cleanup $max_jobs
      ;;
    *)
      log 'Expected true or false for dry_run'
      # Bug fix: previously this branch "succeeded" with status 0 because
      # 'log' succeeds; make the bad argument visible to callers.
      return 1
      ;;
  esac
}
|
| 110 |
|
test-cleanup() {
  ### Manual test: feed fake job paths to 'soil-web cleanup' and eyeball
  ### which ones it chooses to delete.
  # The 999 jobs are the oldest, so with a limit of 2 runs they should be
  # the ones selected for deletion (1000 sorts before 999 alphabetically,
  # but cleanup must sort numerically).

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}
|
| 125 |
|
cleanup-status-api() {
  ### cleanup the files used for maybe-merge
  #
  # $1: dry_run -- 'false' actually deletes; 'true' (default) only prints
  local dry_run=${1:-true}

  local dir=$SOIL_HOST_DIR/uuu/status-api/github

  # Bug fix: track status in a variable so an invalid dry_run returns
  # non-zero while still executing the popd below (an early 'return' would
  # leave the directory stack changed).
  local status=0

  pushd $dir > /dev/null
  case $dry_run in
    false)
      # delete all but the last 30
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      # TODO: fix spew
      #ls | head -n -30
      echo 'Last 10 status entries'
      echo
      ls | tail
      ;;
    *)
      log 'Expected true or false for dry_run'
      status=1  # previously this branch returned 0
      ;;
  esac
  popd > /dev/null
  return $status
}
|
| 151 |
|
event-job-done() {
  ### "Server side" handler, run on the web host when a job finishes.
  #
  # $1: 'github-' or 'sourcehut-'
  # $2: $GITHUB_RUN_NUMBER or git-$hash

  local job_prefix=$1
  local this_run=$2

  # Rebuild the HTML indexes first, so the finished job shows up.
  rewrite-jobs-index $job_prefix $this_run

  # note: we could speed jobs up by doing this separately?
  cleanup-jobs-index $job_prefix false
}
|
| 163 |
|
DISABLED-event-job-done() {
  ### Hook for wwup.cgi to execute
  # NOTE(review): the DISABLED- prefix keeps the "$@" dispatch at the bottom
  # from running this.  It calls _event-job-done, which is not defined in
  # this file -- presumably intentional while disabled; confirm before
  # re-enabling.

  # As long as the CGI script shows output, I don't think we need any wrappers
  # The scripts are written so we don't need to 'cd'
  _event-job-done "$@"
  return

  # Everything below the 'return' is unreachable; kept for reference.

  # This is the directory that soil/web-init.sh deploys to, and it's shaped
  # like the Oils repo
  cd ~/soil-web

  # Figure out why exit code is 127
  # Oh probably because it's not started in the home dir?

  # TODO: I guess wwup.cgi can buffer this entire response or something?
  # You POST and you get status, stdout, stderr back?
  _event-job-done "$@" > ~/event-job-done.$$.log 2>&1
}
|
| 183 |
|
| 184 | #
|
| 185 | # Dev Tools
|
| 186 | #
|
| 187 |
|
sync-testdata() {
  ### Mirror the server's github-jobs dir into _tmp/ for local testing.

  local out_dir=_tmp/github-jobs/

  # The remote path is relative to $SOIL_USER's home dir on the host.
  rsync --archive --verbose \
    $SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/ $out_dir

  # 2023-04: 3.2 GB of files!  Probably can reduce this
  du --si -s $out_dir
}
|
| 199 |
|
copy-web() {
  ### Copy the repo's web/ assets into _tmp/, so the relative URLs in the
  ### generated HTML resolve when browsing locally.
  cp -v -r web/ _tmp/
}
|
| 205 |
|
local-test() {
  ### Generate the HTML indexes from sync'd testdata (see sync-testdata).
  local job_dir=${1:-_tmp/github-jobs}

  local out_index=$job_dir/index.html

  # A run id present in the sync'd testdata.
  local run_id=3722
  local out_run_index=$job_dir/$run_id/index.html

  list-json $job_dir | soil-web github-index $out_index $out_run_index $run_id $JOBS_TO_SHOW

  echo "Wrote $out_index and $out_run_index"
}
|
| 219 |
|
hello() {
  ### Smoke-test function: echo args and host info.  An arg equal to
  ### 'FAIL' triggers an early non-zero return, for testing wwup.cgi.
  echo "hi from $0"
  echo

  echo ARGS
  local idx=0
  local arg
  for arg; do  # 'for arg' iterates over "$@"
    echo "[$idx] $arg"

    # For testing wwup.cgi
    if test "$arg" = 'FAIL'; then
      echo 'failing early'
      return 42
    fi

    idx=$(( idx + 1 ))
  done
  echo

  whoami
  hostname
}
|
| 242 |
|
| 243 | "$@"
|