#!/bin/echo "This file is sourced, not run"

# Set of functions to download, extract, and patch source tarballs.
#
# Tools to populate and verify a directory of package source tarballs
# (saved in $SRCDIR which defaults to $TOP/packages). Used by download.sh.
#
#   You can supply your own tarball in $SRCDIR to avoid downloading it.
#
#   You can also provide an expanded directory (same filename as $URL
#   but no version or extension) to be used instead of a tarball. This is
#   usually a source control checkout.
#
#   $IGNORE_REPOS - comma separated list of package names (or "all") to
#     download tarballs for anyway, ignoring the directory version if present.
#
# Functions to call from here:
#
# download
#   - fetch a file (with wget) if it doesn't already exist, or doesn't match
#     checksum.
#
#   It expects you to set:
#     $URL - Default location of file, including filename
#     $SHA1 - sha1sum of good file. (Blank means accept any file.)
#
#   You can also set these (which use filename from $URL):
#     $PREFERRED_MIRROR - Check here first (appending filename from $URL)
#     $MIRROR_LIST - Space separated list of fallback locations (appending
#       filename from $URL) to check if default location didn't have it.
#
#   Does not re-download existing tarballs if the $SHA1 matches/is blank.
#   Does not download tarball if expanded directory present.
#
# cleanup_oldfiles
#   - remove stale files from $SRCDIR
#
#   Stale means not referenced by a download call since start of script.
#   Only affects files, not subdirectories.


# Remove version information and extension from tarball name "$1".
# If "$2" is nonempty, add that version number back, keeping original extension.

proc noversion {
  # sed expression matching an optional "-<version>" part (digits/dots,
  # "_rc"/"-rc", "-pre", trailing digit+letter suffixes) followed by a
  # ".tar[.<compressed>]" extension (e.g. .gz, .bz2, .xz) at end of name.
  # Group \6 captures the extension so it can be re-attached below.
  setvar LOGRUS = ''s/-*\(\([0-9\.]\)*\([_-]rc\)*\(-pre\)*\([0-9][a-zA-Z]\)*\)*\(\.tar\(\..z2*\)*\)$''
  # No "$2": delete the whole match.  Otherwise replace the match with
  # "$2" plus the captured extension (\6).
  test -z $2 && setvar LOGRUS = ""$LOGRUS//"" || setvar LOGRUS = ""$LOGRUS/$2\\6/""

  echo $1 | sed -e $LOGRUS
}

# Print (one per line) every patch file that applies to $PACKAGE:
# first the project's own "$PATCHDIR/$PACKAGE-*.patch" files, then any
# user-supplied patches under "$MY_PATCH_DIR/$PACKAGE/", each group
# sorted by filename.
proc gather_patches {
  # Project patches, sorted; `test -f` filters out anything that isn't
  # a regular file.
  ls "$PATCHDIR/${PACKAGE}"-*.patch 2> /dev/null | sort | while read i
  {
    if test -f $i
    {
      echo $i
    }
  }

  # gather external package patches sorted by filename
  if test ! -z $MY_PATCH_DIR && test -d "${MY_PATCH_DIR}/${PACKAGE}"
  {
    for i in "${MY_PATCH_DIR}/${PACKAGE}/"*.patch
    {
      if test -f $i
      {
        echo $i
      }
    }
  }
}

# Apply any patches to this package
proc patch_package {
  gather_patches | while read i
  {
    if test -f $i
    {
      echo "Applying $i"
      shell {cd "${SRCTREE}/${PACKAGE}" &&
       patch -p1 -i $i &&
       sha1file $i >> "$SHA1FILE"} ||
        if test -z $ALLOW_PATCH_FAILURE
        {
          dienow
        }
    }
  }
}

# Get the tarball for this package

# Print the path of the tarball in $SRCDIR for package "$1", or nothing
# if no matching tarball exists.
proc find_package_tarball {
  # If there are multiple similar files we want the newest timestamp, in case
  # the URL just got upgraded but cleanup_oldfiles hasn't run yet.  Be able to
  # distinguish "package-123.tar.bz2" from "package-tests-123.tar.bz2" and
  # return the shorter one reliably.

  # `ls -tc` lists newest-first; "${i/*\//}" strips the directory part,
  # then noversion strips version+extension so the candidate can be
  # compared exactly against the bare package name "$1".
  ls -tc "$SRCDIR/$1-"*.tar* 2>/dev/null | while read i
  {
    if test $(noversion "${i/*\//}") == $1
    {
      echo $i
      break
    }
  }
}

# Print the directory holding package "$1"'s source: prefer a checkout
# directory under $SRCDIR, falling back to the extracted copy under
# $SRCTREE when the checkout is absent or $IGNORE_REPOS disables it.
# Dies if neither directory exists.
proc package_cache {
  setvar SNAPFROM = ""$SRCDIR/$1""
  shell {is_in_list $1 $IGNORE_REPOS || test ! -d $SNAPFROM} &&
    setvar SNAPFROM = ""$SRCTREE/$1""

  if test ! -d $SNAPFROM
  {
    echo "$1 not found.  Did you run download.sh?" >&2
    dienow
  }

  echo $SNAPFROM
}

# Extract tarball named in $1 and apply all relevant patches into
# "$BUILD/packages/$1".  Record sha1sum of tarball and patch files in
# sha1-for-source.txt.  Re-extract if tarball or patches change.

proc extract_package {
  mkdir -p $SRCTREE || dienow

  # Announce to the world that we're cracking open a new package

  setvar PACKAGE = "$1"
  announce $PACKAGE

  # If a source-control checkout exists for this package in $SRCDIR, use it
  # as-is and skip tarball extraction (unless $IGNORE_REPOS overrides that).
  # BUGFIX: this previously tested `is_in_list "PACKAGE" ...` — the literal
  # string "PACKAGE" instead of the package name — so a package listed in
  # $IGNORE_REPOS was never detected here (compare package_cache and
  # download, which pass the actual name).
  ! is_in_list $PACKAGE $IGNORE_REPOS && test -d "$SRCDIR/$PACKAGE" &&
    return 0

  # Find tarball, and determine type

  setvar FILENAME = "$(find_package_tarball "$PACKAGE")"
  setvar DECOMPRESS = """"
  test $FILENAME != ${FILENAME/%\.tar\.bz2/} && setvar DECOMPRESS = ""j""
  test $FILENAME != ${FILENAME/%\.tar\.gz/} && setvar DECOMPRESS = ""z""

  # If the source tarball doesn't exist, but the extracted directory is there,
  # assume everything's ok.

  setvar SHA1NAME = ""sha1-for-source.txt""
  setvar SHA1FILE = ""$SRCTREE/$PACKAGE/$SHA1NAME""
  if test -z $FILENAME
  {
    if test ! -e "$SRCTREE/$PACKAGE"
    {
      echo "No tarball for $PACKAGE" >&2
      dienow
    }

    # If the sha1sum file isn't there, re-patch the package.
    test ! -e $SHA1FILE && patch_package
    return 0
  }

  # Check the sha1 list from the previous extract.  If the source is already
  # up to date (including patches), keep it.

  setvar SHA1TAR = "$(sha1file "$FILENAME")"
  setvar SHALIST = $(cat "$SHA1FILE" 2> /dev/null)
  if test ! -z $SHALIST
  {
    # Cross off the tarball's sha1 and each patch's sha1; anything left
    # over (or missing) means the source tree is stale.
    for i in "$SHA1TAR" $(sha1file $(gather_patches))
    {
      # Is this sha1 in the file?
      if test -z $(echo "$SHALIST" | grep "$i")
      {
        setvar SHALIST = 'missing'
        break
      }
      # Remove it
      setvar SHALIST = "$(echo "$SHALIST" | sed "s/$i//" )"
    }
    # If we matched all the sha1sums, nothing more to do.
    test -z $SHALIST && return 0
  }

  # Re-extract the package, deleting the old one (if any)..

  echo -n "Extracting '$PACKAGE'"
  shell {
    # Unique temp dir per invocation (named after our PID via
    # /proc/self), removed on exit however this subshell ends.
    setvar UNIQUE = $(readlink /proc/self)
    trap 'rm -rf "$BUILD/temp-'$UNIQUE'"' EXIT
    rm -rf "$SRCTREE/$PACKAGE" 2>/dev/null
    mkdir -p "$BUILD/temp-$UNIQUE" $SRCTREE || dienow

    do { tar -xv${DECOMPRESS} -f $FILENAME -C "$BUILD/temp-$UNIQUE" &&
      # Wildcards only expand when they point to something that exists,
      # and even then they have to be an argument to a command.
      setvar TARDIR = "$(readlink -f "$BUILD/temp-$UNIQUE"/*)" &&
      touch "$TARDIR/$SHA1NAME"
    } | dotprogress

    # Only install the new tree if the marker file landed, proving the
    # extract-and-touch pipeline above completed.
    test -e "$BUILD/temp-$UNIQUE"/*/"$SHA1NAME" &&
    mv "$BUILD/temp-$UNIQUE/"* "$SRCTREE/$PACKAGE" &&
    echo $SHA1TAR >> "$SHA1FILE"
  }

  test $? -ne 0 && dienow

  patch_package
}

# Confirm that a file has the appropriate checksum (or exists but SHA1 is blank)
# Delete invalid file.

proc confirm_checksum {
  setvar SUM = "$(sha1file "$SRCDIR/$FILENAME" 2>/dev/null)"
  # Accept when the computed sha1 matches $SHA1, or when $SHA1 is blank and
  # the file merely exists.  NOTE(review): `a || b && c` groups left-to-right
  # as `(a || b) && c` — confirm that grouping matches the intended rule.
  if test x"$SUM" == x"$SHA1" || test -z $SHA1 && test -f "$SRCDIR/$FILENAME"
  {
    if test -z $SHA1
    {
      echo "No SHA1 for $FILENAME ($SUM)"
    } else {
      echo "Confirmed $FILENAME"
    }

    # Preemptively extract source packages?

    test -z $EXTRACT_ALL && return 0
    extract_package $BASENAME
    return $?
  }

  # If there's a corrupted file, delete it.  In theory it would be nice
  # to resume downloads, but wget creates "*.1" files instead.

  rm -f "$SRCDIR/$FILENAME"

  return 1
}

# Attempt to obtain file from a specific location

proc download_from {
  # Return success if we already have a valid copy of the file

  confirm_checksum && return 0

  # If we have another source, try to download file from there.

  test -z $1 && return 1
  # wget: 2 tries, 20 second timeout, write to the canonical filename.
  # On failure, remove any partial download and return 2.
  wget -t 2 -T 20 -O "$SRCDIR/$FILENAME" $1 ||
    shell {rm -f "$SRCDIR/$FILENAME"; return 2}
  # Freshen the timestamp so cleanup_oldfiles won't treat it as stale.
  touch -c "$SRCDIR/$FILENAME"

  confirm_checksum
}

# Confirm a file matches sha1sum, else try to download it from mirror list.

proc download {
  # Strip the directory part of $URL to get the local filename (the sed
  # s-command uses space as its delimiter: s<sp>.*/<sp><sp> deletes
  # everything through the last slash).
  # BUGFIX: the sed expression was missing its closing single quote,
  # which made this line a syntax error.
  setvar FILENAME = $(echo $URL | sed 's .*/  ')
  test -z $RENAME || setvar FILENAME = "$(echo "$FILENAME" | sed -r "$RENAME")"

  # sha1sum of empty input prints a hash; empty output means the tool is
  # missing, which is fatal since $SHA1 verification depends on it.
  if test -z $(sha1sum < /dev/null)
  {
    echo "Error: please install sha1sum" >&2
    exit 1
  }

  echo -ne "checking $FILENAME\r"

  # Update timestamp on tarball (if it exists) so cleanup_oldfiles keeps it

  touch -c "$SRCDIR"/"$FILENAME" 2>/dev/null

  # Give package name, minus file's version number and archive extension.
  setvar BASENAME = "$(noversion "$FILENAME")"
  if ! is_in_list $BASENAME $IGNORE_REPOS && test -d "$SRCDIR/$BASENAME"
  {
    echo "Using $SRCDIR/$BASENAME"
    # EXTRACT_ALL=force re-copies the checkout directory into $SRCTREE.
    if test $EXTRACT_ALL == force
    {
      rm -rf "$SRCTREE/$BASENAME" &&
      cp -a "$SRCDIR/$BASENAME" "$SRCTREE/$BASENAME" || dienow
    }

    return 0
  }

  # If environment variable specifies a preferred mirror, try that first.

  if test ! -z $PREFERRED_MIRROR
  {
    download_from "$PREFERRED_MIRROR/$FILENAME" && return 0
  }

  # Try original location, then mirrors.
  # Note: the URLs in mirror list cannot contain whitespace.

  download_from $URL && return 0
  for i in $MIRROR_LIST
  {
    download_from "$i/$FILENAME" && return 0
  }

  # Return failure.

  echo "Could not download $FILENAME"
  echo -en "\e[0m"
  return 1
}

# Clean obsolete files out of the source directory

# Record when this script started: download() touches every file it still
# wants, so anything with an older timestamp than this is stale.
setvar START_TIME = $(date +%s)

proc cleanup_oldfiles {
  # wait for asynchronous downloads to complete

  wait

  for i in "${SRCDIR}"/*
  {
    # Only regular files are candidates; directories (e.g. source-control
    # checkouts) are always kept.
    if test -f $i && test $(date +%s -r "$i") -lt ${START_TIME}
    {
      echo Removing old file $i
      rm -rf $i
    }
  }
}