You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
2648 lines
92 KiB
Bash
2648 lines
92 KiB
Bash
4 years ago
|
#!/usr/bin/env bash
|
||
|
|
||
|
# release.sh generates an addon zip file from a Git, SVN, or Mercurial checkout.
|
||
|
#
|
||
|
# This is free and unencumbered software released into the public domain.
|
||
|
#
|
||
|
# Anyone is free to copy, modify, publish, use, compile, sell, or
|
||
|
# distribute this software, either in source code form or as a compiled
|
||
|
# binary, for any purpose, commercial or non-commercial, and by any
|
||
|
# means.
|
||
|
#
|
||
|
# In jurisdictions that recognize copyright laws, the author or authors
|
||
|
# of this software dedicate any and all copyright interest in the
|
||
|
# software to the public domain. We make this dedication for the benefit
|
||
|
# of the public at large and to the detriment of our heirs and
|
||
|
# successors. We intend this dedication to be an overt act of
|
||
|
# relinquishment in perpetuity of all present and future rights to this
|
||
|
# software under copyright law.
|
||
|
#
|
||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||
|
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||
|
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||
|
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||
|
# OTHER DEALINGS IN THE SOFTWARE.
|
||
|
#
|
||
|
# For more information, please refer to <http://unlicense.org/>
|
||
|
|
||
|
## USER OPTIONS

# Secrets for uploading. May be left empty here; environment-variable
# fallbacks are applied in the "Load secrets" section further below.
cf_token=
github_token=
wowi_token=
wago_token=

# Variables set via command-line options (see usage() for descriptions).
slug=
addonid=
wagoid=
topdir=
releasedir=
overwrite=
nolib=
line_ending="dos"
skip_copying=
skip_externals=
skip_localization=
skip_zipfile=
skip_upload=
skip_cf_upload=
pkgmeta_file=
game_version=
game_type=
file_type=
file_name="{package-name}-{project-version}{nolib}{classic}"

wowi_markup="bbcode"

## END USER OPTIONS

# Bail out early on an old bash: the script uses 4.3+ features
# (associative arrays, ${var,,} case conversion, printf %(fmt)T, ...).
if [[ ${BASH_VERSINFO[0]} -lt 4 ]] || [[ ${BASH_VERSINFO[0]} -eq 4 && ${BASH_VERSINFO[1]} -lt 3 ]]; then
	echo "ERROR! bash version 4.3 or above is required. Your version is ${BASH_VERSION}." >&2
	exit 1
fi

# Game versions for uploading: maps this script's game type names to the
# flavor names used elsewhere.
declare -A game_flavors=( ["retail"]="mainline" ["classic"]="classic" ["bcc"]="bcc" )
declare -A game_versions
toc_version=

# Script return code
exit_code=0
|
||
|
|
||
|
# Escape a string so it is safe to use as the replacement text of a sed
# "s//" substitution: backslash, the "/" delimiter, and "&" (which sed
# expands to the whole match) must all be backslash-escaped.
escape_substr() {
	local str="$1"
	str=${str//\\/\\\\} # backslashes first, so later escapes are not doubled
	str=${str//\//\\/}  # the substitution delimiter
	str=${str//&/\\&}   # "&" is the whole-match reference
	echo "$str"
}
|
||
|
|
||
|
# File name templating: expand the {token} substitutions in a file name
# template ($1), writing the result to stdout.
#
# Reads globals set elsewhere: $skip_invalid, $game_type, $file_type,
# $nolib, $package and the si_* version-control fields.
filename_filter() {
	local classic alpha beta invalid
	# When validating a template (-n option), keep invalid characters so they
	# can be reported; during a real build replace them with "_".
	[ -n "$skip_invalid" ] && invalid="&" || invalid="_"
	if [[ "$game_type" != "retail" ]] && [[ "$game_type" != "classic" || "${si_project_version,,}" != *"-classic"* ]] && [[ "$game_type" != "bcc" || "${si_project_version,,}" != *"-bcc"* ]]; then
		# only append the game type if the tag doesn't include it
		classic="-$game_type"
	fi
	# {alpha}/{beta} flags are only non-empty for the matching build type.
	[ "$file_type" == "alpha" ] && alpha="-alpha"
	[ "$file_type" == "beta" ] && beta="-beta"
	# Values that may contain sed-special characters go through escape_substr.
	sed \
		-e "s/{package-name}/$( escape_substr "$package" )/g" \
		-e "s/{project-revision}/$si_project_revision/g" \
		-e "s/{project-hash}/$si_project_hash/g" \
		-e "s/{project-abbreviated-hash}/$si_project_abbreviated_hash/g" \
		-e "s/{project-author}/$( escape_substr "$si_project_author" )/g" \
		-e "s/{project-date-iso}/$si_project_date_iso/g" \
		-e "s/{project-date-integer}/$si_project_date_integer/g" \
		-e "s/{project-timestamp}/$si_project_timestamp/g" \
		-e "s/{project-version}/$( escape_substr "$si_project_version" )/g" \
		-e "s/{game-type}/${game_type}/g" \
		-e "s/{release-type}/${file_type}/g" \
		-e "s/{alpha}/${alpha}/g" \
		-e "s/{beta}/${beta}/g" \
		-e "s/{nolib}/${nolib:+-nolib}/g" \
		-e "s/{classic}/${classic}/g" \
		-e "s/\([^A-Za-z0-9._-]\)/${invalid}/g" \
		<<< "$1"
}
|
||
|
|
||
|
# Filter "#@<keyword>@" build-tag blocks from stdin to stdout.
#
# $1 - keyword (e.g. "retail", "debug")
# $2 - if non-empty, treat the keyword as NOT matching this build
#
# Matching build: the tag comment lines are stripped (content kept) and any
# "#@non-<keyword>@" block is deleted entirely.
# Non-matching build: the "#@<keyword>@" block is deleted entirely and the
# commented-out lines inside "#@non-<keyword>@" blocks are uncommented.
toc_filter() {
	local tag="$1"
	local negated="$2"
	if [ -n "$negated" ]; then
		# "non" build type: remove blocks (remove content), uncomment non-blocks (remove tags)
		sed \
			-e "/#@${tag}@/,/#@end-${tag}@/d" \
			-e "/#@non-${tag}@/,/#@end-non-${tag}@/s/^#[[:blank:]]\{1,\}//" \
			-e "/#@\(end-\)\{0,1\}non-${tag}@/d"
	else
		# "active" build type: remove comments (keep content), remove non-blocks (remove all)
		sed \
			-e "/#@\(end-\)\{0,1\}${tag}@/d" \
			-e "/#@non-${tag}@/,/#@end-non-${tag}@/d"
	fi
}
|
||
|
|
||
|
|
||
|
# Process command-line options
|
||
|
# Print command-line usage to stderr.
usage() {
	cat <<-'EOF' >&2
	Usage: release.sh [options]
	-c Skip copying files into the package directory.
	-d Skip uploading.
	-e Skip checkout of external repositories.
	-l Skip @localization@ keyword replacement.
	-L Only do @localization@ keyword replacement (skip upload to CurseForge).
	-o Keep existing package directory, overwriting its contents.
	-s Create a stripped-down "nolib" package.
	-u Use Unix line-endings.
	-z Skip zip file creation.
	-t topdir Set top-level directory of checkout.
	-r releasedir Set directory containing the package directory. Defaults to "$topdir/.release".
	-p curse-id Set the project id used on CurseForge for localization and uploading. (Use 0 to unset the TOC value)
	-w wowi-id Set the addon id used on WoWInterface for uploading. (Use 0 to unset the TOC value)
	-a wago-id Set the project id used on Wago Addons for uploading. (Use 0 to unset the TOC value)
	-g game-version Set the game version to use for uploading.
	-m pkgmeta.yaml Set the pkgmeta file to use.
	-n package-name Set the package zip file name. Use "-n help" for more info.
	EOF
}
|
||
|
|
||
|
# Process the command-line options (see usage() above for the summaries).
OPTIND=1
while getopts ":celLzusop:dw:a:r:t:g:m:n:" opt; do
	case $opt in
		c) skip_copying="true" ;; # Skip copying files into the package directory
		z) skip_zipfile="true" ;; # Skip creating a zip file
		e) skip_externals="true" ;; # Skip checkout of external repositories
		l) skip_localization="true" ;; # Skip @localization@ keyword replacement
		L) skip_cf_upload="true" ;; # Skip uploading to CurseForge
		d) skip_upload="true" ;; # Skip uploading
		u) line_ending="unix" ;; # Use LF instead of CRLF as the line ending for all text files
		o) overwrite="true" ;; # Don't delete existing directories in the release directory
		p) slug="$OPTARG" ;; # Set CurseForge project id
		w) addonid="$OPTARG" ;; # Set WoWInterface addon id
		a) wagoid="$OPTARG" ;; # Set Wago Addons project id
		r) releasedir="$OPTARG" ;; # Set the release directory
		t) # Set the top-level directory of the checkout
			if [ ! -d "$OPTARG" ]; then
				echo "Invalid argument for option \"-t\" - Directory \"$OPTARG\" does not exist." >&2
				usage
				exit 1
			fi
			topdir="$OPTARG"
			;;
		s) # Create a nolib package without externals
			nolib="true"
			skip_externals="true"
			;;
		g) # Set the game type or version
			OPTARG="${OPTARG,,}"
			case "$OPTARG" in
				retail|classic|bcc) game_type="$OPTARG" ;; # game_version from toc
				mainline) game_type="retail" ;;
				bc)
					# Renamed game type: fail with a migration hint.
					echo "Invalid argument for option \"-g\" ($OPTARG)" >&2
					echo "" >&2
					echo "The \"bc\" game type has been changed to \"bcc\" to match Blizzard." >&2
					echo "This affects TOC interface lines (Interface-BC -> Interface-BCC) and" >&2
					echo "build keywords (version-bc -> version-bcc)." >&2
					echo "" >&2
					exit 1
					;;
				*)
					# Set game version (x.y.z)
					# Build game type set from the last value if a list
					IFS=',' read -ra V <<< "$OPTARG"
					for i in "${V[@]}"; do
						if [[ ! "$i" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)[a-z]?$ ]]; then
							echo "Invalid argument for option \"-g\" ($i)" >&2
							usage
							exit 1
						fi
						# Map major.minor to a game type: 1.13 = classic, 2.5 = bcc,
						# anything else = retail.
						if [[ ${BASH_REMATCH[1]} == "1" && ${BASH_REMATCH[2]} == "13" ]]; then
							game_type="classic"
						elif [[ ${BASH_REMATCH[1]} == "2" && ${BASH_REMATCH[2]} == "5" ]]; then
							game_type="bcc"
						else
							game_type="retail"
						fi
						# Only one version per game type is allowed
						if [ -n "${game_versions[$game_type]}" ]; then
							echo "Invalid argument for option \"-g\" ($i) - Only one version per game type is supported." >&2
							usage
							exit 1
						fi
						game_versions[$game_type]="$i"
					done
					game_version="$OPTARG"
			esac
			;;
		m) # Set the pkgmeta file
			if [ ! -f "$OPTARG" ]; then
				echo "Invalid argument for option \"-m\" - File \"$OPTARG\" does not exist." >&2
				usage
				exit 1
			fi
			pkgmeta_file="$OPTARG"
			;;
		n) # Set the package file name
			if [ "$OPTARG" = "help" ]; then
				cat <<-'EOF' >&2
				Set the package zip file name. There are several string substitutions you can
				use to include version control and build type infomation in the file name.

				The default file name is "{package-name}-{project-version}{nolib}{classic}".

				Tokens: {package-name}{project-revision}{project-hash}{project-abbreviated-hash}
				{project-author}{project-date-iso}{project-date-integer}{project-timestamp}
				{project-version}{game-type}{release-type}

				Flags: {alpha}{beta}{nolib}{classic}

				Tokens are always replaced with their value. Flags are shown prefixed with a dash
				depending on the build type.
				EOF
				exit 0
			fi
			file_name="$OPTARG"
			# Validate the template: any "{" or "}" remaining after substitution
			# means an unknown token was used; extract and report them.
			if skip_invalid=true filename_filter "$file_name" | grep -q '[{}]'; then
				tokens=$( skip_invalid=true filename_filter "$file_name" | sed -e '/^[^{]*{\|}[^{]*{\|}[^{]*/s//}{/g' -e 's/^}\({.*}\){$/\1/' )
				echo "Invalid argument for option \"-n\" - Invalid substitutions: $tokens" >&2
				exit 1
			fi
			;;
		:)
			echo "Option \"-$OPTARG\" requires an argument." >&2
			usage
			exit 1
			;;
		\?)
			if [ "$OPTARG" = "?" ] || [ "$OPTARG" = "h" ]; then
				usage
				exit 0
			fi
			echo "Unknown option \"-$OPTARG\"" >&2
			usage
			exit 1
			;;
	esac
done
shift $((OPTIND - 1))
|
||
|
|
||
|
# Set $topdir to top-level directory of the checkout.
# If -t was not given, walk up from the current directory until a
# .git/.svn/.hg directory is found, keeping $topdir as a relative
# "../../.." style path.
if [ -z "$topdir" ]; then
	dir=$( pwd )
	if [ -d "$dir/.git" ] || [ -d "$dir/.svn" ] || [ -d "$dir/.hg" ]; then
		topdir=.
	else
		dir=${dir%/*}
		topdir=".."
		# $dir mirrors the walk so the loop terminates at the filesystem root.
		while [ -n "$dir" ]; do
			if [ -d "$topdir/.git" ] || [ -d "$topdir/.svn" ] || [ -d "$topdir/.hg" ]; then
				break
			fi
			dir=${dir%/*}
			topdir="$topdir/.."
		done
		if [ ! -d "$topdir/.git" ] && [ ! -d "$topdir/.svn" ] && [ ! -d "$topdir/.hg" ]; then
			echo "No Git, SVN, or Hg checkout found." >&2
			exit 1
		fi
	fi
fi
|
||
|
|
||
|
# Handle folding sections in CI logs. These are the plain fallbacks; the CI
# detection below installs service-specific versions when applicable.
start_group() {
	echo "$1"
}
end_group() {
	echo
}
|
||
|
|
||
|
# Check for Travis CI
if [ -n "$TRAVIS" ]; then
	# Don't run the packager for pull requests
	if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
		echo "Not packaging pull request."
		exit 0
	fi
	if [ -z "$TRAVIS_TAG" ]; then
		# Don't run the packager if there is a tag pending
		# (the tag build will do the packaging instead).
		check_tag=$( git -C "$topdir" tag --points-at HEAD )
		if [ -n "$check_tag" ]; then
			echo "Found future tag \"${check_tag}\", not packaging."
			exit 0
		fi
		# Only package master, classic, or develop
		if [ "$TRAVIS_BRANCH" != "master" ] && [ "$TRAVIS_BRANCH" != "classic" ] && [ "$TRAVIS_BRANCH" != "develop" ]; then
			echo "Not packaging \"${TRAVIS_BRANCH}\"."
			exit 0
		fi
	fi
	# Redefine the log folding helpers with Travis' fold control sequences.
	# https://github.com/travis-ci/travis-build/tree/master/lib/travis/build/bash
	start_group() {
		echo -en "travis_fold:start:$2\\r\033[0K"
		# release_timer_id="$(printf %08x $((RANDOM * RANDOM)))"
		# release_timer_start_time="$(date -u +%s%N)"
		# echo -en "travis_time:start:${release_timer_id}\\r\033[0K"
		echo "$1"
	}
	end_group() {
		# local release_timer_end_time="$(date -u +%s%N)"
		# local duration=$((release_timer_end_time - release_timer_start_time))
		# echo -en "\\ntravis_time:end:${release_timer_id}:start=${release_timer_start_time},finish=${release_timer_end_time},duration=${duration}\\r\033[0K"
		echo -en "travis_fold:end:$1\\r\033[0K"
	}
fi
|
||
|
|
||
|
# Check for GitHub Actions
if [ -n "$GITHUB_ACTIONS" ]; then
	# Prevent duplicate builds: skip a branch build when the commit is also
	# tagged (the tag build will do the packaging).
	if [[ "$GITHUB_REF" == "refs/heads"* ]]; then
		check_tag=$( git -C "$topdir" tag --points-at HEAD )
		if [ -n "$check_tag" ]; then
			echo "Found future tag \"${check_tag}\", not packaging."
			exit 0
		fi
	fi
	# Log folding via Actions workflow commands.
	start_group() { echo "##[group]$1"; }
	end_group() { echo "##[endgroup]"; }
fi
unset check_tag
|
||
|
|
||
|
# Load secrets from a ".env" file when present, then fall back to the
# conventional environment variables for any token still unset.
if [ -f "$topdir/.env" ]; then
	# shellcheck disable=1090
	. "$topdir/.env"
elif [ -f ".env" ]; then
	. ".env"
fi
[ -z "$cf_token" ] && cf_token=$CF_API_KEY
[ -z "$github_token" ] && github_token=$GITHUB_OAUTH
[ -z "$wowi_token" ] && wowi_token=$WOWI_API_TOKEN
[ -z "$wago_token" ] && wago_token=$WAGO_API_TOKEN
|
||
|
|
||
|
# Set $releasedir to the directory which will contain the generated addon zipfile.
# ":=" assigns the default when the variable is unset or empty, matching the
# original -z test.
: "${releasedir:=$topdir/.release}"
|
||
|
|
||
|
# Set $basedir to the basename of the checkout directory.
basedir=$( cd "$topdir" && pwd )
basedir=${basedir##*/} # strip everything up to and including the last "/"
|
||
|
|
||
|
# Set $repository_type to "git" or "svn" or "hg".
# Git takes precedence if more than one metadata directory exists.
repository_type=
if [ -d "$topdir/.git" ]; then
	repository_type=git
elif [ -d "$topdir/.svn" ]; then
	repository_type=svn
elif [ -d "$topdir/.hg" ]; then
	repository_type=hg
else
	echo "No Git, SVN, or Hg checkout found in \"$topdir\"." >&2
	exit 1
fi
|
||
|
|
||
|
# $releasedir must be an absolute path or inside $topdir.
case $releasedir in
	/*) ;;
	$topdir/*) ;;
	*)
		echo "The release directory \"$releasedir\" must be an absolute path or inside \"$topdir\"." >&2
		exit 1
		;;
esac

# Create the staging directory.
mkdir -p "$releasedir" 2>/dev/null || {
	echo "Unable to create the release directory \"$releasedir\"." >&2
	exit 1
}

# Expand $topdir and $releasedir to their absolute paths for string comparisons later.
topdir=$( cd "$topdir" && pwd )
releasedir=$( cd "$releasedir" && pwd )
|
||
|
|
||
|
###
### set_info_<repo> returns the following information:
### (these are globals written by set_info_git/svn/hg and set_info_file below)
###
si_repo_type= # "git" or "svn" or "hg"
si_repo_dir= # the checkout directory
si_repo_url= # the checkout url
si_tag= # tag for HEAD
si_previous_tag= # previous tag
si_previous_revision= # [SVN|Hg] revision number for previous tag

si_project_revision= # Turns into the highest revision of the entire project in integer form, e.g. 1234, for SVN. Turns into the commit count for the project's hash for Git.
si_project_hash= # [Git|Hg] Turns into the hash of the entire project in hex form. e.g. 106c634df4b3dd4691bf24e148a23e9af35165ea
si_project_abbreviated_hash= # [Git|Hg] Turns into the abbreviated hash of the entire project in hex form. e.g. 106c63f
si_project_author= # Turns into the last author of the entire project. e.g. ckknight
si_project_date_iso= # Turns into the last changed date (by UTC) of the entire project in ISO 8601. e.g. 2008-05-01T12:34:56Z
si_project_date_integer= # Turns into the last changed date (by UTC) of the entire project in a readable integer fashion. e.g. 2008050123456
si_project_timestamp= # Turns into the last changed date (by UTC) of the entire project in POSIX timestamp. e.g. 1209663296
si_project_version= # Turns into an approximate version of the project. The tag name if on a tag, otherwise it's up to the repo. SVN returns something like "r1234", Git returns something like "v0.1-873fc1"

si_file_revision= # Turns into the current revision of the file in integer form, e.g. 1234, for SVN. Turns into the commit count for the file's hash for Git.
si_file_hash= # Turns into the hash of the file in hex form. e.g. 106c634df4b3dd4691bf24e148a23e9af35165ea
si_file_abbreviated_hash= # Turns into the abbreviated hash of the file in hex form. e.g. 106c63
si_file_author= # Turns into the last author of the file. e.g. ckknight
si_file_date_iso= # Turns into the last changed date (by UTC) of the file in ISO 8601. e.g. 2008-05-01T12:34:56Z
si_file_date_integer= # Turns into the last changed date (by UTC) of the file in a readable integer fashion. e.g. 20080501123456
si_file_timestamp= # Turns into the last changed date (by UTC) of the file in POSIX timestamp. e.g. 1209663296
||
|
# SVN date helper: convert a datetime string to a POSIX timestamp.
# $1 - datetime string
# $2 - strptime format (only used by the BSD/macOS branch; GNU date parses
#      the string directly)
strtotime() {
	local datetime="$1"
	local fmt="$2"
	case "${OSTYPE,,}" in
		*darwin*) date -j -f "$fmt" "$datetime" "+%s" 2>/dev/null ;; # bsd
		*) date -d "$datetime" +%s 2>/dev/null ;; # gnu
	esac
}
|
||
|
|
||
|
# Populate the si_* project variables from a Git checkout.
# $1 - checkout directory
set_info_git() {
	si_repo_dir="$1"
	si_repo_type="git"
	# Normalize ssh-style remotes (git@host:path) to https urls.
	si_repo_url=$( git -C "$si_repo_dir" remote get-url origin 2>/dev/null | sed -e 's/^git@\(.*\):/https:\/\/\1\//' )
	if [ -z "$si_repo_url" ]; then # no origin so grab the first fetch url
		si_repo_url=$( git -C "$si_repo_dir" remote -v | awk '/(fetch)/ { print $2; exit }' | sed -e 's/^git@\(.*\):/https:\/\/\1\//' )
	fi

	# Populate filter vars.
	si_project_hash=$( git -C "$si_repo_dir" show --no-patch --format="%H" 2>/dev/null )
	si_project_abbreviated_hash=$( git -C "$si_repo_dir" show --no-patch --abbrev=7 --format="%h" 2>/dev/null )
	si_project_author=$( git -C "$si_repo_dir" show --no-patch --format="%an" 2>/dev/null )
	si_project_timestamp=$( git -C "$si_repo_dir" show --no-patch --format="%at" 2>/dev/null )
	# TZ='' makes printf %(...)T render the commit timestamp in UTC.
	si_project_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_project_timestamp" )
	si_project_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_project_timestamp" )
	# XXX --depth limits rev-list :\ [ ! -s "$(git rev-parse --git-dir)/shallow" ] || git fetch --unshallow --no-tags
	si_project_revision=$( git -C "$si_repo_dir" rev-list --count "$si_project_hash" 2>/dev/null )

	# Get the tag for the HEAD.
	si_previous_tag=
	si_previous_revision=
	_si_tag=$( git -C "$si_repo_dir" describe --tags --always --abbrev=7 2>/dev/null )
	si_tag=$( git -C "$si_repo_dir" describe --tags --always --abbrev=0 2>/dev/null )
	# Set $si_project_version to the version number of HEAD. May be empty if there are no commits.
	si_project_version=$si_tag
	# The HEAD is not tagged if the HEAD is several commits past the most recent tag.
	if [ "$si_tag" = "$si_project_hash" ]; then
		# --abbrev=0 expands out the full sha if there was no previous tag
		si_project_version=$_si_tag
		si_previous_tag=
		si_tag=
	elif [ "$_si_tag" != "$si_tag" ]; then
		# not on a tag
		si_project_version=$( git -C "$si_repo_dir" describe --tags --abbrev=7 --exclude="*[Aa][Ll][Pp][Hh][Aa]*" 2>/dev/null )
		si_previous_tag=$( git -C "$si_repo_dir" describe --tags --abbrev=0 --exclude="*[Aa][Ll][Pp][Hh][Aa]*" 2>/dev/null )
		si_tag=
	else # we're on a tag, just jump back one commit
		if [[ ${si_tag,,} != *"beta"* && ${si_tag,,} != *"alpha"* ]]; then
			# full release, ignore beta tags
			si_previous_tag=$( git -C "$si_repo_dir" describe --tags --abbrev=0 --exclude="*[Aa][Ll][Pp][Hh][Aa]*" --exclude="*[Bb][Ee][Tt][Aa]*" HEAD~ 2>/dev/null )
		else
			si_previous_tag=$( git -C "$si_repo_dir" describe --tags --abbrev=0 --exclude="*[Aa][Ll][Pp][Hh][Aa]*" HEAD~ 2>/dev/null )
		fi
	fi
}
|
||
|
|
||
|
# Populate the si_* project variables from an SVN checkout.
# $1 - checkout directory
set_info_svn() {
	si_repo_dir="$1"
	si_repo_type="svn"

	# Temporary file to hold results of "svn info".
	_si_svninfo="${si_repo_dir}/.svn/release_sh_svninfo"
	svn info -r BASE "$si_repo_dir" 2>/dev/null > "$_si_svninfo"

	if [ -s "$_si_svninfo" ]; then
		_si_root=$( awk '/^Repository Root:/ { print $3; exit }' < "$_si_svninfo" )
		_si_url=$( awk '/^URL:/ { print $2; exit }' < "$_si_svninfo" )
		_si_revision=$( awk '/^Last Changed Rev:/ { print $NF; exit }' < "$_si_svninfo" )
		si_repo_url=$_si_root

		# Branch on the repo-relative part of the checkout URL.
		case ${_si_url#${_si_root}/} in
			tags/*)
				# Extract the tag from the URL.
				si_tag=${_si_url#${_si_root}/tags/}
				si_tag=${si_tag%%/*}
				si_project_revision="$_si_revision"
				;;
			*)
				# Check if the latest tag matches the working copy revision (/trunk checkout instead of /tags)
				_si_tag_line=$( svn log --verbose --limit 1 "$_si_root/tags" 2>/dev/null | awk '/^ A/ { print $0; exit }' )
				_si_tag=$( echo "$_si_tag_line" | awk '/^ A/ { print $2 }' | awk -F/ '{ print $NF }' )
				_si_tag_from_revision=$( echo "$_si_tag_line" | sed -e 's/^.*:\([0-9]\{1,\}\)).*$/\1/' ) # (from /project/trunk:N)

				if [ "$_si_tag_from_revision" = "$_si_revision" ]; then
					si_tag="$_si_tag"
					si_project_revision=$( svn info "$_si_root/tags/$si_tag" 2>/dev/null | awk '/^Last Changed Rev:/ { print $NF; exit }' )
				else
					# Set $si_project_revision to the highest revision of the project at the checkout path
					si_project_revision=$( svn info --recursive "$si_repo_dir" 2>/dev/null | awk '/^Last Changed Rev:/ { print $NF }' | sort -nr | head -n1 )
				fi
				;;
		esac

		if [ -n "$si_tag" ]; then
			si_project_version="$si_tag"
		else
			si_project_version="r$si_project_revision"
		fi

		# Get the previous tag and it's revision
		_si_limit=$((si_project_revision - 1))
		_si_tag=$( svn log --verbose --limit 1 "$_si_root/tags" -r $_si_limit:1 2>/dev/null | awk '/^ A/ { print $0; exit }' | awk '/^ A/ { print $2 }' | awk -F/ '{ print $NF }' )
		if [ -n "$_si_tag" ]; then
			si_previous_tag="$_si_tag"
			si_previous_revision=$( svn info "$_si_root/tags/$_si_tag" 2>/dev/null | awk '/^Last Changed Rev:/ { print $NF; exit }' )
		fi

		# Populate filter vars.
		si_project_author=$( awk '/^Last Changed Author:/ { print $0; exit }' < "$_si_svninfo" | cut -d" " -f4- )
		_si_timestamp=$( awk '/^Last Changed Date:/ { print $4,$5; exit }' < "$_si_svninfo" )
		si_project_timestamp=$( strtotime "$_si_timestamp" "%F %T" )
		si_project_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_project_timestamp" )
		si_project_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_project_timestamp" )
		# SVN repositories have no project hash.
		si_project_hash=
		si_project_abbreviated_hash=

		rm -f "$_si_svninfo" 2>/dev/null
	fi
}
|
||
|
|
||
|
# Populate the si_* project variables from a Mercurial checkout.
# $1 - checkout directory
set_info_hg() {
	si_repo_dir="$1"
	si_repo_type="hg"
	si_repo_url=$( hg --cwd "$si_repo_dir" paths -q default )
	if [ -z "$si_repo_url" ]; then # no default so grab the first path
		si_repo_url=$( hg --cwd "$si_repo_dir" paths | awk '{ print $3; exit }' )
	fi

	# Populate filter vars.
	si_project_hash=$( hg --cwd "$si_repo_dir" log -r . --template '{node}' 2>/dev/null )
	si_project_abbreviated_hash=$( hg --cwd "$si_repo_dir" log -r . --template '{node|short}' 2>/dev/null )
	si_project_author=$( hg --cwd "$si_repo_dir" log -r . --template '{author}' 2>/dev/null )
	si_project_timestamp=$( hg --cwd "$si_repo_dir" log -r . --template '{date}' 2>/dev/null | cut -d. -f1 )
	si_project_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_project_timestamp" )
	si_project_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_project_timestamp" )
	si_project_revision=$( hg --cwd "$si_repo_dir" log -r . --template '{rev}' 2>/dev/null )

	# Get tag info
	si_tag=
	# I'm just muddling through revsets, so there is probably a better way to do this
	# Ignore tag commits, so v1.0-1 will package as v1.0
	if [ "$( hg --cwd "$si_repo_dir" log -r '.-filelog(.hgtags)' --template '{rev}' 2>/dev/null )" == "" ]; then
		# HEAD is a tag commit; use its parent for version info.
		_si_tip=$( hg --cwd "$si_repo_dir" log -r 'last(parents(.))' --template '{rev}' 2>/dev/null )
	else
		_si_tip=$( hg --cwd "$si_repo_dir" log -r . --template '{rev}' 2>/dev/null )
	fi
	si_previous_tag=$( hg --cwd "$si_repo_dir" log -r "$_si_tip" --template '{latesttag}' 2>/dev/null )
	# si_project_version=$( hg --cwd "$si_repo_dir" log -r "$_si_tip" --template "{ ifeq(changessincelatesttag, 0, latesttag, '{latesttag}-{changessincelatesttag}-m{node|short}') }" 2>/dev/null ) # git style
	si_project_version=$( hg --cwd "$si_repo_dir" log -r "$_si_tip" --template "{ ifeq(changessincelatesttag, 0, latesttag, 'r{rev}') }" 2>/dev/null ) # svn style
	if [ "$si_previous_tag" = "$si_project_version" ]; then
		# we're on a tag
		si_tag=$si_previous_tag
		si_previous_tag=$( hg --cwd "$si_repo_dir" log -r "last(parents($_si_tip))" --template '{latesttag}' 2>/dev/null )
	fi
	si_previous_revision=$( hg --cwd "$si_repo_dir" log -r "$si_previous_tag" --template '{rev}' 2>/dev/null )
}
|
||
|
|
||
|
# Populate the si_file_* variables for a single file in the checkout.
# $1 - path to the file (prefixed with $si_repo_dir for git/hg checkouts)
#
# FIX: the original used ${1#si_repo_dir}, which strips the *literal string*
# "si_repo_dir" instead of the variable's value, so git/hg were queried with
# a path that was never made relative to the checkout as the comment intends.
set_info_file() {
	if [ "$si_repo_type" = "git" ]; then
		_si_file=${1#"$si_repo_dir"/} # need the path relative to the checkout
		# Populate filter vars from the last commit the file was included in.
		si_file_hash=$( git -C "$si_repo_dir" log --max-count=1 --format="%H" "$_si_file" 2>/dev/null )
		si_file_abbreviated_hash=$( git -C "$si_repo_dir" log --max-count=1 --abbrev=7 --format="%h" "$_si_file" 2>/dev/null )
		si_file_author=$( git -C "$si_repo_dir" log --max-count=1 --format="%an" "$_si_file" 2>/dev/null )
		si_file_timestamp=$( git -C "$si_repo_dir" log --max-count=1 --format="%at" "$_si_file" 2>/dev/null )
		si_file_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_file_timestamp" )
		si_file_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_file_timestamp" )
		si_file_revision=$( git -C "$si_repo_dir" rev-list --count "$si_file_hash" 2>/dev/null ) # XXX checkout depth affects rev-list, see set_info_git
	elif [ "$si_repo_type" = "svn" ]; then
		_si_file="$1"
		# Temporary file to hold results of "svn info".
		_sif_svninfo="${si_repo_dir}/.svn/release_sh_svnfinfo"
		svn info "$_si_file" 2>/dev/null > "$_sif_svninfo"
		if [ -s "$_sif_svninfo" ]; then
			# Populate filter vars.
			si_file_revision=$( awk '/^Last Changed Rev:/ { print $NF; exit }' < "$_sif_svninfo" )
			si_file_author=$( awk '/^Last Changed Author:/ { print $0; exit }' < "$_sif_svninfo" | cut -d" " -f4- )
			_si_timestamp=$( awk '/^Last Changed Date:/ { print $4,$5,$6; exit }' < "$_sif_svninfo" )
			si_file_timestamp=$( strtotime "$_si_timestamp" "%F %T %z" )
			si_file_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_file_timestamp" )
			si_file_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_file_timestamp" )
			# SVN repositories have no project hash.
			si_file_hash=
			si_file_abbreviated_hash=

			rm -f "$_sif_svninfo" 2>/dev/null
		fi
	elif [ "$si_repo_type" = "hg" ]; then
		_si_file=${1#"$si_repo_dir"/} # need the path relative to the checkout
		# Populate filter vars.
		si_file_hash=$( hg --cwd "$si_repo_dir" log --limit 1 --template '{node}' "$_si_file" 2>/dev/null )
		si_file_abbreviated_hash=$( hg --cwd "$si_repo_dir" log --limit 1 --template '{node|short}' "$_si_file" 2>/dev/null )
		si_file_author=$( hg --cwd "$si_repo_dir" log --limit 1 --template '{author}' "$_si_file" 2>/dev/null )
		si_file_timestamp=$( hg --cwd "$si_repo_dir" log --limit 1 --template '{date}' "$_si_file" 2>/dev/null | cut -d. -f1 )
		si_file_date_iso=$( TZ='' printf "%(%Y-%m-%dT%H:%M:%SZ)T" "$si_file_timestamp" )
		si_file_date_integer=$( TZ='' printf "%(%Y%m%d%H%M%S)T" "$si_file_timestamp" )
		si_file_revision=$( hg --cwd "$si_repo_dir" log --limit 1 --template '{rev}' "$_si_file" 2>/dev/null )
	fi
}
|
||
|
|
||
|
# Set some version info about the project
case $repository_type in
	git) set_info_git "$topdir" ;;
	svn) set_info_svn "$topdir" ;;
	hg) set_info_hg "$topdir" ;;
esac

# Copy the si_* results into the names used by the rest of the script.
tag=$si_tag
project_version=$si_project_version
previous_version=$si_previous_tag
project_hash=$si_project_hash
project_revision=$si_project_revision
previous_revision=$si_previous_revision
project_timestamp=$si_project_timestamp
project_github_url=
project_github_slug=
# Derive the GitHub url and "owner/repo" slug when the project is hosted there.
if [[ "$si_repo_url" == "https://github.com"* ]]; then
	project_github_url=${si_repo_url%.git}
	project_github_slug=${project_github_url#https://github.com/}
fi
project_site=

# Bare carriage-return character.
carriage_return=$( printf "\r" )
|
||
|
|
||
|
# Returns 0 if $1 matches one of the colon-separated glob patterns in $2.
match_pattern() {
	_mp_file=$1
	_mp_list="$2:" # trailing ":" so the loop consumes the last pattern too
	until [ -z "$_mp_list" ]; do
		_mp_pattern=${_mp_list%%:*} # next pattern
		_mp_list=${_mp_list#*:}     # rest of the list
		# Unquoted RHS so the pattern is matched as a glob.
		# shellcheck disable=2053
		if [[ "$_mp_file" == $_mp_pattern ]]; then
			return 0
		fi
	done
	return 1
}
|
||
|
|
||
|
# Simple .pkgmeta YAML processor.
# Split a "key: value" line ($1) into the globals $yaml_key and $yaml_value,
# trimming leading whitespace and one pair of surrounding quotes from the value.
yaml_keyvalue() {
	local line="$1"
	yaml_key=${line%%:*}
	local value=${line#$yaml_key:}
	value=${value#"${value%%[! ]*}"} # drop leading spaces
	value=${value#[\"\']}           # drop one leading quote
	value=${value%[\"\']}           # drop one trailing quote
	yaml_value=$value
}
|
||
|
|
||
|
# Strip the leading "-" and any following whitespace from a YAML list
# line ($1); the result is returned in the global $yaml_item.
yaml_listitem() {
	local item=${1#-}
	item=${item#"${item%%[! ]*}"} # drop leading spaces
	yaml_item=$item
}
|
||
|
|
||
|
###
|
||
|
### Process .pkgmeta to set variables used later in the script.
|
||
|
###
|
||
|
|
||
|
# Locate the pkgmeta file when -m was not given: prefer "$topdir/.pkgmeta",
# falling back to "$topdir/pkgmeta.yaml".
if [ -z "$pkgmeta_file" ]; then
	pkgmeta_file="$topdir/.pkgmeta"
	# CurseForge allows this so check for it
	if [ ! -f "$pkgmeta_file" ] && [ -f "$topdir/pkgmeta.yaml" ]; then
		pkgmeta_file="$topdir/pkgmeta.yaml"
	fi
fi
|
||
|
|
||
|
# Variables set via .pkgmeta. These are the defaults used when the pkgmeta
# file does not override them (parsed further below).
package=
manual_changelog=
changelog=
changelog_markup="text"
enable_nolib_creation=
ignore=
contents=
nolib_exclude=
wowi_gen_changelog="true"
wowi_archive="true"
wowi_convert_changelog="true"
declare -A relations=()
|
||
|
|
||
|
# parse_ignore <pkgmeta-file> [copy-prefix]
# Reads the "ignore" section of a pkgmeta file and appends its patterns to
# the colon-separated global $ignore list. Without a prefix, paths are
# checked relative to $topdir; with one (used for externals), they are
# checked relative to the pkgmeta's own directory and prefixed in the list.
parse_ignore() {
  pkgmeta="$1"
  [ -f "$pkgmeta" ] || return 1

  checkpath="$topdir" # paths are relative to the topdir
  copypath=""
  if [ "$2" != "" ]; then
    checkpath=$( dirname "$pkgmeta" )
    copypath="$2/"
  fi

  yaml_eof=
  while [ -z "$yaml_eof" ]; do
    IFS='' read -r yaml_line || yaml_eof="true"
    # Skip commented out lines.
    if [[ $yaml_line =~ ^[[:space:]]*\# ]]; then
      continue
    fi
    # Strip any trailing CR character.
    yaml_line=${yaml_line%$carriage_return}

    case $yaml_line in
      [!\ ]*:*)
        # Top-level key: remember it as the current phase.
        yaml_keyvalue "$yaml_line"
        pkgmeta_phase=$yaml_key
        ;;
      [\ ]*"- "*)
        yaml_line=${yaml_line#"${yaml_line%%[! ]*}"} # trim leading whitespace
        # Get the YAML list item.
        yaml_listitem "$yaml_line"
        if [ "$pkgmeta_phase" = "ignore" ]; then
          pattern=$yaml_item
          if [ -d "$checkpath/$pattern" ]; then
            pattern="$copypath$pattern/*"
          elif [ ! -f "$checkpath/$pattern" ]; then
            # doesn't exist so match both a file and a path
            pattern="$copypath$pattern:$copypath$pattern/*"
          else
            pattern="$copypath$pattern"
          fi
          if [ -z "$ignore" ]; then
            ignore="$pattern"
          else
            ignore="$ignore:$pattern"
          fi
        fi
        ;;
    esac
  done < "$pkgmeta"
}
|
||
|
|
||
|
# Parse the pkgmeta file, driving a small state machine keyed on the
# current top-level key ($pkgmeta_phase).
if [ -f "$pkgmeta_file" ]; then
  if grep -q $'^[ ]*\t\+[[:blank:]]*[[:graph:]]' "$pkgmeta_file"; then
    # Try to cut down on some troubleshooting pain.
    echo "ERROR! Your pkgmeta file contains a leading tab. Only spaces are allowed for indentation in YAML files." >&2
    grep -n $'^[ ]*\t\+[[:blank:]]*[[:graph:]]' "$pkgmeta_file" | sed $'s/\t/^I/g'
    exit 1
  fi

  yaml_eof=
  while [ -z "$yaml_eof" ]; do
    IFS='' read -r yaml_line || yaml_eof="true"
    # Skip commented out lines.
    if [[ $yaml_line =~ ^[[:space:]]*\# ]]; then
      continue
    fi
    # Strip any trailing CR character.
    yaml_line=${yaml_line%$carriage_return}

    case $yaml_line in
      [!\ ]*:*)
        # Top-level "key: value" pair; also starts a new phase.
        yaml_keyvalue "$yaml_line"
        pkgmeta_phase=$yaml_key

        case $yaml_key in
          enable-nolib-creation)
            if [ "$yaml_value" = "yes" ]; then
              enable_nolib_creation="true"
            fi
            ;;
          manual-changelog)
            changelog=$yaml_value
            manual_changelog="true"
            ;;
          changelog-title)
            project="$yaml_value"
            ;;
          package-as)
            package=$yaml_value
            ;;
          wowi-create-changelog)
            if [ "$yaml_value" = "no" ]; then
              wowi_gen_changelog=
            fi
            ;;
          wowi-convert-changelog)
            if [ "$yaml_value" = "no" ]; then
              wowi_convert_changelog=
            fi
            ;;
          wowi-archive-previous)
            if [ "$yaml_value" = "no" ]; then
              wowi_archive=
            fi
            ;;
        esac
        ;;
      " "*)
        # Indented line: either a list item or a nested key, interpreted
        # according to the current phase.
        yaml_line=${yaml_line#"${yaml_line%%[! ]*}"} # trim leading whitespace
        case $yaml_line in
          "- "*)
            # Get the YAML list item.
            yaml_listitem "$yaml_line"
            case $pkgmeta_phase in
              ignore)
                pattern=$yaml_item
                if [ -d "$topdir/$pattern" ]; then
                  pattern="$pattern/*"
                elif [ ! -f "$topdir/$pattern" ]; then
                  # doesn't exist so match both a file and a path
                  pattern="$pattern:$pattern/*"
                fi
                if [ -z "$ignore" ]; then
                  ignore="$pattern"
                else
                  ignore="$ignore:$pattern"
                fi
                ;;
              tools-used)
                relations["$yaml_item"]="tool"
                ;;
              required-dependencies)
                relations["$yaml_item"]="requiredDependency"
                ;;
              optional-dependencies)
                relations["$yaml_item"]="optionalDependency"
                ;;
              embedded-libraries)
                relations["$yaml_item"]="embeddedLibrary"
                ;;
            esac
            ;;
          *:*)
            # Nested "key: value" pair under the current phase.
            yaml_keyvalue "$yaml_line"
            case $pkgmeta_phase in
              manual-changelog)
                case $yaml_key in
                  filename)
                    changelog=$yaml_value
                    manual_changelog="true"
                    ;;
                  markup-type)
                    if [ "$yaml_value" = "markdown" ] || [ "$yaml_value" = "html" ]; then
                      changelog_markup=$yaml_value
                    else
                      changelog_markup="text"
                    fi
                    ;;
                esac
                ;;
            esac
            ;;
        esac
        ;;
    esac
  done < "$pkgmeta_file"
fi
|
||
|
|
||
|
# Add untracked/ignored files to the ignore list
if [ "$repository_type" = "git" ]; then
  OLDIFS=$IFS
  IFS=$'\n'
  for _vcs_ignore in $( git -C "$topdir" ls-files --others --directory ); do
    if [ -d "$topdir/$_vcs_ignore" ]; then
      _vcs_ignore="$_vcs_ignore*"
    fi
    if [ -z "$ignore" ]; then
      ignore="$_vcs_ignore"
    else
      ignore="$ignore:$_vcs_ignore"
    fi
  done
  IFS=$OLDIFS
elif [ "$repository_type" = "svn" ]; then
  # svn always being difficult.
  OLDIFS=$IFS
  IFS=$'\n'
  for _vcs_ignore in $( cd "$topdir" && svn status --no-ignore --ignore-externals | awk '/^[?IX]/' | cut -c9- | tr '\\' '/' ); do
    if [ -d "$topdir/$_vcs_ignore" ]; then
      _vcs_ignore="$_vcs_ignore/*"
    fi
    if [ -z "$ignore" ]; then
      ignore="$_vcs_ignore"
    else
      ignore="$ignore:$_vcs_ignore"
    fi
  done
  IFS=$OLDIFS
elif [ "$repository_type" = "hg" ]; then
  # hg can emit the whole list NUL-delimited; convert to colons.
  _vcs_ignore=$( hg --cwd "$topdir" status --ignored --unknown --no-status --print0 | tr '\0' ':' )
  if [ -n "$_vcs_ignore" ]; then
    _vcs_ignore=${_vcs_ignore:0:-1} # drop trailing colon
    if [ -z "$ignore" ]; then
      ignore="$_vcs_ignore"
    else
      ignore="$ignore:$_vcs_ignore"
    fi
  fi
fi
|
||
|
|
||
|
###
### Process TOC file
###

# Set the package name from a TOC file name
if [[ -z "$package" ]]; then
  package=$( cd "$topdir" && find *.toc -maxdepth 0 2>/dev/null | head -n1 )
  if [[ -z "$package" ]]; then
    echo "Could not find an addon TOC file. In another directory? Set 'package-as' in .pkgmeta" >&2
    exit 1
  fi
  package=${package%.toc}
  # Strip a multi-toc game-type suffix from the package name.
  if [[ $package =~ ^(.*)-(Mainline|Classic|BCC)$ ]]; then
    package="${BASH_REMATCH[1]}"
  fi
fi

toc_path="$package.toc"

# Handle having the main addon in a sub dir
if [[ ! -f "$topdir/$toc_path" && -f "$topdir/$package/$toc_path" ]]; then
  toc_path="$package/$toc_path"
fi

if [[ ! -f "$topdir/$toc_path" ]]; then
  echo "Could not find an addon TOC file. In another directory? Make sure it matches the 'package-as' in .pkgmeta" >&2
  exit 1
fi
|
||
|
|
||
|
# Get the interface version for setting the upload version.
toc_file=$(
  # remove bom and cr and apply some non-version toc filters
  [ "$file_type" != "alpha" ] && _tf_alpha="true"
  sed -e $'1s/^\xEF\xBB\xBF//' -e $'s/\r//g' "$topdir/$toc_path" | toc_filter alpha ${_tf_alpha} | toc_filter debug true
)
root_toc_version=$( awk '/^## Interface:/ { print $NF; exit }' <<< "$toc_file" )
toc_version="$root_toc_version"
if [[ -n "$toc_version" && -z "$game_type" ]]; then
  # toc -> game type
  case $toc_version in
    113*) game_type="classic" ;;
    205*) game_type="bcc" ;;
    *) game_type="retail"
  esac
else
  # game type -> toc
  game_type_toc_version=$( awk 'tolower($0) ~ /^## interface-'${game_type:-retail}':/ { print $NF; exit }' <<< "$toc_file" )
  if [[ -z "$game_type" ]]; then
    # default to retail
    game_type="retail"
  elif [[ -n "$game_type_toc_version" ]]; then
    # use the game version value if set
    toc_version="$game_type_toc_version"
  fi
  # Check for other interface lines
  if [[ -z "$toc_version" ]] || \
     [[ "$game_type" == "classic" && "$toc_version" != "113"* ]] || \
     [[ "$game_type" == "bcc" && "$toc_version" != "205"* ]] || \
     [[ "$game_type" == "retail" && ("$toc_version" == "113"* || "$toc_version" == "205"*) ]]
  then
    toc_version="$game_type_toc_version"
    if [[ -z "$toc_version" ]]; then
      # Check @non-@ blocks
      case $game_type in
        classic) toc_version=$( sed -n '/@non-[-a-z]*@/,/@end-non-[-a-z]*@/{//b;p}' <<< "$toc_file" | awk '/#[[:blank:]]*## Interface:[[:blank:]]*(113)/ { print $NF; exit }' ) ;;
        bcc) toc_version=$( sed -n '/@non-[-a-z]*@/,/@end-non-[-a-z]*@/{//b;p}' <<< "$toc_file" | awk '/#[[:blank:]]*## Interface:[[:blank:]]*(205)/ { print $NF; exit }' ) ;;
      esac
      # This becomes the actual interface version after string replacements
      root_toc_version="$toc_version"
    fi
  fi
  if [[ -z "$toc_version" ]]; then
    echo "Addon TOC interface version is not compatible with the game version \"${game_type}\" or was not found." >&2
    exit 1
  fi
  if [[ "${toc_version,,}" == "incompatible" ]]; then
    echo "Addon TOC interface version is set as incompatible for game version \"${game_type}\"." >&2
    exit 1
  fi
fi
if [ -z "$game_version" ]; then
  # 11307 -> 1.13.7, 90105 -> 9.1.5, etc.
  printf -v game_version "%d.%d.%d" ${toc_version:0:1} ${toc_version:1:2} ${toc_version:3:2} 2>/dev/null || {
    echo "Addon TOC interface version \"${toc_version}\" is invalid." >&2
    exit 1
  }
  game_versions[$game_type]="$game_version"
fi
|
||
|
|
||
|
# Get the title of the project for using in the changelog.
# Strips WoW color codes (|cXXXXXXXX/|r) and texture escapes (|T...|t).
if [ -z "$project" ]; then
  project=$( awk '/^## Title:/ { print $0; exit }' <<< "$toc_file" | sed -e 's/|c[0-9A-Fa-f]\{8\}//g' -e 's/|r//g' -e 's/|T[^|]*|t//g' -e 's/## Title[[:space:]]*:[[:space:]]*\(.*\)/\1/' -e 's/[[:space:]]*$//' )
fi
# Grab CurseForge ID and WoWI ID from the TOC file if not set by the script.
if [ -z "$slug" ]; then
  slug=$( awk '/^## X-Curse-Project-ID:/ { print $NF; exit }' <<< "$toc_file" )
fi
if [ -z "$addonid" ]; then
  addonid=$( awk '/^## X-WoWI-ID:/ { print $NF; exit }' <<< "$toc_file" )
fi
if [ -z "$wagoid" ]; then
  wagoid=$( awk '/^## X-Wago-ID:/ { print $NF; exit }' <<< "$toc_file" )
fi
unset toc_file

# unset project ids if they are set to 0
[ "$slug" = "0" ] && slug=
[ "$addonid" = "0" ] && addonid=
[ "$wagoid" = "0" ] && wagoid=

# Automatic file type detection based on CurseForge rules
# 1) Untagged commits will be marked as an alpha.
# 2) Tagged commits will be marked as a release with the following exceptions:
#    - If the tag contains the word "alpha", it will be marked as an alpha file.
#    - If instead the tag contains the word "beta", it will be marked as a beta file.
if [ -n "$tag" ]; then
  if [[ "${tag,,}" == *"alpha"* ]]; then
    file_type="alpha"
  elif [[ "${tag,,}" == *"beta"* ]]; then
    file_type="beta"
  else
    file_type="release"
  fi
else
  file_type="alpha"
fi
|
||
|
|
||
|
# Print a summary of the build configuration.
echo
echo "Packaging $package"
if [ -n "$project_version" ]; then
  echo "Current version: $project_version"
fi
if [ -n "$previous_version" ]; then
  echo "Previous version: $previous_version"
fi
(
  # Subshell so the scratch vars don't leak.
  [ "$game_type" = "retail" ] && retail="retail" || retail="non-retail version-${game_type}"
  [ "$file_type" = "alpha" ] && alpha="alpha" || alpha="non-alpha"
  echo "Build type: ${retail} ${alpha} non-debug${nolib:+ nolib}"
  echo "Game version: ${game_version}"
  echo
)
if [[ "$slug" =~ ^[0-9]+$ ]]; then
  project_site="https://wow.curseforge.com"
  echo "CurseForge ID: $slug${cf_token:+ [token set]}"
fi
if [ -n "$addonid" ]; then
  echo "WoWInterface ID: $addonid${wowi_token:+ [token set]}"
fi
if [ -n "$wagoid" ]; then
  echo "Wago ID: $wagoid${wago_token:+ [token set]}"
fi
if [ -n "$project_github_slug" ]; then
  echo "GitHub: $project_github_slug${github_token:+ [token set]}"
fi
if [ -n "$project_site" ] || [ -n "$addonid" ] || [ -n "$wagoid" ] || [ -n "$project_github_slug" ]; then
  echo
fi
echo "Checkout directory: $topdir"
echo "Release directory: $releasedir"
echo

# Set $pkgdir to the path of the package directory inside $releasedir.
pkgdir="$releasedir/$package"
if [ -d "$pkgdir" ] && [ -z "$overwrite" ]; then
  #echo "Removing previous package directory: $pkgdir"
  rm -fr "$pkgdir"
fi
if [ ! -d "$pkgdir" ]; then
  mkdir -p "$pkgdir"
fi

# Set the contents of the addon zipfile.
contents="$package"
|
||
|
|
||
|
###
### Create filters for pass-through processing of files to replace repository keywords.
###

# Filter for simple repository keyword replacement.
# Replaces @project-*@ / @file-*@ tokens with the corresponding si_* values;
# author/version values are sed-escaped via escape_substr.
vcs_filter() {
  sed \
    -e "s/@project-revision@/$si_project_revision/g" \
    -e "s/@project-hash@/$si_project_hash/g" \
    -e "s/@project-abbreviated-hash@/$si_project_abbreviated_hash/g" \
    -e "s/@project-author@/$( escape_substr "$si_project_author" )/g" \
    -e "s/@project-date-iso@/$si_project_date_iso/g" \
    -e "s/@project-date-integer@/$si_project_date_integer/g" \
    -e "s/@project-timestamp@/$si_project_timestamp/g" \
    -e "s/@project-version@/$( escape_substr "$si_project_version" )/g" \
    -e "s/@file-revision@/$si_file_revision/g" \
    -e "s/@file-hash@/$si_file_hash/g" \
    -e "s/@file-abbreviated-hash@/$si_file_abbreviated_hash/g" \
    -e "s/@file-author@/$( escape_substr "$si_file_author" )/g" \
    -e "s/@file-date-iso@/$si_file_date_iso/g" \
    -e "s/@file-date-integer@/$si_file_date_integer/g" \
    -e "s/@file-timestamp@/$si_file_timestamp/g"
}
|
||
|
|
||
|
# Find URL of localization api.
# Sets $localization_url when a CurseForge project id, token, and site are
# all available; otherwise warns once if any .lua file uses @localization@.
set_localization_url() {
  localization_url=
  if [ -n "$slug" ] && [ -n "$cf_token" ] && [ -n "$project_site" ]; then
    localization_url="${project_site}/api/projects/$slug/localization/export"
  fi
  if [ -z "$localization_url" ] && find "$topdir" -path '*/.*' -prune -o -name "*.lua" -print0 | xargs -0 grep -q "@localization"; then
    echo "Skipping localization! Missing CurseForge API token and/or project id is invalid."
    echo
  fi
}
|
||
|
|
||
|
# Filter to handle @localization@ repository keyword replacement.
# https://authors.curseforge.com/knowledge-base/projects/531-localization-substitutions/
declare -A unlocalized_values=( ["english"]="ShowPrimary" ["comment"]="ShowPrimaryAsComment" ["blank"]="ShowBlankAsComment" ["ignore"]="Ignore" )
localization_filter() {
  _ul_eof=
  while [ -z "$_ul_eof" ]; do
    IFS='' read -r _ul_line || _ul_eof="true"
    # Strip any trailing CR character.
    _ul_line=${_ul_line%$carriage_return}
    case $_ul_line in
      *@localization\(*\)@*)
        _ul_lang=
        _ul_namespace=
        _ul_singlekey=
        _ul_tablename="L"
        # Get the prefix of the line before the comment.
        _ul_prefix=${_ul_line%%@localization(*}
        _ul_prefix=${_ul_prefix%%--*}
        # Strip everything but the localization parameters.
        _ul_params=${_ul_line#*@localization(}
        _ul_params=${_ul_params%)@}
        # Sanitize the params a bit. (namespaces are restricted to [a-zA-Z0-9_], separated by [./:])
        _ul_params=${_ul_params// /}
        _ul_params=${_ul_params//,/, }
        # Pull the locale language first (mainly for warnings).
        _ul_lang="enUS"
        if [[ $_ul_params == *"locale=\""* ]]; then
          _ul_lang=${_ul_params##*locale=\"}
          _ul_lang=${_ul_lang:0:4}
          _ul_lang=${_ul_lang%%\"*}
        else
          echo " Warning! No locale set, using enUS." >&2
        fi
        # Generate a URL parameter string from the localization parameters.
        # https://authors.curseforge.com/knowledge-base/projects/529-api
        _ul_url_params=""
        set -- ${_ul_params}
        for _ul_param; do
          _ul_key=${_ul_param%%=*}
          _ul_value=${_ul_param#*=}
          _ul_value=${_ul_value%,*}
          _ul_value=${_ul_value#*\"}
          _ul_value=${_ul_value%\"*}
          case ${_ul_key} in
            escape-non-ascii)
              if [ "$_ul_value" = "true" ]; then
                _ul_url_params="${_ul_url_params}&escape-non-ascii-characters=true"
              fi
              ;;
            format)
              if [ "$_ul_value" = "lua_table" ]; then
                _ul_url_params="${_ul_url_params}&export-type=Table"
              fi
              ;;
            handle-unlocalized)
              if [ "$_ul_value" != "english" ] && [ -n "${unlocalized_values[$_ul_value]}" ]; then
                _ul_url_params="${_ul_url_params}&unlocalized=${unlocalized_values[$_ul_value]}"
              fi
              ;;
            handle-subnamespaces)
              if [ "$_ul_value" = "concat" ]; then # concat with /
                _ul_url_params="${_ul_url_params}&concatenante-subnamespaces=true"
              elif [ "$_ul_value" = "subtable" ]; then
                echo " ($_ul_lang) Warning! ${_ul_key}=\"${_ul_value}\" is not supported. Include each full subnamespace, comma delimited." >&2
              fi
              ;;
            key)
              # _ul_params was stripped of spaces, so reparse the line for the key
              _ul_singlekey=${_ul_line#*@localization(}
              _ul_singlekey=${_ul_singlekey#*key=\"}
              _ul_singlekey=${_ul_singlekey%%\",*}
              _ul_singlekey=${_ul_singlekey%%\")@*}
              ;;
            locale)
              _ul_lang=$_ul_value
              ;;
            namespace)
              # reparse to get all namespaces if multiple
              _ul_namespace=${_ul_params##*namespace=\"}
              _ul_namespace=${_ul_namespace%%\"*}
              _ul_namespace=${_ul_namespace//, /,}
              _ul_url_params="${_ul_url_params}&namespaces=${_ul_namespace}"
              _ul_namespace="/${_ul_namespace}"
              ;;
            namespace-delimiter)
              if [ "$_ul_value" != "/" ]; then
                echo " ($_ul_lang) Warning! ${_ul_key}=\"${_ul_value}\" is not supported." >&2
              fi
              ;;
            prefix-values)
              echo " ($_ul_lang) Warning! \"${_ul_key}\" is not supported." >&2
              ;;
            same-key-is-true)
              if [ "$_ul_value" = "true" ]; then
                _ul_url_params="${_ul_url_params}&true-if-value-equals-key=true"
              fi
              ;;
            table-name)
              if [ "$_ul_value" != "L" ]; then
                _ul_tablename="$_ul_value"
                _ul_url_params="${_ul_url_params}&table-name=${_ul_value}"
              fi
              ;;
          esac
        done

        if [ -z "$_cdt_localization" ] || [ -z "$localization_url" ]; then
          echo " Skipping localization (${_ul_lang}${_ul_namespace})" >&2

          # If the line isn't a TOC entry, print anything before the keyword.
          if [[ $_ul_line != "## "* ]]; then
            if [ -n "$_ul_eof" ]; then
              echo -n "$_ul_prefix"
            else
              echo "$_ul_prefix"
            fi
          fi
        else
          _ul_url="${localization_url}?lang=${_ul_lang}${_ul_url_params}"
          echo " Adding ${_ul_lang}${_ul_namespace}" >&2

          if [ -z "$_ul_singlekey" ]; then
            # Write text that preceded the substitution.
            echo -n "$_ul_prefix"

            # Fetch the localization data, but don't output anything if there is an error.
            curl -s -H "x-api-token: $cf_token" "${_ul_url}" | awk -v url="$_ul_url" '/^{"error/ { o=" Error! "$0"\n "url; print o >"/dev/stderr"; exit 1 } /<!DOCTYPE/ { print " Error! Invalid output\n "url >"/dev/stderr"; exit 1 } /^'"$_ul_tablename"' = '"$_ul_tablename"' or \{\}/ { next } { print }'

            # Insert a trailing blank line to match CF packager.
            if [ -z "$_ul_eof" ]; then
              echo ""
            fi
          else
            # Parse out a single phrase. This is kind of expensive, but caching would be way too much effort to optimize for what is basically an edge case.
            _ul_value=$( curl -s -H "x-api-token: $cf_token" "${_ul_url}" | awk -v url="$_ul_url" '/^{"error/ { o=" Error! "$0"\n "url; print o >"/dev/stderr"; exit 1 } /<!DOCTYPE/ { print " Error! Invalid output\n "url >"/dev/stderr"; exit 1 } { print }' | sed -n '/L\["'"$_ul_singlekey"'"\]/p' | sed 's/^.* = "\(.*\)"/\1/' )
            if [ -n "$_ul_value" ] && [ "$_ul_value" != "$_ul_singlekey" ]; then
              # The result is different from the base value so print out the line.
              echo "${_ul_prefix}${_ul_value}${_ul_line##*)@}"
            fi
          fi
        fi
        ;;
      *)
        if [ -n "$_ul_eof" ]; then
          echo -n "$_ul_line"
        else
          echo "$_ul_line"
        fi
    esac
  done
}
|
||
|
|
||
|
# Lua build-type filter: wraps --@$1@ … --@end-$1@ regions in long-bracket
# comments (nesting level varies per keyword to avoid collisions) and
# un-comments the matching --@non-$1@ regions.
lua_filter() {
  local level
  case $1 in
    alpha) level="=" ;;
    debug) level="==" ;;
    retail|version-*) level="====" ;;
    *) level="==="
  esac
  sed \
    -e "s/--@$1@/--[${level}[@$1@/g" \
    -e "s/--@end-$1@/--@end-$1@]${level}]/g" \
    -e "s/--\[===\[@non-$1@/--@non-$1@/g" \
    -e "s/--@end-non-$1@\]===\]/--@end-non-$1@/g"
}
|
||
|
|
||
|
# Normalizes the "## Interface:" line of a TOC file to $toc_version and
# drops any game-specific "## Interface-*:" lines.
toc_interface_filter() {
  # Always remove BOM so ^ works
  if [ "$root_toc_version" != "$toc_version" ]; then
    # toc version isn't what is set in the toc file
    if [ -n "$root_toc_version" ]; then # rewrite
      sed -e $'1s/^\xEF\xBB\xBF//' -e 's/^## Interface:.*$/## Interface: '"$toc_version"'/' -e '/^## Interface-/d'
    else # add
      sed -e $'1s/^\xEF\xBB\xBF//' -e '1i\
## Interface: '"$toc_version" -e '/^## Interface-/d'
    fi
  else # cleanup
    sed -e $'1s/^\xEF\xBB\xBF//' -e '/^## Interface-/d'
  fi
}
|
||
|
|
||
|
# XML build-type filter: opens/closes comment markers around @$1@ regions
# and un-comments the matching @non-$1@ regions.
xml_filter() {
  sed \
    -e "s/<!--@$1@-->/<!--@$1@/g" \
    -e "s/<!--@end-$1@-->/@end-$1@-->/g" \
    -e "s/<!--@non-$1@/<!--@non-$1@-->/g" \
    -e "s/@end-non-$1@-->/<!--@end-non-$1@-->/g"
}
|
||
|
|
||
|
# Deletes @do-not-package@ … @end-do-not-package@ regions, using the
# comment syntax of the given file type (lua, toc, or xml).
do_not_package_filter() {
  case $1 in
    lua) sed '/--@do-not-package@/,/--@end-do-not-package@/d' ;;
    toc) sed '/#@do-not-package@/,/#@end-do-not-package@/d' ;;
    xml) sed '/<!--@do-not-package@-->/,/<!--@end-do-not-package@-->/d' ;;
  esac
}
|
||
|
|
||
|
# Normalizes line endings to $line_ending ("dos" or "unix"), preserving
# a missing final newline.
line_ending_filter() {
  local _lef_eof _lef_line
  while [ -z "$_lef_eof" ]; do
    IFS='' read -r _lef_line || _lef_eof="true"
    # Strip any trailing CR character.
    _lef_line=${_lef_line%$carriage_return}
    if [ -n "$_lef_eof" ]; then
      # Preserve EOF not preceded by newlines.
      echo -n "$_lef_line"
    else
      case $line_ending in
        dos) printf "%s\r\n" "$_lef_line" ;; # Terminate lines with CR LF.
        unix) printf "%s\n" "$_lef_line" ;; # Terminate lines with LF.
      esac
    fi
  done
}
|
||
|
|
||
|
###
### Copy files from the working directory into the package directory.
###

# Copy of the contents of the source directory into the destination directory.
# Dotfiles and any files matching the ignore pattern are skipped. Copied files
# are subject to repository keyword replacement.
#
# Options:
#   -a           alpha build: strip --@alpha@ blocks
#   -d           strip --@debug@ blocks
#   -i patterns  colon-separated ignore patterns
#   -l           enable @localization@ substitution
#   -n           nolib build: strip no-lib-strip blocks
#   -p           process @do-not-package@ blocks
#   -u patterns  colon-separated patterns copied with no replacement at all
#   -c type      classic game type ("classic" or "bcc")
#   -e           copying an external (don't nest progress groups)
copy_directory_tree() {
  _cdt_alpha=
  _cdt_debug=
  _cdt_ignored_patterns=
  _cdt_localization=
  _cdt_nolib=
  _cdt_do_not_package=
  _cdt_unchanged_patterns=
  _cdt_classic=
  _cdt_external=
  OPTIND=1
  while getopts :adi:lnpu:c:e _cdt_opt "$@"; do
    # shellcheck disable=2220
    case $_cdt_opt in
      a) _cdt_alpha="true" ;;
      d) _cdt_debug="true" ;;
      i) _cdt_ignored_patterns=$OPTARG ;;
      l) _cdt_localization="true"
         set_localization_url
         ;;
      n) _cdt_nolib="true" ;;
      p) _cdt_do_not_package="true" ;;
      u) _cdt_unchanged_patterns=$OPTARG ;;
      c) _cdt_classic=$OPTARG ;;
      e) _cdt_external="true" ;;
    esac
  done
  shift $((OPTIND - 1))
  _cdt_srcdir=$1
  _cdt_destdir=$2

  if [ -z "$_cdt_external" ]; then
    start_group "Copying files into ${_cdt_destdir#$topdir/}:" "copy"
  else # don't nest groups
    echo "Copying files into ${_cdt_destdir#$topdir/}:"
  fi
  if [ ! -d "$_cdt_destdir" ]; then
    mkdir -p "$_cdt_destdir"
  fi
  # Create a "find" command to list all of the files in the current directory, minus any ones we need to prune.
  _cdt_find_cmd="find ."
  # Prune everything that begins with a dot except for the current directory ".".
  _cdt_find_cmd+=" \( -name \".*\" -a \! -name \".\" \) -prune"
  # Prune the destination directory if it is a subdirectory of the source directory.
  _cdt_dest_subdir=${_cdt_destdir#${_cdt_srcdir}/}
  case $_cdt_dest_subdir in
    /*) ;;
    *) _cdt_find_cmd+=" -o -path \"./$_cdt_dest_subdir\" -prune" ;;
  esac
  # Print the filename, but suppress the current directory ".".
  _cdt_find_cmd+=" -o \! -name \".\" -print"
  ( cd "$_cdt_srcdir" && eval "$_cdt_find_cmd" ) | while read -r file; do
    file=${file#./}
    if [ -f "$_cdt_srcdir/$file" ]; then
      # Check if the file should be ignored.
      skip_copy=
      # Prefix external files with the relative pkgdir path
      _cdt_check_file=$file
      if [ -n "${_cdt_destdir#$pkgdir}" ]; then
        _cdt_check_file="${_cdt_destdir#$pkgdir/}/$file"
      fi
      # Skip files matching the colon-separated "ignored" shell wildcard patterns.
      if [ -z "$skip_copy" ] && match_pattern "$_cdt_check_file" "$_cdt_ignored_patterns"; then
        skip_copy="true"
      fi
      # Never skip files that match the colon-separated "unchanged" shell wildcard patterns.
      unchanged=
      if [ -n "$skip_copy" ] && match_pattern "$file" "$_cdt_unchanged_patterns"; then
        skip_copy=
        unchanged="true"
      fi
      # Copy unskipped files into $_cdt_destdir.
      if [ -n "$skip_copy" ]; then
        echo " Ignoring: $file"
      else
        dir=${file%/*}
        if [ "$dir" != "$file" ]; then
          mkdir -p "$_cdt_destdir/$dir"
        fi
        # Check if the file matches a pattern for keyword replacement.
        if [ -n "$unchanged" ] || ! match_pattern "$file" "*.lua:*.md:*.toc:*.txt:*.xml"; then
          echo " Copying: $file (unchanged)"
          cp "$_cdt_srcdir/$file" "$_cdt_destdir/$dir"
        else
          # Set the filters for replacement based on file extension.
          _cdt_filters="vcs_filter"
          case $file in
            *.lua)
              [ -n "$_cdt_do_not_package" ] && _cdt_filters+="|do_not_package_filter lua"
              [ -n "$_cdt_alpha" ] && _cdt_filters+="|lua_filter alpha"
              [ -n "$_cdt_debug" ] && _cdt_filters+="|lua_filter debug"
              if [ -n "$_cdt_classic" ]; then
                _cdt_filters+="|lua_filter retail"
                _cdt_filters+="|lua_filter version-retail"
                [ "$_cdt_classic" = "classic" ] && _cdt_filters+="|lua_filter version-bcc"
                [ "$_cdt_classic" = "bcc" ] && _cdt_filters+="|lua_filter version-classic"
              else
                _cdt_filters+="|lua_filter version-classic"
                _cdt_filters+="|lua_filter version-bcc"
              fi
              [ -n "$_cdt_localization" ] && _cdt_filters+="|localization_filter"
              ;;
            *.xml)
              [ -n "$_cdt_do_not_package" ] && _cdt_filters+="|do_not_package_filter xml"
              [ -n "$_cdt_nolib" ] && _cdt_filters+="|xml_filter no-lib-strip"
              [ -n "$_cdt_alpha" ] && _cdt_filters+="|xml_filter alpha"
              [ -n "$_cdt_debug" ] && _cdt_filters+="|xml_filter debug"
              if [ -n "$_cdt_classic" ]; then
                _cdt_filters+="|xml_filter retail"
                _cdt_filters+="|xml_filter version-retail"
                [ "$_cdt_classic" = "classic" ] && _cdt_filters+="|xml_filter version-bcc"
                [ "$_cdt_classic" = "bcc" ] && _cdt_filters+="|xml_filter version-classic"
              else
                _cdt_filters+="|xml_filter version-classic"
                _cdt_filters+="|xml_filter version-bcc"
              fi
              ;;
            *.toc)
              _cdt_filters+="|do_not_package_filter toc"
              [ -n "$_cdt_nolib" ] && _cdt_filters+="|toc_filter no-lib-strip true" # leave the tokens in the file normally
              _cdt_filters+="|toc_filter alpha ${_cdt_alpha}"
              _cdt_filters+="|toc_filter debug ${_cdt_debug}"
              _cdt_filters+="|toc_filter retail ${_cdt_classic:+true}"
              _cdt_filters+="|toc_filter version-retail ${_cdt_classic:+true}"
              _cdt_filters+="|toc_filter version-classic $([[ -z "$_cdt_classic" || "$_cdt_classic" == "bcc" ]] && echo "true")"
              _cdt_filters+="|toc_filter version-bcc $([[ -z "$_cdt_classic" || "$_cdt_classic" == "classic" ]] && echo "true")"
              [[ -z "$_cdt_external" && ! $file =~ -(Mainline|Classic|BCC).toc$ ]] && _cdt_filters+="|toc_interface_filter"
              [ -n "$_cdt_localization" ] && _cdt_filters+="|localization_filter"
              ;;
          esac

          # Set the filter for normalizing line endings.
          _cdt_filters+="|line_ending_filter"

          # Set version control values for the file.
          set_info_file "$_cdt_srcdir/$file"

          echo " Copying: $file"
          eval < "$_cdt_srcdir/$file" "$_cdt_filters" > "$_cdt_destdir/$file"
        fi
      fi
    fi
  done
  # BUGFIX: pair end_group with the start_group above, which is keyed on the
  # -e option (_cdt_external), not on the global _external_dir set by
  # checkout_external. Checking _external_dir could leave a group
  # unterminated (or end a group that was never started) once externals
  # have been processed.
  if [ -z "$_cdt_external" ]; then
    end_group "copy"
  fi
}
|
||
|
|
||
|
# Perform the main copy of the working tree into the package directory,
# building the copy_directory_tree option string from the build settings.
if [ -z "$skip_copying" ]; then
  cdt_args="-dp"
  [ "$file_type" != "alpha" ] && cdt_args+="a"
  [ -z "$skip_localization" ] && cdt_args+="l"
  [ -n "$nolib" ] && cdt_args+="n"
  [ "$game_type" != "retail" ] && cdt_args+=" -c $game_type"
  [ -n "$ignore" ] && cdt_args+=" -i \"$ignore\""
  [ -n "$changelog" ] && cdt_args+=" -u \"$changelog\""
  eval copy_directory_tree "$cdt_args" "\"$topdir\"" "\"$pkgdir\""
fi

# Reset ignore and parse pkgmeta ignores again to handle ignoring external paths
ignore=
parse_ignore "$pkgmeta_file"
|
||
|
|
||
|
###
|
||
|
### Process .pkgmeta again to perform any pre-move-folders actions.
|
||
|
###
|
||
|
|
||
|
# Runs "$@" up to 3 times, pausing 3 seconds between attempts.
# Returns the exit status of the last attempt.
retry() {
  local result=0
  local count=1
  while [[ "$count" -le 3 ]]; do
    if [[ "$result" -ne 0 ]]; then
      echo -e "\033[01;31mRetrying (${count}/3)\033[0m" >&2
    fi
    if "$@"; then
      result=0
      break
    fi
    result=$?
    count="$((count + 1))"
    # BUGFIX: only sleep between attempts; the original also slept for 3
    # seconds after the final failed attempt, delaying the error for nothing.
    [[ "$count" -le 3 ]] && sleep 3
  done
  return "$result"
}
|
||
|
|
||
|
# Checkout the external into a ".checkout" subdirectory of the final directory.
# Arguments:
#   $1 - package-relative directory to place the external in
#   $2 - repository url
#   $3 - tag/branch/commit to fetch ("latest" or empty for newest)
#   $4 - repository type: git, svn, or hg
#   $5 - curse slug (currently unused)
#   $6 - which of tag/branch/commit $3 holds
# Globals:  pkgdir (read), ignore (written by parse_ignore), si_* (via set_info_*)
# Returns:  0 on success, 1 on any fetch/copy failure
checkout_external() {
  _external_dir=$1
  _external_uri=$2
  _external_tag=$3
  _external_type=$4
  # shellcheck disable=2034
  _external_slug=$5 # unused until we can easily fetch the project id
  _external_checkout_type=$6

  _cqe_checkout_dir="$pkgdir/$_external_dir/.checkout"
  mkdir -p "$_cqe_checkout_dir"
  if [ "$_external_type" = "git" ]; then
    if [ -z "$_external_tag" ]; then
      echo "Fetching latest version of external $_external_uri"
      retry git clone -q --depth 1 "$_external_uri" "$_cqe_checkout_dir" || return 1
    elif [ "$_external_tag" != "latest" ]; then
      echo "Fetching $_external_checkout_type \"$_external_tag\" from external $_external_uri"
      if [ "$_external_checkout_type" = "commit" ]; then
        # a commit hash can't be cloned directly; clone then detach onto it
        retry git clone -q "$_external_uri" "$_cqe_checkout_dir" || return 1
        git -C "$_cqe_checkout_dir" checkout -q "$_external_tag" || return 1
      else
        git -c advice.detachedHead=false clone -q --depth 1 --branch "$_external_tag" "$_external_uri" "$_cqe_checkout_dir" || return 1
      fi
    else # [ "$_external_tag" = "latest" ]; then
      retry git clone -q --depth 50 "$_external_uri" "$_cqe_checkout_dir" || return 1
      # newest tag by creation date within the shallow history, if any
      _external_tag=$( git -C "$_cqe_checkout_dir" for-each-ref refs/tags --sort=-creatordate --format=%\(refname:short\) --count=1 )
      if [ -n "$_external_tag" ]; then
        echo "Fetching tag \"$_external_tag\" from external $_external_uri"
        git -C "$_cqe_checkout_dir" checkout -q "$_external_tag" || return 1
      else
        echo "Fetching latest version of external $_external_uri"
      fi
    fi

    # pull submodules
    git -C "$_cqe_checkout_dir" submodule -q update --init --recursive || return 1

    set_info_git "$_cqe_checkout_dir"
    echo "Checked out $( git -C "$_cqe_checkout_dir" describe --always --tags --abbrev=7 --long )" #$si_project_abbreviated_hash
  elif [ "$_external_type" = "svn" ]; then
    # FIX: this previously tested the global $external_uri, which only worked
    # because the parent's value leaked into the backgrounded subshell; use the
    # function's own parameter.
    if [[ $_external_uri == *"/trunk" ]]; then
      _cqe_svn_trunk_url=$_external_uri
      _cqe_svn_subdir=
    else
      # url points inside trunk; split into the trunk url and the subdirectory
      _cqe_svn_trunk_url="${_external_uri%/trunk/*}/trunk"
      _cqe_svn_subdir=${_external_uri#${_cqe_svn_trunk_url}/}
    fi

    if [ -z "$_external_tag" ]; then
      echo "Fetching latest version of external $_external_uri"
      retry svn checkout -q "$_external_uri" "$_cqe_checkout_dir" || return 1
    else
      _cqe_svn_tag_url="${_cqe_svn_trunk_url%/trunk}/tags"
      if [ "$_external_tag" = "latest" ]; then
        # most recently added directory under /tags/
        _external_tag=$( svn log --verbose --limit 1 "$_cqe_svn_tag_url" 2>/dev/null | awk '/^ A \/tags\// { print $2; exit }' | awk -F/ '{ print $3 }' )
        if [ -z "$_external_tag" ]; then
          _external_tag="latest"
        fi
      fi
      if [ "$_external_tag" = "latest" ]; then
        echo "No tags found in $_cqe_svn_tag_url"
        echo "Fetching latest version of external $_external_uri"
        retry svn checkout -q "$_external_uri" "$_cqe_checkout_dir" || return 1
      else
        _cqe_external_uri="${_cqe_svn_tag_url}/$_external_tag"
        if [ -n "$_cqe_svn_subdir" ]; then
          _cqe_external_uri="${_cqe_external_uri}/$_cqe_svn_subdir"
        fi
        echo "Fetching tag \"$_external_tag\" from external $_cqe_external_uri"
        retry svn checkout -q "$_cqe_external_uri" "$_cqe_checkout_dir" || return 1
      fi
    fi
    set_info_svn "$_cqe_checkout_dir"
    echo "Checked out r$si_project_revision"
  elif [ "$_external_type" = "hg" ]; then
    if [ -z "$_external_tag" ]; then
      echo "Fetching latest version of external $_external_uri"
      retry hg clone -q "$_external_uri" "$_cqe_checkout_dir" || return 1
    elif [ "$_external_tag" != "latest" ]; then
      echo "Fetching $_external_checkout_type \"$_external_tag\" from external $_external_uri"
      retry hg clone -q --updaterev "$_external_tag" "$_external_uri" "$_cqe_checkout_dir" || return 1
    else # [ "$_external_tag" = "latest" ]; then
      retry hg clone -q "$_external_uri" "$_cqe_checkout_dir" || return 1
      _external_tag=$( hg --cwd "$_cqe_checkout_dir" log -r . --template '{latesttag}' )
      if [ -n "$_external_tag" ]; then
        echo "Fetching tag \"$_external_tag\" from external $_external_uri"
        hg --cwd "$_cqe_checkout_dir" update -q "$_external_tag"
      else
        echo "Fetching latest version of external $_external_uri"
      fi
    fi
    set_info_hg "$_cqe_checkout_dir"
    echo "Checked out r$si_project_revision"
  else
    echo "Unknown external: $_external_uri" >&2
    return 1
  fi
  # Copy the checkout into the proper external directory.
  (
    cd "$_cqe_checkout_dir" || return 1
    # Set the slug for external localization, if needed.
    # Note: We don't actually do localization since we need the project id and
    # the only way to convert slug->id would be to scrape the project page :\
    slug= #$_external_slug
    project_site=
    if [[ "$_external_uri" == *"wowace.com"* || "$_external_uri" == *"curseforge.com"* ]]; then
      project_site="https://wow.curseforge.com"
    fi
    # If a .pkgmeta file is present, process it for an "ignore" list.
    parse_ignore "$_cqe_checkout_dir/.pkgmeta" "$_external_dir"
    copy_directory_tree -dnpe -i "$ignore" "$_cqe_checkout_dir" "$pkgdir/$_external_dir"
  )
  # Remove the ".checkout" subdirectory containing the full checkout.
  if [ -d "$_cqe_checkout_dir" ]; then
    rm -fr "$_cqe_checkout_dir"
  fi
}
|
||
|
|
||
|
# PIDs of backgrounded checkout_external jobs, reaped after the parse loop.
external_pids=()

# Per-external state accumulated while parsing the "externals" pkgmeta section;
# consumed and cleared by process_external.
external_dir=
external_uri=
external_tag=
external_type=
external_slug=
external_checkout_type=
|
||
|
# Launch a checkout for the currently queued external (if any) in the
# background, then reset the per-external state globals for the next entry.
# Globals: external_* (read+cleared), skip_externals, relations (written),
#          releasedir, external_pids (appended), BASHPID
process_external() {
  if [ -n "$external_dir" ] && [ -n "$external_uri" ] && [ -z "$skip_externals" ]; then
    # convert old curse repo urls
    case $external_uri in
      *git.curseforge.com*|*git.wowace.com*)
        external_type="git"
        # git://git.curseforge.com/wow/$slug/mainline.git -> https://repos.curseforge.com/wow/$slug
        external_uri=${external_uri%/mainline.git}
        external_uri="https://repos${external_uri#*://git}"
        ;;
      *svn.curseforge.com*|*svn.wowace.com*)
        external_type="svn"
        # svn://svn.curseforge.com/wow/$slug/mainline/trunk -> https://repos.curseforge.com/wow/$slug/trunk
        external_uri=${external_uri/\/mainline/}
        external_uri="https://repos${external_uri#*://svn}"
        ;;
      *hg.curseforge.com*|*hg.wowace.com*)
        external_type="hg"
        # http://hg.curseforge.com/wow/$slug/mainline -> https://repos.curseforge.com/wow/$slug
        external_uri=${external_uri%/mainline}
        external_uri="https://repos${external_uri#*://hg}"
        ;;
      svn:*)
        # just in case
        external_type="svn"
        ;;
      *)
        if [ -z "$external_type" ]; then
          external_type="git"
        fi
        ;;
    esac

    if [[ $external_uri == "https://repos.curseforge.com/wow/"* || $external_uri == "https://repos.wowace.com/wow/"* ]]; then
      # derive the slug from the url when not given explicitly
      if [ -z "$external_slug" ]; then
        external_slug=${external_uri#*/wow/}
        external_slug=${external_slug%%/*}
      fi

      # check if the repo is svn
      _svn_path=${external_uri#*/wow/$external_slug/}
      if [[ "$_svn_path" == "trunk"* ]]; then
        external_type="svn"
      elif [[ "$_svn_path" == "tags/"* ]]; then
        external_type="svn"
        # change the tag path into the trunk path and use the tag var so it gets logged as a tag
        external_tag=${_svn_path#tags/}
        external_tag=${external_tag%%/*}
        external_uri="${external_uri%/tags*}/trunk${_svn_path#tags/$external_tag}"
      fi
    fi

    if [ -n "$external_slug" ]; then
      relations["$external_slug"]="embeddedLibrary"
    fi

    echo "Fetching external: $external_dir"
    # run in the background; output goes to a per-PID file for ordered replay
    checkout_external "$external_dir" "$external_uri" "$external_tag" "$external_type" "$external_slug" "$external_checkout_type" &> "$releasedir/.$BASHPID.externalout" &
    external_pids+=($!)
  fi
  external_dir=
  external_uri=
  external_tag=
  external_type=
  external_slug=
  external_checkout_type=
}
|
||
|
|
||
|
# Don't leave extra files around if exited early
# NOTE: "kill 0" signals the whole process group, taking down any in-flight
# background checkout jobs along with this script.
kill_externals() {
  rm -f "$releasedir"/.*.externalout
  kill 0
}
trap kill_externals INT
|
||
|
|
||
|
# Parse the "externals" section of .pkgmeta, queueing each entry's url/tag/
# branch/commit/type/slug and dispatching a background checkout per external,
# then wait for all of them and replay their captured output.
if [ -z "$skip_externals" ] && [ -f "$pkgmeta_file" ]; then
  yaml_eof=
  while [ -z "$yaml_eof" ]; do
    IFS='' read -r yaml_line || yaml_eof="true"
    # Skip commented out lines.
    if [[ $yaml_line =~ ^[[:space:]]*\# ]]; then
      continue
    fi
    # Strip any trailing CR character.
    yaml_line=${yaml_line%$carriage_return}

    case $yaml_line in
    [!\ ]*:*)
      # Started a new section, so checkout any queued externals.
      process_external
      # Split $yaml_line into a $yaml_key, $yaml_value pair.
      yaml_keyvalue "$yaml_line"
      # Set the $pkgmeta_phase for stateful processing.
      pkgmeta_phase=$yaml_key
      ;;
    " "*)
      yaml_line=${yaml_line#"${yaml_line%%[! ]*}"} # trim leading whitespace
      case $yaml_line in
      "- "*)
        ;;
      *:*)
        # Split $yaml_line into a $yaml_key, $yaml_value pair.
        yaml_keyvalue "$yaml_line"
        case $pkgmeta_phase in
        externals)
          case $yaml_key in
          url) external_uri=$yaml_value ;;
          tag)
            external_tag=$yaml_value
            external_checkout_type=$yaml_key
            ;;
          branch)
            external_tag=$yaml_value
            external_checkout_type=$yaml_key
            ;;
          commit)
            external_tag=$yaml_value
            external_checkout_type=$yaml_key
            ;;
          type) external_type=$yaml_value ;;
          curse-slug) external_slug=$yaml_value ;;
          *)
            # Started a new external, so checkout any queued externals.
            process_external

            external_dir=$yaml_key
            nolib_exclude="$nolib_exclude $pkgdir/$external_dir/*"
            if [ -n "$yaml_value" ]; then
              external_uri=$yaml_value
              # Immediately checkout this fully-specified external.
              process_external
            fi
            ;;
          esac
          ;;
        esac
        ;;
      esac
      ;;
    esac
  done < "$pkgmeta_file"
  # Reached end of file, so checkout any remaining queued externals.
  process_external

  if [ ${#external_pids[*]} -gt 0 ]; then
    echo
    echo "Waiting for externals to finish..."
    echo

    # Reap finished jobs as they complete and replay their captured output in
    # a collapsible group (or in red if the job failed).
    while [ ${#external_pids[*]} -gt 0 ]; do
      wait -n
      for i in ${!external_pids[*]}; do
        pid=${external_pids[i]}
        if ! kill -0 $pid 2>/dev/null; then
          _external_output="$releasedir/.$pid.externalout"
          if ! wait $pid; then
            _external_error=1
            # wrap each line with a bright red color code
            awk '{ printf "\033[01;31m%s\033[0m\n", $0 }' "$_external_output"
            echo
          else
            start_group "$( head -n1 "$_external_output" )" "external.$pid"
            tail -n+2 "$_external_output"
            end_group "external.$pid"
          fi
          rm -f "$_external_output" 2>/dev/null
          unset 'external_pids[i]'
        fi
      done
    done

    if [ -n "$_external_error" ]; then
      echo
      echo "There was an error fetching externals :(" >&2
      exit 1
    fi
  fi
fi

# Restore the signal handlers
trap - INT
|
||
|
|
||
|
###
### Create the changelog of commits since the previous release tag.
###

# Default the changelog title to the package name when no project was set.
: "${project:=$package}"
|
||
|
|
||
|
# Create a changelog in the package directory if the source directory does
# not contain a manual changelog.
if [ -n "$manual_changelog" ] && [ -f "$topdir/$changelog" ]; then
  start_group "Using manual changelog at $changelog" "changelog"
  head -n7 "$topdir/$changelog"
  [ "$( wc -l < "$topdir/$changelog" )" -gt 7 ] && echo "..."
  end_group "changelog"

  # Convert Markdown to BBCode (with HTML as an intermediary) for sending to WoWInterface
  # Requires pandoc (http://pandoc.org/)
  if [ "$changelog_markup" = "markdown" ] && [ -n "$wowi_convert_changelog" ] && hash pandoc &>/dev/null; then
    wowi_changelog="$releasedir/WOWI-$project_version-CHANGELOG.txt"
    pandoc -f commonmark -t html "$topdir/$changelog" | sed \
      -e 's/<\(\/\)\?\(b\|i\|u\)>/[\1\2]/g' \
      -e 's/<\(\/\)\?em>/[\1i]/g' \
      -e 's/<\(\/\)\?strong>/[\1b]/g' \
      -e 's/<ul[^>]*>/[list]/g' -e 's/<ol[^>]*>/[list="1"]/g' \
      -e 's/<\/[ou]l>/[\/list]\n/g' \
      -e 's/<li><p>/[*]/g' -e 's/<li>/[*]/g' -e 's/<\/p><\/li>//g' -e 's/<\/li>//g' \
      -e 's/\[\*\]\[ \] /[*]☐ /g' -e 's/\[\*\]\[[xX]\] /[*]☒ /g' \
      -e 's/<h1[^>]*>/[size="6"]/g' -e 's/<h2[^>]*>/[size="5"]/g' -e 's/<h3[^>]*>/[size="4"]/g' \
      -e 's/<h4[^>]*>/[size="3"]/g' -e 's/<h5[^>]*>/[size="3"]/g' -e 's/<h6[^>]*>/[size="3"]/g' \
      -e 's/<\/h[1-6]>/[\/size]\n/g' \
      -e 's/<blockquote>/[quote]/g' -e 's/<\/blockquote>/[\/quote]\n/g' \
      -e 's/<div class="sourceCode"[^>]*><pre class="sourceCode lua"><code class="sourceCode lua">/[highlight="lua"]/g' -e 's/<\/code><\/pre><\/div>/[\/highlight]\n/g' \
      -e 's/<pre><code>/[code]/g' -e 's/<\/code><\/pre>/[\/code]\n/g' \
      -e 's/<code>/[font="monospace"]/g' -e 's/<\/code>/[\/font]/g' \
      -e 's/<a href=\"\([^"]\+\)\"[^>]*>/[url="\1"]/g' -e 's/<\/a>/\[\/url]/g' \
      -e 's/<img src=\"\([^"]\+\)\"[^>]*>/[img]\1[\/img]/g' \
      -e 's/<hr \/>/_____________________________________________________________________________\n/g' \
      -e 's/<\/p>/\n/g' \
      -e '/^<[^>]\+>$/d' -e 's/<[^>]\+>//g' \
      -e 's/&quot;/"/g' \
      -e 's/&amp;/\&/g' \
      -e 's/&lt;/</g' \
      -e 's/&gt;/>/g' \
      -e "s/&#39;/'/g" \
      | line_ending_filter > "$wowi_changelog"
    # FIX: the five expressions above decode the HTML entities pandoc emits;
    # they had been mangled into identity substitutions (e.g. 's/"/"/g').
  fi
else
  if [ -n "$manual_changelog" ]; then
    echo "Warning! Could not find a manual changelog at $topdir/$changelog"
    manual_changelog=
  fi
  changelog="CHANGELOG.md"
  changelog_markup="markdown"

  if [ -n "$wowi_gen_changelog" ] && [ -z "$wowi_convert_changelog" ]; then
    wowi_markup="markdown"
  fi

  start_group "Generating changelog of commits into $changelog" "changelog"

  _changelog_range=
  if [ "$repository_type" = "git" ]; then
    changelog_url=
    changelog_version=
    changelog_previous="[Previous Releases](${project_github_url}/releases)"
    changelog_url_wowi=
    changelog_version_wowi=
    changelog_previous_wowi="[url=${project_github_url}/releases]Previous Releases[/url]"
    if [ -z "$previous_version" ] && [ -z "$tag" ]; then
      # no range, show all commits up to ours
      changelog_url="[Full Changelog](${project_github_url}/commits/${project_hash})"
      changelog_version="[${project_version}](${project_github_url}/tree/${project_hash})"
      changelog_url_wowi="[url=${project_github_url}/commits/${project_hash}]Full Changelog[/url]"
      changelog_version_wowi="[url=${project_github_url}/tree/${project_hash}]${project_version}[/url]"
      _changelog_range="$project_hash"
    elif [ -z "$previous_version" ] && [ -n "$tag" ]; then
      # first tag, show all commits upto it
      changelog_url="[Full Changelog](${project_github_url}/commits/${tag})"
      changelog_version="[${project_version}](${project_github_url}/tree/${tag})"
      changelog_url_wowi="[url=${project_github_url}/commits/${tag}]Full Changelog[/url]"
      changelog_version_wowi="[url=${project_github_url}/tree/${tag}]${project_version}[/url]"
      _changelog_range="$tag"
    elif [ -n "$previous_version" ] && [ -z "$tag" ]; then
      # compare between last tag and our commit
      changelog_url="[Full Changelog](${project_github_url}/compare/${previous_version}...${project_hash})"
      changelog_version="[$project_version](${project_github_url}/tree/${project_hash})"
      changelog_url_wowi="[url=${project_github_url}/compare/${previous_version}...${project_hash}]Full Changelog[/url]"
      changelog_version_wowi="[url=${project_github_url}/tree/${project_hash}]${project_version}[/url]"
      _changelog_range="$previous_version..$project_hash"
    elif [ -n "$previous_version" ] && [ -n "$tag" ]; then
      # compare between last tag and our tag
      changelog_url="[Full Changelog](${project_github_url}/compare/${previous_version}...${tag})"
      changelog_version="[$project_version](${project_github_url}/tree/${tag})"
      changelog_url_wowi="[url=${project_github_url}/compare/${previous_version}...${tag}]Full Changelog[/url]"
      changelog_version_wowi="[url=${project_github_url}/tree/${tag}]${project_version}[/url]"
      _changelog_range="$previous_version..$tag"
    fi
    # lazy way out
    if [ -z "$project_github_url" ]; then
      changelog_url=
      changelog_version=$project_version
      changelog_previous=
      changelog_url_wowi=
      changelog_version_wowi="[color=orange]${project_version}[/color]"
      changelog_previous_wowi=
    elif [ -z "$github_token" ]; then
      # not creating releases :(
      changelog_previous=
      changelog_previous_wowi=
    fi
    changelog_date=$( TZ='' printf "%(%Y-%m-%d)T" "$project_timestamp" )

    cat <<- EOF | line_ending_filter > "$pkgdir/$changelog"
# $project

## $changelog_version ($changelog_date)
$changelog_url $changelog_previous

EOF
    git -C "$topdir" log "$_changelog_range" --pretty=format:"###%B" \
      | sed -e 's/^/ /g' -e 's/^ *$//g' -e 's/^ ###/- /g' -e 's/$/ /' \
        -e 's/\([a-zA-Z0-9]\)_\([a-zA-Z0-9]\)/\1\\_\2/g' \
        -e 's/\[ci skip\]//g' -e 's/\[skip ci\]//g' \
        -e '/git-svn-id:/d' -e '/^[[:space:]]*This reverts commit [0-9a-f]\{40\}\.[[:space:]]*$/d' \
        -e '/^[[:space:]]*$/d' \
      | line_ending_filter >> "$pkgdir/$changelog"

    # WoWI uses BBCode, generate something usable to post to the site
    # the file is deleted on successful upload
    if [ -n "$addonid" ] && [ -n "$tag" ] && [ -n "$wowi_gen_changelog" ] && [ "$wowi_markup" = "bbcode" ]; then
      wowi_changelog="$releasedir/WOWI-$project_version-CHANGELOG.txt"
      cat <<- EOF | line_ending_filter > "$wowi_changelog"
[size=5]${project}[/size]
[size=4]${changelog_version_wowi} (${changelog_date})[/size]
${changelog_url_wowi} ${changelog_previous_wowi}
[list]
EOF
      git -C "$topdir" log "$_changelog_range" --pretty=format:"###%B" \
        | sed -e 's/^/ /g' -e 's/^ *$//g' -e 's/^ ###/[*]/g' \
          -e 's/\[ci skip\]//g' -e 's/\[skip ci\]//g' \
          -e '/git-svn-id:/d' -e '/^[[:space:]]*This reverts commit [0-9a-f]\{40\}\.[[:space:]]*$/d' \
          -e '/^[[:space:]]*$/d' \
        | line_ending_filter >> "$wowi_changelog"
      echo "[/list]" | line_ending_filter >> "$wowi_changelog"
    fi

  elif [ "$repository_type" = "svn" ]; then
    if [ -n "$previous_revision" ]; then
      _changelog_range="-r$project_revision:$previous_revision"
    else
      _changelog_range="-rHEAD:1"
    fi
    changelog_date=$( TZ='' printf "%(%Y-%m-%d)T" "$project_timestamp" )

    cat <<- EOF | line_ending_filter > "$pkgdir/$changelog"
# $project

## $project_version ($changelog_date)

EOF
    svn log "$topdir" "$_changelog_range" --xml \
      | awk '/<msg>/,/<\/msg>/' \
      | sed -e 's/<msg>/###/g' -e 's/<\/msg>//g' \
        -e 's/^/ /g' -e 's/^ *$//g' -e 's/^ ###/- /g' -e 's/$/ /' \
        -e 's/\([a-zA-Z0-9]\)_\([a-zA-Z0-9]\)/\1\\_\2/g' \
        -e 's/\[ci skip\]//g' -e 's/\[skip ci\]//g' \
        -e '/^[[:space:]]*$/d' \
      | line_ending_filter >> "$pkgdir/$changelog"

    # WoWI uses BBCode, generate something usable to post to the site
    # the file is deleted on successful upload
    if [ -n "$addonid" ] && [ -n "$tag" ] && [ -n "$wowi_gen_changelog" ] && [ "$wowi_markup" = "bbcode" ]; then
      wowi_changelog="$releasedir/WOWI-$project_version-CHANGELOG.txt"
      cat <<- EOF | line_ending_filter > "$wowi_changelog"
[size=5]${project}[/size]
[size=4][color=orange]${project_version}[/color] (${changelog_date})[/size]

[list]
EOF
      svn log "$topdir" "$_changelog_range" --xml \
        | awk '/<msg>/,/<\/msg>/' \
        | sed -e 's/<msg>/###/g' -e 's/<\/msg>//g' \
          -e 's/^/ /g' -e 's/^ *$//g' -e 's/^ ###/[*]/g' \
          -e 's/\[ci skip\]//g' -e 's/\[skip ci\]//g' \
          -e '/^[[:space:]]*$/d' \
        | line_ending_filter >> "$wowi_changelog"
      echo "[/list]" | line_ending_filter >> "$wowi_changelog"
    fi

  elif [ "$repository_type" = "hg" ]; then
    if [ -n "$previous_revision" ]; then
      _changelog_range="::$project_revision - ::$previous_revision - filelog(.hgtags)"
    else
      _changelog_range="."
    fi
    changelog_date=$( TZ='' printf "%(%Y-%m-%d)T" "$project_timestamp" )

    cat <<- EOF | line_ending_filter > "$pkgdir/$changelog"
# $project

## $project_version ($changelog_date)

EOF
    hg --cwd "$topdir" log -r "$_changelog_range" --template '- {fill(desc|strip, 76, "", " ")}\n' | line_ending_filter >> "$pkgdir/$changelog"

    # WoWI uses BBCode, generate something usable to post to the site
    # the file is deleted on successful upload
    if [ -n "$addonid" ] && [ -n "$tag" ] && [ -n "$wowi_gen_changelog" ] && [ "$wowi_markup" = "bbcode" ]; then
      wowi_changelog="$releasedir/WOWI-$project_version-CHANGELOG.txt"
      cat <<- EOF | line_ending_filter > "$wowi_changelog"
[size=5]${project}[/size]
[size=4][color=orange]${project_version}[/color] (${changelog_date})[/size]

[list]
EOF
      # FIX: pass the revset with -r (matching the markdown changelog above);
      # as a positional argument hg would treat it as a file pattern.
      hg --cwd "$topdir" log -r "$_changelog_range" --template '[*]{desc|strip|escape}\n' | line_ending_filter >> "$wowi_changelog"
      echo "[/list]" | line_ending_filter >> "$wowi_changelog"
    fi
  fi

  echo "$(<"$pkgdir/$changelog")"
  end_group "changelog"
fi
|
||
|
|
||
|
###
### Process .pkgmeta to perform move-folders actions.
###

# Parse the "move-folders" section: each "src: dest" entry relocates an
# already-packaged directory within $releasedir and updates $contents /
# $nolib_exclude to match.
if [ -f "$pkgmeta_file" ]; then
  yaml_eof=
  while [ -z "$yaml_eof" ]; do
    IFS='' read -r yaml_line || yaml_eof="true"
    # Skip commented out lines.
    if [[ $yaml_line =~ ^[[:space:]]*\# ]]; then
      continue
    fi
    # Strip any trailing CR character.
    yaml_line=${yaml_line%$carriage_return}

    case $yaml_line in
    [!\ ]*:*)
      # Split $yaml_line into a $yaml_key, $yaml_value pair.
      yaml_keyvalue "$yaml_line"
      # Set the $pkgmeta_phase for stateful processing.
      pkgmeta_phase=$yaml_key
      ;;
    " "*)
      yaml_line=${yaml_line#"${yaml_line%%[! ]*}"} # trim leading whitespace
      case $yaml_line in
      "- "*)
        ;;
      *:*)
        # Split $yaml_line into a $yaml_key, $yaml_value pair.
        yaml_keyvalue "$yaml_line"
        case $pkgmeta_phase in
        move-folders)
          srcdir="$releasedir/$yaml_key"
          destdir="$releasedir/$yaml_value"
          # wipe an existing destination unless it contains the source
          if [[ -d "$destdir" && -z "$overwrite" && "$srcdir" != "$destdir/"* ]]; then
            rm -fr "$destdir"
          fi
          if [ -d "$srcdir" ]; then
            if [ ! -d "$destdir" ]; then
              mkdir -p "$destdir"
            fi
            echo "Moving $yaml_key to $yaml_value"
            mv -f "$srcdir"/* "$destdir" && rm -fr "$srcdir"
            contents="$contents $yaml_value"
            # Check to see if the base source directory is empty
            _mf_basedir=${srcdir%$(basename "$yaml_key")}
            if [ ! "$( ls -A "$_mf_basedir" )" ]; then
              echo "Removing empty directory ${_mf_basedir#$releasedir/}"
              rm -fr "$_mf_basedir"
            fi
          fi
          # update external dir
          nolib_exclude=${nolib_exclude//$srcdir/$destdir}
          ;;
        esac
        ;;
      esac
      ;;
    esac
  done < "$pkgmeta_file"
  if [ -n "$srcdir" ]; then
    echo
  fi
fi
|
||
|
|
||
|
###
|
||
|
### Create the final zipfile for the addon.
|
||
|
###
|
||
|
|
||
|
if [ -z "$skip_zipfile" ]; then
|
||
|
# Derive the zip file name and display label from the project version.
archive_version="$project_version"
archive_name="$( filename_filter "$file_name" ).zip"
archive_label="$archive_version"
if [[ "${file_name}" == *"{game-type}"* ]] || [[ "$game_type" != "retail" && "${file_name}" == *"{classic}"* ]]; then
  # append the game-type for clarity
  archive_label="$archive_version-$game_type"
  if [[ "$game_type" == "classic" && "${project_version,,}" == *"-classic"* ]] || [[ "$game_type" == "bcc" && "${project_version,,}" == *"-bcc"* ]]; then
    # this is mostly for BigWigs projects that tag classic separately (eg, v10-classic)
    # to prevent the extra -classic without changing all our workflows
    archive_label="$archive_version"
  fi
fi
archive="$releasedir/$archive_name"

# NOTE(review): "::set-output" is GitHub Actions' legacy output mechanism;
# newer runners expect writing to $GITHUB_OUTPUT — confirm target runner version.
if [ -n "$GITHUB_ACTIONS" ]; then
  echo "::set-output name=archive_path::${archive}"
fi
|
||
|
|
||
|
# Derive the no-lib zip file name/label, and swap it in as the primary archive
# when a nolib-only build was requested.
nolib_archive_version="${project_version}-nolib"
nolib_archive_name="$( nolib=true filename_filter "$file_name" ).zip"
if [ "$archive_name" = "$nolib_archive_name" ]; then
  # someone didn't include {nolib} and they're forcing nolib creation
  # FIX: strip the ".zip" *suffix* (%), not a prefix (#) that never matches;
  # the old expansion produced "name.zip-nolib.zip".
  nolib_archive_name="${nolib_archive_name%.zip}-nolib.zip"
fi
nolib_archive_label="${archive_label}-nolib"
nolib_archive="$releasedir/$nolib_archive_name"

if [ -n "$nolib" ]; then
  archive_version="$nolib_archive_version"
  archive_name="$nolib_archive_name"
  archive_label="$nolib_archive_label"
  archive="$nolib_archive"
  nolib_archive=
fi
|
||
|
|
||
|
# Build the zip; $contents is deliberately unquoted so it word-splits into the
# list of top-level entries to package.
start_group "Creating archive: $archive_name" "archive"
if [ -f "$archive" ]; then
  rm -f "$archive"
fi
#( cd "$releasedir" && zip -X -r "$archive" $contents )
( cd "$releasedir" && 7z a -bso0 -sns- -r "$archive" $contents )

# 7z leaves no file on failure; treat that as fatal
if [ ! -f "$archive" ]; then
  exit 1
fi
end_group "archive"
|
||
|
|
||
|
# Create nolib version of the zipfile
if [ -n "$enable_nolib_creation" ] && [ -z "$nolib" ] && [ -n "$nolib_exclude" ]; then
  # run the nolib_filter
  find "$pkgdir" -type f \( -name "*.xml" -o -name "*.toc" \) -print | while read -r file; do
    case $file in
      *.toc) _filter="toc_filter no-lib-strip true" ;;
      *.xml) _filter="xml_filter no-lib-strip" ;;
    esac
    # $_filter intentionally unquoted: it holds a command plus its arguments
    $_filter < "$file" > "$file.tmp" && mv "$file.tmp" "$file"
  done

  # make the exclude paths relative to the release directory
  nolib_exclude=${nolib_exclude//$releasedir\//}

  start_group "Creating no-lib archive: $nolib_archive_name" "archive.nolib"
  if [ -f "$nolib_archive" ]; then
    rm -f "$nolib_archive"
  fi
  # set noglob so each nolib_exclude path gets quoted instead of expanded
  ( set -f; cd "$releasedir" && zip -X -r -q "$nolib_archive" $contents -x $nolib_exclude )

  # a missing zip means zip failed; flag it but keep going (main archive exists)
  if [ ! -f "$nolib_archive" ]; then
    exit_code=1
  fi
  end_group "archive.nolib"
fi
|
||
|
|
||
|
###
### Deploy the zipfile.
###

# Each flag is "true" only when nothing disables that upload and every needed
# credential/id is present ([[ ]] && echo true prints nothing on failure).
upload_curseforge=$( [[ -z "$skip_upload" && -z "$skip_cf_upload" && -n "$slug" && -n "$cf_token" && -n "$project_site" ]] && echo true )
upload_wowinterface=$( [[ -z "$skip_upload" && -n "$tag" && -n "$addonid" && -n "$wowi_token" ]] && echo true )
upload_wago=$( [[ -z "$skip_upload" && -n "$wagoid" && -n "$wago_token" ]] && echo true )
upload_github=$( [[ -z "$skip_upload" && -n "$tag" && -n "$project_github_slug" && -n "$github_token" ]] && echo true )

# All upload paths parse JSON responses, so jq is a hard requirement.
if [[ -n "$upload_curseforge" || -n "$upload_wowinterface" || -n "$upload_github" || -n "$upload_wago" ]] && ! hash jq &>/dev/null; then
  echo "Skipping upload because \"jq\" was not found."
  echo
  upload_curseforge=
  upload_wowinterface=
  upload_wago=
  upload_github=
  exit_code=1
fi
|
||
|
|
||
|
# Resolve the CurseForge game-version ids to attach to the upload: first try
# matching the configured $game_version names, then fall back to the newest
# version for the current game type.
if [ -n "$upload_curseforge" ]; then
  _cf_versions=$( curl -s -H "x-api-token: $cf_token" $project_site/api/game/versions )
  if [ -n "$_cf_versions" ]; then
    _cf_game_version="$game_version"
    if [ -n "$_cf_game_version" ]; then
      # map the comma-separated names to a JSON array of ids
      _cf_game_version_id=$( echo "$_cf_versions" | jq -c --argjson v "[\"${game_version//,/\",\"}\"]" 'map(select(.name as $x | $v | index($x)) | .id) | select(length > 0)' 2>/dev/null )
      if [ -n "$_cf_game_version_id" ]; then
        # and now the reverse, since an invalid version will just be dropped
        _cf_game_version=$( echo "$_cf_versions" | jq -r --argjson v "$_cf_game_version_id" 'map(select(.id as $x | $v | index($x)) | .name) | join(",")' 2>/dev/null )
      fi
    fi
    if [ -z "$_cf_game_version_id" ]; then
      case $game_type in
        retail) _cf_game_type_id=517 ;;
        classic) _cf_game_type_id=67408 ;;
        bcc) _cf_game_type_id=73246 ;;
      esac
      # newest version id/name for the game type
      _cf_game_version_id=$( echo "$_cf_versions" | jq -c --argjson v "$_cf_game_type_id" 'map(select(.gameVersionTypeID == $v)) | max_by(.id) | [.id]' 2>/dev/null )
      _cf_game_version=$( echo "$_cf_versions" | jq -r --argjson v "$_cf_game_type_id" 'map(select(.gameVersionTypeID == $v)) | max_by(.id) | .name' 2>/dev/null )
    fi
  fi
  if [ -z "$_cf_game_version_id" ]; then
    echo "Error fetching game version info from $project_site/api/game/versions"
    echo
    echo "Skipping upload to CurseForge."
    echo
    upload_curseforge=
    exit_code=1
  fi
fi
|
||
|
|
||
|
# Upload to CurseForge.
if [ -n "$upload_curseforge" ]; then
  # JSON metadata for the upload; the changelog file is JSON-encoded via jq
  _cf_payload=$( cat <<-EOF
{
  "displayName": "$archive_label",
  "gameVersions": $_cf_game_version_id,
  "releaseType": "$file_type",
  "changelog": $( jq --slurp --raw-input '.' < "$pkgdir/$changelog" ),
  "changelogType": "$changelog_markup"
}
EOF
  )
  # merge in embedded-library relations collected while fetching externals
  _cf_payload_relations=
  for i in "${!relations[@]}"; do
    _cf_payload_relations="$_cf_payload_relations{\"slug\":\"$i\",\"type\":\"${relations[$i]}\"},"
  done
  if [[ -n $_cf_payload_relations ]]; then
    _cf_payload_relations="{\"relations\":{\"projects\":[${_cf_payload_relations%,}]}}"
    _cf_payload=$( echo "$_cf_payload $_cf_payload_relations" | jq -s -c '.[0] * .[1]' )
  fi

  echo "Uploading $archive_name ($_cf_game_version $file_type) to $project_site/projects/$slug"
  resultfile="$releasedir/cf_result.json"
  result=$( echo "$_cf_payload" | curl -sS --retry 3 --retry-delay 10 \
    -w "%{http_code}" -o "$resultfile" \
    -H "x-api-token: $cf_token" \
    -F "metadata=<-" \
    -F "file=@$archive" \
    "$project_site/api/projects/$slug/upload-file"
  ) && {
    case $result in
      200) echo "Success!" ;;
      302)
        echo "Error! ($result)"
        # don't need to output the redirect page
        exit_code=1
        ;;
      404)
        echo "Error! No project for \"$slug\" found."
        exit_code=1
        ;;
      *)
        echo "Error! ($result)"
        if [ -s "$resultfile" ]; then
          echo "$(<"$resultfile")"
        fi
        exit_code=1
        ;;
    esac
  } || {
    # curl itself failed (network error after retries)
    exit_code=1
  }
  echo

  rm -f "$resultfile" 2>/dev/null
fi
|
||
|
|
||
|
# Resolve the WoWInterface compatible-version ids: try the configured game
# versions, then the TOC interface number, then fall back to the site default.
if [ -n "$upload_wowinterface" ]; then
  _wowi_game_version=
  _wowi_versions=$( curl -s -H "x-api-token: $wowi_token" https://api.wowinterface.com/addons/compatible.json )
  if [ -n "$_wowi_versions" ]; then
    # Multiple versions, match on game version
    if [[ "$game_version" == *","* ]]; then
      _wowi_game_version=$( echo "$_wowi_versions" | jq -r --argjson v "[\"${game_version//,/\",\"}\"]" 'map(select(.id as $x | $v | index($x)) | .id) | join(",")' 2>/dev/null )
    fi
    # TOC matching
    if [ -z "$_wowi_game_version" ]; then
      _wowi_game_version=$( echo "$_wowi_versions" | jq -r --arg toc "$toc_version" '.[] | select(.interface == $toc and .default == true) | .id' 2>/dev/null )
    fi
    if [ -z "$_wowi_game_version" ]; then
      _wowi_game_version=$( echo "$_wowi_versions" | jq -r --arg toc "$toc_version" 'map(select(.interface == $toc))[0] | .id // empty' 2>/dev/null )
    fi
    # Handle delayed support (probably don't really need this anymore)
    if [ -z "$_wowi_game_version" ] && [ "$game_type" != "retail" ]; then
      _wowi_game_version=$( echo "$_wowi_versions" | jq -r --arg toc $((toc_version - 1)) '.[] | select(.interface == $toc) | .id' 2>/dev/null )
    fi
    if [ -z "$_wowi_game_version" ]; then
      # last resort: whatever the site marks as the default version
      _wowi_game_version=$( echo "$_wowi_versions" | jq -r '.[] | select(.default == true) | .id' 2>/dev/null )
    fi
  fi
  if [ -z "$_wowi_game_version" ]; then
    echo "Error fetching game version info from https://api.wowinterface.com/addons/compatible.json"
    echo
    echo "Skipping upload to WoWInterface."
    echo
    upload_wowinterface=
    exit_code=1
  fi
fi
|
||
|
|
||
|
# Upload tags to WoWInterface.
if [ -n "$upload_wowinterface" ]; then
	# Optional curl form arguments. The flag and its value are stored as
	# SEPARATE array elements: a combined "-F value" word would hand curl a
	# form-field name with a leading space. "changelog=<file" makes curl read
	# the field's value from the file contents.
	_wowi_args=()
	if [ -f "$wowi_changelog" ]; then
		_wowi_args+=(-F "changelog=<$wowi_changelog")
	elif [ -n "$manual_changelog" ] || [ "$wowi_markup" = "markdown" ]; then
		_wowi_args+=(-F "changelog=<$pkgdir/$changelog")
	fi
	if [ -z "$wowi_archive" ]; then
		_wowi_args+=(-F "archive=No")
	fi

	echo "Uploading $archive_name ($_wowi_game_version) to https://www.wowinterface.com/downloads/info$addonid"
	resultfile="$releasedir/wi_result.json"
	# $result holds the HTTP status code; the response body goes to $resultfile.
	result=$( curl -sS --retry 3 --retry-delay 10 \
			-w "%{http_code}" -o "$resultfile" \
			-H "x-api-token: $wowi_token" \
			-F "id=$addonid" \
			-F "version=$archive_version" \
			-F "compatible=$_wowi_game_version" \
			"${_wowi_args[@]}" \
			-F "updatefile=@$archive" \
			"https://api.wowinterface.com/addons/update"
	) && {
		case $result in
			202)
				echo "Success!"
				# The converted changelog is no longer needed after upload.
				if [ -f "$wowi_changelog" ]; then
					rm -f "$wowi_changelog" 2>/dev/null
				fi
				;;
			401)
				echo "Error! No addon for id \"$addonid\" found or you do not have permission to upload files."
				exit_code=1
				;;
			403)
				echo "Error! Incorrect api key or you do not have permission to upload files."
				exit_code=1
				;;
			*)
				echo "Error! ($result)"
				if [ -s "$resultfile" ]; then
					cat "$resultfile"
				fi
				exit_code=1
				;;
		esac
	} || {
		# curl itself failed (network error, etc.)
		exit_code=1
	}
	echo

	rm -f "$resultfile" 2>/dev/null
fi
|
||
|
|
||
|
# Upload to Wago
if [ -n "$upload_wago" ] ; then
	# Build the "supported_*_patch" JSON properties for each packaged game
	# type. Wago's API names TBC Classic "bc" where the packager uses "bcc".
	# Each fragment ends with ", " so the list can be spliced into the payload.
	_wago_support_property=""
	for type in "${!game_versions[@]}"; do
		if [[ "$type" == "bcc" ]]; then
			_wago_support_property+="\"supported_bc_patch\": \"${game_versions[$type]}\", "
		else
			_wago_support_property+="\"supported_${type}_patch\": \"${game_versions[$type]}\", "
		fi
	done

	# Wago calls a release channel "stable" rather than "release".
	_wago_stability="$file_type"
	if [ "$file_type" = "release" ]; then
		_wago_stability="stable"
	fi

	# jq --slurp --raw-input JSON-encodes the whole changelog file as one string.
	_wago_payload=$( cat <<-EOF
	{
	  "label": "$archive_label",
	  $_wago_support_property
	  "stability": "$_wago_stability",
	  "changelog": $( jq --slurp --raw-input '.' < "$pkgdir/$changelog" )
	}
	EOF
	)

	echo "Uploading $archive_name ($game_version $file_type) to Wago"
	resultfile="$releasedir/wago_result.json"
	# The JSON payload is piped to curl and attached via "metadata=<-".
	# $result holds the HTTP status code; the response body goes to $resultfile.
	result=$( echo "$_wago_payload" | curl -sS --retry 3 --retry-delay 10 \
			-w "%{http_code}" -o "$resultfile" \
			-H "authorization: Bearer $wago_token" \
			-H "accept: application/json" \
			-F "metadata=<-" \
			-F "file=@$archive" \
			"https://addons.wago.io/api/projects/$wagoid/version"
	) && {
		case $result in
			200|201) echo "Success!" ;;
			302)
				echo "Error! ($result)"
				# don't need to output the redirect page
				exit_code=1
				;;
			404)
				echo "Error! No Wago project for id \"$wagoid\" found."
				exit_code=1
				;;
			*)
				echo "Error! ($result)"
				if [ -s "$resultfile" ]; then
					cat "$resultfile"
				fi
				exit_code=1
				;;
		esac
	} || {
		# curl itself failed (network error, etc.)
		exit_code=1
	}
	echo

	rm -f "$resultfile" 2>/dev/null
fi
|
||
|
|
||
|
# Create a GitHub Release for tags and upload the zipfile as an asset.
if [ -n "$upload_github" ]; then
	#######################################
	# Upload one file as an asset of a GitHub release, replacing any existing
	# asset with the same name (editing a release).
	# Globals:   releasedir, project_github_slug, github_token (read); exit_code (written)
	# Arguments: $1 - release id, $2 - asset file name, $3 - path to the file
	# Returns:   always 0; failures are reported via exit_code
	#######################################
	upload_github_asset() {
		_ghf_release_id=$1
		_ghf_file_name=$2
		_ghf_file_path=$3
		_ghf_resultfile="$releasedir/gh_asset_result.json"
		_ghf_content_type="application/${_ghf_file_name##*.}" # zip or json

		# check if an asset exists and delete it (editing a release)
		asset_id=$( curl -sS \
				-H "Accept: application/vnd.github.v3+json" \
				-H "Authorization: token $github_token" \
				"https://api.github.com/repos/$project_github_slug/releases/$_ghf_release_id/assets" \
			| jq --arg file "$_ghf_file_name" '.[] | select(.name? == $file) | .id'
		)
		if [ -n "$asset_id" ]; then
			curl -s \
				-X DELETE \
				-H "Accept: application/vnd.github.v3+json" \
				-H "Authorization: token $github_token" \
				"https://api.github.com/repos/$project_github_slug/releases/assets/$asset_id" &>/dev/null
		fi

		echo -n "Uploading $_ghf_file_name... "
		result=$( curl -sS --retry 3 --retry-delay 10 \
				-w "%{http_code}" -o "$_ghf_resultfile" \
				-H "Accept: application/vnd.github.v3+json" \
				-H "Authorization: token $github_token" \
				-H "Content-Type: $_ghf_content_type" \
				--data-binary "@$_ghf_file_path" \
				"https://uploads.github.com/repos/$project_github_slug/releases/$_ghf_release_id/assets?name=$_ghf_file_name"
		) && {
			if [ "$result" = "201" ]; then
				echo "Success!"
			else
				echo "Error ($result)"
				if [ -s "$_ghf_resultfile" ]; then
					cat "$_ghf_resultfile"
				fi
				exit_code=1
			fi
		} || {
			exit_code=1
		}

		rm -f "$_ghf_resultfile" 2>/dev/null
		return 0
	}

	# Build the release.json metadata describing each archive's game flavors.
	_gh_metadata='{ "filename": "'"$archive_name"'", "nolib": false, "metadata": ['
	for type in "${!game_versions[@]}"; do
		_gh_metadata+='{ "flavor": "'"${game_flavors[$type]}"'", "interface": '"$toc_version"' },'
	done
	_gh_metadata=${_gh_metadata%,}
	_gh_metadata+='] }'
	if [ -f "$nolib_archive" ]; then
		_gh_metadata+=',{ "filename": "'"$nolib_archive_name"'", "nolib": true, "metadata": ['
		for type in "${!game_versions[@]}"; do
			_gh_metadata+='{ "flavor": "'"${game_flavors[$type]}"'", "interface": '"$toc_version"' },'
		done
		_gh_metadata=${_gh_metadata%,}
		_gh_metadata+='] }'
	fi
	_gh_metadata='{ "releases": ['"$_gh_metadata"'] }'

	versionfile="$releasedir/release.json"
	jq -c '.' <<< "$_gh_metadata" > "$versionfile" || echo "There was an error creating release.json" >&2

	# Release payload; the changelog is JSON-encoded into "body" by jq.
	_gh_payload=$( cat <<-EOF
	{
	  "tag_name": "$tag",
	  "name": "$tag",
	  "body": $( jq --slurp --raw-input '.' < "$pkgdir/$changelog" ),
	  "draft": false,
	  "prerelease": $( [[ "$file_type" != "release" ]] && echo true || echo false )
	}
	EOF
	)
	resultfile="$releasedir/gh_result.json"

	# If a release for this tag already exists, PATCH it instead of creating.
	release_id=$( curl -sS \
			-H "Accept: application/vnd.github.v3+json" \
			-H "Authorization: token $github_token" \
			"https://api.github.com/repos/$project_github_slug/releases/tags/$tag" \
		| jq '.id // empty'
	)
	# Method + URL kept in an array so no unquoted word-splitting is needed
	# when invoking curl below.
	if [ -n "$release_id" ]; then
		echo "Updating GitHub release: https://github.com/$project_github_slug/releases/tag/$tag"
		_gh_release_args=(-X PATCH "https://api.github.com/repos/$project_github_slug/releases/$release_id")

		# combine version info
		_gh_metadata_url=$( curl -sS \
				-H "Accept: application/vnd.github.v3+json" \
				-H "Authorization: token $github_token" \
				"https://api.github.com/repos/$project_github_slug/releases/$release_id/assets" \
			| jq -r '.[] | select(.name? == "release.json") | .url // empty'
		)
		if [ -n "$_gh_metadata_url" ]; then
			_gh_previous_metadata=$( curl -sSL --fail \
					-H "Accept: application/octet-stream" \
					-H "Authorization: token $github_token" \
					"$_gh_metadata_url"
			) && {
				# Merge old and new entries, newest winning per filename.
				jq -sc '.[0].releases + .[1].releases | unique_by(.filename) | { releases: [.[]] }' <<< "${_gh_previous_metadata} ${_gh_metadata}" > "$versionfile"
			} || {
				echo "Warning: Unable to update release.json ($?)"
			}
		fi
	else
		echo "Creating GitHub release: https://github.com/$project_github_slug/releases/tag/$tag"
		_gh_release_args=("https://api.github.com/repos/$project_github_slug/releases")
	fi
	result=$( echo "$_gh_payload" | curl -sS --retry 3 --retry-delay 10 \
			-w "%{http_code}" -o "$resultfile" \
			-H "Accept: application/vnd.github.v3+json" \
			-H "Authorization: token $github_token" \
			-d @- \
			"${_gh_release_args[@]}"
	) && {
		if [ "$result" = "200" ] || [ "$result" = "201" ]; then # edited || created
			if [ -z "$release_id" ]; then
				release_id=$( jq '.id' < "$resultfile" )
			fi
			upload_github_asset "$release_id" "$archive_name" "$archive"
			if [ -f "$nolib_archive" ]; then
				upload_github_asset "$release_id" "$nolib_archive_name" "$nolib_archive"
			fi
			if [ -s "$versionfile" ]; then
				upload_github_asset "$release_id" "release.json" "$versionfile"
			fi
		else
			echo "Error! ($result)"
			if [ -s "$resultfile" ]; then
				cat "$resultfile"
			fi
			exit_code=1
		fi
	} || {
		exit_code=1
	}

	rm -f "$resultfile" 2>/dev/null
	# Keep release.json around on CI for other jobs to inspect.
	[ -z "$CI" ] && rm -f "$versionfile" 2>/dev/null
	echo
fi
|
||
|
fi
|
||
|
|
||
|
# All done.

# Blank line, status message, blank line — then exit with the accumulated
# status (non-zero if any upload step failed).
printf '\nPackaging complete.\n\n'

exit $exit_code