c4ab7bcfd0
Earlier, in 5b8b15e7aa, I configured our upload scripts to treat a failure of
`aptly_add_to_repo` as a failure to publish packages at all. However, this does
not work: even after all debs have been uploaded and `aptly_add_to_repo` has
been called, the debs still have to be moved from the temporary upload
directory into the repository. The proxy may drop the (stale) connection at
exactly that moment, and right afterwards we call `aptly_delete_dir`, which
removes the temporary directory. On the server side aptly nevertheless keeps
moving debs out of the temporary directory, so the repository gets published;
but because the temporary directory is removed in the middle of the
transaction, the transaction never fully completes and in practice only some of
the debs end up in the repository. This is now fixed by adding a sleep of 180
seconds after `aptly_add_to_repo`, which should be more than enough for the
server-side move to finish.
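
For context, the upload job sources scripts/aptly_api.sh and drives everything
through the aptly REST API via helpers such as `aptly_upload_file`,
`aptly_add_to_repo`, `aptly_delete_dir` and `aptly_publish_repo`. The bodies
below are only a rough sketch of that flow, not the actual script: they assume
the stock aptly API endpoints, HTTP basic auth in APTLY_API_AUTH, the default
publish prefix, and the environment variables the workflow exports.

# Sketch only: approximate shapes of the aptly_api.sh helpers, assuming the
# standard aptly REST API and "user:password" basic auth in APTLY_API_AUTH.

# Upload a .deb into a temporary per-commit upload directory on the server.
aptly_upload_file() {
    curl --fail --user "$APTLY_API_AUTH" --form "file=@$1" \
        "$REPOSITORY_URL/api/files/$GITHUB_SHA"
}

# Ask the server to move everything from the upload directory into the repo.
# This move keeps running server-side even if the client connection drops,
# which is why the workflow now sleeps before deleting the directory.
aptly_add_to_repo() {
    curl --fail --user "$APTLY_API_AUTH" -X POST \
        "$REPOSITORY_URL/api/repos/$REPOSITORY_NAME/file/$GITHUB_SHA"
}

# Remove the temporary upload directory.
aptly_delete_dir() {
    curl --fail --user "$APTLY_API_AUTH" -X DELETE \
        "$REPOSITORY_URL/api/files/$GITHUB_SHA"
}

# Re-publish (re-sign) the repository so changes appear in the web root.
# The ":." prefix refers to the default publish prefix and is an assumption;
# adjust if the repository is published under a named prefix.
aptly_publish_repo() {
    curl --fail --user "$APTLY_API_AUTH" -X PUT \
        --header "Content-Type: application/json" \
        --data "{\"Signing\": {\"Batch\": true, \"Passphrase\": \"$GPG_PASSPHRASE\"}}" \
        "$REPOSITORY_URL/api/publish/:./$REPOSITORY_DISTRIBUTION"
}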
name: Packages

on:
  push:
    branches:
      - master
    paths:
      - 'packages/**'
  pull_request:
    paths:
      - 'packages/**'
  workflow_dispatch:
    inputs:
      packages:
description: "A space-separated names of packages selected for rebuilding"
        required: true

jobs:
  build:
    runs-on: ubuntu-latest
    env:
      ANDROID_HOME: "/opt/termux/android-sdk"
      NDK: "/opt/termux/android-ndk"
    strategy:
      matrix:
        target_arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 1000
      - name: Build
        run: |
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
            OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
            HEAD_COMMIT=$(jq --raw-output .commits[-1].id "$GITHUB_EVENT_PATH")
            if [ "$BASE_COMMIT" = "null" ]; then
              if [ "$OLD_COMMIT" = "$HEAD_COMMIT" ]; then
                # Single-commit push.
                echo "Processing commit: ${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${HEAD_COMMIT}")
              else
                # Multi-commit push.
                OLD_COMMIT="${OLD_COMMIT}~1"
                echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
              fi
            else
              # Pull requests.
              echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
              CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
            fi
          fi
          mkdir -p ./artifacts ./debs
          touch ./debs/.placeholder

          # Process tag '%ci:no-build' that may be added as line to commit message.
          # Forces CI to cancel current build with status 'passed'
          if grep -qiP '^\s*%ci:no-build\s*$' <(git log --format="%B" -n 1 "HEAD"); then
            tar cf artifacts/debs-${{ matrix.target_arch }}.tar debs
            echo "[!] Force exiting as tag '%ci:no-build' was applied to HEAD commit message."
            exit 0
          fi

          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            # Build local Docker image if setup scripts were changed.
            # Useful for pull requests submitting changes for both build environment and packages.
            if grep -qP '^scripts/(Dockerfile|setup-android-sdk\.sh|setup-ubuntu\.sh)$' <<< "$CHANGED_FILES"; then
              echo "Detected changes for environment setup scripts. Building custom Docker image now."
              cd ./scripts
              docker build -t termux/package-builder:latest .
              cd ..
            fi
            # Parse changed files and identify new packages and deleted packages.
            # Create lists of those packages that will be passed to upload job for
            # further processing.
            while read -r file; do
              if ! [[ $file == packages/* ]]; then
                # This file does not belong to a package, so ignore it
                continue
              fi
              if [[ $file =~ ^packages/([.a-z0-9+-]*)/([.a-z0-9+-]*).subpackage.sh$ ]]; then
                # A subpackage was modified, check if it was deleted or just updated
                pkg=${BASH_REMATCH[1]}
                subpkg=${BASH_REMATCH[2]}
                if [ ! -f "packages/${pkg}/${subpkg}.subpackage.sh" ]; then
                  echo "$subpkg" >> ./deleted_packages.txt
                fi
              elif [[ $file =~ ^packages/([.a-z0-9+-]*)/.*$ ]]; then
                # package, check if it was deleted or updated
                pkg=${BASH_REMATCH[1]}
                if [ ! -d "packages/${pkg}" ]; then
                  echo "$pkg" >> ./deleted_packages.txt
                else
                  echo "$pkg" >> ./built_packages.txt
                  # If there are subpackages we want to create a list of those
                  # as well
                  for file in $(find "packages/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                    echo "$(basename "${file%%.subpackage.sh}")" >> ./built_subpackages.txt
                  done
                fi
              fi
            done<<<${CHANGED_FILES}
          else
            for pkg in ${{ github.event.inputs.packages }}; do
              echo "$pkg" >> ./built_packages.txt
              for subpkg in $(find "packages/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                echo "$(basename "${subpkg%%.subpackage.sh}")" >> ./built_subpackages.txt
              done
            done
          fi

          # Fix so that lists do not contain duplicates
          if [ -f ./built_packages.txt ]; then
            uniq ./built_packages.txt > ./built_packages.txt.tmp
            mv ./built_packages.txt.tmp ./built_packages.txt
          fi
          if [ -f ./built_subpackages.txt ]; then
            uniq ./built_subpackages.txt > ./built_subpackages.txt.tmp
            mv ./built_subpackages.txt.tmp ./built_subpackages.txt
          fi
          if [ -f ./deleted_packages.txt ]; then
            uniq ./deleted_packages.txt > ./deleted_packages.txt.tmp
            mv ./deleted_packages.txt.tmp ./deleted_packages.txt
          fi

          if grep -qP '^rust$' ./built_packages.txt ; then
            echo "Free additional disk space on host"
            sudo apt purge -yq $(dpkg -l | grep '^ii' | awk '{ print $2 }' | grep -P '(cabal-|dotnet-|ghc-|libmono|php)') \
              liblldb-6.0 libllvm6.0:amd64 mono-runtime-common monodoc-manual powershell ruby
            sudo apt autoremove -yq
            sudo rm -rf /opt/hostedtoolcache /usr/local /usr/share/dotnet /usr/share/swift
          fi

          if [ -f ./built_packages.txt ]; then
            ./scripts/lint-packages.sh $(cat ./built_packages.txt | awk '{print "packages/"$1"/build.sh"}')
            ./scripts/run-docker.sh ./build-package.sh -I -a ${{ matrix.target_arch }} $(cat ./built_packages.txt)
          fi

      - name: Generate build artifacts
        if: always()
        run: |
          mkdir -p debs
          test -d termux-packages/output && mv termux-packages/output/* ./output/

          # Put package lists into directory with *.deb files so they will be transferred to
          # upload job.
          test -f ./built_packages.txt && mv ./built_packages.txt ./debs/
          test -f ./built_subpackages.txt && cat ./built_subpackages.txt >> ./debs/built_packages.txt \
            && rm ./built_subpackages.txt
          test -f ./deleted_packages.txt && mv ./deleted_packages.txt ./debs/

          # Move only debs from built_packages into debs/ folder before
          # creating an archive.
          while read -r pkg; do
            # Match both $pkg.deb and $pkg-static.deb.
            find output \( -name "${pkg}_*.deb" -o -name "$pkg-static_*.deb" \) -type f -print0 | xargs -0r mv -t debs/
          done < <(cat ./debs/built_packages.txt)

          # Files containing certain symbols (e.g. ":") will cause failure in actions/upload-artifact.
          # Archiving *.deb files in a tarball to avoid issues with uploading.
          tar cf artifacts/debs-${{ matrix.target_arch }}-${{ github.sha }}.tar debs
      - name: Checksums for built *.deb files
        if: always()
        run: |
          find debs -type f -name "*.deb" -exec sha256sum "{}" \; | sort -k2
      - name: Store *.deb files
        if: always()
        uses: actions/upload-artifact@v2
        with:
          name: termux-packages-${{ matrix.target_arch }}-${{ github.sha }}
          path: ./artifacts

  upload:
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Clone repository
        uses: actions/checkout@v2
      - name: Get *.deb files
        uses: actions/download-artifact@v2
        with:
          path: ./
      - name: Upload to packages.termux.org
        env:
          REPOSITORY_NAME: termux-main
          REPOSITORY_DISTRIBUTION: stable
          REPOSITORY_URL: https://packages.termux.org/aptly-api
        run: |
          GITHUB_SHA=${{ github.sha }}
          APTLY_API_AUTH=${{ secrets.APTLY_API_AUTH }}
          GPG_PASSPHRASE=${{ secrets.GPG_PASSPHRASE }}

          source scripts/aptly_api.sh

          for archive in termux-packages-*/*.tar; do
            tar xf "$archive"
          done

          # Upload file to temporary directory.
          uploaded_files=false
          shopt -s nullglob
          for filename in $(cat debs/built_packages.txt | sed -E 's/(..*)/debs\/\1_\*.deb debs\/\1-static_\*.deb/g'); do
            if ! aptly_upload_file "$filename"; then
              exit 1
            fi

            uploaded_files=true
          done
          shopt -u nullglob

          # Publishing repository changes.
          if [ "$uploaded_files" = "true" ]; then
            aptly_add_to_repo
            sleep 180

            # Usually temporary directory is deleted automatically, but in certain cases it is left.
            aptly_delete_dir

            # Final part to make changes appear in web root.
            if ! aptly_publish_repo; then
              exit 1
            fi
          fi
      - name: Upload to grimler.se
        # Run even if upload to packages.termux.org failed:
        if: always()
        env:
          REPOSITORY_NAME: termux-main
          REPOSITORY_DISTRIBUTION: stable
          REPOSITORY_URL: https://aptly-api.grimler.se
        run: |
          GITHUB_SHA=${{ github.sha }}
          APTLY_API_AUTH=${{ secrets.APTLY_API_AUTH }}

          source scripts/aptly_api.sh

          # Upload file to temporary directory.
          uploaded_files=false
          shopt -s nullglob
          for filename in $(cat debs/built_packages.txt | sed -E 's/(..*)/debs\/\1_\*.deb debs\/\1-static_\*.deb/g'); do
            if ! aptly_upload_file "$filename"; then
              exit 1
            fi

            uploaded_files=true
          done
          shopt -u nullglob

          # Publishing repository changes.
          if [ "$uploaded_files" = "true" ]; then
            aptly_add_to_repo

            # Usually temporary directory is deleted automatically, but in certain cases it is left.
            aptly_delete_dir

            # grimler.se mirror is signed manually, can't publish
            # through CI
            # if ! aptly_publish_repo; then
            #   exit 1
            # fi
          fi