name: Packages

on:
  push:
    branches:
      - master
    paths:
      - 'packages/**'
      - 'root-packages/**'
      - 'x11-packages/**'
  pull_request:
    paths:
      - 'packages/**'
      - 'root-packages/**'
      - 'x11-packages/**'
  workflow_dispatch:
    inputs:
      packages:
description: "A space-separated names of packages selected for rebuilding"
        required: true
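        # Example value for a manual run (illustrative only): "bash coreutils"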

jobs:
  build:
    runs-on: ubuntu-latest
    env:
      ANDROID_HOME: "/opt/termux/android-sdk"
      NDK: "/opt/termux/android-ndk"
    strategy:
      matrix:
        target_arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 1000
      - name: Gather build summary
        run: |
if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
|
|
BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
|
|
OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
|
|
HEAD_COMMIT=$(jq --raw-output .commits[-1].id "$GITHUB_EVENT_PATH")
|
|
if [ "$BASE_COMMIT" = "null" ]; then
|
|
if [ "$OLD_COMMIT" = "$HEAD_COMMIT" ]; then
|
|
# Single-commit push.
|
|
echo "Processing commit: ${HEAD_COMMIT}"
|
|
CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${HEAD_COMMIT}")
|
|
else
|
|
# Multi-commit push.
|
|
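                # Diff from the parent of the first pushed commit so that the
                # first commit's own changes are included in the range.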
OLD_COMMIT="${OLD_COMMIT}~1"
|
|
echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
|
|
CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
|
|
fi
|
|
else
|
|
# Pull requests.
|
|
echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
|
|
CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
|
|
fi
|
|
fi
|
|
mkdir -p ./artifacts ./debs
|
|
touch ./debs/.placeholder

          # Process the tag '%ci:no-build' that may be added as a line to the commit message.
          # It forces CI to cancel the current build with status 'passed'.
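          # Example commit message carrying the tag (illustrative):
          #
          #     ncurses: update to 6.4
          #
          #     %ci:no-build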
          if grep -qiP '^\s*%ci:no-build\s*$' <(git log --format="%B" -n 1 "HEAD"); then
            tar cf artifacts/debs-${{ matrix.target_arch }}.tar debs
            echo "[!] Force exiting as tag '%ci:no-build' was applied to HEAD commit message."
            exit 0
          fi

if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
|
|
# Build local Docker image if setup scripts were changed.
|
|
# Useful for pull requests submitting changes for both build environment and packages.
|
|
if grep -qP '^scripts/(Dockerfile|setup-android-sdk\.sh|setup-ubuntu\.sh)$' <<< "$CHANGED_FILES"; then
|
|
echo "Detected changes for environment setup scripts. Building custom Docker image now."
|
|
cd ./scripts
|
|
docker build -t termux/package-builder:latest .
|
|
cd ..
|
|
fi
|
|
|
|
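            # repo.json maps each repository directory to a repository name; an
            # illustrative (not authoritative) entry: { "packages": "termux-main" }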
            for repo_path in $(jq --raw-output 'keys | .[]' < repo.json); do
              repo=$(jq --raw-output '.["'$repo_path'"]' < repo.json)
              # Parse changed files and identify new packages and deleted packages.
              # Create lists of those packages that will be passed to upload job for
              # further processing.
              while read -r file; do
                if ! [[ $file == $repo_path/* ]]; then
                  # This file does not belong to a package, so ignore it
                  continue
                fi
                if [[ $file =~ ^$repo_path/([.a-z0-9+-]*)/([.a-z0-9+-]*).subpackage.sh$ ]]; then
                  # A subpackage was modified, check if it was deleted or just updated
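                  # e.g. (illustrative) "packages/libfoo/libfoo-dev.subpackage.sh"
                  # yields pkg="libfoo" and subpkg="libfoo-dev" via BASH_REMATCH below.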
                  pkg=${BASH_REMATCH[1]}
                  subpkg=${BASH_REMATCH[2]}
                  if [ ! -f "${repo_path}/${pkg}/${subpkg}.subpackage.sh" ]; then
                    echo "$subpkg" >> ./deleted_${repo}_packages.txt
                  fi
                elif [[ $file =~ ^$repo_path/([.a-z0-9+-]*)/.*$ ]]; then
                  # Package, check if it was deleted or updated.
                  pkg=${BASH_REMATCH[1]}
                  if [ ! -d "${repo_path}/${pkg}" ]; then
                    echo "$pkg" >> ./deleted_${repo}_packages.txt
                  else
                    echo "$pkg" >> ./built_${repo}_packages.txt
                    # If there are subpackages we want to create a list of those
                    # as well.
                    for subpackage in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                      echo "$(basename "${subpackage%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                    done
                  fi
                fi
              done <<< "${CHANGED_FILES}"
            done
          else
            for pkg in ${{ github.event.inputs.packages }}; do
              for repo_path in $(jq --raw-output 'keys | .[]' < repo.json); do
                repo=$(jq --raw-output '.["'$repo_path'"]' < repo.json)
                if [ -d "${repo_path}/${pkg}" ]; then
                  echo "$pkg" >> ./built_${repo}_packages.txt
                  for subpkg in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                    echo "$(basename "${subpkg%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                  done
                else
                  echo "Package '${pkg}' not found in any of the repos"
                fi
              done
            done
          fi

          for repo in $(jq --raw-output '.[]' < repo.json); do
            # Fix so that lists do not contain duplicates.
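            # For push/PR events the diff output is path-sorted, so duplicate
            # entries end up adjacent and plain uniq is enough here.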
            if [ -f ./built_${repo}_packages.txt ]; then
              uniq ./built_${repo}_packages.txt > ./built_${repo}_packages.txt.tmp
              mv ./built_${repo}_packages.txt.tmp ./built_${repo}_packages.txt
            fi
            if [ -f ./built_${repo}_subpackages.txt ]; then
              uniq ./built_${repo}_subpackages.txt > ./built_${repo}_subpackages.txt.tmp
              mv ./built_${repo}_subpackages.txt.tmp ./built_${repo}_subpackages.txt
            fi
            if [ -f ./deleted_${repo}_packages.txt ]; then
              uniq ./deleted_${repo}_packages.txt > ./deleted_${repo}_packages.txt.tmp
              mv ./deleted_${repo}_packages.txt.tmp ./deleted_${repo}_packages.txt
            fi
          done

      - name: Free additional disk space (if necessary)
        run: |
          if grep -qP '^rust$' ./built_packages.txt ; then
            echo "Free additional disk space on host"
            sudo apt purge -yq $(dpkg -l | grep '^ii' | awk '{ print $2 }' | grep -P '(cabal-|dotnet-|ghc-|libmono|php)') \
              liblldb-6.0 libllvm6.0:amd64 mono-runtime-common monodoc-manual powershell ruby
            sudo apt autoremove -yq
            sudo rm -rf /opt/hostedtoolcache /usr/local /usr/share/dotnet /usr/share/swift
          fi

      - name: Lint packages
        run: |
          package_recipes=
          for repo_path in $(jq --raw-output 'keys | .[]' < repo.json); do
            repo=$(jq --raw-output '.["'$repo_path'"]' < repo.json)
            if [ -f ./built_${repo}_packages.txt ]; then
              package_recipes+=" $(cat ./built_${repo}_packages.txt | repo_path=$repo_path awk '{print ENVIRON["repo_path"]"/"$1"/build.sh"}')"
            fi
          done

          ./scripts/lint-packages.sh $package_recipes

      - name: Build packages
        run: |
          packages=
          for repo_path in $(jq --raw-output 'keys | .[]' < repo.json); do
            repo=$(jq --raw-output '.["'$repo_path'"]' < repo.json)

            if [ -f ./built_${repo}_packages.txt ]; then
              packages+=" $(cat ./built_${repo}_packages.txt)"
            fi
          done

          ./scripts/run-docker.sh ./build-package.sh -I -a ${{ matrix.target_arch }} $packages

      - name: Generate build artifacts
        if: always()
        run: |
          test -d termux-packages/output && mv termux-packages/output/* ./output/

          rm -rf debs

          for repo in $(jq --raw-output '.[]' < repo.json); do
            mkdir debs
            # Put package lists into directory with *.deb files so they will be transferred to
            # upload job.
            test -f ./built_${repo}_packages.txt && mv ./built_${repo}_packages.txt ./debs/built_packages.txt
            test -f ./built_${repo}_subpackages.txt && cat ./built_${repo}_subpackages.txt >> ./debs/built_packages.txt \
              && rm ./built_${repo}_subpackages.txt
            test -f ./deleted_${repo}_packages.txt && mv ./deleted_${repo}_packages.txt ./debs/deleted_packages.txt

            # Move only debs from built_packages into debs/ folder before
            # creating an archive.
            while read -r pkg; do
              # Match both "${pkg}_*.deb" and "${pkg}-static_*.deb".
              find output \( -name "${pkg}_*.deb" -o -name "${pkg}-static_*.deb" \) -type f -print0 | xargs -0r mv -t debs/
            done < <(cat ./debs/built_packages.txt)

            # File names containing certain symbols (e.g. ":") cause failures in actions/upload-artifact,
            # so archive the *.deb files in a tarball before uploading.
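            # Illustrative resulting name, assuming a repo called "termux-main":
            #   artifacts/debs-termux-main-aarch64-<sha>.tar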
            tar cf artifacts/debs-${repo}-${{ matrix.target_arch }}-${{ github.sha }}.tar debs
            rm -r debs/
          done
      - name: Checksums for built *.deb files
        if: always()
        run: |
          find debs -type f -name "*.deb" -exec sha256sum "{}" \; | sort -k2
      - name: Store *.deb files
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: debs-${{ matrix.target_arch }}-${{ github.sha }}
          path: ./artifacts

  upload:
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Clone repository
        uses: actions/checkout@v3
      - name: Get *.deb files
        uses: actions/download-artifact@v3
        with:
          path: ./
      - name: Upload to packages.termux.org
        env:
          REPOSITORY_DISTRIBUTION: stable
          REPOSITORY_URL: https://packages.termux.org/aptly-api
        run: |
          GITHUB_SHA=${{ github.sha }}
          APTLY_API_AUTH=${{ secrets.APTLY_API_AUTH }}
          GPG_PASSPHRASE=${{ secrets.GPG_PASSPHRASE }}

          source scripts/aptly_api.sh

          for repo in $(jq --raw-output '.[]' < repo.json); do
            export REPOSITORY_NAME=$repo

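            # actions/download-artifact@v3 unpacks each artifact into a directory
            # named after it, hence the "debs-*/" prefix in the glob below.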
            for archive in debs-*/debs-$repo-{aarch64,arm,i686,x86_64}-${{ github.sha }}.tar; do
              tar xf "$archive"
            done

            # Upload files to a temporary directory.
            uploaded_files=false
            shopt -s nullglob
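            # The sed below turns each package name into a pair of glob patterns,
            # e.g. "bash" -> "debs/bash_*.deb debs/bash-static_*.deb".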
            for filename in $(cat debs/built_packages.txt | sed -E 's/(..*)/debs\/\1_\*.deb debs\/\1-static_\*.deb/g'); do
              if ! aptly_upload_file "$filename"; then
                exit 1
              fi

              uploaded_files=true
            done
            shopt -u nullglob

            # Publish repository changes.
            if [ "$uploaded_files" = "true" ]; then
              if ! aptly_add_to_repo; then
                exit 1
              fi

              # Usually the temporary directory is deleted automatically, but in certain cases it is left behind.
              aptly_delete_dir

              # Final part to make changes appear in the web root.
              if ! aptly_publish_repo; then
                exit 1
              fi
            fi

            # Delete the debs folder so that packages from one repo are not uploaded to another.
            # TODO(@thunder-coding): Perhaps it would make sense to make use of tar's `-C` option in order to prevent extracting the archives twice
            rm -rf debs/
          done
      - name: Upload to grimler.se
        # Run even if upload to packages.termux.org failed:
        if: always()
        env:
          REPOSITORY_DISTRIBUTION: stable
          REPOSITORY_URL: https://aptly-api.grimler.se
        run: |
          GITHUB_SHA=${{ github.sha }}
          APTLY_API_AUTH=${{ secrets.APTLY_API_AUTH }}

          source scripts/aptly_api.sh

          for repo in $(jq --raw-output '.[]' < repo.json); do
            export REPOSITORY_NAME=$repo

            for archive in debs-*/debs-$repo-{aarch64,arm,i686,x86_64}-${{ github.sha }}.tar; do
              tar xf "$archive"
            done

            # Upload files to a temporary directory.
            uploaded_files=false
            shopt -s nullglob
            for filename in $(cat debs/built_packages.txt | sed -E 's/(..*)/debs\/\1_\*.deb debs\/\1-static_\*.deb/g'); do
              if ! aptly_upload_file "$filename"; then
                exit 1
              fi

              uploaded_files=true
            done
            shopt -u nullglob

            # Publish repository changes.
            if [ "$uploaded_files" = "true" ]; then
              if ! aptly_add_to_repo; then
                exit 1
              fi

              # Usually the temporary directory is deleted automatically, but in certain cases it is left behind.
              aptly_delete_dir

              # The grimler.se mirror is signed manually, so it can't be published
              # through CI:
              # if ! aptly_publish_repo; then
              #   exit 1
              # fi
            fi
            # Delete the debs folder so that packages from one repo are not uploaded to another.
            rm -rf debs/
          done