name: Packages

on:
  push:
    branches:
    - master
    paths:
    - 'packages/**'
  pull_request:
    paths:
    - 'packages/**'

jobs:
  build:
    runs-on: ubuntu-latest
    env:
      ANDROID_HOME: "/opt/termux/android-sdk"
      NDK: "/opt/termux/android-ndk"
    strategy:
      matrix:
        target_arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
    - name: Clone repository
      uses: actions/checkout@v2
      with:
        fetch-depth: 1000
    - name: Free additional disk space on host
      run: |
        sudo apt purge -yq $(dpkg -l | grep '^ii' | awk '{ print $2 }' | grep -P '(cabal-|dotnet-|ghc-|libmono|php)') \
          liblldb-6.0 libllvm6.0:amd64 mono-runtime-common monodoc-manual powershell ruby
        sudo apt autoremove -yq
        sudo rm -rf /opt/hostedtoolcache /usr/local /usr/share/dotnet /usr/share/swift
    - name: Build
      run: |
        BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
        OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
        HEAD_COMMIT=$(jq --raw-output .commits[-1].id "$GITHUB_EVENT_PATH")
        if [ "$BASE_COMMIT" = "null" ]; then
          if [ "$OLD_COMMIT" = "$HEAD_COMMIT" ]; then
            # Single-commit push.
            echo "Processing commit: ${HEAD_COMMIT}"
            CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${HEAD_COMMIT}")
          else
            # Multi-commit push.
            OLD_COMMIT="${OLD_COMMIT}~1"
            echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
            CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
          fi
        else
          # Pull requests.
          echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
          CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
        fi
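        # At this point CHANGED_FILES holds a newline-separated list of paths relative
        # to the repository root, e.g. a (hypothetical) 'packages/libfoo/build.sh'.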
        mkdir -p ./artifacts ./debs
        touch ./debs/.placeholder
        # Process tag '%ci:no-build' that may be added as a line to the commit message.
        # Forces CI to cancel the current build with status 'passed'.
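        # A (hypothetical) commit message skipping the build would look like:
        #
        #   libfoo: bump to 1.2.3
        #
        #   %ci:no-build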
        if grep -qiP '^\s*%ci:no-build\s*$' <(git log --format="%B" -n 1 "HEAD"); then
          tar cf artifacts/debs-${{ matrix.target_arch }}.tar debs
          echo "[!] Force exiting as tag '%ci:no-build' was applied to HEAD commit message."
          exit 0
        fi
        # Build local Docker image if setup scripts were changed.
        # Useful for pull requests submitting changes for both build environment and packages.
        if grep -qP '^scripts/(Dockerfile|setup-ubuntu\.sh)$' <<< "$CHANGED_FILES"; then
          echo "Detected changes for environment setup scripts. Building custom Docker image now."
          cd ./scripts
          docker build -t termux/package-builder:latest .
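          # (Assumption: ./scripts/run-docker.sh below runs builds inside this
          # 'termux/package-builder:latest' image, so the rebuilt tag takes effect
          # for the rest of this job.)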
          cd ..
        fi
        # Parse changed files and identify new packages and deleted packages.
        # Create lists of those packages that will be passed to upload job for
        # further processing.
        while read -r file; do
          if ! [[ $file == packages/* ]]; then
            # This file does not belong to a package, so ignore it
            continue
          fi
          if [[ $file =~ ^packages/([a-z0-9-]*)/([a-z0-9-]*).subpackage.sh$ ]]; then
            # A subpackage was modified, check if it was deleted or just updated
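            # e.g. a (hypothetical) 'packages/libfoo/libbar.subpackage.sh' yields
            # pkg=libfoo and subpkg=libbar via BASH_REMATCH below.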
            pkg=${BASH_REMATCH[1]}
            subpkg=${BASH_REMATCH[2]}
            if [ ! -f "packages/${pkg}/${subpkg}.subpackage.sh" ]; then
              echo "$subpkg" >> ./deleted_packages.txt
            fi
          elif [[ $file =~ ^packages/([a-z0-9-]*)/.*$ ]]; then
            # A package was modified, check if it was deleted or updated
            pkg=${BASH_REMATCH[1]}
            if [ ! -d "packages/${pkg}" ]; then
              echo "$pkg" >> ./deleted_packages.txt
            else
              echo "$pkg" >> ./built_packages.txt
              # If there are subpackages we want to create a list of those
              # as well
              for file in $(find "packages/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                echo "$(basename "${file%%.subpackage.sh}")" >> ./built_subpackages.txt
              done
            fi
          fi
        done <<< "${CHANGED_FILES}"

        # Fix so that lists do not contain duplicates
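        # (uniq only collapses adjacent duplicates; that is sufficient here because
        # entries are appended in the sorted order git diff-tree prints paths in.)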
        if [ -f ./built_packages.txt ]; then
          uniq ./built_packages.txt > ./built_packages.txt.tmp
          mv ./built_packages.txt.tmp ./built_packages.txt
        fi
        if [ -f ./built_subpackages.txt ]; then
          uniq ./built_subpackages.txt > ./built_subpackages.txt.tmp
          mv ./built_subpackages.txt.tmp ./built_subpackages.txt
        fi
        if [ -f ./deleted_packages.txt ]; then
          uniq ./deleted_packages.txt > ./deleted_packages.txt.tmp
          mv ./deleted_packages.txt.tmp ./deleted_packages.txt
        fi

        if [ -f ./built_packages.txt ]; then
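          # lint-packages.sh receives the build.sh path of each package to rebuild,
          # e.g. a (hypothetical) 'libfoo' entry expands to 'packages/libfoo/build.sh'.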
          ./scripts/lint-packages.sh $(cat ./built_packages.txt | awk '{print "packages/"$1"/build.sh"}')
          ./scripts/run-docker.sh ./build-package.sh -I -a ${{ matrix.target_arch }} $(cat ./built_packages.txt)
        fi

        test -d ./termux-packages/debs && mv ./termux-packages/debs/* ./debs/
        # Put package lists into directory with *.deb files so they will be transferred to
        # upload job.
        test -f ./built_packages.txt && mv ./built_packages.txt ./debs/
        test -f ./built_subpackages.txt && cat ./built_subpackages.txt >> ./debs/built_packages.txt \
          && rm ./built_subpackages.txt
        test -f ./deleted_packages.txt && mv ./deleted_packages.txt ./debs/
        # Files containing certain symbols (e.g. ":") will cause failure in actions/upload-artifact.
        # Archiving *.deb files in a tarball to avoid issues with uploading.
        tar cf artifacts/debs-${{ matrix.target_arch }}-${{ github.sha }}.tar debs
    - name: Checksums for built *.deb files
      run: |
        find debs -type f -name "*.deb" -exec sha256sum "{}" \; | sort -k2
    - name: Store *.deb files
      uses: actions/upload-artifact@v2
      with:
        name: termux-packages-${{ github.sha }}
        path: ./artifacts

  upload:
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    steps:
    - name: Clone repository
      uses: actions/checkout@v2
    - name: Get *.deb files
      uses: actions/download-artifact@v2
      with:
        name: termux-packages-${{ github.sha }}
        path: ./
    - name: Upload to packages.termux.org
      env:
        REPOSITORY_NAME: termux-main
        REPOSITORY_DISTRIBUTION: stable
      run: |
        for archive in *.tar; do
          tar xf "$archive"
        done
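        # Each per-arch tarball from the build job unpacks into ./debs, which now holds
        # the *.deb files plus the built_packages.txt / deleted_packages.txt lists.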

        # Function for deleting temporary directory with uploaded files from
        # the server.
        aptly_delete_dir() {
          echo "[*] Deleting uploads temporary directory."

          curl_response=$(
            curl \
              --silent \
              --user "${{ secrets.APTLY_API_AUTH }}" \
              --request DELETE \
              --write-out "|%{http_code}" \
              https://packages.termux.org/aptly-api/files/${REPOSITORY_NAME}-${{ github.sha }}
          )

          http_status_code=$(echo "$curl_response" | cut -d'|' -f2)

          if [ "$http_status_code" != "200" ]; then
            echo "[!] Server returned $http_status_code code while deleting temporary directory."
          fi
        }
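        # Each curl call here appends '|<http status>' to the response body via
        # --write-out, so splitting "$curl_response" on '|' with cut gives the body
        # in field 1 and the status code in field 2.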

        # Upload files to the temporary directory.
        uploaded_files=false
        shopt -s nullglob
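        # sed turns each package name into glob patterns, e.g. a (hypothetical) 'libfoo'
        # entry becomes 'debs/libfoo_*.deb debs/libfoo-static_*.deb'; with nullglob set,
        # patterns that match no file simply expand to nothing.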
        for filename in $(cat debs/built_packages.txt | sed -E 's/(..*)/debs\/\1_\*.deb debs\/\1-static_\*.deb/g'); do
          curl_response=$(
            curl \
              --silent \
              --user "${{ secrets.APTLY_API_AUTH }}" \
              --request POST \
              --form file=@${filename} \
              --write-out "|%{http_code}" \
              https://packages.termux.org/aptly-api/files/${REPOSITORY_NAME}-${{ github.sha }}
          )

          http_status_code=$(echo "$curl_response" | cut -d'|' -f2)

          if [ "$http_status_code" = "200" ]; then
            echo "[*] Uploaded: $(echo "$curl_response" | cut -d'|' -f1 | jq -r '.[]' | cut -d'/' -f2)"
          else
            # Manually cleaning up the temporary directory to reclaim disk space.
            # Don't rely on scheduled server-side scripts.
            echo "[!] Failed to upload '$filename'. Server returned $http_status_code code."
            echo "[!] Aborting any further uploads."
            aptly_delete_dir
            exit 1
          fi

          uploaded_files=true
        done
        shopt -u nullglob

        # Publishing repository changes.
        if [ "$uploaded_files" = "true" ]; then
          echo "[*] Adding packages to repository '$REPOSITORY_NAME'..."

          curl_response=$(
            curl \
              --silent \
              --user "${{ secrets.APTLY_API_AUTH }}" \
              --request POST \
              --write-out "|%{http_code}" \
              https://packages.termux.org/aptly-api/repos/${REPOSITORY_NAME}/file/${REPOSITORY_NAME}-${{ github.sha }}
          )

          http_status_code=$(echo "$curl_response" | cut -d'|' -f2)

          if [ "$http_status_code" = "200" ]; then
            warnings=$(echo "$curl_response" | cut -d'|' -f1 | jq '.Report.Warnings' | jq -r '.[]')
            if [ -n "$warnings" ]; then
              echo "[!] WARNINGS (NON-CRITICAL):"
              echo
              echo "$warnings"
              echo
            fi

            # Upload directory is usually deleted automatically once all deb
            # files were added without issues. Attempting to do this manually
            # in case it was left for some reason (e.g. some debs not added due to
            # conflicts).
            aptly_delete_dir
          else
            echo "[*] Server returned $http_status_code. Not publishing repository!"
            aptly_delete_dir
            exit 1
          fi

          # Final part to make changes appear in web root.
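          # (Assumption: this PUT hits aptly's 'publish update' API, which re-signs the
          # published repository with the supplied GPG passphrase and refreshes the
          # metadata served from the web root.)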
          echo "[*] Publishing repository changes..."

          curl_response=$(
            curl \
              --silent \
              --user "${{ secrets.APTLY_API_AUTH }}" \
              --header 'Content-Type: application/json' \
              --request PUT \
              --data '{"Signing": {"Passphrase": "${{ secrets.GPG_PASSPHRASE }}"}}' \
              --write-out "|%{http_code}" \
              https://packages.termux.org/aptly-api/publish/${REPOSITORY_NAME}/${REPOSITORY_DISTRIBUTION}
          )

          http_status_code=$(echo "$curl_response" | cut -d'|' -f2)

          if [ "$http_status_code" = "200" ]; then
            echo "[*] Repository updated successfully."
          else
            # Note: may return error 500 if gpg signing failed.
            echo "[*] Server returned $http_status_code."
            exit 1
          fi
        fi