Mirror of https://github.com/f-droid/fdroidserver.git (synced 2025-09-28 21:41:06 +03:00)

Compare commits: "master" and "2.0a4" have no commits in common; the two branches have entirely different histories.
360 changed files with 48875 additions and 135269 deletions.
.bandit (3 changed lines)
@@ -1,3 +0,0 @@
[bandit]
skips: B110,B404,B408,B603,B607,B322
targets: .
.editorconfig (15 changed lines)
@@ -1,15 +0,0 @@
root = true

[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[**.py]
indent_style = space
indent_size = 4

[.gitlab-ci.yml]
indent_style = space
indent_size = 2
.gitignore (vendored, 29 changed lines)
@@ -4,14 +4,13 @@
*.box
TAGS
.idea
.ropeproject/

# files generated by build
/build/
/dist/
build/
dist/
env/
ENV/
/fdroidserver.egg-info/
fdroidserver.egg-info/
pylint.parseable
/.testfiles/
README.rst
@@ -19,7 +18,6 @@ README.rst

# editor tmp files
.*.swp
.ropeproject/

# files generated by tests
tmp/
@@ -27,6 +25,7 @@ tmp/
/tests/repo/status

# files used in manual testing
/config.py
/config.yml
/tmp/
/logs/
@@ -41,25 +40,14 @@ makebuildserver.config.py
/tests/OBBMainPatchCurrent.apk
/tests/OBBMainTwoVersions.apk
/tests/archive/categories.txt
/tests/archive/diff/[1-9]*.json
/tests/archive/entry.jar
/tests/archive/entry.json
/tests/archive/icons*
/tests/archive/index.jar
/tests/archive/index_unsigned.jar
/tests/archive/index.xml
/tests/archive/index-v1.jar
/tests/archive/index-v1.json
/tests/archive/index-v2.json
/tests/archive/index.css
/tests/archive/index.html
/tests/archive/index.jar
/tests/archive/index.png
/tests/archive/index.xml
/tests/archive/index_unsigned.jar
/tests/metadata/org.videolan.vlc/en-US/icon*.png
/tests/repo/diff/[1-9]*.json
/tests/repo/index.css
/tests/repo/index.html
/tests/repo/index.jar
/tests/repo/index.png
/tests/repo/index_unsigned.jar
/tests/repo/index-v1.jar
/tests/repo/info.guardianproject.urzip/
@@ -74,6 +62,3 @@ makebuildserver.config.py

# generated by gettext
locale/*/LC_MESSAGES/fdroidserver.mo

# sphinx
public/
.gitlab-ci.yml (843 changed lines)
@@ -1,52 +1,18 @@
---

# Use merge request pipelines when a merge request is open for the branch.
# Use branch pipelines when a merge request is not open for the branch.
# https://docs.gitlab.com/ci/yaml/workflow/#switch-between-branch-pipelines-and-merge-request-pipelines
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
      when: never
    - if: $CI_COMMIT_BRANCH

stages:
  - lint
  - test  # default for jobs that do not specify stage:
  - deploy

variables:
  pip: pip3 --timeout 100 --retries 10
  # speed up git checkout phase
  GIT_DEPTH: 1

# Run the whole test suite in an environment that is like the
# buildserver guest VM. This installs python3-babel because that is
# only used by the test suite, and not needed in the buildserver.
#
# Some extra packages are required for this test run that are not
# provided by the buildserver since they are not needed there:
# * python3-babel for compiling localization files
# * gnupg-agent for the full signing setup
# * python3-clint for fancy progress bars for users
# * python3-pycountry for linting config/mirrors.yml
buildserver run-tests:
  image: registry.gitlab.com/fdroid/fdroidserver:buildserver
test:
  image: registry.gitlab.com/fdroid/ci-images-base
  script:
    - apt-get update
    - apt-get install gnupg-agent python3-babel python3-biplist python3-clint python3-pycountry
    - ./tests/run-tests
    # make sure that translations do not cause stacktraces
    - cd $CI_PROJECT_DIR/locale
    - for locale in *; do
        test -d $locale || continue;
        for cmd in `sed -n 's/.*("\(.*\)", *_.*/\1/p' $CI_PROJECT_DIR/fdroid`; do
          LANGUAGE=$locale $CI_PROJECT_DIR/fdroid $cmd --help > /dev/null;
        done
      done
    - $pip install -e .[test]
    # the `fdroid build` test in tests/run-tests needs android-23
    - echo y | $ANDROID_HOME/tools/bin/sdkmanager "platforms;android-23"
    - cd tests
    - ./complete-ci-tests

# Test that the parsing of the .yml metadata format didn't change from last
# released version. This uses the commit ID of the release tags,
@@ -56,18 +22,17 @@ buildserver run-tests:
# The COMMIT_ID should be bumped after each release, so that the list
# of sed hacks needed does not continuously grow.
metadata_v0:
  image: registry.gitlab.com/fdroid/fdroidserver:buildserver
  image: registry.gitlab.com/fdroid/ci-images-base
  variables:
    GIT_DEPTH: 1000
    RELEASE_COMMIT_ID: 50aa35772b058e76b950c01e16019c072c191b73  # after switching to `git rev-parse`
    RELEASE_COMMIT_ID: 37f37ebd88e79ebe93239b72ed5503d5bde13f4b  # 2.0a~
  script:
    - git fetch https://gitlab.com/fdroid/fdroidserver.git $RELEASE_COMMIT_ID
    - cd tests
    - export GITCOMMIT=$(git rev-parse HEAD)
    - export GITCOMMIT=`git describe`
    - git checkout $RELEASE_COMMIT_ID
    - cd ..
    - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git
    - rm -f fdroiddata/config.yml  # ignore config for this test
    - cd fdroiddata
    - ../tests/dump_internal_metadata_format.py
    - cd ..
@@ -76,9 +41,8 @@ metadata_v0:
    - cd fdroiddata
    - ../tests/dump_internal_metadata_format.py
    - sed -i
      -e '/ArchivePolicy:/d'
      -e '/FlattrID:/d'
      -e '/RequiresRoot:/d'
      -e '/Liberapay:/d'
      -e '/OpenCollective/d'
      metadata/dump_*/*.yaml
    - diff -uw metadata/dump_*
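The job above interleaves lines from both branches, which can obscure the underlying pattern: dump the internal metadata format as parsed by the work-in-progress code, dump it again with a pinned release commit, normalize fields that are known to differ, and diff the two dumps. A condensed sketch of that flow, using only commands and paths that appear in the job itself (the job name and exact ordering here are illustrative, not the real file):

    # hypothetical condensed job; the real metadata_v0 job also strips
    # removed fields with sed before diffing
    metadata_v0_sketch:
      script:
        - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git
        # dump metadata as parsed by the work-in-progress fdroidserver
        - (cd fdroiddata && ../tests/dump_internal_metadata_format.py)
        # switch fdroidserver to the pinned release commit and dump again
        - git checkout $RELEASE_COMMIT_ID
        - (cd fdroiddata && ../tests/dump_internal_metadata_format.py)
        # the two dumps must match, i.e. metadata parsing did not change
        - diff -uw fdroiddata/metadata/dump_*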
@@ -90,330 +54,132 @@ metadata_v0:
    - echo Etc/UTC > /etc/timezone
    - echo 'APT::Install-Recommends "0";'
      'APT::Install-Suggests "0";'
      'APT::Acquire::Retries "20";'
      'APT::Get::Assume-Yes "true";'
      'Acquire::Retries "20";'
      'Dpkg::Use-Pty "0";'
      'quiet "1";'
      >> /etc/apt/apt.conf.d/99gitlab
    # Ubuntu and other distros often lack https:// support
    - grep Debian /etc/issue.net
      && { find /etc/apt/sources.list* -type f | xargs sed -i s,http:,https:, ; }
    # The official Debian docker images ship without ca-certificates, so
    # TLS certificates cannot be verified until that is installed. The
    # following code turns off TLS verification, and enables HTTPS, so
    # at least unverified TLS is used for apt-get instead of plain
    # HTTP. Once ca-certificates is installed, the CA verification is
    # enabled by removing this config. This setup makes the initial
    # `apt-get update` and `apt-get install` look the same as verified
    # TLS to the network observer and hides the metadata.
    - echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates
    - apt-get update
    - apt-get install ca-certificates
    - rm /etc/apt/apt.conf.d/99nocacertificates
    - apt-get dist-upgrade

# For jobs that only need to run when there are changes to Python files.
.python-rules-changes: &python-rules-changes
  rules:
    - changes:
        - .gitlab-ci.yml
        - fdroid
        - makebuildserver
        - setup.py
        - fdroidserver/*.py
        - tests/*.py
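Several jobs below pull these apt setup steps in with `<<: *apt-template`, but the line that defines that anchor falls outside the hunks shown in this compare view. A minimal sketch of how such a hidden-job anchor is typically declared and reused; the name `.apt-template` and the exact key layout are assumptions, not taken from this diff:

    # assumed shape of the hidden job that defines the &apt-template anchor
    .apt-template: &apt-template
      variables:
        DEBIAN_FRONTEND: noninteractive   # assumed, not shown in the hunks above
      before_script:
        - echo Etc/UTC > /etc/timezone
        - apt-get update
        - apt-get install ca-certificates
        - apt-get dist-upgrade

    # any job can then merge those keys into itself:
    example_debian_job:                   # hypothetical job name
      image: debian:bookworm-slim
      <<: *apt-template
      script:
        - apt-get install git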
|
||||
|
||||
# Since F-Droid uses Debian as its default platform, from production
|
||||
# servers to CI to contributor machines, it is important to know when
|
||||
# changes in Debian break our stuff. This tests against the latest
|
||||
# dependencies as they are included in Debian.
|
||||
debian_testing:
|
||||
image: debian:testing
|
||||
<<: *apt-template
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||
only:
|
||||
- master@fdroid/fdroidserver
|
||||
script:
|
||||
- apt-get install
|
||||
aapt
|
||||
androguard
|
||||
apksigner
|
||||
dexdump
|
||||
fdroidserver
|
||||
git
|
||||
gnupg
|
||||
ipfs-cid
|
||||
python3-biplist
|
||||
python3-defusedxml
|
||||
python3-libcloud
|
||||
python3-pycountry
|
||||
python3-setuptools
|
||||
sdkmanager
|
||||
zipalign
|
||||
- python3 -c 'import fdroidserver'
|
||||
- python3 -c 'import androguard'
|
||||
- python3 -c 'import sdkmanager'
|
||||
- cd tests
|
||||
- ./run-tests
|
||||
|
||||
|
||||
# Test using latest LTS set up with the PPA, including Recommends.
|
||||
# bionic's apksigner, which comes from Recommends:, requires binfmt
|
||||
# support in the kernel.
|
||||
ubuntu_lts_ppa:
|
||||
image: ubuntu:latest
|
||||
<<: *apt-template
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||
only:
|
||||
- master@fdroid/fdroidserver
|
||||
script:
|
||||
- export ANDROID_HOME=/usr/lib/android-sdk
|
||||
- apt-get install gnupg
|
||||
- while ! apt-key adv --keyserver keyserver.ubuntu.com --recv-key 9AAC253193B65D4DF1D0A13EEC4632C79C5E0151; do sleep 15; done
|
||||
- export RELEASE=$(sed -n 's,^Suites\x3a \([a-z]*\).*,\1,p' /etc/apt/sources.list.d/*.sources | head -1)
|
||||
- while ! apt-key adv --keyserver hkp://pool.sks-keyservers.net --recv-key 9AAC253193B65D4DF1D0A13EEC4632C79C5E0151; do sleep 15; done
|
||||
- export RELEASE=`sed -n 's,^deb [^ ][^ ]* \([a-z]*\).*,\1,p' /etc/apt/sources.list | head -1`
|
||||
- echo "deb http://ppa.launchpad.net/fdroid/fdroidserver/ubuntu $RELEASE main" >> /etc/apt/sources.list
|
||||
- apt-get update
|
||||
- apt-get dist-upgrade
|
||||
- apt-get install --install-recommends
|
||||
dexdump
|
||||
fdroidserver
|
||||
git
|
||||
python3-biplist
|
||||
python3-pycountry
|
||||
python3-setuptools
|
||||
sdkmanager
|
||||
|
||||
# Test things work with a default branch other than 'master'
|
||||
- git config --global init.defaultBranch thisisnotmasterormain
|
||||
|
||||
- mount | grep binfmt_misc || mount -t binfmt_misc binfmt_misc /proc/sys/fs/binfmt_misc
|
||||
- apt-get install --install-recommends binfmt-support fdroidserver git python3-defusedxml python3-setuptools
|
||||
- ls -l /proc/sys/fs/binfmt_misc || true
|
||||
- test -e /proc/sys/fs/binfmt_misc/jarwrapper || apt -qy purge apksigner
|
||||
- cd tests
|
||||
- ./run-tests
|
||||
|
||||
|
||||
# Test to see how rclone works with S3
|
||||
test_deploy_to_s3_with_rclone:
|
||||
image: debian:bookworm-slim
|
||||
<<: *apt-template
|
||||
tags:
|
||||
- saas-linux-small-amd64 # the shared runners are known to support Docker.
|
||||
services:
|
||||
- name: docker:dind
|
||||
command: ["--tls=false"]
|
||||
variables:
|
||||
DOCKER_HOST: "tcp://docker:2375"
|
||||
DOCKER_DRIVER: overlay2
|
||||
DOCKER_TLS_CERTDIR: ""
|
||||
before_script:
|
||||
# ensure minio is up before executing tests
|
||||
- apt-get update
|
||||
- apt-get install -y
|
||||
androguard
|
||||
apksigner
|
||||
curl
|
||||
docker.io
|
||||
git
|
||||
python3-venv
|
||||
rclone
|
||||
# This job requires working docker but will silently fail if docker is not available
|
||||
- docker info
|
||||
- python3 -m venv --system-site-packages test-venv
|
||||
- . test-venv/bin/activate
|
||||
- pip install testcontainers[minio]
|
||||
- pip install .
|
||||
script:
|
||||
- python3 -m unittest -k test_update_remote_storage_with_rclone --verbose
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- fdroidserver/deploy.py
|
||||
- tests/test_deploy.py
|
||||
- tests/test_integration.py
|
||||
|
||||
|
||||
# Test using Ubuntu/jammy LTS (supported til April, 2027) with depends
|
||||
# from pypi and sdkmanager. The venv is used to isolate the dist
|
||||
# tarball generation environment from the clean install environment.
|
||||
ubuntu_jammy_pip:
|
||||
image: ubuntu:jammy
|
||||
# Test using Xenial LTS with all depends from pypi. The venv is used
|
||||
# to isolate the dist tarball generation environment from the clean
|
||||
# install environment. Xenial's pip is too old to install all the
|
||||
# dependencies, so this has to upgrade pip and setuptools in order to
|
||||
# run the install.
|
||||
ubuntu_xenial_pip:
|
||||
image: ubuntu:xenial
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install git default-jdk-headless python3-pip python3-venv rsync
|
||||
|
||||
- apt-get install git default-jdk-headless python3-pip python3-venv rsync zipalign libarchive13
|
||||
- rm -rf env
|
||||
- pyvenv env
|
||||
- . env/bin/activate
|
||||
- $pip install --upgrade babel pip setuptools
|
||||
# setup venv to act as release build machine
|
||||
- python3 -m venv sdist-env
|
||||
- python -m venv sdist-env
|
||||
- . sdist-env/bin/activate
|
||||
- ./setup.py sdist
|
||||
- ./setup.py compile_catalog sdist
|
||||
- deactivate
|
||||
- tar tzf dist/fdroidserver-*.tar.gz
|
||||
|
||||
- tar tzf dist/fdroidserver-*.tar.gz | grep locale/de/LC_MESSAGES/fdroidserver.mo
|
||||
# back to bare machine to act as user's install machine
|
||||
- export ANDROID_HOME=/opt/android-sdk
|
||||
- $pip install sdkmanager
|
||||
- sdkmanager 'build-tools;35.0.0'
|
||||
|
||||
# Install extras_require.optional from setup.py
|
||||
- $pip install biplist pycountry
|
||||
|
||||
- $pip install --upgrade pip setuptools
|
||||
- $pip install dist/fdroidserver-*.tar.gz
|
||||
- tar xzf dist/fdroidserver-*.tar.gz
|
||||
- cd fdroidserver-*
|
||||
- export PATH=$PATH:$ANDROID_HOME/build-tools/35.0.0
|
||||
- fdroid=`which fdroid` ./tests/run-tests
|
||||
- test -e /usr/share/locale/de/LC_MESSAGES/fdroidserver.mo
|
||||
- ./tests/run-tests
|
||||
|
||||
# check localization was properly installed
|
||||
- LANGUAGE='de' fdroid --help | grep 'Gültige Befehle sind'
|
||||
# test install process on a bleeding edge distro with pip
|
||||
arch_pip_install:
|
||||
image: archlinux/base
|
||||
only:
|
||||
- master@fdroid/fdroidserver
|
||||
script:
|
||||
- pacman --sync --sysupgrade --refresh --noconfirm git grep python-pip python-virtualenv tar
|
||||
- pip install -e .
|
||||
- fdroid
|
||||
- fdroid readmeta
|
||||
- fdroid update --help
|
||||
|
||||
|
||||
# Run all the various linters and static analysis tools.
|
||||
hooks/pre-commit:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
lint_format_safety_bandit_checks:
|
||||
image: alpine:3.10 # cannot upgrade until bandit supports Python 3.8
|
||||
variables:
|
||||
LANG: C.UTF-8
|
||||
script:
|
||||
- apt-get update
|
||||
- apt-get -y install --no-install-recommends
|
||||
bash
|
||||
ca-certificates
|
||||
dash
|
||||
gcc
|
||||
git
|
||||
make
|
||||
pycodestyle
|
||||
pyflakes3
|
||||
python3-dev
|
||||
python3-git
|
||||
python3-nose
|
||||
python3-pip
|
||||
python3-yaml
|
||||
- ./hooks/pre-commit
|
||||
|
||||
bandit:
|
||||
image: debian:bookworm-slim
|
||||
<<: *python-rules-changes
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install python3-pip
|
||||
- $pip install --break-system-packages bandit
|
||||
- bandit -r -ii --ini .bandit
|
||||
|
||||
pylint:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
<<: *python-rules-changes
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install pylint python3-pip
|
||||
- $pip install --break-system-packages pylint-gitlab
|
||||
- pylint --output-format=colorized,pylint_gitlab.GitlabCodeClimateReporter:pylint-report.json
|
||||
- apk add --no-cache bash build-base dash ca-certificates gcc python3 python3-dev
|
||||
- python3 -m ensurepip
|
||||
- $pip install Babel 'bandit<1.6.0' pycodestyle pyflakes pylint safety
|
||||
- export EXITVALUE=0
|
||||
- function set_error() { export EXITVALUE=1; printf "\x1b[31mERROR `history|tail -2|head -1|cut -b 6-500`\x1b[0m\n"; }
|
||||
- ./hooks/pre-commit || set_error
|
||||
- ./tests/test-gradlew-fdroid || set_error
|
||||
- bandit
|
||||
-ii
|
||||
-s B110,B322,B404,B408,B410,B603,B607
|
||||
-r $CI_PROJECT_DIR fdroid
|
||||
|| set_error
|
||||
- safety check --full-report || set_error
|
||||
- pylint --rcfile=.pylint-rcfile --output-format=colorized --reports=n
|
||||
fdroid
|
||||
makebuildserver
|
||||
setup.py
|
||||
fdroidserver/*.py
|
||||
tests/*.py
|
||||
artifacts:
|
||||
reports:
|
||||
codequality: pylint-report.json
|
||||
when: always
|
||||
|
||||
|
||||
shellcheck:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- hooks/install-hooks.sh
|
||||
- hooks/pre-commit
|
||||
- tests/run-tests
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install shellcheck
|
||||
# TODO GitLab Code Quality report https://github.com/koalaman/shellcheck/issues/3155
|
||||
- shellcheck --exclude SC2046,SC2090 --severity=warning --color
|
||||
hooks/install-hooks.sh
|
||||
hooks/pre-commit
|
||||
tests/run-tests
|
||||
|
||||
# Check all the dependencies in Debian to mirror production. CVEs are
|
||||
# generally fixed in the latest versions in pip/pypi.org, so it isn't
|
||||
# so important to scan that kind of install in CI.
|
||||
# https://docs.safetycli.com/safety-docs/installation/gitlab
|
||||
safety:
|
||||
image: debian:bookworm-slim
|
||||
rules:
|
||||
- if: $SAFETY_API_KEY
|
||||
changes:
|
||||
- .gitlab-ci.yml
|
||||
- .safety-policy.yml
|
||||
- pyproject.toml
|
||||
- setup.py
|
||||
<<: *apt-template
|
||||
variables:
|
||||
LANG: C.UTF-8
|
||||
script:
|
||||
- apt-get install
|
||||
fdroidserver
|
||||
python3-biplist
|
||||
python3-pip
|
||||
python3-pycountry
|
||||
- $pip install --break-system-packages .
|
||||
|
||||
- $pip install --break-system-packages safety
|
||||
- python3 -m safety --key "$SAFETY_API_KEY" --stage cicd scan
|
||||
|
||||
|
||||
# TODO tests/*/*/*.yaml are not covered
|
||||
yamllint:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- .safety-policy.yml
|
||||
- .yamllint
|
||||
- tests/*.yml
|
||||
- tests/*/*.yml
|
||||
- tests/*/*/.*.yml
|
||||
<<: *apt-template
|
||||
variables:
|
||||
LANG: C.UTF-8
|
||||
script:
|
||||
- apt-get install yamllint
|
||||
- yamllint
|
||||
.gitlab-ci.yml
|
||||
.safety-policy.yml
|
||||
.yamllint
|
||||
tests/*.yml
|
||||
tests/*/*.yml
|
||||
tests/*/*/.*.yml
|
||||
|
||||
|
||||
locales:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
variables:
|
||||
LANG: C.UTF-8
|
||||
script:
|
||||
- apt-get update
|
||||
- apt-get -y install --no-install-recommends
|
||||
gettext
|
||||
make
|
||||
python3-babel
|
||||
- export EXITVALUE=0
|
||||
- function set_error() { export EXITVALUE=1; printf "\x1b[31mERROR `history|tail -2|head -1|cut -b 6-500`\x1b[0m\n"; }
|
||||
tests/*.TestCase
|
||||
|| set_error
|
||||
- apk add --no-cache gettext make
|
||||
- make -C locale compile || set_error
|
||||
- rm -f locale/*/*/*.mo
|
||||
- pybabel compile --domain=fdroidserver --directory locale 2>&1 | { grep -F "error:" && exit 1; } || true
|
||||
- pybabel compile --domain=fdroidserver --directory locale 2>&1 | (grep -F "error:" && exit 1) || true
|
||||
- exit $EXITVALUE
|
||||
|
||||
|
||||
black:
|
||||
stage: lint
|
||||
image: debian:bookworm-slim
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install black
|
||||
- black --check --diff --color $CI_PROJECT_DIR
|
||||
|
||||
fedora_latest:
|
||||
image: fedora:39 # support ends on 2024-11-12
|
||||
image: fedora:latest
|
||||
only:
|
||||
- master@fdroid/fdroidserver
|
||||
script:
|
||||
# tricks to hopefully make runs more reliable
|
||||
- echo "timeout=600" >> /etc/dnf/dnf.conf
|
||||
|
@ -426,429 +192,102 @@ fedora_latest:
|
|||
findutils
|
||||
git
|
||||
gnupg
|
||||
java-17-openjdk-devel
|
||||
openssl
|
||||
java-1.8.0-openjdk-devel
|
||||
python3
|
||||
python3-babel
|
||||
python3-matplotlib
|
||||
python3-pip
|
||||
python3-pycountry
|
||||
rsync
|
||||
unzip
|
||||
wget
|
||||
which
|
||||
- $pip install sdkmanager
|
||||
- ./setup.py sdist
|
||||
- ./setup.py compile_catalog sdist
|
||||
- useradd -m -c "test account" --password "fakepassword" testuser
|
||||
- su testuser --login --command "cd `pwd`; $pip install --user dist/fdroidserver-*.tar.gz"
|
||||
- test -e ~testuser/.local/share/locale/de/LC_MESSAGES/fdroidserver.mo
|
||||
- wget --no-verbose -O tools.zip https://dl.google.com/android/repository/tools_r25.2.5-linux.zip
|
||||
- unzip -q tools.zip
|
||||
- rm tools.zip
|
||||
- export BUILD_TOOLS_VERSION=`sed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py`
|
||||
- export JAVA_HOME=/etc/alternatives/jre
|
||||
- export ANDROID_HOME=`pwd`/android-sdk
|
||||
- mkdir $ANDROID_HOME
|
||||
- mv tools $ANDROID_HOME/
|
||||
- mkdir -p $ANDROID_HOME/licenses/
|
||||
- printf "\n8933bad161af4178b1185d1a37fbf41ea5269c55\nd56f5187479451eabf01fb78af6dfcb131a6481e\n24333f8a63b6825ea9c5514f83c2829b004d1fee" > $ANDROID_HOME/licenses/android-sdk-license
|
||||
- printf "\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license
|
||||
- printf "\n79120722343a6f314e0719f863036c702b0e6b2a\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license-old
|
||||
- mkdir ~/.android
|
||||
- touch ~/.android/repositories.cfg
|
||||
- sdkmanager "platform-tools" "build-tools;$BUILD_TOOLS_VERSION"
|
||||
- echo y | $ANDROID_HOME/tools/bin/sdkmanager "platform-tools"
|
||||
- echo y | $ANDROID_HOME/tools/bin/sdkmanager "build-tools;$BUILD_TOOLS_VERSION"
|
||||
- chown -R testuser .
|
||||
- cd tests
|
||||
- su testuser --login --command
|
||||
"cd `pwd`; export CI=$CI ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests"
|
||||
|
||||
|
||||
macOS:
|
||||
tags:
|
||||
- saas-macos-medium-m1
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||
script:
|
||||
- export HOMEBREW_CURL_RETRIES=10
|
||||
- brew update > /dev/null
|
||||
- brew upgrade
|
||||
- brew install fdroidserver
|
||||
|
||||
# Android SDK and Java JDK
|
||||
- brew install --cask android-commandlinetools temurin # temurin is a JDK
|
||||
|
||||
# test suite dependencies
|
||||
- brew install bash coreutils gnu-sed
|
||||
# TODO port tests/run-tests to POSIX and gsed, it has a couple GNU-isms like du --bytes
|
||||
- export PATH="$(brew --prefix fdroidserver)/libexec/bin:$(brew --prefix coreutils)/libexec/gnubin:$PATH"
|
||||
|
||||
- brew autoremove
|
||||
- brew info fdroidserver
|
||||
|
||||
- export BUILD_TOOLS_VERSION=`gsed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py`
|
||||
- export ANDROID_HOME="$(brew --prefix)/share/android-commandlinetools"
|
||||
- mkdir -p "$ANDROID_HOME/licenses"
|
||||
- echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55" > "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- echo -e "\nd56f5187479451eabf01fb78af6dfcb131a6481e" >> "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- echo -e "\n24333f8a63b6825ea9c5514f83c2829b004d1fee" >> "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- $(brew --prefix)/bin/sdkmanager "build-tools;$BUILD_TOOLS_VERSION"
|
||||
|
||||
- echo "macOS sticks with bash 3.x because of licenses, so avoid new bash syntax"
|
||||
- /bin/bash --version
|
||||
- /bin/bash -n tests/run-tests
|
||||
|
||||
# test fdroidserver from git with current package's dependencies
|
||||
- fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests
|
||||
|
||||
"cd `pwd`; export ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests"
|
||||
|
||||
gradle:
|
||||
image: debian:trixie-slim
|
||||
<<: *apt-template
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- makebuildserver
|
||||
image: alpine:3.7
|
||||
variables:
|
||||
LANG: C.UTF-8
|
||||
script:
|
||||
- apt-get install
|
||||
ca-certificates
|
||||
git
|
||||
python3-colorama
|
||||
python3-packaging
|
||||
python3-requests
|
||||
- apk add --no-cache ca-certificates git python3
|
||||
# if this is a merge request fork, then only check if makebuildserver changed
|
||||
- if [ "$CI_PROJECT_NAMESPACE" != "fdroid" ]; then
|
||||
git fetch https://gitlab.com/fdroid/fdroidserver.git;
|
||||
for f in `git diff --name-only --diff-filter=d FETCH_HEAD...HEAD`; do
|
||||
test "$f" == "makebuildserver" && export CHANGED="yes";
|
||||
done;
|
||||
test -z "$CHANGED" && exit;
|
||||
fi
|
||||
- python3 -m ensurepip
|
||||
- $pip install beautifulsoup4 requests
|
||||
- ./tests/gradle-release-checksums.py
|
||||
|
||||
|
||||
# Run an actual build in a simple, faked version of the buildserver guest VM.
|
||||
fdroid build:
|
||||
image: registry.gitlab.com/fdroid/fdroidserver:buildserver
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- fdroidserver/build.py
|
||||
- fdroidserver/common.py
|
||||
- fdroidserver/exception.py
|
||||
- fdroidserver/metadata.py
|
||||
- fdroidserver/net.py
|
||||
- fdroidserver/scanner.py
|
||||
- fdroidserver/vmtools.py
|
||||
# for the docker: job which depends on this one
|
||||
- makebuildserver
|
||||
- buildserver/*
|
||||
image: registry.gitlab.com/fdroid/ci-images-client
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
- pipelines
|
||||
changes:
|
||||
- .gitlab-ci.yml
|
||||
- buildserver/provision-apt-get-install
|
||||
- fdroidserver/build.py
|
||||
- fdroidserver/common.py
|
||||
- fdroidserver/exception.py
|
||||
- fdroidserver/metadata.py
|
||||
- fdroidserver/net.py
|
||||
- fdroidserver/scanner.py
|
||||
- fdroidserver/vmtools.py
|
||||
cache:
|
||||
key: "$CI_JOB_NAME"
|
||||
paths:
|
||||
- .gradle
|
||||
script:
|
||||
- apt-get update
|
||||
- bash buildserver/provision-apt-get-install http://deb.debian.org/debian
|
||||
- apt-get dist-upgrade
|
||||
- apt-get clean
|
||||
|
||||
- test -n "$fdroidserver" || source /etc/profile.d/bsenv.sh
|
||||
|
||||
- ln -fsv "$CI_PROJECT_DIR" "$fdroidserver"
|
||||
|
||||
# TODO remove sdkmanager install once it is included in the buildserver image
|
||||
- apt-get install sdkmanager
|
||||
- rm -rf "$ANDROID_HOME/tools" # TODO remove once sdkmanager can upgrade installed packages
|
||||
- sdkmanager "tools" "platform-tools" "build-tools;31.0.0"
|
||||
|
||||
- git ls-remote https://gitlab.com/fdroid/fdroiddata.git master
|
||||
- git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git
|
||||
- cd fdroiddata
|
||||
- for d in build logs repo tmp unsigned $home_vagrant/.android; do
|
||||
test -d $d || mkdir $d;
|
||||
chown -R vagrant $d;
|
||||
done
|
||||
|
||||
- export GRADLE_USER_HOME=$home_vagrant/.gradle
|
||||
- export fdroid="sudo --preserve-env --user vagrant
|
||||
env PATH=$fdroidserver:$PATH
|
||||
env PYTHONPATH=$fdroidserver:$fdroidserver/examples
|
||||
env PYTHONUNBUFFERED=true
|
||||
env TERM=$TERM
|
||||
env HOME=$home_vagrant
|
||||
fdroid"
|
||||
|
||||
- git -C $home_vagrant/gradlew-fdroid pull
|
||||
|
||||
- chown -R vagrant $home_vagrant
|
||||
- chown -R vagrant $fdroidserver/.git
|
||||
- chown vagrant $fdroidserver/
|
||||
- chown -R vagrant .git
|
||||
- chown vagrant .
|
||||
|
||||
# try user build
|
||||
- $fdroid build --verbose --latest org.fdroid.fdroid.privileged
|
||||
|
||||
# try on-server build
|
||||
- $fdroid build --verbose --on-server --no-tarball --latest org.fdroid.fdroid
|
||||
|
||||
# each `fdroid build --on-server` run expects sudo, then uninstalls it
|
||||
- if dpkg --list sudo; then echo "sudo should not be still there"; exit 1; fi
|
||||
- 'if [ ! -f repo/status/running.json ]; then echo "ERROR: running.json does not exist!"; exit 1; fi'
|
||||
- 'if [ ! -f repo/status/build.json ]; then echo "ERROR: build.json does not exist!"; exit 1; fi'
|
||||
|
||||
|
||||
# test the plugin API and specifically the fetchsrclibs plugin, which
|
||||
# is used by the `fdroid build` job. This uses a fixed commit from
|
||||
# fdroiddata because that one is known to work, and this is a CI job,
|
||||
# so it should be isolated from the normal churn of fdroiddata.
|
||||
plugin_fetchsrclibs:
|
||||
image: debian:bookworm-slim
|
||||
<<: *apt-template
|
||||
rules:
|
||||
- changes:
|
||||
- .gitlab-ci.yml
|
||||
- examples/fdroid_fetchsrclibs.py
|
||||
- fdroidserver/__main__.py
|
||||
script:
|
||||
- apt-get install
|
||||
curl
|
||||
git
|
||||
python3-cffi
|
||||
python3-matplotlib
|
||||
python3-nacl
|
||||
python3-paramiko
|
||||
python3-pil
|
||||
python3-pip
|
||||
python3-pycparser
|
||||
python3-venv
|
||||
- python3 -m venv --system-site-packages env
|
||||
- apt-get install -t stretch-backports
|
||||
python3-asn1crypto
|
||||
python3-pip
|
||||
python3-ruamel.yaml
|
||||
python3-setuptools
|
||||
python3-venv
|
||||
- apt-get purge fdroidserver
|
||||
- pyvenv env --system-site-packages
|
||||
- . env/bin/activate
|
||||
- $pip install -e .
|
||||
- export PATH="$CI_PROJECT_DIR:$PATH"
|
||||
- export PYTHONPATH="$CI_PROJECT_DIR/examples"
|
||||
# workaround https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1003252
|
||||
- export SETUPTOOLS_USE_DISTUTILS=stdlib
|
||||
- $pip install -e .
|
||||
- fdroid | grep fetchsrclibs
|
||||
- export PYTHONPATH=$CI_PROJECT_DIR
|
||||
- export PYTHONUNBUFFERED=true
|
||||
|
||||
- mkdir fdroiddata
|
||||
- commitid=b9e9a077d720c86ff6fff4dbb341254cc4370b1a
|
||||
- curl https://gitlab.com/fdroid/fdroiddata/-/archive/${commitid}/fdroiddata-${commitid}.tar.gz
|
||||
| tar -xz --directory=fdroiddata --strip-components=1
|
||||
- git clone https://gitlab.com/fdroid/fdroiddata.git --depth 1
|
||||
- cd fdroiddata
|
||||
- fdroid fetchsrclibs freemap.opentrail:4 --verbose
|
||||
- test -d build/freemap.opentrail/.git
|
||||
- test -d build/srclib/andromaps/.git
|
||||
- test -d build/srclib/freemaplib/.git
|
||||
- test -d build/srclib/freemaplibProj/.git
|
||||
- test -d build/srclib/JCoord/.git
|
||||
- test -d build/srclib/javaproj/.git
|
||||
- test -d build || mkdir build
|
||||
|
||||
|
||||
# test a full update and deploy cycle to gitlab.com
|
||||
servergitmirrors:
|
||||
image: debian:bookworm-slim
|
||||
<<: *apt-template
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||
script:
|
||||
- apt-get install
|
||||
default-jdk-headless
|
||||
git
|
||||
openssh-client
|
||||
openssl
|
||||
python3-cffi
|
||||
python3-cryptography
|
||||
python3-matplotlib
|
||||
python3-nacl
|
||||
python3-pil
|
||||
python3-pip
|
||||
python3-pycparser
|
||||
python3-setuptools
|
||||
python3-venv
|
||||
rsync
|
||||
wget
|
||||
- apt-get install apksigner
|
||||
- python3 -m venv --system-site-packages env
|
||||
- . env/bin/activate
|
||||
- export PYTHONPATH=`pwd`
|
||||
- export SETUPTOOLS_USE_DISTUTILS=stdlib # https://github.com/pypa/setuptools/issues/2956
|
||||
- $pip install -e .
|
||||
- mkdir /root/.ssh/
|
||||
- ./tests/key-tricks.py
|
||||
- ssh-keyscan gitlab.com >> /root/.ssh/known_hosts
|
||||
- test -d /tmp/fdroid/repo || mkdir -p /tmp/fdroid/repo
|
||||
- cp tests/config.yml tests/keystore.jks /tmp/fdroid/
|
||||
- cp tests/repo/com.politedroid_6.apk /tmp/fdroid/repo/
|
||||
- cd /tmp/fdroid
|
||||
- touch fdroid-icon.png
|
||||
- printf "\nservergitmirrors\x3a 'git@gitlab.com:fdroid/ci-test-servergitmirrors-repo.git'\n" >> config.yml
|
||||
- $PYTHONPATH/fdroid update --verbose --create-metadata
|
||||
- $PYTHONPATH/fdroid deploy --verbose
|
||||
- export DLURL=`grep -Eo 'https://gitlab.com/fdroid/ci-test-servergitmirrors-repo[^"]+' repo/index-v1.json`
|
||||
- echo $DLURL
|
||||
- wget $DLURL/index-v1.jar
|
||||
- diff repo/index-v1.jar index-v1.jar
|
||||
|
||||
Build documentation:
|
||||
image: debian:bookworm-slim
|
||||
<<: *python-rules-changes
|
||||
<<: *apt-template
|
||||
script:
|
||||
- apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver
|
||||
- apt purge fdroidserver
|
||||
# ignore vendored files
|
||||
- pydocstyle --verbose --match='(?!apksigcopier|looseversion|setup|test_).*\.py' fdroidserver
|
||||
- cd docs
|
||||
- sphinx-apidoc -o ./source ../fdroidserver -M -e
|
||||
- PYTHONPATH=.. sphinx-autogen -o generated source/*.rst
|
||||
- PYTHONPATH=.. make html
|
||||
artifacts:
|
||||
paths:
|
||||
- docs/build/html/
|
||||
|
||||
|
||||
# this job will only run in branches called "windows" until the Windows port is complete
|
||||
Windows:
|
||||
tags:
|
||||
- windows
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "windows"
|
||||
script:
|
||||
- Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1"
|
||||
- choco install --no-progress -y git --force --params "/GitAndUnixToolsOnPath"
|
||||
- choco install --no-progress -y python3 --version=3.10
|
||||
- choco install --no-progress -y jdk8
|
||||
- choco install --no-progress -y rsync
|
||||
- refreshenv
|
||||
- python -m pip install --upgrade babel pip setuptools
|
||||
- python -m pip install -e .
|
||||
|
||||
- $files = @(Get-ChildItem tests\test_*.py)
|
||||
- foreach ($f in $files) {
|
||||
write-output $f;
|
||||
python -m unittest $f;
|
||||
if( $LASTEXITCODE -eq 0 ) {
|
||||
write-output "SUCCESS $f";
|
||||
} else {
|
||||
write-output "ERROR $f failed";
|
||||
}
|
||||
}
|
||||
|
||||
# these are the tests that must pass
|
||||
- python -m unittest -k
|
||||
checkupdates
|
||||
exception
|
||||
import_subcommand
|
||||
test_lint
|
||||
test_metadata
|
||||
test_rewritemeta
|
||||
test_vcs
|
||||
tests.test_init
|
||||
tests.test_main
|
||||
after_script:
|
||||
- Copy-Item C:\ProgramData\chocolatey\logs\chocolatey.log
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- "*.log"
|
||||
allow_failure:
|
||||
exit_codes: 1
|
||||
|
||||
|
||||
pages:
|
||||
image: alpine:latest
|
||||
stage: deploy
|
||||
script:
|
||||
- cp docs/build/html public -r # GL Pages needs the files in a directory named "public"
|
||||
artifacts:
|
||||
paths:
|
||||
- public
|
||||
needs:
|
||||
- job: "Build documentation"
|
||||
optional: true
|
||||
rules:
|
||||
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # only publish pages on default (master) branch
|
||||
|
||||
|
||||
# This job pushes the official CI docker image based on the master
|
||||
# branch, so in fdroid/fdroidserver, it should only run on the master
|
||||
# branch. Otherwise, tags or other branches will overwrite the docker
|
||||
# image which is supposed to be what is in master.
|
||||
docker:
|
||||
dependencies:
|
||||
- fdroid build
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||
changes:
|
||||
- .gitlab-ci.yml
|
||||
- makebuildserver
|
||||
- buildserver/*
|
||||
image: docker:dind
|
||||
services:
|
||||
- docker:dind
|
||||
variables:
|
||||
RELEASE_IMAGE: $CI_REGISTRY_IMAGE:buildserver
|
||||
script:
|
||||
# git ref names can contain many chars that are not allowed in docker tags
|
||||
- export TEST_IMAGE=$CI_REGISTRY_IMAGE:$(printf $CI_COMMIT_REF_NAME | sed 's,[^a-zA-Z0-9_.-],_,g')
|
||||
- cd buildserver
|
||||
- docker build -t $TEST_IMAGE --build-arg GIT_REV_PARSE_HEAD=$(git rev-parse HEAD) .
|
||||
- docker tag $TEST_IMAGE $RELEASE_IMAGE
|
||||
- docker tag $TEST_IMAGE ${RELEASE_IMAGE}-bookworm
|
||||
- echo $CI_JOB_TOKEN | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com
|
||||
# This avoids filling up gitlab.com free tier accounts with unused docker images.
|
||||
- if test -z "$FDROID_PUSH_DOCKER_IMAGE"; then
|
||||
echo "Skipping docker push to save quota on your gitlab namespace.";
|
||||
echo "If you want to enable the push, set FDROID_PUSH_DOCKER_IMAGE in";
|
||||
echo "https://gitlab.com/$CI_PROJECT_NAMESPACE/fdroidserver/-/settings/ci_cd#js-cicd-variables-settings";
|
||||
exit 0;
|
||||
fi
|
||||
- docker push $RELEASE_IMAGE
|
||||
- docker push $RELEASE_IMAGE-bookworm
|
||||
|
||||
|
||||
# PUBLISH is the signing server. It has a very minimal manual setup.
|
||||
PUBLISH:
|
||||
image: debian:bookworm-backports
|
||||
<<: *python-rules-changes
|
||||
script:
|
||||
- apt-get update
|
||||
- apt-get -qy upgrade
|
||||
- apt-get -qy install --no-install-recommends -t bookworm-backports
|
||||
androguard
|
||||
apksigner
|
||||
curl
|
||||
default-jdk-headless
|
||||
git
|
||||
gpg
|
||||
gpg-agent
|
||||
python3-asn1crypto
|
||||
python3-defusedxml
|
||||
python3-git
|
||||
python3-ruamel.yaml
|
||||
python3-yaml
|
||||
rsync
|
||||
|
||||
# Run only relevant parts of the test suite, other parts will fail
|
||||
# because of this minimal base setup.
|
||||
- python3 -m unittest
|
||||
tests/test_gpgsign.py
|
||||
tests/test_metadata.py
|
||||
tests/test_publish.py
|
||||
tests/test_signatures.py
|
||||
tests/test_signindex.py
|
||||
|
||||
- cd tests
|
||||
- mkdir archive
|
||||
- mkdir unsigned
|
||||
- cp urzip-release-unsigned.apk unsigned/info.guardianproject.urzip_100.apk
|
||||
- grep '^key.*pass' config.yml | sed 's,\x3a ,=,' > $CI_PROJECT_DIR/variables
|
||||
- sed -Ei 's,^(key.*pass|keystore)\x3a.*,\1\x3a {env\x3a \1},' config.yml
|
||||
- printf '\ngpghome\x3a {env\x3a gpghome}\n' >> config.yml
|
||||
- |
|
||||
tee --append $CI_PROJECT_DIR/variables <<EOF
|
||||
gpghome=$CI_PROJECT_DIR/tests/gnupghome
|
||||
keystore=$CI_PROJECT_DIR/tests/keystore.jks
|
||||
serverwebroot=/tmp
|
||||
export gpghome keypass keystorepass keystore serverwebroot
|
||||
EOF
|
||||
- source $CI_PROJECT_DIR/variables
|
||||
# silence warnings
|
||||
- chmod 0600 config.yml config/*.yml config/*/*.yml
|
||||
- chmod 0700 $gpghome
|
||||
|
||||
- export PATH=$CI_PROJECT_DIR:$PATH
|
||||
|
||||
# run signpkg.sh
|
||||
- fdroid publish --verbose
|
||||
- fdroid gpgsign --verbose
|
||||
- rsync --progress repo/* $serverwebroot/
|
||||
|
||||
# run signindex.sh
|
||||
- fdroid gpgsign --verbose
|
||||
- fdroid signindex --verbose
|
||||
- rsync --stats repo/* $serverwebroot/
|
||||
- export GRADLE_USER_HOME=$CI_PROJECT_DIR/.gradle
|
||||
# try a user build first
|
||||
- fdroid build --verbose --latest org.fdroid.fdroid.privileged
|
||||
# each `fdroid build --on-server` run expects sudo, then uninstalls it
|
||||
- apt-get install sudo
|
||||
- fdroid build --verbose --on-server --no-tarball --latest org.fdroid.fdroid
|
||||
|
|
.mailmap (2 changed lines)
@@ -1,2 +0,0 @@
Gregor Düster <git@gdstr.eu> FestplattenSchnitzel <festplatte.schnitzel@posteo.de>
Hans-Christoph Steiner <hans@eds.org> <hans@guardianproject.info>
.pylint-rcfile (new file, 45 lines)
@@ -0,0 +1,45 @@
[MASTER]

# Use multiple processes to speed up Pylint.
jobs=4

# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no


[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=HIGH,INFERENCE

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=invalid-name,missing-docstring,no-member


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO


[BASIC]

# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_,e,f,fp


[ELIF]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5
||||
|
|
.safety-policy.yml (55 changed lines)
@@ -1,55 +0,0 @@
---

version: '3.0'

scanning-settings:
  max-depth: 6
  exclude:

report:
  dependency-vulnerabilities:
    enabled: true
    auto-ignore-in-report:
      vulnerabilities:
        52495:
          reason: setuptools comes from Debian
          expires: '2025-01-31'
        60350:
          reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40267
          expires: '2025-01-31'
        60789:
          reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40590
          expires: '2025-01-31'
        60841:
          reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-41040
          expires: '2025-01-31'
        62044:
          reason: "F-Droid doesn't fetch pip dependencies directly from hg/mercurial repositories: https://data.safetycli.com/v/62044/f17/"
          expires: '2025-01-31'
        63687:
          reason: Only affects Windows https://security-tracker.debian.org/tracker/CVE-2024-22190
          expires: '2026-01-31'
        67599:
          reason: Only affects pip when using --extra-index-url, which is never the case in fdroidserver CI.
          expires: '2026-05-31'
        70612:
          reason: jinja2 is not used by fdroidserver, nor any dependencies I could find via debtree and pipdeptree.
          expires: '2026-05-31'
        72132:
          reason: We get these packages from Debian, zipp is not used in production, and it's only a DoS.
          expires: '2026-08-31'
        72236:
          reason: setuptools is not used in production to download or install packages; they come from Debian.
          expires: '2026-08-31'

fail-scan-with-exit-code:
  dependency-vulnerabilities:
    enabled: true
    fail-on-any-of:
      cvss-severity:
        - critical
        - high
        - medium

security-updates:
  dependency-vulnerabilities:
.travis.yml (new file, 91 lines)
@ -0,0 +1,91 @@
|
|||
|
||||
# Use the Android base system since it provides the SDK, etc.
|
||||
language: java
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- os: osx
|
||||
osx_image: xcode12
|
||||
env: ANDROID_SDK_ROOT=/usr/local/share/android-sdk
|
||||
env: ANDROID_HOME=/usr/local/share/android-sdk
|
||||
- os: osx
|
||||
osx_image: xcode10.3
|
||||
env: ANDROID_SDK_ROOT=/usr/local/share/android-sdk
|
||||
env: ANDROID_HOME=/usr/local/share/android-sdk
|
||||
|
||||
android:
|
||||
components:
|
||||
- android-23 # required for `fdroid build` test
|
||||
- build-tools-28.0.3 # required for `fdroid build` test
|
||||
licenses:
|
||||
- 'android-sdk-preview-.+'
|
||||
- 'android-sdk-license-.+'
|
||||
|
||||
# * ensure java8 is installed since Android SDK doesn't work with Java9
|
||||
# * Java needs to be at least 1.8.0_131 to have MD5 properly disabled
|
||||
# https://blogs.oracle.com/java-platform-group/oracle-jre-will-no-longer-trust-md5-signed-code-by-default
|
||||
# https://opsech.io/posts/2017/Jun/09/openjdk-april-2017-security-update-131-8u131-and-md5-signed-jars.html
|
||||
# * mercurial is unused and requires Python 2.x
|
||||
install:
|
||||
- export HOMEBREW_CURL_RETRIES=10
|
||||
- brew update > /dev/null
|
||||
- if [ "`sw_vers -productVersion | sed 's,10\.\([0-9]*\).*,\1,'`" -ge 14 ]; then
|
||||
python3 --version;
|
||||
elif [ "`sw_vers -productVersion | sed 's,10\.\([0-9]*\).*,\1,'`" -gt 10 ]; then
|
||||
brew uninstall mercurial --force;
|
||||
brew upgrade python;
|
||||
else
|
||||
brew install python3;
|
||||
fi
|
||||
- brew install dash bash gnu-sed gradle jenv
|
||||
- export PATH="/usr/local/opt/gnu-sed/libexec/gnubin:$PATH"
|
||||
- brew uninstall java --force || true
|
||||
- brew cask uninstall java --force || true
|
||||
- brew tap adoptopenjdk/openjdk
|
||||
- travis_retry brew cask install adoptopenjdk8
|
||||
- travis_retry brew cask install android-sdk
|
||||
|
||||
- export AAPT_VERSION=`sed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py`
|
||||
- mkdir -p "$ANDROID_HOME/licenses"
|
||||
- echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55" > "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- echo -e "\nd56f5187479451eabf01fb78af6dfcb131a6481e" >> "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- echo -e "\n24333f8a63b6825ea9c5514f83c2829b004d1fee" >> "$ANDROID_HOME/licenses/android-sdk-license"
|
||||
- echo -e "\n84831b9409646a918e30573bab4c9c91346d8abd" > "$ANDROID_HOME/licenses/android-sdk-preview-license"
|
||||
- echo y | travis_retry $ANDROID_HOME/tools/bin/sdkmanager "platform-tools" > /dev/null
|
||||
- echo y | travis_retry $ANDROID_HOME/tools/bin/sdkmanager "build-tools;$AAPT_VERSION" > /dev/null
|
||||
- echo y | travis_retry $ANDROID_HOME/tools/bin/sdkmanager "platforms;android-23" > /dev/null
|
||||
|
||||
- travis_retry sudo pip3 install --progress-bar off babel
|
||||
- travis_retry sudo pip3 install --quiet --progress-bar off --editable .
|
||||
- sudo rm -rf fdroidserver.egg-info
|
||||
|
||||
- ls -l /System/Library/Java/JavaVirtualMachines || true
|
||||
- ls -l /Library/Java/JavaVirtualMachines || true
|
||||
- for f in /Library/Java/JavaVirtualMachines/*.jdk; do jenv add $f; done
|
||||
- echo $PATH
|
||||
- echo $JAVA_HOME
|
||||
- jenv versions
|
||||
- /usr/libexec/java_home
|
||||
- java -version
|
||||
- which java
|
||||
- javac -version
|
||||
- which javac
|
||||
- jarsigner -help
|
||||
- which jarsigner
|
||||
- keytool -help
|
||||
- which keytool
|
||||
- sudo rm -rf /Library/Java/JavaVirtualMachines/jdk1.8.0_1*.jdk || true
|
||||
|
||||
# The OSX tests seem to run slower, they often timeout. So only run
|
||||
# the test suite with the installed version of fdroid.
|
||||
#
|
||||
# macOS sticks with bash 3.x because of licenses, so avoid using new bash syntax
|
||||
script:
|
||||
- /bin/bash --version
|
||||
- /bin/bash -n gradlew-fdroid tests/run-tests
|
||||
|
||||
- ./tests/run-tests
|
||||
|
||||
after_failure:
|
||||
- cd $TRAVIS_BUILD_DIR
|
||||
- ls -lR | curl -F 'clbin=<-' https://clbin.com
|
.vscode/extensions.json (vendored, 5 changed lines)
@@ -1,5 +0,0 @@
{
    "recommendations": [
        "ms-python.python",
    ]
}
.vscode/settings.json (vendored, 21 changed lines)
@@ -1,21 +0,0 @@
{
    "python.formatting.blackArgs": [
        "--config=pyproject.toml"
    ],
    "python.formatting.provider": "black",
    "python.linting.banditEnabled": true,
    "python.linting.banditArgs": [
        "-ii",
        "--ini=.bandit",
    ],
    "python.linting.enabled": true,
    "python.linting.mypyArgs": [
        "--config-file=mypy.ini"
    ],
    "python.linting.mypyEnabled": true,
    "python.linting.flake8Enabled": true,
    "python.linting.pylintArgs": [
        "--rcfile=.pylint-rcfile"
    ],
    "python.linting.pylintEnabled": true,
}
|
@@ -1 +0,0 @@
https://f-droid.org/funding.json
|
.yamllint (7 changed lines)
@@ -1,7 +0,0 @@
---

extends: default
rules:
  document-start: disable
  line-length: disable
  truthy: disable
CHANGELOG.md (391 changed lines)
@@ -4,375 +4,20 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [2.5.0] - NEXT

### Removed

* deploy: `awsaccesskeyid:` and `awssecretkey:` config items removed, use the
  standard env vars: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.

## [2.4.2] - 2025-06-24

### Fixed

* nightly: fix bug that clones nightly repo to wrong location
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1672
* Sync translations for all supported languages: es pl ru

## [2.4.1] - 2025-06-23

## [Unreleased]
### Added
||||
|
||||
* build: Clearer error messages when working with Git.
|
||||
* verify: generate <appid>.json files that list all reports
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1632
|
||||
|
||||
### Fixed
|
||||
|
||||
* deploy: use master branch when working complete git-mirror repo
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1666
|
||||
* update: use ctime/mtime to control _strip_and_copy_image runs
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1665
|
||||
* update: If categories.yml only has icon:, then add name:
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1659
|
||||
* update: fix handling of Triple-T 1.0.0 graphics
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1652
|
||||
* update: never execute any VCS e.g. git
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1630
|
||||
* config: lazyload environment variables in config.yml
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1645
|
||||
* config: make localized name/description/icon optional
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1649
|
||||
* lint: add repo_key_sha256 to list of valid config keys
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1643
|
||||
* build: calculate all combinations of gradle flavors
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1638
|
||||
* build: set SOURCE_DATE_EPOCH from app's git otherwise fdroiddata metadata file
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1653
|
||||
* Sync translations for all supported languages: ca cs de fr ga ja pl pt pt_BR
|
||||
pt_PT ru sq tr uk zh_Hans
|
||||
|
||||
### Removed
|
||||
|
||||
## [2.4.0] - 2025-03-25
|
||||
|
||||
### Added
|
||||
|
||||
* lint: support the base _config.yml_.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1606
|
||||
|
||||
### Fixed
|
||||
|
||||
* Expand {env: foo} config syntax to be allowed any place a string is.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1610
|
||||
* Only show "unsafe permissions on config.yml" when secrets are present.
|
||||
* Standardized config files on ruamel.yaml with a YAML 1.2 data format.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1611
|
||||
* Brought back error when a package has multiple package types (e.g. xapk and
|
||||
apk). https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1602
|
||||
* Reworked test suite to be entirely based on Python unittest (thanks @mindston).
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1587
|
||||
* publish/signindex/gpgsign no longer load the _qrcode_ and _requests_ modules,
|
||||
and can operate without them installed.
|
||||
* scanner: add bun.lock as lock file of package.json
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1615
|
||||
* index: fail if user sets mirrors:isPrimary wrong
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1617
|
||||
https://gitlab.com/fdroid/fdroidserver/-/issues/1125
|
||||
* Sync translations for all supported languages: bo ca cs de es fr ga hu it ja
|
||||
ko nb_NO pl pt pt_BR pt_PT ro ru sq sr sw tr uk zh_Hans zh_Hant
|
||||
|
||||
### Removed
|
||||
|
||||
* checkupdates: remove auto_author: config, it is no longer used.
|
||||
* Purge support for the long-deprecated _config.py_ config file.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1607
|
||||
|
||||
|
||||
## [2.3.5] - 2025-01-20
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix issue where APKs with v1-only signatures and targetSdkVersion < 30 could
|
||||
be maliciously crafted to bypass AllowedAPKSigningKeys
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1588
|
||||
* Ignore apksigner v33.x, it has bugs verifying APKs with v3/v3.1 sigs.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1593
|
||||
* Sync translations for: ca cs de es fr ga ja pt_BR pt_PT ru sq sr uk zh_Hans
|
||||
|
||||
## [2.3.4] - 2024-12-12
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix localhost network tests on systems with IPv6.
|
||||
* lint: only error out on missing extlib on versions not archived.
|
||||
|
||||
## [2.3.3] - 2024-12-11
|
||||
|
||||
### Added
|
||||
|
||||
* verify: `--clean-up-verified` to delete files used when verifying an APK if
|
||||
the verification was successful.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Support Python 3.13 in the full test suite.
|
||||
* Sync translations for: ca de fr ja pl ro ru sr ta
|
||||
* update: only generate _index.png_ when making _index.html_, allowing the repo
|
||||
operator to set a different repo icon, e.g. not the QR Code.
|
||||
|
||||
## [2.3.2] - 2024-11-26
|
||||
|
||||
### Fixed
|
||||
|
||||
* install: fix downloading from GitHub Releases and Maven Central.
|
||||
* Sync translations for: ca fa fr pt ru sr ta zh_Hant
|
||||
|
||||
## [2.3.1] - 2024-11-25
|
||||
|
||||
### Fixed
|
||||
|
||||
* Sync all translations for: cs de es fr ga pt_BR ru sq zh_Hans.
|
||||
* Drop use of deprecated imghdr library to support Python 3.13.
|
||||
* Install biplist and pycountry by default on macOS.
|
||||
* Fixed running test suite out of dist tarball.
|
||||
|
||||
## [2.3.0] - 2024-11-21
|
||||
|
||||
### Added
|
||||
|
||||
* YAML 1.2 as native format for all _.yml_ files, including metadata and config.
|
||||
* install: will now fetch _F-Droid.apk_ and install it via `adb`.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1546
|
||||
* scanner: scan APK Signing Block for known block types like Google Play
|
||||
Signature aka "Frosting".
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1555
|
||||
* Support Rclone for deploying to many different cloud services.
|
||||
* deploy: support deploying to GitHub Releases.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1471
|
||||
* scanner: support libs.versions.toml
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1526
|
||||
* Consider subdir for triple-t metadata discovery in Flutter apps.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1541
|
||||
* deploy: added `index_only:` mode for mirroring the index to small hosting
|
||||
locations. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1420
|
||||
* Support publishing repos in AltStore format.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1465
|
||||
* Support indexing iOS IPA app files.
|
||||
https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1413
|
||||
* deploy: _config/mirrors.yml_ file with support for adding per-mirror metadata,
|
||||
like `countryCode:`.
|
||||
* Repo's categories are now set in the config files.
|
||||
* lint: check syntax of config files.
|
||||
* publish: `--error-on-failed` to exit when signing/verifying fails.
|
||||
* scanner: `--refresh` and `refresh_config:` to control triggering a refresh of
|
||||
the rule sets.
|
||||
* Terminal output colorization and `--color` argument to control it.
|
||||
* New languages: Catalan (ca), Irish (ga), Japanese (ja), Serbian (sr), and
|
||||
Swahili (sw).
|
||||
* Support donation links from `community_bridge`, `buy_me_a_coffee`.

### Fixed

* Use last modified time and file size for caching data about scanned APKs
  instead of SHA-256 checksum.
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1542
* `repo_web_base_url:` config for generating per-app URLs for viewing in
  browsers (see the sketch after this list).
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1178
* `fdroid scanner` flags WebAssembly binary _.wasm_ files.
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1562
* Test suite as standard Python `unittest` setup (thanks @ghost.adh).
* scanner: error on dependency files without lock file.
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1504
* nightly: finding APKs in the wrong directory (thanks @WrenIX).
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1512
* `AllowedAPKSigningKeys` works with all single-signer APK signatures.
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1466
* Sync all translations for: cs de it ko pl pt pt_BR pt_PT ro ru sq tr uk
  zh_Hans zh_Hant.
* Support Androguard 4.x.
* Support Python 3.12.
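
As referenced above, a minimal sketch of the `repo_web_base_url:` entry; the
URL is a placeholder, and the per-app URL pattern that gets generated from it
is not spelled out in this changelog.

```yaml
# config.yml (sketch): base URL used to build per-app links for web browsers
repo_web_base_url: https://example.org/fdroid/
```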

### Removed

* Drop all uses of _stats/known_apks.txt_ and the `update_stats:` config key.
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1547
* The `maven:` field is now always a string, with `yes` as a legacy special
  value. It is no longer treated like a boolean in any case (see the sketch
  after this list).
* scanner: jcenter is no longer an allowed Maven repo.
* build: `--reset-server` removed (thanks @gotmi1k).
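
A sketch of the `maven:` change referenced above, using standard build-entry
keys; the version values are placeholders.

```yaml
# metadata/<appid>.yml build entry (sketch): maven: is always a string now
Builds:
  - versionName: '1.0'
    versionCode: 1
    maven: 'yes'  # legacy special value, now handled as the literal string "yes"
```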

## [2.2.2] - 2024-04-24

### Added

* Include sdkmanager as dep in setup.py for Homebrew package.
  https://github.com/Homebrew/homebrew-core/pull/164510

## [2.2.1] - 2023-03-09

### Added

* `download_repo_index_v2()` and `download_repo_index_v2()` API functions
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1323

### Fixed

* Fix OpenJDK detection on different CPU architectures
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1315

### Removed

* Purge all references to `zipalign`, that is delegated to other things
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1316
* Remove obsolete, unused `buildozer` build type
  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1322

## [2.2.0] - 2023-02-20

### Added

* Support index-v2 format, localizable Anti-Features, Categories
* New entry point for repos, entry.jar, signed with modern algorithms
* New config/ subdirectory for localizable configuration
* Script entries in metadata files (init, prebuild, build, etc) now handled as
  lists so they now support using && or ; in the script, and behave like
  .gitlab-ci.yml and other CI YAML (see the sketch after this list).
* GPG signatures for index-v1.json and index-v2.json
* Use default.txt as fallback changelog when inserting fastlane metadata
* scanner: F-Droid signatures now maintained in fdroid/suss
* scanner: maintain signature sources in config.yml, including Exodus Privacy
* scanner: use dexdump for class names
* scanner: directly scan APK files when given a path
* scanner: recursively scan APKs for DEX and ZIP using file magic
* signindex: validate index files before signing
* update: set ArchivePolicy based on VercodeOperation/signature
* Include IPFS CIDv1 in index-v2.json for hosting repos on IPFS
* Per-repo beta channel configuration
* Add Czech translation
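
As referenced in the script-entries item above, a sketch of a list-style build
script in an app's metadata file; the commands themselves are placeholders.
Using `&&` or `;` inside a single item is also supported, in the same spirit as
.gitlab-ci.yml script lists.

```yaml
# metadata build entry (sketch): prebuild as a YAML list instead of one string
Builds:
  - versionName: '1.0'
    versionCode: 1
    prebuild:
      - sed -i -e '/signingConfig/d' app/build.gradle   # placeholder command
      - echo 'org.gradle.jvmargs=-Xmx2g' >> gradle.properties
```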

### Fixed

* apksigner v30 or higher now required for verifying and signing APKs
* 3.9 as minimum supported Python version
* Lots of translation updates
* Better pip packaging
* nightly: big overhaul for reliable operation on all Debian/Ubuntu versions
* Improved logging, fewer confusing verbose messages
* scanner: fix detection of binary files without extension
* import: more reliable operation, including Flutter apps
* Support Java 20 and up

### Removed

* Remove obsolete `fdroid stats` command

## [2.1.1] - 2022-09-06

* gradlew-fdroid: Include latest versions and checksums
* nightly: update Raw URLs to fix breakage and avoid redirects
* signindex: gpg-sign index-v1.json and deploy it
* update: fix --use-date-from-apk when used with files (#1012)

## [2.1] - 2022-02-22

For a more complete overview, see the [2.1
milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/11)

## [2.0.5] - 2022-09-06

### Fixed

* gradlew-fdroid: Include latest versions and checksums
* nightly: add support for GitHub Actions
* nightly: update Raw URLs to fix breakage and avoid redirects
* update: fix --use-date-from-apk when used with files (#1012)
* Fix GitLab CI

## [2.0.4] - 2022-06-29

### Fixed

* deploy: ensure progress is instantiated before trying to use it
* signindex: gpg-sign index-v1.json and deploy it
  [1080](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1080)
  [1124](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1124)

## [2.0.3] - 2021-07-01

### Fixed

* Support AutoUpdateMode: Version without pattern
  [931](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/931)

## [2.0.2] - 2021-06-01

### Fixed

* fix "ruamel round_trip_dump will be removed"
  [932](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/932)

## [2.0.1] - 2021-03-09

### Fixed

* metadata: stop setting up source repo when running lint/rewritemeta
* scanner: show error if scan_binary fails to run apkanalyzer
* common: properly parse version from NDK's source.properties
* update: stop extracting and storing XML icons, they're useless
* index: raise error rather than crash on bad repo file
* update: handle large, corrupt, or inaccessible fastlane/triple-t files
* Update SPDX License List
* checkupdates: set User-Agent to make gitlab.com happy
* Run push_binary_transparency only once

## [2.0] - 2021-01-31

For a more complete overview, see the [2.0
milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/10)

### Added

* `fdroid update` inserts donation links based on upstream's _FUNDING.yml_
  ([!754](https://gitlab.com/fdroid/fdroidserver/merge_requests/754))
* Stable, public API for most useful functions
  ([!798](https://gitlab.com/fdroid/fdroidserver/merge_requests/798))
* Load with any YAML lib and use with the API, no more custom parser needed
  ([!826](https://gitlab.com/fdroid/fdroidserver/merge_requests/826))
  ([!838](https://gitlab.com/fdroid/fdroidserver/merge_requests/838))
* _config.yml_ for a safe, easy, standard configuration format
* makebuildserver: added ndk r20
  ([!663](https://gitlab.com/fdroid/fdroidserver/merge_requests/663))
* Config options can be set from environment variables using this syntax:
  `keystorepass: {env: keystorepass}` (see the sketch after this list)
* added support for gradle 5.5.1
  ([!656](https://gitlab.com/fdroid/fdroidserver/merge_requests/656))
* Add SHA256 to filename of repo graphics
  ([!669](https://gitlab.com/fdroid/fdroidserver/merge_requests/669))
* Support for srclibs metadata in YAML format
  ([!700](https://gitlab.com/fdroid/fdroidserver/merge_requests/700))
* Check srclibs and app-metadata files with yamllint
  ([!721](https://gitlab.com/fdroid/fdroidserver/merge_requests/721))
* Added plugin system for adding subcommands to `fdroid`
  ([!709](https://gitlab.com/fdroid/fdroidserver/merge_requests/709))
* `fdroid update`, `fdroid publish`, and `fdroid signindex` now work
  with SmartCard HSMs, specifically the NitroKey HSM
  ([!779](https://gitlab.com/fdroid/fdroidserver/merge_requests/779))
  ([!782](https://gitlab.com/fdroid/fdroidserver/merge_requests/782))
* `fdroid update` support for Triple-T Gradle Play Publisher v2.x
  ([!683](https://gitlab.com/fdroid/fdroidserver/merge_requests/683))
* Translated into: bo de es fr hu it ko nb_NO pl pt pt_BR pt_PT ru sq tr uk
  zh_Hans zh_Hant
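
The environment-variable syntax mentioned in the list above looks like this in
_config.yml_; only `keystorepass` is shown in the changelog, so treat its use
for other keys as an assumption.

```yaml
# config.yml: read the keystore password from the environment, not the file
keystorepass: {env: keystorepass}
```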

### Fixed

* Smoother process for signing APKs with `apksigner`
  ([!736](https://gitlab.com/fdroid/fdroidserver/merge_requests/736))
  ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821))
* `apksigner` is used by default on new repos
* All parts except _build_ and _publish_ work without the Android SDK
  ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821))
* Description: is now passed to clients unchanged, no HTML conversion
  ([!828](https://gitlab.com/fdroid/fdroidserver/merge_requests/828))
* Lots of improvements for scanning for proprietary code and trackers
  ([!748](https://gitlab.com/fdroid/fdroidserver/merge_requests/748))
  ([!REPLACE](https://gitlab.com/fdroid/fdroidserver/merge_requests/REPLACE))
  ([!844](https://gitlab.com/fdroid/fdroidserver/merge_requests/844))
* `fdroid mirror` now generates complete, working local mirror repos
* fix build-logs disappearing when deploying
  ([!685](https://gitlab.com/fdroid/fdroidserver/merge_requests/685))
* do not crash when system encoding cannot be retrieved
  ([!651](https://gitlab.com/fdroid/fdroidserver/merge_requests/651))

@@ -388,31 +33,11 @@ milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/10)

* `fdroid init` generates PKCS12 keystores, drop Java < 8 support
  ([!801](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/801))
* Parse Version Codes specified in hex
  ([!692](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/692))
* Major refactoring on core parts of code to be more Pythonic
  ([!756](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/756))
* `fdroid init` now works when installed with pip

### Removed

* Removed all support for _.txt_ and _.json_ metadata
  ([!772](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/772))
* dropped support for Debian 8 _jessie_ and 9 _stretch_
* dropped support for Ubuntu releases older than bionic 18.04
* dropped `fdroid server update` and `fdroid server init`,
  use `fdroid deploy`
* `fdroid dscanner` was removed.
  ([!711](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/711))
* `make_current_version_link` is now off by default
* Dropped `force_build_tools` config option
  ([!797](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/797))
* Dropped `accepted_formats` config option, there is only _.yml_ now
  ([!818](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/818))
* `Provides:` was removed as a metadata field
  ([!654](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/654))
* Remove unused `latestapps.dat`
  ([!794](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/794))

## [1.1.4] - 2019-08-15

### Fixed

@@ -1,66 +0,0 @@

There are many ways to contribute; you can find out all the ways on our
[Contribute](https://f-droid.org/contribute/) page. Find out how to get
involved, including as a translator, data analyst, tester, helping others, and
much more!

## Contributing Code

We want more contributors and want different points of view represented. Some
parts of the code make contributing quick and easy. Other parts make it
difficult and slow, so we ask that contributors have patience.

To submit a patch, please open a merge request on GitLab. If you are thinking of
making a large contribution, open an issue or merge request before starting
work, to get comments from the community. Someone may already be working on the
same thing, or there may be reasons why that feature isn't implemented. Once
there is agreement, the work might need to proceed asynchronously with the
core team towards the solution.

To make it easier to review and accept your merge request, please follow these
guidelines:

* When at all possible, include tests. These can either be added to an existing
  test, or completely new. Practicing test-driven development will make it
  easiest to get merged. That usually means starting your work by writing tests.

* See the [help-wanted](https://gitlab.com/fdroid/fdroidserver/-/issues/?sort=updated_desc&state=opened&label_name%5B%5D=help-wanted)
  tag for issues that maintainers have marked as ones they want to see
  merged.

* The amount of technical debt varies widely in this code base. There are some
  parts where the code is nicely isolated with good test coverage. There are
  other parts that are tangled and complicated, full of technical debt, and
  difficult to test.

* The general approach is to treat the tangled and complicated parts as an
  external API (albeit a bad one). That means they need to stay unchanged as
  much as possible. Changes to those parts of the code will trigger a migration,
  which can require a lot of time and coordination. When there is time for large
  development efforts, we refactor the code to get rid of those areas of
  technical debt.

* We use the [_black_](https://black.readthedocs.io/) code format; run `black .`
  to format the code. Whenever editing code in any file, the new code should be
  formatted as _black_. Some files are not yet fully in _black_ format (see
  _pyproject.toml_); our goal is to opportunistically convert the code whenever
  possible. As of this writing, forcing the code format on all files would be
  too disruptive. The officially supported _black_ version is the one in
  Debian/stable.

* Many of the tests run very fast and can be run interactively in isolation.
  Some of the essential test cases run slowly because they do things like
  signing files and generating signing keys.

* Some parts of the code are difficult to test, and currently require a
  relatively complete production setup in order to test them effectively. That
  is mostly the code around building packages, managing the disposable VM, and
  scheduling build jobs to run.

* For user-visible changes (API changes, behaviour changes, etc.), consider
  adding a note to _CHANGELOG.md_. This could be a summarizing description of
  the change, and could explain the broader details. Have a look through
  existing entries for inspiration. Please note that this is NOT simply a copy
  of git-log one-liners. Also note that security fixes get an entry in
  _CHANGELOG.md_. This file helps users get more in-depth information about what
  comes with a specific release without having to sift through the higher noise
  ratio in git-log.

MANIFEST.in

@@ -9,48 +9,35 @@ include buildserver/Vagrantfile
include CHANGELOG.md
include completion/bash-completion
include examples/config.yml
include examples/fdroid_exportkeystore.py
include examples/fdroid_export_keystore_to_nitrokey.py
include examples/fdroid_extract_repo_pubkey.py
include examples/fdroid_fetchsrclibs.py
include examples/fdroid_nitrokeyimport.py
include examples/fdroid-icon.png
include examples/makebuildserver.config.py
include examples/opensc-fdroid.cfg
include examples/public-read-only-s3-bucket-policy.json
include examples/template.yml
include examples/Vagrantfile.yaml
include fdroid
include gradlew-fdroid
include LICENSE
include locale/ba/LC_MESSAGES/fdroidserver.po
include locale/bo/LC_MESSAGES/fdroidserver.po
include locale/ca/LC_MESSAGES/fdroidserver.po
include locale/cs/LC_MESSAGES/fdroidserver.po
include locale/de/LC_MESSAGES/fdroidserver.po
include locale/es/LC_MESSAGES/fdroidserver.po
include locale/fr/LC_MESSAGES/fdroidserver.po
include locale/ga/LC_MESSAGES/fdroidserver.po
include locale/hu/LC_MESSAGES/fdroidserver.po
include locale/it/LC_MESSAGES/fdroidserver.po
include locale/ja/LC_MESSAGES/fdroidserver.po
include locale/ko/LC_MESSAGES/fdroidserver.po
include locale/nb_NO/LC_MESSAGES/fdroidserver.po
include locale/pl/LC_MESSAGES/fdroidserver.po
include locale/pt/LC_MESSAGES/fdroidserver.po
include locale/pt_BR/LC_MESSAGES/fdroidserver.po
include locale/pt_PT/LC_MESSAGES/fdroidserver.po
include locale/ro/LC_MESSAGES/fdroidserver.po
include locale/ru/LC_MESSAGES/fdroidserver.po
include locale/sq/LC_MESSAGES/fdroidserver.po
include locale/sr/LC_MESSAGES/fdroidserver.po
include locale/sw/LC_MESSAGES/fdroidserver.po
include locale/tr/LC_MESSAGES/fdroidserver.po
include locale/uk/LC_MESSAGES/fdroidserver.po
include locale/zh_Hans/LC_MESSAGES/fdroidserver.po
include locale/zh_Hant/LC_MESSAGES/fdroidserver.po
include locale/bo/LC_MESSAGES/fdroidserver.mo
include locale/de/LC_MESSAGES/fdroidserver.mo
include locale/es/LC_MESSAGES/fdroidserver.mo
include locale/fr/LC_MESSAGES/fdroidserver.mo
include locale/hu/LC_MESSAGES/fdroidserver.mo
include locale/it/LC_MESSAGES/fdroidserver.mo
include locale/ko/LC_MESSAGES/fdroidserver.mo
include locale/nb_NO/LC_MESSAGES/fdroidserver.mo
include locale/pl/LC_MESSAGES/fdroidserver.mo
include locale/pt_BR/LC_MESSAGES/fdroidserver.mo
include locale/pt_PT/LC_MESSAGES/fdroidserver.mo
include locale/ru/LC_MESSAGES/fdroidserver.mo
include locale/tr/LC_MESSAGES/fdroidserver.mo
include locale/uk/LC_MESSAGES/fdroidserver.mo
include locale/zh_Hans/LC_MESSAGES/fdroidserver.mo
include locale/zh_Hant/LC_MESSAGES/fdroidserver.mo
include makebuildserver
include README.md
include tests/aosp_testkey_debug.keystore
include tests/apk.embedded_1.apk
include tests/androguard_test.py
include tests/bad-unicode-*.apk
include tests/build.TestCase
include tests/build-tools/17.0.0/aapt-output-com.moez.QKSMS_182.txt
include tests/build-tools/17.0.0/aapt-output-com.politedroid_3.txt
include tests/build-tools/17.0.0/aapt-output-com.politedroid_4.txt
@@ -60,10 +47,10 @@ include tests/build-tools/17.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/17.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/17.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/17.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/17.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/17.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/18.1.1/aapt-output-com.moez.QKSMS_182.txt
@@ -75,10 +62,10 @@ include tests/build-tools/18.1.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/18.1.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/18.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/18.1.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/18.1.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/18.1.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/19.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -90,10 +77,10 @@ include tests/build-tools/19.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/19.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/19.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/19.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/19.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/19.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/19.1.0/aapt-output-com.moez.QKSMS_182.txt
@@ -105,10 +92,10 @@ include tests/build-tools/19.1.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/19.1.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/19.1.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/19.1.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/19.1.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/19.1.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/20.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -120,10 +107,10 @@ include tests/build-tools/20.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/20.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/20.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/20.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/20.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/20.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/21.1.1/aapt-output-com.moez.QKSMS_182.txt
@@ -135,10 +122,10 @@ include tests/build-tools/21.1.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/21.1.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/21.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/21.1.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/21.1.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/21.1.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/21.1.2/aapt-output-com.moez.QKSMS_182.txt
@@ -150,10 +137,10 @@ include tests/build-tools/21.1.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/21.1.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/21.1.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/21.1.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/21.1.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/21.1.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/22.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -165,10 +152,10 @@ include tests/build-tools/22.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/22.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/22.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/22.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/22.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/22.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/22.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -180,10 +167,10 @@ include tests/build-tools/22.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/22.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/22.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/22.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/22.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/22.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/23.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -195,10 +182,10 @@ include tests/build-tools/23.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/23.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/23.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/23.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/23.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/23.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/23.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -210,10 +197,10 @@ include tests/build-tools/23.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/23.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/23.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/23.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/23.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/23.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/23.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -225,10 +212,10 @@ include tests/build-tools/23.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/23.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/23.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/23.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/23.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/23.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/23.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -240,10 +227,10 @@ include tests/build-tools/23.0.3/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/23.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/23.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/23.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/23.0.3/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/23.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/24.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -255,10 +242,10 @@ include tests/build-tools/24.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/24.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/24.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/24.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/24.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/24.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/24.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -270,10 +257,10 @@ include tests/build-tools/24.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/24.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/24.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/24.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/24.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/24.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/24.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -285,10 +272,10 @@ include tests/build-tools/24.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/24.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/24.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/24.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/24.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/24.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/24.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -300,10 +287,10 @@ include tests/build-tools/24.0.3/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/24.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/24.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/24.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/24.0.3/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/24.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/25.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -315,10 +302,10 @@ include tests/build-tools/25.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/25.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/25.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/25.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/25.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/25.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/25.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -330,10 +317,10 @@ include tests/build-tools/25.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/25.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/25.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/25.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/25.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/25.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/25.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -345,10 +332,10 @@ include tests/build-tools/25.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/25.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/25.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/25.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/25.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/25.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/25.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -360,10 +347,10 @@ include tests/build-tools/25.0.3/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/25.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/25.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/25.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/25.0.3/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/25.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/26.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -375,10 +362,10 @@ include tests/build-tools/26.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/26.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/26.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/26.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/26.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/26.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/26.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -390,10 +377,10 @@ include tests/build-tools/26.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/26.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/26.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/26.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/26.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/26.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/26.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -405,10 +392,10 @@ include tests/build-tools/26.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/26.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/26.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/26.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/26.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/26.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/26.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -420,10 +407,10 @@ include tests/build-tools/26.0.3/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/26.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/26.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/26.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/26.0.3/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/26.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/27.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -435,10 +422,10 @@ include tests/build-tools/27.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/27.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/27.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/27.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/27.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/27.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/27.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -450,10 +437,10 @@ include tests/build-tools/27.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/27.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/27.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/27.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/27.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/27.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/27.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -465,10 +452,10 @@ include tests/build-tools/27.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/27.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/27.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/27.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/27.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/27.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/27.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -480,10 +467,10 @@ include tests/build-tools/27.0.3/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/27.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/27.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/27.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/27.0.3/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/27.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/28.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -495,10 +482,10 @@ include tests/build-tools/28.0.0/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/28.0.0/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/28.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/28.0.0/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/28.0.0/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/28.0.0/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/28.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -510,10 +497,10 @@ include tests/build-tools/28.0.1/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/28.0.1/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/28.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/28.0.1/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/28.0.1/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/28.0.1/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/28.0.2/aapt-output-com.politedroid_3.txt
@@ -524,10 +511,10 @@ include tests/build-tools/28.0.2/aapt-output-duplicate.permisssions_9999999.txt
include tests/build-tools/28.0.2/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/28.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/28.0.2/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/28.0.2/aapt-output-org.droidtr.keyboard_34.txt
include tests/build-tools/28.0.2/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/28.0.3/aapt-output-com.example.test.helloworld_1.txt
@@ -540,119 +527,85 @@ include tests/build-tools/28.0.3/aapt-output-info.guardianproject.urzip_100.txt
include tests/build-tools/28.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
include tests/build-tools/28.0.3/aapt-output-no.min.target.sdk_987.txt
include tests/build-tools/28.0.3/aapt-output-obb.main.oldversion_1444412523.txt
include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101613.txt
include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101615.txt
include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101617.txt
include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt
include tests/build-tools/28.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/generate.sh
include tests/check-fdroid-apk
include tests/com.fake.IpaApp_1000000000001.ipa
include tests/config.yml
include tests/config/antiFeatures.yml
include tests/config/categories.yml
include tests/config/de/antiFeatures.yml
include tests/config/fa/antiFeatures.yml
include tests/config/ic_antifeature_ads.xml
include tests/config/ic_antifeature_disabledalgorithm.xml
include tests/config/ic_antifeature_knownvuln.xml
include tests/config/ic_antifeature_nonfreeadd.xml
include tests/config/ic_antifeature_nonfreeassets.xml
include tests/config/ic_antifeature_nonfreedep.xml
include tests/config/ic_antifeature_nonfreenet.xml
include tests/config/ic_antifeature_nosourcesince.xml
include tests/config/ic_antifeature_nsfw.xml
include tests/config/ic_antifeature_tracking.xml
include tests/config/ic_antifeature_upstreamnonfree.xml
include tests/config/ro/antiFeatures.yml
include tests/config/zh-rCN/antiFeatures.yml
include tests/corrupt-featureGraphic.png
include tests/common.TestCase
include tests/complete-ci-tests
include tests/config.py
include tests/deploy.TestCase
include tests/description-parsing.py
include tests/dummy-keystore.jks
include tests/dump_internal_metadata_format.py
include tests/exception.TestCase
include tests/extra/manual-vmtools-test.py
include tests/funding-usernames.yaml
include tests/get_android_tools_versions/android-ndk-r10e/RELEASE.TXT
include tests/get_android_tools_versions/android-sdk/ndk-bundle/package.xml
include tests/get_android_tools_versions/android-sdk/ndk-bundle/source.properties
include tests/get_android_tools_versions/android-sdk/ndk/11.2.2725575/source.properties
include tests/get_android_tools_versions/android-sdk/ndk/17.2.4988734/source.properties
include tests/get_android_tools_versions/android-sdk/ndk/21.3.6528147/source.properties
include tests/get_android_tools_versions/android-sdk/patcher/v4/source.properties
include tests/get_android_tools_versions/android-sdk/platforms/android-30/source.properties
include tests/get_android_tools_versions/android-sdk/skiaparser/1/source.properties
include tests/get_android_tools_versions/android-sdk/tools/source.properties
include tests/getsig/getsig.java
include tests/getsig/make.sh
include tests/getsig/run.sh
include tests/gnupghome/pubring.gpg
include tests/gnupghome/random_seed
include tests/gnupghome/secring.gpg
include tests/gnupghome/trustdb.gpg
include tests/gradle-maven-blocks.yaml
include tests/gradle-release-checksums.py
include tests/import_proxy.py
include tests/import.TestCase
include tests/index.TestCase
include tests/install.TestCase
include tests/IsMD5Disabled.java
include tests/issue-1128-min-sdk-30-poc.apk
include tests/issue-1128-poc1.apk
include tests/issue-1128-poc2.apk
include tests/issue-1128-poc3a.apk
include tests/issue-1128-poc3b.apk
include tests/janus.apk
include tests/key-tricks.py
include tests/keystore.jks
include tests/metadata-rewrite-yml/app.with.special.build.params.yml
include tests/metadata-rewrite-yml/fake.ota.update.yml
include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml
include tests/lint.TestCase
include tests/metadata/apk/info.guardianproject.urzip.yaml
include tests/metadata/apk/org.dyndns.fules.ck.yaml
include tests/metadata/app.with.special.build.params.yml
include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Ads.txt
include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Tracking.txt
include tests/metadata/app.with.special.build.params/en-US/antifeatures/Ads.txt
include tests/metadata/app.with.special.build.params/en-US/antifeatures/NoSourceSince.txt
include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/49_Tracking.txt
include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/50_Ads.txt
include tests/metadata/com.politedroid.yml
include tests/metadata/dump/app.with.special.build.params.yaml
include tests/metadata/dump/com.politedroid.yaml
include tests/metadata/dump/org.adaway.yaml
include tests/metadata/dump/org.smssecure.smssecure.yaml
include tests/metadata/dump/org.videolan.vlc.yaml
include tests/metadata/duplicate.permisssions.yml
include tests/metadata/fake.ota.update.yml
include tests/metadata/info.guardianproject.checkey.yml
include tests/metadata/info.guardianproject.checkey/en-US/description.txt
include tests/metadata/info.guardianproject.checkey/en-US/name.txt
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png
include tests/metadata/info.guardianproject.checkey/en-US/summary.txt
include tests/metadata/info.guardianproject.checkey/ja-JP/name.txt
include tests/metadata/info.guardianproject.urzip.yml
|
||||
include tests/metadata/info.guardianproject.checkey.yml
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/100.txt
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/default.txt
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/full_description.txt
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/images/featureGraphic.png
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/images/icon.png
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/short_description.txt
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/title.txt
|
||||
include tests/metadata/info.guardianproject.urzip/en-US/video.txt
|
||||
include tests/metadata/info.guardianproject.urzip.yml
|
||||
include tests/metadata/info.zwanenburg.caffeinetile.yml
|
||||
include tests/metadata/no.min.target.sdk.yml
|
||||
include tests/metadata/obb.main.oldversion.yml
|
||||
include tests/metadata/obb.main.twoversions.yml
|
||||
include tests/metadata/obb.mainpatch.current.yml
|
||||
include tests/metadata/obb.main.twoversions.yml
|
||||
include tests/metadata/org.adaway.yml
|
||||
include tests/metadata/org.fdroid.ci.test.app.yml
|
||||
include tests/metadata/org.fdroid.fdroid.yml
|
||||
include tests/metadata/org.maxsdkversion.yml
|
||||
include tests/metadata/org.smssecure.smssecure.yml
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.RSA
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.SF
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/134/MANIFEST.MF
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.RSA
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.SF
|
||||
include tests/metadata/org.smssecure.smssecure/signatures/135/MANIFEST.MF
|
||||
include tests/metadata/org.smssecure.smssecure.yml
|
||||
include tests/metadata/org.videolan.vlc.yml
|
||||
include tests/metadata/raw.template.yml
|
||||
include tests/metadata-rewrite-yml/app.with.special.build.params.yml
|
||||
include tests/metadata-rewrite-yml/fake.ota.update.yml
|
||||
include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml
|
||||
include tests/metadata/souch.smsbypass.yml
|
||||
include tests/metadata.TestCase
|
||||
include tests/minimal_targetsdk_30_unsigned.apk
|
||||
include tests/Norway_bouvet_europe_2.obf.zip
|
||||
include tests/no_targetsdk_minsdk1_unsigned.apk
|
||||
include tests/no_targetsdk_minsdk30_unsigned.apk
|
||||
include tests/openssl-version-check-test.py
|
||||
|
@ -661,17 +614,16 @@ include tests/org.bitbucket.tickytacky.mirrormirror_2.apk
|
|||
include tests/org.bitbucket.tickytacky.mirrormirror_3.apk
|
||||
include tests/org.bitbucket.tickytacky.mirrormirror_4.apk
|
||||
include tests/org.dyndns.fules.ck_20.apk
|
||||
include tests/org.sajeg.fallingblocks_3.apk
|
||||
include tests/publish.TestCase
|
||||
include tests/repo/categories.txt
|
||||
include tests/repo/com.example.test.helloworld_1.apk
|
||||
include tests/repo/com.politedroid_3.apk
|
||||
include tests/repo/com.politedroid_4.apk
|
||||
include tests/repo/com.politedroid_5.apk
|
||||
include tests/repo/com.politedroid_6.apk
|
||||
include tests/repo/duplicate.permisssions_9999999.apk
|
||||
include tests/repo/entry.json
|
||||
include tests/repo/fake.ota.update_1234.zip
|
||||
include tests/repo/index-v1.json
|
||||
include tests/repo/index-v2.json
|
||||
include tests/repo/index.xml
|
||||
include tests/repo/info.zwanenburg.caffeinetile_4.apk
|
||||
include tests/repo/main.1101613.obb.main.twoversions.obb
|
||||
|
@ -680,17 +632,16 @@ include tests/repo/main.1434483388.obb.main.oldversion.obb
|
|||
include tests/repo/main.1619.obb.mainpatch.current.obb
|
||||
include tests/repo/no.min.target.sdk_987.apk
|
||||
include tests/repo/obb.main.oldversion_1444412523.apk
|
||||
include tests/repo/obb.main.twoversions_1101613.apk
|
||||
include tests/repo/obb.main.twoversions_1101615.apk
|
||||
include tests/repo/obb.main.twoversions_1101617.apk
|
||||
include tests/repo/obb.main.twoversions_1101617_src.tar.gz
|
||||
include tests/repo/obb.mainpatch.current_1619_another-release-key.apk
|
||||
include tests/repo/obb.mainpatch.current_1619.apk
|
||||
include tests/repo/obb.mainpatch.current/en-US/featureGraphic.png
|
||||
include tests/repo/obb.mainpatch.current/en-US/icon.png
|
||||
include tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png
|
||||
include tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png
|
||||
include tests/repo/obb.mainpatch.current_1619.apk
|
||||
include tests/repo/obb.mainpatch.current_1619_another-release-key.apk
|
||||
include tests/repo/org.maxsdkversion_4.apk
|
||||
include tests/repo/obb.main.twoversions_1101613.apk
|
||||
include tests/repo/obb.main.twoversions_1101615.apk
|
||||
include tests/repo/obb.main.twoversions_1101617.apk
|
||||
include tests/repo/obb.main.twoversions_1101617_src.tar.gz
|
||||
include tests/repo/org.videolan.vlc/en-US/icon.png
|
||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png
|
||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png
|
||||
|
@ -702,16 +653,16 @@ include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png
|
|||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png
|
||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png
|
||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png
|
||||
|
@ -721,24 +672,13 @@ include tests/repo/souch.smsbypass_9.apk
|
|||
include tests/repo/urzip-*.apk
|
||||
include tests/repo/v1.v2.sig_1020.apk
|
||||
include tests/run-tests
|
||||
include tests/SANAPPSI.RSA
|
||||
include tests/SANAPPSI.SF
|
||||
include tests/shared_test_code.py
|
||||
include tests/signindex/guardianproject-v1.jar
|
||||
include tests/scanner.TestCase
|
||||
include tests/signatures.TestCase
|
||||
include tests/signindex/guardianproject.jar
|
||||
include tests/signindex/guardianproject-v1.jar
|
||||
include tests/signindex/testy.jar
|
||||
include tests/signindex/unsigned.jar
|
||||
include tests/source-files/at.bitfire.davdroid/build.gradle
|
||||
include tests/source-files/catalog.test/app/build.gradle
|
||||
include tests/source-files/catalog.test/build.gradle.kts
|
||||
include tests/source-files/catalog.test/buildSrc/build.gradle.kts
|
||||
include tests/source-files/catalog.test/buildSrc/settings.gradle.kts
|
||||
include tests/source-files/catalog.test/buildSrc2/build.gradle.kts
|
||||
include tests/source-files/catalog.test/buildSrc2/settings.gradle.kts
|
||||
include tests/source-files/catalog.test/core/build.gradle
|
||||
include tests/source-files/catalog.test/gradle/libs.versions.toml
|
||||
include tests/source-files/catalog.test/libs.versions.toml
|
||||
include tests/source-files/catalog.test/settings.gradle.kts
|
||||
include tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle
|
||||
include tests/source-files/cn.wildfirechat.chat/build.gradle
|
||||
include tests/source-files/cn.wildfirechat.chat/chat/build.gradle
|
||||
|
@ -754,12 +694,6 @@ include tests/source-files/com.anpmech.launcher/app/build.gradle
|
|||
include tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml
|
||||
include tests/source-files/com.anpmech.launcher/build.gradle
|
||||
include tests/source-files/com.anpmech.launcher/settings.gradle
|
||||
include tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle
|
||||
include tests/source-files/com.github.shadowsocks/core/build.gradle.kts
|
||||
include tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts
|
||||
include tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml
|
||||
include tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml
|
||||
include tests/source-files/com.infomaniak.mail/settings.gradle
|
||||
include tests/source-files/com.integreight.onesheeld/build.gradle
|
||||
include tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties
|
||||
include tests/source-files/com.integreight.onesheeld/localeapi/build.gradle
|
||||
|
@ -773,69 +707,37 @@ include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/ma
|
|||
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle
|
||||
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml
|
||||
include tests/source-files/com.integreight.onesheeld/settings.gradle
|
||||
include tests/source-files/com.jens.automation2/app/build.gradle
|
||||
include tests/source-files/com.jens.automation2/build.gradle
|
||||
include tests/source-files/com.kunzisoft.testcase/build.gradle
|
||||
include tests/source-files/com.lolo.io.onelist/app/build.gradle.kts
|
||||
include tests/source-files/com.lolo.io.onelist/build.gradle.kts
|
||||
include tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml
|
||||
include tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties
|
||||
include tests/source-files/com.lolo.io.onelist/settings.gradle
|
||||
include tests/source-files/com.nextcloud.client/build.gradle
|
||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt
|
||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt
|
||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt
|
||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt
|
||||
include tests/source-files/com.nextcloud.client/build.gradle
|
||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt
|
||||
include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt
|
||||
include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt
|
||||
include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt
|
||||
include tests/source-files/com.seafile.seadroid2/app/build.gradle
|
||||
include tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts
|
||||
include tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml
|
||||
include tests/source-files/de.varengold.activeTAN/build.gradle
|
||||
include tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts
|
||||
include tests/source-files/eu.siacs.conversations/build.gradle
|
||||
include tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt
|
||||
include tests/source-files/fdroid/fdroidclient/AndroidManifest.xml
|
||||
include tests/source-files/fdroid/fdroidclient/build.gradle
|
||||
include tests/source-files/firebase-allowlisted/app/build.gradle
|
||||
include tests/source-files/firebase-allowlisted/build.gradle
|
||||
include tests/source-files/firebase-suspect/app/build.gradle
|
||||
include tests/source-files/firebase-suspect/build.gradle
|
||||
include tests/source-files/flavor.test/build.gradle
|
||||
include tests/source-files/info.guardianproject.ripple/build.gradle
|
||||
include tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml
|
||||
include tests/source-files/lockfile.test/flutter/pubspec.lock
|
||||
include tests/source-files/lockfile.test/flutter/pubspec.yaml
|
||||
include tests/source-files/lockfile.test/javascript/package.json
|
||||
include tests/source-files/lockfile.test/javascript/yarn.lock
|
||||
include tests/source-files/lockfile.test/rust/subdir/Cargo.lock
|
||||
include tests/source-files/lockfile.test/rust/subdir/Cargo.toml
|
||||
include tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml
|
||||
include tests/source-files/lockfile.test/rust/subdir2/Cargo.toml
|
||||
include tests/source-files/firebase-whitelisted/app/build.gradle
|
||||
include tests/source-files/firebase-whitelisted/build.gradle
|
||||
include tests/source-files/open-keychain/open-keychain/build.gradle
|
||||
include tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle
|
||||
include tests/source-files/org.mozilla.rocket/app/build.gradle
|
||||
include tests/source-files/org.noise_planet.noisecapture/app/build.gradle
|
||||
include tests/source-files/org.noise_planet.noisecapture/settings.gradle
|
||||
include tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle
|
||||
include tests/source-files/org.piepmeyer.gauguin/build.gradle.kts
|
||||
include tests/source-files/org.piepmeyer.gauguin/libs.versions.toml
|
||||
include tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts
|
||||
include tests/source-files/org.tasks/app/build.gradle.kts
|
||||
include tests/source-files/org.tasks/build.gradle
|
||||
include tests/source-files/org.tasks/build.gradle.kts
|
||||
include tests/source-files/org.tasks/buildSrc/build.gradle.kts
|
||||
include tests/source-files/org.tasks/settings.gradle.kts
|
||||
include tests/source-files/osmandapp/osmand/build.gradle
|
||||
include tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties
|
||||
include tests/source-files/OtakuWorld/build.gradle
|
||||
include tests/source-files/realm/react-native/android/build.gradle
|
||||
include tests/source-files/se.manyver/android/app/build.gradle
|
||||
include tests/source-files/se.manyver/android/build.gradle
|
||||
include tests/source-files/se.manyver/android/gradle.properties
|
||||
|
@ -849,36 +751,12 @@ include tests/source-files/ut.ewh.audiometrytest/app/build.gradle
|
|||
include tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml
|
||||
include tests/source-files/ut.ewh.audiometrytest/build.gradle
|
||||
include tests/source-files/ut.ewh.audiometrytest/settings.gradle
|
||||
include tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties
|
||||
include tests/source-files/Zillode/syncthing-silk/build.gradle
|
||||
include tests/SpeedoMeterApp.main_1.apk
|
||||
include tests/test_build.py
|
||||
include tests/test_checkupdates.py
|
||||
include tests/test_common.py
|
||||
include tests/test_deploy.py
|
||||
include tests/test_exception.py
|
||||
include tests/test_gradlew-fdroid
|
||||
include tests/test_import_subcommand.py
|
||||
include tests/test_index.py
|
||||
include tests/test_init.py
|
||||
include tests/test_install.py
|
||||
include tests/test_lint.py
|
||||
include tests/test_main.py
|
||||
include tests/test_metadata.py
|
||||
include tests/test_nightly.py
|
||||
include tests/test_publish.py
|
||||
include tests/test_rewritemeta.py
|
||||
include tests/test_scanner.py
|
||||
include tests/test_signatures.py
|
||||
include tests/test_signindex.py
|
||||
include tests/test_update.py
|
||||
include tests/test_vcs.py
|
||||
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png
|
||||
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png
|
||||
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png
|
||||
include tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/.gitignore
|
||||
include tests/stats/known_apks.txt
|
||||
include tests/testcommon.py
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/build.gradle
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/.gitignore
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml
|
||||
include tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java
|
||||
|
@ -912,34 +790,17 @@ include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-note
|
|||
include tests/triple-t-2/build/org.piwigo.android/build.gradle
|
||||
include tests/triple-t-2/build/org.piwigo.android/settings.gradle
|
||||
include tests/triple-t-2/metadata/org.piwigo.android.yml
|
||||
include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle
|
||||
include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle
|
||||
include tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml
|
||||
include tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml
|
||||
include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt
|
||||
include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt
|
||||
include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt
|
||||
include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-flutter/metadata/fr.emersion.goguma.yml
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt
|
||||
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml
|
||||
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml
|
||||
include tests/update.TestCase
|
||||
include tests/urzip.apk
|
||||
include tests/urzip-badcert.apk
|
||||
include tests/urzip-badsig.apk
|
||||
include tests/urzip-release-unsigned.apk
|
||||
include tests/urzip-release.apk
|
||||
include tests/urzip.apk
|
||||
include tests/urzip-release-unsigned.apk
|
||||
include tests/v2.only.sig_2.apk
|
||||
include tests/valid-package-names/random-package-names
|
||||
include tests/valid-package-names/RandomPackageNames.java
|
||||
include tests/valid-package-names/test.py
|
||||
include tests/__init__.py
|
||||
include tests/Norway_bouvet_europe_2.obf.zip
|
||||
include tests/xref/metadata/aarddict.android.yml
|
||||
include tests/xref/metadata/org.coolreader.yml
|
||||
include tests/xref/metadata/org.geometerplus.zlibrary.ui.android.yml
|
||||
|
|
162
README.md
|
@ -1,116 +1,88 @@
|
|||
<div align="center">
|
||||
<a name="build-status"></a>
|
||||
|
||||
| CI Builds | fdroidserver | buildserver | fdroid build --all | publishing tools |
|
||||
|--------------------------|:-------------:|:-----------:|:------------------:|:----------------:|
|
||||
| GNU/Linux | [](https://gitlab.com/fdroid/fdroidserver/builds) | [](https://jenkins.debian.net/job/reproducible_setup_fdroid_build_environment) | [](https://jenkins.debian.net/job/reproducible_fdroid_build_apps/) | [](https://jenkins.debian.net/job/reproducible_fdroid_test/) |
|
||||
| macOS | [](https://travis-ci.org/f-droid/fdroidserver) | | | |
|
||||
|
||||
<p><img src="https://gitlab.com/fdroid/artwork/-/raw/master/fdroid-logo-2015/fdroid-logo.svg" width="200"></p>
|
||||
|
||||
# F-Droid Server
|
||||
### Tools for maintaining an F-Droid repository system.
|
||||
|
||||
</div>
|
||||
Server for [F-Droid](https://f-droid.org), the Free Software repository system
|
||||
for Android.
|
||||
|
||||
---
|
||||
The F-Droid server tools provide various scripts and tools that are
|
||||
used to maintain the main
|
||||
[F-Droid application repository](https://f-droid.org/packages). You
|
||||
can use these same tools to create your own additional or alternative
|
||||
repository for publishing, or to assist in creating, testing and
|
||||
submitting metadata to the main repository.
|
||||
|
||||
## What is F-Droid Server?
|
||||
|
||||
_fdroidserver_ is a suite of tools to publish and work with collections of
|
||||
Android apps (APK files) and other kinds of packages. It is used to maintain
|
||||
the [f-droid.org application repository](https://f-droid.org/packages). These
|
||||
same tools can be used to create additional or alternative repositories for
|
||||
publishing, or to assist in creating, testing and submitting metadata to the
|
||||
f-droid.org repository, also known as
|
||||
[_fdroiddata_](https://gitlab.com/fdroid/fdroiddata).
|
||||
|
||||
For documentation, please see <https://f-droid.org/docs>.
|
||||
|
||||
In the beginning, _fdroidserver_ was the complete server-side setup that ran
|
||||
f-droid.org. Since then, the website and other parts have been split out into
|
||||
their own projects. The name for this suite of tooling has stayed
|
||||
_fdroidserver_ even though it no longer contains any proper server component.
|
||||
For documentation, please see <https://f-droid.org/docs/>, or you can
|
||||
find the source for the documentation in
|
||||
[fdroid/fdroid-website](https://gitlab.com/fdroid/fdroid-website).
|
||||
|
||||
|
||||
## Installing
|
||||
### What is F-Droid?
|
||||
|
||||
There are many ways to install _fdroidserver_, including using a range of
|
||||
package managers. All of the options are documented on the website:
|
||||
F-Droid is an installable catalogue of FOSS (Free and Open Source Software)
|
||||
applications for the Android platform. The client makes it easy to browse,
|
||||
install, and keep track of updates on your device.
|
||||
|
||||
|
||||
### Installing
|
||||
|
||||
There are many ways to install _fdroidserver_; they are documented on
|
||||
the website:
|
||||
https://f-droid.org/docs/Installing_the_Server_and_Repo_Tools
|
||||
|
||||
|
||||
## Releases
|
||||
|
||||
The production setup of _fdroidserver_ for f-droid.org is run directly from the
|
||||
_master_ branch. This is put into production on a schedule (currently weekly).
|
||||
So development and testing happen in branches. We track branches using
|
||||
merge requests. Therefore, there are many WIP and long-lived merge requests.
|
||||
|
||||
There are also stable releases of _fdroidserver_. These are mostly intended for
|
||||
running custom repositories, where the build process is separate. It can also
|
||||
be useful as a simple way to get started contributing packages to _fdroiddata_,
|
||||
since the stable releases are available in package managers.
|
||||
All sorts of other documentation lives there as well.
|
||||
|
||||
|
||||
## Tests
|
||||
### Tests
|
||||
|
||||
To run the full test suite:
|
||||
|
||||
tests/run-tests
|
||||
|
||||
To run the tests for individual Python modules, see the `tests/test_*.py` files, e.g.:
|
||||
|
||||
python -m unittest tests/test_metadata.py
|
||||
|
||||
It is also possible to run individual tests:
|
||||
|
||||
python -m unittest tests.test_metadata.MetadataTest.test_rewrite_yaml_special_build_params
|
||||
|
||||
There is a growing test suite that has good coverage of a number of key parts of
|
||||
this code base. It does not yet cover all the code, and there are some parts
|
||||
where the technical debt makes it difficult to write unit tests. New tests
|
||||
should be standard Python _unittest_ test cases. Whenever possible, the old
|
||||
tests written in _bash_ in _tests/run-tests_ should be ported to Python.
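As a rough illustration, a new test module could be laid out like the following minimal sketch; the module, class, and test names are invented for this example, and it deliberately does not call any real fdroidserver API:

```python
#!/usr/bin/env python3
# Hypothetical tests/test_example.py: shows only the expected unittest
# structure, it does not exercise fdroidserver itself.

import unittest


class ExampleTest(unittest.TestCase):
    def setUp(self):
        # per-test fixtures go here
        self.build = {'versionCode': 1, 'versionName': '1.0'}

    def test_version_code_is_int(self):
        self.assertIsInstance(self.build['versionCode'], int)


if __name__ == '__main__':
    unittest.main()
```

A module laid out this way can then be run with `python -m unittest`, just like the examples above.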
|
||||
|
||||
This test suite has been built up over time a bit haphazardly, so it is not as clean,
|
||||
organized, or complete as it could be. We welcome contributions. The goal is
|
||||
to move towards standard Python testing patterns and to expand the unit test
|
||||
coverage. Before rearchitecting any parts of it, be sure to [contact
|
||||
us](https://f-droid.org/about) to discuss the changes beforehand.
|
||||
There are many components to the tests for the different parts of
|
||||
this git repo. The most commonly used parts are well tested, while
|
||||
some parts still lack tests. This test suite has been built up over time a
|
||||
bit haphazardly, so it is not as clean, organized, or complete as it
|
||||
could be. We welcome contributions. Before rearchitecting any parts
|
||||
of it, be sure to [contact us](https://f-droid.org/about) to discuss
|
||||
the changes beforehand.
|
||||
|
||||
|
||||
### Additional tests for different linux distributions
|
||||
#### `fdroid` commands
|
||||
|
||||
These tests are also run on various configurations through GitLab CI. This is
|
||||
only enabled for `master@fdroid/fdroidserver` because it takes longer to
|
||||
The test suite for all of the `fdroid` commands is in the _tests/_
|
||||
subdir. _.gitlab-ci.yml_ and _.travis.yml_ run this test suite on
|
||||
various configurations.
|
||||
|
||||
* _tests/complete-ci-tests_ runs _pylint_ and all tests on two
|
||||
different pyvenvs
|
||||
* _tests/run-tests_ runs the whole test suite
|
||||
* _tests/*.TestCase_ are individual unit tests for all of the `fdroid`
|
||||
commands, which can be run separately, e.g. `./update.TestCase`.
|
||||
|
||||
|
||||
#### Additional tests for different linux distributions
|
||||
|
||||
These tests are also run on various distributions through GitLab CI. This is
|
||||
only enabled for `master@fdroid/fdroidserver` because it'll take longer to
|
||||
complete than the regular CI tests. Most of the time you won't need to worry
|
||||
about them, but sometimes it might make sense to also run them for your merge
|
||||
request. In that case you need to remove [these lines from .gitlab-ci.yml](https://gitlab.com/fdroid/fdroidserver/-/blob/0124b9dde99f9cab19c034cbc7d8cc6005a99b48/.gitlab-ci.yml#L90-91)
|
||||
about them, but sometimes it might make sense to also run them for your merge
|
||||
request. In that case you need to remove [these lines from
|
||||
.gitlab-ci.yml](https://gitlab.com/fdroid/fdroidserver/blob/master/.gitlab-ci.yml#L34-35)
|
||||
and push this to a new branch of your fork.
|
||||
|
||||
Alternatively [run them
|
||||
locally](https://docs.gitlab.com/runner/commands/README.html#gitlab-runner-exec)
|
||||
like this: `gitlab-runner exec docker ubuntu_lts`
|
||||
|
||||
#### buildserver
|
||||
|
||||
## Documentation
|
||||
|
||||
The API documentation based on the docstrings gets automatically
|
||||
published [here](https://fdroid.gitlab.io/fdroidserver) on every commit
|
||||
on the `master` branch.
|
||||
|
||||
It can be built locally via
|
||||
|
||||
```bash
|
||||
pip install -e .[docs]
|
||||
cd docs
|
||||
sphinx-apidoc -o ./source ../fdroidserver -M -e
|
||||
sphinx-autogen -o generated source/*.rst
|
||||
make html
|
||||
```
|
||||
|
||||
To additionally lint the code, run
|
||||
```bash
|
||||
pydocstyle fdroidserver --count
|
||||
```
|
||||
|
||||
When writing docstrings you should follow the
|
||||
[numpy style guide](https://numpydoc.readthedocs.io/en/latest/format.html).
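As a quick reference, a docstring in that style looks roughly like this; the function below is invented for illustration and is not part of fdroidserver:

```python
def resize_icon(path, size):
    """Resize an icon to a square of the given edge length.

    Parameters
    ----------
    path : str
        Path to the source image file.
    size : int
        Width and height of the output image, in pixels.

    Returns
    -------
    str
        Path of the resized image file.
    """
```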
|
||||
The tests for the whole build server setup are entirely separate
|
||||
because they require at least 200GB of disk space and 8GB of
|
||||
RAM. These test scripts are in the root of the project, all starting
|
||||
with _jenkins-_ since they are run on https://jenkins.debian.net.
|
||||
|
||||
|
||||
## Translation
|
||||
|
@ -118,16 +90,4 @@ When writing docstrings you should follow the
|
|||
Everything can be translated. See
|
||||
[Translation and Localization](https://f-droid.org/docs/Translation_and_Localization)
|
||||
for more info.
|
||||
|
||||
<div align="center">
|
||||
|
||||
[](https://hosted.weblate.org/engage/f-droid)
|
||||
|
||||
<details>
|
||||
<summary>View translation status for all languages.</summary>
|
||||
|
||||
[](https://hosted.weblate.org/engage/f-droid/?utm_source=widget)
|
||||
|
||||
</details>
|
||||
|
||||
</div>
|
||||
[](https://hosted.weblate.org/engage/f-droid/?utm_source=widget)
|
||||
|
|
|
@ -1,74 +0,0 @@
|
|||
|
||||
FROM debian:bookworm
|
||||
|
||||
ENV LANG=C.UTF-8 \
|
||||
DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN echo Etc/UTC > /etc/timezone \
|
||||
&& echo 'Acquire::Retries "20";' \
|
||||
'APT::Get::Assume-Yes "true";' \
|
||||
'APT::Install-Recommends "0";' \
|
||||
'APT::Install-Suggests "0";' \
|
||||
'Dpkg::Use-Pty "0";' \
|
||||
'quiet "1";' \
|
||||
>> /etc/apt/apt.conf.d/99gitlab
|
||||
|
||||
# provision-apt-proxy was deliberately omitted, it's not relevant in Docker
|
||||
COPY provision-android-ndk \
|
||||
provision-android-sdk \
|
||||
provision-apt-get-install \
|
||||
provision-buildserverid \
|
||||
provision-gradle \
|
||||
setup-env-vars \
|
||||
/opt/buildserver/
|
||||
|
||||
ARG GIT_REV_PARSE_HEAD=unspecified
|
||||
LABEL org.opencontainers.image.revision=$GIT_REV_PARSE_HEAD
|
||||
|
||||
# set up 'vagrant' user for compatibility
|
||||
RUN useradd --create-home -s /bin/bash vagrant && echo -n 'vagrant:vagrant' | chpasswd
|
||||
|
||||
# The provision scripts must be run in the same order as in Vagrantfile
|
||||
# - vagrant needs openssh-client iproute2 ssh sudo
|
||||
# - ansible needs python3
|
||||
#
|
||||
# Debian Docker images will soon default to HTTPS for apt sources, so force it.
|
||||
# https://github.com/debuerreotype/docker-debian-artifacts/issues/15
|
||||
#
|
||||
# Ensure fdroidserver's dependencies are marked manual before purging
|
||||
# unneeded packages; otherwise all its dependencies get purged.
|
||||
#
|
||||
# The official Debian docker images ship without ca-certificates, so
|
||||
# TLS certificates cannot be verified until that is installed. The
|
||||
# following code temporarily turns off TLS verification, and enables
|
||||
# HTTPS, so at least unverified TLS is used for apt-get instead of
|
||||
# plain HTTP. Once ca-certificates is installed, the CA verification
|
||||
# is enabled by removing the newly created config file. This setup
|
||||
# makes the initial `apt-get update` and `apt-get install` look the
|
||||
# same as verified TLS to the network observer and hides the metadata.
|
||||
RUN printf "path-exclude=/usr/share/locale/*\npath-exclude=/usr/share/man/*\npath-exclude=/usr/share/doc/*\npath-include=/usr/share/doc/*/copyright\n" >/etc/dpkg/dpkg.cfg.d/01_nodoc \
|
||||
&& mkdir -p /usr/share/man/man1 \
|
||||
&& echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates \
|
||||
&& find /etc/apt/sources.list* -type f -exec sed -i s,http:,https:, {} \; \
|
||||
&& apt-get update \
|
||||
&& apt-get install ca-certificates \
|
||||
&& rm /etc/apt/apt.conf.d/99nocacertificates \
|
||||
&& apt-get upgrade \
|
||||
&& apt-get dist-upgrade \
|
||||
&& apt-get install openssh-client iproute2 python3 openssh-server sudo \
|
||||
&& bash /opt/buildserver/setup-env-vars /opt/android-sdk \
|
||||
&& . /etc/profile.d/bsenv.sh \
|
||||
&& bash /opt/buildserver/provision-apt-get-install https://deb.debian.org/debian \
|
||||
&& bash /opt/buildserver/provision-android-sdk "tools;25.2.5" \
|
||||
&& bash /opt/buildserver/provision-android-ndk /opt/android-sdk/ndk \
|
||||
&& bash /opt/buildserver/provision-gradle \
|
||||
&& bash /opt/buildserver/provision-buildserverid $GIT_REV_PARSE_HEAD \
|
||||
&& rm -rf /vagrant/cache \
|
||||
&& apt-get autoremove --purge \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Vagrant sudo setup for compatibility
|
||||
RUN echo 'vagrant ALL = NOPASSWD: ALL' > /etc/sudoers.d/vagrant \
|
||||
&& chmod 440 /etc/sudoers.d/vagrant \
|
||||
&& sed -i -e 's/Defaults.*requiretty/#&/' /etc/sudoers
|
76
buildserver/Vagrantfile
vendored
|
@ -1,41 +1,24 @@
|
|||
|
||||
require 'yaml'
|
||||
require 'pathname'
|
||||
require 'fileutils'
|
||||
|
||||
configfile = {
|
||||
'boot_timeout' => 600,
|
||||
'cachedir' => File.join(ENV['HOME'], '.cache', 'fdroidserver'),
|
||||
'cpus' => 1,
|
||||
'debian_mirror' => 'https://deb.debian.org/debian/',
|
||||
'hwvirtex' => 'on',
|
||||
'memory' => 2048,
|
||||
'vm_provider' => 'virtualbox',
|
||||
}
|
||||
|
||||
srvpath = Pathname.new(File.dirname(__FILE__)).realpath
|
||||
configpath = File.join(srvpath, "/Vagrantfile.yaml")
|
||||
if File.exist? configpath
|
||||
c = YAML.load_file(configpath)
|
||||
if c and not c.empty?
|
||||
c.each do |k,v|
|
||||
configfile[k] = v
|
||||
end
|
||||
end
|
||||
else
|
||||
puts "Copying example file to #{configpath}"
|
||||
FileUtils.cp('../examples/Vagrantfile.yaml', configpath)
|
||||
end
|
||||
configfile = YAML.load_file(File.join(srvpath, "/Vagrantfile.yaml"))
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
|
||||
if Vagrant.has_plugin?("vagrant-cachier")
|
||||
# these two caching methods conflict, so only use one at a time
|
||||
if Vagrant.has_plugin?("vagrant-cachier") and not configfile.has_key? "aptcachedir"
|
||||
config.cache.scope = :box
|
||||
config.cache.auto_detect = false
|
||||
config.cache.enable :apt
|
||||
config.cache.enable :chef
|
||||
end
|
||||
|
||||
config.vm.box = "debian/bookworm64"
|
||||
config.vm.box = configfile['basebox']
|
||||
if configfile.has_key? "basebox_version"
|
||||
config.vm.box_version = configfile['basebox_version']
|
||||
end
|
||||
|
||||
if not configfile.has_key? "vm_provider" or configfile["vm_provider"] == "virtualbox"
|
||||
# default to VirtualBox if not set
|
||||
|
@ -53,8 +36,6 @@ Vagrant.configure("2") do |config|
|
|||
libvirt.uri = "qemu:///system"
|
||||
libvirt.cpus = configfile["cpus"]
|
||||
libvirt.memory = configfile["memory"]
|
||||
# Debian Vagrant image is only 20G, so allocate more
|
||||
libvirt.machine_virtual_size = 1024
|
||||
if configfile.has_key? "libvirt_disk_bus"
|
||||
libvirt.disk_bus = configfile["libvirt_disk_bus"]
|
||||
end
|
||||
|
@ -67,8 +48,7 @@ Vagrant.configure("2") do |config|
|
|||
else
|
||||
synced_folder_type = '9p'
|
||||
end
|
||||
config.vm.synced_folder './', '/vagrant', type: synced_folder_type,
|
||||
SharedFoldersEnableSymlinksCreate: false
|
||||
config.vm.synced_folder './', '/vagrant', type: synced_folder_type
|
||||
else
|
||||
abort("No supported VM Provider found, set vm_provider in Vagrantfile.yaml!")
|
||||
end
|
||||
|
@ -80,30 +60,30 @@ Vagrant.configure("2") do |config|
|
|||
args: [configfile["aptproxy"]]
|
||||
end
|
||||
|
||||
config.vm.synced_folder configfile["cachedir"], '/vagrant/cache',
|
||||
create: true, type: synced_folder_type
|
||||
|
||||
# buildserver/ is shared to the VM's /vagrant by default so the old
|
||||
# default does not need a custom mount
|
||||
if configfile["cachedir"] != "buildserver/cache"
|
||||
config.vm.synced_folder configfile["cachedir"], '/vagrant/cache',
|
||||
create: true, type: synced_folder_type
|
||||
end
|
||||
# Make sure dir exists to mount to, since buildserver/ is
|
||||
# automatically mounted as /vagrant in the guest VM. This is more
|
||||
# necessary with 9p synced folders
|
||||
Dir.mkdir('cache') unless File.exist?('cache')
|
||||
Dir.mkdir('cache') unless File.exists?('cache')
|
||||
|
||||
# Root partition needs to be resized to the new allocated space
|
||||
config.vm.provision "shell", inline: <<-SHELL
|
||||
growpart -v -u auto /dev/vda 1
|
||||
resize2fs /dev/vda1
|
||||
SHELL
|
||||
# cache .deb packages on the host via a mount trick
|
||||
if configfile.has_key? "aptcachedir"
|
||||
config.vm.synced_folder configfile["aptcachedir"], "/var/cache/apt/archives",
|
||||
owner: 'root', group: 'root', create: true
|
||||
end
|
||||
|
||||
config.vm.provision "shell", name: "setup-env-vars", path: "setup-env-vars",
|
||||
args: ["/opt/android-sdk"]
|
||||
config.vm.provision "shell", name: "apt-get-install", path: "provision-apt-get-install",
|
||||
config.vm.provision "shell", path: "setup-env-vars",
|
||||
args: ["/home/vagrant/android-sdk"]
|
||||
config.vm.provision "shell", path: "provision-apt-get-install",
|
||||
args: [configfile['debian_mirror']]
|
||||
config.vm.provision "shell", name: "android-sdk", path: "provision-android-sdk"
|
||||
config.vm.provision "shell", name: "android-ndk", path: "provision-android-ndk",
|
||||
args: ["/opt/android-sdk/ndk"]
|
||||
config.vm.provision "shell", name: "gradle", path: "provision-gradle"
|
||||
config.vm.provision "shell", name: "disable-analytics", path: "provision-disable-analytics"
|
||||
config.vm.provision "shell", name: "buildserverid", path: "provision-buildserverid",
|
||||
args: [`git rev-parse HEAD`]
|
||||
config.vm.provision "shell", path: "provision-android-sdk"
|
||||
config.vm.provision "shell", path: "provision-android-ndk",
|
||||
args: ["/home/vagrant/android-ndk"]
|
||||
config.vm.provision "shell", path: "provision-gradle"
|
||||
|
||||
end
|
||||
|
|
|
@ -1,2 +1,19 @@
|
|||
sdk_path: /opt/android-sdk
|
||||
sdk_path: /home/vagrant/android-sdk
|
||||
ndk_paths:
|
||||
r10e: /home/vagrant/android-ndk/r10e
|
||||
r11c: /home/vagrant/android-ndk/r11c
|
||||
r12b: /home/vagrant/android-ndk/r12b
|
||||
r13b: /home/vagrant/android-ndk/r13b
|
||||
r14b: /home/vagrant/android-ndk/r14b
|
||||
r15c: /home/vagrant/android-ndk/r15c
|
||||
r16b: /home/vagrant/android-ndk/r16b
|
||||
r17c: /home/vagrant/android-ndk/r17c
|
||||
r18b: /home/vagrant/android-ndk/r18b
|
||||
r19c: /home/vagrant/android-ndk/r19c
|
||||
r20b: /home/vagrant/android-ndk/r20b
|
||||
r21d: /home/vagrant/android-ndk/r21d
|
||||
|
||||
java_paths:
|
||||
8: /usr/lib/jvm/java-8-openjdk-amd64
|
||||
|
||||
gradle_version_dir: /opt/gradle/versions
|
||||
|
|
|
@ -1,30 +1,26 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# $1 is the root dir to install the NDKs into
|
||||
# $2 and after are the NDK releases to install
|
||||
|
||||
echo $0
|
||||
set -e
|
||||
set -x
|
||||
|
||||
NDK_BASE=$1
|
||||
shift
|
||||
|
||||
test -e $NDK_BASE || mkdir -p $NDK_BASE
|
||||
cd $NDK_BASE
|
||||
|
||||
for version in $@; do
|
||||
if [ ! -e $NDK_BASE/r10e ]; then
|
||||
7zr x /vagrant/cache/android-ndk-r10e-linux-x86_64.bin > /dev/null
|
||||
mv android-ndk-r10e r10e
|
||||
fi
|
||||
|
||||
for version in r11c r12b r13b r14b r15c r16b r17c r18b r19c r20b r21d; do
|
||||
if [ ! -e ${NDK_BASE}/${version} ]; then
|
||||
unzip /vagrant/cache/android-ndk-${version}-linux*.zip > /dev/null
|
||||
mv android-ndk-${version} \
|
||||
`sed -En 's,^Pkg.Revision *= *(.+),\1,p' android-ndk-${version}/source.properties`
|
||||
unzip /vagrant/cache/android-ndk-${version}-linux-x86_64.zip > /dev/null
|
||||
mv android-ndk-${version} ${version}
|
||||
fi
|
||||
done
|
||||
|
||||
# allow gradle/etc to install missing NDK versions
|
||||
chgrp vagrant $NDK_BASE
|
||||
chmod g+w $NDK_BASE
|
||||
|
||||
# ensure all users can read and execute the NDK
|
||||
chmod -R a+rX $NDK_BASE/
|
||||
find $NDK_BASE/ -type f -executable -exec chmod a+x -- {} +
|
||||
find $NDK_BASE/ -type f -executable -print0 | xargs -0 chmod a+x
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
|
||||
echo $0
|
||||
set -e
|
||||
|
@ -9,6 +10,19 @@ if [ -z $ANDROID_HOME ]; then
|
|||
exit 1
|
||||
fi
|
||||
|
||||
# TODO remove the rm, this should work with an existing ANDROID_HOME
|
||||
if [ ! -x $ANDROID_HOME/tools/android ]; then
|
||||
rm -rf $ANDROID_HOME
|
||||
mkdir ${ANDROID_HOME}
|
||||
mkdir ${ANDROID_HOME}/temp
|
||||
mkdir ${ANDROID_HOME}/platforms
|
||||
mkdir ${ANDROID_HOME}/build-tools
|
||||
cd $ANDROID_HOME
|
||||
|
||||
tools=`ls -1 /vagrant/cache/tools_*.zip | sort -n | tail -1`
|
||||
unzip -qq $tools
|
||||
fi
|
||||
|
||||
# disable the repositories of proprietary stuff
|
||||
disabled="
|
||||
@version@=1
|
||||
|
@ -26,96 +40,59 @@ for line in $disabled; do
|
|||
echo $line >> ${HOME}/.android/sites-settings.cfg
|
||||
done
|
||||
|
||||
# Include old makebuildserver cache that is a Vagrant synced_folder
|
||||
# for sdkmanager to use.
|
||||
cachedir=$HOME/.cache/sdkmanager
|
||||
mkdir -p $cachedir
|
||||
pushd $cachedir
|
||||
for f in /vagrant/cache/*.zip; do
|
||||
test -e $f && ln -s $f
|
||||
|
||||
cd /vagrant/cache
|
||||
|
||||
# make links for `android update sdk` to use and delete
|
||||
blacklist="build-tools_r17-linux.zip
|
||||
build-tools_r18.0.1-linux.zip
|
||||
build-tools_r18.1-linux.zip
|
||||
build-tools_r18.1.1-linux.zip
|
||||
build-tools_r19-linux.zip
|
||||
build-tools_r19.0.1-linux.zip
|
||||
build-tools_r19.0.2-linux.zip
|
||||
build-tools_r19.0.3-linux.zip
|
||||
build-tools_r21-linux.zip
|
||||
build-tools_r21.0.1-linux.zip
|
||||
build-tools_r21.0.2-linux.zip
|
||||
build-tools_r21.1-linux.zip
|
||||
build-tools_r21.1.1-linux.zip
|
||||
build-tools_r22-linux.zip
|
||||
build-tools_r23-linux.zip
|
||||
android-1.5_r04-linux.zip
|
||||
android-1.6_r03-linux.zip
|
||||
android-2.0_r01-linux.zip
|
||||
android-2.0.1_r01-linux.zip"
|
||||
latestm2=`ls -1 android_m2repository*.zip | sort -n | tail -1`
|
||||
for f in $latestm2 android-[0-9]*.zip platform-[0-9]*.zip build-tools_r*-linux.zip; do
|
||||
rm -f ${ANDROID_HOME}/temp/$f
|
||||
if [[ $blacklist != *$f* ]]; then
|
||||
ln -s /vagrant/cache/$f ${ANDROID_HOME}/temp/
|
||||
fi
|
||||
done
|
||||
popd
|
||||
|
||||
# TODO do not preinstall 'tools' or 'platform-tools' at all, app builds don't need them
|
||||
packages="
|
||||
tools;25.2.5
|
||||
platform-tools
|
||||
build-tools;19.1.0
|
||||
build-tools;20.0.0
|
||||
build-tools;21.1.2
|
||||
build-tools;22.0.1
|
||||
build-tools;23.0.1
|
||||
build-tools;23.0.2
|
||||
build-tools;23.0.3
|
||||
build-tools;24.0.0
|
||||
build-tools;24.0.1
|
||||
build-tools;24.0.2
|
||||
build-tools;24.0.3
|
||||
build-tools;25.0.0
|
||||
build-tools;25.0.1
|
||||
build-tools;25.0.2
|
||||
build-tools;25.0.3
|
||||
build-tools;26.0.0
|
||||
build-tools;26.0.1
|
||||
build-tools;26.0.2
|
||||
build-tools;26.0.3
|
||||
build-tools;27.0.0
|
||||
build-tools;27.0.1
|
||||
build-tools;27.0.2
|
||||
build-tools;27.0.3
|
||||
build-tools;28.0.0
|
||||
build-tools;28.0.1
|
||||
build-tools;28.0.2
|
||||
build-tools;28.0.3
|
||||
build-tools;29.0.2
|
||||
build-tools;29.0.3
|
||||
build-tools;30.0.0
|
||||
build-tools;30.0.1
|
||||
build-tools;30.0.2
|
||||
build-tools;30.0.3
|
||||
build-tools;31.0.0
|
||||
build-tools;32.0.0
|
||||
build-tools;33.0.0
|
||||
platforms;android-10
|
||||
platforms;android-11
|
||||
platforms;android-12
|
||||
platforms;android-13
|
||||
platforms;android-14
|
||||
platforms;android-15
|
||||
platforms;android-16
|
||||
platforms;android-17
|
||||
platforms;android-18
|
||||
platforms;android-19
|
||||
platforms;android-20
|
||||
platforms;android-21
|
||||
platforms;android-22
|
||||
platforms;android-23
|
||||
platforms;android-24
|
||||
platforms;android-25
|
||||
platforms;android-26
|
||||
platforms;android-27
|
||||
platforms;android-28
|
||||
platforms;android-29
|
||||
platforms;android-30
|
||||
platforms;android-31
|
||||
platforms;android-32
|
||||
platforms;android-33
|
||||
"
|
||||
# install all cached platforms
|
||||
cached=""
|
||||
for f in `ls -1 android-[0-9]*.zip platform-[0-9]*.zip`; do
|
||||
sdk=`unzip -c $f "*/build.prop" | sed -n 's,^ro.build.version.sdk=,,p'`
|
||||
cached=,android-${sdk}${cached}
|
||||
done
|
||||
|
||||
if [ $# -gt 0 ]; then
|
||||
echo found args
|
||||
packages=$@
|
||||
fi
|
||||
# install all cached build-tools
|
||||
for f in `ls -1 build-tools*.zip`; do
|
||||
ver=`unzip -c $f "*/source.properties" | sed -n 's,^Pkg.Revision=,,p'`
|
||||
if [[ $ver == 24.0.0 ]] && [[ $f =~ .*r24\.0\.1.* ]]; then
|
||||
# 24.0.1 has the wrong revision in the zip
|
||||
ver=24.0.1
|
||||
fi
|
||||
cached=,build-tools-${ver}${cached}
|
||||
done
|
||||
|
||||
# temporary test of whether this script ran. It will change once
|
||||
# 'tools' is no longer installed by default.
|
||||
if [ ! -x $ANDROID_HOME/tools/bin/sdkmanager ]; then
|
||||
mkdir -p ${ANDROID_HOME}/
|
||||
sdkmanager $packages
|
||||
fi
|
||||
${ANDROID_HOME}/tools/android update sdk --no-ui --all \
|
||||
--filter platform-tools,extra-android-m2repository${cached} <<EOH
|
||||
y
|
||||
|
||||
# this hacked cache should not end up in the Vagrant box or Docker image
|
||||
rm -rf $cachedir
|
||||
EOH
|
||||
|
||||
mkdir -p $ANDROID_HOME/licenses/
|
||||
|
||||
|
@ -140,19 +117,17 @@ cat <<EOF > $ANDROID_HOME/licenses/android-sdk-preview-license-old
|
|||
84831b9409646a918e30573bab4c9c91346d8abd
|
||||
EOF
|
||||
|
||||
cat <<EOF > $ANDROID_HOME/licenses/intel-android-extra-license
|
||||
echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout;1.0.1"
|
||||
echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout-solver;1.0.1"
|
||||
echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout;1.0.2"
|
||||
echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout-solver;1.0.2"
|
||||
|
||||
d975f751698a77b662f1254ddbeed3901e976f5a
|
||||
EOF
|
||||
|
||||
chmod a+X $(dirname $ANDROID_HOME/)
|
||||
chmod -R a+rX $ANDROID_HOME/
|
||||
chgrp vagrant $ANDROID_HOME
|
||||
chmod g+w $ANDROID_HOME
|
||||
find $ANDROID_HOME/ -type f -executable -print0 | xargs -0 chmod a+x
|
||||
|
||||
# allow gradle to install newer build-tools and platforms
|
||||
mkdir -p $ANDROID_HOME/{build-tools,platforms}
|
||||
chgrp vagrant $ANDROID_HOME/{build-tools,platforms}
|
||||
chmod g+w $ANDROID_HOME/{build-tools,platforms}
|
||||
|
||||
|
@ -160,8 +135,3 @@ chmod g+w $ANDROID_HOME/{build-tools,platforms}
|
|||
test -d $ANDROID_HOME/extras/m2repository || mkdir -p $ANDROID_HOME/extras/m2repository
|
||||
find $ANDROID_HOME/extras/m2repository -type d | xargs chgrp vagrant
|
||||
find $ANDROID_HOME/extras/m2repository -type d | xargs chmod g+w
|
||||
|
||||
# allow gradle/sdkmanager to install extras;android;m2repository
|
||||
test -d $ANDROID_HOME/extras/android || mkdir -p $ANDROID_HOME/extras/android
|
||||
find $ANDROID_HOME/extras/android -type d | xargs chgrp vagrant
|
||||
find $ANDROID_HOME/extras/android -type d | xargs chmod g+w
|
||||
|
|
|
@ -10,7 +10,7 @@ export DEBIAN_FRONTEND=noninteractive
|
|||
printf 'APT::Install-Recommends "0";\nAPT::Install-Suggests "0";\n' \
|
||||
> /etc/apt/apt.conf.d/99no-install-recommends
|
||||
|
||||
printf 'Acquire::Retries "20";\n' \
|
||||
printf 'APT::Acquire::Retries "20";\n' \
|
||||
> /etc/apt/apt.conf.d/99acquire-retries
|
||||
|
||||
cat <<EOF > /etc/apt/apt.conf.d/99no-auto-updates
|
||||
|
@ -27,113 +27,115 @@ Dpkg::Use-Pty "0";
|
|||
quiet "1";
|
||||
EOF
|
||||
|
||||
cat <<EOF > /etc/apt/apt.conf.d/99confdef
|
||||
Dpkg::Options { "--force-confdef"; };
|
||||
EOF
|
||||
|
||||
echo "man-db man-db/auto-update boolean false" | debconf-set-selections
|
||||
|
||||
if echo $debian_mirror | grep '^https' 2>&1 > /dev/null; then
|
||||
apt-get update || apt-get update
|
||||
apt-get install ca-certificates
|
||||
apt-get install apt-transport-https ca-certificates
|
||||
fi
|
||||
|
||||
cat << EOF > /etc/apt/sources.list
|
||||
deb ${debian_mirror} bookworm main
|
||||
deb https://security.debian.org/debian-security bookworm-security main
|
||||
deb ${debian_mirror} bookworm-updates main
|
||||
deb ${debian_mirror} stretch main
|
||||
deb http://security.debian.org/debian-security stretch/updates main
|
||||
deb ${debian_mirror} stretch-updates main
|
||||
EOF
|
||||
echo "deb ${debian_mirror} bookworm-backports main" > /etc/apt/sources.list.d/backports.list
|
||||
echo "deb ${debian_mirror} stretch-backports main" > /etc/apt/sources.list.d/stretch-backports.list
|
||||
echo "deb ${debian_mirror} stretch-backports-sloppy main" > /etc/apt/sources.list.d/stretch-backports-sloppy.list
|
||||
echo "deb ${debian_mirror} testing main" > /etc/apt/sources.list.d/testing.list
|
||||
printf "Package: *\nPin: release o=Debian,a=testing\nPin-Priority: -300\n" > /etc/apt/preferences.d/debian-testing
|
||||
|
||||
dpkg --add-architecture i386
|
||||
|
||||
apt-get update || apt-get update
|
||||
|
||||
# purge things that might come from the base box, but we don't want
|
||||
# https://salsa.debian.org/cloud-team/debian-vagrant-images/-/tree/master/config_space/package_config
|
||||
# cat config_space/package_config/* | sort -u | grep -v '[A-Z#]'
|
||||
|
||||
purge="
|
||||
apt-listchanges
|
||||
apt-utils
|
||||
bash-completion
|
||||
bind9-*
|
||||
bsdextrautils
|
||||
bzip2
|
||||
chrony
|
||||
cloud-utils
|
||||
cron
|
||||
cron-daemon-common
|
||||
dbus
|
||||
debconf-i18n
|
||||
debian-faq
|
||||
dmidecode
|
||||
doc-debian
|
||||
fdisk
|
||||
file
|
||||
groff-base
|
||||
inetutils-telnet
|
||||
krb5-locales
|
||||
less
|
||||
locales
|
||||
logrotate
|
||||
lsof
|
||||
manpages
|
||||
nano
|
||||
ncurses-term
|
||||
netcat-traditional
|
||||
pciutils
|
||||
reportbug
|
||||
rsyslog
|
||||
tasksel
|
||||
traceroute
|
||||
unattended-upgrades
|
||||
usrmerge
|
||||
vim-*
|
||||
wamerican
|
||||
wget
|
||||
whiptail
|
||||
xz-utils
|
||||
"
|
||||
# clean up files from packages to be purged, then purge the packages
|
||||
rm -rf /var/run/dbus /var/log/unattended-upgrades
|
||||
apt-get purge $purge
|
||||
|
||||
apt-get upgrade --download-only
|
||||
apt-get upgrade
|
||||
|
||||
# again after upgrade in case of keyring changes
|
||||
apt-get update || apt-get update
|
||||
|
||||
packages="
|
||||
androguard/bookworm-backports
|
||||
apksigner
|
||||
default-jdk-headless
|
||||
default-jre-headless
|
||||
androguard/stretch-backports
|
||||
ant
|
||||
asn1c
|
||||
ant-contrib
|
||||
autoconf
|
||||
autoconf2.13
|
||||
automake
|
||||
automake1.11
|
||||
autopoint
|
||||
bison
|
||||
bzr
|
||||
ca-certificates-java
|
||||
cmake
|
||||
curl
|
||||
dexdump
|
||||
fdroidserver
|
||||
disorderfs
|
||||
expect
|
||||
faketime
|
||||
flex
|
||||
gettext
|
||||
gettext-base
|
||||
git-core
|
||||
git-svn
|
||||
gnupg
|
||||
gperf
|
||||
gpg/stretch-backports-sloppy
|
||||
gpgconf/stretch-backports-sloppy
|
||||
libassuan0/stretch-backports
|
||||
libgpg-error0/stretch-backports
|
||||
javacc
|
||||
libarchive-zip-perl
|
||||
libexpat1-dev
|
||||
libgcc1:i386
|
||||
libglib2.0-dev
|
||||
liblzma-dev
|
||||
libncurses5:i386
|
||||
librsvg2-bin
|
||||
libsaxonb-java
|
||||
libssl-dev
|
||||
libstdc++6:i386
|
||||
libtool
|
||||
libtool-bin
|
||||
make
|
||||
maven
|
||||
mercurial
|
||||
patch
|
||||
python3-magic
|
||||
python3-packaging
|
||||
nasm
|
||||
openjdk-8-jre-headless
|
||||
openjdk-8-jdk-headless
|
||||
optipng
|
||||
p7zip
|
||||
pkg-config
|
||||
python-gnupg
|
||||
python-lxml
|
||||
python-magic
|
||||
python-pip
|
||||
python-setuptools
|
||||
python3-asn1crypto/stretch-backports
|
||||
python3-defusedxml
|
||||
python3-git
|
||||
python3-gitdb
|
||||
python3-gnupg
|
||||
python3-pip
|
||||
python3-pyasn1
|
||||
python3-pyasn1-modules
|
||||
python3-requests
|
||||
python3-setuptools
|
||||
python3-smmap
|
||||
python3-yaml
|
||||
python3-ruamel.yaml
|
||||
python3-pil
|
||||
python3-paramiko
|
||||
quilt
|
||||
rsync
|
||||
sdkmanager/bookworm-backports
|
||||
scons
|
||||
sqlite3
|
||||
subversion
|
||||
sudo
|
||||
swig
|
||||
unzip
|
||||
xsltproc
|
||||
yasm
|
||||
zip
|
||||
zlib1g:i386
|
||||
"
|
||||
|
||||
apt-get install $packages --download-only
|
||||
apt-get install $packages
|
||||
|
||||
# fdroidserver comes from git, it was installed just for dependencies
|
||||
apt-mark manual `apt-cache depends fdroidserver | sed -nE 's,^[| ]*Depends: ([a-z0-9 -]+),\1,p'`
|
||||
apt-get purge fdroidserver
|
||||
|
||||
# clean up things that will become outdated anyway
|
||||
apt-get autoremove --purge
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
highestjava=`update-java-alternatives --list | sort -n | tail -1 | cut -d ' ' -f 1`
|
||||
update-java-alternatives --set $highestjava
|
||||
|
||||
# configure headless openjdk to work without gtk accessibility dependencies
|
||||
sed -i -e 's@\(assistive_technologies=org.GNOME.Accessibility.AtkWrapper\)@#\1@' /etc/java-8-openjdk/accessibility.properties
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash -e
|
||||
|
||||
test -n "$1"
|
||||
|
||||
echo "Writing buildserver ID ...ID is $1"
|
||||
set -x
|
||||
echo "$1" > /home/vagrant/buildserverid
|
||||
# sync data before we halt() the machine, we had an empty buildserverid otherwise
|
||||
sync
|
|
@ -1,15 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
# Flutter
|
||||
# https://github.com/flutter/flutter/issues/73657
|
||||
flutter_conf=/home/vagrant/.flutter
|
||||
cat <<EOF > $flutter_conf
|
||||
{
|
||||
"enabled": false
|
||||
}
|
||||
EOF
|
||||
chown -R vagrant:vagrant $flutter_conf
|
||||
chmod -R 0644 $flutter_conf
|
||||
|
|
@@ -10,29 +10,21 @@ vergte() {

test -e /opt/gradle/versions || mkdir -p /opt/gradle/versions
cd /opt/gradle/versions

glob="/vagrant/cache/gradle-*.zip"
if compgen -G $glob; then  # test if glob matches anything
f=$(ls -1 --sort=version --group-directories-first $glob | tail -1)
for f in /vagrant/cache/gradle-*.zip; do
ver=`echo $f | sed 's,.*gradle-\([0-9][0-9.]*\).*\.zip,\1,'`
# only use versions greater or equal 2.2.1
if vergte $ver 2.2.1 && [ ! -d /opt/gradle/versions/${ver} ]; then
unzip -qq $f
mv gradle-${ver} /opt/gradle/versions/${ver}
fi
fi
done

chmod -R a+rX /opt/gradle

test -e /opt/gradle/bin || mkdir -p /opt/gradle/bin
git clone --depth 1 https://gitlab.com/fdroid/gradlew-fdroid.git /home/vagrant/gradlew-fdroid/
chmod 0755 /home/vagrant/gradlew-fdroid/gradlew-fdroid
chmod -R u+rwX,a+rX,go-w /home/vagrant/gradlew-fdroid/
ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /opt/gradle/bin/gradle
ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /usr/local/bin/

chown -h vagrant:vagrant /opt/gradle/bin/gradle
chown vagrant:vagrant /opt/gradle/versions
ln -fs /home/vagrant/fdroidserver/gradlew-fdroid /opt/gradle/bin/gradle
chown -h vagrant.vagrant /opt/gradle/bin/gradle
chown vagrant.vagrant /opt/gradle/versions
chmod 0755 /opt/gradle/versions

GRADLE_HOME=/home/vagrant/.gradle

@@ -49,5 +41,5 @@ systemProp.org.gradle.internal.http.connectionTimeout=600000
systemProp.org.gradle.internal.http.socketTimeout=600000
EOF

chown -R vagrant:vagrant $GRADLE_HOME/
chown -R vagrant.vagrant $GRADLE_HOME/
chmod -R a+rX $GRADLE_HOME/
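The `vergte` helper named in the hunk header above does a greater-or-equal check on dotted version strings, so that only Gradle 2.2.1 or newer gets unpacked into /opt/gradle/versions. As an editorial illustration only (this is not part of the provisioning scripts, and the function name is invented), the same comparison could be written in Python like this:

def version_gte(a, b):
    """Return True if dotted version string a is >= b, e.g. '7.6.1' >= '2.2.1'."""
    def parse(v):
        return [int(part) for part in v.split('.')]
    va, vb = parse(a), parse(b)
    # pad the shorter list with zeros so '2.2' compares like '2.2.0'
    width = max(len(va), len(vb))
    va += [0] * (width - len(va))
    vb += [0] * (width - len(vb))
    return va >= vb

assert version_gte('7.6.1', '2.2.1')
assert not version_gte('2.2', '2.2.1')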
@@ -12,16 +12,9 @@ echo "# generated on "`date` > $bsenv
echo export ANDROID_HOME=$1 >> $bsenv
echo 'export PATH=$PATH:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools:/opt/gradle/bin' >> $bsenv
echo "export DEBIAN_FRONTEND=noninteractive" >> $bsenv
echo 'export home_vagrant=/home/vagrant' >> $bsenv
echo 'export fdroidserver=$home_vagrant/fdroidserver' >> $bsenv
echo 'export LC_ALL=C.UTF-8' >> $bsenv

chmod 0644 $bsenv

# make sure that SSH never hangs at a password or key prompt
mkdir -p /etc/ssh/ssh_config.d/
cat << EOF >> /etc/ssh/ssh_config.d/fdroid
Host *
StrictHostKeyChecking yes
BatchMode yes
EOF
printf ' StrictHostKeyChecking yes' >> /etc/ssh/ssh_config
printf ' BatchMode yes' >> /etc/ssh/config
@ -83,7 +83,7 @@ __complete_options() {
|
|||
__complete_build() {
|
||||
opts="-v -q -l -s -t -f -a"
|
||||
|
||||
lopts="--verbose --quiet --latest --stop --test --server --skip-scan --scan-binary --no-tarball --force --all --no-refresh"
|
||||
lopts="--verbose --quiet --latest --stop --test --server --reset-server --skip-scan --scan-binary --no-tarball --force --all --no-refresh"
|
||||
case "${prev}" in
|
||||
:)
|
||||
__vercode
|
||||
|
@ -109,8 +109,8 @@ __complete_gpgsign() {
|
|||
}
|
||||
|
||||
__complete_install() {
|
||||
opts="-v -q -a -p -n -y"
|
||||
lopts="--verbose --quiet --all --color --no-color --privacy-mode --no-privacy-mode --no --yes"
|
||||
opts="-v -q"
|
||||
lopts="--verbose --quiet --all"
|
||||
case "${cur}" in
|
||||
-*)
|
||||
__complete_options
|
||||
|
@ -155,7 +155,7 @@ __complete_publish() {
|
|||
|
||||
__complete_checkupdates() {
|
||||
opts="-v -q"
|
||||
lopts="--verbose --quiet --auto --autoonly --commit --allow-dirty"
|
||||
lopts="--verbose --quiet --auto --autoonly --commit --gplay --allow-dirty"
|
||||
case "${cur}" in
|
||||
-*)
|
||||
__complete_options
|
||||
|
@ -251,7 +251,7 @@ __complete_btlog() {
|
|||
|
||||
__complete_mirror() {
|
||||
opts="-v"
|
||||
lopts="--all --archive --build-logs --color --no-color --pgp-signatures --src-tarballs --output-dir"
|
||||
lopts="--all --archive --build-logs --pgp-signatures --src-tarballs --output-dir"
|
||||
__complete_options
|
||||
}
|
||||
|
||||
|
@ -261,6 +261,12 @@ __complete_nightly() {
|
|||
__complete_options
|
||||
}
|
||||
|
||||
__complete_stats() {
|
||||
opts="-v -q -d"
|
||||
lopts="--verbose --quiet --download"
|
||||
__complete_options
|
||||
}
|
||||
|
||||
__complete_deploy() {
|
||||
opts="-i -v -q"
|
||||
lopts="--identity-file --local-copy-dir --sync-from-local-copy-dir
|
||||
|
@ -270,14 +276,12 @@ __complete_deploy() {
|
|||
|
||||
__complete_signatures() {
|
||||
opts="-v -q"
|
||||
lopts="--verbose --color --no-color --no-check-https"
|
||||
lopts="--verbose --no-check-https"
|
||||
case "${cur}" in
|
||||
-*)
|
||||
__complete_options
|
||||
return 0;;
|
||||
esac
|
||||
_filedir 'apk'
|
||||
return 0
|
||||
}
|
||||
|
||||
__complete_signindex() {
|
||||
|
@ -289,7 +293,7 @@ __complete_signindex() {
|
|||
__complete_init() {
|
||||
opts="-v -q -d"
|
||||
lopts="--verbose --quiet --distinguished-name --keystore
|
||||
--repo-keyalias --android-home --no-prompt --color --no-color"
|
||||
--repo-keyalias --android-home --no-prompt"
|
||||
__complete_options
|
||||
}
|
||||
|
||||
|
@ -298,6 +302,7 @@ btlog \
|
|||
build \
|
||||
checkupdates \
|
||||
deploy \
|
||||
dscanner \
|
||||
gpgsign \
|
||||
import \
|
||||
init \
|
||||
|
@ -311,6 +316,7 @@ rewritemeta \
|
|||
scanner \
|
||||
signatures \
|
||||
signindex \
|
||||
stats \
|
||||
update \
|
||||
verify \
|
||||
"
|
||||
|
|
|
@@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@ -1,35 +0,0 @@
|
|||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
|
@ -1,78 +0,0 @@
|
|||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
# list see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../../fdroidserver'))
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = 'fdroidserver'
|
||||
copyright = '2021, The F-Droid Project'
|
||||
author = 'The F-Droid Project'
|
||||
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'numpydoc',
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.autosummary',
|
||||
"sphinx.ext.intersphinx",
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = []
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = "pydata_sphinx_theme"
|
||||
|
||||
html_theme_options = {
|
||||
"gitlab_url": "https://gitlab.com/fdroid/fdroidserver",
|
||||
"show_prev_next": False,
|
||||
"navbar_end": ["search-field.html", "navbar-icon-links.html"],
|
||||
}
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
html_sidebars = {
|
||||
"**": [],
|
||||
}
|
||||
|
||||
#html_sidebars = {
|
||||
# '**': ['globaltoc.html', 'sourcelink.html', 'searchbox.html'],
|
||||
# 'using/windows': ['windowssidebar.html', 'searchbox.html'],
|
||||
#}
|
||||
|
||||
html_split_index = True
|
||||
#numpydoc_validation_checks = {"all"}
|
||||
|
||||
intersphinx_mapping = {
|
||||
"python": ("https://docs.python.org/3/", None),
|
||||
}
|
|
@@ -1,20 +0,0 @@
.. fdroidserver documentation master file, created by
   sphinx-quickstart on Mon May 3 10:06:52 2021.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to fdroidserver's documentation!
========================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:

These pages contain the autogenerated module documentation based on the current `sources <https://gitlab.com/fdroid/fdroidserver/-/tree/master/fdroidserver>`_.

Indices and tables
==================


* Under :ref:`modindex` the different fdroidserver modules are listed.
* In :ref:`genindex` you'll find all methods sorted alphabetically.
@@ -1,54 +0,0 @@
---

# You may want to alter these before running ./makebuildserver

# In the process of setting up the build server, many gigs of files
# are downloaded (Android SDK components, gradle, etc). These are
# cached so that they are not redownloaded each time. By default,
# these are stored in ~/.cache/fdroidserver
#
# cachedir: buildserver/cache

# To specify which Debian mirror the build server VM should use, by
# default it uses http.debian.net, which auto-detects which is the
# best mirror to use.
#
# debian_mirror: https://debian.osuosl.org/debian/

# The amount of RAM the build server will have (default: 2048)
# memory: 3584

# The number of CPUs the build server will have
# cpus: 1

# Debian package proxy server - if you have one
# aptproxy: http://192.168.0.19:8000

# If this is running on an older machine or on a virtualized system,
# it can run a lot slower. If the provisioning fails with a warning
# about the timeout, extend the timeout here. (default: 600 seconds)
#
# boot_timeout: 1200

# By default, this whole process uses VirtualBox as the provider, but
# QEMU+KVM is also supported via the libvirt plugin to vagrant. If
# this is run within a KVM guest, then libvirt's QEMU+KVM will be used
# automatically. It can also be manually enabled by uncommenting
# below:
#
# vm_provider: libvirt

# By default libvirt uses 'virtio' for both network and disk drivers.
# Some systems (eg. nesting VMware ESXi) do not support virtio. As a
# workaround for such rare cases, this setting allows to configure
# KVM/libvirt to emulate hardware rather than using virtio.
#
# libvirt_disk_bus: sata
# libvirt_nic_model_type: rtl8139

# Sometimes, it is not possible to use the 9p synced folder type with
# libvirt, like if running a KVM buildserver instance inside of a
# VMware ESXi guest. In that case, using NFS or another method is
# required.
#
# synced_folder_type: nfs
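The deleted example above is plain YAML, so the commented-out keys become ordinary top-level mappings once uncommented. A minimal, hedged sketch of how such a file could be read and merged over its documented defaults (the use of PyYAML and the exact loading logic here are assumptions for illustration, not makebuildserver's actual code; the default values come from the comments above):

import yaml  # PyYAML, assumed available

DEFAULTS = {
    'memory': 2048,
    'cpus': 1,
    'boot_timeout': 600,
    'debian_mirror': 'http://http.debian.net/debian/',
}

def load_makebuildserver_config(path='makebuildserver.config.yml'):
    config = dict(DEFAULTS)
    try:
        with open(path) as fp:
            loaded = yaml.safe_load(fp) or {}
    except FileNotFoundError:
        loaded = {}
    config.update(loaded)
    return config

print(load_makebuildserver_config()['memory'])  # 2048 unless overridden in the YAML file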
@ -5,20 +5,23 @@
|
|||
# Custom path to the Android SDK, defaults to $ANDROID_HOME
|
||||
# sdk_path: $ANDROID_HOME
|
||||
|
||||
# Paths to installed versions of the Android NDK. This will be
|
||||
# automatically filled out from well known sources like
|
||||
# $ANDROID_HOME/ndk-bundle and $ANDROID_HOME/ndk/*. If a required
|
||||
# version is missing in the buildserver VM, it will be automatically
|
||||
# downloaded and installed into the standard $ANDROID_HOME/ndk/
|
||||
# directory. Manually setting it here will override the auto-detected
|
||||
# values. The keys can either be the "release" (e.g. r21e) or the
|
||||
# "revision" (e.g. 21.4.7075529).
|
||||
#
|
||||
# Custom paths to various versions of the Android NDK, defaults to 'r12b' set
|
||||
# to $ANDROID_NDK. Most users will have the latest at $ANDROID_NDK, which is
|
||||
# used by default. If a version is missing or assigned to None, it is assumed
|
||||
# not installed.
|
||||
# ndk_paths:
|
||||
# r10e: $ANDROID_HOME/android-ndk-r10e
|
||||
# r17: ""
|
||||
# 21.4.7075529: ~/Android/Ndk
|
||||
# r22b: null
|
||||
# r10e: None
|
||||
# r11c: None
|
||||
# r12b: $ANDROID_NDK
|
||||
# r13b: None
|
||||
# r14b: None
|
||||
# r15c: None
|
||||
# r16b: None
|
||||
# r17c: None
|
||||
# r18b: None
|
||||
# r19c: None
|
||||
# r20b: None
|
||||
# r21d: None
|
||||
|
||||
# Directory to store downloaded tools in (i.e. gradle versions)
|
||||
# By default, these are stored in ~/.cache/fdroidserver
|
||||
|
@ -48,46 +51,25 @@
|
|||
# The same policy is applied to the archive repo, if there is one.
|
||||
# repo_maxage: 0
|
||||
|
||||
# Canonical URL of the repositoy, needs to end in /repo. Is is used to identity
|
||||
# the repo in the client, as well.
|
||||
# repo_url: https://MyFirstFDroidRepo.org/fdroid/repo
|
||||
#
|
||||
# Base URL for per-package pages on the website of this repo,
|
||||
# i.e. https://f-droid.org/packages/<appid>/ This should be accessible
|
||||
# with a browser. Setting it to null or not setting this disables the
|
||||
# feature.
|
||||
# repo_web_base_url: https://MyFirstFDroidRepo.org/packages/
|
||||
#
|
||||
# repo_name: My First F-Droid Repo Demo
|
||||
# repo_description: >-
|
||||
# This is a repository of apps to be used with F-Droid. Applications
|
||||
# in this repository are either official binaries built by the
|
||||
# original application developers, or are binaries built from source
|
||||
# by the admin of f-droid.org using the tools on
|
||||
# https://gitlab.com/fdroid.
|
||||
repo_url: https://MyFirstFDroidRepo.org/fdroid/repo
|
||||
repo_name: My First F-Droid Repo Demo
|
||||
repo_icon: fdroid-icon.png
|
||||
repo_description: >-
|
||||
This is a repository of apps to be used with F-Droid. Applications in this
|
||||
repository are either official binaries built by the original application
|
||||
developers, or are binaries built from source by the admin of f-droid.org
|
||||
using the tools on https://gitlab.com/u/fdroid.
|
||||
|
||||
# As above, but for the archive repo.
|
||||
#
|
||||
# archive_url: https://f-droid.org/archive
|
||||
# archive_web_base_url:
|
||||
# archive_name: My First F-Droid Archive Demo
|
||||
# archive_description: >-
|
||||
# The repository of older versions of packages from the main demo repository.
|
||||
|
||||
# archive_older sets the number of versions kept in the main repo, with all
|
||||
# older ones going to the archive. Set it to 0, and there will be no archive
|
||||
# repository, and no need to define the other archive_ values.
|
||||
#
|
||||
# archive_older: 3
|
||||
|
||||
# The repo's icon defaults to a file called 'icon.png' in the 'icons'
|
||||
# folder for each section, e.g. repo/icons/icon.png and
|
||||
# archive/icons/icon.png. To use a different filename for the icons,
|
||||
# set the filename here. You must still copy it into place in
|
||||
# repo/icons/ and/or archive/icons/.
|
||||
#
|
||||
# repo_icon: myicon.png
|
||||
# archive_icon: myicon.png
|
||||
archive_older: 3
|
||||
archive_url: https://f-droid.org/archive
|
||||
archive_name: My First F-Droid Archive Demo
|
||||
archive_icon: fdroid-icon.png
|
||||
archive_description: >-
|
||||
The repository of older versions of packages from the main demo repository.
|
||||
|
||||
# This allows a specific kind of insecure APK to be included in the
|
||||
# 'repo' section. Since April 2017, APK signatures that use MD5 are
|
||||
|
@ -117,7 +99,7 @@
|
|||
# Optionally, override home directory for gpg
|
||||
# gpghome: /home/fdroid/somewhere/else/.gnupg
|
||||
|
||||
# The ID of a GPG key for making detached signatures for APKs. Optional.
|
||||
# The ID of a GPG key for making detached signatures for apks. Optional.
|
||||
# gpgkey: 1DBA2E89
|
||||
|
||||
# The key (from the keystore defined below) to be used for signing the
|
||||
|
@@ -186,12 +168,6 @@
# serverwebroot:
#   - foo.com:/usr/share/nginx/www/fdroid
#   - bar.info:/var/www/fdroid
#
# There is a special mode to only deploy the index file:
#
# serverwebroot:
#   - url: 'me@b.az:/srv/fdroid'
#     index_only: true


# When running fdroid processes on a remote server, it is possible to
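As this hunk documents, serverwebroot entries can be either plain `user@host:/path` strings or maps with `url` and `index_only` keys, so anything consuming the setting has to handle both shapes. A hedged sketch of that normalization (the helper name is invented; fdroidserver's own parsing may differ):

def normalize_serverwebroot(entries):
    """Turn config serverwebroot entries into dicts with url/index_only keys."""
    normalized = []
    for entry in entries or []:
        if isinstance(entry, str):
            normalized.append({'url': entry, 'index_only': False})
        elif isinstance(entry, dict):
            normalized.append({'url': entry['url'],
                               'index_only': bool(entry.get('index_only', False))})
    return normalized

print(normalize_serverwebroot(['foo.com:/usr/share/nginx/www/fdroid',
                               {'url': 'me@b.az:/srv/fdroid', 'index_only': True}]))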
@@ -206,49 +182,14 @@
# deploy_process_logs: true

# The full URL to a git remote repository. You can include
# multiple servers to mirror to by adding strings to a YAML list or map.
# multiple servers to mirror to by wrapping the whole thing in {} or [], and
# including the servergitmirrors strings in a comma-separated list.
# Servers listed here will also be automatically inserted in the mirrors list.
#
# servergitmirrors: https://github.com/user/repo
# servergitmirrors:
#   - https://github.com/user/repo
#   - https://gitlab.com/user/repo
#
# servergitmirrors:
#   - url: https://github.com/user/repo
#   - url: https://gitlab.com/user/repo
#     index_only: true


# These settings allow using `fdroid deploy` for publishing APK files from
# your repository to GitHub Releases. (You should also run `fdroid update`
# every time before deploying to GitHub releases to update index files.) Here's
# an example for this deployment automation:
# https://github.com/f-droid/fdroidclient/releases/
#
# Currently, versions which are assigned to a release channel (e.g. alpha or
# beta releases) are ignored.
#
# In the example below, tokens are read from environment variables. Putting
# tokens directly into the config file is also supported but discouraged. It is
# highly recommended to use a "Fine-grained personal access token", which is
# restricted to the minimum required permissions, which are:
# * Metadata - read
# * Contents - read/write
# (https://github.com/settings/personal-access-tokens/new)
#
# github_token: {env: GITHUB_TOKEN}
# github_releases:
#   - projectUrl: https://github.com/f-droid/fdroidclient
#     packageNames:
#       - org.fdroid.basic
#       - org.fdroid.fdroid
#     release_notes_prepend: |
#       Re-post of official F-Droid App release from https://f-droid.org
#   - projectUrl: https://github.com/example/app
#     packageNames: com.example.app
#     token: {env: GITHUB_TOKEN_EXAMPLE}


# Most git hosting services have hard size limits for each git repo.
# `fdroid deploy` will delete the git history when the git mirror repo
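The `{env: GITHUB_TOKEN}` form documented in the hunk above pulls the secret from an environment variable instead of storing it in config.yml. A minimal, hedged sketch of that lookup on a plain dict value (the function name is illustrative and not fdroidserver's API):

import os

def resolve_secret(value):
    """Resolve an {env: VARNAME} config value, passing other values through."""
    if isinstance(value, dict) and 'env' in value:
        return os.environ.get(value['env'])
    return value

token = resolve_secret({'env': 'GITHUB_TOKEN'})  # reads $GITHUB_TOKEN, or None if unset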
@@ -269,18 +210,6 @@
# mirrors:
#   - https://foo.bar/fdroid
#   - http://foobarfoobarfoobar.onion/fdroid
#
# Or additional metadata can also be included by adding key/value pairs:
#
# mirrors:
#   - url: https://foo.bar/fdroid
#     countryCode: BA
#   - url: http://foobarfoobarfoobar.onion/fdroid
#
# The list of mirrors can also be maintained in config/mirrors.yml, a
# standalone YAML file in the optional configuration directory. In
# that case, mirrors: should be removed from this file (config.yml).


# optionally specify which identity file to use when using rsync or git over SSH
#
@ -305,33 +234,19 @@
|
|||
#
|
||||
# sync_from_local_copy_dir: true
|
||||
|
||||
# To deploy to an AWS S3 "bucket" in the US East region, set the
|
||||
# bucket name in the config, then set the environment variables
|
||||
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY using the values from
|
||||
# the AWS Management Console. See
|
||||
# https://rclone.org/s3/#authentication
|
||||
|
||||
# To upload the repo to an Amazon S3 bucket using `fdroid server
|
||||
# update`. Warning, this deletes and recreates the whole fdroid/
|
||||
# directory each time. This prefers s3cmd, but can also use
|
||||
# apache-libcloud. To customize how s3cmd interacts with the cloud
|
||||
# provider, create a 's3cfg' file next to this file (config.yml), and
|
||||
# those settings will be used instead of any 'aws' variable below.
|
||||
# Secrets can be fetched from environment variables to ensure that
|
||||
# they are not leaked as part of this file.
|
||||
#
|
||||
# awsbucket: myawsfdroidbucket
|
||||
|
||||
|
||||
# For extended options for syncing to cloud drive and object store
|
||||
# services, `fdroid deploy' wraps Rclone. Rclone is a full featured
|
||||
# sync tool for a huge variety of cloud services. Set up your services
|
||||
# using `rclone config`, then specify each config name to deploy the
|
||||
# awsbucket: to. Using rclone_config: overrides the default AWS S3 US
|
||||
# East setup, and will only sync to the services actually specified.
|
||||
#
|
||||
# awsbucket: myawsfdroidbucket
|
||||
# rclone_config:
|
||||
# - aws-sample-config
|
||||
# - rclone-supported-service-config
|
||||
|
||||
|
||||
# By default Rclone uses the user's default configuration file at
|
||||
# ~/.config/rclone/rclone.conf To specify a custom configuration file,
|
||||
# please add the full path to the configuration file as below.
|
||||
#
|
||||
# path_to_custom_rclone_config: /home/mycomputer/somedir/example.conf
|
||||
# awsbucket: myawsfdroid
|
||||
# awsaccesskeyid: SEE0CHAITHEIMAUR2USA
|
||||
# awssecretkey: {env: awssecretkey}
|
||||
|
||||
|
||||
# If you want to force 'fdroid server' to use a non-standard serverwebroot.
|
||||
|
@ -342,12 +257,12 @@
|
|||
# nonstandardwebroot: false
|
||||
|
||||
|
||||
# If you want to upload the release APK file to androidobservatory.org
|
||||
# If you want to upload the release apk file to androidobservatory.org
|
||||
#
|
||||
# androidobservatory: false
|
||||
|
||||
|
||||
# If you want to upload the release APK file to virustotal.com
|
||||
# If you want to upload the release apk file to virustotal.com
|
||||
# You have to enter your profile apikey to enable the upload.
|
||||
#
|
||||
# virustotal_apikey: 9872987234982734
|
||||
|
@ -357,6 +272,13 @@
|
|||
# virustotal_apikey: {env: virustotal_apikey}
|
||||
|
||||
|
||||
# The build logs can be posted to a mediawiki instance, like on f-droid.org.
|
||||
# wiki_protocol: http
|
||||
# wiki_server: server
|
||||
# wiki_path: /wiki/
|
||||
# wiki_user: login
|
||||
# wiki_password: 1234
|
||||
|
||||
# Keep a log of all generated index files in a git repo to provide a
|
||||
# "binary transparency" log for anyone to check the history of the
|
||||
# binaries that are published. This is in the form of a "git remote",
|
||||
|
@ -364,6 +286,27 @@
|
|||
# configured to allow push access (e.g. ssh key, username/password, etc)
|
||||
# binary_transparency_remote: git@gitlab.com:fdroid/binary-transparency-log.git
|
||||
|
||||
# Only set this to true when running a repository where you want to generate
|
||||
# stats, and only then on the master build servers, not a development
|
||||
# machine. If you want to keep the "added" and "last updated" dates for each
|
||||
# app and APK in your repo, then you should enable this.
|
||||
# update_stats: true
|
||||
|
||||
# When used with stats, this is a list of IP addresses that are ignored for
|
||||
# calculation purposes.
|
||||
# stats_ignore: []
|
||||
|
||||
# Server stats logs are retrieved from. Required when update_stats is True.
|
||||
# stats_server: example.com
|
||||
|
||||
# User stats logs are retrieved from. Required when update_stats is True.
|
||||
# stats_user: bob
|
||||
|
||||
# Use the following to push stats to a Carbon instance:
|
||||
# stats_to_carbon: false
|
||||
# carbon_host: 0.0.0.0
|
||||
# carbon_port: 2003
|
||||
|
||||
# Set this to true to always use a build server. This saves specifying the
|
||||
# --server option on dedicated secure build server hosts.
|
||||
# build_server_always: true
|
||||
|
@ -403,31 +346,9 @@
|
|||
# generating our default list. (https://pypi.org/project/spdx-license-list)
|
||||
#
|
||||
# You can override our default list of allowed licenes by setting this option.
|
||||
# Just supply a custom list of licene names you would like to allow. To disable
|
||||
# checking licenses by the linter, assign an empty value to lint_licenses.
|
||||
# Just supply a custom list of licene names you would like to allow. Setting
|
||||
# this to `None` disables this lint check.
|
||||
#
|
||||
# lint_licenses:
|
||||
# - Custom-License-A
|
||||
# - Another-License
|
||||
|
||||
# `fdroid scanner` can scan for signatures from various sources. By default
|
||||
# it's configured to only use F-Droids official SUSS collection. We have
|
||||
# support for these special collections:
|
||||
# * 'exodus' - official exodus-privacy.org signatures
|
||||
# * 'etip' - exodus privacy investigation platfrom community contributed
|
||||
# signatures
|
||||
# * 'suss' - official F-Droid: Suspicious or Unwanted Software Signatures
|
||||
# You can also configure scanner to use custom collections of signatures here.
|
||||
# They have to follow the format specified in the SUSS readme.
|
||||
# (https://gitlab.com/fdroid/fdroid-suss/#cache-file-data-format)
|
||||
#
|
||||
# scanner_signature_sources:
|
||||
# - suss
|
||||
# - exodus
|
||||
# - https://example.com/signatures.json
|
||||
|
||||
# The scanner can use signature sources from the internet. These are
|
||||
# cached locally. To force them to be refreshed from the network on
|
||||
# every run, set this to true:
|
||||
#
|
||||
# refresh_scanner: true
|
||||
|
|
BIN  examples/fdroid-icon.png  (new binary file, 3.3 KiB; not shown)
@@ -1,46 +0,0 @@
#!/usr/bin/env python3
#
# an fdroid plugin for resetting app VCSs to the latest version for the metadata

import argparse
import logging

from fdroidserver import _, common, metadata
from fdroidserver.exception import VCSException

fdroid_summary = 'reset app VCSs to the latest version'


def main():
    parser = argparse.ArgumentParser(
        usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
    )
    common.setup_global_opts(parser)
    parser.add_argument(
        "appid",
        nargs='*',
        help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"),
    )
    metadata.add_metadata_arguments(parser)
    options = common.parse_args(parser)
    apps = common.read_app_args(
        options.appid, allow_version_codes=True, sort_by_time=True
    )
    common.read_config()

    for appid, app in apps.items():
        if "Builds" in app and len(app["Builds"]) > 0:
            build = app.get('Builds')[-1]
            logging.info(_("Cleaning up '{appid}' VCS").format(appid=appid))
            try:
                vcs, build_dir = common.setup_vcs(app)
                vcs.gotorevision(build.commit)
                if build.submodules:
                    vcs.initsubmodules()

            except VCSException:
                pass


if __name__ == "__main__":
    main()
@ -1,62 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# an fdroid plugin for exporting a repo's keystore in standard PEM format
|
||||
|
||||
import os
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from fdroidserver import common
|
||||
from fdroidserver.common import FDroidPopen
|
||||
from fdroidserver.exception import BuildException
|
||||
|
||||
fdroid_summary = "export the repo's keystore file to a NitroKey HSM"
|
||||
|
||||
|
||||
def run(cmd, error):
|
||||
envs = {'LC_ALL': 'C.UTF-8',
|
||||
'PIN': config['smartcard_pin'],
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config['keypass']}
|
||||
p = FDroidPopen(cmd, envs=envs)
|
||||
if p.returncode != 0:
|
||||
raise BuildException(error, p.output)
|
||||
|
||||
|
||||
def main():
|
||||
global config
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
common.parse_args(parser)
|
||||
config = common.read_config()
|
||||
destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_')
|
||||
exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_')
|
||||
if os.path.exists(destkeystore) or os.path.exists(exportkeystore):
|
||||
raise BuildException('%s exists!' % exportkeystore)
|
||||
run([config['keytool'], '-importkeystore',
|
||||
'-srckeystore', config['keystore'],
|
||||
'-srcalias', config['repo_keyalias'],
|
||||
'-srcstorepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-srckeypass:env', 'FDROID_KEY_PASS',
|
||||
'-destkeystore', destkeystore,
|
||||
'-deststorepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-deststoretype', 'PKCS12'],
|
||||
'Failed to convert to PKCS12!')
|
||||
# run(['openssl', 'pkcs12', '-in', destkeystore,
|
||||
# '-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys',
|
||||
# '-out', exportkeystore,
|
||||
# '-passout', 'env:FDROID_KEY_STORE_PASS'],
|
||||
# 'Failed to convert to PEM!')
|
||||
run(['pkcs15-init', '--delete-objects', 'privkey,pubkey',
|
||||
'--id', '3', '--store-private-key', destkeystore,
|
||||
'--format', 'pkcs12', '--auth-id', '3',
|
||||
'--verify-pin', '--pin', 'env:PIN'],
|
||||
'')
|
||||
run(['pkcs15-init', '--delete-objects', 'privkey,pubkey',
|
||||
'--id', '2', '--store-private-key', destkeystore,
|
||||
'--format', 'pkcs12', '--auth-id', '3',
|
||||
'--verify-pin', '--pin', 'env:PIN'],
|
||||
'')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,49 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# an fdroid plugin for exporting a repo's keystore in standard PEM format
|
||||
|
||||
import os
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from fdroidserver import common
|
||||
from fdroidserver.common import FDroidPopen
|
||||
from fdroidserver.exception import BuildException
|
||||
|
||||
fdroid_summary = 'export the keystore in standard PEM format'
|
||||
|
||||
|
||||
def main():
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
common.parse_args(parser)
|
||||
config = common.read_config()
|
||||
env_vars = {'LC_ALL': 'C.UTF-8',
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config['keypass']}
|
||||
destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_')
|
||||
exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_')
|
||||
if os.path.exists(destkeystore) or os.path.exists(exportkeystore):
|
||||
raise BuildException('%s exists!' % exportkeystore)
|
||||
p = FDroidPopen([config['keytool'], '-importkeystore',
|
||||
'-srckeystore', config['keystore'],
|
||||
'-srcalias', config['repo_keyalias'],
|
||||
'-srcstorepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-srckeypass:env', 'FDROID_KEY_PASS',
|
||||
'-destkeystore', destkeystore,
|
||||
'-deststoretype', 'PKCS12',
|
||||
'-deststorepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-destkeypass:env', 'FDROID_KEY_PASS'],
|
||||
envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
raise BuildException("Failed to convert to PKCS12!", p.output)
|
||||
p = FDroidPopen(['openssl', 'pkcs12', '-in', destkeystore,
|
||||
'-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys',
|
||||
'-out', exportkeystore,
|
||||
'-passout', 'env:FDROID_KEY_STORE_PASS'],
|
||||
envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
raise BuildException("Failed to convert to PEM!", p.output)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@@ -1,23 +0,0 @@
#!/usr/bin/env python3
#
# an fdroid plugin to print the repo_pubkey from a repo's keystore
#

from argparse import ArgumentParser

from fdroidserver import common, index

fdroid_summary = 'export the keystore in standard PEM format'


def main():
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    common.parse_args(parser)
    common.read_config()
    pubkey, repo_pubkey_fingerprint = index.extract_pubkey()
    print('repo_pubkey = "%s"' % pubkey.decode())


if __name__ == "__main__":
    main()
@@ -1,43 +0,0 @@
#!/usr/bin/env python3
#
# an fdroid plugin for setting up srclibs
#
# The 'fdroid build' gitlab-ci job uses --on-server, which does not
# set up the srclibs. This plugin does the missing setup.

import argparse
import os
import pprint

from fdroidserver import _, common, metadata

fdroid_summary = 'prepare the srclibs for `fdroid build --on-server`'


def main():
    parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"))
    metadata.add_metadata_arguments(parser)
    options = common.parse_args(parser)
    apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True)
    common.read_config()
    srclib_dir = os.path.join('build', 'srclib')
    os.makedirs(srclib_dir, exist_ok=True)
    srclibpaths = []
    for appid, app in apps.items():
        vcs, _ignored = common.setup_vcs(app)
        for build in app.get('Builds', []):
            vcs.gotorevision(build.commit, refresh=False)
            if build.submodules:
                vcs.initsubmodules()
            else:
                vcs.deinitsubmodules()
            for lib in build.srclibs:
                srclibpaths.append(common.getsrclib(lib, srclib_dir, prepare=False, build=build))
    print('Set up srclibs:')
    pprint.pprint(srclibpaths)


if __name__ == "__main__":
    main()
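The example plugins removed above all follow the same shape: a module that fdroid's plugin loader can discover, with a module-level fdroid_summary string and a main() entry point. As a minimal, hedged sketch of that pattern (the command name, module filename, and output are invented for illustration; the exact discovery rules live in fdroid's plugin loader):

#!/usr/bin/env python3
# fdroid_hello.py -- minimal fdroid plugin sketch (illustrative only)

from argparse import ArgumentParser

from fdroidserver import common

fdroid_summary = 'print a greeting, as a plugin smoke test'


def main():
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    common.parse_args(parser)
    common.read_config()
    print('hello from an fdroid plugin')


if __name__ == "__main__":
    main()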
@ -1,42 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from fdroidserver import common
|
||||
from fdroidserver.common import FDroidPopen
|
||||
from fdroidserver.exception import BuildException
|
||||
|
||||
fdroid_summary = 'import the local keystore into a SmartCard HSM'
|
||||
|
||||
|
||||
def main():
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
common.parse_args(parser)
|
||||
config = common.read_config()
|
||||
env_vars = {
|
||||
'LC_ALL': 'C.UTF-8',
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config['keypass'],
|
||||
'SMARTCARD_PIN': str(config['smartcard_pin']),
|
||||
}
|
||||
p = FDroidPopen([config['keytool'], '-importkeystore',
|
||||
'-srcalias', config['repo_keyalias'],
|
||||
'-srckeystore', config['keystore'],
|
||||
'-srcstorepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-srckeypass:env', 'FDROID_KEY_PASS',
|
||||
'-destalias', config['repo_keyalias'],
|
||||
'-destkeystore', 'NONE',
|
||||
'-deststoretype', 'PKCS11',
|
||||
'-providerName', 'SunPKCS11-OpenSC',
|
||||
'-providerClass', 'sun.security.pkcs11.SunPKCS11',
|
||||
'-providerArg', 'opensc-fdroid.cfg',
|
||||
'-deststorepass:env', 'SMARTCARD_PIN',
|
||||
'-J-Djava.security.debug=sunpkcs11'],
|
||||
envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
raise BuildException("Failed to import into HSM!", p.output)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
94
examples/makebuildserver.config.py
Normal file
94
examples/makebuildserver.config.py
Normal file
|
@ -0,0 +1,94 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# You may want to alter these before running ./makebuildserver
|
||||
|
||||
# Name of the Vagrant basebox to use, by default it will be downloaded
|
||||
# from Vagrant Cloud. For release builds setup, generate the basebox
|
||||
# locally using https://gitlab.com/fdroid/basebox, add it to Vagrant,
|
||||
# then set this to the local basebox name.
|
||||
# This defaults to "fdroid/basebox-stretch64" which will download a
|
||||
# prebuilt basebox from https://app.vagrantup.com/fdroid.
|
||||
#
|
||||
# (If you change this value you have to supply the `--clean` option on
|
||||
# your next `makebuildserver` run.)
|
||||
#
|
||||
# basebox = "basebox-stretch64"
|
||||
|
||||
# This allows you to pin your basebox to a specific versions. It defaults
|
||||
# the most recent basebox version which can be aumotaically verifyed by
|
||||
# `makebuildserver`.
|
||||
# Please note that vagrant does not support versioning of locally added
|
||||
# boxes, so we can't support that either.
|
||||
#
|
||||
# (If you change this value you have to supply the `--clean` option on
|
||||
# your next `makebuildserver` run.)
|
||||
#
|
||||
# basebox_version = "0.1"
|
||||
|
||||
# In the process of setting up the build server, many gigs of files
|
||||
# are downloaded (Android SDK components, gradle, etc). These are
|
||||
# cached so that they are not redownloaded each time. By default,
|
||||
# these are stored in ~/.cache/fdroidserver
|
||||
#
|
||||
# cachedir = 'buildserver/cache'
|
||||
|
||||
# A big part of creating a new instance is downloading packages from Debian.
|
||||
# This setups up a folder in ~/.cache/fdroidserver to cache the downloaded
|
||||
# packages when rebuilding the build server from scratch. This requires
|
||||
# that virtualbox-guest-utils is installed.
|
||||
#
|
||||
# apt_package_cache = True
|
||||
|
||||
# The buildserver can use some local caches to speed up builds,
|
||||
# especially when the internet connection is slow and/or expensive.
|
||||
# If enabled, the buildserver setup will look for standard caches in
|
||||
# your HOME dir and copy them to the buildserver VM. Be aware: this
|
||||
# will reduce the isolation of the buildserver from your host machine,
|
||||
# so the buildserver will provide an environment only as trustworthy
|
||||
# as the host machine's environment.
|
||||
#
|
||||
# copy_caches_from_host = True
|
||||
|
||||
# To specify which Debian mirror the build server VM should use, by
|
||||
# default it uses http.debian.net, which auto-detects which is the
|
||||
# best mirror to use.
|
||||
#
|
||||
# debian_mirror = 'http://ftp.uk.debian.org/debian/'
|
||||
|
||||
# The amount of RAM the build server will have (default: 2048)
|
||||
# memory = 3584
|
||||
|
||||
# The number of CPUs the build server will have
|
||||
# cpus = 1
|
||||
|
||||
# Debian package proxy server - if you have one
|
||||
# aptproxy = "http://192.168.0.19:8000"
|
||||
|
||||
# If this is running on an older machine or on a virtualized system,
|
||||
# it can run a lot slower. If the provisioning fails with a warning
|
||||
# about the timeout, extend the timeout here. (default: 600 seconds)
|
||||
#
|
||||
# boot_timeout = 1200
|
||||
|
||||
# By default, this whole process uses VirtualBox as the provider, but
|
||||
# QEMU+KVM is also supported via the libvirt plugin to vagrant. If
|
||||
# this is run within a KVM guest, then libvirt's QEMU+KVM will be used
|
||||
# automatically. It can also be manually enabled by uncommenting
|
||||
# below:
|
||||
#
|
||||
# vm_provider = 'libvirt'
|
||||
|
||||
# By default libvirt uses 'virtio' for both network and disk drivers.
|
||||
# Some systems (eg. nesting VMware ESXi) do not support virtio. As a
|
||||
# workaround for such rare cases, this setting allows to configure
|
||||
# KVM/libvirt to emulate hardware rather than using virtio.
|
||||
#
|
||||
# libvirt_disk_bus = 'sata'
|
||||
# libvirt_nic_model_type = 'rtl8139'
|
||||
|
||||
# Sometimes, it is not possible to use the 9p synced folder type with
|
||||
# libvirt, like if running a KVM buildserver instance inside of a
|
||||
# VMware ESXi guest. In that case, using NFS or another method is
|
||||
# required.
|
||||
#
|
||||
# synced_folder_type = 'nfs'
|
examples/mirror-to-mirror.sh (new file, 28 lines)
@@ -0,0 +1,28 @@
#!/bin/bash
#
# This script syncs the entire repo to the primary mirrors.  It is
# meant to run in a cronjob quite frequently, as often as there are
# files to send.
#
# This script expects the receiving side to have the following
# preceding the ssh key entry in ~/.ssh/authorized_keys:
# command="rsync --server -logDtpre.iLsfx --log-format=X --delete --delay-updates . /path/to/htdocs/fdroid/",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty
#
set -e
(
    flock -n 200
    set -e
    cd /home/fdroid
    for section in repo archive; do
        echo "Started $section at `date`:"
        for host in fdroid@mirror.f-droid.org fdroid@ftp-push.lysator.liu.se; do
            set -x
            # be super careful with the trailing slashes here! if one is wrong, it'll delete the entire section!
            rsync --archive --delay-updates --progress --delete \
                /home/fdroid/public_html/${section} \
                ${host}:/srv/fdroid-mirror.at.or.at/htdocs/fdroid/ &
            set +x
        done
        wait
    done
) 200>/var/lock/root_fdroidmirrortomirror
@ -1,23 +1,20 @@
|
|||
|
||||
import gettext
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
# support running straight from git and standard installs
|
||||
rootpaths = [
|
||||
os.path.realpath(os.path.join(os.path.dirname(__file__), '..')),
|
||||
os.path.realpath(
|
||||
os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'share')
|
||||
),
|
||||
os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'share')),
|
||||
os.path.join(sys.prefix, 'share'),
|
||||
]
|
||||
|
||||
localedir = None
|
||||
for rootpath in rootpaths:
|
||||
found_mo = glob.glob(
|
||||
os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo')
|
||||
)
|
||||
if len(found_mo) > 0:
|
||||
if len(glob.glob(os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo'))) > 0:
|
||||
localedir = os.path.join(rootpath, 'locale')
|
||||
break
|
||||
|
||||
|
@ -26,53 +23,34 @@ gettext.textdomain('fdroidserver')
|
|||
_ = gettext.gettext
|
||||
|
||||
|
||||
from fdroidserver.exception import (
|
||||
FDroidException,
|
||||
MetaDataException,
|
||||
VerificationException, # NOQA: E402
|
||||
)
|
||||
|
||||
from fdroidserver.exception import (FDroidException,
|
||||
MetaDataException,
|
||||
VerificationException) # NOQA: E402
|
||||
FDroidException # NOQA: B101
|
||||
MetaDataException # NOQA: B101
|
||||
VerificationException # NOQA: B101
|
||||
|
||||
from fdroidserver.common import genkeystore as generate_keystore # NOQA: E402
|
||||
from fdroidserver.common import verify_apk_signature
|
||||
|
||||
from fdroidserver.common import (verify_apk_signature,
|
||||
genkeystore as generate_keystore) # NOQA: E402
|
||||
verify_apk_signature # NOQA: B101
|
||||
generate_keystore # NOQA: B101
|
||||
from fdroidserver.index import (
|
||||
download_repo_index,
|
||||
download_repo_index_v1,
|
||||
download_repo_index_v2,
|
||||
get_mirror_service_urls,
|
||||
)
|
||||
from fdroidserver.index import make as make_index # NOQA: E402
|
||||
|
||||
from fdroidserver.index import (download_repo_index,
|
||||
get_mirror_service_urls,
|
||||
make as make_index) # NOQA: E402
|
||||
download_repo_index # NOQA: B101
|
||||
download_repo_index_v1 # NOQA: B101
|
||||
download_repo_index_v2 # NOQA: B101
|
||||
get_mirror_service_urls # NOQA: B101
|
||||
make_index # NOQA: B101
|
||||
from fdroidserver.update import (
|
||||
process_apk,
|
||||
process_apks,
|
||||
scan_apk,
|
||||
scan_repo_files, # NOQA: E402
|
||||
)
|
||||
|
||||
from fdroidserver.update import (process_apk,
|
||||
process_apks,
|
||||
scan_apk,
|
||||
scan_repo_files) # NOQA: E402
|
||||
process_apk # NOQA: B101
|
||||
process_apks # NOQA: B101
|
||||
scan_apk # NOQA: B101
|
||||
scan_repo_files # NOQA: B101
|
||||
from fdroidserver.deploy import (
|
||||
update_awsbucket,
|
||||
update_servergitmirrors,
|
||||
update_serverwebroot, # NOQA: E402
|
||||
update_serverwebroots,
|
||||
)
|
||||
|
||||
from fdroidserver.deploy import (update_awsbucket,
|
||||
update_servergitmirrors,
|
||||
update_serverwebroot) # NOQA: E402
|
||||
update_awsbucket # NOQA: B101
|
||||
update_servergitmirrors # NOQA: B101
|
||||
update_serverwebroots # NOQA: B101
|
||||
update_serverwebroot # NOQA: B101
|
||||
|
|
|
@ -18,20 +18,19 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import importlib.metadata
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import re
|
||||
import sys
|
||||
from argparse import ArgumentError
|
||||
from collections import OrderedDict
|
||||
|
||||
import git
|
||||
import os
|
||||
import locale
|
||||
import pkgutil
|
||||
import logging
|
||||
|
||||
import fdroidserver.common
|
||||
import fdroidserver.metadata
|
||||
from fdroidserver import _
|
||||
from argparse import ArgumentError
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
COMMANDS = OrderedDict([
|
||||
("build", _("Build a package from source")),
|
||||
|
@ -42,12 +41,13 @@ COMMANDS = OrderedDict([
|
|||
("deploy", _("Interact with the repo HTTP server")),
|
||||
("verify", _("Verify the integrity of downloaded packages")),
|
||||
("checkupdates", _("Check for updates to applications")),
|
||||
("import", _("Extract application metadata from a source repository")),
|
||||
("import", _("Add a new application from its source code")),
|
||||
("install", _("Install built packages on devices")),
|
||||
("readmeta", _("Read all the metadata files and exit")),
|
||||
("rewritemeta", _("Rewrite all the metadata files")),
|
||||
("lint", _("Warn about possible metadata errors")),
|
||||
("scanner", _("Scan the source code of a package")),
|
||||
("stats", _("Update the stats of the repo")),
|
||||
("signindex", _("Sign indexes created using update --nosign")),
|
||||
("btlog", _("Update the binary transparency log for a URL")),
|
||||
("signatures", _("Extract signatures from APKs")),
|
||||
|
@ -70,13 +70,9 @@ def print_help(available_plugins=None):
|
|||
|
||||
|
||||
def preparse_plugin(module_name, module_dir):
|
||||
"""No summary.
|
||||
|
||||
Simple regex based parsing for plugin scripts.
|
||||
|
||||
So we don't have to import them when we just need the summary,
|
||||
but not plan on executing this particular plugin.
|
||||
"""
|
||||
"""simple regex based parsing for plugin scripts,
|
||||
so we don't have to import them when we just need the summary,
|
||||
but not plan on executing this particular plugin."""
|
||||
if '.' in module_name:
|
||||
raise ValueError("No '.' allowed in fdroid plugin modules: '{}'"
|
||||
.format(module_name))
|
||||
|
@ -136,7 +132,7 @@ def main():
|
|||
sys.exit(0)
|
||||
|
||||
command = sys.argv[1]
|
||||
if command not in COMMANDS and command not in available_plugins:
|
||||
if command not in COMMANDS and command not in available_plugins.keys():
|
||||
if command in ('-h', '--help'):
|
||||
print_help(available_plugins=available_plugins)
|
||||
sys.exit(0)
|
||||
|
@ -144,21 +140,32 @@ def main():
|
|||
print(_("""ERROR: The "server" subcommand has been removed, use "deploy"!"""))
|
||||
sys.exit(1)
|
||||
elif command == '--version':
|
||||
try:
|
||||
print(importlib.metadata.version("fdroidserver"))
|
||||
sys.exit(0)
|
||||
except importlib.metadata.PackageNotFoundError:
|
||||
pass
|
||||
try:
|
||||
print(
|
||||
git.repo.Repo(
|
||||
os.path.dirname(os.path.dirname(__file__))
|
||||
).git.describe(always=True, tags=True)
|
||||
)
|
||||
sys.exit(0)
|
||||
except git.exc.InvalidGitRepositoryError:
|
||||
print(_('No version information could be found.'))
|
||||
sys.exit(1)
|
||||
output = _('no version info found!')
|
||||
cmddir = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
|
||||
moduledir = os.path.realpath(os.path.dirname(fdroidserver.common.__file__) + '/..')
|
||||
if cmddir == moduledir:
|
||||
# running from git
|
||||
os.chdir(cmddir)
|
||||
if os.path.isdir('.git'):
|
||||
import subprocess
|
||||
try:
|
||||
output = subprocess.check_output(['git', 'describe'],
|
||||
stderr=subprocess.STDOUT,
|
||||
universal_newlines=True)
|
||||
except subprocess.CalledProcessError:
|
||||
output = 'git commit ' + subprocess.check_output(['git', 'rev-parse', 'HEAD'],
|
||||
universal_newlines=True)
|
||||
elif os.path.exists('setup.py'):
|
||||
import re
|
||||
m = re.search(r'''.*[\s,\(]+version\s*=\s*["']([0-9a-z.]+)["'].*''',
|
||||
open('setup.py').read(), flags=re.MULTILINE)
|
||||
if m:
|
||||
output = m.group(1) + '\n'
|
||||
else:
|
||||
from pkg_resources import get_distribution
|
||||
output = get_distribution('fdroidserver').version + '\n'
|
||||
print(output)
|
||||
sys.exit(0)
|
||||
else:
|
||||
print(_("Command '%s' not recognised.\n" % command))
|
||||
print_help(available_plugins=available_plugins)
|
||||
|
@ -182,18 +189,16 @@ def main():
|
|||
"can not be specified at the same time."))
|
||||
sys.exit(1)
|
||||
|
||||
# Trick argparse into displaying the right usage when --help is used.
|
||||
# Trick optparse into displaying the right usage when --help is used.
|
||||
sys.argv[0] += ' ' + command
|
||||
|
||||
del sys.argv[1]
|
||||
if command in COMMANDS.keys():
|
||||
# import is named import_subcommand internally b/c import is reserved by Python
|
||||
command = 'import_subcommand' if command == 'import' else command
|
||||
mod = __import__('fdroidserver.' + command, None, None, [command])
|
||||
else:
|
||||
mod = __import__(available_plugins[command]['name'], None, None, [command])
|
||||
|
||||
system_encoding = sys.getdefaultencoding()
|
||||
system_langcode, system_encoding = locale.getdefaultlocale()
|
||||
if system_encoding is None or system_encoding.lower() not in ('utf-8', 'utf8'):
|
||||
logging.warning(_("Encoding is set to '{enc}' fdroid might run "
|
||||
"into encoding issues. Please set it to 'UTF-8' "
|
||||
|
|
|
@@ -1,64 +0,0 @@
# Copyright (C) 2025, Hans-Christoph Steiner <hans@eds.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Standard YAML parsing and dumping.

YAML 1.2 is the preferred format for all data files.  When loading
F-Droid formats like config.yml and <Application ID>.yml, YAML 1.2 is
forced, and older YAML constructs should be considered an error.

It is OK to load and dump files in other YAML versions if they are
externally defined formats, like FUNDING.yml.  In those cases, these
common instances might not be appropriate to use.

There is a separate instance for dumping based on the "round trip" aka
"rt" mode.  The "rt" mode maintains order while the "safe" mode sorts
the output.  Also, yaml.version is not forced in the dumper because that
makes it write out a "%YAML 1.2" header.  F-Droid's formats are
explicitly defined as YAML 1.2 and meant to be human-editable.  So that
header gets in the way.

"""

import ruamel.yaml

yaml = ruamel.yaml.YAML(typ='safe')
yaml.version = (1, 2)

yaml_dumper = ruamel.yaml.YAML(typ='rt')


def config_dump(config, fp=None):
    """Dump config data in YAML 1.2 format without headers.

    This outputs YAML in a string that is suitable for use in regexps
    and string replacements, as well as complete files.  It is therefore
    explicitly set up to avoid writing out headers and footers.

    This is modeled after PyYAML's yaml.dump(), which can dump to a file
    or return a string.

    https://yaml.dev/doc/ruamel.yaml/example/#Output_of_%60dump()%60_as_a_string

    """
    dumper = ruamel.yaml.YAML(typ='rt')
    dumper.default_flow_style = False
    dumper.explicit_start = False
    dumper.explicit_end = False
    if fp is None:
        with ruamel.yaml.compat.StringIO() as fp:
            dumper.dump(config, fp)
            return fp.getvalue()
    dumper.dump(config, fp)
File diff suppressed because it is too large
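Looping back to the deleted _yaml module above: a short, hedged usage sketch of config_dump(), based only on the signature and defaults shown there. The import path assumes the module is importable as fdroidserver._yaml, and the sample data is invented:

from fdroidserver._yaml import config_dump

config = {'repo_name': 'My First F-Droid Repo Demo', 'archive_older': 3}
print(config_dump(config))       # fp=None returns a YAML 1.2 string with no '---' header
with open('config.yml', 'w') as fp:
    config_dump(config, fp)      # or write the same YAML straight to a file object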
|
@@ -1,8 +1,9 @@
"""Simple thread based asynchronous file reader for Python.

"""
AsynchronousFileReader
======================

Simple thread based asynchronous file reader for Python.

see https://github.com/soxofaan/asynchronousfilereader

MIT License

@@ -12,7 +13,6 @@ Copyright (c) 2014 Stefaan Lippens
__version__ = '0.2.1'

import threading

try:
    # Python 2
    from Queue import Queue

@@ -22,9 +22,10 @@ except ImportError:


class AsynchronousFileReader(threading.Thread):
    """Helper class to implement asynchronous reading of a file in a separate thread.

    Pushes read lines on a queue to be consumed in another thread.
    """
    """
    Helper class to implement asynchronous reading of a file
    in a separate thread. Pushes read lines on a queue to
    be consumed in another thread.
    """

    def __init__(self, fd, queue=None, autostart=True):

@@ -39,7 +40,9 @@ class AsynchronousFileReader(threading.Thread):
            self.start()

    def run(self):
        """Read lines and put them on the queue (the body of the thread)."""
        """
        The body of the thread: read lines and put them on the queue.
        """
        while True:
            line = self._fd.readline()
            if not line:

@@ -47,10 +50,15 @@ class AsynchronousFileReader(threading.Thread):
            self.queue.put(line)

    def eof(self):
        """Check whether there is no more content to expect."""
        """
        Check whether there is no more content to expect.
        """
        return not self.is_alive() and self.queue.empty()

    def readlines(self):
        """Get currently available lines."""
        """
        Get currently available lines.
        """
        while not self.queue.empty():
            yield self.queue.get()
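A hedged usage sketch of AsynchronousFileReader, based only on the constructor and methods shown in the diff above. The module path fdroidserver.asynchronousfilereader and the subprocess command are assumptions for illustration:

import subprocess
import time

from fdroidserver.asynchronousfilereader import AsynchronousFileReader

proc = subprocess.Popen(['ping', '-c', '3', 'localhost'], stdout=subprocess.PIPE)
reader = AsynchronousFileReader(proc.stdout)  # autostart=True spawns the reader thread

while not reader.eof():
    for line in reader.readlines():  # drain whatever lines are available right now
        print(line.decode().rstrip())
    time.sleep(0.1)
proc.wait()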
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""Update the binary transparency log for a URL."""
#
# btlog.py - part of the FDroid server tools
# Copyright (C) 2017, Hans-Christoph Steiner <hans@eds.org>

@@ -27,64 +26,51 @@
# client app so it's not easy for the server to distinguish this from
# the F-Droid client.


import collections
import defusedxml.minidom
import git
import glob
import os
import json
import logging
import os
import requests
import shutil
import tempfile
import zipfile
from argparse import ArgumentParser
from typing import Optional

import defusedxml.minidom
import git
import requests

from . import _, common, deploy
from . import _
from . import common
from . import deploy
from .exception import FDroidException


def make_binary_transparency_log(
    repodirs: collections.abc.Iterable,
    btrepo: str = 'binary_transparency',
    url: Optional[str] = None,
    commit_title: str = 'fdroid update',
):
    """Log the indexes in a standalone git repo to serve as a "binary transparency" log.
options = None

    Parameters
    ----------
    repodirs
        The directories of the F-Droid repository to generate the binary
        transparency log for.
    btrepo
        The path to the Git repository of the binary transparency log.
    url
        The URL of the F-Droid repository to generate the binary transparency
        log for.
    commit_title
        The commit title for commits in the binary transparency log Git
        repository.

    Notes
    -----
    Also see https://www.eff.org/deeplinks/2014/02/open-letter-to-tech-companies .
    """
def make_binary_transparency_log(repodirs, btrepo='binary_transparency',
                                 url=None,
                                 commit_title='fdroid update'):
    '''Log the indexes in a standalone git repo to serve as a "binary
    transparency" log.

    see: https://www.eff.org/deeplinks/2014/02/open-letter-to-tech-companies

    '''

    logging.info('Committing indexes to ' + btrepo)
    if os.path.exists(os.path.join(btrepo, '.git')):
        gitrepo = git.Repo(btrepo)
    else:
        if not os.path.exists(btrepo):
            os.mkdir(btrepo)
        gitrepo = git.Repo.init(btrepo, initial_branch=deploy.GIT_BRANCH)
        gitrepo = git.Repo.init(btrepo)

    if not url:
        url = common.config['repo_url'].rstrip('/')
    with open(os.path.join(btrepo, 'README.md'), 'w') as fp:
        fp.write(
            """
        fp.write("""
# Binary Transparency Log for %s

This is a log of the signed app index metadata.  This is stored in a

@@ -94,17 +80,15 @@ F-Droid repository was a publicly released file.

For more info on this idea:
* https://wiki.mozilla.org/Security/Binary_Transparency
"""
            % url[: url.rindex('/')]  # strip '/repo'
        )
    gitrepo.index.add(['README.md'])
""" % url[:url.rindex('/')])  # strip '/repo'
    gitrepo.index.add(['README.md', ])
    gitrepo.index.commit('add README')

    for repodir in repodirs:
        cpdir = os.path.join(btrepo, repodir)
        if not os.path.exists(cpdir):
            os.mkdir(cpdir)
        for f in ('index.xml', 'index-v1.json', 'index-v2.json', 'entry.json'):
        for f in ('index.xml', 'index-v1.json'):
            repof = os.path.join(repodir, f)
            if not os.path.exists(repof):
                continue
@ -119,8 +103,8 @@ For more info on this idea:
|
|||
output = json.load(fp, object_pairs_hook=collections.OrderedDict)
|
||||
with open(dest, 'w') as fp:
|
||||
json.dump(output, fp, indent=2)
|
||||
gitrepo.index.add([repof])
|
||||
for f in ('index.jar', 'index-v1.jar', 'entry.jar'):
|
||||
gitrepo.index.add([repof, ])
|
||||
for f in ('index.jar', 'index-v1.jar'):
|
||||
repof = os.path.join(repodir, f)
|
||||
if not os.path.exists(repof):
|
||||
continue
|
||||
|
@ -132,7 +116,7 @@ For more info on this idea:
|
|||
jarout.writestr(info, jarin.read(info.filename))
|
||||
jarout.close()
|
||||
jarin.close()
|
||||
gitrepo.index.add([repof])
|
||||
gitrepo.index.add([repof, ])
|
||||
|
||||
output_files = []
|
||||
for root, dirs, files in os.walk(repodir):
|
||||
|
@ -153,45 +137,27 @@ For more info on this idea:
|
|||
fslogfile = os.path.join(cpdir, 'filesystemlog.json')
|
||||
with open(fslogfile, 'w') as fp:
|
||||
json.dump(output, fp, indent=2)
|
||||
gitrepo.index.add([os.path.join(repodir, 'filesystemlog.json')])
|
||||
gitrepo.index.add([os.path.join(repodir, 'filesystemlog.json'), ])
|
||||
|
||||
for f in glob.glob(os.path.join(cpdir, '*.HTTP-headers.json')):
|
||||
gitrepo.index.add([os.path.join(repodir, os.path.basename(f))])
|
||||
gitrepo.index.add([os.path.join(repodir, os.path.basename(f)), ])
|
||||
|
||||
gitrepo.index.commit(commit_title)
|
||||
|
||||
|
||||
def main():
|
||||
"""Generate or update a binary transparency log for a F-Droid repository.
|
||||
global options
|
||||
|
||||
The behaviour of this function is influenced by the configuration file as
|
||||
well as command line parameters.
|
||||
|
||||
Raises
|
||||
------
|
||||
:exc:`~fdroidserver.exception.FDroidException`
|
||||
If the specified or default Git repository does not exist.
|
||||
|
||||
"""
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"--git-repo",
|
||||
default=os.path.join(os.getcwd(), 'binary_transparency'),
|
||||
help=_("Path to the git repo to use as the log"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-u",
|
||||
"--url",
|
||||
default='https://f-droid.org',
|
||||
help=_("The base URL for the repo to log (default: https://f-droid.org)"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--git-remote",
|
||||
default=None,
|
||||
help=_("Push the log to this git remote repository"),
|
||||
)
|
||||
options = common.parse_args(parser)
|
||||
parser.add_argument("--git-repo",
|
||||
default=os.path.join(os.getcwd(), 'binary_transparency'),
|
||||
help=_("Path to the git repo to use as the log"))
|
||||
parser.add_argument("-u", "--url", default='https://f-droid.org',
|
||||
help=_("The base URL for the repo to log (default: https://f-droid.org)"))
|
||||
parser.add_argument("--git-remote", default=None,
|
||||
help=_("Push the log to this git remote repository"))
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.verbose:
|
||||
logging.getLogger("requests").setLevel(logging.INFO)
|
||||
|
@ -202,8 +168,7 @@ def main():
|
|||
|
||||
if not os.path.exists(options.git_repo):
|
||||
raise FDroidException(
|
||||
'"%s" does not exist! Create it, or use --git-repo' % options.git_repo
|
||||
)
|
||||
'"%s" does not exist! Create it, or use --git-repo' % options.git_repo)
|
||||
|
||||
session = requests.Session()
|
||||
|
||||
|
@ -216,20 +181,14 @@ def main():
|
|||
os.makedirs(tempdir, exist_ok=True)
|
||||
gitrepodir = os.path.join(options.git_repo, repodir)
|
||||
os.makedirs(gitrepodir, exist_ok=True)
|
||||
for f in (
|
||||
'entry.jar',
|
||||
'entry.json',
|
||||
'index-v1.jar',
|
||||
'index-v1.json',
|
||||
'index-v2.json',
|
||||
'index.jar',
|
||||
'index.xml',
|
||||
):
|
||||
for f in ('index.jar', 'index.xml', 'index-v1.jar', 'index-v1.json'):
|
||||
dlfile = os.path.join(tempdir, f)
|
||||
dlurl = options.url + '/' + repodir + '/' + f
|
||||
http_headers_file = os.path.join(gitrepodir, f + '.HTTP-headers.json')
|
||||
|
||||
headers = {'User-Agent': 'F-Droid 0.102.3'}
|
||||
headers = {
|
||||
'User-Agent': 'F-Droid 0.102.3'
|
||||
}
|
||||
etag = None
|
||||
if os.path.exists(http_headers_file):
|
||||
with open(http_headers_file) as fp:
|
||||
|
@ -237,9 +196,7 @@ def main():
|
|||
|
||||
r = session.head(dlurl, headers=headers, allow_redirects=False)
|
||||
if r.status_code != 200:
|
||||
logging.debug(
|
||||
'HTTP Response (%d), did not download %s' % (r.status_code, dlurl)
|
||||
)
|
||||
logging.debug('HTTP Response (' + str(r.status_code) + '), did not download ' + dlurl)
|
||||
continue
|
||||
if etag and etag == r.headers.get('ETag'):
|
||||
logging.debug('ETag matches, did not download ' + dlurl)
|
||||
|
@ -260,9 +217,7 @@ def main():
|
|||
|
||||
if new_files:
|
||||
os.chdir(tempdirbase)
|
||||
make_binary_transparency_log(
|
||||
repodirs, options.git_repo, options.url, 'fdroid btlog'
|
||||
)
|
||||
make_binary_transparency_log(repodirs, options.git_repo, options.url, 'fdroid btlog')
|
||||
if options.git_remote:
|
||||
deploy.push_binary_transparency(options.git_repo, options.git_remote)
|
||||
shutil.rmtree(tempdirbase, ignore_errors=True)
|
||||
|
|
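
A rough sketch of calling make_binary_transparency_log() directly, using the signature shown above. It assumes common.config has already been read (for the repo_url fallback) and that 'repo' contains signed index files; paths and URL are examples.

from fdroidserver import btlog

btlog.make_binary_transparency_log(
    ['repo', 'archive'],               # repodirs whose indexes get logged
    btrepo='binary_transparency',      # standalone git repo that acts as the log
    url='https://example.org/fdroid/repo',
    commit_title='fdroid update',
)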
@@ -1,6 +1,6 @@
class FDroidException(Exception):

    def __init__(self, value=None, detail=None):
        super().__init__()
        self.value = value
        self.detail = detail

@@ -9,22 +9,26 @@ class FDroidException(Exception):
            return self.detail
        return '[...]\n' + self.detail[-16000:]

    def get_wikitext(self):
        ret = repr(self.value) + "\n"
        if self.detail:
            ret += "=detail=\n"
            ret += "<pre>\n" + self.shortened_detail() + "</pre>\n"
        return ret

    def __str__(self):
        if self.value is None:
            ret = __name__
        else:
            ret = str(self.value)
        if self.detail:
            ret += (
                "\n==== detail begin ====\n%s\n==== detail end ===="
                % ''.join(self.detail).strip()
            )
            ret += "\n==== detail begin ====\n%s\n==== detail end ====" % ''.join(self.detail).strip()
        return ret


class MetaDataException(Exception):

    def __init__(self, value):
        super().__init__()
        self.value = value

    def __str__(self):

@@ -35,10 +39,6 @@ class VCSException(FDroidException):
    pass


class NoVersionCodeException(FDroidException):
    pass


class NoSubmodulesException(VCSException):
    pass


@@ -49,10 +49,3 @@ class BuildException(FDroidException):

class VerificationException(FDroidException):
    pass


class ConfigurationException(FDroidException):
    def __init__(self, value=None, detail=None):
        super().__init__()
        self.value = value
        self.detail = detail
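
A small illustration of how FDroidException carries a value plus an optional detail blob, matching the __str__() logic shown above; the message and detail text are invented.

from fdroidserver.exception import FDroidException

try:
    raise FDroidException('build failed', detail='...tail of the build log...')
except FDroidException as e:
    # str(e) appends the detail between "==== detail begin/end ====" markers.
    print(e)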
@ -1,178 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# github.py - part of the FDroid server tools
|
||||
# Copyright (C) 2024, Michael Pöhn, michael@poehn.at
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import pathlib
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
class GithubApi:
|
||||
"""Wrapper for some select calls to GitHub Json/REST API.
|
||||
|
||||
This class wraps some calls to api.github.com. This is not intended to be a
|
||||
general API wrapper. Instead it's purpose is to return pre-filtered and
|
||||
transformed data that's playing well with other fdroidserver functions.
|
||||
|
||||
With the GitHub API, the token is optional, but it has pretty
|
||||
severe rate limiting.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, api_token, repo_path):
|
||||
self._api_token = api_token
|
||||
if repo_path.startswith("https://github.com/"):
|
||||
self._repo_path = repo_path[19:]
|
||||
else:
|
||||
self._repo_path = repo_path
|
||||
|
||||
def _req(self, url, data=None):
|
||||
h = {
|
||||
"Accept": "application/vnd.github+json",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
if self._api_token:
|
||||
h["Authorization"] = f"Bearer {self._api_token}"
|
||||
return urllib.request.Request(
|
||||
url,
|
||||
headers=h,
|
||||
data=data,
|
||||
)
|
||||
|
||||
def list_released_tags(self):
|
||||
"""List of all tags that are associated with a release for this repo on GitHub."""
|
||||
names = []
|
||||
req = self._req(f"https://api.github.com/repos/{self._repo_path}/releases")
|
||||
with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning
|
||||
releases = json.load(resp)
|
||||
for release in releases:
|
||||
names.append(release['tag_name'])
|
||||
return names
|
||||
|
||||
def list_unreleased_tags(self):
|
||||
all_tags = self.list_all_tags()
|
||||
released_tags = self.list_released_tags()
|
||||
return [x for x in all_tags if x not in released_tags]
|
||||
|
||||
def get_latest_apk(self):
|
||||
req = self._req(
|
||||
f"https://api.github.com/repos/{self._repo_path}/releases/latest"
|
||||
)
|
||||
with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning
|
||||
assets = json.load(resp)['assets']
|
||||
for asset in assets:
|
||||
url = asset.get('browser_download_url')
|
||||
if url and url.endswith('.apk'):
|
||||
return url
|
||||
|
||||
def tag_exists(self, tag):
|
||||
"""
|
||||
Check if git tag is present on github.
|
||||
|
||||
https://docs.github.com/en/rest/git/refs?apiVersion=2022-11-28#list-matching-references--fine-grained-access-tokens
|
||||
"""
|
||||
req = self._req(
|
||||
f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/{tag}"
|
||||
)
|
||||
with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning
|
||||
rd = json.load(resp)
|
||||
return len(rd) == 1 and rd[0].get("ref", False) == f"refs/tags/{tag}"
|
||||
return False
|
||||
|
||||
def list_all_tags(self):
|
||||
"""Get list of all tags for this repo on GitHub."""
|
||||
tags = []
|
||||
req = self._req(
|
||||
f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/"
|
||||
)
|
||||
with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning
|
||||
refs = json.load(resp)
|
||||
for ref in refs:
|
||||
r = ref.get('ref', '')
|
||||
if r.startswith('refs/tags/'):
|
||||
tags.append(r[10:])
|
||||
return tags
|
||||
|
||||
def create_release(self, tag, files, body=''):
|
||||
"""
|
||||
Create a new release on github.
|
||||
|
||||
also see: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release
|
||||
|
||||
:returns: True if release was created, False if release already exists
|
||||
:raises: urllib exceptions in case of network or api errors, also
|
||||
raises an exception when the tag doesn't exists.
|
||||
"""
|
||||
# Querying github to create a new release for a non-existent tag, will
|
||||
# also create that tag on github. So we need an additional check to
|
||||
# prevent this behavior.
|
||||
if not self.tag_exists(tag):
|
||||
raise Exception(
|
||||
f"can't create github release for {self._repo_path} {tag}, tag doesn't exists"
|
||||
)
|
||||
# create the relase on github
|
||||
req = self._req(
|
||||
f"https://api.github.com/repos/{self._repo_path}/releases",
|
||||
data=json.dumps(
|
||||
{
|
||||
"tag_name": tag,
|
||||
"body": body,
|
||||
}
|
||||
).encode("utf-8"),
|
||||
)
|
||||
try:
|
||||
with urllib.request.urlopen( # nosec CWE-22 disable bandit warning
|
||||
req
|
||||
) as resp:
|
||||
release_id = json.load(resp)['id']
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.status == 422:
|
||||
codes = [x['code'] for x in json.load(e).get('errors', [])]
|
||||
if "already_exists" in codes:
|
||||
return False
|
||||
raise e
|
||||
|
||||
# attach / upload all files for the relase
|
||||
for file in files:
|
||||
self._create_release_asset(release_id, file)
|
||||
|
||||
return True
|
||||
|
||||
def _create_release_asset(self, release_id, file):
|
||||
"""
|
||||
Attach a file to a release on GitHub.
|
||||
|
||||
This uploads a file to github relases, it will be attached to the supplied release
|
||||
|
||||
also see: https://docs.github.com/en/rest/releases/assets?apiVersion=2022-11-28#upload-a-release-asset
|
||||
"""
|
||||
file = pathlib.Path(file)
|
||||
with open(file, 'rb') as f:
|
||||
req = urllib.request.Request(
|
||||
f"https://uploads.github.com/repos/{self._repo_path}/releases/{release_id}/assets?name={file.name}",
|
||||
headers={
|
||||
"Accept": "application/vnd.github+json",
|
||||
"Authorization": f"Bearer {self._api_token}",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
"Content-Type": "application/octet-stream",
|
||||
},
|
||||
data=f.read(),
|
||||
)
|
||||
with urllib.request.urlopen(req): # nosec CWE-22 disable bandit warning
|
||||
return True
|
||||
return False
|
|
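
A hedged sketch of driving the GithubApi wrapper above; the token, repository path, and APK filename are placeholders.

api = GithubApi('ghp_example_token', 'https://github.com/example/app')

for tag in api.list_unreleased_tags():    # tags with no GitHub release yet
    # create_release() returns False if the release already exists and
    # raises if the tag is not actually present on GitHub.
    api.create_release(tag, ['app-release.apk'], body='automated release')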
@ -16,22 +16,25 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import glob
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
import time
|
||||
|
||||
from . import _, common
|
||||
from . import _
|
||||
from . import common
|
||||
from .common import FDroidPopen
|
||||
from .exception import FDroidException
|
||||
|
||||
config = None
|
||||
options = None
|
||||
start_timestamp = time.gmtime()
|
||||
|
||||
|
||||
def status_update_json(signed):
|
||||
"""Output a JSON file with metadata about this run."""
|
||||
"""Output a JSON file with metadata about this run"""
|
||||
|
||||
logging.debug(_('Outputting JSON'))
|
||||
output = common.setup_status_output(start_timestamp)
|
||||
if signed:
|
||||
|
@ -40,14 +43,15 @@ def status_update_json(signed):
|
|||
|
||||
|
||||
def main():
|
||||
global config
|
||||
|
||||
global config, options
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
common.parse_args(parser)
|
||||
options = parser.parse_args()
|
||||
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
repodirs = ['repo']
|
||||
if config['archive_older'] != 0:
|
||||
|
@ -56,20 +60,22 @@ def main():
|
|||
signed = []
|
||||
for output_dir in repodirs:
|
||||
if not os.path.isdir(output_dir):
|
||||
raise FDroidException(
|
||||
_("Missing output directory") + " '" + output_dir + "'"
|
||||
)
|
||||
raise FDroidException(_("Missing output directory") + " '" + output_dir + "'")
|
||||
|
||||
# Process any apks that are waiting to be signed...
|
||||
for f in sorted(glob.glob(os.path.join(output_dir, '*.*'))):
|
||||
if not common.is_repo_file(f, for_gpg_signing=True):
|
||||
if common.get_file_extension(f) == 'asc':
|
||||
continue
|
||||
if not common.is_repo_file(f):
|
||||
continue
|
||||
filename = os.path.basename(f)
|
||||
sigfilename = filename + ".asc"
|
||||
sigpath = os.path.join(output_dir, sigfilename)
|
||||
|
||||
if not os.path.exists(sigpath):
|
||||
gpgargs = ['gpg', '-a', '--output', sigpath, '--detach-sig']
|
||||
gpgargs = ['gpg', '-a',
|
||||
'--output', sigpath,
|
||||
'--detach-sig']
|
||||
if 'gpghome' in config:
|
||||
gpgargs.extend(['--homedir', config['gpghome']])
|
||||
if 'gpgkey' in config:
|
||||
|
|
265
fdroidserver/import.py
Normal file
|
@ -0,0 +1,265 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# import.py - part of the FDroid server tools
|
||||
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
||||
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import configparser
|
||||
import git
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import yaml
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
|
||||
try:
|
||||
from yaml import CSafeLoader as SafeLoader
|
||||
except ImportError:
|
||||
from yaml import SafeLoader
|
||||
|
||||
from . import _
|
||||
from . import common
|
||||
from . import metadata
|
||||
from .exception import FDroidException
|
||||
|
||||
|
||||
config = None
|
||||
options = None
|
||||
|
||||
|
||||
# WARNING! This cannot be imported as a Python module, so reuseable functions need to go into common.py!
|
||||
|
||||
def clone_to_tmp_dir(app):
|
||||
tmp_dir = 'tmp'
|
||||
if not os.path.isdir(tmp_dir):
|
||||
logging.info(_("Creating temporary directory"))
|
||||
os.makedirs(tmp_dir)
|
||||
|
||||
tmp_dir = os.path.join(tmp_dir, 'importer')
|
||||
if os.path.exists(tmp_dir):
|
||||
shutil.rmtree(tmp_dir)
|
||||
vcs = common.getvcs(app.RepoType, app.Repo, tmp_dir)
|
||||
vcs.gotorevision(options.rev)
|
||||
|
||||
return tmp_dir
|
||||
|
||||
|
||||
def check_for_kivy_buildozer(tmp_importer_dir, app, build):
|
||||
versionCode = None
|
||||
buildozer_spec = os.path.join(tmp_importer_dir, 'buildozer.spec')
|
||||
if os.path.exists(buildozer_spec):
|
||||
config = configparser.ConfigParser()
|
||||
config.read(buildozer_spec)
|
||||
import pprint
|
||||
pprint.pprint(sorted(config['app'].keys()))
|
||||
app.id = config['app'].get('package.domain')
|
||||
print(app.id)
|
||||
app.AutoName = config['app'].get('package.name', app.AutoName)
|
||||
app.License = config['app'].get('license', app.License)
|
||||
app.Description = config['app'].get('description', app.Description)
|
||||
build.versionName = config['app'].get('version')
|
||||
build.output = 'bin/%s-$$VERSION$$-release-unsigned.apk' % app.AutoName
|
||||
build.ndk = 'r17c'
|
||||
build.srclibs = [
|
||||
'buildozer@586152c',
|
||||
'python-for-android@ccb0f8e1',
|
||||
]
|
||||
build.sudo = [
|
||||
'apt-get update',
|
||||
'apt-get install -y build-essential libffi-dev libltdl-dev',
|
||||
]
|
||||
build.prebuild = [
|
||||
'sed -iE "/^[# ]*android\\.(ant|ndk|sdk)_path[ =]/d" buildozer.spec',
|
||||
'sed -iE "/^[# ]*android.accept_sdk_license[ =]+.*/d" buildozer.spec',
|
||||
'sed -iE "/^[# ]*android.skip_update[ =]+.*/d" buildozer.spec',
|
||||
'sed -iE "/^[# ]*p4a.source_dir[ =]+.*/d" buildozer.spec',
|
||||
'sed -i "s,\\[app\\],[app]\\n\\nandroid.sdk_path = $$SDK$$\\nandroid.ndk_path = $$NDK$$\\np4a.source_dir = $$python-for-android$$\\nandroid.accept_sdk_license = False\\nandroid.skip_update = True\\nandroid.ant_path = /usr/bin/ant\\n," buildozer.spec',
|
||||
'pip3 install --user --upgrade $$buildozer$$ Cython==0.28.6',
|
||||
]
|
||||
build.build = [
|
||||
'PATH="$HOME/.local/bin:$PATH" buildozer android release',
|
||||
]
|
||||
return build.get('versionName'), versionCode, app.get('id')
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
global config, options
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument("-u", "--url", default=None,
|
||||
help=_("Project URL to import from."))
|
||||
parser.add_argument("-s", "--subdir", default=None,
|
||||
help=_("Path to main Android project subdirectory, if not in root."))
|
||||
parser.add_argument("-c", "--categories", default=None,
|
||||
help=_("Comma separated list of categories."))
|
||||
parser.add_argument("-l", "--license", default=None,
|
||||
help=_("Overall license of the project."))
|
||||
parser.add_argument("--omit-disable", action="store_true", default=False,
|
||||
help=_("Do not add 'disable:' to the generated build entries"))
|
||||
parser.add_argument("--rev", default=None,
|
||||
help=_("Allows a different revision (or git branch) to be specified for the initial import"))
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = parser.parse_args()
|
||||
metadata.warnings_action = options.W
|
||||
|
||||
config = common.read_config(options)
|
||||
|
||||
apps = metadata.read_metadata()
|
||||
app = None
|
||||
|
||||
tmp_importer_dir = None
|
||||
|
||||
local_metadata_files = common.get_local_metadata_files()
|
||||
if local_metadata_files != []:
|
||||
raise FDroidException(_("This repo already has local metadata: %s") % local_metadata_files[0])
|
||||
|
||||
build = metadata.Build()
|
||||
if options.url is None and os.path.isdir('.git'):
|
||||
app = metadata.App()
|
||||
app.AutoName = os.path.basename(os.getcwd())
|
||||
app.RepoType = 'git'
|
||||
|
||||
if os.path.exists('build.gradle') or os.path.exists('build.gradle.kts'):
|
||||
build.gradle = ['yes']
|
||||
|
||||
git_repo = git.repo.Repo(os.getcwd())
|
||||
for remote in git.Remote.iter_items(git_repo):
|
||||
if remote.name == 'origin':
|
||||
url = git_repo.remotes.origin.url
|
||||
if url.startswith('https://git'): # github, gitlab
|
||||
app.SourceCode = url.rstrip('.git')
|
||||
app.Repo = url
|
||||
break
|
||||
write_local_file = True
|
||||
elif options.url:
|
||||
app = common.get_app_from_url(options.url)
|
||||
tmp_importer_dir = clone_to_tmp_dir(app)
|
||||
git_repo = git.repo.Repo(tmp_importer_dir)
|
||||
if not options.omit_disable:
|
||||
build.disable = 'Generated by import.py - check/set version fields and commit id'
|
||||
write_local_file = False
|
||||
else:
|
||||
raise FDroidException("Specify project url.")
|
||||
|
||||
app.UpdateCheckMode = 'Tags'
|
||||
build.commit = common.get_head_commit_id(git_repo)
|
||||
|
||||
versionName, versionCode, appid = check_for_kivy_buildozer(tmp_importer_dir, app, build)
|
||||
|
||||
# Extract some information...
|
||||
paths = common.get_all_gradle_and_manifests(tmp_importer_dir)
|
||||
subdir = common.get_gradle_subdir(tmp_importer_dir, paths)
|
||||
if paths:
|
||||
versionName, versionCode, appid = common.parse_androidmanifests(paths, app)
|
||||
if not appid:
|
||||
raise FDroidException(_("Couldn't find Application ID"))
|
||||
if not versionName:
|
||||
logging.warning(_('Could not find latest version name'))
|
||||
if not versionCode:
|
||||
logging.warning(_('Could not find latest version code'))
|
||||
elif not appid:
|
||||
raise FDroidException(_("No gradle project could be found. Specify --subdir?"))
|
||||
|
||||
# Make sure it's actually new...
|
||||
if appid in apps:
|
||||
raise FDroidException(_('Package "{appid}" already exists').format(appid=appid))
|
||||
|
||||
# Create a build line...
|
||||
build.versionName = versionName or 'Unknown'
|
||||
build.versionCode = versionCode or '0' # TODO heinous but this is still a str
|
||||
if options.subdir:
|
||||
build.subdir = options.subdir
|
||||
build.gradle = ['yes']
|
||||
elif subdir:
|
||||
build.subdir = subdir
|
||||
build.gradle = ['yes']
|
||||
|
||||
if options.license:
|
||||
app.License = options.license
|
||||
if options.categories:
|
||||
app.Categories = options.categories.split(',')
|
||||
if os.path.exists(os.path.join(subdir, 'jni')):
|
||||
build.buildjni = ['yes']
|
||||
if os.path.exists(os.path.join(subdir, 'build.gradle')) \
|
||||
or os.path.exists(os.path.join(subdir, 'build.gradle')):
|
||||
build.gradle = ['yes']
|
||||
|
||||
package_json = os.path.join(tmp_importer_dir, 'package.json') # react-native
|
||||
pubspec_yaml = os.path.join(tmp_importer_dir, 'pubspec.yaml') # flutter
|
||||
if os.path.exists(package_json):
|
||||
build.sudo = ['apt-get update || apt-get update', 'apt-get install -t stretch-backports npm', 'npm install -g react-native-cli']
|
||||
build.init = ['npm install']
|
||||
with open(package_json) as fp:
|
||||
data = json.load(fp)
|
||||
app.AutoName = data.get('name', app.AutoName)
|
||||
app.License = data.get('license', app.License)
|
||||
app.Description = data.get('description', app.Description)
|
||||
app.WebSite = data.get('homepage', app.WebSite)
|
||||
app_json = os.path.join(tmp_importer_dir, 'app.json')
|
||||
if os.path.exists(app_json):
|
||||
with open(app_json) as fp:
|
||||
data = json.load(fp)
|
||||
app.AutoName = data.get('name', app.AutoName)
|
||||
if os.path.exists(pubspec_yaml):
|
||||
with open(pubspec_yaml) as fp:
|
||||
data = yaml.load(fp, Loader=SafeLoader)
|
||||
app.AutoName = data.get('name', app.AutoName)
|
||||
app.License = data.get('license', app.License)
|
||||
app.Description = data.get('description', app.Description)
|
||||
build.srclibs = ['flutter@stable']
|
||||
build.output = 'build/app/outputs/apk/release/app-release.apk'
|
||||
build.build = [
|
||||
'$$flutter$$/bin/flutter config --no-analytics',
|
||||
'$$flutter$$/bin/flutter packages pub get',
|
||||
'$$flutter$$/bin/flutter build apk',
|
||||
]
|
||||
|
||||
git_modules = os.path.join(tmp_importer_dir, '.gitmodules')
|
||||
if os.path.exists(git_modules):
|
||||
build.submodules = True
|
||||
|
||||
metadata.post_metadata_parse(app)
|
||||
|
||||
app.builds.append(build)
|
||||
|
||||
if write_local_file:
|
||||
metadata.write_metadata('.fdroid.yml', app)
|
||||
else:
|
||||
# Keep the repo directory to save bandwidth...
|
||||
if not os.path.exists('build'):
|
||||
os.mkdir('build')
|
||||
build_dir = os.path.join('build', appid)
|
||||
if os.path.exists(build_dir):
|
||||
logging.warning(_('{path} already exists, ignoring import results!')
|
||||
.format(path=build_dir))
|
||||
sys.exit(1)
|
||||
elif tmp_importer_dir is not None:
|
||||
shutil.move(tmp_importer_dir, build_dir)
|
||||
with open('build/.fdroidvcs-' + appid, 'w') as f:
|
||||
f.write(app.RepoType + ' ' + app.Repo)
|
||||
|
||||
metadatapath = os.path.join('metadata', appid + '.yml')
|
||||
metadata.write_metadata(metadatapath, app)
|
||||
logging.info("Wrote " + metadatapath)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,470 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Extract application metadata from a source repository."""
|
||||
#
|
||||
# import_subcommand.py - part of the FDroid server tools
|
||||
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
||||
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import urllib
|
||||
from argparse import ArgumentParser
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import git
|
||||
import yaml
|
||||
|
||||
try:
|
||||
from yaml import CSafeLoader as SafeLoader
|
||||
except ImportError:
|
||||
from yaml import SafeLoader
|
||||
|
||||
from . import _, common, metadata
|
||||
from .exception import FDroidException
|
||||
|
||||
config = None
|
||||
|
||||
SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?')
|
||||
GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:?([^'"]+)['"]''')
|
||||
APPLICATION_ID_REGEX = re.compile(r'''\s*applicationId\s=?\s?['"].*['"]''')
|
||||
|
||||
|
||||
def get_all_gradle_and_manifests(build_dir):
|
||||
paths = []
|
||||
for root, dirs, files in os.walk(build_dir):
|
||||
for f in sorted(files):
|
||||
if f == 'AndroidManifest.xml' or f.endswith(('.gradle', '.gradle.kts')):
|
||||
full = Path(root) / f
|
||||
paths.append(full)
|
||||
return paths
|
||||
|
||||
|
||||
def get_gradle_subdir(build_dir, paths):
|
||||
"""Get the subdir where the gradle build is based."""
|
||||
first_gradle_dir = None
|
||||
for path in paths:
|
||||
if not first_gradle_dir:
|
||||
first_gradle_dir = path.parent.relative_to(build_dir)
|
||||
if path.exists() and SETTINGS_GRADLE_REGEX.match(path.name):
|
||||
for m in GRADLE_SUBPROJECT_REGEX.finditer(path.read_text(encoding='utf-8')):
|
||||
for f in (path.parent / m.group(1)).glob('build.gradle*'):
|
||||
with f.open(encoding='utf-8') as fp:
|
||||
for line in fp:
|
||||
if common.ANDROID_PLUGIN_REGEX.match(
|
||||
line
|
||||
) or APPLICATION_ID_REGEX.match(line):
|
||||
return f.parent.relative_to(build_dir)
|
||||
if first_gradle_dir and first_gradle_dir != Path('.'):
|
||||
return first_gradle_dir
|
||||
|
||||
|
||||
def handle_retree_error_on_windows(function, path, excinfo):
|
||||
"""Python can't remove a readonly file on Windows so chmod first."""
|
||||
if function in (os.unlink, os.rmdir, os.remove) and excinfo[0] == PermissionError:
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
function(path)
|
||||
|
||||
|
||||
def clone_to_tmp_dir(app: metadata.App, rev=None) -> Path:
|
||||
"""Clone the source repository of an app to a temporary directory for further processing.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
app
|
||||
The App instance to clone the source of.
|
||||
|
||||
Returns
|
||||
-------
|
||||
tmp_dir
|
||||
The (temporary) directory the apps source has been cloned into.
|
||||
|
||||
"""
|
||||
tmp_dir = Path('tmp')
|
||||
tmp_dir.mkdir(exist_ok=True)
|
||||
|
||||
tmp_dir = tmp_dir / 'importer'
|
||||
|
||||
if tmp_dir.exists():
|
||||
shutil.rmtree(str(tmp_dir), onerror=handle_retree_error_on_windows)
|
||||
vcs = common.getvcs(app.RepoType, app.Repo, tmp_dir)
|
||||
vcs.gotorevision(rev)
|
||||
|
||||
return tmp_dir
|
||||
|
||||
|
||||
def getrepofrompage(url: str) -> tuple[Optional[str], str]:
|
||||
"""Get the repo type and address from the given web page.
|
||||
|
||||
The page is scanned in a rather naive manner for 'git clone xxxx',
|
||||
'hg clone xxxx', etc, and when one of these is found it's assumed
|
||||
that's the information we want. Returns repotype, address, or
|
||||
None, reason
|
||||
|
||||
Parameters
|
||||
----------
|
||||
url
|
||||
The url to look for repository information at.
|
||||
|
||||
Returns
|
||||
-------
|
||||
repotype_or_none
|
||||
The found repository type or None if an error occured.
|
||||
address_or_reason
|
||||
The address to the found repository or the reason if an error occured.
|
||||
|
||||
"""
|
||||
if not url.startswith('http'):
|
||||
return (None, _('{url} does not start with "http"!'.format(url=url)))
|
||||
req = urllib.request.urlopen(url) # nosec B310 non-http URLs are filtered out
|
||||
if req.getcode() != 200:
|
||||
return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode()))
|
||||
page = req.read().decode(req.headers.get_content_charset())
|
||||
|
||||
# Works for BitBucket
|
||||
m = re.search('data-fetch-url="(.*)"', page)
|
||||
if m is not None:
|
||||
repo = m.group(1)
|
||||
|
||||
if repo.endswith('.git'):
|
||||
return ('git', repo)
|
||||
|
||||
return ('hg', repo)
|
||||
|
||||
# Works for BitBucket (obsolete)
|
||||
index = page.find('hg clone')
|
||||
if index != -1:
|
||||
repotype = 'hg'
|
||||
repo = page[index + 9 :]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
return (None, _("Error while getting repo address"))
|
||||
repo = repo[:index]
|
||||
repo = repo.split('"')[0]
|
||||
return (repotype, repo)
|
||||
|
||||
# Works for BitBucket (obsolete)
|
||||
index = page.find('git clone')
|
||||
if index != -1:
|
||||
repotype = 'git'
|
||||
repo = page[index + 10 :]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
return (None, _("Error while getting repo address"))
|
||||
repo = repo[:index]
|
||||
repo = repo.split('"')[0]
|
||||
return (repotype, repo)
|
||||
|
||||
return (None, _("No information found.") + page)
|
||||
|
||||
|
||||
def get_app_from_url(url: str) -> metadata.App:
|
||||
"""Guess basic app metadata from the URL.
|
||||
|
||||
The URL must include a network hostname, unless it is an lp:,
|
||||
file:, or git/ssh URL. This throws ValueError on bad URLs to
|
||||
match urlparse().
|
||||
|
||||
Parameters
|
||||
----------
|
||||
url
|
||||
The URL to look to look for app metadata at.
|
||||
|
||||
Returns
|
||||
-------
|
||||
app
|
||||
App instance with the found metadata.
|
||||
|
||||
Raises
|
||||
------
|
||||
:exc:`~fdroidserver.exception.FDroidException`
|
||||
If the VCS type could not be determined.
|
||||
:exc:`ValueError`
|
||||
If the URL is invalid.
|
||||
|
||||
"""
|
||||
parsed = urllib.parse.urlparse(url)
|
||||
invalid_url = False
|
||||
if not parsed.scheme or not parsed.path:
|
||||
invalid_url = True
|
||||
|
||||
app = metadata.App()
|
||||
app.Repo = url
|
||||
if url.startswith('git://') or url.startswith('git@'):
|
||||
app.RepoType = 'git'
|
||||
elif parsed.netloc == 'github.com':
|
||||
app.RepoType = 'git'
|
||||
app.SourceCode = url
|
||||
app.IssueTracker = url + '/issues'
|
||||
elif parsed.netloc in ('gitlab.com', 'framagit.org'):
|
||||
# git can be fussy with gitlab URLs unless they end in .git
|
||||
if url.endswith('.git'):
|
||||
url = url[:-4]
|
||||
app.Repo = url + '.git'
|
||||
app.RepoType = 'git'
|
||||
app.SourceCode = url
|
||||
app.IssueTracker = url + '/issues'
|
||||
elif parsed.netloc == 'notabug.org':
|
||||
if url.endswith('.git'):
|
||||
url = url[:-4]
|
||||
app.Repo = url + '.git'
|
||||
app.RepoType = 'git'
|
||||
app.SourceCode = url
|
||||
app.IssueTracker = url + '/issues'
|
||||
elif parsed.netloc == 'bitbucket.org':
|
||||
if url.endswith('/'):
|
||||
url = url[:-1]
|
||||
app.SourceCode = url + '/src'
|
||||
app.IssueTracker = url + '/issues'
|
||||
# Figure out the repo type and adddress...
|
||||
app.RepoType, app.Repo = getrepofrompage(url)
|
||||
elif parsed.netloc == 'codeberg.org':
|
||||
app.RepoType = 'git'
|
||||
app.SourceCode = url
|
||||
app.IssueTracker = url + '/issues'
|
||||
elif url.startswith('https://') and url.endswith('.git'):
|
||||
app.RepoType = 'git'
|
||||
|
||||
if not parsed.netloc and parsed.scheme in ('git', 'http', 'https', 'ssh'):
|
||||
invalid_url = True
|
||||
|
||||
if invalid_url:
|
||||
raise ValueError(_('"{url}" is not a valid URL!'.format(url=url)))
|
||||
|
||||
if not app.RepoType:
|
||||
raise FDroidException("Unable to determine vcs type. " + app.Repo)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def main():
|
||||
"""Extract app metadata and write it to a file.
|
||||
|
||||
The behaviour of this function is influenced by the configuration file as
|
||||
well as command line parameters.
|
||||
|
||||
Raises
|
||||
------
|
||||
:exc:`~fdroidserver.exception.FDroidException`
|
||||
If the repository already has local metadata, no URL is specified and
|
||||
the current directory is not a Git repository, no application ID could
|
||||
be found, no Gradle project could be found or there is already metadata
|
||||
for the found application ID.
|
||||
|
||||
"""
|
||||
global config
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument("-u", "--url", help=_("Project URL to import from."))
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--subdir",
|
||||
help=_("Path to main Android project subdirectory, if not in root."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-c",
|
||||
"--categories",
|
||||
help=_("Comma separated list of categories."),
|
||||
)
|
||||
parser.add_argument("-l", "--license", help=_("Overall license of the project."))
|
||||
parser.add_argument(
|
||||
"--omit-disable",
|
||||
action="store_true",
|
||||
help=_("Do not add 'disable:' to the generated build entries"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--rev",
|
||||
help=_(
|
||||
"Allows a different revision (or git branch) to be specified for the initial import"
|
||||
),
|
||||
)
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = common.parse_args(parser)
|
||||
metadata.warnings_action = options.W
|
||||
|
||||
config = common.read_config()
|
||||
|
||||
apps = metadata.read_metadata()
|
||||
app = None
|
||||
|
||||
tmp_importer_dir = None
|
||||
|
||||
local_metadata_files = common.get_local_metadata_files()
|
||||
if local_metadata_files:
|
||||
raise FDroidException(
|
||||
_("This repo already has local metadata: %s") % local_metadata_files[0]
|
||||
)
|
||||
|
||||
build = metadata.Build()
|
||||
app = metadata.App()
|
||||
if options.url is None and Path('.git').is_dir():
|
||||
app.RepoType = 'git'
|
||||
tmp_importer_dir = Path.cwd()
|
||||
git_repo = git.Repo(tmp_importer_dir)
|
||||
for remote in git.Remote.iter_items(git_repo):
|
||||
if remote.name == 'origin':
|
||||
url = git_repo.remotes.origin.url
|
||||
app = get_app_from_url(url)
|
||||
break
|
||||
write_local_file = True
|
||||
elif options.url:
|
||||
app = get_app_from_url(options.url)
|
||||
tmp_importer_dir = clone_to_tmp_dir(app, options.rev)
|
||||
git_repo = git.Repo(tmp_importer_dir)
|
||||
|
||||
if not options.omit_disable:
|
||||
build.disable = (
|
||||
'Generated by `fdroid import` - check version fields and commitid'
|
||||
)
|
||||
write_local_file = False
|
||||
else:
|
||||
raise FDroidException("Specify project url.")
|
||||
|
||||
app.AutoUpdateMode = 'Version'
|
||||
app.UpdateCheckMode = 'Tags'
|
||||
build.commit = common.get_head_commit_id(tmp_importer_dir)
|
||||
|
||||
# Extract some information...
|
||||
paths = get_all_gradle_and_manifests(tmp_importer_dir)
|
||||
gradle_subdir = get_gradle_subdir(tmp_importer_dir, paths)
|
||||
if paths:
|
||||
versionName, versionCode, appid = common.parse_androidmanifests(paths, app)
|
||||
if not appid:
|
||||
raise FDroidException(_("Couldn't find Application ID"))
|
||||
if not versionName:
|
||||
logging.warning(_('Could not find latest versionName'))
|
||||
if not versionCode:
|
||||
logging.warning(_('Could not find latest versionCode'))
|
||||
else:
|
||||
raise FDroidException(_("No gradle project could be found. Specify --subdir?"))
|
||||
|
||||
# Make sure it's actually new...
|
||||
if appid in apps:
|
||||
raise FDroidException(_('Package "{appid}" already exists').format(appid=appid))
|
||||
|
||||
# Create a build line...
|
||||
build.versionName = versionName or 'Unknown'
|
||||
app.CurrentVersion = build.versionName
|
||||
build.versionCode = versionCode or 0
|
||||
app.CurrentVersionCode = build.versionCode
|
||||
if options.subdir:
|
||||
build.subdir = options.subdir
|
||||
elif gradle_subdir:
|
||||
build.subdir = gradle_subdir.as_posix()
|
||||
# subdir might be None
|
||||
subdir = Path(tmp_importer_dir / build.subdir) if build.subdir else tmp_importer_dir
|
||||
|
||||
if options.license:
|
||||
app.License = options.license
|
||||
if options.categories:
|
||||
app.Categories = options.categories.split(',')
|
||||
if (subdir / 'jni').exists():
|
||||
build.buildjni = ['yes']
|
||||
if (subdir / 'build.gradle').exists() or (subdir / 'build.gradle.kts').exists():
|
||||
build.gradle = ['yes']
|
||||
|
||||
app.AutoName = common.fetch_real_name(subdir, build.gradle)
|
||||
|
||||
package_json = tmp_importer_dir / 'package.json' # react-native
|
||||
pubspec_yaml = tmp_importer_dir / 'pubspec.yaml' # flutter
|
||||
if package_json.exists():
|
||||
build.sudo = [
|
||||
'sysctl fs.inotify.max_user_watches=524288 || true',
|
||||
'apt-get update',
|
||||
'apt-get install -y npm',
|
||||
]
|
||||
build.init = ['npm install --build-from-source']
|
||||
with package_json.open() as fp:
|
||||
data = json.load(fp)
|
||||
app.AutoName = app.AutoName or data.get('name')
|
||||
app.License = data.get('license', app.License)
|
||||
app.Description = data.get('description', app.Description)
|
||||
app.WebSite = data.get('homepage', app.WebSite)
|
||||
app_json = tmp_importer_dir / 'app.json'
|
||||
build.scanignore = ['android/build.gradle']
|
||||
build.scandelete = ['node_modules']
|
||||
if app_json.exists():
|
||||
with app_json.open() as fp:
|
||||
data = json.load(fp)
|
||||
app.AutoName = app.AutoName or data.get('name')
|
||||
if pubspec_yaml.exists():
|
||||
with pubspec_yaml.open() as fp:
|
||||
data = yaml.load(fp, Loader=SafeLoader)
|
||||
app.AutoName = app.AutoName or data.get('name')
|
||||
app.License = data.get('license', app.License)
|
||||
app.Description = data.get('description', app.Description)
|
||||
app.UpdateCheckData = 'pubspec.yaml|version:\\s.+\\+(\\d+)|.|version:\\s(.+)\\+'
|
||||
build.srclibs = ['flutter@stable']
|
||||
build.output = 'build/app/outputs/flutter-apk/app-release.apk'
|
||||
build.subdir = None
|
||||
build.gradle = None
|
||||
build.prebuild = [
|
||||
'export PUB_CACHE=$(pwd)/.pub-cache',
|
||||
'$$flutter$$/bin/flutter config --no-analytics',
|
||||
'$$flutter$$/bin/flutter packages pub get',
|
||||
]
|
||||
build.scandelete = [
|
||||
'.pub-cache',
|
||||
]
|
||||
build.build = [
|
||||
'export PUB_CACHE=$(pwd)/.pub-cache',
|
||||
'$$flutter$$/bin/flutter build apk',
|
||||
]
|
||||
|
||||
git_modules = tmp_importer_dir / '.gitmodules'
|
||||
if git_modules.exists():
|
||||
build.submodules = True
|
||||
|
||||
metadata.post_parse_yaml_metadata(app)
|
||||
|
||||
app['Builds'].append(build)
|
||||
|
||||
if write_local_file:
|
||||
metadata.write_metadata(Path('.fdroid.yml'), app)
|
||||
else:
|
||||
# Keep the repo directory to save bandwidth...
|
||||
Path('build').mkdir(exist_ok=True)
|
||||
build_dir = Path('build') / appid
|
||||
if build_dir.exists():
|
||||
logging.warning(
|
||||
_('{path} already exists, ignoring import results!').format(
|
||||
path=build_dir
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
elif tmp_importer_dir:
|
||||
# For Windows: Close the repo or a git.exe instance holds handles to repo
|
||||
try:
|
||||
git_repo.close()
|
||||
except AttributeError: # Debian/stretch's version does not have close()
|
||||
pass
|
||||
shutil.move(tmp_importer_dir, build_dir)
|
||||
Path('build/.fdroidvcs-' + appid).write_text(app.RepoType + ' ' + app.Repo)
|
||||
|
||||
metadatapath = Path('metadata') / (appid + '.yml')
|
||||
metadata.write_metadata(metadatapath, app)
|
||||
logging.info("Wrote " + str(metadatapath))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
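
For reference, what get_app_from_url() above derives from a typical GitLab project URL, following the branches in the code; the project URL is made up.

app = get_app_from_url('https://gitlab.com/example/app')
# Per the gitlab.com branch above:
#   app.RepoType     == 'git'
#   app.Repo         == 'https://gitlab.com/example/app.git'
#   app.SourceCode   == 'https://gitlab.com/example/app'
#   app.IssueTracker == 'https://gitlab.com/example/app/issues'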
|
@ -19,70 +19,54 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import sys
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
|
||||
from . import _, common
|
||||
from . import _
|
||||
from . import common
|
||||
from .exception import FDroidException
|
||||
|
||||
config = {}
|
||||
options = None
|
||||
|
||||
|
||||
def disable_in_config(key, value):
|
||||
"""Write a key/value to the local config.yml, then comment it out."""
|
||||
'''write a key/value to the local config.yml, then comment it out'''
|
||||
import yaml
|
||||
|
||||
with open(common.CONFIG_FILE) as fp:
|
||||
data = fp.read()
|
||||
with open('config.yml') as f:
|
||||
data = f.read()
|
||||
pattern = r'\n[\s#]*' + key + r':.*'
|
||||
repl = '\n#' + yaml.dump({key: value}, default_flow_style=False)
|
||||
data = re.sub(pattern, repl, data)
|
||||
with open(common.CONFIG_FILE, 'w') as fp:
|
||||
fp.writelines(data)
|
||||
with open('config.yml', 'w') as f:
|
||||
f.writelines(data)
|
||||
|
||||
|
||||
def main():
|
||||
global config
|
||||
|
||||
global options, config
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--distinguished-name",
|
||||
default=None,
|
||||
help=_("X.509 'Distinguished Name' used when generating keys"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--keystore",
|
||||
default=None,
|
||||
help=_("Path to the keystore for the repo signing key"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--repo-keyalias",
|
||||
default=None,
|
||||
help=_("Alias of the repo signing key in the keystore"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--android-home",
|
||||
default=None,
|
||||
help=_("Path to the Android SDK (sometimes set in ANDROID_HOME)"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-prompt",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Do not prompt for Android SDK path, just fail"),
|
||||
)
|
||||
options = common.parse_args(parser)
|
||||
|
||||
common.set_console_logging(options.verbose, options.color)
|
||||
parser.add_argument("-d", "--distinguished-name", default=None,
|
||||
help=_("X.509 'Distinguished Name' used when generating keys"))
|
||||
parser.add_argument("--keystore", default=None,
|
||||
help=_("Path to the keystore for the repo signing key"))
|
||||
parser.add_argument("--repo-keyalias", default=None,
|
||||
help=_("Alias of the repo signing key in the keystore"))
|
||||
parser.add_argument("--android-home", default=None,
|
||||
help=_("Path to the Android SDK (sometimes set in ANDROID_HOME)"))
|
||||
parser.add_argument("--no-prompt", action="store_true", default=False,
|
||||
help=_("Do not prompt for Android SDK path, just fail"))
|
||||
options = parser.parse_args()
|
||||
|
||||
aapt = None
|
||||
fdroiddir = os.getcwd()
|
||||
test_config = dict()
|
||||
examplesdir = common.get_examples_dir()
|
||||
|
@ -97,10 +81,9 @@ def main():
|
|||
# exist, prompt the user using platform-specific default
|
||||
# and if the user leaves it blank, ignore and move on.
|
||||
default_sdk_path = ''
|
||||
if sys.platform in ('win32', 'cygwin'):
|
||||
p = os.path.join(
|
||||
os.getenv('USERPROFILE'), 'AppData', 'Local', 'Android', 'android-sdk'
|
||||
)
|
||||
if sys.platform == 'win32' or sys.platform == 'cygwin':
|
||||
p = os.path.join(os.getenv('USERPROFILE'),
|
||||
'AppData', 'Local', 'Android', 'android-sdk')
|
||||
elif sys.platform == 'darwin':
|
||||
# on OSX, Homebrew is common and has an easy path to detect
|
||||
p = '/usr/local/opt/android-sdk'
|
||||
|
@ -114,13 +97,10 @@ def main():
|
|||
test_config['sdk_path'] = default_sdk_path
|
||||
|
||||
if not common.test_sdk_exists(test_config):
|
||||
del test_config['sdk_path']
|
||||
del(test_config['sdk_path'])
|
||||
while not options.no_prompt:
|
||||
try:
|
||||
s = input(
|
||||
_('Enter the path to the Android SDK (%s) here:\n> ')
|
||||
% default_sdk_path
|
||||
)
|
||||
s = input(_('Enter the path to the Android SDK (%s) here:\n> ') % default_sdk_path)
|
||||
except KeyboardInterrupt:
|
||||
print('')
|
||||
sys.exit(1)
|
||||
|
@ -133,28 +113,16 @@ def main():
|
|||
default_sdk_path = ''
|
||||
|
||||
if test_config.get('sdk_path') and not common.test_sdk_exists(test_config):
|
||||
raise FDroidException(
|
||||
_("Android SDK not found at {path}!").format(path=test_config['sdk_path'])
|
||||
)
|
||||
raise FDroidException(_("Android SDK not found at {path}!")
|
||||
.format(path=test_config['sdk_path']))
|
||||
|
||||
if not os.path.exists(common.CONFIG_FILE):
|
||||
if not os.path.exists('config.yml') and not os.path.exists('config.py'):
|
||||
# 'metadata' and 'tmp' are created in fdroid
|
||||
if not os.path.exists('repo'):
|
||||
os.mkdir('repo')
|
||||
example_config_yml = os.path.join(examplesdir, common.CONFIG_FILE)
|
||||
if os.path.exists(example_config_yml):
|
||||
shutil.copyfile(example_config_yml, common.CONFIG_FILE)
|
||||
else:
|
||||
from pkg_resources import get_distribution
|
||||
|
||||
versionstr = get_distribution('fdroidserver').version
|
||||
if not versionstr:
|
||||
versionstr = 'master'
|
||||
with open(common.CONFIG_FILE, 'w') as fp:
|
||||
fp.write('# see https://gitlab.com/fdroid/fdroidserver/blob/')
|
||||
fp.write(versionstr)
|
||||
fp.write(f'/examples/{common.CONFIG_FILE}\n')
|
||||
os.chmod(common.CONFIG_FILE, 0o0600)
|
||||
shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir)
|
||||
shutil.copyfile(os.path.join(examplesdir, 'config.yml'), 'config.yml')
|
||||
os.chmod('config.yml', 0o0600)
|
||||
# If android_home is None, test_config['sdk_path'] will be used and
|
||||
# "$ANDROID_HOME" may be used if the env var is set up correctly.
|
||||
# If android_home is not None, the path given from the command line
|
||||
|
@ -162,14 +130,16 @@ def main():
|
|||
if 'sdk_path' in test_config:
|
||||
common.write_to_config(test_config, 'sdk_path', options.android_home)
|
||||
else:
|
||||
logging.warning(
|
||||
'Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...'
|
||||
)
|
||||
logging.warning('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...')
|
||||
logging.info('Try running `fdroid init` in an empty directory.')
|
||||
raise FDroidException('Repository already exists.')
|
||||
|
||||
# now that we have a local config.yml, read configuration...
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
# enable apksigner by default so v2/v3 APK signatures validate
|
||||
if common.find_apksigner() is not None:
|
||||
test_config['apksigner'] = common.find_apksigner()
|
||||
|
||||
# the NDK is optional and there may be multiple versions of it, so it's
|
||||
# left for the user to configure
|
||||
|
@ -188,9 +158,8 @@ def main():
|
|||
else:
|
||||
keystore = os.path.abspath(options.keystore)
|
||||
if not os.path.exists(keystore):
|
||||
logging.info(
|
||||
'"' + keystore + '" does not exist, creating a new keystore there.'
|
||||
)
|
||||
logging.info('"' + keystore
|
||||
+ '" does not exist, creating a new keystore there.')
|
||||
common.write_to_config(test_config, 'keystore', keystore)
|
||||
repo_keyalias = None
|
||||
keydname = None
|
||||
|
@ -201,19 +170,12 @@ def main():
|
|||
keydname = options.distinguished_name
|
||||
common.write_to_config(test_config, 'keydname', keydname)
|
||||
if keystore == 'NONE': # we're using a smartcard
|
||||
common.write_to_config(
|
||||
test_config, 'repo_keyalias', '1'
|
||||
) # seems to be the default
|
||||
common.write_to_config(test_config, 'repo_keyalias', '1') # seems to be the default
|
||||
disable_in_config('keypass', 'never used with smartcard')
|
||||
common.write_to_config(
|
||||
test_config,
|
||||
'smartcardoptions',
|
||||
(
|
||||
'-storetype PKCS11 '
|
||||
+ '-providerClass sun.security.pkcs11.SunPKCS11 '
|
||||
+ '-providerArg opensc-fdroid.cfg'
|
||||
),
|
||||
)
|
||||
common.write_to_config(test_config, 'smartcardoptions',
|
||||
('-storetype PKCS11 '
|
||||
+ '-providerClass sun.security.pkcs11.SunPKCS11 '
|
||||
+ '-providerArg opensc-fdroid.cfg'))
|
||||
# find opensc-pkcs11.so
|
||||
if not os.path.exists('opensc-fdroid.cfg'):
|
||||
if os.path.exists('/usr/lib/opensc-pkcs11.so'):
|
||||
|
@ -221,49 +183,35 @@ def main():
|
|||
elif os.path.exists('/usr/lib64/opensc-pkcs11.so'):
|
||||
opensc_so = '/usr/lib64/opensc-pkcs11.so'
|
||||
else:
|
||||
files = glob.glob(
|
||||
'/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so'
|
||||
)
|
||||
files = glob.glob('/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so')
|
||||
if len(files) > 0:
|
||||
opensc_so = files[0]
|
||||
else:
|
||||
opensc_so = '/usr/lib/opensc-pkcs11.so'
|
||||
logging.warning(
|
||||
'No OpenSC PKCS#11 module found, '
|
||||
+ 'install OpenSC then edit "opensc-fdroid.cfg"!'
|
||||
)
|
||||
logging.warning('No OpenSC PKCS#11 module found, '
|
||||
+ 'install OpenSC then edit "opensc-fdroid.cfg"!')
|
||||
with open('opensc-fdroid.cfg', 'w') as f:
|
||||
f.write('name = OpenSC\nlibrary = ')
|
||||
f.write(opensc_so)
|
||||
f.write('\n')
|
||||
logging.info(
|
||||
"Repo setup using a smartcard HSM. Please edit keystorepass and repo_keyalias in config.yml."
|
||||
)
|
||||
logging.info(
|
||||
"If you want to generate a new repo signing key in the HSM you can do that with 'fdroid update "
|
||||
"--create-key'."
|
||||
)
|
||||
logging.info("Repo setup using a smartcard HSM. Please edit keystorepass and repo_keyalias in config.yml.")
|
||||
logging.info("If you want to generate a new repo signing key in the HSM you can do that with 'fdroid update "
|
||||
"--create-key'.")
|
||||
elif os.path.exists(keystore):
|
||||
to_set = ['keystorepass', 'keypass', 'repo_keyalias', 'keydname']
|
||||
if repo_keyalias:
|
||||
to_set.remove('repo_keyalias')
|
||||
if keydname:
|
||||
to_set.remove('keydname')
|
||||
logging.warning(
|
||||
'\n'
|
||||
+ _('Using existing keystore "{path}"').format(path=keystore)
|
||||
+ '\n'
|
||||
+ _('Now set these in config.yml:')
|
||||
+ ' '
|
||||
+ ', '.join(to_set)
|
||||
+ '\n'
|
||||
)
|
||||
logging.warning('\n' + _('Using existing keystore "{path}"').format(path=keystore)
|
||||
+ '\n' + _('Now set these in config.yml:') + ' '
|
||||
+ ', '.join(to_set) + '\n')
|
||||
else:
|
||||
password = common.genpassword()
|
||||
c = dict(test_config)
|
||||
c['keystorepass'] = password
|
||||
c['keypass'] = password
|
||||
c['repo_keyalias'] = repo_keyalias or socket.getfqdn()
|
||||
c['repo_keyalias'] = socket.getfqdn()
|
||||
c['keydname'] = 'CN=' + c['repo_keyalias'] + ', OU=F-Droid'
|
||||
common.write_to_config(test_config, 'keystorepass', password)
|
||||
common.write_to_config(test_config, 'keypass', password)
|
||||
|
@ -274,25 +222,17 @@ def main():
|
|||
msg = '\n'
|
||||
msg += _('Built repo based in "%s" with this config:') % fdroiddir
|
||||
msg += '\n\n Android SDK:\t\t\t' + config['sdk_path']
|
||||
if aapt:
|
||||
msg += '\n Android SDK Build Tools:\t' + os.path.dirname(aapt)
|
||||
msg += '\n Android NDK r12b (optional):\t$ANDROID_NDK'
|
||||
msg += '\n ' + _('Keystore for signing key:\t') + keystore
|
||||
if repo_keyalias is not None:
|
||||
msg += '\n Alias for key in store:\t' + repo_keyalias
|
||||
msg += '\n\n'
|
||||
msg += (
|
||||
_(
|
||||
"""To complete the setup, add your APKs to "%s"
|
||||
msg += '\n\n' + '''To complete the setup, add your APKs to "%s"
|
||||
then run "fdroid update -c; fdroid update". You might also want to edit
|
||||
"config.yml" to set the URL, repo name, and more. You should also set up
|
||||
a signing key (a temporary one might have been automatically generated).
|
||||
|
||||
For more info: https://f-droid.org/docs/Setup_an_F-Droid_App_Repo
|
||||
and https://f-droid.org/docs/Signing_Process"""
|
||||
)
|
||||
% os.path.join(fdroiddir, 'repo')
|
||||
)
|
||||
if not options.quiet:
|
||||
# normally, INFO is only shown with --verbose, but show this unless --quiet
|
||||
logger = logging.getLogger()
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.info(msg)
|
||||
logging.shutdown()
|
||||
and https://f-droid.org/docs/Signing_Process''' % os.path.join(fdroiddir, 'repo')
|
||||
logging.info(msg)
|
||||
|
|
|
@ -17,372 +17,62 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import glob
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import termios
|
||||
import tty
|
||||
from argparse import ArgumentParser, BooleanOptionalAction
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlencode, urlparse, urlunparse
|
||||
import os
|
||||
import glob
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
|
||||
import defusedxml.ElementTree as XMLElementTree
|
||||
|
||||
from . import _, common, github, index, net
|
||||
from . import _
|
||||
from . import common
|
||||
from .common import SdkToolsPopen
|
||||
from .exception import FDroidException
|
||||
|
||||
DEFAULT_IPFS_GATEWAYS = ("https://gateway.ipfs.io/ipfs/",)
|
||||
MAVEN_CENTRAL_MIRRORS = [
|
||||
{
|
||||
"url": "https://repo1.maven.org/maven2/",
|
||||
"dnsA": ["199.232.16.209"],
|
||||
"worksWithoutSNI": True,
|
||||
},
|
||||
{
|
||||
"url": "https://repo.maven.apache.org/maven2/",
|
||||
"dnsA": ["199.232.16.215"],
|
||||
"worksWithoutSNI": True,
|
||||
},
|
||||
{
|
||||
"url": "https://maven-central-asia.storage-download.googleapis.com/maven2/",
|
||||
},
|
||||
{
|
||||
"url": "https://maven-central-eu.storage-download.googleapis.com/maven2/",
|
||||
},
|
||||
{
|
||||
"url": "https://maven-central.storage-download.googleapis.com/maven2/",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def download_apk(appid='org.fdroid.fdroid', privacy_mode=False):
|
||||
"""Download an APK from F-Droid via the first mirror that works."""
|
||||
url = urlunparse(
|
||||
urlparse(common.FDROIDORG_MIRRORS[0]['url'])._replace(
|
||||
query=urlencode({'fingerprint': common.FDROIDORG_FINGERPRINT})
|
||||
)
|
||||
)
|
||||
|
||||
data, _ignored = index.download_repo_index_v2(url)
|
||||
app = data.get('packages', dict()).get(appid)
|
||||
preferred_version = None
|
||||
for version in app['versions'].values():
|
||||
if not preferred_version:
|
||||
# if all else fails, use the first one
|
||||
preferred_version = version
|
||||
if not version.get('releaseChannels'):
|
||||
# prefer APK in default release channel
|
||||
preferred_version = version
|
||||
break
|
||||
|
||||
mirrors = common.append_filename_to_mirrors(
|
||||
preferred_version['file']['name'][1:], common.FDROIDORG_MIRRORS
|
||||
)
|
||||
ipfsCIDv1 = preferred_version['file'].get('ipfsCIDv1')
|
||||
if ipfsCIDv1:
|
||||
for gateway in DEFAULT_IPFS_GATEWAYS:
|
||||
mirrors.append({'url': os.path.join(gateway, ipfsCIDv1)})
|
||||
f = net.download_using_mirrors(mirrors)
|
||||
if f and os.path.exists(f):
|
||||
versionCode = preferred_version['manifest']['versionCode']
|
||||
f = Path(f)
|
||||
return str(f.rename(f.with_stem(f'{appid}_{versionCode}')).resolve())
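# A minimal usage sketch for download_apk(), assuming this code is importable
# as fdroidserver.install and the f-droid.org mirrors are reachable; the appid
# is only an example:
#
#   >>> from fdroidserver.install import download_apk
#   >>> path = download_apk('org.fdroid.fdroid')
#   >>> path.endswith('.apk')
#   True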
|
||||
|
||||
|
||||
def download_fdroid_apk(privacy_mode=False): # pylint: disable=unused-argument
|
||||
"""Directly download the current F-Droid APK and verify it.
|
||||
|
||||
This downloads the "download button" link, which is the version
|
||||
that is best tested for new installs.
|
||||
|
||||
"""
|
||||
mirror = common.FDROIDORG_MIRRORS[0]
|
||||
mirror['url'] = urlunparse(urlparse(mirror['url'])._replace(path='F-Droid.apk'))
|
||||
return net.download_using_mirrors([mirror])
|
||||
|
||||
|
||||
def download_fdroid_apk_from_github(privacy_mode=False):
|
||||
"""Download F-Droid.apk from F-Droid's GitHub Releases."""
|
||||
if common.config and not privacy_mode:
|
||||
token = common.config.get('github_token')
|
||||
else:
|
||||
token = None
|
||||
gh = github.GithubApi(token, 'https://github.com/f-droid/fdroidclient')
|
||||
latest_apk = gh.get_latest_apk()
|
||||
filename = os.path.basename(latest_apk)
|
||||
return net.download_file(latest_apk, os.path.join(common.get_cachedir(), filename))
|
||||
|
||||
|
||||
def download_fdroid_apk_from_ipns(privacy_mode=False):
|
||||
"""Download the F-Droid APK from an IPNS repo."""
|
||||
cid = 'k51qzi5uqu5dl4hbcksbdmplanu9n4hivnqsupqe6vzve1pdbeh418ssptldd3'
|
||||
mirrors = [
|
||||
{"url": f"https://ipfs.io/ipns/{cid}/F-Droid.apk"},
|
||||
]
|
||||
if not privacy_mode:
|
||||
mirrors.append({"url": f"https://{cid}.ipns.dweb.link/F-Droid.apk"})
|
||||
return net.download_using_mirrors(mirrors)
|
||||
|
||||
|
||||
def download_fdroid_apk_from_maven(privacy_mode=False):
|
||||
"""Download F-Droid.apk from Maven Central and official mirrors."""
|
||||
path = 'org/fdroid/fdroid/F-Droid'
|
||||
if privacy_mode:
|
||||
mirrors = MAVEN_CENTRAL_MIRRORS[:2] # skip the Google servers
|
||||
else:
|
||||
mirrors = MAVEN_CENTRAL_MIRRORS
|
||||
metadata = net.download_using_mirrors(
|
||||
common.append_filename_to_mirrors(
|
||||
os.path.join(path, 'maven-metadata.xml'), mirrors
|
||||
)
|
||||
)
|
||||
version = XMLElementTree.parse(metadata).getroot().findall('*.//latest')[0].text
|
||||
mirrors = common.append_filename_to_mirrors(
|
||||
os.path.join(path, version, f'F-Droid-{version}.apk'), mirrors
|
||||
)
|
||||
return net.download_using_mirrors(mirrors)
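# To make the Maven lookup concrete: with a hypothetical latest version of
# 1.19.1 reported by maven-metadata.xml, the artifact fetched from the primary
# mirror would be roughly
#
#   https://repo1.maven.org/maven2/org/fdroid/fdroid/F-Droid/1.19.1/F-Droid-1.19.1.apk
#
# and in privacy_mode only the first two (non-Google) mirrors are tried.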
|
||||
|
||||
|
||||
def install_fdroid_apk(privacy_mode=False):
|
||||
"""Download and install F-Droid.apk using all tricks we can muster.
|
||||
|
||||
By default, this first tries to fetch the official install APK
|
||||
which is offered when someone clicks the "download" button on
|
||||
https://f-droid.org/. Then it will try all the mirrors and
|
||||
methods until it gets something successful, or runs out of
|
||||
options.
|
||||
|
||||
There is a privacy_mode option which tries to download from mirrors first,
|
||||
so that the download comes from a mirror that hosts many different kinds
|
||||
of files, obscuring the otherwise clear link to F-Droid.
|
||||
|
||||
Returns
|
||||
-------
|
||||
None for success or the error message.
|
||||
|
||||
"""
|
||||
country_code = locale.getlocale()[0].split('_')[-1]
|
||||
if privacy_mode is None and country_code in ('CN', 'HK', 'IR', 'TM'):
|
||||
logging.warning(
|
||||
_('Privacy mode was enabled based on your locale ({country_code}).').format(
|
||||
country_code=country_code
|
||||
)
|
||||
)
|
||||
privacy_mode = True
|
||||
|
||||
if privacy_mode or not (common.config and common.config.get('jarsigner')):
|
||||
download_methods = [
|
||||
download_fdroid_apk_from_maven,
|
||||
download_fdroid_apk_from_ipns,
|
||||
download_fdroid_apk_from_github,
|
||||
]
|
||||
else:
|
||||
download_methods = [
|
||||
download_apk,
|
||||
download_fdroid_apk_from_maven,
|
||||
download_fdroid_apk_from_github,
|
||||
download_fdroid_apk_from_ipns,
|
||||
download_fdroid_apk,
|
||||
]
|
||||
for method in download_methods:
|
||||
try:
|
||||
f = method(privacy_mode=privacy_mode)
|
||||
break
|
||||
except Exception as e:
|
||||
logging.info(e)
|
||||
else:
|
||||
return _('F-Droid.apk could not be downloaded from any known source!')
|
||||
|
||||
fingerprint = common.apk_signer_fingerprint(f)
|
||||
if fingerprint.upper() != common.FDROIDORG_FINGERPRINT:
|
||||
return _('{path} has the wrong fingerprint ({fingerprint})!').format(
|
||||
path=f, fingerprint=fingerprint
|
||||
)
|
||||
install_apk(f)
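# A minimal sketch of calling install_fdroid_apk(), assuming adb and a
# connected device are available; per the docstring it returns None on
# success and an error message string otherwise:
#
#   >>> from fdroidserver.install import install_fdroid_apk
#   >>> error = install_fdroid_apk(privacy_mode=True)
#   >>> if error:
#   ...     print(error)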
|
||||
|
||||
|
||||
def install_apk(f):
|
||||
if common.config and common.config.get('apksigner'):
|
||||
# TODO this should always verify, but that requires APK sig verification in Python #94
|
||||
logging.info(_('Verifying package {path} with apksigner.').format(path=f))
|
||||
common.verify_apk_signature(f)
|
||||
if common.config and common.config.get('adb'):
|
||||
if devices():
|
||||
install_apks_to_devices([f])
|
||||
os.remove(f)
|
||||
else:
|
||||
os.remove(f)
|
||||
return _('No devices found for `adb install`! Please plug one in.')
|
||||
options = None
|
||||
config = None
|
||||
|
||||
|
||||
def devices():
|
||||
"""Get the list of device serials for use with adb commands."""
|
||||
p = common.SdkToolsPopen(['adb', "devices"])
|
||||
p = SdkToolsPopen(['adb', "devices"])
|
||||
if p.returncode != 0:
|
||||
raise FDroidException("An error occured when finding devices: %s" % p.output)
|
||||
serials = list()
|
||||
for line in p.output.splitlines():
|
||||
columns = line.strip().split("\t", maxsplit=1)
|
||||
if len(columns) == 2:
|
||||
serial, status = columns
|
||||
if status == 'device':
|
||||
serials.append(serial)
|
||||
else:
|
||||
d = {'serial': serial, 'status': status}
|
||||
logging.warning(_('adb reports {serial} is "{status}"!'.format(**d)))
|
||||
return serials
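# For reference, `adb devices` output looks roughly like this (serials made
# up, \t marks the tab separator adb prints), and devices() keeps only the
# entries whose status column is 'device':
#
#   List of devices attached
#   emulator-5554\tdevice
#   0123456789ABCDEF\tunauthorized
#
# Here devices() would return ['emulator-5554'] and log a warning about the
# unauthorized entry.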
|
||||
|
||||
|
||||
def install_apks_to_devices(apks):
|
||||
"""Install the list of APKs to all Android devices reported by `adb devices`."""
|
||||
for apk in apks:
|
||||
# Get device list each time to avoid device not found errors
|
||||
devs = devices()
|
||||
if not devs:
|
||||
raise FDroidException(_("No attached devices found"))
|
||||
logging.info(_("Installing %s...") % apk)
|
||||
for dev in devs:
|
||||
logging.info(
|
||||
_("Installing '{apkfilename}' on {dev}...").format(
|
||||
apkfilename=apk, dev=dev
|
||||
)
|
||||
)
|
||||
p = common.SdkToolsPopen(['adb', "-s", dev, "install", apk])
|
||||
fail = ""
|
||||
for line in p.output.splitlines():
|
||||
if line.startswith("Failure"):
|
||||
fail = line[9:-1]
|
||||
if not fail:
|
||||
continue
|
||||
|
||||
if fail == "INSTALL_FAILED_ALREADY_EXISTS":
|
||||
logging.warning(
|
||||
_('"{apkfilename}" is already installed on {dev}.').format(
|
||||
apkfilename=apk, dev=dev
|
||||
)
|
||||
)
|
||||
else:
|
||||
raise FDroidException(
|
||||
_("Failed to install '{apkfilename}' on {dev}: {error}").format(
|
||||
apkfilename=apk, dev=dev, error=fail
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def read_char():
|
||||
"""Read input from the terminal prompt one char at a time."""
|
||||
fd = sys.stdin.fileno()
|
||||
old_settings = termios.tcgetattr(fd)
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
ch = sys.stdin.read(1)
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||
return ch
|
||||
|
||||
|
||||
def strtobool(val):
|
||||
"""Convert a localized string representation of truth to True or False."""
|
||||
return val.lower() in ('', 'y', 'yes', _('yes'), _('true')) # '' is pressing Enter
|
||||
|
||||
|
||||
def prompt_user(yes, msg):
|
||||
"""Prompt user for yes/no, supporting Enter and Esc as accepted answers."""
|
||||
run_install = yes
|
||||
if yes is None and sys.stdout.isatty():
|
||||
print(msg, end=' ', flush=True)
|
||||
answer = ''
|
||||
while True:
|
||||
in_char = read_char()
|
||||
if in_char == '\r': # Enter key
|
||||
break
|
||||
if not in_char.isprintable():
|
||||
sys.exit(1)
|
||||
print(in_char, end='', flush=True)
|
||||
answer += in_char
|
||||
run_install = strtobool(answer)
|
||||
print()
|
||||
return run_install
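# A minimal sketch of the prompt helpers above: prompt_user() returns the
# --yes/--no value when one was given, otherwise it reads keystrokes until
# Enter and passes the collected answer to strtobool(), where pressing Enter
# alone counts as yes:
#
#   >>> strtobool('')      # plain Enter
#   True
#   >>> strtobool('no')
#   False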
|
||||
lines = [line for line in p.output.splitlines() if not line.startswith('* ')]
|
||||
if len(lines) < 3:
|
||||
return []
|
||||
lines = lines[1:-1]
|
||||
return [line.split()[0] for line in lines]
|
||||
|
||||
|
||||
def main():
|
||||
parser = ArgumentParser(
|
||||
usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
|
||||
)
|
||||
|
||||
global options, config
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"appid",
|
||||
nargs='*',
|
||||
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--all",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Install all signed applications available"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
"--privacy-mode",
|
||||
action=BooleanOptionalAction,
|
||||
default=None,
|
||||
help=_("Download F-Droid.apk using mirrors that leak less to the network"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-y",
|
||||
"--yes",
|
||||
action="store_true",
|
||||
default=None,
|
||||
help=_("Automatic yes to all prompts."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--no",
|
||||
action="store_false",
|
||||
dest='yes',
|
||||
help=_("Automatic no to all prompts."),
|
||||
)
|
||||
options = common.parse_args(parser)
|
||||
|
||||
common.set_console_logging(options.verbose, options.color)
|
||||
logging.captureWarnings(True) # for SNIMissingWarning
|
||||
|
||||
common.get_config()
|
||||
parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))
|
||||
parser.add_argument("-a", "--all", action="store_true", default=False,
|
||||
help=_("Install all signed applications available"))
|
||||
options = parser.parse_args()
|
||||
|
||||
if not options.appid and not options.all:
|
||||
run_install = prompt_user(
|
||||
options.yes,
|
||||
_('Would you like to download and install F-Droid.apk via adb? (YES/no)'),
|
||||
)
|
||||
if run_install:
|
||||
sys.exit(install_fdroid_apk(options.privacy_mode))
|
||||
sys.exit(1)
|
||||
parser.error(_("option %s: If you really want to install all the signed apps, use --all") % "all")
|
||||
|
||||
config = common.read_config(options)
|
||||
|
||||
output_dir = 'repo'
|
||||
if (options.appid or options.all) and not os.path.isdir(output_dir):
|
||||
logging.error(_("No signed output directory - nothing to do"))
|
||||
run_install = prompt_user(
|
||||
options.yes,
|
||||
_('Would you like to download the app(s) from f-droid.org? (YES/no)'),
|
||||
)
|
||||
if run_install:
|
||||
for appid in options.appid:
|
||||
f = download_apk(appid)
|
||||
install_apk(f)
|
||||
sys.exit(install_fdroid_apk(options.privacy_mode))
|
||||
sys.exit(1)
|
||||
if not os.path.isdir(output_dir):
|
||||
logging.info(_("No signed output directory - nothing to do"))
|
||||
sys.exit(0)
|
||||
|
||||
if options.appid:
|
||||
|
||||
vercodes = common.read_pkg_args(options.appid, True)
|
||||
common.get_metadata_files(vercodes) # only check appids
|
||||
apks = {appid: None for appid in vercodes}
|
||||
|
||||
# Get the signed APK with the highest vercode
|
||||
# Get the signed apk with the highest vercode
|
||||
for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):
|
||||
|
||||
try:
|
||||
appid, vercode = common.publishednameinfo(apkfile)
|
||||
except FDroidException:
|
||||
|
@ -395,15 +85,35 @@ def main():
|
|||
|
||||
for appid, apk in apks.items():
|
||||
if not apk:
|
||||
raise FDroidException(_("No signed APK available for %s") % appid)
|
||||
install_apks_to_devices(apks.values())
|
||||
raise FDroidException(_("No signed apk available for %s") % appid)
|
||||
|
||||
elif options.all:
|
||||
apks = {
|
||||
common.publishednameinfo(apkfile)[0]: apkfile
|
||||
for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk')))
|
||||
}
|
||||
install_apks_to_devices(apks.values())
|
||||
else:
|
||||
|
||||
apks = {common.publishednameinfo(apkfile)[0]: apkfile for apkfile in
|
||||
sorted(glob.glob(os.path.join(output_dir, '*.apk')))}
|
||||
|
||||
for appid, apk in apks.items():
|
||||
# Get device list each time to avoid device not found errors
|
||||
devs = devices()
|
||||
if not devs:
|
||||
raise FDroidException(_("No attached devices found"))
|
||||
logging.info(_("Installing %s...") % apk)
|
||||
for dev in devs:
|
||||
logging.info(_("Installing '{apkfilename}' on {dev}...").format(apkfilename=apk, dev=dev))
|
||||
p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
|
||||
fail = ""
|
||||
for line in p.output.splitlines():
|
||||
if line.startswith("Failure"):
|
||||
fail = line[9:-1]
|
||||
if not fail:
|
||||
continue
|
||||
|
||||
if fail == "INSTALL_FAILED_ALREADY_EXISTS":
|
||||
logging.warning(_('"{apkfilename}" is already installed on {dev}.')
|
||||
.format(apkfilename=apk, dev=dev))
|
||||
else:
|
||||
raise FDroidException(_("Failed to install '{apkfilename}' on {dev}: {error}")
|
||||
.format(apkfilename=apk, dev=dev, error=fail))
|
||||
|
||||
logging.info('\n' + _('Finished'))
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,300 +0,0 @@
|
|||
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||
# --------------------------------------------
|
||||
#
|
||||
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||
# otherwise using this software ("Python") in source or binary form and
|
||||
# its associated documentation.
|
||||
#
|
||||
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||
# analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
# distribute, and otherwise use Python alone or in any derivative version,
|
||||
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||
# 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation;
|
||||
# All Rights Reserved" are retained in Python alone or in any derivative version
|
||||
# prepared by Licensee.
|
||||
#
|
||||
# 3. In the event Licensee prepares a derivative work that is based on
|
||||
# or incorporates Python or any part thereof, and wants to make
|
||||
# the derivative work available to others as provided herein, then
|
||||
# Licensee hereby agrees to include in any such work a brief summary of
|
||||
# the changes made to Python.
|
||||
#
|
||||
# 4. PSF is making Python available to Licensee on an "AS IS"
|
||||
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||
# INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
#
|
||||
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
#
|
||||
# 6. This License Agreement will automatically terminate upon a material
|
||||
# breach of its terms and conditions.
|
||||
#
|
||||
# 7. Nothing in this License Agreement shall be deemed to create any
|
||||
# relationship of agency, partnership, or joint venture between PSF and
|
||||
# Licensee. This License Agreement does not grant permission to use PSF
|
||||
# trademarks or trade name in a trademark sense to endorse or promote
|
||||
# products or services of Licensee, or any third party.
|
||||
#
|
||||
# 8. By copying, installing or otherwise using Python, Licensee
|
||||
# agrees to be bound by the terms and conditions of this License
|
||||
# Agreement.
|
||||
#
|
||||
# SPDX-License-Identifier: Python-2.0
|
||||
#
|
||||
# downloaded from:
|
||||
# https://github.com/effigies/looseversion/blob/e1a5a176a92dc6825deda4205c1be6d05e9ed352/src/looseversion/__init__.py
|
||||
|
||||
"""Provides classes to represent module version numbers (one class for
|
||||
each style of version numbering). There are currently two such classes
|
||||
implemented: StrictVersion and LooseVersion.
|
||||
|
||||
Every version number class implements the following interface:
|
||||
* the 'parse' method takes a string and parses it to some internal
|
||||
representation; if the string is an invalid version number,
|
||||
'parse' raises a ValueError exception
|
||||
* the class constructor takes an optional string argument which,
|
||||
if supplied, is passed to 'parse'
|
||||
* __str__ reconstructs the string that was passed to 'parse' (or
|
||||
an equivalent string -- ie. one that will generate an equivalent
|
||||
version number instance)
|
||||
* __repr__ generates Python code to recreate the version number instance
|
||||
* _cmp compares the current instance with either another instance
|
||||
of the same class or a string (which will be parsed to an instance
|
||||
of the same class, thus must follow the same rules)
|
||||
"""
|
||||
import re
|
||||
import sys
|
||||
|
||||
__license__ = "Python License 2.0"
|
||||
|
||||
# The rules according to Greg Stein:
|
||||
# 1) a version number has 1 or more numbers separated by a period or by
|
||||
# sequences of letters. If only periods, then these are compared
|
||||
# left-to-right to determine an ordering.
|
||||
# 2) sequences of letters are part of the tuple for comparison and are
|
||||
# compared lexicographically
|
||||
# 3) recognize the numeric components may have leading zeroes
|
||||
#
|
||||
# The LooseVersion class below implements these rules: a version number
|
||||
# string is split up into a tuple of integer and string components, and
|
||||
# comparison is a simple tuple comparison. This means that version
|
||||
# numbers behave in a predictable and obvious way, but a way that might
|
||||
# not necessarily be how people *want* version numbers to behave. There
|
||||
# wouldn't be a problem if people could stick to purely numeric version
|
||||
# numbers: just split on period and compare the numbers as tuples.
|
||||
# However, people insist on putting letters into their version numbers;
|
||||
# the most common purpose seems to be:
|
||||
# - indicating a "pre-release" version
|
||||
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
|
||||
# - indicating a post-release patch ('p', 'pl', 'patch')
|
||||
# but of course this can't cover all version number schemes, and there's
|
||||
# no way to know what a programmer means without asking him.
|
||||
#
|
||||
# The problem is what to do with letters (and other non-numeric
|
||||
# characters) in a version number. The current implementation does the
|
||||
# obvious and predictable thing: keep them as strings and compare
|
||||
# lexically within a tuple comparison. This has the desired effect if
|
||||
# an appended letter sequence implies something "post-release":
|
||||
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
|
||||
#
|
||||
# However, if letters in a version number imply a pre-release version,
|
||||
# the "obvious" thing isn't correct. Eg. you would expect that
|
||||
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
|
||||
# implemented here, this just isn't so.
|
||||
#
|
||||
# Two possible solutions come to mind. The first is to tie the
|
||||
# comparison algorithm to a particular set of semantic rules, as has
|
||||
# been done in the StrictVersion class above. This works great as long
|
||||
# as everyone can go along with bondage and discipline. Hopefully a
|
||||
# (large) subset of Python module programmers will agree that the
|
||||
# particular flavor of bondage and discipline provided by StrictVersion
|
||||
# provides enough benefit to be worth using, and will submit their
|
||||
# version numbering scheme to its domination. The free-thinking
|
||||
# anarchists in the lot will never give in, though, and something needs
|
||||
# to be done to accommodate them.
|
||||
#
|
||||
# Perhaps a "moderately strict" version class could be implemented that
|
||||
# lets almost anything slide (syntactically), and makes some heuristic
|
||||
# assumptions about non-digits in version number strings. This could
|
||||
# sink into special-case-hell, though; if I was as talented and
|
||||
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
|
||||
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
|
||||
# just as happy dealing with things like "2g6" and "1.13++". I don't
|
||||
# think I'm smart enough to do it right though.
|
||||
#
|
||||
# In any case, I've coded the test suite for this module (see
|
||||
# ../test/test_version.py) specifically to fail on things like comparing
|
||||
# "1.2a2" and "1.2". That's not because the *code* is doing anything
|
||||
# wrong, it's because the simple, obvious design doesn't match my
|
||||
# complicated, hairy expectations for real-world version numbers. It
|
||||
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
|
||||
# the Right Thing" (ie. the code matches the conception). But I'd rather
|
||||
# have a conception that matches common notions about version numbers.
|
||||
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
|
||||
class _Py2Int(int):
|
||||
"""Integer object that compares < any string"""
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, str):
|
||||
return False
|
||||
return super().__gt__(other)
|
||||
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, str):
|
||||
return True
|
||||
return super().__lt__(other)
|
||||
|
||||
else:
|
||||
_Py2Int = int
|
||||
|
||||
|
||||
class LooseVersion(object):
|
||||
"""Version numbering for anarchists and software realists.
|
||||
Implements the standard interface for version number classes as
|
||||
described above. A version number consists of a series of numbers,
|
||||
separated by either periods or strings of letters. When comparing
|
||||
version numbers, the numeric components will be compared
|
||||
numerically, and the alphabetic components lexically. The following
|
||||
are all valid version numbers, in no particular order:
|
||||
|
||||
1.5.1
|
||||
1.5.2b2
|
||||
161
|
||||
3.10a
|
||||
8.02
|
||||
3.4j
|
||||
1996.07.12
|
||||
3.2.pl0
|
||||
3.1.1.6
|
||||
2g6
|
||||
11g
|
||||
0.960923
|
||||
2.2beta29
|
||||
1.13++
|
||||
5.5.kw
|
||||
2.0b1pl0
|
||||
|
||||
In fact, there is no such thing as an invalid version number under
|
||||
this scheme; the rules for comparison are simple and predictable,
|
||||
but may not always give the results you want (for some definition
|
||||
of "want").
|
||||
"""
|
||||
|
||||
component_re = re.compile(r"(\d+ | [a-z]+ | \.)", re.VERBOSE)
|
||||
|
||||
def __init__(self, vstring=None):
|
||||
if vstring:
|
||||
self.parse(vstring)
|
||||
|
||||
def __eq__(self, other):
|
||||
c = self._cmp(other)
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
return c == 0
|
||||
|
||||
def __lt__(self, other):
|
||||
c = self._cmp(other)
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
return c < 0
|
||||
|
||||
def __le__(self, other):
|
||||
c = self._cmp(other)
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
return c <= 0
|
||||
|
||||
def __gt__(self, other):
|
||||
c = self._cmp(other)
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
return c > 0
|
||||
|
||||
def __ge__(self, other):
|
||||
c = self._cmp(other)
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
return c >= 0
|
||||
|
||||
def parse(self, vstring):
|
||||
# I've given up on thinking I can reconstruct the version string
|
||||
# from the parsed tuple -- so I just store the string here for
|
||||
# use by __str__
|
||||
self.vstring = vstring
|
||||
components = [x for x in self.component_re.split(vstring) if x and x != "."]
|
||||
for i, obj in enumerate(components):
|
||||
try:
|
||||
components[i] = int(obj)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self.version = components
|
||||
|
||||
def __str__(self):
|
||||
return self.vstring
|
||||
|
||||
def __repr__(self):
|
||||
return "LooseVersion ('%s')" % str(self)
|
||||
|
||||
def _cmp(self, other):
|
||||
other = self._coerce(other)
|
||||
if other is NotImplemented:
|
||||
return NotImplemented
|
||||
|
||||
if self.version == other.version:
|
||||
return 0
|
||||
if self.version < other.version:
|
||||
return -1
|
||||
if self.version > other.version:
|
||||
return 1
|
||||
return NotImplemented
|
||||
|
||||
@classmethod
|
||||
def _coerce(cls, other):
|
||||
if isinstance(other, cls):
|
||||
return other
|
||||
elif isinstance(other, str):
|
||||
return cls(other)
|
||||
elif "distutils" in sys.modules:
|
||||
# Using this check to avoid importing distutils and suppressing the warning
|
||||
try:
|
||||
from distutils.version import LooseVersion as deprecated
|
||||
except ImportError:
|
||||
return NotImplemented
|
||||
if isinstance(other, deprecated):
|
||||
return cls(str(other))
|
||||
return NotImplemented
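# A few comparisons that follow from the tuple-based parsing above; they
# mirror the examples given in the module comment:
#
#   >>> LooseVersion('0.99') < LooseVersion('0.99pl14') < LooseVersion('1.0')
#   True
#   >>> LooseVersion('1.5.1') < LooseVersion('1.5.2a2')
#   True
#   >>> LooseVersion('1.5.2a2') < LooseVersion('1.5.2')   # pre-release caveat
#   False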
|
||||
|
||||
|
||||
class LooseVersion2(LooseVersion):
|
||||
"""LooseVersion variant that restores Python 2 semantics
|
||||
|
||||
In Python 2, comparing LooseVersions where paired components could be string
|
||||
and int always resulted in the string being "greater". In Python 3, this produced
|
||||
a TypeError.
|
||||
"""
|
||||
|
||||
def parse(self, vstring):
|
||||
# I've given up on thinking I can reconstruct the version string
|
||||
# from the parsed tuple -- so I just store the string here for
|
||||
# use by __str__
|
||||
self.vstring = vstring
|
||||
components = [x for x in self.component_re.split(vstring) if x and x != "."]
|
||||
for i, obj in enumerate(components):
|
||||
try:
|
||||
components[i] = _Py2Int(obj)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self.version = components
|
File diff suppressed because it is too large
|
@ -7,14 +7,19 @@ import posixpath
|
|||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.parse
|
||||
from argparse import ArgumentParser
|
||||
import urllib.parse
|
||||
|
||||
from . import _, common, index, update
|
||||
from . import _
|
||||
from . import common
|
||||
from . import index
|
||||
from . import update
|
||||
|
||||
options = None
|
||||
|
||||
|
||||
def _run_wget(path, urls, verbose=False):
|
||||
if verbose:
|
||||
def _run_wget(path, urls):
|
||||
if options.verbose:
|
||||
verbose = '--verbose'
|
||||
else:
|
||||
verbose = '--no-verbose'
|
||||
|
@ -22,73 +27,38 @@ def _run_wget(path, urls, verbose=False):
|
|||
if not urls:
|
||||
return
|
||||
logging.debug(_('Running wget in {path}').format(path=path))
|
||||
cwd = os.getcwd()
|
||||
os.makedirs(path, exist_ok=True)
|
||||
os.chdir(path)
|
||||
urls_file = '.fdroid-mirror-wget-input-file'
|
||||
with open(urls_file, 'w') as fp:
|
||||
for url in urls:
|
||||
fp.write(url.split('?')[0] + '\n') # wget puts query string in the filename
|
||||
subprocess.call(
|
||||
[
|
||||
'wget',
|
||||
verbose,
|
||||
'--continue',
|
||||
'--user-agent="fdroid mirror"',
|
||||
'--input-file=' + urls_file,
|
||||
]
|
||||
)
|
||||
subprocess.call(['wget', verbose, '--continue', '--user-agent="fdroid mirror"',
|
||||
'--input-file=' + urls_file])
|
||||
os.remove(urls_file)
|
||||
os.chdir(cwd) # leave the working env the way we found it
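# For a non-verbose run, the subprocess call above amounts to roughly:
#
#   wget --no-verbose --continue --user-agent="fdroid mirror" \
#        --input-file=.fdroid-mirror-wget-input-file
#
# executed inside `path`, with one URL per line (query strings stripped) in
# the temporary input file.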
|
||||
|
||||
|
||||
def main():
|
||||
global options
|
||||
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"url",
|
||||
nargs='?',
|
||||
help=_(
|
||||
'Base URL to mirror, can include the index signing key '
|
||||
+ 'using the query string: ?fingerprint='
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--all",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Mirror the full repo and archive, all file types."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--archive",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Also mirror the full archive section"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--build-logs",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Include the build logs in the mirror"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pgp-signatures",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Include the PGP signature .asc files in the mirror"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--src-tarballs",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=_("Include the source tarballs in the mirror"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output-dir", default=None, help=_("The directory to write the mirror to")
|
||||
)
|
||||
options = common.parse_args(parser)
|
||||
|
||||
common.set_console_logging(options.verbose, options.color)
|
||||
parser.add_argument("url", nargs='?',
|
||||
help=_('Base URL to mirror, can include the index signing key '
|
||||
+ 'using the query string: ?fingerprint='))
|
||||
parser.add_argument("--all", action='store_true', default=False,
|
||||
help=_("Mirror the full repo and archive, all file types."))
|
||||
parser.add_argument("--archive", action='store_true', default=False,
|
||||
help=_("Also mirror the full archive section"))
|
||||
parser.add_argument("--build-logs", action='store_true', default=False,
|
||||
help=_("Include the build logs in the mirror"))
|
||||
parser.add_argument("--pgp-signatures", action='store_true', default=False,
|
||||
help=_("Include the PGP signature .asc files in the mirror"))
|
||||
parser.add_argument("--src-tarballs", action='store_true', default=False,
|
||||
help=_("Include the source tarballs in the mirror"))
|
||||
parser.add_argument("--output-dir", default=None,
|
||||
help=_("The directory to write the mirror to"))
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.all:
|
||||
options.archive = True
|
||||
|
@ -105,34 +75,26 @@ def main():
|
|||
fingerprint = urllib.parse.parse_qs(query).get('fingerprint')
|
||||
|
||||
def _append_to_url_path(*args):
|
||||
"""Append the list of path components to URL, keeping the rest the same."""
|
||||
'''Append the list of path components to URL, keeping the rest the same'''
|
||||
newpath = posixpath.join(path, *args)
|
||||
return urllib.parse.urlunparse(
|
||||
(scheme, hostname, newpath, params, query, fragment)
|
||||
)
|
||||
return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))
|
||||
|
||||
if fingerprint:
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
if not ('jarsigner' in config or 'apksigner' in config):
|
||||
logging.error(
|
||||
_('Java JDK not found! Install in standard location or set java_paths!')
|
||||
)
|
||||
logging.error(_('Java JDK not found! Install in standard location or set java_paths!'))
|
||||
sys.exit(1)
|
||||
|
||||
def _get_index(section, etag=None):
|
||||
url = _append_to_url_path(section)
|
||||
data, etag = index.download_repo_index(url, etag=etag)
|
||||
return data, etag, _append_to_url_path(section, 'index-v1.jar')
|
||||
|
||||
else:
|
||||
|
||||
def _get_index(section, etag=None):
|
||||
import io
|
||||
import json
|
||||
import zipfile
|
||||
|
||||
from . import net
|
||||
|
||||
url = _append_to_url_path(section, 'index-v1.jar')
|
||||
content, etag = net.http_get(url)
|
||||
with zipfile.ZipFile(io.BytesIO(content)) as zip:
|
||||
|
@ -145,30 +107,21 @@ def main():
|
|||
ip = ipaddress.ip_address(hostname)
|
||||
except ValueError:
|
||||
pass
|
||||
if hostname == 'f-droid.org' or (
|
||||
ip is not None and hostname in socket.gethostbyname_ex('f-droid.org')[2]
|
||||
):
|
||||
logging.error(
|
||||
_(
|
||||
'This command should never be used to mirror f-droid.org! '
|
||||
'A full copy requires more than 600GB.'
|
||||
)
|
||||
)
|
||||
if hostname == 'f-droid.org' \
|
||||
or (ip is not None and hostname in socket.gethostbyname_ex('f-droid.org')[2]):
|
||||
print(_('ERROR: this command should never be used to mirror f-droid.org!\n'
|
||||
'A full mirror of f-droid.org requires more than 200GB.'))
|
||||
sys.exit(1)
|
||||
|
||||
path = path.rstrip('/')
|
||||
if path.endswith('repo') or path.endswith('archive'):
|
||||
logging.warning(
|
||||
_('Do not include "{path}" in URL!').format(path=path.split('/')[-1])
|
||||
)
|
||||
logging.warning(_('Do not include "{path}" in URL!')
|
||||
.format(path=path.split('/')[-1]))
|
||||
elif not path.endswith('fdroid'):
|
||||
logging.warning(
|
||||
_('{url} does not end with "fdroid", check the URL path!').format(
|
||||
url=options.url
|
||||
)
|
||||
)
|
||||
logging.warning(_('{url} does not end with "fdroid", check the URL path!')
|
||||
.format(url=options.url))
|
||||
|
||||
icondirs = ['icons']
|
||||
icondirs = ['icons', ]
|
||||
for density in update.screen_densities:
|
||||
icondirs.append('icons-' + density)
|
||||
|
||||
|
@ -181,7 +134,7 @@ def main():
|
|||
if options.archive:
|
||||
sections = ('repo', 'archive')
|
||||
else:
|
||||
sections = ('repo',)
|
||||
sections = ('repo', )
|
||||
|
||||
for section in sections:
|
||||
sectiondir = os.path.join(basedir, section)
|
||||
|
@ -199,31 +152,25 @@ def main():
|
|||
for packageName, packageList in data['packages'].items():
|
||||
for package in packageList:
|
||||
to_fetch = []
|
||||
keys = ['apkName']
|
||||
keys = ['apkName', ]
|
||||
if options.src_tarballs:
|
||||
keys.append('srcname')
|
||||
for k in keys:
|
||||
if k in package:
|
||||
to_fetch.append(package[k])
|
||||
elif k == 'apkName':
|
||||
logging.error(
|
||||
_('{appid} is missing {name}').format(
|
||||
appid=package['packageName'], name=k
|
||||
)
|
||||
)
|
||||
logging.error(_('{appid} is missing {name}')
|
||||
.format(appid=package['packageName'], name=k))
|
||||
for f in to_fetch:
|
||||
if not os.path.exists(f) or (
|
||||
f.endswith('.apk') and os.path.getsize(f) != package['size']
|
||||
):
|
||||
if not os.path.exists(f) \
|
||||
or (f.endswith('.apk') and os.path.getsize(f) != package['size']):
|
||||
urls.append(_append_to_url_path(section, f))
|
||||
if options.pgp_signatures:
|
||||
urls.append(_append_to_url_path(section, f + '.asc'))
|
||||
if options.build_logs and f.endswith('.apk'):
|
||||
urls.append(
|
||||
_append_to_url_path(section, f[:-4] + '.log.gz')
|
||||
)
|
||||
urls.append(_append_to_url_path(section, f[:-4] + '.log.gz'))
|
||||
|
||||
_run_wget(sectiondir, urls, options.verbose)
|
||||
_run_wget(sectiondir, urls)
|
||||
|
||||
for app in data['apps']:
|
||||
localized = app.get('localized')
|
||||
|
@ -234,29 +181,23 @@ def main():
|
|||
for k in update.GRAPHIC_NAMES:
|
||||
f = d.get(k)
|
||||
if f:
|
||||
filepath_tuple = components + (f,)
|
||||
filepath_tuple = components + (f, )
|
||||
urls.append(_append_to_url_path(*filepath_tuple))
|
||||
_run_wget(os.path.join(basedir, *components), urls, options.verbose)
|
||||
_run_wget(os.path.join(basedir, *components), urls)
|
||||
for k in update.SCREENSHOT_DIRS:
|
||||
urls = []
|
||||
filelist = d.get(k)
|
||||
if filelist:
|
||||
components = (section, app['packageName'], locale, k)
|
||||
for f in filelist:
|
||||
filepath_tuple = components + (f,)
|
||||
filepath_tuple = components + (f, )
|
||||
urls.append(_append_to_url_path(*filepath_tuple))
|
||||
_run_wget(
|
||||
os.path.join(basedir, *components),
|
||||
urls,
|
||||
options.verbose,
|
||||
)
|
||||
_run_wget(os.path.join(basedir, *components), urls)
|
||||
|
||||
urls = dict()
|
||||
for app in data['apps']:
|
||||
if 'icon' not in app:
|
||||
logging.error(
|
||||
_('no "icon" in {appid}').format(appid=app['packageName'])
|
||||
)
|
||||
logging.error(_('no "icon" in {appid}').format(appid=app['packageName']))
|
||||
continue
|
||||
icon = app['icon']
|
||||
for icondir in icondirs:
|
||||
|
@ -266,12 +207,7 @@ def main():
|
|||
urls[icondir].append(url)
|
||||
|
||||
for icondir in icondirs:
|
||||
if icondir in urls:
|
||||
_run_wget(
|
||||
os.path.join(basedir, section, icondir),
|
||||
urls[icondir],
|
||||
options.verbose,
|
||||
)
|
||||
_run_wget(os.path.join(basedir, section, icondir), urls[icondir])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
#
|
||||
# net.py - part of the FDroid server tools
|
||||
# Copyright (C) 2015 Hans-Christoph Steiner <hans@eds.org>
|
||||
# Copyright (C) 2022 FC Stegerman <flx@obfusk.net>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
@ -17,160 +16,38 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import tempfile
|
||||
import time
|
||||
import urllib
|
||||
|
||||
import requests
|
||||
import urllib3
|
||||
from requests.adapters import HTTPAdapter, Retry
|
||||
|
||||
from . import _, common
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
HEADERS = {'User-Agent': 'F-Droid'}
|
||||
|
||||
|
||||
def download_file(url, local_filename=None, dldir='tmp', retries=3, backoff_factor=0.1):
|
||||
"""Try hard to download the file, including retrying on failures.
|
||||
|
||||
This has two retry cycles, one inside of the requests session, the
|
||||
other provided by this function. The requests retry logic applies
|
||||
to failed DNS lookups, socket connections and connection timeouts,
|
||||
never to requests where data has made it to the server. This
|
||||
handles ChunkedEncodingError during transfer in its own retry
|
||||
loop. This can result in more retries than are specified in the
|
||||
retries parameter.
|
||||
|
||||
"""
|
||||
filename = urllib.parse.urlparse(url).path.split('/')[-1]
|
||||
def download_file(url, local_filename=None, dldir='tmp'):
|
||||
filename = url.split('/')[-1]
|
||||
if local_filename is None:
|
||||
local_filename = os.path.join(dldir, filename)
|
||||
for i in range(retries + 1):
|
||||
if retries:
|
||||
max_retries = Retry(total=retries - i, backoff_factor=backoff_factor)
|
||||
adapter = HTTPAdapter(max_retries=max_retries)
|
||||
session = requests.Session()
|
||||
session.mount('http://', adapter)
|
||||
session.mount('https://', adapter)
|
||||
else:
|
||||
session = requests
|
||||
# the stream=True parameter keeps memory usage low
|
||||
r = session.get(
|
||||
url, stream=True, allow_redirects=True, headers=HEADERS, timeout=300
|
||||
)
|
||||
r.raise_for_status()
|
||||
try:
|
||||
with open(local_filename, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
f.write(chunk)
|
||||
f.flush()
|
||||
return local_filename
|
||||
except requests.exceptions.ChunkedEncodingError as err:
|
||||
if i == retries:
|
||||
raise err
|
||||
logger.warning('Download interrupted, retrying...')
|
||||
time.sleep(backoff_factor * 2**i)
|
||||
raise ValueError("retries must be >= 0")
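# A minimal usage sketch for download_file(); the URL and directory are only
# examples, and network access is assumed. retries/backoff_factor feed both
# the requests Retry adapter and the outer ChunkedEncodingError loop described
# in the docstring:
#
#   >>> path = download_file('https://f-droid.org/F-Droid.apk',
#   ...                      dldir='tmp', retries=3, backoff_factor=0.1)
#   >>> path
#   'tmp/F-Droid.apk'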
|
||||
|
||||
|
||||
def download_using_mirrors(mirrors, local_filename=None):
|
||||
"""Try to download the file from any working mirror.
|
||||
|
||||
Download the file that all URLs in the mirrors list point to,
|
||||
trying all the tricks, starting with the most private methods
|
||||
first. The list of mirrors is converted into a list of mirror
|
||||
configurations to try, in the order that they should be attempted.
|
||||
|
||||
This builds mirror_configs_to_try using all possible combos to
|
||||
try. If a mirror is marked with worksWithoutSNI: True, then this
|
||||
logic will try it twice: first without SNI, then again with SNI.
|
||||
|
||||
"""
|
||||
mirrors = common.parse_list_of_dicts(mirrors)
|
||||
mirror_configs_to_try = []
|
||||
for mirror in mirrors:
|
||||
mirror_configs_to_try.append(mirror)
|
||||
if mirror.get('worksWithoutSNI'):
|
||||
m = copy.deepcopy(mirror)
|
||||
del m['worksWithoutSNI']
|
||||
mirror_configs_to_try.append(m)
|
||||
|
||||
if not local_filename:
|
||||
for mirror in mirrors:
|
||||
filename = urllib.parse.urlparse(mirror['url']).path.split('/')[-1]
|
||||
if filename:
|
||||
break
|
||||
if filename:
|
||||
local_filename = os.path.join(common.get_cachedir(), filename)
|
||||
else:
|
||||
local_filename = tempfile.mkstemp(prefix='fdroid-')
|
||||
|
||||
timeouts = (2, 10, 100)
|
||||
last_exception = None
|
||||
for timeout in timeouts:
|
||||
for mirror in mirror_configs_to_try:
|
||||
last_exception = None
|
||||
urllib3.util.ssl_.HAS_SNI = not mirror.get('worksWithoutSNI')
|
||||
try:
|
||||
# the stream=True parameter keeps memory usage low
|
||||
r = requests.get(
|
||||
mirror['url'],
|
||||
stream=True,
|
||||
allow_redirects=False,
|
||||
headers=HEADERS,
|
||||
# add jitter to the timeout to be less predictable
|
||||
timeout=timeout + random.randint(0, timeout), # nosec B311
|
||||
)
|
||||
if r.status_code != 200:
|
||||
raise requests.exceptions.HTTPError(r.status_code, response=r)
|
||||
with open(local_filename, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
f.write(chunk)
|
||||
f.flush()
|
||||
return local_filename
|
||||
except (
|
||||
ConnectionError,
|
||||
requests.exceptions.ChunkedEncodingError,
|
||||
requests.exceptions.ConnectionError,
|
||||
requests.exceptions.ContentDecodingError,
|
||||
requests.exceptions.HTTPError,
|
||||
requests.exceptions.SSLError,
|
||||
requests.exceptions.StreamConsumedError,
|
||||
requests.exceptions.Timeout,
|
||||
requests.exceptions.UnrewindableBodyError,
|
||||
) as e:
|
||||
last_exception = e
|
||||
logger.debug(_('Retrying failed download: %s') % str(e))
|
||||
# if it hasn't succeeded by now, then give up and raise last exception
|
||||
if last_exception:
|
||||
raise last_exception
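# To make the worksWithoutSNI handling concrete: a hypothetical mirror entry
# such as
#
#   {'url': 'https://example.org/fdroid/repo/F-Droid.apk', 'worksWithoutSNI': True}
#
# is tried twice per timeout step: first as-is with SNI disabled, then as a
# copy with the worksWithoutSNI key removed (SNI enabled). Plain mirrors are
# tried once per timeout step.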
|
||||
# the stream=True parameter keeps memory usage low
|
||||
r = requests.get(url, stream=True, allow_redirects=True, headers=HEADERS)
|
||||
r.raise_for_status()
|
||||
with open(local_filename, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
f.write(chunk)
|
||||
f.flush()
|
||||
return local_filename
|
||||
|
||||
|
||||
def http_get(url, etag=None, timeout=600):
|
||||
"""Download the content from the given URL by making a GET request.
|
||||
"""
|
||||
Downloads the content from the given URL by making a GET request.
|
||||
|
||||
If an ETag is given, it will do a HEAD request first, to see if the content changed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
url
|
||||
The URL to download from.
|
||||
etag
|
||||
The last ETag to be used for the request (optional).
|
||||
|
||||
Returns
|
||||
-------
|
||||
A tuple consisting of:
|
||||
- The raw content that was downloaded or None if it did not change
|
||||
- The new eTag as returned by the HTTP request
|
||||
:param url: The URL to download from.
|
||||
:param etag: The last ETag to be used for the request (optional).
|
||||
:return: A tuple consisting of:
|
||||
- The raw content that was downloaded or None if it did not change
|
||||
- The new eTag as returned by the HTTP request
|
||||
"""
|
||||
# TODO disable TLS Session IDs and TLS Session Tickets
|
||||
# (plain text cookie visible to anyone who can see the network traffic)
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Set up an app build for a nightly build repo."""
|
||||
#
|
||||
# nightly.py - part of the FDroid server tools
|
||||
# Copyright (C) 2017 Hans-Christoph Steiner <hans@eds.org>
|
||||
|
@ -19,26 +18,23 @@
|
|||
|
||||
import base64
|
||||
import datetime
|
||||
import git
|
||||
import hashlib
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import paramiko
|
||||
import platform
|
||||
import shutil
|
||||
import ssl
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from argparse import ArgumentParser
|
||||
from typing import Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import git
|
||||
import paramiko
|
||||
import yaml
|
||||
from urllib.parse import urlparse
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from . import _
|
||||
from . import common
|
||||
|
||||
from . import _, common
|
||||
from .exception import VCSException
|
||||
|
||||
# hard coded defaults for Android ~/.android/debug.keystore files
|
||||
# https://developers.google.com/android/guides/client-auth
|
||||
|
@ -51,121 +47,34 @@ DISTINGUISHED_NAME = 'CN=Android Debug,O=Android,C=US'
|
|||
NIGHTLY = '-nightly'
|
||||
|
||||
|
||||
def _get_keystore_secret_var(keystore: str) -> str:
|
||||
"""Get keystore secret as base64.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
keystore
|
||||
The path of the keystore.
|
||||
|
||||
Returns
|
||||
-------
|
||||
base64_secret
|
||||
The keystore secret as base64 string.
|
||||
"""
|
||||
with open(keystore, 'rb') as fp:
|
||||
return base64.standard_b64encode(fp.read()).decode('ascii')
|
||||
|
||||
|
||||
def _ssh_key_from_debug_keystore(keystore: Optional[str] = None) -> str:
|
||||
"""Convert a debug keystore to an SSH private key.
|
||||
|
||||
This leaves the original keystore file in place.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
keystore
|
||||
The keystore to convert to a SSH private key.
|
||||
|
||||
Returns
|
||||
-------
|
||||
key_path
|
||||
The SSH private key file path in the temporary directory.
|
||||
"""
|
||||
if keystore is None:
|
||||
# set this here so it can be overridden in the tests
|
||||
# TODO convert this to a class to get rid of this nonsense
|
||||
keystore = KEYSTORE_FILE
|
||||
def _ssh_key_from_debug_keystore(keystore=KEYSTORE_FILE):
|
||||
tmp_dir = tempfile.mkdtemp(prefix='.')
|
||||
privkey = os.path.join(tmp_dir, '.privkey')
|
||||
key_pem = os.path.join(tmp_dir, '.key.pem')
|
||||
p12 = os.path.join(tmp_dir, '.keystore.p12')
|
||||
_config = dict()
|
||||
common.fill_config_defaults(_config)
|
||||
subprocess.check_call(
|
||||
[
|
||||
_config['keytool'],
|
||||
'-importkeystore',
|
||||
'-srckeystore',
|
||||
keystore,
|
||||
'-srcalias',
|
||||
KEY_ALIAS,
|
||||
'-srcstorepass',
|
||||
PASSWORD,
|
||||
'-srckeypass',
|
||||
PASSWORD,
|
||||
'-destkeystore',
|
||||
p12,
|
||||
'-destalias',
|
||||
KEY_ALIAS,
|
||||
'-deststorepass',
|
||||
PASSWORD,
|
||||
'-destkeypass',
|
||||
PASSWORD,
|
||||
'-deststoretype',
|
||||
'PKCS12',
|
||||
],
|
||||
env={'LC_ALL': 'C.UTF-8'},
|
||||
)
|
||||
subprocess.check_call(
|
||||
[
|
||||
'openssl',
|
||||
'pkcs12',
|
||||
'-in',
|
||||
p12,
|
||||
'-out',
|
||||
key_pem,
|
||||
'-passin',
|
||||
'pass:' + PASSWORD,
|
||||
'-passout',
|
||||
'pass:' + PASSWORD,
|
||||
],
|
||||
env={'LC_ALL': 'C.UTF-8'},
|
||||
)
|
||||
|
||||
# OpenSSL 3.0 changed the default output format from PKCS#1 to
|
||||
# PKCS#8, which paramiko does not support.
|
||||
# https://www.openssl.org/docs/man3.0/man1/openssl-rsa.html#traditional
|
||||
# https://github.com/paramiko/paramiko/issues/1015
|
||||
openssl_rsa_cmd = ['openssl', 'rsa']
|
||||
if ssl.OPENSSL_VERSION_INFO[0] >= 3:
|
||||
openssl_rsa_cmd += ['-traditional']
|
||||
subprocess.check_call(
|
||||
openssl_rsa_cmd
|
||||
+ [
|
||||
'-in',
|
||||
key_pem,
|
||||
'-out',
|
||||
privkey,
|
||||
'-passin',
|
||||
'pass:' + PASSWORD,
|
||||
],
|
||||
env={'LC_ALL': 'C.UTF-8'},
|
||||
)
|
||||
subprocess.check_call([_config['keytool'], '-importkeystore',
|
||||
'-srckeystore', keystore, '-srcalias', KEY_ALIAS,
|
||||
'-srcstorepass', PASSWORD, '-srckeypass', PASSWORD,
|
||||
'-destkeystore', p12, '-destalias', KEY_ALIAS,
|
||||
'-deststorepass', PASSWORD, '-destkeypass', PASSWORD,
|
||||
'-deststoretype', 'PKCS12'],
|
||||
env={'LC_ALL': 'C.UTF-8'})
|
||||
subprocess.check_call(['openssl', 'pkcs12', '-in', p12, '-out', key_pem,
|
||||
'-passin', 'pass:' + PASSWORD, '-passout', 'pass:' + PASSWORD],
|
||||
env={'LC_ALL': 'C.UTF-8'})
|
||||
subprocess.check_call(['openssl', 'rsa', '-in', key_pem, '-out', privkey,
|
||||
'-passin', 'pass:' + PASSWORD],
|
||||
env={'LC_ALL': 'C.UTF-8'})
|
||||
os.remove(key_pem)
|
||||
os.remove(p12)
|
||||
os.chmod(privkey, 0o600) # os.umask() should cover this, but just in case
|
||||
|
||||
rsakey = paramiko.RSAKey.from_private_key_file(privkey)
|
||||
fingerprint = (
|
||||
base64.b64encode(hashlib.sha256(rsakey.asbytes()).digest())
|
||||
.decode('ascii')
|
||||
.rstrip('=')
|
||||
)
|
||||
ssh_private_key_file = os.path.join(
|
||||
tmp_dir, 'debug_keystore_' + fingerprint.replace('/', '_') + '_id_rsa'
|
||||
)
|
||||
fingerprint = base64.b64encode(hashlib.sha256(rsakey.asbytes()).digest()).decode('ascii').rstrip('=')
|
||||
ssh_private_key_file = os.path.join(tmp_dir, 'debug_keystore_'
|
||||
+ fingerprint.replace('/', '_') + '_id_rsa')
|
||||
shutil.move(privkey, ssh_private_key_file)
|
||||
|
||||
pub = rsakey.get_name() + ' ' + rsakey.get_base64() + ' ' + ssh_private_key_file
|
||||
|
@ -177,127 +86,26 @@ def _ssh_key_from_debug_keystore(keystore: Optional[str] = None) -> str:
|
|||
return ssh_private_key_file
|
||||
|
||||
|
||||
def get_repo_base_url(
|
||||
clone_url: str, repo_git_base: str, force_type: Optional[str] = None
|
||||
) -> str:
|
||||
"""Generate the base URL for the F-Droid repository.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
clone_url
|
||||
The URL to clone the Git repository.
|
||||
repo_git_base
|
||||
The project path of the Git repository at the Git forge.
|
||||
force_type
|
||||
The Git forge of the project.
|
||||
|
||||
Returns
|
||||
-------
|
||||
repo_base_url
|
||||
The base URL of the F-Droid repository.
|
||||
"""
|
||||
if force_type is None:
|
||||
force_type = urlparse(clone_url).netloc
|
||||
if force_type == 'gitlab.com':
|
||||
return clone_url + '/-/raw/master/fdroid'
|
||||
if force_type == 'github.com':
|
||||
return 'https://raw.githubusercontent.com/%s/master/fdroid' % repo_git_base
|
||||
print(_('ERROR: unsupported git host "%s", patches welcome!') % force_type)
|
||||
sys.exit(1)
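# A minimal sketch of what get_repo_base_url() produces, using made-up
# project paths:
#
#   >>> get_repo_base_url('https://gitlab.com/example/app-nightly', 'example/app-nightly')
#   'https://gitlab.com/example/app-nightly/-/raw/master/fdroid'
#   >>> get_repo_base_url('https://github.com/example/app-nightly', 'example/app-nightly')
#   'https://raw.githubusercontent.com/example/app-nightly/master/fdroid'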
|
||||
|
||||
|
||||
def clone_git_repo(clone_url, git_mirror_path):
|
||||
"""Clone a git repo into the given path, failing if a password is required.
|
||||
|
||||
If GitPython's safe mode is present, this will use that. Otherwise,
|
||||
this includes a very limited version of the safe mode just to ensure
|
||||
this won't hang on password prompts.
|
||||
|
||||
https://github.com/gitpython-developers/GitPython/pull/2029
|
||||
|
||||
"""
|
||||
logging.debug(_('cloning {url}').format(url=clone_url))
|
||||
try:
|
||||
sig = inspect.signature(git.Repo.clone_from)
|
||||
if 'safe' in sig.parameters:
|
||||
git.Repo.clone_from(clone_url, git_mirror_path, safe=True)
|
||||
else:
|
||||
git.Repo.clone_from(
|
||||
clone_url,
|
||||
git_mirror_path,
|
||||
env={
|
||||
'GIT_ASKPASS': '/bin/true',
|
||||
'SSH_ASKPASS': '/bin/true',
|
||||
'GIT_USERNAME': 'u',
|
||||
'GIT_PASSWORD': 'p',
|
||||
'GIT_HTTP_USERNAME': 'u',
|
||||
'GIT_HTTP_PASSWORD': 'p',
|
||||
'GIT_SSH': '/bin/false', # for git < 2.3
|
||||
'GIT_TERMINAL_PROMPT': '0',
|
||||
},
|
||||
)
|
||||
except git.exc.GitCommandError as e:
|
||||
logging.warning(_('WARNING: only public git repos are supported!'))
|
||||
raise VCSException(f'git clone {clone_url} failed:', str(e)) from e
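
A short usage sketch for clone_git_repo(); the clone URL and target directory are placeholders:

# Sketch: clone a public repo without ever blocking on a password prompt.
# VCSException is raised if the clone fails (e.g. the repo is private or unreachable).
try:
    clone_git_repo('https://gitlab.com/example/app-nightly', '/tmp/git-mirror')
except VCSException as e:
    print('clone failed:', e)
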
|
||||
|
||||
|
||||
def main():
|
||||
"""Deploy to F-Droid repository or generate SSH private key from keystore.
|
||||
|
||||
The behaviour of this function is influenced by the configuration file as
|
||||
well as command line parameters.
|
||||
|
||||
Raises
|
||||
------
|
||||
:exc:`~fdroidserver.exception.VCSException`
|
||||
If the nightly Git repository could not be cloned during an attempt to
|
||||
deploy.
|
||||
"""
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"--keystore",
|
||||
default=KEYSTORE_FILE,
|
||||
help=_("Specify which debug keystore file to use."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--show-secret-var",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Print the secret variable to the terminal for easy copy/paste"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--keep-private-keys",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Do not remove the private keys generated from the keystore"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-deploy",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Do not deploy the new files to the repo"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--file",
|
||||
default='app/build/outputs/apk/*.apk',
|
||||
help=_('The file to be included in the repo (path or glob)'),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-checksum",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Don't use rsync checksums"),
|
||||
)
|
||||
archive_older_unset = -1
|
||||
parser.add_argument(
|
||||
"--archive-older",
|
||||
type=int,
|
||||
default=archive_older_unset,
|
||||
help=_("Set maximum releases in repo before older ones are archived"),
|
||||
)
|
||||
parser.add_argument("--keystore", default=KEYSTORE_FILE,
|
||||
help=_("Specify which debug keystore file to use."))
|
||||
parser.add_argument("--show-secret-var", action="store_true", default=False,
|
||||
help=_("Print the secret variable to the terminal for easy copy/paste"))
|
||||
parser.add_argument("--keep-private-keys", action="store_true", default=False,
|
||||
help=_("Do not remove the private keys generated from the keystore"))
|
||||
parser.add_argument("--no-deploy", action="store_true", default=False,
|
||||
help=_("Do not deploy the new files to the repo"))
|
||||
parser.add_argument("--file", default='app/build/outputs/apk/*.apk',
|
||||
help=_('The file to be included in the repo (path or glob)'))
|
||||
parser.add_argument("--no-checksum", action="store_true", default=False,
|
||||
help=_("Don't use rsync checksums"))
|
||||
parser.add_argument("--archive-older", type=int, default=20,
|
||||
help=_("Set maximum releases in repo before older ones are archived"))
|
||||
# TODO add --with-btlog
|
||||
options = common.parse_args(parser)
|
||||
options = parser.parse_args()
|
||||
|
||||
# force a tighter umask since this writes private key material
|
||||
umask = os.umask(0o077)
|
||||
|
@ -321,86 +129,56 @@ def main():
|
|||
cibase = os.getcwd()
|
||||
os.makedirs(repodir, exist_ok=True)
|
||||
|
||||
# the 'master' branch is hardcoded in fdroidserver/deploy.py
|
||||
if 'CI_PROJECT_PATH' in os.environ and 'CI_PROJECT_URL' in os.environ:
|
||||
# we are in GitLab CI
|
||||
repo_git_base = os.getenv('CI_PROJECT_PATH') + NIGHTLY
|
||||
clone_url = os.getenv('CI_PROJECT_URL') + NIGHTLY
|
||||
repo_base = get_repo_base_url(
|
||||
clone_url, repo_git_base, force_type='gitlab.com'
|
||||
)
|
||||
repo_base = clone_url + '/raw/master/fdroid'
|
||||
servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
|
||||
deploy_key_url = (
|
||||
f'{clone_url}/-/settings/repository#js-deploy-keys-settings'
|
||||
)
|
||||
deploy_key_url = clone_url + '/settings/repository'
|
||||
git_user_name = os.getenv('GITLAB_USER_NAME')
|
||||
git_user_email = os.getenv('GITLAB_USER_EMAIL')
|
||||
elif 'TRAVIS_REPO_SLUG' in os.environ:
|
||||
# we are in Travis CI
|
||||
repo_git_base = os.getenv('TRAVIS_REPO_SLUG') + NIGHTLY
|
||||
clone_url = 'https://github.com/' + repo_git_base
|
||||
repo_base = get_repo_base_url(
|
||||
clone_url, repo_git_base, force_type='github.com'
|
||||
)
|
||||
_branch = os.getenv('TRAVIS_BRANCH')
|
||||
repo_base = 'https://raw.githubusercontent.com/' + repo_git_base + '/' + _branch + '/fdroid'
|
||||
servergitmirror = 'git@github.com:' + repo_git_base
|
||||
deploy_key_url = (
|
||||
f'https://github.com/{repo_git_base}/settings/keys'
|
||||
+ '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
|
||||
)
|
||||
deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys'
|
||||
+ '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys')
|
||||
git_user_name = repo_git_base
|
||||
git_user_email = os.getenv('USER') + '@' + platform.node()
|
||||
elif (
|
||||
'CIRCLE_REPOSITORY_URL' in os.environ
|
||||
and 'CIRCLE_PROJECT_USERNAME' in os.environ
|
||||
and 'CIRCLE_PROJECT_REPONAME' in os.environ
|
||||
):
|
||||
elif 'CIRCLE_REPOSITORY_URL' in os.environ \
|
||||
and 'CIRCLE_PROJECT_USERNAME' in os.environ \
|
||||
and 'CIRCLE_PROJECT_REPONAME' in os.environ:
|
||||
# we are in Circle CI
|
||||
repo_git_base = (
|
||||
os.getenv('CIRCLE_PROJECT_USERNAME')
|
||||
+ '/'
|
||||
+ os.getenv('CIRCLE_PROJECT_REPONAME')
|
||||
+ NIGHTLY
|
||||
)
|
||||
repo_git_base = (os.getenv('CIRCLE_PROJECT_USERNAME')
|
||||
+ '/' + os.getenv('CIRCLE_PROJECT_REPONAME') + NIGHTLY)
|
||||
clone_url = os.getenv('CIRCLE_REPOSITORY_URL') + NIGHTLY
|
||||
repo_base = get_repo_base_url(
|
||||
clone_url, repo_git_base, force_type='github.com'
|
||||
)
|
||||
repo_base = clone_url + '/raw/master/fdroid'
|
||||
servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
|
||||
deploy_key_url = (
|
||||
f'https://github.com/{repo_git_base}/settings/keys'
|
||||
+ '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
|
||||
)
|
||||
deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys'
|
||||
+ '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys')
|
||||
git_user_name = os.getenv('CIRCLE_USERNAME')
|
||||
git_user_email = git_user_name + '@' + platform.node()
|
||||
elif 'GITHUB_ACTIONS' in os.environ:
|
||||
# we are in Github actions
|
||||
repo_git_base = os.getenv('GITHUB_REPOSITORY') + NIGHTLY
|
||||
clone_url = os.getenv('GITHUB_SERVER_URL') + '/' + repo_git_base
|
||||
repo_base = get_repo_base_url(
|
||||
clone_url, repo_git_base, force_type='github.com'
|
||||
)
|
||||
servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
|
||||
deploy_key_url = (
|
||||
f'https://github.com/{repo_git_base}/settings/keys'
|
||||
+ '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
|
||||
)
|
||||
git_user_name = os.getenv('GITHUB_ACTOR')
|
||||
git_user_email = git_user_name + '@' + platform.node()
|
||||
else:
|
||||
print(_('ERROR: unsupported CI type, patches welcome!'))
|
||||
sys.exit(1)
|
||||
|
||||
repo_url = repo_base + '/repo'
|
||||
git_mirror_path = os.path.join(repo_basedir, 'git-mirror')
|
||||
git_mirror_fdroiddir = os.path.join(git_mirror_path, 'fdroid')
|
||||
git_mirror_repodir = os.path.join(git_mirror_fdroiddir, 'repo')
|
||||
git_mirror_metadatadir = os.path.join(git_mirror_fdroiddir, 'metadata')
|
||||
git_mirror_repodir = os.path.join(git_mirror_path, 'fdroid', 'repo')
|
||||
git_mirror_metadatadir = os.path.join(git_mirror_path, 'fdroid', 'metadata')
|
||||
git_mirror_statsdir = os.path.join(git_mirror_path, 'fdroid', 'stats')
|
||||
if not os.path.isdir(git_mirror_repodir):
|
||||
clone_git_repo(clone_url, git_mirror_path)
|
||||
logging.debug(_('cloning {url}').format(url=clone_url))
|
||||
try:
|
||||
git.Repo.clone_from(clone_url, git_mirror_path)
|
||||
except Exception:
|
||||
pass
|
||||
if not os.path.isdir(git_mirror_repodir):
|
||||
os.makedirs(git_mirror_repodir, mode=0o755)
|
||||
if os.path.exists('LICENSE'):
|
||||
shutil.copy2('LICENSE', git_mirror_path)
|
||||
|
||||
mirror_git_repo = git.Repo.init(git_mirror_path)
|
||||
writer = mirror_git_repo.config_writer()
|
||||
|
@ -414,31 +192,34 @@ def main():
|
|||
readme = '''
|
||||
# {repo_git_base}
|
||||
|
||||
This is an app repository for nightly versions.
|
||||
You can use it with the [F-Droid](https://f-droid.org/) Android app.
|
||||
[]({repo_url})
|
||||
|
||||
[](https://fdroid.link/#{repo_url})
|
||||
|
||||
Last updated: {date}'''.format(
|
||||
repo_git_base=repo_git_base,
|
||||
repo_url=repo_url,
|
||||
date=datetime.datetime.now(datetime.timezone.utc).strftime(
|
||||
'%Y-%m-%d %H:%M:%S UTC'
|
||||
),
|
||||
)
|
||||
Last updated: {date}'''.format(repo_git_base=repo_git_base,
|
||||
repo_url=repo_url,
|
||||
date=datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
|
||||
with open(readme_path, 'w') as fp:
|
||||
fp.write(readme)
|
||||
mirror_git_repo.git.add(all=True)
|
||||
mirror_git_repo.index.commit("update README")
|
||||
|
||||
icon_path = os.path.join(git_mirror_path, 'icon.png')
|
||||
try:
|
||||
import qrcode
|
||||
qrcode.make(repo_url).save(icon_path)
|
||||
except Exception:
|
||||
exampleicon = os.path.join(common.get_examples_dir(), 'fdroid-icon.png')
|
||||
shutil.copy(exampleicon, icon_path)
|
||||
mirror_git_repo.git.add(all=True)
|
||||
mirror_git_repo.index.commit("update repo/website icon")
|
||||
shutil.copy(icon_path, repo_basedir)
|
||||
|
||||
os.chdir(repo_basedir)
|
||||
if os.path.isdir(git_mirror_repodir):
|
||||
common.local_rsync(options, [git_mirror_repodir + '/'], 'repo/')
|
||||
common.local_rsync(options, git_mirror_repodir + '/', 'repo/')
|
||||
if os.path.isdir(git_mirror_metadatadir):
|
||||
common.local_rsync(options, [git_mirror_metadatadir + '/'], 'metadata/')
|
||||
common.local_rsync(options, git_mirror_metadatadir + '/', 'metadata/')
|
||||
if os.path.isdir(git_mirror_statsdir):
|
||||
common.local_rsync(options, git_mirror_statsdir + '/', 'stats/')
|
||||
|
||||
ssh_private_key_file = _ssh_key_from_debug_keystore()
|
||||
# this is needed for GitPython to find the SSH key
|
||||
|
@ -449,89 +230,57 @@ Last updated: {date}'''.format(
|
|||
with open(ssh_config, 'a') as fp:
|
||||
fp.write('\n\nHost *\n\tIdentityFile %s\n' % ssh_private_key_file)
|
||||
|
||||
if options.archive_older == archive_older_unset:
|
||||
fdroid_size = common.get_dir_size(git_mirror_fdroiddir)
|
||||
max_size = common.GITLAB_COM_PAGES_MAX_SIZE
|
||||
if fdroid_size < max_size:
|
||||
options.archive_older = 20
|
||||
else:
|
||||
options.archive_older = 3
|
||||
print(
|
||||
'WARNING: repo is %s over the GitLab Pages limit (%s)'
|
||||
% (fdroid_size - max_size, max_size)
|
||||
)
|
||||
print('Setting --archive-older to 3')
|
||||
|
||||
config = {
|
||||
'identity_file': ssh_private_key_file,
|
||||
'repo_name': repo_git_base,
|
||||
'repo_url': repo_url,
|
||||
'repo_description': 'Nightly builds from %s' % git_user_email,
|
||||
'archive_name': repo_git_base + ' archive',
|
||||
'archive_url': repo_base + '/archive',
|
||||
'archive_description': 'Old nightly builds that have been archived.',
|
||||
'archive_older': options.archive_older,
|
||||
'servergitmirrors': [{"url": servergitmirror}],
|
||||
'keystore': KEYSTORE_FILE,
|
||||
'repo_keyalias': KEY_ALIAS,
|
||||
'keystorepass': PASSWORD,
|
||||
'keypass': PASSWORD,
|
||||
'keydname': DISTINGUISHED_NAME,
|
||||
'make_current_version_link': False,
|
||||
}
|
||||
with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp:
|
||||
yaml.dump(config, fp, default_flow_style=False)
|
||||
os.chmod(common.CONFIG_FILE, 0o600)
|
||||
config = common.read_config()
|
||||
config = ''
|
||||
config += "identity_file = '%s'\n" % ssh_private_key_file
|
||||
config += "repo_name = '%s'\n" % repo_git_base
|
||||
config += "repo_url = '%s'\n" % repo_url
|
||||
config += "repo_icon = 'icon.png'\n"
|
||||
config += "repo_description = 'Nightly builds from %s'\n" % git_user_email
|
||||
config += "archive_name = '%s'\n" % (repo_git_base + ' archive')
|
||||
config += "archive_url = '%s'\n" % (repo_base + '/archive')
|
||||
config += "archive_icon = 'icon.png'\n"
|
||||
config += "archive_description = 'Old nightly builds that have been archived.'\n"
|
||||
config += "archive_older = %i\n" % options.archive_older
|
||||
config += "servergitmirrors = '%s'\n" % servergitmirror
|
||||
config += "keystore = '%s'\n" % KEYSTORE_FILE
|
||||
config += "repo_keyalias = '%s'\n" % KEY_ALIAS
|
||||
config += "keystorepass = '%s'\n" % PASSWORD
|
||||
config += "keypass = '%s'\n" % PASSWORD
|
||||
config += "keydname = '%s'\n" % DISTINGUISHED_NAME
|
||||
config += "make_current_version_link = False\n"
|
||||
config += "update_stats = True\n"
|
||||
with open('config.py', 'w') as fp:
|
||||
fp.write(config)
|
||||
os.chmod('config.py', 0o600)
|
||||
config = common.read_config(options)
|
||||
common.assert_config_keystore(config)
|
||||
|
||||
logging.debug(
|
||||
_(
|
||||
'Run over {cibase} to find -debug.apk. and skip repo_basedir {repo_basedir}'
|
||||
).format(cibase=cibase, repo_basedir=repo_basedir)
|
||||
)
|
||||
|
||||
for root, dirs, files in os.walk(cibase):
|
||||
for d in ('.git', '.gradle'):
|
||||
if d in dirs:
|
||||
for d in dirs:
|
||||
if d == '.git' or d == '.gradle' or (d == 'fdroid' and root == cibase):
|
||||
dirs.remove(d)
|
||||
if root == cibase and 'fdroid' in dirs:
|
||||
dirs.remove('fdroid')
|
||||
|
||||
for f in files:
|
||||
if f.endswith('-debug.apk'):
|
||||
apkfilename = os.path.join(root, f)
|
||||
logging.debug(
|
||||
_('Stripping mystery signature from {apkfilename}').format(
|
||||
apkfilename=apkfilename
|
||||
)
|
||||
)
|
||||
logging.debug(_('Stripping mystery signature from {apkfilename}')
|
||||
.format(apkfilename=apkfilename))
|
||||
destapk = os.path.join(repodir, os.path.basename(f))
|
||||
os.chmod(apkfilename, 0o644)
|
||||
logging.debug(
|
||||
_(
|
||||
'Resigning {apkfilename} with provided debug.keystore'
|
||||
).format(apkfilename=os.path.basename(apkfilename))
|
||||
)
|
||||
logging.debug(_('Resigning {apkfilename} with provided debug.keystore')
|
||||
.format(apkfilename=os.path.basename(apkfilename)))
|
||||
common.apk_strip_v1_signatures(apkfilename, strip_manifest=True)
|
||||
common.sign_apk(apkfilename, destapk, KEY_ALIAS)
|
||||
|
||||
if options.verbose:
|
||||
logging.debug(_('attempting bare SSH connection to test deploy key:'))
|
||||
try:
|
||||
subprocess.check_call(
|
||||
[
|
||||
'ssh',
|
||||
'-Tvi',
|
||||
ssh_private_key_file,
|
||||
'-oIdentitiesOnly=yes',
|
||||
'-oStrictHostKeyChecking=no',
|
||||
servergitmirror.split(':')[0],
|
||||
]
|
||||
)
|
||||
subprocess.check_call(['ssh', '-Tvi', ssh_private_key_file,
|
||||
'-oIdentitiesOnly=yes', '-oStrictHostKeyChecking=no',
|
||||
servergitmirror.split(':')[0]])
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
|
||||
app_url = clone_url[: -len(NIGHTLY)]
|
||||
app_url = clone_url[:-len(NIGHTLY)]
|
||||
template = dict()
|
||||
template['AuthorName'] = clone_url.split('/')[4]
|
||||
template['AuthorWebSite'] = '/'.join(clone_url.split('/')[:4])
|
||||
|
@ -543,13 +292,10 @@ Last updated: {date}'''.format(
|
|||
with open('template.yml', 'w') as fp:
|
||||
yaml.dump(template, fp)
|
||||
|
||||
subprocess.check_call(
|
||||
['fdroid', 'update', '--rename-apks', '--create-metadata', '--verbose'],
|
||||
cwd=repo_basedir,
|
||||
)
|
||||
common.local_rsync(
|
||||
options, [repo_basedir + '/metadata/'], git_mirror_metadatadir + '/'
|
||||
)
|
||||
subprocess.check_call(['fdroid', 'update', '--rename-apks', '--create-metadata', '--verbose'],
|
||||
cwd=repo_basedir)
|
||||
common.local_rsync(options, repo_basedir + '/metadata/', git_mirror_metadatadir + '/')
|
||||
common.local_rsync(options, repo_basedir + '/stats/', git_mirror_statsdir + '/')
|
||||
mirror_git_repo.git.add(all=True)
|
||||
mirror_git_repo.index.commit("update app metadata")
|
||||
|
||||
|
@ -558,11 +304,8 @@ Last updated: {date}'''.format(
|
|||
cmd = ['fdroid', 'deploy', '--verbose', '--no-keep-git-mirror-archive']
|
||||
subprocess.check_call(cmd, cwd=repo_basedir)
|
||||
except subprocess.CalledProcessError:
|
||||
logging.error(
|
||||
_('cannot publish update, did you set the deploy key?')
|
||||
+ '\n'
|
||||
+ deploy_key_url
|
||||
)
|
||||
logging.error(_('cannot publish update, did you set the deploy key?')
|
||||
+ '\n' + deploy_key_url)
|
||||
sys.exit(1)
|
||||
|
||||
if not options.keep_private_keys:
|
||||
|
@ -576,33 +319,25 @@ Last updated: {date}'''.format(
|
|||
if not os.path.exists(androiddir):
|
||||
os.mkdir(androiddir)
|
||||
logging.info(_('created {path}').format(path=androiddir))
|
||||
logging.error(
|
||||
_('{path} does not exist! Create it by running:').format(
|
||||
path=options.keystore
|
||||
)
|
||||
+ '\n keytool -genkey -v -keystore '
|
||||
+ options.keystore
|
||||
+ ' -storepass android \\'
|
||||
+ '\n -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\'
|
||||
+ '\n -dname "CN=Android Debug,O=Android,C=US"'
|
||||
)
|
||||
logging.error(_('{path} does not exist! Create it by running:').format(path=options.keystore)
|
||||
+ '\n keytool -genkey -v -keystore ' + options.keystore + ' -storepass android \\'
|
||||
+ '\n -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\'
|
||||
+ '\n -dname "CN=Android Debug,O=Android,C=US"')
|
||||
sys.exit(1)
|
||||
ssh_dir = os.path.join(os.getenv('HOME'), '.ssh')
|
||||
os.makedirs(os.path.dirname(ssh_dir), exist_ok=True)
|
||||
privkey = _ssh_key_from_debug_keystore(options.keystore)
|
||||
if os.path.exists(ssh_dir):
|
||||
ssh_private_key_file = os.path.join(ssh_dir, os.path.basename(privkey))
|
||||
shutil.move(privkey, ssh_private_key_file)
|
||||
shutil.move(privkey + '.pub', ssh_private_key_file + '.pub')
|
||||
ssh_private_key_file = os.path.join(ssh_dir, os.path.basename(privkey))
|
||||
shutil.move(privkey, ssh_private_key_file)
|
||||
shutil.move(privkey + '.pub', ssh_private_key_file + '.pub')
|
||||
if shutil.rmtree.avoids_symlink_attacks:
|
||||
shutil.rmtree(os.path.dirname(privkey))
|
||||
|
||||
if options.show_secret_var:
|
||||
debug_keystore = _get_keystore_secret_var(options.keystore)
|
||||
print(
|
||||
_('\n{path} encoded for the DEBUG_KEYSTORE secret variable:').format(
|
||||
path=options.keystore
|
||||
)
|
||||
)
|
||||
with open(options.keystore, 'rb') as fp:
|
||||
debug_keystore = base64.standard_b64encode(fp.read()).decode('ascii')
|
||||
print(_('\n{path} encoded for the DEBUG_KEYSTORE secret variable:')
|
||||
.format(path=options.keystore))
|
||||
print(debug_keystore)
|
||||
|
||||
os.umask(umask)
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
# publish.py - part of the FDroid server tools
|
||||
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
||||
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
|
||||
# Copyright (C) 2021 Felix C. Stegerman <flx@obfusk.net>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
@ -18,40 +17,34 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Sign APKs using keys or via reproducible builds signature copying.
|
||||
|
||||
This command takes unsigned APKs and signs them. It looks for
|
||||
unsigned APKs in the unsigned/ directory and puts successfully signed
|
||||
APKs into the repo/ directory. The default is to run in a kind of
|
||||
batch mode, where it will only quit on certain kinds of errors. It
|
||||
mostly reports success by moving an APK from unsigned/ to repo/
|
||||
|
||||
"""
|
||||
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
import zipfile
|
||||
import glob
|
||||
import hashlib
|
||||
from argparse import ArgumentParser
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
from gettext import ngettext
|
||||
import json
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
from . import _, common, metadata
|
||||
from . import _
|
||||
from . import common
|
||||
from . import metadata
|
||||
from .common import FDroidPopen
|
||||
from .exception import BuildException, FDroidException
|
||||
|
||||
config = None
|
||||
options = None
|
||||
start_timestamp = time.gmtime()
|
||||
|
||||
|
||||
def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
|
||||
"""Move the source tarball into the output directory..."""
|
||||
|
||||
tarfilename = apkfilename[:-4] + '_src.tar.gz'
|
||||
tarfile = os.path.join(unsigned_dir, tarfilename)
|
||||
if os.path.exists(tarfile):
|
||||
|
@ -62,9 +55,7 @@ def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
|
|||
|
||||
|
||||
def key_alias(appid):
|
||||
"""No summary.
|
||||
|
||||
Get the alias which F-Droid uses to indentify the singing key
|
||||
"""Get the alias which F-Droid uses to indentify the singing key
|
||||
for this App in F-Droids keystore.
|
||||
"""
|
||||
if config and 'keyaliases' in config and appid in config['keyaliases']:
|
||||
|
@ -82,27 +73,24 @@ def key_alias(appid):
|
|||
|
||||
|
||||
def read_fingerprints_from_keystore():
|
||||
"""Obtain a dictionary containing all singning-key fingerprints which are managed by F-Droid, grouped by appid."""
|
||||
env_vars = {'LC_ALL': 'C.UTF-8', 'FDROID_KEY_STORE_PASS': config['keystorepass']}
|
||||
cmd = [
|
||||
config['keytool'],
|
||||
'-list',
|
||||
'-v',
|
||||
'-keystore',
|
||||
config['keystore'],
|
||||
'-storepass:env',
|
||||
'FDROID_KEY_STORE_PASS',
|
||||
]
|
||||
"""Obtain a dictionary containing all singning-key fingerprints which
|
||||
are managed by F-Droid, grouped by appid.
|
||||
"""
|
||||
env_vars = {'LC_ALL': 'C.UTF-8',
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass']}
|
||||
cmd = [config['keytool'], '-list',
|
||||
'-v', '-keystore', config['keystore'],
|
||||
'-storepass:env', 'FDROID_KEY_STORE_PASS']
|
||||
if config['keystore'] == 'NONE':
|
||||
cmd += config['smartcardoptions']
|
||||
p = FDroidPopen(cmd, envs=env_vars, output=False)
|
||||
if p.returncode != 0:
|
||||
raise FDroidException('could not read keystore {}'.format(config['keystore']))
|
||||
|
||||
realias = re.compile('Alias name: (?P<alias>.+)' + os.linesep)
|
||||
resha256 = re.compile(r'\s+SHA256: (?P<sha256>[:0-9A-F]{95})' + os.linesep)
|
||||
realias = re.compile('Alias name: (?P<alias>.+)\n')
|
||||
resha256 = re.compile(r'\s+SHA256: (?P<sha256>[:0-9A-F]{95})\n')
|
||||
fps = {}
|
||||
for block in p.output.split(('*' * 43) + os.linesep + '*' * 43):
|
||||
for block in p.output.split(('*' * 43) + '\n' + '*' * 43):
|
||||
s_alias = realias.search(block)
|
||||
s_sha256 = resha256.search(block)
|
||||
if s_alias and s_sha256:
|
||||
|
@ -112,9 +100,8 @@ def read_fingerprints_from_keystore():
|
|||
|
||||
|
||||
def sign_sig_key_fingerprint_list(jar_file):
|
||||
"""Sign the list of app-signing key fingerprints.
|
||||
|
||||
This is used primaryily by fdroid update to determine which APKs
|
||||
"""sign the list of app-signing key fingerprints which is
|
||||
used primaryily by fdroid update to determine which APKs
|
||||
where built and signed by F-Droid and which ones were
|
||||
manually added by users.
|
||||
"""
|
||||
|
@ -128,22 +115,19 @@ def sign_sig_key_fingerprint_list(jar_file):
|
|||
cmd += config['smartcardoptions']
|
||||
else:  # smartcards never use -keypass
|
||||
cmd += '-keypass:env', 'FDROID_KEY_PASS'
|
||||
env_vars = {
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config.get('keypass', ""),
|
||||
}
|
||||
env_vars = {'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config.get('keypass', "")}
|
||||
p = common.FDroidPopen(cmd, envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
raise FDroidException("Failed to sign '{}'!".format(jar_file))
|
||||
|
||||
|
||||
def store_publish_signer_fingerprints(appids, indent=None):
|
||||
def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
|
||||
"""Store list of all signing-key fingerprints for given appids to HD.
|
||||
|
||||
This list will later on be needed by fdroid update.
|
||||
"""
|
||||
if not os.path.exists('repo'):
|
||||
os.makedirs('repo')
|
||||
if not os.path.exists('stats'):
|
||||
os.makedirs('stats')
|
||||
data = OrderedDict()
|
||||
fps = read_fingerprints_from_keystore()
|
||||
for appid in sorted(appids):
|
||||
|
@ -151,22 +135,17 @@ def store_publish_signer_fingerprints(appids, indent=None):
|
|||
if alias in fps:
|
||||
data[appid] = {'signer': fps[key_alias(appid)]}
|
||||
|
||||
jar_file = os.path.join('repo', 'signer-index.jar')
|
||||
output = json.dumps(data, indent=indent)
|
||||
jar_file = os.path.join('stats', 'publishsigkeys.jar')
|
||||
with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar:
|
||||
jar.writestr('signer-index.json', output)
|
||||
with open(os.path.join('repo', 'signer-index.json'), 'w') as fp:
|
||||
fp.write(output)
|
||||
jar.writestr('publishsigkeys.json', json.dumps(data, indent=indent))
|
||||
sign_sig_key_fingerprint_list(jar_file)
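
For orientation, a hedged sketch of the data structure this writes to repo/signer-index.json (and, zipped and signed, to signer-index.jar); the appid and fingerprint below are made up:

import json

# Illustration only: one entry per published appid, mapping to the SHA-256
# certificate fingerprint of its signing key (colon-separated, as printed
# by `keytool -list -v`).
example = {
    "org.example.app": {"signer": "AA:BB:CC:DD:EE:FF:..."},
}
print(json.dumps(example, indent=2))
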
|
||||
|
||||
|
||||
def status_update_json(generatedKeys, signedApks):
|
||||
"""Output a JSON file with metadata about this run."""
|
||||
"""Output a JSON file with metadata about this run"""
|
||||
|
||||
logging.debug(_('Outputting JSON'))
|
||||
output = common.setup_status_output(start_timestamp)
|
||||
output['apksigner'] = shutil.which(config.get('apksigner', ''))
|
||||
output['jarsigner'] = shutil.which(config.get('jarsigner', ''))
|
||||
output['keytool'] = shutil.which(config.get('keytool', ''))
|
||||
if generatedKeys:
|
||||
output['generatedKeys'] = generatedKeys
|
||||
if signedApks:
|
||||
|
@ -175,8 +154,8 @@ def status_update_json(generatedKeys, signedApks):
|
|||
|
||||
|
||||
def check_for_key_collisions(allapps):
|
||||
"""Make sure there's no collision in keyaliases from apps.
|
||||
|
||||
"""
|
||||
Make sure there's no collision in keyaliases from apps.
|
||||
It was suggested at
|
||||
https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
|
||||
that a package could be crafted, such that it would use the same signing
|
||||
|
@ -185,16 +164,9 @@ def check_for_key_collisions(allapps):
|
|||
the colliding ID would be something that would be a) a valid package ID,
|
||||
and b) a sane-looking ID that would make its way into the repo.
|
||||
Nonetheless, to be sure, before publishing we check that there are no
|
||||
collisions, and refuse to do any publishing if that's the case.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
allapps
|
||||
a dict of all apps to process
|
||||
|
||||
Returns
|
||||
-------
|
||||
a list of all aliases corresponding to allapps
|
||||
collisions, and refuse to do any publishing if that's the case...
|
||||
:param allapps a dict of all apps to process
|
||||
:return: a list of all aliases corresponding to allapps
|
||||
"""
|
||||
allaliases = []
|
||||
for appid in allapps:
|
||||
|
@ -209,53 +181,30 @@ def check_for_key_collisions(allapps):
|
|||
|
||||
|
||||
def create_key_if_not_existing(keyalias):
|
||||
"""Ensure a signing key with the given keyalias exists.
|
||||
|
||||
Returns
|
||||
-------
|
||||
boolean
|
||||
True if a new key was created, False otherwise
|
||||
"""
|
||||
Ensures a signing key with the given keyalias exists
|
||||
:return: boolean, True if a new key was created, false otherwise
|
||||
"""
|
||||
# See if we already have a key for this application, and
|
||||
# if not generate one...
|
||||
env_vars = {
|
||||
'LC_ALL': 'C.UTF-8',
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config.get('keypass', ""),
|
||||
}
|
||||
cmd = [
|
||||
config['keytool'],
|
||||
'-list',
|
||||
'-alias',
|
||||
keyalias,
|
||||
'-keystore',
|
||||
config['keystore'],
|
||||
'-storepass:env',
|
||||
'FDROID_KEY_STORE_PASS',
|
||||
]
|
||||
env_vars = {'LC_ALL': 'C.UTF-8',
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config.get('keypass', "")}
|
||||
cmd = [config['keytool'], '-list',
|
||||
'-alias', keyalias, '-keystore', config['keystore'],
|
||||
'-storepass:env', 'FDROID_KEY_STORE_PASS']
|
||||
if config['keystore'] == 'NONE':
|
||||
cmd += config['smartcardoptions']
|
||||
p = FDroidPopen(cmd, envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
logging.info("Key does not exist - generating...")
|
||||
cmd = [
|
||||
config['keytool'],
|
||||
'-genkey',
|
||||
'-keystore',
|
||||
config['keystore'],
|
||||
'-alias',
|
||||
keyalias,
|
||||
'-keyalg',
|
||||
'RSA',
|
||||
'-keysize',
|
||||
'2048',
|
||||
'-validity',
|
||||
'10000',
|
||||
'-storepass:env',
|
||||
'FDROID_KEY_STORE_PASS',
|
||||
'-dname',
|
||||
config['keydname'],
|
||||
]
|
||||
cmd = [config['keytool'], '-genkey',
|
||||
'-keystore', config['keystore'],
|
||||
'-alias', keyalias,
|
||||
'-keyalg', 'RSA', '-keysize', '2048',
|
||||
'-validity', '10000',
|
||||
'-storepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-dname', config['keydname']]
|
||||
if config['keystore'] == 'NONE':
|
||||
cmd += config['smartcardoptions']
|
||||
else:
|
||||
|
@ -269,35 +218,22 @@ def create_key_if_not_existing(keyalias):
|
|||
|
||||
|
||||
def main():
|
||||
global config
|
||||
global config, options
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser(
|
||||
usage="%(prog)s [options] " "[APPID[:VERCODE] [APPID[:VERCODE] ...]]"
|
||||
)
|
||||
parser = ArgumentParser(usage="%(prog)s [options] "
|
||||
"[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--error-on-failed",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("When signing or verifying fails, exit with an error code."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"appid",
|
||||
nargs='*',
|
||||
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
|
||||
)
|
||||
parser.add_argument("appid", nargs='*',
|
||||
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = common.parse_args(parser)
|
||||
options = parser.parse_args()
|
||||
metadata.warnings_action = options.W
|
||||
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
if not ('jarsigner' in config and 'keytool' in config):
|
||||
logging.critical(
|
||||
_('Java JDK not found! Install in standard location or set java_paths!')
|
||||
)
|
||||
logging.critical(_('Java JDK not found! Install in standard location or set java_paths!'))
|
||||
sys.exit(1)
|
||||
|
||||
common.assert_config_keystore(config)
|
||||
|
@ -329,22 +265,16 @@ def main():
|
|||
|
||||
allapps = metadata.read_metadata()
|
||||
vercodes = common.read_pkg_args(options.appid, True)
|
||||
common.get_metadata_files(vercodes) # only check appids
|
||||
signed_apks = dict()
|
||||
generated_keys = dict()
|
||||
allaliases = check_for_key_collisions(allapps)
|
||||
logging.info(
|
||||
ngettext(
|
||||
'{0} app, {1} key aliases', '{0} apps, {1} key aliases', len(allapps)
|
||||
).format(len(allapps), len(allaliases))
|
||||
)
|
||||
logging.info(ngettext('{0} app, {1} key aliases',
|
||||
'{0} apps, {1} key aliases', len(allapps)).format(len(allapps), len(allaliases)))
|
||||
|
||||
failed = 0
|
||||
# Process any APKs or ZIPs that are waiting to be signed...
|
||||
for apkfile in sorted(
|
||||
glob.glob(os.path.join(unsigned_dir, '*.apk'))
|
||||
+ glob.glob(os.path.join(unsigned_dir, '*.zip'))
|
||||
):
|
||||
for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))
|
||||
+ glob.glob(os.path.join(unsigned_dir, '*.zip'))):
|
||||
|
||||
appid, vercode = common.publishednameinfo(apkfile)
|
||||
apkfilename = os.path.basename(apkfile)
|
||||
if vercodes and appid not in vercodes:
|
||||
|
@ -357,17 +287,13 @@ def main():
|
|||
# There ought to be valid metadata for this app, otherwise why are we
|
||||
# trying to publish it?
|
||||
if appid not in allapps:
|
||||
logging.error(
|
||||
"Unexpected {0} found in unsigned directory".format(apkfilename)
|
||||
)
|
||||
logging.error("Unexpected {0} found in unsigned directory"
|
||||
.format(apkfilename))
|
||||
sys.exit(1)
|
||||
app = allapps[appid]
|
||||
|
||||
build = None
|
||||
for b in app.get("Builds", ()):
|
||||
if b.get("versionCode") == vercode:
|
||||
build = b
|
||||
if app.Binaries or (build and build.binary):
|
||||
if app.Binaries:
|
||||
|
||||
# It's an app where we build from source, and verify the apk
|
||||
# contents against a developer's binary, and then publish their
|
||||
# version if everything checks out.
|
||||
|
@ -378,22 +304,14 @@ def main():
|
|||
srcapk = srcapk.replace(unsigned_dir, binaries_dir)
|
||||
|
||||
if not os.path.isfile(srcapk):
|
||||
logging.error(
|
||||
"...reference binary missing - publish skipped: '{refpath}'".format(
|
||||
refpath=srcapk
|
||||
)
|
||||
)
|
||||
failed += 1
|
||||
logging.error("...reference binary missing - publish skipped: "
|
||||
"'{refpath}'".format(refpath=srcapk))
|
||||
else:
|
||||
# Compare our unsigned one with the downloaded one...
|
||||
compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
|
||||
if compare_result:
|
||||
logging.error(
|
||||
"...verification failed - publish skipped : {result}".format(
|
||||
result=compare_result
|
||||
)
|
||||
)
|
||||
failed += 1
|
||||
logging.error("...verification failed - publish skipped : "
|
||||
"{result}".format(result=compare_result))
|
||||
else:
|
||||
# Success! So move the downloaded file to the repo, and remove
|
||||
# our built version.
|
||||
|
@ -404,6 +322,7 @@ def main():
|
|||
logging.info('Published ' + apkfilename)
|
||||
|
||||
elif apkfile.endswith('.zip'):
|
||||
|
||||
# OTA ZIPs built by fdroid do not need to be signed by jarsigner,
|
||||
# just to be moved into place in the repo
|
||||
shutil.move(apkfile, os.path.join(output_dir, apkfilename))
|
||||
|
@ -411,6 +330,7 @@ def main():
|
|||
logging.info('Published ' + apkfilename)
|
||||
|
||||
else:
|
||||
|
||||
# It's a 'normal' app, i.e. we sign and publish it...
|
||||
skipsigning = False
|
||||
|
||||
|
@ -421,23 +341,22 @@ def main():
|
|||
# metadata. This means we're going to prepare both a locally
|
||||
# signed APK and a version signed with the developers key.
|
||||
|
||||
signature_file, _ignored, manifest, v2_files = signingfiles
|
||||
signaturefile, signedfile, manifest = signingfiles
|
||||
|
||||
with open(signature_file, 'rb') as f:
|
||||
devfp = common.signer_fingerprint_short(
|
||||
common.get_certificate(f.read())
|
||||
)
|
||||
with open(signaturefile, 'rb') as f:
|
||||
devfp = common.signer_fingerprint_short(common.get_certificate(f.read()))
|
||||
devsigned = '{}_{}_{}.apk'.format(appid, vercode, devfp)
|
||||
devsignedtmp = os.path.join(tmp_dir, devsigned)
|
||||
shutil.copy(apkfile, devsignedtmp)
|
||||
|
||||
common.apk_implant_signatures(apkfile, devsignedtmp, manifest=manifest)
|
||||
common.apk_implant_signatures(devsignedtmp, signaturefile,
|
||||
signedfile, manifest)
|
||||
if common.verify_apk_signature(devsignedtmp):
|
||||
shutil.move(devsignedtmp, os.path.join(output_dir, devsigned))
|
||||
else:
|
||||
os.remove(devsignedtmp)
|
||||
logging.error('...verification failed - skipping: %s', devsigned)
|
||||
skipsigning = True
|
||||
failed += 1
|
||||
|
||||
# Now we sign with the F-Droid key.
|
||||
if not skipsigning:
|
||||
|
@ -449,30 +368,25 @@ def main():
|
|||
|
||||
signed_apk_path = os.path.join(output_dir, apkfilename)
|
||||
if os.path.exists(signed_apk_path):
|
||||
raise BuildException(
|
||||
_(
|
||||
"Refusing to sign '{path}', file exists in both {dir1} and {dir2} folder."
|
||||
).format(path=apkfilename, dir1=unsigned_dir, dir2=output_dir)
|
||||
)
|
||||
raise BuildException("Refusing to sign '{0}' file exists in both "
|
||||
"{1} and {2} folder.".format(apkfilename,
|
||||
unsigned_dir,
|
||||
output_dir))
|
||||
|
||||
# Sign the application...
|
||||
# Sign and zipalign the application...
|
||||
common.sign_apk(apkfile, signed_apk_path, keyalias)
|
||||
if appid not in signed_apks:
|
||||
signed_apks[appid] = []
|
||||
signed_apks[appid].append({"keyalias": keyalias, "filename": apkfile})
|
||||
signed_apks[appid].append({"keyalias": keyalias,
|
||||
"filename": apkfile})
|
||||
|
||||
publish_source_tarball(apkfilename, unsigned_dir, output_dir)
|
||||
logging.info('Published ' + apkfilename)
|
||||
|
||||
store_publish_signer_fingerprints(allapps.keys())
|
||||
store_stats_fdroid_signing_key_fingerprints(allapps.keys())
|
||||
status_update_json(generated_keys, signed_apks)
|
||||
logging.info('published list signing-key fingerprints')
|
||||
|
||||
if failed:
|
||||
logging.error(_('%d APKs failed to be signed or verified!') % failed)
|
||||
if options.error_on_failed:
|
||||
sys.exit(failed)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -17,17 +17,20 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from . import common
|
||||
from . import metadata
|
||||
|
||||
from . import common, metadata
|
||||
options = None
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = parser.parse_args()
|
||||
metadata.warnings_action = options.W
|
||||
common.read_config()
|
||||
common.read_config(None)
|
||||
|
||||
metadata.read_metadata()
|
||||
|
||||
|
|
|
@ -17,73 +17,56 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import io
|
||||
import logging
|
||||
import shutil
|
||||
import tempfile
|
||||
from argparse import ArgumentParser
|
||||
from pathlib import Path
|
||||
import os
|
||||
import logging
|
||||
import io
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
from . import _, common, metadata
|
||||
from . import _
|
||||
from . import common
|
||||
from . import metadata
|
||||
|
||||
config = None
|
||||
options = None
|
||||
|
||||
|
||||
def proper_format(app):
|
||||
s = io.StringIO()
|
||||
# TODO: currently reading entire file again, should reuse first
|
||||
# read in metadata.py
|
||||
cur_content = Path(app.metadatapath).read_text(encoding='utf-8')
|
||||
if Path(app.metadatapath).suffix == '.yml':
|
||||
with open(app.metadatapath, 'r') as f:
|
||||
cur_content = f.read()
|
||||
if app.metadatapath.endswith('.yml'):
|
||||
metadata.write_yaml(s, app)
|
||||
content = s.getvalue()
|
||||
s.close()
|
||||
return content == cur_content
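
proper_format() only answers whether rewriting a metadata file would change it; a hedged sketch of how it backs the --list mode in main() below:

# Sketch: report files that are not in canonical form without touching them.
# `apps` is assumed to come from common.read_app_args(), as in main().
for appid, app in apps.items():
    if not proper_format(app):
        print(app.metadatapath)  # would be rewritten by `fdroid rewritemeta`
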
|
||||
|
||||
|
||||
def remove_blank_flags_from_builds(builds):
|
||||
"""Remove unset entries from Builds so they are not written out."""
|
||||
if not builds:
|
||||
return list()
|
||||
newbuilds = list()
|
||||
for build in builds:
|
||||
new = dict()
|
||||
for k in metadata.build_flags:
|
||||
v = build.get(k)
|
||||
# 0 is valid value, it should not be stripped
|
||||
if v is None or v is False or v == '' or v == dict() or v == list():
|
||||
continue
|
||||
new[k] = v
|
||||
newbuilds.append(new)
|
||||
return newbuilds
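
A tiny invented example of what remove_blank_flags_from_builds() strips:

# Illustration only: empty/None flags are dropped, real values survive.
builds = [{'versionName': '1.0', 'versionCode': 1, 'gradle': [], 'init': ''}]
print(remove_blank_flags_from_builds(builds))
# roughly: [{'versionName': '1.0', 'versionCode': 1}]
# (key order follows metadata.build_flags, and a value of 0 is kept on purpose)
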
|
||||
|
||||
|
||||
def main():
|
||||
global config
|
||||
|
||||
global config, options
|
||||
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--list",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("List files that would be reformatted (dry run)"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"appid", nargs='*', help=_("application ID of file to operate on")
|
||||
)
|
||||
parser.add_argument("-l", "--list", action="store_true", default=False,
|
||||
help=_("List files that would be reformatted"))
|
||||
parser.add_argument("appid", nargs='*', help=_("application ID of file to operate on"))
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = common.parse_args(parser)
|
||||
options = parser.parse_args()
|
||||
metadata.warnings_action = options.W
|
||||
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
apps = common.read_app_args(options.appid)
|
||||
# Get all apps...
|
||||
allapps = metadata.read_metadata(options.appid)
|
||||
apps = common.read_app_args(options.appid, allapps, False)
|
||||
|
||||
for appid, app in apps.items():
|
||||
path = Path(app.metadatapath)
|
||||
if path.suffix == '.yml':
|
||||
path = app.metadatapath
|
||||
if path.endswith('.yml'):
|
||||
logging.info(_("Rewriting '{appid}'").format(appid=appid))
|
||||
else:
|
||||
logging.warning(_('Cannot rewrite "{path}"').format(path=path))
|
||||
|
@ -94,15 +77,21 @@ def main():
|
|||
print(path)
|
||||
continue
|
||||
|
||||
# TODO these should be moved to metadata.write_yaml()
|
||||
builds = remove_blank_flags_from_builds(app.get('Builds'))
|
||||
if builds:
|
||||
app['Builds'] = builds
|
||||
newbuilds = []
|
||||
for build in app.builds:
|
||||
new = metadata.Build()
|
||||
for k in metadata.build_flags:
|
||||
v = build[k]
|
||||
if v is None or v is False or v == [] or v == '':
|
||||
continue
|
||||
new[k] = v
|
||||
newbuilds.append(new)
|
||||
app.builds = newbuilds
|
||||
|
||||
# rewrite to temporary file before overwriting existing
|
||||
# rewrite to temporary file before overwriting existsing
|
||||
# file in case there's a bug in write_metadata
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
tmp_path = Path(tmpdir) / path.name
|
||||
tmp_path = os.path.join(tmpdir, os.path.basename(path))
|
||||
metadata.write_metadata(tmp_path, app)
|
||||
shutil.move(tmp_path, path)
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -15,17 +15,21 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from . import _, common
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from . import _
|
||||
from . import common
|
||||
from . import net
|
||||
from .exception import FDroidException
|
||||
|
||||
|
||||
def extract_signature(apkpath):
|
||||
|
||||
if not os.path.exists(apkpath):
|
||||
raise FDroidException("file APK does not exists '{}'".format(apkpath))
|
||||
if not common.verify_apk_signature(apkpath):
|
||||
|
@ -42,6 +46,7 @@ def extract_signature(apkpath):
|
|||
|
||||
|
||||
def extract(options):
|
||||
|
||||
# Create tmp dir if missing…
|
||||
tmp_dir = 'tmp'
|
||||
if not os.path.exists(tmp_dir):
|
||||
|
@ -57,40 +62,26 @@ def extract(options):
|
|||
try:
|
||||
if os.path.isfile(apk):
|
||||
sigdir = extract_signature(apk)
|
||||
logging.info(
|
||||
_("Fetched signatures for '{apkfilename}' -> '{sigdir}'").format(
|
||||
apkfilename=apk, sigdir=sigdir
|
||||
)
|
||||
)
|
||||
logging.info(_("Fetched signatures for '{apkfilename}' -> '{sigdir}'")
|
||||
.format(apkfilename=apk, sigdir=sigdir))
|
||||
elif httpre.match(apk):
|
||||
if apk.startswith('https') or options.no_check_https:
|
||||
try:
|
||||
from . import net
|
||||
|
||||
tmp_apk = os.path.join(tmp_dir, 'signed.apk')
|
||||
net.download_file(apk, tmp_apk)
|
||||
sigdir = extract_signature(tmp_apk)
|
||||
logging.info(
|
||||
_(
|
||||
"Fetched signatures for '{apkfilename}' -> '{sigdir}'"
|
||||
).format(apkfilename=apk, sigdir=sigdir)
|
||||
)
|
||||
logging.info(_("Fetched signatures for '{apkfilename}' -> '{sigdir}'")
|
||||
.format(apkfilename=apk, sigdir=sigdir))
|
||||
finally:
|
||||
if tmp_apk and os.path.exists(tmp_apk):
|
||||
os.remove(tmp_apk)
|
||||
else:
|
||||
logging.warning(
|
||||
_(
|
||||
'refuse downloading via insecure HTTP connection '
|
||||
'(use HTTPS or specify --no-https-check): {apkfilename}'
|
||||
).format(apkfilename=apk)
|
||||
)
|
||||
logging.warning(_('refuse downloading via insecure HTTP connection '
|
||||
'(use HTTPS or specify --no-https-check): {apkfilename}')
|
||||
.format(apkfilename=apk))
|
||||
except FDroidException as e:
|
||||
logging.warning(
|
||||
_("Failed fetching signatures for '{apkfilename}': {error}").format(
|
||||
apkfilename=apk, error=e
|
||||
)
|
||||
)
|
||||
logging.warning(_("Failed fetching signatures for '{apkfilename}': {error}")
|
||||
.format(apkfilename=apk, error=e))
|
||||
if e.detail:
|
||||
logging.debug(e.detail)
|
||||
|
||||
|
@ -98,12 +89,12 @@ def extract(options):
|
|||
def main():
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"APK", nargs='*', help=_("signed APK, either a file-path or HTTPS URL.")
|
||||
)
|
||||
parser.add_argument("APK", nargs='*',
|
||||
help=_("signed APK, either a file-path or HTTPS URL."))
|
||||
parser.add_argument("--no-check-https", action="store_true", default=False)
|
||||
options = common.parse_args(parser)
|
||||
common.set_console_logging(options.verbose, options.color)
|
||||
common.read_config()
|
||||
options = parser.parse_args()
|
||||
|
||||
# Read config.py...
|
||||
common.read_config(options)
|
||||
|
||||
extract(options)
|
||||
|
|
|
@ -16,154 +16,70 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import zipfile
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
|
||||
from . import _, common, metadata
|
||||
from . import _
|
||||
from . import common
|
||||
from .exception import FDroidException
|
||||
|
||||
config = None
|
||||
options = None
|
||||
start_timestamp = time.gmtime()
|
||||
|
||||
|
||||
def sign_jar(jar, use_old_algs=False):
|
||||
"""Sign a JAR file with the best available algorithm.
|
||||
|
||||
The current signing method uses apksigner to sign the JAR so that
|
||||
it will automatically select algorithms that are compatible with
|
||||
Android SDK 23, which added the most recent algorithms:
|
||||
https://developer.android.com/reference/java/security/Signature
|
||||
|
||||
This signing method then inherits the default signing
algorithm settings, since Java and Android both maintain those.
|
||||
That helps avoid a repeat of being stuck on an old signing
|
||||
algorithm. That means specifically that this call to apksigner
|
||||
does not specify any of the algorithms.
|
||||
|
||||
The old indexes must be signed by SHA1withRSA otherwise they will
|
||||
no longer be compatible with old Androids.
|
||||
def sign_jar(jar):
|
||||
"""
|
||||
Sign a JAR file with Java's jarsigner.
|
||||
|
||||
This method requires a properly initialized config object.
|
||||
|
||||
This does use old hashing algorithms, i.e. SHA1, but that's not
|
||||
broken yet for file verification. This could be set to SHA256,
|
||||
but then Android < 4.3 would not be able to verify it.
|
||||
https://code.google.com/p/android/issues/detail?id=38321
|
||||
"""
|
||||
if use_old_algs:
|
||||
# This does use old hashing algorithms, i.e. SHA1, but that's not
|
||||
# broken yet for file verification. This could be set to SHA256,
|
||||
# but then Android < 4.3 would not be able to verify it.
|
||||
# https://code.google.com/p/android/issues/detail?id=38321
|
||||
args = [
|
||||
config['jarsigner'],
|
||||
'-keystore',
|
||||
config['keystore'],
|
||||
'-storepass:env',
|
||||
'FDROID_KEY_STORE_PASS',
|
||||
'-digestalg',
|
||||
'SHA1',
|
||||
'-sigalg',
|
||||
'SHA1withRSA',
|
||||
jar,
|
||||
config['repo_keyalias'],
|
||||
]
|
||||
if config['keystore'] == 'NONE':
|
||||
args += config['smartcardoptions']
|
||||
else:  # smartcards never use -keypass
|
||||
args += ['-keypass:env', 'FDROID_KEY_PASS']
|
||||
else:
|
||||
# https://developer.android.com/studio/command-line/apksigner
|
||||
args = [
|
||||
config['apksigner'],
|
||||
'sign',
|
||||
'--min-sdk-version',
|
||||
'23', # enable all current algorithms
|
||||
'--max-sdk-version',
|
||||
'24', # avoid future incompatible algorithms
|
||||
# disable all APK signature types, only use JAR sigs aka v1
|
||||
'--v1-signing-enabled',
|
||||
'true',
|
||||
'--v2-signing-enabled',
|
||||
'false',
|
||||
'--v3-signing-enabled',
|
||||
'false',
|
||||
'--v4-signing-enabled',
|
||||
'false',
|
||||
'--ks',
|
||||
config['keystore'],
|
||||
'--ks-pass',
|
||||
'env:FDROID_KEY_STORE_PASS',
|
||||
'--ks-key-alias',
|
||||
config['repo_keyalias'],
|
||||
]
|
||||
if config['keystore'] == 'NONE':
|
||||
args += common.get_apksigner_smartcardoptions(config['smartcardoptions'])
|
||||
else:  # smartcards never use --key-pass
|
||||
args += ['--key-pass', 'env:FDROID_KEY_PASS']
|
||||
args += [jar]
|
||||
args = [config['jarsigner'], '-keystore', config['keystore'],
|
||||
'-storepass:env', 'FDROID_KEY_STORE_PASS',
|
||||
'-digestalg', 'SHA1', '-sigalg', 'SHA1withRSA',
|
||||
jar, config['repo_keyalias']]
|
||||
if config['keystore'] == 'NONE':
|
||||
args += config['smartcardoptions']
|
||||
else:  # smartcards never use -keypass
|
||||
args += ['-keypass:env', 'FDROID_KEY_PASS']
|
||||
env_vars = {
|
||||
'FDROID_KEY_STORE_PASS': config['keystorepass'],
|
||||
'FDROID_KEY_PASS': config.get('keypass', ""),
|
||||
}
|
||||
p = common.FDroidPopen(args, envs=env_vars)
|
||||
if not use_old_algs and p.returncode != 0:
|
||||
# workaround for apksigner v30 on f-droid.org publish server
|
||||
v4 = args.index("--v4-signing-enabled")
|
||||
del args[v4 + 1]
|
||||
del args[v4]
|
||||
p = common.FDroidPopen(args, envs=env_vars)
|
||||
if p.returncode != 0:
|
||||
raise FDroidException("Failed to sign %s: %s" % (jar, p.output))
|
||||
if p.returncode != 0:
|
||||
raise FDroidException("Failed to sign %s!" % jar)
|
||||
|
||||
|
||||
def sign_index(repodir, json_name):
|
||||
"""Sign data file like entry.json to make a signed JAR like entry.jar.
|
||||
|
||||
The data file like index-v1.json means that there is unsigned
|
||||
data. That file is then stuck into a jar and signed by the
|
||||
signing process. This is a bit different than sign_jar, which is
|
||||
used for index.jar: that creates index.xml then puts that in a
|
||||
index_unsigned.jar, then that file is signed.
|
||||
|
||||
This also checks to make sure that the JSON files are intact
|
||||
before signing them. Broken JSON files should never be signed, so
|
||||
taking some extra time and failing hard is the preferred
|
||||
option. This signing process can happen on an entirely separate
|
||||
machine and file tree, so this ensures that nothing got broken
|
||||
during transfer.
|
||||
|
||||
def sign_index_v1(repodir, json_name):
|
||||
"""
|
||||
json_file = os.path.join(repodir, json_name)
|
||||
with open(json_file, encoding="utf-8") as fp:
|
||||
data = json.load(fp)
|
||||
if json_name == 'entry.json':
|
||||
index_file = os.path.join(repodir, data['index']['name'].lstrip('/'))
|
||||
sha256 = common.sha256sum(index_file)
|
||||
if sha256 != data['index']['sha256']:
|
||||
raise FDroidException(
|
||||
_('%s has bad SHA-256: %s') % (index_file, sha256)
|
||||
)
|
||||
with open(index_file) as fp:
|
||||
index = json.load(fp)
|
||||
if not isinstance(index, dict):
|
||||
raise FDroidException(_('%s did not produce a dict!') % index_file)
|
||||
elif json_name == 'index-v1.json':
|
||||
[metadata.App(app) for app in data["apps"]]
|
||||
Sign index-v1.json to make index-v1.jar
|
||||
|
||||
This is a bit different than index.jar: instead of there being index.xml
|
||||
and index_unsigned.jar, the presence of index-v1.json means that there is
|
||||
unsigned data. That file is then stuck into a jar and signed by the
|
||||
signing process. index-v1.json is never published to the repo. It is
|
||||
included in the binary transparency log, if that is enabled.
|
||||
"""
|
||||
name, ext = common.get_extension(json_name)
|
||||
index_file = os.path.join(repodir, json_name)
|
||||
jar_file = os.path.join(repodir, name + '.jar')
|
||||
with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar:
|
||||
jar.write(json_file, json_name)
|
||||
|
||||
if json_name in ('index.xml', 'index-v1.json'):
|
||||
sign_jar(jar_file, use_old_algs=True)
|
||||
else:
|
||||
sign_jar(jar_file)
|
||||
jar.write(index_file, json_name)
|
||||
sign_jar(jar_file)
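
A short sketch of how sign_index() is driven, mirroring main() below; the repo directory is assumed to exist:

# Sketch: sign whichever current index formats are present in repo/.
for json_name in ('index-v1.json', 'entry.json'):
    if os.path.exists(os.path.join('repo', json_name)):
        sign_index('repo', json_name)
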
|
||||
|
||||
|
||||
def status_update_json(signed):
|
||||
"""Output a JSON file with metadata about this run."""
|
||||
"""Output a JSON file with metadata about this run"""
|
||||
|
||||
logging.debug(_('Outputting JSON'))
|
||||
output = common.setup_status_output(start_timestamp)
|
||||
if signed:
|
||||
|
@ -172,20 +88,18 @@ def status_update_json(signed):
|
|||
|
||||
|
||||
def main():
|
||||
global config
|
||||
|
||||
global config, options
|
||||
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
common.parse_args(parser)
|
||||
options = parser.parse_args()
|
||||
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
if 'jarsigner' not in config:
|
||||
raise FDroidException(
|
||||
_(
|
||||
'Java jarsigner not found! Install in standard location or set java_paths!'
|
||||
)
|
||||
)
|
||||
_('Java jarsigner not found! Install in standard location or set java_paths!'))
|
||||
|
||||
repodirs = ['repo']
|
||||
if config['archive_older'] != 0:
|
||||
|
@ -207,14 +121,8 @@ def main():
|
|||
json_name = 'index-v1.json'
|
||||
index_file = os.path.join(output_dir, json_name)
|
||||
if os.path.exists(index_file):
|
||||
sign_index(output_dir, json_name)
|
||||
logging.info('Signed ' + index_file)
|
||||
signed.append(index_file)
|
||||
|
||||
json_name = 'entry.json'
|
||||
index_file = os.path.join(output_dir, json_name)
|
||||
if os.path.exists(index_file):
|
||||
sign_index(output_dir, json_name)
|
||||
sign_index_v1(output_dir, json_name)
|
||||
os.remove(index_file)
|
||||
logging.info('Signed ' + index_file)
|
||||
signed.append(index_file)
|
||||
|
||||
|
|
306
fdroidserver/stats.py
Normal file
|
@ -0,0 +1,306 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# stats.py - part of the FDroid server tools
|
||||
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import traceback
|
||||
import glob
|
||||
import json
|
||||
from argparse import ArgumentParser
|
||||
import paramiko
|
||||
import socket
|
||||
import logging
|
||||
import subprocess
|
||||
from collections import Counter
|
||||
|
||||
from . import _
|
||||
from . import common
|
||||
from . import metadata
|
||||
|
||||
|
||||
def carbon_send(key, value):
|
||||
s = socket.socket()
|
||||
s.connect((config['carbon_host'], config['carbon_port']))
|
||||
msg = '%s %d %d\n' % (key, value, int(time.time()))
|
||||
s.sendall(msg.encode('ascii'))
|
||||
s.close()
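
carbon_send() speaks the plain-text Graphite/Carbon protocol, one "metric value timestamp" line per call. A hedged usage sketch; the host and metric name are invented, and the module-level config is assumed to be set:

# Illustration only: ship one counter to a Carbon/Graphite instance.
config = {'carbon_host': 'graphite.example.org', 'carbon_port': 2003}
carbon_send('fdroid.repo.downloads.total', 12345)
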
|
||||
|
||||
|
||||
options = None
|
||||
config = None
|
||||
|
||||
|
||||
def most_common_stable(counts):
|
||||
pairs = []
|
||||
for s in counts:
|
||||
pairs.append((s, counts[s]))
|
||||
return sorted(pairs, key=lambda t: (-t[1], t[0]))
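
most_common_stable() is a deterministic replacement for Counter.most_common(): ties are broken alphabetically so repeated runs over the same logs produce identical output. A tiny example:

from collections import Counter

counts = Counter({'org.b.app': 3, 'org.a.app': 3, 'org.c.app': 1})
print(most_common_stable(counts))
# [('org.a.app', 3), ('org.b.app', 3), ('org.c.app', 1)]
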
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
global options, config
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser()
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument("-d", "--download", action="store_true", default=False,
|
||||
help=_("Download logs we don't have"))
|
||||
parser.add_argument("--recalc", action="store_true", default=False,
|
||||
help=_("Recalculate aggregate stats - use when changes "
|
||||
"have been made that would invalidate old cached data."))
|
||||
parser.add_argument("--nologs", action="store_true", default=False,
|
||||
help=_("Don't do anything logs-related"))
|
||||
metadata.add_metadata_arguments(parser)
|
||||
options = parser.parse_args()
|
||||
metadata.warnings_action = options.W
|
||||
|
||||
config = common.read_config(options)
|
||||
|
||||
if not config['update_stats']:
|
||||
logging.info("Stats are disabled - set \"update_stats = True\" in your config.py")
|
||||
sys.exit(1)
|
||||
|
||||
# Get all metadata-defined apps...
|
||||
allmetaapps = [app for app in metadata.read_metadata().values()]
|
||||
metaapps = [app for app in allmetaapps if not app.Disabled]
|
||||
|
||||
statsdir = 'stats'
|
||||
logsdir = os.path.join(statsdir, 'logs')
|
||||
datadir = os.path.join(statsdir, 'data')
|
||||
if not os.path.exists(statsdir):
|
||||
os.mkdir(statsdir)
|
||||
if not os.path.exists(logsdir):
|
||||
os.mkdir(logsdir)
|
||||
if not os.path.exists(datadir):
|
||||
os.mkdir(datadir)
|
||||
|
||||
if options.download:
|
||||
# Get any access logs we don't have...
|
||||
ssh = None
|
||||
ftp = None
|
||||
try:
|
||||
logging.info('Retrieving logs')
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.load_system_host_keys()
|
||||
ssh.connect(config['stats_server'], username=config['stats_user'],
|
||||
timeout=10, key_filename=config['webserver_keyfile'])
|
||||
ftp = ssh.open_sftp()
|
||||
ftp.get_channel().settimeout(60)
|
||||
logging.info("...connected")
|
||||
|
||||
ftp.chdir('logs')
|
||||
files = ftp.listdir()
|
||||
for f in files:
|
||||
if f.startswith('access-') and f.endswith('.log.gz'):
|
||||
|
||||
destpath = os.path.join(logsdir, f)
|
||||
destsize = ftp.stat(f).st_size
|
||||
if not os.path.exists(destpath) \
|
||||
or os.path.getsize(destpath) != destsize:
|
||||
logging.debug("...retrieving " + f)
|
||||
ftp.get(f, destpath)
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
finally:
|
||||
# Disconnect
|
||||
if ftp is not None:
|
||||
ftp.close()
|
||||
if ssh is not None:
|
||||
ssh.close()
|
||||
|
||||
knownapks = common.KnownApks()
|
||||
unknownapks = []
|
||||
|
||||
if not options.nologs:
|
||||
# Process logs
|
||||
logging.info('Processing logs...')
|
||||
appscount = Counter()
|
||||
appsvercount = Counter()
|
||||
logexpr = r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' \
|
||||
+ r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' \
|
||||
+ r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
|
||||
logsearch = re.compile(logexpr).search
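As an illustration, an access-log line like the following (values invented) matches logexpr, yielding ip='203.0.113.7', uri='/repo/org.example.app_42.apk' and statuscode='200'; the loop below then only counts 200 responses for URIs ending in '.apk' from IPs not listed in stats_ignore:

    203.0.113.7 - - [28/Feb/2012:00:00:01 +0000] "GET /repo/org.example.app_42.apk HTTP/1.1" 200 12345 "-" "F-Droid 1.0"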
|
||||
for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
|
||||
logging.debug('...' + logfile)
|
||||
|
||||
# Get the date for this log - e.g. 2012-02-28
|
||||
thisdate = os.path.basename(logfile)[7:-7]
|
||||
|
||||
agg_path = os.path.join(datadir, thisdate + '.json')
|
||||
if not options.recalc and os.path.exists(agg_path):
|
||||
# Use previously calculated aggregate data
|
||||
with open(agg_path, 'r') as f:
|
||||
today = json.load(f)
|
||||
|
||||
else:
|
||||
# Calculate from logs...
|
||||
|
||||
today = {
|
||||
'apps': Counter(),
|
||||
'appsver': Counter(),
|
||||
'unknown': []
|
||||
}
|
||||
|
||||
p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
|
||||
matches = (logsearch(line) for line in p.stdout)
|
||||
for match in matches:
|
||||
if not match:
|
||||
continue
|
||||
if match.group('statuscode') != '200':
|
||||
continue
|
||||
if match.group('ip') in config['stats_ignore']:
|
||||
continue
|
||||
uri = match.group('uri')
|
||||
if not uri.endswith('.apk'):
|
||||
continue
|
||||
_ignored, apkname = os.path.split(uri)
|
||||
app = knownapks.getapp(apkname)
|
||||
if app:
|
||||
appid, _ignored = app
|
||||
today['apps'][appid] += 1
|
||||
# Strip the '.apk' from apkname
|
||||
appver = apkname[:-4]
|
||||
today['appsver'][appver] += 1
|
||||
else:
|
||||
if apkname not in today['unknown']:
|
||||
today['unknown'].append(apkname)
|
||||
|
||||
# Save calculated aggregate data for today to cache
|
||||
with open(agg_path, 'w') as f:
|
||||
json.dump(today, f)
|
||||
|
||||
# Add today's stats (whether cached or recalculated) to the total
|
||||
for appid in today['apps']:
|
||||
appscount[appid] += today['apps'][appid]
|
||||
for appid in today['appsver']:
|
||||
appsvercount[appid] += today['appsver'][appid]
|
||||
for uk in today['unknown']:
|
||||
if uk not in unknownapks:
|
||||
unknownapks.append(uk)
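The per-day cache written above (stats/data/<date>.json) serializes the Counters as plain JSON objects; a hypothetical stats/data/2012-02-28.json might look like:

    {"apps": {"org.example.app": 3},
     "appsver": {"org.example.app_41": 1, "org.example.app_42": 2},
     "unknown": ["not-in-index.apk"]}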
|
||||
|
||||
# Calculate and write stats for total downloads...
|
||||
lst = []
|
||||
alldownloads = 0
|
||||
for appid in appscount:
|
||||
count = appscount[appid]
|
||||
lst.append(appid + " " + str(count))
|
||||
if config['stats_to_carbon']:
|
||||
carbon_send('fdroid.download.' + appid.replace('.', '_'),
|
||||
count)
|
||||
alldownloads += count
|
||||
lst.append("ALL " + str(alldownloads))
|
||||
with open(os.path.join(statsdir, 'total_downloads_app.txt'), 'w') as f:
|
||||
f.write('# Total downloads by application, since October 2011\n')
|
||||
for line in sorted(lst):
|
||||
f.write(line + '\n')
|
||||
|
||||
lst = []
|
||||
for appver in appsvercount:
|
||||
count = appsvercount[appver]
|
||||
lst.append(appver + " " + str(count))
|
||||
|
||||
with open(os.path.join(statsdir, 'total_downloads_app_version.txt'), 'w') as f:
|
||||
f.write('# Total downloads by application and version, '
|
||||
'since October 2011\n')
|
||||
for line in sorted(lst):
|
||||
f.write(line + "\n")
|
||||
|
||||
# Calculate and write stats for repo types...
|
||||
logging.info("Processing repo types...")
|
||||
repotypes = Counter()
|
||||
for app in metaapps:
|
||||
rtype = app.RepoType or 'none'
|
||||
if rtype == 'srclib':
|
||||
rtype = common.getsrclibvcs(app.Repo)
|
||||
repotypes[rtype] += 1
|
||||
with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
|
||||
for rtype, count in most_common_stable(repotypes):
|
||||
f.write(rtype + ' ' + str(count) + '\n')
|
||||
|
||||
# Calculate and write stats for update check modes...
|
||||
logging.info("Processing update check modes...")
|
||||
ucms = Counter()
|
||||
for app in metaapps:
|
||||
checkmode = app.UpdateCheckMode
|
||||
if checkmode.startswith('RepoManifest/'):
|
||||
checkmode = checkmode[:12]
|
||||
if checkmode.startswith('Tags '):
|
||||
checkmode = checkmode[:4]
|
||||
ucms[checkmode] += 1
|
||||
with open(os.path.join(statsdir, 'update_check_modes.txt'), 'w') as f:
|
||||
for checkmode, count in most_common_stable(ucms):
|
||||
f.write(checkmode + ' ' + str(count) + '\n')
|
||||
|
||||
logging.info("Processing categories...")
|
||||
ctgs = Counter()
|
||||
for app in metaapps:
|
||||
for category in app.Categories:
|
||||
ctgs[category] += 1
|
||||
with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
|
||||
for category, count in most_common_stable(ctgs):
|
||||
f.write(category + ' ' + str(count) + '\n')
|
||||
|
||||
logging.info("Processing antifeatures...")
|
||||
afs = Counter()
|
||||
for app in metaapps:
|
||||
if app.AntiFeatures is None:
|
||||
continue
|
||||
for antifeature in app.AntiFeatures:
|
||||
afs[antifeature] += 1
|
||||
with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
|
||||
for antifeature, count in most_common_stable(afs):
|
||||
f.write(antifeature + ' ' + str(count) + '\n')
|
||||
|
||||
# Calculate and write stats for licenses...
|
||||
logging.info("Processing licenses...")
|
||||
licenses = Counter()
|
||||
for app in metaapps:
|
||||
license = app.License
|
||||
licenses[license] += 1
|
||||
with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
|
||||
for license, count in most_common_stable(licenses):
|
||||
f.write(license + ' ' + str(count) + '\n')
|
||||
|
||||
# Write list of disabled apps...
|
||||
logging.info("Processing disabled apps...")
|
||||
disabled = [app.id for app in allmetaapps if app.Disabled]
|
||||
with open(os.path.join(statsdir, 'disabled_apps.txt'), 'w') as f:
|
||||
for appid in sorted(disabled):
|
||||
f.write(appid + '\n')
|
||||
|
||||
# Write list of latest apps added to the repo...
|
||||
logging.info("Processing latest apps...")
|
||||
latest = knownapks.getlatest(10)
|
||||
with open(os.path.join(statsdir, 'latestapps.txt'), 'w') as f:
|
||||
for appid in latest:
|
||||
f.write(appid + '\n')
|
||||
|
||||
if unknownapks:
|
||||
logging.info('\nUnknown apks:')
|
||||
for apk in unknownapks:
|
||||
logging.info(apk)
|
||||
|
||||
logging.info(_("Finished"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,24 +1,23 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""Python-Tail - Unix tail follow implementation in Python.
|
||||
'''
|
||||
Python-Tail - Unix tail follow implementation in Python.
|
||||
|
||||
python-tail can be used to monitor changes to a file.
|
||||
|
||||
Example
|
||||
-------
|
||||
>>> import tail
|
||||
>>>
|
||||
>>> # Create a tail instance
|
||||
>>> t = tail.Tail('file-to-be-followed')
|
||||
>>>
|
||||
>>> # Register a callback function to be called when a new line is found in the followed file.
|
||||
>>> # If no callback function is registered, new lines will be printed to standard out.
|
||||
>>> t.register_callback(callback_function)
|
||||
>>>
|
||||
>>> # Follow the file with 5 seconds as sleep time between iterations.
|
||||
>>> # If sleep time is not provided 1 second is used as the default time.
|
||||
>>> t.follow(s=5)
|
||||
"""
|
||||
Example:
|
||||
import tail
|
||||
|
||||
# Create a tail instance
|
||||
t = tail.Tail('file-to-be-followed')
|
||||
|
||||
# Register a callback function to be called when a new line is found in the followed file.
|
||||
# If no callback function is registered, new lines will be printed to standard out.
|
||||
t.register_callback(callback_function)
|
||||
|
||||
# Follow the file with 5 seconds as sleep time between iterations.
|
||||
# If sleep time is not provided 1 second is used as the default time.
|
||||
t.follow(s=5) '''
|
||||
|
||||
# Author - Kasun Herath <kasunh01 at gmail.com>
|
||||
# Source - https://github.com/kasun/python-tail
|
||||
|
@ -28,54 +27,46 @@ Example
|
|||
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import threading
|
||||
|
||||
|
||||
class Tail(object):
|
||||
"""Represents a tail command."""
|
||||
|
||||
''' Represents a tail command. '''
|
||||
def __init__(self, tailed_file):
|
||||
"""Initialize a Tail instance.
|
||||
''' Initiate a Tail instance.
|
||||
Checks for file validity and assigns the callback function to standard out.
|
||||
|
||||
Checks for file validity and assigns the callback function to standard out.
|
||||
Arguments:
|
||||
tailed_file - File to be followed. '''
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tailed_file
|
||||
File to be followed.
|
||||
"""
|
||||
self.check_file_validity(tailed_file)
|
||||
self.tailed_file = tailed_file
|
||||
self.callback = sys.stdout.write
|
||||
self.t_stop = threading.Event()
|
||||
|
||||
def start(self, s=1):
|
||||
"""Start tailing a file in a background thread.
|
||||
'''Start tailing a file in a background thread.
|
||||
|
||||
Arguments:
|
||||
s - Number of seconds to wait between each iteration; Defaults to 3.
|
||||
'''
|
||||
|
||||
Parameters
|
||||
----------
|
||||
s
|
||||
Number of seconds to wait between each iteration; Defaults to 3.
|
||||
"""
|
||||
t = threading.Thread(target=self.follow, args=(s,))
|
||||
t.start()
|
||||
|
||||
def stop(self):
|
||||
"""Stop a background tail."""
|
||||
'''Stop a background tail.
|
||||
'''
|
||||
self.t_stop.set()
|
||||
|
||||
def follow(self, s=1):
|
||||
"""Do a tail follow.
|
||||
|
||||
If a callback function is registered it is called with every new line.
|
||||
''' Do a tail follow. If a callback function is registered it is called with every new line.
|
||||
Else printed to standard out.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
s
|
||||
Number of seconds to wait between each iteration; Defaults to 1.
|
||||
"""
|
||||
Arguments:
|
||||
s - Number of seconds to wait between each iteration; Defaults to 1. '''
|
||||
|
||||
with open(self.tailed_file) as file_:
|
||||
# Go to the end of file
|
||||
file_.seek(0, 2)
|
||||
|
@ -90,11 +81,11 @@ class Tail(object):
|
|||
time.sleep(s)
|
||||
|
||||
def register_callback(self, func):
|
||||
"""Override default callback function to provided function."""
|
||||
''' Overrides default callback function to provided function. '''
|
||||
self.callback = func
|
||||
|
||||
def check_file_validity(self, file_):
|
||||
"""Check whether the a given file exists, readable and is a file."""
|
||||
''' Check whether a given file exists, is readable and is a file '''
|
||||
if not os.access(file_, os.F_OK):
|
||||
raise TailError("File '%s' does not exist" % (file_))
|
||||
if not os.access(file_, os.R_OK):
|
||||
|
@ -104,8 +95,8 @@ class Tail(object):
|
|||
|
||||
|
||||
class TailError(Exception):
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__()
|
||||
self.message = msg
|
||||
|
||||
def __str__(self):
|
||||
|
|
File diff suppressed because it is too large
|
@ -16,207 +16,119 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import requests
|
||||
from argparse import ArgumentParser
|
||||
from collections import OrderedDict
|
||||
|
||||
import requests
|
||||
|
||||
from . import _, common, net
|
||||
from . import _
|
||||
from . import common
|
||||
from . import net
|
||||
from . import update
|
||||
from .exception import FDroidException
|
||||
|
||||
options = None
|
||||
config = None
|
||||
|
||||
|
||||
def _add_diffoscope_info(d):
|
||||
"""Add diffoscope setup metadata to provided dict under 'diffoscope' key.
|
||||
class hashabledict(OrderedDict):
|
||||
def __key(self):
|
||||
return tuple((k, self[k]) for k in sorted(self))
|
||||
|
||||
The imports are broken out at stages since various versions of
|
||||
diffoscope support various parts of these.
|
||||
def __hash__(self):
|
||||
return hash(self.__key())
|
||||
|
||||
"""
|
||||
try:
|
||||
import diffoscope
|
||||
def __eq__(self, other):
|
||||
return self.__key() == other.__key()
|
||||
|
||||
d['diffoscope'] = dict()
|
||||
d['diffoscope']['VERSION'] = diffoscope.VERSION
|
||||
def __lt__(self, other):
|
||||
return self.__key() < other.__key()
|
||||
|
||||
from diffoscope.comparators import ComparatorManager
|
||||
|
||||
ComparatorManager().reload()
|
||||
|
||||
from diffoscope.tools import tool_check_installed, tool_required
|
||||
|
||||
external_tools = sorted(tool_required.all)
|
||||
external_tools = [
|
||||
tool for tool in external_tools if not tool_check_installed(tool)
|
||||
]
|
||||
d['diffoscope']['External-Tools-Required'] = external_tools
|
||||
|
||||
from diffoscope.external_tools import EXTERNAL_TOOLS
|
||||
from diffoscope.tools import OS_NAMES, get_current_os
|
||||
|
||||
current_os = get_current_os()
|
||||
os_list = [current_os] if (current_os in OS_NAMES) else iter(OS_NAMES)
|
||||
for os_ in os_list:
|
||||
tools = set()
|
||||
for x in external_tools:
|
||||
try:
|
||||
tools.add(EXTERNAL_TOOLS[x][os_])
|
||||
except KeyError:
|
||||
pass
|
||||
tools = sorted(tools)
|
||||
d['diffoscope']['Available-in-{}-packages'.format(OS_NAMES[os_])] = tools
|
||||
|
||||
from diffoscope.tools import python_module_missing as pmm
|
||||
|
||||
d['diffoscope']['Missing-Python-Modules'] = sorted(pmm.modules)
|
||||
except ImportError:
|
||||
pass
|
||||
def __gt__(self, other):
|
||||
return self.__key() > other.__key()
|
||||
|
||||
|
||||
def get_verified_json(path):
|
||||
"""Get the full collection of reports that is written out to verified.json."""
|
||||
if os.path.exists(path):
|
||||
try:
|
||||
with open(path) as fp:
|
||||
return json.load(fp)
|
||||
except Exception as e:
|
||||
logging.info(f'{path}: {e}')
|
||||
class Decoder(json.JSONDecoder):
|
||||
def __init__(self, **kwargs):
|
||||
json.JSONDecoder.__init__(self, **kwargs)
|
||||
self.parse_array = self.JSONArray
|
||||
# Use the python implementation of the scanner
|
||||
self.scan_once = json.scanner.py_make_scanner(self)
|
||||
|
||||
data = OrderedDict()
|
||||
data['packages'] = OrderedDict()
|
||||
|
||||
for f in glob.glob(os.path.join(os.path.dirname(path), '*.apk.json')):
|
||||
with open(f) as fp:
|
||||
reports = json.load(fp)
|
||||
for report in reports.values():
|
||||
packageName = report['local']['packageName']
|
||||
if packageName not in data['packages']:
|
||||
data['packages'][packageName] = []
|
||||
data['packages'][packageName].append(report)
|
||||
|
||||
return data
|
||||
def JSONArray(self, s_and_end, scan_once, **kwargs):
|
||||
values, end = json.decoder.JSONArray(s_and_end, scan_once, **kwargs)
|
||||
return set(values), end
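The old Decoder above turns every JSON array into a Python set while parsing, which is what allowed hashabledict entries in verified.json to be deduplicated. A one-line usage sketch:

    json.loads('["a", "b", "b"]', cls=Decoder)  # -> {'a', 'b'} (a set, not a list)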
|
||||
|
||||
|
||||
def write_json_report(url, remote_apk, unsigned_apk, compare_result):
|
||||
"""Write out the results of the verify run to JSON.
|
||||
"""write out the results of the verify run to JSON
|
||||
|
||||
This builds up reports on the repeated runs of `fdroid verify` on
|
||||
a set of apps. It uses the timestamps on the compared files to
|
||||
ensure that there is only one report per file, even when run
|
||||
repeatedly.
|
||||
|
||||
The output is run through JSON to normalize things like tuples vs
|
||||
lists.
|
||||
|
||||
"""
|
||||
|
||||
jsonfile = unsigned_apk + '.json'
|
||||
if os.path.exists(jsonfile):
|
||||
with open(jsonfile) as fp:
|
||||
data = json.load(fp, object_pairs_hook=OrderedDict)
|
||||
else:
|
||||
data = OrderedDict()
|
||||
output = dict()
|
||||
_add_diffoscope_info(output)
|
||||
output = hashabledict()
|
||||
output['url'] = url
|
||||
for key, filename in (('local', unsigned_apk), ('remote', remote_apk)):
|
||||
d = dict()
|
||||
d = hashabledict()
|
||||
output[key] = d
|
||||
d['file'] = filename
|
||||
d['sha256'] = common.sha256sum(filename)
|
||||
d['sha256'] = update.sha256sum(filename)
|
||||
d['timestamp'] = os.stat(filename).st_ctime
|
||||
d['packageName'], d['versionCode'], d['versionName'] = common.get_apk_id(
|
||||
filename
|
||||
)
|
||||
d['packageName'], d['versionCode'], d['versionName'] = common.get_apk_id(filename)
|
||||
if compare_result:
|
||||
output['verified'] = False
|
||||
output['result'] = compare_result
|
||||
else:
|
||||
output['verified'] = True
|
||||
# str makes better dict keys than float
|
||||
data[str(output['local']['timestamp'])] = output
|
||||
data[str(output['local']['timestamp'])] = output # str makes better dict keys than float
|
||||
with open(jsonfile, 'w') as fp:
|
||||
json.dump(data, fp, sort_keys=True)
|
||||
|
||||
appid, version_code = os.path.basename(unsigned_apk[:-4]).rsplit('_', 1)
|
||||
appid_base = unsigned_apk.rsplit('_', 1)[0]
|
||||
apkReports = sorted(
|
||||
glob.glob(f'{appid_base}_[0-9]*.json'), # don't include <appid>.json
|
||||
key=lambda s: int(s[:-9].rsplit('_', 1)[1]), # numeric sort by versionCode
|
||||
)
|
||||
with open(apkReports[-1]) as fp:
|
||||
reports = json.load(fp)
|
||||
appid_output = {'apkReports': apkReports}
|
||||
most_recent = 0
|
||||
for report_time, run in reports.items():
|
||||
if float(report_time) > most_recent:
|
||||
most_recent = float(report_time)
|
||||
appid_output['lastRunVerified'] = run['verified']
|
||||
with open(f'{appid_base}.json', 'w') as fp:
|
||||
json.dump(appid_output, fp, cls=common.Encoder, sort_keys=True)
|
||||
|
||||
if output['verified']:
|
||||
write_verified_json(output)
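For orientation, the per-APK report file built above (<unsigned_apk>.json) is keyed by the local APK's ctime; a 'diffoscope' section is added when diffoscope is importable. It might look roughly like this (paths, hashes and timestamps are invented):

    {"1712345678.0": {
        "url": "https://f-droid.org/repo/org.example.app_42.apk",
        "verified": true,
        "local":  {"file": "unsigned/org.example.app_42.apk", "sha256": "…", "timestamp": 1712345678.0,
                   "packageName": "org.example.app", "versionCode": 42, "versionName": "1.2"},
        "remote": {"file": "tmp/org.example.app_42.apk", "sha256": "…", "timestamp": 1712345699.0,
                   "packageName": "org.example.app", "versionCode": 42, "versionName": "1.2"}}}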
|
||||
|
||||
|
||||
def write_verified_json(output):
|
||||
jsonfile = 'unsigned/verified.json'
|
||||
data = get_verified_json(jsonfile)
|
||||
packageName = output['local']['packageName']
|
||||
|
||||
if packageName not in data['packages']:
|
||||
data['packages'][packageName] = []
|
||||
found = False
|
||||
output_dump = json.dumps(output, sort_keys=True)
|
||||
for p in data['packages'][packageName]:
|
||||
if output_dump == json.dumps(p, sort_keys=True):
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
data['packages'][packageName].insert(0, json.loads(output_dump))
|
||||
with open(jsonfile, 'w') as fp:
|
||||
json.dump(data, fp, cls=common.Encoder, sort_keys=True)
|
||||
jsonfile = 'unsigned/verified.json'
|
||||
if os.path.exists(jsonfile):
|
||||
with open(jsonfile) as fp:
|
||||
data = json.load(fp, cls=Decoder, object_pairs_hook=hashabledict)
|
||||
else:
|
||||
data = OrderedDict()
|
||||
data['packages'] = OrderedDict()
|
||||
packageName = output['local']['packageName']
|
||||
if packageName not in data['packages']:
|
||||
data['packages'][packageName] = set()
|
||||
data['packages'][packageName].add(output)
|
||||
with open(jsonfile, 'w') as fp:
|
||||
json.dump(data, fp, cls=common.Encoder, sort_keys=True)
|
||||
|
||||
|
||||
def main():
|
||||
global config
|
||||
|
||||
global options, config
|
||||
|
||||
# Parse command line...
|
||||
parser = ArgumentParser(
|
||||
usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
|
||||
)
|
||||
parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
|
||||
common.setup_global_opts(parser)
|
||||
parser.add_argument(
|
||||
"appid",
|
||||
nargs='*',
|
||||
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--clean-up-verified",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Remove source tarball and any APKs if successfully verified."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--reuse-remote-apk",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Verify against locally cached copy rather than redownloading."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output-json",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=_("Output JSON report to file named after APK."),
|
||||
)
|
||||
options = common.parse_args(parser)
|
||||
parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))
|
||||
parser.add_argument("--reuse-remote-apk", action="store_true", default=False,
|
||||
help=_("Verify against locally cached copy rather than redownloading."))
|
||||
parser.add_argument("--output-json", action="store_true", default=False,
|
||||
help=_("Output JSON report to file named after APK."))
|
||||
options = parser.parse_args()
|
||||
|
||||
config = common.read_config()
|
||||
config = common.read_config(options)
|
||||
|
||||
tmp_dir = 'tmp'
|
||||
if not os.path.isdir(tmp_dir):
|
||||
|
@ -228,13 +140,13 @@ def main():
|
|||
logging.error(_("No unsigned directory - nothing to do"))
|
||||
sys.exit(0)
|
||||
|
||||
processed = set()
|
||||
verified = 0
|
||||
notverified = 0
|
||||
|
||||
vercodes = common.read_pkg_args(options.appid, True)
|
||||
|
||||
for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):
|
||||
|
||||
apkfilename = os.path.basename(apkfile)
|
||||
url = 'https://f-droid.org/repo/' + apkfilename
|
||||
appid, vercode = common.publishednameinfo(apkfile)
|
||||
|
@ -244,9 +156,8 @@ def main():
|
|||
if vercodes[appid] and vercode not in vercodes[appid]:
|
||||
continue
|
||||
|
||||
processed.add(appid)
|
||||
|
||||
try:
|
||||
|
||||
logging.info("Processing {apkfilename}".format(apkfilename=apkfilename))
|
||||
|
||||
remote_apk = os.path.join(tmp_dir, apkfilename)
|
||||
|
@ -258,37 +169,18 @@ def main():
|
|||
net.download_file(url, dldir=tmp_dir)
|
||||
except requests.exceptions.HTTPError:
|
||||
try:
|
||||
net.download_file(
|
||||
url.replace('/repo', '/archive'), dldir=tmp_dir
|
||||
)
|
||||
net.download_file(url.replace('/repo', '/archive'), dldir=tmp_dir)
|
||||
except requests.exceptions.HTTPError as e:
|
||||
raise FDroidException(
|
||||
_('Downloading {url} failed. {error}').format(
|
||||
url=url, error=e
|
||||
)
|
||||
) from e
|
||||
raise FDroidException(_('Downloading {url} failed. {error}')
|
||||
.format(url=url, error=e))
|
||||
|
||||
unsigned_apk = os.path.join(unsigned_dir, apkfilename)
|
||||
compare_result = common.verify_apks(
|
||||
remote_apk,
|
||||
unsigned_apk,
|
||||
tmp_dir,
|
||||
clean_up_verified=options.clean_up_verified,
|
||||
)
|
||||
compare_result = common.verify_apks(remote_apk, unsigned_apk, tmp_dir)
|
||||
if options.output_json:
|
||||
write_json_report(url, remote_apk, unsigned_apk, compare_result)
|
||||
if compare_result:
|
||||
raise FDroidException(compare_result)
|
||||
|
||||
if options.clean_up_verified:
|
||||
src_tarball = os.path.join(
|
||||
unsigned_dir, common.get_src_tarball_name(appid, vercode)
|
||||
)
|
||||
for f in (remote_apk, unsigned_apk, src_tarball):
|
||||
if os.path.exists(f):
|
||||
logging.info(f"...cleaned up {f} after successful verification")
|
||||
os.remove(f)
|
||||
|
||||
logging.info("...successfully verified")
|
||||
verified += 1
|
||||
|
||||
|
@ -296,12 +188,6 @@ def main():
|
|||
logging.info("...NOT verified - {0}".format(e))
|
||||
notverified += 1
|
||||
|
||||
for appid in options.appid:
|
||||
package = appid.split(":")[0]
|
||||
if package not in processed:
|
||||
logging.critical(_("No APK for package: %s") % package)
|
||||
notverified += 1
|
||||
|
||||
if verified > 0:
|
||||
logging.info("{0} successfully verified".format(verified))
|
||||
if notverified > 0:
|
||||
|
|
|
@ -16,17 +16,20 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from os.path import isdir, isfile, basename, abspath, expanduser
|
||||
import os
|
||||
import math
|
||||
import json
|
||||
import tarfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import textwrap
|
||||
import threading
|
||||
from os.path import abspath, basename, expanduser, isdir, isfile
|
||||
|
||||
import logging
|
||||
from .common import FDroidException
|
||||
|
||||
from fdroidserver import _
|
||||
import threading
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
|
@ -38,19 +41,15 @@ def get_clean_builder(serverdir):
|
|||
os.makedirs(serverdir)
|
||||
vagrantfile = os.path.join(serverdir, 'Vagrantfile')
|
||||
if not os.path.isfile(vagrantfile):
|
||||
with open(vagrantfile, 'w') as f:
|
||||
f.write(
|
||||
textwrap.dedent(
|
||||
"""\
|
||||
with open(os.path.join('builder', 'Vagrantfile'), 'w') as f:
|
||||
f.write(textwrap.dedent("""\
|
||||
# generated file, do not change.
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
config.vm.box = "buildserver"
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
end
|
||||
"""
|
||||
)
|
||||
)
|
||||
"""))
|
||||
vm = get_build_vm(serverdir)
|
||||
logging.info('destroying buildserver before build')
|
||||
vm.destroy()
|
||||
|
@ -80,24 +79,15 @@ def _check_output(cmd, cwd=None):
|
|||
|
||||
|
||||
def get_build_vm(srvdir, provider=None):
|
||||
"""No summary.
|
||||
|
||||
Factory function for getting FDroidBuildVm instances.
|
||||
"""Factory function for getting FDroidBuildVm instances.
|
||||
|
||||
This function tries to figure out what hypervisor should be used
|
||||
and creates an object for controlling a build VM.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
srvdir
|
||||
path to a directory which contains a Vagrantfile
|
||||
provider
|
||||
optionally this parameter allows specifying a
|
||||
specific vagrant provider.
|
||||
|
||||
Returns
|
||||
-------
|
||||
FDroidBuildVm instance.
|
||||
:param srvdir: path to a directory which contains a Vagrantfile
|
||||
:param provider: optionally this parameter allows specifying a
|
||||
specific vagrant provider.
|
||||
:returns: FDroidBuildVm instance.
|
||||
"""
|
||||
abssrvdir = abspath(srvdir)
|
||||
|
||||
|
@ -120,37 +110,25 @@ def get_build_vm(srvdir, provider=None):
|
|||
if kvm_installed and vbox_installed:
|
||||
logging.debug('both kvm and vbox are installed.')
|
||||
elif kvm_installed:
|
||||
logging.debug(
|
||||
'libvirt is the sole installed and supported vagrant provider, selecting \'libvirt\''
|
||||
)
|
||||
logging.debug('libvirt is the sole installed and supported vagrant provider, selecting \'libvirt\'')
|
||||
return LibvirtBuildVm(abssrvdir)
|
||||
elif vbox_installed:
|
||||
logging.debug(
|
||||
'virtualbox is the sole installed and supported vagrant provider, selecting \'virtualbox\''
|
||||
)
|
||||
logging.debug('virtualbox is the sole installed and supported vagrant provider, selecting \'virtualbox\'')
|
||||
return VirtualboxBuildVm(abssrvdir)
|
||||
else:
|
||||
logging.debug(
|
||||
'could not confirm that either virtualbox or kvm/libvirt are installed'
|
||||
)
|
||||
logging.debug('could not confirm that either virtualbox or kvm/libvirt are installed')
|
||||
|
||||
# try guessing provider from .../srvdir/.vagrant internals
|
||||
vagrant_libvirt_path = os.path.join(
|
||||
abssrvdir, '.vagrant', 'machines', 'default', 'libvirt'
|
||||
)
|
||||
has_libvirt_machine = (
|
||||
isdir(vagrant_libvirt_path) and len(os.listdir(vagrant_libvirt_path)) > 0
|
||||
)
|
||||
vagrant_virtualbox_path = os.path.join(
|
||||
abssrvdir, '.vagrant', 'machines', 'default', 'virtualbox'
|
||||
)
|
||||
has_vbox_machine = (
|
||||
isdir(vagrant_virtualbox_path) and len(os.listdir(vagrant_virtualbox_path)) > 0
|
||||
)
|
||||
vagrant_libvirt_path = os.path.join(abssrvdir, '.vagrant', 'machines',
|
||||
'default', 'libvirt')
|
||||
has_libvirt_machine = isdir(vagrant_libvirt_path) \
|
||||
and len(os.listdir(vagrant_libvirt_path)) > 0
|
||||
vagrant_virtualbox_path = os.path.join(abssrvdir, '.vagrant', 'machines',
|
||||
'default', 'virtualbox')
|
||||
has_vbox_machine = isdir(vagrant_virtualbox_path) \
|
||||
and len(os.listdir(vagrant_virtualbox_path)) > 0
|
||||
if has_libvirt_machine and has_vbox_machine:
|
||||
logging.info(
|
||||
'build vm provider lookup found virtualbox and libvirt, defaulting to \'virtualbox\''
|
||||
)
|
||||
logging.info('build vm provider lookup found virtualbox and libvirt, defaulting to \'virtualbox\'')
|
||||
return VirtualboxBuildVm(abssrvdir)
|
||||
elif has_libvirt_machine:
|
||||
logging.debug('build vm provider lookup found \'libvirt\'')
|
||||
|
@ -162,15 +140,12 @@ def get_build_vm(srvdir, provider=None):
|
|||
# try guessing provider from available buildserver boxes
|
||||
available_boxes = []
|
||||
import vagrant
|
||||
|
||||
boxes = vagrant.Vagrant().box_list()
|
||||
for box in boxes:
|
||||
if box.name == "buildserver":
|
||||
available_boxes.append(box.provider)
|
||||
if "libvirt" in available_boxes and "virtualbox" in available_boxes:
|
||||
logging.info(
|
||||
'basebox lookup found virtualbox and libvirt boxes, defaulting to \'virtualbox\''
|
||||
)
|
||||
logging.info('basebox lookup found virtualbox and libvirt boxes, defaulting to \'virtualbox\'')
|
||||
return VirtualboxBuildVm(abssrvdir)
|
||||
elif "libvirt" in available_boxes:
|
||||
logging.info('\'libvirt\' buildserver box available, using that')
|
||||
|
@ -187,7 +162,7 @@ class FDroidBuildVmException(FDroidException):
|
|||
pass
|
||||
|
||||
|
||||
class FDroidBuildVm:
|
||||
class FDroidBuildVm():
|
||||
"""Abstract base class for working with FDroids build-servers.
|
||||
|
||||
Use the factory method `fdroidserver.vmtools.get_build_vm()` for
|
||||
|
@ -196,27 +171,19 @@ class FDroidBuildVm:
|
|||
This is intended to be a hypervisor independent, fault tolerant
|
||||
wrapper around the vagrant functions we use.
|
||||
"""
|
||||
|
||||
def __init__(self, srvdir, provider=None):
|
||||
"""Create new server class."""
|
||||
self.provider = provider
|
||||
def __init__(self, srvdir):
|
||||
"""Create new server class.
|
||||
"""
|
||||
self.srvdir = srvdir
|
||||
self.srvname = basename(srvdir) + '_default'
|
||||
self.vgrntfile = os.path.join(srvdir, 'Vagrantfile')
|
||||
self.srvuuid = self._vagrant_fetch_uuid()
|
||||
if not isdir(srvdir):
|
||||
raise FDroidBuildVmException(
|
||||
"Can not init vagrant, directory %s not present" % (srvdir)
|
||||
)
|
||||
raise FDroidBuildVmException("Can not init vagrant, directory %s not present" % (srvdir))
|
||||
if not isfile(self.vgrntfile):
|
||||
raise FDroidBuildVmException(
|
||||
"Can not init vagrant, '%s' not present" % (self.vgrntfile)
|
||||
)
|
||||
raise FDroidBuildVmException("Can not init vagrant, '%s' not present" % (self.vgrntfile))
|
||||
import vagrant
|
||||
|
||||
self.vgrnt = vagrant.Vagrant(
|
||||
root=srvdir, out_cm=vagrant.stdout_cm, err_cm=vagrant.stdout_cm
|
||||
)
|
||||
self.vgrnt = vagrant.Vagrant(root=srvdir, out_cm=vagrant.stdout_cm, err_cm=vagrant.stdout_cm)
|
||||
|
||||
def up(self, provision=True):
|
||||
global lock
|
||||
|
@ -281,8 +248,11 @@ class FDroidBuildVm:
|
|||
except subprocess.CalledProcessError as e:
|
||||
logging.debug('pruning global vagrant status failed: %s', e)
|
||||
|
||||
def package(self, output=None):
|
||||
self.vgrnt.package(output=output)
|
||||
|
||||
def vagrant_uuid_okay(self):
|
||||
"""Having an uuid means that vagrant up has run successfully."""
|
||||
'''Having an uuid means that vagrant up has run successfully.'''
|
||||
if self.srvuuid is None:
|
||||
return False
|
||||
return True
|
||||
|
@ -312,20 +282,13 @@ class FDroidBuildVm:
|
|||
def box_add(self, boxname, boxfile, force=True):
|
||||
"""Add vagrant box to vagrant.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
boxname
|
||||
name assigned to local deployment of box
|
||||
boxfile
|
||||
path to box file
|
||||
force
|
||||
overwrite existing box image (default: True)
|
||||
:param boxname: name assigned to local deployment of box
|
||||
:param boxfile: path to box file
|
||||
:param force: overwrite existing box image (default: True)
|
||||
"""
|
||||
boxfile = abspath(boxfile)
|
||||
if not isfile(boxfile):
|
||||
raise FDroidBuildVmException(
|
||||
'supplied boxfile \'%s\' does not exist' % boxfile
|
||||
)
|
||||
raise FDroidBuildVmException('supplied boxfile \'%s\' does not exist', boxfile)
|
||||
self.vgrnt.box_add(boxname, abspath(boxfile), force=force)
|
||||
|
||||
def box_remove(self, boxname):
|
||||
|
@ -333,28 +296,25 @@ class FDroidBuildVm:
|
|||
_check_call(['vagrant', 'box', 'remove', '--all', '--force', boxname])
|
||||
except subprocess.CalledProcessError as e:
|
||||
logging.debug('tried removing box %s, but it did not exist: %s', boxname, e)
|
||||
boxpath = os.path.join(
|
||||
expanduser('~'), '.vagrant', self._vagrant_file_name(boxname)
|
||||
)
|
||||
boxpath = os.path.join(expanduser('~'), '.vagrant',
|
||||
self._vagrant_file_name(boxname))
|
||||
if isdir(boxpath):
|
||||
logging.info(
|
||||
"attempting to remove box '%s' by deleting: %s", boxname, boxpath
|
||||
)
|
||||
logging.info("attempting to remove box '%s' by deleting: %s",
|
||||
boxname, boxpath)
|
||||
shutil.rmtree(boxpath)
|
||||
|
||||
def sshinfo(self):
|
||||
"""Get ssh connection info for a vagrant VM.
|
||||
"""Get ssh connection info for a vagrant VM
|
||||
|
||||
Returns
|
||||
-------
|
||||
A dictionary containing 'hostname', 'port', 'user' and 'idfile'
|
||||
:returns: A dictionary containing 'hostname', 'port', 'user'
|
||||
and 'idfile'
|
||||
"""
|
||||
import paramiko
|
||||
|
||||
try:
|
||||
sshconfig_path = os.path.join(self.srvdir, 'sshconfig')
|
||||
with open(sshconfig_path, 'wb') as fp:
|
||||
fp.write(_check_output(['vagrant', 'ssh-config'], cwd=self.srvdir))
|
||||
fp.write(_check_output(['vagrant', 'ssh-config'],
|
||||
cwd=self.srvdir))
|
||||
vagranthost = 'default' # Host in ssh config file
|
||||
sshconfig = paramiko.SSHConfig()
|
||||
with open(sshconfig_path, 'r') as f:
|
||||
|
@ -365,25 +325,36 @@ class FDroidBuildVm:
|
|||
idfile = idfile[0]
|
||||
elif idfile.startswith('"') and idfile.endswith('"'):
|
||||
idfile = idfile[1:-1]
|
||||
return {
|
||||
'hostname': sshconfig['hostname'],
|
||||
'port': int(sshconfig['port']),
|
||||
'user': sshconfig['user'],
|
||||
'idfile': idfile,
|
||||
}
|
||||
return {'hostname': sshconfig['hostname'],
|
||||
'port': int(sshconfig['port']),
|
||||
'user': sshconfig['user'],
|
||||
'idfile': idfile}
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FDroidBuildVmException("Error getting ssh config") from e
|
||||
|
||||
def snapshot_create(self, snapshot_name):
|
||||
raise NotImplementedError('not implemented, please use a sub-type instance')
|
||||
|
||||
def snapshot_list(self):
|
||||
raise NotImplementedError('not implemented, please use a sub-type instance')
|
||||
|
||||
def snapshot_exists(self, snapshot_name):
|
||||
raise NotImplementedError('not implemented, please use a sub-type instance')
|
||||
|
||||
def snapshot_revert(self, snapshot_name):
|
||||
raise NotImplementedError('not implemented, please use a sub-type instance')
|
||||
|
||||
|
||||
class LibvirtBuildVm(FDroidBuildVm):
|
||||
def __init__(self, srvdir):
|
||||
super().__init__(srvdir, 'libvirt')
|
||||
self.provider = 'libvirt'
|
||||
super().__init__(srvdir)
|
||||
import libvirt
|
||||
|
||||
try:
|
||||
self.conn = libvirt.open('qemu:///system')
|
||||
except libvirt.libvirtError as e:
|
||||
raise FDroidBuildVmException('could not connect to libvirtd: %s' % (e)) from e
|
||||
raise FDroidBuildVmException('could not connect to libvirtd: %s' % (e))
|
||||
|
||||
def destroy(self):
|
||||
|
||||
|
@ -403,6 +374,84 @@ class LibvirtBuildVm(FDroidBuildVm):
|
|||
except subprocess.CalledProcessError as e:
|
||||
logging.info("could not undefine libvirt domain '%s': %s", self.srvname, e)
|
||||
|
||||
def package(self, output=None, keep_box_file=False):
|
||||
if not output:
|
||||
output = "buildserver.box"
|
||||
logging.debug("no output name set for packaging '%s', "
|
||||
"defaulting to %s", self.srvname, output)
|
||||
storagePool = self.conn.storagePoolLookupByName('default')
|
||||
domainInfo = self.conn.lookupByName(self.srvname).info()
|
||||
if storagePool:
|
||||
|
||||
if isfile('metadata.json'):
|
||||
os.remove('metadata.json')
|
||||
if isfile('Vagrantfile'):
|
||||
os.remove('Vagrantfile')
|
||||
if isfile('box.img'):
|
||||
os.remove('box.img')
|
||||
|
||||
logging.debug('preparing box.img for box %s', output)
|
||||
vol = storagePool.storageVolLookupByName(self.srvname + '.img')
|
||||
imagepath = vol.path()
|
||||
# TODO use a libvirt storage pool to ensure the img file is readable
|
||||
if not os.access(imagepath, os.R_OK):
|
||||
logging.warning(_('Cannot read "{path}"!').format(path=imagepath))
|
||||
_check_call(['sudo', '/bin/chmod', '-R', 'a+rX', '/var/lib/libvirt/images'])
|
||||
shutil.copy2(imagepath, 'box.img')
|
||||
_check_call(['qemu-img', 'rebase', '-p', '-b', '', 'box.img'])
|
||||
img_info_raw = _check_output(['qemu-img', 'info', '--output=json', 'box.img'])
|
||||
img_info = json.loads(img_info_raw.decode('utf-8'))
|
||||
metadata = {"provider": "libvirt",
|
||||
"format": img_info['format'],
|
||||
"virtual_size": math.ceil(img_info['virtual-size'] / (1024. ** 3)),
|
||||
}
|
||||
|
||||
logging.debug('preparing metadata.json for box %s', output)
|
||||
with open('metadata.json', 'w') as fp:
|
||||
fp.write(json.dumps(metadata))
|
||||
logging.debug('preparing Vagrantfile for box %s', output)
|
||||
vagrantfile = textwrap.dedent("""\
|
||||
Vagrant.configure("2") do |config|
|
||||
config.ssh.username = "vagrant"
|
||||
config.ssh.password = "vagrant"
|
||||
|
||||
config.vm.provider :libvirt do |libvirt|
|
||||
|
||||
libvirt.driver = "kvm"
|
||||
libvirt.host = ""
|
||||
libvirt.connect_via_ssh = false
|
||||
libvirt.storage_pool_name = "default"
|
||||
libvirt.cpus = {cpus}
|
||||
libvirt.memory = {memory}
|
||||
|
||||
end
|
||||
end""".format_map({'memory': str(int(domainInfo[1] / 1024)), 'cpus': str(domainInfo[3])}))
|
||||
with open('Vagrantfile', 'w') as fp:
|
||||
fp.write(vagrantfile)
|
||||
try:
|
||||
import libarchive
|
||||
with libarchive.file_writer(output, 'gnutar', 'gzip') as tar:
|
||||
logging.debug('adding files to box %s ...', output)
|
||||
tar.add_files('metadata.json', 'Vagrantfile', 'box.img')
|
||||
except (ImportError, AttributeError):
|
||||
with tarfile.open(output, 'w:gz') as tar:
|
||||
logging.debug('adding metadata.json to box %s ...', output)
|
||||
tar.add('metadata.json')
|
||||
logging.debug('adding Vagrantfile to box %s ...', output)
|
||||
tar.add('Vagrantfile')
|
||||
logging.debug('adding box.img to box %s ...', output)
|
||||
tar.add('box.img')
|
||||
|
||||
if not keep_box_file:
|
||||
logging.debug('box packaging complete, removing temporary files.')
|
||||
os.remove('metadata.json')
|
||||
os.remove('Vagrantfile')
|
||||
os.remove('box.img')
|
||||
|
||||
else:
|
||||
logging.warning("could not connect to storage-pool 'default', "
|
||||
"skip packaging buildserver box")
|
||||
|
||||
def box_add(self, boxname, boxfile, force=True):
|
||||
boximg = '%s_vagrant_box_image_0.img' % (boxname)
|
||||
if force:
|
||||
|
@ -423,8 +472,82 @@ class LibvirtBuildVm(FDroidBuildVm):
|
|||
except subprocess.CalledProcessError as e:
|
||||
logging.debug("tried removing '%s', file was not present in first place", boxname, exc_info=e)
|
||||
|
||||
def snapshot_create(self, snapshot_name):
|
||||
logging.info("creating snapshot '%s' for vm '%s'", snapshot_name, self.srvname)
|
||||
try:
|
||||
_check_call(['virsh', '-c', 'qemu:///system', 'snapshot-create-as', self.srvname, snapshot_name])
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FDroidBuildVmException("could not create snapshot '%s' "
|
||||
"of libvirt vm '%s'"
|
||||
% (snapshot_name, self.srvname)) from e
|
||||
|
||||
def snapshot_list(self):
|
||||
import libvirt
|
||||
try:
|
||||
dom = self.conn.lookupByName(self.srvname)
|
||||
return dom.listAllSnapshots()
|
||||
except libvirt.libvirtError as e:
|
||||
raise FDroidBuildVmException('could not list snapshots for domain \'%s\'' % self.srvname) from e
|
||||
|
||||
def snapshot_exists(self, snapshot_name):
|
||||
import libvirt
|
||||
try:
|
||||
dom = self.conn.lookupByName(self.srvname)
|
||||
return dom.snapshotLookupByName(snapshot_name) is not None
|
||||
except libvirt.libvirtError:
|
||||
return False
|
||||
|
||||
def snapshot_revert(self, snapshot_name):
|
||||
logging.info("reverting vm '%s' to snapshot '%s'", self.srvname, snapshot_name)
|
||||
import libvirt
|
||||
try:
|
||||
dom = self.conn.lookupByName(self.srvname)
|
||||
snap = dom.snapshotLookupByName(snapshot_name)
|
||||
dom.revertToSnapshot(snap)
|
||||
except libvirt.libvirtError as e:
|
||||
raise FDroidBuildVmException('could not revert domain \'%s\' to snapshot \'%s\''
|
||||
% (self.srvname, snapshot_name)) from e
|
||||
|
||||
|
||||
class VirtualboxBuildVm(FDroidBuildVm):
|
||||
|
||||
def __init__(self, srvdir):
|
||||
super().__init__(srvdir, 'virtualbox')
|
||||
self.provider = 'virtualbox'
|
||||
super().__init__(srvdir)
|
||||
|
||||
def snapshot_create(self, snapshot_name):
|
||||
logging.info("creating snapshot '%s' for vm '%s'", snapshot_name, self.srvname)
|
||||
try:
|
||||
_check_call(['VBoxManage', 'snapshot', self.srvuuid, 'take', 'fdroidclean'], cwd=self.srvdir)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FDroidBuildVmException('could not create snapshot '
|
||||
'of virtualbox vm %s'
|
||||
% self.srvname) from e
|
||||
|
||||
def snapshot_list(self):
|
||||
try:
|
||||
o = _check_output(['VBoxManage', 'snapshot',
|
||||
self.srvuuid, 'list',
|
||||
'--details'], cwd=self.srvdir)
|
||||
return o
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FDroidBuildVmException("could not list snapshots "
|
||||
"of virtualbox vm '%s'"
|
||||
% (self.srvname)) from e
|
||||
|
||||
def snapshot_exists(self, snapshot_name):
|
||||
try:
|
||||
return str(snapshot_name) in str(self.snapshot_list())
|
||||
except FDroidBuildVmException:
|
||||
return False
|
||||
|
||||
def snapshot_revert(self, snapshot_name):
|
||||
logging.info("reverting vm '%s' to snapshot '%s'",
|
||||
self.srvname, snapshot_name)
|
||||
try:
|
||||
_check_call(['VBoxManage', 'snapshot', self.srvuuid,
|
||||
'restore', 'fdroidclean'], cwd=self.srvdir)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise FDroidBuildVmException("could not load snapshot "
|
||||
"'fdroidclean' for vm '%s'"
|
||||
% (self.srvname)) from e
|
||||
|
|
gradlew-fdroid (new executable file, 240 lines)
|
@ -0,0 +1,240 @@
|
|||
#!/bin/bash
|
||||
|
||||
bindir="$(dirname $0)"
|
||||
basedir="$(dirname $bindir)"
|
||||
# Check if GRADLE_VERSION_DIR/CACHEDIR is set from environment
|
||||
if [ -z "$GRADLE_VERSION_DIR" ]; then
|
||||
gradle_version_dir="${basedir}/versions"
|
||||
else
|
||||
gradle_version_dir="$GRADLE_VERSION_DIR"
|
||||
fi
|
||||
if [ -n "$CACHEDIR" ]; then
|
||||
cachedir="$CACHEDIR"
|
||||
fi
|
||||
args=("$@")
|
||||
|
||||
run_gradle() {
|
||||
if [ ! -d "${gradle_version_dir}/${v_found}" ]; then
|
||||
download_gradle ${v_found}
|
||||
fi
|
||||
echo "Running ${gradle_version_dir}/${v_found}/bin/gradle ${args[@]}"
|
||||
"${gradle_version_dir}/${v_found}/bin/gradle" "${args[@]}"
|
||||
exit $?
|
||||
}
|
||||
|
||||
download_gradle() {
|
||||
URL="https://downloads.gradle.org/distributions/gradle-${1}-bin.zip"
|
||||
shasum=$(get_sha $1)
|
||||
if [ $? != 0 ]; then
|
||||
echo "No hash for gradle version $1! Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
if [ -n "${cachedir}" ] && [ -e "${cachedir}/gradle-$1-bin.zip" ]; then
|
||||
echo "Using cached ${cachedir}/gradle-$1-bin.zip ..."
|
||||
gradle_zip="${cachedir}/gradle-$1-bin.zip"
|
||||
else
|
||||
echo "Downloading missing gradle version $1"
|
||||
if [ -n "${cachedir}" ]; then
|
||||
tmpdir="${cachedir}"
|
||||
if [ ! -d ${tmpdir} ]; then
|
||||
mkdir -p "${cachedir}"
|
||||
fi
|
||||
else
|
||||
tmpdir=$(mktemp -d)
|
||||
fi
|
||||
curl -o "${tmpdir}/gradle-$1-bin.zip" --silent --fail --show-error --location "${URL}"
|
||||
gradle_zip="${tmpdir}/gradle-$1-bin.zip"
|
||||
fi
|
||||
echo "${shasum} ${gradle_zip}" | sha256sum -c -
|
||||
if [ $? != 0 ]; then
|
||||
echo "gradle download checksum mismatch! Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p "${gradle_version_dir}/"
|
||||
unzip -q -d "${gradle_version_dir}" "${gradle_zip}"
|
||||
mv "${gradle_version_dir}/gradle-$1" "${gradle_version_dir}/${v_found}"
|
||||
}
|
||||
|
||||
get_sha() {
|
||||
case $1 in
|
||||
'1.4') echo 'cd99e85fbcd0ae8b99e81c9992a2f10cceb7b5f009c3720ef3a0078f4f92e94e' ;;
|
||||
'1.6') echo 'de3e89d2113923dcc2e0def62d69be0947ceac910abd38b75ec333230183fac4' ;;
|
||||
'1.7') echo '360c97d51621b5a1ecf66748c718594e5f790ae4fbc1499543e0c006033c9d30' ;;
|
||||
'1.8') echo 'a342bbfa15fd18e2482287da4959588f45a41b60910970a16e6d97959aea5703' ;;
|
||||
'1.9') echo '097ddc2bcbc9da2bb08cbf6bf8079585e35ad088bafd42e8716bc96405db98e9' ;;
|
||||
'1.10') echo '6e6db4fc595f27ceda059d23693b6f6848583950606112b37dfd0e97a0a0a4fe' ;;
|
||||
'1.11') echo '07e235df824964f0e19e73ea2327ce345c44bcd06d44a0123d29ab287fc34091' ;;
|
||||
'1.12') echo '8734b13a401f4311ee418173ed6ca8662d2b0a535be8ff2a43ecb1c13cd406ea' ;;
|
||||
'2.1') echo '3eee4f9ea2ab0221b89f8e4747a96d4554d00ae46d8d633f11cfda60988bf878' ;;
|
||||
'2.2') echo '91e5655fe11ef414449f218c4fa2985b3a49b7903c57556da109c84fa26e1dfb' ;;
|
||||
'2.2.1') echo '420aa50738299327b611c10b8304b749e8d3a579407ee9e755b15921d95ff418' ;;
|
||||
'2.3') echo '010dd9f31849abc3d5644e282943b1c1c355f8e2635c5789833979ce590a3774' ;;
|
||||
'2.4') echo 'c4eaecc621a81f567ded1aede4a5ddb281cc02a03a6a87c4f5502add8fc2f16f' ;;
|
||||
'2.5') echo '3f953e0cb14bb3f9ebbe11946e84071547bf5dfd575d90cfe9cc4e788da38555' ;;
|
||||
'2.6') echo '18a98c560af231dfa0d3f8e0802c20103ae986f12428bb0a6f5396e8f14e9c83' ;;
|
||||
'2.7') echo 'cde43b90945b5304c43ee36e58aab4cc6fb3a3d5f9bd9449bb1709a68371cb06' ;;
|
||||
'2.8') echo 'a88db9c2f104defdaa8011c58cf6cda6c114298ae3695ecfb8beb30da3a903cb' ;;
|
||||
'2.9') echo 'c9159ec4362284c0a38d73237e224deae6139cbde0db4f0f44e1c7691dd3de2f' ;;
|
||||
'2.10') echo '66406247f745fc6f05ab382d3f8d3e120c339f34ef54b86f6dc5f6efc18fbb13' ;;
|
||||
'2.11') echo '8d7437082356c9fd6309a4479c8db307673965546daea445c6c72759cd6b1ed6' ;;
|
||||
'2.12') echo 'e77064981906cd0476ff1e0de3e6fef747bd18e140960f1915cca8ff6c33ab5c' ;;
|
||||
'2.13') echo '0f665ec6a5a67865faf7ba0d825afb19c26705ea0597cec80dd191b0f2cbb664' ;;
|
||||
'2.14') echo '993b4f33b652c689e9721917d8e021cab6bbd3eae81b39ab2fd46fdb19a928d5' ;;
|
||||
'2.14.1') echo 'cfc61eda71f2d12a572822644ce13d2919407595c2aec3e3566d2aab6f97ef39' ;;
|
||||
'3.0') echo '39c906941a474444afbddc38144ed44166825acb0a57b0551dddb04bbf157f80' ;;
|
||||
'3.1') echo 'c7de3442432253525902f7e8d7eac8b5fd6ce1623f96d76916af6d0e383010fc' ;;
|
||||
'3.2') echo '5321b36837226dc0377047a328f12010f42c7bf88ee4a3b1cee0c11040082935' ;;
|
||||
'3.2.1') echo '9843a3654d3e57dce54db06d05f18b664b95c22bf90c6becccb61fc63ce60689' ;;
|
||||
'3.3') echo 'c58650c278d8cf0696cab65108ae3c8d95eea9c1938e0eb8b997095d5ca9a292' ;;
|
||||
'3.4') echo '72d0cd4dcdd5e3be165eb7cd7bbd25cf8968baf400323d9ab1bba622c3f72205' ;;
|
||||
'3.4.1') echo 'db1db193d479cc1202be843f17e4526660cfb0b21b57d62f3a87f88c878af9b2' ;;
|
||||
'3.5') echo '0b7450798c190ff76b9f9a3d02e18b33d94553f708ebc08ebe09bdf99111d110' ;;
|
||||
'3.5.1') echo '8dce35f52d4c7b4a4946df73aa2830e76ba7148850753d8b5e94c5dc325ceef8' ;;
|
||||
'4.0') echo '56bd2dde29ba2a93903c557da1745cafd72cdd8b6b0b83c05a40ed7896b79dfe' ;;
|
||||
'4.0.1') echo 'd717e46200d1359893f891dab047fdab98784143ac76861b53c50dbd03b44fd4' ;;
|
||||
'4.0.2') echo '79ac421342bd11f6a4f404e0988baa9c1f5fabf07e3c6fa65b0c15c1c31dda22' ;;
|
||||
'4.1') echo 'd55dfa9cfb5a3da86a1c9e75bb0b9507f9a8c8c100793ccec7beb6e259f9ed43' ;;
|
||||
'4.2') echo '515dd63d32e55a9c05667809c5e40a947529de3054444ad274b3b75af5582eae' ;;
|
||||
'4.2.1') echo 'b551cc04f2ca51c78dd14edb060621f0e5439bdfafa6fd167032a09ac708fbc0' ;;
|
||||
'4.3') echo '8dcbf44eef92575b475dcb1ce12b5f19d38dc79e84c662670248dc8b8247654c' ;;
|
||||
'4.3.1') echo '15ebe098ce0392a2d06d252bff24143cc88c4e963346582c8d88814758d93ac7' ;;
|
||||
'4.4') echo 'fa4873ae2c7f5e8c02ec6948ba95848cedced6134772a0169718eadcb39e0a2f' ;;
|
||||
'4.4.1') echo 'e7cf7d1853dfc30c1c44f571d3919eeeedef002823b66b6a988d27e919686389' ;;
|
||||
'4.5') echo '03f2a43a314ff0fb843a85ef68078e06d181c4549c1e5fb983f289382b59b5e3' ;;
|
||||
'4.5.1') echo '3e2ea0d8b96605b7c528768f646e0975bd9822f06df1f04a64fd279b1a17805e' ;;
|
||||
'4.6') echo '98bd5fd2b30e070517e03c51cbb32beee3e2ee1a84003a5a5d748996d4b1b915' ;;
|
||||
'4.7') echo 'fca5087dc8b50c64655c000989635664a73b11b9bd3703c7d6cabd31b7dcdb04' ;;
|
||||
'4.8') echo 'f3e29692a8faa94eb0b02ebf36fa263a642b3ae8694ef806c45c345b8683f1ba' ;;
|
||||
'4.8.1') echo 'af334d994b5e69e439ab55b5d2b7d086da5ea6763d78054f49f147b06370ed71' ;;
|
||||
'4.9') echo 'e66e69dce8173dd2004b39ba93586a184628bc6c28461bc771d6835f7f9b0d28' ;;
|
||||
'4.10') echo '248cfd92104ce12c5431ddb8309cf713fe58de8e330c63176543320022f59f18' ;;
|
||||
'4.10.1') echo 'e53ce3a01cf016b5d294eef20977ad4e3c13e761ac1e475f1ffad4c6141a92bd' ;;
|
||||
'4.10.2') echo 'b49c6da1b2cb67a0caf6c7480630b51c70a11ca2016ff2f555eaeda863143a29' ;;
|
||||
'4.10.3') echo '8626cbf206b4e201ade7b87779090690447054bc93f052954c78480fa6ed186e' ;;
|
||||
'5.0') echo '6157ac9f3410bc63644625b3b3e9e96c963afd7910ae0697792db57813ee79a6' ;;
|
||||
'5.1') echo '7506638a380092a0406364c79d6c87d03d23017fc25a5770379d1ce23c3fcd4d' ;;
|
||||
'5.1.1') echo '4953323605c5d7b89e97d0dc7779e275bccedefcdac090aec123375eae0cc798' ;;
|
||||
'5.2') echo 'ff322863250159595e93b5a4d17a6f0d21c59a1a0497c1e1cf1d53826485503f' ;;
|
||||
'5.2.1') echo '748c33ff8d216736723be4037085b8dc342c6a0f309081acf682c9803e407357' ;;
|
||||
'5.3') echo 'bed2bdd3955be5a09ca7e0201e9d131f194f7f6c466e1795a733733ccfb09f25' ;;
|
||||
'5.3.1') echo '1c59a17a054e9c82f0dd881871c9646e943ec4c71dd52ebc6137d17f82337436' ;;
|
||||
'5.4') echo 'c8c17574245ecee9ed7fe4f6b593b696d1692d1adbfef425bef9b333e3a0e8de' ;;
|
||||
'5.4.1') echo '7bdbad1e4f54f13c8a78abc00c26d44dd8709d4aedb704d913fb1bb78ac025dc' ;;
|
||||
'5.5') echo '8d78b2ed63e7f07ad169c1186d119761c4773e681f332cfe1901045b1b0141bc' ;;
|
||||
'5.5.1') echo '222a03fcf2fcaf3691767ce9549f78ebd4a77e73f9e23a396899fb70b420cd00' ;;
|
||||
'5.6') echo '15c02ef5dd3631ec02ac52e8725703e0285d9a7eecbf4e5939aa9e924604d01d' ;;
|
||||
'5.6.1') echo '0986244820e4a35d32d91df2ec4b768b5ba5d6c8246753794f85159f9963ec12' ;;
|
||||
'5.6.2') echo '32fce6628848f799b0ad3205ae8db67d0d828c10ffe62b748a7c0d9f4a5d9ee0' ;;
|
||||
'5.6.3') echo '60a6d8f687e3e7a4bc901cc6bc3db190efae0f02f0cc697e323e0f9336f224a3' ;;
|
||||
'5.6.4') echo '1f3067073041bc44554d0efe5d402a33bc3d3c93cc39ab684f308586d732a80d' ;;
|
||||
'6.0') echo '5a3578b9f0bb162f5e08cf119f447dfb8fa950cedebb4d2a977e912a11a74b91' ;;
|
||||
'6.0.1') echo 'd364b7098b9f2e58579a3603dc0a12a1991353ac58ed339316e6762b21efba44' ;;
|
||||
'6.1') echo 'd0c43d14e1c70a48b82442f435d06186351a2d290d72afd5b8866f15e6d7038a' ;;
|
||||
'6.1.1') echo '9d94e6e4a28ad328072ef6e56bce79a810494ae756751fdcedffdeaf27c093b1' ;;
|
||||
'6.2') echo 'b93a5f30d01195ec201e240f029c8b42d59c24086b8d1864112c83558e23cf8a' ;;
|
||||
'6.2.1') echo 'a68ca7ba57f3404c3f6fc1f70a02d3a7d78652e6b46bbfaff83fc9a17168c279' ;;
|
||||
'6.2.2') echo '0f6ba231b986276d8221d7a870b4d98e0df76e6daf1f42e7c0baec5032fb7d17' ;;
|
||||
'6.3') echo '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768' ;;
|
||||
'6.4') echo 'b888659f637887e759749f6226ddfcb1cb04f828c58c41279de73c463fdbacc9' ;;
|
||||
'6.4.1') echo 'e58cdff0cee6d9b422dcd08ebeb3177bc44eaa09bd9a2e838ff74c408fe1cbcd' ;;
|
||||
'6.5') echo '23e7d37e9bb4f8dabb8a3ea7fdee9dd0428b9b1a71d298aefd65b11dccea220f' ;;
|
||||
'6.5.1') echo '50a7d30529fa939721fe9268a0205142f3f2302bcac5fb45b27a3902e58db54a' ;;
|
||||
'6.6') echo 'e6f83508f0970452f56197f610d13c5f593baaf43c0e3c6a571e5967be754025' ;;
|
||||
'6.6.1') echo '7873ed5287f47ca03549ab8dcb6dc877ac7f0e3d7b1eb12685161d10080910ac' ;;
|
||||
'6.7') echo '8ad57759019a9233dc7dc4d1a530cefe109dc122000d57f7e623f8cf4ba9dfc4' ;;
|
||||
'6.7.1') echo '3239b5ed86c3838a37d983ac100573f64c1f3fd8e1eb6c89fa5f9529b5ec091d' ;;
|
||||
*) exit 1
|
||||
esac
|
||||
}
|
||||
|
||||
contains() {
|
||||
local e
|
||||
for e in $2; do
|
||||
[[ $e == $1 ]] && return 0;
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# key-value pairs of what gradle version (value) each gradle plugin version
|
||||
# (key) should accept. plugin versions are actually prefixes and catch sub-
|
||||
# versions as well. Pairs are taken from:
|
||||
# https://developer.android.com/studio/releases/gradle-plugin.html#updating-gradle
|
||||
d_plugin_k=(4.1 4.0 3.6 3.5 3.4 3.3 3.2 3.1 3.0 2.3 2.2 2.1.3 2.1 2.0 1.5 1.3 1.2 1.1 1.0 0.14 0.13 0.12 0.11 0.10 0.9 0.8 0.7 0.6 0.5 0.4 0.3 0.2)
|
||||
d_plugin_v=(6.5 6.1.1 5.6.4 5.4.1 5.1.1 4.10.1 4.6 4.4 4.1 3.3 2.14.1 2.14.1 2.12 2.12 2.4 2.4 2.3 2.2.1 2.2.1 2.1 2.1 1.12 1.12 1.12 1.11 1.10 1.9 1.8 1.6 1.6 1.4 1.4)
|
||||
|
||||
# All gradle versions we know about
|
||||
plugin_v=(6.7.1 6.7 6.6.1 6.6 6.5.1 6.5 6.4.1 6.4 6.3 6.2.2 6.2.1 6.2 6.1.1 6.1 6.0.1 6.0 5.6.4 5.6.3 5.6.2 5.6.1 5.6 5.5.1 5.5 5.4.1 5.4 5.3.1 5.3 5.2.1 5.2 5.1.1 5.1 5.0 4.10.3 4.10.2 4.10.1 4.10 4.9 4.8.1 4.8 4.7 4.6 4.5.1 4.5 4.4.1 4.4 4.3.1 4.3 4.2.1 4.2 4.1 4.0.2 4.0.1 4.0 3.5.1 3.5 3.4.1 3.4 3.3 3.2.1 3.2 3.1 3.0 2.14.1 2.14 2.13 2.12 2.11 2.10 2.9 2.8 2.7 2.6 2.5 2.4 2.3 2.2.1 2.2 2.1 1.12 1.11 1.10 1.9 1.8 1.7 1.6 1.4)
|
||||
|
||||
v_all=${plugin_v[@]}
|
||||
|
||||
# Earliest takes priority
|
||||
for f in {.,..}/gradle/wrapper/gradle-wrapper.properties; do
|
||||
[[ -f $f ]] || continue
|
||||
while IFS='' read -r line || [ -n "$line" ]; do
|
||||
line=$(printf "$line" | tr -d '\r') # strip Windows carriage returns
|
||||
if [[ $line == 'distributionUrl='* ]]; then
|
||||
wrapper_ver=${line#*/gradle-}
|
||||
wrapper_ver=${wrapper_ver%-*.zip}
|
||||
break 2
|
||||
fi
|
||||
done < $f
|
||||
done
|
||||
|
||||
if [[ -n $wrapper_ver ]]; then
|
||||
v_found=$wrapper_ver
|
||||
echo "Found $v_found via distributionUrl"
|
||||
run_gradle
|
||||
fi
|
||||
|
||||
# Earliest takes priority
|
||||
for f in {.,..}/build.gradle{,.kts}; do
|
||||
[[ -f $f ]] || continue
|
||||
while IFS='' read -r line || [ -n "$line" ]; do
|
||||
line=$(printf "$line" | tr -d '\r') # strip Windows carriage returns
|
||||
if [[ -z "$plugin_pver" && $line == *'com.android.tools.build:gradle:'* ]]; then
|
||||
plugin_pver=${line#*[\'\"]com.android.tools.build:gradle:}
|
||||
plugin_pver=${plugin_pver%[\'\"]*}
|
||||
elif [[ -z "$wrapper_ver" && $line == *'gradleVersion = '* ]]; then
|
||||
wrapper_ver=${line#*gradleVersion*=*[\'\"]}
|
||||
wrapper_ver=${wrapper_ver%[\'\"]*}
|
||||
fi
|
||||
done < $f
|
||||
done
|
||||
|
||||
if [[ -n $wrapper_ver ]]; then
|
||||
v_found=$wrapper_ver
|
||||
echo "Found $v_found via gradleVersion"
|
||||
run_gradle
|
||||
fi
|
||||
|
||||
if [[ -n $plugin_pver ]]; then
|
||||
i=0
|
||||
match=false
|
||||
for k in ${d_plugin_k[@]}; do
|
||||
if [[ $plugin_pver == ${k}* ]]; then
|
||||
plugin_ver=${d_plugin_v[$i]}
|
||||
match=true
|
||||
break
|
||||
fi
|
||||
let i++
|
||||
done
|
||||
if $match; then
|
||||
v_found=$plugin_ver
|
||||
echo "Found $v_found via gradle plugin version $k"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Find the highest version available
|
||||
for v in ${plugin_v[*]}; do
|
||||
if contains $v "${v_all[*]}"; then
|
||||
v_def=$v
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -z $v_found ]]; then
|
||||
echo "No suitable gradle version found - defaulting to $v_def"
|
||||
v_found=$v_def
|
||||
fi
|
||||
|
||||
run_gradle
|
|
@ -2,7 +2,7 @@
|
|||
#
|
||||
# Install all the client hooks
|
||||
|
||||
BASE_DIR="$(cd $(dirname $0) || exit; pwd -P)"
|
||||
BASE_DIR="$(cd $(dirname $0); pwd -P)"
|
||||
HOOK_NAMES="applypatch-msg pre-applypatch post-applypatch pre-commit prepare-commit-msg commit-msg post-commit pre-rebase post-checkout post-merge pre-receive update post-receive post-update pre-auto-gc"
|
||||
HOOK_DIR="$(git rev-parse --show-toplevel)/.git/hooks"
|
||||
|
||||
|
|
|
@ -8,12 +8,12 @@ exec 1>&2
|
|||
|
||||
files=`git diff-index --cached HEAD 2>&1 | sed 's/^:.* //' | uniq | cut -b100-500`
|
||||
if [ -z "$files" ]; then
|
||||
PY_FILES="fdroid makebuildserver setup.py fdroidserver/*.py examples/*.py tests/*-release-checksums.py"
|
||||
PY_TEST_FILES="tests/test_*.py"
|
||||
PY_FILES="fdroid makebuildserver setup.py fdroidserver/*.py"
|
||||
PY_TEST_FILES="tests/*.TestCase"
|
||||
SH_FILES="hooks/pre-commit"
|
||||
BASH_FILES="jenkins-build-all jenkins-setup-build-environment jenkins-test completion/bash-completion buildserver/provision-*"
|
||||
BASH_FILES="gradlew-fdroid jenkins-build-all jenkins-setup-build-environment jenkins-test completion/bash-completion buildserver/provision-*"
|
||||
RB_FILES="buildserver/Vagrantfile"
|
||||
YML_FILES=".*.yml .yamllint */*.yml */*.yaml"
|
||||
YML_FILES="buildserver/*.yml examples/*.yml"
|
||||
else
|
||||
# if actually committing right now, then only run on the files
|
||||
# that are going to be committed at this moment
|
||||
|
@ -27,16 +27,16 @@ else
|
|||
for f in $files; do
|
||||
test -e $f || continue
|
||||
case $f in
|
||||
test_*.py)
|
||||
PY_TEST_FILES+=" $f"
|
||||
;;
|
||||
*.py)
|
||||
PY_FILES+=" $f"
|
||||
;;
|
||||
*.TestCase)
|
||||
PY_TEST_FILES+=" $f"
|
||||
;;
|
||||
*.rb)
|
||||
RB_FILES+=" $f"
|
||||
;;
|
||||
*.yml|*.yaml|.yamllint)
|
||||
*.yml)
|
||||
YML_FILES+=" $f"
|
||||
;;
|
||||
*)
|
||||
|
@ -52,6 +52,17 @@ else
|
|||
done
|
||||
fi
|
||||
|
||||
# We ignore the following PEP8 warnings
|
||||
# * E123: closing bracket does not match indentation of opening bracket's line
|
||||
# - Broken if multiple indentation levels start on a single line
|
||||
# * E501: line too long (82 > 79 characters)
|
||||
# - Recommended for readability but not enforced
|
||||
# - Some lines are awkward to wrap around a char limit
|
||||
# * W503: line break before binary operator
|
||||
# - Quite pedantic
|
||||
|
||||
PEP8_IGNORE="E123,E501,W503"
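The same codes can be skipped in a standalone run outside this hook, e.g. (the file list is just an example; pycodestyle is the current name of the pep8 tool):

pycodestyle --ignore=E123,E501,W503 fdroidserver/*.py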
|
||||
|
||||
err() {
|
||||
echo >&2 ERROR: "$@"
|
||||
exit 1
|
||||
|
@ -66,7 +77,7 @@ cmd_exists() {
|
|||
}
|
||||
|
||||
find_command() {
|
||||
for name in "$@"; do
|
||||
for name in $@; do
|
||||
for suff in "3" "-3" "-python3" ""; do
|
||||
cmd=${name}${suff}
|
||||
if cmd_exists $cmd; then
|
||||
|
@ -80,9 +91,8 @@ find_command() {
|
|||
}
|
||||
|
||||
DASH=$(find_command dash)
|
||||
PYDOCSTYLE=$(find_command pydocstyle)
|
||||
PYFLAKES=$(find_command pyflakes)
|
||||
PYCODESTYLE=$(find_command pycodestyle pep8)
|
||||
PEP8=$(find_command pycodestyle pep8)
|
||||
RUBY=$(find_command ruby)
|
||||
YAMLLINT=$(find_command yamllint)
|
||||
|
||||
|
@ -90,20 +100,19 @@ if [ "$PY_FILES $PY_TEST_FILES" != " " ]; then
|
|||
if ! $PYFLAKES $PY_FILES $PY_TEST_FILES; then
|
||||
err "pyflakes tests failed!"
|
||||
fi
|
||||
# ignore vendored files
|
||||
if ! $PYDOCSTYLE --match='(?!apksigcopier|looseversion|setup|test_).*\.py' $PY_FILES $PY_TEST_FILES; then
|
||||
err "pydocstyle tests failed!"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$PY_FILES" != "" ]; then
|
||||
if ! $PYCODESTYLE $PY_FILES; then
|
||||
if ! $PEP8 --ignore=$PEP8_IGNORE $PY_FILES; then
|
||||
err "pep8 tests failed!"
|
||||
fi
|
||||
fi
|
||||
|
||||
# The tests use a little hack in order to cleanly import the fdroidserver
|
||||
# package locally like a regular package. pep8 doesn't see that, so this
|
||||
# makes pep8 skip E402 on the test files that need that hack.
|
||||
if [ "$PY_TEST_FILES" != "" ]; then
|
||||
if ! $PYCODESTYLE $PY_TEST_FILES; then
|
||||
if ! $PEP8 --ignore=$PEP8_IGNORE,E402 $PY_TEST_FILES; then
|
||||
err "pep8 tests failed!"
|
||||
fi
|
||||
fi
|
||||
|
@ -127,7 +136,7 @@ for f in $RB_FILES; do
|
|||
done
|
||||
|
||||
for f in $YML_FILES; do
|
||||
if ! $YAMLLINT $f; then
|
||||
if ! $YAMLLINT $f 1>/dev/null; then
|
||||
err ".yml tests failed on $f!"
|
||||
fi
|
||||
done
|
||||
|
|
|
@ -62,7 +62,7 @@ vagrant global-status \
|
|||
# so we need to "manually" clone the git repo here…
|
||||
cd $WORKSPACE
|
||||
|
||||
# set up Android SDK to use the Debian packages
|
||||
# set up Android SDK to use the Debian packages in stretch
|
||||
export ANDROID_HOME=/usr/lib/android-sdk
|
||||
|
||||
# now build the whole archive
|
||||
|
@ -83,27 +83,19 @@ fi
|
|||
|
||||
echo "build_server_always: true" > config.yml
|
||||
echo "deploy_process_logs: true" >> config.yml
|
||||
# if the local mediawiki is available, then use it
|
||||
if nc -z -w1 localhost 32445; then
|
||||
wikiflag="--wiki"
|
||||
echo "wiki_protocol: http" >> config.yml
|
||||
echo "wiki_server: localhost:32445" >> config.yml
|
||||
echo "wiki_path: /mediawiki/" >> config.yml
|
||||
echo "wiki_user: fdroid" >> config.yml
|
||||
echo "wiki_password: update.TestCase" >> config.yml
|
||||
else
|
||||
sed -i '/^wiki_/d' config.yml
|
||||
fi
|
||||
|
||||
printf '\n@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\nbuild all with reproducible signatures\n'
|
||||
for f in metadata/*/signatures/*; do
|
||||
appid=$(basename $(dirname $(dirname $f)))
|
||||
versionCode=$(basename $f)
|
||||
rm -f repo/${appid}_${versionCode}*.* archive/${appid}_${versionCode}*.* unsigned/${appid}_${versionCode}*.*
|
||||
$WORKSPACE/fdroid build --verbose --latest --no-tarball ${appid}:$versionCode
|
||||
done
|
||||
|
||||
printf '\n@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\nbuild all with Binaries:\n'
|
||||
for appid in `grep '^Binaries: ' metadata/*.yml --files-with-match | sed 's,^metadata/\(.*\)\.yml$,\1,'`; do
|
||||
rm -f repo/${appid}_*.* archive/${appid}_*.* unsigned/${appid}_*.*
|
||||
$WORKSPACE/fdroid build --verbose --latest --no-tarball ${appid}
|
||||
done
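To illustrate the appid extraction in that grep/sed pipeline (org.example.app is a made-up application id), a matching metadata file name is reduced to the bare appid:

printf 'metadata/org.example.app.yml\n' | sed 's,^metadata/\(.*\)\.yml$,\1,'   # prints: org.example.app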
|
||||
|
||||
# force global timeout to 6 hours
|
||||
sed -Ei 's,^(\s+endtime\s*=\s*time\.time\(\))\s*.*,\1 + 6 * 60 * 60 # 6 hours,' \
|
||||
$WORKSPACE/fdroidserver/build.py
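Shown on a stand-in line (the real line in build.py may differ slightly), that sed rewrites the endtime assignment so the overall timeout becomes six hours from now:

printf '    endtime = time.time()\n' | \
    sed -E 's,^(\s+endtime\s*=\s*time\.time\(\))\s*.*,\1 + 6 * 60 * 60 # 6 hours,'
# prints: "    endtime = time.time() + 6 * 60 * 60 # 6 hours"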
|
||||
|
||||
printf '\n@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\nbuild all\n'
|
||||
$WORKSPACE/fdroid build --verbose --latest --no-tarball --all
|
||||
$WORKSPACE/fdroid build --verbose --latest --no-tarball --all $wikiflag
|
||||
|
||||
vagrant global-status
|
||||
if [ -d builder ]; then
|
||||
|
|
|
@ -66,11 +66,13 @@ if [ `nproc` -le 6 ]; then
|
|||
else
|
||||
cpus=6
|
||||
fi
|
||||
cat <<EOF > $WORKSPACE/buildserver/Vagrantfile.yaml
|
||||
debian_mirror: https://deb.debian.org/debian/
|
||||
boot_timeout: 1200
|
||||
memory: $memory
|
||||
cpus: $cpus
|
||||
cat <<EOF > $WORKSPACE/makebuildserver.config.py
|
||||
debian_mirror = 'http://deb.debian.org/debian/'
|
||||
boot_timeout = 1200
|
||||
apt_package_cache = True
|
||||
copy_caches_from_host = True
|
||||
memory = $memory
|
||||
cpus = $cpus
|
||||
EOF
|
||||
|
||||
cd $WORKSPACE
|
||||
|
|
|
@ -29,7 +29,7 @@ fi
|
|||
set -e
|
||||
set -x
|
||||
|
||||
# set up Android SDK to use the Debian packages
|
||||
# set up Android SDK to use the Debian packages in stretch
|
||||
export ANDROID_HOME=/usr/lib/android-sdk
|
||||
|
||||
rm -rf "$WORKSPACE/.testfiles"
|
||||
|
@ -76,3 +76,5 @@ echo "repo_pubkey: 308204e1308202c9a003020102020434597643300d06092a864886f70d010
|
|||
sed -i '/^repo_pubkey: /d' config.yml
|
||||
# when everything is copied over to run on SIGN machine
|
||||
../fdroid signindex --verbose
|
||||
|
||||
../fdroid checkupdates --auto --autoonly --commit
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
|
||||
FILES = $(wildcard ../fdroidserver/*.py) \
|
||||
FILES = ../fdroid $(wildcard ../fdroidserver/*.py) \
|
||||
$(wildcard /usr/lib/python3.*/argparse.py) \
|
||||
../fdroid
|
||||
$(wildcard /usr/lib/python3.*/optparse.py) \
|
||||
$(wildcard /usr/lib/python3.*/getopt.py)
|
||||
|
||||
# these are the supported languages
|
||||
ALL_LINGUAS = $(shell sed -En 's,include locale/([^/]+)/.*,\1,p' ../MANIFEST.in)
|
||||
ALL_LINGUAS = bo de es fr hu it ko nb_NO pl pt_BR pt_PT ru tr uk zh_Hans zh_Hant
|
||||
POFILES = $(wildcard */LC_MESSAGES/fdroidserver.po)
|
||||
MOFILES = $(ALL_LINGUAS:=/LC_MESSAGES/fdroidserver.mo)
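The sed expression in the new ALL_LINGUAS definition pulls the locale codes out of the MANIFEST.in include lines, for example (example entry shown):

printf 'include locale/de/LC_MESSAGES/fdroidserver.po\n' | \
    sed -En 's,include locale/([^/]+)/.*,\1,p'   # prints: de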
|
||||
|
||||
|
@ -12,8 +13,6 @@ TEMPLATE = fdroidserver.pot
|
|||
|
||||
VERSION = $(shell git describe)
|
||||
|
||||
OPTS = --no-wrap --sort-output --add-location=file
|
||||
|
||||
default:
|
||||
@printf "Build the translation files using: ./setup.py compile_catalog\n\n"
|
||||
|
||||
|
@ -30,24 +29,18 @@ clean:
|
|||
-rm -f -- $(MOFILES)
|
||||
-rm -f -- $(POFILES:=~)
|
||||
|
||||
# to remove obsolete source strings, run xgettext without --join-existing
|
||||
$(TEMPLATE): $(FILES)
|
||||
xgettext --join-existing --from-code=UTF-8 \
|
||||
--language=Python --keyword=_ \
|
||||
$(OPTS) --output=$(TEMPLATE) \
|
||||
--no-wrap --sort-output --add-location=file --output=$(TEMPLATE) \
|
||||
--package-name="fdroidserver" --package-version=$(VERSION) \
|
||||
--foreign-user \
|
||||
--msgid-bugs-address=https://gitlab.com/fdroid/fdroidserver/issues \
|
||||
$(FILES)
|
||||
msguniq $(OPTS) --use-first \
|
||||
--output-file=$(TEMPLATE) $(TEMPLATE)
|
||||
sed -i 's,CHARSET,UTF-8,' $(TEMPLATE)
|
||||
|
||||
%.po: $(TEMPLATE)
|
||||
msgattrib --set-obsolete --ignore-file=$(TEMPLATE) -o $@ $@
|
||||
msgattrib $(OPTS) --no-obsolete --output-file=$@ $@
|
||||
msguniq $(OPTS) --use-first --output-file=$@ $@
|
||||
msgmerge $(OPTS) --update $@ $(TEMPLATE)
|
||||
msgmerge --no-wrap --sort-output --add-location=file --update $@ $(TEMPLATE)
|
||||
|
||||
%/LC_MESSAGES/fdroidserver.mo: %/LC_MESSAGES/fdroidserver.po
|
||||
msgfmt --check -o $@ $(@:mo=po)
|
||||
|
|
17
locale/POTFILES.in
Normal file
|
@ -0,0 +1,17 @@
|
|||
fdroid
|
||||
fdroidserver/btlog.py
|
||||
fdroidserver/build.py
|
||||
fdroidserver/checkupdates.py
|
||||
fdroidserver/common.py
|
||||
fdroidserver/deploy.py
|
||||
fdroidserver/import.py
|
||||
fdroidserver/init.py
|
||||
fdroidserver/install.py
|
||||
fdroidserver/lint.py
|
||||
fdroidserver/metadata.py
|
||||
fdroidserver/publish.py
|
||||
fdroidserver/rewritemeta.py
|
||||
fdroidserver/scanner.py
|
||||
fdroidserver/stats.py
|
||||
fdroidserver/update.py
|
||||
fdroidserver/verify.py
|
Several large file diffs were suppressed; some files were not shown because too many files have changed in this diff.