diff --git a/.bandit b/.bandit new file mode 100644 index 00000000..dc28620f --- /dev/null +++ b/.bandit @@ -0,0 +1,3 @@ +[bandit] +skips: B110,B404,B408,B603,B607,B322 +targets: . diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..43412092 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,15 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[**.py] +indent_style = space +indent_size = 4 + +[.gitlab-ci.yml] +indent_style = space +indent_size = 2 diff --git a/.gitignore b/.gitignore index b442d84d..ce3a0e9a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,79 @@ -/config.py -/makebs.config.py *~ *.pyc *.class *.box +TAGS +.idea +.ropeproject/ + # files generated by build -FDroidServer.egg-info/ +/build/ +/dist/ +env/ +ENV/ +/fdroidserver.egg-info/ +pylint.parseable +/.testfiles/ +README.rst +/.eggs/ + +# editor tmp files +.*.swp +.ropeproject/ + +# files generated by tests +tmp/ +/tests/repo/icons* +/tests/repo/status + +# files used in manual testing +/config.yml +/tmp/ +/logs/ +/metadata/ +/makebs.config.py +makebuildserver.config.py +/tests/.fdroid.keypass.txt +/tests/.fdroid.keystorepass.txt +/tests/.java.security +/tests/fdroid-icon.png +/tests/OBBMainOldVersion.apk +/tests/OBBMainPatchCurrent.apk +/tests/OBBMainTwoVersions.apk +/tests/archive/categories.txt +/tests/archive/diff/[1-9]*.json +/tests/archive/entry.jar +/tests/archive/entry.json +/tests/archive/icons* +/tests/archive/index-v1.jar +/tests/archive/index-v1.json +/tests/archive/index-v2.json +/tests/archive/index.css +/tests/archive/index.html +/tests/archive/index.jar +/tests/archive/index.png +/tests/archive/index.xml +/tests/archive/index_unsigned.jar +/tests/metadata/org.videolan.vlc/en-US/icon*.png +/tests/repo/diff/[1-9]*.json +/tests/repo/index.css +/tests/repo/index.html +/tests/repo/index.jar +/tests/repo/index.png +/tests/repo/index_unsigned.jar +/tests/repo/index-v1.jar +/tests/repo/info.guardianproject.urzip/ +/tests/repo/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png +/tests/repo/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png +/tests/repo/obb.mainpatch.current/en-US/featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png +/tests/repo/obb.mainpatch.current/en-US/icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png +/tests/repo/org.videolan.vlc/en-US/icon_yAfSvPRJukZzMMfUzvbYqwaD1XmHXNtiPBtuPVHW-6s=.png +/tests/urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234.apk +/tests/virustotal/ +/unsigned/ + +# generated by gettext +locale/*/LC_MESSAGES/fdroidserver.mo + +# sphinx +public/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..65510c45 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,854 @@ +--- + +# Use merge request pipelines when a merge request is open for the branch. +# Use branch pipelines when a merge request is not open for the branch. +# https://docs.gitlab.com/ci/yaml/workflow/#switch-between-branch-pipelines-and-merge-request-pipelines +workflow: + rules: + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS + when: never + - if: $CI_COMMIT_BRANCH + + +stages: + - lint + - test # default for jobs that do not specify stage: + - deploy + + +variables: + pip: pip3 --timeout 100 --retries 10 + # speed up git checkout phase + GIT_DEPTH: 1 + + +# Run the whole test suite in an environment that is like the +# buildserver guest VM. 
This installs python3-babel because that is +# only used by the test suite, and not needed in the buildserver. +# +# Some extra packages are required for this test run that are not +# provided by the buildserver since they are not needed there: +# * python3-babel for compiling localization files +# * gnupg-agent for the full signing setup +# * python3-clint for fancy progress bars for users +# * python3-pycountry for linting config/mirrors.yml +buildserver run-tests: + image: registry.gitlab.com/fdroid/fdroidserver:buildserver + script: + - apt-get update + - apt-get install gnupg-agent python3-babel python3-biplist python3-clint python3-pycountry + - ./tests/run-tests + # make sure that translations do not cause stacktraces + - cd $CI_PROJECT_DIR/locale + - for locale in *; do + test -d $locale || continue; + for cmd in `sed -n 's/.*("\(.*\)", *_.*/\1/p' $CI_PROJECT_DIR/fdroid`; do + LANGUAGE=$locale $CI_PROJECT_DIR/fdroid $cmd --help > /dev/null; + done + done + +# Test that the parsing of the .yml metadata format didn't change from last +# released version. This uses the commit ID of the release tags, +# rather than the release tag itself so that contributor forks do not +# need to include the tags in them for this test to work. +# +# The COMMIT_ID should be bumped after each release, so that the list +# of sed hacks needed does not continuously grow. +metadata_v0: + image: registry.gitlab.com/fdroid/fdroidserver:buildserver + variables: + GIT_DEPTH: 1000 + RELEASE_COMMIT_ID: 50aa35772b058e76b950c01e16019c072c191b73 # after switching to `git rev-parse` + script: + - git fetch https://gitlab.com/fdroid/fdroidserver.git $RELEASE_COMMIT_ID + - cd tests + - export GITCOMMIT=$(git rev-parse HEAD) + - git checkout $RELEASE_COMMIT_ID + - cd .. + - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git + - rm -f fdroiddata/config.yml # ignore config for this test + - cd fdroiddata + - ../tests/dump_internal_metadata_format.py + - cd .. + - git reset --hard + - git checkout $GITCOMMIT + - cd fdroiddata + - ../tests/dump_internal_metadata_format.py + - sed -i + -e '/ArchivePolicy:/d' + -e '/FlattrID:/d' + -e '/RequiresRoot:/d' + metadata/dump_*/*.yaml + - diff -uw metadata/dump_* + +.apt-template: &apt-template + variables: + DEBIAN_FRONTEND: noninteractive + LANG: C.UTF-8 + before_script: + - echo Etc/UTC > /etc/timezone + - echo 'APT::Install-Recommends "0";' + 'APT::Install-Suggests "0";' + 'APT::Get::Assume-Yes "true";' + 'Acquire::Retries "20";' + 'Dpkg::Use-Pty "0";' + 'quiet "1";' + >> /etc/apt/apt.conf.d/99gitlab + # Ubuntu and other distros often lack https:// support + - grep Debian /etc/issue.net + && { find /etc/apt/sources.list* -type f | xargs sed -i s,http:,https:, ; } + # The official Debian docker images ship without ca-certificates, + # TLS certificates cannot be verified until that is installed. The + # following code turns off TLS verification, and enables HTTPS, so + # at least unverified TLS is used for apt-get instead of plain + # HTTP. Once ca-certificates is installed, the CA verification is + # enabled by removing this config. This set up makes the initial + # `apt-get update` and `apt-get install` look the same as verified + # TLS to the network observer and hides the metadata. + - echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates + - apt-get update + - apt-get install ca-certificates + - rm /etc/apt/apt.conf.d/99nocacertificates + - apt-get dist-upgrade + +# For jobs that only need to run when there are changes to Python files. 
+.python-rules-changes: &python-rules-changes + rules: + - changes: + - .gitlab-ci.yml + - fdroid + - makebuildserver + - setup.py + - fdroidserver/*.py + - tests/*.py + + +# Since F-Droid uses Debian as its default platform, from production +# servers to CI to contributor machines, it is important to know when +# changes in Debian break our stuff. This tests against the latest +# dependencies as they are included in Debian. +debian_testing: + image: debian:testing + <<: *apt-template + rules: + - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" + script: + - apt-get install + aapt + androguard + apksigner + dexdump + fdroidserver + git + gnupg + ipfs-cid + python3-biplist + python3-defusedxml + python3-libcloud + python3-pycountry + python3-setuptools + sdkmanager + - python3 -c 'import fdroidserver' + - python3 -c 'import androguard' + - python3 -c 'import sdkmanager' + - cd tests + - ./run-tests + + +# Test using latest LTS set up with the PPA, including Recommends. +ubuntu_lts_ppa: + image: ubuntu:latest + <<: *apt-template + rules: + - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" + script: + - export ANDROID_HOME=/usr/lib/android-sdk + - apt-get install gnupg + - while ! apt-key adv --keyserver keyserver.ubuntu.com --recv-key 9AAC253193B65D4DF1D0A13EEC4632C79C5E0151; do sleep 15; done + - export RELEASE=$(sed -n 's,^Suites\x3a \([a-z]*\).*,\1,p' /etc/apt/sources.list.d/*.sources | head -1) + - echo "deb http://ppa.launchpad.net/fdroid/fdroidserver/ubuntu $RELEASE main" >> /etc/apt/sources.list + - apt-get update + - apt-get dist-upgrade + - apt-get install --install-recommends + dexdump + fdroidserver + git + python3-biplist + python3-pycountry + python3-setuptools + sdkmanager + + # Test things work with a default branch other than 'master' + - git config --global init.defaultBranch thisisnotmasterormain + + - cd tests + - ./run-tests + + +# Test to see how rclone works with S3 +test_deploy_to_s3_with_rclone: + image: debian:bookworm-slim + <<: *apt-template + tags: + - saas-linux-small-amd64 # the shared runners are known to support Docker. + services: + - name: docker:dind + command: ["--tls=false"] + variables: + DOCKER_HOST: "tcp://docker:2375" + DOCKER_DRIVER: overlay2 + DOCKER_TLS_CERTDIR: "" + before_script: + # ensure minio is up before executing tests + - apt-get update + - apt-get install -y + androguard + apksigner + curl + docker.io + git + python3-venv + rclone + # This job requires working docker but will silently fail if docker is not available + - docker info + - python3 -m venv --system-site-packages test-venv + - . test-venv/bin/activate + - pip install testcontainers[minio] + - pip install . + script: + - python3 -m unittest -k test_update_remote_storage_with_rclone --verbose + rules: + - changes: + - .gitlab-ci.yml + - fdroidserver/deploy.py + - tests/test_deploy.py + - tests/test_integration.py + + +# Test using Ubuntu/jammy LTS (supported til April, 2027) with depends +# from pypi and sdkmanager. The venv is used to isolate the dist +# tarball generation environment from the clean install environment. +ubuntu_jammy_pip: + image: ubuntu:jammy + <<: *apt-template + script: + - apt-get install git default-jdk-headless python3-pip python3-venv rsync + + # setup venv to act as release build machine + - python3 -m venv sdist-env + - . 
sdist-env/bin/activate + - ./setup.py sdist + - deactivate + - tar tzf dist/fdroidserver-*.tar.gz + + # back to bare machine to act as user's install machine + - export ANDROID_HOME=/opt/android-sdk + - $pip install sdkmanager + - sdkmanager 'build-tools;35.0.0' + + # Install extras_require.optional from setup.py + - $pip install biplist pycountry + + - $pip install dist/fdroidserver-*.tar.gz + - tar xzf dist/fdroidserver-*.tar.gz + - cd fdroidserver-* + - export PATH=$PATH:$ANDROID_HOME/build-tools/35.0.0 + - fdroid=`which fdroid` ./tests/run-tests + + # check localization was properly installed + - LANGUAGE='de' fdroid --help | grep 'Gültige Befehle sind' + + +# Run all the various linters and static analysis tools. +hooks/pre-commit: + stage: lint + image: debian:bookworm-slim + variables: + LANG: C.UTF-8 + script: + - apt-get update + - apt-get -y install --no-install-recommends + bash + ca-certificates + dash + gcc + git + make + pycodestyle + pyflakes3 + python3-dev + python3-git + python3-nose + python3-pip + python3-yaml + - ./hooks/pre-commit + +bandit: + image: debian:bookworm-slim + <<: *python-rules-changes + <<: *apt-template + script: + - apt-get install python3-pip + - $pip install --break-system-packages bandit + - bandit -r -ii --ini .bandit + +pylint: + stage: lint + image: debian:bookworm-slim + <<: *python-rules-changes + <<: *apt-template + script: + - apt-get install pylint python3-pip + - $pip install --break-system-packages pylint-gitlab + - pylint --output-format=colorized,pylint_gitlab.GitlabCodeClimateReporter:pylint-report.json + fdroid + makebuildserver + setup.py + fdroidserver/*.py + tests/*.py + artifacts: + reports: + codequality: pylint-report.json + when: always + + +shellcheck: + stage: lint + image: debian:bookworm-slim + rules: + - changes: + - .gitlab-ci.yml + - hooks/install-hooks.sh + - hooks/pre-commit + - tests/run-tests + <<: *apt-template + script: + - apt-get install shellcheck + # TODO GitLab Code Quality report https://github.com/koalaman/shellcheck/issues/3155 + - shellcheck --exclude SC2046,SC2090 --severity=warning --color + hooks/install-hooks.sh + hooks/pre-commit + tests/run-tests + +# Check all the dependencies in Debian to mirror production. CVEs are +# generally fixed in the latest versions in pip/pypi.org, so it isn't +# so important to scan that kind of install in CI. +# https://docs.safetycli.com/safety-docs/installation/gitlab +safety: + image: debian:bookworm-slim + rules: + - if: $SAFETY_API_KEY + changes: + - .gitlab-ci.yml + - .safety-policy.yml + - pyproject.toml + - setup.py + <<: *apt-template + variables: + LANG: C.UTF-8 + script: + - apt-get install + fdroidserver + python3-biplist + python3-pip + python3-pycountry + - $pip install --break-system-packages . 
+ + - $pip install --break-system-packages safety + - python3 -m safety --key "$SAFETY_API_KEY" --stage cicd scan + + +# TODO tests/*/*/*.yaml are not covered +yamllint: + stage: lint + image: debian:bookworm-slim + rules: + - changes: + - .gitlab-ci.yml + - .safety-policy.yml + - .yamllint + - tests/*.yml + - tests/*/*.yml + - tests/*/*/.*.yml + <<: *apt-template + variables: + LANG: C.UTF-8 + script: + - apt-get install yamllint + - yamllint + .gitlab-ci.yml + .safety-policy.yml + .yamllint + tests/*.yml + tests/*/*.yml + tests/*/*/.*.yml + + +locales: + stage: lint + image: debian:bookworm-slim + variables: + LANG: C.UTF-8 + script: + - apt-get update + - apt-get -y install --no-install-recommends + gettext + make + python3-babel + - export EXITVALUE=0 + - function set_error() { export EXITVALUE=1; printf "\x1b[31mERROR `history|tail -2|head -1|cut -b 6-500`\x1b[0m\n"; } + - make -C locale compile || set_error + - rm -f locale/*/*/*.mo + - pybabel compile --domain=fdroidserver --directory locale 2>&1 | { grep -F "error:" && exit 1; } || true + - exit $EXITVALUE + + +black: + stage: lint + image: debian:bookworm-slim + <<: *apt-template + script: + - apt-get install black + - black --check --diff --color $CI_PROJECT_DIR + +fedora_latest: + image: fedora:39 # support ends on 2024-11-12 + script: + # tricks to hopefully make runs more reliable + - echo "timeout=600" >> /etc/dnf/dnf.conf + - echo "retries=50" >> /etc/dnf/dnf.conf + - echo "keepcache=True" >> /etc/dnf/dnf.conf + + - dnf -y update || dnf -y update + - dnf -y install @development-tools + diffutils + findutils + git + gnupg + java-17-openjdk-devel + openssl + python3 + python3-babel + python3-matplotlib + python3-pip + python3-pycountry + rsync + which + - $pip install sdkmanager + - ./setup.py sdist + - useradd -m -c "test account" --password "fakepassword" testuser + - su testuser --login --command "cd `pwd`; $pip install --user dist/fdroidserver-*.tar.gz" + - test -e ~testuser/.local/share/locale/de/LC_MESSAGES/fdroidserver.mo + - export BUILD_TOOLS_VERSION=`sed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py` + - export ANDROID_HOME=`pwd`/android-sdk + - mkdir -p $ANDROID_HOME/licenses/ + - printf "\n8933bad161af4178b1185d1a37fbf41ea5269c55\nd56f5187479451eabf01fb78af6dfcb131a6481e\n24333f8a63b6825ea9c5514f83c2829b004d1fee" > $ANDROID_HOME/licenses/android-sdk-license + - printf "\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license + - printf "\n79120722343a6f314e0719f863036c702b0e6b2a\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license-old + - mkdir ~/.android + - touch ~/.android/repositories.cfg + - sdkmanager "platform-tools" "build-tools;$BUILD_TOOLS_VERSION" + - chown -R testuser . 
+ - cd tests + - su testuser --login --command + "cd `pwd`; export CI=$CI ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests" + + +macOS: + tags: + - saas-macos-medium-m1 + rules: + - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" + script: + - export HOMEBREW_CURL_RETRIES=10 + - brew update > /dev/null + - brew upgrade + - brew install fdroidserver + + # Android SDK and Java JDK + - brew install --cask android-commandlinetools temurin # temurin is a JDK + + # test suite dependencies + - brew install bash coreutils gnu-sed + # TODO port tests/run-tests to POSIX and gsed, it has a couple GNU-isms like du --bytes + - export PATH="$(brew --prefix fdroidserver)/libexec/bin:$(brew --prefix coreutils)/libexec/gnubin:$PATH" + + - brew autoremove + - brew info fdroidserver + + - export BUILD_TOOLS_VERSION=`gsed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py` + - export ANDROID_HOME="$(brew --prefix)/share/android-commandlinetools" + - mkdir -p "$ANDROID_HOME/licenses" + - echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55" > "$ANDROID_HOME/licenses/android-sdk-license" + - echo -e "\nd56f5187479451eabf01fb78af6dfcb131a6481e" >> "$ANDROID_HOME/licenses/android-sdk-license" + - echo -e "\n24333f8a63b6825ea9c5514f83c2829b004d1fee" >> "$ANDROID_HOME/licenses/android-sdk-license" + - $(brew --prefix)/bin/sdkmanager "build-tools;$BUILD_TOOLS_VERSION" + + - echo "macOS sticks with bash 3.x because of licenses, so avoid new bash syntax" + - /bin/bash --version + - /bin/bash -n tests/run-tests + + # test fdroidserver from git with current package's dependencies + - fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests + + +gradle: + image: debian:trixie-slim + <<: *apt-template + rules: + - changes: + - .gitlab-ci.yml + - makebuildserver + script: + - apt-get install + ca-certificates + git + python3-colorama + python3-packaging + python3-requests + - ./tests/gradle-release-checksums.py + + +# Run an actual build in a simple, faked version of the buildserver guest VM. 
+fdroid build: + image: registry.gitlab.com/fdroid/fdroidserver:buildserver + rules: + - changes: + - .gitlab-ci.yml + - fdroidserver/build.py + - fdroidserver/common.py + - fdroidserver/exception.py + - fdroidserver/metadata.py + - fdroidserver/net.py + - fdroidserver/scanner.py + - fdroidserver/vmtools.py + # for the docker: job which depends on this one + - makebuildserver + - buildserver/* + cache: + key: "$CI_JOB_NAME" + paths: + - .gradle + script: + - apt-get update + - apt-get dist-upgrade + - apt-get clean + + - test -n "$fdroidserver" || source /etc/profile.d/bsenv.sh + + - ln -fsv "$CI_PROJECT_DIR" "$fdroidserver" + + # TODO remove sdkmanager install once it is included in the buildserver image + - apt-get install sdkmanager + - rm -rf "$ANDROID_HOME/tools" # TODO remove once sdkmanager can upgrade installed packages + - sdkmanager "tools" "platform-tools" "build-tools;31.0.0" + + - git ls-remote https://gitlab.com/fdroid/fdroiddata.git master + - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git + - cd fdroiddata + - for d in build logs repo tmp unsigned $home_vagrant/.android; do + test -d $d || mkdir $d; + chown -R vagrant $d; + done + + - export GRADLE_USER_HOME=$home_vagrant/.gradle + - export fdroid="sudo --preserve-env --user vagrant + env PATH=$fdroidserver:$PATH + env PYTHONPATH=$fdroidserver:$fdroidserver/examples + env PYTHONUNBUFFERED=true + env TERM=$TERM + env HOME=$home_vagrant + fdroid" + + - git -C $home_vagrant/gradlew-fdroid pull + + - chown -R vagrant $home_vagrant + - chown -R vagrant $fdroidserver/.git + - chown vagrant $fdroidserver/ + - chown -R vagrant .git + - chown vagrant . + + # try user build + - $fdroid build --verbose --latest org.fdroid.fdroid.privileged + + # try on-server build + - $fdroid build --verbose --on-server --no-tarball --latest org.fdroid.fdroid + + # each `fdroid build --on-server` run expects sudo, then uninstalls it + - if dpkg --list sudo; then echo "sudo should not be still there"; exit 1; fi + - 'if [ ! -f repo/status/running.json ]; then echo "ERROR: running.json does not exist!"; exit 1; fi' + - 'if [ ! -f repo/status/build.json ]; then echo "ERROR: build.json does not exist!"; exit 1; fi' + + +# test the plugin API and specifically the fetchsrclibs plugin, which +# is used by the `fdroid build` job. This uses a fixed commit from +# fdroiddata because that one is known to work, and this is a CI job, +# so it should be isolated from the normal churn of fdroiddata. +plugin_fetchsrclibs: + image: debian:bookworm-slim + <<: *apt-template + rules: + - changes: + - .gitlab-ci.yml + - examples/fdroid_fetchsrclibs.py + - fdroidserver/__main__.py + script: + - apt-get install + curl + git + python3-cffi + python3-matplotlib + python3-nacl + python3-paramiko + python3-pil + python3-pip + python3-pycparser + python3-venv + - python3 -m venv --system-site-packages env + - . env/bin/activate + - export PATH="$CI_PROJECT_DIR:$PATH" + - export PYTHONPATH="$CI_PROJECT_DIR/examples" + # workaround https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1003252 + - export SETUPTOOLS_USE_DISTUTILS=stdlib + - $pip install -e . 
+ - fdroid | grep fetchsrclibs + + - mkdir fdroiddata + - commitid=b9e9a077d720c86ff6fff4dbb341254cc4370b1a + - curl https://gitlab.com/fdroid/fdroiddata/-/archive/${commitid}/fdroiddata-${commitid}.tar.gz + | tar -xz --directory=fdroiddata --strip-components=1 + - cd fdroiddata + - fdroid fetchsrclibs freemap.opentrail:4 --verbose + - test -d build/freemap.opentrail/.git + - test -d build/srclib/andromaps/.git + - test -d build/srclib/freemaplib/.git + - test -d build/srclib/freemaplibProj/.git + - test -d build/srclib/JCoord/.git + - test -d build/srclib/javaproj/.git + + +# test a full update and deploy cycle to gitlab.com +servergitmirrors: + image: debian:bookworm-slim + <<: *apt-template + rules: + - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" + script: + - apt-get install + default-jdk-headless + git + openssh-client + openssl + python3-cffi + python3-cryptography + python3-matplotlib + python3-nacl + python3-pil + python3-pip + python3-pycparser + python3-setuptools + python3-venv + rsync + wget + - apt-get install apksigner + - python3 -m venv --system-site-packages env + - . env/bin/activate + - export PYTHONPATH=`pwd` + - export SETUPTOOLS_USE_DISTUTILS=stdlib # https://github.com/pypa/setuptools/issues/2956 + - $pip install -e . + - mkdir /root/.ssh/ + - ./tests/key-tricks.py + - ssh-keyscan gitlab.com >> /root/.ssh/known_hosts + - test -d /tmp/fdroid/repo || mkdir -p /tmp/fdroid/repo + - cp tests/config.yml tests/keystore.jks /tmp/fdroid/ + - cp tests/repo/com.politedroid_6.apk /tmp/fdroid/repo/ + - cd /tmp/fdroid + - touch fdroid-icon.png + - printf "\nservergitmirrors\x3a 'git@gitlab.com:fdroid/ci-test-servergitmirrors-repo.git'\n" >> config.yml + - $PYTHONPATH/fdroid update --verbose --create-metadata + - $PYTHONPATH/fdroid deploy --verbose + - export DLURL=`grep -Eo 'https://gitlab.com/fdroid/ci-test-servergitmirrors-repo[^"]+' repo/index-v1.json` + - echo $DLURL + - wget $DLURL/index-v1.jar + - diff repo/index-v1.jar index-v1.jar + +Build documentation: + image: debian:bookworm-slim + <<: *python-rules-changes + <<: *apt-template + script: + - apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver + - apt purge fdroidserver + # ignore vendored files + - pydocstyle --verbose --match='(?!apksigcopier|looseversion|setup|test_).*\.py' fdroidserver + - cd docs + - sphinx-apidoc -o ./source ../fdroidserver -M -e + - PYTHONPATH=.. sphinx-autogen -o generated source/*.rst + - PYTHONPATH=.. make html + artifacts: + paths: + - docs/build/html/ + + +# this job will only run in branches called "windows" until the Windows port is complete +Windows: + tags: + - windows + rules: + - if: $CI_COMMIT_BRANCH == "windows" + script: + - Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1" + - choco install --no-progress -y git --force --params "/GitAndUnixToolsOnPath" + - choco install --no-progress -y python3 --version=3.10 + - choco install --no-progress -y jdk8 + - choco install --no-progress -y rsync + - refreshenv + - python -m pip install --upgrade babel pip setuptools + - python -m pip install -e . 
+ + - $files = @(Get-ChildItem tests\test_*.py) + - foreach ($f in $files) { + write-output $f; + python -m unittest $f; + if( $LASTEXITCODE -eq 0 ) { + write-output "SUCCESS $f"; + } else { + write-output "ERROR $f failed"; + } + } + + # these are the tests that must pass + - python -m unittest -k + checkupdates + exception + import_subcommand + test_lint + test_metadata + test_rewritemeta + test_vcs + tests.test_init + tests.test_main + after_script: + - Copy-Item C:\ProgramData\chocolatey\logs\chocolatey.log + artifacts: + when: always + paths: + - "*.log" + allow_failure: + exit_codes: 1 + + +pages: + image: alpine:latest + stage: deploy + script: + - cp docs/build/html public -r # GL Pages needs the files in a directory named "public" + artifacts: + paths: + - public + needs: + - job: "Build documentation" + optional: true + rules: + - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # only publish pages on default (master) branch + + +# This job pushes the official CI docker image based on the master +# branch, so in fdroid/fdroidserver, it should only run on the master +# branch. Otherwise, tags or other branches will overwrite the docker +# image which is supposed to be what is in master. +docker: + dependencies: + - fdroid build + rules: + - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" + changes: + - .gitlab-ci.yml + - makebuildserver + - buildserver/* + image: docker:dind + services: + - docker:dind + variables: + RELEASE_IMAGE: $CI_REGISTRY_IMAGE:buildserver + script: + # git ref names can contain many chars that are not allowed in docker tags + - export TEST_IMAGE=$CI_REGISTRY_IMAGE:$(printf $CI_COMMIT_REF_NAME | sed 's,[^a-zA-Z0-9_.-],_,g') + - cd buildserver + - docker build -t $TEST_IMAGE --build-arg GIT_REV_PARSE_HEAD=$(git rev-parse HEAD) . + - docker tag $TEST_IMAGE $RELEASE_IMAGE + - docker tag $TEST_IMAGE ${RELEASE_IMAGE}-bookworm + - echo $CI_JOB_TOKEN | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com + # This avoids filling up gitlab.com free tier accounts with unused docker images. + - if test -z "$FDROID_PUSH_DOCKER_IMAGE"; then + echo "Skipping docker push to save quota on your gitlab namespace."; + echo "If you want to enable the push, set FDROID_PUSH_DOCKER_IMAGE in"; + echo "https://gitlab.com/$CI_PROJECT_NAMESPACE/fdroidserver/-/settings/ci_cd#js-cicd-variables-settings"; + exit 0; + fi + - docker push $RELEASE_IMAGE + - docker push $RELEASE_IMAGE-bookworm + + +# PUBLISH is the signing server. It has a very minimal manual setup. +PUBLISH: + image: debian:bookworm-backports + <<: *python-rules-changes + script: + - apt-get update + - apt-get -qy upgrade + - apt-get -qy install --no-install-recommends -t bookworm-backports + androguard + apksigner + curl + default-jdk-headless + git + gpg + gpg-agent + python3-asn1crypto + python3-defusedxml + python3-git + python3-ruamel.yaml + python3-yaml + rsync + + # Run only relevant parts of the test suite, other parts will fail + # because of this minimal base setup. 
+ - python3 -m unittest + tests/test_gpgsign.py + tests/test_metadata.py + tests/test_publish.py + tests/test_signatures.py + tests/test_signindex.py + + - cd tests + - mkdir archive + - mkdir unsigned + - cp urzip-release-unsigned.apk unsigned/info.guardianproject.urzip_100.apk + - grep '^key.*pass' config.yml | sed 's,\x3a ,=,' > $CI_PROJECT_DIR/variables + - sed -Ei 's,^(key.*pass|keystore)\x3a.*,\1\x3a {env\x3a \1},' config.yml + - printf '\ngpghome\x3a {env\x3a gpghome}\n' >> config.yml + - | + tee --append $CI_PROJECT_DIR/variables < FestplattenSchnitzel +Hans-Christoph Steiner diff --git a/.safety-policy.yml b/.safety-policy.yml new file mode 100644 index 00000000..ea44e7e6 --- /dev/null +++ b/.safety-policy.yml @@ -0,0 +1,55 @@ +--- + +version: '3.0' + +scanning-settings: + max-depth: 6 + exclude: + +report: + dependency-vulnerabilities: + enabled: true + auto-ignore-in-report: + vulnerabilities: + 52495: + reason: setuptools comes from Debian + expires: '2025-01-31' + 60350: + reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40267 + expires: '2025-01-31' + 60789: + reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40590 + expires: '2025-01-31' + 60841: + reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-41040 + expires: '2025-01-31' + 62044: + reason: "F-Droid doesn't fetch pip dependencies directly from hg/mercurial repositories: https://data.safetycli.com/v/62044/f17/" + expires: '2025-01-31' + 63687: + reason: Only affects Windows https://security-tracker.debian.org/tracker/CVE-2024-22190 + expires: '2026-01-31' + 67599: + reason: Only affects pip when using --extra-index-url, which is never the case in fdroidserver CI. + expires: '2026-05-31' + 70612: + reason: jinja2 is not used by fdroidserver, nor any dependencies I could find via debtree and pipdeptree. + expires: '2026-05-31' + 72132: + reason: We get these packages from Debian, zipp is not used in production, and its only a DoS. + expires: '2026-08-31' + 72236: + reason: setuptools is not used in production to download or install packages, they come from Debian. 
+ expires: '2026-08-31' + +fail-scan-with-exit-code: + dependency-vulnerabilities: + enabled: true + fail-on-any-of: + cvss-severity: + - critical + - high + - medium + +security-updates: + dependency-vulnerabilities: diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..f0fec078 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,5 @@ +{ + "recommendations": [ + "ms-python.python", + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..da31cd7f --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,21 @@ +{ + "python.formatting.blackArgs": [ + "--config=pyproject.toml" + ], + "python.formatting.provider": "black", + "python.linting.banditEnabled": true, + "python.linting.banditArgs": [ + "-ii", + "--ini=.bandit", + ], + "python.linting.enabled": true, + "python.linting.mypyArgs": [ + "--config-file=mypy.ini" + ], + "python.linting.mypyEnabled": true, + "python.linting.flake8Enabled": true, + "python.linting.pylintArgs": [ + "--rcfile=.pylint-rcfile" + ], + "python.linting.pylintEnabled": true, +} diff --git a/.weblate b/.weblate new file mode 100644 index 00000000..cf2e653f --- /dev/null +++ b/.weblate @@ -0,0 +1,3 @@ +[weblate] +url = https://hosted.weblate.org/api/ +translation = f-droid/fdroidserver diff --git a/.well-known/funding-manifest-urls b/.well-known/funding-manifest-urls new file mode 100644 index 00000000..9935b4d4 --- /dev/null +++ b/.well-known/funding-manifest-urls @@ -0,0 +1 @@ +https://f-droid.org/funding.json diff --git a/.yamllint b/.yamllint new file mode 100644 index 00000000..067a389e --- /dev/null +++ b/.yamllint @@ -0,0 +1,7 @@ +--- + +extends: default +rules: + document-start: disable + line-length: disable + truthy: disable diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..6b61a8f2 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,458 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) + +## [2.5.0] - NEXT + +### Removed + +* deploy: `awsaccesskeyid:` and `awssecretkey:` config items removed, use the + standard env vars: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. + +## [2.4.2] - 2025-06-24 + +### Fixed + +* nightly: fix bug that clones nightly repo to wrong location + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1672 +* Sync translations for all supported languages: es pl ru + +## [2.4.1] - 2025-06-23 + +### Added + +* build: Clearer error messages when working with Git. +* verify: generate .json files that list all reports + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1632 + +### Fixed + +* deploy: use master branch when working complete git-mirror repo + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1666 +* update: use ctime/mtime to control _strip_and_copy_image runs + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1665 +* update: If categories.yml only has icon:, then add name: + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1659 +* update: fix handling of Triple-T 1.0.0 graphics + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1652 +* update: never execute any VCS e.g. 
git + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1630 +* config: lazyload environment variables in config.yml + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1645 +* config: make localized name/description/icon optional + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1649 +* lint: add repo_key_sha256 to list of valid config keys + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1643 +* build: calculate all combinations of gradle flavors + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1638 +* build: set SOURCE_DATE_EPOCH from app's git otherwise fdroiddata metadata file + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1653 +* Sync translations for all supported languages: ca cs de fr ga ja pl pt pt_BR + pt_PT ru sq tr uk zh_Hans + +### Removed + +## [2.4.0] - 2025-03-25 + +### Added + +* lint: support the base _config.yml_. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1606 + +### Fixed + +* Expand {env: foo} config syntax to be allowed any place a string is. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1610 +* Only show "unsafe permissions on config.yml" when secrets are present. +* Standardized config files on ruamel.yaml with a YAML 1.2 data format. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1611 +* Brought back error when a package has multiple package types (e.g. xapk and + apk). https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1602 +* Reworked test suite to be entirely based on Python unittest (thanks @mindston). + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1587 +* publish/signindex/gpgsign no longer load the _qrcode_ and _requests_ modules, + and can operate without them installed. +* scanner: add bun.lock as lock file of package.json + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1615 +* index: fail if user sets mirrors:isPrimary wrong + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1617 + https://gitlab.com/fdroid/fdroidserver/-/issues/1125 +* Sync translations for all supported languages: bo ca cs de es fr ga hu it ja + ko nb_NO pl pt pt_BR pt_PT ro ru sq sr sw tr uk zh_Hans zh_Hant + +### Removed + +* checkupdates: remove auto_author: config, it is no longer used. +* Purge support for the long-deprecated _config.py_ config file. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1607 + + +## [2.3.5] - 2025-01-20 + +### Fixed + +* Fix issue where APKs with v1-only signatures and targetSdkVersion < 30 could + be maliciously crafted to bypass AllowedAPKSigningKeys + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1588 +* Ignore apksigner v33.x, it has bugs verifying APKs with v3/v3.1 sigs. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1593 +* Sync translations for: ca cs de es fr ga ja pt_BR pt_PT ru sq sr uk zh_Hans + +## [2.3.4] - 2024-12-12 + +### Fixed + +* Fix localhost network tests on systems with IPv6. +* lint: only error out on missing extlib on versions not archived. + +## [2.3.3] - 2024-12-11 + +### Added + +* verify: `--clean-up-verified` to delete files used when verifying an APK if + the verification was successful. + +### Fixed + +* Support Python 3.13 in the full test suite. +* Sync translations for: ca de fr ja pl ro ru sr ta +* update: only generate _index.png_ when making _index.html_, allowing the repo + operator to set a different repo icon, e.g. not the QR Code. + +## [2.3.2] - 2024-11-26 + +### Fixed + +* install: fix downloading from GitHub Releases and Maven Central. 
+* Sync translations for: ca fa fr pt ru sr ta zh_Hant + +## [2.3.1] - 2024-11-25 + +### Fixed + +* Sync all translations for: cs de es fr ga pt_BR ru sq zh_Hans. +* Drop use of deprecated imghdr library to support Python 3.13. +* Install biplist and pycountry by default on macOS. +* Fixed running test suite out of dist tarball. + +## [2.3.0] - 2024-11-21 + +### Added + +* YAML 1.2 as native format for all _.yml_ files, including metadata and config. +* install: will now fetch _F-Droid.apk_ and install it via `adb`. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1546 +* scanner: scan APK Signing Block for known block types like Google Play + Signature aka "Frosting". + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1555 +* Support Rclone for deploying to many different cloud services. +* deploy: support deploying to GitHub Releases. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1471 +* scanner: support libs.versions.toml + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1526 +* Consider subdir for triple-t metadata discovery in Flutter apps. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1541 +* deploy: added `index_only:` mode for mirroring the index to small hosting + locations. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1420 +* Support publishing repos in AltStore format. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1465 +* Support indexing iOS IPA app files. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1413 +* deploy: _config/mirrors.yml_ file with support for adding per-mirror metadata, + like `countryCode:`. +* Repo's categories are now set in the config files. +* lint: check syntax of config files. +* publish: `--error-on-failed` to exit when signing/verifying fails. +* scanner: `--refresh` and `refresh_config:` to control triggering a refresh of + the rule sets. +* Terminal output colorization and `--color` argument to control it. +* New languages: Catalan (ca), Irish (ga), Japanese (ja), Serbian (sr), and + Swahili (sw). +* Support donation links from `community_bridge`, `buy_me_a_coffee`. + +### Fixed + +* Use last modified time and file size for caching data about scanned APKs + instead of SHA-256 checksum. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1542 +* `repo_web_base_url:` config for generating per-app URLs for viewing in + browsers. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1178 +* `fdroid scanner` flags WebAssembly binary _.wasm_ files. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1562 +* Test suite as standard Python `unittest` setup (thanks @ghost.adh). +* scanner: error on dependency files without lock file. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1504 +* nightly: finding APKs in the wrong directory. (thanks @WrenIX) + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1512 +* `AllowedAPKSigningKeys` works with all single-signer APK signatures. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1466 +* Sync all translations for: cs de it ko pl pt pt_BR pt_PT ro ru sq tr uk + zh_Hans zh_Hant. +* Support Androguard 4.x. +* Support Python 3.12. + +### Removed + +* Drop all uses of _stats/known_apks.txt_ and the `update_stats:` config key. + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1547 +* The `maven:` field is now always a string, with `yes` as a legacy special + value. It is no longer treated like a boolean in any case. +* scanner: jcenter is no longer an allowed Maven repo. 
+* build: `--reset-server` removed (thanks @gotmi1k). + +## [2.2.2] - 2024-04-24 + +### Added + +* Include sdkmanager as dep in setup.py for Homebrew package. + https://github.com/Homebrew/homebrew-core/pull/164510 + +## [2.2.1] - 2023-03-09 + +### Added + +* `download_repo_index_v2()` and `download_repo_index_v2()` API functions + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1323 + +### Fixed + +* Fix OpenJDK detection on different CPU architectures + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1315 + +### Removed + +* Purge all references to `zipalign`, that is delegated to other things + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1316 +* Remove obsolete, unused `buildozer` build type + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1322 + +## [2.2.0] - 2023-02-20 + +### Added +* Support index-v2 format, localizable Anti-Features, Categories +* New entry point for repos, entry.jar, signed with modern algorithms +* New config/ subdirectory for localizable configuration +* Script entries in metadata files (init, prebuild, build, etc) now handled as + lists so they now support using && or ; in the script, and behave like + .gitlab-ci.yml and other CI YAML. +* GPG signatures for index-v1.json and index-v2.json +* Use default.txt as fallback changelog when inserting fastlane metadata +* scanner: F-Droid signatures now maintained in fdroid/suss +* scanner: maintain signature sources in config.yml, including Exodus Privacy +* scanner: use dexdump for class names +* scanner: directly scan APK files when given a path +* scanner: recursively scan APKs for DEX and ZIP using file magic +* signindex: validate index files before signing +* update: set ArchivePolicy based on VercodeOperation/signature +* Include IPFS CIDv1 in index-v2.json for hosting repos on IPFS +* Per-repo beta channel configuration +* Add Czech translation + +### Fixed + +* apksigner v30 or higher now required for verifying and signing APKs +* 3.9 as minimum supported Python version +* Lots of translation updates +* Better pip packaging +* nightly: big overhaul for reliable operation on all Debian/Ubuntu versions +* Improved logging, fewer confusing verbose messages +* scanner: fix detection of binary files without extension +* import: more reliable operation, including Flutter apps +* Support Java 20 and up + +### Removed +* Remove obsolete `fdroid stats` command + +## [2.1.1] - 2022-09-06 + +* gradlew-fdroid: Include latest versions and checksums +* nightly: update Raw URLs to fix breakage and avoid redirects +* signindex: gpg-sign index-v1.json and deploy it +* update: fix --use-date-from-apk when used with files (#1012) + +## [2.1] - 2022-02-22 + +For a more complete overview, see the [2.1 +milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/11) + +## [2.0.5] - 2022-09-06 + +### Fixed + +* gradlew-fdroid: Include latest versions and checksums +* nightly: add support for GitHub Actions +* nightly: update Raw URLs to fix breakage and avoid redirects +* update: fix --use-date-from-apk when used with files (#1012) +* Fix GitLab CI + +## [2.0.4] - 2022-06-29 + +### Fixed + +* deploy: ensure progress is instantiated before trying to use it +* signindex: gpg-sign index-v1.json and deploy it + [1080](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1080) + [1124](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1124) + +## [2.0.3] - 2021-07-01 + +### Fixed + +* Support AutoUpdateMode: Version without pattern + 
[931](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/931) + +## [2.0.2] - 2021-06-01 + +### Fixed + +* fix "ruamel round_trip_dump will be removed" + [932](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/932) + +## [2.0.1] - 2021-03-09 + +### Fixed + +* metadata: stop setting up source repo when running lint/rewritemeta +* scanner: show error if scan_binary fails to run apkanalyzer +* common: properly parse version from NDK's source.properties +* update: stop extracting and storing XML icons, they're useless +* index: raise error rather than crash on bad repo file +* update: handle large, corrupt, or inaccessible fastlane/triple-t files +* Update SPDX License List +* checkupdates: set User-Agent to make gitlab.com happy +* Run push_binary_transparency only once + +## [2.0] - 2021-01-31 + +For a more complete overview, see the [2.0 +milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/10) + +### Added +* `fdroid update` inserts donation links based on upstream's _FUNDING.yml_ + ([!754](https://gitlab.com/fdroid/fdroidserver/merge_requests/754)) +* Stable, public API for most useful functions + ([!798](https://gitlab.com/fdroid/fdroidserver/merge_requests/798)) +* Load with any YAML lib and use with the API, no more custom parser needed + ([!826](https://gitlab.com/fdroid/fdroidserver/merge_requests/826)) + ([!838](https://gitlab.com/fdroid/fdroidserver/merge_requests/838)) +* _config.yml_ for a safe, easy, standard configuration format + ([!663](https://gitlab.com/fdroid/fdroidserver/merge_requests/663)) +* Config options can be set from environment variables using this syntax: + `keystorepass: {env: keystorepass}` + ([!669](https://gitlab.com/fdroid/fdroidserver/merge_requests/669)) +* Add SHA256 to filename of repo graphics + ([!669](https://gitlab.com/fdroid/fdroidserver/merge_requests/669)) +* Support for srclibs metadata in YAML format + ([!700](https://gitlab.com/fdroid/fdroidserver/merge_requests/700)) +* Check srclibs and app-metadata files with yamllint + ([!721](https://gitlab.com/fdroid/fdroidserver/merge_requests/721)) +* Added plugin system for adding subcommands to `fdroid` + ([!709](https://gitlab.com/fdroid/fdroidserver/merge_requests/709)) +* `fdroid update`, `fdroid publish`, and `fdroid signindex` now work + with SmartCard HSMs, specifically the NitroKey HSM + ([!779](https://gitlab.com/fdroid/fdroidserver/merge_requests/779)) + ([!782](https://gitlab.com/fdroid/fdroidserver/merge_requests/782)) +* `fdroid update` support for Triple-T Gradle Play Publisher v2.x + ([!683](https://gitlab.com/fdroid/fdroidserver/merge_requests/683)) +* Translated into: bo de es fr hu it ko nb_NO pl pt pt_BR pt_PT ru sq tr uk + zh_Hans zh_Hant + +### Fixed +* Smoother process for signing APKs with `apksigner` + ([!736](https://gitlab.com/fdroid/fdroidserver/merge_requests/736)) + ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821)) +* `apksigner` is used by default on new repos +* All parts except _build_ and _publish_ work without the Android SDK + ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821)) +* Description: is now passed to clients unchanged, no HTML conversion + ([!828](https://gitlab.com/fdroid/fdroidserver/merge_requests/828)) +* Lots of improvements for scanning for proprietary code and trackers + ([!748](https://gitlab.com/fdroid/fdroidserver/merge_requests/748)) + ([!REPLACE](https://gitlab.com/fdroid/fdroidserver/merge_requests/REPLACE)) + ([!844](https://gitlab.com/fdroid/fdroidserver/merge_requests/844)) +* `fdroid 
mirror` now generates complete, working local mirror repos
+* fix build-logs disappearing when deploying
+  ([!685](https://gitlab.com/fdroid/fdroidserver/merge_requests/685))
+* do not crash when system encoding can not be retrieved
+  ([!671](https://gitlab.com/fdroid/fdroidserver/merge_requests/671))
+* checkupdates: UpdateCheckIgnore gets properly observed now
+  ([!659](https://gitlab.com/fdroid/fdroidserver/merge_requests/659),
+  [!660](https://gitlab.com/fdroid/fdroidserver/merge_requests/660))
+* keep yaml metadata when rewrite failed
+  ([!658](https://gitlab.com/fdroid/fdroidserver/merge_requests/658))
+* import: `template.yml` now supports omitting values
+  ([!657](https://gitlab.com/fdroid/fdroidserver/merge_requests/657))
+* build: deploying buildlogs with rsync
+  ([!651](https://gitlab.com/fdroid/fdroidserver/merge_requests/651))
+* `fdroid init` generates PKCS12 keystores, drop Java < 8 support
+  ([!801](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/801))
+* Parse Version Codes specified in hex
+  ([!692](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/692))
+* Major refactoring on core parts of code to be more Pythonic
+  ([!756](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/756))
+* `fdroid init` now works when installed with pip
+
+### Removed
+* Removed all support for _.txt_ and _.json_ metadata
+  ([!772](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/772))
+* dropped support for Debian 8 _jessie_ and 9 _stretch_
+* dropped support for Ubuntu releases older than bionic 18.04
+* dropped `fdroid server update` and `fdroid server init`,
+  use `fdroid deploy`
+* `fdroid dscanner` was removed.
+  ([!711](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/711))
+* `make_current_version_link` is now off by default
+* Dropped `force_build_tools` config option
+  ([!797](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/797))
+* Dropped `accepted_formats` config option, there is only _.yml_ now
+  ([!818](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/818))
+* `Provides:` was removed as a metadata field
+  ([!654](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/654))
+* Remove unused `latestapps.dat`
+  ([!794](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/794))
+
+
+## [1.1.4] - 2019-08-15
+### Fixed
+* include bitcoin validation regex required by fdroiddata
+* merged Debian patches to fix test suite there
+
+## [1.1.3] - 2019-07-03
+### Fixed
+* fixed test suite when run from source tarball
+* fixed test runs in Debian
+
+## [1.1.2] - 2019-03-29
+### Fixed
+* fix bug while downloading repo index
+  ([!636](https://gitlab.com/fdroid/fdroidserver/merge_requests/636))
+
+## [1.1.1] - 2019-02-03
+### Fixed
+* support APK Signature v2 and v3
+* all SDK Version values are output as integers in the index JSON
+* take graphics from Fastlane dirs using any valid RFC5646 locale
+* print warning if not running in UTF-8 encoding
+* fdroid build: hide --on-server cli flag
+
+## [1.1] - 2019-01-28
+### Fixed
+* a huge update with many fixes and new features:
+  https://gitlab.com/fdroid/fdroidserver/milestones/7
+* can run without an Android SDK installed
+* much more reliable operation with large binary APK collections
+* sync all translations, including newly added languages: hu it ko pl pt_PT ru
+* many security fixes, based on the security audit
+* NoSourceSince automatically adds SourceGone Anti-Feature
+* aapt scraping works with all known aapt versions
+* smoother mirror setups
+* much faster `fdroid update` when using 
androguard + +[Unreleased]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.4...master +[1.1.4]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.3...1.1.4 +[1.1.3]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.2...1.1.3 +[1.1.2]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.1...1.1.2 +[1.1.1]: https://gitlab.com/fdroid/fdroidserver/compare/1.1...1.1.1 +[1.1]: https://gitlab.com/fdroid/fdroidserver/tags/1.1 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..226c0854 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,66 @@ +There are many ways to contribute, you can find out all the ways on our +[Contribute](https://f-droid.org/contribute/) page. Find out how to get +involved, including as a translator, data analyst, tester, helping others, and +much more! + +## Contributing Code + +We want more contributors and want different points of view represented. Some +parts of the code make contributing quick and easy. Other parts make it +difficult and slow, so we ask that contributors have patience. + +To submit a patch, please open a merge request on GitLab. If you are thinking of +making a large contribution, open an issue or merge request before starting +work, to get comments from the community. Someone may be already working on the +same thing, or there may be reasons why that feature isn't implemented. Once +there is agreement, then the work might need to proceed asynchronously with the +core team towards the solution. + +To make it easier to review and accept your merge request, please follow these +guidelines: + +* When at all possible, include tests. These can either be added to an existing + test, or completely new. Practicing test-driven development will make it + easiest to get merged. That usually means starting your work by writing tests. + +* See [help-wanted](https://gitlab.com/fdroid/fdroidserver/-/issues/?sort=updated_desc&state=opened&label_name%5B%5D=help-wanted) + tags for things that maintainers have marked as things they want to see + merged. + +* The amount of technical debt varies widely in this code base. There are some + parts where the code is nicely isolated with good test coverage. There are + other parts that are tangled and complicated, full of technical debt, and + difficult to test. + +* The general approach is to treat the tangled and complicated parts as an + external API (albeit a bad one). That means it needs to stay unchanged as much + as possible. Changes to those parts of the code will trigger a migration, + which can require a lot of time and coordination. When there is time for large + development efforts, we refactor the code to get rid of those areas of + technical debt. + +* We use [_black_](https://black.readthedocs.io/) code format, run `black .` to + format the code. Whenever editing code in any file, the new code should be + formatted as _black_. Some files are not yet fully in _black_ format (see + _pyproject.toml_), our goal is to opportunistically convert the code whenever + possible. As of the time of this writing, forcing the code format on all files + would be too disruptive. The officially supported _black_ version is the one + in Debian/stable. + +* Many of the tests run very fast and can be run interactively in isolation. + Some of the essential test cases run slowly because they do things like + signing files and generating signing keys. + +* Some parts of the code are difficult to test, and currently require a + relatively complete production setup in order to effectively test them. 
That + is mostly the code around building packages, managing the disposable VM, and + scheduling build jobs to run. + +* For user visible changes (API changes, behaviour changes, etc.), consider + adding a note in _CHANGELOG.md_. This could be a summarizing description of + the change, and could explain the grander details. Have a look through + existing entries for inspiration. Please note that this is NOT simply a copy + of git-log one-liners. Also note that security fixes get an entry in + _CHANGELOG.md_. This file helps users get more in-depth information of what + comes with a specific release without having to sift through the higher noise + ratio in git-log. diff --git a/COPYING b/LICENSE similarity index 100% rename from COPYING rename to LICENSE diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..93307ace --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,945 @@ +include buildserver/config.buildserver.yml +include buildserver/provision-android-ndk +include buildserver/provision-android-sdk +include buildserver/provision-apt-get-install +include buildserver/provision-apt-proxy +include buildserver/provision-gradle +include buildserver/setup-env-vars +include buildserver/Vagrantfile +include CHANGELOG.md +include completion/bash-completion +include examples/config.yml +include examples/fdroid_exportkeystore.py +include examples/fdroid_export_keystore_to_nitrokey.py +include examples/fdroid_extract_repo_pubkey.py +include examples/fdroid_fetchsrclibs.py +include examples/fdroid_nitrokeyimport.py +include examples/opensc-fdroid.cfg +include examples/public-read-only-s3-bucket-policy.json +include examples/template.yml +include examples/Vagrantfile.yaml +include gradlew-fdroid +include LICENSE +include locale/ba/LC_MESSAGES/fdroidserver.po +include locale/bo/LC_MESSAGES/fdroidserver.po +include locale/ca/LC_MESSAGES/fdroidserver.po +include locale/cs/LC_MESSAGES/fdroidserver.po +include locale/de/LC_MESSAGES/fdroidserver.po +include locale/es/LC_MESSAGES/fdroidserver.po +include locale/fr/LC_MESSAGES/fdroidserver.po +include locale/ga/LC_MESSAGES/fdroidserver.po +include locale/hu/LC_MESSAGES/fdroidserver.po +include locale/it/LC_MESSAGES/fdroidserver.po +include locale/ja/LC_MESSAGES/fdroidserver.po +include locale/ko/LC_MESSAGES/fdroidserver.po +include locale/nb_NO/LC_MESSAGES/fdroidserver.po +include locale/pl/LC_MESSAGES/fdroidserver.po +include locale/pt/LC_MESSAGES/fdroidserver.po +include locale/pt_BR/LC_MESSAGES/fdroidserver.po +include locale/pt_PT/LC_MESSAGES/fdroidserver.po +include locale/ro/LC_MESSAGES/fdroidserver.po +include locale/ru/LC_MESSAGES/fdroidserver.po +include locale/sq/LC_MESSAGES/fdroidserver.po +include locale/sr/LC_MESSAGES/fdroidserver.po +include locale/sw/LC_MESSAGES/fdroidserver.po +include locale/tr/LC_MESSAGES/fdroidserver.po +include locale/uk/LC_MESSAGES/fdroidserver.po +include locale/zh_Hans/LC_MESSAGES/fdroidserver.po +include locale/zh_Hant/LC_MESSAGES/fdroidserver.po +include makebuildserver +include README.md +include tests/aosp_testkey_debug.keystore +include tests/apk.embedded_1.apk +include tests/bad-unicode-*.apk +include tests/build-tools/17.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/17.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/17.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/17.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/17.0.0/aapt-output-com.politedroid_6.txt +include 
tests/build-tools/17.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/17.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/17.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/17.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/17.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/17.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/18.1.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/18.1.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/18.1.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/18.1.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/18.1.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/18.1.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/18.1.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/18.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/18.1.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/18.1.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/18.1.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/19.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/19.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/19.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/19.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/19.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/19.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/19.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/19.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/19.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/19.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/19.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/19.1.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/19.1.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/19.1.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/19.1.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/19.1.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/19.1.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/19.1.0/aapt-output-info.guardianproject.urzip_100.txt +include 
tests/build-tools/19.1.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/19.1.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/19.1.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/19.1.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/20.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/20.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/20.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/20.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/20.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/20.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/20.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/20.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/20.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/20.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/20.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/21.1.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/21.1.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/21.1.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/21.1.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/21.1.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/21.1.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/21.1.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/21.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/21.1.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/21.1.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/21.1.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/21.1.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/21.1.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/21.1.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/21.1.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/21.1.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/21.1.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/21.1.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/21.1.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/21.1.2/aapt-output-obb.main.oldversion_1444412523.txt +include 
tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/21.1.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/21.1.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/22.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/22.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/22.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/22.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/22.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/22.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/22.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/22.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/22.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/22.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/22.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/22.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/22.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/22.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/22.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/22.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/22.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/22.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/22.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/22.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/22.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/22.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/23.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/23.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/23.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/23.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/23.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/23.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/23.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/23.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/23.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101615.txt +include 
tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/23.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/23.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/23.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/23.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/23.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/23.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/23.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/23.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/23.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/23.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/23.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/23.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/23.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/23.0.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/23.0.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/23.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/23.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/23.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/23.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/23.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/23.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/23.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/23.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/23.0.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/23.0.3/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/23.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/23.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/23.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/23.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/23.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/23.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/23.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/23.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt +include 
tests/build-tools/23.0.3/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/23.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/24.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/24.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/24.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/24.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/24.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/24.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/24.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/24.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/24.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/24.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/24.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/24.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/24.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/24.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/24.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/24.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/24.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/24.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/24.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/24.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/24.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/24.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/24.0.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/24.0.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/24.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/24.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/24.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/24.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/24.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/24.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/24.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/24.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/24.0.2/aapt-output-souch.smsbypass_9.txt +include 
tests/build-tools/24.0.3/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/24.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/24.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/24.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/24.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/24.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/24.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/24.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/24.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/24.0.3/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/24.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/25.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/25.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/25.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/25.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/25.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/25.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/25.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/25.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/25.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/25.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/25.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/25.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/25.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/25.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/25.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/25.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/25.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/25.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/25.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/25.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/25.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/25.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/25.0.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/25.0.2/aapt-output-com.politedroid_3.txt +include 
tests/build-tools/25.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/25.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/25.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/25.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/25.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/25.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/25.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/25.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/25.0.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/25.0.3/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/25.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/25.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/25.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/25.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/25.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/25.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/25.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/25.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/25.0.3/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/25.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/26.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/26.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/26.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/26.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/26.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/26.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/26.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/26.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/26.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/26.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/26.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/26.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/26.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/26.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/26.0.1/aapt-output-com.politedroid_5.txt +include 
tests/build-tools/26.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/26.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/26.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/26.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/26.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/26.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/26.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/26.0.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/26.0.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/26.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/26.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/26.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/26.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/26.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/26.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/26.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/26.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/26.0.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/26.0.3/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/26.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/26.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/26.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/26.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/26.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/26.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/26.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/26.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/26.0.3/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/26.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/27.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/27.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/27.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/27.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/27.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/27.0.0/aapt-output-duplicate.permisssions_9999999.txt +include 
tests/build-tools/27.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/27.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/27.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/27.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/27.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/27.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/27.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/27.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/27.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/27.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/27.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/27.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/27.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/27.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/27.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/27.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/27.0.2/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/27.0.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/27.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/27.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/27.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/27.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/27.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/27.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/27.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/27.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/27.0.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/27.0.3/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/27.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/27.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/27.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/27.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/27.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/27.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/27.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include 
tests/build-tools/27.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/27.0.3/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/27.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/28.0.0/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/28.0.0/aapt-output-com.politedroid_3.txt +include tests/build-tools/28.0.0/aapt-output-com.politedroid_4.txt +include tests/build-tools/28.0.0/aapt-output-com.politedroid_5.txt +include tests/build-tools/28.0.0/aapt-output-com.politedroid_6.txt +include tests/build-tools/28.0.0/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/28.0.0/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/28.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/28.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/28.0.0/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/28.0.0/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/28.0.1/aapt-output-com.moez.QKSMS_182.txt +include tests/build-tools/28.0.1/aapt-output-com.politedroid_3.txt +include tests/build-tools/28.0.1/aapt-output-com.politedroid_4.txt +include tests/build-tools/28.0.1/aapt-output-com.politedroid_5.txt +include tests/build-tools/28.0.1/aapt-output-com.politedroid_6.txt +include tests/build-tools/28.0.1/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/28.0.1/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/28.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/28.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/28.0.1/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/28.0.1/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/28.0.2/aapt-output-com.politedroid_3.txt +include tests/build-tools/28.0.2/aapt-output-com.politedroid_4.txt +include tests/build-tools/28.0.2/aapt-output-com.politedroid_5.txt +include tests/build-tools/28.0.2/aapt-output-com.politedroid_6.txt +include tests/build-tools/28.0.2/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/28.0.2/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/28.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/28.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101615.txt +include 
tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/28.0.2/aapt-output-org.droidtr.keyboard_34.txt +include tests/build-tools/28.0.2/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/28.0.3/aapt-output-com.example.test.helloworld_1.txt +include tests/build-tools/28.0.3/aapt-output-com.politedroid_3.txt +include tests/build-tools/28.0.3/aapt-output-com.politedroid_4.txt +include tests/build-tools/28.0.3/aapt-output-com.politedroid_5.txt +include tests/build-tools/28.0.3/aapt-output-com.politedroid_6.txt +include tests/build-tools/28.0.3/aapt-output-duplicate.permisssions_9999999.txt +include tests/build-tools/28.0.3/aapt-output-info.guardianproject.urzip_100.txt +include tests/build-tools/28.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt +include tests/build-tools/28.0.3/aapt-output-no.min.target.sdk_987.txt +include tests/build-tools/28.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101613.txt +include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101615.txt +include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101617.txt +include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt +include tests/build-tools/28.0.3/aapt-output-souch.smsbypass_9.txt +include tests/build-tools/generate.sh +include tests/check-fdroid-apk +include tests/com.fake.IpaApp_1000000000001.ipa +include tests/config.yml +include tests/config/antiFeatures.yml +include tests/config/categories.yml +include tests/config/de/antiFeatures.yml +include tests/config/fa/antiFeatures.yml +include tests/config/ic_antifeature_ads.xml +include tests/config/ic_antifeature_disabledalgorithm.xml +include tests/config/ic_antifeature_knownvuln.xml +include tests/config/ic_antifeature_nonfreeadd.xml +include tests/config/ic_antifeature_nonfreeassets.xml +include tests/config/ic_antifeature_nonfreedep.xml +include tests/config/ic_antifeature_nonfreenet.xml +include tests/config/ic_antifeature_nosourcesince.xml +include tests/config/ic_antifeature_nsfw.xml +include tests/config/ic_antifeature_tracking.xml +include tests/config/ic_antifeature_upstreamnonfree.xml +include tests/config/ro/antiFeatures.yml +include tests/config/zh-rCN/antiFeatures.yml +include tests/corrupt-featureGraphic.png +include tests/dummy-keystore.jks +include tests/dump_internal_metadata_format.py +include tests/extra/manual-vmtools-test.py +include tests/funding-usernames.yaml +include tests/get_android_tools_versions/android-ndk-r10e/RELEASE.TXT +include tests/get_android_tools_versions/android-sdk/ndk-bundle/package.xml +include tests/get_android_tools_versions/android-sdk/ndk-bundle/source.properties +include tests/get_android_tools_versions/android-sdk/ndk/11.2.2725575/source.properties +include tests/get_android_tools_versions/android-sdk/ndk/17.2.4988734/source.properties +include tests/get_android_tools_versions/android-sdk/ndk/21.3.6528147/source.properties +include tests/get_android_tools_versions/android-sdk/patcher/v4/source.properties +include tests/get_android_tools_versions/android-sdk/platforms/android-30/source.properties +include tests/get_android_tools_versions/android-sdk/skiaparser/1/source.properties +include tests/get_android_tools_versions/android-sdk/tools/source.properties +include tests/gnupghome/pubring.gpg +include tests/gnupghome/random_seed +include tests/gnupghome/secring.gpg +include 
tests/gnupghome/trustdb.gpg +include tests/gradle-maven-blocks.yaml +include tests/gradle-release-checksums.py +include tests/IsMD5Disabled.java +include tests/issue-1128-min-sdk-30-poc.apk +include tests/issue-1128-poc1.apk +include tests/issue-1128-poc2.apk +include tests/issue-1128-poc3a.apk +include tests/issue-1128-poc3b.apk +include tests/janus.apk +include tests/key-tricks.py +include tests/keystore.jks +include tests/metadata-rewrite-yml/app.with.special.build.params.yml +include tests/metadata-rewrite-yml/fake.ota.update.yml +include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml +include tests/metadata/apk/info.guardianproject.urzip.yaml +include tests/metadata/apk/org.dyndns.fules.ck.yaml +include tests/metadata/app.with.special.build.params.yml +include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Ads.txt +include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Tracking.txt +include tests/metadata/app.with.special.build.params/en-US/antifeatures/Ads.txt +include tests/metadata/app.with.special.build.params/en-US/antifeatures/NoSourceSince.txt +include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/49_Tracking.txt +include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/50_Ads.txt +include tests/metadata/com.politedroid.yml +include tests/metadata/dump/app.with.special.build.params.yaml +include tests/metadata/dump/com.politedroid.yaml +include tests/metadata/dump/org.adaway.yaml +include tests/metadata/dump/org.smssecure.smssecure.yaml +include tests/metadata/dump/org.videolan.vlc.yaml +include tests/metadata/duplicate.permisssions.yml +include tests/metadata/fake.ota.update.yml +include tests/metadata/info.guardianproject.checkey.yml +include tests/metadata/info.guardianproject.checkey/en-US/description.txt +include tests/metadata/info.guardianproject.checkey/en-US/name.txt +include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png +include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png +include tests/metadata/info.guardianproject.checkey/en-US/summary.txt +include tests/metadata/info.guardianproject.checkey/ja-JP/name.txt +include tests/metadata/info.guardianproject.urzip.yml +include tests/metadata/info.guardianproject.urzip/en-US/changelogs/100.txt +include tests/metadata/info.guardianproject.urzip/en-US/changelogs/default.txt +include tests/metadata/info.guardianproject.urzip/en-US/full_description.txt +include tests/metadata/info.guardianproject.urzip/en-US/images/featureGraphic.png +include tests/metadata/info.guardianproject.urzip/en-US/images/icon.png +include tests/metadata/info.guardianproject.urzip/en-US/short_description.txt +include tests/metadata/info.guardianproject.urzip/en-US/title.txt +include tests/metadata/info.guardianproject.urzip/en-US/video.txt +include tests/metadata/info.zwanenburg.caffeinetile.yml +include tests/metadata/no.min.target.sdk.yml +include tests/metadata/obb.main.oldversion.yml +include tests/metadata/obb.main.twoversions.yml +include tests/metadata/obb.mainpatch.current.yml +include tests/metadata/org.adaway.yml +include tests/metadata/org.fdroid.ci.test.app.yml +include tests/metadata/org.fdroid.fdroid.yml +include tests/metadata/org.maxsdkversion.yml +include tests/metadata/org.smssecure.smssecure.yml +include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.RSA +include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.SF +include 
tests/metadata/org.smssecure.smssecure/signatures/134/MANIFEST.MF +include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.RSA +include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.SF +include tests/metadata/org.smssecure.smssecure/signatures/135/MANIFEST.MF +include tests/metadata/org.videolan.vlc.yml +include tests/metadata/raw.template.yml +include tests/metadata/souch.smsbypass.yml +include tests/minimal_targetsdk_30_unsigned.apk +include tests/Norway_bouvet_europe_2.obf.zip +include tests/no_targetsdk_minsdk1_unsigned.apk +include tests/no_targetsdk_minsdk30_unsigned.apk +include tests/openssl-version-check-test.py +include tests/org.bitbucket.tickytacky.mirrormirror_1.apk +include tests/org.bitbucket.tickytacky.mirrormirror_2.apk +include tests/org.bitbucket.tickytacky.mirrormirror_3.apk +include tests/org.bitbucket.tickytacky.mirrormirror_4.apk +include tests/org.dyndns.fules.ck_20.apk +include tests/org.sajeg.fallingblocks_3.apk +include tests/repo/com.example.test.helloworld_1.apk +include tests/repo/com.politedroid_3.apk +include tests/repo/com.politedroid_4.apk +include tests/repo/com.politedroid_5.apk +include tests/repo/com.politedroid_6.apk +include tests/repo/duplicate.permisssions_9999999.apk +include tests/repo/entry.json +include tests/repo/fake.ota.update_1234.zip +include tests/repo/index-v1.json +include tests/repo/index-v2.json +include tests/repo/index.xml +include tests/repo/info.zwanenburg.caffeinetile_4.apk +include tests/repo/main.1101613.obb.main.twoversions.obb +include tests/repo/main.1101615.obb.main.twoversions.obb +include tests/repo/main.1434483388.obb.main.oldversion.obb +include tests/repo/main.1619.obb.mainpatch.current.obb +include tests/repo/no.min.target.sdk_987.apk +include tests/repo/obb.main.oldversion_1444412523.apk +include tests/repo/obb.main.twoversions_1101613.apk +include tests/repo/obb.main.twoversions_1101615.apk +include tests/repo/obb.main.twoversions_1101617.apk +include tests/repo/obb.main.twoversions_1101617_src.tar.gz +include tests/repo/obb.mainpatch.current/en-US/featureGraphic.png +include tests/repo/obb.mainpatch.current/en-US/icon.png +include tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png +include tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png +include tests/repo/obb.mainpatch.current_1619.apk +include tests/repo/obb.mainpatch.current_1619_another-release-key.apk +include tests/repo/org.maxsdkversion_4.apk +include tests/repo/org.videolan.vlc/en-US/icon.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot15.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot18.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot20.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot22.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png +include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png +include 
tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot8.png +include tests/repo/patch.1619.obb.mainpatch.current.obb +include tests/repo/souch.smsbypass_9.apk +include tests/repo/urzip-*.apk +include tests/repo/v1.v2.sig_1020.apk +include tests/run-tests +include tests/SANAPPSI.RSA +include tests/SANAPPSI.SF +include tests/shared_test_code.py +include tests/signindex/guardianproject-v1.jar +include tests/signindex/guardianproject.jar +include tests/signindex/testy.jar +include tests/signindex/unsigned.jar +include tests/source-files/at.bitfire.davdroid/build.gradle +include tests/source-files/catalog.test/app/build.gradle +include tests/source-files/catalog.test/build.gradle.kts +include tests/source-files/catalog.test/buildSrc/build.gradle.kts +include tests/source-files/catalog.test/buildSrc/settings.gradle.kts +include tests/source-files/catalog.test/buildSrc2/build.gradle.kts +include tests/source-files/catalog.test/buildSrc2/settings.gradle.kts +include tests/source-files/catalog.test/core/build.gradle +include tests/source-files/catalog.test/gradle/libs.versions.toml +include tests/source-files/catalog.test/libs.versions.toml +include tests/source-files/catalog.test/settings.gradle.kts +include tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle +include tests/source-files/cn.wildfirechat.chat/build.gradle +include tests/source-files/cn.wildfirechat.chat/chat/build.gradle +include tests/source-files/cn.wildfirechat.chat/client/build.gradle +include tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml +include tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle +include tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle +include tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle +include tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle +include tests/source-files/cn.wildfirechat.chat/push/build.gradle +include tests/source-files/cn.wildfirechat.chat/settings.gradle +include tests/source-files/com.anpmech.launcher/app/build.gradle +include tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml +include tests/source-files/com.anpmech.launcher/build.gradle +include tests/source-files/com.anpmech.launcher/settings.gradle +include tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle +include tests/source-files/com.github.shadowsocks/core/build.gradle.kts +include tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts +include tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml +include 
tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml +include tests/source-files/com.infomaniak.mail/settings.gradle +include tests/source-files/com.integreight.onesheeld/build.gradle +include tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties +include tests/source-files/com.integreight.onesheeld/localeapi/build.gradle +include tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml +include tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle +include tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml +include tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle +include tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml +include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle +include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml +include tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle +include tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml +include tests/source-files/com.integreight.onesheeld/settings.gradle +include tests/source-files/com.jens.automation2/app/build.gradle +include tests/source-files/com.jens.automation2/build.gradle +include tests/source-files/com.kunzisoft.testcase/build.gradle +include tests/source-files/com.lolo.io.onelist/app/build.gradle.kts +include tests/source-files/com.lolo.io.onelist/build.gradle.kts +include tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml +include tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties +include tests/source-files/com.lolo.io.onelist/settings.gradle +include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt +include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt +include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt +include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt +include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt +include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt +include tests/source-files/com.nextcloud.client/build.gradle +include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt +include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt +include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt +include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt +include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt +include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt +include tests/source-files/com.seafile.seadroid2/app/build.gradle +include tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts +include tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml +include tests/source-files/de.varengold.activeTAN/build.gradle 
+include tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts +include tests/source-files/eu.siacs.conversations/build.gradle +include tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt +include tests/source-files/fdroid/fdroidclient/AndroidManifest.xml +include tests/source-files/fdroid/fdroidclient/build.gradle +include tests/source-files/firebase-allowlisted/app/build.gradle +include tests/source-files/firebase-allowlisted/build.gradle +include tests/source-files/firebase-suspect/app/build.gradle +include tests/source-files/firebase-suspect/build.gradle +include tests/source-files/flavor.test/build.gradle +include tests/source-files/info.guardianproject.ripple/build.gradle +include tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml +include tests/source-files/lockfile.test/flutter/pubspec.lock +include tests/source-files/lockfile.test/flutter/pubspec.yaml +include tests/source-files/lockfile.test/javascript/package.json +include tests/source-files/lockfile.test/javascript/yarn.lock +include tests/source-files/lockfile.test/rust/subdir/Cargo.lock +include tests/source-files/lockfile.test/rust/subdir/Cargo.toml +include tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml +include tests/source-files/lockfile.test/rust/subdir2/Cargo.toml +include tests/source-files/open-keychain/open-keychain/build.gradle +include tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle +include tests/source-files/org.mozilla.rocket/app/build.gradle +include tests/source-files/org.noise_planet.noisecapture/app/build.gradle +include tests/source-files/org.noise_planet.noisecapture/settings.gradle +include tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle +include tests/source-files/org.piepmeyer.gauguin/build.gradle.kts +include tests/source-files/org.piepmeyer.gauguin/libs.versions.toml +include tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts +include tests/source-files/org.tasks/app/build.gradle.kts +include tests/source-files/org.tasks/build.gradle +include tests/source-files/org.tasks/build.gradle.kts +include tests/source-files/org.tasks/buildSrc/build.gradle.kts +include tests/source-files/org.tasks/settings.gradle.kts +include tests/source-files/osmandapp/osmand/build.gradle +include tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties +include tests/source-files/OtakuWorld/build.gradle +include tests/source-files/realm/react-native/android/build.gradle +include tests/source-files/se.manyver/android/app/build.gradle +include tests/source-files/se.manyver/android/build.gradle +include tests/source-files/se.manyver/android/gradle.properties +include tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties +include tests/source-files/se.manyver/android/settings.gradle +include tests/source-files/se.manyver/app.json +include tests/source-files/se.manyver/index.android.js +include tests/source-files/se.manyver/package.json +include tests/source-files/se.manyver/react-native.config.js +include tests/source-files/ut.ewh.audiometrytest/app/build.gradle +include tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml +include tests/source-files/ut.ewh.audiometrytest/build.gradle +include tests/source-files/ut.ewh.audiometrytest/settings.gradle +include tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties +include tests/source-files/Zillode/syncthing-silk/build.gradle +include 
tests/SpeedoMeterApp.main_1.apk +include tests/test_build.py +include tests/test_checkupdates.py +include tests/test_common.py +include tests/test_deploy.py +include tests/test_exception.py +include tests/test_gradlew-fdroid +include tests/test_import_subcommand.py +include tests/test_index.py +include tests/test_init.py +include tests/test_install.py +include tests/test_lint.py +include tests/test_main.py +include tests/test_metadata.py +include tests/test_nightly.py +include tests/test_publish.py +include tests/test_rewritemeta.py +include tests/test_scanner.py +include tests/test_signatures.py +include tests/test_signindex.py +include tests/test_update.py +include tests/test_vcs.py +include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png +include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png +include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png +include tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml +include tests/triple-t-2/build/org.piwigo.android/app/.gitignore +include tests/triple-t-2/build/org.piwigo.android/app/build.gradle +include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml +include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml +include tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt +include 
tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt +include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt +include tests/triple-t-2/build/org.piwigo.android/build.gradle +include tests/triple-t-2/build/org.piwigo.android/settings.gradle +include tests/triple-t-2/metadata/org.piwigo.android.yml +include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt +include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt +include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle +include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt +include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt +include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle +include tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml +include tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml +include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt +include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt +include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt +include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt +include tests/triple-t-flutter/metadata/fr.emersion.goguma.yml +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt +include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt +include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml +include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml +include tests/urzip-badcert.apk +include tests/urzip-badsig.apk +include tests/urzip-release-unsigned.apk +include tests/urzip-release.apk +include tests/urzip.apk 
+include tests/v2.only.sig_2.apk +include tests/valid-package-names/random-package-names +include tests/valid-package-names/RandomPackageNames.java +include tests/valid-package-names/test.py +include tests/__init__.py diff --git a/README b/README deleted file mode 100644 index ee354e8d..00000000 --- a/README +++ /dev/null @@ -1,12 +0,0 @@ -F-Droid is an installable catalogue of FOSS (Free and Open Source Software) -applications for the Android platform. The client makes it easy to browse, -install, and keep track of updates on your device. - -The F-Droid server tools provide various scripts and tools that are used to -maintain the main F-Droid application repository. You can use these same tools -to create your own additional or alternative repository for publishing, or to -assist in creating, testing and submitting metadata to the main repository. - -For documentation, please see the docs directory. - -Alternatively, visit http://f-droid.org/manual/ diff --git a/README.md b/README.md new file mode 100644 index 00000000..41f725cb --- /dev/null +++ b/README.md @@ -0,0 +1,133 @@ +
+
+
+# F-Droid Server
+### Tools for maintaining an F-Droid repository system.
+
+
+---
+
+## What is F-Droid Server?
+
+_fdroidserver_ is a suite of tools to publish and work with collections of
+Android apps (APK files) and other kinds of packages. It is used to maintain
+the [f-droid.org application repository](https://f-droid.org/packages). These
+same tools can be used to create additional or alternative repositories for
+publishing, or to assist in creating, testing and submitting metadata to the
+f-droid.org repository, also known as
+[_fdroiddata_](https://gitlab.com/fdroid/fdroiddata).
+
+For documentation, please see <https://f-droid.org/docs/>.
+
+In the beginning, _fdroidserver_ was the complete server-side setup that ran
+f-droid.org. Since then, the website and other parts have been split out into
+their own projects. The name for this suite of tooling has stayed
+_fdroidserver_ even though it no longer contains any proper server component.
+
+
+## Installing
+
+There are many ways to install _fdroidserver_, including via a range of
+package managers. All of the options are documented on the website:
+https://f-droid.org/docs/Installing_the_Server_and_Repo_Tools
+
+
+## Releases
+
+The production setup of _fdroidserver_ for f-droid.org is run directly from the
+_master_ branch. This is put into production on a schedule (currently weekly),
+so development and testing happen in branches. Branches are tracked via merge
+requests, which is why there are many WIP and long-lived merge requests.
+
+There are also stable releases of _fdroidserver_. These are mostly intended for
+running custom repositories, where the build process is separate. They can also
+be useful as a simple way to get started contributing packages to _fdroiddata_,
+since the stable releases are available in package managers.
+
+
+## Tests
+
+To run the full test suite:
+
+    tests/run-tests
+
+To run the tests for individual Python modules, see the `tests/test_*.py` files, e.g.:
+
+    python -m unittest tests/test_metadata.py
+
+It is also possible to run individual tests:
+
+    python -m unittest tests.test_metadata.MetadataTest.test_rewrite_yaml_special_build_params
+
+There is a growing test suite that has good coverage of a number of key parts of
+this code base. It does not yet cover all the code, and there are some parts
+where the technical debt makes it difficult to write unit tests. New tests
+should be standard Python _unittest_ test cases. Whenever possible, the old
+tests written in _bash_ in _tests/run-tests_ should be ported to Python. A
+minimal sketch of such a test case appears after this README.
+
+This test suite has been built up over time a bit haphazardly, so it is not as
+clean, organized, or complete as it could be. We welcome contributions. The
+goal is to move towards standard Python testing patterns and to expand the
+unit test coverage. Before rearchitecting any part of it, be sure to [contact
+us](https://f-droid.org/about) to discuss the changes first.
+
+
+### Additional tests for different Linux distributions
+
+These tests are also run on various configurations through GitLab CI. This is
+only enabled for `master@fdroid/fdroidserver` because it takes longer to
+complete than the regular CI tests. Most of the time you won't need to worry
+about them, but sometimes it might make sense to also run them for your merge
+request. In that case you need to remove [these lines from .gitlab-ci.yml](https://gitlab.com/fdroid/fdroidserver/-/blob/0124b9dde99f9cab19c034cbc7d8cc6005a99b48/.gitlab-ci.yml#L90-91)
+and push that change to a new branch of your fork.
+
+Alternatively, [run them
+locally](https://docs.gitlab.com/runner/commands/README.html#gitlab-runner-exec)
+like this: `gitlab-runner exec docker ubuntu_lts`
+
+
+## Documentation
+
+The API documentation, based on the docstrings, is automatically
+published [here](https://fdroid.gitlab.io/fdroidserver) on every commit
+to the `master` branch.
+
+It can be built locally via
+
+```bash
+pip install -e .[docs]
+cd docs
+sphinx-apidoc -o ./source ../fdroidserver -M -e
+sphinx-autogen -o generated source/*.rst
+make html
+```
+
+To additionally lint the code, call
+
+```bash
+pydocstyle fdroidserver --count
+```
+
+When writing docstrings you should follow the
+[numpy style guide](https://numpydoc.readthedocs.io/en/latest/format.html).
+A short sketch of that docstring layout also appears after this README.
+
+
+## Translation
+
+Everything can be translated. See
+[Translation and Localization](https://f-droid.org/docs/Translation_and_Localization)
+for more info.
+
+
+[![](https://hosted.weblate.org/widgets/f-droid/-/287x66-white.png)](https://hosted.weblate.org/engage/f-droid)
+
+View translation status for all languages.
+
+[![](https://hosted.weblate.org/widgets/f-droid/-/fdroidserver/multi-auto.svg)](https://hosted.weblate.org/engage/f-droid/?utm_source=widget)
+
+
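As noted in the Tests section of the new README, new tests should be standard Python _unittest_ test cases, and the remaining _bash_ checks in _tests/run-tests_ should gradually be ported over. The following is only a hypothetical skeleton showing that shape; the module name, class name, and the made-up metadata file are illustrative assumptions, not code from this repository.

```python
#!/usr/bin/env python3
# Hypothetical skeleton of a new test module, e.g. tests/test_example.py.
# It only demonstrates the standard unittest layout the README asks for.

import os
import tempfile
import unittest


class ExampleTest(unittest.TestCase):
    """Each test module holds one or more unittest.TestCase classes."""

    def setUp(self):
        # Work in a throwaway directory so tests do not touch the repo.
        self._cwd = os.getcwd()
        self.tempdir = tempfile.TemporaryDirectory()
        os.chdir(self.tempdir.name)

    def tearDown(self):
        os.chdir(self._cwd)
        self.tempdir.cleanup()

    def test_metadata_file_naming(self):
        # Illustrative only: write a made-up metadata file into the
        # temporary working directory and check the expected layout.
        appid = 'org.example.app'
        os.mkdir('metadata')
        path = os.path.join('metadata', appid + '.yml')
        with open(path, 'w') as fp:
            fp.write('AutoName: Example\n')
        self.assertTrue(os.path.isfile(path))


if __name__ == '__main__':
    unittest.main()
```

Such a module could then be run on its own with `python -m unittest`, in the same way the README shows for `tests/test_metadata.py`.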
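The Documentation section above asks for numpy-style docstrings, checked with `pydocstyle`. Here is a minimal sketch of that layout using a made-up helper function, not anything from the fdroidserver API:

```python
def parse_versioncode(value):
    """Convert a versionCode value into an int.

    This function only illustrates the numpy docstring layout
    (summary line, Parameters, Returns, Raises); it is not part
    of fdroidserver.

    Parameters
    ----------
    value : str or int
        The versionCode as read from a metadata file.

    Returns
    -------
    int
        The numeric versionCode.

    Raises
    ------
    ValueError
        If the value cannot be interpreted as an integer.
    """
    return int(value)
```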
diff --git a/buildserver/.gitignore b/buildserver/.gitignore index 4a3901ed..d8165d61 100644 --- a/buildserver/.gitignore +++ b/buildserver/.gitignore @@ -1,4 +1,4 @@ .vagrant up.log cache/ -Vagrantfile +Vagrantfile.yaml diff --git a/buildserver/Dockerfile b/buildserver/Dockerfile new file mode 100644 index 00000000..27ada3f8 --- /dev/null +++ b/buildserver/Dockerfile @@ -0,0 +1,74 @@ + +FROM debian:bookworm + +ENV LANG=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive + +RUN echo Etc/UTC > /etc/timezone \ + && echo 'Acquire::Retries "20";' \ + 'APT::Get::Assume-Yes "true";' \ + 'APT::Install-Recommends "0";' \ + 'APT::Install-Suggests "0";' \ + 'Dpkg::Use-Pty "0";' \ + 'quiet "1";' \ + >> /etc/apt/apt.conf.d/99gitlab + +# provision-apt-proxy was deliberately omitted, its not relevant in Docker +COPY provision-android-ndk \ + provision-android-sdk \ + provision-apt-get-install \ + provision-buildserverid \ + provision-gradle \ + setup-env-vars \ + /opt/buildserver/ + +ARG GIT_REV_PARSE_HEAD=unspecified +LABEL org.opencontainers.image.revision=$GIT_REV_PARSE_HEAD + +# setup 'vagrant' user for compatibility +RUN useradd --create-home -s /bin/bash vagrant && echo -n 'vagrant:vagrant' | chpasswd + +# The provision scripts must be run in the same order as in Vagrantfile +# - vagrant needs openssh-client iproute2 ssh sudo +# - ansible needs python3 +# +# Debian Docker images will soon default to HTTPS for apt sources, so force it. +# https://github.com/debuerreotype/docker-debian-artifacts/issues/15 +# +# Ensure fdroidserver's dependencies are marked manual before purging +# unneeded packages, otherwise, all its dependencies get purged. +# +# The official Debian docker images ship without ca-certificates, so +# TLS certificates cannot be verified until that is installed. The +# following code temporarily turns off TLS verification, and enables +# HTTPS, so at least unverified TLS is used for apt-get instead of +# plain HTTP. Once ca-certificates is installed, the CA verification +# is enabled by removing the newly created config file. This set up +# makes the initial `apt-get update` and `apt-get install` look the +# same as verified TLS to the network observer and hides the metadata. +RUN printf "path-exclude=/usr/share/locale/*\npath-exclude=/usr/share/man/*\npath-exclude=/usr/share/doc/*\npath-include=/usr/share/doc/*/copyright\n" >/etc/dpkg/dpkg.cfg.d/01_nodoc \ + && mkdir -p /usr/share/man/man1 \ + && echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates \ + && find /etc/apt/sources.list* -type f -exec sed -i s,http:,https:, {} \; \ + && apt-get update \ + && apt-get install ca-certificates \ + && rm /etc/apt/apt.conf.d/99nocacertificates \ + && apt-get upgrade \ + && apt-get dist-upgrade \ + && apt-get install openssh-client iproute2 python3 openssh-server sudo \ + && bash /opt/buildserver/setup-env-vars /opt/android-sdk \ + && . 
/etc/profile.d/bsenv.sh \ + && bash /opt/buildserver/provision-apt-get-install https://deb.debian.org/debian \ + && bash /opt/buildserver/provision-android-sdk "tools;25.2.5" \ + && bash /opt/buildserver/provision-android-ndk /opt/android-sdk/ndk \ + && bash /opt/buildserver/provision-gradle \ + && bash /opt/buildserver/provision-buildserverid $GIT_REV_PARSE_HEAD \ + && rm -rf /vagrant/cache \ + && apt-get autoremove --purge \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Vagrant sudo setup for compatibility +RUN echo 'vagrant ALL = NOPASSWD: ALL' > /etc/sudoers.d/vagrant \ + && chmod 440 /etc/sudoers.d/vagrant \ + && sed -i -e 's/Defaults.*requiretty/#&/' /etc/sudoers diff --git a/buildserver/Vagrantfile b/buildserver/Vagrantfile new file mode 100644 index 00000000..61e3459a --- /dev/null +++ b/buildserver/Vagrantfile @@ -0,0 +1,109 @@ +require 'yaml' +require 'pathname' +require 'fileutils' + +configfile = { + 'boot_timeout' => 600, + 'cachedir' => File.join(ENV['HOME'], '.cache', 'fdroidserver'), + 'cpus' => 1, + 'debian_mirror' => 'https://deb.debian.org/debian/', + 'hwvirtex' => 'on', + 'memory' => 2048, + 'vm_provider' => 'virtualbox', +} + +srvpath = Pathname.new(File.dirname(__FILE__)).realpath +configpath = File.join(srvpath, "/Vagrantfile.yaml") +if File.exist? configpath + c = YAML.load_file(configpath) + if c and not c.empty? + c.each do |k,v| + configfile[k] = v + end + end +else + puts "Copying example file to #{configpath}" + FileUtils.cp('../examples/Vagrantfile.yaml', configpath) +end + +Vagrant.configure("2") do |config| + + if Vagrant.has_plugin?("vagrant-cachier") + config.cache.scope = :box + config.cache.auto_detect = false + config.cache.enable :apt + config.cache.enable :chef + end + + config.vm.box = "debian/bookworm64" + + if not configfile.has_key? "vm_provider" or configfile["vm_provider"] == "virtualbox" + # default to VirtualBox if not set + config.vm.provider "virtualbox" do |v| + v.customize ["modifyvm", :id, "--memory", configfile['memory']] + v.customize ["modifyvm", :id, "--cpus", configfile['cpus']] + v.customize ["modifyvm", :id, "--hwvirtex", configfile['hwvirtex']] + end + synced_folder_type = 'virtualbox' + elsif configfile["vm_provider"] == "libvirt" + # use KVM/QEMU if this is running in KVM/QEMU + config.vm.provider :libvirt do |libvirt| + libvirt.driver = configfile["hwvirtex"] == "on" ? "kvm" : "qemu" + libvirt.host = "localhost" + libvirt.uri = "qemu:///system" + libvirt.cpus = configfile["cpus"] + libvirt.memory = configfile["memory"] + # Debian Vagrant image is only 20G, so allocate more + libvirt.machine_virtual_size = 1024 + if configfile.has_key? "libvirt_disk_bus" + libvirt.disk_bus = configfile["libvirt_disk_bus"] + end + if configfile.has_key? "libvirt_nic_model_type" + libvirt.nic_model_type = configfile["libvirt_nic_model_type"] + end + end + if configfile.has_key? "synced_folder_type" + synced_folder_type = configfile["synced_folder_type"] + else + synced_folder_type = '9p' + end + config.vm.synced_folder './', '/vagrant', type: synced_folder_type, + SharedFoldersEnableSymlinksCreate: false + else + abort("No supported VM Provider found, set vm_provider in Vagrantfile.yaml!") + end + + config.vm.boot_timeout = configfile['boot_timeout'] + + if configfile.has_key? 
"aptproxy" + config.vm.provision :shell, path: "provision-apt-proxy", + args: [configfile["aptproxy"]] + end + + config.vm.synced_folder configfile["cachedir"], '/vagrant/cache', + create: true, type: synced_folder_type + + # Make sure dir exists to mount to, since buildserver/ is + # automatically mounted as /vagrant in the guest VM. This is more + # necessary with 9p synced folders + Dir.mkdir('cache') unless File.exist?('cache') + + # Root partition needs to be resized to the new allocated space + config.vm.provision "shell", inline: <<-SHELL + growpart -v -u auto /dev/vda 1 + resize2fs /dev/vda1 + SHELL + + config.vm.provision "shell", name: "setup-env-vars", path: "setup-env-vars", + args: ["/opt/android-sdk"] + config.vm.provision "shell", name: "apt-get-install", path: "provision-apt-get-install", + args: [configfile['debian_mirror']] + config.vm.provision "shell", name: "android-sdk", path: "provision-android-sdk" + config.vm.provision "shell", name: "android-ndk", path: "provision-android-ndk", + args: ["/opt/android-sdk/ndk"] + config.vm.provision "shell", name: "gradle", path: "provision-gradle" + config.vm.provision "shell", name: "disable-analytics", path: "provision-disable-analytics" + config.vm.provision "shell", name: "buildserverid", path: "provision-buildserverid", + args: [`git rev-parse HEAD`] + +end diff --git a/buildserver/config.buildserver.yml b/buildserver/config.buildserver.yml new file mode 100644 index 00000000..944535c5 --- /dev/null +++ b/buildserver/config.buildserver.yml @@ -0,0 +1,2 @@ +sdk_path: /opt/android-sdk +gradle_version_dir: /opt/gradle/versions diff --git a/buildserver/cookbooks/android-ndk/recipes/default.rb b/buildserver/cookbooks/android-ndk/recipes/default.rb deleted file mode 100644 index 13e64d48..00000000 --- a/buildserver/cookbooks/android-ndk/recipes/default.rb +++ /dev/null @@ -1,36 +0,0 @@ - -ndk_loc = node[:settings][:ndk_loc] -user = node[:settings][:user] - -execute "add-android-ndk-path" do - user user - command "echo \"export PATH=\\$PATH:#{ndk_loc} #PATH-NDK\" >> /home/#{user}/.bsenv" - not_if "grep PATH-NDK /home/#{user}/.bsenv" -end - -execute "add-android-ndk-var" do - user user - command "echo \"export ANDROID_NDK=#{ndk_loc}\" >> /home/#{user}/.bsenv" - not_if "grep ANDROID_NDK /home/#{user}/.bsenv" -end - -script "setup-android-ndk" do - timeout 14400 - interpreter "bash" - user node[:settings][:user] - cwd "/tmp" - code " - if [ `uname -m` == 'x86_64' ] ; then - SUFFIX='_64' - else - SUFFIX='' - fi - tar jxvf /vagrant/cache/android-ndk-r9b-linux-x86$SUFFIX.tar.bz2 - tar jxvf /vagrant/cache/android-ndk-r9b-linux-x86$SUFFIX-legacy-toolchains.tar.bz2 - mv android-ndk-r9b #{ndk_loc} - " - not_if do - File.exists?("#{ndk_loc}") - end -end - diff --git a/buildserver/cookbooks/android-sdk/recipes/default.rb b/buildserver/cookbooks/android-sdk/recipes/default.rb deleted file mode 100644 index 6e91b341..00000000 --- a/buildserver/cookbooks/android-sdk/recipes/default.rb +++ /dev/null @@ -1,99 +0,0 @@ - -sdk_loc = node[:settings][:sdk_loc] -user = node[:settings][:user] - -script "setup-android-sdk" do - timeout 14400 - interpreter "bash" - user user - cwd "/tmp" - code " - tar zxvf /vagrant/cache/android-sdk_r22.3-linux.tgz - mv android-sdk-linux #{sdk_loc} - #{sdk_loc}/tools/android update sdk --no-ui -t platform-tool - #{sdk_loc}/tools/android update sdk --no-ui -t tool - " - not_if "test -d #{sdk_loc}" -end - -execute "add-android-sdk-path" do - user user - path = "#{sdk_loc}/tools:#{sdk_loc}/platform-tools" - command "echo 
\"export PATH=\\$PATH:#{path} #PATH-SDK\" >> /home/#{user}/.bsenv" - not_if "grep PATH-SDK /home/#{user}/.bsenv" -end - -execute "add-android-home" do - user user - command "echo \"export ANDROID_HOME=#{sdk_loc}\" >> /home/#{user}/.bsenv" - not_if "grep ANDROID_HOME /home/#{user}/.bsenv" -end - -script "add_build_tools" do - interpreter "bash" - user user - ver = "19.0.1" - cwd "/tmp" - code " - if [ -f /vagrant/cache/build-tools/#{ver}.tar.gz ] ; then - echo Installing from cache - mkdir #{sdk_loc}/build-tools - tar -C #{sdk_loc}/build-tools -z -x -f /vagrant/cache/build-tools/#{ver}.tar.gz - else - #{sdk_loc}/tools/android update sdk --no-ui -a -t build-tools-#{ver} <> /home/#{user}/.bsenv - " - not_if "test -d #{sdk_loc}/build-tools/#{ver}" -end - -# This is currently 19.0.1 -script "add_platform_tools" do - interpreter "bash" - user user - cwd "/tmp" - code " - if [ -f /vagrant/cache/platform-tools.tar.gz ] ; then - echo Installing from cache - mkdir #{sdk_loc}/platform-tools - tar -C #{sdk_loc}/platform-tools -z -x -f /vagrant/cache/platform-tools.tar.gz - else - #{sdk_loc}/tools/android update sdk --no-ui -a -t platform-tools <> /home/#{user}/.bashrc" - not_if "grep bsenv /home/#{user}/.bashrc" -end - - diff --git a/buildserver/cookbooks/gradle/recipes/default.rb b/buildserver/cookbooks/gradle/recipes/default.rb deleted file mode 100644 index b13e397e..00000000 --- a/buildserver/cookbooks/gradle/recipes/default.rb +++ /dev/null @@ -1,48 +0,0 @@ - -user = node[:settings][:user] - -gradle_script = IO.read(File.join( - File.expand_path(File.dirname(__FILE__)), "gradle")) - -script "add-gradle-bindir" do - cwd "/tmp" - interpreter "bash" - code "mkdir -p /opt/gradle/bin" - not_if "test -d /opt/gradle/bin" -end - -script "add-gradle-verdir" do - cwd "/tmp" - interpreter "bash" - code "mkdir -p /opt/gradle/versions" - not_if "test -d /opt/gradle/versions" -end - -%w{1.4 1.6 1.7 1.8 1.9}.each do |ver| - script "install-gradle-#{ver}" do - cwd "/tmp" - interpreter "bash" - code " - unzip /vagrant/cache/gradle-#{ver}-bin.zip - mv gradle-#{ver} /opt/gradle/versions/#{ver} - " - not_if "test -d /opt/gradle/versions/#{ver}" - end -end - -script "add-gradle-wrapper" do - cwd "/tmp" - interpreter "bash" - code " - cat << \"EOF\" > /opt/gradle/bin/gradle -#{gradle_script} -EOF - chmod a+x /opt/gradle/bin/gradle - " -end - -execute "add-android-ndk-path" do - user user - command "echo \"export PATH=\\$PATH:/opt/gradle/bin #PATH-GRADLE\" >> /home/#{user}/.bsenv" - not_if "grep PATH-GRADLE /home/#{user}/.bsenv" -end diff --git a/buildserver/cookbooks/gradle/recipes/gradle b/buildserver/cookbooks/gradle/recipes/gradle deleted file mode 100755 index 78937e27..00000000 --- a/buildserver/cookbooks/gradle/recipes/gradle +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -bindir="$(dirname $0)" -basedir="$(dirname $bindir)" -verdir="${basedir}/versions" -args=("$@") -pushd "${verdir}" &>/dev/null - -v_all=(*/) -v_all=(${v_all[@]%/}) - -v_def=${v_all[-1]} -echo "Available gradle versions: ${v_all[@]}" - -popd &>/dev/null - -run_gradle() { - ${verdir}/${v_found}/bin/gradle "${args[@]}" - exit $? 
-} - -# key-value pairs of what gradle version each gradle plugin version -# should accept -d_plugin_k=(0.7 0.6 0.5 0.4 0.3 0.2) -d_plugin_v=(1.9 1.8 1.6 1.6 1.4 1.4) - -# Latest takes priority -files=(../build.gradle build.gradle) - -for f in ${files[@]}; do - [[ -f $f ]] || continue - while read l; do - if [[ $l == *'com.android.tools.build:gradle:'* ]]; then - plugin_pver=$(echo -n "$l" | sed "s/.*com.android.tools.build:gradle:\\([0-9\\.\\+]\\+\\).*/\\1/") - elif [[ $l == *'gradleVersion'* ]]; then - wrapper_ver=$(echo -n "$l" | sed "s/.*gradleVersion[ ]*=[ ]*[\"']\\([0-9\\.]\\+\\)[\"'].*/\\1/") - fi - done < $f -done - -if [[ -n $wrapper_ver ]]; then - v_found=$wrapper_ver - echo "Found $v_found via gradleVersion" - run_gradle -fi - -if [[ -n $plugin_pver ]]; then - i=0 - match=false - for k in ${d_plugin_k[@]}; do - if [[ $plugin_pver == ${k}* ]]; then - plugin_ver=${d_plugin_v[$i]} - match=true - break - fi - let i++ - done - if $match; then - v_found=$plugin_ver - echo "Found $v_found via gradle plugin version $k" - fi -fi - -[[ -n $v_found ]] && run_gradle - -echo "No suitable gradle version found - defaulting to $v_def" -v_found=$v_def -run_gradle diff --git a/buildserver/cookbooks/kivy/recipes/default.rb b/buildserver/cookbooks/kivy/recipes/default.rb deleted file mode 100644 index 368b4cd8..00000000 --- a/buildserver/cookbooks/kivy/recipes/default.rb +++ /dev/null @@ -1,36 +0,0 @@ - -user = node[:settings][:user] - -%w{cython python-pygame python-pip python-virtualenv python-opengl python-gst0.10 python-enchant libgl1-mesa-dev libgles2-mesa-dev}.each do |pkg| - package pkg do - action :install - end -end - -script "install-kivy" do - cwd "/tmp" - interpreter "bash" - code " - tar xf /vagrant/cache/Kivy-1.7.2.tar.gz - cd Kivy-1.7.2 - python setup.py install - cd .. - rm -rf Kivy* - " - not_if "python -c 'import kivy'" -end - -script "install-p4a" do - cwd "/home/vagrant" - interpreter "bash" - code " - git clone git://github.com/kivy/python-for-android - chown -R vagrant:vagrant python-for-android - cd python-for-android - git checkout ca369d774e2 - " - not_if "test -d /home/vagrant/python-for-android" -end - - - diff --git a/buildserver/fixpaths.sh b/buildserver/fixpaths.sh deleted file mode 100644 index eb8a81fb..00000000 --- a/buildserver/fixpaths.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh - -fixit() -{ - #Fix sudoers so the PATH gets passed through, otherwise chef - #provisioning doesn't work. - if [ -z "$1" ]; then - export EDITOR=$0 && sudo -E visudo - else - echo "Fix sudoers" - echo "Defaults exempt_group=admin" >> $1 - fi - #Stick the gems bin onto root's path as well. - sudo echo "PATH=$PATH:/var/lib/gems/1.8/bin" >>/root/.bashrc - # Restart sudo so it gets the changes straight away - sudo /etc/init.d/sudo restart -} - -sudo grep "exempt_group" /etc/sudoers -q -if [ "$?" -eq "1" ]; then - fixit -fi - diff --git a/buildserver/provision-android-ndk b/buildserver/provision-android-ndk new file mode 100644 index 00000000..63f5eee7 --- /dev/null +++ b/buildserver/provision-android-ndk @@ -0,0 +1,30 @@ +#!/bin/bash +# +# $1 is the root dir to install the NDKs into +# $2 and after are the NDK releases to install + +echo $0 +set -e +set -x + +NDK_BASE=$1 +shift + +test -e $NDK_BASE || mkdir -p $NDK_BASE +cd $NDK_BASE + +for version in $@; do + if [ ! 
-e ${NDK_BASE}/${version} ]; then + unzip /vagrant/cache/android-ndk-${version}-linux*.zip > /dev/null + mv android-ndk-${version} \ + `sed -En 's,^Pkg.Revision *= *(.+),\1,p' android-ndk-${version}/source.properties` + fi +done + +# allow gradle/etc to install missing NDK versions +chgrp vagrant $NDK_BASE +chmod g+w $NDK_BASE + +# ensure all users can read and execute the NDK +chmod -R a+rX $NDK_BASE/ +find $NDK_BASE/ -type f -executable -exec chmod a+x -- {} + diff --git a/buildserver/provision-android-sdk b/buildserver/provision-android-sdk new file mode 100644 index 00000000..19002a47 --- /dev/null +++ b/buildserver/provision-android-sdk @@ -0,0 +1,167 @@ +#!/bin/bash + +echo $0 +set -e +set -x + +if [ -z $ANDROID_HOME ]; then + echo "ANDROID_HOME env var must be set!" + exit 1 +fi + +# disable the repositories of proprietary stuff +disabled=" +@version@=1 +@disabled@https\://dl.google.com/android/repository/extras/intel/addon.xml=disabled +@disabled@https\://dl.google.com/android/repository/glass/addon.xml=disabled +@disabled@https\://dl.google.com/android/repository/sys-img/android/sys-img.xml=disabled +@disabled@https\://dl.google.com/android/repository/sys-img/android-tv/sys-img.xml=disabled +@disabled@https\://dl.google.com/android/repository/sys-img/android-wear/sys-img.xml=disabled +@disabled@https\://dl.google.com/android/repository/sys-img/google_apis/sys-img.xml=disabled +" +test -d ${HOME}/.android || mkdir ${HOME}/.android +# there are currently zero user repos +echo 'count=0' > ${HOME}/.android/repositories.cfg +for line in $disabled; do + echo $line >> ${HOME}/.android/sites-settings.cfg +done + +# Include old makebuildserver cache that is a Vagrant synced_folder +# for sdkmanager to use. +cachedir=$HOME/.cache/sdkmanager +mkdir -p $cachedir +pushd $cachedir +for f in /vagrant/cache/*.zip; do + test -e $f && ln -s $f +done +popd + +# TODO do not preinstall 'tools' or 'platform-tools' at all, app builds don't need them +packages=" + tools;25.2.5 + platform-tools + build-tools;19.1.0 + build-tools;20.0.0 + build-tools;21.1.2 + build-tools;22.0.1 + build-tools;23.0.1 + build-tools;23.0.2 + build-tools;23.0.3 + build-tools;24.0.0 + build-tools;24.0.1 + build-tools;24.0.2 + build-tools;24.0.3 + build-tools;25.0.0 + build-tools;25.0.1 + build-tools;25.0.2 + build-tools;25.0.3 + build-tools;26.0.0 + build-tools;26.0.1 + build-tools;26.0.2 + build-tools;26.0.3 + build-tools;27.0.0 + build-tools;27.0.1 + build-tools;27.0.2 + build-tools;27.0.3 + build-tools;28.0.0 + build-tools;28.0.1 + build-tools;28.0.2 + build-tools;28.0.3 + build-tools;29.0.2 + build-tools;29.0.3 + build-tools;30.0.0 + build-tools;30.0.1 + build-tools;30.0.2 + build-tools;30.0.3 + build-tools;31.0.0 + build-tools;32.0.0 + build-tools;33.0.0 + platforms;android-10 + platforms;android-11 + platforms;android-12 + platforms;android-13 + platforms;android-14 + platforms;android-15 + platforms;android-16 + platforms;android-17 + platforms;android-18 + platforms;android-19 + platforms;android-20 + platforms;android-21 + platforms;android-22 + platforms;android-23 + platforms;android-24 + platforms;android-25 + platforms;android-26 + platforms;android-27 + platforms;android-28 + platforms;android-29 + platforms;android-30 + platforms;android-31 + platforms;android-32 + platforms;android-33 +" + +if [ $# -gt 0 ]; then + echo found args + packages=$@ +fi + +# temporary test of whether this script ran. It will change once +# 'tools' is no longer installed by default. +if [ ! 
-x $ANDROID_HOME/tools/bin/sdkmanager ]; then + mkdir -p ${ANDROID_HOME}/ + sdkmanager $packages +fi + +# this hacked cache should not end up in the Vagrant box or Docker image +rm -rf $cachedir + +mkdir -p $ANDROID_HOME/licenses/ + +cat << EOF > $ANDROID_HOME/licenses/android-sdk-license + +8933bad161af4178b1185d1a37fbf41ea5269c55 + +d56f5187479451eabf01fb78af6dfcb131a6481e + +24333f8a63b6825ea9c5514f83c2829b004d1fee +EOF + +cat < $ANDROID_HOME/licenses/android-sdk-preview-license + +84831b9409646a918e30573bab4c9c91346d8abd +EOF + +cat < $ANDROID_HOME/licenses/android-sdk-preview-license-old + +79120722343a6f314e0719f863036c702b0e6b2a + +84831b9409646a918e30573bab4c9c91346d8abd +EOF + +cat < $ANDROID_HOME/licenses/intel-android-extra-license + +d975f751698a77b662f1254ddbeed3901e976f5a +EOF + +chmod a+X $(dirname $ANDROID_HOME/) +chmod -R a+rX $ANDROID_HOME/ +chgrp vagrant $ANDROID_HOME +chmod g+w $ANDROID_HOME +find $ANDROID_HOME/ -type f -executable -print0 | xargs -0 chmod a+x + +# allow gradle to install newer build-tools and platforms +mkdir -p $ANDROID_HOME/{build-tools,platforms} +chgrp vagrant $ANDROID_HOME/{build-tools,platforms} +chmod g+w $ANDROID_HOME/{build-tools,platforms} + +# allow gradle/sdkmanager to install into the new m2repository +test -d $ANDROID_HOME/extras/m2repository || mkdir -p $ANDROID_HOME/extras/m2repository +find $ANDROID_HOME/extras/m2repository -type d | xargs chgrp vagrant +find $ANDROID_HOME/extras/m2repository -type d | xargs chmod g+w + +# allow gradle/sdkmanager to install extras;android;m2repository +test -d $ANDROID_HOME/extras/android || mkdir -p $ANDROID_HOME/extras/android +find $ANDROID_HOME/extras/android -type d | xargs chgrp vagrant +find $ANDROID_HOME/extras/android -type d | xargs chmod g+w diff --git a/buildserver/provision-apt-get-install b/buildserver/provision-apt-get-install new file mode 100644 index 00000000..ca39c47b --- /dev/null +++ b/buildserver/provision-apt-get-install @@ -0,0 +1,139 @@ +#!/bin/bash + +echo $0 +set -e +set -x + +debian_mirror=$1 +export DEBIAN_FRONTEND=noninteractive + +printf 'APT::Install-Recommends "0";\nAPT::Install-Suggests "0";\n' \ + > /etc/apt/apt.conf.d/99no-install-recommends + +printf 'Acquire::Retries "20";\n' \ + > /etc/apt/apt.conf.d/99acquire-retries + +cat < /etc/apt/apt.conf.d/99no-auto-updates +APT::Periodic::Enable "0"; +APT::Periodic::Update-Package-Lists "0"; +APT::Periodic::Unattended-Upgrade "0"; +EOF + +printf 'APT::Get::Assume-Yes "true";\n' \ + > /etc/apt/apt.conf.d/99assumeyes + +cat < /etc/apt/apt.conf.d/99quiet +Dpkg::Use-Pty "0"; +quiet "1"; +EOF + +cat < /etc/apt/apt.conf.d/99confdef +Dpkg::Options { "--force-confdef"; }; +EOF + +echo "man-db man-db/auto-update boolean false" | debconf-set-selections + +if echo $debian_mirror | grep '^https' 2>&1 > /dev/null; then + apt-get update || apt-get update + apt-get install ca-certificates +fi + +cat << EOF > /etc/apt/sources.list +deb ${debian_mirror} bookworm main +deb https://security.debian.org/debian-security bookworm-security main +deb ${debian_mirror} bookworm-updates main +EOF +echo "deb ${debian_mirror} bookworm-backports main" > /etc/apt/sources.list.d/backports.list + +apt-get update || apt-get update + +# purge things that might come from the base box, but we don't want +# https://salsa.debian.org/cloud-team/debian-vagrant-images/-/tree/master/config_space/package_config +# cat config_space/package_config/* | sort -u | grep -v '[A-Z#]' + +purge=" + apt-listchanges + apt-utils + bash-completion + bind9-* + bsdextrautils + bzip2 
+ chrony + cloud-utils + cron + cron-daemon-common + dbus + debconf-i18n + debian-faq + dmidecode + doc-debian + fdisk + file + groff-base + inetutils-telnet + krb5-locales + less + locales + logrotate + lsof + manpages + nano + ncurses-term + netcat-traditional + pciutils + reportbug + rsyslog + tasksel + traceroute + unattended-upgrades + usrmerge + vim-* + wamerican + wget + whiptail + xz-utils +" +# clean up files packages to be purged, then purge the packages +rm -rf /var/run/dbus /var/log/unattended-upgrades +apt-get purge $purge + +apt-get upgrade --download-only +apt-get upgrade + +# again after upgrade in case of keyring changes +apt-get update || apt-get update + +packages=" + androguard/bookworm-backports + apksigner + default-jdk-headless + default-jre-headless + curl + dexdump + fdroidserver + git-svn + gnupg + mercurial + patch + python3-magic + python3-packaging + rsync + sdkmanager/bookworm-backports + sudo + unzip +" + +apt-get install $packages --download-only +apt-get install $packages + +# fdroidserver comes from git, it was installed just for dependencies +apt-mark manual `apt-cache depends fdroidserver | sed -nE 's,^[| ]*Depends: ([a-z0-9 -]+),\1,p'` +apt-get purge fdroidserver + +# clean up things that will become outdated anyway +apt-get autoremove --purge +apt-get clean +rm -rf /var/lib/apt/lists/* + +highestjava=`update-java-alternatives --list | sort -n | tail -1 | cut -d ' ' -f 1` +update-java-alternatives --set $highestjava diff --git a/buildserver/provision-apt-proxy b/buildserver/provision-apt-proxy new file mode 100644 index 00000000..9d42a2a4 --- /dev/null +++ b/buildserver/provision-apt-proxy @@ -0,0 +1,11 @@ +#!/bin/bash + +echo $0 +set -e + +rm -f /etc/apt/apt.conf.d/02proxy +echo "Acquire::ftp::Proxy \"$1\";" >> /etc/apt/apt.conf.d/02proxy +echo "Acquire::http::Proxy \"$1\";" >> /etc/apt/apt.conf.d/02proxy +echo "Acquire::https::Proxy \"$1\";" >> /etc/apt/apt.conf.d/02proxy + +apt-get update || apt-get update diff --git a/buildserver/provision-buildserverid b/buildserver/provision-buildserverid new file mode 100644 index 00000000..f5010c39 --- /dev/null +++ b/buildserver/provision-buildserverid @@ -0,0 +1,9 @@ +#!/bin/bash -e + +test -n "$1" + +echo "Writing buildserver ID ...ID is $1" +set -x +echo "$1" > /home/vagrant/buildserverid +# sync data before we halt() the machine, we had an empty buildserverid otherwise +sync diff --git a/buildserver/provision-disable-analytics b/buildserver/provision-disable-analytics new file mode 100644 index 00000000..e1ec62b7 --- /dev/null +++ b/buildserver/provision-disable-analytics @@ -0,0 +1,15 @@ +#!/bin/bash + +set -ex + +# Flutter +# https://github.com/flutter/flutter/issues/73657 +flutter_conf=/home/vagrant/.flutter +cat < $flutter_conf +{ + "enabled": false +} +EOF +chown -R vagrant:vagrant $flutter_conf +chmod -R 0644 $flutter_conf + diff --git a/buildserver/provision-gradle b/buildserver/provision-gradle new file mode 100644 index 00000000..a282a4c5 --- /dev/null +++ b/buildserver/provision-gradle @@ -0,0 +1,53 @@ +#!/bin/bash + +set -ex + + +# version compare magic +vergte() { + printf '%s\n%s' "$1" "$2" | sort -C -V -r +} + +test -e /opt/gradle/versions || mkdir -p /opt/gradle/versions +cd /opt/gradle/versions + +glob="/vagrant/cache/gradle-*.zip" +if compgen -G $glob; then # test if glob matches anything + f=$(ls -1 --sort=version --group-directories-first $glob | tail -1) + ver=`echo $f | sed 's,.*gradle-\([0-9][0-9.]*\).*\.zip,\1,'` + # only use versions greater or equal 2.2.1 + if vergte $ver 2.2.1 && [ 
! -d /opt/gradle/versions/${ver} ]; then + unzip -qq $f + mv gradle-${ver} /opt/gradle/versions/${ver} + fi +fi + +chmod -R a+rX /opt/gradle + +test -e /opt/gradle/bin || mkdir -p /opt/gradle/bin +git clone --depth 1 https://gitlab.com/fdroid/gradlew-fdroid.git /home/vagrant/gradlew-fdroid/ +chmod 0755 /home/vagrant/gradlew-fdroid/gradlew-fdroid +chmod -R u+rwX,a+rX,go-w /home/vagrant/gradlew-fdroid/ +ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /opt/gradle/bin/gradle +ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /usr/local/bin/ + +chown -h vagrant:vagrant /opt/gradle/bin/gradle +chown vagrant:vagrant /opt/gradle/versions +chmod 0755 /opt/gradle/versions + +GRADLE_HOME=/home/vagrant/.gradle +test -d $GRADLE_HOME/ || mkdir $GRADLE_HOME/ +cat < $GRADLE_HOME/gradle.properties +# builds are not reused, so the daemon is a waste of time +org.gradle.daemon=false + +# set network timeouts to 10 minutes +# https://github.com/gradle/gradle/pull/3371/files +systemProp.http.connectionTimeout=600000 +systemProp.http.socketTimeout=600000 +systemProp.org.gradle.internal.http.connectionTimeout=600000 +systemProp.org.gradle.internal.http.socketTimeout=600000 +EOF + +chown -R vagrant:vagrant $GRADLE_HOME/ +chmod -R a+rX $GRADLE_HOME/ diff --git a/buildserver/setup-env-vars b/buildserver/setup-env-vars new file mode 100644 index 00000000..1c3599e9 --- /dev/null +++ b/buildserver/setup-env-vars @@ -0,0 +1,27 @@ +#!/bin/sh +# +# sets up the environment vars needed by the build process + +set -e +set -x + +bsenv=/etc/profile.d/bsenv.sh + +echo "# generated on "`date` > $bsenv + +echo export ANDROID_HOME=$1 >> $bsenv +echo 'export PATH=$PATH:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools:/opt/gradle/bin' >> $bsenv +echo "export DEBIAN_FRONTEND=noninteractive" >> $bsenv +echo 'export home_vagrant=/home/vagrant' >> $bsenv +echo 'export fdroidserver=$home_vagrant/fdroidserver' >> $bsenv +echo 'export LC_ALL=C.UTF-8' >> $bsenv + +chmod 0644 $bsenv + +# make sure that SSH never hangs at a password or key prompt +mkdir -p /etc/ssh/ssh_config.d/ +cat << EOF >> /etc/ssh/ssh_config.d/fdroid +Host * + StrictHostKeyChecking yes + BatchMode yes +EOF diff --git a/completion/bash-completion b/completion/bash-completion index 47c51044..57fcfd12 100644 --- a/completion/bash-completion +++ b/completion/bash-completion @@ -1,8 +1,8 @@ -#!/bin/bash +# fdroid(1) completion -*- shell-script -*- # # bash-completion - part of the FDroid server tools -# Commits updates to apps, allowing you to edit the commit messages # +# Copyright (C) 2013-2017 Hans-Christoph Steiner # Copyright (C) 2013, 2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify @@ -18,17 +18,6 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -# 'fdroid' is completed automatically, but aliases to it are not. 
-# For instance, to alias 'fd' to 'fdroid' and have competion available: -# -# alias fd='fdroid' -# complete -F _fdroid fd -# -# One can use completion on aliased subcommands as follows: -# -# alias fbuild='fdroid build' -# complete -F _fdroid_build fbuild - __fdroid_init() { COMPREPLY=() cur="${COMP_WORDS[COMP_CWORD]}" @@ -37,10 +26,15 @@ __fdroid_init() { (( $# >= 1 )) && __complete_${1} } -__package() { - files=( metadata/*.txt ) +__get_appid() { + files=( metadata/*.yml ) files=( ${files[@]#metadata/} ) - files=${files[@]%.txt} + files=${files[@]%.yml} + echo "$files" +} + +__package() { + files="$(__get_appid)" COMPREPLY=( $( compgen -W "$files" -- $cur ) ) } @@ -65,41 +59,41 @@ __apk_vercode() { } __vercode() { - local p=${cur:0:-1} - - COMPREPLY=( $( compgen -P "${p}:" -W "$( while read line; do - if [[ "$line" == "Build Version:"* ]] - then - line="${line#*,}" - printf "${line%%,*} " - elif [[ "$line" == "Build:"* ]] - then - line="${line#*,}" - printf "${line%%,*} " - fi - done < "metadata/${p}.txt" )" -- $cur ) ) + if [ $prev = ":" ]; then + appid="${COMP_WORDS[COMP_CWORD-2]}" + elif [ $cur = ":" ]; then + appid=$prev + cur="" + fi + versionCodes=`sed -En 's,^ +versionCode: +([0-9]+) *$,\1,p' metadata/${appid}.yml` + COMPREPLY=( $( compgen -W "$versionCodes" -- $cur ) ) } __complete_options() { case "${cur}" in --*) - COMPREPLY=( $( compgen -W "${lopts}" -- $cur ) ) + COMPREPLY=( $( compgen -W "--help --version ${lopts}" -- $cur ) ) return 0;; *) - COMPREPLY=( $( compgen -W "${opts} ${lopts}" -- $cur ) ) + COMPREPLY=( $( compgen -W "-h ${opts} --help --version ${lopts}" -- $cur ) ) return 0;; esac } __complete_build() { - opts="-h -v -c -l -s -t -f" - lopts="--help --verbose --latest --server --resetserver --on-server - --force --all" + opts="-v -q -l -s -t -f -a" + + lopts="--verbose --quiet --latest --stop --test --server --skip-scan --scan-binary --no-tarball --force --all --no-refresh" + case "${prev}" in + :) + __vercode + return 0;; + esac case "${cur}" in -*) __complete_options return 0;; - *:) + :) __vercode return 0;; *) @@ -108,9 +102,15 @@ __complete_build() { esac } +__complete_gpgsign() { + opts="-v -q" + lopts="--verbose --quiet" + __complete_options +} + __complete_install() { - opts="-h -v" - lopts="--help --verbose --all" + opts="-v -q -a -p -n -y" + lopts="--verbose --quiet --all --color --no-color --privacy-mode --no-privacy-mode --no --yes" case "${cur}" in -*) __complete_options @@ -125,9 +125,10 @@ __complete_install() { } __complete_update() { - opts="-h -c -v -q -b -i -I -e -w" - lopts="--help --createmeta --verbose --quiet --buildreport --interactive - --icons --editor --wiki --pretty --clean" + opts="-c -v -q -i -I -e" + lopts="--create-metadata --verbose --quiet + --icons --pretty --clean --delete-unknown + --nosign --rename-apks --use-date-from-apk" case "${prev}" in -e|--editor) _filedir @@ -137,8 +138,8 @@ __complete_update() { } __complete_publish() { - opts="-h -v" - lopts="--help --verbose" + opts="-v -q" + lopts="--verbose --quiet" case "${cur}" in -*) __complete_options @@ -153,8 +154,8 @@ __complete_publish() { } __complete_checkupdates() { - opts="-h -v" - lopts="--help --verbose --auto --autoonly --commit --gplay" + opts="-v -q" + lopts="--verbose --quiet --auto --autoonly --commit --allow-dirty" case "${cur}" in -*) __complete_options @@ -166,17 +167,27 @@ __complete_checkupdates() { } __complete_import() { - opts="-h -u -s -r" - lopts="--help --url --subdir --repo" + opts="-c -h -l -q -s -u -v -W" + lopts="--categories --license --quiet 
--rev --subdir --url" case "${prev}" in - -u|--url|-r|--repo|-s|--subdir) return 0;; + -c|-l|-s|-u|--categories|--license|--quiet|--rev|--subdir|--url) + return 0;; + -W) + COMPREPLY=( $( compgen -W "error warn ignore" -- $cur ) ) + return 0;; esac __complete_options } +__complete_readmeta() { + opts="-v -q" + lopts="--verbose --quiet" + __complete_options +} + __complete_rewritemeta() { - opts="-h -v" - lopts="--help --verbose" + opts="-v -q -l" + lopts="--verbose --quiet --list" case "${cur}" in -*) __complete_options @@ -188,8 +199,8 @@ __complete_rewritemeta() { } __complete_lint() { - opts="-h -v" - lopts="--help --verbose" + opts="-v -q -f" + lopts="--verbose --quiet --force-yamllint --format" case "${cur}" in -*) __complete_options @@ -201,8 +212,8 @@ __complete_lint() { } __complete_scanner() { - opts="-h -v" - lopts="--help --verbose --nosvn" + opts="-v -q" + lopts="--verbose --quiet" case "${cur}" in -*) __complete_options @@ -217,8 +228,8 @@ __complete_scanner() { } __complete_verify() { - opts="-h -v -p" - lopts="--help --verbose" + opts="-v -q -p" + lopts="--verbose --quiet" case "${cur}" in -*) __complete_options @@ -232,45 +243,94 @@ __complete_verify() { esac } -__complete_stats() { - opts="-h -v -d" - lopts="--help --verbose --download" +__complete_btlog() { + opts="-u" + lopts="--git-remote --git-repo --url" __complete_options } -__complete_server() { - opts="-h -v" - lopts="--help --verbose update" +__complete_mirror() { + opts="-v" + lopts="--all --archive --build-logs --color --no-color --pgp-signatures --src-tarballs --output-dir" + __complete_options +} + +__complete_nightly() { + opts="-v -q" + lopts="--show-secret-var --archive-older" + __complete_options +} + +__complete_deploy() { + opts="-i -v -q" + lopts="--identity-file --local-copy-dir --sync-from-local-copy-dir + --verbose --quiet --no-checksum --no-keep-git-mirror-archive" + __complete_options +} + +__complete_signatures() { + opts="-v -q" + lopts="--verbose --color --no-color --no-check-https" + case "${cur}" in + -*) + __complete_options + return 0;; + esac + _filedir 'apk' + return 0 +} + +__complete_signindex() { + opts="-v -q" + lopts="--verbose" __complete_options } __complete_init() { - opts="-h -v -d" - lopts="--help --verbose --keystore --distinguished-name --repo-keyalias" + opts="-v -q -d" + lopts="--verbose --quiet --distinguished-name --keystore + --repo-keyalias --android-home --no-prompt --color --no-color" __complete_options } -_fdroid() { - local cmd cmds - cmd=${COMP_WORDS[1]} - cmds=" build init install update publish checkupdates import \ -rewritemeta lint scanner verify stats server " +__cmds=" \ +btlog \ +build \ +checkupdates \ +deploy \ +gpgsign \ +import \ +init \ +install \ +lint \ +mirror \ +nightly \ +publish \ +readmeta \ +rewritemeta \ +scanner \ +signatures \ +signindex \ +update \ +verify \ +" - for c in $cmds; do eval "_fdroid_${c} () { +for c in $__cmds; do + eval "_fdroid_${c} () { local cur prev opts lopts - __fdroid_init ${c}; - }"; done + __fdroid_init ${c} + }" +done - [[ $cmds == *\ $cmd\ * ]] && _fdroid_${cmd} || { - (($COMP_CWORD == 1)) && COMPREPLY=( $( compgen -W "${cmds}" -- $cmd ) ) +_fdroid() { + local cmd + cmd=${COMP_WORDS[1]} + + [[ $__cmds == *\ $cmd\ * ]] && _fdroid_${cmd} || { + (($COMP_CWORD == 1)) && COMPREPLY=( $( compgen -W "${__cmds}" -- $cmd ) ) } } -_fd-commit() { - __package -} - complete -F _fdroid fdroid -complete -F _fd-commit fd-commit return 0 diff --git a/config.buildserver.py b/config.buildserver.py deleted file mode 100644 index 
152a46e8..00000000 --- a/config.buildserver.py +++ /dev/null @@ -1,5 +0,0 @@ -sdk_path = "/home/vagrant/android-sdk" -ndk_path = "/home/vagrant/android-ndk" -build_tools = "19.0.1" -mvn3 = "mvn" -gradle = "gradle" diff --git a/docs/.gitignore b/docs/.gitignore deleted file mode 100644 index 642a2e6a..00000000 --- a/docs/.gitignore +++ /dev/null @@ -1,13 +0,0 @@ -/manual/ -# generated docs files -/fdroid.aux -/fdroid.cp -/fdroid.cps -/fdroid.fn -/fdroid.info -/fdroid.ky -/fdroid.log -/fdroid.pg -/fdroid.toc -/fdroid.tp -/fdroid.vr diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d0c3cbf1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/fdl.texi b/docs/fdl.texi deleted file mode 100644 index 40e23a10..00000000 --- a/docs/fdl.texi +++ /dev/null @@ -1,463 +0,0 @@ -@c The GNU Free Documentation License. -@center Version 1.3, 3 November 2008 - -@c This file is intended to be included within another document, -@c hence no sectioning command or @node. - -@display -Copyright @copyright{} 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc. -@uref{http://fsf.org/} - -Everyone is permitted to copy and distribute verbatim copies -of this license document, but changing it is not allowed. -@end display - -@enumerate 0 -@item -PREAMBLE - -The purpose of this License is to make a manual, textbook, or other -functional and useful document @dfn{free} in the sense of freedom: to -assure everyone the effective freedom to copy and redistribute it, -with or without modifying it, either commercially or noncommercially. -Secondarily, this License preserves for the author and publisher a way -to get credit for their work, while not being considered responsible -for modifications made by others. - -This License is a kind of ``copyleft'', which means that derivative -works of the document must themselves be free in the same sense. It -complements the GNU General Public License, which is a copyleft -license designed for free software. - -We have designed this License in order to use it for manuals for free -software, because free software needs free documentation: a free -program should come with manuals providing the same freedoms that the -software does. But this License is not limited to software manuals; -it can be used for any textual work, regardless of subject matter or -whether it is published as a printed book. We recommend this License -principally for works whose purpose is instruction or reference. - -@item -APPLICABILITY AND DEFINITIONS - -This License applies to any manual or other work, in any medium, that -contains a notice placed by the copyright holder saying it can be -distributed under the terms of this License. Such a notice grants a -world-wide, royalty-free license, unlimited in duration, to use that -work under the conditions stated herein. The ``Document'', below, -refers to any such manual or work. 
Any member of the public is a -licensee, and is addressed as ``you''. You accept the license if you -copy, modify or distribute the work in a way requiring permission -under copyright law. - -A ``Modified Version'' of the Document means any work containing the -Document or a portion of it, either copied verbatim, or with -modifications and/or translated into another language. - -A ``Secondary Section'' is a named appendix or a front-matter section -of the Document that deals exclusively with the relationship of the -publishers or authors of the Document to the Document's overall -subject (or to related matters) and contains nothing that could fall -directly within that overall subject. (Thus, if the Document is in -part a textbook of mathematics, a Secondary Section may not explain -any mathematics.) The relationship could be a matter of historical -connection with the subject or with related matters, or of legal, -commercial, philosophical, ethical or political position regarding -them. - -The ``Invariant Sections'' are certain Secondary Sections whose titles -are designated, as being those of Invariant Sections, in the notice -that says that the Document is released under this License. If a -section does not fit the above definition of Secondary then it is not -allowed to be designated as Invariant. The Document may contain zero -Invariant Sections. If the Document does not identify any Invariant -Sections then there are none. - -The ``Cover Texts'' are certain short passages of text that are listed, -as Front-Cover Texts or Back-Cover Texts, in the notice that says that -the Document is released under this License. A Front-Cover Text may -be at most 5 words, and a Back-Cover Text may be at most 25 words. - -A ``Transparent'' copy of the Document means a machine-readable copy, -represented in a format whose specification is available to the -general public, that is suitable for revising the document -straightforwardly with generic text editors or (for images composed of -pixels) generic paint programs or (for drawings) some widely available -drawing editor, and that is suitable for input to text formatters or -for automatic translation to a variety of formats suitable for input -to text formatters. A copy made in an otherwise Transparent file -format whose markup, or absence of markup, has been arranged to thwart -or discourage subsequent modification by readers is not Transparent. -An image format is not Transparent if used for any substantial amount -of text. A copy that is not ``Transparent'' is called ``Opaque''. - -Examples of suitable formats for Transparent copies include plain -@sc{ascii} without markup, Texinfo input format, La@TeX{} input -format, @acronym{SGML} or @acronym{XML} using a publicly available -@acronym{DTD}, and standard-conforming simple @acronym{HTML}, -PostScript or @acronym{PDF} designed for human modification. Examples -of transparent image formats include @acronym{PNG}, @acronym{XCF} and -@acronym{JPG}. Opaque formats include proprietary formats that can be -read and edited only by proprietary word processors, @acronym{SGML} or -@acronym{XML} for which the @acronym{DTD} and/or processing tools are -not generally available, and the machine-generated @acronym{HTML}, -PostScript or @acronym{PDF} produced by some word processors for -output purposes only. - -The ``Title Page'' means, for a printed book, the title page itself, -plus such following pages as are needed to hold, legibly, the material -this License requires to appear in the title page. 
For works in -formats which do not have any title page as such, ``Title Page'' means -the text near the most prominent appearance of the work's title, -preceding the beginning of the body of the text. - -The ``publisher'' means any person or entity that distributes copies -of the Document to the public. - -A section ``Entitled XYZ'' means a named subunit of the Document whose -title either is precisely XYZ or contains XYZ in parentheses following -text that translates XYZ in another language. (Here XYZ stands for a -specific section name mentioned below, such as ``Acknowledgements'', -``Dedications'', ``Endorsements'', or ``History''.) To ``Preserve the Title'' -of such a section when you modify the Document means that it remains a -section ``Entitled XYZ'' according to this definition. - -The Document may include Warranty Disclaimers next to the notice which -states that this License applies to the Document. These Warranty -Disclaimers are considered to be included by reference in this -License, but only as regards disclaiming warranties: any other -implication that these Warranty Disclaimers may have is void and has -no effect on the meaning of this License. - -@item -VERBATIM COPYING - -You may copy and distribute the Document in any medium, either -commercially or noncommercially, provided that this License, the -copyright notices, and the license notice saying this License applies -to the Document are reproduced in all copies, and that you add no other -conditions whatsoever to those of this License. You may not use -technical measures to obstruct or control the reading or further -copying of the copies you make or distribute. However, you may accept -compensation in exchange for copies. If you distribute a large enough -number of copies you must also follow the conditions in section 3. - -You may also lend copies, under the same conditions stated above, and -you may publicly display copies. - -@item -COPYING IN QUANTITY - -If you publish printed copies (or copies in media that commonly have -printed covers) of the Document, numbering more than 100, and the -Document's license notice requires Cover Texts, you must enclose the -copies in covers that carry, clearly and legibly, all these Cover -Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on -the back cover. Both covers must also clearly and legibly identify -you as the publisher of these copies. The front cover must present -the full title with all words of the title equally prominent and -visible. You may add other material on the covers in addition. -Copying with changes limited to the covers, as long as they preserve -the title of the Document and satisfy these conditions, can be treated -as verbatim copying in other respects. - -If the required texts for either cover are too voluminous to fit -legibly, you should put the first ones listed (as many as fit -reasonably) on the actual cover, and continue the rest onto adjacent -pages. - -If you publish or distribute Opaque copies of the Document numbering -more than 100, you must either include a machine-readable Transparent -copy along with each Opaque copy, or state in or with each Opaque copy -a computer-network location from which the general network-using -public has access to download using public-standard network protocols -a complete Transparent copy of the Document, free of added material. 
-If you use the latter option, you must take reasonably prudent steps, -when you begin distribution of Opaque copies in quantity, to ensure -that this Transparent copy will remain thus accessible at the stated -location until at least one year after the last time you distribute an -Opaque copy (directly or through your agents or retailers) of that -edition to the public. - -It is requested, but not required, that you contact the authors of the -Document well before redistributing any large number of copies, to give -them a chance to provide you with an updated version of the Document. - -@item -MODIFICATIONS - -You may copy and distribute a Modified Version of the Document under -the conditions of sections 2 and 3 above, provided that you release -the Modified Version under precisely this License, with the Modified -Version filling the role of the Document, thus licensing distribution -and modification of the Modified Version to whoever possesses a copy -of it. In addition, you must do these things in the Modified Version: - -@enumerate A -@item -Use in the Title Page (and on the covers, if any) a title distinct -from that of the Document, and from those of previous versions -(which should, if there were any, be listed in the History section -of the Document). You may use the same title as a previous version -if the original publisher of that version gives permission. - -@item -List on the Title Page, as authors, one or more persons or entities -responsible for authorship of the modifications in the Modified -Version, together with at least five of the principal authors of the -Document (all of its principal authors, if it has fewer than five), -unless they release you from this requirement. - -@item -State on the Title page the name of the publisher of the -Modified Version, as the publisher. - -@item -Preserve all the copyright notices of the Document. - -@item -Add an appropriate copyright notice for your modifications -adjacent to the other copyright notices. - -@item -Include, immediately after the copyright notices, a license notice -giving the public permission to use the Modified Version under the -terms of this License, in the form shown in the Addendum below. - -@item -Preserve in that license notice the full lists of Invariant Sections -and required Cover Texts given in the Document's license notice. - -@item -Include an unaltered copy of this License. - -@item -Preserve the section Entitled ``History'', Preserve its Title, and add -to it an item stating at least the title, year, new authors, and -publisher of the Modified Version as given on the Title Page. If -there is no section Entitled ``History'' in the Document, create one -stating the title, year, authors, and publisher of the Document as -given on its Title Page, then add an item describing the Modified -Version as stated in the previous sentence. - -@item -Preserve the network location, if any, given in the Document for -public access to a Transparent copy of the Document, and likewise -the network locations given in the Document for previous versions -it was based on. These may be placed in the ``History'' section. -You may omit a network location for a work that was published at -least four years before the Document itself, or if the original -publisher of the version it refers to gives permission. 
- -@item -For any section Entitled ``Acknowledgements'' or ``Dedications'', Preserve -the Title of the section, and preserve in the section all the -substance and tone of each of the contributor acknowledgements and/or -dedications given therein. - -@item -Preserve all the Invariant Sections of the Document, -unaltered in their text and in their titles. Section numbers -or the equivalent are not considered part of the section titles. - -@item -Delete any section Entitled ``Endorsements''. Such a section -may not be included in the Modified Version. - -@item -Do not retitle any existing section to be Entitled ``Endorsements'' or -to conflict in title with any Invariant Section. - -@item -Preserve any Warranty Disclaimers. -@end enumerate - -If the Modified Version includes new front-matter sections or -appendices that qualify as Secondary Sections and contain no material -copied from the Document, you may at your option designate some or all -of these sections as invariant. To do this, add their titles to the -list of Invariant Sections in the Modified Version's license notice. -These titles must be distinct from any other section titles. - -You may add a section Entitled ``Endorsements'', provided it contains -nothing but endorsements of your Modified Version by various -parties---for example, statements of peer review or that the text has -been approved by an organization as the authoritative definition of a -standard. - -You may add a passage of up to five words as a Front-Cover Text, and a -passage of up to 25 words as a Back-Cover Text, to the end of the list -of Cover Texts in the Modified Version. Only one passage of -Front-Cover Text and one of Back-Cover Text may be added by (or -through arrangements made by) any one entity. If the Document already -includes a cover text for the same cover, previously added by you or -by arrangement made by the same entity you are acting on behalf of, -you may not add another; but you may replace the old one, on explicit -permission from the previous publisher that added the old one. - -The author(s) and publisher(s) of the Document do not by this License -give permission to use their names for publicity for or to assert or -imply endorsement of any Modified Version. - -@item -COMBINING DOCUMENTS - -You may combine the Document with other documents released under this -License, under the terms defined in section 4 above for modified -versions, provided that you include in the combination all of the -Invariant Sections of all of the original documents, unmodified, and -list them all as Invariant Sections of your combined work in its -license notice, and that you preserve all their Warranty Disclaimers. - -The combined work need only contain one copy of this License, and -multiple identical Invariant Sections may be replaced with a single -copy. If there are multiple Invariant Sections with the same name but -different contents, make the title of each such section unique by -adding at the end of it, in parentheses, the name of the original -author or publisher of that section if known, or else a unique number. -Make the same adjustment to the section titles in the list of -Invariant Sections in the license notice of the combined work. - -In the combination, you must combine any sections Entitled ``History'' -in the various original documents, forming one section Entitled -``History''; likewise combine any sections Entitled ``Acknowledgements'', -and any sections Entitled ``Dedications''. 
You must delete all -sections Entitled ``Endorsements.'' - -@item -COLLECTIONS OF DOCUMENTS - -You may make a collection consisting of the Document and other documents -released under this License, and replace the individual copies of this -License in the various documents with a single copy that is included in -the collection, provided that you follow the rules of this License for -verbatim copying of each of the documents in all other respects. - -You may extract a single document from such a collection, and distribute -it individually under this License, provided you insert a copy of this -License into the extracted document, and follow this License in all -other respects regarding verbatim copying of that document. - -@item -AGGREGATION WITH INDEPENDENT WORKS - -A compilation of the Document or its derivatives with other separate -and independent documents or works, in or on a volume of a storage or -distribution medium, is called an ``aggregate'' if the copyright -resulting from the compilation is not used to limit the legal rights -of the compilation's users beyond what the individual works permit. -When the Document is included in an aggregate, this License does not -apply to the other works in the aggregate which are not themselves -derivative works of the Document. - -If the Cover Text requirement of section 3 is applicable to these -copies of the Document, then if the Document is less than one half of -the entire aggregate, the Document's Cover Texts may be placed on -covers that bracket the Document within the aggregate, or the -electronic equivalent of covers if the Document is in electronic form. -Otherwise they must appear on printed covers that bracket the whole -aggregate. - -@item -TRANSLATION - -Translation is considered a kind of modification, so you may -distribute translations of the Document under the terms of section 4. -Replacing Invariant Sections with translations requires special -permission from their copyright holders, but you may include -translations of some or all Invariant Sections in addition to the -original versions of these Invariant Sections. You may include a -translation of this License, and all the license notices in the -Document, and any Warranty Disclaimers, provided that you also include -the original English version of this License and the original versions -of those notices and disclaimers. In case of a disagreement between -the translation and the original version of this License or a notice -or disclaimer, the original version will prevail. - -If a section in the Document is Entitled ``Acknowledgements'', -``Dedications'', or ``History'', the requirement (section 4) to Preserve -its Title (section 1) will typically require changing the actual -title. - -@item -TERMINATION - -You may not copy, modify, sublicense, or distribute the Document -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense, or distribute it is void, and -will automatically terminate your rights under this License. - -However, if you cease all violation of this License, then your license -from a particular copyright holder is reinstated (a) provisionally, -unless and until the copyright holder explicitly and finally -terminates your license, and (b) permanently, if the copyright holder -fails to notify you of the violation by some reasonable means prior to -60 days after the cessation. 
- -Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - -Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, receipt of a copy of some or all of the same material does -not give you any rights to use it. - -@item -FUTURE REVISIONS OF THIS LICENSE - -The Free Software Foundation may publish new, revised versions -of the GNU Free Documentation License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. See -@uref{http://www.gnu.org/copyleft/}. - -Each version of the License is given a distinguishing version number. -If the Document specifies that a particular numbered version of this -License ``or any later version'' applies to it, you have the option of -following the terms and conditions either of that specified version or -of any later version that has been published (not as a draft) by the -Free Software Foundation. If the Document does not specify a version -number of this License, you may choose any version ever published (not -as a draft) by the Free Software Foundation. If the Document -specifies that a proxy can decide which future versions of this -License can be used, that proxy's public statement of acceptance of a -version permanently authorizes you to choose that version for the -Document. - -@item -RELICENSING - -``Massive Multiauthor Collaboration Site'' (or ``MMC Site'') means any -World Wide Web server that publishes copyrightable works and also -provides prominent facilities for anybody to edit those works. A -public wiki that anybody can edit is an example of such a server. A -``Massive Multiauthor Collaboration'' (or ``MMC'') contained in the -site means any set of copyrightable works thus published on the MMC -site. - -``CC-BY-SA'' means the Creative Commons Attribution-Share Alike 3.0 -license published by Creative Commons Corporation, a not-for-profit -corporation with a principal place of business in San Francisco, -California, as well as future copyleft versions of that license -published by that same organization. - -``Incorporate'' means to publish or republish a Document, in whole or -in part, as part of another Document. - -An MMC is ``eligible for relicensing'' if it is licensed under this -License, and if all works that were first published under this License -somewhere other than this MMC, and subsequently incorporated in whole -or in part into the MMC, (1) had no cover texts or invariant sections, -and (2) were thus incorporated prior to November 1, 2008. - -The operator of an MMC Site may republish an MMC contained in the site -under CC-BY-SA on the same site at any time before August 1, 2009, -provided the MMC is eligible for relicensing. 
- -@end enumerate - diff --git a/docs/fdroid.texi b/docs/fdroid.texi deleted file mode 100644 index 16681408..00000000 --- a/docs/fdroid.texi +++ /dev/null @@ -1,1507 +0,0 @@ -\input texinfo @c -*-texinfo-*- -@c %**start of header -@setfilename fdroid.info -@documentencoding UTF-8 -@settitle F-Droid Server Manual -@c %**end of header - -@copying -This manual is for the F-Droid repository server tools. - -Copyright @copyright{} 2010, 2011, 2012, 2013 Ciaran Gultnieks - -Copyright @copyright{} 2011 Henrik Tunedal, Michael Haas, John Sullivan - -Copyright @copyright{} 2013 David Black, Daniel Martí - -@quotation -Permission is granted to copy, distribute and/or modify this document -under the terms of the GNU Free Documentation License, Version 1.3 -or any later version published by the Free Software Foundation; -with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts. -A copy of the license is included in the section entitled "GNU -Free Documentation License". -@end quotation -@end copying - -@titlepage -@title F-Droid Server Manual -@author Ciaran Gultnieks and the F-Droid project -@page -@vskip 0pt plus 1filll -@insertcopying - -@end titlepage - -@contents - -@ifnottex -@node Top -@top F-Droid Server - -@insertcopying -@end ifnottex - -@menu -* Overview:: -* System Requirements:: -* Setup:: -* Simple Binary Repository:: -* Building Applications:: -* Importing Applications:: -* Metadata:: -* Update Processing:: -* Build Server:: -* Signing:: -* GNU Free Documentation License:: -* Index:: -@end menu - -@node Overview -@chapter Overview - -The F-Droid server tools provide various scripts and tools that are used -to maintain the main F-Droid application repository. You can use these same -tools to create your own additional or alternative repository for publishing, -or to assist in creating, testing and submitting metadata to the main -repository. - - -@node System Requirements -@chapter System Requirements - -@cindex installation - -The system requirements for using the tools will vary depending on your -intended usage. At the very least, you'll need: - -@itemize @bullet -@item -GNU/Linux -@item -Python 2.x -@item -The Android SDK Tools and Build-tools. -Note that F-Droid does not assume that you have the Android SDK in your -@code{PATH}: these directories will be specified in your repository -configuration. Recent revisions of the SDK have @code{aapt} located in -android-sdk/build-tools/ and it may be necessary to make a symlink to it in -android-sdk/platform-tools/ -@end itemize - -If you intend to build applications from source you'll also need most, if not -all, of the following: - -@itemize @bullet -@item -JDK (Debian package openjdk-6-jdk): openjdk-6 is recommended though openjdk-7 -should work too -@item -VCS clients: svn, git, git-svn, hg, bzr -@item -A keystore for holding release keys. (Safe, secure and well backed up!) 
-@end itemize - -If you intend to use the 'Build Server' system, for secure and clean builds -(highly recommended), you will also need: - -@itemize @bullet -@item -VirtualBox (debian package virtualbox) -@item -Ruby (debian packages ruby and rubygems) -@item -Vagrant (unpackaged) Be sure to use 1.3.x because 1.4.x is completely broken -(at the time of writing, the forthcoming 1.4.3 might work) -@item -Paramiko (debian package python-paramiko) -@item -Imaging (debian package python-imaging) -@item -Magic (debian package python-magic) -@end itemize - -On the other hand, if you want to build the apps directly on your system -without the 'Build Server' system, you may need: - -@itemize @bullet -@item -All SDK platforms requested by the apps you want to build -(The Android SDK is made available by Google under a proprietary license but -within that, the SDK platforms, support library and some other components are -under the Apache license and source code is provided. -Google APIs, used for building apps using Google Maps, are free to the extent -that the library comes pre-installed on the device. -Google Play Services, Google Admob and others are proprietary and shouldn't be -included in the main F-Droid repository.) -@item -A version of the Android NDK -@item -Ant with Contrib Tasks (Debian packages ant and ant-contrib) -@item -Maven (Debian package maven) -@item -JavaCC (Debian package javacc) -@item -Miscellaneous packages listed in -buildserver/cookbooks/fdroidbuild-general/recipes/default.rb -of the F-Droid server repository -@end itemize - -@node Setup -@chapter Setup - -@cindex setup, installation - -Because the tools and data will always change rapidly, you will almost -certainly want to work from a git clone of the tools at this stage. To -get started: - -@example -git clone git://gitorious.org/f-droid/fdroidserver.git -@end example - -You now have lots of stuff in the fdroidserver directory, but the most -important is the 'fdroid' command script which you run to perform all tasks. -This script is always run from a repository data directory, so the -most sensible thing to do next is to put your new fdroidserver directory -in your @code{PATH}. - -@section Data - -To do anything, you'll need at least one repository data directory. It's -from this directory that you run the @code{fdroid} command to perform all -repository management tasks. You can either create a brand new one, or -grab a copy of the data used by the main F-Droid repository: - -@example -git clone git://gitorious.org/f-droid/fdroiddata.git -@end example - -Regardless of the intended usage of the tools, you will always need to set -up some basic configuration details. This is done by creating a file called -@code{config.py} in the data directory. You should do this by copying the -example file (@code{config.sample.py}) from the fdroidserver project to your -data directory and then editing according to the instructions within. - -Once configured in this way, all the functionality of the tools is accessed -by running the @code{fdroid} command. Run it on its own to get a list of the -available sub-commands. - -You can follow any command with @code{--help} to get a list of additional -options available for that command. 
- -@example -fdroid update --help -@end example - - -@node Simple Binary Repository -@chapter Simple Binary Repository - -@cindex binary - -If you want to maintain a simple repository hosting only binary APKs obtained -and compiled elsewhere, the process is quite simple: - -@enumerate -@item -Set up the server tools, as described in Setup. -@item -Make a directory for your repository. This is the directory from which you -will do all the work with your repository. Create a config file there, called -@code{config.py}, by copying the @code{config.sample.py} from the server -project and editing it. -@item -Within that, make a directory called @code{repo} and put APK files in it. -@item -Run @code{fdroid update}. -@item -If it reports that any metadata files are missing, you can create them -in the @code{metadata} directory and run it again. -@item -To ease creation of metadata files, run @code{fdroid update} with the @code{-c} -option. It will create 'skeleton' metadata files that are missing, and you can -then just edit them and fill in the details. -@item -Then, if you've changed things, run @code{fdroid update} again. -@item -Running @code{fdroid update} adds an Icons directory into the repo directory, -and also creates the repository index (index.xml, and also index.jar if you've -configured the system to use a signed index). -@item -Publish the resulting contents of the @code{repo} directory to your web server. -@end enumerate - -Following the above process will result in a @code{repo} directory, which you -simply need to push to any HTTP (or preferably HTTPS) server to make it -accessible. - -While some information about the applications (and versions thereof) is -retrieved directly from the APK files, most comes from the corresponding file -in the @code{metadata} directory. The metadata file covering ALL versions of a -particular application is named @code{package.id.txt} where package.id is the -unique identifier for that package. - -See the Metadata chapter for details of what goes in the metadata file. All -fields are relevant for binary APKs, EXCEPT for 'Build Version' entries, which -should be omitted. - - -@node Building Applications -@chapter Building Applications - -Instead of (or as well as) including binary APKs from external sources in a -repository, you can build them directly from the source code. - -Using this method, it is is possible to verify that the application builds -correctly, corresponds to the source code, and contains only free software. -Unforunately, in the Android world, it seems to be very common for an -application supplied as a binary APK to present itself as Free Software -when in fact some or all of the following are true: - -@enumerate -@item -The source code (either for a particular version, or even all versions!) is -unavailable or incomplete. -@item -The source code is not capable of producing the actual binary supplied. -@item -The 'source code' contains binary files of unknown origin, or with proprietary -licenses. -@end enumerate - -For this reason, source-built applications are the preferred method for the -main F-Droid repository, although occasionally for technical or historical -reasons, exceptions are made to this policy. - -When building applications from source, it should be noted that you will be -signing them (all APK files must be signed to be installable on Android) with -your own key. 
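The signing key referred to above comes from the keystore settings in @code{config.py}. As a rough illustration only — the field names below are recalled from @code{config.sample.py} and should be checked against that file rather than taken as authoritative — the relevant part of a @code{config.py} might look something like this:

@example
# Illustrative signing setup -- confirm field names against config.sample.py
keystore = "keystore.jks"          # path to the keystore holding your release key
repo_keyalias = "myrepokey"        # alias of the key used to sign APKs and the index
keystorepass = "keystore password"  # placeholder value
keypass = "key password"            # placeholder value
@end example

It is worth keeping this file and the keystore itself out of any public repository, and backing both up, as the System Requirements chapter already advises for the keystore.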
When an application is already installed on a device, it is not -possible to upgrade it in place to a new version signed with a different key -without first uninstalling the original. This may present an inconvenience to -users, as the process of uninstalling loses any data associated with the -previous installation. - -The process for managing a repository for built-from-source applications is -very similar to that described in the Simple Binary Repository chapter, -except now you need to: - -@enumerate -@item -Include Build Version entries in the metadata files. -@item -Run @code{fdroid build} to build any applications that are not already built. -@item -Run @code{fdroid publish} to finalise packaging and sign any APKs that have -been built. -@end enumerate - - -@section More about "fdroid build" - -When run without any parameters, @code{fdroid build} will build any and all -versions of applications that you don't already have in the @code{repo} -directory (or more accurately, the @code{unsigned} directory). There are various -other things you can do. As with all the tools, the @code{--help} option is -your friend, but a few annotated examples and discussion of the more common -usage modes follows: - -To build a single version of a single application, you could run the -following: - -@example -./fdroid build org.fdroid.fdroid:16 -@end example - -This attempts to build version code 16 (which is version 0.25) of the F-Droid -client. Many of the tools recognise arguments as packages, allowing their -activity to be limited to just a limited set of packages. - -If the build above was successful, two files will have been placed in the -@code{unsigned} directory: - -@example -org.fdroid.fdroid_16.apk -org.fdroid.fdroid_16_src.tar.gz -@end example - -The first is the (unsigned) APK. You could sign this with a debug key and push -it direct to your device or an emulator for testing. The second is a source -tarball containing exactly the source that was used to generate the binary. - -If you were intending to publish these files, you could then run: - -@example -./fdroid publish -@end example - -The source tarball would move to the @code{repo} directory (which is the -directory you would push to your web server). A signed and zip-aligned version -of the APK would also appear there, and both files would be removed from the -@code{unsigned} directory. - -If you're building purely for the purposes of testing, and not intending to -push the results to a repository, at least yet, the @code{--test} option can be -used to direct output to the @code{tmp} directory instead of @code{unsigned}. -A similar effect could by achieved by simply deleting the output files from -@code{unsigned} after the build, but with the risk of forgetting to do so! - -Along similar lines (and only in conjunction with @code{--test}, you can use -@code{--force} to force a build of a Disabled application, where normally it -would be completely ignored. Similarly a version that was found to contain -ELFs or known non-free libraries can be forced to build. See also — -scanignore= and scandelete= in the Build Version section. - -If the build was unsuccessful, you can find out why by looking at the output -in the logs/ directory. If that isn't illuminating, try building the app the -regular way, step by step: android update project, ndk-build, ant debug. - -Note that source code repositories often contain prebuilt libraries. 
If the -app is being considered for the main F-Droid repository, it is important that -all such prebuilts are built either via the metadata or by a reputable third -party. - - -@section Direct Installation - -You can also build and install directly to a connected device or emulator -using the @code{fdroid install} command. If you do this without passing -packages as arguments then all the latest built and signed version available -of each package will be installed . In most cases, this will not be what you -want to do, so execution will stop straight away. However, you can override -this if you're sure that's what you want, by using @code{--all}. Note that -currently, no sanity checks are performed with this mode, so if the files in -the signed output directory were modified, you won't be notified. - - -@node Importing Applications -@chapter Importing Applications - -To help with starting work on including a new application, @code{fdroid import} -will take a URL and optionally some other parameters, and attempt to construct -as much information as possible by analysing the source code. Basic usage is: - -@example -./fdroid import --url=http://address.of.project -@end example - -For this to work, the URL must point to a project format that the script -understands. Currently this is limited to one of the following: - -@enumerate -@item -Gitorious - @code{https://gitorious.org/PROJECTNAME/REPONAME} -@item -Github - @code{https://github.com/USER/PROJECT} -@item -Google Code - @code{http://code.google.com/p/PROJECT/} -Supports git, svn and hg repos. - -Some Google Code projects have multiple repositories, identified by a -dropdown list on the @code{source/checkout} page. To access one other than -the default, specify its name using the @code{--repo} switch. -@item -Bitbucket - @code{https://bitbucket.org/USER/PROJECT/} -@item -Git - @code{git://REPO} -@end enumerate - -Depending on the project type, more or less information may be gathered. For -example, the license will be retrieved from a Google Code project, but not a -GitHub one. A bare repo url, such as the git:// one, is the least preferable -optional of all, since you will have to enter much more information manually. - -If the import is successful, a metadata file will be created. You will need to -edit this further to check the information, and fill in the blanks. - -If it fails, you'll be told why. If it got as far as retrieving the source -code, you can inspect it further by looking in @code{tmp/importer} where a full -checkout will exist. - -A frequent cause of initial failure is that the project directory is actually -a subdirectory in the repository. In this case, run the importer again using -the @code{--subdir} option to tell it where. It will not attempt to determine -this automatically, since there may be several options. - - -@node Metadata -@chapter Metadata - -@cindex metadata - -Information used by update.py to compile the public index comes from two -sources: - -@enumerate -@item -the APK files in the repo directory, and -@item -the metadata files in the metadata directory. -@end enumerate - -The metadata files are simple, easy to edit text files, always named as the -application's package ID with '.txt' appended. - -Note that although the metadata files are designed to be easily read and -writable by humans, they are also processed and written by various scripts. -They are capable of rewriting the entire file when necessary. 
Even so, -the structure and comments will be preserved correctly, although the order -of fields will be standardised. (In the event that the original file was -in a different order, comments are considered as being attached to the field -following them). In fact, you can standardise all the metadata in a single -command, without changing the functional content, by running: - -@example -fdroid rewritemetadata -@end example - -The following sections describe the fields recognised within the file. - -@menu -* Categories:: -* License:: -* Name:: -* Provides:: -* Auto Name:: -* Web Site:: -* Source Code:: -* Issue Tracker:: -* Donate:: -* FlattrID:: -* Bitcoin:: -* Litecoin:: -* Summary:: -* Description:: -* Maintainer Notes:: -* Repo Type:: -* Repo:: -* Build Version:: -* AntiFeatures:: -* Disabled:: -* Requires Root:: -* Archive Policy:: -* Update Check Mode:: -* Vercode Operation:: -* Update Check Data:: -* Auto Update Mode:: -* Current Version:: -* Current Version Code:: -* No Source Since:: -@end menu - -@node Categories -@section Categories - -Any number of categories for the application to be placed in. There is no -fixed list of categories - both the client and the web site will automatically -show any categories that exist in any applications. However, if your metadata -is intended for the main F-Droid repository, you should use one of the -existing categories (look at the site/client), or discuss the proposal to add -a new one. - -Categories must be separated by a single comma character, ','. For backwards -compatibility, F-Droid will use the first category given as element -for older clients to at least see one category. - -This is converted to (@code{}) in the public index file. - -@node License -@section License - -@cindex license - -The overall license for the application, or in certain cases, for the -source code only. - -Common values: - -@itemize @bullet - -@item -@samp{GPLv2} -GNU GPL version 2 - -@item -@samp{GPLv2+} -GNU GPL version 2 or later - -@item -@samp{GPLv3} -GNU GPL version 3 - -@item -@samp{GPLv3+} -GNU GPL version 3 or later - -@item -@samp{GPL} -An unspecified GPL version. Use this only as a last resort or if there is -some confusion over compatiblity of component licenses: particularly the use of -Apache libraries with GPLv2 source code. - -@item -@samp{AGPL} -Afferro GPL version 3. - -@item -@samp{Apache2} -Apache 2 - -@item -@samp{MIT} -MIT X11 license - -@item -@samp{BSD} -BSD license - the original '4-clause' version. - -@item -@samp{NewBSD} -BSD license - the new, or modified, version. - -@end itemize - -This is converted to (@code{}) in the public index file. - -@node Auto Name -@section Auto Name - -@cindex Auto Name - -The name of the application as can best be retrieved from the source code. -This is done so that the commitupdates script can put a familiar name in the -description of commits created when a new update of the application is -found. The Auto Name entry is generated automatically when @code{fdroid -checkupdates} is run. - -@node Name -@section Name - -@cindex Name - -The name of the application. Normally, this field should not be present since -the application's correct name is retrieved from the APK file. However, in a -situation where an APK contains a bad or missing application name, it can be -overridden using this. Note that this only overrides the name in the list of -apps presented in the client; it doesn't changed the name or application label -in the source code. 
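To make the preceding fields concrete, here is a minimal illustrative fragment of a metadata file; the package @code{org.example.app} is a placeholder, so the file would be @code{metadata/org.example.app.txt}:

@example
Categories:Internet
License:GPLv3+
@end example

Name is deliberately omitted here, since as described above it should normally be taken from the APK rather than overridden in the metadata.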
- -@node Provides -@section Provides - -@cindex Provides - -Comma-separated list of application IDs that this app provides. In other -words, if the user has any of these apps installed, F-Droid will show this app -as installed instead. It will also appear if the user clicks on urls linking -to the other app IDs. Useful when an app switches package name, or when you -want an app to act as multiple apps. - -@node Web Site -@section Web Site - -@cindex Web Site - -The URL for the application's web site. - -This is converted to (@code{}) in the public index file. - -@node Source Code -@section Source Code - -@cindex Source Code - -The URL to view or obtain the application's source code. This should be -something human-friendly. Machine-readable source-code is covered in the -'Repo' field. - -This is converted to (@code{}) in the public index file. - -@node Issue Tracker -@section Issue Tracker - -@cindex Issue Tracker - -The URL for the application's issue tracker. Optional, since not all -applications have one. - -This is converted to (@code{}) in the public index file. - -@node Donate -@section Donate - -@cindex Donate - -The URL to donate to the project. This should be the project's donate page -if it has one. - -It is possible to use a direct PayPal link here, if that is all that is -available. However, bear in mind that the developer may not be aware of -that direct link, and if they later changed to a different PayPal account, -or the PayPal link format changed, things could go wrong. It is always -best to use a link that the developer explicitly makes public, rather than -something that is auto-generated 'button code'. - -This is converted to (@code{}) in the public index file. - -@node FlattrID -@section FlattrID - -@cindex FlattrID - -The project's Flattr (http://flattr.com) ID, if it has one. This should be -a numeric ID, such that (for example) https://flattr.com/thing/xxxx leads -directly to the page to donate to the project. - -This is converted to (@code{}) in the public index file. - -@node Bitcoin -@section Bitcoin - -@cindex Bitcoin - -A bitcoin address for donating to the project. - -This is converted to (@code{}) in the public index file. - -@node Litecoin -@section Litecoin - -@cindex Litecoin - -A litecoin address for donating to the project. - -@node Summary -@section Summary - -@cindex Summary - -A brief summary of what the application is. Since the summary is only allowed -one line on the list of the F-Droid client, keeping it to within 32 characters -will ensure it fits even on the smallest screens. - -@node Description -@section Description - -@cindex Description - -A full description of the application, relevant to the latest version. -This can span multiple lines (which should be kept to a maximum of 80 -characters), and is terminated by a line containing a single '.'. - -Basic MediaWiki-style formatting can be used. Leaving a blank line starts a -new paragraph. Surrounding text with @code{''} make it italic, and with -@code{'''} makes it bold. - -You can link to another app in the repo by using @code{[[app.id]]}. The link -will be made appropriately whether in the Android client, the web repo -browser or the wiki. The link text will be the apps name. - -Links to web addresses can be done using @code{[http://example.com Text]}. - -For both of the above link formats, the entire link (from opening to closing -square bracket) must be on the same line. 
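As an illustration of the formatting rules described so far (the application id and URL are placeholders), the body of a Description might contain:

@example
A small tool for doing ''one thing'' really '''well'''.

It is designed to work alongside [[org.fdroid.fdroid]], and the
[http://example.com project website] has further documentation.
.
@end example

The blank line starts a new paragraph, each link stays on a single line, and the final line containing only a @samp{.} terminates the field.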
- -Bulletted lists are done by simply starting each item with a @code{*} on -a new line, and numbered lists are the same but using @code{#}. There is -currently no support for nesting lists - you can have one level only. - -It can be helpful to note information pertaining to updating from an -earlier version; whether the app contains any prebuilts built by the -upstream developers or whether non-free elements were removed; whether the -app is in rapid development or whether the latest version lags behind the -current version; whether the app supports multiple architectures or whether -there is a maximum SDK specified (such info not being recorded in the index). - -This is converted to (@code{}) in the public index file. - -@node Maintainer Notes -@section Maintainer Notes - -@cindex Maintainer Notes - -This is a multi-line field using the same rules and syntax as the description. -It's used to record notes for F-Droid maintainers to assist in maintaining and -updating the application in the repository. - -This information is also published to the wiki. - -@node Repo Type -@section Repo Type - -@cindex Repo Type - -The type of repository - for automatic building from source. If this is not -specified, automatic building is disabled for this application. Possible -values are: - -@itemize @bullet -@item -@samp{git} -@item -@samp{svn} -@item -@samp{git-svn} -@item -@samp{hg} -@item -@samp{bzr} -@item -@samp{srclib} -@end itemize -@node Repo -@section Repo - -@cindex Repo - -The repository location. Usually a git: or svn: URL, for example. - -The git-svn option connects to an SVN repository, and you specify the URL in -exactly the same way, but git is used as a back-end. This is preferable for -performance reasons, and also because a local copy of the entire history is -available in case the upstream repository disappears. (It happens!). In -order to use Tags as update check mode for this VCS type, the URL must have -the tags= special argument set. Likewise, if you intend to use the -RepoManifest/branch scheme, you would want to specify branches= as well. -Finally, trunk= can also be added. All these special arguments will be passed -to "git svn" in order, and their values must be relative paths to the svn repo -root dir. -Here's an example of a complex git-svn Repo URL: -http://svn.code.sf.net/p/project/code/svn;trunk=trunk;tags=tags;branches=branches - -For a Subversion repo that requires authentication, you can precede the repo -URL with username:password@ and those parameters will be passed as @option{--username} -and @option{--password} to the SVN checkout command. (This now works for both -svn and git-svn) - -If the Repo Type is @code{srclib}, then you must specify the name of the -according srclib .txt file. For example if @code{scrlibs/FooBar.txt} exist -and you want to use this srclib, then you have to set Repo to -@code{FooBar}. - -@node Build Version -@section Build Version - -@cindex Build Version - -Any number of these fields can be present, each specifying a version to -automatically build from source. The value is a comma-separated list. -For example: - -@samp{Build Version:0.12,3,651696a49be2cd7db5ce6a2fa8185e31f9a20035} - -The above specifies to build version 0.12, which has a version code of 3. -The third parameter specifies the tag, commit or revision number from -which to build it in the source repository. 
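Putting the Repo Type, Repo and Build Version fields together, a source-built app might carry something like the following (the repository URL and tag are placeholders):

@example
Repo Type:git
Repo:https://github.com/example/exampleapp.git
Build Version:0.12,3,v0.12
@end example

Here the third Build Version parameter is a tag rather than a commit hash, which is equally acceptable since any tag, commit or revision identifier from the source repository can be given.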
- -In addition to the three, always required, parameters described above, -further parameters can be added (in name=value format) to apply further -configuration to the build. These are (roughly in order of application): - -@table @code - -@item disable= -Disables this build, giving a reason why. (For backwards compatibility, this -can also be achieved by starting the commit ID with '!') - -The purpose of this feature is to allow non-buildable releases (e.g. the source -is not published) to be flagged, so the scripts don't generate repeated -messages about them. (And also to record the information for review later). -If an apk has already been built, disabling causes it to be deleted once -@code{fdroid update} is run; this is the procedure if ever a version has to -be replaced. - -@item subdir= -Specifies to build from a subdirectory of the checked out source code. -Normally this directory is changed to before building, - -@item submodules=yes -Use if the project (git only) has submodules - causes git submodule -init and update to be executed after the source is cloned. - -@item init=xxxx -As for 'prebuild', but runs on the source code BEFORE any other processing -takes place. - -You can use $$SDK$$, $$NDK$$ and $$MVN3$$ to substitute the paths to the -android SDK and NDK directories, and maven 3 executable respectively. - -@item oldsdkloc=yes -The sdk location in the repo is in an old format, or the build.xml is -expecting such. The 'new' format is sdk.dir while the VERY OLD format -is sdk-location. Typically, if you get a message along the lines of: -"com.android.ant.SetupTask cannot be found" when trying to build, then -try enabling this option. - -@item target= -Specifies a particular SDK target for compilation, overriding the -project.properties of the app and possibly sub-projects. Note that this does -not change the target SDK in the AndroidManifest.xml — the level of features -that can be included in the build. This is likely to cause the whole build.xml -to be rewritten, which is fine if it's a 'standard' android file or doesn't -already exist, but not a good idea if it's heavily customised. If you get an -error about invalid target, first try @code{init=rm -rf bin/}; otherwise this -parameter should do the trick. - -Please note that gradle builds should be using compilesdk=. - -@item compilesdk= -Practically accomplishes the same that target= does when used in ant and maven -projects. compilesdk= is used rather than target= so as to not cause any more -confusion. It only takes effect on gradle builds in the build.gradle file, -thus using it in any other case is not wise. - -@item update=xxx -By default, 'android update project' is used to generate or update the -project and all its referenced projects. Specifying update=no bypasses that. - -Specifiying update=force forces rebuilding of the build.xml file at the -same time - this is frequently needed with r14 of the Android platform -tools. Be aware of any customisations in build.xml when using -update=force. - -Default value is '@code{auto}', which uses the paths used in the -project.properties file to find out what project paths to update. - -Otherwise, value can be a semicolon-separated list of directories in -which to run 'android update project' relative to the main -application directory (which may include '@code{subdir}' parameter). - -@item encoding=xxxx -Adds a java.encoding property to local.properties with the given -value. Generally the value will be 'utf-8'. 
This is picked up by the -SDK's ant rules, and forces the Java compiler to interpret source -files with this encoding. If you receive warnings during the compile -about character encodings, you probably need this. - -@item forceversion=yes -If specified, the package version in AndroidManifest.xml is replaced -with the version name for the build as specified in the metadata. - -This is useful for cases when upstream repo failed to update it for -specific tag; to build an arbitrary revision; to make it apparent that -the version differs significantly from upstream; or to make it apparent -which architecture or platform the apk is designed to run on. - -@item forcevercode=yes -If specified, the package version code in the AndroidManifest.xml is -replaced with the version code for the build. See also forceversion. - -@item rm= -Specifies the relative paths of files or directories to delete before -the build is done. The paths are relative to the base of the build -directory - i.e. the root of the directory structure checked out from -the source respository - not necessarily the directory that contains -AndroidManifest.xml. - -Multiple files/directories can be specified by separating them with ';'. -Directories will be recursively deleted. - -@item fixtrans=yes -Modifies any instances of string resources that use multiple -formatting arguments, but don't use positional notation. For example, -"Hello %s, %d" becomes "Hello %1$s, %2$d". Newer versions of the -Android platform tools enforce this sensible standard. If you get -error messages relating to that, you need to enable this. - -@item fixapos=yes -Like fixtrans, but deals with an even older issue relating to -'unescaped apostrophes' in translation strings. - -@item extlibs=a;b;c -Specifies a list of external libraries (jar files) from the -@code{build/extlib} library, which will be placed in the @code{libs} directory -of the project. Separate items with semicolons. - -@item srclibs=a@@r;b@@r1; -Specifies a list of source libraries or Android projects. Separate items with -semicolons, and each item is of the form name@@rev where name is the predefined -source library name and rev is the revision or tag in source control to use. - -Each srclib has a metadata file under srclibs/ in the repository directory, -and the source code is stored in build/srclib/. -Repo Type: and Repo: are specified in the same way as for apps; Subdir: can be -a comma separated list, for when directories are renamed by upstream; Update -Project: updates the projects in the working directory and one level down; -Prepare: can be used for any kind of preparation: in particular if you need to -update the project with a particular target. You can then also use $$name$$ in -the init/prebuild/build command to substitute the relative path to the library -directory, but it could need tweaking if you've changed into another directory. - -@item patch=x -Apply patch(es). 'x' names one (or more - comma-seperated) -files within a directory below the metadata, with the same -name as the metadata file but without the extension. Each of -these patches is applied to the code in turn. - -@item prebuild=xxxx -Specifies a shell command (or commands - chain with &&) to run before -the build takes place. Backslash can be used as an escape character to -insert literal commas, or as the last character on a line to join that -line with the next. It has no special meaning in other contexts; in -particular, literal backslashes should not be escaped. - -The command runs using bash. 
- -Note that nothing should be build during this prebuild phase - scanning -of the code and building of the source tarball, for example, take place -after this. For custom actions that actually build things, use 'build' -instead. - -You can use $$name$$ to substitute the path to a referenced srclib - see -the @code{srclib} directory for details of this. - -You can use $$SDK$$, $$NDK$$ and $$MVN3$$ to substitute the paths to the -android SDK and NDK directories, and maven 3 executable respectively e.g. -for when you need to run @code{android update project} explicitly. - -@item scanignore=path1;path2;... -Enables one or more files/paths to be exlcuded from the scan process. -This should only be used where there is a very good reason, and -probably accompanied by a comment explaining why it is necessary. - -When scanning the source tree for problems, matching files whose relative -paths start with any of the paths given here are ignored. - -@item scandelete=path1;path2;... -Similar to scanignore=, but instead of ignoring files under the given paths, -it tells f-droid to delete the matching files directly. - -@item build=xxxx -As for 'prebuild', but runs during the actual build phase (but before the -main ant/maven build). Use this only for actions that do actual building. -Any prepartion of the source code should be done using 'init' or 'prebuild'. - -Any building that takes place before build= will be ignored, as either ant, -mvn or gradle will be executed to clean the build environment right before -build= (or the final build) is run. - -You can use $$SDK$$, $$NDK$$ and $$MVN3$$ to substitute the paths to the -android SDK and NDK directories, and maven 3 executable respectively. - -@item buildjni=[yes|no|] -Enables building of native code via the ndk-build script before doing -the main ant build. The value may be a list of directories relative -to the main application directory in which to run ndk-build, or 'yes' -which corresponds to '.' . Using explicit list may be useful to build -multi-component projects. - -The build and scan processes will complain (refuse to build) if this -parameter is not defined, but there is a @code{jni} directory present. -If the native code is being built by other means, you can specify -@code{no} here to avoid that. However, if the native code is actually -not required, remove the directory instead (using @code{prebuild} for -example). - -@item gradle=[@@] -Build with gradle instead of ant, specifying what flavour to assemble. -If is 'yes', 'main' or empty, no flavour will be used. Note -that this will not work on projects with flavours, since it will build -all flavours and there will be no 'main' build. -If @@ is attached to , then the gradle tasks will be run in that -directory. This might be necessary if gradle needs to be run in the parent -directory, in which case one would use 'gradle=@..'. - -@item maven=yes[@@] -Build with maven instead of ant. Like gradle, an extra @@ tells f-droid -to run maven inside that relative subdirectory. - -@item preassemble= -Space-separated list of gradle tasks to be run before the assemble task -in a gradle project build. - -@item bindir= -Normally the build output (apk) is expected to be in the bin -subdirectory below the ant build files. If the project is configured -to put it elsewhere, that can be specified here, relative to the base -of the checked out repo. Not yet implemented for gradle. - -@item antcommand=xxx -Specify an alternate ant command (target) instead of the default -'release'. 
It can't be given any flags, such as the path to a build.xml. - -@item novcheck=yes -Don't check that the version name and code in the resulting apk are -correct by looking at the build output - assume the metadata is -correct. This takes away a useful level of sanity checking, and should -only be used if the values can't be extracted. - -@end table - -Another example, using extra parameters: - -@samp{Build Version:1.09.03,10903,45,subdir=Timeriffic,oldsdkloc=yes} - -@node AntiFeatures -@section AntiFeatures - -@cindex AntiFeatures - -This is optional - if present, it contains a comma-separated list of any of -the following values, describing an anti-feature the application has. -Even though such apps won't be displayed unless a settings box is ticked, -it is a good idea to mention the reasons for the anti-feature(s) in the -description: - -@itemize @bullet - -@item -@samp{Ads} - the application contains advertising. - -@item -@samp{Tracking} - the application tracks and reports your activity to -somewhere without your consent. It's commonly used for when developers -obtain crash logs without the user's consent, or when an app is useless -without some kind of authentication. - -@item -@samp{NonFreeNet} - the application relies on computational services that -are impossible to replace or that the replacement cannot be connected to -without major changes to the app. - -@item -@samp{NonFreeAdd} - the application promotes non-Free add-ons, such that the -app is effectively an advert for other non-free software and such software is -not clearly labelled as such. - -@item -@samp{NonFreeDep} - the application depends on a non-Free application (e.g. -Google Maps) - i.e. it requires it to be installed on the device, but does not -include it. - -@end itemize - -@node Disabled -@section Disabled - -@cindex Disabled - -If this field is present, the application does not get put into the public -index. This allows metadata to be retained while an application is temporarily -disabled from being published. The value should be a description of why the -application is disabled. No apks or source code archives are deleted: to purge -an apk see the Build Version section or delete manually for developer builds. -The field is therefore used when an app has outlived it's usefulness, because -the source tarball is retained. - -@node Requires Root -@section Requires Root - -@cindex Requires Root - -Set this optional field to "Yes" if the application requires root -privileges to be usable. This lets the client filter it out if the -user so desires. Whether root is required or not, it is good to give -a paragraph in the description to the conditions on which root may be -asked for and the reason for it. - -@node Update Check Mode -@section Update Check Mode - -@cindex Update Check Mode - -This determines the method using for determining when new releases are -available - in other words, the updating of the Current Version and Current -Version Code fields in the metadata by the @code{fdroid checkupdates} process. - -Valid modes are: - -@itemize -@item -@code{None} - No checking is done because there's no appropriate automated way -of doing so. Updates should be checked for manually. 
Use this, for example, -when deploying betas or patched versions; when builds are done in a directory -different to where the AndroidManifest.xml is; if the developers use the -gradle build system and store version info in a separate file; if the -developers make a new branch for each release and don't make tags; or if you've -changed the package name or version code logic. -@item -@code{Static} - No checking is done - either development has ceased or new versions -are not desired. This method is also used when there is no other checking method -available and the upstream developer keeps us posted on new versions. -@item -@code{RepoManifest} - At the most recent commit, the AndroidManifest.xml file -is looked for in the directory where it was found in the the most recent build. -The appropriateness of this method depends on the development process used by -the application's developers. You should not specify this method unless you're -sure it's appropriate. For example, some developers bump the version when -commencing development instead of when publishing. -It will return an error if the AndroidManifest.xml has moved to a different -directory or if the package name has changed. -The current version that it gives may not be accurate, since not all -versions are fit to be published. Therefore, before building, it is often -necessary to check if the current version has been published somewhere by the -upstream developers, either by checking for apks that they distribute or for -tags in the source code repository. - -It currently works for every repository type to different extents, except -the srclib repo type. For git, git-svn and hg repo types, you may use -"RepoManifest/yourbranch" as UCM so that "yourbranch" would be the branch used -in place of the default one. The default values are "master" for git, -"default" for hg and none for git-svn (it stays in the same branch). -On the other hand, branch support hasn't been implemented yet in bzr and svn, -but RepoManifest may still be used without it. -@item -@code{RepoTrunk} - For svn and git-svn repositories, especially those who -don't have a bundled AndroidManifest.xml file, the Tags and RepoManifest -checks will not work, since there is no version information to obtain. But, -for those apps who automate their build process with the commit ref that HEAD -points to, RepoTrunk will set the Current Version and Current Version Code to -that number. -@item -@code{Tags} - The AndroidManifest.xml file in all tagged revisions in the -source repository is checked, looking for the highest version code. The -appropriateness of this method depends on the development process used by the -application's developers. You should not specify this method unless you're sure -it's appropriate. It shouldn't be used if the developers like to tag betas or -are known to forget to tag releases. Like RepoManifest, it will not return the -correct value if the directory containing the AndroidManifest.xml has moved. -Despite these caveats, it is the often the favourite update check mode. - -It currently only works for git, hg, bzr and git-svn repositories. In the case -of the latter, the repo URL must encode the path to the trunk and tags or else -no tags will be found. -@item -@code{HTTP} - HTTP requests are used to determine the current version code and -version name. This is controlled by the @code{Update Check Data} field, which -is of the form @code{urlcode|excode|urlver|exver}. 
- -Firstly, if @code{urlcode} is non-empty, the document from that URL is -retrieved, and matched against the regular expression @code{excode}, with the -first group becoming the version code. - -Secondly, if @code{urlver} is non-empty, the document from that URL is -retrieved, and matched against the regular expression @code{exver}, with the -first group becoming the version name. The @code{urlver} field can be set to -simply '.' which says to use the same document returned for the version code -again, rather than retrieving a different one. -@end itemize - -@node Update Check Data -@section Update Check Data - -@cindex Update Check Data - -Used in conjunction with @code{Update Check Mode} for certain modes. - -@node Vercode Operation -@section Vercode Operation - -@cindex Vercode Operation - -Operation to be applied to the vercode obtained by the defined @code{Update -Check Mode}. @code{%c} will be replaced by the actual vercode, and the whole -string will be passed to python's @code{eval} function. - -Especially useful with apps that we want to compile for different ABIs, but -whose vercodes don't always have trailing zeros. With @code{Vercode Operation} -set at something like @code{%c*10 + 4}, we will be able to track updates and -build three different versions of every upstream version. - -@node Archive Policy -@section Archive Policy - -@cindex Archive Policy - -This determines the policy for moving old versions of an app to the archive -repo, if one is configured. The configuration sets a default maximum number -of versions kept in the main repo, after which older ones are moved to the -archive. This app-specific policy setting can override that. - -Currently the only supported format is "n versions", where n is the number -of versions to keep. - -@node Auto Update Mode -@section Auto Update Mode - -@cindex Auto Update Mode - -This determines the method using for auto-generating new builds when new -releases are available - in other words, adding a new Build Version line to the -metadata. -This happens in conjunction with the 'Update Check Mode' functionality - i.e. -when an update is detected by that, it is also processed by this. - -Valid modes are: - -@itemize -@item -@code{None} - No auto-updating is done -@item -@code{Version} - Identifies the target commit (i.e. tag) for the new build based -on the given version specification, which is simply text in which %v and %c are -replaced with the required version name and version code respectively. - -For example, if an app always has a tag "2.7.2" corresponding to version 2.7.2, -you would simply specify "Version %v". If an app always has a tag "ver_1234" -for a version with version code 1234, you would specify "Version ver_%c". - -Additionally, a suffix can be added to the version name at this stage, to -differentiate F-Droid's build from the original. Continuing the first example -above, you would specify that as "Version +-fdroid %v" - "-fdroid" is the suffix. -@end itemize - - -@node Current Version -@section Current Version - -@cindex Current Version - -The name of the version that is current. There may be newer versions of the -application than this (e.g. betas), and there will almost certainly be older -ones. This should be the one that is recommended for general use. 
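Before moving on, a sketch of how the update-related fields above can combine in one metadata file may help; the URL, regular expressions and numbers are purely illustrative:

@example
Update Check Mode:HTTP
Update Check Data:https://example.com/version.txt|versionCode=(\d+)|.|versionName=([\d.]+)
Vercode Operation:%c*10 + 4
Auto Update Mode:Version v%v
Archive Policy:4 versions
@end example

With this, @code{fdroid checkupdates} would fetch the document once, take the first regex group as the version code, apply the Vercode Operation to it, reuse the same document (the @samp{.}) for the version name, and any automatically added build would target a tag of the form @samp{v1.2.3}.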
-In the event that there is no source code for the current version, or that -non-free libraries are being used, this would ideally be the latest -version that is still free, though it may still be expedient to -retain the automatic update check — see No Source Since. - -This field is normally automatically updated - see Update Check Mode. - -This is converted to (@code{}) in the public index file. - -@node Current Version Code -@section Current Version Code - -@cindex Current Version Code - -The version code corresponding to the Current Version field. Both these fields -must be correct and matching although it's the current version code that's -used by Android to determine version order and by F-Droid client to determine -which version should be recommended. - -This field is normally automatically updated - see Update Check Mode. - -This is converted to (@code{}) in the public index file. - -@node No Source Since -@section No Source Since - -@cindex No Source Since - -In case we are missing the source code for the Current Version reported by -Upstream, or that non-free elements have been introduced, this defines the -first version that began to miss source code. -Apps that are missing source code for just one or a few versions, but provide -source code for newer ones are not to be considered here - this field is -intended to illustrate which apps do not currently distribute source code, and -since when have they been doing so. - -@node Update Processing -@chapter Update Processing - -@section Detecting - -There are various mechanisms in place for automatically detecting that updates -are available for applications, with the @code{Update Check Mode} field in the -metadata determining which method is used for a particular application. - -Running the @code{fdroid checkupdates} command will apply this method to each -application in the repository and update the @code{Current Version} and -@code{Current Version Code} fields in the metadata accordingly. - -As usual, the @code{-p} option can be used with this, to restrict processing -to a particular application. - -Note that this only updates the metadata such that we know what the current -published/recommended version is. It doesn't make that version available in -the repository - for that, see the next section. - -@section Adding - -Adding updates (i.e. new versions of applications already included in the -repository) happens in two ways. The simple case is applications where the -APK files are binaries, retrieved from a developer's published build. In this -case, all that's required is to place the new binary in the @code{Repo} -directory, and the next run of @code{fdroid update} will pick it up. - -For applications built from source, it is necessary to add a new -@code{Build Version} line to the metadata file. At the very least, the version -name, version code and commit will be different. It is also possible that the -additional build flags will change between versions. - -For processing multiple updates in the metadata at once, it can be useful to -run @code{fdroid update --interactive}. This will check all the applications -in the repository, and where updates are required you will be prompted to -[E]dit the metadata, [I]gnore the update, or [Q]uit altogether. - -@node Build Server -@chapter Build Server - -The Build Server system isolates the builds for each package within a clean, -isolated and secure throwaway virtual machine environment. 
-
-@section Overview
-
-Building applications in this manner on a large scale, especially with the
-involvement of automated and/or unattended processes, could be considered
-a dangerous pastime from a security perspective. This is even more the case
-when the products of the build are also distributed widely and in a
-semi-automated ("you have updates available") fashion.
-
-Assume that an upstream source repository is compromised. A small selection
-of things that an attacker could do in such a situation:
-
-@enumerate
-@item
-Use custom ant build steps to execute virtually anything as the user doing
-the build.
-@item
-Access the keystore.
-@item
-Modify the built apk files or source tarballs for other applications in the
-repository.
-@item
-Modify the metadata (which includes build scripts, which again, also includes
-the ability to execute anything) for other applications in the repository.
-@end enumerate
-
-Through complete isolation, the repercussions are at least limited to the
-application in question. Not only is the build environment fresh for each
-build, and thrown away afterwards, but it is also isolated from the signing
-environment.
-
-Aside from security issues, there are some applications which have strange
-requirements such as custom versions of the NDK. It would be impractical (or
-at least extremely messy) to start modifying and restoring the SDK on a
-multi-purpose system, but within the confines of a throwaway single-use
-virtual machine, anything is possible.
-
-All this is in addition to the obvious advantage of having a standardised
-and completely reproducible environment in which builds are made. Additionally,
-it allows for specialised custom build environments for particular
-applications.
-
-@section Setting up a build server
-
-In addition to the basic setup previously described, you will also need
-a Vagrant-compatible Debian Testing base box called 'testing32' (or testing64
-for a 64-bit VM, if you want it to be much slower, and require more disk
-space).
-
-You can use a different version or distro for the base box, so long as you
-don't expect any help making it work. One thing to be aware of is that
-working copies of source trees are moved from the host to the guest, so
-for example, having subversion v1.6 on the host and v1.7 on the guest
-would fail.
-
-Unless you're very trusting, you should create one of these for yourself
-from verified standard Debian installation media. However, you could skip
-over the next few paragraphs (and sacrifice some security) by downloading
-@url{https://f-droid.org/testing32.box}.
-
-Documentation for creating a base box can be found at
-@url{http://docs.vagrantup.com/v1/docs/base_boxes.html}.
-
-In addition to carefully following the steps described there, you should
-consider the following:
-
-@enumerate
-@item
-It is advisable to disable udev network device persistence, otherwise any
-movement of the VM between machines, or reconfiguration, will result in
-broken networking.
-
-For a Debian/Ubuntu default install, just
-@code{touch /etc/udev/rules.d/75-persistent-net-generator.rules} to turn
-off rule generation, and at the same time, get rid of any rules it's
-already created in @code{/etc/udev/rules.d/70-persistent-net.rules}.
-@item
-Unless you want the VM to become totally inaccessible following a failed
-boot, you need to set @code{GRUB_RECORDFAIL_TIMEOUT} to a value other than
--1 in @code{/etc/default/grub} and then run @code{update-grub}. 
-@end enumerate - - -With this base box available, you should then create @code{makebs.config.py}, -using @code{makebs.config.sample.py} as a reference - look at the settings and -documentation there to decide if any need changing to suit your environment. -There is a path for retrieving the base box if it doesn't exist, and an apt -proxy definition, both of which may need customising for your environment. -You can then go to the @code{fdroidserver} directory and run this: - -@example -./makebuildserver -@end example - -This will take a long time, and use a lot of bandwidth - most of it spent -installing the necessary parts of the Android SDK for all the various -platforms. Luckily you only need to do it occasionally. Once you have a -working build server image, if the recipes change (e.g. when packages need -to be added) you can just run that script again and the existing one will -be updated in place. - -The main sdk/ndk downloads will automatically be cached to speed things -up the next time, but there's no easy way of doing this for the longer -sections which use the SDK's @code{android} tool to install platforms, -add-ons and tools. However, instead of allowing automatic caching, you -can supply a pre-populated cache directory which includes not only these -downloads, but also .tar.gz files for all the relevant additions. If the -provisioning scripts detect these, they will be used in preference to -running the android tools. For example, if you have -@code{buildserver/addons/cache/platforms/android-19.tar.gz} that will be -used when installing the android-19 platform, instead of re-downloading it -using @code{android update sdk --no-ui -t android-19}. - -Once it's complete you'll have a new base box called 'buildserver' which is -what's used for the actual builds. You can then build packages as normal, -but with the addition of the @code{--server} flag to @code{fdroid build} to -instruct it to do all the hard work within the virtual machine. - -The first time a build is done, a new virtual machine is created using the -'buildserver' box as a base. A snapshot of this clean machine state is saved -for use in future builds, to improve performance. You can force discarding -of this snapshot and rebuilding from scratch using the @code{--resetserver} -switch with @code{fdroid build}. - -@node Signing -@chapter Signing - -There are two kinds of signing involved in running a repository - the signing -of the APK files generated from source builds, and the signing of the repo -index itself. The latter is optional, but very strongly recommended. - -@section Repo Index Signing - -When setting up the repository, one of the first steps should be to generate -a signing key for the repository index. This will also create a keystore, which -is a file that can be used to hold this and all other keys used. Consider the -location, security and backup status of this file carefully, then create it as -follows: - -@code{keytool -genkey -v -keystore my.keystore -alias repokey -keyalg RSA -keysize 2048 -validity 10000} - -In the above, replace 'my.keystore' with the name of the keystore file to be -created, and 'repokey' with a name to identify the repo index key by. - -You'll be asked for a password for the keystore, AND a password for the key. -They shouldn't be the same. In between, you'll be asked for some identifying -details which will go in the certificate. - -The two passwords entered go into @code{config.py}, as @code{keystorepass} and -@code{keypass} respectively. 
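Concretely, the passwords just mentioned, together with the keystore path and key alias described next, end up as plain Python assignments in config.py. A hypothetical excerpt is sketched below; all values are placeholders, and the keydname line mirrors the sample shown in examples/config.yml:

```python
# config.py - hypothetical excerpt, all values are placeholders
keystore = "my.keystore"    # the keystore file created with keytool above
repo_keyalias = "repokey"   # the alias chosen for the repo index key
keystorepass = "password1"  # password for the keystore itself
keypass = "password2"       # password for the key
# identifying details reused for the per-app package signing keys (see below):
keydname = "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US"
```

The same keys appear, in YAML form, in the examples/config.yml file added elsewhere in this change.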
The path to the keystore file, and the alias you -chose for the key also go into that file, as @code{keystore} and -@code{repo_keyalias} respectively. - -@section Package Signing - -With the repo index signing configured, all that remains to be done for package -signing to work is to set the @code{keydname} field in @code{config.py} to -contain the same identifying details you entered before. - -A new key will be generated using these details, for each application that is -built. (If a specific key is required for a particular application, this system -can be overridden using the @code{keyaliases} config settings. - - -@node GNU Free Documentation License -@appendix GNU Free Documentation License - -@include fdl.texi - -@node Index -@unnumbered Index - -@printindex cp - -@bye diff --git a/docs/gendocs.sh b/docs/gendocs.sh deleted file mode 100755 index e4bfc9fd..00000000 --- a/docs/gendocs.sh +++ /dev/null @@ -1,466 +0,0 @@ -#!/bin/sh -e -# gendocs.sh -- generate a GNU manual in many formats. This script is -# mentioned in maintain.texi. See the help message below for usage details. - -scriptversion=2013-02-03.15 - -# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 -# Free Software Foundation, Inc. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# Original author: Mohit Agarwal. -# Send bug reports and any other correspondence to bug-texinfo@gnu.org. -# -# The latest version of this script, and the companion template, is -# available from Texinfo CVS: -# http://savannah.gnu.org/cgi-bin/viewcvs/texinfo/texinfo/util/gendocs.sh -# http://savannah.gnu.org/cgi-bin/viewcvs/texinfo/texinfo/util/gendocs_template -# -# An up-to-date copy is also maintained in Gnulib (gnu.org/software/gnulib). - -# TODO: -# - image importation was only implemented for HTML generated by -# makeinfo. But it should be simple enough to adjust. -# - images are not imported in the source tarball. All the needed -# formats (PDF, PNG, etc.) should be included. - -prog=`basename "$0"` -srcdir=`pwd` - -scripturl="http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs.sh" -templateurl="http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs_template" - -: ${SETLANG="env LANG= LC_MESSAGES= LC_ALL= LANGUAGE="} -: ${MAKEINFO="makeinfo"} -: ${TEXI2DVI="texi2dvi -t @finalout"} -: ${DOCBOOK2HTML="docbook2html"} -: ${DOCBOOK2PDF="docbook2pdf"} -: ${DOCBOOK2TXT="docbook2txt"} -: ${GENDOCS_TEMPLATE_DIR="."} -: ${PERL='perl'} -: ${TEXI2HTML="texi2html"} -unset CDPATH -unset use_texi2html - -version="gendocs.sh $scriptversion - -Copyright 2013 Free Software Foundation, Inc. -There is NO warranty. You may redistribute this software -under the terms of the GNU General Public License. -For more information about these matters, see the files named COPYING." - -usage="Usage: $prog [OPTION]... PACKAGE MANUAL-TITLE - -Generate output in various formats from PACKAGE.texinfo (or .texi or -.txi) source. 
See the GNU Maintainers document for a more extensive -discussion: - http://www.gnu.org/prep/maintain_toc.html - -Options: - --email ADR use ADR as contact in generated web pages; always give this. - - -s SRCFILE read Texinfo from SRCFILE, instead of PACKAGE.{texinfo|texi|txi} - -o OUTDIR write files into OUTDIR, instead of manual/. - -I DIR append DIR to the Texinfo search path. - --common ARG pass ARG in all invocations. - --html ARG pass ARG to makeinfo or texi2html for HTML targets. - --info ARG pass ARG to makeinfo for Info, instead of --no-split. - --no-ascii skip generating the plain text output. - --source ARG include ARG in tar archive of sources. - --split HOW make split HTML by node, section, chapter; default node. - - --texi2html use texi2html to make HTML target, with all split versions. - --docbook convert through DocBook too (xml, txt, html, pdf). - - --help display this help and exit successfully. - --version display version information and exit successfully. - -Simple example: $prog --email bug-gnu-emacs@gnu.org emacs \"GNU Emacs Manual\" - -Typical sequence: - cd PACKAGESOURCE/doc - wget \"$scripturl\" - wget \"$templateurl\" - $prog --email BUGLIST MANUAL \"GNU MANUAL - One-line description\" - -Output will be in a new subdirectory \"manual\" (by default; -use -o OUTDIR to override). Move all the new files into your web CVS -tree, as explained in the Web Pages node of maintain.texi. - -Please use the --email ADDRESS option so your own bug-reporting -address will be used in the generated HTML pages. - -MANUAL-TITLE is included as part of the HTML of the overall -manual/index.html file. It should include the name of the package being -documented. manual/index.html is created by substitution from the file -$GENDOCS_TEMPLATE_DIR/gendocs_template. (Feel free to modify the -generic template for your own purposes.) - -If you have several manuals, you'll need to run this script several -times with different MANUAL values, specifying a different output -directory with -o each time. Then write (by hand) an overall index.html -with links to them all. - -If a manual's Texinfo sources are spread across several directories, -first copy or symlink all Texinfo sources into a single directory. -(Part of the script's work is to make a tar.gz of the sources.) - -As implied above, by default monolithic Info files are generated. -If you want split Info, or other Info options, use --info to override. - -You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML, -and PERL to control the programs that get executed, and -GENDOCS_TEMPLATE_DIR to control where the gendocs_template file is -looked for. With --docbook, the environment variables DOCBOOK2HTML, -DOCBOOK2PDF, and DOCBOOK2TXT are also consulted. - -By default, makeinfo and texi2dvi are run in the default (English) -locale, since that's the language of most Texinfo manuals. If you -happen to have a non-English manual and non-English web site, see the -SETLANG setting in the source. - -Email bug reports or enhancement requests to bug-texinfo@gnu.org. -" - -MANUAL_TITLE= -PACKAGE= -EMAIL=webmasters@gnu.org # please override with --email -commonarg= # passed to all makeinfo/texi2html invcations. -dirargs= # passed to all tools (-I dir). -dirs= # -I's directories. 
-htmlarg= -infoarg=--no-split -generate_ascii=true -outdir=manual -source_extra= -split=node -srcfile= - -while test $# -gt 0; do - case $1 in - -s) shift; srcfile=$1;; - -o) shift; outdir=$1;; - -I) shift; dirargs="$dirargs -I '$1'"; dirs="$dirs $1";; - --common) shift; commonarg=$1;; - --docbook) docbook=yes;; - --email) shift; EMAIL=$1;; - --html) shift; htmlarg=$1;; - --info) shift; infoarg=$1;; - --no-ascii) generate_ascii=false;; - --source) shift; source_extra=$1;; - --split) shift; split=$1;; - --texi2html) use_texi2html=1;; - - --help) echo "$usage"; exit 0;; - --version) echo "$version"; exit 0;; - -*) - echo "$0: Unknown option \`$1'." >&2 - echo "$0: Try \`--help' for more information." >&2 - exit 1;; - *) - if test -z "$PACKAGE"; then - PACKAGE=$1 - elif test -z "$MANUAL_TITLE"; then - MANUAL_TITLE=$1 - else - echo "$0: extra non-option argument \`$1'." >&2 - exit 1 - fi;; - esac - shift -done - -# makeinfo uses the dirargs, but texi2dvi doesn't. -commonarg=" $dirargs $commonarg" - -# For most of the following, the base name is just $PACKAGE -base=$PACKAGE - -if test -n "$srcfile"; then - # but here, we use the basename of $srcfile - base=`basename "$srcfile"` - case $base in - *.txi|*.texi|*.texinfo) base=`echo "$base"|sed 's/\.[texinfo]*$//'`;; - esac - PACKAGE=$base -elif test -s "$srcdir/$PACKAGE.texinfo"; then - srcfile=$srcdir/$PACKAGE.texinfo -elif test -s "$srcdir/$PACKAGE.texi"; then - srcfile=$srcdir/$PACKAGE.texi -elif test -s "$srcdir/$PACKAGE.txi"; then - srcfile=$srcdir/$PACKAGE.txi -else - echo "$0: cannot find .texinfo or .texi or .txi for $PACKAGE in $srcdir." >&2 - exit 1 -fi - -if test ! -r $GENDOCS_TEMPLATE_DIR/gendocs_template; then - echo "$0: cannot read $GENDOCS_TEMPLATE_DIR/gendocs_template." >&2 - echo "$0: it is available from $templateurl." >&2 - exit 1 -fi - -# Function to return size of $1 in something resembling kilobytes. -calcsize() -{ - size=`ls -ksl $1 | awk '{print $1}'` - echo $size -} - -# copy_images OUTDIR HTML-FILE... -# ------------------------------- -# Copy all the images needed by the HTML-FILEs into OUTDIR. Look -# for them in the -I directories. -copy_images() -{ - local odir - odir=$1 - shift - $PERL -n -e " -BEGIN { - \$me = '$prog'; - \$odir = '$odir'; - @dirs = qw($dirs); -} -" -e ' -/<img src="(.*?)"/g && ++$need{$1}; - -END { - #print "$me: @{[keys %need]}\n"; # for debugging, show images found. - FILE: for my $f (keys %need) { - for my $d (@dirs) { - if (-f "$d/$f") { - use File::Basename; - my $dest = dirname ("$odir/$f"); - # - use File::Path; - -d $dest || mkpath ($dest) - || die "$me: cannot mkdir $dest: $!\n"; - # - use File::Copy; - copy ("$d/$f", $dest) - || die "$me: cannot copy $d/$f to $dest: $!\n"; - next FILE; - } - } - die "$me: $ARGV: cannot find image $f\n"; - } -} -' -- "$@" || exit 1 -} - -case $outdir in - /*) abs_outdir=$outdir;; - *) abs_outdir=$srcdir/$outdir;; -esac - -echo "Making output for $srcfile" -echo " in `pwd`" -mkdir -p "$outdir/" - -cmd="$SETLANG $MAKEINFO -o $PACKAGE.info $commonarg $infoarg \"$srcfile\"" -echo "Generating info... ($cmd)" -eval "$cmd" -tar czf "$outdir/$PACKAGE.info.tar.gz" $PACKAGE.info* -ls -l "$outdir/$PACKAGE.info.tar.gz" -info_tgz_size=`calcsize "$outdir/$PACKAGE.info.tar.gz"` -# do not mv the info files, there's no point in having them available -# separately on the web. - -cmd="$SETLANG $TEXI2DVI $dirargs \"$srcfile\"" -printf "\nGenerating dvi... 
($cmd)\n" -eval "$cmd" -# compress/finish dvi: -gzip -f -9 $PACKAGE.dvi -dvi_gz_size=`calcsize $PACKAGE.dvi.gz` -mv $PACKAGE.dvi.gz "$outdir/" -ls -l "$outdir/$PACKAGE.dvi.gz" - -cmd="$SETLANG $TEXI2DVI --pdf $dirargs \"$srcfile\"" -printf "\nGenerating pdf... ($cmd)\n" -eval "$cmd" -pdf_size=`calcsize $PACKAGE.pdf` -mv $PACKAGE.pdf "$outdir/" -ls -l "$outdir/$PACKAGE.pdf" - -if $generate_ascii; then - opt="-o $PACKAGE.txt --no-split --no-headers $commonarg" - cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" - printf "\nGenerating ascii... ($cmd)\n" - eval "$cmd" - ascii_size=`calcsize $PACKAGE.txt` - gzip -f -9 -c $PACKAGE.txt >"$outdir/$PACKAGE.txt.gz" - ascii_gz_size=`calcsize "$outdir/$PACKAGE.txt.gz"` - mv $PACKAGE.txt "$outdir/" - ls -l "$outdir/$PACKAGE.txt" "$outdir/$PACKAGE.txt.gz" -fi - -html_split() -{ - opt="--split=$1 --node-files $commonarg $htmlarg" - cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $opt \"$srcfile\"" - printf "\nGenerating html by $1... ($cmd)\n" - eval "$cmd" - split_html_dir=$PACKAGE.html - ( - cd ${split_html_dir} || exit 1 - ln -sf ${PACKAGE}.html index.html - tar -czf "$abs_outdir/${PACKAGE}.html_$1.tar.gz" -- *.html - ) - eval html_$1_tgz_size=`calcsize "$outdir/${PACKAGE}.html_$1.tar.gz"` - rm -f "$outdir"/html_$1/*.html - mkdir -p "$outdir/html_$1/" - mv ${split_html_dir}/*.html "$outdir/html_$1/" - rmdir ${split_html_dir} -} - -if test -z "$use_texi2html"; then - opt="--no-split --html -o $PACKAGE.html $commonarg $htmlarg" - cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" - printf "\nGenerating monolithic html... ($cmd)\n" - rm -rf $PACKAGE.html # in case a directory is left over - eval "$cmd" - html_mono_size=`calcsize $PACKAGE.html` - gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz" - html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"` - copy_images "$outdir/" $PACKAGE.html - mv $PACKAGE.html "$outdir/" - ls -l "$outdir/$PACKAGE.html" "$outdir/$PACKAGE.html.gz" - - opt="--html -o $PACKAGE.html --split=$split $commonarg $htmlarg" - cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" - printf "\nGenerating html by $split... ($cmd)\n" - eval "$cmd" - split_html_dir=$PACKAGE.html - copy_images $split_html_dir/ $split_html_dir/*.html - ( - cd $split_html_dir || exit 1 - tar -czf "$abs_outdir/$PACKAGE.html_$split.tar.gz" -- * - ) - eval \ - html_${split}_tgz_size=`calcsize "$outdir/$PACKAGE.html_$split.tar.gz"` - rm -rf "$outdir/html_$split/" - mv $split_html_dir "$outdir/html_$split/" - du -s "$outdir/html_$split/" - ls -l "$outdir/$PACKAGE.html_$split.tar.gz" - -else # use texi2html: - opt="--output $PACKAGE.html $commonarg $htmlarg" - cmd="$SETLANG $TEXI2HTML $opt \"$srcfile\"" - printf "\nGenerating monolithic html with texi2html... ($cmd)\n" - rm -rf $PACKAGE.html # in case a directory is left over - eval "$cmd" - html_mono_size=`calcsize $PACKAGE.html` - gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz" - html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"` - mv $PACKAGE.html "$outdir/" - - html_split node - html_split chapter - html_split section -fi - -printf "\nMaking .tar.gz for sources...\n" -d=`dirname $srcfile` -( - cd "$d" - srcfiles=`ls -d *.texinfo *.texi *.txi *.eps $source_extra 2>/dev/null` || true - tar czfh "$abs_outdir/$PACKAGE.texi.tar.gz" $srcfiles - ls -l "$abs_outdir/$PACKAGE.texi.tar.gz" -) -texi_tgz_size=`calcsize "$outdir/$PACKAGE.texi.tar.gz"` - -if test -n "$docbook"; then - opt="-o - --docbook $commonarg" - cmd="$SETLANG $MAKEINFO $opt \"$srcfile\" >${srcdir}/$PACKAGE-db.xml" - printf "\nGenerating docbook XML... 
($cmd)\n" - eval "$cmd" - docbook_xml_size=`calcsize $PACKAGE-db.xml` - gzip -f -9 -c $PACKAGE-db.xml >"$outdir/$PACKAGE-db.xml.gz" - docbook_xml_gz_size=`calcsize "$outdir/$PACKAGE-db.xml.gz"` - mv $PACKAGE-db.xml "$outdir/" - - split_html_db_dir=html_node_db - opt="$commonarg -o $split_html_db_dir" - cmd="$DOCBOOK2HTML $opt \"${outdir}/$PACKAGE-db.xml\"" - printf "\nGenerating docbook HTML... ($cmd)\n" - eval "$cmd" - ( - cd ${split_html_db_dir} || exit 1 - tar -czf "$abs_outdir/${PACKAGE}.html_node_db.tar.gz" -- *.html - ) - html_node_db_tgz_size=`calcsize "$outdir/${PACKAGE}.html_node_db.tar.gz"` - rm -f "$outdir"/html_node_db/*.html - mkdir -p "$outdir/html_node_db" - mv ${split_html_db_dir}/*.html "$outdir/html_node_db/" - rmdir ${split_html_db_dir} - - cmd="$DOCBOOK2TXT \"${outdir}/$PACKAGE-db.xml\"" - printf "\nGenerating docbook ASCII... ($cmd)\n" - eval "$cmd" - docbook_ascii_size=`calcsize $PACKAGE-db.txt` - mv $PACKAGE-db.txt "$outdir/" - - cmd="$DOCBOOK2PDF \"${outdir}/$PACKAGE-db.xml\"" - printf "\nGenerating docbook PDF... ($cmd)\n" - eval "$cmd" - docbook_pdf_size=`calcsize $PACKAGE-db.pdf` - mv $PACKAGE-db.pdf "$outdir/" -fi - -printf "\nMaking index file...\n" -if test -z "$use_texi2html"; then - CONDS="/%%IF *HTML_SECTION%%/,/%%ENDIF *HTML_SECTION%%/d;\ - /%%IF *HTML_CHAPTER%%/,/%%ENDIF *HTML_CHAPTER%%/d" -else - # should take account of --split here. - CONDS="/%%ENDIF.*%%/d;/%%IF *HTML_SECTION%%/d;/%%IF *HTML_CHAPTER%%/d" -fi - -curdate=`$SETLANG date '+%B %d, %Y'` -sed \ - -e "s!%%TITLE%%!$MANUAL_TITLE!g" \ - -e "s!%%EMAIL%%!$EMAIL!g" \ - -e "s!%%PACKAGE%%!$PACKAGE!g" \ - -e "s!%%DATE%%!$curdate!g" \ - -e "s!%%HTML_MONO_SIZE%%!$html_mono_size!g" \ - -e "s!%%HTML_MONO_GZ_SIZE%%!$html_mono_gz_size!g" \ - -e "s!%%HTML_NODE_TGZ_SIZE%%!$html_node_tgz_size!g" \ - -e "s!%%HTML_SECTION_TGZ_SIZE%%!$html_section_tgz_size!g" \ - -e "s!%%HTML_CHAPTER_TGZ_SIZE%%!$html_chapter_tgz_size!g" \ - -e "s!%%INFO_TGZ_SIZE%%!$info_tgz_size!g" \ - -e "s!%%DVI_GZ_SIZE%%!$dvi_gz_size!g" \ - -e "s!%%PDF_SIZE%%!$pdf_size!g" \ - -e "s!%%ASCII_SIZE%%!$ascii_size!g" \ - -e "s!%%ASCII_GZ_SIZE%%!$ascii_gz_size!g" \ - -e "s!%%TEXI_TGZ_SIZE%%!$texi_tgz_size!g" \ - -e "s!%%DOCBOOK_HTML_NODE_TGZ_SIZE%%!$html_node_db_tgz_size!g" \ - -e "s!%%DOCBOOK_ASCII_SIZE%%!$docbook_ascii_size!g" \ - -e "s!%%DOCBOOK_PDF_SIZE%%!$docbook_pdf_size!g" \ - -e "s!%%DOCBOOK_XML_SIZE%%!$docbook_xml_size!g" \ - -e "s!%%DOCBOOK_XML_GZ_SIZE%%!$docbook_xml_gz_size!g" \ - -e "s,%%SCRIPTURL%%,$scripturl,g" \ - -e "s!%%SCRIPTNAME%%!$prog!g" \ - -e "$CONDS" \ -$GENDOCS_TEMPLATE_DIR/gendocs_template >"$outdir/index.html" - -echo "Done, see $outdir/ subdirectory for new files." - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" -# End: diff --git a/docs/gendocs_template b/docs/gendocs_template deleted file mode 100644 index 63fbe539..00000000 --- a/docs/gendocs_template +++ /dev/null @@ -1,87 +0,0 @@ -<!--#include virtual="/server/header.html" --> -<title>%%TITLE%% - GNU Project - Free Software Foundation (FSF) - -

%%TITLE%%

- -
Free Software Foundation
-
last updated %%DATE%%
- -

This manual (%%PACKAGE%%) is available in the following formats:

- - - -

You can buy printed copies of -some manuals (among other items) from the Free Software Foundation; -this helps support FSF activities.

- -

(This page generated by the %%SCRIPTNAME%% -script.)

- - - - - - - - diff --git a/docs/index_versions.md b/docs/index_versions.md deleted file mode 100644 index af903b8b..00000000 --- a/docs/index_versions.md +++ /dev/null @@ -1,11 +0,0 @@ -### 11 (January 2014) - -* Support per-density icon folders (/icons-\*) - -### 10 (January 2014) - -* First version - -### 0 (?) - -* No version yet declared diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..6247f7e2 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..c20542de --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,78 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys + +sys.path.insert(0, os.path.abspath('../../fdroidserver')) + +# -- Project information ----------------------------------------------------- + +project = 'fdroidserver' +copyright = '2021, The F-Droid Project' +author = 'The F-Droid Project' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'numpydoc', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + "sphinx.ext.intersphinx", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "pydata_sphinx_theme" + +html_theme_options = { + "gitlab_url": "https://gitlab.com/fdroid/fdroidserver", + "show_prev_next": False, + "navbar_end": ["search-field.html", "navbar-icon-links.html"], +} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +html_sidebars = { + "**": [], +} + +#html_sidebars = { +# '**': ['globaltoc.html', 'sourcelink.html', 'searchbox.html'], +# 'using/windows': ['windowssidebar.html', 'searchbox.html'], +#} + +html_split_index = True +#numpydoc_validation_checks = {"all"} + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), +} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..fcd4dfe3 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,20 @@ +.. fdroidserver documentation master file, created by + sphinx-quickstart on Mon May 3 10:06:52 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to fdroidserver's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + +These pages contain the autogenerated module docu based on the current `sources `_. + +Indices and tables +================== + + +* Under :ref:`modindex` the different fdroidserver modules are listed. +* In :ref:`genindex` you'll find all methods sorted alphabetically. diff --git a/docs/update.sh b/docs/update.sh deleted file mode 100755 index 7ecf2966..00000000 --- a/docs/update.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -set -e -./gendocs.sh --email admin@f-droid.org fdroid "F-Droid Server Manual" -scp -r manual/* fdroid@f-droid.org:public_html/manual/ -rm fdroid.cps fdroid.ky fdroid.vr fdroid.aux fdroid.fn fdroid.log fdroid.toc -rm fdroid.cp fdroid.info fdroid.pg fdroid.tp - diff --git a/examples/Vagrantfile.yaml b/examples/Vagrantfile.yaml new file mode 100644 index 00000000..276f0179 --- /dev/null +++ b/examples/Vagrantfile.yaml @@ -0,0 +1,54 @@ +--- + +# You may want to alter these before running ./makebuildserver + +# In the process of setting up the build server, many gigs of files +# are downloaded (Android SDK components, gradle, etc). These are +# cached so that they are not redownloaded each time. By default, +# these are stored in ~/.cache/fdroidserver +# +# cachedir: buildserver/cache + +# To specify which Debian mirror the build server VM should use, by +# default it uses http.debian.net, which auto-detects which is the +# best mirror to use. +# +# debian_mirror: https://debian.osuosl.org/debian/ + +# The amount of RAM the build server will have (default: 2048) +# memory: 3584 + +# The number of CPUs the build server will have +# cpus: 1 + +# Debian package proxy server - if you have one +# aptproxy: http://192.168.0.19:8000 + +# If this is running on an older machine or on a virtualized system, +# it can run a lot slower. If the provisioning fails with a warning +# about the timeout, extend the timeout here. (default: 600 seconds) +# +# boot_timeout: 1200 + +# By default, this whole process uses VirtualBox as the provider, but +# QEMU+KVM is also supported via the libvirt plugin to vagrant. If +# this is run within a KVM guest, then libvirt's QEMU+KVM will be used +# automatically. It can also be manually enabled by uncommenting +# below: +# +# vm_provider: libvirt + +# By default libvirt uses 'virtio' for both network and disk drivers. +# Some systems (eg. nesting VMware ESXi) do not support virtio. As a +# workaround for such rare cases, this setting allows to configure +# KVM/libvirt to emulate hardware rather than using virtio. 
+# +# libvirt_disk_bus: sata +# libvirt_nic_model_type: rtl8139 + +# Sometimes, it is not possible to use the 9p synced folder type with +# libvirt, like if running a KVM buildserver instance inside of a +# VMware ESXi guest. In that case, using NFS or another method is +# required. +# +# synced_folder_type: nfs diff --git a/examples/config.yml b/examples/config.yml new file mode 100644 index 00000000..ae4e7008 --- /dev/null +++ b/examples/config.yml @@ -0,0 +1,433 @@ +--- +# Copy this file to config.yml, then amend the settings below according to +# your system configuration. + +# Custom path to the Android SDK, defaults to $ANDROID_HOME +# sdk_path: $ANDROID_HOME + +# Paths to installed versions of the Android NDK. This will be +# automatically filled out from well known sources like +# $ANDROID_HOME/ndk-bundle and $ANDROID_HOME/ndk/*. If a required +# version is missing in the buildserver VM, it will be automatically +# downloaded and installed into the standard $ANDROID_HOME/ndk/ +# directory. Manually setting it here will override the auto-detected +# values. The keys can either be the "release" (e.g. r21e) or the +# "revision" (e.g. 21.4.7075529). +# +# ndk_paths: +# r10e: $ANDROID_HOME/android-ndk-r10e +# r17: "" +# 21.4.7075529: ~/Android/Ndk +# r22b: null + +# Directory to store downloaded tools in (i.e. gradle versions) +# By default, these are stored in ~/.cache/fdroidserver +# cachedir: cache + +# Specify paths to each major Java release that you want to support +# java_paths: +# 8: /usr/lib/jvm/java-8-openjdk + +# Command or path to binary for running Ant +# ant: ant + +# Command or path to binary for running maven 3 +# mvn3: mvn + +# Command or path to binary for running Gradle +# Defaults to using an internal gradle wrapper (gradlew-fdroid). +# gradle: gradle + +# Always scan the APKs produced by `fdroid build` for known non-free classes +# scan_binary: true + +# Set the maximum age (in days) of an index that a client should accept from +# this repo. Setting it to 0 or not setting it at all disables this +# functionality. If you do set this to a non-zero value, you need to ensure +# that your index is updated much more frequently than the specified interval. +# The same policy is applied to the archive repo, if there is one. +# repo_maxage: 0 + +# Canonical URL of the repositoy, needs to end in /repo. Is is used to identity +# the repo in the client, as well. +# repo_url: https://MyFirstFDroidRepo.org/fdroid/repo +# +# Base URL for per-package pages on the website of this repo, +# i.e. https://f-droid.org/packages// This should be accessible +# with a browser. Setting it to null or not setting this disables the +# feature. +# repo_web_base_url: https://MyFirstFDroidRepo.org/packages/ +# +# repo_name: My First F-Droid Repo Demo +# repo_description: >- +# This is a repository of apps to be used with F-Droid. Applications +# in this repository are either official binaries built by the +# original application developers, or are binaries built from source +# by the admin of f-droid.org using the tools on +# https://gitlab.com/fdroid. + +# As above, but for the archive repo. +# +# archive_url: https://f-droid.org/archive +# archive_web_base_url: +# archive_name: My First F-Droid Archive Demo +# archive_description: >- +# The repository of older versions of packages from the main demo repository. + +# archive_older sets the number of versions kept in the main repo, with all +# older ones going to the archive. 
Set it to 0, and there will be no archive +# repository, and no need to define the other archive_ values. +# +# archive_older: 3 + +# The repo's icon defaults to a file called 'icon.png' in the 'icons' +# folder for each section, e.g. repo/icons/icon.png and +# archive/icons/icon.png. To use a different filename for the icons, +# set the filename here. You must still copy it into place in +# repo/icons/ and/or archive/icons/. +# +# repo_icon: myicon.png +# archive_icon: myicon.png + +# This allows a specific kind of insecure APK to be included in the +# 'repo' section. Since April 2017, APK signatures that use MD5 are +# no longer considered valid, jarsigner and apksigner will return an +# error when verifying. `fdroid update` will move APKs with these +# disabled signatures to the archive. This option stops that +# behavior, and lets those APKs stay part of 'repo'. +# +# allow_disabled_algorithms: true + +# Normally, all apps are collected into a single app repository, like on +# https://f-droid.org. For certain situations, it is better to make a repo +# that is made up of APKs only from a single app. For example, an automated +# build server that publishes nightly builds. +# per_app_repos: true + +# `fdroid update` will create a link to the current version of a given app. +# This provides a static path to the current APK. To disable the creation of +# this link, uncomment this: +# make_current_version_link: false + +# By default, the "current version" link will be based on the "Name" of the +# app from the metadata. You can change it to use a different field from the +# metadata here: +# current_version_name_source: packageName + +# Optionally, override home directory for gpg +# gpghome: /home/fdroid/somewhere/else/.gnupg + +# The ID of a GPG key for making detached signatures for APKs. Optional. +# gpgkey: 1DBA2E89 + +# The key (from the keystore defined below) to be used for signing the +# repository itself. This is the same name you would give to keytool or +# jarsigner using -alias. (Not needed in an unsigned repository). +# repo_keyalias: fdroidrepo + +# Optionally, the public key for the key defined by repo_keyalias above can +# be specified here. There is no need to do this, as the public key can and +# will be retrieved from the keystore when needed. However, specifying it +# manually can allow some processing to take place without access to the +# keystore. +# repo_pubkey: ... + +# The keystore to use for release keys when building. This needs to be +# somewhere safe and secure, and backed up! The best way to manage these +# sensitive keys is to use a "smartcard" (aka Hardware Security Module). To +# configure F-Droid to use a smartcard, set the keystore file using the keyword +# "NONE" (i.e. keystore: "NONE"). That makes Java find the keystore on the +# smartcard based on 'smartcardoptions' below. +# keystore: ~/.local/share/fdroidserver/keystore.jks + +# You should not need to change these at all, unless you have a very +# customized setup for using smartcards in Java with keytool/jarsigner +# smartcardoptions: | +# -storetype PKCS11 -providerName SunPKCS11-OpenSC +# -providerClass sun.security.pkcs11.SunPKCS11 +# -providerArg opensc-fdroid.cfg + +# The password for the keystore (at least 6 characters). If this password is +# different than the keypass below, it can be OK to store the password in this +# file for real use. But in general, sensitive passwords should not be stored +# in text files! 
+# keystorepass: password1 + +# The password for keys - the same is used for each auto-generated key as well +# as for the repository key. You should not normally store this password in a +# file since it is a sensitive password. +# keypass: password2 + +# The distinguished name used for all keys. +# keydname: CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US + +# Use this to override the auto-generated key aliases with specific ones +# for particular applications. Normally, just leave it empty. +# +# keyaliases: +# com.example.app: example +# +# You can also force an app to use the same key alias as another one, using +# the @ prefix. +# +# keyaliases: +# com.example.another.plugin: "@com.example.another" + + +# The full path to the root of the repository. It must be specified in +# rsync/ssh format for a remote host/path. This is used for syncing a locally +# generated repo to the server that is it hosted on. It must end in the +# standard public repo name of "/fdroid", but can be in up to three levels of +# sub-directories (i.e. /var/www/packagerepos/fdroid). You can include +# multiple servers to sync to by wrapping the whole thing in {} or [], and +# including the serverwebroot strings in a comma-separated list. +# +# serverwebroot: user@example:/var/www/fdroid +# serverwebroot: +# - foo.com:/usr/share/nginx/www/fdroid +# - bar.info:/var/www/fdroid +# +# There is a special mode to only deploy the index file: +# +# serverwebroot: +# - url: 'me@b.az:/srv/fdroid' +# index_only: true + + +# When running fdroid processes on a remote server, it is possible to +# publish extra information about the status. Each fdroid sub-command +# can create repo/status/running.json when it starts, then a +# repo/status/.json when it completes. The builds logs +# and other processes will also get published, if they are running in +# a buildserver VM. The build logs name scheme is: +# .../repo/$APPID_$VERCODE.log.gz. These files are also pushed to all +# servers configured in 'serverwebroot'. +# +# deploy_process_logs: true + +# The full URL to a git remote repository. You can include +# multiple servers to mirror to by adding strings to a YAML list or map. +# Servers listed here will also be automatically inserted in the mirrors list. +# +# servergitmirrors: https://github.com/user/repo +# servergitmirrors: +# - https://github.com/user/repo +# - https://gitlab.com/user/repo +# +# servergitmirrors: +# - url: https://github.com/user/repo +# - url: https://gitlab.com/user/repo +# index_only: true + + +# These settings allow using `fdroid deploy` for publishing APK files from +# your repository to GitHub Releases. (You should also run `fdroid update` +# every time before deploying to GitHub releases to update index files.) Here's +# an example for this deployment automation: +# https://github.com/f-droid/fdroidclient/releases/ +# +# Currently, versions which are assigned to a release channel (e.g. alpha or +# beta releases) are ignored. +# +# In the example below, tokens are read from environment variables. Putting +# tokens directly into the config file is also supported but discouraged. 
It is +# highly recommended to use a "Fine-grained personal access token", which is +# restricted to the minimum required permissions, which are: +# * Metadata - read +# * Contents - read/write +# (https://github.com/settings/personal-access-tokens/new) +# +# github_token: {env: GITHUB_TOKEN} +# github_releases: +# - projectUrl: https://github.com/f-droid/fdroidclient +# packageNames: +# - org.fdroid.basic +# - org.fdroid.fdroid +# release_notes_prepend: | +# Re-post of official F-Droid App release from https://f-droid.org +# - projectUrl: https://github.com/example/app +# packageNames: com.example.app +# token: {env: GITHUB_TOKEN_EXAMPLE} + + +# Most git hosting services have hard size limits for each git repo. +# `fdroid deploy` will delete the git history when the git mirror repo +# approaches this limit to ensure that the repo will still fit when +# pushed. GitHub recommends 1GB, gitlab.com recommends 10GB. +# +# git_mirror_size_limit: 10GB + +# Any mirrors of this repo, for example all of the servers declared in +# serverwebroot and all the servers declared in servergitmirrors, +# will automatically be used by the client. If one +# mirror is not working, then the client will try another. If the +# client has Tor enabled, then the client will prefer mirrors with +# .onion addresses. This base URL will be used for both the main repo +# and the archive, if it is enabled. So these URLs should end in the +# 'fdroid' base of the F-Droid part of the web server like serverwebroot. +# +# mirrors: +# - https://foo.bar/fdroid +# - http://foobarfoobarfoobar.onion/fdroid +# +# Or additional metadata can also be included by adding key/value pairs: +# +# mirrors: +# - url: https://foo.bar/fdroid +# countryCode: BA +# - url: http://foobarfoobarfoobar.onion/fdroid +# +# The list of mirrors can also be maintained in config/mirrors.yml, a +# standalone YAML file in the optional configuration directory. In +# that case, mirrors: should be removed from this file (config.yml). + + +# optionally specify which identity file to use when using rsync or git over SSH +# +# identity_file: ~/.ssh/fdroid_id_rsa + + +# If you are running the repo signing process on a completely offline machine, +# which provides the best security, then you can specify a folder to sync the +# repo to when running `fdroid deploy`. This is most likely going to +# be a USB thumb drive, SD Card, or some other kind of removable media. Make +# sure it is mounted before running `fdroid deploy`. Using the +# standard folder called 'fdroid' as the specified folder is recommended, like +# with serverwebroot. +# +# local_copy_dir: /media/MyUSBThumbDrive/fdroid + + +# If you are using local_copy_dir on an offline build/signing server, once the +# thumb drive has been plugged into the online machine, it will need to be +# synced to the copy on the online machine. To make that happen +# automatically, set sync_from_local_copy_dir to True: +# +# sync_from_local_copy_dir: true + +# To deploy to an AWS S3 "bucket" in the US East region, set the +# bucket name in the config, then set the environment variables +# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY using the values from +# the AWS Management Console. See +# https://rclone.org/s3/#authentication +# +# awsbucket: myawsfdroidbucket + + +# For extended options for syncing to cloud drive and object store +# services, `fdroid deploy' wraps Rclone. Rclone is a full featured +# sync tool for a huge variety of cloud services. 
Set up your services +# using `rclone config`, then specify each config name to deploy the +# awsbucket: to. Using rclone_config: overrides the default AWS S3 US +# East setup, and will only sync to the services actually specified. +# +# awsbucket: myawsfdroidbucket +# rclone_config: +# - aws-sample-config +# - rclone-supported-service-config + + +# By default Rclone uses the user's default configuration file at +# ~/.config/rclone/rclone.conf To specify a custom configuration file, +# please add the full path to the configuration file as below. +# +# path_to_custom_rclone_config: /home/mycomputer/somedir/example.conf + + +# If you want to force 'fdroid server' to use a non-standard serverwebroot. +# This will allow you to have 'serverwebroot' entries which do not end in +# '/fdroid'. (Please note that some client features expect repository URLs +# to end in '/fdroid/repo'.) +# +# nonstandardwebroot: false + + +# If you want to upload the release APK file to androidobservatory.org +# +# androidobservatory: false + + +# If you want to upload the release APK file to virustotal.com +# You have to enter your profile apikey to enable the upload. +# +# virustotal_apikey: 9872987234982734 +# +# Or get it from an environment variable: +# +# virustotal_apikey: {env: virustotal_apikey} + + +# Keep a log of all generated index files in a git repo to provide a +# "binary transparency" log for anyone to check the history of the +# binaries that are published. This is in the form of a "git remote", +# which this machine where `fdroid update` is run has already been +# configured to allow push access (e.g. ssh key, username/password, etc) +# binary_transparency_remote: git@gitlab.com:fdroid/binary-transparency-log.git + +# Set this to true to always use a build server. This saves specifying the +# --server option on dedicated secure build server hosts. +# build_server_always: true + +# Limit in number of characters that fields can take up +# Only the fields listed here are supported, defaults shown +# char_limits: +# author: 256 +# name: 50 +# summary: 80 +# description: 4000 +# video: 256 +# whatsNew: 500 + +# It is possible for the server operator to specify lists of apps that +# must be installed or uninstalled on the client (aka "push installs). +# If the user has opted in, or the device is already setup to respond +# to these requests, then F-Droid will automatically install/uninstall +# the packageNames listed. This is protected by the same signing key +# as the app index metadata. +# +# install_list: +# - at.bitfire.davdroid +# - com.fsck.k9 +# - us.replicant +# +# uninstall_list: +# - com.facebook.orca +# - com.android.vending + +# `fdroid lint` checks licenses in metadata against a built white list. By +# default we will require license metadata to be present and only allow +# licenses approved either by FSF or OSI. We're using the standardized SPDX +# license IDs. (https://spdx.org/licenses/) +# +# We use `python3 -m spdx-license-list print --filter-fsf-or-osi` for +# generating our default list. (https://pypi.org/project/spdx-license-list) +# +# You can override our default list of allowed licenes by setting this option. +# Just supply a custom list of licene names you would like to allow. To disable +# checking licenses by the linter, assign an empty value to lint_licenses. +# +# lint_licenses: +# - Custom-License-A +# - Another-License + +# `fdroid scanner` can scan for signatures from various sources. By default +# it's configured to only use F-Droids official SUSS collection. 
We have +# support for these special collections: +# * 'exodus' - official exodus-privacy.org signatures +# * 'etip' - exodus privacy investigation platfrom community contributed +# signatures +# * 'suss' - official F-Droid: Suspicious or Unwanted Software Signatures +# You can also configure scanner to use custom collections of signatures here. +# They have to follow the format specified in the SUSS readme. +# (https://gitlab.com/fdroid/fdroid-suss/#cache-file-data-format) +# +# scanner_signature_sources: +# - suss +# - exodus +# - https://example.com/signatures.json + +# The scanner can use signature sources from the internet. These are +# cached locally. To force them to be refreshed from the network on +# every run, set this to true: +# +# refresh_scanner: true diff --git a/examples/fdroid_clean_repos.py b/examples/fdroid_clean_repos.py new file mode 100644 index 00000000..6b19cacc --- /dev/null +++ b/examples/fdroid_clean_repos.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# +# an fdroid plugin for resetting app VCSs to the latest version for the metadata + +import argparse +import logging + +from fdroidserver import _, common, metadata +from fdroidserver.exception import VCSException + +fdroid_summary = 'reset app VCSs to the latest version' + + +def main(): + parser = argparse.ArgumentParser( + usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]" + ) + common.setup_global_opts(parser) + parser.add_argument( + "appid", + nargs='*', + help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"), + ) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + apps = common.read_app_args( + options.appid, allow_version_codes=True, sort_by_time=True + ) + common.read_config() + + for appid, app in apps.items(): + if "Builds" in app and len(app["Builds"]) > 0: + build = app.get('Builds')[-1] + logging.info(_("Cleaning up '{appid}' VCS").format(appid=appid)) + try: + vcs, build_dir = common.setup_vcs(app) + vcs.gotorevision(build.commit) + if build.submodules: + vcs.initsubmodules() + + except VCSException: + pass + + +if __name__ == "__main__": + main() diff --git a/examples/fdroid_export_keystore_to_nitrokey.py b/examples/fdroid_export_keystore_to_nitrokey.py new file mode 100644 index 00000000..6e920a78 --- /dev/null +++ b/examples/fdroid_export_keystore_to_nitrokey.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# +# an fdroid plugin for exporting a repo's keystore in standard PEM format + +import os +from argparse import ArgumentParser + +from fdroidserver import common +from fdroidserver.common import FDroidPopen +from fdroidserver.exception import BuildException + +fdroid_summary = "export the repo's keystore file to a NitroKey HSM" + + +def run(cmd, error): + envs = {'LC_ALL': 'C.UTF-8', + 'PIN': config['smartcard_pin'], + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass']} + p = FDroidPopen(cmd, envs=envs) + if p.returncode != 0: + raise BuildException(error, p.output) + + +def main(): + global config + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + config = common.read_config() + destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_') + exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_') + if os.path.exists(destkeystore) or os.path.exists(exportkeystore): + raise BuildException('%s exists!' 
% exportkeystore) + run([config['keytool'], '-importkeystore', + '-srckeystore', config['keystore'], + '-srcalias', config['repo_keyalias'], + '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', + '-srckeypass:env', 'FDROID_KEY_PASS', + '-destkeystore', destkeystore, + '-deststorepass:env', 'FDROID_KEY_STORE_PASS', + '-deststoretype', 'PKCS12'], + 'Failed to convert to PKCS12!') +# run(['openssl', 'pkcs12', '-in', destkeystore, +# '-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys', +# '-out', exportkeystore, +# '-passout', 'env:FDROID_KEY_STORE_PASS'], +# 'Failed to convert to PEM!') + run(['pkcs15-init', '--delete-objects', 'privkey,pubkey', + '--id', '3', '--store-private-key', destkeystore, + '--format', 'pkcs12', '--auth-id', '3', + '--verify-pin', '--pin', 'env:PIN'], + '') + run(['pkcs15-init', '--delete-objects', 'privkey,pubkey', + '--id', '2', '--store-private-key', destkeystore, + '--format', 'pkcs12', '--auth-id', '3', + '--verify-pin', '--pin', 'env:PIN'], + '') + + +if __name__ == "__main__": + main() diff --git a/examples/fdroid_exportkeystore.py b/examples/fdroid_exportkeystore.py new file mode 100644 index 00000000..f2a16980 --- /dev/null +++ b/examples/fdroid_exportkeystore.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 +# +# an fdroid plugin for exporting a repo's keystore in standard PEM format + +import os +from argparse import ArgumentParser + +from fdroidserver import common +from fdroidserver.common import FDroidPopen +from fdroidserver.exception import BuildException + +fdroid_summary = 'export the keystore in standard PEM format' + + +def main(): + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + config = common.read_config() + env_vars = {'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass']} + destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_') + exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_') + if os.path.exists(destkeystore) or os.path.exists(exportkeystore): + raise BuildException('%s exists!' 
% exportkeystore) + p = FDroidPopen([config['keytool'], '-importkeystore', + '-srckeystore', config['keystore'], + '-srcalias', config['repo_keyalias'], + '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', + '-srckeypass:env', 'FDROID_KEY_PASS', + '-destkeystore', destkeystore, + '-deststoretype', 'PKCS12', + '-deststorepass:env', 'FDROID_KEY_STORE_PASS', + '-destkeypass:env', 'FDROID_KEY_PASS'], + envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to convert to PKCS12!", p.output) + p = FDroidPopen(['openssl', 'pkcs12', '-in', destkeystore, + '-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys', + '-out', exportkeystore, + '-passout', 'env:FDROID_KEY_STORE_PASS'], + envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to convert to PEM!", p.output) + + +if __name__ == "__main__": + main() diff --git a/examples/fdroid_extract_repo_pubkey.py b/examples/fdroid_extract_repo_pubkey.py new file mode 100644 index 00000000..cb5a895c --- /dev/null +++ b/examples/fdroid_extract_repo_pubkey.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# +# an fdroid plugin print the repo_pubkey from a repo's keystore +# + +from argparse import ArgumentParser + +from fdroidserver import common, index + +fdroid_summary = 'export the keystore in standard PEM format' + + +def main(): + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + common.read_config() + pubkey, repo_pubkey_fingerprint = index.extract_pubkey() + print('repo_pubkey = "%s"' % pubkey.decode()) + + +if __name__ == "__main__": + main() diff --git a/examples/fdroid_fetchsrclibs.py b/examples/fdroid_fetchsrclibs.py new file mode 100644 index 00000000..aba6f7fa --- /dev/null +++ b/examples/fdroid_fetchsrclibs.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# +# an fdroid plugin for setting up srclibs +# +# The 'fdroid build' gitlab-ci job uses --on-server, which does not +# set up the srclibs. This plugin does the missing setup. 
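+#
+# A usage sketch (the appid:versionCode pair below is only a placeholder):
+# with this module importable as fdroid_fetchsrclibs, it shows up as the
+# `fetchsrclibs` subcommand, which is run before the build step:
+#
+#     fdroid fetchsrclibs org.example.app:100
+#     fdroid build --on-server org.example.app:100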
+ +import argparse +import os +import pprint + +from fdroidserver import _, common, metadata + +fdroid_summary = 'prepare the srclibs for `fdroid build --on-server`' + + +def main(): + parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") + common.setup_global_opts(parser) + parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True) + common.read_config() + srclib_dir = os.path.join('build', 'srclib') + os.makedirs(srclib_dir, exist_ok=True) + srclibpaths = [] + for appid, app in apps.items(): + vcs, _ignored = common.setup_vcs(app) + for build in app.get('Builds', []): + vcs.gotorevision(build.commit, refresh=False) + if build.submodules: + vcs.initsubmodules() + else: + vcs.deinitsubmodules() + for lib in build.srclibs: + srclibpaths.append(common.getsrclib(lib, srclib_dir, prepare=False, build=build)) + print('Set up srclibs:') + pprint.pprint(srclibpaths) + + +if __name__ == "__main__": + main() diff --git a/examples/fdroid_nitrokeyimport.py b/examples/fdroid_nitrokeyimport.py new file mode 100644 index 00000000..d17a6186 --- /dev/null +++ b/examples/fdroid_nitrokeyimport.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 + +from argparse import ArgumentParser + +from fdroidserver import common +from fdroidserver.common import FDroidPopen +from fdroidserver.exception import BuildException + +fdroid_summary = 'import the local keystore into a SmartCard HSM' + + +def main(): + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + config = common.read_config() + env_vars = { + 'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass'], + 'SMARTCARD_PIN': str(config['smartcard_pin']), + } + p = FDroidPopen([config['keytool'], '-importkeystore', + '-srcalias', config['repo_keyalias'], + '-srckeystore', config['keystore'], + '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', + '-srckeypass:env', 'FDROID_KEY_PASS', + '-destalias', config['repo_keyalias'], + '-destkeystore', 'NONE', + '-deststoretype', 'PKCS11', + '-providerName', 'SunPKCS11-OpenSC', + '-providerClass', 'sun.security.pkcs11.SunPKCS11', + '-providerArg', 'opensc-fdroid.cfg', + '-deststorepass:env', 'SMARTCARD_PIN', + '-J-Djava.security.debug=sunpkcs11'], + envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to import into HSM!", p.output) + + +if __name__ == "__main__": + main() diff --git a/examples/opensc-fdroid.cfg b/examples/opensc-fdroid.cfg new file mode 100644 index 00000000..bf3ef2fd --- /dev/null +++ b/examples/opensc-fdroid.cfg @@ -0,0 +1,4 @@ +name = OpenSC +description = SunPKCS11 w/ OpenSC Smart card Framework +library = /usr/lib/opensc-pkcs11.so +slotListIndex = 1 diff --git a/examples/public-read-only-s3-bucket-policy.json b/examples/public-read-only-s3-bucket-policy.json new file mode 100644 index 00000000..9316bbe0 --- /dev/null +++ b/examples/public-read-only-s3-bucket-policy.json @@ -0,0 +1,11 @@ +{ + "Version":"2012-10-17", + "Statement":[ + {"Sid":"AddPerm", + "Effect":"Allow", + "Principal":"*", + "Action":"s3:GetObject", + "Resource":"arn:aws:s3:::examplebucket/fdroid/*" + } + ] +} diff --git a/examples/template.yml b/examples/template.yml new file mode 100644 index 00000000..c9e565f6 --- /dev/null +++ b/examples/template.yml @@ -0,0 +1,21 
@@ +AuthorName: . +WebSite: '' +Bitcoin: null +Litecoin: null +Donate: null + +License: Unknown +Categories: + - Internet + +IssueTracker: '' +SourceCode: '' +Changelog: '' + +Name: . +Summary: . +Description: | + . + +ArchivePolicy: 2 versions +RequiresRoot: false diff --git a/fd-commit b/fd-commit deleted file mode 100755 index e64486a1..00000000 --- a/fd-commit +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash -# -# fd-commit - part of the FDroid server tools -# Commits updates to apps, allowing you to edit the commit messages -# -# Copyright (C) 2013 Daniel Martí -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -commands=() - -if [ ! -d metadata ]; then - [ -d ../metadata ] && cd .. || { echo "No metadata files found!"; exit 2; } -fi - -while read line; do - if [[ "$line" == *M*metadata/*.txt ]]; then - file=${line##* } - - id=${file##*/} - id=${id%.txt*} - if [ $# -gt 0 ]; then - found=false - for arg in "$@"; do - if [ "$id" == "$arg" ]; then - found=true - break - fi - done - $found || continue - fi - - [ -d metadata/$id ] && extra=metadata/$id || extra= - - name= autoname= - while read l; do - if [[ "$l" == "Auto Name:"* ]]; then - autoname=${l##*:} - elif [[ "$l" == "Name:"* ]]; then - name=${l##*:} - fi - done < "$file" - - if [ -n "$name" ]; then - fullname="$name ($id)" - elif [ -n "$autoname" ]; then - fullname="$autoname ($id)" - else - fullname="$id" - fi - - newbuild=false - while read l; do - if [[ "$l" == "+Build:"* ]]; then - newbuild=true - build=${l#*:} - version=${build%%,*} - build=${build#*,} - vercode=${build%%,*} - fi - done < <(git diff HEAD -- "$file") - - if $newbuild ; then - message="Update $fullname to $version ($vercode)" - else - message="$fullname:" - fi - - message=${message//\"/\\\"} - commands+=("git add -- $file $extra && git commit -m \"$message\" -e -v") - fi -done < <(git status --porcelain) - -git reset >/dev/null -for cmd in "${commands[@]}"; do - eval "$cmd" - git reset >/dev/null -done diff --git a/fdroid b/fdroid index 02396313..314d2467 100755 --- a/fdroid +++ b/fdroid @@ -1,9 +1,7 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # # fdroid.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2020 Michael Pöhn # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,50 +16,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
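+# Subcommand dispatch now lives in fdroidserver/__main__.py, which also
+# discovers plugins: any importable module named fdroid_* that defines a
+# module-level `fdroid_summary` string and a `main()` function becomes an
+# extra subcommand. A minimal sketch (the module name is just an example):
+#
+#     # fdroid_hello.py
+#     fdroid_summary = 'say hello'
+#
+#     def main():
+#         print('hello from an fdroid plugin')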
-import sys -commands = [ - "build", - "init", - "install", - "update", - "publish", - "verify", - "checkupdates", - "import", - "rewritemeta", - "lint", - "scanner", - "stats", - "server"] - -def print_help(): - print "Valid commands are:" - for command in commands: - print " " + command - print "Use '%s --help' for more info about that command."%sys.argv[0] - -def main(): - - if len(sys.argv) <= 1: - print_help() - sys.exit(0) - - command = sys.argv[1] - if not command in commands: - if command not in ('-h', '--help'): - print "Command '" + command + "' not recognised.\n" - print_help() - sys.exit(1) - - # Trick optparse into displaying the right usage when --help is used. - sys.argv[0] += ' ' + command - - del sys.argv[1] - mod = __import__('fdroidserver.' + command, None, None, [command]) - mod.main() - sys.exit(0) - -if __name__ == "__main__": - main() +import fdroidserver.__main__ +fdroidserver.__main__.main() diff --git a/fdroid-icon.png b/fdroid-icon.png deleted file mode 100644 index 0c0d4173..00000000 Binary files a/fdroid-icon.png and /dev/null differ diff --git a/fdroidserver/__init__.py b/fdroidserver/__init__.py index e69de29b..fdf64421 100644 --- a/fdroidserver/__init__.py +++ b/fdroidserver/__init__.py @@ -0,0 +1,78 @@ +import gettext +import glob +import os +import sys + +# support running straight from git and standard installs +rootpaths = [ + os.path.realpath(os.path.join(os.path.dirname(__file__), '..')), + os.path.realpath( + os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'share') + ), + os.path.join(sys.prefix, 'share'), +] + +localedir = None +for rootpath in rootpaths: + found_mo = glob.glob( + os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo') + ) + if len(found_mo) > 0: + localedir = os.path.join(rootpath, 'locale') + break + +gettext.bindtextdomain('fdroidserver', localedir) +gettext.textdomain('fdroidserver') +_ = gettext.gettext + + +from fdroidserver.exception import ( + FDroidException, + MetaDataException, + VerificationException, # NOQA: E402 +) + +FDroidException # NOQA: B101 +MetaDataException # NOQA: B101 +VerificationException # NOQA: B101 + +from fdroidserver.common import genkeystore as generate_keystore # NOQA: E402 +from fdroidserver.common import verify_apk_signature + +verify_apk_signature # NOQA: B101 +generate_keystore # NOQA: B101 +from fdroidserver.index import ( + download_repo_index, + download_repo_index_v1, + download_repo_index_v2, + get_mirror_service_urls, +) +from fdroidserver.index import make as make_index # NOQA: E402 + +download_repo_index # NOQA: B101 +download_repo_index_v1 # NOQA: B101 +download_repo_index_v2 # NOQA: B101 +get_mirror_service_urls # NOQA: B101 +make_index # NOQA: B101 +from fdroidserver.update import ( + process_apk, + process_apks, + scan_apk, + scan_repo_files, # NOQA: E402 +) + +process_apk # NOQA: B101 +process_apks # NOQA: B101 +scan_apk # NOQA: B101 +scan_repo_files # NOQA: B101 +from fdroidserver.deploy import ( + update_awsbucket, + update_servergitmirrors, + update_serverwebroot, # NOQA: E402 + update_serverwebroots, +) + +update_awsbucket # NOQA: B101 +update_servergitmirrors # NOQA: B101 +update_serverwebroots # NOQA: B101 +update_serverwebroot # NOQA: B101 diff --git a/fdroidserver/__main__.py b/fdroidserver/__main__.py new file mode 100755 index 00000000..71c39b2c --- /dev/null +++ b/fdroidserver/__main__.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python3 +# +# fdroidserver/__main__.py - part of the FDroid server tools +# Copyright (C) 2020 Michael Pöhn +# Copyright 
(C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Marti +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import importlib.metadata +import logging +import os +import pkgutil +import re +import sys +from argparse import ArgumentError +from collections import OrderedDict + +import git + +import fdroidserver.common +import fdroidserver.metadata +from fdroidserver import _ + +COMMANDS = OrderedDict([ + ("build", _("Build a package from source")), + ("init", _("Quickly start a new repository")), + ("publish", _("Sign and place packages in the repo")), + ("gpgsign", _("Add PGP signatures using GnuPG for packages in repo")), + ("update", _("Update repo information for new packages")), + ("deploy", _("Interact with the repo HTTP server")), + ("verify", _("Verify the integrity of downloaded packages")), + ("checkupdates", _("Check for updates to applications")), + ("import", _("Extract application metadata from a source repository")), + ("install", _("Install built packages on devices")), + ("readmeta", _("Read all the metadata files and exit")), + ("rewritemeta", _("Rewrite all the metadata files")), + ("lint", _("Warn about possible metadata errors")), + ("scanner", _("Scan the source code of a package")), + ("signindex", _("Sign indexes created using update --nosign")), + ("btlog", _("Update the binary transparency log for a URL")), + ("signatures", _("Extract signatures from APKs")), + ("nightly", _("Set up an app build for a nightly build repo")), + ("mirror", _("Download complete mirrors of small repos")), +]) + + +def print_help(available_plugins=None): + print(_("usage: ") + _("fdroid [] [-h|--help|--version|]")) + print("") + print(_("Valid commands are:")) + for cmd, summary in COMMANDS.items(): + print(" " + cmd + ' ' * (15 - len(cmd)) + summary) + if available_plugins: + print(_('commands from plugin modules:')) + for command in sorted(available_plugins.keys()): + print(' {:15}{}'.format(command, available_plugins[command]['summary'])) + print("") + + +def preparse_plugin(module_name, module_dir): + """No summary. + + Simple regex based parsing for plugin scripts. + + So we don't have to import them when we just need the summary, + but not plan on executing this particular plugin. + """ + if '.' in module_name: + raise ValueError("No '.' 
allowed in fdroid plugin modules: '{}'"
+                         .format(module_name))
+    path = os.path.join(module_dir, module_name + '.py')
+    if not os.path.isfile(path):
+        path = os.path.join(module_dir, module_name, '__main__.py')
+        if not os.path.isfile(path):
+            raise ValueError("unable to find main plugin script "
+                             "for module '{n}' ('{d}')"
+                             .format(n=module_name,
+                                     d=module_dir))
+    summary = None
+    main = None
+    with open(path, 'r', encoding='utf-8') as f:
+        re_main = re.compile(r'^(\s*def\s+main\s*\(.*\)\s*:'
+                             r'|\s*main\s*=\s*lambda\s*:.+)$')
+        re_summary = re.compile(r'^\s*fdroid_summary\s*=\s["\'](?P<text>.+)["\']$')
+        for line in f:
+            m_summary = re_summary.match(line)
+            if m_summary:
+                summary = m_summary.group('text')
+            if re_main.match(line):
+                main = True
+
+    if summary is None:
+        raise NameError("could not find 'fdroid_summary' in: '{}' plugin"
+                        .format(module_name))
+    if main is None:
+        raise NameError("could not find 'main' function in: '{}' plugin"
+                        .format(module_name))
+    return {'name': module_name, 'summary': summary}
+
+
+def find_plugins():
+    found_plugins = [{'name': x[1], 'dir': x[0].path} for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')]
+    plugin_infos = {}
+    for plugin_def in found_plugins:
+        command_name = plugin_def['name'][7:]
+        try:
+            plugin_infos[command_name] = preparse_plugin(plugin_def['name'],
+                                                         plugin_def['dir'])
+        except Exception as e:
+            # We need to keep module lookup fault tolerant because buggy
+            # modules must not prevent fdroidserver from functioning
+            if len(sys.argv) > 1 and sys.argv[1] == command_name:
+                # only raise the exception when a user explicitly specifies
+                # the broken plugin on the command line
+                raise e
+    return plugin_infos
+
+
+def main():
+    available_plugins = find_plugins()
+
+    if len(sys.argv) <= 1:
+        print_help(available_plugins=available_plugins)
+        sys.exit(0)
+
+    command = sys.argv[1]
+    if command not in COMMANDS and command not in available_plugins:
+        if command in ('-h', '--help'):
+            print_help(available_plugins=available_plugins)
+            sys.exit(0)
+        elif command == 'server':
+            print(_("""ERROR: The "server" subcommand has been removed, use "deploy"!"""))
+            sys.exit(1)
+        elif command == '--version':
+            try:
+                print(importlib.metadata.version("fdroidserver"))
+                sys.exit(0)
+            except importlib.metadata.PackageNotFoundError:
+                pass
+            try:
+                print(
+                    git.repo.Repo(
+                        os.path.dirname(os.path.dirname(__file__))
+                    ).git.describe(always=True, tags=True)
+                )
+                sys.exit(0)
+            except git.exc.InvalidGitRepositoryError:
+                print(_('No version information could be found.'))
+                sys.exit(1)
+        else:
+            print(_("Command '%s' not recognised.\n" % command))
+            print_help(available_plugins=available_plugins)
+            sys.exit(1)
+
+    verbose = any(s in sys.argv for s in ['-v', '--verbose'])
+    quiet = any(s in sys.argv for s in ['-q', '--quiet'])
+
+    # Helpful to differentiate warnings from errors even when on quiet
+    logformat = '%(asctime)s %(levelname)s: %(message)s'
+    loglevel = logging.INFO
+    if verbose:
+        loglevel = logging.DEBUG
+    elif quiet:
+        loglevel = logging.WARN
+
+    logging.basicConfig(format=logformat, level=loglevel)
+
+    if verbose and quiet:
+        logging.critical(_("Conflicting arguments: '--verbose' and '--quiet' "
+                           "can not be specified at the same time."))
+        sys.exit(1)
+
+    # Trick argparse into displaying the right usage when --help is used.
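+    # An illustrative sketch (not captured from a real run): for
+    # `fdroid update --help`, sys.argv goes from
+    #     ['/usr/bin/fdroid', 'update', '--help']
+    # to
+    #     ['/usr/bin/fdroid update', '--help']
+    # so the subcommand shows up in the usage line that argparse prints.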
+ sys.argv[0] += ' ' + command + + del sys.argv[1] + if command in COMMANDS.keys(): + # import is named import_subcommand internally b/c import is reserved by Python + command = 'import_subcommand' if command == 'import' else command + mod = __import__('fdroidserver.' + command, None, None, [command]) + else: + mod = __import__(available_plugins[command]['name'], None, None, [command]) + + system_encoding = sys.getdefaultencoding() + if system_encoding is None or system_encoding.lower() not in ('utf-8', 'utf8'): + logging.warning(_("Encoding is set to '{enc}' fdroid might run " + "into encoding issues. Please set it to 'UTF-8' " + "for best results.".format(enc=system_encoding))) + + try: + mod.main() + # These are ours, contain a proper message and are "expected" + except (fdroidserver.common.FDroidException, + fdroidserver.metadata.MetaDataException) as e: + if verbose: + raise + else: + logging.critical(str(e)) + sys.exit(1) + except ArgumentError as e: + logging.critical(str(e)) + sys.exit(1) + except KeyboardInterrupt: + print('') + fdroidserver.common.force_exit(1) + # These should only be unexpected crashes due to bugs in the code + # str(e) often doesn't contain a reason, so just show the backtrace + except Exception as e: + logging.critical(_("Unknown exception found!")) + raise e + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/_yaml.py b/fdroidserver/_yaml.py new file mode 100644 index 00000000..260f67c0 --- /dev/null +++ b/fdroidserver/_yaml.py @@ -0,0 +1,64 @@ +# Copyright (C) 2025, Hans-Christoph Steiner +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +"""Standard YAML parsing and dumping. + +YAML 1.2 is the preferred format for all data files. When loading +F-Droid formats like config.yml and .yml, YAML 1.2 is +forced, and older YAML constructs should be considered an error. + +It is OK to load and dump files in other YAML versions if they are +externally defined formats, like FUNDING.yml. In those cases, these +common instances might not be appropriate to use. + +There is a separate instance for dumping based on the "round trip" aka +"rt" mode. The "rt" mode maintains order while the "safe" mode sorts +the output. Also, yaml.version is not forced in the dumper because that +makes it write out a "%YAML 1.2" header. F-Droid's formats are +explicitly defined as YAML 1.2 and meant to be human-editable. So that +header gets in the way. + +""" + +import ruamel.yaml + +yaml = ruamel.yaml.YAML(typ='safe') +yaml.version = (1, 2) + +yaml_dumper = ruamel.yaml.YAML(typ='rt') + + +def config_dump(config, fp=None): + """Dump config data in YAML 1.2 format without headers. + + This outputs YAML in a string that is suitable for use in regexps + and string replacements, as well as complete files. It is therefore + explicitly set up to avoid writing out headers and footers. + + This is modeled after PyYAML's yaml.dump(), which can dump to a file + or return a string. 
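+
+    A rough usage sketch (the key and output shown are assumptions, not
+    captured output):
+
+        config_dump({'archive_older': 3})        # returns 'archive_older: 3\n'
+        with open('config.yml', 'w') as fp:
+            config_dump(config, fp)              # or write directly to a file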
+ + https://yaml.dev/doc/ruamel.yaml/example/#Output_of_%60dump()%60_as_a_string + + """ + dumper = ruamel.yaml.YAML(typ='rt') + dumper.default_flow_style = False + dumper.explicit_start = False + dumper.explicit_end = False + if fp is None: + with ruamel.yaml.compat.StringIO() as fp: + dumper.dump(config, fp) + return fp.getvalue() + dumper.dump(config, fp) diff --git a/fdroidserver/apksigcopier.py b/fdroidserver/apksigcopier.py new file mode 100644 index 00000000..f36de2eb --- /dev/null +++ b/fdroidserver/apksigcopier.py @@ -0,0 +1,1019 @@ +#!/usr/bin/python3 +# encoding: utf-8 +# SPDX-FileCopyrightText: 2023 FC Stegerman +# SPDX-License-Identifier: GPL-3.0-or-later + +# -- ; {{{1 +# +# File : apksigcopier +# Maintainer : FC Stegerman +# Date : 2023-02-08 +# +# Copyright : Copyright (C) 2023 FC Stegerman +# Version : v1.1.1 +# License : GPLv3+ +# +# -- ; }}}1 + +""" +Copy/extract/patch android apk signatures & compare apks. + +apksigcopier is a tool for copying android APK signatures from a signed APK to +an unsigned one (in order to verify reproducible builds). + +It can also be used to compare two APKs with different signatures; this requires +apksigner. + + +CLI +=== + +$ apksigcopier extract [OPTIONS] SIGNED_APK OUTPUT_DIR +$ apksigcopier patch [OPTIONS] METADATA_DIR UNSIGNED_APK OUTPUT_APK +$ apksigcopier copy [OPTIONS] SIGNED_APK UNSIGNED_APK OUTPUT_APK +$ apksigcopier compare [OPTIONS] FIRST_APK SECOND_APK + +The following environment variables can be set to 1, yes, or true to +override the default behaviour: + +* set APKSIGCOPIER_EXCLUDE_ALL_META=1 to exclude all metadata files +* set APKSIGCOPIER_COPY_EXTRA_BYTES=1 to copy extra bytes after data (e.g. a v2 sig) +* set APKSIGCOPIER_SKIP_REALIGNMENT=1 to skip realignment of ZIP entries + + +API +=== + +>> from apksigcopier import do_extract, do_patch, do_copy, do_compare +>> do_extract(signed_apk, output_dir, v1_only=NO) +>> do_patch(metadata_dir, unsigned_apk, output_apk, v1_only=NO) +>> do_copy(signed_apk, unsigned_apk, output_apk, v1_only=NO) +>> do_compare(first_apk, second_apk, unsigned=False) + +You can use False, None, and True instead of NO, AUTO, and YES respectively. + +The following global variables (which default to False), can be set to +override the default behaviour: + +* set exclude_all_meta=True to exclude all metadata files +* set copy_extra_bytes=True to copy extra bytes after data (e.g. a v2 sig) +* set skip_realignment=True to skip realignment of ZIP entries +""" + +import glob +import json +import os +import re +import struct +import sys +import zipfile +import zlib +from collections import namedtuple +from typing import ( + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Iterator, + Optional, + Tuple, + Union, +) + +__version__ = "1.1.1" +NAME = "apksigcopier" + +if sys.version_info >= (3, 8): + from typing import Literal + NoAutoYes = Literal["no", "auto", "yes"] +else: + NoAutoYes = str + +DateTime = Tuple[int, int, int, int, int, int] +NoAutoYesBoolNone = Union[NoAutoYes, bool, None] +ZipInfoDataPairs = Iterable[Tuple[zipfile.ZipInfo, bytes]] + +SIGBLOCK, SIGOFFSET = "APKSigningBlock", "APKSigningBlockOffset" +NOAUTOYES: Tuple[NoAutoYes, NoAutoYes, NoAutoYes] = ("no", "auto", "yes") +NO, AUTO, YES = NOAUTOYES +APK_META = re.compile(r"^META-INF/([0-9A-Za-z_-]+\.(SF|RSA|DSA|EC)|MANIFEST\.MF)$") +META_EXT: Tuple[str, ...] = ("SF", "RSA|DSA|EC", "MF") +COPY_EXCLUDE: Tuple[str, ...] 
= ("META-INF/MANIFEST.MF",) +DATETIMEZERO: DateTime = (1980, 0, 0, 0, 0, 0) + +################################################################################ +# +# NB: these values are all from apksigner (the first element of each tuple, same +# as APKZipInfo) or signflinger/zipflinger, except for external_attr w/ 0664 +# permissions and flag_bits 0x08, added for completeness. +# +# NB: zipflinger changed from 0666 to 0644 in commit 895ba5fba6ab84617dd67e38f456a8f96aa37ff0 +# +# https://android.googlesource.com/platform/tools/apksig +# src/main/java/com/android/apksig/internal/zip/{CentralDirectoryRecord,LocalFileRecord,ZipUtils}.java +# https://android.googlesource.com/platform/tools/base +# signflinger/src/com/android/signflinger/SignedApk.java +# zipflinger/src/com/android/zipflinger/{CentralDirectoryRecord,LocalFileHeader,Source}.java +# +################################################################################ + +VALID_ZIP_META = dict( + compresslevel=(9, 1), # best compression, best speed + create_system=(0, 3), # fat, unx + create_version=(20, 0), # 2.0, 0.0 + external_attr=(0, # N/A + 0o100644 << 16, # regular file rw-r--r-- + 0o100664 << 16, # regular file rw-rw-r-- + 0o100666 << 16), # regular file rw-rw-rw- + extract_version=(20, 0), # 2.0, 0.0 + flag_bits=(0x800, 0, 0x08, 0x808), # 0x800 = utf8, 0x08 = data_descriptor +) + +ZipData = namedtuple("ZipData", ("cd_offset", "eocd_offset", "cd_and_eocd")) + +exclude_all_meta = False # exclude all metadata files in copy_apk() +copy_extra_bytes = False # copy extra bytes after data in copy_apk() +skip_realignment = False # skip realignment of ZIP entries in copy_apk() + + +class APKSigCopierError(Exception): + """Base class for errors.""" + + +class APKSigningBlockError(APKSigCopierError): + """Something wrong with the APK Signing Block.""" + + +class NoAPKSigningBlock(APKSigningBlockError): + """APK Signing Block Missing.""" + + +class ZipError(APKSigCopierError): + """Something wrong with ZIP file.""" + + +# FIXME: is there a better alternative? +class ReproducibleZipInfo(zipfile.ZipInfo): + """Reproducible ZipInfo hack.""" + + _override: Dict[str, Any] = {} + + def __init__(self, zinfo: zipfile.ZipInfo, **override: Any) -> None: + # pylint: disable=W0231 + if override: + self._override = {**self._override, **override} + for k in self.__slots__: + if hasattr(zinfo, k): + setattr(self, k, getattr(zinfo, k)) + + def __getattribute__(self, name: str) -> Any: + if name != "_override": + try: + return self._override[name] + except KeyError: + pass + return object.__getattribute__(self, name) + + +# See VALID_ZIP_META +class APKZipInfo(ReproducibleZipInfo): + """Reproducible ZipInfo for APK files.""" + + COMPRESSLEVEL = 9 + + _override = dict( + compress_type=8, + create_system=0, + create_version=20, + date_time=DATETIMEZERO, + external_attr=0, + extract_version=20, + flag_bits=0x800, + ) + + +def noautoyes(value: NoAutoYesBoolNone) -> NoAutoYes: + """ + Turn False into NO, None into AUTO, and True into YES. 
+ + >>> from apksigcopier import noautoyes, NO, AUTO, YES + >>> noautoyes(False) == NO == noautoyes(NO) + True + >>> noautoyes(None) == AUTO == noautoyes(AUTO) + True + >>> noautoyes(True) == YES == noautoyes(YES) + True + + """ + if isinstance(value, str): + if value not in NOAUTOYES: + raise ValueError("expected NO, AUTO, or YES") + return value + try: + return {False: NO, None: AUTO, True: YES}[value] + except KeyError: + raise ValueError("expected False, None, or True") # pylint: disable=W0707 + + +def is_meta(filename: str) -> bool: + """ + Check whether filename is a JAR metadata file. + + Returns whether filename is a v1 (JAR) signature file (.SF), signature block + file (.RSA, .DSA, or .EC), or manifest (MANIFEST.MF). + + See https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html + + >>> from apksigcopier import is_meta + >>> is_meta("classes.dex") + False + >>> is_meta("META-INF/CERT.SF") + True + >>> is_meta("META-INF/CERT.RSA") + True + >>> is_meta("META-INF/MANIFEST.MF") + True + >>> is_meta("META-INF/OOPS") + False + + """ + return APK_META.fullmatch(filename) is not None + + +def exclude_from_copying(filename: str) -> bool: + """ + Check whether to exclude a file during copy_apk(). + + Excludes filenames in COPY_EXCLUDE (i.e. MANIFEST.MF) by default; when + exclude_all_meta is set to True instead, excludes all metadata files as + matched by is_meta(). + + Directories are always excluded. + + >>> import apksigcopier + >>> from apksigcopier import exclude_from_copying + >>> exclude_from_copying("classes.dex") + False + >>> exclude_from_copying("foo/") + True + >>> exclude_from_copying("META-INF/") + True + >>> exclude_from_copying("META-INF/MANIFEST.MF") + True + >>> exclude_from_copying("META-INF/CERT.SF") + False + >>> exclude_from_copying("META-INF/OOPS") + False + + >>> apksigcopier.exclude_all_meta = True + >>> exclude_from_copying("classes.dex") + False + >>> exclude_from_copying("META-INF/") + True + >>> exclude_from_copying("META-INF/MANIFEST.MF") + True + >>> exclude_from_copying("META-INF/CERT.SF") + True + >>> exclude_from_copying("META-INF/OOPS") + False + + """ + return exclude_meta(filename) if exclude_all_meta else exclude_default(filename) + + +def exclude_default(filename: str) -> bool: + """ + Like exclude_from_copying(). + + Excludes directories and filenames in COPY_EXCLUDE (i.e. MANIFEST.MF). + """ + return is_directory(filename) or filename in COPY_EXCLUDE + + +def exclude_meta(filename: str) -> bool: + """Like exclude_from_copying(); excludes directories and all metadata files.""" + return is_directory(filename) or is_meta(filename) + + +def is_directory(filename: str) -> bool: + """ZIP entries with filenames that end with a '/' are directories.""" + return filename.endswith("/") + + +################################################################################ +# +# There is usually a 132-byte virtual entry at the start of an APK signed with a +# v1 signature by signflinger/zipflinger; almost certainly this is a default +# manifest ZIP entry created at initialisation, deleted (from the CD but not +# from the file) during v1 signing, and eventually replaced by a virtual entry. +# +# >>> (30 + len("META-INF/MANIFEST.MF") + +# ... len("Manifest-Version: 1.0\r\n" +# ... "Created-By: Android Gradle 7.1.3\r\n" +# ... "Built-By: Signflinger\r\n\r\n")) +# 132 +# +# NB: they could be a different size, depending on Created-By and Built-By. +# +# FIXME: could virtual entries occur elsewhere as well? 
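+#
+# For illustration, zipflinger_virtual_entry() below builds such an entry;
+# a sketch of the expected values (not captured output):
+#
+#   len(zipflinger_virtual_entry(132))   # 132
+#   zipflinger_virtual_entry(132)[:4]    # b"\x50\x4b\x03\x04", the local file header magic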
+# +# https://android.googlesource.com/platform/tools/base +# signflinger/src/com/android/signflinger/SignedApk.java +# zipflinger/src/com/android/zipflinger/{LocalFileHeader,ZipArchive}.java +# +################################################################################ + +def zipflinger_virtual_entry(size: int) -> bytes: + """Create zipflinger virtual entry.""" + if size < 30: + raise ValueError("Minimum size for virtual entries is 30 bytes") + return ( + # header extract_version flag_bits + b"\x50\x4b\x03\x04" b"\x00\x00" b"\x00\x00" + # compress_type (1981,1,1,1,1,2) crc32 + b"\x00\x00" b"\x21\x08\x21\x02" b"\x00\x00\x00\x00" + # compress_size file_size filename length + b"\x00\x00\x00\x00" b"\x00\x00\x00\x00" b"\x00\x00" + ) + int.to_bytes(size - 30, 2, "little") + b"\x00" * (size - 30) + + +def detect_zfe(apkfile: str) -> Optional[int]: + """ + Detect zipflinger virtual entry. + + Returns the size of the virtual entry if found, None otherwise. + + Raises ZipError if the size is less than 30 or greater than 4096, or the + data isn't all zeroes. + """ + with open(apkfile, "rb") as fh: + zfe_start = zipflinger_virtual_entry(30)[:28] # w/o len(extra) + if fh.read(28) == zfe_start: + zfe_size = 30 + int.from_bytes(fh.read(2), "little") + if not (30 <= zfe_size <= 4096): + raise ZipError("Unsupported virtual entry size") + if not fh.read(zfe_size - 30) == b"\x00" * (zfe_size - 30): + raise ZipError("Unsupported virtual entry data") + return zfe_size + return None + + +################################################################################ +# +# https://en.wikipedia.org/wiki/ZIP_(file_format) +# https://source.android.com/docs/security/features/apksigning/v2#apk-signing-block-format +# +# ================================= +# | Contents of ZIP entries | +# ================================= +# | APK Signing Block | +# | ----------------------------- | +# | | size (w/o this) uint64 LE | | +# | | ... | | +# | | size (again) uint64 LE | | +# | | "APK Sig Block 42" (16B) | | +# | ----------------------------- | +# ================================= +# | ZIP Central Directory | +# ================================= +# | ZIP End of Central Directory | +# | ----------------------------- | +# | | 0x06054b50 ( 4B) | | +# | | ... (12B) | | +# | | CD Offset ( 4B) | | +# | | ... | | +# | ----------------------------- | +# ================================= +# +################################################################################ + + +# FIXME: makes certain assumptions and doesn't handle all valid ZIP files! +# FIXME: support zip64? +# FIXME: handle utf8 filenames w/o utf8 flag (as produced by zipflinger)? +# https://android.googlesource.com/platform/tools/apksig +# src/main/java/com/android/apksig/ApkSigner.java +def copy_apk(unsigned_apk: str, output_apk: str, *, + copy_extra: Optional[bool] = None, + exclude: Optional[Callable[[str], bool]] = None, + realign: Optional[bool] = None, + zfe_size: Optional[int] = None) -> DateTime: + """ + Copy APK like apksigner would, excluding files matched by exclude_from_copying(). + + Adds a zipflinger virtual entry of zfe_size bytes if one is not already + present and zfe_size is not None. + + Returns max date_time. + + The following global variables (which default to False), can be set to + override the default behaviour: + + * set exclude_all_meta=True to exclude all metadata files + * set copy_extra_bytes=True to copy extra bytes after data (e.g. 
a v2 sig) + * set skip_realignment=True to skip realignment of ZIP entries + + The default behaviour can also be changed using the keyword-only arguments + exclude, copy_extra, and realign; these take precedence over the global + variables when not None. NB: exclude is a callable, not a bool; realign is + the inverse of skip_realignment. + + >>> import apksigcopier, os, zipfile + >>> apk = "test/apks/apks/golden-aligned-in.apk" + >>> with zipfile.ZipFile(apk, "r") as zf: + ... infos_in = zf.infolist() + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... out = os.path.join(tmpdir, "out.apk") + ... apksigcopier.copy_apk(apk, out) + ... with zipfile.ZipFile(out, "r") as zf: + ... infos_out = zf.infolist() + (2017, 5, 15, 11, 28, 40) + >>> for i in infos_in: + ... print(i.filename) + META-INF/ + META-INF/MANIFEST.MF + AndroidManifest.xml + classes.dex + temp.txt + lib/armeabi/fake.so + resources.arsc + temp2.txt + >>> for i in infos_out: + ... print(i.filename) + AndroidManifest.xml + classes.dex + temp.txt + lib/armeabi/fake.so + resources.arsc + temp2.txt + >>> infos_in[2] + + >>> infos_out[0] + + >>> repr(infos_in[2:]) == repr(infos_out) + True + + """ + if copy_extra is None: + copy_extra = copy_extra_bytes + if exclude is None: + exclude = exclude_from_copying + if realign is None: + realign = not skip_realignment + with zipfile.ZipFile(unsigned_apk, "r") as zf: + infos = zf.infolist() + zdata = zip_data(unsigned_apk) + offsets = {} + with open(unsigned_apk, "rb") as fhi, open(output_apk, "w+b") as fho: + if zfe_size: + zfe = zipflinger_virtual_entry(zfe_size) + if fhi.read(zfe_size) != zfe: + fho.write(zfe) + fhi.seek(0) + for info in sorted(infos, key=lambda info: info.header_offset): + off_i = fhi.tell() + if info.header_offset > off_i: + # copy extra bytes + fho.write(fhi.read(info.header_offset - off_i)) + hdr = fhi.read(30) + if hdr[:4] != b"\x50\x4b\x03\x04": + raise ZipError("Expected local file header signature") + n, m = struct.unpack(" bytes: + align = 4096 if info.filename.endswith(".so") else 4 + old_off = 30 + n + m + info.header_offset + new_off = 30 + n + m + off_o + old_xtr = hdr[30 + n:30 + n + m] + new_xtr = b"" + while len(old_xtr) >= 4: + hdr_id, size = struct.unpack(" len(old_xtr) - 4: + break + if not (hdr_id == 0 and size == 0): + if hdr_id == 0xd935: + if size >= 2: + align = int.from_bytes(old_xtr[4:6], "little") + else: + new_xtr += old_xtr[:size + 4] + old_xtr = old_xtr[size + 4:] + if old_off % align == 0 and new_off % align != 0: + if pad_like_apksigner: + pad = (align - (new_off - m + len(new_xtr) + 6) % align) % align + xtr = new_xtr + struct.pack(" None: + while size > 0: + data = fhi.read(min(size, blocksize)) + if not data: + break + size -= len(data) + fho.write(data) + if size != 0: + raise ZipError("Unexpected EOF") + + +def extract_meta(signed_apk: str) -> Iterator[Tuple[zipfile.ZipInfo, bytes]]: + """ + Extract v1 signature metadata files from signed APK. + + Yields (ZipInfo, data) pairs. + + >>> from apksigcopier import extract_meta + >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" + >>> meta = tuple(extract_meta(apk)) + >>> [ x.filename for x, _ in meta ] + ['META-INF/RSA-2048.SF', 'META-INF/RSA-2048.RSA', 'META-INF/MANIFEST.MF'] + >>> for line in meta[0][1].splitlines()[:4]: + ... print(line.decode()) + Signature-Version: 1.0 + Created-By: 1.0 (Android) + SHA-256-Digest-Manifest: hz7AxDJU9Namxoou/kc4Z2GVRS9anCGI+M52tbCsXT0= + X-Android-APK-Signed: 2, 3 + >>> for line in meta[2][1].splitlines()[:2]: + ... 
print(line.decode()) + Manifest-Version: 1.0 + Created-By: 1.8.0_45-internal (Oracle Corporation) + + """ + with zipfile.ZipFile(signed_apk, "r") as zf_sig: + for info in zf_sig.infolist(): + if is_meta(info.filename): + yield info, zf_sig.read(info.filename) + + +def extract_differences(signed_apk: str, extracted_meta: ZipInfoDataPairs) \ + -> Optional[Dict[str, Any]]: + """ + Extract ZIP metadata differences from signed APK. + + >>> import apksigcopier as asc, pprint + >>> apk = "test/apks/apks/debuggable-boolean.apk" + >>> meta = tuple(asc.extract_meta(apk)) + >>> [ x.filename for x, _ in meta ] + ['META-INF/CERT.SF', 'META-INF/CERT.RSA', 'META-INF/MANIFEST.MF'] + >>> diff = asc.extract_differences(apk, meta) + >>> pprint.pprint(diff) + {'files': {'META-INF/CERT.RSA': {'flag_bits': 2056}, + 'META-INF/CERT.SF': {'flag_bits': 2056}, + 'META-INF/MANIFEST.MF': {'flag_bits': 2056}}} + + >>> meta[2][0].extract_version = 42 + >>> try: + ... asc.extract_differences(apk, meta) + ... except asc.ZipError as e: + ... print(e) + Unsupported extract_version + + >>> asc.validate_differences(diff) is None + True + >>> diff["files"]["META-INF/OOPS"] = {} + >>> asc.validate_differences(diff) + ".files key 'META-INF/OOPS' is not a metadata file" + >>> del diff["files"]["META-INF/OOPS"] + >>> diff["files"]["META-INF/CERT.RSA"]["compresslevel"] = 42 + >>> asc.validate_differences(diff) + ".files['META-INF/CERT.RSA'].compresslevel has an unexpected value" + >>> diff["oops"] = 42 + >>> asc.validate_differences(diff) + 'contains unknown key(s)' + + """ + differences: Dict[str, Any] = {} + files = {} + for info, data in extracted_meta: + diffs = {} + for k in VALID_ZIP_META: + if k != "compresslevel": + v = getattr(info, k) + if v != APKZipInfo._override[k]: + if v not in VALID_ZIP_META[k]: + raise ZipError(f"Unsupported {k}") + diffs[k] = v + level = _get_compresslevel(signed_apk, info, data) + if level != APKZipInfo.COMPRESSLEVEL: + diffs["compresslevel"] = level + if diffs: + files[info.filename] = diffs + if files: + differences["files"] = files + zfe_size = detect_zfe(signed_apk) + if zfe_size: + differences["zipflinger_virtual_entry"] = zfe_size + return differences or None + + +def validate_differences(differences: Dict[str, Any]) -> Optional[str]: + """ + Validate differences dict. + + Returns None if valid, error otherwise. 
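+
+    A sketch of a dict that would pass validation (the names and values are
+    only illustrative):
+
+        {"zipflinger_virtual_entry": 132,
+         "files": {"META-INF/MANIFEST.MF": {"flag_bits": 2056}}}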
+ """ + if set(differences) - {"files", "zipflinger_virtual_entry"}: + return "contains unknown key(s)" + if "zipflinger_virtual_entry" in differences: + if type(differences["zipflinger_virtual_entry"]) is not int: + return ".zipflinger_virtual_entry is not an int" + if not (30 <= differences["zipflinger_virtual_entry"] <= 4096): + return ".zipflinger_virtual_entry is < 30 or > 4096" + if "files" in differences: + if not isinstance(differences["files"], dict): + return ".files is not a dict" + for name, info in differences["files"].items(): + if not is_meta(name): + return f".files key {name!r} is not a metadata file" + if not isinstance(info, dict): + return f".files[{name!r}] is not a dict" + if set(info) - set(VALID_ZIP_META): + return f".files[{name!r}] contains unknown key(s)" + for k, v in info.items(): + if v not in VALID_ZIP_META[k]: + return f".files[{name!r}].{k} has an unexpected value" + return None + + +def _get_compresslevel(apkfile: str, info: zipfile.ZipInfo, data: bytes) -> int: + if info.compress_type != 8: + raise ZipError("Unsupported compress_type") + crc = _get_compressed_crc(apkfile, info) + for level in VALID_ZIP_META["compresslevel"]: + comp = zlib.compressobj(level, 8, -15) + if zlib.crc32(comp.compress(data) + comp.flush()) == crc: + return level + raise ZipError("Unsupported compresslevel") + + +def _get_compressed_crc(apkfile: str, info: zipfile.ZipInfo) -> int: + with open(apkfile, "rb") as fh: + fh.seek(info.header_offset) + hdr = fh.read(30) + if hdr[:4] != b"\x50\x4b\x03\x04": + raise ZipError("Expected local file header signature") + n, m = struct.unpack(" None: + """ + Add v1 signature metadata to APK (removes v2 sig block, if any). + + >>> import apksigcopier as asc + >>> unsigned_apk = "test/apks/apks/golden-aligned-in.apk" + >>> signed_apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" + >>> meta = tuple(asc.extract_meta(signed_apk)) + >>> [ x.filename for x, _ in meta ] + ['META-INF/RSA-2048.SF', 'META-INF/RSA-2048.RSA', 'META-INF/MANIFEST.MF'] + >>> with zipfile.ZipFile(unsigned_apk, "r") as zf: + ... infos_in = zf.infolist() + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... out = os.path.join(tmpdir, "out.apk") + ... asc.copy_apk(unsigned_apk, out) + ... asc.patch_meta(meta, out) + ... with zipfile.ZipFile(out, "r") as zf: + ... infos_out = zf.infolist() + (2017, 5, 15, 11, 28, 40) + >>> for i in infos_in: + ... print(i.filename) + META-INF/ + META-INF/MANIFEST.MF + AndroidManifest.xml + classes.dex + temp.txt + lib/armeabi/fake.so + resources.arsc + temp2.txt + >>> for i in infos_out: + ... print(i.filename) + AndroidManifest.xml + classes.dex + temp.txt + lib/armeabi/fake.so + resources.arsc + temp2.txt + META-INF/RSA-2048.SF + META-INF/RSA-2048.RSA + META-INF/MANIFEST.MF + + """ + with zipfile.ZipFile(output_apk, "r") as zf_out: + for info in zf_out.infolist(): + if is_meta(info.filename): + raise ZipError("Unexpected metadata") + with zipfile.ZipFile(output_apk, "a") as zf_out: + for info, data in extracted_meta: + if differences and "files" in differences: + more = differences["files"].get(info.filename, {}).copy() + else: + more = {} + level = more.pop("compresslevel", APKZipInfo.COMPRESSLEVEL) + zinfo = APKZipInfo(info, date_time=date_time, **more) + zf_out.writestr(zinfo, data, compresslevel=level) + + +def extract_v2_sig(apkfile: str, expected: bool = True) -> Optional[Tuple[int, bytes]]: + """ + Extract APK Signing Block and offset from APK. 
+ + When successful, returns (sb_offset, sig_block); otherwise raises + NoAPKSigningBlock when expected is True, else returns None. + + >>> import apksigcopier as asc + >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" + >>> sb_offset, sig_block = asc.extract_v2_sig(apk) + >>> sb_offset + 8192 + >>> len(sig_block) + 4096 + + >>> apk = "test/apks/apks/golden-aligned-in.apk" + >>> try: + ... asc.extract_v2_sig(apk) + ... except asc.NoAPKSigningBlock as e: + ... print(e) + No APK Signing Block + + """ + cd_offset = zip_data(apkfile).cd_offset + with open(apkfile, "rb") as fh: + fh.seek(cd_offset - 16) + if fh.read(16) != b"APK Sig Block 42": + if expected: + raise NoAPKSigningBlock("No APK Signing Block") + return None + fh.seek(-24, os.SEEK_CUR) + sb_size2 = int.from_bytes(fh.read(8), "little") + fh.seek(-sb_size2 + 8, os.SEEK_CUR) + sb_size1 = int.from_bytes(fh.read(8), "little") + if sb_size1 != sb_size2: + raise APKSigningBlockError("APK Signing Block sizes not equal") + fh.seek(-8, os.SEEK_CUR) + sb_offset = fh.tell() + sig_block = fh.read(sb_size2 + 8) + return sb_offset, sig_block + + +# FIXME: OSError for APKs < 1024 bytes [wontfix] +def zip_data(apkfile: str, count: int = 1024) -> ZipData: + """ + Extract central directory, EOCD, and offsets from ZIP. + + Returns ZipData. + + >>> import apksigcopier + >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" + >>> data = apksigcopier.zip_data(apk) + >>> data.cd_offset, data.eocd_offset + (12288, 12843) + >>> len(data.cd_and_eocd) + 577 + + """ + with open(apkfile, "rb") as fh: + fh.seek(-count, os.SEEK_END) + data = fh.read() + pos = data.rfind(b"\x50\x4b\x05\x06") + if pos == -1: + raise ZipError("Expected end of central directory record (EOCD)") + fh.seek(pos - len(data), os.SEEK_CUR) + eocd_offset = fh.tell() + fh.seek(16, os.SEEK_CUR) + cd_offset = int.from_bytes(fh.read(4), "little") + fh.seek(cd_offset) + cd_and_eocd = fh.read() + return ZipData(cd_offset, eocd_offset, cd_and_eocd) + + +# FIXME: can we determine signed_sb_offset? +def patch_v2_sig(extracted_v2_sig: Tuple[int, bytes], output_apk: str) -> None: + """ + Implant extracted v2/v3 signature into APK. + + >>> import apksigcopier as asc + >>> unsigned_apk = "test/apks/apks/golden-aligned-in.apk" + >>> signed_apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" + >>> meta = tuple(asc.extract_meta(signed_apk)) + >>> v2_sig = asc.extract_v2_sig(signed_apk) + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... out = os.path.join(tmpdir, "out.apk") + ... date_time = asc.copy_apk(unsigned_apk, out) + ... asc.patch_meta(meta, out, date_time=date_time) + ... asc.extract_v2_sig(out, expected=False) is None + ... asc.patch_v2_sig(v2_sig, out) + ... asc.extract_v2_sig(out) == v2_sig + ... with open(signed_apk, "rb") as a, open(out, "rb") as b: + ... 
a.read() == b.read() + True + True + True + + """ + signed_sb_offset, signed_sb = extracted_v2_sig + data_out = zip_data(output_apk) + if signed_sb_offset < data_out.cd_offset: + raise APKSigningBlockError("APK Signing Block offset < central directory offset") + padding = b"\x00" * (signed_sb_offset - data_out.cd_offset) + offset = len(signed_sb) + len(padding) + with open(output_apk, "r+b") as fh: + fh.seek(data_out.cd_offset) + fh.write(padding) + fh.write(signed_sb) + fh.write(data_out.cd_and_eocd) + fh.seek(data_out.eocd_offset + offset + 16) + fh.write(int.to_bytes(data_out.cd_offset + offset, 4, "little")) + + +def patch_apk(extracted_meta: ZipInfoDataPairs, extracted_v2_sig: Optional[Tuple[int, bytes]], + unsigned_apk: str, output_apk: str, *, + differences: Optional[Dict[str, Any]] = None, + exclude: Optional[Callable[[str], bool]] = None) -> None: + """Patch extracted_meta + extracted_v2_sig (if not None) onto unsigned_apk and save as output_apk.""" + if differences and "zipflinger_virtual_entry" in differences: + zfe_size = differences["zipflinger_virtual_entry"] + else: + zfe_size = None + date_time = copy_apk(unsigned_apk, output_apk, exclude=exclude, zfe_size=zfe_size) + patch_meta(extracted_meta, output_apk, date_time=date_time, differences=differences) + if extracted_v2_sig is not None: + patch_v2_sig(extracted_v2_sig, output_apk) + + +# FIXME: support multiple signers? +def do_extract(signed_apk: str, output_dir: str, v1_only: NoAutoYesBoolNone = NO, + *, ignore_differences: bool = False) -> None: + """ + Extract signatures from signed_apk and save in output_dir. + + The v1_only parameter controls whether the absence of a v1 signature is + considered an error or not: + * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; + * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; + * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. + """ + v1_only = noautoyes(v1_only) + extracted_meta = tuple(extract_meta(signed_apk)) + if len(extracted_meta) not in (len(META_EXT), 0): + raise APKSigCopierError("Unexpected or missing metadata files in signed_apk") + for info, data in extracted_meta: + name = os.path.basename(info.filename) + with open(os.path.join(output_dir, name), "wb") as fh: + fh.write(data) + if v1_only == YES: + if not extracted_meta: + raise APKSigCopierError("Expected v1 signature") + return + expected = v1_only == NO + extracted_v2_sig = extract_v2_sig(signed_apk, expected=expected) + if extracted_v2_sig is None: + if not extracted_meta: + raise APKSigCopierError("Expected v1 and/or v2/v3 signature, found neither") + return + signed_sb_offset, signed_sb = extracted_v2_sig + with open(os.path.join(output_dir, SIGOFFSET), "w") as fh: + fh.write(str(signed_sb_offset) + "\n") + with open(os.path.join(output_dir, SIGBLOCK), "wb") as fh: + fh.write(signed_sb) + if not ignore_differences: + differences = extract_differences(signed_apk, extracted_meta) + if differences: + with open(os.path.join(output_dir, "differences.json"), "w") as fh: + json.dump(differences, fh, sort_keys=True, indent=2) + fh.write("\n") + + +# FIXME: support multiple signers? +def do_patch(metadata_dir: str, unsigned_apk: str, output_apk: str, + v1_only: NoAutoYesBoolNone = NO, *, + exclude: Optional[Callable[[str], bool]] = None, + ignore_differences: bool = False) -> None: + """ + Patch signatures from metadata_dir onto unsigned_apk and save as output_apk. 
+ + The v1_only parameter controls whether the absence of a v1 signature is + considered an error or not: + * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; + * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; + * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. + """ + v1_only = noautoyes(v1_only) + extracted_meta = [] + differences = None + for pat in META_EXT: + files = [fn for ext in pat.split("|") for fn in + glob.glob(os.path.join(metadata_dir, "*." + ext))] + if len(files) != 1: + continue + info = zipfile.ZipInfo("META-INF/" + os.path.basename(files[0])) + with open(files[0], "rb") as fh: + extracted_meta.append((info, fh.read())) + if len(extracted_meta) not in (len(META_EXT), 0): + raise APKSigCopierError("Unexpected or missing files in metadata_dir") + if v1_only == YES: + extracted_v2_sig = None + else: + sigoffset_file = os.path.join(metadata_dir, SIGOFFSET) + sigblock_file = os.path.join(metadata_dir, SIGBLOCK) + if v1_only == AUTO and not os.path.exists(sigblock_file): + extracted_v2_sig = None + else: + with open(sigoffset_file, "r") as fh: + signed_sb_offset = int(fh.read()) + with open(sigblock_file, "rb") as fh: + signed_sb = fh.read() + extracted_v2_sig = signed_sb_offset, signed_sb + differences_file = os.path.join(metadata_dir, "differences.json") + if not ignore_differences and os.path.exists(differences_file): + with open(differences_file, "r") as fh: + try: + differences = json.load(fh) + except json.JSONDecodeError as e: + raise APKSigCopierError(f"Invalid differences.json: {e}") # pylint: disable=W0707 + error = validate_differences(differences) + if error: + raise APKSigCopierError(f"Invalid differences.json: {error}") + if not extracted_meta and extracted_v2_sig is None: + raise APKSigCopierError("Expected v1 and/or v2/v3 signature, found neither") + patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk, + differences=differences, exclude=exclude) + + +def do_copy(signed_apk: str, unsigned_apk: str, output_apk: str, + v1_only: NoAutoYesBoolNone = NO, *, + exclude: Optional[Callable[[str], bool]] = None, + ignore_differences: bool = False) -> None: + """ + Copy signatures from signed_apk onto unsigned_apk and save as output_apk. + + The v1_only parameter controls whether the absence of a v1 signature is + considered an error or not: + * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; + * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; + * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. + """ + v1_only = noautoyes(v1_only) + extracted_meta = tuple(extract_meta(signed_apk)) + differences = None + if v1_only == YES: + extracted_v2_sig = None + else: + extracted_v2_sig = extract_v2_sig(signed_apk, expected=v1_only == NO) + if extracted_v2_sig is not None and not ignore_differences: + differences = extract_differences(signed_apk, extracted_meta) + patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk, + differences=differences, exclude=exclude) + +# vim: set tw=80 sw=4 sts=4 et fdm=marker : diff --git a/fdroidserver/asynchronousfilereader/__init__.py b/fdroidserver/asynchronousfilereader/__init__.py new file mode 100644 index 00000000..7ba02b69 --- /dev/null +++ b/fdroidserver/asynchronousfilereader/__init__.py @@ -0,0 +1,56 @@ +"""Simple thread based asynchronous file reader for Python. 
+ +AsynchronousFileReader +====================== + +see https://github.com/soxofaan/asynchronousfilereader + +MIT License +Copyright (c) 2014 Stefaan Lippens +""" + +__version__ = '0.2.1' + +import threading + +try: + # Python 2 + from Queue import Queue +except ImportError: + # Python 3 + from queue import Queue + + +class AsynchronousFileReader(threading.Thread): + """Helper class to implement asynchronous reading of a file in a separate thread. + + Pushes read lines on a queue to be consumed in another thread. + """ + + def __init__(self, fd, queue=None, autostart=True): + self._fd = fd + if queue is None: + queue = Queue() + self.queue = queue + + threading.Thread.__init__(self) + + if autostart: + self.start() + + def run(self): + """Read lines and put them on the queue (the body of the tread).""" + while True: + line = self._fd.readline() + if not line: + break + self.queue.put(line) + + def eof(self): + """Check whether there is no more content to expect.""" + return not self.is_alive() and self.queue.empty() + + def readlines(self): + """Get currently available lines.""" + while not self.queue.empty(): + yield self.queue.get() diff --git a/fdroidserver/btlog.py b/fdroidserver/btlog.py new file mode 100755 index 00000000..7ca3ddbf --- /dev/null +++ b/fdroidserver/btlog.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 +"""Update the binary transparency log for a URL.""" +# +# btlog.py - part of the FDroid server tools +# Copyright (C) 2017, Hans-Christoph Steiner +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +# This is for creating a binary transparency log in a git repo for any +# F-Droid repo accessible via HTTP. It is meant to run very often, +# even once a minute in a cronjob, so it uses HEAD requests and the +# HTTP ETag to check if the file has changed. HEAD requests should +# not count against the download counts. This pattern of a HEAD then +# a GET is what fdroidclient uses to avoid ETags being abused as +# cookies. This also uses the same HTTP User Agent as the F-Droid +# client app so its not easy for the server to distinguish this from +# the F-Droid client. + +import collections +import glob +import json +import logging +import os +import shutil +import tempfile +import zipfile +from argparse import ArgumentParser +from typing import Optional + +import defusedxml.minidom +import git +import requests + +from . import _, common, deploy +from .exception import FDroidException + + +def make_binary_transparency_log( + repodirs: collections.abc.Iterable, + btrepo: str = 'binary_transparency', + url: Optional[str] = None, + commit_title: str = 'fdroid update', +): + """Log the indexes in a standalone git repo to serve as a "binary transparency" log. + + Parameters + ---------- + repodirs + The directories of the F-Droid repository to generate the binary + transparency log for. + btrepo + The path to the Git repository of the binary transparency log. 
+ url + The URL of the F-Droid repository to generate the binary transparency + log for. + commit_title + The commit title for commits in the binary transparency log Git + repository. + + Notes + ----- + Also see https://www.eff.org/deeplinks/2014/02/open-letter-to-tech-companies . + """ + logging.info('Committing indexes to ' + btrepo) + if os.path.exists(os.path.join(btrepo, '.git')): + gitrepo = git.Repo(btrepo) + else: + if not os.path.exists(btrepo): + os.mkdir(btrepo) + gitrepo = git.Repo.init(btrepo, initial_branch=deploy.GIT_BRANCH) + + if not url: + url = common.config['repo_url'].rstrip('/') + with open(os.path.join(btrepo, 'README.md'), 'w') as fp: + fp.write( + """ +# Binary Transparency Log for %s + +This is a log of the signed app index metadata. This is stored in a +git repo, which serves as an imperfect append-only storage mechanism. +People can then check that any file that they received from that +F-Droid repository was a publicly released file. + +For more info on this idea: +* https://wiki.mozilla.org/Security/Binary_Transparency +""" + % url[: url.rindex('/')] # strip '/repo' + ) + gitrepo.index.add(['README.md']) + gitrepo.index.commit('add README') + + for repodir in repodirs: + cpdir = os.path.join(btrepo, repodir) + if not os.path.exists(cpdir): + os.mkdir(cpdir) + for f in ('index.xml', 'index-v1.json', 'index-v2.json', 'entry.json'): + repof = os.path.join(repodir, f) + if not os.path.exists(repof): + continue + dest = os.path.join(cpdir, f) + if f.endswith('.xml'): + doc = defusedxml.minidom.parse(repof) + output = doc.toprettyxml(encoding='utf-8') + with open(dest, 'wb') as f: + f.write(output) + elif f.endswith('.json'): + with open(repof) as fp: + output = json.load(fp, object_pairs_hook=collections.OrderedDict) + with open(dest, 'w') as fp: + json.dump(output, fp, indent=2) + gitrepo.index.add([repof]) + for f in ('index.jar', 'index-v1.jar', 'entry.jar'): + repof = os.path.join(repodir, f) + if not os.path.exists(repof): + continue + dest = os.path.join(cpdir, f) + jarin = zipfile.ZipFile(repof, 'r') + jarout = zipfile.ZipFile(dest, 'w') + for info in jarin.infolist(): + if info.filename.startswith('META-INF/'): + jarout.writestr(info, jarin.read(info.filename)) + jarout.close() + jarin.close() + gitrepo.index.add([repof]) + + output_files = [] + for root, dirs, files in os.walk(repodir): + for f in files: + output_files.append(os.path.relpath(os.path.join(root, f), repodir)) + output = collections.OrderedDict() + for f in sorted(output_files): + repofile = os.path.join(repodir, f) + stat = os.stat(repofile) + output[f] = ( + stat.st_size, + stat.st_ctime_ns, + stat.st_mtime_ns, + stat.st_mode, + stat.st_uid, + stat.st_gid, + ) + fslogfile = os.path.join(cpdir, 'filesystemlog.json') + with open(fslogfile, 'w') as fp: + json.dump(output, fp, indent=2) + gitrepo.index.add([os.path.join(repodir, 'filesystemlog.json')]) + + for f in glob.glob(os.path.join(cpdir, '*.HTTP-headers.json')): + gitrepo.index.add([os.path.join(repodir, os.path.basename(f))]) + + gitrepo.index.commit(commit_title) + + +def main(): + """Generate or update a binary transparency log for a F-Droid repository. + + The behaviour of this function is influenced by the configuration file as + well as command line parameters. + + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If the specified or default Git repository does not exist. 
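# Illustrative sketch, not part of the patch: make_binary_transparency_log()
# can also be called directly; main() below does essentially this after
# refreshing the index files over HTTP. The URL is hypothetical.
from fdroidserver.btlog import make_binary_transparency_log

make_binary_transparency_log(
    repodirs=('repo', 'archive'),
    btrepo='binary_transparency',
    url='https://example.org/fdroid/repo',
    commit_title='fdroid update',
)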
+ + """ + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "--git-repo", + default=os.path.join(os.getcwd(), 'binary_transparency'), + help=_("Path to the git repo to use as the log"), + ) + parser.add_argument( + "-u", + "--url", + default='https://f-droid.org', + help=_("The base URL for the repo to log (default: https://f-droid.org)"), + ) + parser.add_argument( + "--git-remote", + default=None, + help=_("Push the log to this git remote repository"), + ) + options = common.parse_args(parser) + + if options.verbose: + logging.getLogger("requests").setLevel(logging.INFO) + logging.getLogger("urllib3").setLevel(logging.INFO) + else: + logging.getLogger("requests").setLevel(logging.WARNING) + logging.getLogger("urllib3").setLevel(logging.WARNING) + + if not os.path.exists(options.git_repo): + raise FDroidException( + '"%s" does not exist! Create it, or use --git-repo' % options.git_repo + ) + + session = requests.Session() + + new_files = False + repodirs = ('repo', 'archive') + tempdirbase = tempfile.mkdtemp(prefix='.fdroid-btlog-') + for repodir in repodirs: + # TODO read HTTP headers for etag from git repo + tempdir = os.path.join(tempdirbase, repodir) + os.makedirs(tempdir, exist_ok=True) + gitrepodir = os.path.join(options.git_repo, repodir) + os.makedirs(gitrepodir, exist_ok=True) + for f in ( + 'entry.jar', + 'entry.json', + 'index-v1.jar', + 'index-v1.json', + 'index-v2.json', + 'index.jar', + 'index.xml', + ): + dlfile = os.path.join(tempdir, f) + dlurl = options.url + '/' + repodir + '/' + f + http_headers_file = os.path.join(gitrepodir, f + '.HTTP-headers.json') + + headers = {'User-Agent': 'F-Droid 0.102.3'} + etag = None + if os.path.exists(http_headers_file): + with open(http_headers_file) as fp: + etag = json.load(fp)['ETag'] + + r = session.head(dlurl, headers=headers, allow_redirects=False) + if r.status_code != 200: + logging.debug( + 'HTTP Response (%d), did not download %s' % (r.status_code, dlurl) + ) + continue + if etag and etag == r.headers.get('ETag'): + logging.debug('ETag matches, did not download ' + dlurl) + continue + + r = session.get(dlurl, headers=headers, allow_redirects=False) + if r.status_code == 200: + with open(dlfile, 'wb') as f: + for chunk in r: + f.write(chunk) + + dump = dict() + for k, v in r.headers.items(): + dump[k] = v + with open(http_headers_file, 'w') as fp: + json.dump(dump, fp, indent=2, sort_keys=True) + new_files = True + + if new_files: + os.chdir(tempdirbase) + make_binary_transparency_log( + repodirs, options.git_repo, options.url, 'fdroid btlog' + ) + if options.git_remote: + deploy.push_binary_transparency(options.git_repo, options.git_remote) + shutil.rmtree(tempdirbase, ignore_errors=True) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/build.py b/fdroidserver/build.py index 7204d57d..2e716c10 100644 --- a/fdroidserver/build.py +++ b/fdroidserver/build.py @@ -1,9 +1,9 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 +"""Build a package from source.""" # # build.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2010-2014, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,267 +18,173 @@ # You should have received a copy of the GNU Affero General Public License # along 
with this program. If not, see . -import sys +import argparse +import glob +import logging import os +import posixpath +import re import shutil import subprocess -import re import tarfile -import traceback +import tempfile +import threading import time -import json -from ConfigParser import ConfigParser -from optparse import OptionParser, OptionError +import traceback +from gettext import ngettext +from pathlib import Path -import common, metadata -from common import BuildException, VCSException, FDroidPopen +import requests -def get_builder_vm_id(): - vd = os.path.join('builder', '.vagrant') - if os.path.isdir(vd): - # Vagrant 1.2 (and maybe 1.1?) it's a directory tree... - with open(os.path.join(vd, 'machines', 'default', 'virtualbox', 'id')) as vf: - id = vf.read() - return id - else: - # Vagrant 1.0 - it's a json file... - with open(os.path.join('builder', '.vagrant')) as vf: - v = json.load(vf) - return v['active']['default'] +from . import _, common, metadata, net, scanner, vmtools +from .common import FDroidPopen +from .exception import BuildException, FDroidException, VCSException -def got_valid_builder_vm(): - """Returns True if we have a valid-looking builder vm - """ - if not os.path.exists(os.path.join('builder', 'Vagrantfile')): - return False - vd = os.path.join('builder', '.vagrant') - if not os.path.exists(vd): - return False - if not os.path.isdir(vd): - # Vagrant 1.0 - if the directory is there, it's valid... - return True - # Vagrant 1.2 - the directory can exist, but the id can be missing... - if not os.path.exists(os.path.join(vd, 'machines', 'default', 'virtualbox', 'id')): - return False - return True +try: + import paramiko +except ImportError: + pass - -def vagrant(params, cwd=None, printout=False): - """Run vagrant. - - :param: list of parameters to pass to vagrant - :cwd: directory to run in, or None for current directory - :returns: (ret, out) where ret is the return code, and out - is the stdout (and stderr) from vagrant - """ - p = subprocess.Popen(['vagrant'] + params, cwd=cwd, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out = p.communicate()[0] - if options.verbose: - print out - return (p.returncode, out) +buildserverid = None +ssh_channel = None # Note that 'force' here also implies test mode. -def build_server(app, thisbuild, vcs, build_dir, output_dir, force): - """Do a build on the build server.""" +def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): + """Do a build on the builder vm. - import ssh + Parameters + ---------- + app + The metadata of the app to build. + build + The build of the app to build. + vcs + The version control system controller object of the app. + build_dir + The local source-code checkout directory of the app. + output_dir + The target folder for the build result. + log_dir + The directory in the VM where the build logs are getting stored. + force + Don't refresh the already cloned repository and make the build stop on + exceptions. - # Reset existing builder machine to a clean state if possible. 
- vm_ok = False - if not options.resetserver: - print "Checking for valid existing build server" - if got_valid_builder_vm(): - print "...VM is present" - p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'list', '--details'], - cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - output = p.communicate()[0] - if 'fdroidclean' in output: - if options.verbose: - print "...snapshot exists - resetting build server to clean state" - retcode, output = vagrant(['status'], cwd='builder') - if 'running' in output: - if options.verbose: - print "...suspending" - vagrant(['suspend'], cwd='builder') - print "...waiting a sec..." - time.sleep(10) - p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'restore', 'fdroidclean'], - cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - output = p.communicate()[0] - if options.verbose: - print output - if p.returncode == 0: - print "...reset to snapshot - server is valid" - retcode, output = vagrant(['up'], cwd='builder') - if retcode != 0: - raise BuildException("Failed to start build server") - print "...waiting a sec..." - time.sleep(10) - vm_ok = True - else: - print "...failed to reset to snapshot" - else: - print "...snapshot doesn't exist - VBoxManage snapshot list:\n" + output - - # If we can't use the existing machine for any reason, make a - # new one from scratch. - if not vm_ok: - if os.path.exists('builder'): - print "Removing broken/incomplete/unwanted build server" - vagrant(['destroy', '-f'], cwd='builder') - shutil.rmtree('builder') - os.mkdir('builder') - - p = subprocess.Popen('vagrant --version', shell=True, stdout=subprocess.PIPE) - vver = p.communicate()[0] - if vver.startswith('Vagrant version 1.2'): - with open('builder/Vagrantfile', 'w') as vf: - vf.write('Vagrant.configure("2") do |config|\n') - vf.write('config.vm.box = "buildserver"\n') - vf.write('end\n') - else: - with open('builder/Vagrantfile', 'w') as vf: - vf.write('Vagrant::Config.run do |config|\n') - vf.write('config.vm.box = "buildserver"\n') - vf.write('end\n') - - print "Starting new build server" - retcode, _ = vagrant(['up'], cwd='builder') - if retcode != 0: - raise BuildException("Failed to start build server") - - # Open SSH connection to make sure it's working and ready... - print "Connecting to virtual machine..." - if subprocess.call('vagrant ssh-config >sshconfig', - cwd='builder', shell=True) != 0: - raise BuildException("Error getting ssh config") - vagranthost = 'default' # Host in ssh config file - sshconfig = ssh.SSHConfig() - sshf = open('builder/sshconfig', 'r') - sshconfig.parse(sshf) - sshf.close() - sshconfig = sshconfig.lookup(vagranthost) - sshs = ssh.SSHClient() - sshs.set_missing_host_key_policy(ssh.AutoAddPolicy()) - idfile = sshconfig['identityfile'] - if idfile.startswith('"') and idfile.endswith('"'): - idfile = idfile[1:-1] - sshs.connect(sshconfig['hostname'], username=sshconfig['user'], - port=int(sshconfig['port']), timeout=300, look_for_keys=False, - key_filename=idfile) - sshs.close() - - print "Saving clean state of new build server" - retcode, _ = vagrant(['suspend'], cwd='builder') - if retcode != 0: - raise BuildException("Failed to suspend build server") - print "...waiting a sec..." 
- time.sleep(10) - p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'take', 'fdroidclean'], - cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - output = p.communicate()[0] - if p.returncode != 0: - print output - raise BuildException("Failed to take snapshot") - print "...waiting a sec..." - time.sleep(10) - print "Restarting new build server" - retcode, _ = vagrant(['up'], cwd='builder') - if retcode != 0: - raise BuildException("Failed to start build server") - print "...waiting a sec..." - time.sleep(10) - # Make sure it worked... - p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'list', '--details'], - cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - output = p.communicate()[0] - if 'fdroidclean' not in output: - raise BuildException("Failed to take snapshot.") + Raises + ------ + :exc:`~fdroidserver.exception.BuildException` + If Paramiko is not installed, a srclib directory or srclib metadata + file is unexpectedly missing, the build process in the VM failed or + output files of the build process are missing. + :exc:`~fdroidserver.exception.FDroidException` + If the Buildserver ID could not be obtained or copying a directory to + the server failed. + """ + global buildserverid, ssh_channel try: + paramiko + except NameError as e: + raise BuildException("Paramiko is required to use the buildserver") from e + if options.verbose: + logging.getLogger("paramiko").setLevel(logging.INFO) + else: + logging.getLogger("paramiko").setLevel(logging.WARN) - # Get SSH configuration settings for us to connect... - if options.verbose: - print "Getting ssh configuration..." - subprocess.call('vagrant ssh-config >sshconfig', - cwd='builder', shell=True) - vagranthost = 'default' # Host in ssh config file + sshinfo = vmtools.get_clean_builder('builder') - # Load and parse the SSH config... - sshconfig = ssh.SSHConfig() - sshf = open('builder/sshconfig', 'r') - sshconfig.parse(sshf) - sshf.close() - sshconfig = sshconfig.lookup(vagranthost) + output = None + try: + if not buildserverid: + try: + buildserverid = subprocess.check_output(['vagrant', 'ssh', '-c', + 'cat /home/vagrant/buildserverid'], + cwd='builder').strip().decode() + logging.debug(_('Fetched buildserverid from VM: {buildserverid}') + .format(buildserverid=buildserverid)) + except Exception as e: + if type(buildserverid) is not str or not re.match('^[0-9a-f]{40}$', buildserverid): + logging.info(subprocess.check_output(['vagrant', 'status'], cwd="builder")) + raise FDroidException("Could not obtain buildserverid from buldserver VM. " + "(stored inside the buildserver VM at '/home/vagrant/buildserverid') " + "Please reset your buildserver, the setup VM is broken.") from e # Open SSH connection... - if options.verbose: - print "Connecting to virtual machine..." 
- sshs = ssh.SSHClient() - sshs.set_missing_host_key_policy(ssh.AutoAddPolicy()) - idfile = sshconfig['identityfile'] - if idfile.startswith('"') and idfile.endswith('"'): - idfile = idfile[1:-1] - sshs.connect(sshconfig['hostname'], username=sshconfig['user'], - port=int(sshconfig['port']), timeout=300, look_for_keys=False, - key_filename=idfile) + logging.info("Connecting to virtual machine...") + sshs = paramiko.SSHClient() + sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # nosec B507 only connects to local VM + sshs.connect(sshinfo['hostname'], username=sshinfo['user'], + port=sshinfo['port'], timeout=300, + look_for_keys=False, key_filename=sshinfo['idfile']) + + homedir = posixpath.join('/home', sshinfo['user']) # Get an SFTP connection... ftp = sshs.open_sftp() - ftp.get_channel().settimeout(15) + ftp.get_channel().settimeout(60) # Put all the necessary files in place... - ftp.chdir('/home/vagrant') + ftp.chdir(homedir) - # Helper to copy the contents of a directory to the server... def send_dir(path): - root = os.path.dirname(path) - main = os.path.basename(path) - ftp.mkdir(main) - for r, d, f in os.walk(path): - rr = os.path.relpath(r, root) - ftp.chdir(rr) - for dd in d: - ftp.mkdir(dd) - for ff in f: - lfile = os.path.join(root, rr, ff) - if not os.path.islink(lfile): - ftp.put(lfile, ff) - ftp.chmod(ff, os.stat(lfile).st_mode) - for i in range(len(rr.split('/'))): - ftp.chdir('..') - ftp.chdir('..') + """Copy the contents of a directory to the server.""" + logging.debug("rsyncing %s to %s" % (path, ftp.getcwd())) + # TODO this should move to `vagrant rsync` from >= v1.5 + try: + subprocess.check_output(['rsync', '--recursive', '--perms', '--links', '--quiet', '--rsh=' + + 'ssh -o StrictHostKeyChecking=no' + + ' -o UserKnownHostsFile=/dev/null' + + ' -o LogLevel=FATAL' + + ' -o IdentitiesOnly=yes' + + ' -o PasswordAuthentication=no' + + ' -p ' + str(sshinfo['port']) + + ' -i ' + sshinfo['idfile'], + path, + sshinfo['user'] + "@" + sshinfo['hostname'] + ":" + ftp.getcwd()], + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + raise FDroidException(str(e), e.output.decode()) from e - print "Preparing server for build..." + logging.info("Preparing server for build...") serverpath = os.path.abspath(os.path.dirname(__file__)) - ftp.put(os.path.join(serverpath, 'build.py'), 'build.py') - ftp.put(os.path.join(serverpath, 'common.py'), 'common.py') - ftp.put(os.path.join(serverpath, 'metadata.py'), 'metadata.py') - ftp.put(os.path.join(serverpath, '..', 'config.buildserver.py'), 'config.py') - ftp.chmod('config.py', 0o600) + ftp.mkdir('fdroidserver') + ftp.chdir('fdroidserver') + ftp.put(os.path.join(serverpath, '..', 'fdroid'), 'fdroid') + ftp.chmod('fdroid', 0o755) # nosec B103 permissions are appropriate + send_dir(os.path.join(serverpath)) + ftp.chdir(homedir) + + ftp.put(os.path.join(serverpath, '..', 'buildserver', + 'config.buildserver.yml'), 'config.yml') + ftp.chmod('config.yml', 0o600) + + # Copy over the ID (head commit hash) of the fdroidserver in use... + with open(os.path.join(os.getcwd(), 'tmp', 'fdroidserverid'), 'wb') as fp: + fp.write(subprocess.check_output(['git', 'rev-parse', 'HEAD'], + cwd=serverpath)) + ftp.put('tmp/fdroidserverid', 'fdroidserverid') # Copy the metadata - just the file for this app... ftp.mkdir('metadata') ftp.mkdir('srclibs') ftp.chdir('metadata') - ftp.put(os.path.join('metadata', app['id'] + '.txt'), - app['id'] + '.txt') - # And patches if there are any... 
- if os.path.exists(os.path.join('metadata', app['id'])): - send_dir(os.path.join('metadata', app['id'])) + ftp.put(app.metadatapath, os.path.basename(app.metadatapath)) - ftp.chdir('/home/vagrant') + # And patches if there are any... + if os.path.exists(os.path.join('metadata', app.id)): + send_dir(os.path.join('metadata', app.id)) + + ftp.chdir(homedir) # Create the build directory... ftp.mkdir('build') ftp.chdir('build') ftp.mkdir('extlib') ftp.mkdir('srclib') # Copy any extlibs that are required... - if 'extlibs' in thisbuild: - ftp.chdir('/home/vagrant/build/extlib') - for lib in thisbuild['extlibs'].split(';'): + if build.extlibs: + ftp.chdir(posixpath.join(homedir, 'build', 'extlib')) + for lib in build.extlibs: lib = lib.strip() libsrc = os.path.join('build/extlib', lib) if not os.path.exists(libsrc): @@ -289,200 +195,461 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): ftp.mkdir(d) ftp.chdir(d) ftp.put(libsrc, lp[-1]) - for _ in lp[:-1]: + for _ignored in lp[:-1]: ftp.chdir('..') # Copy any srclibs that are required... srclibpaths = [] - if 'srclibs' in thisbuild: - for lib in thisbuild['srclibs'].split(';'): - srclibpaths.append(common.getsrclib(lib, 'build/srclib', srclibpaths, - basepath=True, prepare=False)) + if build.srclibs: + for lib in build.srclibs: + srclibpaths.append( + common.getsrclib(lib, 'build/srclib', basepath=True, prepare=False)) # If one was used for the main source, add that too. basesrclib = vcs.getsrclib() if basesrclib: srclibpaths.append(basesrclib) for name, number, lib in srclibpaths: - if options.verbose: - print "Sending srclib '" + lib + "'" - ftp.chdir('/home/vagrant/build/srclib') + logging.info("Sending srclib '%s'" % lib) + ftp.chdir(posixpath.join(homedir, 'build', 'srclib')) if not os.path.exists(lib): raise BuildException("Missing srclib directory '" + lib + "'") fv = '.fdroidvcs-' + name ftp.put(os.path.join('build/srclib', fv), fv) send_dir(lib) # Copy the metadata file too... - ftp.chdir('/home/vagrant/srclibs') - ftp.put(os.path.join('srclibs', name + '.txt'), - name + '.txt') + ftp.chdir(posixpath.join(homedir, 'srclibs')) + srclibsfile = os.path.join('srclibs', name + '.yml') + if os.path.isfile(srclibsfile): + ftp.put(srclibsfile, os.path.basename(srclibsfile)) + else: + raise BuildException(_('cannot find required srclibs: "{path}"') + .format(path=srclibsfile)) # Copy the main app source code # (no need if it's a srclib) if (not basesrclib) and os.path.exists(build_dir): - ftp.chdir('/home/vagrant/build') - fv = '.fdroidvcs-' + app['id'] + ftp.chdir(posixpath.join(homedir, 'build')) + fv = '.fdroidvcs-' + app.id ftp.put(os.path.join('build', fv), fv) send_dir(build_dir) # Execute the build script... - print "Starting build..." - chan = sshs.get_transport().open_session() - cmdline = 'python build.py --on-server' + logging.info("Starting build...") + ssh_channel = sshs.get_transport().open_session() + ssh_channel.get_pty() + cmdline = posixpath.join(homedir, 'fdroidserver', 'fdroid') + cmdline += ' build --on-server' if force: cmdline += ' --force --test' if options.verbose: cmdline += ' --verbose' - cmdline += " %s:%s" % (app['id'], thisbuild['vercode']) - chan.exec_command('bash -c ". 
~/.bsenv && ' + cmdline + '"') - output = '' - error = '' - while not chan.exit_status_ready(): - while chan.recv_ready(): - output += chan.recv(1024) - while chan.recv_stderr_ready(): - error += chan.recv_stderr(1024) - time.sleep(0.1) - print "...getting exit status" - returncode = chan.recv_exit_status() - while True: - get = chan.recv(1024) - if len(get) == 0: - break - output += get - while True: - get = chan.recv_stderr(1024) - if len(get) == 0: - break - error += get + if options.refresh_scanner or config.get('refresh_scanner'): + cmdline += ' --refresh-scanner' + if options.skipscan: + cmdline += ' --skip-scan' + if options.notarball: + cmdline += ' --no-tarball' + if (options.scan_binary or config.get('scan_binary')) and not options.skipscan: + cmdline += ' --scan-binary' + cmdline += " %s:%s" % (app.id, build.versionCode) + ssh_channel.exec_command('bash --login -c "' + cmdline + '"') # nosec B601 inputs are sanitized + + # Fetch build process output ... + try: + cmd_stdout = ssh_channel.makefile('rb', 1024) + output = bytes() + output += common.get_android_tools_version_log().encode() + while not ssh_channel.exit_status_ready(): + line = cmd_stdout.readline() + if line: + if options.verbose: + logging.debug("buildserver > " + str(line, 'utf-8', 'replace').rstrip()) + output += line + else: + time.sleep(0.05) + for line in cmd_stdout.readlines(): + if options.verbose: + logging.debug("buildserver > " + str(line, 'utf-8', 'replace').rstrip()) + output += line + finally: + cmd_stdout.close() + + # Check build process exit status ... + logging.info("...getting exit status") + returncode = ssh_channel.recv_exit_status() if returncode != 0: - raise BuildException("Build.py failed on server for %s:%s" % (app['id'], thisbuild['version']), output, error) + if timeout_event.is_set(): + message = "Timeout exceeded! Build VM force-stopped for {0}:{1}" + else: + message = "Build.py failed on server for {0}:{1}" + raise BuildException(message.format(app.id, build.versionName), + str(output, 'utf-8', 'replace')) + + # Retreive logs... + toolsversion_log = common.get_toolsversion_logname(app, build) + try: + ftp.chdir(posixpath.join(homedir, log_dir)) + ftp.get(toolsversion_log, os.path.join(log_dir, toolsversion_log)) + logging.debug('retrieved %s', toolsversion_log) + except Exception as e: + logging.warning('could not get %s from builder vm: %s' % (toolsversion_log, e)) # Retrieve the built files... - print "Retrieving build output..." + logging.info("Retrieving build output...") if force: - ftp.chdir('/home/vagrant/tmp') + ftp.chdir(posixpath.join(homedir, 'tmp')) else: - ftp.chdir('/home/vagrant/unsigned') - apkfile = common.getapkname(app,thisbuild) - tarball = common.getsrcname(app,thisbuild) + ftp.chdir(posixpath.join(homedir, 'unsigned')) + apkfile = common.get_release_filename(app, build) + tarball = common.get_src_tarball_name(app.id, build.versionCode) try: ftp.get(apkfile, os.path.join(output_dir, apkfile)) - ftp.get(tarball, os.path.join(output_dir, tarball)) - except: - raise BuildException("Build failed for %s:%s - missing output files" % (app['id'], thisbuild['version']), output, error) + if not options.notarball: + ftp.get(tarball, os.path.join(output_dir, tarball)) + except Exception as exc: + raise BuildException( + "Build failed for {0}:{1} - missing output files".format( + app.id, build.versionName), str(output, 'utf-8', 'replace')) from exc ftp.close() finally: - # Suspend the build server. 
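# Illustrative note, not part of the patch: with the new code above, the
# command executed on the builder VM ends up looking roughly like this
# (application id, version code and home directory are hypothetical):
#
#   bash --login -c "/home/vagrant/fdroidserver/fdroid build --on-server --verbose org.example.app:42"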
- print "Suspending build server" - subprocess.call(['vagrant', 'suspend'], cwd='builder') + vm = vmtools.get_build_vm('builder') + logging.info('destroying buildserver after build') + vm.destroy() -def adapt_gradle(build_dir): + # deploy logfile to repository web server + if output: + common.deploy_build_log_with_rsync(app.id, build.versionCode, output) + else: + logging.debug('skip publishing full build logs: ' + 'no output present') + + +def force_gradle_build_tools(build_dir, build_tools): + """Manipulate build tools version used in top level gradle file. + + Parameters + ---------- + build_dir + The directory to start looking for gradle files. + build_tools + The build tools version that should be forced to use. + """ for root, dirs, files in os.walk(build_dir): - if 'build.gradle' in files: - path = os.path.join(root, 'build.gradle') - if options.verbose: - print "Adapting build.gradle at %s" % path - - subprocess.call(['sed', '-i', - 's@buildToolsVersion[ ]*["\\\'][0-9\.]*["\\\']@buildToolsVersion "' - + config['build_tools'] + '"@g', path]) + for filename in files: + if not filename.endswith('.gradle'): + continue + path = os.path.join(root, filename) + if not os.path.isfile(path): + continue + logging.debug("Forcing build-tools %s in %s" % (build_tools, path)) + common.regsub_file(r"""(\s*)buildToolsVersion([\s=]+).*""", + r"""\1buildToolsVersion\2'%s'""" % build_tools, + path) -def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver): - """Do a build locally.""" +def transform_first_char(string, method): + """Use method() on the first character of string.""" + if len(string) == 0: + return string + if len(string) == 1: + return method(string) + return method(string[0]) + string[1:] + + +def get_metadata_from_apk(app, build, apkfile): + """Get the required metadata from the built APK. + + VersionName is allowed to be a blank string, i.e. '' + + Parameters + ---------- + app + The app metadata used to build the APK. + build + The build that resulted in the APK. + apkfile + The path of the APK file. + + Returns + ------- + versionCode + The versionCode from the APK or from the metadata is build.novcheck is + set. + versionName + The versionName from the APK or from the metadata is build.novcheck is + set. + + Raises + ------ + :exc:`~fdroidserver.exception.BuildException` + If native code should have been built but was not packaged, no version + information or no package ID could be found or there is a mismatch + between the package ID in the metadata and the one found in the APK. + """ + appid, versionCode, versionName = common.get_apk_id(apkfile) + native_code = common.get_native_code(apkfile) + + if build.buildjni and build.buildjni != ['no'] and not native_code: + raise BuildException("Native code should have been built but none was packaged") + if build.novcheck: + versionCode = build.versionCode + versionName = build.versionName + if not versionCode or versionName is None: + raise BuildException("Could not find version information in build in output") + if not appid: + raise BuildException("Could not find package ID in output") + if appid != app.id: + raise BuildException("Wrong package ID - build " + appid + " but expected " + app.id) + + return versionCode, versionName + + +def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh): + """Do a build locally. + + Parameters + ---------- + app + The metadata of the app to build. 
+ build + The build of the app to build. + vcs + The version control system controller object of the app. + build_dir + The local source-code checkout directory of the app. + output_dir + The target folder for the build result. + log_dir + The directory in the VM where the build logs are getting stored. + srclib_dir + The path to the srclibs directory, usually 'build/srclib'. + extlib_dir + The path to the extlibs directory, usually 'build/extlib'. + tmp_dir + The temporary directory for building the source tarball. + force + Don't refresh the already cloned repository and make the build stop on + exceptions. + onserver + Assume the build is happening inside the VM. + refresh + Enable fetching the latest refs from the VCS remote. + + Raises + ------ + :exc:`~fdroidserver.exception.BuildException` + If running a `sudo` command failed, locking the root account failed, + `sudo` couldn't be removed, cleaning the build environment failed, + skipping the scanning has been requested but `scandelete` is present, + errors occurred during scanning, running the `build` commands from the + metadata failed, building native code failed, building with the + specified build method failed, no output could be found with build + method `maven`, more or less than one APK were found with build method + `gradle`, less or more than one APKs match the `output` glob specified + in the metadata, running a `postbuild` command specified in the + metadata failed, the built APK is debuggable, the unsigned APK is not + at the expected location, the APK does not contain the expected + `versionName` and `versionCode` or undesired package names have been + found in the APK. + :exc:`~fdroidserver.exception.FDroidException` + If no Android NDK version could be found and the build isn't run in a + builder VM, the selected Android NDK is not a directory. + """ + ndk_path = build.ndk_path() + if build.ndk or (build.buildjni and build.buildjni != ['no']): + if not ndk_path: + logging.warning("Android NDK version '%s' could not be found!" % build.ndk) + logging.warning("Configured versions:") + for k, v in config['ndk_paths'].items(): + if k.endswith("_orig"): + continue + logging.warning(" %s: %s" % (k, v)) + if onserver: + common.auto_install_ndk(build) + else: + raise FDroidException() + elif not os.path.isdir(ndk_path): + logging.critical("Android NDK '%s' is not a directory!" 
% ndk_path) + raise FDroidException() + + common.set_FDroidPopen_env(app, build) + + # create ..._toolsversion.log when running in builder vm + if onserver: + # before doing anything, run the sudo commands to setup the VM + if build.sudo: + logging.info("Running 'sudo' commands in %s" % os.getcwd()) + + p = FDroidPopen(['sudo', 'DEBIAN_FRONTEND=noninteractive', + 'bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '; '.join(build.sudo)]) + if p.returncode != 0: + raise BuildException("Error running sudo command for %s:%s" % + (app.id, build.versionName), p.output) + + p = FDroidPopen(['sudo', 'passwd', '--lock', 'root']) + if p.returncode != 0: + raise BuildException("Error locking root account for %s:%s" % + (app.id, build.versionName), p.output) + + p = FDroidPopen(['sudo', 'SUDO_FORCE_REMOVE=yes', 'dpkg', '--purge', 'sudo']) + if p.returncode != 0: + raise BuildException("Error removing sudo for %s:%s" % + (app.id, build.versionName), p.output) + + log_path = os.path.join(log_dir, + common.get_toolsversion_logname(app, build)) + with open(log_path, 'w') as f: + f.write(common.get_android_tools_version_log()) + else: + if build.sudo: + logging.warning('%s:%s runs this on the buildserver with sudo:\n\t%s\nThese commands were skipped because fdroid build is not running on a dedicated build server.' + % (app.id, build.versionName, build.sudo)) # Prepare the source code... - root_dir, srclibpaths = common.prepare_source(vcs, app, thisbuild, - build_dir, srclib_dir, extlib_dir, onserver) + root_dir, srclibpaths = common.prepare_source(vcs, app, build, + build_dir, srclib_dir, + extlib_dir, onserver, refresh) # We need to clean via the build tool in case the binary dirs are # different from the default ones p = None - if thisbuild['type'] == 'maven': - print "Cleaning Maven project..." + gradletasks = [] + bmethod = build.build_method() + if bmethod == 'maven': + logging.info("Cleaning Maven project...") cmd = [config['mvn3'], 'clean', '-Dandroid.sdk.path=' + config['sdk_path']] - if '@' in thisbuild['maven']: - maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@',1)[1]) + if '@' in build.maven: + maven_dir = os.path.join(root_dir, build.maven.split('@', 1)[1]) maven_dir = os.path.normpath(maven_dir) else: maven_dir = root_dir p = FDroidPopen(cmd, cwd=maven_dir) - elif thisbuild['type'] == 'gradle': - print "Cleaning Gradle project..." - cmd = [config['gradle'], 'clean'] + elif bmethod == 'gradle': - if '@' in thisbuild['gradle']: - gradle_dir = os.path.join(root_dir, thisbuild['gradle'].split('@',1)[1]) - gradle_dir = os.path.normpath(gradle_dir) - else: - gradle_dir = root_dir + logging.info("Cleaning Gradle project...") - p = FDroidPopen(cmd, cwd=gradle_dir) + if build.preassemble: + gradletasks += build.preassemble - elif thisbuild['type'] == 'kivy': - pass + flavors = build.gradle + if flavors == ['yes']: + flavors = [] - elif thisbuild['type'] == 'ant': - print "Cleaning Ant project..." 
+ flavors_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavors]) + + gradletasks += ['assemble' + flavors_cmd + 'Release'] + + cmd = [config['gradle']] + if build.gradleprops: + cmd += ['-P' + kv for kv in build.gradleprops] + + cmd += ['clean'] + p = FDroidPopen(cmd, cwd=root_dir, envs={"GRADLE_VERSION_DIR": config['gradle_version_dir'], "CACHEDIR": config['cachedir']}) + + elif bmethod == 'ant': + logging.info("Cleaning Ant project...") p = FDroidPopen(['ant', 'clean'], cwd=root_dir) if p is not None and p.returncode != 0: raise BuildException("Error cleaning %s:%s" % - (app['id'], thisbuild['version']), p.stdout, p.stderr) + (app.id, build.versionName), p.output) - # Scan before building... - print "Scanning source for common problems..." - buildprobs = common.scan_source(build_dir, root_dir, thisbuild) - if len(buildprobs) > 0: - print 'Scanner found ' + str(len(buildprobs)) + ' problems:' - for problem in buildprobs: - print ' %s' % problem - if not force: - raise BuildException("Can't build due to " + - str(len(buildprobs)) + " scanned problems") + for root, dirs, files in os.walk(build_dir): - # Build the source tarball right before we build the release... - print "Creating source tarball..." - tarname = common.getsrcname(app,thisbuild) - tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz") - def tarexc(f): - return any(f.endswith(s) for s in ['.svn', '.git', '.hg', '.bzr']) - tarball.add(build_dir, tarname, exclude=tarexc) - tarball.close() + def del_dirs(dl): + for d in dl: + shutil.rmtree(os.path.join(root, d), ignore_errors=True) + + def del_files(fl): + for f in fl: + if f in files: + os.remove(os.path.join(root, f)) + + if any(f in files for f in ['build.gradle', 'build.gradle.kts', 'settings.gradle', 'settings.gradle.kts']): + # Even when running clean, gradle stores task/artifact caches in + # .gradle/ as binary files. To avoid overcomplicating the scanner, + # manually delete them, just like `gradle clean` should have removed + # the build/* dirs. + del_dirs([os.path.join('build', 'android-profile'), + os.path.join('build', 'generated'), + os.path.join('build', 'intermediates'), + os.path.join('build', 'outputs'), + os.path.join('build', 'reports'), + os.path.join('build', 'tmp'), + os.path.join('buildSrc', 'build'), + '.gradle']) + del_files(['gradlew', 'gradlew.bat']) + + if 'pom.xml' in files: + del_dirs(['target']) + + if any(f in files for f in ['ant.properties', 'project.properties', 'build.xml']): + del_dirs(['bin', 'gen']) + + if 'jni' in dirs: + del_dirs(['obj']) + + if options.skipscan: + if build.scandelete: + raise BuildException("Refusing to skip source scan since scandelete is present") + else: + # Scan before building... + logging.info("Scanning source for common problems...") + scanner.options = options # pass verbose through + count = scanner.scan_source(build_dir, build) + if count > 0: + if force: + logging.warning(ngettext('Scanner found {} problem', + 'Scanner found {} problems', count).format(count)) + else: + raise BuildException(ngettext( + "Can't build due to {} error while scanning", + "Can't build due to {} errors while scanning", count).format(count)) + + if not options.notarball: + # Build the source tarball right before we build the release... 
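# Illustrative sketch, not part of the patch: the flavor handling above maps
# the metadata's gradle flavor list onto a single camel-cased assemble task,
# matching how Gradle names combined-flavor tasks. The values are
# hypothetical; transform_first_char() is the helper defined earlier in this
# module.
flavors = ['foss', 'full']
flavors_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavors])
assert flavors_cmd == 'FossFull'
assert 'assemble' + flavors_cmd + 'Release' == 'assembleFossFullRelease'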
+ logging.info("Creating source tarball...") + tarname = common.get_src_tarball_name(app.id, build.versionCode) + tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz") + + def tarexc(t): + return None if any(t.name.endswith(s) for s in ['.svn', '.git', '.hg', '.bzr']) else t + tarball.add(build_dir, tarname, filter=tarexc) + tarball.close() # Run a build command if one is required... - if 'build' in thisbuild: - cmd = common.replace_config_vars(thisbuild['build']) + if build.build: + logging.info("Running 'build' commands in %s" % root_dir) + cmd = common.replace_config_vars("; ".join(build.build), build) + # Substitute source library paths into commands... for name, number, libpath in srclibpaths: - libpath = os.path.relpath(libpath, root_dir) - cmd = cmd.replace('$$' + name + '$$', libpath) - if options.verbose: - print "Running 'build' commands in %s" % root_dir + cmd = cmd.replace('$$' + name + '$$', os.path.join(os.getcwd(), libpath)) - p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running build command for %s:%s" % - (app['id'], thisbuild['version']), p.stdout, p.stderr) + (app.id, build.versionName), p.output) # Build native stuff if required... - if thisbuild.get('buildjni') not in (None, 'no'): - print "Building native libraries..." - jni_components = thisbuild.get('buildjni') - if jni_components == 'yes': + if build.buildjni and build.buildjni != ['no']: + logging.info("Building the native code") + jni_components = build.buildjni + + if jni_components == ['yes']: jni_components = [''] - else: - jni_components = [c.strip() for c in jni_components.split(';')] - ndkbuild = os.path.join(config['ndk_path'], "ndk-build") + cmd = [os.path.join(ndk_path, "ndk-build"), "-j1"] for d in jni_components: - if options.verbose: - print "Running ndk-build in " + root_dir + '/' + d - manifest = root_dir + '/' + d + '/AndroidManifest.xml' + if d: + logging.info("Building native code in '%s'" % d) + else: + logging.info("Building native code in the main project") + manifest = os.path.join(root_dir, d, 'AndroidManifest.xml') if os.path.exists(manifest): # Read and write the whole AM.xml to fix newlines and avoid # the ndk r8c or later 'wordlist' errors. The outcome of this @@ -493,370 +660,426 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d open(manifest, 'w').write(manifest_text) # In case the AM.xml read was big, free the memory del manifest_text - p = FDroidPopen([ndkbuild], cwd=os.path.join(root_dir,d)) + p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d)) if p.returncode != 0: - raise BuildException("NDK build failed for %s:%s" % (app['id'], thisbuild['version']), p.stdout, p.stderr) + raise BuildException("NDK build failed for %s:%s" % (app.id, build.versionName), p.output) p = None # Build the release... - if thisbuild['type'] == 'maven': - print "Building Maven project..." 
+ if bmethod == 'maven': + logging.info("Building Maven project...") - if '@' in thisbuild['maven']: - maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@',1)[1]) + if '@' in build.maven: + maven_dir = os.path.join(root_dir, build.maven.split('@', 1)[1]) else: maven_dir = root_dir mvncmd = [config['mvn3'], '-Dandroid.sdk.path=' + config['sdk_path'], - '-Dandroid.sign.debug=false', '-Dandroid.release=true', 'package'] - if 'target' in thisbuild: - target = thisbuild["target"].split('-')[1] - subprocess.call(['sed', '-i', - 's@[0-9]*@'+target+'@g', - 'pom.xml'], cwd=root_dir) - if '@' in thisbuild['maven']: - subprocess.call(['sed', '-i', - 's@[0-9]*@'+target+'@g', - 'pom.xml'], cwd=maven_dir) - - if 'mvnflags' in thisbuild: - mvncmd += thisbuild['mvnflags'] + '-Dmaven.jar.sign.skip=true', '-Dmaven.test.skip=true', + '-Dandroid.sign.debug=false', '-Dandroid.release=true', + 'package'] + if build.target: + target = build.target.split('-')[1] + common.regsub_file(r'[0-9]*', + r'%s' % target, + os.path.join(root_dir, 'pom.xml')) + if '@' in build.maven: + common.regsub_file(r'[0-9]*', + r'%s' % target, + os.path.join(maven_dir, 'pom.xml')) p = FDroidPopen(mvncmd, cwd=maven_dir) bindir = os.path.join(root_dir, 'target') - elif thisbuild['type'] == 'kivy': - print "Building Kivy project..." + elif bmethod == 'gradle': + logging.info("Building Gradle project...") - spec = os.path.join(root_dir, 'buildozer.spec') - if not os.path.exists(spec): - raise BuildException("Expected to find buildozer-compatible spec at {0}" - .format(spec)) + cmd = [config['gradle']] + if build.gradleprops: + cmd += ['-P' + kv for kv in build.gradleprops] - defaults = {'orientation': 'landscape', 'icon': '', - 'permissions': '', 'android.api': "18"} - bconfig = ConfigParser(defaults, allow_no_value=True) - bconfig.read(spec) + cmd += gradletasks - distdir = 'python-for-android/dist/fdroid' - if os.path.exists(distdir): - shutil.rmtree(distdir) + p = FDroidPopen(cmd, cwd=root_dir, envs={"GRADLE_VERSION_DIR": config['gradle_version_dir'], "CACHEDIR": config['cachedir']}) - modules = bconfig.get('app', 'requirements').split(',') - - cmd = 'ANDROIDSDK=' + config['sdk_path'] - cmd += ' ANDROIDNDK=' + config['ndk_path'] - cmd += ' ANDROIDNDKVER=r9' - cmd += ' ANDROIDAPI=' + str(bconfig.get('app', 'android.api')) - cmd += ' VIRTUALENV=virtualenv' - cmd += ' ./distribute.sh' - cmd += ' -m ' + "'" + ' '.join(modules) + "'" - cmd += ' -d fdroid' - if subprocess.call(cmd, cwd='python-for-android', shell=True) != 0: - raise BuildException("Distribute build failed") - - cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name') - if cid != app['id']: - raise BuildException("Package ID mismatch between metadata and spec") - - orientation = bconfig.get('app', 'orientation', 'landscape') - if orientation == 'all': - orientation = 'sensor' - - cmd = ['./build.py' - '--dir', root_dir, - '--name', bconfig.get('app', 'title'), - '--package', app['id'], - '--version', bconfig.get('app', 'version'), - '--orientation', orientation, - ] - - perms = bconfig.get('app', 'permissions') - for perm in perms.split(','): - cmd.extend(['--permission', perm]) - - if config.get('app', 'fullscreen') == 0: - cmd.append('--window') - - icon = bconfig.get('app', 'icon.filename') - if icon: - cmd.extend(['--icon', os.path.join(root_dir, icon)]) - - cmd.append('release') - p = FDroidPopen(cmd, cwd=distdir) - - elif thisbuild['type'] == 'gradle': - print "Building Gradle project..." 
- if '@' in thisbuild['gradle']: - flavour = thisbuild['gradle'].split('@')[0] - gradle_dir = thisbuild['gradle'].split('@')[1] - gradle_dir = os.path.join(root_dir, gradle_dir) - else: - flavour = thisbuild['gradle'] - gradle_dir = root_dir - - - if 'compilesdk' in thisbuild: - level = thisbuild["compilesdk"].split('-')[1] - subprocess.call(['sed', '-i', - 's@compileSdkVersion[ ]*[0-9]*@compileSdkVersion '+level+'@g', - 'build.gradle'], cwd=root_dir) - if '@' in thisbuild['gradle']: - subprocess.call(['sed', '-i', - 's@compileSdkVersion[ ]*[0-9]*@compileSdkVersion '+level+'@g', - 'build.gradle'], cwd=gradle_dir) - - adapt_gradle(gradle_dir) - - for name, number, libpath in srclibpaths: - adapt_gradle(libpath) - - if flavour in ['main', 'yes', '']: - flavour = '' - - commands = [config['gradle']] - if 'preassemble' in thisbuild: - for task in thisbuild['preassemble'].split(): - commands.append(task) - commands += ['assemble'+flavour+'Release'] - - p = FDroidPopen(commands, cwd=gradle_dir) - - else: - print "Building Ant project..." + elif bmethod == 'ant': + logging.info("Building Ant project...") cmd = ['ant'] - if 'antcommand' in thisbuild: - cmd += [thisbuild['antcommand']] + if build.antcommands: + cmd += build.antcommands else: cmd += ['release'] p = FDroidPopen(cmd, cwd=root_dir) bindir = os.path.join(root_dir, 'bin') - if p.returncode != 0: - raise BuildException("Build failed for %s:%s" % (app['id'], thisbuild['version']), p.stdout, p.stderr) - print "Successfully built version " + thisbuild['version'] + ' of ' + app['id'] + if os.path.isdir(os.path.join(build_dir, '.git')): + commit_id = str(common.get_head_commit_id(build_dir)) + else: + commit_id = build.commit - # Find the apk name in the output... - if 'bindir' in thisbuild: - bindir = os.path.join(build_dir, thisbuild['bindir']) + if p is not None and p.returncode != 0: + raise BuildException("Build failed for %s:%s@%s" % (app.id, build.versionName, commit_id), + p.output) + logging.info("Successfully built version {versionName} of {appid} from {commit_id}" + .format(versionName=build.versionName, appid=app.id, commit_id=commit_id)) - if thisbuild['type'] == 'maven': + omethod = build.output_method() + if omethod == 'maven': stdout_apk = '\n'.join([ - line for line in p.stdout.splitlines() if any(a in line for a in ('.apk','.ap_'))]) + line for line in p.output.splitlines() if any( + a in line for a in ('.apk', '.ap_', '.jar'))]) m = re.match(r".*^\[INFO\] .*apkbuilder.*/([^/]*)\.apk", - stdout_apk, re.S|re.M) + stdout_apk, re.S | re.M) if not m: m = re.match(r".*^\[INFO\] Creating additional unsigned apk file .*/([^/]+)\.apk[^l]", - stdout_apk, re.S|re.M) + stdout_apk, re.S | re.M) if not m: m = re.match(r'.*^\[INFO\] [^$]*aapt \[package,[^$]*' + bindir + r'/([^/]+)\.ap[_k][,\]]', - stdout_apk, re.S|re.M) + stdout_apk, re.S | re.M) + + if not m: + m = re.match(r".*^\[INFO\] Building jar: .*/" + bindir + r"/(.+)\.jar", + stdout_apk, re.S | re.M) if not m: raise BuildException('Failed to find output') src = m.group(1) src = os.path.join(bindir, src) + '.apk' - elif thisbuild['type'] == 'kivy': - src = 'python-for-android/dist/default/bin/{0}-{1}-release.apk'.format( - bconfig.get('app', 'title'), bconfig.get('app', 'version')) - elif thisbuild['type'] == 'gradle': - dd = build_dir - if 'subdir' in thisbuild: - dd = os.path.join(dd, thisbuild['subdir']) - if flavour in ['main', 'yes', '']: - name = '-'.join([os.path.basename(dd), 'release', 'unsigned']) - else: - name = '-'.join([os.path.basename(dd), flavour, 'release', 
'unsigned']) - src = os.path.join(dd, 'build', 'apk', name+'.apk') - else: + + elif omethod == 'gradle': + src = None + apk_dirs = [ + # gradle plugin >= 3.0 + os.path.join(root_dir, 'build', 'outputs', 'apk', 'release'), + # gradle plugin < 3.0 and >= 0.11 + os.path.join(root_dir, 'build', 'outputs', 'apk'), + # really old path + os.path.join(root_dir, 'build', 'apk'), + ] + # If we build with gradle flavors with gradle plugin >= 3.0 the APK will be in + # a subdirectory corresponding to the flavor command used, but with different + # capitalization. + if flavors_cmd: + apk_dirs.append(os.path.join(root_dir, 'build', 'outputs', 'apk', transform_first_char(flavors_cmd, str.lower), 'release')) + for apks_dir in apk_dirs: + for apkglob in ['*-release-unsigned.apk', '*-unsigned.apk', '*.apk']: + apks = glob.glob(os.path.join(apks_dir, apkglob)) + + if len(apks) > 1: + raise BuildException('More than one resulting apks found in %s' % apks_dir, + '\n'.join(apks)) + if len(apks) == 1: + src = apks[0] + break + if src is not None: + break + + if src is None: + raise BuildException('Failed to find any output apks') + + elif omethod == 'ant': stdout_apk = '\n'.join([ - line for line in p.stdout.splitlines() if '.apk' in line]) + line for line in p.output.splitlines() if '.apk' in line]) src = re.match(r".*^.*Creating (.+) for release.*$.*", stdout_apk, - re.S|re.M).group(1) + re.S | re.M).group(1) src = os.path.join(bindir, src) + elif omethod == 'raw': + output_path = common.replace_build_vars(build.output, build) + globpath = os.path.join(root_dir, output_path) + apks = glob.glob(globpath) + if len(apks) > 1: + raise BuildException('Multiple apks match %s' % globpath, '\n'.join(apks)) + if len(apks) < 1: + raise BuildException('No apks match %s' % globpath) + src = os.path.normpath(apks[0]) + + # Run a postbuild command if one is required... + if build.postbuild: + logging.info(f"Running 'postbuild' commands in {root_dir}") + cmd = common.replace_config_vars("; ".join(build.postbuild), build) + + # Substitute source library paths into commands... + for name, number, libpath in srclibpaths: + cmd = cmd.replace(f"$${name}$$", str(Path.cwd() / libpath)) + + cmd = cmd.replace('$$OUT$$', str(Path(src).resolve())) + + p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', cmd], cwd=root_dir) + + if p.returncode != 0: + raise BuildException("Error running postbuild command for " + f"{app.id}:{build.versionName}", p.output) # Make sure it's not debuggable... - if common.isApkDebuggable(src, config): - raise BuildException("APK is debuggable") + if common.is_debuggable_or_testOnly(src): + raise BuildException( + "%s: debuggable or testOnly set in AndroidManifest.xml" % src + ) # By way of a sanity check, make sure the version and version - # code in our new apk match what we expect... - print "Checking " + src + # code in our new APK match what we expect... 
+ logging.debug("Checking " + src) if not os.path.exists(src): - raise BuildException("Unsigned apk is not at expected location of " + src) + raise BuildException("Unsigned APK is not at expected location of " + src) - p = subprocess.Popen([os.path.join(config['sdk_path'], - 'build-tools', config['build_tools'], 'aapt'), - 'dump', 'badging', src], - stdout=subprocess.PIPE) - output = p.communicate()[0] + if common.get_file_extension(src) == 'apk': + vercode, version = get_metadata_from_apk(app, build, src) + if version != build.versionName or vercode != build.versionCode: + raise BuildException(("Unexpected version/version code in output;" + " APK: '%s' / '%d', " + " Expected: '%s' / '%d'") + % (version, vercode, build.versionName, + build.versionCode)) + if (options.scan_binary or config.get('scan_binary')) and not options.skipscan: + if scanner.scan_binary(src): + raise BuildException("Found blocklisted packages in final apk!") - vercode = None - version = None - foundid = None - for line in output.splitlines(): - if line.startswith("package:"): - pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*") - m = pat.match(line) - if m: - foundid = m.group(1) - pat = re.compile(".*versionCode='([0-9]*)'.*") - m = pat.match(line) - if m: - vercode = m.group(1) - pat = re.compile(".*versionName='([^']*)'.*") - m = pat.match(line) - if m: - version = m.group(1) - - if thisbuild['novcheck']: - vercode = thisbuild['vercode'] - version = thisbuild['version'] - if not version or not vercode: - raise BuildException("Could not find version information in build in output") - if not foundid: - raise BuildException("Could not find package ID in output") - if foundid != app['id']: - raise BuildException("Wrong package ID - build " + foundid + " but expected " + app['id']) - - # Some apps (e.g. Timeriffic) have had the bonkers idea of - # including the entire changelog in the version number. Remove - # it so we can compare. (TODO: might be better to remove it - # before we compile, in fact) - index = version.find(" //") - if index != -1: - version = version[:index] - - if (version != thisbuild['version'] or - vercode != thisbuild['vercode']): - raise BuildException(("Unexpected version/version code in output;" - " APK: '%s' / '%s', " - " Expected: '%s' / '%s'") - % (version, str(vercode), thisbuild['version'], str(thisbuild['vercode'])) - ) - - # Copy the unsigned apk to our destination directory for further + # Copy the unsigned APK to our destination directory for further # processing (by publish.py)... - dest = os.path.join(output_dir, common.getapkname(app,thisbuild)) + dest = os.path.join( + output_dir, + common.get_release_filename( + app, build, common.get_file_extension(src) + ) + ) shutil.copyfile(src, dest) # Move the source tarball into the output directory... - if output_dir != tmp_dir: + if output_dir != tmp_dir and not options.notarball: shutil.move(os.path.join(tmp_dir, tarname), - os.path.join(output_dir, tarname)) + os.path.join(output_dir, tarname)) -def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir, extlib_dir, - tmp_dir, repo_dir, vcs, test, server, force, onserver): +def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir, + srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, test, + server, force, onserver, refresh): + """Build a particular version of an application, if it needs building. + + Parameters + ---------- + app + The metadata of the app to build. + build + The build of the app to build. 
+ build_dir + The local source-code checkout directory of the app. + output_dir + The directory where the build output will go. Usually this is the + 'unsigned' directory. + log_dir + The directory in the VM where the build logs are getting stored. + also_check_dir + An additional location for checking if the build is necessary (usually + the archive repo). + srclib_dir + The path to the srclibs directory, usually 'build/srclib'. + extlib_dir + The path to the extlibs directory, usually 'build/extlib'. + tmp_dir + The temporary directory for building the source tarball of the app to + build. + repo_dir + The repo directory - used for checking if the build is necessary. + vcs + The version control system controller object of the app to build. + test + True if building in test mode, in which case the build will always + happen, even if the output already exists. In test mode, the output + directory should be a temporary location, not any of the real ones. + server + Use buildserver VM for building. + force + Build app regardless of disabled state or scanner errors. + onserver + Assume the build is happening inside the VM. + refresh + Enable fetching the latest refs from the VCS remote. + + Returns + ------- + status + True if the build was done, False if it wasn't necessary. """ - Build a particular version of an application, if it needs building. + dest_file = common.get_release_filename(app, build) - :param output_dir: The directory where the build output will go. Usually - this is the 'unsigned' directory. - :param repo_dir: The repo directory - used for checking if the build is - necessary. - :paaram also_check_dir: An additional location for checking if the build - is necessary (usually the archive repo) - :param test: True if building in test mode, in which case the build will - always happen, even if the output already exists. In test mode, the - output directory should be a temporary location, not any of the real - ones. - - :returns: True if the build was done, False if it wasn't necessary. - """ - - dest_apk = common.getapkname(app, thisbuild) - - dest = os.path.join(output_dir, dest_apk) - dest_repo = os.path.join(repo_dir, dest_apk) + dest = os.path.join(output_dir, dest_file) + dest_repo = os.path.join(repo_dir, dest_file) if not test: if os.path.exists(dest) or os.path.exists(dest_repo): return False if also_check_dir: - dest_also = os.path.join(also_check_dir, dest_apk) + dest_also = os.path.join(also_check_dir, dest_file) if os.path.exists(dest_also): return False - if 'disable' in thisbuild: + if build.disable and not options.force: return False - print "Building version " + thisbuild['version'] + ' of ' + app['id'] + logging.info("Building version %s (%s) of %s" % ( + build.versionName, build.versionCode, app.id)) if server: # When using server mode, still keep a local cache of the repo, by # grabbing the source now. - vcs.gotorevision(thisbuild['commit']) + vcs.gotorevision(build.commit, refresh) - build_server(app, thisbuild, vcs, build_dir, output_dir, force) + # Initialise submodules if required + if build.submodules: + vcs.initsubmodules() + + build_server(app, build, vcs, build_dir, output_dir, log_dir, force) else: - build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver) + build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh) return True +def force_halt_build(timeout): + """Halt the currently running Vagrant VM, to be called from a Timer. 
+ + Parameters + ---------- + timeout + The timeout in seconds. + """ + logging.error(_('Force halting build after {0} sec timeout!').format(timeout)) + timeout_event.set() + if ssh_channel: + ssh_channel.close() + vm = vmtools.get_build_vm('builder') + vm.destroy() + + +def keep_when_not_allowed(): + """Control if APKs signed by keys not in AllowedAPKSigningKeys are removed.""" + return ( + (options is not None and options.keep_when_not_allowed) + or (config is not None and config.get('keep_when_not_allowed')) + or common.default_config['keep_when_not_allowed'] + ) + + def parse_commandline(): - """Parse the command line. Returns options, args.""" + """Parse the command line. - parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("-l", "--latest", action="store_true", default=False, - help="Build only the latest version of each package") - parser.add_option("-s", "--stop", action="store_true", default=False, - help="Make the build stop on exceptions") - parser.add_option("-t", "--test", action="store_true", default=False, - help="Test mode - put output in the tmp directory only, and always build, even if the output already exists.") - parser.add_option("--server", action="store_true", default=False, - help="Use build server") - parser.add_option("--resetserver", action="store_true", default=False, - help="Reset and create a brand new build server, even if the existing one appears to be ok.") - parser.add_option("--on-server", dest="onserver", action="store_true", default=False, - help="Specify that we're running on the build server") - parser.add_option("-f", "--force", action="store_true", default=False, - help="Force build of disabled apps, and carries on regardless of scan problems. Only allowed in test mode.") - parser.add_option("-a", "--all", action="store_true", default=False, - help="Build all applications available") - parser.add_option("-w", "--wiki", default=False, action="store_true", - help="Update the wiki") - options, args = parser.parse_args() + Returns + ------- + options + The resulting options parsed from the command line arguments. + parser + The argument parser. + """ + parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") + common.setup_global_opts(parser) + parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]")) + parser.add_argument("-l", "--latest", action="store_true", default=False, + help=_("Build only the latest version of each package")) + parser.add_argument("-s", "--stop", action="store_true", default=False, + help=_("Make the build stop on exceptions")) + parser.add_argument("-t", "--test", action="store_true", default=False, + help=_("Test mode - put output in the tmp directory only, and always build, even if the output already exists.")) + parser.add_argument("--server", action="store_true", default=False, + help=_("Use build server")) + # this option is internal API for telling fdroid that + # it's running inside a buildserver vm. 
+ parser.add_argument("--on-server", dest="onserver", action="store_true", default=False, + help=argparse.SUPPRESS) + parser.add_argument("--skip-scan", dest="skipscan", action="store_true", default=False, + help=_("Skip scanning the source code for binaries and other problems")) + parser.add_argument("--scan-binary", action="store_true", default=False, + help=_("Scan the resulting APK(s) for known non-free classes.")) + parser.add_argument("--no-tarball", dest="notarball", action="store_true", default=False, + help=_("Don't create a source tarball, useful when testing a build")) + parser.add_argument("--no-refresh", dest="refresh", action="store_false", default=True, + help=_("Don't refresh the repository, useful when testing a build with no internet connection")) + parser.add_argument("-r", "--refresh-scanner", dest="refresh_scanner", action="store_true", default=False, + help=_("Refresh and cache scanner rules and signatures from the network")) + parser.add_argument("-f", "--force", action="store_true", default=False, + help=_("Force build of disabled apps, and carries on regardless of scan problems. Only allowed in test mode.")) + parser.add_argument("-a", "--all", action="store_true", default=False, + help=_("Build all applications available")) + parser.add_argument("--keep-when-not-allowed", default=False, action="store_true", + help=argparse.SUPPRESS) + parser.add_argument("-w", "--wiki", default=False, action="store_true", + help=argparse.SUPPRESS) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - # Force --stop with --on-server to get cotrect exit code + # Force --stop with --on-server to get correct exit code if options.onserver: options.stop = True if options.force and not options.test: - raise OptionError("Force is only allowed in test mode", "force") + parser.error("option %s: Force is only allowed in test mode" % "force") + + return options, parser - return options, args options = None config = None +fdroidserverid = None +start_timestamp = time.gmtime() +status_output = None +timeout_event = threading.Event() + def main(): + """Build a package from source. - global options, config + The behaviour of this function is influenced by the configuration file as + well as command line parameters. - options, args = parse_commandline() - if not args and not options.all: - raise OptionError("If you really want to build all the apps, use --all", "all") + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If more than one local metadata file has been found, no app metadata + has been found, there are no apps to process, downloading binaries for + checking the reproducibility of a built binary failed, the built binary + is different from supplied reference binary, the reference binary is + signed with a different signing key than expected, a VCS error occured + while building an app or a different error occured while building an + app. + """ + global options, config, buildserverid, fdroidserverid - config = common.read_config(options) + options, parser = parse_commandline() + + # The defaults for .fdroid.* metadata that is included in a git repo are + # different than for the standard metadata/ layout because expectations + # are different. In this case, the most common user will be the app + # developer working on the latest update of the app on their own machine. 
+ local_metadata_files = common.get_local_metadata_files() + if len(local_metadata_files) == 1: # there is local metadata in an app's source + config = dict(common.default_config) + # `fdroid build` should build only the latest version by default since + # most of the time the user will be building the most recent update + if not options.all: + options.latest = True + elif len(local_metadata_files) > 1: + raise FDroidException("Only one local metadata file allowed! Found: " + + " ".join(local_metadata_files)) + else: + if not os.path.isdir('metadata') and len(local_metadata_files) == 0: + raise FDroidException("No app metadata found, nothing to process!") + if not options.appid and not options.all: + parser.error("option %s: If you really want to build all the apps, use --all" % "all") + + config = common.read_config() if config['build_server_always']: options.server = True - if options.resetserver and not options.server: - raise OptionError("Using --resetserver without --server makes no sense", "resetserver") log_dir = 'logs' if not os.path.isdir(log_dir): - print "Creating log directory" + logging.info("Creating log directory") os.makedirs(log_dir) tmp_dir = 'tmp' if not os.path.isdir(tmp_dir): - print "Creating temporary directory" + logging.info("Creating temporary directory") os.makedirs(tmp_dir) if options.test: @@ -864,126 +1087,315 @@ def main(): else: output_dir = 'unsigned' if not os.path.isdir(output_dir): - print "Creating output directory" + logging.info("Creating output directory") os.makedirs(output_dir) + binaries_dir = os.path.join(output_dir, 'binaries') if config['archive_older'] != 0: also_check_dir = 'archive' else: also_check_dir = None + if options.onserver: + status_output = dict() # HACK dummy placeholder + else: + status_output = common.setup_status_output(start_timestamp) + repo_dir = 'repo' build_dir = 'build' if not os.path.isdir(build_dir): - print "Creating build directory" + logging.info("Creating build directory") os.makedirs(build_dir) srclib_dir = os.path.join(build_dir, 'srclib') extlib_dir = os.path.join(build_dir, 'extlib') - # Get all apps... 
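The setup above is driven by a few config.yml keys; a minimal illustration of their shape, with made-up values:

    config_example = {
        'build_server_always': False,  # when True, behaves as if --server had been given
        'archive_older': 3,            # non-zero: also look in archive/ for existing builds
        'scan_binary': False,          # scan finished APKs for blocklisted classes
    }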
- allapps = metadata.read_metadata(xref=not options.onserver) + apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True) - apps = common.read_app_args(args, allapps, True) - apps = [app for app in apps if (options.force or not app['Disabled']) and - len(app['Repo Type']) > 0 and len(app['builds']) > 0] + for appid, app in list(apps.items()): + if (app.get('Disabled') and not options.force) or not app.get('RepoType') or not app.get('Builds', []): + del apps[appid] - if len(apps) == 0: - raise Exception("No apps to process.") + if not apps: + raise FDroidException("No apps to process.") + + # make sure enough open files are allowed to process everything + try: + import resource # not available on Windows + + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + if len(apps) > soft: + try: + soft = len(apps) * 2 + if soft > hard: + soft = hard + resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) + logging.debug(_('Set open file limit to {integer}') + .format(integer=soft)) + except (OSError, ValueError) as e: + logging.warning(_('Setting open file limit failed: ') + str(e)) + except ImportError: + pass if options.latest: - for app in apps: - app['builds'] = app['builds'][-1:] + for app in apps.values(): + for build in reversed(app.get('Builds', [])): + if build.disable and not options.force: + continue + app['Builds'] = [build] + break - if options.wiki: - import mwclient - site = mwclient.Site((config['wiki_protocol'], config['wiki_server']), - path=config['wiki_path']) - site.login(config['wiki_user'], config['wiki_password']) + if not options.onserver: + common.write_running_status_json(status_output) # Build applications... - failed_apps = {} - build_succeeded = [] - for app in apps: + failed_builds = [] + build_succeeded_ids = [] + status_output['failedBuilds'] = failed_builds + status_output['successfulBuildIds'] = build_succeeded_ids + # Only build for 72 hours, then stop gracefully. + endtime = time.time() + 72 * 60 * 60 + max_build_time_reached = False + for appid, app in apps.items(): first = True - for thisbuild in app['builds']: - wikilog = None + for build in app.get('Builds', []): + if time.time() > endtime: + max_build_time_reached = True + break + + # Enable watchdog timer (2 hours by default). + if build.timeout is None: + timeout = 7200 + else: + timeout = build.timeout + if options.server and timeout > 0: + logging.debug(_('Setting {0} sec timeout for this build').format(timeout)) + timer = threading.Timer(timeout, force_halt_build, [timeout]) + timeout_event.clear() + timer.start() + else: + timer = None + + tools_version_log = '' try: # For the first build of a particular app, we need to set up # the source repo. We can reuse it on subsequent builds, if # there are any. if first: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - else: - build_dir = os.path.join('build', app['id']) - - # Set up vcs interface and make sure we have the latest code... 
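The per-build watchdog above is the standard threading.Timer pattern: arm a timer before the long-running step, let it halt the build VM on expiry, and cancel it if the step finishes in time. A stripped-down sketch, with the VM-halting part elided:

    import threading

    timeout_event = threading.Event()

    def watchdog(timeout):
        # in build.py this is force_halt_build(), which also destroys the builder VM
        timeout_event.set()

    timer = threading.Timer(7200, watchdog, [7200])  # 2 h, the default build.timeout
    timer.start()
    try:
        pass  # ... run the build ...
    finally:
        timer.cancel()  # disarm the watchdog if the build finished in time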
- if options.verbose: - print "Getting {0} vcs interface for {1}".format( - app['Repo Type'], app['Repo']) - vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) - + vcs, build_dir = common.setup_vcs(app) first = False - if options.verbose: - print "Checking " + thisbuild['version'] - if trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, - srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, options.test, - options.server, options.force, options.onserver): - build_succeeded.append(app) - wikilog = "Build succeeded" - except BuildException as be: - logfile = open(os.path.join(log_dir, app['id'] + '.log'), 'a+') - logfile.write(str(be)) - logfile.close() - print "Could not build app %s due to BuildException: %s" % (app['id'], be) - if options.stop: - sys.exit(1) - failed_apps[app['id']] = be - wikilog = be.get_wikitext() + logging.debug("Checking %s:%s" % (appid, build.versionCode)) + if trybuild(app, build, build_dir, output_dir, log_dir, + also_check_dir, srclib_dir, extlib_dir, + tmp_dir, repo_dir, vcs, options.test, + options.server, options.force, + options.onserver, options.refresh): + toolslog = os.path.join(log_dir, + common.get_toolsversion_logname(app, build)) + if not options.onserver and os.path.exists(toolslog): + with open(toolslog, 'r') as f: + tools_version_log = ''.join(f.readlines()) + os.remove(toolslog) + + if url := build.binary or app.Binaries: + # This is an app where we build from source, and + # verify the APK contents against a developer's + # binary. We get that binary now, and save it + # alongside our built one in the 'unsigend' + # directory. + if not os.path.isdir(binaries_dir): + os.makedirs(binaries_dir) + logging.info("Created directory for storing " + "developer supplied reference " + "binaries: '{path}'" + .format(path=binaries_dir)) + url = url.replace('%v', build.versionName) + url = url.replace('%c', str(build.versionCode)) + logging.info("...retrieving " + url) + of = re.sub(r'\.apk$', '.binary.apk', common.get_release_filename(app, build)) + of = os.path.join(binaries_dir, of) + try: + net.download_file(url, local_filename=of) + except requests.exceptions.HTTPError as e: + raise FDroidException( + 'Downloading Binaries from %s failed.' % url) from e + + # Now we check whether the build can be verified to + # match the supplied binary or not. Should the + # comparison fail, we mark this build as a failure + # and remove everything from the unsigend folder. + with tempfile.TemporaryDirectory() as tmpdir: + unsigned_apk = \ + common.get_release_filename(app, build) + unsigned_apk = \ + os.path.join(output_dir, unsigned_apk) + compare_result = \ + common.verify_apks(of, unsigned_apk, tmpdir) + if compare_result: + if options.test: + logging.warning(_('Keeping failed build "{apkfilename}"') + .format(apkfilename=unsigned_apk)) + else: + logging.debug('removing %s', unsigned_apk) + os.remove(unsigned_apk) + logging.debug('removing %s', of) + os.remove(of) + compare_result = compare_result.split('\n') + line_count = len(compare_result) + compare_result = compare_result[:299] + if line_count > len(compare_result): + line_difference = \ + line_count - len(compare_result) + compare_result.append('%d more lines ...' 
% + line_difference) + compare_result = '\n'.join(compare_result) + raise FDroidException('compared built binary ' + 'to supplied reference ' + 'binary but failed', + compare_result) + else: + logging.info('compared built binary to ' + 'supplied reference binary ' + 'successfully') + + used_key = common.apk_signer_fingerprint(of) + expected_keys = app['AllowedAPKSigningKeys'] + if used_key is None: + logging.warn(_('reference binary missing ' + 'signature')) + elif len(expected_keys) == 0: + logging.warn(_('AllowedAPKSigningKeys missing ' + 'but reference binary supplied')) + elif used_key not in expected_keys: + if options.test or keep_when_not_allowed(): + logging.warning(_('Keeping failed build "{apkfilename}"') + .format(apkfilename=unsigned_apk)) + else: + logging.debug('removing %s', unsigned_apk) + os.remove(unsigned_apk) + logging.debug('removing %s', of) + os.remove(of) + raise FDroidException('supplied reference ' + 'binary signed with ' + '{signer} instead of ' + 'with {expected}'. + format(signer=used_key, + expected=expected_keys)) + else: + logging.info(_('supplied reference binary has ' + 'allowed signer {signer}'). + format(signer=used_key)) + + build_succeeded_ids.append([app['id'], build.versionCode]) + + if not options.onserver: + common.write_running_status_json(status_output) + except VCSException as vcse: - print "VCS error while building app %s: %s" % (app['id'], vcse) + reason = str(vcse).split('\n', 1)[0] if options.verbose else str(vcse) + logging.error("VCS error while building app %s: %s" % ( + appid, reason)) if options.stop: - sys.exit(1) - failed_apps[app['id']] = vcse - wikilog = str(vcse) - except Exception as e: - print "Could not build app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) + logging.debug("Error encountered, stopping by user request.") + common.force_exit(1) + failed_builds.append((appid, build.versionCode)) + common.deploy_build_log_with_rsync( + appid, build.versionCode, "".join(traceback.format_exc()) + ) + if not options.onserver: + common.write_running_status_json(status_output) + + except FDroidException as e: + tstamp = time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + with open(os.path.join(log_dir, appid + '.log'), 'a+') as f: + f.write('\n\n============================================================\n') + f.write('versionCode: %s\nversionName: %s\ncommit: %s\n' % + (build.versionCode, build.versionName, build.commit)) + f.write('Build completed at ' + + tstamp + '\n') + f.write('\n' + tools_version_log + '\n') + f.write(str(e)) + logging.error("Could not build app %s: %s" % (appid, e)) if options.stop: - sys.exit(1) - failed_apps[app['id']] = e - wikilog = str(e) + logging.debug("Error encountered, stopping by user request.") + common.force_exit(1) + failed_builds.append((appid, build.versionCode)) + common.deploy_build_log_with_rsync( + appid, build.versionCode, "".join(traceback.format_exc()) + ) + if not options.onserver: + common.write_running_status_json(status_output) - if options.wiki and wikilog: - try: - newpage = site.Pages[app['id'] + '/lastbuild'] - txt = wikilog - if len(txt) > 8192: - txt = txt[-8192:] - txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + txt - newpage.save(wikilog, summary='Build log') - except: - print "Error while attempting to publish build log" + except Exception: + logging.error("Could not build app %s due to unknown error: %s" % ( + appid, traceback.format_exc())) + if options.stop: + logging.debug("Error encountered, stopping by 
user request.") + common.force_exit(1) + failed_builds.append((appid, build.versionCode)) + common.deploy_build_log_with_rsync( + appid, build.versionCode, "".join(traceback.format_exc()) + ) + if not options.onserver: + common.write_running_status_json(status_output) - for app in build_succeeded: - print "success: %s" % (app['id']) + if timer: + timer.cancel() # kill the watchdog timer + + if max_build_time_reached: + status_output['maxBuildTimeReached'] = True + logging.info("Stopping after global build timeout...") + break + + for app in build_succeeded_ids: + logging.info("success: %s" % app[0]) if not options.verbose: - for fa in failed_apps: - print "Build for app %s failed:\n%s" % (fa, failed_apps[fa]) + for fb in failed_builds: + logging.info('Build for app {}:{} failed'.format(*fb)) - print "Finished." - if len(build_succeeded) > 0: - print str(len(build_succeeded)) + ' builds succeeded' - if len(failed_apps) > 0: - print str(len(failed_apps)) + ' builds failed' + logging.info(_("Finished")) + if len(build_succeeded_ids) > 0: + logging.info(ngettext("{} build succeeded", + "{} builds succeeded", len(build_succeeded_ids)).format(len(build_succeeded_ids))) + if len(failed_builds) > 0: + logging.info(ngettext("{} build failed", + "{} builds failed", len(failed_builds)).format(len(failed_builds))) + + if options.server: + if os.cpu_count(): + status_output['hostOsCpuCount'] = os.cpu_count() + if os.path.isfile('/proc/meminfo') and os.access('/proc/meminfo', os.R_OK): + with open('/proc/meminfo') as fp: + for line in fp: + m = re.search(r'MemTotal:\s*([0-9].*)', line) + if m: + status_output['hostProcMeminfoMemTotal'] = m.group(1) + break + buildserver_config = 'builder/Vagrantfile' + if os.path.isfile(buildserver_config) and os.access(buildserver_config, os.R_OK): + with open(buildserver_config) as configfile: + for line in configfile: + m = re.search(r'cpus\s*=\s*([0-9].*)', line) + if m: + status_output['guestVagrantVmCpus'] = m.group(1) + m = re.search(r'memory\s*=\s*([0-9].*)', line) + if m: + status_output['guestVagrantVmMemory'] = m.group(1) + + if buildserverid: + status_output['buildserver'] = {'commitId': buildserverid} + + if not options.onserver: + common.write_status_json(status_output) + + # hack to ensure this exits, even is some threads are still running + common.force_exit() - sys.exit(0) if __name__ == "__main__": main() - diff --git a/fdroidserver/checkupdates.py b/fdroidserver/checkupdates.py index d93a0646..e7945910 100644 --- a/fdroidserver/checkupdates.py +++ b/fdroidserver/checkupdates.py @@ -1,9 +1,9 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 +"""Check for updates to applications.""" # # checkupdates.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,463 +18,950 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import sys +import configparser +import copy +import logging import os import re -import urllib2 -import time import subprocess -from optparse import OptionParser +import sys +import time import traceback -import HTMLParser -from distutils.version import LooseVersion -import common, metadata -from common import BuildException -from common import VCSException -from metadata import MetaDataException +import urllib.error +import urllib.parse +import urllib.request +from argparse import ArgumentParser +from pathlib import Path +from typing import Optional + +import git + +from . import _, common, metadata, net +from .exception import ( + FDroidException, + MetaDataException, + NoSubmodulesException, + VCSException, +) + +# https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36 +BOT_EMAIL = 'fdroidci@bubu1.eu' -# Check for a new version by looking at a document retrieved via HTTP. -# The app's Update Check Data field is used to provide the information -# required. -def check_http(app): +def check_http(app: metadata.App) -> tuple[Optional[str], Optional[int]]: + """Check for a new version by looking at a document retrieved via HTTP. - try: + The app's UpdateCheckData field is used to provide the information + required. - if not 'Update Check Data' in app: - raise Exception('Missing Update Check Data') + Parameters + ---------- + app + The App instance to check for updates for. - urlcode, codeex, urlver, verex = app['Update Check Data'].split('|') + Returns + ------- + version + The found versionName or None if the versionName should be ignored + according to UpdateCheckIgnore. + vercode + The found versionCode or None if the versionCode should be ignored + according to UpdateCheckIgnore. - vercode = "99999999" - if len(urlcode) > 0: - print "...requesting {0}".format(urlcode) - req = urllib2.Request(urlcode, None) - resp = urllib2.urlopen(req, None, 20) - page = resp.read() + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If UpdateCheckData is missing or is an invalid URL or if there is no + match for the provided versionName or versionCode regex. + """ + if not app.UpdateCheckData: + raise FDroidException('Missing Update Check Data') - m = re.search(codeex, page) - if not m: - raise Exception("No RE match for version code") - vercode = m.group(1) + urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') + parsed = urllib.parse.urlparse(urlcode) + if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https': + raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlcode)) + if urlver != '.': + parsed = urllib.parse.urlparse(urlver) + if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https': + raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlver)) - version = "??" 
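A concrete, made-up UpdateCheckData value makes the four-field format handled here easier to follow: version-code URL, versionCode regex, versionName URL ('.' reuses the first page), and versionName regex.

    update_check_data = r'https://example.org/app/latest.txt|(\d+)|.|([\d.]+)'
    urlcode, codeex, urlver, verex = update_check_data.split('|')
    # urlcode -> 'https://example.org/app/latest.txt'  (must be https)
    # codeex  -> r'(\d+)'     regex applied to that page to find the versionCode
    # urlver  -> '.'          '.' means: reuse the page fetched from urlcode
    # verex   -> r'([\d.]+)'  regex applied to that page to find the versionName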
- if len(urlver) > 0: - if urlver != '.': - print "...requesting {0}".format(urlver) - req = urllib2.Request(urlver, None) - resp = urllib2.urlopen(req, None, 20) - page = resp.read() + logging.debug("...requesting {0}".format(urlcode)) + req = urllib.request.Request(urlcode, None, headers=net.HEADERS) + resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above + page = resp.read().decode('utf-8') - m = re.search(verex, page) - if not m: - raise Exception("No RE match for version") - version = m.group(1) + m = re.search(codeex, page) + if not m: + raise FDroidException("No RE match for versionCode") + vercode = common.version_code_string_to_int(m.group(1).strip()) - return (version, vercode) + if urlver != '.': + logging.debug("...requesting {0}".format(urlver)) + req = urllib.request.Request(urlver, None) + resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above + page = resp.read().decode('utf-8') - except Exception: - msg = "Could not complete http check for app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) - return (None, msg) + m = re.search(verex, page) + if not m: + raise FDroidException("No RE match for version") + version = m.group(1) -# Check for a new version by looking at the tags in the source repo. -# Whether this can be used reliably or not depends on -# the development procedures used by the project's developers. Use it with -# caution, because it's inappropriate for many projects. -# Returns (None, "a message") if this didn't work, or (version, vercode) for -# the details of the current version. -def check_tags(app): + if app.UpdateCheckIgnore and re.search(app.UpdateCheckIgnore, version): + logging.info("Version {version} for {appid} is ignored".format(version=version, appid=app.id)) + return (None, None) - try: + return (version, vercode) - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + +def check_tags(app: metadata.App, pattern: str) -> tuple[str, int, str]: + """Check for a new version by looking at the tags in the source repo. + + Whether this can be used reliably or not depends on + the development procedures used by the project's developers. Use it with + caution, because it's inappropriate for many projects. + + Parameters + ---------- + app + The App instance to check for updates for. + pattern + The pattern a tag needs to match to be considered. + + Returns + ------- + versionName + The highest found versionName. + versionCode + The highest found versionCode. + ref + The Git reference, commit hash or tag name, of the highest found + versionName, versionCode. + + Raises + ------ + :exc:`~fdroidserver.exception.MetaDataException` + If this function is not suitable for the RepoType of the app or + information is missing to perform this type of check. + :exc:`~fdroidserver.exception.FDroidException` + If no matching tags or no information whatsoever could be found. 
+ """ + if app.RepoType == 'srclib': + build_dir = Path('build/srclib') / app.Repo + repotype = common.getsrclibvcs(app.Repo) + else: + build_dir = Path('build') / app.id + repotype = app.RepoType + + if repotype not in ('git', 'git-svn', 'hg', 'bzr'): + raise MetaDataException(_('Tags update mode only works for git, hg, bzr and git-svn repositories currently')) + + if repotype == 'git-svn' and ';' not in app.Repo: + raise MetaDataException(_('Tags update mode used in git-svn, but the repo was not set up with tags')) + + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + + vcs.gotorevision(None) + + last_build = get_last_build_from_app(app) + + try_init_submodules(app, last_build, vcs) + + htag = None + hver = None + hcode = 0 + + tags = [] + if repotype == 'git': + tags = vcs.latesttags() + else: + tags = vcs.gettags() + if not tags: + raise FDroidException(_('No tags found')) + + logging.debug("All tags: " + ','.join(tags)) + if pattern: + pat = re.compile(pattern) + tags = [tag for tag in tags if pat.match(tag)] + if not tags: + raise FDroidException(_('No matching tags found')) + logging.debug("Matching tags: " + ','.join(tags)) + + if len(tags) > 5 and repotype == 'git': + tags = tags[:5] + logging.debug("Latest tags: " + ','.join(tags)) + + for tag in tags: + logging.debug("Check tag: '{0}'".format(tag)) + vcs.gotorevision(tag) + try_init_submodules(app, last_build, vcs) + + if app.UpdateCheckData: + filecode, codeex, filever, verex = app.UpdateCheckData.split('|') + + if filecode: + filecode = build_dir / filecode + if not filecode.is_file(): + logging.debug("UpdateCheckData file {0} not found in tag {1}".format(filecode, tag)) + continue + filecontent = filecode.read_text() + else: + filecontent = tag + + vercode = tag + if codeex: + m = re.search(codeex, filecontent) + if not m: + logging.debug(f"UpdateCheckData regex {codeex} for versionCode" + f" has no match in tag {tag}") + continue + + vercode = m.group(1).strip() + + if filever: + if filever != '.': + filever = build_dir / filever + if filever.is_file(): + filecontent = filever.read_text() + else: + logging.debug("UpdateCheckData file {0} not found in tag {1}".format(filever, tag)) + else: + filecontent = tag + + version = tag + if verex: + m = re.search(verex, filecontent) + if not m: + logging.debug(f"UpdateCheckData regex {verex} for versionName" + f" has no match in tag {tag}") + continue + + version = m.group(1) + + logging.debug("UpdateCheckData found version {0} ({1})" + .format(version, vercode)) + vercode = common.version_code_string_to_int(vercode) + if vercode > hcode: + htag = tag + hcode = vercode + hver = version else: - build_dir = os.path.join('build/', app['id']) - repotype = app['Repo Type'] + for subdir in possible_subdirs(app): + root_dir = build_dir / subdir + paths = common.manifest_paths(root_dir, last_build.gradle) + version, vercode, _package = common.parse_androidmanifests(paths, app) + if version in ('Unknown', 'Ignore'): + version = tag + if vercode: + logging.debug("Manifest exists in subdir '{0}'. 
Found version {1} ({2})" + .format(subdir, version, vercode)) + if vercode > hcode: + htag = tag + hcode = vercode + hver = version - if repotype not in ('git', 'git-svn', 'hg', 'bzr'): - return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None) + if hver: + if htag != tags[0]: + logging.warning( + "{appid}: latest tag {tag} does not contain highest version {version}".format( + appid=app.id, tag=tags[0], version=hver + ) + ) + try: + commit = vcs.getref(htag) + if commit: + return (hver, hcode, commit) + except VCSException: + pass + return (hver, hcode, htag) + raise FDroidException(_("Couldn't find any version information")) - # Set up vcs interface and make sure we have the latest code... - vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) +def check_repomanifest(app: metadata.App, branch: Optional[str] = None) -> tuple[str, int]: + """Check for a new version by looking at the AndroidManifest.xml at the HEAD of the source repo. + + Whether this can be used reliably or not depends on + the development procedures used by the project's developers. Use it with + caution, because it's inappropriate for many projects. + + Parameters + ---------- + app + The App instance to check for updates for. + branch + The VCS branch where to search for versionCode, versionName. + + Returns + ------- + versionName + The highest found versionName. + versionCode + The highest found versionCode. + + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If no package id or no version information could be found. + """ + if app.RepoType == 'srclib': + build_dir = Path('build/srclib') / app.Repo + repotype = common.getsrclibvcs(app.Repo) + else: + build_dir = Path('build') / app.id + repotype = app.RepoType + + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + + if repotype == 'git': + if branch: + branch = 'origin/' + branch + vcs.gotorevision(branch) + elif repotype == 'git-svn': + vcs.gotorevision(branch) + elif repotype == 'hg': + vcs.gotorevision(branch) + elif repotype == 'bzr': vcs.gotorevision(None) - flavour = None - if len(app['builds']) > 0: - if 'subdir' in app['builds'][-1]: - build_dir = os.path.join(build_dir, app['builds'][-1]['subdir']) - if 'gradle' in app['builds'][-1]: - flavour = app['builds'][-1]['gradle'] + last_build = get_last_build_from_app(app) + try_init_submodules(app, last_build, vcs) - htag = None - hver = None - hcode = "0" + hpak = None + hver = None + hcode = 0 + for subdir in possible_subdirs(app): + root_dir = build_dir / subdir + paths = common.manifest_paths(root_dir, last_build.gradle) + version, vercode, package = common.parse_androidmanifests(paths, app) + if vercode: + logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})" + .format(subdir, version, vercode)) + if vercode > hcode: + hpak = package + hcode = vercode + hver = version - for tag in vcs.gettags(): - if options.verbose: - print "Check tag: '{0}'".format(tag) - vcs.gotorevision(tag) + if not hpak: + raise FDroidException(_("Couldn't find package ID")) + if hver: + return (hver, hcode) + raise FDroidException(_("Couldn't find any version information")) - # Only process tags where the manifest exists... - paths = common.manifest_paths(build_dir, flavour) - version, vercode, package = common.parse_androidmanifests(paths) - if package and package == app['id'] and version and vercode: - print "Manifest exists. 
Found version %s (%s)" % ( - version, vercode) - if int(vercode) > int(hcode): - htag = tag - hcode = str(int(vercode)) - hver = version - if hver: - return (hver, hcode, htag) - return (None, "Couldn't find any version information", None) +def try_init_submodules(app: metadata.App, last_build: metadata.Build, vcs: common.vcs): + """Try to init submodules if the last build entry uses them. - except BuildException as be: - msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be) - return (None, msg, None) - except VCSException as vcse: - msg = "VCS error while scanning app %s: %s" % (app['id'], vcse) - return (None, msg, None) - except Exception: - msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) - return (None, msg, None) + They might have been removed from the app's repo in the meantime, + so if we can't find any submodules we continue with the updates check. + If there is any other error in initializing them then we stop the check. + """ + if last_build.submodules: + try: + vcs.initsubmodules() + except NoSubmodulesException: + logging.info("No submodules present for {}".format(_getappname(app))) + except VCSException: + logging.info("submodule broken for {}".format(_getappname(app))) -# Check for a new version by looking at the AndroidManifest.xml at the HEAD -# of the source repo. Whether this can be used reliably or not depends on -# the development procedures used by the project's developers. Use it with -# caution, because it's inappropriate for many projects. -# Returns (None, "a message") if this didn't work, or (version, vercode) for -# the details of the current version. -def check_repomanifest(app, branch=None): + +def dirs_with_manifest(startdir: str): + """Find directories containing a manifest file. + + Yield all directories under startdir that contain any of the manifest + files, and thus are probably an Android project. + + Parameters + ---------- + startdir + Directory to be walked down for search + + Yields + ------ + path : :class:`pathlib.Path` or None + A directory that contains a manifest file of an Android project, None if + no directory could be found + """ + for root, dirs, files in os.walk(startdir): + dirs.sort() + if any(m in files for m in [ + 'AndroidManifest.xml', 'pom.xml', 'build.gradle', 'build.gradle.kts']): + yield Path(root) + + +def possible_subdirs(app: metadata.App): + """Try to find a new subdir starting from the root build_dir. + + Yields said subdir relative to the build dir if found, None otherwise. + + Parameters + ---------- + app + The app to check for subdirs + + Yields + ------ + subdir : :class:`pathlib.Path` or None + A possible subdir, None if no subdir could be found + """ + if app.RepoType == 'srclib': + build_dir = Path('build/srclib') / app.Repo + else: + build_dir = Path('build') / app.id + + last_build = get_last_build_from_app(app) + + for d in dirs_with_manifest(build_dir): + m_paths = common.manifest_paths(d, last_build.gradle) + package = common.parse_androidmanifests(m_paths, app)[2] + if package is not None or app.UpdateCheckName == "Ignore": + subdir = d.relative_to(build_dir) + logging.debug("Adding possible subdir %s" % subdir) + yield subdir + + +def _getappname(app: metadata.App) -> str: + return common.get_app_display_name(app) + + +def _getcvname(app: metadata.App) -> str: + return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode) + + +def fetch_autoname(app: metadata.App, tag: str) -> Optional[str]: + """Fetch AutoName. 
+ + Get the to be displayed name of an app from the source code and adjust the + App instance in case it is different name has been found. + + Parameters + ---------- + app + The App instance to get the AutoName for. + tag + Tag to fetch AutoName at. + + Returns + ------- + commitmsg + Commit message about the name change. None in case checking for the + name is disabled, a VCSException occured or no name could be found. + """ + if not app.RepoType or app.UpdateCheckMode in ('None', 'Static') \ + or app.UpdateCheckName == "Ignore": + return None + + if app.RepoType == 'srclib': + build_dir = Path('build/srclib') / app.Repo + else: + build_dir = Path('build') / app.id try: + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + vcs.gotorevision(tag) + except VCSException: + return None - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + last_build = get_last_build_from_app(app) + + logging.debug("...fetch auto name from " + str(build_dir)) + new_name = None + for subdir in possible_subdirs(app): + root_dir = build_dir / subdir + new_name = common.fetch_real_name(root_dir, last_build.gradle) + if new_name is not None: + break + commitmsg = None + if new_name: + logging.debug("...got autoname '" + new_name + "'") + if new_name != app.AutoName: + app.AutoName = new_name + if not commitmsg: + commitmsg = "Set autoname of {0}".format(_getappname(app)) + else: + logging.debug("...couldn't get autoname") + + return commitmsg + + +def operate_vercode(operation: str, vercode: int) -> int: + """Calculate a new versionCode from a mathematical operation. + + Parameters + ---------- + operation + The operation to execute to get the new versionCode. + vercode + The versionCode for replacing "%c" in the operation. + + Returns + ------- + vercode + The new versionCode obtained by executing the operation. + + Raises + ------ + :exc:`~fdroidserver.exception.MetaDataException` + If the operation is invalid. + """ + if not common.VERCODE_OPERATION_RE.match(operation): + raise MetaDataException(_('Invalid VercodeOperation: {field}') + .format(field=operation)) + oldvercode = vercode + op = operation.replace("%c", str(oldvercode)) + vercode = common.calculate_math_string(op) + logging.debug("Applied vercode operation: %d -> %d" % (oldvercode, vercode)) + return vercode + + +def checkupdates_app(app: metadata.App, auto: bool, commit: bool = False) -> None: + """Check for new versions and updated name of a single app. + + Also write back changes to the metadata file and create a Git commit if + requested. + + Parameters + ---------- + app + The app to check for updates for. + + Raises + ------ + :exc:`~fdroidserver.exception.MetaDataException` + If the app has an invalid UpdateCheckMode or AutoUpdateMode. + :exc:`~fdroidserver.exception.FDroidException` + If no version information could be found, the current version is newer + than the found version, auto-update was requested but an app has no + CurrentVersionCode or (Git) commiting the changes failed. + """ + # If a change is made, commitmsg should be set to a description of it. + # Only if this is set, changes will be written back to the metadata. 
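To illustrate operate_vercode() above: a VercodeOperation is an arithmetic template in which %c stands for the versionCode found upstream (the operation and numbers below are made up).

    operation = '%c*10+5'                        # an illustrative VercodeOperation
    vercode = 147                                # versionCode found by the update check
    op = operation.replace('%c', str(vercode))   # -> '147*10+5'
    # common.calculate_math_string(op) then yields the new versionCode, 1475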
+ commitmsg = None + + tag = None + mode = app.UpdateCheckMode + if mode.startswith('Tags'): + pattern = mode[5:] if len(mode) > 4 else None + (version, vercode, tag) = check_tags(app, pattern) + elif mode == 'RepoManifest': + (version, vercode) = check_repomanifest(app) + elif mode.startswith('RepoManifest/'): + tag = mode[13:] + (version, vercode) = check_repomanifest(app, tag) + elif mode == 'HTTP': + (version, vercode) = check_http(app) + elif mode in ('None', 'Static'): + logging.debug('Checking disabled') + return + else: + raise MetaDataException(_('Invalid UpdateCheckMode: {mode}').format(mode=mode)) + + if not version or not vercode: + raise FDroidException(_('no version information found')) + + if app.VercodeOperation: + vercodes = sorted([ + operate_vercode(operation, vercode) for operation in app.VercodeOperation + ]) + else: + vercodes = [vercode] + + updating = False + if vercodes[-1] == app.CurrentVersionCode: + logging.debug("...up to date") + elif vercodes[-1] > app.CurrentVersionCode: + logging.debug("...updating - old vercode={0}, new vercode={1}".format( + app.CurrentVersionCode, vercodes[-1])) + app.CurrentVersion = version + app.CurrentVersionCode = vercodes[-1] + updating = True + else: + raise FDroidException( + _('current version is newer: old vercode={old}, new vercode={new}').format( + old=app.CurrentVersionCode, new=vercodes[-1] + ) + ) + + commitmsg = fetch_autoname(app, tag) + + if updating: + name = _getappname(app) + ver = _getcvname(app) + logging.info('...updating to version %s' % ver) + commitmsg = 'Update CurrentVersion of %s to %s' % (name, ver) + + if auto: + mode = app.AutoUpdateMode + if not app.CurrentVersionCode: + raise MetaDataException( + _("Can't auto-update app with no CurrentVersionCode") + ) + elif mode in ('None', 'Static'): + pass + elif mode.startswith('Version'): + pattern = mode[8:] + suffix = '' + if pattern.startswith('+'): + try: + suffix, pattern = pattern[1:].split(' ', 1) + except ValueError as exc: + raise MetaDataException("Invalid AutoUpdateMode: " + mode) from exc + + gotcur = False + latest = None + builds = app.get('Builds', []) + + if builds: + latest = builds[-1] + if latest.versionCode == app.CurrentVersionCode: + gotcur = True + elif latest.versionCode > app.CurrentVersionCode: + raise FDroidException( + _( + 'latest build recipe is newer: ' + 'old vercode={old}, new vercode={new}' + ).format(old=latest.versionCode, new=app.CurrentVersionCode) + ) + + if not gotcur: + newbuilds = copy.deepcopy(builds[-len(vercodes):]) + + # These are either built-in or invalid in newer system versions + bookworm_blocklist = [ + 'apt-get install -y openjdk-11-jdk', + 'apt-get install openjdk-11-jdk-headless', + 'apt-get install -y openjdk-11-jdk-headless', + 'apt-get install -t stretch-backports openjdk-11-jdk-headless openjdk-11-jre-headless', + 'apt-get install -y -t stretch-backports openjdk-11-jdk-headless openjdk-11-jre-headless', + 'apt-get install -y openjdk-17-jdk', + 'apt-get install openjdk-17-jdk-headless', + 'apt-get install -y openjdk-17-jdk-headless', + 'update-alternatives --auto java', + 'update-java-alternatives -a', + ] + + for build in newbuilds: + if "sudo" in build: + if any("openjdk-11" in line for line in build["sudo"]) or any("openjdk-17" in line for line in build["sudo"]): + build["sudo"] = [line for line in build["sudo"] if line not in bookworm_blocklist] + if build["sudo"] == ['apt-get update']: + build["sudo"] = '' + + for b, v in zip(newbuilds, vercodes): + b.disable = False + b.versionCode = v + b.versionName 
= app.CurrentVersion + suffix.replace( + '%c', str(v) + ) + logging.info("...auto-generating build for " + b.versionName) + if tag: + b.commit = tag + else: + commit = pattern.replace('%v', app.CurrentVersion) + commit = commit.replace('%c', str(v)) + b.commit = commit + + app['Builds'].extend(newbuilds) + + name = _getappname(app) + ver = _getcvname(app) + commitmsg = "Update %s to %s" % (name, ver) else: - build_dir = os.path.join('build/', app['id']) - repotype = app['Repo Type'] + raise MetaDataException( + _('Invalid AutoUpdateMode: {mode}').format(mode=mode) + ) - # Set up vcs interface and make sure we have the latest code... - vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + if commitmsg: + metadata.write_metadata(app.metadatapath, app) + if commit: + logging.info("Commiting update for " + app.metadatapath) + gitcmd = ["git", "commit", "-m", commitmsg] + gitcmd.extend(["--", app.metadatapath]) + if subprocess.call(gitcmd) != 0: + raise FDroidException("Git commit failed") - if repotype == 'git': - if branch: - branch = 'origin/'+branch - vcs.gotorevision(branch) - elif repotype == 'git-svn': - vcs.gotorevision(branch) - elif repotype == 'svn': - vcs.gotorevision(None) - elif repotype == 'hg': - vcs.gotorevision(branch) - elif repotype == 'bzr': - vcs.gotorevision(None) - flavour = None +def get_last_build_from_app(app: metadata.App) -> metadata.Build: + """Get the last build entry of an app.""" + if app.get('Builds'): + return app['Builds'][-1] + else: + return metadata.Build() - if len(app['builds']) > 0: - if 'subdir' in app['builds'][-1]: - build_dir = os.path.join(build_dir, app['builds'][-1]['subdir']) - if 'gradle' in app['builds'][-1]: - flavour = app['builds'][-1]['gradle'] - - if not os.path.isdir(build_dir): - return (None, "Subdir '" + app['builds'][-1]['subdir'] + "'is not a valid directory") - - paths = common.manifest_paths(build_dir, flavour) - - version, vercode, package = common.parse_androidmanifests(paths) - if not package: - return (None, "Couldn't find package ID") - if package != app['id']: - return (None, "Package ID mismatch") - if not version: - return (None,"Couldn't find latest version name") - if not vercode: - return (None,"Couldn't find latest version code") - - vercode = str(int(vercode)) - - print "Manifest exists. Found version %s (%s)" % (version, vercode) - - return (version, vercode) - - except BuildException as be: - msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be) - return (None, msg) - except VCSException as vcse: - msg = "VCS error while scanning app %s: %s" % (app['id'], vcse) - return (None, msg) - except Exception: - msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) - return (None, msg) - -def check_repotrunk(app, branch=None): +def get_upstream_main_branch(git_repo): + refs = list() + for ref in git_repo.remotes.upstream.refs: + if ref.name != 'upstream/HEAD': + refs.append(ref.name) + if len(refs) == 1: + return refs[0] + for name in ('upstream/main', 'upstream/master'): + if name in refs: + return name try: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + with git_repo.config_reader() as reader: + return 'upstream/%s' % reader.get_value('init', 'defaultBranch') + except configparser.NoSectionError: + return 'upstream/main' + + +def checkout_appid_branch(appid): + """Prepare the working branch named after the appid. 
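The AutoUpdateMode handling above is easiest to follow with a concrete, made-up value: 'Version +-fdroid v%v' splits into a versionName suffix and a commit pattern, and %v/%c are replaced with the new CurrentVersion and versionCode.

    mode = 'Version +-fdroid v%v'
    pattern = mode[8:]                                    # '+-fdroid v%v'
    if pattern.startswith('+'):
        suffix, pattern = pattern[1:].split(' ', 1)       # '-fdroid', 'v%v'
    else:
        suffix = ''
    version, vercode = '1.2.3', 10203
    version_name = version + suffix.replace('%c', str(vercode))          # '1.2.3-fdroid'
    commit = pattern.replace('%v', version).replace('%c', str(vercode))  # 'v1.2.3'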
+ + This sets up everything for checkupdates_app() to run and add + commits. If there is an existing branch named after the appid, + and it has commits from users other than the checkupdates-bot, + then this will return False. Otherwise, it returns True. + + The checkupdates-runner must set the committer email address in + the git config. Then any commit with a committer or author that + does not match that will be considered to have human edits. That + email address is currently set in: + https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36 + + """ + logging.debug(f'Creating merge request branch for {appid}') + git_repo = git.Repo.init('.') + upstream_main = get_upstream_main_branch(git_repo) + for remote in git_repo.remotes: + remote.fetch() + try: + git_repo.remotes.origin.fetch(f'{appid}:refs/remotes/origin/{appid}') + except Exception as e: + logging.debug('"%s" branch not found on origin remote:\n\t%s', appid, e) + if appid in git_repo.remotes.origin.refs: + start_point = f"origin/{appid}" + for commit in git_repo.iter_commits( + f'{upstream_main}...{start_point}', right_only=True + ): + if commit.committer.email != BOT_EMAIL or commit.author.email != BOT_EMAIL: + return False + else: + start_point = upstream_main + git_repo.git.checkout('-B', appid, start_point) + git_repo.git.rebase(upstream_main, strategy_option='ours', kill_after_timeout=120) + return True + + +def get_changes_versus_ref(git_repo, ref, f): + changes = [] + for m in re.findall( + r"^[+-].*", git_repo.git.diff(f"{ref}", '--', f), flags=re.MULTILINE + ): + if not re.match(r"^(\+\+\+|---) ", m): + changes.append(m) + return changes + + +def push_commits(branch_name='checkupdates'): + """Make git branch then push commits as merge request. + + The appid is parsed from the actual file that was changed so that + only the right branch is ever updated. + + This uses the appid as the standard branch name so that there is + only ever one open merge request per-app. If multiple apps are + included in the branch, then 'checkupdates' is used as branch + name. This is to support the old way operating, e.g. in batches. + + This uses GitLab "Push Options" to create a merge request. Git + Push Options are config data that can be sent via `git push + --push-option=... origin foo`. 
+ + References + ---------- + * https://docs.gitlab.com/ee/user/project/push_options.html + + """ + if branch_name != "checkupdates": + if callable(getattr(git.SymbolicReference, "_check_ref_name_valid", None)): + git.SymbolicReference._check_ref_name_valid(branch_name) + + git_repo = git.Repo.init('.') + upstream_main = get_upstream_main_branch(git_repo) + files = set() + for commit in git_repo.iter_commits(f'{upstream_main}...HEAD', right_only=True): + files.update(commit.stats.files.keys()) + + files = list(files) + if len(files) == 1: + m = re.match(r'metadata/(\S+)\.yml', files[0]) + if m: + branch_name = m.group(1) # appid + if not files: + return + + # https://git-scm.com/docs/git-check-ref-format Git refname can't end with .lock + if branch_name.endswith(".lock"): + branch_name = f"{branch_name}_" + + remote = git_repo.remotes.origin + if branch_name in remote.refs: + if not get_changes_versus_ref(git_repo, f'origin/{branch_name}', files[0]): + return + + git_repo.create_head(branch_name, force=True) + push_options = [ + 'merge_request.create', + 'merge_request.remove_source_branch', + 'merge_request.title=bot: ' + git_repo.branches[branch_name].commit.summary, + 'merge_request.description=' + + '~%s checkupdates-bot run %s' % (branch_name, os.getenv('CI_JOB_URL')), + ] + + # mark as draft if there are only changes to CurrentVersion: + current_version_only = True + for m in get_changes_versus_ref(git_repo, upstream_main, files[0]): + if not re.match(r"^[-+]CurrentVersion", m): + current_version_only = False + break + if current_version_only: + push_options.append('merge_request.draft') + + progress = git.RemoteProgress() + + pushinfos = remote.push( + f"HEAD:refs/heads/{branch_name}", + progress=progress, + force=True, + set_upstream=True, + push_option=push_options, + ) + + for pushinfo in pushinfos: + logging.info(pushinfo.summary) + # Show potentially useful messages from git remote + if progress: + for line in progress.other_lines: + logging.info(line) + if pushinfo.flags & ( + git.remote.PushInfo.ERROR + | git.remote.PushInfo.REJECTED + | git.remote.PushInfo.REMOTE_FAILURE + | git.remote.PushInfo.REMOTE_REJECTED + ): + raise FDroidException( + f'{remote.url} push failed: {pushinfo.flags} {pushinfo.summary}' + ) else: - build_dir = os.path.join('build/', app['id']) - repotype = app['Repo Type'] + logging.info(remote.url + ': ' + pushinfo.summary) - if repotype not in ('svn', 'git-svn'): - return (None, 'RepoTrunk update mode only makes sense in svn and git-svn repositories') - # Set up vcs interface and make sure we have the latest code... 
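The branch naming in push_commits() above is what keeps one open merge request per app: if exactly one metadata file changed, its appid becomes the branch name. In isolation (file name illustrative):

    import re

    changed_file = 'metadata/org.example.app.yml'
    m = re.match(r'metadata/(\S+)\.yml', changed_file)
    branch_name = m.group(1) if m else 'checkupdates'    # -> 'org.example.app'
    if branch_name.endswith('.lock'):                    # git refnames may not end in .lock
        branch_name += '_'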
- vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) +def prune_empty_appid_branches(git_repo=None, main_branch='main'): + """Remove empty branches from checkupdates-bot git remote.""" + if git_repo is None: + git_repo = git.Repo.init('.') + upstream_main = get_upstream_main_branch(git_repo) + main_branch = upstream_main.split('/')[1] - vcs.gotorevision(None) + remote = git_repo.remotes.origin + remote.update(prune=True) + merged_branches = git_repo.git().branch(remotes=True, merged=upstream_main).split() + for remote_branch in merged_branches: + if not remote_branch or '/' not in remote_branch: + continue + if remote_branch.split('/')[1] not in (main_branch, 'HEAD'): + for ref in git_repo.remotes.origin.refs: + if remote_branch == ref.name: + remote.push(':%s' % ref.remote_head, force=True) # rm remote branch - ref = vcs.getref() - return (ref, ref) - except BuildException as be: - msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be) - return (None, msg) - except VCSException as vcse: - msg = "VCS error while scanning app %s: %s" % (app['id'], vcse) - return (None, msg) - except Exception: - msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) - return (None, msg) -# Check for a new version by looking at the Google Play Store. -# Returns (None, "a message") if this didn't work, or (version, None) for -# the details of the current version. -def check_gplay(app): - time.sleep(15) - url = 'https://play.google.com/store/apps/details?id=' + app['id'] - headers = {'User-Agent' : 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'} - req = urllib2.Request(url, None, headers) - try: - resp = urllib2.urlopen(req, None, 20) - page = resp.read() - except urllib2.HTTPError, e: - return (None, str(e.code)) - except Exception, e: - return (None, 'Failed:' + str(e)) - - version = None - - m = re.search('itemprop="softwareVersion">[ ]*([^<]+)[ ]*', page) - if m: - html_parser = HTMLParser.HTMLParser() - version = html_parser.unescape(m.group(1)) - - if version == 'Varies with device': - return (None, 'Device-variable version, cannot use this method') - - if not version: - return (None, "Couldn't find version") - return (version.strip(), None) +def status_update_json(processed: list, failed: dict) -> None: + """Output a JSON file with metadata about this run.""" + logging.debug(_('Outputting JSON')) + output = common.setup_status_output(start_timestamp) + if processed: + output['processed'] = processed + if failed: + output['failed'] = failed + common.write_status_json(output) config = None -options = None +start_timestamp = time.gmtime() + def main(): + """Check for updates for one or more apps. - global config, options + The behaviour of this function is influenced by the configuration file as + well as command line parameters. + """ + global config # Parse command line... 
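prune_empty_appid_branches() above deletes a merged remote branch by pushing an empty source to it, the GitPython equivalent of `git push origin :branchname`; roughly:

    import git

    repo = git.Repo('.')
    # an empty left-hand side of the refspec means "delete the remote branch"
    repo.remotes.origin.push(':org.example.app', force=True)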
- parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("--auto", action="store_true", default=False, - help="Process auto-updates") - parser.add_option("--autoonly", action="store_true", default=False, - help="Only process apps with auto-updates") - parser.add_option("--commit", action="store_true", default=False, - help="Commit changes") - parser.add_option("--gplay", action="store_true", default=False, - help="Only print differences with the Play Store") - (options, args) = parser.parse_args() + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument("appid", nargs='*', help=_("application ID of file to operate on")) + parser.add_argument("--auto", action="store_true", default=False, + help=_("Process auto-updates")) + parser.add_argument("--autoonly", action="store_true", default=False, + help=_("Only process apps with auto-updates")) + parser.add_argument("--commit", action="store_true", default=False, + help=_("Commit changes")) + parser.add_argument("--merge-request", action="store_true", default=False, + help=_("Commit changes, push, then make a merge request")) + parser.add_argument("--allow-dirty", action="store_true", default=False, + help=_("Run on git repo that has uncommitted changes")) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - config = common.read_config(options) + config = common.read_config() - # Get all apps... - allapps = metadata.read_metadata(options.verbose) + if not options.allow_dirty: + status = subprocess.check_output(['git', 'status', '--porcelain']) + if status: + logging.error(_('Build metadata git repo has uncommited changes!')) + sys.exit(1) - apps = common.read_app_args(args, allapps, False) + if options.merge_request and not (options.appid and len(options.appid) == 1): + logging.error(_('--merge-request only runs on a single appid!')) + sys.exit(1) - if options.gplay: - for app in apps: - version, reason = check_gplay(app) - if version is None and options.verbose: - if reason == '404': - print "%s is not in the Play Store" % common.getappname(app) - else: - print "%s encountered a problem: %s" % (common.getappname(app), reason) - if version is not None: - stored = app['Current Version'] - if not stored: - if options.verbose: - print "%s has no Current Version but has version %s on the Play Store" % ( - common.getappname(app), version) - elif LooseVersion(stored) < LooseVersion(version): - print "%s has version %s on the Play Store, which is bigger than %s" % ( - common.getappname(app), version, stored) - elif options.verbose: - if stored != version: - print "%s has version %s on the Play Store, which differs from %s" % ( - common.getappname(app), version, stored) - else: - print "%s has the same version %s on the Play Store" % ( - common.getappname(app), version) - return + apps = common.read_app_args(options.appid) + processed = [] + failed = dict() + exit_code = 0 + for appid, app in apps.items(): - for app in apps: - - if options.autoonly and app['Auto Update Mode'] == 'None': - if options.verbose: - print "Nothing to do for %s..." % app['id'] + if options.autoonly and app.AutoUpdateMode in ('None', 'Static'): + logging.debug(_("Nothing to do for {appid}.").format(appid=appid)) continue - print "Processing " + app['id'] + '...' 
+ msg = _("Processing {appid}").format(appid=appid) + logging.info(msg) - writeit = False - logmsg = None + try: + if options.merge_request: + if not checkout_appid_branch(appid): + msg = _("...checkupdate failed for {appid} : {error}").format( + appid=appid, + error='Open merge request with human edits, skipped.', + ) + logging.warning(msg) + failed[appid] = msg + continue - tag = None - msg = None - vercode = None - mode = app['Update Check Mode'] - if mode == 'Tags': - (version, vercode, tag) = check_tags(app) - elif mode == 'RepoManifest': - (version, vercode) = check_repomanifest(app) - elif mode.startswith('RepoManifest/'): - tag = mode[13:] - (version, vercode) = check_repomanifest(app, tag) - elif mode == 'RepoTrunk': - (version, vercode) = check_repotrunk(app) - elif mode == 'HTTP': - (version, vercode) = check_http(app) - elif mode == 'Static': - version = None - msg = 'Checking disabled' - elif mode == 'None': - version = None - msg = 'Checking disabled' - else: - version = None - msg = 'Invalid update check method' + checkupdates_app(app, options.auto, options.commit or options.merge_request) + processed.append(appid) + except Exception as e: + msg = _("...checkupdate failed for {appid} : {error}").format(appid=appid, error=e) + logging.error(msg) + logging.debug(traceback.format_exc()) + failed[appid] = str(e) + exit_code = 1 - if vercode and app['Vercode Operation']: - op = app['Vercode Operation'].replace("%c", str(int(vercode))) - vercode = str(eval(op)) + if options.appid and options.merge_request: + push_commits() + prune_empty_appid_branches() - updating = False - if not version: - print "...%s" % msg - elif vercode == app['Current Version Code']: - print "...up to date" - else: - app['Current Version'] = version - app['Current Version Code'] = str(int(vercode)) - updating = True - writeit = True + status_update_json(processed, failed) + sys.exit(exit_code) - # Do the Auto Name thing as well as finding the CV real name - if len(app["Repo Type"]) > 0: - - try: - - if app['Repo Type'] == 'srclib': - app_dir = os.path.join('build', 'srclib', app['Repo']) - else: - app_dir = os.path.join('build/', app['id']) - - vcs = common.getvcs(app["Repo Type"], app["Repo"], app_dir) - vcs.gotorevision(tag) - - flavour = None - if len(app['builds']) > 0: - if 'subdir' in app['builds'][-1]: - app_dir = os.path.join(app_dir, app['builds'][-1]['subdir']) - if 'gradle' in app['builds'][-1]: - flavour = app['builds'][-1]['gradle'] - - new_name = common.fetch_real_name(app_dir, flavour) - if new_name != app['Auto Name']: - app['Auto Name'] = new_name - - if app['Current Version'].startswith('@string/'): - cv = common.version_name(app['Current Version'], app_dir, flavour) - if app['Current Version'] != cv: - app['Current Version'] = cv - writeit = True - except Exception: - print "ERROR: Auto Name or Current Version failed for %s due to exception: %s" % (app['id'], traceback.format_exc()) - - if updating: - name = common.getappname(app) - ver = common.getcvname(app) - print '...updating to version %s' % ver - logmsg = 'Update CV of %s to %s' % (name, ver) - - if options.auto: - mode = app['Auto Update Mode'] - if mode == 'None': - pass - elif mode.startswith('Version '): - pattern = mode[8:] - if pattern.startswith('+'): - try: - suffix, pattern = pattern.split(' ', 1) - except ValueError: - raise MetaDataException("Invalid AUM: " + mode) - else: - suffix = '' - gotcur = False - latest = None - for build in app['builds']: - if build['vercode'] == app['Current Version Code']: - gotcur = True - 
if not latest or int(build['vercode']) > int(latest['vercode']): - latest = build - if not gotcur: - newbuild = latest.copy() - if 'origlines' in newbuild: - del newbuild['origlines'] - newbuild['vercode'] = app['Current Version Code'] - newbuild['version'] = app['Current Version'] + suffix - print "...auto-generating build for " + newbuild['version'] - commit = pattern.replace('%v', newbuild['version']) - commit = commit.replace('%c', newbuild['vercode']) - newbuild['commit'] = commit - app['builds'].append(newbuild) - writeit = True - name = common.getappname(app) - ver = common.getcvname(app) - logmsg = "Update %s to %s" % (name, ver) - else: - print 'Invalid auto update mode "' + mode + '"' - - if writeit: - metafile = os.path.join('metadata', app['id'] + '.txt') - metadata.write_metadata(metafile, app) - if options.commit and logmsg: - print "Commiting update for " + metafile - gitcmd = ["git", "commit", "-m", - logmsg] - if 'auto_author' in config: - gitcmd.extend(['--author', config['auto_author']]) - gitcmd.extend(["--", metafile]) - if subprocess.call(gitcmd) != 0: - print "Git commit failed" - sys.exit(1) - - print "Finished." if __name__ == "__main__": main() - diff --git a/fdroidserver/common.py b/fdroidserver/common.py index 3477124b..127976c3 100644 --- a/fdroidserver/common.py +++ b/fdroidserver/common.py @@ -1,8 +1,16 @@ -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # # common.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# +# Copyright (C) 2010-2016, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2017, Daniel Martí +# Copyright (C) 2013-2021, Hans-Christoph Steiner +# Copyright (C) 2017-2018, Torsten Grote +# Copyright (C) 2017, tobiasKaminsky +# Copyright (C) 2017-2021, Michael Pöhn +# Copyright (C) 2017,2021, mimi89999 +# Copyright (C) 2019-2021, Jochen Sprickerhof +# Copyright (C) 2021, Felix C. Stegerman # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -17,115 +25,1055 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import glob, os, sys, re + +"""Collection of functions shared by subcommands. + +This is basically the "shared library" for all the fdroid subcommands. +The contains core functionality and a number of utility functions. +This is imported by all modules, so do not import third-party +libraries here as they will become a requirement for all commands. + +Config +------ + +Parsing and using the configuration settings from config.yml is +handled here. The data format is YAML 1.2. The config has its own +supported data types: + +* Boolean (e.g. deploy_process_logs:) +* Integer (e.g. archive_older:, repo_maxage:) +* String-only (e.g. repo_name:, sdk_path:) +* Multi-String (string, list of strings, or list of dicts with + strings, e.g. serverwebroot:, mirrors:) + +String-only fields can also use a special value {env: varname}, which +is a dict with a single key 'env' and a value that is the name of the +environment variable to include. 
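For illustration, a minimal standalone sketch of how a {env: varname} value can be resolved against the process environment, mirroring what expand_env_dict() later in this file does; the config keys and the KEYSTOREPASS variable name are only examples:

    import os

    def resolve_env_value(value):
        """Resolve a config value that may use the {env: varname} form."""
        if isinstance(value, dict) and set(value) == {'env'}:
            return os.getenv(value['env'])  # None if the variable is unset
        return value

    # Hypothetical data as it might look after parsing config.yml with YAML.
    config = {
        'repo_name': 'My First F-Droid Repo Demo',
        'keystorepass': {'env': 'KEYSTOREPASS'},
    }
    os.environ['KEYSTOREPASS'] = 'secret'  # normally set outside the process
    print({k: resolve_env_value(v) for k, v in config.items()})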
+ +""" + +import ast +import base64 +import copy +import difflib +import filecmp +import glob +import gzip +import hashlib +import io +import itertools +import json +import logging +import operator +import os +import re import shutil +import socket import stat import subprocess +import sys +import tempfile import time -import operator -import Queue -import threading -import magic -from distutils.spawn import find_executable +import zipfile +from argparse import BooleanOptionalAction +from base64 import urlsafe_b64encode +from binascii import hexlify +from datetime import datetime, timedelta, timezone +from pathlib import Path +from queue import Queue +from typing import List +from urllib.parse import urlparse, urlsplit, urlunparse +from zipfile import ZipFile -import metadata +import defusedxml.ElementTree as XMLElementTree +import git +from asn1crypto import cms + +import fdroidserver.metadata +from fdroidserver import _ +from fdroidserver._yaml import config_dump, yaml +from fdroidserver.exception import ( + BuildException, + FDroidException, + MetaDataException, + NoSubmodulesException, + VCSException, + VerificationException, +) + +from . import apksigcopier, common +from .asynchronousfilereader import AsynchronousFileReader +from .looseversion import LooseVersion + +# The path to this fdroidserver distribution +FDROID_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__), '..')) + +# There needs to be a default, and this is the most common for software. +DEFAULT_LOCALE = 'en-US' + +# this is the build-tools version, aapt has a separate version that +# has to be manually set in test_aapt_version() +MINIMUM_AAPT_BUILD_TOOLS_VERSION = '26.0.0' +# 33.0.x has a bug that verifies APKs it shouldn't https://gitlab.com/fdroid/fdroidserver/-/issues/1253 +# 31.0.0 is the first version to support --v4-signing-enabled. 
+# we only require 30.0.0 for now as that's the version in buster-backports, see also signindex.py +# 26.0.2 is the first version recognizing md5 based signatures as valid again +# (as does android, so we want that) +MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION = '30.0.0' + +VERCODE_OPERATION_RE = re.compile(r'^([ 0-9/*+-]|%c)+$') + +# A signature block file with a .DSA, .RSA, or .EC extension +SIGNATURE_BLOCK_FILE_REGEX = re.compile(r'\AMETA-INF/.*\.(DSA|EC|RSA)\Z', re.DOTALL) +APK_NAME_REGEX = re.compile(r'^([a-zA-Z][\w.]*)_(-?[0-9]+)_?([0-9a-f]{7})?\.apk') +APK_ID_TRIPLET_REGEX = re.compile(r"^package: name='(\w[^']*)' versionCode='([^']+)' versionName='([^']*)'") +STANDARD_FILE_NAME_REGEX = re.compile(r'^(\w[\w.]*)_(-?[0-9]+)\.\w+') +FDROID_PACKAGE_NAME_REGEX = re.compile(r'''^[a-f0-9]+$''', re.IGNORECASE) +STRICT_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-zA-Z]+(?:\d*[a-zA-Z_]*)*)(?:\.[a-zA-Z]+(?:\d*[a-zA-Z_]*)*)+$''') +VALID_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-z_]+(?:\d*[a-zA-Z_]*)*)(?:\.[a-z_]+(?:\d*[a-zA-Z_]*)*)*$''', + re.IGNORECASE) +ANDROID_PLUGIN_REGEX = re.compile(r'''\s*(:?apply plugin:|id)\(?\s*['"](android|com\.android\.application)['"]\s*\)?''') + +MAX_VERSION_CODE = 0x7fffffff # Java's Integer.MAX_VALUE (2147483647) + +XMLNS_ANDROID = '{http://schemas.android.com/apk/res/android}' + +# https://docs.gitlab.com/ee/user/gitlab_com/#gitlab-pages +GITLAB_COM_PAGES_MAX_SIZE = 1000000000 + +# the names used for things that are configured per-repo +ANTIFEATURES_CONFIG_NAME = 'antiFeatures' +CATEGORIES_CONFIG_NAME = 'categories' +CONFIG_CONFIG_NAME = 'config' +MIRRORS_CONFIG_NAME = 'mirrors' +RELEASECHANNELS_CONFIG_NAME = "releaseChannels" +CONFIG_NAMES = ( + ANTIFEATURES_CONFIG_NAME, + CATEGORIES_CONFIG_NAME, + CONFIG_CONFIG_NAME, + MIRRORS_CONFIG_NAME, + RELEASECHANNELS_CONFIG_NAME, +) + +CONFIG_FILE = 'config.yml' config = None options = None +env = None +orig_path = None -def read_config(opts, config_file='config.py'): - """Read the repository config - The config is read from config_file, which is in the current directory when - any of the repo management commands are used. +def get_default_cachedir(): + """Get a cachedir, using platformdirs for cross-platform, but works without. + + Once platformdirs is installed everywhere, this function can be + removed. 
+ """ - global config, options + appname = __name__.split('.')[0] + try: + import platformdirs + + return platformdirs.user_cache_dir(appname, 'F-Droid') + except ImportError: + return str(Path.home() / '.cache' / appname) + + +# All paths in the config must be strings, never pathlib.Path instances +default_config = { + 'sdk_path': "$ANDROID_HOME", + 'ndk_paths': {}, + 'cachedir': get_default_cachedir(), + 'java_paths': None, + 'scan_binary': False, + 'ant': "ant", + 'mvn3': "mvn", + 'gradle': shutil.which('gradlew-fdroid'), + 'sync_from_local_copy_dir': False, + 'allow_disabled_algorithms': False, + 'keep_when_not_allowed': False, + 'per_app_repos': False, + 'make_current_version_link': False, + 'current_version_name_source': 'Name', + 'deploy_process_logs': False, + 'repo_maxage': 0, + 'build_server_always': False, + 'keystore': 'keystore.p12', + 'smartcardoptions': [], + 'char_limits': { + 'author': 256, + 'name': 50, + 'summary': 80, + 'description': 4000, + 'video': 256, + 'whatsNew': 500, + }, + 'keyaliases': {}, + 'repo_url': "https://MyFirstFDroidRepo.org/fdroid/repo", + 'repo_name': "My First F-Droid Repo Demo", + 'repo_icon': "icon.png", + 'repo_description': _("""This is a repository of apps to be used with F-Droid. Applications in this repository are either official binaries built by the original application developers, or are binaries built from source by the admin of f-droid.org using the tools on https://gitlab.com/fdroid."""), # type: ignore + 'archive_name': 'My First F-Droid Archive Demo', + 'archive_description': _('These are the apps that have been archived from the main repo.'), # type: ignore + 'archive_older': 0, + 'git_mirror_size_limit': 10000000000, + 'scanner_signature_sources': ['suss'], +} + + +def get_options(): + """Return options as set up by parse_args(). + + This provides an easy way to get the global instance without + having to think about very confusing import and submodule + visibility. The code should be probably refactored so it does not + need this. If each individual option value was always passed to + functions as args, for example. + + https://docs.python.org/3/reference/import.html#submodules + + """ + return fdroidserver.common.options + + +def parse_args(parser): + """Call parser.parse_args(), store result in module-level variable and return it. + + This is needed to set up the copy of the options instance in the + fdroidserver.common module. A subcommand only needs to call this + if it uses functions from fdroidserver.common that expect the + "options" variable to be initialized. 
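For illustration, the calling pattern described above as a sketch of a hypothetical subcommand, mirroring how checkupdates.main() uses setup_global_opts(), parse_args() and read_config(); it assumes the fdroidserver package is importable, and the --dry-run flag is made up:

    from argparse import ArgumentParser

    from fdroidserver import common

    def main():
        parser = ArgumentParser(description='hypothetical fdroid subcommand')
        common.setup_global_opts(parser)  # adds -v/--verbose, -q/--quiet, --color
        parser.add_argument('--dry-run', action='store_true')  # made-up flag
        # parse_args() also stores the result in fdroidserver.common.options,
        # so helpers in common.py that read the module-level options work.
        options = common.parse_args(parser)
        config = common.read_config()
        print(options.verbose, config.get('repo_name'))

    if __name__ == '__main__':
        main()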
+ + """ + fdroidserver.common.options = parser.parse_args() + return fdroidserver.common.options + + +def setup_global_opts(parser): + try: # the buildserver VM might not have PIL installed + from PIL import PngImagePlugin + + logger = logging.getLogger(PngImagePlugin.__name__) + logger.setLevel(logging.INFO) # tame the "STREAM" debug messages + except ImportError: + pass + + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + help=_("Spew out even more information than normal"), + ) + parser.add_argument( + "-q", + "--quiet", + action="store_true", + default=False, + help=_("Restrict output to warnings and errors"), + ) + parser.add_argument( + "--color", + action=BooleanOptionalAction, + default=None, + help=_("Color the log output"), + ) + + +class ColorFormatter(logging.Formatter): + + def __init__(self, msg): + logging.Formatter.__init__(self, msg) + + bright_black = "\x1b[90;20m" + yellow = "\x1b[33;20m" + red = "\x1b[31;20m" + bold_red = "\x1b[31;1m" + reset = "\x1b[0m" + + self.FORMATS = { + logging.DEBUG: bright_black + msg + reset, + logging.INFO: reset + msg + reset, # use default color + logging.WARNING: yellow + msg + reset, + logging.ERROR: red + msg + reset, + logging.CRITICAL: bold_red + msg + reset + } + + def format(self, record): + log_fmt = self.FORMATS.get(record.levelno) + formatter = logging.Formatter(log_fmt) + return formatter.format(record) + + +def set_console_logging(verbose=False, color=False): + """Globally set logging to output nicely to the console.""" + + class _StdOutFilter(logging.Filter): + def filter(self, record): + return record.levelno < logging.ERROR + + if verbose: + level = logging.DEBUG + else: + level = logging.ERROR + + if color or (color is None and sys.stdout.isatty()): + formatter = ColorFormatter + else: + formatter = logging.Formatter + + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.addFilter(_StdOutFilter()) + stdout_handler.setFormatter(formatter('%(message)s')) + + stderr_handler = logging.StreamHandler(sys.stderr) + stderr_handler.setLevel(logging.ERROR) + stderr_handler.setFormatter(formatter(_('ERROR: %(message)s'))) + + logging.basicConfig( + force=True, level=level, handlers=[stdout_handler, stderr_handler] + ) + + +def _add_java_paths_to_config(pathlist, thisconfig): + def path_version_key(s): + versionlist = [] + for u in re.split('[^0-9]+', s): + try: + versionlist.append(int(u)) + except ValueError: + pass + return versionlist + + for d in sorted(pathlist, key=path_version_key): + if os.path.islink(d): + continue + j = os.path.basename(d) + # the last one found will be the canonical one, so order appropriately + for regex in [ + r'^1\.([126-9][0-9]?)\.0\.jdk$', # OSX + r'^jdk1\.([126-9][0-9]?)\.0_[0-9]+.jdk$', # OSX and Oracle tarball + r'^jdk1\.([126-9][0-9]?)\.0_[0-9]+$', # Oracle Windows + r'^jdk([126-9][0-9]?)-openjdk$', # Arch + r'^java-([126-9][0-9]?)-openjdk$', # Arch + r'^java-([126-9][0-9]?)-jdk$', # Arch (oracle) + r'^java-1\.([126-9][0-9]?)\.0-.*$', # RedHat + r'^java-([126-9][0-9]?)-oracle$', # Debian WebUpd8 + r'^jdk-([126-9][0-9]?)-oracle-.*$', # Debian make-jpkg + r'^java-([126-9][0-9]?)-openjdk-.*$', # Debian + r'^oracle-jdk-bin-1\.([126-9][0-9]?).*$', # Gentoo (oracle) + r'^icedtea-bin-([126-9][0-9]?).*$', # Gentoo (openjdk) + ]: + m = re.match(regex, j) + if not m: + continue + for p in [d, os.path.join(d, 'Contents', 'Home')]: + if os.path.exists(os.path.join(p, 'bin', 'javac')): + thisconfig['java_paths'][m.group(1)] = p + + +def 
fill_config_defaults(thisconfig): + """Fill in the global config dict with relevant defaults. + + For config values that have a path that can be expanded, e.g. an + env var or a ~/, this will store the original value using "_orig" + appended to the key name so that if the config gets written out, + it will preserve the original, unexpanded string. + + """ + for k, v in default_config.items(): + if k not in thisconfig: + if isinstance(v, dict) or isinstance(v, list): + thisconfig[k] = v.copy() + else: + thisconfig[k] = v + + # Expand paths (~users and $vars) + def expand_path(path): + if not path or not isinstance(path, str): + return None + orig = path + path = os.path.expanduser(path) + path = os.path.expandvars(path) + if orig == path: + return None + return path + + for k in ['sdk_path', 'ant', 'mvn3', 'gradle', 'keystore']: + v = thisconfig[k] + exp = expand_path(v) + if exp is not None: + thisconfig[k] = exp + thisconfig[k + '_orig'] = v + + # find all installed JDKs for keytool, jarsigner, and JAVA[6-9]_HOME env vars + if thisconfig['java_paths'] is None: + thisconfig['java_paths'] = dict() + pathlist = [] + pathlist += glob.glob('/usr/lib/jvm/j*[126-9]*') + pathlist += glob.glob('/usr/java/jdk1.[126-9]*') + pathlist += glob.glob('/System/Library/Java/JavaVirtualMachines/1.[126-9][0-9]?.0.jdk') + pathlist += glob.glob('/Library/Java/JavaVirtualMachines/*jdk*[0-9]*') + pathlist += glob.glob('/opt/oracle-jdk-*1.[0-9]*') + pathlist += glob.glob('/opt/icedtea-*[0-9]*') + if os.getenv('JAVA_HOME') is not None: + pathlist.append(os.getenv('JAVA_HOME')) + if os.getenv('PROGRAMFILES') is not None: + pathlist += glob.glob(os.path.join(os.getenv('PROGRAMFILES'), 'Java', 'jdk1.[126-9][0-9]?.*')) + _add_java_paths_to_config(pathlist, thisconfig) + + for java_version in range(29, 6, -1): + java_version = str(java_version) + if java_version not in thisconfig['java_paths']: + continue + java_home = thisconfig['java_paths'][java_version] + jarsigner = os.path.join(java_home, 'bin', 'jarsigner') + if os.path.exists(jarsigner): + thisconfig['jarsigner'] = jarsigner + thisconfig['keytool'] = os.path.join(java_home, 'bin', 'keytool') + break + + if 'jarsigner' not in thisconfig and shutil.which('jarsigner'): + thisconfig['jarsigner'] = shutil.which('jarsigner') + if 'keytool' not in thisconfig and shutil.which('keytool'): + thisconfig['keytool'] = shutil.which('keytool') + + # enable apksigner by default so v2/v3 APK signatures validate + find_apksigner(thisconfig) + if not thisconfig.get('apksigner'): + logging.warning(_('apksigner not found! 
Cannot sign or verify modern APKs')) + + if 'ipfs_cid' not in thisconfig and shutil.which('ipfs_cid'): + thisconfig['ipfs_cid'] = shutil.which('ipfs_cid') + cmd = sys.argv[1] if len(sys.argv) >= 2 else '' + if cmd == 'update' and not thisconfig.get('ipfs_cid'): + logging.debug(_("ipfs_cid not found, skipping CIDv1 generation")) + + for k in ['ndk_paths', 'java_paths']: + d = thisconfig[k] + for k2 in d.copy(): + v = d[k2] + exp = expand_path(v) + if exp is not None: + thisconfig[k][k2] = exp + thisconfig[k][k2 + '_orig'] = v + + ndk_paths = thisconfig.get('ndk_paths', {}) + + ndk_bundle = os.path.join(thisconfig['sdk_path'], 'ndk-bundle') + if os.path.exists(ndk_bundle): + version = get_ndk_version(ndk_bundle) + if version not in ndk_paths: + ndk_paths[version] = ndk_bundle + + ndk_dir = os.path.join(thisconfig['sdk_path'], 'ndk') + if os.path.exists(ndk_dir): + for ndk in glob.glob(os.path.join(ndk_dir, '*')): + version = get_ndk_version(ndk) + if version not in ndk_paths: + ndk_paths[version] = ndk + + if 'cachedir_scanner' not in thisconfig: + thisconfig['cachedir_scanner'] = str(Path(thisconfig['cachedir']) / 'scanner') + if 'gradle_version_dir' not in thisconfig: + thisconfig['gradle_version_dir'] = str(Path(thisconfig['cachedir']) / 'gradle') + + +def get_config(): + """Get the initalized, singleton config instance. + + config and options are intertwined in read_config(), so they have + to be here too. In the current ugly state of things, there are + multiple potential instances of config and options in use: + + * global + * module-level in the subcommand module (e.g. fdroidserver/build.py) + * module-level in fdroidserver.common + + There are some insane parts of the code that are probably + referring to multiple instances of these at different points. + This can be super confusing and maddening. + + The current intermediate refactoring step is to move all + subcommands to always get/set config and options via this function + so that there is no longer a distinction between the global and + module-level instances. Then there can be only one module-level + instance in fdroidserver.common. + + """ + global config if config is not None: return config - if not os.path.isfile(config_file): - print "Missing config file - is this a repo directory?" - sys.exit(2) - options = opts - if not hasattr(options, 'verbose'): - options.verbose = False + read_config() - defconfig = { - 'build_server_always': False, - 'mvn3': "mvn3", - 'archive_older': 0, - 'gradle': 'gradle', - 'update_stats': False, - 'archive_older': 0, - 'max_icon_size': 72, - 'stats_to_carbon': False, - 'repo_maxage': 0, - 'char_limits': { - 'Summary' : 50, - 'Description' : 1500 - } - - } - config = {} - - if options.verbose: - print "Reading %s..." 
% config_file - execfile(config_file, config) - - if any(k in config for k in ["keystore", "keystorepass", "keypass"]): - st = os.stat(config_file) - if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO: - print "WARNING: unsafe permissions on {0} (should be 0600)!".format(config_file) - - # Expand environment variables - for k, v in config.items(): - if type(v) != str: - continue - v = os.path.expanduser(v) - config[k] = os.path.expandvars(v) - - # Check that commands and binaries do exist - for key in ('mvn3', 'gradle'): - if key not in config: - continue - val = config[key] - executable = find_executable(val) - if not executable: - print "ERROR: No such command or binary for %s: %s" % (key, val) - sys.exit(3) - - # Check that directories exist - for key in ('sdk_path', 'ndk_path', 'build_tools'): - if key not in config: - continue - val = config[key] - if key == 'build_tools': - if 'sdk_path' not in config: - print "ERROR: sdk_path needs to be set for build_tools" - sys.exit(3) - val = os.path.join(config['sdk_path'], 'build-tools', val) - if not os.path.isdir(val): - print "ERROR: No such directory found for %s: %s" % (key, val) - sys.exit(3) - - for k, v in defconfig.items(): - if k not in config: - config[k] = v + # make sure these values are available in common.py even if they didn't + # declare global in a scope + common.config = config return config -# Given the arguments in the form of multiple appid:[vc] strings, this returns -# a dictionary with the set of vercodes specified for each package. -def read_pkg_args(args, allow_vercodes=False): +def get_cachedir(): + cachedir = config and config.get('cachedir') + if cachedir and os.path.exists(cachedir): + return Path(cachedir) + else: + return Path(tempfile.mkdtemp()) + + +def regsub_file(pattern, repl, path): + with open(path, 'rb') as f: + text = f.read() + text = re.sub(bytes(pattern, 'utf8'), bytes(repl, 'utf8'), text) + with open(path, 'wb') as f: + f.write(text) + + +def config_type_check(path, data): + if Path(path).name == 'mirrors.yml': + expected_type = list + else: + expected_type = dict + if expected_type == dict: + if not isinstance(data, dict): + msg = _('{path} is not "key: value" dict, but a {datatype}!') + raise TypeError(msg.format(path=path, datatype=type(data).__name__)) + elif not isinstance(data, expected_type): + msg = _('{path} is not {expected_type}, but a {datatype}!') + raise TypeError( + msg.format( + path=path, + expected_type=expected_type.__name__, + datatype=type(data).__name__, + ) + ) + + +class _Config(dict): + def __init__(self, default={}): + super(_Config, self).__init__(default) + self.loaded = {} + + def lazyget(self, key): + if key not in self.loaded: + value = super(_Config, self).__getitem__(key) + + if key == 'serverwebroot': + roots = parse_list_of_dicts(value) + rootlist = [] + for d in roots: + # since this is used with rsync, where trailing slashes have + # meaning, ensure there is always a trailing slash + rootstr = d.get('url') + if not rootstr: + logging.error('serverwebroot: has blank value!') + continue + if rootstr[-1] != '/': + rootstr += '/' + d['url'] = rootstr.replace('//', '/') + rootlist.append(d) + self.loaded[key] = rootlist + + elif key == 'servergitmirrors': + self.loaded[key] = parse_list_of_dicts(value) + + elif isinstance(value, dict) and 'env' in value and len(value) == 1: + var = value['env'] + if var in os.environ: + self.loaded[key] = os.getenv(var) + else: + logging.error( + _( + 'Environment variable {var} from {configname} is not set!' 
+ ).format(var=value['env'], configname=key) + ) + self.loaded[key] = None + else: + self.loaded[key] = value + + return self.loaded[key] + + def __getitem__(self, key): + return self.lazyget(key) + + def get(self, key, default=None, /): + try: + return self.lazyget(key) + except KeyError: + return default + + +def read_config(): + """Read the repository config. + + The config is read from config.yml, which is in the current + directory when any of the repo management commands are used. If + there is a local metadata file in the git repo, then the config is + not required, just use defaults. + + config.yml is the preferred form because no code is executed when + reading it. config.py is deprecated and no longer supported. + + config.yml requires ASCII or UTF-8 encoding because this code does + not auto-detect the file's encoding. That is left up to the YAML + library. YAML allows ASCII, UTF-8, UTF-16, and UTF-32 encodings. + Since it is a good idea to manage config.yml (WITHOUT PASSWORDS!) + in git, it makes sense to use a globally standard encoding. + + """ + global config + + if config is not None: + return config + + config = {} + + if os.path.exists(CONFIG_FILE): + logging.debug(_("Reading '{config_file}'").format(config_file=CONFIG_FILE)) + with open(CONFIG_FILE, encoding='utf-8') as fp: + config = yaml.load(fp) + if not config: + config = {} + config_type_check(CONFIG_FILE, config) + + old_config_file = 'config.py' + if os.path.exists(old_config_file): + logging.warning( + _("""Ignoring deprecated {oldfile}, use {newfile}!""").format( + oldfile=old_config_file, newfile=CONFIG_FILE + ) + ) + + # smartcardoptions must be a list since its command line args for Popen + smartcardoptions = config.get('smartcardoptions') + if isinstance(smartcardoptions, str): + sco_items = re.sub(r'\s+', r' ', config['smartcardoptions']).split(' ') + config['smartcardoptions'] = [i.strip() for i in sco_items if i] + elif not smartcardoptions and 'keystore' in config and config['keystore'] == 'NONE': + # keystore='NONE' means use smartcard, these are required defaults + config['smartcardoptions'] = ['-storetype', 'PKCS11', '-providerName', + 'SunPKCS11-OpenSC', '-providerClass', + 'sun.security.pkcs11.SunPKCS11', + '-providerArg', 'opensc-fdroid.cfg'] + + fill_config_defaults(config) + + if 'servergitmirrors' in config: + limit = config['git_mirror_size_limit'] + config['git_mirror_size_limit'] = parse_human_readable_size(limit) + + if 'repo_url' in config: + if not config['repo_url'].endswith('/repo'): + raise FDroidException(_('repo_url needs to end with /repo')) + + if 'archive_url' in config: + if not config['archive_url'].endswith('/archive'): + raise FDroidException(_('archive_url needs to end with /archive')) + + confignames_to_delete = set() + for configname, dictvalue in config.items(): + if configname == 'java_paths': + new = dict() + for k, v in dictvalue.items(): + new[str(k)] = v + config[configname] = new + elif configname in ('ndk_paths', 'java_paths', 'char_limits', 'keyaliases'): + continue + elif isinstance(dictvalue, dict): + for k, v in dictvalue.items(): + if k != 'env': + confignames_to_delete.add(configname) + logging.error(_('Unknown entry {key} in {configname}') + .format(key=k, configname=configname)) + + for configname in confignames_to_delete: + del config[configname] + + if any(k in config and config.get(k) for k in ["keystorepass", "keypass"]): + st = os.stat(CONFIG_FILE) + if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO: + logging.warning( + _("unsafe permissions on 
'{config_file}' (should be 0600)!").format( + config_file=CONFIG_FILE + ) + ) + + config = _Config(config) + return config + + +def expand_env_dict(s): + """Expand env var dict to a string value. + + {env: varName} syntax can be used to replace any string value in the + config with the value of an environment variable "varName". This + allows for secrets management when commiting the config file to a + public git repo. + + """ + if not s or type(s) not in (str, dict): + return + if isinstance(s, dict): + if 'env' not in s or len(s) > 1: + raise TypeError(_('Only accepts a single key "env"')) + var = s['env'] + s = os.getenv(var) + if not s: + logging.error( + _('Environment variable {{env: {var}}} is not set!').format(var=var) + ) + return + return os.path.expanduser(s) + + +def parse_list_of_dicts(l_of_d): + """Parse config data structure that is a list of dicts of strings. + + The value can be specified as a string, list of strings, or list of dictionary maps + where the values are strings. + + """ + if isinstance(l_of_d, str): + return [{"url": expand_env_dict(l_of_d)}] + if isinstance(l_of_d, dict): + return [{"url": expand_env_dict(l_of_d)}] + if all(isinstance(item, str) for item in l_of_d): + return [{'url': expand_env_dict(i)} for i in l_of_d] + if all(isinstance(item, dict) for item in l_of_d): + for item in l_of_d: + item['url'] = expand_env_dict(item['url']) + return l_of_d + raise TypeError(_('only accepts strings, lists, and tuples')) + + +def get_mirrors(url, filename=None): + """Get list of dict entries for mirrors, appending filename if provided.""" + # TODO use cached index if it exists + if isinstance(url, str): + url = urlsplit(url) + + if url.netloc == 'f-droid.org': + mirrors = FDROIDORG_MIRRORS + else: + mirrors = parse_list_of_dicts(url.geturl()) + + if filename: + return append_filename_to_mirrors(filename, mirrors) + else: + return mirrors + + +def append_filename_to_mirrors(filename, mirrors): + """Append the filename to all "url" entries in the mirrors dict.""" + appended = copy.deepcopy(mirrors) + for mirror in appended: + parsed = urlparse(mirror['url']) + mirror['url'] = urlunparse( + parsed._replace(path=os.path.join(parsed.path, filename)) + ) + return appended + + +def file_entry(filename, hash_value=None): + meta = {} + meta["name"] = "/" + Path(filename).as_posix().split("/", 1)[1] + meta["sha256"] = hash_value or sha256sum(filename) + meta["size"] = os.stat(filename).st_size + return meta + + +def load_localized_config(name, repodir): + """Load localized config files and put them into internal dict format. + + This will maintain the order as came from the data files, e.g + YAML. The locale comes from unsorted paths on the filesystem, so + that is separately sorted. 
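For illustration, a simplified standalone sketch of the normalization that parse_list_of_dicts() above applies to multi-string values such as serverwebroot: and mirrors:; env expansion, trailing-slash handling and error cases are omitted, and the URLs are made up:

    def normalize_multi_string(value):
        """Normalize str / list-of-str / list-of-dict values to a list of dicts with 'url'."""
        if isinstance(value, str):
            return [{'url': value}]
        if all(isinstance(item, str) for item in value):
            return [{'url': item} for item in value]
        return list(value)  # already a list of dicts, extra keys pass through

    print(normalize_multi_string('https://example.com/fdroid'))
    print(normalize_multi_string(['https://a.example/fdroid', 'https://b.example/fdroid']))
    print(normalize_multi_string([{'url': 'https://c.example/fdroid'}]))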
+ + """ + ret = dict() + found_config_file = False + for f in Path().glob("config/**/{name}.yml".format(name=name)): + found_config_file = True + locale = f.parts[1] + if len(f.parts) == 2: + locale = DEFAULT_LOCALE + with open(f, encoding="utf-8") as fp: + elem = yaml.load(fp) + if not isinstance(elem, dict): + msg = _('{path} is not "key: value" dict, but a {datatype}!') + raise TypeError(msg.format(path=f, datatype=type(elem).__name__)) + for afname, field_dict in elem.items(): + if afname not in ret: + ret[afname] = dict() + for key, value in field_dict.items(): + if key not in ret[afname]: + ret[afname][key] = dict() + if key == "icon": + icons_dir = os.path.join(repodir, 'icons') + if not os.path.exists(icons_dir): + os.makedirs(icons_dir, exist_ok=True) + src = os.path.join("config", value) + dest = os.path.join(icons_dir, os.path.basename(src)) + if not os.path.exists(dest) or not filecmp.cmp(src, dest): + shutil.copy2(src, dest) + ret[afname][key][locale] = file_entry( + os.path.join(icons_dir, value) + ) + else: + ret[afname][key][locale] = value + + if not found_config_file: + for f in Path().glob("config/*.yml"): + if f.stem not in CONFIG_NAMES: + msg = _('{path} is not a standard config file!').format(path=f) + m = difflib.get_close_matches(f.stem, CONFIG_NAMES, 1) + if m: + msg += ' ' + msg += _('Did you mean config/{name}.yml?').format(name=m[0]) + logging.error(msg) + + for elem in ret.values(): + for afname in elem: + elem[afname] = {locale: v for locale, v in sorted(elem[afname].items())} + return ret + + +def parse_human_readable_size(size): + units = { + 'b': 1, + 'kb': 1000, 'mb': 1000**2, 'gb': 1000**3, 'tb': 1000**4, + 'kib': 1024, 'mib': 1024**2, 'gib': 1024**3, 'tib': 1024**4, + } + try: + return int(float(size)) + except (ValueError, TypeError) as exc: + if type(size) != str: + raise ValueError(_('Could not parse size "{size}", wrong type "{type}"') + .format(size=size, type=type(size))) from exc + s = size.lower().replace(' ', '') + m = re.match(r'^(?P[0-9][0-9.]*) *(?P' + r'|'.join(units.keys()) + r')$', s) + if not m: + raise ValueError(_('Not a valid size definition: "{}"').format(size)) from exc + return int(float(m.group("value")) * units[m.group("unit")]) + + +def get_dir_size(path_or_str): + """Get the total size of all files in the given directory.""" + if isinstance(path_or_str, str): + path_or_str = Path(path_or_str) + return sum(f.stat().st_size for f in path_or_str.glob('**/*') if f.is_file()) + + +def assert_config_keystore(config): + """Check weather keystore is configured correctly and raise exception if not.""" + nosigningkey = False + if 'repo_keyalias' not in config: + nosigningkey = True + logging.critical(_("'repo_keyalias' not found in config.yml!")) + if 'keystore' not in config: + nosigningkey = True + logging.critical(_("'keystore' not found in config.yml!")) + elif config['keystore'] == 'NONE': + if not config.get('smartcardoptions'): + nosigningkey = True + logging.critical(_("'keystore' is NONE and 'smartcardoptions' is blank!")) + elif not os.path.exists(config['keystore']): + nosigningkey = True + logging.critical("'" + config['keystore'] + "' does not exist!") + if 'keystorepass' not in config: + nosigningkey = True + logging.critical(_("'keystorepass' not found in config.yml!")) + if 'keypass' not in config and config.get('keystore') != 'NONE': + nosigningkey = True + logging.critical(_("'keypass' not found in config.yml!")) + if nosigningkey: + raise FDroidException("This command requires a signing key, " + + "you can create one 
using: fdroid update --create-key") + + +def find_apksigner(config): + """Search for the best version apksigner and adds it to the config. + + Returns the best version of apksigner following this algorithm: + + * use config['apksigner'] if set + * try to find apksigner in path + * find apksigner in build-tools starting from newest installed + going down to MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION + + Returns + ------- + str + path to apksigner or None if no version is found + + """ + command = 'apksigner' + if command in config: + return + + tmp = find_command(command) + if tmp is not None: + config[command] = tmp + return + + build_tools_path = os.path.join(config.get('sdk_path', ''), 'build-tools') + if not os.path.isdir(build_tools_path): + return + for f in sorted(os.listdir(build_tools_path), reverse=True): + if not os.path.isdir(os.path.join(build_tools_path, f)): + continue + try: + version = LooseVersion(f) + if version >= LooseVersion('33') and version < LooseVersion('34'): + logging.warning( + _('apksigner in build-tools;{version} passes APKs with invalid v3 signatures, ignoring.').format( + version=version + ) + ) + continue + if version < LooseVersion(MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION): + logging.debug("Local Android SDK only has outdated apksigner versions") + return + except TypeError: + continue + if os.path.exists(os.path.join(build_tools_path, f, 'apksigner')): + apksigner = os.path.join(build_tools_path, f, 'apksigner') + logging.info("Using %s " % apksigner) + config['apksigner'] = apksigner + return + + +def find_sdk_tools_cmd(cmd): + """Find a working path to a tool from the Android SDK.""" + tooldirs = [] + if config is not None and 'sdk_path' in config and os.path.exists(config['sdk_path']): + # try to find a working path to this command, in all the recent possible paths + build_tools = os.path.join(config['sdk_path'], 'build-tools') + if os.path.isdir(build_tools): + for f in sorted(os.listdir(build_tools), reverse=True): + if os.path.isdir(os.path.join(build_tools, f)): + tooldirs.append(os.path.join(build_tools, f)) + sdk_tools = os.path.join(config['sdk_path'], 'tools') + if os.path.exists(sdk_tools): + tooldirs.append(sdk_tools) + tooldirs.append(os.path.join(sdk_tools, 'bin')) + sdk_platform_tools = os.path.join(config['sdk_path'], 'platform-tools') + if os.path.exists(sdk_platform_tools): + tooldirs.append(sdk_platform_tools) + sdk_build_tools = glob.glob(os.path.join(config['sdk_path'], 'build-tools', '*.*')) + if sdk_build_tools: + tooldirs.append(sorted(sdk_build_tools)[-1]) # use most recent version + if os.path.exists('/usr/bin'): + tooldirs.append('/usr/bin') + for d in tooldirs: + path = os.path.join(d, cmd) + if not os.path.isfile(path): + path += '.exe' + if os.path.isfile(path): + if cmd == 'aapt': + test_aapt_version(path) + return path + # did not find the command, exit with error message + test_sdk_exists(config) # ignore result so None is never returned + raise FDroidException(_("Android SDK tool {cmd} not found!").format(cmd=cmd)) + + +def test_aapt_version(aapt): + """Check whether the version of aapt is new enough.""" + output = subprocess.check_output([aapt, 'version'], universal_newlines=True) + if output is None or output == '': + logging.error(_("'{path}' failed to execute!").format(path=aapt)) + else: + m = re.match(r'.*v([0-9]+)\.([0-9]+)[.-]?([0-9.-]*)', output) + if m: + major = m.group(1) + minor = m.group(2) + bugfix = m.group(3) + # the Debian package has the version string like "v0.2-23.0.2" + too_old = False + if '.' 
in bugfix: + if LooseVersion(bugfix) < LooseVersion(MINIMUM_AAPT_BUILD_TOOLS_VERSION): + too_old = True + elif LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.4062713'): + too_old = True + if too_old: + logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-{version} or newer!") + .format(aapt=aapt, version=MINIMUM_AAPT_BUILD_TOOLS_VERSION)) + else: + logging.warning(_('Unknown version of aapt, might cause problems: ') + output) + + +def test_sdk_exists(thisconfig): + if 'sdk_path' not in thisconfig: + # check the 'apksigner' value in the config to see if its new enough + f = thisconfig.get('apksigner', '') + if os.path.isfile(f): + sdk_path = os.path.dirname(os.path.dirname(os.path.dirname(f))) + tmpconfig = {'sdk_path': sdk_path} + find_apksigner(tmpconfig) + if os.path.exists(tmpconfig.get('apksigner', '')): + return True + logging.error(_("'sdk_path' not set in config.yml!")) + return False + if thisconfig['sdk_path'] == default_config['sdk_path']: + logging.error(_('No Android SDK found!')) + logging.error(_('You can use ANDROID_HOME to set the path to your SDK, i.e.:')) + logging.error('\texport ANDROID_HOME=/opt/android-sdk') + return False + if not os.path.exists(thisconfig['sdk_path']): + logging.critical(_("Android SDK path '{path}' does not exist!") + .format(path=thisconfig['sdk_path'])) + return False + if not os.path.isdir(thisconfig['sdk_path']): + logging.critical(_("Android SDK path '{path}' is not a directory!") + .format(path=thisconfig['sdk_path'])) + return False + find_apksigner(thisconfig) + if not os.path.exists(thisconfig.get('apksigner', '')): + return False + return True + + +def get_local_metadata_files(): + """Get any metadata files local to an app's source repo. + + This tries to ignore anything that does not count as app metdata, + including emacs cruft ending in ~ + + """ + return glob.glob('.fdroid.[a-jl-z]*[a-rt-z]') + + +def read_pkg_args(appid_versionCode_pairs, allow_version_codes=False): + """No summary. + + Parameters + ---------- + appids + arguments in the form of multiple appid:[versionCode] strings + + Returns + ------- + a dictionary with the set of vercodes specified for each package + """ vercodes = {} - if not args: + if not appid_versionCode_pairs: return vercodes - for p in args: - if allow_vercodes and ':' in p: + error = False + apk_regex = re.compile(r'_(\d+)\.apk$') + for p in appid_versionCode_pairs: + # Convert the apk name to a appid:versioncode pair + p = apk_regex.sub(r':\1', p) + if allow_version_codes and ':' in p: package, vercode = p.split(':') + try: + vercode = version_code_string_to_int(vercode) + except ValueError as e: + logging.error('"%s": %s' % (p, str(e))) + error = True else: package, vercode = p, None if package not in vercodes: @@ -134,86 +1082,314 @@ def read_pkg_args(args, allow_vercodes=False): elif vercode and vercode not in vercodes[package]: vercodes[package] += [vercode] if vercode else [] + if error: + raise FDroidException(_("Found invalid versionCodes for some apps")) + return vercodes -# On top of what read_pkg_args does, this returns the whole app metadata, but -# limiting the builds list to the builds matching the vercodes specified. -def read_app_args(args, allapps, allow_vercodes=False): - vercodes = read_pkg_args(args, allow_vercodes) +def get_metadata_files(vercodes): + """ + Build a list of metadata files and raise an exception for invalid appids. 
+ + Parameters + ---------- + vercodes + versionCodes as returned by read_pkg_args() + + Returns + ------- + List + a list of corresponding metadata/*.yml files + """ + found_invalid = False + metadatafiles = [] + for appid in vercodes.keys(): + f = Path('metadata') / ('%s.yml' % appid) + if f.exists(): + metadatafiles.append(f) + else: + found_invalid = True + logging.critical(_("No such package: %s") % appid) + if found_invalid: + raise FDroidException(_("Found invalid appids in arguments")) + return metadatafiles + + +def read_app_args(appid_versionCode_pairs, allow_version_codes=False, sort_by_time=False): + """Build a list of App instances for processing. + + On top of what read_pkg_args does, this returns the whole app + metadata, but limiting the builds list to the builds matching the + appid_versionCode_pairs and vercodes specified. If no + appid_versionCode_pairs are specified, then all App and Build instances are + returned. + + """ + vercodes = read_pkg_args(appid_versionCode_pairs, allow_version_codes) + allapps = fdroidserver.metadata.read_metadata(vercodes, sort_by_time) if not vercodes: return allapps - apps = [app for app in allapps if app['id'] in vercodes] + apps = {} + for appid, app in allapps.items(): + if appid in vercodes: + apps[appid] = app if not apps: - raise Exception("No packages specified") - if len(apps) != len(vercodes): - allids = [app["id"] for app in allapps] - for p in vercodes: - if p not in allids: - print "No such package: %s" % p - raise Exception("Found invalid app ids in arguments") + raise FDroidException(_("No packages specified")) error = False - for app in apps: - vc = vercodes[app['id']] + for appid, app in apps.items(): + vc = vercodes[appid] if not vc: continue - app['builds'] = [b for b in app['builds'] if b['vercode'] in vc] - if len(app['builds']) != len(vercodes[app['id']]): + app['Builds'] = [b for b in app.get('Builds', []) if b.versionCode in vc] + if len(app.get('Builds', [])) != len(vercodes[appid]): error = True - allvcs = [b['vercode'] for b in app['builds']] - for v in vercodes[app['id']]: + allvcs = [b.versionCode for b in app.get('Builds', [])] + for v in vercodes[appid]: if v not in allvcs: - print "No such vercode %s for app %s" % (v, app['id']) + logging.critical(_("No such versionCode {versionCode} for app {appid}") + .format(versionCode=v, appid=appid)) if error: - raise Exception("Found invalid vercodes for some apps") + raise FDroidException(_("Found invalid versionCodes for some apps")) return apps -def has_extension(filename, extension): - name, ext = os.path.splitext(filename) - ext = ext.lower()[1:] - return ext == extension -apk_regex = None +def get_extension(filename): + """Get name and extension of filename, with extension always lower case.""" + base, ext = os.path.splitext(filename) + if not ext: + return base, '' + return base, ext.lower()[1:] -def apknameinfo(filename): - global apk_regex + +publish_name_regex = re.compile(r"^(.+)_([0-9]+)\.(apk|zip)$") + + +def publishednameinfo(filename): filename = os.path.basename(filename) - if apk_regex is None: - apk_regex = re.compile(r"^(.+)_([0-9]+)\.apk$") - m = apk_regex.match(filename) + m = publish_name_regex.match(filename) try: - result = (m.group(1), m.group(2)) - except AttributeError: - raise Exception("Invalid apk name: %s" % filename) + result = (m.group(1), int(m.group(2))) + except AttributeError as exc: + raise FDroidException(_("Invalid name for published file: %s") % filename) from exc return result -def getapkname(app, build): - return "%s_%s.apk" % 
(app['id'], build['vercode']) -def getsrcname(app, build): - return "%s_%s_src.tar.gz" % (app['id'], build['vercode']) +apk_release_filename = re.compile(r'(?P[a-zA-Z0-9_\.]+)_(?P[0-9]+)\.apk') +apk_release_filename_with_sigfp = re.compile(r'(?P[a-zA-Z0-9_\.]+)_(?P[0-9]+)_(?P[0-9a-f]{7})\.apk') -def getappname(app): - if app['Name']: - return '%s (%s)' % (app['Name'], app['id']) - if app['Auto Name']: - return '%s (%s)' % (app['Auto Name'], app['id']) - return app['id'] -def getcvname(app): - return '%s (%s)' % (app['Current Version'], app['Current Version Code']) +def apk_parse_release_filename(apkname): + """Parse the name of an APK file according the F-Droids APK naming scheme. + + WARNING: Returned values don't necessarily represent the APKs actual + properties, the are just paresed from the file name. + + Returns + ------- + Tuple + A triplet containing (appid, versionCode, signer), where appid + should be the package name, versionCode should be the integer + represion of the APKs version and signer should be the first 7 hex + digists of the sha256 signing key fingerprint which was used to sign + this APK. + """ + m = apk_release_filename_with_sigfp.match(apkname) + if m: + return m.group('appid'), int(m.group('vercode')), m.group('sigfp') + m = apk_release_filename.match(apkname) + if m: + return m.group('appid'), int(m.group('vercode')), None + return None, None, None + + +def get_output_extension(build): + if build.output: + return get_file_extension(replace_build_vars(build.output, build)) + return 'apk' + + +def get_release_apk_filename(appid, versionCode): + return f"{appid}_{versionCode}.apk" + + +def get_release_filename(app, build, extension=None): + if extension: + return "%s_%s.%s" % (app.id, build.versionCode, extension) + if build.output and get_file_extension(build.output): + return "%s_%s.%s" % (app.id, build.versionCode, get_file_extension(build.output)) + else: + return get_release_apk_filename(app.id, build.versionCode) + + +def get_toolsversion_logname(app, build): + return "%s_%s_toolsversion.log" % (app.id, build.versionCode) + + +def get_src_tarball_name(appid, versionCode): + return f"{appid}_{versionCode}_src.tar.gz" + + +def get_source_date_epoch(build_dir): + """Return timestamp suitable for the SOURCE_DATE_EPOCH variable. + + https://reproducible-builds.org/docs/source-date-epoch/ + + """ + try: + return git.repo.Repo(build_dir).git.log(n=1, pretty='%ct') + except Exception as e: + logging.warning('%s: %s', e.__class__.__name__, build_dir) + build_dir = Path(build_dir) + appid = build_dir.name + data_dir = build_dir.parent.parent + metadata_file = f'metadata/{appid}.yml' + if (data_dir / '.git').exists() and (data_dir / metadata_file).exists(): + repo = git.repo.Repo(data_dir) + return repo.git.log('-n1', '--pretty=%ct', '--', metadata_file) + + +def get_build_dir(app): + """Get the dir that this app will be built in.""" + if app.RepoType == 'srclib': + return Path('build/srclib') / app.Repo + + return Path('build') / app.id + + +class Encoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, set): + return sorted(obj) + return super().default(obj) + + +def epoch_millis_now(): + """Get the current time in epoch milliseconds. + + This is the format returned by Java's System.currentTimeMillis(). 
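For illustration, the published-file naming scheme handled by apk_parse_release_filename() above is appid_versionCode[_sigfp].apk, where sigfp is the first 7 hex digits of the signer fingerprint; a standalone sketch using the same regular expressions (named groups reconstructed, appid made up):

    import re

    PLAIN = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)\.apk')
    WITH_SIGFP = re.compile(
        r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)_(?P<sigfp>[0-9a-f]{7})\.apk'
    )

    def parse_release_filename(name):
        m = WITH_SIGFP.match(name) or PLAIN.match(name)
        if not m:
            return None, None, None
        d = m.groupdict()
        return d['appid'], int(d['vercode']), d.get('sigfp')

    print(parse_release_filename('org.example.app_1234.apk'))          # ('org.example.app', 1234, None)
    print(parse_release_filename('org.example.app_1234_deadbee.apk'))  # ('org.example.app', 1234, 'deadbee')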
+ + Parameters + ---------- + millis + Java-style integer time since UNIX epoch in milliseconds + """ + return int(datetime.now(timezone.utc).timestamp() * 1000) + + +def setup_status_output(start_timestamp): + """Create the common output dictionary for public status updates.""" + output = { + 'commandLine': sys.argv, + 'startTimestamp': int(time.mktime(start_timestamp) * 1000), + 'subcommand': sys.argv[0].split()[1], + } + if os.path.isdir('.git'): + git_repo = git.repo.Repo(os.getcwd()) + output['fdroiddata'] = { + 'commitId': get_head_commit_id(git_repo), + 'isDirty': git_repo.is_dirty(), + 'modifiedFiles': git_repo.git().ls_files(modified=True).split(), + 'untrackedFiles': git_repo.untracked_files, + } + fdroidserver_dir = os.path.dirname(sys.argv[0]) + if os.path.isdir(os.path.join(fdroidserver_dir, '.git')): + git_repo = git.repo.Repo(fdroidserver_dir) + output['fdroidserver'] = { + 'commitId': get_head_commit_id(git_repo), + 'isDirty': git_repo.is_dirty(), + 'modifiedFiles': git_repo.git().ls_files(modified=True).split(), + 'untrackedFiles': git_repo.untracked_files, + } + etc_issue_net = '/etc/issue.net' + if os.path.exists(etc_issue_net): + with open(etc_issue_net) as fp: + output[etc_issue_net] = fp.read(100).strip() + write_running_status_json(output) + return output + + +def write_running_status_json(output): + write_status_json(output, pretty=True, name='running') + + +def write_status_json(output, pretty=False, name=None): + """Write status out as JSON, and rsync it to the repo server.""" + status_dir = os.path.join('repo', 'status') + if not os.path.exists(status_dir): + os.makedirs(status_dir) + if not name: + output['endTimestamp'] = epoch_millis_now() + names = ['running', sys.argv[0].split()[1]] # fdroid subcommand + else: + names = [name] + + for fname in names: + path = os.path.join(status_dir, fname + '.json') + with open(path, "w", encoding="utf-8") as fp: + if pretty: + json.dump(output, fp, sort_keys=True, cls=Encoder, indent=2) + else: + json.dump(output, fp, sort_keys=True, cls=Encoder, separators=(',', ':')) + rsync_status_file_to_repo(path, repo_subdir='status') + + +def get_head_commit_id(git_repo_dir): + """Get git commit ID for HEAD as a str. + + This only reads files, so it should be safe to use on untrusted + repos. It was created to avoid running the git executable, no + matter what. It uses a tiny subset of the git.Repo class to avoid + setting up the git executable. + + """ + try: + if type(git_repo_dir) is git.Repo: + d = git_repo_dir.git_dir + else: + d = os.path.join(git_repo_dir, '.git') + repo = type( + 'Repo', + (object,), + {'common_dir': d, 'git_dir': d, 're_hexsha_only': git.Repo.re_hexsha_only}, + )() + return git.refs.symbolic.SymbolicReference.dereference_recursive(repo, 'HEAD') + except (FileNotFoundError, ValueError) as e: + msg = _("Cannot read {path}: {error}").format(path=os.getcwd(), error=str(e)) + logging.debug(msg) + + +def setup_vcs(app): + """Checkout code from VCS and return instance of vcs and the build dir.""" + build_dir = get_build_dir(app) + + # Set up vcs interface and make sure we have the latest code... + logging.debug("Getting {0} vcs interface for {1}" + .format(app.RepoType, app.Repo)) + if app.RepoType == 'git' and os.path.exists('.fdroid.yml'): + remote = os.getcwd() + else: + remote = app.Repo + vcs = getvcs(app.RepoType, remote, build_dir) + + return vcs, build_dir + def getvcs(vcstype, remote, local): + """Return a vcs instance based on the arguments. 
+ + remote and local can be either a string or a pathlib.Path + + """ if vcstype == 'git': return vcs_git(remote, local) - if vcstype == 'svn': - return vcs_svn(remote, local) + logging.warning(_("RepoType {type} is deprecated, please switch to git.").format(type=vcstype)) if vcstype == 'git-svn': return vcs_gitsvn(remote, local) if vcstype == 'hg': @@ -221,165 +1397,323 @@ def getvcs(vcstype, remote, local): if vcstype == 'bzr': return vcs_bzr(remote, local) if vcstype == 'srclib': - if local != 'build/srclib/' + remote: + if str(local) != os.path.join('build', 'srclib', str(remote)): raise VCSException("Error: srclib paths are hard-coded!") - return getsrclib(remote, 'build/srclib', raw=True) + return getsrclib(remote, os.path.join('build', 'srclib'), raw=True) + if vcstype == 'svn': + raise VCSException("Deprecated vcs type 'svn' - please use 'git-svn' instead") raise VCSException("Invalid vcs type " + vcstype) + def getsrclibvcs(name): - srclib_path = os.path.join('srclibs', name + ".txt") - if not os.path.exists(srclib_path): + if name not in fdroidserver.metadata.srclibs: raise VCSException("Missing srclib " + name) - return metadata.parse_srclib(srclib_path)['Repo Type'] + return fdroidserver.metadata.srclibs[name]['RepoType'] + class vcs: + def __init__(self, remote, local): # svn, git-svn and bzr may require auth self.username = None - if self.repotype() in ('svn', 'git-svn', 'bzr'): + if self.repotype() in ('git-svn', 'bzr'): if '@' in remote: + if self.repotype == 'git-svn': + raise VCSException("Authentication is not supported for git-svn") self.username, remote = remote.split('@') if ':' not in self.username: - raise VCSException("Password required with username") + raise VCSException(_("Password required with username")) self.username, self.password = self.username.split(':') self.remote = remote self.local = local + self.clone_failed = False self.refreshed = False self.srclib = None - # Take the local repository to a clean version of the given revision, which - # is specificed in the VCS's native format. Beforehand, the repository can - # be dirty, or even non-existent. If the repository does already exist - # locally, it will be updated from the origin, but only once in the - # lifetime of the vcs object. - # None is acceptable for 'rev' if you know you are cloning a clean copy of - # the repo - otherwise it must specify a valid revision. - def gotorevision(self, rev): + def _gettags(self): + raise NotImplementedError + + def repotype(self): + return None + + def clientversion(self): + versionstr = FDroidPopen(self.clientversioncmd()).output + return versionstr[0:versionstr.find('\n')] + + def clientversioncmd(self): + return None + + def gotorevision(self, rev, refresh=True): + """Take the local repository to a clean version of the given revision. + + Take the local repository to a clean version of the given + revision, which is specificed in the VCS's native + format. Beforehand, the repository can be dirty, or even + non-existent. If the repository does already exist locally, it + will be updated from the origin, but only once in the lifetime + of the vcs object. None is acceptable for 'rev' if you know + you are cloning a clean copy of the repo - otherwise it must + specify a valid revision. 
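For illustration, a self-contained sketch of the .fdroidvcs bookkeeping that gotorevision() performs just below, deciding whether an existing checkout must be discarded because the VCS type or remote changed; the paths and remote URL are made up:

    import os

    def needs_fresh_clone(local, repotype, remote):
        """Return True if the existing checkout should be deleted and re-cloned."""
        marker = os.path.normpath(
            os.path.join(local, '..', '.fdroidvcs-' + os.path.basename(local))
        )
        if not os.path.exists(local):
            return False  # nothing checked out yet, just clone
        if not os.path.exists(marker):
            return True  # repository details unknown, start over
        with open(marker) as f:
            return f.read().strip() != '%s %s' % (repotype, remote)

    print(needs_fresh_clone('build/org.example.app', 'git', 'https://example.com/app.git'))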
+ """ + if self.clone_failed: + raise VCSException(_("Downloading the repository already failed once, not trying again.")) # The .fdroidvcs-id file for a repo tells us what VCS type # and remote that directory was created from, allowing us to drop it # automatically if either of those things changes. fdpath = os.path.join(self.local, '..', - '.fdroidvcs-' + os.path.basename(self.local)) + '.fdroidvcs-' + os.path.basename(self.local)) + fdpath = os.path.normpath(fdpath) cdata = self.repotype() + ' ' + self.remote writeback = True deleterepo = False if os.path.exists(self.local): if os.path.exists(fdpath): with open(fdpath, 'r') as f: - fsdata = f.read() + fsdata = f.read().strip() if fsdata == cdata: writeback = False else: deleterepo = True - print "*** Repository details changed - deleting ***" + logging.info("Repository details for %s changed - deleting" % ( + self.local)) else: deleterepo = True - print "*** Repository details missing - deleting ***" + logging.info("Repository details for %s missing - deleting" % ( + self.local)) if deleterepo: shutil.rmtree(self.local) - self.gotorevisionx(rev) + exc = None + if not refresh: + self.refreshed = True + + try: + self.gotorevisionx(rev) + except FDroidException as e: + exc = e # If necessary, write the .fdroidvcs file. - if writeback: - with open(fdpath, 'w') as f: + if writeback and not self.clone_failed: + os.makedirs(os.path.dirname(fdpath), exist_ok=True) + with open(fdpath, 'w+') as f: f.write(cdata) - # Derived classes need to implement this. It's called once basic checking - # has been performend. - def gotorevisionx(self, rev): + if exc is not None: + raise exc + + def gotorevisionx(self, rev): # pylint: disable=unused-argument + """No summary. + + Derived classes need to implement this. + + It's called once basic checking has been performed. + """ raise VCSException("This VCS type doesn't define gotorevisionx") # Initialise and update submodules def initsubmodules(self): raise VCSException('Submodules not supported for this vcs type') + # Deinitialise and update submodules + def deinitsubmodules(self): + pass + # Get a list of all known tags def gettags(self): - raise VCSException('gettags not supported for this vcs type') + if not self._gettags: + raise VCSException('gettags not supported for this vcs type') + rtags = [] + for tag in self._gettags(): + if re.match('[-A-Za-z0-9_. /]+$', tag): + rtags.append(tag) + return rtags - # Get current commit reference (hash, revision, etc) - def getref(self): + def latesttags(self): + """Get a list of all the known tags, sorted from newest to oldest.""" + raise VCSException('latesttags not supported for this vcs type') + + def getref(self, revname=None): + """Get current commit reference (hash, revision, etc).""" raise VCSException('getref not supported for this vcs type') - # Returns the srclib (name, path) used in setting up the current - # revision, or None. def getsrclib(self): + """Return the srclib (name, path) used in setting up the current revision, or None.""" return self.srclib + class vcs_git(vcs): def repotype(self): return 'git' - # If the local directory exists, but is somehow not a git repository, git - # will traverse up the directory tree until it finds one that is (i.e. - # fdroidserver) and then we'll proceed to destroy it! This is called as - # a safety check. + def clientversioncmd(self): + return ['git', '--version'] + + def git(self, args, envs=dict(), cwd=None, output=True): + """Prevent git fetch/clone/submodule from hanging at the username/password prompt. 
+ + While fetch/pull/clone respect the command line option flags, + it seems that submodule commands do not. They do seem to + follow whatever is in env vars, if the version of git is new + enough. So we just throw the kitchen sink at it to see what + sticks. + + Also, because of CVE-2017-1000117, block all SSH URLs. + """ + # + # supported in git >= 2.3 + git_config = [ + '-c', 'core.askpass=/bin/true', + '-c', 'core.sshCommand=/bin/false', + '-c', 'url.https://.insteadOf=ssh://', + ] + for domain in ('bitbucket.org', 'github.com', 'gitlab.com', 'codeberg.org'): + git_config.append('-c') + git_config.append('url.https://u:p@' + domain + '/.insteadOf=git@' + domain + ':') + git_config.append('-c') + git_config.append('url.https://u:p@' + domain + '.insteadOf=git://' + domain) + git_config.append('-c') + git_config.append('url.https://u:p@' + domain + '.insteadOf=https://' + domain) + envs.update({ + 'GIT_TERMINAL_PROMPT': '0', + 'GIT_ASKPASS': '/bin/true', + 'SSH_ASKPASS': '/bin/true', + 'GIT_SSH': '/bin/false', # for git < 2.3 + }) + return FDroidPopen(['git', ] + git_config + args, + envs=envs, cwd=cwd, output=output) + def checkrepo(self): - p = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'], - stdout=subprocess.PIPE, cwd=self.local) - result = p.communicate()[0].rstrip() - if not result.endswith(self.local): - raise VCSException('Repository mismatch') + """No summary. + + If the local directory exists, but is somehow not a git repository, + git will traverse up the directory tree until it finds one + that is (i.e. fdroidserver) and then we'll proceed to destroy + it! This is called as a safety check. + + """ + cmd = ['git', 'rev-parse', '--show-toplevel'] + p = FDroidPopen(cmd, cwd=self.local, output=False) + result = p.output.rstrip() + if p.returncode > 0: + raise VCSException( + f"`{' '.join(cmd)}` failed, (in '{os.path.abspath(self.local)}') {result}" + ) + if Path(result) != Path(self.local).resolve(): + raise VCSException(f"Repository mismatch ('{self.local}' != '{result}')") def gotorevisionx(self, rev): if not os.path.exists(self.local): - # Brand new checkout... - if subprocess.call(['git', 'clone', self.remote, self.local]) != 0: - raise VCSException("Git clone failed") + # Brand new checkout + p = self.git(['clone', '--', self.remote, str(self.local)]) + if p.returncode != 0: + self.clone_failed = True + raise VCSException("Git clone failed", p.output) self.checkrepo() else: self.checkrepo() - # Discard any working tree changes... - if subprocess.call(['git', 'reset', '--hard'], cwd=self.local) != 0: - raise VCSException("Git reset failed") + # Discard any working tree changes + p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive', + 'git', 'reset', '--hard'], cwd=self.local, output=False) + if p.returncode != 0: + logging.debug("Git submodule reset failed (ignored) {output}".format(output=p.output)) + p = FDroidPopen(['git', 'reset', '--hard'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git reset failed"), p.output) # Remove untracked files now, in case they're tracked in the target - # revision (it happens!)... - if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0: - raise VCSException("Git clean failed") + # revision (it happens!) 
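            # Note on the flags: 'git clean -dffx' removes untracked
            # directories (-d) and ignored files (-x); the second -f also
            # forces removal of untracked nested git repositories.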
+ p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive', + 'git', 'clean', '-dffx'], cwd=self.local, output=False) + if p.returncode != 0: + logging.debug("Git submodule cleanup failed (ignored) {output}".format(output=p.output)) + p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git clean failed"), p.output) if not self.refreshed: - # Get latest commits and tags from remote... - if subprocess.call(['git', 'fetch', 'origin'], - cwd=self.local) != 0: - raise VCSException("Git fetch failed") - if subprocess.call(['git', 'fetch', '--tags', 'origin'], - cwd=self.local) != 0: - raise VCSException("Git fetch failed") + # Get latest commits and tags from remote + p = self.git(['fetch', '--prune', '--prune-tags', '--force', 'origin'], cwd=self.local) + if p.returncode != 0: + raise VCSException(_("Git fetch failed"), p.output) + p = self.git(['fetch', '--prune', '--tags', '--force', 'origin'], output=False, cwd=self.local) + if p.returncode != 0: + raise VCSException(_("Git fetch failed"), p.output) + # Recreate origin/HEAD as git clone would do it, in case it disappeared + p = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--auto'], cwd=self.local, output=False) + if p.returncode != 0: + lines = p.output.splitlines() + if 'Multiple remote HEAD branches' not in lines[0]: + logging.warning(_("Git remote set-head failed: \"%s\"") % p.output.strip()) + else: + branch = lines[1].split(' ')[-1] + p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--', branch], + cwd=self.local, output=False) + if p2.returncode != 0: + logging.warning(_("Git remote set-head failed: \"%s\"") + % p.output.strip() + '\n' + p2.output.strip()) self.refreshed = True - # Check out the appropriate revision... - rev = str(rev if rev else 'origin/master') - if subprocess.call(['git', 'checkout', '-f', rev], cwd=self.local) != 0: - raise VCSException("Git checkout failed") - # Get rid of any uncontrolled files left behind... - if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0: - raise VCSException("Git clean failed") + # origin/HEAD is the HEAD of the remote, e.g. the "default branch" on + # a github repo. Most of the time this is the same as origin/master. 
+ rev = rev or 'origin/HEAD' + p = FDroidPopen(['git', 'checkout', '-f', rev], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git checkout of '%s' failed") % rev, p.output) + # Get rid of any uncontrolled files left behind + p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git clean failed"), p.output) def initsubmodules(self): self.checkrepo() - if subprocess.call(['git', 'submodule', 'init'], - cwd=self.local) != 0: - raise VCSException("Git submodule init failed") - if subprocess.call(['git', 'submodule', 'update'], - cwd=self.local) != 0: - raise VCSException("Git submodule update failed") - if subprocess.call(['git', 'submodule', 'foreach', - 'git', 'reset', '--hard'], - cwd=self.local) != 0: - raise VCSException("Git submodule reset failed") - if subprocess.call(['git', 'submodule', 'foreach', - 'git', 'clean', '-dffx'], - cwd=self.local) != 0: - raise VCSException("Git submodule clean failed") + submfile = os.path.join(self.local, '.gitmodules') + if not os.path.isfile(submfile): + raise NoSubmodulesException(_("No git submodules available")) - def gettags(self): + # fix submodules not accessible without an account and public key auth + with open(submfile, 'r') as f: + lines = f.readlines() + with open(submfile, 'w') as f: + for line in lines: + for domain in ('bitbucket.org', 'github.com', 'gitlab.com'): + line = re.sub('git@' + domain + ':', 'https://u:p@' + domain + '/', line) + f.write(line) + + p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git submodule sync failed"), p.output) + p = self.git(['submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local) + if p.returncode != 0: + raise VCSException(_("Git submodule update failed"), p.output) + + def deinitsubmodules(self): self.checkrepo() - p = subprocess.Popen(['git', 'tag'], - stdout=subprocess.PIPE, cwd=self.local) - return p.communicate()[0].splitlines() + p = FDroidPopen(['git', 'submodule', 'deinit', '--all', '--force'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git submodule deinit failed"), p.output) + + def _gettags(self): + self.checkrepo() + p = FDroidPopen(['git', 'tag'], cwd=self.local, output=False) + return p.output.splitlines() + + def latesttags(self): + """Return a list of latest tags.""" + self.checkrepo() + return [tag.name for tag in sorted( + git.Repo(self.local).tags, + key=lambda t: t.commit.committed_date, + reverse=True + )] + + def getref(self, revname='HEAD'): + self.checkrepo() + repo = git.Repo(self.local) + try: + return repo.commit(revname).hexsha + except git.BadName: + return None class vcs_gitsvn(vcs): @@ -387,191 +1721,210 @@ class vcs_gitsvn(vcs): def repotype(self): return 'git-svn' - # Damn git-svn tries to use a graphical password prompt, so we have to - # trick it into taking the password from stdin - def userargs(self): - if self.username is None: - return ('', '') - return ('echo "%s" | DISPLAY="" ' % self.password, '--username "%s"' % self.username) + def clientversioncmd(self): + return ['git', 'svn', '--version'] - # If the local directory exists, but is somehow not a git repository, git - # will traverse up the directory tree until it finds one that is (i.e. - # fdroidserver) and then we'll proceed to destory it! This is called as - # a safety check. 
def checkrepo(self): - p = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'], - stdout=subprocess.PIPE, cwd=self.local) - result = p.communicate()[0].rstrip() - if not result.endswith(self.local): + """No summary. + + If the local directory exists, but is somehow not a git repository, + git will traverse up the directory tree until it finds one that + is (i.e. fdroidserver) and then we'll proceed to destory it! + This is called as a safety check. + + """ + p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False) + result = p.output.rstrip() + if Path(result) != Path(self.local).resolve(): raise VCSException('Repository mismatch') + def git(self, args, envs=dict(), cwd=None, output=True): + """Prevent git fetch/clone/submodule from hanging at the username/password prompt. + + AskPass is set to /bin/true to let the process try to connect + without a username/password. + + The SSH command is set to /bin/false to block all SSH URLs + (supported in git >= 2.3). This protects against + CVE-2017-1000117. + + """ + git_config = [ + '-c', 'core.askpass=/bin/true', + '-c', 'core.sshCommand=/bin/false', + ] + envs.update({ + 'GIT_TERMINAL_PROMPT': '0', + 'GIT_ASKPASS': '/bin/true', + 'SSH_ASKPASS': '/bin/true', + 'GIT_SSH': '/bin/false', # for git < 2.3 + 'SVN_SSH': '/bin/false', + }) + return FDroidPopen(['git', ] + git_config + args, + envs=envs, cwd=cwd, output=output) + def gotorevisionx(self, rev): if not os.path.exists(self.local): - # Brand new checkout... - gitsvn_cmd = '%sgit svn clone %s' % self.userargs() + # Brand new checkout + gitsvn_args = ['svn', 'clone'] + remote = None if ';' in self.remote: remote_split = self.remote.split(';') for i in remote_split[1:]: if i.startswith('trunk='): - gitsvn_cmd += ' -T %s' % i[6:] + gitsvn_args.extend(['-T', i[6:]]) elif i.startswith('tags='): - gitsvn_cmd += ' -t %s' % i[5:] + gitsvn_args.extend(['-t', i[5:]]) elif i.startswith('branches='): - gitsvn_cmd += ' -b %s' % i[9:] - if subprocess.call([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], - shell=True) != 0: - raise VCSException("Git clone failed") + gitsvn_args.extend(['-b', i[9:]]) + remote = remote_split[0] else: - if subprocess.call([gitsvn_cmd + " %s %s" % (self.remote, self.local)], - shell=True) != 0: - raise VCSException("Git clone failed") + remote = self.remote + + if not remote.startswith('https://'): + raise VCSException(_('HTTPS must be used with Subversion URLs!')) + + # git-svn sucks at certificate validation, this throws useful errors: + try: + import requests + r = requests.head(remote, timeout=300) + r.raise_for_status() + except Exception as e: + raise VCSException('SVN certificate pre-validation failed: ' + str(e)) from e + location = r.headers.get('location') + if location and not location.startswith('https://'): + raise VCSException(_('Invalid redirect to non-HTTPS: {before} -> {after} ') + .format(before=remote, after=location)) + + gitsvn_args.extend(['--', remote, str(self.local)]) + p = self.git(gitsvn_args) + if p.returncode != 0: + self.clone_failed = True + raise VCSException(_('git svn clone failed'), p.output) self.checkrepo() else: self.checkrepo() - # Discard any working tree changes... 
- if subprocess.call(['git', 'reset', '--hard'], cwd=self.local) != 0: - raise VCSException("Git reset failed") + # Discard any working tree changes + p = self.git(['reset', '--hard'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Git reset failed", p.output) # Remove untracked files now, in case they're tracked in the target - # revision (it happens!)... - if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0: - raise VCSException("Git clean failed") + # revision (it happens!) + p = self.git(['clean', '-dffx'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Git clean failed", p.output) if not self.refreshed: - # Get new commits and tags from repo... - if subprocess.call(['%sgit svn rebase %s' % self.userargs()], - cwd=self.local, shell=True) != 0: - raise VCSException("Git svn rebase failed") + # Get new commits, branches and tags from repo + p = self.git(['svn', 'fetch'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Git svn fetch failed") + p = self.git(['svn', 'rebase'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Git svn rebase failed", p.output) self.refreshed = True - rev = str(rev if rev else 'master') + rev = rev or 'master' if rev: nospaces_rev = rev.replace(' ', '%20') # Try finding a svn tag - p = subprocess.Popen(['git', 'checkout', 'tags/' + nospaces_rev], - cwd=self.local, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode == 0: - print out - else: + for treeish in ['origin/', '']: + p = self.git(['checkout', treeish + 'tags/' + nospaces_rev], cwd=self.local, output=False) + if p.returncode == 0: + break + if p.returncode != 0: # No tag found, normal svn rev translation # Translate svn rev into git format - p = subprocess.Popen(['git', 'svn', 'find-rev', 'r' + rev], - cwd=self.local, stdout=subprocess.PIPE) - git_rev = p.communicate()[0].rstrip() + rev_split = rev.split('/') + + p = None + for treeish in ['origin/', '']: + if len(rev_split) > 1: + treeish += rev_split[0] + svn_rev = rev_split[1] + + else: + # if no branch is specified, then assume trunk (i.e. 'master' branch): + treeish += 'master' + svn_rev = rev + + svn_rev = svn_rev if svn_rev[0] == 'r' else 'r' + svn_rev + + p = self.git(['svn', 'find-rev', '--before', svn_rev, treeish], cwd=self.local, output=False) + git_rev = p.output.rstrip() + + if p.returncode == 0 and git_rev: + break + if p.returncode != 0 or not git_rev: # Try a plain git checkout as a last resort - p = subprocess.Popen(['git', 'checkout', rev], cwd=self.local, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode == 0: - print out - else: - raise VCSException("No git treeish found and direct git checkout failed") + p = self.git(['checkout', rev], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("No git treeish found and direct git checkout of '%s' failed" % rev, p.output) else: # Check out the git rev equivalent to the svn rev - p = subprocess.Popen(['git', 'checkout', git_rev], cwd=self.local, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode == 0: - print out - else: - raise VCSException("Git svn checkout failed") - # Get rid of any uncontrolled files left behind... 
- if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0: - raise VCSException("Git clean failed") + p = self.git(['checkout', git_rev], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git checkout of '%s' failed") % rev, p.output) - def gettags(self): + # Get rid of any uncontrolled files left behind + p = self.git(['clean', '-dffx'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException(_("Git clean failed"), p.output) + + def _gettags(self): self.checkrepo() - return os.listdir(os.path.join(self.local, '.git/svn/refs/remotes/tags')) + for treeish in ['origin/', '']: + d = os.path.join(self.local, '.git', 'svn', 'refs', 'remotes', treeish, 'tags') + if os.path.isdir(d): + return os.listdir(d) - def getref(self): + def getref(self, revname='HEAD'): self.checkrepo() - p = subprocess.Popen(['git', 'svn', 'find-rev', 'HEAD'], - stdout=subprocess.PIPE, cwd=self.local) - return p.communicate()[0].strip() + p = FDroidPopen(['git', 'svn', 'find-rev', revname], cwd=self.local, output=False) + if p.returncode != 0: + return None + return p.output.strip() -class vcs_svn(vcs): - - def repotype(self): - return 'svn' - - def userargs(self): - if self.username is None: - return ['--non-interactive'] - return ['--username', self.username, - '--password', self.password, - '--non-interactive'] - - def gotorevisionx(self, rev): - if not os.path.exists(self.local): - if subprocess.call(['svn', 'checkout', self.remote, self.local] + - self.userargs()) != 0: - raise VCSException("Svn checkout failed") - else: - for svncommand in ( - 'svn revert -R .', - r"svn status | awk '/\?/ {print $2}' | xargs rm -rf"): - if subprocess.call(svncommand, cwd=self.local, shell=True) != 0: - raise VCSException("Svn reset ({0}) failed in {1}".format(svncommand, self.local)) - if not self.refreshed: - if subprocess.call(['svn', 'update'] + - self.userargs(), cwd=self.local) != 0: - raise VCSException("Svn update failed") - self.refreshed = True - - revargs = list(['-r', rev] if rev else []) - if subprocess.call(['svn', 'update', '--force'] + revargs + - self.userargs(), cwd=self.local) != 0: - raise VCSException("Svn update failed") - - def getref(self): - p = subprocess.Popen(['svn', 'info'], - stdout=subprocess.PIPE, cwd=self.local) - for line in p.communicate()[0].splitlines(): - if line and line.startswith('Last Changed Rev: '): - return line[18:] class vcs_hg(vcs): def repotype(self): return 'hg' + def clientversioncmd(self): + return ['hg', '--version'] + def gotorevisionx(self, rev): if not os.path.exists(self.local): - if subprocess.call(['hg', 'clone', self.remote, self.local]) !=0: - raise VCSException("Hg clone failed") + p = FDroidPopen(['hg', 'clone', '--ssh', '/bin/false', '--', self.remote, str(self.local)], + output=False) + if p.returncode != 0: + self.clone_failed = True + raise VCSException("Hg clone failed", p.output) else: - if subprocess.call('hg status -uS | xargs rm -rf', - cwd=self.local, shell=True) != 0: - raise VCSException("Hg clean failed") + p = FDroidPopen(['hg', 'status', '-uiS'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Hg status failed", p.output) + for line in p.output.splitlines(): + if not line.startswith('? 
') and not line.startswith('I '): + raise VCSException("Unexpected output from hg status -uS: " + line) + FDroidPopen(['rm', '-rf', '--', line[2:]], cwd=self.local, output=False) if not self.refreshed: - if subprocess.call(['hg', 'pull'], - cwd=self.local) != 0: - raise VCSException("Hg pull failed") + p = FDroidPopen(['hg', 'pull', '--ssh', '/bin/false'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Hg pull failed", p.output) self.refreshed = True - rev = str(rev if rev else 'default') + rev = rev or 'default' if not rev: return - if subprocess.call(['hg', 'update', '-C', rev], - cwd=self.local) != 0: - raise VCSException("Hg checkout failed") - p = subprocess.Popen(['hg', 'purge', '--all'], stdout=subprocess.PIPE, - cwd=self.local) - result = p.communicate()[0] - # Also delete untracked files, we have to enable purge extension for that: - if "'purge' is provided by the following extension" in result: - with open(self.local+"/.hg/hgrc", "a") as myfile: - myfile.write("\n[extensions]\nhgext.purge=") - if subprocess.call(['hg', 'purge', '--all'], - cwd=self.local) != 0: - raise VCSException("HG purge failed") - else: - raise VCSException("HG purge failed") + p = FDroidPopen(['hg', 'update', '-C', '--', rev], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Hg checkout of '%s' failed" % rev, p.output) - def gettags(self): - p = subprocess.Popen(['hg', 'tags', '-q'], - stdout=subprocess.PIPE, cwd=self.local) - return p.communicate()[0].splitlines()[1:] + def _gettags(self): + p = FDroidPopen(['hg', 'tags', '-q'], cwd=self.local, output=False) + return p.output.splitlines()[1:] class vcs_bzr(vcs): @@ -579,243 +1932,493 @@ class vcs_bzr(vcs): def repotype(self): return 'bzr' + def clientversioncmd(self): + return ['bzr', '--version'] + + def bzr(self, args, envs=dict(), cwd=None, output=True): + """Prevent bzr from ever using SSH to avoid security vulns.""" + envs.update({ + 'BZR_SSH': 'false', + }) + return FDroidPopen(['bzr', ] + args, envs=envs, cwd=cwd, output=output) + def gotorevisionx(self, rev): if not os.path.exists(self.local): - if subprocess.call(['bzr', 'branch', self.remote, self.local]) != 0: - raise VCSException("Bzr branch failed") + p = self.bzr(['branch', self.remote, str(self.local)], output=False) + if p.returncode != 0: + self.clone_failed = True + raise VCSException("Bzr branch failed", p.output) else: - if subprocess.call(['bzr', 'clean-tree', '--force', - '--unknown', '--ignored'], cwd=self.local) != 0: - raise VCSException("Bzr revert failed") + p = self.bzr(['clean-tree', '--force', '--unknown', '--ignored'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Bzr revert failed", p.output) if not self.refreshed: - if subprocess.call(['bzr', 'pull'], - cwd=self.local) != 0: - raise VCSException("Bzr update failed") + p = self.bzr(['pull'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Bzr update failed", p.output) self.refreshed = True revargs = list(['-r', rev] if rev else []) - if subprocess.call(['bzr', 'revert'] + revargs, - cwd=self.local) != 0: - raise VCSException("Bzr revert failed") + p = self.bzr(['revert'] + revargs, cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("Bzr revert of '%s' failed" % rev, p.output) - def gettags(self): - p = subprocess.Popen(['bzr', 'tags'], - stdout=subprocess.PIPE, cwd=self.local) + def _gettags(self): + p = self.bzr(['tags'], cwd=self.local, output=False) return [tag.split(' ')[0].strip() for 
tag in - p.communicate()[0].splitlines()] + p.output.splitlines()] + + +def unescape_string(string): + if len(string) < 2: + return string + if string[0] == '"' and string[-1] == '"': + return string[1:-1] + + return string.replace("\\'", "'") + + +def retrieve_string(app_dir, string, xmlfiles=None): -def retrieve_string(xml_dir, string): if string.startswith('@string/'): - string_search = re.compile(r'.*"'+string[8:]+'".*?>([^<]+?)<.*').search - for xmlfile in glob.glob(os.path.join(xml_dir, '*.xml')): - for line in file(xmlfile): - matches = string_search(line) - if matches: - return retrieve_string(xml_dir, matches.group(1)) - elif string.startswith('&') and string.endswith(';'): - string_search = re.compile(r'.*').search - for xmlfile in glob.glob(os.path.join(xml_dir, '*.xml')): - for line in file(xmlfile): - matches = string_search(line) - if matches: - return retrieve_string(xml_dir, matches.group(1)) + name = string[len('@string/'):] + elif string.startswith('${'): + return '' # Gradle variable + else: + return unescape_string(string) - return string.replace("\\'","'") + if xmlfiles is None: + xmlfiles = [] + for res_dir in [ + os.path.join(app_dir, 'res'), + os.path.join(app_dir, 'src', 'main', 'res'), + ]: + for root, dirs, files in os.walk(res_dir): + if os.path.basename(root) == 'values': + xmlfiles += [os.path.join(root, x) for x in files if x.endswith('.xml')] -# Return list of existing files that will be used to find the highest vercode -def manifest_paths(app_dir, flavour): + def element_content(element): + if element.text is None: + return "" + s = XMLElementTree.tostring(element, encoding='utf-8', method='text') + return s.decode('utf-8').strip() - possible_manifests = [ os.path.join(app_dir, 'AndroidManifest.xml'), - os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'), - os.path.join(app_dir, 'build.gradle') ] - - if flavour: - possible_manifests.append( - os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml')) - - return [path for path in possible_manifests if os.path.isfile(path)] - -# Retrieve the package name -def fetch_real_name(app_dir, flavour): - app_search = re.compile(r'.* 0 and 'gradle' in app['Builds'][-1] and app['Builds'][-1].gradle: + flavors = app['Builds'][-1].gradle + + if path.suffix == '.gradle' or path.name.endswith('.gradle.kts'): + with open(path, 'r', encoding='utf-8') as f: + android_plugin_file = False + inside_flavor_group = 0 + inside_required_flavor = 0 + for line in f: + if gradle_comment.match(line): + continue + + if "applicationId" in line and not temp_app_id: + matches = psearch_g(line) + if matches: + temp_app_id = matches.group(2) + + if "versionName" in line and not temp_version_name: + matches = vnsearch(line) + if matches: + temp_version_name = matches + + if inside_flavor_group > 0: + if inside_required_flavor > 1: + matches = psearch_g(line) + if matches: + s = matches.group(2) + if app_matches_packagename(app, s): + package = s + else: + # If build.gradle contains applicationIdSuffix add it to the end of package name + matches = fsearch_g(line) + if matches and temp_app_id: + suffix = matches.group(2) + temp_app_id = temp_app_id + suffix + if app_matches_packagename(app, temp_app_id): + package = temp_app_id + + matches = vnsearch(line) + if matches: + version = matches + + else: + # If build.gradle contains applicationNameSuffix add it to the end of versionName + matches = vnssearch_g(line) + if matches and temp_version_name: + name_suffix = matches.group(2) + version = temp_version_name + name_suffix + + matches = 
vcsearch_g(line) + if matches: + vercode = version_code_string_to_int(matches.group(1)) + + if inside_required_flavor > 0: + if '{' in line: + inside_required_flavor += 1 + if '}' in line: + inside_required_flavor -= 1 + if inside_required_flavor == 1: + inside_required_flavor -= 1 + elif flavors: + for flavor in flavors: + if re.match(r'.*[\'"\s]{flavor}[\'"\s].*\{{.*'.format(flavor=flavor), line): + inside_required_flavor = 2 + break + if re.match(r'.*[\'"\s]{flavor}[\'"\s].*'.format(flavor=flavor), line): + inside_required_flavor = 1 + break + + if '{' in line: + inside_flavor_group += 1 + if '}' in line: + inside_flavor_group -= 1 + else: + if "productFlavors" in line: + inside_flavor_group = 1 + if not package: + matches = psearch_g(line) + if matches: + s = matches.group(2) + if app_matches_packagename(app, s): + package = s + if not version: + matches = vnsearch(line) + if matches: + version = matches + if not vercode: + matches = vcsearch_g(line) + if matches: + vercode = version_code_string_to_int(matches.group(1)) + if not android_plugin_file and ANDROID_PLUGIN_REGEX.match(line): + android_plugin_file = True + if android_plugin_file: + if package: + max_package = package + if version: + max_version = version + if vercode: + max_vercode = vercode + if max_package and max_version and max_vercode: + break + else: + try: + xml = parse_xml(path) + except (XMLElementTree.ParseError, ValueError): + logging.warning(_("Problem with xml at '{path}'").format(path=path)) + continue + if "package" in xml.attrib: + s = xml.attrib["package"] + if app_matches_packagename(app, s): + package = s + if XMLNS_ANDROID + "versionName" in xml.attrib: + version = xml.attrib[XMLNS_ANDROID + "versionName"] + base_dir = os.path.dirname(path) + version = retrieve_string_singleline(base_dir, version) + if XMLNS_ANDROID + "versionCode" in xml.attrib: + vercode = version_code_string_to_int( + xml.attrib[XMLNS_ANDROID + "versionCode"]) + # Remember package name, may be defined separately from version+vercode - package = max_package + if package is None: + package = max_package - for line in file(path): - if not package: - if gradle: - matches = psearch_g(line) - else: - matches = psearch(line) - if matches: - package = matches.group(1) - if not version: - if gradle: - matches = vnsearch_g(line) - else: - matches = vnsearch(line) - if matches: - version = matches.group(2 if gradle else 1) - if not vercode: - if gradle: - matches = vcsearch_g(line) - else: - matches = vcsearch(line) - if matches: - vercode = matches.group(1) + logging.debug("..got package={0}, version={1}, vercode={2}" + .format(package, version, vercode)) - # Better some package name than nothing - if max_package is None: + # Always grab the package name and versionName in case they are not + # together with the highest versionCode + if max_package is None and package is not None: max_package = package - - if max_vercode is None or (vercode is not None and vercode > max_vercode): + if max_version is None and version is not None: max_version = version - max_vercode = vercode - max_package = package + + if vercode is not None \ + and (max_vercode is None or vercode > max_vercode): + if version and (not ignoresearch or not ignoresearch(version)): + if version is not None: + max_version = version + if vercode is not None: + max_vercode = vercode + if package is not None: + max_package = package + else: + max_version = "Ignore" if max_version is None: max_version = "Unknown" + if max_package: + msg = _("Invalid application ID 
{appid}").format(appid=max_package) + if not is_valid_package_name(max_package): + raise FDroidException(msg) + elif not is_strict_application_id(max_package): + logging.warning(msg) + return (max_version, max_vercode, max_package) -class BuildException(Exception): - def __init__(self, value, stdout = None, stderr = None): - self.value = value - self.stdout = stdout - self.stderr = stderr - def get_wikitext(self): - ret = repr(self.value) + "\n" - if self.stdout: - ret += "=stdout=\n" - ret += "
<pre>\n"
-            ret += str(self.stdout)
-            ret += "</pre>\n"
-        if self.stderr:
-            ret += "=stderr=\n"
-            ret += "<pre>\n"
-            ret += str(self.stderr)
-            ret += "</pre>
\n" - return ret +def is_valid_package_name(name): + """Check whether name is a valid fdroid package name. - def __str__(self): - ret = repr(self.value) - if self.stdout: - ret += "\n==== stdout begin ====\n%s\n==== stdout end ====" % self.stdout.strip() - if self.stderr: - ret += "\n==== stderr begin ====\n%s\n==== stderr end ====" % self.stderr.strip() - return ret + APKs and manually defined package names must use a valid Java + Package Name. Automatically generated package names for non-APK + files use the SHA-256 sum. -class VCSException(Exception): - def __init__(self, value): - self.value = value + """ + return VALID_APPLICATION_ID_REGEX.match(name) is not None \ + or FDROID_PACKAGE_NAME_REGEX.match(name) is not None - def __str__(self): - return repr(self.value) -# Get the specified source library. -# Returns the path to it. Normally this is the path to be used when referencing -# it, which may be a subdirectory of the actual project. If you want the base -# directory of the project, pass 'basepath=True'. -def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None, target=None, - basepath=False, raw=False, prepare=True, preponly=False): +def is_strict_application_id(name): + """Check whether name is a valid Android Application ID. + The Android ApplicationID is basically a Java Package Name, but + with more restrictive naming rules: + + * It must have at least two segments (one or more dots). + * Each segment must start with a letter. + * All characters must be alphanumeric or an underscore [a-zA-Z0-9_]. + + References + ---------- + https://developer.android.com/studio/build/application-id + + """ + return STRICT_APPLICATION_ID_REGEX.match(name) is not None \ + and '.' in name + + +def parse_srclib_spec(spec): + + if type(spec) != str: + raise MetaDataException(_("can not parse scrlib spec " + "(not a string): '{}'") + .format(spec)) + + tokens = spec.split('@', 1) + if not tokens[0]: + raise MetaDataException( + _("could not parse srclib spec (no name specified): '{}'").format(spec) + ) + if len(tokens) < 2 or not tokens[1]: + raise MetaDataException( + _("could not parse srclib spec (no ref specified): '{}'").format(spec) + ) + + name = tokens[0] + ref = tokens[1] number = None subdir = None + + if ':' in name: + number, name = name.split(':', 1) + if '/' in name: + name, subdir = name.split('/', 1) + + return (name, ref, number, subdir) + + +def getsrclib(spec, srclib_dir, basepath=False, + raw=False, prepare=True, preponly=False, refresh=True, + build=None): + """Get the specified source library. + + Return the path to it. Normally this is the path to be used when + referencing it, which may be a subdirectory of the actual project. If + you want the base directory of the project, pass 'basepath=True'. + + spec and srclib_dir are both strings, not pathlib.Path. 
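    Illustrative sketch (hypothetical srclib name, not from the original
    metadata): the raw spec format is '[number:]name[/subdir]@ref', which
    parse_srclib_spec() splits apart:

        parse_srclib_spec('1:MySrclib/library@v1.2')
        # -> ('MySrclib', 'v1.2', '1', 'library')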
+ """ + number = None + subdir = None + if not isinstance(spec, str): + spec = str(spec) + if not isinstance(srclib_dir, str): + spec = str(srclib_dir) if raw: name = spec ref = None else: - name, ref = spec.split('@') - if ':' in name: - number, name = name.split(':', 1) - if '/' in name: - name, subdir = name.split('/',1) + name, ref, number, subdir = parse_srclib_spec(spec) - srclib_path = os.path.join('srclibs', name + ".txt") + if name not in fdroidserver.metadata.srclibs: + raise VCSException('srclib ' + name + ' not found.') - if not os.path.exists(srclib_path): - raise BuildException('srclib ' + name + ' not found.') - - srclib = metadata.parse_srclib(srclib_path) + srclib = fdroidserver.metadata.srclibs[name] sdir = os.path.join(srclib_dir, name) if not preponly: - vcs = getvcs(srclib["Repo Type"], srclib["Repo"], sdir) + vcs = getvcs(srclib["RepoType"], srclib["Repo"], sdir) vcs.srclib = (name, number, sdir) if ref: - vcs.gotorevision(ref) + vcs.gotorevision(ref, refresh) if raw: return vcs @@ -833,46 +2436,18 @@ def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None, target=None, if libdir is None: libdir = sdir - if srclib["Srclibs"]: - n=1 - for lib in srclib["Srclibs"].split(','): - s_tuple = None - for t in srclibpaths: - if t[0] == lib: - s_tuple = t - break - if s_tuple is None: - raise BuildException('Missing recursive srclib %s for %s' % ( - lib, name)) - place_srclib(libdir, n, s_tuple[2]) - n+=1 + remove_signing_keys(sdir) + remove_debuggable_flags(sdir) if prepare: if srclib["Prepare"]: - cmd = replace_config_vars(srclib["Prepare"]) + cmd = replace_config_vars("; ".join(srclib["Prepare"]), build) - p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=libdir) + p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=libdir) if p.returncode != 0: raise BuildException("Error running prepare command for srclib %s" - % name, p.stdout, p.stderr) - - if srclib["Update Project"] == "Yes": - print "Updating srclib %s at path %s" % (name, libdir) - cmd = [os.path.join(config['sdk_path'], 'tools', 'android'), - 'update', 'project', '-p', libdir] - if target: - cmd += ['-t', target] - p = FDroidPopen(cmd) - # Check to see whether an error was returned without a proper exit - # code (this is the case for the 'no target set or target invalid' - # error) - if p.returncode != 0 or (p.stderr != "" and - p.stderr.startswith("Error: ")): - raise BuildException("Failed to update srclib project {0}" - .format(name), p.stdout, p.stderr) - - remove_signing_keys(libdir) + % name, p.output) if basepath: libdir = sdir @@ -880,281 +2455,86 @@ def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None, target=None, return (name, number, libdir) -# Prepare the source code for a particular build -# 'vcs' - the appropriate vcs object for the application -# 'app' - the application details from the metadata -# 'build' - the build details from the metadata -# 'build_dir' - the path to the build directory, usually -# 'build/app.id' -# 'srclib_dir' - the path to the source libraries directory, usually -# 'build/srclib' -# 'extlib_dir' - the path to the external libraries directory, usually -# 'build/extlib' -# Returns the (root, srclibpaths) where: -# 'root' is the root directory, which may be the same as 'build_dir' or may -# be a subdirectory of it. 
-# 'srclibpaths' is information on the srclibs being used -def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False): +gradle_version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\.]+\.[^\.]+).*'.*") - # Optionally, the actual app source can be in a subdirectory... - if 'subdir' in build: - root_dir = os.path.join(build_dir, build['subdir']) + +def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True): + """Prepare the source code for a particular build. + + Parameters + ---------- + vcs + the appropriate vcs object for the application + app + the application details from the metadata + build + the build details from the metadata + build_dir + the path to the build directory, usually 'build/app.id' + srclib_dir + the path to the source libraries directory, usually 'build/srclib' + extlib_dir + the path to the external libraries directory, usually 'build/extlib' + + Returns + ------- + root + is the root directory, which may be the same as 'build_dir' or may + be a subdirectory of it. + srclibpaths + is information on the srclibs being used + """ + # Optionally, the actual app source can be in a subdirectory + if build.subdir: + root_dir = os.path.join(build_dir, build.subdir) else: root_dir = build_dir - # Get a working copy of the right revision... - print "Getting source for revision " + build['commit'] - vcs.gotorevision(build['commit']) + # Get a working copy of the right revision + logging.info("Getting source for revision " + build.commit) + vcs.gotorevision(build.commit, refresh) + + # Initialise submodules if required + if build.submodules: + logging.info(_("Initialising submodules")) + vcs.initsubmodules() + else: + vcs.deinitsubmodules() # Check that a subdir (if we're using one) exists. This has to happen - # after the checkout, since it might not exist elsewhere... + # after the checkout, since it might not exist elsewhere if not os.path.exists(root_dir): raise BuildException('Missing subdir ' + root_dir) - # Initialise submodules if requred... - if build['submodules']: - if options.verbose: - print "Initialising submodules..." - vcs.initsubmodules() + # Run an init command if one is required + if build.init: + cmd = replace_config_vars("; ".join(build.init), build) + logging.info("Running 'init' commands in %s" % root_dir) - # Run an init command if one is required... - if 'init' in build: - cmd = replace_config_vars(build['init']) - if options.verbose: - print "Running 'init' commands in %s" % root_dir - - p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running init command for %s:%s" % - (app['id'], build['version']), p.stdout, p.stderr) - - # Generate (or update) the ant build file, build.xml... - updatemode = build.get('update', 'auto') - if (updatemode != 'no' and build['type'] == 'ant'): - parms = [os.path.join(config['sdk_path'], 'tools', 'android'), - 'update', 'project'] - if 'target' in build and build['target']: - parms += ['-t', build['target']] - update_dirs = None - if updatemode == 'auto': - update_dirs = ['.'] + ant_subprojects(root_dir) - else: - update_dirs = [d.strip() for d in updatemode.split(';')] - # Force build.xml update if necessary... 
- if updatemode == 'force' or 'target' in build: - if updatemode == 'force': - update_dirs = ['.'] - buildxml = os.path.join(root_dir, 'build.xml') - if os.path.exists(buildxml): - print 'Force-removing old build.xml' - os.remove(buildxml) - - for d in update_dirs: - subdir = os.path.join(root_dir, d) - # Clean update dirs via ant - p = FDroidPopen(['ant', 'clean'], cwd=subdir) - dparms = parms + ['-p', d] - if options.verbose: - if d == '.': - print "Updating main project..." - else: - print "Updating subproject %s..." % d - p = FDroidPopen(dparms, cwd=root_dir) - # Check to see whether an error was returned without a proper exit - # code (this is the case for the 'no target set or target invalid' - # error) - if p.returncode != 0 or (p.stderr != "" and - p.stderr.startswith("Error: ")): - raise BuildException("Failed to update project at %s" % d, - p.stdout, p.stderr) - - # Update the local.properties file... - localprops = [ os.path.join(build_dir, 'local.properties') ] - if 'subdir' in build: - localprops += [ os.path.join(root_dir, 'local.properties') ] - for path in localprops: - if not os.path.isfile(path): - continue - if options.verbose: - print "Updating properties file at %s" % path - f = open(path, 'r') - props = f.read() - f.close() - props += '\n' - # Fix old-fashioned 'sdk-location' by copying - # from sdk.dir, if necessary... - if build['oldsdkloc']: - sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props, - re.S|re.M).group(1) - props += "sdk-location=%s\n" % sdkloc - else: - props += "sdk.dir=%s\n" % config['sdk_path'] - props += "sdk-location=%s\n" % ['sdk_path'] - # Add ndk location... - props += "ndk.dir=%s\n" % config['ndk_path'] - props += "ndk-location=%s\n" % config['ndk_path'] - # Add java.encoding if necessary... - if 'encoding' in build: - props += "java.encoding=%s\n" % build['encoding'] - f = open(path, 'w') - f.write(props) - f.close() - - flavour = None - if build['type'] == 'gradle': - flavour = build['gradle'].split('@')[0] - if flavour in ['main', 'yes', '']: - flavour = None - - # Remove forced debuggable flags - print "Removing debuggable flags..." - for path in manifest_paths(root_dir, flavour): - if not os.path.isfile(path): - continue - if subprocess.call(['sed','-i', - 's/android:debuggable="[^"]*"//g', path]) != 0: - raise BuildException("Failed to remove debuggable flags") - - # Insert version code and number into the manifest if necessary... - if build['forceversion']: - print "Changing the version name..." - for path in manifest_paths(root_dir, flavour): - if not os.path.isfile(path): - continue - if has_extension(path, 'xml'): - if subprocess.call(['sed','-i', - 's/android:versionName="[^"]*"/android:versionName="' + build['version'] + '"/g', - path]) != 0: - raise BuildException("Failed to amend manifest") - elif has_extension(path, 'gradle'): - if subprocess.call(['sed','-i', - 's/versionName[ ]*=[ ]*"[^"]*"/versionName = "' + build['version'] + '"/g', - path]) != 0: - raise BuildException("Failed to amend build.gradle") - if build['forcevercode']: - print "Changing the version code..." 
- for path in manifest_paths(root_dir, flavour): - if not os.path.isfile(path): - continue - if has_extension(path, 'xml'): - if subprocess.call(['sed','-i', - 's/android:versionCode="[^"]*"/android:versionCode="' + build['vercode'] + '"/g', - path]) != 0: - raise BuildException("Failed to amend manifest") - elif has_extension(path, 'gradle'): - if subprocess.call(['sed','-i', - 's/versionCode[ ]*=[ ]*[0-9]*/versionCode = ' + build['vercode'] + '/g', - path]) != 0: - raise BuildException("Failed to amend build.gradle") - - # Delete unwanted files... - if 'rm' in build: - for part in build['rm'].split(';'): - dest = os.path.join(build_dir, part.strip()) - rdest = os.path.abspath(dest) - if options.verbose: - print "Removing {0}".format(rdest) - if not rdest.startswith(os.path.abspath(build_dir)): - raise BuildException("rm for {1} is outside build root {0}".format( - os.path.abspath(build_dir),os.path.abspath(dest))) - if rdest == os.path.abspath(build_dir): - raise BuildException("rm removes whole build directory") - if os.path.lexists(rdest): - if os.path.islink(rdest): - subprocess.call('unlink ' + rdest, shell=True) - else: - subprocess.call('rm -rf ' + rdest, shell=True) - else: - if options.verbose: - print "...but it didn't exist" - - # Fix apostrophes translation files if necessary... - if build['fixapos']: - for root, dirs, files in os.walk(os.path.join(root_dir, 'res')): - for filename in files: - if has_extension(filename, 'xml'): - if subprocess.call(['sed','-i','s@' + - r"\([^\\]\)'@\1\\'" + - '@g', - os.path.join(root, filename)]) != 0: - raise BuildException("Failed to amend " + filename) - - # Fix translation files if necessary... - if build['fixtrans']: - for root, dirs, files in os.walk(os.path.join(root_dir, 'res')): - for filename in files: - if has_extension(filename, 'xml'): - f = open(os.path.join(root, filename)) - changed = False - outlines = [] - for line in f: - num = 1 - index = 0 - oldline = line - while True: - index = line.find("%", index) - if index == -1: - break - next = line[index+1:index+2] - if next == "s" or next == "d": - line = (line[:index+1] + - str(num) + "$" + - line[index+1:]) - num += 1 - index += 3 - else: - index += 1 - # We only want to insert the positional arguments - # when there is more than one argument... - if oldline != line: - if num > 2: - changed = True - else: - line = oldline - outlines.append(line) - f.close() - if changed: - f = open(os.path.join(root, filename), 'w') - f.writelines(outlines) - f.close() - - remove_signing_keys(build_dir) - - # Add required external libraries... - if 'extlibs' in build: - print "Collecting prebuilt libraries..." - libsdir = os.path.join(root_dir, 'libs') - if not os.path.exists(libsdir): - os.mkdir(libsdir) - for lib in build['extlibs'].split(';'): - lib = lib.strip() - if options.verbose: - print "...installing extlib {0}".format(lib) - libf = os.path.basename(lib) - libsrc = os.path.join(extlib_dir, lib) - if not os.path.exists(libsrc): - raise BuildException("Missing extlib file {0}".format(libsrc)) - shutil.copyfile(libsrc, os.path.join(libsdir, libf)) - - # Get required source libraries... - srclibpaths = [] - if 'srclibs' in build: - target=build['target'] if 'target' in build else None - print "Collecting source libraries..." 
- for lib in build['srclibs'].split(';'): - srclibpaths.append(getsrclib(lib, srclib_dir, srclibpaths, - target=target, preponly=onserver)) + (app.id, build.versionName), p.output) # Apply patches if any - if 'patch' in build: - for patch in build['patch'].split(';'): + if build.patch: + logging.info("Applying patches") + for patch in build.patch: patch = patch.strip() - print "Applying " + patch - patch_path = os.path.join('metadata', app['id'], patch) - if subprocess.call(['patch', '-p1', - '-i', os.path.abspath(patch_path)], cwd=build_dir) != 0: + logging.info("Applying " + patch) + patch_path = os.path.join('metadata', app.id, patch) + p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir) + if p.returncode != 0: raise BuildException("Failed to apply patch %s" % patch_path) + # Get required source libraries + srclibpaths = [] + if build.srclibs: + logging.info("Collecting source libraries") + for lib in build.srclibs: + srclibpaths.append(getsrclib(lib, srclib_dir, preponly=onserver, + refresh=refresh, build=build)) + for name, number, libpath in srclibpaths: place_srclib(root_dir, int(number) if number else None, libpath) @@ -1163,381 +2543,2417 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= if basesrclib: srclibpaths.append(basesrclib) - # Run a pre-build command if one is required... - if 'prebuild' in build: - cmd = replace_config_vars(build['prebuild']) + # Update the local.properties file + localprops = [os.path.join(build_dir, 'local.properties')] + if build.subdir: + parts = build.subdir.split(os.sep) + cur = build_dir + for d in parts: + cur = os.path.join(cur, d) + localprops += [os.path.join(cur, 'local.properties')] + for path in localprops: + props = "" + if os.path.isfile(path): + logging.info("Updating local.properties file at %s" % path) + with open(path, 'r', encoding='iso-8859-1') as f: + props += f.read() + props += '\n' + else: + logging.info("Creating local.properties file at %s" % path) + # Fix old-fashioned 'sdk-location' by copying + # from sdk.dir, if necessary + if build.oldsdkloc: + sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props, + re.S | re.M).group(1) + props += "sdk-location=%s\n" % sdkloc + else: + props += "sdk.dir=%s\n" % config['sdk_path'] + props += "sdk-location=%s\n" % config['sdk_path'] + ndk_path = build.ndk_path() + # if for any reason the path isn't valid or the directory + # doesn't exist, some versions of Gradle will error with a + # cryptic message (even if the NDK is not even necessary). + # https://gitlab.com/fdroid/fdroidserver/issues/171 + if ndk_path and os.path.exists(ndk_path): + # Add ndk location + props += "ndk.dir=%s\n" % ndk_path + props += "ndk-location=%s\n" % ndk_path + # Add java.encoding if necessary + if build.encoding: + props += "java.encoding=%s\n" % build.encoding + with open(path, 'w', encoding='iso-8859-1') as f: + f.write(props) - # Substitute source library paths into prebuild commands... 
+ flavors = [] + if build.build_method() == 'gradle': + flavors = build.gradle + + if build.target: + n = build.target.split('-')[1] + build_gradle = os.path.join(root_dir, "build.gradle") + build_gradle_kts = build_gradle + ".kts" + if os.path.exists(build_gradle): + gradlefile = build_gradle + elif os.path.exists(build_gradle_kts): + gradlefile = build_gradle_kts + else: + raise BuildException("No gradle file found") + regsub_file(r'compileSdkVersion[ =]+[0-9]+', + r'compileSdkVersion %s' % n, + gradlefile) + + # Remove forced debuggable flags + remove_debuggable_flags(root_dir) + + # Insert versionCode and number into the manifest if necessary + if build.forceversion: + logging.info("Changing the versionName") + for path in manifest_paths(root_dir, flavors): + if not os.path.isfile(path): + continue + if path.suffix == '.xml': + regsub_file(r'android:versionName="[^"]*"', + r'android:versionName="%s"' % build.versionName, + path) + elif path.suffix == '.gradle': + regsub_file(r"""(\s*)versionName[\s'"=]+.*""", + r"""\1versionName '%s'""" % build.versionName, + path) + + if build.forcevercode: + logging.info("Changing the versionCode") + for path in manifest_paths(root_dir, flavors): + if not path.is_file(): + continue + if path.suffix == '.xml': + regsub_file(r'android:versionCode="[^"]*"', + r'android:versionCode="%s"' % build.versionCode, + path) + elif path.suffix == '.gradle': + regsub_file(r'versionCode[ =]+[0-9]+', + r'versionCode %s' % build.versionCode, + path) + + # Delete unwanted files + if build.rm: + logging.info(_("Removing specified files")) + for part in getpaths(build_dir, build.rm): + dest = os.path.join(build_dir, part) + logging.info("Removing {0}".format(part)) + if os.path.lexists(dest): + # rmtree can only handle directories that are not symlinks, so catch anything else + if not os.path.isdir(dest) or os.path.islink(dest): + os.remove(dest) + else: + shutil.rmtree(dest) + else: + logging.info("...but it didn't exist") + + remove_signing_keys(build_dir) + + # Add required external libraries + if build.extlibs: + logging.info("Collecting prebuilt libraries") + libsdir = os.path.join(root_dir, 'libs') + if not os.path.exists(libsdir): + os.mkdir(libsdir) + for lib in build.extlibs: + lib = lib.strip() + logging.info("...installing extlib {0}".format(lib)) + libf = os.path.basename(lib) + libsrc = os.path.join(extlib_dir, lib) + if not os.path.exists(libsrc): + raise BuildException("Missing extlib file {0}".format(libsrc)) + shutil.copyfile(libsrc, os.path.join(libsdir, libf)) + # Add extlibs to scanignore (this is relative to the build dir root, *sigh*) + if build.subdir: + scanignorepath = os.path.join(build.subdir, 'libs', libf) + else: + scanignorepath = os.path.join('libs', libf) + if scanignorepath not in build.scanignore: + build.scanignore.append(scanignorepath) + + # Run a pre-build command if one is required + if build.prebuild: + logging.info("Running 'prebuild' commands in %s" % root_dir) + + cmd = replace_config_vars("; ".join(build.prebuild), build) + + # Substitute source library paths into prebuild commands for name, number, libpath in srclibpaths: - libpath = os.path.relpath(libpath, root_dir) - cmd = cmd.replace('$$' + name + '$$', libpath) + cmd = cmd.replace('$$' + name + '$$', os.path.join(os.getcwd(), libpath)) - if options.verbose: - print "Running 'prebuild' commands in %s" % root_dir - - p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=root_dir) if 
p.returncode != 0: raise BuildException("Error running prebuild command for %s:%s" % - (app['id'], build['version']), p.stdout, p.stderr) + (app.id, build.versionName), p.output) + + # Generate (or update) the ant build file, build.xml... + if build.build_method() == 'ant' and build.androidupdate != ['no']: + parms = ['android', 'update', 'lib-project'] + lparms = ['android', 'update', 'project'] + + if build.target: + parms += ['-t', build.target] + lparms += ['-t', build.target] + if build.androidupdate: + update_dirs = build.androidupdate + else: + update_dirs = ant_subprojects(root_dir) + ['.'] + + for d in update_dirs: + subdir = os.path.join(root_dir, d) + if d == '.': + logging.debug("Updating main project") + cmd = parms + ['-p', d] + else: + logging.debug("Updating subproject %s" % d) + cmd = lparms + ['-p', d] + p = SdkToolsPopen(cmd, cwd=root_dir) + # Check to see whether an error was returned without a proper exit + # code (this is the case for the 'no target set or target invalid' + # error) + if p.returncode != 0 or p.output.startswith("Error: "): + raise BuildException("Failed to update project at %s" % d, p.output) + # Clean update dirs via ant + if d != '.': + logging.info("Cleaning subproject %s" % d) + p = FDroidPopen(['ant', 'clean'], cwd=subdir) return (root_dir, srclibpaths) -# Scan the source code in the given directory (and all subdirectories) -# and return a list of potential problems. -def scan_source(build_dir, root_dir, thisbuild): - problems = [] +def getpaths_map(build_dir, globpaths): + """Extend via globbing the paths from a field and return them as a map from original path to resulting paths.""" + paths = dict() + not_found_paths = [] + for p in globpaths: + p = p.strip() + full_path = os.path.join(build_dir, p) + full_path = os.path.normpath(full_path) + paths[p] = [r[len(str(build_dir)) + 1:] for r in glob.glob(full_path)] + if not paths[p]: + not_found_paths.append(p) + return paths, not_found_paths - # Common known non-free blobs (always lower case): - usual_suspects = ['flurryagent', - 'paypal_mpl', - 'libgoogleanalytics', - 'admob-sdk-android', - 'googleadview', - 'googleadmobadssdk', - 'google-play-services', - 'crittercism', - 'heyzap', - 'jpct-ae', - 'youtubeandroidplayerapi', - 'bugsense', - 'crashlytics', - 'ouya-sdk'] - def getpaths(field): - paths = [] - if field not in thisbuild: - return paths - for p in thisbuild[field].split(';'): - p = p.strip() - if p == '.': - p = '/' - elif p.startswith('./'): - p = p[1:] - elif not p.startswith('/'): - p = '/' + p; - if p not in paths: - paths.append(p) - return paths +def getpaths(build_dir, globpaths): + """Extend via globbing the paths from a field and return them as a set.""" + paths_map, not_found_paths = getpaths_map(build_dir, globpaths) + if not_found_paths: + raise FDroidException( + "Some glob paths did not match any files/dirs:\n" + + "\n".join(not_found_paths) + ) + paths = set() + for k, v in paths_map.items(): + for p in v: + paths.add(p) + return paths - scanignore = getpaths('scanignore') - scandelete = getpaths('scandelete') - ms = magic.open(magic.MIME_TYPE) - ms.load() +def natural_key(s): + return [int(sp) if sp.isdigit() else sp for sp in re.split(r'(\d+)', s)] - def toignore(fd): - for i in scanignore: - if fd.startswith(i): - return True - return False - def todelete(fd): - for i in scandelete: - if fd.startswith(i): - return True - return False +def check_system_clock(dt_obj, path): + """Check if system clock is updated based on provided date. 
- def removeproblem(what, fd, fp): - print 'Removing %s at %s' % (what, fd) - os.remove(fp) + If an APK has files newer than the system time, suggest updating + the system clock. This is useful for offline systems, used for + signing, which do not have another source of clock sync info. It + has to be more than 24 hours newer because ZIP/APK files do not + store timezone info - def handleproblem(what, fd, fp): - if todelete(fd): - removeproblem(what, fd, fp) - else: - problems.append('Found %s at %s' % (what, fd)) - - def warnproblem(what, fd, fp): - print 'Warning: Found %s at %s' % (what, fd) - - # Iterate through all files in the source code... - for r,d,f in os.walk(build_dir): - for curfile in f: - - if '/.hg' in r or '/.git' in r or '/.svn' in r: - continue - - # Path (relative) to the file... - fp = os.path.join(r, curfile) - fd = fp[len(build_dir):] - - # Check if this file has been explicitly excluded from scanning... - if toignore(fd): - continue - - for suspect in usual_suspects: - if suspect in curfile.lower(): - handleproblem('usual supect', fd, fp) - - mime = ms.file(fp) - if mime == 'application/x-sharedlib': - handleproblem('shared library', fd, fp) - elif mime == 'application/x-archive': - handleproblem('static library', fd, fp) - elif mime == 'application/x-executable': - handleproblem('binary executable', fd, fp) - elif mime == 'application/jar' and has_extension(fp, 'apk'): - removeproblem('APK file', fd, fp) - elif mime == 'application/jar' and has_extension(fp, 'jar'): - warnproblem('JAR file', fd, fp) - - elif has_extension(fp, 'java'): - for line in file(fp): - if 'DexClassLoader' in line: - handleproblem('DexClassLoader', fd, fp) - break - ms.close() - - # Presence of a jni directory without buildjni=yes might - # indicate a problem... (if it's not a problem, explicitly use - # buildjni=no to bypass this check) - if (os.path.exists(os.path.join(root_dir, 'jni')) and - thisbuild.get('buildjni') is None): - msg = 'Found jni directory, but buildjni is not enabled' - problems.append(msg) - - return problems + """ + checkdt = dt_obj - timedelta(1) + if datetime.today() < checkdt: + logging.warning(_('System clock is older than date in {path}!').format(path=path) + + '\n' + _('Set clock to that time using:') + '\n' + + 'sudo date -s "' + str(dt_obj) + '"') class KnownApks: + """Permanent store of existing APKs with the date they were added. + + This is currently the only way to permanently store the "updated" + date of APKs. + """ def __init__(self): - self.path = os.path.join('stats', 'known_apks.txt') + """Load filename/date info about previously seen APKs. + + Since the appid and date strings both will never have spaces, + this is parsed as a list from the end to allow the filename to + have any combo of spaces. 
+ """ self.apks = {} - if os.path.exists(self.path): - for line in file( self.path): - t = line.rstrip().split(' ') - if len(t) == 2: - self.apks[t[0]] = (t[1], None) - else: - self.apks[t[0]] = (t[1], time.strptime(t[2], '%Y-%m-%d')) - self.changed = False + for part in ('repo', 'archive'): + path = os.path.join(part, 'index-v2.json') + if os.path.isfile(path): + with open(path, 'r', encoding='utf-8') as f: + index = json.load(f) + for appid, data in index["packages"].items(): + for version in data["versions"].values(): + filename = version["file"]["name"][1:] + date = datetime.fromtimestamp(version["added"] // 1000, tz=timezone.utc) + self.apks[filename] = date - def writeifchanged(self): - if self.changed: - if not os.path.exists('stats'): - os.mkdir('stats') - f = open(self.path, 'w') - lst = [] - for apk, app in self.apks.iteritems(): - appid, added = app - line = apk + ' ' + appid - if added: - line += ' ' + time.strftime('%Y-%m-%d', added) - lst.append(line) - for line in sorted(lst): - f.write(line + '\n') - f.close() + def recordapk(self, apkName, default_date=None): + """ + Record an APK (if it's new, otherwise does nothing). - # Record an apk (if it's new, otherwise does nothing) - # Returns the date it was added. - def recordapk(self, apk, app): - if not apk in self.apks: - self.apks[apk] = (app, time.gmtime(time.time())) - self.changed = True - _, added = self.apks[apk] - return added + Returns + ------- + datetime + the date it was added as a datetime instance. + """ + if apkName not in self.apks: + if default_date is None: + default_date = datetime.now(timezone.utc) + self.apks[apkName] = default_date + return self.apks[apkName] - # Look up information - given the 'apkname', returns (app id, date added/None). - # Or returns None for an unknown apk. - def getapp(self, apkname): - if apkname in self.apks: - return self.apks[apkname] - return None - # Get the most recent 'num' apps added to the repo, as a list of package ids - # with the most recent first. - def getlatest(self, num): - apps = {} - for apk, app in self.apks.iteritems(): - appid, added = app - if added: - if appid in apps: - if apps[appid] > added: - apps[appid] = added - else: - apps[appid] = added - sortedapps = sorted(apps.iteritems(), key=operator.itemgetter(1))[-num:] - lst = [app for app,added in sortedapps] - lst.reverse() - return lst +def get_file_extension(filename): + """Get the normalized file extension, can be blank string but never None.""" + if isinstance(filename, bytes): + filename = filename.decode('utf-8') + return os.path.splitext(filename)[1].lower()[1:] -def isApkDebuggable(apkfile, config): - """Returns True if the given apk file is debuggable - :param apkfile: full path to the apk to check""" +def _androguard_logging_level(level=logging.ERROR): + """Tames androguard's default debug output. - p = subprocess.Popen([os.path.join(config['sdk_path'], - 'build-tools', config['build_tools'], 'aapt'), - 'dump', 'xmltree', apkfile, 'AndroidManifest.xml'], - stdout=subprocess.PIPE) - output = p.communicate()[0] - if p.returncode != 0: - print "ERROR: Failed to get apk manifest information" - sys.exit(1) - for line in output.splitlines(): - if 'android:debuggable' in line and not line.endswith('0x0'): - return True + There should be no debug output when the functions are being used + via the API. Otherwise, the output is controlled by the --verbose + flag. + + To get coverage across the full range of androguard >= 3.3.5, this + includes all known logger names that are relevant. 
So some of + these names might not be present in the version of androguard + currently in use. + + """ + if options and options.verbose: + level = logging.WARNING + + for name in ( + 'androguard.apk', + 'androguard.axml', + 'androguard.core.api_specific_resources', + 'androguard.core.apk', + 'androguard.core.axml', + ): + logging.getLogger(name).setLevel(level) + + # some parts of androguard 4.x use loguru instead of logging + try: + from loguru import logger + logger.remove() + except ImportError: + pass + + +def get_androguard_APK(apkfile, skip_analysis=False): + try: + # these were moved in androguard 4.0 + from androguard.core.apk import APK + except ImportError: + from androguard.core.bytecodes.apk import APK + _androguard_logging_level() + + return APK(apkfile, skip_analysis=skip_analysis) + + +def ensure_final_value(packageName, arsc, value): + """Ensure incoming value is always the value, not the resid. + + androguard will sometimes return the Android "resId" aka + Resource ID instead of the actual value. This checks whether + the value is actually a resId, then performs the Android + Resource lookup as needed. + """ + if value: + returnValue = value + if value[0] == '@': + try: # can be a literal value or a resId + res_id = int('0x' + value[1:], 16) + res_id = arsc.get_id(packageName, res_id)[1] + returnValue = arsc.get_string(packageName, res_id)[1] + except (ValueError, TypeError): + pass + return returnValue + return '' + + +def is_debuggable_or_testOnly(apkfile): + """Return True if the given file is an APK and is debuggable or testOnly. + + These two settings should never be enabled in release builds. This + parses + from the APK and nothing else to run fast, since it is run on + every APK as part of update. + + Parameters + ---------- + apkfile + full path to the APK to check + + """ + if get_file_extension(apkfile) != 'apk': + return False + try: + # these were moved in androguard 4.0 + from androguard.core.axml import START_TAG, AXMLParser, format_value + except ImportError: + from androguard.core.bytecodes.axml import START_TAG, AXMLParser, format_value + _androguard_logging_level() + + with ZipFile(apkfile) as apk: + with apk.open('AndroidManifest.xml') as manifest: + axml = AXMLParser(manifest.read()) + while axml.is_valid(): + _type = next(axml) + if _type == START_TAG and axml.getName() == 'application': + for i in range(0, axml.getAttributeCount()): + name = axml.getAttributeName(i) + if name in ('debuggable', 'testOnly'): + _type = axml.getAttributeValueType(i) + _data = axml.getAttributeValueData(i) + value = format_value(_type, _data, lambda _: axml.getAttributeValue(i)) + if value == 'true': + return True + else: + return False + break return False -class AsynchronousFileReader(threading.Thread): - ''' - Helper class to implement asynchronous reading of a file - in a separate thread. Pushes read lines on a queue to - be consumed in another thread. - ''' +def get_apk_id(apkfile): + """Extract identification information from APK. - def __init__(self, fd, queue): - assert isinstance(queue, Queue.Queue) - assert callable(fd.readline) - threading.Thread.__init__(self) - self._fd = fd - self._queue = queue + Androguard is preferred since it is more reliable and a lot + faster. Occasionally, when androguard can't get the info from the + APK, aapt still can. So aapt is also used as the final fallback + method. 
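A hedged sketch of how the manifest-only check might be called during update-style scanning; the path is hypothetical, and non-APK files return False without being opened:

import logging

if is_debuggable_or_testOnly('repo/org.example.app_100.apk'):
    logging.warning('debuggable or testOnly APK, not suitable for release')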
- def run(self): - '''The body of the tread: read lines and put them on the queue.''' - for line in iter(self._fd.readline, ''): - self._queue.put(line) + Parameters + ---------- + apkfile + path to an APK file. + + Returns + ------- + appid + versionCode + versionName + + """ + try: + return get_apk_id_androguard(apkfile) + except zipfile.BadZipFile as e: + if config and 'aapt' in config: + logging.error(apkfile + ': ' + str(e)) + return get_apk_id_aapt(apkfile) + else: + raise e + + +def get_apk_id_androguard(apkfile): + """Read (appid, versionCode, versionName) from an APK. + + This first tries to do quick binary XML parsing to just get the + values that are needed. It will fallback to full androguard + parsing, which is slow, if it can't find the versionName value or + versionName is set to a Android String Resource (e.g. an integer + hex value that starts with @). + + This function is part of androguard as get_apkid(), so this + vendored and modified to return versionCode as an integer. + + """ + if not os.path.exists(apkfile): + raise FDroidException(_("Reading packageName/versionCode/versionName failed, APK invalid: '{apkfilename}'") + .format(apkfilename=apkfile)) + + try: + # these were moved in androguard 4.0 + from androguard.core.axml import ( + END_DOCUMENT, + END_TAG, + START_TAG, + TEXT, + AXMLParser, + format_value, + ) + except ImportError: + from androguard.core.bytecodes.axml import ( + END_DOCUMENT, + END_TAG, + START_TAG, + TEXT, + AXMLParser, + format_value, + ) + _androguard_logging_level() + + appid = None + versionCode = None + versionName = None + with zipfile.ZipFile(apkfile) as apk: + with apk.open('AndroidManifest.xml') as manifest: + axml = AXMLParser(manifest.read()) + count = 0 + while axml.is_valid(): + _type = next(axml) + count += 1 + if _type == START_TAG: + for i in range(0, axml.getAttributeCount()): + name = axml.getAttributeName(i) + _type = axml.getAttributeValueType(i) + _data = axml.getAttributeValueData(i) + value = format_value(_type, _data, lambda _: axml.getAttributeValue(i)) + if appid is None and name == 'package': + appid = value + elif versionCode is None and name == 'versionCode': + if value.startswith('0x'): + versionCode = int(value, 16) + else: + versionCode = int(value) + elif versionName is None and name == 'versionName': + versionName = value + + if axml.getName() == 'manifest': + break + elif _type in (END_TAG, TEXT, END_DOCUMENT): + raise RuntimeError('{path}: must be the first element in AndroidManifest.xml' + .format(path=apkfile)) + + if not versionName or versionName[0] == '@': + a = get_androguard_APK(apkfile) + versionName = ensure_final_value(a.package, a.get_android_resources(), a.get_androidversion_name()) + if not versionName: + versionName = '' # versionName is expected to always be a str + + return appid, versionCode, versionName.strip('\0') + + +def get_apk_id_aapt(apkfile): + """Read (appid, versionCode, versionName) from an APK.""" + p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False) + m = APK_ID_TRIPLET_REGEX.match(p.output[0:p.output.index('\n')]) + if m: + return m.group(1), int(m.group(2)), m.group(3) + raise FDroidException(_( + "Reading packageName/versionCode/versionName failed," + "APK invalid: '{apkfilename}'" + ).format(apkfilename=apkfile)) + + +def get_native_code(apkfile): + """Aapt checks if there are architecture folders under the lib/ folder. + + We are simulating the same behaviour. 
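A minimal sketch of the triplet returned by get_apk_id() (hypothetical file name):

appid, versionCode, versionName = get_apk_id('repo/org.example.app_100.apk')
# appid and versionName are str (versionName with trailing NULs stripped);
# versionCode is an int, and the androguard path also parses hex values
# like '0x64' from the binary manifest.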
+ """ + arch_re = re.compile("^lib/(.*)/.*$") + archset = set() + with ZipFile(apkfile) as apk: + for filename in apk.namelist(): + m = arch_re.match(filename) + if m: + archset.add(m.group(1)) + return sorted(list(archset)) - def eof(self): - '''Check whether there is no more content to expect.''' - return not self.is_alive() and self._queue.empty() class PopenResult: - returncode = None - stdout = '' - stderr = '' - stdout_apk = '' + def __init__(self, returncode=None, output=None): + self.returncode = returncode + self.output = output -def FDroidPopen(commands, cwd=None): + +def SdkToolsPopen(commands, cwd=None, output=True): + cmd = commands[0] + if cmd not in config: + config[cmd] = find_sdk_tools_cmd(commands[0]) + abscmd = config[cmd] + if abscmd is None: + raise FDroidException(_("Could not find '{command}' on your system").format(command=cmd)) + if cmd == 'aapt': + test_aapt_version(config['aapt']) + return FDroidPopen([abscmd] + commands[1:], + cwd=cwd, output=output) + + +def FDroidPopenBytes(commands, cwd=None, envs=None, output=True, stderr_to_stdout=True): """ - Runs a command the FDroid way and returns return code and output + Run a command and capture the possibly huge output as bytes. - :param commands and cwd like in subprocess.Popen + Parameters + ---------- + commands + command and argument list like in subprocess.Popen + cwd + optionally specifies a working directory + envs + a optional dictionary of environment variables and their values + + Returns + ------- + A PopenResult. """ + global env + if env is None: + set_FDroidPopen_env() - if options.verbose: - if cwd: - print "Directory: %s" % cwd - print " > %s" % ' '.join(commands) + process_env = env.copy() + if envs is not None and len(envs) > 0: + process_env.update(envs) + if cwd: + cwd = os.path.normpath(cwd) + logging.debug("Directory: %s" % cwd) + logging.debug("> %s" % ' '.join(commands)) + + stderr_param = subprocess.STDOUT if stderr_to_stdout else subprocess.PIPE result = PopenResult() - p = subprocess.Popen(commands, cwd=cwd, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = None + try: + p = subprocess.Popen(commands, cwd=cwd, shell=False, env=process_env, + stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, + stderr=stderr_param) + except OSError as e: + raise BuildException("OSError while trying to execute " + + ' '.join(commands) + ': ' + str(e)) from e - stdout_queue = Queue.Queue() + # TODO are these AsynchronousFileReader threads always exiting? 
+ if not stderr_to_stdout and options.verbose: + stderr_queue = Queue() + stderr_reader = AsynchronousFileReader(p.stderr, stderr_queue) + + while not stderr_reader.eof(): + while not stderr_queue.empty(): + line = stderr_queue.get() + sys.stderr.buffer.write(line) + sys.stderr.flush() + + time.sleep(0.1) + + stdout_queue = Queue() stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue) - stdout_reader.start() - stderr_queue = Queue.Queue() - stderr_reader = AsynchronousFileReader(p.stderr, stderr_queue) - stderr_reader.start() + buf = io.BytesIO() - # Check the queues for output (until there is no more to get) - while not stdout_reader.eof() or not stderr_reader.eof(): - # Show what we received from standard output + # Check the queue for output (until there is no more to get) + while not stdout_reader.eof(): while not stdout_queue.empty(): line = stdout_queue.get() - if options.verbose: + if output and options and options.verbose: # Output directly to console - sys.stdout.write(line) - sys.stdout.flush() - result.stdout += line - - # Show what we received from standard error - while not stderr_queue.empty(): - line = stderr_queue.get() - if options.verbose: - # Output directly to console - sys.stderr.write(line) + sys.stderr.buffer.write(line) sys.stderr.flush() - result.stderr += line + buf.write(line) + time.sleep(0.1) - p.communicate() - result.returncode = p.returncode + result.returncode = p.wait() + result.output = buf.getvalue() + buf.close() + # make sure all filestreams of the subprocess are closed + for streamvar in ['stdin', 'stdout', 'stderr']: + if hasattr(p, streamvar): + stream = getattr(p, streamvar) + if stream: + stream.close() return result -def remove_signing_keys(build_dir): - comment = re.compile(r'[ ]*//') - signing_configs = re.compile(r'[\t ]*signingConfigs[ \t]*{[ \t]*$') - r_open = re.compile(r'.*{[\t ]*$') - r_close = re.compile(r'.*}[\t ]*$') - for root, dirs, files in os.walk(build_dir): - if 'build.gradle' in files: - path = os.path.join(root, 'build.gradle') - changed = False +def FDroidPopen(commands, cwd=None, envs=None, output=True, stderr_to_stdout=True): + """ + Run a command and capture the possibly huge output as a str. + + Parameters + ---------- + commands + command and argument list like in subprocess.Popen + cwd + optionally specifies a working directory + envs + a optional dictionary of environment variables and their values + + Returns + ------- + A PopenResult. 
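Typical call pattern for the rewritten FDroidPopen (command and cwd are hypothetical); FDroidPopenBytes returns the same result with bytes output:

p = FDroidPopen(['git', 'rev-parse', 'HEAD'], cwd='build/org.example.app')
if p.returncode != 0:
    raise BuildException('git rev-parse failed', p.output)
commit = p.output.strip()  # .output is a str decoded with errors='ignore'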
+ """ + result = FDroidPopenBytes(commands, cwd, envs, output, stderr_to_stdout) + result.output = result.output.decode('utf-8', 'ignore') + return result + + +gradle_comment = re.compile(r'[ ]*//') +gradle_signing_configs = re.compile(r'^[\t ]*signingConfigs[ \t]*{[ \t]*$') +gradle_line_matches = [ + re.compile(r'^[\t ]*signingConfig\s*[= ]\s*[^ ]*$'), + re.compile(r'.*android\.signingConfigs\.[^{]*$'), + re.compile(r'.*release\.signingConfig *= *'), +] + + +def remove_signing_keys(build_dir): + for root, dirs, files in os.walk(build_dir): + gradlefile = None + if 'build.gradle' in files: + gradlefile = "build.gradle" + elif 'build.gradle.kts' in files: + gradlefile = "build.gradle.kts" + if gradlefile: + path = os.path.join(root, gradlefile) with open(path, "r") as o: lines = o.readlines() + changed = False + opened = 0 + i = 0 with open(path, "w") as o: - for line in lines: - if comment.match(line): - pass - elif signing_configs.match(line): - opened = 1 + while i < len(lines): + line = lines[i] + i += 1 + while line.endswith('\\\n'): + line = line.rstrip('\\\n') + lines[i] + i += 1 + + if gradle_comment.match(line): + o.write(line) + continue + + if opened > 0: + opened += line.count('{') + opened -= line.count('}') + continue + + if gradle_signing_configs.match(line): changed = True - elif opened > 0: - if r_open.match(line): - opened += 1 - elif r_close.match(line): - opened -= 1 - elif any(s in line for s in ( - ' signingConfig ', - 'android.signingConfigs.', - 'variant.outputFile = ', - '.readLine(')): + opened += 1 + continue + + if any(s.match(line) for s in gradle_line_matches): changed = True - else: + continue + + if opened == 0: o.write(line) - if changed and options.verbose: - print "Cleaned build.gradle of keysigning configs at %s" % path + if changed: + logging.info("Cleaned %s of keysigning configs at %s" % (gradlefile, path)) - for propfile in ('build.properties', 'default.properties', 'ant.properties'): + for propfile in [ + 'project.properties', + 'build.properties', + 'default.properties', + 'ant.properties', ]: if propfile in files: path = os.path.join(root, propfile) - changed = False - with open(path, "r") as o: + with open(path, "r", encoding='iso-8859-1') as o: lines = o.readlines() - with open(path, "w") as o: + changed = False + + with open(path, "w", encoding='iso-8859-1') as o: for line in lines: - if line.startswith('key.store'): + if any(line.startswith(s) for s in ('key.store', 'key.alias')): changed = True - else: - o.write(line) + continue - if changed and options.verbose: - print "Cleaned %s of keysigning configs at %s" % (propfile,path) + o.write(line) -def replace_config_vars(cmd): - cmd = cmd.replace('$$SDK$$', config['sdk_path']) - cmd = cmd.replace('$$NDK$$', config['ndk_path']) - cmd = cmd.replace('$$MVN3$$', config['mvn3']) + if changed: + logging.info("Cleaned %s of keysigning configs at %s" % (propfile, path)) + + +def set_FDroidPopen_env(app=None, build=None): + """Set up the environment variables for the build environment. + + There is only a weak standard, the variables used by gradle, so also set + up the most commonly used environment variables for SDK and NDK. Also, if + there is no locale set, this will set the locale (e.g. LANG) to en_US.UTF-8. + + If an App instance is provided, then the SOURCE_DATE_EPOCH + environment variable will be set based on that app's source repo. 
+ + """ + global env, orig_path + + if env is None: + env = os.environ + orig_path = env['PATH'] + if config: + if config.get('sdk_path'): + for n in ['ANDROID_HOME', 'ANDROID_SDK', 'ANDROID_SDK_ROOT']: + env[n] = config['sdk_path'] + for k, v in config.get('java_paths', {}).items(): + env['JAVA%s_HOME' % k] = v + + missinglocale = True + for k, v in env.items(): + if k == 'LANG' and v != 'C': + missinglocale = False + elif k == 'LC_ALL': + missinglocale = False + if missinglocale: + env['LANG'] = 'en_US.UTF-8' + + if app: + env['SOURCE_DATE_EPOCH'] = get_source_date_epoch(get_build_dir(app)) + if build is not None: + path = build.ndk_path() + paths = orig_path.split(os.pathsep) + if path and path not in paths: + paths = [path] + paths + env['PATH'] = os.pathsep.join(paths) + for n in ['ANDROID_NDK', 'NDK', 'ANDROID_NDK_HOME']: + env[n] = build.ndk_path() + + +def replace_build_vars(cmd, build): + cmd = cmd.replace('$$COMMIT$$', build.commit) + cmd = cmd.replace('$$VERSION$$', build.versionName) + cmd = cmd.replace('$$VERCODE$$', str(build.versionCode)) return cmd + +def replace_config_vars(cmd, build): + cmd = cmd.replace('$$SDK$$', config['sdk_path']) + cmd = cmd.replace('$$NDK$$', build.ndk_path()) + if build is not None: + cmd = replace_build_vars(cmd, build) + return cmd + + def place_srclib(root_dir, number, libpath): if not number: return relpath = os.path.relpath(libpath, root_dir) proppath = os.path.join(root_dir, 'project.properties') - with open(proppath, "r") as o: - lines = o.readlines() + lines = [] + if os.path.isfile(proppath): + with open(proppath, "r", encoding='iso-8859-1') as o: + lines = o.readlines() - with open(proppath, "w") as o: + with open(proppath, "w", encoding='iso-8859-1') as o: placed = False for line in lines: if line.startswith('android.library.reference.%d=' % number): - o.write('android.library.reference.%d=%s\n' % (number,relpath)) + o.write('android.library.reference.%d=%s\n' % (number, relpath)) placed = True else: o.write(line) if not placed: - o.write('android.library.reference.%d=%s\n' % (number,relpath)) + o.write('android.library.reference.%d=%s\n' % (number, relpath)) + +APK_SIGNATURE_FILES = re.compile(r'META-INF/[0-9A-Za-z_\-]+\.(SF|RSA|DSA|EC)') + + +def signer_fingerprint_short(cert_encoded): + """Obtain shortened sha256 signing-key fingerprint for pkcs7 DER certficate. + + Extracts the first 7 hexadecimal digits of sha256 signing-key fingerprint + for a given pkcs7 signature. + + Parameters + ---------- + cert_encoded + Contents of an APK signing certificate. + + Returns + ------- + shortened signing-key fingerprint. + """ + return signer_fingerprint(cert_encoded)[:7] + + +def signer_fingerprint(cert_encoded): + """Return SHA-256 signer fingerprint for PKCS#7 DER-encoded signature. + + Parameters + ---------- + Contents of an APK signature. + + Returns + ------- + Standard SHA-256 signer fingerprint. + + """ + return hashlib.sha256(cert_encoded).hexdigest() + + +def get_first_signer_certificate(apkpath): + """Get the first signing certificate from the APK, DER-encoded. + + JAR and APK Signatures allow for multiple signers, though it is + rarely used, and this is poorly documented. So this method only + fetches the first certificate, and errors out if there are more. + + Starting with targetSdkVersion 30, APK v2 Signatures are required. + https://developer.android.com/about/versions/11/behavior-changes-11#minimum-signature-scheme + + When a APK v2+ signature is present, the JAR signature is not + verified. 
The verifier parses the signers from the v2+ signature + and does not seem to look at the JAR signature. + https://source.android.com/docs/security/features/apksigning/v2#apk-signature-scheme-v2-block + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/ApkVerifier.java#270 + + apksigner checks that the signers from all the APK signatures match: + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/ApkVerifier.java#383 + + apksigner verifies each signer's signature block file + .(RSA|DSA|EC) against the corresponding signature file .SF + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#280 + + NoOverwriteDict is a workaround for: + https://github.com/androguard/androguard/issues/1030 + + Lots more discusion here: + https://gitlab.com/fdroid/fdroidserver/-/issues/1128 + + """ + + class NoOverwriteDict(dict): + def __setitem__(self, k, v): + if k not in self: + super().__setitem__(k, v) + + cert_encoded = None + found_certs = [] + apkobject = get_androguard_APK(apkpath) + apkobject._v2_blocks = NoOverwriteDict() + certs_v3 = apkobject.get_certificates_der_v3() + if certs_v3: + cert_v3 = certs_v3[0] + found_certs.append(cert_v3) + if not cert_encoded: + logging.debug(_('Using APK Signature v3')) + cert_encoded = cert_v3 + + certs_v2 = apkobject.get_certificates_der_v2() + if certs_v2: + cert_v2 = certs_v2[0] + found_certs.append(cert_v2) + if not cert_encoded: + logging.debug(_('Using APK Signature v2')) + cert_encoded = cert_v2 + + if get_min_sdk_version(apkobject) < 24 or ( + not (certs_v3 or certs_v2) and get_effective_target_sdk_version(apkobject) < 30 + ): + with zipfile.ZipFile(apkpath, 'r') as apk: + cert_files = [ + n for n in apk.namelist() if SIGNATURE_BLOCK_FILE_REGEX.match(n) + ] + if len(cert_files) > 1: + logging.error( + _("Found multiple JAR Signature Block Files in {path}").format( + path=apkpath + ) + ) + return + elif len(cert_files) == 1: + signature_block_file = cert_files[0] + signature_file = ( + cert_files[0][: signature_block_file.rindex('.')] + '.SF' + ) + cert_v1 = get_certificate( + apk.read(signature_block_file), + apk.read(signature_file), + ) + found_certs.append(cert_v1) + if not cert_encoded: + logging.debug(_('Using JAR Signature')) + cert_encoded = cert_v1 + + if not cert_encoded: + logging.error(_("No signing certificates found in {path}").format(path=apkpath)) + return + + if not all(cert == found_certs[0] for cert in found_certs): + logging.error( + _("APK signatures have different certificates in {path}:").format( + path=apkpath + ) + ) + return + + return cert_encoded + + +def apk_signer_fingerprint(apk_path): + """Get SHA-256 fingerprint string for the first signer from given APK. + + Parameters + ---------- + apk_path + path to APK + + Returns + ------- + Standard SHA-256 signer fingerprint + + """ + cert_encoded = get_first_signer_certificate(apk_path) + if not cert_encoded: + return None + return signer_fingerprint(cert_encoded) + + +def metadata_get_sigdir(appid, vercode=None): + """Get signature directory for app.""" + if vercode: + return os.path.join('metadata', appid, 'signatures', str(vercode)) + else: + return os.path.join('metadata', appid, 'signatures') + + +def metadata_find_developer_signature(appid, vercode=None): + """Try to find the developer signature for given appid. 
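The on-disk layout the metadata signature helpers expect (POSIX separators, hypothetical appid):

assert metadata_get_sigdir('org.example.app') == 'metadata/org.example.app/signatures'
assert metadata_get_sigdir('org.example.app', 100) == 'metadata/org.example.app/signatures/100'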
+ + This picks the first signature file found in metadata an returns its + signature. + + Returns + ------- + sha256 signing key fingerprint of the developer signing key. + None in case no signature can not be found. + """ + # fetch list of dirs for all versions of signatures + appversigdirs = [] + if vercode: + appversigdirs.append(metadata_get_sigdir(appid, vercode)) + else: + appsigdir = metadata_get_sigdir(appid) + if os.path.isdir(appsigdir): + numre = re.compile('[0-9]+') + for ver in os.listdir(appsigdir): + if numre.match(ver): + appversigdir = os.path.join(appsigdir, ver) + appversigdirs.append(appversigdir) + + for sigdir in appversigdirs: + signature_block_files = ( + glob.glob(os.path.join(sigdir, '*.DSA')) + + glob.glob(os.path.join(sigdir, '*.EC')) + + glob.glob(os.path.join(sigdir, '*.RSA')) + ) + if len(signature_block_files) > 1: + raise FDroidException('ambiguous signatures, please make sure there is only one signature in \'{}\'. (The signature has to be the App maintainers signature for version of the APK.)'.format(sigdir)) + for signature_block_file in signature_block_files: + with open(signature_block_file, 'rb') as f: + return signer_fingerprint(get_certificate(f.read())) + return None + + +def metadata_find_signing_files(appid, vercode): + """Get a list of signed manifests and signatures. + + Parameters + ---------- + appid + app id string + vercode + app versionCode + + Returns + ------- + List + of 4-tuples for each signing key with following paths: + (signature_file, signature_block_file, manifest, v2_files), where v2_files + is either a (apk_signing_block_offset_file, apk_signing_block_file) pair or None + + References + ---------- + * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html + * https://source.android.com/security/apksigning/v2 + * https://source.android.com/security/apksigning/v3 + """ + ret = [] + sigdir = metadata_get_sigdir(appid, vercode) + signature_block_files = ( + glob.glob(os.path.join(sigdir, '*.DSA')) + + glob.glob(os.path.join(sigdir, '*.EC')) + + glob.glob(os.path.join(sigdir, '*.RSA')) + ) + signature_block_pat = re.compile(r'(\.DSA|\.EC|\.RSA)$') + apk_signing_block = os.path.join(sigdir, "APKSigningBlock") + apk_signing_block_offset = os.path.join(sigdir, "APKSigningBlockOffset") + if os.path.isfile(apk_signing_block) and os.path.isfile(apk_signing_block_offset): + v2_files = apk_signing_block, apk_signing_block_offset + else: + v2_files = None + for signature_block_file in signature_block_files: + signature_file = signature_block_pat.sub('.SF', signature_block_file) + if os.path.isfile(signature_file): + manifest = os.path.join(sigdir, 'MANIFEST.MF') + if os.path.isfile(manifest): + ret.append((signature_block_file, signature_file, manifest, v2_files)) + return ret + + +def metadata_find_developer_signing_files(appid, vercode): + """Get developer signature files for specified app from metadata. + + Returns + ------- + List + of 4-tuples for each signing key with following paths: + (signature_file, signature_block_file, manifest, v2_files), where v2_files + is either a (apk_signing_block_offset_file, apk_signing_block_file) pair or None + + """ + allsigningfiles = metadata_find_signing_files(appid, vercode) + if allsigningfiles and len(allsigningfiles) == 1: + return allsigningfiles[0] + else: + return None + + +class ClonedZipInfo(zipfile.ZipInfo): + """Hack to allow fully cloning ZipInfo instances. + + The zipfile library has some bugs that prevent it from fully + cloning ZipInfo entries. 
https://bugs.python.org/issue43547 + + """ + + def __init__(self, zinfo): + super().__init__() + self.original = zinfo + for k in self.__slots__: + try: + setattr(self, k, getattr(zinfo, k)) + except AttributeError: + pass + + def __getattribute__(self, name): + if name in ("date_time", "external_attr", "flag_bits"): + return getattr(self.original, name) + return object.__getattribute__(self, name) + + +def apk_has_v1_signatures(apkfile): + """Test whether an APK has v1 signature files.""" + with ZipFile(apkfile, 'r') as apk: + for info in apk.infolist(): + if APK_SIGNATURE_FILES.match(info.filename): + return True + return False + + +def apk_strip_v1_signatures(signed_apk, strip_manifest=False): + """Remove signatures from APK. + + Parameters + ---------- + signed_apk + path to APK file. + strip_manifest + when set to True also the manifest file will be removed from the APK. + """ + with tempfile.TemporaryDirectory() as tmpdir: + tmp_apk = os.path.join(tmpdir, 'tmp.apk') + shutil.move(signed_apk, tmp_apk) + with ZipFile(tmp_apk, 'r') as in_apk: + with ZipFile(signed_apk, 'w') as out_apk: + for info in in_apk.infolist(): + if not APK_SIGNATURE_FILES.match(info.filename): + if strip_manifest: + if info.filename != 'META-INF/MANIFEST.MF': + buf = in_apk.read(info.filename) + out_apk.writestr(ClonedZipInfo(info), buf) + else: + buf = in_apk.read(info.filename) + out_apk.writestr(ClonedZipInfo(info), buf) + + +def apk_implant_signatures(apkpath, outpath, manifest): + """Implant a signature from metadata into an APK. + + Note: this changes there supplied APK in place. So copy it if you + need the original to be preserved. + + Parameters + ---------- + apkpath + location of the unsigned apk + outpath + location of the output apk + + References + ---------- + * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html + * https://source.android.com/security/apksigning/v2 + * https://source.android.com/security/apksigning/v3 + + """ + sigdir = os.path.dirname(manifest) # FIXME + apksigcopier.do_patch(sigdir, apkpath, outpath, v1_only=None, + exclude=apksigcopier.exclude_meta) + + +def apk_extract_signatures(apkpath, outdir): + """Extract a signature files from APK and puts them into target directory. + + Parameters + ---------- + apkpath + location of the apk + outdir + older where the extracted signature files will be stored + + References + ---------- + * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html + * https://source.android.com/security/apksigning/v2 + * https://source.android.com/security/apksigning/v3 + + """ + apksigcopier.do_extract(apkpath, outdir, v1_only=None) + + +def get_min_sdk_version(apk): + """Wrap the androguard function to always return an integer. + + Fall back to 1 if we can't get a valid minsdk version. + + Parameters + ---------- + apk + androguard APK object + + Returns + ------- + minSdkVersion: int + """ + try: + return int(apk.get_min_sdk_version()) + except TypeError: + return 1 + + +def get_effective_target_sdk_version(apk): + """Wrap the androguard function to always return an integer. 
+ + Parameters + ---------- + apk + androguard APK object + + Returns + ------- + targetSdkVersion: int + """ + try: + return int(apk.get_effective_target_sdk_version()) + except TypeError: + return get_min_sdk_version(apk) + + +def get_apksigner_smartcardoptions(smartcardoptions): + if '-providerName' in smartcardoptions.copy(): + pos = smartcardoptions.index('-providerName') + # remove -providerName and it's argument + del smartcardoptions[pos] + del smartcardoptions[pos] + replacements = {'-storetype': '--ks-type', + '-providerClass': '--provider-class', + '-providerArg': '--provider-arg'} + return [replacements.get(n, n) for n in smartcardoptions] + + +def sign_apk(unsigned_path, signed_path, keyalias): + """Sign an unsigned APK, then save to a new file, deleting the unsigned. + + NONE is a Java keyword used to configure smartcards as the + keystore. Otherwise, the keystore is a local file. + https://docs.oracle.com/javase/7/docs/technotes/guides/security/p11guide.html#KeyToolJarSigner + + When using smartcards, apksigner does not use the same options has + Java/keytool/jarsigner (-providerName, -providerClass, + -providerArg, -storetype). apksigner documents the options as + --ks-provider-class and --ks-provider-arg. Those seem to be + accepted but fail when actually making a signature with weird + internal exceptions. We use the options that actually work. From: + https://geoffreymetais.github.io/code/key-signing/#scripting + + """ + if config['keystore'] == 'NONE': + signing_args = get_apksigner_smartcardoptions(config['smartcardoptions']) + else: + signing_args = ['--key-pass', 'env:FDROID_KEY_PASS'] + apksigner = config.get('apksigner', '') + if not shutil.which(apksigner): + raise BuildException(_("apksigner not found, it's required for signing!")) + cmd = [apksigner, 'sign', + '--ks', config['keystore'], + '--ks-pass', 'env:FDROID_KEY_STORE_PASS'] + cmd += signing_args + cmd += ['--ks-key-alias', keyalias, + '--in', unsigned_path, + '--out', signed_path] + p = FDroidPopen(cmd, envs={ + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config.get('keypass', "")}) + if p.returncode != 0: + if os.path.exists(signed_path): + os.remove(signed_path) + raise BuildException(_("Failed to sign application"), p.output) + os.remove(unsigned_path) + + +def verify_apks( + signed_apk, unsigned_apk, tmp_dir, v1_only=None, clean_up_verified=False +): + """Verify that two apks are the same. + + One of the inputs is signed, the other is unsigned. The signature metadata + is transferred from the signed to the unsigned apk, and then apksigner is + used to verify that the signature from the signed APK is also valid for + the unsigned one. If the APK given as unsigned actually does have a + signature, it will be stripped out and ignored. + + Parameters + ---------- + signed_apk + Path to a signed APK file + unsigned_apk + Path to an unsigned APK file expected to match it + tmp_dir + Path to directory for temporary files + v1_only + True for v1-only signatures, False for v1 and v2 signatures, + or None for autodetection + clean_up_verified + Remove any files created here if the verification succeeded. + + Returns + ------- + None if the verification is successful, otherwise a string describing what went wrong. 
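A sketch of the keytool-to-apksigner option translation (option values are illustrative); -providerName has no apksigner equivalent, so it and its argument are dropped, and note that the input list is modified in place:

opts = ['-storetype', 'PKCS11',
        '-providerName', 'SunPKCS11-OpenSC',
        '-providerClass', 'sun.security.pkcs11.SunPKCS11',
        '-providerArg', 'opensc-fdroid.cfg']
assert get_apksigner_smartcardoptions(opts) == [
    '--ks-type', 'PKCS11',
    '--provider-class', 'sun.security.pkcs11.SunPKCS11',
    '--provider-arg', 'opensc-fdroid.cfg',
]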
+ """ + if not verify_apk_signature(signed_apk): + logging.info('...NOT verified - {0}'.format(signed_apk)) + return 'verification of signed APK failed' + + if not os.path.isfile(signed_apk): + return 'can not verify: file does not exists: {}'.format(signed_apk) + if not os.path.isfile(unsigned_apk): + return 'can not verify: file does not exists: {}'.format(unsigned_apk) + + tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk)) + + try: + apksigcopier.do_copy(signed_apk, unsigned_apk, tmp_apk, v1_only=v1_only, + exclude=apksigcopier.exclude_meta) + except apksigcopier.APKSigCopierError as e: + logging.info('...NOT verified - {0}'.format(tmp_apk)) + error = 'signature copying failed: {}'.format(str(e)) + result = compare_apks(signed_apk, unsigned_apk, tmp_dir, + os.path.dirname(unsigned_apk)) + if result is not None: + error += '\nComparing reference APK to unsigned APK...\n' + result + return error + + if not verify_apk_signature(tmp_apk): + logging.info('...NOT verified - {0}'.format(tmp_apk)) + error = 'verification of APK with copied signature failed' + result = compare_apks(signed_apk, tmp_apk, tmp_dir, + os.path.dirname(unsigned_apk)) + if result is not None: + error += '\nComparing reference APK to APK with copied signature...\n' + result + return error + if clean_up_verified and os.path.exists(tmp_apk): + logging.info(f"...cleaned up {tmp_apk} after successful verification") + os.remove(tmp_apk) + + logging.info('...successfully verified') + return None + + +def verify_jar_signature(jar): + """Verify the signature of a given JAR file. + + jarsigner is very shitty: unsigned JARs pass as "verified"! So + this has to turn on -strict then check for result 4, since this + does not expect the signature to be from a CA-signed certificate. + + Raises + ------ + VerificationException + If the JAR's signature could not be verified. + + """ + error = _('JAR signature failed to verify: {path}').format(path=jar) + try: + output = subprocess.check_output( + [config['jarsigner'], '-strict', '-verify', jar], stderr=subprocess.STDOUT + ) + raise VerificationException(error + '\n' + output.decode('utf-8')) + except subprocess.CalledProcessError as e: + if e.returncode == 4: + logging.debug(_('JAR signature verified: {path}').format(path=jar)) + else: + raise VerificationException(error + '\n' + e.output.decode('utf-8')) from e + + +def verify_deprecated_jar_signature(jar): + """Verify the signature of a given JAR file, allowing deprecated algorithms. + + index.jar (v0) and index-v1.jar are both signed by MD5/SHA1 by + definition, so this method provides a way to verify those. Also, + apksigner has different deprecation rules than jarsigner, so this + is our current hack to try to represent the apksigner rules when + executing jarsigner. + + jarsigner is very shitty: unsigned JARs pass as "verified"! So + this has to turn on -strict then check for result 4, since this + does not expect the signature to be from a CA-signed certificate. + + Also used to verify the signature on an archived APK, supporting deprecated + algorithms. + + F-Droid aims to keep every single binary that it ever published. Therefore, + it needs to be able to verify APK signatures that include deprecated/removed + algorithms. For example, jarsigner treats an MD5 signature as unsigned. + + jarsigner passes unsigned APKs as "verified"! So this has to turn + on -strict then check for result 4. + + Just to be safe, this never reuses the file, and locks down the + file permissions while in use. 
That should prevent a bad actor + from changing the settings during operation. + + Raises + ------ + VerificationException + If the JAR's signature could not be verified. + + """ + error = _('JAR signature failed to verify: {path}').format(path=jar) + with tempfile.TemporaryDirectory() as tmpdir: + java_security = os.path.join(tmpdir, 'java.security') + with open(java_security, 'w') as fp: + fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024') + os.chmod(java_security, 0o400) + + try: + cmd = [ + config['jarsigner'], + '-J-Djava.security.properties=' + java_security, + '-strict', '-verify', jar + ] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + raise VerificationException(error + '\n' + output.decode('utf-8')) + except subprocess.CalledProcessError as e: + if e.returncode == 4: + logging.debug(_('JAR signature verified: {path}').format(path=jar)) + else: + raise VerificationException(error + '\n' + e.output.decode('utf-8')) from e + + +def verify_apk_signature(apk, min_sdk_version=None): + """Verify the signature on an APK. + + Try to use apksigner whenever possible since jarsigner is very + shitty: unsigned APKs pass as "verified"! Warning, this does + not work on JARs with apksigner >= 0.7 (build-tools 26.0.1) + + Returns + ------- + Boolean + whether the APK was verified + """ + if set_command_in_config('apksigner'): + args = [config['apksigner'], 'verify'] + if min_sdk_version: + args += ['--min-sdk-version=' + min_sdk_version] + if options and options.verbose: + args += ['--verbose'] + try: + output = subprocess.check_output(args + [apk]) + if options and options.verbose: + logging.debug(apk + ': ' + output.decode('utf-8')) + return True + except subprocess.CalledProcessError as e: + logging.error('\n' + apk + ': ' + e.output.decode('utf-8')) + else: + if not config.get('jarsigner_warning_displayed'): + config['jarsigner_warning_displayed'] = True + logging.warning(_("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner")) + try: + verify_deprecated_jar_signature(apk) + return True + except Exception as e: + logging.error(e) + return False + + +apk_badchars = re.compile('''[/ :;'"]''') + + +def compare_apks(apk1, apk2, tmp_dir, log_dir=None): + """Compare two apks. + + Returns + ------- + None if the APK content is the same (apart from the signing key), + otherwise a string describing what's different, or what went wrong when + trying to do the comparison. 
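Callers treat verify_apk_signature() as a boolean check rather than an exception source (hypothetical path):

import logging

if not verify_apk_signature('repo/org.example.app_100.apk'):
    logging.warning('APK signature did not verify, skipping')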
+ """ + if not log_dir: + log_dir = tmp_dir + + absapk1 = os.path.abspath(apk1) + absapk2 = os.path.abspath(apk2) + + if set_command_in_config('diffoscope'): + logfilename = os.path.join(log_dir, os.path.basename(absapk1)) + htmlfile = logfilename + '.diffoscope.html' + textfile = logfilename + '.diffoscope.txt' + if subprocess.call([config['diffoscope'], + '--max-report-size', '12345678', '--max-diff-block-lines', '128', + '--html', htmlfile, '--text', textfile, + absapk1, absapk2]) != 0: + return "Failed to run diffoscope " + apk1 + + apk1dir = os.path.join(tmp_dir, apk_badchars.sub('_', apk1[0:-4])) # trim .apk + apk2dir = os.path.join(tmp_dir, apk_badchars.sub('_', apk2[0:-4])) # trim .apk + for d in [apk1dir, apk2dir]: + if os.path.exists(d): + shutil.rmtree(d) + os.mkdir(d) + os.mkdir(os.path.join(d, 'content')) + + # extract APK contents for comparision + with ZipFile(absapk1, 'r') as f: + f.extractall(path=os.path.join(apk1dir, 'content')) + with ZipFile(absapk2, 'r') as f: + f.extractall(path=os.path.join(apk2dir, 'content')) + + if set_command_in_config('apktool'): + if subprocess.call( + [config['apktool'], 'd', absapk1, '--output', 'apktool'], cwd=apk1dir + ): + return "Failed to run apktool " + apk1 + if subprocess.call( + [config['apktool'], 'd', absapk2, '--output', 'apktool'], cwd=apk2dir + ): + return "Failed to run apktool " + apk2 + + p = FDroidPopen(['diff', '-r', apk1dir, apk2dir], output=False) + lines = p.output.splitlines() + if len(lines) != 1 or 'META-INF' not in lines[0]: + if set_command_in_config('meld'): + p = FDroidPopen([config['meld'], apk1dir, apk2dir], output=False) + return "Unexpected diff output:\n" + p.output.replace("\r", "^M") + + # since everything verifies, delete the comparison to keep cruft down + shutil.rmtree(apk1dir) + shutil.rmtree(apk2dir) + + # If we get here, it seems like they're the same! + return None + + +def set_command_in_config(command): + """Try to find specified command in the path, if it hasn't been manually set in config.yml. + + If found, it is added to the config + dict. The return value says whether the command is available. + + """ + if command in config: + return True + else: + tmp = find_command(command) + if tmp is not None: + config[command] = tmp + return True + return False + + +def find_command(command): + """Find the full path of a command, or None if it can't be found in the PATH.""" + def is_exe(fpath): + return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + + fpath, fname = os.path.split(command) + if fpath: + if is_exe(command): + return command + else: + for path in os.environ["PATH"].split(os.pathsep): + path = path.strip('"') + exe_file = os.path.join(path, command) + if is_exe(exe_file): + return exe_file + + return None + + +def genpassword(): + """Generate a random password for when generating keys.""" + h = hashlib.sha256() + h.update(os.urandom(16)) # salt + h.update(socket.getfqdn().encode('utf-8')) + passwd = base64.b64encode(h.digest()).strip() + return passwd.decode('utf-8') + + +def genkeystore(localconfig): + """Generate a new key with password provided in localconfig and add it to new keystore. 
+ + Parameters + ---------- + localconfig + + Returns + ------- + hexed public key, public key fingerprint + """ + logging.info('Generating a new key in "' + localconfig['keystore'] + '"...') + keystoredir = os.path.dirname(localconfig['keystore']) + if keystoredir is None or keystoredir == '': + keystoredir = os.path.join(os.getcwd(), keystoredir) + if not os.path.exists(keystoredir): + os.makedirs(keystoredir, mode=0o700) + + env_vars = {'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': localconfig['keystorepass'], + 'FDROID_KEY_PASS': localconfig.get('keypass', "")} + + cmd = [config['keytool'], '-genkey', + '-keystore', localconfig['keystore'], + '-alias', localconfig['repo_keyalias'], + '-keyalg', 'RSA', '-keysize', '4096', + '-sigalg', 'SHA256withRSA', + '-validity', '10000', + '-storetype', 'pkcs12', + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-dname', localconfig['keydname'], + '-J-Duser.language=en'] + if localconfig['keystore'] == "NONE": + cmd += localconfig['smartcardoptions'] + else: + cmd += '-keypass:env', 'FDROID_KEY_PASS' + p = FDroidPopen(cmd, envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to generate key", p.output) + if localconfig['keystore'] != "NONE": + os.chmod(localconfig['keystore'], 0o0600) + if not options.quiet: + # now show the lovely key that was just generated + p = FDroidPopen([config['keytool'], '-list', '-v', + '-keystore', localconfig['keystore'], + '-alias', localconfig['repo_keyalias'], + '-storepass:env', 'FDROID_KEY_STORE_PASS', '-J-Duser.language=en'] + + config['smartcardoptions'], envs=env_vars) + logging.info(p.output.strip() + '\n\n') + # get the public key + p = FDroidPopenBytes([config['keytool'], '-exportcert', + '-keystore', localconfig['keystore'], + '-alias', localconfig['repo_keyalias'], + '-storepass:env', 'FDROID_KEY_STORE_PASS'] + + config['smartcardoptions'], + envs=env_vars, output=False, stderr_to_stdout=False) + if p.returncode != 0 or len(p.output) < 20: + raise BuildException("Failed to get public key", p.output) + pubkey = p.output + fingerprint = get_cert_fingerprint(pubkey) + return hexlify(pubkey), fingerprint + + +def get_cert_fingerprint(pubkey): + """Generate a certificate fingerprint the same way keytool does it (but with slightly different formatting).""" + digest = hashlib.sha256(pubkey).digest() + ret = [' '.join("%02X" % b for b in bytearray(digest))] + return " ".join(ret) + + +def get_certificate(signature_block_file, signature_file=None): + """Extract a single DER certificate from JAR Signature's "Signature Block File". + + If there is more than one signer certificate, this exits with an + error, unless the signature_file is provided. If that is set, it + will return the certificate that matches the Signature File, for + example, if there is a certificate chain, like TLS does. In the + fdroidserver use cases, there should always be a single signer. + But rarely, some APKs include certificate chains. + + This could be replaced by androguard's APK.get_certificate_der() + provided the cert chain fix was merged there. Maybe in 4.1.2? + https://github.com/androguard/androguard/pull/1038 + + https://docs.oracle.com/en/java/javase/21/docs/specs/man/jarsigner.html#the-signed-jar-file + + Parameters + ---------- + signature_block_file + Bytes representing the PKCS#7 signer certificate and + signature, as read directly out of the JAR/APK, e.g. CERT.RSA. + + signature_file + Bytes representing the manifest signed by the Signature Block + File, e.g. CERT.SF. 
If this is not given, the assumption is + there will be only a single certificate in + signature_block_file, otherwise it is an error. + + Returns + ------- + A binary representation of the certificate's public key, + or None in case of error + + """ + pkcs7obj = cms.ContentInfo.load(signature_block_file) + certificates = pkcs7obj['content']['certificates'] + if len(certificates) == 1: + return certificates[0].chosen.dump() + elif not signature_file: + logging.error(_('Found multiple Signer Certificates!')) + return + certificate = get_jar_signer_certificate(pkcs7obj, signature_file) + if certificate: + return certificate.chosen.dump() + + +def _find_matching_certificate(signer_info, certificate): + """Find the certificates that matches signer_info using issuer and serial number. + + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#590 + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/x509/Certificate.java#55 + + """ + certificate_serial = certificate.chosen['tbs_certificate']['serial_number'] + expected_issuer_serial = signer_info['sid'].chosen + return ( + expected_issuer_serial['issuer'] == certificate.chosen.issuer + and expected_issuer_serial['serial_number'] == certificate_serial + ) + + +def get_jar_signer_certificate(pkcs7obj: cms.ContentInfo, signature_file: bytes): + """Return the one certificate in a chain that actually signed the manifest. + + PKCS#7-signed data can include certificate chains for use cases + where an Certificate Authority (CA) is used. Android does not + validate the certificate chain on APK signatures, so neither does + this. + https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#512 + + Some useful fodder for understanding all this: + https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html + https://technotes.shemyak.com/posts/jar-signature-block-file-format/ + https://docs.oracle.com/en/java/javase/21/docs/specs/man/jarsigner.html#the-signed-jar-file + https://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html + + """ + import oscrypto.asymmetric + import oscrypto.errors + + # Android attempts to verify all SignerInfos and then picks the first verified SignerInfo. 
+ first_verified_signer_info = None + first_verified_signer_info_signing_certificate = None + for signer_info in pkcs7obj['content']['signer_infos']: + signature = signer_info['signature'].contents + digest_algorithm = signer_info["digest_algorithm"]["algorithm"].native + public_key = None + for certificate in pkcs7obj['content']['certificates']: + if _find_matching_certificate(signer_info, certificate): + public_key = oscrypto.asymmetric.load_public_key(certificate.chosen.public_key) + break + if public_key is None: + logging.info('No certificate found that matches signer info!') + continue + + signature_algo = signer_info['signature_algorithm'].signature_algo + if signature_algo == 'rsassa_pkcs1v15': + # ASN.1 - 1.2.840.113549.1.1.1 + verify_func = oscrypto.asymmetric.rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + # ASN.1 - 1.2.840.113549.1.1.10 + verify_func = oscrypto.asymmetric.rsa_pss_verify + elif signature_algo == 'dsa': + # ASN.1 - 1.2.840.10040.4.1 + verify_func = oscrypto.asymmetric.dsa_verify + elif signature_algo == 'ecdsa': + # ASN.1 - 1.2.840.10045.4 + verify_func = oscrypto.asymmetric.ecdsa_verify + else: + logging.error( + 'Unknown signature algorithm %s:\n %s\n %s' + % ( + signature_algo, + hexlify(certificate.chosen.sha256).decode(), + certificate.chosen.subject.human_friendly, + ), + ) + return + + try: + verify_func(public_key, signature, signature_file, digest_algorithm) + if not first_verified_signer_info: + first_verified_signer_info = signer_info + first_verified_signer_info_signing_certificate = certificate + + except oscrypto.errors.SignatureError as e: + logging.error( + '"%s", skipping:\n %s\n %s' % ( + e, + hexlify(certificate.chosen.sha256).decode(), + certificate.chosen.subject.human_friendly), + ) + + if first_verified_signer_info_signing_certificate: + return first_verified_signer_info_signing_certificate + + +def load_publish_signer_fingerprints(): + """Load signing-key fingerprints stored in file generated by fdroid publish. + + Returns + ------- + dict + containing the signing-key fingerprints. + """ + jar_file = os.path.join('repo', 'signer-index.jar') + if not os.path.isfile(jar_file): + return {} + try: + verify_deprecated_jar_signature(jar_file) + except VerificationException as e: + raise FDroidException("Signature validation of '{}' failed! " + "Please run publish again to rebuild this file.".format(jar_file)) from e + + jar_sigkey = apk_signer_fingerprint(jar_file) + repo_key_sig = config.get('repo_key_sha256') + if repo_key_sig: + if jar_sigkey != repo_key_sig: + raise FDroidException("Signature key fingerprint of file '{}' does not match repo_key_sha256 in config.yml (found fingerprint: '{}')".format(jar_file, jar_sigkey)) + else: + logging.warning("repo_key_sha256 not in config.yml, setting it to the signature key fingerprint of '{}'".format(jar_file)) + config['repo_key_sha256'] = jar_sigkey + write_to_config(config, 'repo_key_sha256') + + with zipfile.ZipFile(jar_file, 'r') as f: + return json.loads(str(f.read('signer-index.json'), 'utf-8')) + + +def write_config_file(config): + """Write the provided string to config.yml with the right path and encoding.""" + Path(CONFIG_FILE).write_text(config, encoding='utf-8') + + +def write_to_config(thisconfig, key, value=None): + """Write a key/value to the local config.yml. + + The config.yml is defined as YAML 1.2 in UTF-8 encoding on all + platforms. + + NOTE: only supports writing string variables. 
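A minimal write_to_config() sketch (hypothetical key and value; only string values are supported):

# Replaces an existing (possibly commented-out) `repo_name:` line in
# config.yml, or appends one if the key is not present yet.
write_to_config(config, 'repo_name', value='My First F-Droid Repo Demo')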
+ + Parameters + ---------- + thisconfig + config dictionary + key + variable name in config to be overwritten/added + value + optional value to be written, instead of fetched + from 'thisconfig' dictionary. + + """ + if value is None: + origkey = key + '_orig' + value = thisconfig[origkey] if origkey in thisconfig else thisconfig[key] + + # load config file, create one if it doesn't exist + if not os.path.exists(CONFIG_FILE): + write_config_file('') + logging.info(_("Creating empty {config_file}").format(config_file=CONFIG_FILE)) + with open(CONFIG_FILE) as fp: + lines = fp.readlines() + + # make sure the file ends with a carraige return + if len(lines) > 0: + if not lines[-1].endswith('\n'): + lines[-1] += '\n' + + pattern = re.compile(r'^[\s#]*' + key + r':.*\n') + repl = config_dump({key: value}) + + # If we replaced this line once, we make sure won't be a + # second instance of this line for this key in the document. + didRepl = False + # edit config file + with open(CONFIG_FILE, 'w', encoding='utf-8') as f: + for line in lines: + if pattern.match(line): + if not didRepl: + line = pattern.sub(repl, line) + f.write(line) + didRepl = True + else: + f.write(line) + if not didRepl: + f.write(repl) + + +def parse_xml(path): + return XMLElementTree.parse(path).getroot() + + +def string_is_integer(string): + try: + int(string, 0) + return True + except ValueError: + try: + int(string) + return True + except ValueError: + return False + + +def version_code_string_to_int(vercode): + """Convert an versionCode string of any base into an int.""" + # TODO: Python 3.6 allows underscores in numeric literals + vercode = vercode.replace('_', '') + try: + return int(vercode, 0) + except ValueError: + return int(vercode) + + +def get_app_display_name(app): + """Get a human readable name for the app for logging and sorting. + + When trying to find a localized name, this first tries en-US since + that his the historical language used for sorting. + + """ + if app.get('Name'): + return app['Name'] + if app.get('localized'): + localized = app['localized'].get(DEFAULT_LOCALE) + if not localized: + for v in app['localized'].values(): + localized = v + break + if localized.get('name'): + return localized['name'] + return app.get('AutoName') or app['id'] + + +def local_rsync(options, from_paths: List[str], todir: str): + """Rsync method for local to local copying of things. + + This is an rsync wrapper with all the settings for safe use within + the various fdroidserver use cases. This uses stricter rsync + checking on all files since people using offline mode are already + prioritizing security above ease and speed. + + """ + rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms', + '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w'] + if not options.no_checksum: + rsyncargs.append('--checksum') + if options.verbose: + rsyncargs += ['--verbose'] + if options.quiet: + rsyncargs += ['--quiet'] + logging.debug(' '.join(rsyncargs + from_paths + [todir])) + if subprocess.call(rsyncargs + from_paths + [todir]) != 0: + raise FDroidException() + + +def deploy_build_log_with_rsync(appid, vercode, log_content): + """Upload build log of one individual app build to an fdroid repository. + + Parameters + ---------- + appid + package name for dientifying to which app this log belongs. + vercode + version of the app to which this build belongs. + log_content + Content of the log which is about to be posted. + Should be either a string or bytes. 
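The versionCode parser accepts any Python integer-literal base, with underscores tolerated:

assert version_code_string_to_int('10001') == 10001
assert version_code_string_to_int('0x2711') == 10001
assert version_code_string_to_int('1_000_000') == 1000000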
(bytes will + be decoded as 'utf-8') + """ + if not log_content: + logging.warning(_('skip deploying full build logs: log content is empty')) + return + + if not os.path.exists('repo'): + os.mkdir('repo') + + # gzip compress log file + log_gz_path = os.path.join('repo', + '{appid}_{versionCode}.log.gz'.format(appid=appid, + versionCode=vercode)) + + with gzip.open(log_gz_path, 'wb') as f: + if isinstance(log_content, str): + f.write(bytes(log_content, 'utf-8')) + else: + f.write(log_content) + rsync_status_file_to_repo(log_gz_path) + + +def rsync_status_file_to_repo(path, repo_subdir=None): + """Copy a build log or status JSON to the repo using rsync.""" + if not config.get('deploy_process_logs', False): + logging.debug(_('skip deploying full build logs: not enabled in config')) + return + + for d in config.get('serverwebroot', []): + webroot = d['url'] + cmd = ['rsync', + '--archive', + '--delete-after', + '--safe-links'] + if options.verbose: + cmd += ['--verbose'] + if options.quiet: + cmd += ['--quiet'] + if 'identity_file' in config: + cmd += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']] + + dest_path = os.path.join(webroot, "repo") + if repo_subdir is not None: + dest_path = os.path.join(dest_path, repo_subdir) + if not dest_path.endswith('/'): + dest_path += '/' # make sure rsync knows this is a directory + cmd += [path, dest_path] + + retcode = subprocess.call(cmd) + if retcode: + logging.error(_('process log deploy {path} to {dest} failed!') + .format(path=path, dest=webroot)) + else: + logging.debug(_('deployed process log {path} to {dest}') + .format(path=path, dest=webroot)) + + +def get_per_app_repos(): + """Per-app repos are dirs named with the packageName of a single app.""" + # Android packageNames are Java packages, they may contain uppercase or + # lowercase letters ('A' through 'Z'), numbers, and underscores + # ('_'). However, individual package name parts may only start with + # letters. 
https://developer.android.com/guide/topics/manifest/manifest-element.html#package + p = re.compile('^([a-zA-Z][a-zA-Z0-9_]*(\\.[a-zA-Z][a-zA-Z0-9_]*)*)?$') + + repos = [] + for root, dirs, files in os.walk(os.getcwd()): + for d in dirs: + print('checking', root, 'for', d) + if d in ('archive', 'metadata', 'repo', 'srclibs', 'tmp'): + # standard parts of an fdroid repo, so never packageNames + continue + elif p.match(d) \ + and os.path.exists(os.path.join(d, 'fdroid', 'repo', 'index.jar')): + repos.append(d) + break + return repos + + +# list of index files that are never gpg-signed +NO_GPG_INDEX_FILES = [ + "entry.jar", + "index-v1.jar", + "index.css", + "index.html", + "index.jar", + "index.png", + "index.xml", + "signer-index.jar", +] + +# list of index files that are signed by gpgsign.py to make a .asc file +GPG_INDEX_FILES = [ + "altstore-index.json", + "entry.json", + "index-v1.json", + "index-v2.json", + "signer-index.json", +] + + +INDEX_FILES = sorted( + NO_GPG_INDEX_FILES + GPG_INDEX_FILES + [i + '.asc' for i in GPG_INDEX_FILES] +) + + +def is_repo_file(filename, for_gpg_signing=False): + """Whether the file in a repo is a build product to be delivered to users.""" + if isinstance(filename, str): + filename = filename.encode('utf-8', errors="surrogateescape") + ignore_files = [i.encode() for i in NO_GPG_INDEX_FILES] + ignore_files.append(b'index_unsigned.jar') + if not for_gpg_signing: + ignore_files += [i.encode() for i in GPG_INDEX_FILES] + + return ( + os.path.isfile(filename) + and not filename.endswith(b'.asc') + and not filename.endswith(b'.sig') + and not filename.endswith(b'.idsig') + and not filename.endswith(b'.log.gz') + and os.path.basename(filename) not in ignore_files + ) + + +def get_examples_dir(): + """Return the dir where the fdroidserver example files are available.""" + examplesdir = None + tmp = os.path.dirname(sys.argv[0]) + if os.path.basename(tmp) == 'bin': + egg_links = glob.glob(os.path.join(tmp, '..', + 'local/lib/python3.*/site-packages/fdroidserver.egg-link')) + if egg_links: + # installed from local git repo + examplesdir = os.path.join(open(egg_links[0]).readline().rstrip(), 'examples') + else: + # try .egg layout + examplesdir = os.path.dirname(os.path.dirname(__file__)) + '/share/doc/fdroidserver/examples' + if not os.path.exists(examplesdir): # use UNIX layout + examplesdir = os.path.dirname(tmp) + '/share/doc/fdroidserver/examples' + else: + # we're running straight out of the git repo + prefix = os.path.normpath(os.path.join(os.path.dirname(__file__), '..')) + examplesdir = prefix + '/examples' + + return examplesdir + + +def get_android_tools_versions(): + """Get a list of the versions of all installed Android SDK/NDK components.""" + global config + sdk_path = config['sdk_path'] + if sdk_path[-1] != '/': + sdk_path += '/' + components = set() + for ndk_path in config.get('ndk_paths', {}).values(): + version = get_ndk_version(ndk_path) + components.add((os.path.relpath(ndk_path, sdk_path), str(version))) + + pattern = re.compile(r'^Pkg.Revision *= *(.+)', re.MULTILINE) + for root, dirs, files in os.walk(sdk_path): + if 'source.properties' in files: + source_properties = os.path.join(root, 'source.properties') + with open(source_properties, 'r') as fp: + m = pattern.search(fp.read()) + if m: + components.add((os.path.relpath(root, sdk_path), m.group(1))) + + return sorted(components) + + +def get_android_tools_version_log(): + """Get a list of the versions of all installed Android SDK/NDK components.""" + log = '== Installed Android Tools 
==\n\n' + components = get_android_tools_versions() + for name, version in sorted(components): + log += '* ' + name + ' (' + version + ')\n' + + return log + + +def calculate_math_string(expr): + ops = { + ast.Add: operator.add, + ast.Mult: operator.mul, + ast.Sub: operator.sub, + ast.USub: operator.neg, + ast.Pow: operator.pow, + } + + def execute_ast(node): + if isinstance(node, ast.Num): # + return node.n + elif isinstance(node, ast.BinOp): # + return ops[type(node.op)](execute_ast(node.left), + execute_ast(node.right)) + elif isinstance(node, ast.UnaryOp): # e.g., -1 + return ops[type(node.op)](ast.literal_eval(node.operand)) + else: + raise SyntaxError(node) + + try: + if '#' in expr: + raise SyntaxError('no comments allowed') + return execute_ast(ast.parse(expr, mode='eval').body) + except SyntaxError as exc: + raise SyntaxError("could not parse expression '{expr}', " + "only basic math operations are allowed (+, -, *)" + .format(expr=expr)) from exc + + +def force_exit(exitvalue=0): + """Force exit when thread operations could block the exit. + + The build command has to use some threading stuff to handle the + timeout and locks. This seems to prevent the command from + exiting, unless this hack is used. + + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(exitvalue) + + +YAML_LINT_CONFIG = {'extends': 'default', + 'rules': {'document-start': 'disable', + 'line-length': 'disable', + 'truthy': 'disable'}} + + +def run_yamllint(path, indent=0): + path = Path(path) + try: + import yamllint.config + import yamllint.linter + except ImportError: + return '' + + result = [] + with path.open('r', encoding='utf-8') as f: + problems = yamllint.linter.run(f, yamllint.config.YamlLintConfig(json.dumps(YAML_LINT_CONFIG))) + for problem in problems: + result.append(' ' * indent + str(path) + ':' + str(problem.line) + ': ' + problem.message) + return '\n'.join(result) + + +def calculate_IPFS_cid(filename): + """Calculate the IPFS CID of a file and add it to the index. + + uses ipfs_cid package at https://packages.debian.org/sid/ipfs-cid + Returns CIDv1 of a file as per IPFS recommendation + """ + cmd = config and config.get('ipfs_cid') + if not cmd: + return + file_cid = subprocess.run([cmd, filename], capture_output=True) + + if file_cid.returncode == 0: + cid_output = file_cid.stdout.decode() + cid_output_dict = json.loads(cid_output) + return cid_output_dict['CIDv1'] + + +def sha256sum(filename): + """Calculate the sha256 of the given file.""" + sha = hashlib.sha256() + with open(filename, 'rb') as f: + while True: + t = f.read(16384) + if len(t) == 0: + break + sha.update(t) + return sha.hexdigest() + + +def sha256base64(filename): + """Calculate the sha256 of the given file as URL-safe base64.""" + hasher = hashlib.sha256() + with open(filename, 'rb') as f: + while True: + t = f.read(16384) + if len(t) == 0: + break + hasher.update(t) + return urlsafe_b64encode(hasher.digest()).decode() + + +def get_ndk_version(ndk_path): + """Get the version info from the metadata in the NDK package. + + Since r11, the info is nice and easy to find in + sources.properties. Before, there was a kludgey format in + RELEASE.txt. This is only needed for r10e. 
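As a concrete illustration (file contents and paths are examples), the relevant part of an NDK's source.properties looks like this, and the Pkg.Revision match is what this function returns:

# source.properties excerpt (example values):
#   Pkg.Desc = Android NDK
#   Pkg.Revision = 21.4.7075529
get_ndk_version('/opt/android-sdk/ndk/21.4.7075529')  # -> '21.4.7075529'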
+ + """ + source_properties = os.path.join(ndk_path, 'source.properties') + release_txt = os.path.join(ndk_path, 'RELEASE.TXT') + if os.path.exists(source_properties): + with open(source_properties) as fp: + m = re.search(r'^Pkg.Revision *= *(.+)', fp.read(), flags=re.MULTILINE) + if m: + return m.group(1) + elif os.path.exists(release_txt): + with open(release_txt) as fp: + return fp.read().split('-')[0] + + +def auto_install_ndk(build): + """Auto-install the NDK in the build, this assumes its in a buildserver guest VM. + + Download, verify, and install the NDK version as specified via the + "ndk:" field in the build entry. As it uncompresses the zipball, + this forces the permissions to work for all users, since this + might uncompress as root and then be used from a different user. + + This needs to be able to install multiple versions of the NDK, + since this is also used in CI builds, where multiple `fdroid build + --onserver` calls can run in a single session. The production + buildserver is reset between every build. + + The default ANDROID_SDK_ROOT base dir of /opt/android-sdk is hard-coded in + buildserver/Vagrantfile. The $ANDROID_HOME/ndk subdir is where Android + Studio will install the NDK into versioned subdirs. + https://developer.android.com/studio/projects/configure-agp-ndk#agp_version_41 + + Also, r10e and older cannot be handled via this mechanism because + they are packaged differently. + + """ + import sdkmanager + + global config + if build.get('disable'): + return + ndk = build.get('ndk') + if not ndk: + return + if isinstance(ndk, str): + sdkmanager.build_package_list(use_net=True) + _install_ndk(ndk) + elif isinstance(ndk, list): + sdkmanager.build_package_list(use_net=True) + for n in ndk: + _install_ndk(n) + else: + raise BuildException(_('Invalid ndk: entry in build: "{ndk}"') + .format(ndk=str(ndk))) + + +def _install_ndk(ndk): + """Install specified NDK if it is not already installed. + + Parameters + ---------- + ndk + The NDK version to install, either in "release" form (r21e) or + "revision" form (21.4.7075529). 
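For example, a rough sketch of how the ndk: value from a build entry ends up in config['ndk_paths'] (a plain dict stands in for the build metadata here; paths and versions are illustrative):

build = {'ndk': 'r21e'}  # release form; a list of versions also works
auto_install_ndk(build)
# the installed NDK is then reachable under both the requested name and the
# detected revision, roughly:
#   config['ndk_paths'] == {'r21e': '/opt/android-sdk/ndk/21.4.7075529',
#                           '21.4.7075529': '/opt/android-sdk/ndk/21.4.7075529'}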
+ """ + import sdkmanager + + sdk_path = config['sdk_path'] + sdkmanager.install(f'ndk;{ndk}', sdk_path) + for found in glob.glob(f'{sdk_path}/ndk/*'): + version = get_ndk_version(found) + if 'ndk_paths' not in config: + config['ndk_paths'] = dict() + config['ndk_paths'][ndk] = found + config['ndk_paths'][version] = found + logging.info( + _('Set NDK {release} ({version}) up').format(release=ndk, version=version) + ) + + +def calculate_archive_policy(app, default): + """Calculate the archive policy from the metadata and default config.""" + if app.get('ArchivePolicy') is not None: + archive_policy = app['ArchivePolicy'] + else: + archive_policy = default + if app.get('VercodeOperation'): + archive_policy *= len(app['VercodeOperation']) + builds = [build for build in app.Builds if not build.disable] + if app.Builds and archive_policy > len(builds): + archive_policy = len(builds) + return archive_policy + + +def calculate_gradle_flavor_combination(flavors): + """Calculate all combinations of gradle flavors.""" + combination_lists = itertools.product(*[[flavor, ''] for flavor in flavors]) + combinations = [ + re.sub( + r' +\w', + lambda pat: pat.group(0)[-1].upper(), + ' '.join(combination_list).strip(), + ) + for combination_list in combination_lists + ] + return combinations + + +FDROIDORG_MIRRORS = [ + { + 'isPrimary': True, + 'url': 'https://f-droid.org/repo', + 'dnsA': ['65.21.79.229', '136.243.44.143'], + 'dnsAAAA': ['2a01:4f8:212:c98::2', '2a01:4f9:3b:546d::2'], + 'worksWithoutSNI': True, + }, + { + 'url': 'http://fdroidorg6cooksyluodepej4erfctzk7rrjpjbbr6wx24jh3lqyfwyd.onion/fdroid/repo' + }, + { + 'url': 'http://dotsrccccbidkzg7oc7oj4ugxrlfbt64qebyunxbrgqhxiwj3nl6vcad.onion/fdroid/repo' + }, + { + 'url': 'http://ftpfaudev4triw2vxiwzf4334e3mynz7osqgtozhbc77fixncqzbyoyd.onion/fdroid/repo' + }, + { + 'url': 'http://lysator7eknrfl47rlyxvgeamrv7ucefgrrlhk7rouv3sna25asetwid.onion/pub/fdroid/repo' + }, + { + 'url': 'http://mirror.ossplanetnyou5xifr6liw5vhzwc2g2fmmlohza25wwgnnaw65ytfsad.onion/fdroid/repo' + }, + {'url': 'https://fdroid.tetaneutral.net/fdroid/repo', 'countryCode': 'FR'}, + { + 'url': 'https://ftp.agdsn.de/fdroid/repo', + 'countryCode': 'DE', + "dnsA": ["141.30.235.39"], + "dnsAAAA": ["2a13:dd85:b00:12::1"], + "worksWithoutSNI": True, + }, + { + 'url': 'https://ftp.fau.de/fdroid/repo', + 'countryCode': 'DE', + "dnsA": ["131.188.12.211"], + "dnsAAAA": ["2001:638:a000:1021:21::1"], + "worksWithoutSNI": True, + }, + {'url': 'https://ftp.gwdg.de/pub/android/fdroid/repo', 'countryCode': 'DE'}, + { + 'url': 'https://ftp.lysator.liu.se/pub/fdroid/repo', + 'countryCode': 'SE', + "dnsA": ["130.236.254.251", "130.236.254.253"], + "dnsAAAA": ["2001:6b0:17:f0a0::fb", "2001:6b0:17:f0a0::fd"], + "worksWithoutSNI": True, + }, + {'url': 'https://mirror.cyberbits.eu/fdroid/repo', 'countryCode': 'FR'}, + { + 'url': 'https://mirror.fcix.net/fdroid/repo', + 'countryCode': 'US', + "dnsA": ["23.152.160.16"], + "dnsAAAA": ["2620:13b:0:1000::16"], + "worksWithoutSNI": True, + }, + {'url': 'https://mirror.kumi.systems/fdroid/repo', 'countryCode': 'AT'}, + {'url': 'https://mirror.level66.network/fdroid/repo', 'countryCode': 'DE'}, + {'url': 'https://mirror.ossplanet.net/fdroid/repo', 'countryCode': 'TW'}, + {'url': 'https://mirrors.dotsrc.org/fdroid/repo', 'countryCode': 'DK'}, + {'url': 'https://opencolo.mm.fcix.net/fdroid/repo', 'countryCode': 'US'}, + { + 'url': 'https://plug-mirror.rcac.purdue.edu/fdroid/repo', + 'countryCode': 'US', + "dnsA": ["128.211.151.252"], + "dnsAAAA": 
["2001:18e8:804:35::1337"], + "worksWithoutSNI": True, + }, +] +FDROIDORG_FINGERPRINT = ( + '43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB' +) diff --git a/fdroidserver/deploy.py b/fdroidserver/deploy.py new file mode 100644 index 00000000..f1dcce21 --- /dev/null +++ b/fdroidserver/deploy.py @@ -0,0 +1,1177 @@ +#!/usr/bin/env python3 +# +# deploy.py - part of the FDroid server tools +# Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import configparser +import glob +import json +import logging +import os +import pathlib +import re +import shutil +import subprocess +import sys +import time +import urllib +from argparse import ArgumentParser +from typing import Dict, List + +import git +import yaml +from git import Repo + +import fdroidserver.github + +from . import _, common, index +from .exception import FDroidException + +config = None +start_timestamp = time.gmtime() + +GIT_BRANCH = 'master' + +BINARY_TRANSPARENCY_DIR = 'binary_transparency' + +REMOTE_HOSTNAME_REGEX = re.compile(r'\W*\w+\W+(\w+).*') + +EMBEDDED_RCLONE_CONF = 'rclone.conf' + + +def _get_index_file_paths(base_dir): + """Return the list of files to be synced last, since they finalize the deploy. + + The process of pushing all the new packages to the various + services can take a while. So the index files should be updated + last. That ensures that the package files are available when the + client learns about them from the new index files. + + signer-index.* are only published in the repo/ section. + + """ + return [ + os.path.join(base_dir, filename) + for filename in common.INDEX_FILES + if not (filename.startswith('signer-index.') and base_dir.endswith('archive')) + ] + + +def _get_index_excludes(base_dir): + indexes = _get_index_file_paths(base_dir) + index_excludes = [] + for f in indexes: + index_excludes.append('--exclude') + index_excludes.append(f) + return index_excludes + + +def _get_index_includes(base_dir): + indexes = _get_index_file_paths(base_dir) + index_includes = [] + for f in indexes: + index_includes.append('--include') + index_includes.append(f) + return index_includes + + +def _remove_missing_files(files: List[str]) -> List[str]: + """Remove files that are missing from the file system.""" + existing = [] + for f in files: + if os.path.exists(f): + existing.append(f) + return existing + + +def _generate_rclone_include_pattern(files): + """Generate a pattern for rclone's --include flag (https://rclone.org/filtering/).""" + return "{" + ",".join(sorted(set(files))) + "}" + + +def update_awsbucket(repo_section, is_index_only=False, verbose=False, quiet=False): + """Sync the directory `repo_section` (including subdirectories) to AWS S3 US East. + + This is a shim function for public API compatibility. 
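As a quick illustration of the _generate_rclone_include_pattern() helper above (the file list is an example): duplicates are dropped and the result is a single brace-expansion style filter string for rclone's --include flag.

files = ['entry.json', 'entry.jar', 'diff/*.*', 'entry.json']
_generate_rclone_include_pattern(files)
# -> '{diff/*.*,entry.jar,entry.json}'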
+ + Requires AWS credentials set as environment variables: + https://rclone.org/s3/#authentication + + """ + update_remote_storage_with_rclone(repo_section, is_index_only, verbose, quiet) + + +def update_remote_storage_with_rclone( + repo_section, + awsbucket, + is_index_only=False, + verbose=False, + quiet=False, + checksum=False, +): + """Sync the directory `repo_section` (including subdirectories) to configed cloud services. + + Rclone sync can send the files to any supported remote storage + service once without numerous polling. If remote storage is S3 e.g + AWS S3, Wasabi, Filebase, etc, then path will be + bucket_name/fdroid/repo where bucket_name will be an S3 bucket. If + remote storage is storage drive/sftp e.g google drive, rsync.net the + new path will be bucket_name/fdroid/repo where bucket_name will be a + folder + + See https://rclone.org/docs/#config-config-file + + rclone filtering works differently than rsync. For example, + "--include" implies "--exclude **" at the end of an rclone internal + filter list. + + If rclone.conf is in the root of the repo, then it will be preferred + over the rclone default config paths. + + """ + logging.debug(_('Using rclone to sync to "{name}"').format(name=awsbucket)) + + rclone_config = config.get('rclone_config', []) + if rclone_config and isinstance(rclone_config, str): + rclone_config = [rclone_config] + + path = config.get('path_to_custom_rclone_config') + if path: + if not os.path.exists(path): + logging.error( + _('path_to_custom_rclone_config: "{path}" does not exist!').format( + path=path + ) + ) + sys.exit(1) + configfilename = path + elif os.path.exists(EMBEDDED_RCLONE_CONF): + path = EMBEDDED_RCLONE_CONF # in this case, only for display + configfilename = EMBEDDED_RCLONE_CONF + if not rclone_config: + raise FDroidException(_("'rclone_config' must be set in config.yml!")) + else: + configfilename = None + output = subprocess.check_output(['rclone', 'config', 'file'], text=True) + default_config_path = output.split('\n')[-2] + if os.path.exists(default_config_path): + path = default_config_path + if path: + logging.info(_('Using "{path}" for rclone config.').format(path=path)) + + upload_dir = 'fdroid/' + repo_section + + if not rclone_config: + env = os.environ + # Check both canonical and backup names, but only tell user about canonical. 
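For instance, a minimal sketch of the no-rclone_config path (bucket name and key values are placeholders): only the two standard AWS variables and an awsbucket name are required, and a throwaway remote is generated on the fly.

os.environ['AWS_ACCESS_KEY_ID'] = '<key id>'          # placeholder
os.environ['AWS_SECRET_ACCESS_KEY'] = '<secret key>'  # placeholder
update_remote_storage_with_rclone('repo', 'example-bucket')
# writes a one-off "AWS-S3-US-East-1" remote to .fdroid-deploy-rclone.conf and
# syncs the repo to example-bucket/fdroid/repo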
+ if not env.get("AWS_SECRET_ACCESS_KEY") and not env.get("AWS_SECRET_KEY"): + raise FDroidException( + _( + """"AWS_SECRET_ACCESS_KEY" must be set as an environmental variable!""" + ) + ) + if not env.get("AWS_ACCESS_KEY_ID") and not env.get('AWS_ACCESS_KEY'): + raise FDroidException( + _(""""AWS_ACCESS_KEY_ID" must be set as an environmental variable!""") + ) + + default_remote = "AWS-S3-US-East-1" + env_rclone_config = configparser.ConfigParser() + env_rclone_config.add_section(default_remote) + env_rclone_config.set( + default_remote, + '; = This file is auto-generated by fdroid deploy, do not edit!', + '', + ) + env_rclone_config.set(default_remote, "type", "s3") + env_rclone_config.set(default_remote, "provider", "AWS") + env_rclone_config.set(default_remote, "region", "us-east-1") + env_rclone_config.set(default_remote, "env_auth", "true") + + configfilename = ".fdroid-deploy-rclone.conf" + with open(configfilename, "w", encoding="utf-8") as autoconfigfile: + env_rclone_config.write(autoconfigfile) + rclone_config = [default_remote] + + rclone_sync_command = ['rclone', 'sync', '--delete-after'] + if configfilename: + rclone_sync_command += ['--config', configfilename] + + if checksum: + rclone_sync_command.append('--checksum') + + if verbose: + rclone_sync_command += ['--verbose'] + elif quiet: + rclone_sync_command += ['--quiet'] + + # TODO copying update_serverwebroot rsync algo + for remote_config in rclone_config: + complete_remote_path = f'{remote_config}:{awsbucket}/{upload_dir}' + logging.info(f'rclone sync to {complete_remote_path}') + if is_index_only: + index_only_files = common.INDEX_FILES + ['diff/*.*'] + include_pattern = _generate_rclone_include_pattern(index_only_files) + cmd = rclone_sync_command + [ + '--include', + include_pattern, + '--delete-excluded', + repo_section, + complete_remote_path, + ] + logging.info(cmd) + if subprocess.call(cmd) != 0: + raise FDroidException() + else: + cmd = ( + rclone_sync_command + + _get_index_excludes(repo_section) + + [ + repo_section, + complete_remote_path, + ] + ) + if subprocess.call(cmd) != 0: + raise FDroidException() + cmd = rclone_sync_command + [ + repo_section, + complete_remote_path, + ] + if subprocess.call(cmd) != 0: + raise FDroidException() + + +def update_serverwebroot(serverwebroot, repo_section): + """Deploy the index files to the serverwebroot using rsync. + + Upload the first time without the index files and delay the + deletion as much as possible. That keeps the repo functional + while this update is running. Then once it is complete, rerun the + command again to upload the index files. Always using the same + target with rsync allows for very strict settings on the receiving + server, you can literally specify the one rsync command that is + allowed to run in ~/.ssh/authorized_keys. 
(serverwebroot is + guaranteed to have a trailing slash in common.py) + + It is possible to optionally use a checksum comparison for + accurate comparisons on different filesystems, for example, FAT + has a low resolution timestamp + + """ + try: + subprocess.run(['rsync', '--version'], capture_output=True, check=True) + except Exception as e: + raise FDroidException( + _('rsync is missing or broken: {error}').format(error=e) + ) from e + rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links'] + options = common.get_options() + if not options or not options.no_checksum: + rsyncargs.append('--checksum') + if options and options.verbose: + rsyncargs += ['--verbose'] + if options and options.quiet: + rsyncargs += ['--quiet'] + if options and options.identity_file: + rsyncargs += [ + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file, + ] + elif config and config.get('identity_file'): + rsyncargs += [ + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file'], + ] + url = serverwebroot['url'] + is_index_only = serverwebroot.get('index_only', False) + logging.info('rsyncing ' + repo_section + ' to ' + url) + if is_index_only: + files_to_upload = _get_index_file_paths(repo_section) + files_to_upload = _remove_missing_files(files_to_upload) + + rsyncargs += files_to_upload + rsyncargs += [f'{url}/{repo_section}/'] + logging.info(rsyncargs) + if subprocess.call(rsyncargs) != 0: + raise FDroidException() + else: + excludes = _get_index_excludes(repo_section) + if subprocess.call(rsyncargs + excludes + [repo_section, url]) != 0: + raise FDroidException() + if subprocess.call(rsyncargs + [repo_section, url]) != 0: + raise FDroidException() + # upload "current version" symlinks if requested + if ( + config + and config.get('make_current_version_link') + and repo_section == 'repo' + ): + links_to_upload = [] + for f in ( + glob.glob('*.apk') + glob.glob('*.apk.asc') + glob.glob('*.apk.sig') + ): + if os.path.islink(f): + links_to_upload.append(f) + if len(links_to_upload) > 0: + if subprocess.call(rsyncargs + links_to_upload + [url]) != 0: + raise FDroidException() + + +def update_serverwebroots(serverwebroots, repo_section, standardwebroot=True): + for d in serverwebroots: + # this supports both an ssh host:path and just a path + serverwebroot = d['url'] + s = serverwebroot.rstrip('/').split(':') + if len(s) == 1: + fdroiddir = s[0] + elif len(s) == 2: + host, fdroiddir = s + else: + logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot) + sys.exit(1) + repobase = os.path.basename(fdroiddir) + if standardwebroot and repobase != 'fdroid': + logging.error( + _( + 'serverwebroot: path does not end with "fdroid", perhaps you meant one of these:' + ) + + '\n\t' + + serverwebroot.rstrip('/') + + '/fdroid\n\t' + + serverwebroot.rstrip('/').rstrip(repobase) + + 'fdroid' + ) + sys.exit(1) + update_serverwebroot(d, repo_section) + + +def sync_from_localcopy(repo_section, local_copy_dir): + """Sync the repo from "local copy dir" filesystem to this box. + + In setups that use offline signing, this is the last step that + syncs the repo from the "local copy dir" e.g. a thumb drive to the + repo on the local filesystem. That local repo is then used to + push to all the servers that are configured. 
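As a sketch of the configuration this expects (hosts and paths are examples), each serverwebroot entry is a dict whose path ends in fdroid, and index_only entries receive only the index files:

config['serverwebroot'] = [
    {'url': 'user@example.com:/var/www/fdroid'},                          # full mirror
    {'url': 'mirror@other.example.org:/srv/fdroid', 'index_only': True},  # index files only
]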
+ + """ + logging.info('Syncing from local_copy_dir to this repo.') + + # trailing slashes have a meaning in rsync which is not needed here, so + # make sure both paths have exactly one trailing slash + common.local_rsync( + common.get_options(), + [os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'], + repo_section.rstrip('/') + '/', + ) + + offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR) + if os.path.exists(os.path.join(offline_copy, '.git')): + online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR) + push_binary_transparency(offline_copy, online_copy) + + +def update_localcopy(repo_section, local_copy_dir): + """Copy data from offline to the "local copy dir" filesystem. + + This updates the copy of this repo used to shuttle data from an + offline signing machine to the online machine, e.g. on a thumb + drive. + + """ + # local_copy_dir is guaranteed to have a trailing slash in main() below + common.local_rsync(common.get_options(), [repo_section], local_copy_dir) + + offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR) + if os.path.isdir(os.path.join(offline_copy, '.git')): + online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR) + push_binary_transparency(offline_copy, online_copy) + + +def _get_size(start_path='.'): + """Get size of all files in a dir https://stackoverflow.com/a/1392549.""" + total_size = 0 + for root, dirs, files in os.walk(start_path): + for f in files: + fp = os.path.join(root, f) + total_size += os.path.getsize(fp) + return total_size + + +def update_servergitmirrors(servergitmirrors, repo_section): + """Update repo mirrors stored in git repos. + + This is a hack to use public git repos as F-Droid repos. It + recreates the git repo from scratch each time, so that there is no + history. That keeps the size of the git repo small. Services + like GitHub or GitLab have a size limit of something like 1 gig. + This git repo is only a git repo for the purpose of being hosted. + For history, there is the archive section, and there is the binary + transparency log. + + This will attempt to use the existing remote branch so that it does + not have to push all of the files in the repo each time. Old setups + or runs of `fdroid nightly` might use the "master" branch. For the + "index only" mode, it will recreate the branch from scratch each + time since usually all the files are changed. In any case, the + index files are small compared to the full repo. + + """ + from clint.textui import progress + + if config.get('local_copy_dir') and not config.get('sync_from_local_copy_dir'): + logging.debug( + _('Offline machine, skipping git mirror generation until `fdroid deploy`') + ) + return + + options = common.get_options() + workspace_dir = pathlib.Path(os.getcwd()) + + # right now we support only 'repo' git-mirroring + if repo_section == 'repo': + git_mirror_path = workspace_dir / 'git-mirror' + dotgit = os.path.join(git_mirror_path, '.git') + git_fdroiddir = os.path.join(git_mirror_path, 'fdroid') + git_repodir = os.path.join(git_fdroiddir, repo_section) + if not os.path.isdir(git_repodir): + os.makedirs(git_repodir) + # github/gitlab use bare git repos, so only count the .git folder + # test: generate giant APKs by including AndroidManifest.xml and and large + # file from /dev/urandom, then sign it. Then add those to the git repo. 
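For example (URLs and the limit are placeholders), the loaded config values this code acts on look roughly like this; the size limit is compared against the size of the .git dir in bytes just below:

config['servergitmirrors'] = [
    {'url': 'git@gitlab.com:example/fdroid-mirror.git'},                     # full repo
    {'url': 'git@github.com:example/fdroid-index.git', 'index_only': True},  # index only
]
config['git_mirror_size_limit'] = 10_000_000_000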
+ dotgit_size = _get_size(dotgit) + dotgit_over_limit = dotgit_size > config['git_mirror_size_limit'] + if os.path.isdir(dotgit) and dotgit_over_limit: + logging.warning( + _( + 'Deleting git-mirror history, repo is too big ({size} max {limit})' + ).format(size=dotgit_size, limit=config['git_mirror_size_limit']) + ) + shutil.rmtree(dotgit) + if options.no_keep_git_mirror_archive and dotgit_over_limit: + logging.warning( + _('Deleting archive, repo is too big ({size} max {limit})').format( + size=dotgit_size, limit=config['git_mirror_size_limit'] + ) + ) + archive_path = os.path.join(git_mirror_path, 'fdroid', 'archive') + shutil.rmtree(archive_path, ignore_errors=True) + + # use custom SSH command if identity_file specified + ssh_cmd = 'ssh -oBatchMode=yes' + if options.identity_file is not None: + ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file + elif 'identity_file' in config: + ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file'] + + if options.verbose: + progressbar = progress.Bar() + + class MyProgressPrinter(git.RemoteProgress): + def update(self, op_code, current, maximum=None, message=None): + if isinstance(maximum, float): + progressbar.show(current, maximum) + + progress = MyProgressPrinter() + else: + progress = None + + repo = git.Repo.init(git_mirror_path, initial_branch=GIT_BRANCH) + + enabled_remotes = [] + for d in servergitmirrors: + is_index_only = d.get('index_only', False) + + # Use a separate branch for the index only mode as it needs a different set of files to commit + if is_index_only: + local_branch_name = 'index_only' + else: + local_branch_name = GIT_BRANCH + if local_branch_name in repo.heads: + repo.git.switch(local_branch_name) + else: + repo.git.switch('--orphan', local_branch_name) + + # trailing slashes have a meaning in rsync which is not needed here, so + # make sure both paths have exactly one trailing slash + if is_index_only: + files_to_sync = _get_index_file_paths(str(workspace_dir / repo_section)) + files_to_sync = _remove_missing_files(files_to_sync) + else: + files_to_sync = [str(workspace_dir / repo_section).rstrip('/') + '/'] + common.local_rsync( + common.get_options(), files_to_sync, git_repodir.rstrip('/') + '/' + ) + + upload_to_servergitmirror( + mirror_config=d, + local_repo=repo, + enabled_remotes=enabled_remotes, + repo_section=repo_section, + is_index_only=is_index_only, + fdroid_dir=git_fdroiddir, + git_mirror_path=str(git_mirror_path), + ssh_cmd=ssh_cmd, + progress=progress, + ) + if progress: + progressbar.done() + + +def upload_to_servergitmirror( + mirror_config: Dict[str, str], + local_repo: Repo, + enabled_remotes: List[str], + repo_section: str, + is_index_only: bool, + fdroid_dir: str, + git_mirror_path: str, + ssh_cmd: str, + progress: git.RemoteProgress, +) -> None: + remote_branch_name = GIT_BRANCH + local_branch_name = local_repo.active_branch.name + + remote_url = mirror_config['url'] + name = REMOTE_HOSTNAME_REGEX.sub(r'\1', remote_url) + enabled_remotes.append(name) + r = git.remote.Remote(local_repo, name) + if r in local_repo.remotes: + r = local_repo.remote(name) + if 'set_url' in dir(r): # force remote URL if using GitPython 2.x + r.set_url(remote_url) + else: + local_repo.create_remote(name, remote_url) + logging.info('Mirroring to: ' + remote_url) + + if is_index_only: + files_to_upload = _get_index_file_paths( + os.path.join(local_repo.working_tree_dir, 'fdroid', repo_section) + ) + files_to_upload = _remove_missing_files(files_to_upload) + local_repo.index.add(files_to_upload) + 
else: + # sadly index.add don't allow the --all parameter + logging.debug('Adding all files to git mirror') + local_repo.git.add(all=True) + + logging.debug('Committing files into git mirror') + local_repo.index.commit("fdroidserver git-mirror") + + # only deploy to GitLab Artifacts if too big for GitLab Pages + if ( + is_index_only + or common.get_dir_size(fdroid_dir) <= common.GITLAB_COM_PAGES_MAX_SIZE + ): + gitlab_ci_job_name = 'pages' + else: + gitlab_ci_job_name = 'GitLab Artifacts' + logging.warning( + _('Skipping GitLab Pages mirror because the repo is too large (>%.2fGB)!') + % (common.GITLAB_COM_PAGES_MAX_SIZE / 1000000000) + ) + + # push. This will overwrite the git history + remote = local_repo.remote(name) + if remote.name == 'gitlab': + logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages') + with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as fp: + yaml.dump( + { + gitlab_ci_job_name: { + 'script': [ + 'mkdir .public', + 'cp -r * .public/', + 'mv .public public', + ], + 'artifacts': {'paths': ['public']}, + 'variables': {'GIT_DEPTH': 1}, + } + }, + fp, + default_flow_style=False, + ) + + local_repo.index.add(['.gitlab-ci.yml']) + local_repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages") + + logging.debug(_('Pushing to {url}').format(url=remote.url)) + with local_repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd): + pushinfos = remote.push( + f"{local_branch_name}:{remote_branch_name}", + force=True, + set_upstream=True, + progress=progress, + ) + for pushinfo in pushinfos: + if pushinfo.flags & ( + git.remote.PushInfo.ERROR + | git.remote.PushInfo.REJECTED + | git.remote.PushInfo.REMOTE_FAILURE + | git.remote.PushInfo.REMOTE_REJECTED + ): + # Show potentially useful messages from git remote + if progress: + for line in progress.other_lines: + if line.startswith('remote:'): + logging.debug(line) + raise FDroidException( + remote.url + + ' push failed: ' + + str(pushinfo.flags) + + ' ' + + pushinfo.summary + ) + else: + logging.debug(remote.url + ': ' + pushinfo.summary) + + +def upload_to_android_observatory(repo_section): + import requests + + requests # stop unused import warning + + if common.get_options().verbose: + logging.getLogger("requests").setLevel(logging.INFO) + logging.getLogger("urllib3").setLevel(logging.INFO) + else: + logging.getLogger("requests").setLevel(logging.WARNING) + logging.getLogger("urllib3").setLevel(logging.WARNING) + + if repo_section == 'repo': + for f in sorted(glob.glob(os.path.join(repo_section, '*.apk'))): + upload_apk_to_android_observatory(f) + + +def upload_apk_to_android_observatory(path): + # depend on requests and lxml only if users enable AO + import requests + from lxml.html import fromstring + + from . 
import net + + apkfilename = os.path.basename(path) + r = requests.post( + 'https://androidobservatory.org/', + data={'q': common.sha256sum(path), 'searchby': 'hash'}, + headers=net.HEADERS, + timeout=300, + ) + if r.status_code == 200: + # from now on XPath will be used to retrieve the message in the HTML + # androidobservatory doesn't have a nice API to talk with + # so we must scrape the page content + tree = fromstring(r.text) + + href = None + for element in tree.xpath("//html/body/div/div/table/tbody/tr/td/a"): + a = element.attrib.get('href') + if a: + m = re.match(r'^/app/[0-9A-F]{40}$', a) + if m: + href = m.group() + + page = 'https://androidobservatory.org' + if href: + message = _('Found {apkfilename} at {url}').format( + apkfilename=apkfilename, url=(page + href) + ) + logging.debug(message) + return + + # upload the file with a post request + logging.info( + _('Uploading {apkfilename} to androidobservatory.org').format( + apkfilename=apkfilename + ) + ) + r = requests.post( + 'https://androidobservatory.org/upload', + files={'apk': (apkfilename, open(path, 'rb'))}, + headers=net.HEADERS, + allow_redirects=False, + timeout=300, + ) + + +def upload_to_virustotal(repo_section, virustotal_apikey): + import requests + + requests # stop unused import warning + + if repo_section == 'repo': + if not os.path.exists('virustotal'): + os.mkdir('virustotal') + + if os.path.exists(os.path.join(repo_section, 'index-v1.json')): + with open(os.path.join(repo_section, 'index-v1.json')) as fp: + data = json.load(fp) + else: + local_jar = os.path.join(repo_section, 'index-v1.jar') + data, _ignored, _ignored = index.get_index_from_jar(local_jar) + + for packageName, packages in data['packages'].items(): + for package in packages: + upload_apk_to_virustotal(virustotal_apikey, **package) + + +def upload_apk_to_virustotal( + virustotal_apikey, packageName, apkName, hash, versionCode, **kwargs +): + import requests + + logging.getLogger("urllib3").setLevel(logging.WARNING) + logging.getLogger("requests").setLevel(logging.WARNING) + + outputfilename = os.path.join( + 'virustotal', packageName + '_' + str(versionCode) + '_' + hash + '.json' + ) + if os.path.exists(outputfilename): + logging.debug(apkName + ' results are in ' + outputfilename) + return outputfilename + repofilename = os.path.join('repo', apkName) + logging.info('Checking if ' + repofilename + ' is on virustotal') + + headers = {"User-Agent": "F-Droid"} + if 'headers' in kwargs: + for k, v in kwargs['headers'].items(): + headers[k] = v + + apikey = { + 'apikey': virustotal_apikey, + 'resource': hash, + } + needs_file_upload = False + while True: + report_url = ( + 'https://www.virustotal.com/vtapi/v2/file/report?' 
+ + urllib.parse.urlencode(apikey) + ) + r = requests.get(report_url, headers=headers, timeout=300) + if r.status_code == 200: + response = r.json() + if response['response_code'] == 0: + needs_file_upload = True + else: + response['filename'] = apkName + response['packageName'] = packageName + response['versionCode'] = versionCode + if kwargs.get('versionName'): + response['versionName'] = kwargs.get('versionName') + with open(outputfilename, 'w') as fp: + json.dump(response, fp, indent=2, sort_keys=True) + + if response.get('positives', 0) > 0: + logging.warning( + _('{path} has been flagged by virustotal {count} times:').format( + path=repofilename, count=response['positives'] + ), + +'\n\t' + response['permalink'], + ) + break + if r.status_code == 204: + logging.warning(_('virustotal.com is rate limiting, waiting to retry...')) + time.sleep(30) # wait for public API rate limiting + + upload_url = None + if needs_file_upload: + manual_url = 'https://www.virustotal.com/' + size = os.path.getsize(repofilename) + if size > 200000000: + # VirusTotal API 200MB hard limit + logging.error( + _('{path} more than 200MB, manually upload: {url}').format( + path=repofilename, url=manual_url + ) + ) + elif size > 32000000: + # VirusTotal API requires fetching a URL to upload bigger files + query_url = ( + 'https://www.virustotal.com/vtapi/v2/file/scan/upload_url?' + + urllib.parse.urlencode(apikey) + ) + r = requests.get(query_url, headers=headers, timeout=300) + if r.status_code == 200: + upload_url = r.json().get('upload_url') + elif r.status_code == 403: + logging.error( + _( + 'VirusTotal API key cannot upload files larger than 32MB, ' + + 'use {url} to upload {path}.' + ).format(path=repofilename, url=manual_url) + ) + else: + r.raise_for_status() + else: + upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan' + + if upload_url: + logging.info( + _('Uploading {apkfilename} to virustotal').format(apkfilename=repofilename) + ) + r = requests.post( + upload_url, + data=apikey, + headers=headers, + files={'file': (apkName, open(repofilename, 'rb'))}, + timeout=300, + ) + logging.debug( + _('If this upload fails, try manually uploading to {url}').format( + url=manual_url + ) + ) + r.raise_for_status() + response = r.json() + logging.info(response['verbose_msg'] + " " + response['permalink']) + + return outputfilename + + +def push_binary_transparency(git_repo_path, git_remote): + """Push the binary transparency git repo to the specifed remote. + + If the remote is a local directory, make sure it exists, and is a + git repo. This is used to move this git repo from an offline + machine onto a flash drive, then onto the online machine. Also, + this pulls because pushing to a non-bare git repo is error prone. + + This is also used in offline signing setups, where it then also + creates a "local copy dir" git repo that serves to shuttle the git + data from the offline machine to the online machine. In that + case, git_remote is a dir on the local file system, e.g. a thumb + drive. 
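As an illustrative call (API key, package and hash are placeholders), these are the fields consumed from each index-v1.json package entry; results are cached under virustotal/ so already-checked APKs are skipped on the next run.

upload_apk_to_virustotal(
    '<virustotal api key>',
    packageName='org.example.app',
    apkName='org.example.app_7.apk',
    hash='<sha256 of the apk>',   # used as the VirusTotal "resource"
    versionCode=7,
)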
+ + """ + logging.info(_('Pushing binary transparency log to {url}').format(url=git_remote)) + + if os.path.isdir(os.path.dirname(git_remote)): + # from offline machine to thumbdrive + remote_path = os.path.abspath(git_repo_path) + if not os.path.isdir(os.path.join(git_remote, '.git')): + os.makedirs(git_remote, exist_ok=True) + thumbdriverepo = git.Repo.init(git_remote, initial_branch=GIT_BRANCH) + local = thumbdriverepo.create_remote('local', remote_path) + else: + thumbdriverepo = git.Repo(git_remote) + local = git.remote.Remote(thumbdriverepo, 'local') + if local in thumbdriverepo.remotes: + local = thumbdriverepo.remote('local') + if 'set_url' in dir(local): # force remote URL if using GitPython 2.x + local.set_url(remote_path) + else: + local = thumbdriverepo.create_remote('local', remote_path) + local.pull(GIT_BRANCH) + else: + # from online machine to remote on a server on the internet + gitrepo = git.Repo(git_repo_path) + origin = git.remote.Remote(gitrepo, 'origin') + if origin in gitrepo.remotes: + origin = gitrepo.remote('origin') + if 'set_url' in dir(origin): # added in GitPython 2.x + origin.set_url(git_remote) + else: + origin = gitrepo.create_remote('origin', git_remote) + for _i in range(3): + try: + origin.push(GIT_BRANCH) + except git.GitCommandError as e: + logging.error(e) + continue + break + else: + raise FDroidException(_("Pushing to remote server failed!")) + + +def find_release_infos(index_v2_path, repo_dir, package_names): + """Find files, texts, etc. for uploading to a release page in index-v2.json. + + This function parses index-v2.json for file-paths elegible for deployment + to release pages. (e.g. GitHub releases) It also groups these files by + packageName and versionName. e.g. to get a list of files for all specific + release of fdroid client you may call: + + find_binary_release_infos()['org.fdroid.fdroid']['0.19.2'] + + All paths in the returned data-structure are of type pathlib.Path. + """ + release_infos = {} + with open(index_v2_path, 'r') as f: + idx = json.load(f) + for package_name in package_names: + package = idx.get('packages', {}).get(package_name, {}) + for version in package.get('versions', {}).values(): + if package_name not in release_infos: + release_infos[package_name] = {} + version_name = version['manifest']['versionName'] + version_path = repo_dir / version['file']['name'].lstrip("/") + files = [version_path] + asc_path = pathlib.Path(str(version_path) + '.asc') + if asc_path.is_file(): + files.append(asc_path) + sig_path = pathlib.Path(str(version_path) + '.sig') + if sig_path.is_file(): + files.append(sig_path) + release_infos[package_name][version_name] = { + 'files': files, + 'whatsNew': version.get('whatsNew', {}).get("en-US"), + 'hasReleaseChannels': len(version.get('releaseChannels', [])) > 0, + } + return release_infos + + +def upload_to_github_releases(repo_section, gh_config, global_gh_token): + repo_dir = pathlib.Path(repo_section) + index_v2_path = repo_dir / 'index-v2.json' + if not index_v2_path.is_file(): + logging.warning( + _( + "Error deploying 'github_releases', {} not present. 
(You might " + "need to run `fdroid update` first.)" + ).format(index_v2_path) + ) + return + + package_names = [] + for repo_conf in gh_config: + for package_name in repo_conf.get('packageNames', []): + package_names.append(package_name) + + release_infos = fdroidserver.deploy.find_release_infos( + index_v2_path, repo_dir, package_names + ) + + for repo_conf in gh_config: + upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token) + + +def upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token): + projectUrl = repo_conf.get("projectUrl") + if not projectUrl: + logging.warning( + _( + "One of the 'github_releases' config items is missing the " + "'projectUrl' value. skipping ..." + ) + ) + return + token = repo_conf.get("token") or global_gh_token + if not token: + logging.warning( + _( + "One of the 'github_releases' config items is missing the " + "'token' value. skipping ..." + ) + ) + return + conf_package_names = repo_conf.get("packageNames", []) + if type(conf_package_names) == str: + conf_package_names = [conf_package_names] + if not conf_package_names: + logging.warning( + _( + "One of the 'github_releases' config items is missing the " + "'packageNames' value. skipping ..." + ) + ) + return + + # lookup all versionNames (git tags) for all packages available in the + # local fdroid repo + all_local_versions = set() + for package_name in conf_package_names: + for version in release_infos.get(package_name, {}).keys(): + all_local_versions.add(version) + + gh = fdroidserver.github.GithubApi(token, projectUrl) + unreleased_tags = gh.list_unreleased_tags() + + for version in all_local_versions: + if version in unreleased_tags: + # Making sure we're not uploading this version when releaseChannels + # is set. (releaseChannels usually mean it's e.g. 
an alpha or beta + # version) + if ( + not release_infos.get(conf_package_names[0], {}) + .get(version, {}) + .get('hasReleaseChannels') + ): + # collect files associated with this github release + files = [] + for package in conf_package_names: + files.extend( + release_infos.get(package, {}).get(version, {}).get('files', []) + ) + # always use the whatsNew text from the first app listed in + # config.yml github_releases.packageNames + text = ( + release_infos.get(conf_package_names[0], {}) + .get(version, {}) + .get('whatsNew') + or '' + ) + if 'release_notes_prepend' in repo_conf: + text = repo_conf['release_notes_prepend'] + "\n\n" + text + # create new release on github and upload all associated files + gh.create_release(version, files, text) + + +def main(): + global config + + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "-i", + "--identity-file", + default=None, + help=_("Specify an identity file to provide to SSH for rsyncing"), + ) + parser.add_argument( + "--local-copy-dir", + default=None, + help=_("Specify a local folder to sync the repo to"), + ) + parser.add_argument( + "--no-checksum", + action="store_true", + default=False, + help=_("Don't use rsync checksums"), + ) + parser.add_argument( + "--no-keep-git-mirror-archive", + action="store_true", + default=False, + help=_("If a git mirror gets to big, allow the archive to be deleted"), + ) + options = common.parse_args(parser) + config = common.read_config() + + if config.get('nonstandardwebroot') is True: + standardwebroot = False + else: + standardwebroot = True + + if options.local_copy_dir is not None: + local_copy_dir = options.local_copy_dir + elif config.get('local_copy_dir'): + local_copy_dir = config['local_copy_dir'] + else: + local_copy_dir = None + if local_copy_dir is not None: + fdroiddir = local_copy_dir.rstrip('/') + if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir): + logging.error(_('local_copy_dir must be directory, not a file!')) + sys.exit(1) + if not os.path.exists(os.path.dirname(fdroiddir)): + logging.error( + _('The root dir for local_copy_dir "{path}" does not exist!').format( + path=os.path.dirname(fdroiddir) + ) + ) + sys.exit(1) + if not os.path.isabs(fdroiddir): + logging.error(_('local_copy_dir must be an absolute path!')) + sys.exit(1) + repobase = os.path.basename(fdroiddir) + if standardwebroot and repobase != 'fdroid': + logging.error( + _( + 'local_copy_dir does not end with "fdroid", ' + + 'perhaps you meant: "{path}"' + ).format(path=fdroiddir + '/fdroid') + ) + sys.exit(1) + if local_copy_dir[-1] != '/': + local_copy_dir += '/' + local_copy_dir = local_copy_dir.replace('//', '/') + if not os.path.exists(fdroiddir): + os.mkdir(fdroiddir) + + if ( + not config.get('awsbucket') + and not config.get('serverwebroot') + and not config.get('servergitmirrors') + and not config.get('androidobservatory') + and not config.get('binary_transparency_remote') + and not config.get('virustotal_apikey') + and not config.get('github_releases') + and local_copy_dir is None + ): + logging.warning( + _('No option set! 
Edit your config.yml to set at least one of these:') + + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, ' + + 'virustotal_apikey, androidobservatory, github_releases ' + + 'or binary_transparency_remote' + ) + sys.exit(1) + + repo_sections = ['repo'] + if config['archive_older'] != 0: + repo_sections.append('archive') + if not os.path.exists('archive'): + os.mkdir('archive') + if config['per_app_repos']: + repo_sections += common.get_per_app_repos() + + if os.path.isdir('unsigned') or ( + local_copy_dir is not None + and os.path.isdir(os.path.join(local_copy_dir, 'unsigned')) + ): + repo_sections.append('unsigned') + + for repo_section in repo_sections: + if local_copy_dir is not None: + if config['sync_from_local_copy_dir']: + sync_from_localcopy(repo_section, local_copy_dir) + else: + update_localcopy(repo_section, local_copy_dir) + if config.get('serverwebroot'): + update_serverwebroots( + config['serverwebroot'], repo_section, standardwebroot + ) + if config.get('servergitmirrors'): + # update_servergitmirrors will take care of multiple mirrors so don't need a foreach + update_servergitmirrors(config['servergitmirrors'], repo_section) + if config.get('awsbucket'): + awsbucket = config['awsbucket'] + index_only = config.get('awsbucket_index_only') + update_remote_storage_with_rclone( + repo_section, + awsbucket, + index_only, + options.verbose, + options.quiet, + not options.no_checksum, + ) + if config.get('androidobservatory'): + upload_to_android_observatory(repo_section) + if config.get('virustotal_apikey'): + upload_to_virustotal(repo_section, config.get('virustotal_apikey')) + if config.get('github_releases'): + upload_to_github_releases( + repo_section, config.get('github_releases'), config.get('github_token') + ) + + binary_transparency_remote = config.get('binary_transparency_remote') + if binary_transparency_remote: + push_binary_transparency(BINARY_TRANSPARENCY_DIR, binary_transparency_remote) + + common.write_status_json(common.setup_status_output(start_timestamp)) + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/exception.py b/fdroidserver/exception.py new file mode 100644 index 00000000..682ccef7 --- /dev/null +++ b/fdroidserver/exception.py @@ -0,0 +1,58 @@ +class FDroidException(Exception): + def __init__(self, value=None, detail=None): + super().__init__() + self.value = value + self.detail = detail + + def shortened_detail(self): + if len(self.detail) < 16000: + return self.detail + return '[...]\n' + self.detail[-16000:] + + def __str__(self): + if self.value is None: + ret = __name__ + else: + ret = str(self.value) + if self.detail: + ret += ( + "\n==== detail begin ====\n%s\n==== detail end ====" + % ''.join(self.detail).strip() + ) + return ret + + +class MetaDataException(Exception): + def __init__(self, value): + super().__init__() + self.value = value + + def __str__(self): + return self.value + + +class VCSException(FDroidException): + pass + + +class NoVersionCodeException(FDroidException): + pass + + +class NoSubmodulesException(VCSException): + pass + + +class BuildException(FDroidException): + pass + + +class VerificationException(FDroidException): + pass + + +class ConfigurationException(FDroidException): + def __init__(self, value=None, detail=None): + super().__init__() + self.value = value + self.detail = detail diff --git a/fdroidserver/getsig/getsig.java b/fdroidserver/getsig/getsig.java deleted file mode 100644 index 78e7ac0c..00000000 --- a/fdroidserver/getsig/getsig.java +++ /dev/null @@ -1,105 +0,0 
@@ -import java.io.IOException; -import java.io.InputStream; -import java.math.BigInteger; -import java.security.Signature; -import java.security.cert.*; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Enumeration; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; - -public class getsig { - - public static void main(String[] args) { - - String apkPath = null; - boolean full = false; - - if(args.length == 1) { - apkPath = args[0]; - } else if (args.length == 2) { - if(!args[0].equals("-f")) { - System.out.println("Only -f is supported"); - System.exit(1); - } - apkPath = args[1]; - full = true; - } else { - System.out.println("Specify the APK file to get the signature from!"); - System.exit(1); - } - - try { - - JarFile apk = new JarFile(apkPath); - java.security.cert.Certificate[] certs = null; - - Enumeration entries = apk.entries(); - while (entries.hasMoreElements()) { - JarEntry je = (JarEntry) entries.nextElement(); - if (!je.isDirectory() && !je.getName().startsWith("META-INF/")) { - // Just need to read the stream (discarding the data) to get - // it to process the certificate... - byte[] b = new byte[4096]; - InputStream is = apk.getInputStream(je); - while (is.read(b, 0, b.length) != -1); - is.close(); - certs = je.getCertificates(); - if(certs != null) - break; - } - } - apk.close(); - - if (certs == null) { - System.out.println("Not signed"); - System.exit(1); - } - if (certs.length != 1) { - System.out.println("One signature expected"); - System.exit(1); - } - - // Get the signature in the same form that is returned by - // android.content.pm.Signature.toCharsString() (but in the - // form of a byte array so we can pass it to the MD5 function)... - byte[] sig = certs[0].getEncoded(); - byte[] csig = new byte[sig.length * 2]; - for (int j=0; j>4)&0xf; - csig[j*2] = (byte)(d >= 10 ? ('a' + d - 10) : ('0' + d)); - d = v&0xf; - csig[j*2+1] = (byte)(d >= 10 ? ('a' + d - 10) : ('0' + d)); - } - - String result; - if(full) { - result = new String(csig); - } else { - // Get the MD5 sum... 
- MessageDigest md; - md = MessageDigest.getInstance("MD5"); - byte[] md5sum = new byte[32]; - md.update(csig); - md5sum = md.digest(); - BigInteger bigInt = new BigInteger(1, md5sum); - String md5hash = bigInt.toString(16); - while (md5hash.length() < 32) - md5hash = "0" + md5hash; - result = md5hash; - } - - System.out.println("Result:" + result); - System.exit(0); - - } catch (Exception e) { - System.out.println("Exception:" + e); - System.exit(1); - } - } - -} - - diff --git a/fdroidserver/getsig/make.sh b/fdroidserver/getsig/make.sh deleted file mode 100755 index aa63c1a2..00000000 --- a/fdroidserver/getsig/make.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -javac getsig.java diff --git a/fdroidserver/getsig/run.sh b/fdroidserver/getsig/run.sh deleted file mode 100755 index 726995bf..00000000 --- a/fdroidserver/getsig/run.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -java getsig $1 $2 $3 diff --git a/fdroidserver/github.py b/fdroidserver/github.py new file mode 100644 index 00000000..34a3ee53 --- /dev/null +++ b/fdroidserver/github.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +# +# github.py - part of the FDroid server tools +# Copyright (C) 2024, Michael Pöhn, michael@poehn.at +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import json +import pathlib +import urllib.parse +import urllib.request + + +class GithubApi: + """Wrapper for some select calls to GitHub Json/REST API. + + This class wraps some calls to api.github.com. This is not intended to be a + general API wrapper. Instead it's purpose is to return pre-filtered and + transformed data that's playing well with other fdroidserver functions. + + With the GitHub API, the token is optional, but it has pretty + severe rate limiting. 
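As a small illustration (token and repository are placeholders), both the full project URL and a plain owner/repo path are accepted, and the token may be omitted at the cost of stricter rate limits:

api = GithubApi('<github token>', 'https://github.com/example/app')
api = GithubApi(None, 'example/app')  # unauthenticated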
+ + """ + + def __init__(self, api_token, repo_path): + self._api_token = api_token + if repo_path.startswith("https://github.com/"): + self._repo_path = repo_path[19:] + else: + self._repo_path = repo_path + + def _req(self, url, data=None): + h = { + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + if self._api_token: + h["Authorization"] = f"Bearer {self._api_token}" + return urllib.request.Request( + url, + headers=h, + data=data, + ) + + def list_released_tags(self): + """List of all tags that are associated with a release for this repo on GitHub.""" + names = [] + req = self._req(f"https://api.github.com/repos/{self._repo_path}/releases") + with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning + releases = json.load(resp) + for release in releases: + names.append(release['tag_name']) + return names + + def list_unreleased_tags(self): + all_tags = self.list_all_tags() + released_tags = self.list_released_tags() + return [x for x in all_tags if x not in released_tags] + + def get_latest_apk(self): + req = self._req( + f"https://api.github.com/repos/{self._repo_path}/releases/latest" + ) + with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning + assets = json.load(resp)['assets'] + for asset in assets: + url = asset.get('browser_download_url') + if url and url.endswith('.apk'): + return url + + def tag_exists(self, tag): + """ + Check if git tag is present on github. + + https://docs.github.com/en/rest/git/refs?apiVersion=2022-11-28#list-matching-references--fine-grained-access-tokens + """ + req = self._req( + f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/{tag}" + ) + with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning + rd = json.load(resp) + return len(rd) == 1 and rd[0].get("ref", False) == f"refs/tags/{tag}" + return False + + def list_all_tags(self): + """Get list of all tags for this repo on GitHub.""" + tags = [] + req = self._req( + f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/" + ) + with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning + refs = json.load(resp) + for ref in refs: + r = ref.get('ref', '') + if r.startswith('refs/tags/'): + tags.append(r[10:]) + return tags + + def create_release(self, tag, files, body=''): + """ + Create a new release on github. + + also see: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release + + :returns: True if release was created, False if release already exists + :raises: urllib exceptions in case of network or api errors, also + raises an exception when the tag doesn't exists. + """ + # Querying github to create a new release for a non-existent tag, will + # also create that tag on github. So we need an additional check to + # prevent this behavior. 
+ if not self.tag_exists(tag): + raise Exception( + f"can't create github release for {self._repo_path} {tag}, tag doesn't exists" + ) + # create the relase on github + req = self._req( + f"https://api.github.com/repos/{self._repo_path}/releases", + data=json.dumps( + { + "tag_name": tag, + "body": body, + } + ).encode("utf-8"), + ) + try: + with urllib.request.urlopen( # nosec CWE-22 disable bandit warning + req + ) as resp: + release_id = json.load(resp)['id'] + except urllib.error.HTTPError as e: + if e.status == 422: + codes = [x['code'] for x in json.load(e).get('errors', [])] + if "already_exists" in codes: + return False + raise e + + # attach / upload all files for the relase + for file in files: + self._create_release_asset(release_id, file) + + return True + + def _create_release_asset(self, release_id, file): + """ + Attach a file to a release on GitHub. + + This uploads a file to github relases, it will be attached to the supplied release + + also see: https://docs.github.com/en/rest/releases/assets?apiVersion=2022-11-28#upload-a-release-asset + """ + file = pathlib.Path(file) + with open(file, 'rb') as f: + req = urllib.request.Request( + f"https://uploads.github.com/repos/{self._repo_path}/releases/{release_id}/assets?name={file.name}", + headers={ + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {self._api_token}", + "X-GitHub-Api-Version": "2022-11-28", + "Content-Type": "application/octet-stream", + }, + data=f.read(), + ) + with urllib.request.urlopen(req): # nosec CWE-22 disable bandit warning + return True + return False diff --git a/fdroidserver/gpgsign.py b/fdroidserver/gpgsign.py new file mode 100644 index 00000000..4341cb36 --- /dev/null +++ b/fdroidserver/gpgsign.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +# +# gpgsign.py - part of the FDroid server tools +# Copyright (C) 2014, Ciaran Gultnieks, ciaran@ciarang.com +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import glob +import logging +import os +import time +from argparse import ArgumentParser + +from . import _, common +from .common import FDroidPopen +from .exception import FDroidException + +config = None +start_timestamp = time.gmtime() + + +def status_update_json(signed): + """Output a JSON file with metadata about this run.""" + logging.debug(_('Outputting JSON')) + output = common.setup_status_output(start_timestamp) + if signed: + output['signed'] = signed + common.write_status_json(output) + + +def main(): + global config + + # Parse command line... + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + + config = common.read_config() + + repodirs = ['repo'] + if config['archive_older'] != 0: + repodirs.append('archive') + + signed = [] + for output_dir in repodirs: + if not os.path.isdir(output_dir): + raise FDroidException( + _("Missing output directory") + " '" + output_dir + "'" + ) + + # Process any apks that are waiting to be signed... 
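The loop that follows builds one detached, ASCII-armored signature per repo file. Stripped of the config handling, the gpg invocation it assembles is roughly equivalent to this standalone sketch; the paths and key ID are made up, and the --homedir/--local-user flags are only added when gpghome/gpgkey are configured:

    import subprocess

    # Equivalent of the gpgargs list built below, for a single repo file.
    subprocess.run(
        ["gpg", "-a", "--output", "repo/app.apk.asc", "--detach-sig",
         "--homedir", "/path/to/gnupghome",   # only with 'gpghome' in config
         "--local-user", "EXAMPLEKEYID",      # only with 'gpgkey' in config
         "repo/app.apk"],
        check=True,
    )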
+ for f in sorted(glob.glob(os.path.join(output_dir, '*.*'))): + if not common.is_repo_file(f, for_gpg_signing=True): + continue + filename = os.path.basename(f) + sigfilename = filename + ".asc" + sigpath = os.path.join(output_dir, sigfilename) + + if not os.path.exists(sigpath): + gpgargs = ['gpg', '-a', '--output', sigpath, '--detach-sig'] + if 'gpghome' in config: + gpgargs.extend(['--homedir', config['gpghome']]) + if 'gpgkey' in config: + gpgargs.extend(['--local-user', config['gpgkey']]) + gpgargs.append(os.path.join(output_dir, filename)) + p = FDroidPopen(gpgargs) + if p.returncode != 0: + raise FDroidException("Signing failed.") + + signed.append(filename) + logging.info('Signed ' + filename) + status_update_json(signed) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/import.py b/fdroidserver/import.py deleted file mode 100644 index 3a4d3f75..00000000 --- a/fdroidserver/import.py +++ /dev/null @@ -1,303 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- -# -# import.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import sys -import os -import shutil -import urllib -from optparse import OptionParser -from ConfigParser import ConfigParser -import common, metadata - -# Get the repo type and address from the given web page. The page is scanned -# in a rather naive manner for 'git clone xxxx', 'hg clone xxxx', etc, and -# when one of these is found it's assumed that's the information we want. -# Returns repotype, address, or None, reason -def getrepofrompage(url): - - req = urllib.urlopen(url) - if req.getcode() != 200: - return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode())) - page = req.read() - - # Works for Google Code and BitBucket... - index = page.find('hg clone') - if index != -1: - repotype = 'hg' - repo = page[index + 9:] - index = repo.find('<') - if index == -1: - return (None, "Error while getting repo address") - repo = repo[:index] - repo = repo.split('"')[0] - return (repotype, repo) - - # Works for Google Code and BitBucket... - index=page.find('git clone') - if index != -1: - repotype = 'git' - repo = page[index + 10:] - index = repo.find('<') - if index == -1: - return (None, "Error while getting repo address") - repo = repo[:index] - repo = repo.split('"')[0] - return (repotype, repo) - - # Google Code only... - index=page.find('svn checkout') - if index != -1: - repotype = 'git-svn' - repo = page[index + 13:] - prefix = 'http' - if not repo.startswith(prefix): - return (None, "Unexpected checkout instructions format") - repo = 'http' + repo[len(prefix):] - index = repo.find('<') - if index == -1: - return (None, "Error while getting repo address - no end tag? 
'" + repo + "'") - sys.exit(1) - repo = repo[:index] - index = repo.find(' ') - if index == -1: - return (None, "Error while getting repo address - no space? '" + repo + "'") - repo = repo[:index] - repo = repo.split('"')[0] - return (repotype, repo) - - return (None, "No information found." + page) - -config = None -options = None - -def main(): - - global config, options - - # Parse command line... - parser = OptionParser() - parser.add_option("-u", "--url", default=None, - help="Project URL to import from.") - parser.add_option("-s", "--subdir", default=None, - help="Path to main android project subdirectory, if not in root.") - parser.add_option("-r", "--repo", default=None, - help="Allows a different repo to be specified for a multi-repo google code project") - parser.add_option("--rev", default=None, - help="Allows a different revision (or git branch) to be specified for the initial import") - (options, args) = parser.parse_args() - - config = common.read_config(options) - - if not options.url: - print "Specify project url." - sys.exit(1) - url = options.url - - tmp_dir = 'tmp' - if not os.path.isdir(tmp_dir): - print "Creating temporary directory" - os.makedirs(tmp_dir) - - # Get all apps... - apps = metadata.read_metadata() - - # Figure out what kind of project it is... - projecttype = None - issuetracker = None - license = None - website = url #by default, we might override it - if url.startswith('git://'): - projecttype = 'git' - repo = url - repotype = 'git' - sourcecode = "" - website = "" - elif url.startswith('https://github.com'): - if url.endswith('/'): - url = url[:-1] - if url.endswith('.git'): - print "A github URL should point to the project, not the git repo" - sys.exit(1) - projecttype = 'github' - repo = url + '.git' - repotype = 'git' - sourcecode = url - issuetracker = url + '/issues' - elif url.startswith('https://gitorious.org/'): - projecttype = 'gitorious' - repo = 'https://git.gitorious.org/' + url[22:] + '.git' - repotype = 'git' - sourcecode = url - elif url.startswith('https://bitbucket.org/'): - if url.endswith('/'): - url = url[:-1] - projecttype = 'bitbucket' - sourcecode = url + '/src' - issuetracker = url + '/issues' - # Figure out the repo type and adddress... - repotype, repo = getrepofrompage(sourcecode) - if not repotype: - print "Unable to determine vcs type. " + repo - sys.exit(1) - elif url.startswith('http://code.google.com/p/'): - if not url.endswith('/'): - url += '/'; - projecttype = 'googlecode' - sourcecode = url + 'source/checkout' - if options.repo: - sourcecode += "?repo=" + options.repo - issuetracker = url + 'issues/list' - - # Figure out the repo type and adddress... - repotype, repo = getrepofrompage(sourcecode) - if not repotype: - print "Unable to determine vcs type. " + repo - sys.exit(1) - - # Figure out the license... 
- req = urllib.urlopen(url) - if req.getcode() != 200: - print 'Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode()) - sys.exit(1) - page = req.read() - index = page.find('Code license') - if index == -1: - print "Couldn't find license data" - sys.exit(1) - ltext = page[index:] - lprefix = 'rel="nofollow">' - index = ltext.find(lprefix) - if index == -1: - print "Couldn't find license text" - sys.exit(1) - ltext = ltext[index + len(lprefix):] - index = ltext.find('<') - if index == -1: - print "License text not formatted as expected" - sys.exit(1) - ltext = ltext[:index] - if ltext == 'GNU GPL v3': - license = 'GPLv3' - elif ltext == 'GNU GPL v2': - license = 'GPLv2' - elif ltext == 'Apache License 2.0': - license = 'Apache2' - elif ltext == 'MIT License': - license = 'MIT' - elif ltext == 'GNU Lesser GPL': - license = 'LGPL' - elif ltext == 'Mozilla Public License 1.1': - license = 'MPL' - elif ltext == 'New BSD License': - license = 'NewBSD' - else: - print "License " + ltext + " is not recognised" - sys.exit(1) - - if not projecttype: - print "Unable to determine the project type." - print "The URL you supplied was not in one of the supported formats. Please consult" - print "the manual for a list of supported formats, and supply one of those." - sys.exit(1) - - # Get a copy of the source so we can extract some info... - print 'Getting source from ' + repotype + ' repo at ' + repo - src_dir = os.path.join(tmp_dir, 'importer') - if os.path.exists(src_dir): - shutil.rmtree(src_dir) - vcs = common.getvcs(repotype, repo, src_dir) - vcs.gotorevision(options.rev) - if options.subdir: - root_dir = os.path.join(src_dir, options.subdir) - else: - root_dir = src_dir - - # Extract some information... - paths = common.manifest_paths(root_dir, None) - if paths: - - version, vercode, package = common.parse_androidmanifests(paths) - if not package: - print "Couldn't find package ID" - sys.exit(1) - if not version: - print "WARNING: Couldn't find latest version name" - if not vercode: - print "WARNING: Couldn't find latest version code" - else: - spec = os.path.join(root_dir, 'buildozer.spec') - if os.path.exists(spec): - defaults = {'orientation': 'landscape', 'icon': '', - 'permissions': '', 'android.api': "18"} - bconfig = ConfigParser(defaults, allow_no_value=True) - bconfig.read(spec) - package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name') - version = bconfig.get('app', 'version') - vercode = None - else: - print "No android or kivy project could be found. Specify --subdir?" - sys.exit(1) - - # Make sure it's actually new... - for app in apps: - if app['id'] == package: - print "Package " + package + " already exists" - sys.exit(1) - - # Construct the metadata... - app = metadata.parse_metadata(None) - app['id'] = package - app['Web Site'] = website - app['Source Code'] = sourcecode - if issuetracker: - app['Issue Tracker'] = issuetracker - if license: - app['License'] = license - app['Repo Type'] = repotype - app['Repo'] = repo - app['Update Check Mode'] = "Tags" - - # Create a build line... - build = {} - build['version'] = version if version else '?' - build['vercode'] = vercode if vercode else '?' - build['commit'] = '?' - build['disable'] = 'Generated by import.py - check/set version fields and commit id' - if options.subdir: - build['subdir'] = options.subdir - if os.path.exists(os.path.join(root_dir, 'jni')): - build['buildjni'] = 'yes' - app['builds'].append(build) - - # Keep the repo directory to save bandwidth... 
- if not os.path.exists('build'): - os.mkdir('build') - shutil.move(src_dir, os.path.join('build', package)) - with open('build/.fdroidvcs-' + package, 'w') as f: - f.write(repotype + ' ' + repo) - - metafile = os.path.join('metadata', package + '.txt') - metadata.write_metadata(metafile, app) - print "Wrote " + metafile - - -if __name__ == "__main__": - main() - diff --git a/fdroidserver/import_subcommand.py b/fdroidserver/import_subcommand.py new file mode 100644 index 00000000..017ebe54 --- /dev/null +++ b/fdroidserver/import_subcommand.py @@ -0,0 +1,470 @@ +#!/usr/bin/env python3 +"""Extract application metadata from a source repository.""" +# +# import_subcommand.py - part of the FDroid server tools +# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Martí +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import json +import logging +import os +import re +import shutil +import stat +import sys +import urllib +from argparse import ArgumentParser +from pathlib import Path +from typing import Optional + +import git +import yaml + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +from . import _, common, metadata +from .exception import FDroidException + +config = None + +SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?') +GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:?([^'"]+)['"]''') +APPLICATION_ID_REGEX = re.compile(r'''\s*applicationId\s=?\s?['"].*['"]''') + + +def get_all_gradle_and_manifests(build_dir): + paths = [] + for root, dirs, files in os.walk(build_dir): + for f in sorted(files): + if f == 'AndroidManifest.xml' or f.endswith(('.gradle', '.gradle.kts')): + full = Path(root) / f + paths.append(full) + return paths + + +def get_gradle_subdir(build_dir, paths): + """Get the subdir where the gradle build is based.""" + first_gradle_dir = None + for path in paths: + if not first_gradle_dir: + first_gradle_dir = path.parent.relative_to(build_dir) + if path.exists() and SETTINGS_GRADLE_REGEX.match(path.name): + for m in GRADLE_SUBPROJECT_REGEX.finditer(path.read_text(encoding='utf-8')): + for f in (path.parent / m.group(1)).glob('build.gradle*'): + with f.open(encoding='utf-8') as fp: + for line in fp: + if common.ANDROID_PLUGIN_REGEX.match( + line + ) or APPLICATION_ID_REGEX.match(line): + return f.parent.relative_to(build_dir) + if first_gradle_dir and first_gradle_dir != Path('.'): + return first_gradle_dir + + +def handle_retree_error_on_windows(function, path, excinfo): + """Python can't remove a readonly file on Windows so chmod first.""" + if function in (os.unlink, os.rmdir, os.remove) and excinfo[0] == PermissionError: + os.chmod(path, stat.S_IWRITE) + function(path) + + +def clone_to_tmp_dir(app: metadata.App, rev=None) -> Path: + """Clone the source repository of an app to a temporary directory for further processing. 
+ + Parameters + ---------- + app + The App instance to clone the source of. + + Returns + ------- + tmp_dir + The (temporary) directory the apps source has been cloned into. + + """ + tmp_dir = Path('tmp') + tmp_dir.mkdir(exist_ok=True) + + tmp_dir = tmp_dir / 'importer' + + if tmp_dir.exists(): + shutil.rmtree(str(tmp_dir), onerror=handle_retree_error_on_windows) + vcs = common.getvcs(app.RepoType, app.Repo, tmp_dir) + vcs.gotorevision(rev) + + return tmp_dir + + +def getrepofrompage(url: str) -> tuple[Optional[str], str]: + """Get the repo type and address from the given web page. + + The page is scanned in a rather naive manner for 'git clone xxxx', + 'hg clone xxxx', etc, and when one of these is found it's assumed + that's the information we want. Returns repotype, address, or + None, reason + + Parameters + ---------- + url + The url to look for repository information at. + + Returns + ------- + repotype_or_none + The found repository type or None if an error occured. + address_or_reason + The address to the found repository or the reason if an error occured. + + """ + if not url.startswith('http'): + return (None, _('{url} does not start with "http"!'.format(url=url))) + req = urllib.request.urlopen(url) # nosec B310 non-http URLs are filtered out + if req.getcode() != 200: + return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode())) + page = req.read().decode(req.headers.get_content_charset()) + + # Works for BitBucket + m = re.search('data-fetch-url="(.*)"', page) + if m is not None: + repo = m.group(1) + + if repo.endswith('.git'): + return ('git', repo) + + return ('hg', repo) + + # Works for BitBucket (obsolete) + index = page.find('hg clone') + if index != -1: + repotype = 'hg' + repo = page[index + 9 :] + index = repo.find('<') + if index == -1: + return (None, _("Error while getting repo address")) + repo = repo[:index] + repo = repo.split('"')[0] + return (repotype, repo) + + # Works for BitBucket (obsolete) + index = page.find('git clone') + if index != -1: + repotype = 'git' + repo = page[index + 10 :] + index = repo.find('<') + if index == -1: + return (None, _("Error while getting repo address")) + repo = repo[:index] + repo = repo.split('"')[0] + return (repotype, repo) + + return (None, _("No information found.") + page) + + +def get_app_from_url(url: str) -> metadata.App: + """Guess basic app metadata from the URL. + + The URL must include a network hostname, unless it is an lp:, + file:, or git/ssh URL. This throws ValueError on bad URLs to + match urlparse(). + + Parameters + ---------- + url + The URL to look to look for app metadata at. + + Returns + ------- + app + App instance with the found metadata. + + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If the VCS type could not be determined. + :exc:`ValueError` + If the URL is invalid. 
+ + """ + parsed = urllib.parse.urlparse(url) + invalid_url = False + if not parsed.scheme or not parsed.path: + invalid_url = True + + app = metadata.App() + app.Repo = url + if url.startswith('git://') or url.startswith('git@'): + app.RepoType = 'git' + elif parsed.netloc == 'github.com': + app.RepoType = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + elif parsed.netloc in ('gitlab.com', 'framagit.org'): + # git can be fussy with gitlab URLs unless they end in .git + if url.endswith('.git'): + url = url[:-4] + app.Repo = url + '.git' + app.RepoType = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + elif parsed.netloc == 'notabug.org': + if url.endswith('.git'): + url = url[:-4] + app.Repo = url + '.git' + app.RepoType = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + elif parsed.netloc == 'bitbucket.org': + if url.endswith('/'): + url = url[:-1] + app.SourceCode = url + '/src' + app.IssueTracker = url + '/issues' + # Figure out the repo type and adddress... + app.RepoType, app.Repo = getrepofrompage(url) + elif parsed.netloc == 'codeberg.org': + app.RepoType = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + elif url.startswith('https://') and url.endswith('.git'): + app.RepoType = 'git' + + if not parsed.netloc and parsed.scheme in ('git', 'http', 'https', 'ssh'): + invalid_url = True + + if invalid_url: + raise ValueError(_('"{url}" is not a valid URL!'.format(url=url))) + + if not app.RepoType: + raise FDroidException("Unable to determine vcs type. " + app.Repo) + + return app + + +def main(): + """Extract app metadata and write it to a file. + + The behaviour of this function is influenced by the configuration file as + well as command line parameters. + + Raises + ------ + :exc:`~fdroidserver.exception.FDroidException` + If the repository already has local metadata, no URL is specified and + the current directory is not a Git repository, no application ID could + be found, no Gradle project could be found or there is already metadata + for the found application ID. + + """ + global config + + # Parse command line... 
+ parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument("-u", "--url", help=_("Project URL to import from.")) + parser.add_argument( + "-s", + "--subdir", + help=_("Path to main Android project subdirectory, if not in root."), + ) + parser.add_argument( + "-c", + "--categories", + help=_("Comma separated list of categories."), + ) + parser.add_argument("-l", "--license", help=_("Overall license of the project.")) + parser.add_argument( + "--omit-disable", + action="store_true", + help=_("Do not add 'disable:' to the generated build entries"), + ) + parser.add_argument( + "--rev", + help=_( + "Allows a different revision (or git branch) to be specified for the initial import" + ), + ) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W + + config = common.read_config() + + apps = metadata.read_metadata() + app = None + + tmp_importer_dir = None + + local_metadata_files = common.get_local_metadata_files() + if local_metadata_files: + raise FDroidException( + _("This repo already has local metadata: %s") % local_metadata_files[0] + ) + + build = metadata.Build() + app = metadata.App() + if options.url is None and Path('.git').is_dir(): + app.RepoType = 'git' + tmp_importer_dir = Path.cwd() + git_repo = git.Repo(tmp_importer_dir) + for remote in git.Remote.iter_items(git_repo): + if remote.name == 'origin': + url = git_repo.remotes.origin.url + app = get_app_from_url(url) + break + write_local_file = True + elif options.url: + app = get_app_from_url(options.url) + tmp_importer_dir = clone_to_tmp_dir(app, options.rev) + git_repo = git.Repo(tmp_importer_dir) + + if not options.omit_disable: + build.disable = ( + 'Generated by `fdroid import` - check version fields and commitid' + ) + write_local_file = False + else: + raise FDroidException("Specify project url.") + + app.AutoUpdateMode = 'Version' + app.UpdateCheckMode = 'Tags' + build.commit = common.get_head_commit_id(tmp_importer_dir) + + # Extract some information... + paths = get_all_gradle_and_manifests(tmp_importer_dir) + gradle_subdir = get_gradle_subdir(tmp_importer_dir, paths) + if paths: + versionName, versionCode, appid = common.parse_androidmanifests(paths, app) + if not appid: + raise FDroidException(_("Couldn't find Application ID")) + if not versionName: + logging.warning(_('Could not find latest versionName')) + if not versionCode: + logging.warning(_('Could not find latest versionCode')) + else: + raise FDroidException(_("No gradle project could be found. Specify --subdir?")) + + # Make sure it's actually new... + if appid in apps: + raise FDroidException(_('Package "{appid}" already exists').format(appid=appid)) + + # Create a build line... 
+ build.versionName = versionName or 'Unknown' + app.CurrentVersion = build.versionName + build.versionCode = versionCode or 0 + app.CurrentVersionCode = build.versionCode + if options.subdir: + build.subdir = options.subdir + elif gradle_subdir: + build.subdir = gradle_subdir.as_posix() + # subdir might be None + subdir = Path(tmp_importer_dir / build.subdir) if build.subdir else tmp_importer_dir + + if options.license: + app.License = options.license + if options.categories: + app.Categories = options.categories.split(',') + if (subdir / 'jni').exists(): + build.buildjni = ['yes'] + if (subdir / 'build.gradle').exists() or (subdir / 'build.gradle.kts').exists(): + build.gradle = ['yes'] + + app.AutoName = common.fetch_real_name(subdir, build.gradle) + + package_json = tmp_importer_dir / 'package.json' # react-native + pubspec_yaml = tmp_importer_dir / 'pubspec.yaml' # flutter + if package_json.exists(): + build.sudo = [ + 'sysctl fs.inotify.max_user_watches=524288 || true', + 'apt-get update', + 'apt-get install -y npm', + ] + build.init = ['npm install --build-from-source'] + with package_json.open() as fp: + data = json.load(fp) + app.AutoName = app.AutoName or data.get('name') + app.License = data.get('license', app.License) + app.Description = data.get('description', app.Description) + app.WebSite = data.get('homepage', app.WebSite) + app_json = tmp_importer_dir / 'app.json' + build.scanignore = ['android/build.gradle'] + build.scandelete = ['node_modules'] + if app_json.exists(): + with app_json.open() as fp: + data = json.load(fp) + app.AutoName = app.AutoName or data.get('name') + if pubspec_yaml.exists(): + with pubspec_yaml.open() as fp: + data = yaml.load(fp, Loader=SafeLoader) + app.AutoName = app.AutoName or data.get('name') + app.License = data.get('license', app.License) + app.Description = data.get('description', app.Description) + app.UpdateCheckData = 'pubspec.yaml|version:\\s.+\\+(\\d+)|.|version:\\s(.+)\\+' + build.srclibs = ['flutter@stable'] + build.output = 'build/app/outputs/flutter-apk/app-release.apk' + build.subdir = None + build.gradle = None + build.prebuild = [ + 'export PUB_CACHE=$(pwd)/.pub-cache', + '$$flutter$$/bin/flutter config --no-analytics', + '$$flutter$$/bin/flutter packages pub get', + ] + build.scandelete = [ + '.pub-cache', + ] + build.build = [ + 'export PUB_CACHE=$(pwd)/.pub-cache', + '$$flutter$$/bin/flutter build apk', + ] + + git_modules = tmp_importer_dir / '.gitmodules' + if git_modules.exists(): + build.submodules = True + + metadata.post_parse_yaml_metadata(app) + + app['Builds'].append(build) + + if write_local_file: + metadata.write_metadata(Path('.fdroid.yml'), app) + else: + # Keep the repo directory to save bandwidth... 
+ Path('build').mkdir(exist_ok=True) + build_dir = Path('build') / appid + if build_dir.exists(): + logging.warning( + _('{path} already exists, ignoring import results!').format( + path=build_dir + ) + ) + sys.exit(1) + elif tmp_importer_dir: + # For Windows: Close the repo or a git.exe instance holds handles to repo + try: + git_repo.close() + except AttributeError: # Debian/stretch's version does not have close() + pass + shutil.move(tmp_importer_dir, build_dir) + Path('build/.fdroidvcs-' + appid).write_text(app.RepoType + ' ' + app.Repo) + + metadatapath = Path('metadata') / (appid + '.yml') + metadata.write_metadata(metadatapath, app) + logging.info("Wrote " + str(metadatapath)) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/index.py b/fdroidserver/index.py new file mode 100644 index 00000000..b63729e4 --- /dev/null +++ b/fdroidserver/index.py @@ -0,0 +1,1928 @@ +#!/usr/bin/env python3 +# +# update.py - part of the FDroid server tools +# Copyright (C) 2017, Torsten Grote +# Copyright (C) 2016, Blue Jay Wireless +# Copyright (C) 2014-2016, Hans-Christoph Steiner +# Copyright (C) 2010-2015, Ciaran Gultnieks +# Copyright (C) 2013-2014, Daniel Martí +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +"""Process the index files. + +This module is loaded by all fdroid subcommands since it is loaded in +fdroidserver/__init__.py. Any narrowly used dependencies should be +imported where they are used to limit dependencies for subcommands +like publish/signindex/gpgsign. This eliminates the need to have +these installed on the signing server. + +""" + +import calendar +import collections +import hashlib +import json +import logging +import os +import re +import shutil +import sys +import tempfile +import urllib.parse +import zipfile +from binascii import hexlify, unhexlify +from datetime import datetime, timezone +from pathlib import Path +from xml.dom.minidom import Document + +from fdroidserver._yaml import yaml +from fdroidserver.common import ( + ANTIFEATURES_CONFIG_NAME, + CATEGORIES_CONFIG_NAME, + CONFIG_CONFIG_NAME, + DEFAULT_LOCALE, + MIRRORS_CONFIG_NAME, + RELEASECHANNELS_CONFIG_NAME, + FDroidPopen, + FDroidPopenBytes, + load_publish_signer_fingerprints, +) +from fdroidserver.exception import FDroidException, VerificationException + +from . import _, common, metadata, signindex + + +def make(apps, apks, repodir, archive): + """Generate the repo index files. + + This requires properly initialized options and config objects. + + Parameters + ---------- + apps + OrderedDict of apps to go into the index, each app should have + at least one associated apk + apks + list of apks to go into the index + repodir + the repo directory + archive + True if this is the archive repo, False if it's the + main one. 
+ """ + from fdroidserver.update import METADATA_VERSION + + if not hasattr(common.options, 'nosign') or not common.options.nosign: + common.assert_config_keystore(common.config) + + # Historically the index has been sorted by App Name, so we enforce this ordering here + sortedids = sorted(apps, key=lambda appid: common.get_app_display_name(apps[appid]).upper()) + sortedapps = collections.OrderedDict() + for appid in sortedids: + sortedapps[appid] = apps[appid] + + repodict = collections.OrderedDict() + repodict['timestamp'] = datetime.now(timezone.utc) + repodict['version'] = METADATA_VERSION + + if common.config['repo_maxage'] != 0: + repodict['maxage'] = common.config['repo_maxage'] + + if archive: + repodict['name'] = common.config['archive_name'] + repodict['icon'] = common.config.get('archive_icon', common.default_config['repo_icon']) + repodict['description'] = common.config['archive_description'] + archive_url = common.config.get('archive_url', common.config['repo_url'][:-4] + 'archive') + repodict['address'] = archive_url + if 'archive_web_base_url' in common.config: + repodict["webBaseUrl"] = common.config['archive_web_base_url'] + repo_section = os.path.basename(urllib.parse.urlparse(archive_url).path) + else: + repodict['name'] = common.config['repo_name'] + repodict['icon'] = common.config.get('repo_icon', common.default_config['repo_icon']) + repodict['address'] = common.config['repo_url'] + if 'repo_web_base_url' in common.config: + repodict["webBaseUrl"] = common.config['repo_web_base_url'] + repodict['description'] = common.config['repo_description'] + repo_section = os.path.basename(urllib.parse.urlparse(common.config['repo_url']).path) + + add_mirrors_to_repodict(repo_section, repodict) + + requestsdict = collections.OrderedDict() + for command in ('install', 'uninstall'): + packageNames = [] + key = command + '_list' + if key in common.config: + if isinstance(common.config[key], str): + packageNames = [common.config[key]] + elif all(isinstance(item, str) for item in common.config[key]): + packageNames = common.config[key] + else: + raise TypeError(_('only accepts strings, lists, and tuples')) + requestsdict[command] = packageNames + + signer_fingerprints = load_publish_signer_fingerprints() + + make_v0(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints) + make_v1(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints) + make_v2( + sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints, archive + ) + make_website(sortedapps, repodir, repodict) + make_altstore( + sortedapps, + apks, + common.config, + repodir, + pretty=common.options.pretty, + ) + + +def _should_file_be_generated(path, magic_string): + if os.path.exists(path): + with open(path) as f: + # if the magic_string is not in the first line the file should be overwritten + if magic_string not in f.readline(): + return False + return True + + +def make_website(apps, repodir, repodict): + # do not change this string, as it will break updates for files with older versions of this string + autogenerate_comment = "auto-generated - fdroid index updates will overwrite this file" + + if not os.path.exists(repodir): + os.makedirs(repodir) + + html_name = 'index.html' + html_file = os.path.join(repodir, html_name) + + if _should_file_be_generated(html_file, autogenerate_comment): + import qrcode + + _ignored, repo_pubkey_fingerprint = extract_pubkey() + repo_pubkey_fingerprint_stripped = repo_pubkey_fingerprint.replace(" ", "") + link = repodict["address"] + 
link_fingerprinted = '{link}?fingerprint={fingerprint}'.format( + link=link, fingerprint=repo_pubkey_fingerprint_stripped + ) + qrcode.make(link_fingerprinted).save(os.path.join(repodir, "index.png")) + with open(html_file, 'w') as f: + name = repodict["name"] + description = repodict["description"] + icon = repodict["icon"] + f.write(""" + + + + + + + {name} + + + + + + + + + + + + +

+ {name} +

+
+

+ + + QR: test + + + {description} +
+
+ Currently it serves + + {number_of_apps} + + apps. To add it to your F-Droid app, scan the QR code (click it to enlarge) or use this link: +

+

+ + + {link} + + +

+

+ If you would like to manually verify the fingerprint (SHA-256) of the repository signing key, here it is: +
+ + {fingerprint} + +

+
+ + +""".format(autogenerate_comment=autogenerate_comment, + description=description, + fingerprint=repo_pubkey_fingerprint, + icon=icon, + link=link, + link_fingerprinted=link_fingerprinted, + name=name, + number_of_apps=str(len(apps)))) + + css_file = os.path.join(repodir, "index.css") + if _should_file_be_generated(css_file, autogenerate_comment): + with open(css_file, "w") as f: + # this auto generated comment was not included via .format(), as python seems to have problems with css files in combination with .format() + f.write("""/* auto-generated - fdroid index updates will overwrite this file */ +BODY { + font-family : Arial, Helvetica, Sans-Serif; + color : #0000ee; + background-color : #ffffff; +} +p { + text-align : justify; +} +p.center { + text-align : center; +} +TD { + font-family : Arial, Helvetica, Sans-Serif; + color : #0000ee; +} +body,td { + font-size : 14px; +} +TH { + font-family : Arial, Helvetica, Sans-Serif; + color : #0000ee; + background-color : #F5EAD4; +} +a:link { + color : #bb0000; +} +a:visited { + color : #ff0000; +} +.zitat { + margin-left : 1cm; + margin-right : 1cm; + font-style : italic; +} +#intro { + border-spacing : 1em; + border : 1px solid gray; + border-radius : 0.5em; + box-shadow : 10px 10px 5px #888; + margin : 1.5em; + font-size : .9em; + width : 600px; + max-width : 90%; + display : table; + margin-left : auto; + margin-right : auto; + font-size : .8em; + color : #555555; +} +#intro > p { + margin-top : 0; +} +#intro p:last-child { + margin-bottom : 0; +} +.last { + border-bottom : 1px solid black; + padding-bottom : .5em; + text-align : center; +} +table { + border-collapse : collapse; +} +h2 { + text-align : center; +} +.perms { + font-family : monospace; + font-size : .8em; +} +.repoapplist { + display : table; + border-collapse : collapse; + margin-left : auto; + margin-right : auto; + width : 600px; + max-width : 90%; +} +.approw, appdetailrow { + display : table-row; +} +.appdetailrow { + display : flex; + padding : .5em; +} +.appiconbig, .appdetailblock, .appdetailcell { + display : table-cell +} +.appiconbig { + vertical-align : middle; + text-align : center; +} +.appdetailinner { + width : 100%; +} +.applinkcell { + text-align : center; + float : right; + width : 100%; + margin-bottom : .1em; +} +.paddedlink { + margin : 1em; +} +.approw { + border-spacing : 1em; + border : 1px solid gray; + border-radius : 0.5em; + padding : 0.5em; + margin : 1.5em; +} +.appdetailinner .appdetailrow:first-child { + background-color : #d5d5d5; +} +.appdetailinner .appdetailrow:first-child .appdetailcell { + min-width : 33%; + flex : 1 33%; + text-align : center; +} +.appdetailinner .appdetailrow:first-child .appdetailcell:first-child { + text-align : left; +} +.appdetailinner .appdetailrow:first-child .appdetailcell:last-child { + float : none; + text-align : right; +} +.minor-details { + font-size : .8em; + color : #555555; +} +.boldname { + font-weight : bold; +} +#appcount { + text-align : center; + margin-bottom : .5em; +} +kbd { + padding : 0.1em 0.6em; + border : 1px solid #CCC; + background-color : #F7F7F7; + color : #333; + box-shadow : 0px 1px 0px rgba(0, 0, 0, 0.2), 0px 0px 0px 2px #FFF inset; + border-radius : 3px; + display : inline-block; + margin : 0px 0.1em; + text-shadow : 0px 1px 0px #FFF; + white-space : nowrap; +} +div.filterline, div.repoline { + display : table; + margin-left : auto; + margin-right : auto; + margin-bottom : 1em; + vertical-align : middle; + display : table; + font-size : .8em; +} +.filterline form { + display : 
table-row; +} +.filterline .filtercell { + display : table-cell; + vertical-align : middle; +} +fieldset { + float : left; +} +fieldset select, fieldset input, #reposelect select, #reposelect input { + font-size : .9em; +} +.pager { + display : table; + margin-left : auto; + margin-right : auto; + width : 600px; + max-width : 90%; + padding-top : .6em; +} +/* should correspond to .repoapplist */ +.pagerrow { + display : table-row; +} +.pagercell { + display : table-cell; +} +.pagercell.left { + text-align : left; + padding-right : 1em; +} +.pagercell.middle { + text-align : center; + font-size : .9em; + color : #555; +} +.pagercell.right { + text-align : right; + padding-left : 1em; +} +.anti { + color : peru; +} +.antibold { + color : crimson; +} +#footer { + text-align : center; + margin-top : 1em; + font-size : 11px; + color : #555; +} +#footer img { + vertical-align : middle; +} +@media (max-width: 600px) { + .repoapplist { + display : block; + } + .appdetailinner, .appdetailrow { + display : block; + } + .appdetailcell { + display : block; + float : left; + line-height : 1.5em; + } +}""") + + +def dict_diff(source, target): + if not isinstance(target, dict) or not isinstance(source, dict): + return target + + result = {key: None for key in source if key not in target} + + for key, value in target.items(): + if key not in source: + result[key] = value + elif value != source[key]: + result[key] = dict_diff(source[key], value) + + return result + + +def convert_datetime(obj): + if isinstance(obj, datetime): + # Java prefers milliseconds + # we also need to account for time zone/daylight saving time + return int(calendar.timegm(obj.timetuple()) * 1000) + return obj + + +def package_metadata(app, repodir): + meta = {} + for element in ( + "added", + # "binaries", + "Categories", + "Changelog", + "IssueTracker", + "lastUpdated", + "License", + "SourceCode", + "Translation", + "WebSite", + "featureGraphic", + "promoGraphic", + "tvBanner", + "screenshots", + "AuthorEmail", + "AuthorName", + "AuthorPhone", + "AuthorWebSite", + "Bitcoin", + "Liberapay", + "Litecoin", + "OpenCollective", + ): + if element in app and app[element]: + element_new = element[:1].lower() + element[1:] + meta[element_new] = convert_datetime(app[element]) + + for element in ( + "Name", + "Summary", + "Description", + "video", + ): + element_new = element[:1].lower() + element[1:] + if element in app and app[element]: + meta[element_new] = {DEFAULT_LOCALE: convert_datetime(app[element])} + elif "localized" in app: + localized = {k: v[element_new] for k, v in app["localized"].items() if element_new in v} + if localized: + meta[element_new] = localized + + if "name" not in meta and app["AutoName"]: + meta["name"] = {DEFAULT_LOCALE: app["AutoName"]} + + # fdroidserver/metadata.py App default + if meta["license"] == "Unknown": + del meta["license"] + + if app["Donate"]: + meta["donate"] = [app["Donate"]] + + # TODO handle different resolutions + if app.get("icon"): + icon_path = os.path.join(repodir, "icons", app["icon"]) + meta["icon"] = {DEFAULT_LOCALE: common.file_entry(icon_path)} + + if "iconv2" in app: + meta["icon"] = app["iconv2"] + + return meta + + +def convert_version(version, app, repodir): + """Convert the internal representation of Builds: into index-v2 versions. + + The diff algorithm of index-v2 uses null/None to mean a field to + be removed, so this function handles any Nones that are in the + metadata file. 
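The null-means-removed convention mentioned above is what dict_diff() earlier in this file produces when a new index is compared against an older one; a small worked example with invented values:

    from fdroidserver.index import dict_diff

    old = {"name": "Repo", "apps": {"a": 1, "b": 2}, "maxage": 14}
    new = {"name": "Repo", "apps": {"a": 1, "b": 3}, "icon": "fdroid-icon.png"}

    # Unchanged keys are dropped, changed nested dicts recurse, and keys
    # that disappeared map to None (null in the JSON diff).
    assert dict_diff(old, new) == {
        "apps": {"b": 3},
        "icon": "fdroid-icon.png",
        "maxage": None,
    }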
+ + """ + ver = {} + if "added" in version: + ver["added"] = convert_datetime(version["added"]) + else: + ver["added"] = 0 + + ver["file"] = { + "name": "/{}".format(version["apkName"]), + version["hashType"]: version["hash"], + "size": version["size"], + } + + ipfsCIDv1 = version.get("ipfsCIDv1") + if ipfsCIDv1: + ver["file"]["ipfsCIDv1"] = ipfsCIDv1 + + if "srcname" in version: + ver["src"] = common.file_entry( + os.path.join(repodir, version["srcname"]), + version["srcnameSha256"], + ) + + if "obbMainFile" in version: + ver["obbMainFile"] = common.file_entry( + os.path.join(repodir, version["obbMainFile"]), + version["obbMainFileSha256"], + ) + + if "obbPatchFile" in version: + ver["obbPatchFile"] = common.file_entry( + os.path.join(repodir, version["obbPatchFile"]), + version["obbPatchFileSha256"], + ) + + ver["manifest"] = manifest = {} + + for element in ( + "nativecode", + "versionName", + "maxSdkVersion", + ): + if element in version: + manifest[element] = version[element] + + if "versionCode" in version: + manifest["versionCode"] = version["versionCode"] + + if "features" in version and version["features"]: + manifest["features"] = features = [] + for feature in version["features"]: + # TODO get version from manifest, default (0) is omitted + # features.append({"name": feature, "version": 1}) + features.append({"name": feature}) + + if "minSdkVersion" in version: + manifest["usesSdk"] = {} + manifest["usesSdk"]["minSdkVersion"] = version["minSdkVersion"] + if "targetSdkVersion" in version: + manifest["usesSdk"]["targetSdkVersion"] = version["targetSdkVersion"] + else: + # https://developer.android.com/guide/topics/manifest/uses-sdk-element.html#target + manifest["usesSdk"]["targetSdkVersion"] = manifest["usesSdk"]["minSdkVersion"] + + if "signer" in version: + manifest["signer"] = {"sha256": [version["signer"]]} + + for element in ("uses-permission", "uses-permission-sdk-23"): + en = element.replace("uses-permission", "usesPermission").replace("-sdk-23", "Sdk23") + if element in version and version[element]: + manifest[en] = [] + for perm in version[element]: + if perm[1]: + manifest[en].append({"name": perm[0], "maxSdkVersion": perm[1]}) + else: + manifest[en].append({"name": perm[0]}) + + # index-v2 has only per-version antifeatures, not per package. 
+ antiFeatures = app.get('AntiFeatures', {}).copy() + for name, descdict in version.get('antiFeatures', dict()).items(): + antiFeatures[name] = descdict + if antiFeatures: + ver['antiFeatures'] = { + k: dict(sorted(antiFeatures[k].items())) for k in sorted(antiFeatures) + } + + if "versionCode" in version: + if version["versionCode"] > app["CurrentVersionCode"]: + ver[RELEASECHANNELS_CONFIG_NAME] = ["Beta"] + + builds = app.get("Builds", []) + + if len(builds) > 0 and version["versionCode"] == builds[-1]["versionCode"]: + if "localized" in app: + localized = {k: v["whatsNew"] for k, v in app["localized"].items() if "whatsNew" in v} + if localized: + ver["whatsNew"] = localized + + for build in builds: + if build['versionCode'] == version['versionCode'] and "whatsNew" in build: + ver["whatsNew"] = build["whatsNew"] + break + + return ver + + +def v2_repo(repodict, repodir, archive): + repo = {} + + repo["name"] = {DEFAULT_LOCALE: repodict["name"]} + repo["description"] = {DEFAULT_LOCALE: repodict["description"]} + repo["icon"] = { + DEFAULT_LOCALE: common.file_entry("%s/icons/%s" % (repodir, repodict["icon"])) + } + + config = common.load_localized_config(CONFIG_CONFIG_NAME, repodir) + if config: + localized_config = config["archive" if archive else "repo"] + if "name" in localized_config: + repo["name"] = localized_config["name"] + if "description" in localized_config: + repo["description"] = localized_config["description"] + if "icon" in localized_config: + repo["icon"] = localized_config["icon"] + + repo["address"] = repodict["address"] + if "mirrors" in repodict: + repo["mirrors"] = repodict["mirrors"] + if "webBaseUrl" in repodict: + repo["webBaseUrl"] = repodict["webBaseUrl"] + + repo["timestamp"] = repodict["timestamp"] + + antiFeatures = common.load_localized_config(ANTIFEATURES_CONFIG_NAME, repodir) + if antiFeatures: + repo[ANTIFEATURES_CONFIG_NAME] = antiFeatures + + categories = common.load_localized_config(CATEGORIES_CONFIG_NAME, repodir) + if categories: + repo[CATEGORIES_CONFIG_NAME] = categories + + releaseChannels = common.load_localized_config(RELEASECHANNELS_CONFIG_NAME, repodir) + if releaseChannels: + repo[RELEASECHANNELS_CONFIG_NAME] = releaseChannels + + return repo + + +def make_v2(apps, packages, repodir, repodict, requestsdict, signer_fingerprints, archive): + + def _index_encoder_default(obj): + if isinstance(obj, set): + return sorted(list(obj)) + if isinstance(obj, datetime): + # Java prefers milliseconds + # we also need to account for time zone/daylight saving time + return int(calendar.timegm(obj.timetuple()) * 1000) + if isinstance(obj, dict): + d = collections.OrderedDict() + for key in sorted(obj.keys()): + d[key] = obj[key] + return d + raise TypeError(repr(obj) + " is not JSON serializable") + + output = collections.OrderedDict() + output["repo"] = v2_repo(repodict, repodir, archive) + if requestsdict and (requestsdict["install"] or requestsdict["uninstall"]): + output["repo"]["requests"] = requestsdict + + # establish sort order of the index + sort_package_versions(packages, signer_fingerprints) + + output_packages = collections.OrderedDict() + output['packages'] = output_packages + categories_used_by_apps = set() + for package in packages: + packageName = package['packageName'] + if packageName not in apps: + logging.info(_('Ignoring package without metadata: ') + package['apkName']) + continue + if not package.get('versionName'): + app = apps[packageName] + for build in app.get('Builds', []): + if build['versionCode'] == package['versionCode']: + 
versionName = build.get('versionName') + logging.info(_('Overriding blank versionName in {apkfilename} from metadata: {version}') + .format(apkfilename=package['apkName'], version=versionName)) + package['versionName'] = versionName + break + if packageName in output_packages: + packagelist = output_packages[packageName] + else: + packagelist = {} + output_packages[packageName] = packagelist + app = apps[packageName] + categories_used_by_apps.update(app.get('Categories', [])) + packagelist["metadata"] = package_metadata(app, repodir) + if "signer" in package: + packagelist["metadata"]["preferredSigner"] = package["signer"] + + packagelist["versions"] = {} + + packagelist["versions"][package["hash"]] = convert_version(package, apps[packageName], repodir) + + if categories_used_by_apps and not output['repo'].get(CATEGORIES_CONFIG_NAME): + output['repo'][CATEGORIES_CONFIG_NAME] = dict() + # include definitions for "auto-defined" categories, e.g. just used in app metadata + for category in sorted(categories_used_by_apps): + if category not in output['repo'][CATEGORIES_CONFIG_NAME]: + output['repo'][CATEGORIES_CONFIG_NAME][category] = dict() + if 'name' not in output['repo'][CATEGORIES_CONFIG_NAME][category]: + output['repo'][CATEGORIES_CONFIG_NAME][category]['name'] = {DEFAULT_LOCALE: category} + # do not include defined categories if no apps use them + for category in list(output['repo'].get(CATEGORIES_CONFIG_NAME, list())): + if category not in categories_used_by_apps: + del output['repo'][CATEGORIES_CONFIG_NAME][category] + msg = _('Category "{category}" defined but not used for any apps!') + logging.warning(msg.format(category=category)) + + entry = {} + entry["timestamp"] = repodict["timestamp"] + + entry["version"] = repodict["version"] + if "maxage" in repodict: + entry["maxAge"] = repodict["maxage"] + + json_name = 'index-v2.json' + index_file = os.path.join(repodir, json_name) + with open(index_file, "w", encoding="utf-8") as fp: + if common.options.pretty: + json.dump(output, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) + else: + json.dump(output, fp, default=_index_encoder_default, ensure_ascii=False) + + json_name = "tmp/{}_{}.json".format(repodir, convert_datetime(repodict["timestamp"])) + with open(json_name, "w", encoding="utf-8") as fp: + if common.options.pretty: + json.dump(output, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) + else: + json.dump(output, fp, default=_index_encoder_default, ensure_ascii=False) + + entry["index"] = common.file_entry(index_file) + entry["index"]["numPackages"] = len(output.get("packages", [])) + + indexes = sorted(Path().glob("tmp/{}*.json".format(repodir)), key=lambda x: x.name) + indexes.pop() # remove current index + # remove older indexes + while len(indexes) > 10: + indexes.pop(0).unlink() + + indexes = [json.loads(Path(fn).read_text(encoding="utf-8")) for fn in indexes] + + for diff in Path().glob("{}/diff/*.json".format(repodir)): + diff.unlink() + + entry["diffs"] = {} + for old in indexes: + diff_name = str(old["repo"]["timestamp"]) + ".json" + diff_file = os.path.join(repodir, "diff", diff_name) + diff = dict_diff(old, output) + if not os.path.exists(os.path.join(repodir, "diff")): + os.makedirs(os.path.join(repodir, "diff")) + with open(diff_file, "w", encoding="utf-8") as fp: + if common.options.pretty: + json.dump(diff, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) + else: + json.dump(diff, fp, default=_index_encoder_default, ensure_ascii=False) + + 
entry["diffs"][old["repo"]["timestamp"]] = common.file_entry(diff_file) + entry["diffs"][old["repo"]["timestamp"]]["numPackages"] = len(diff.get("packages", [])) + + json_name = "entry.json" + index_file = os.path.join(repodir, json_name) + with open(index_file, "w", encoding="utf-8") as fp: + if common.options.pretty: + json.dump(entry, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) + else: + json.dump(entry, fp, default=_index_encoder_default, ensure_ascii=False) + + if common.options.nosign: + _copy_to_local_copy_dir(repodir, index_file) + logging.debug(_('index-v2 must have a signature, use `fdroid signindex` to create it!')) + else: + signindex.config = common.config + signindex.sign_index(repodir, json_name) + + +def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints): + + def _index_encoder_default(obj): + if isinstance(obj, set): + return sorted(list(obj)) + if isinstance(obj, datetime): + # Java prefers milliseconds + # we also need to account for time zone/daylight saving time + return int(calendar.timegm(obj.timetuple()) * 1000) + if isinstance(obj, dict): + d = collections.OrderedDict() + for key in sorted(obj.keys()): + d[key] = obj[key] + return d + raise TypeError(repr(obj) + " is not JSON serializable") + + output = collections.OrderedDict() + output['repo'] = repodict.copy() + output['requests'] = requestsdict + + # index-v1 only supports a list of URL strings for additional mirrors + mirrors = [] + for mirror in repodict.get('mirrors', []): + url = mirror['url'] + if url != repodict['address']: + mirrors.append(mirror['url']) + if mirrors: + output['repo']['mirrors'] = mirrors + + # establish sort order of the index + sort_package_versions(packages, signer_fingerprints) + + appslist = [] + output['apps'] = appslist + for packageName, app_dict in apps.items(): + d = collections.OrderedDict() + appslist.append(d) + for k, v in sorted(app_dict.items()): + if not v: + continue + if k in ('Builds', 'metadatapath', + 'ArchivePolicy', 'AutoName', 'AutoUpdateMode', 'MaintainerNotes', + 'Provides', 'Repo', 'RepoType', 'RequiresRoot', + 'UpdateCheckData', 'UpdateCheckIgnore', 'UpdateCheckMode', + 'UpdateCheckName', 'NoSourceSince', 'VercodeOperation', + 'summary', 'description', 'promoGraphic', 'screenshots', 'whatsNew', + 'featureGraphic', 'iconv2', 'tvBanner', + ): + continue + + # name things after the App class fields in fdroidclient + if k == 'id': + k = 'packageName' + elif k == 'CurrentVersionCode': # TODO make SuggestedVersionCode the canonical name + k = 'suggestedVersionCode' + v = str(v) + elif k == 'CurrentVersion': # TODO make SuggestedVersionName the canonical name + k = 'suggestedVersionName' + else: + k = k[:1].lower() + k[1:] + d[k] = v + + # establish sort order in lists, sets, and localized dicts + for app_dict in output['apps']: + localized = app_dict.get('localized') + if localized: + lordered = collections.OrderedDict() + for lkey, lvalue in sorted(localized.items()): + lordered[lkey] = collections.OrderedDict() + for ikey, iname in sorted(lvalue.items()): + lordered[lkey][ikey] = iname + app_dict['localized'] = lordered + # v1 uses a list of keys for Anti-Features + antiFeatures = app_dict.get('antiFeatures', dict()).keys() + if antiFeatures: + app_dict['antiFeatures'] = sorted(set(antiFeatures)) + + output_packages = collections.OrderedDict() + output['packages'] = output_packages + for package in packages: + packageName = package['packageName'] + if packageName not in apps: + logging.info(_('Ignoring package 
without metadata: ') + package['apkName']) + continue + if not package.get('versionName'): + app = apps[packageName] + for build in app.get('Builds', []): + if build['versionCode'] == package['versionCode']: + versionName = build.get('versionName') + logging.info(_('Overriding blank versionName in {apkfilename} from metadata: {version}') + .format(apkfilename=package['apkName'], version=versionName)) + package['versionName'] = versionName + break + if packageName in output_packages: + packagelist = output_packages[packageName] + else: + packagelist = [] + output_packages[packageName] = packagelist + d = collections.OrderedDict() + packagelist.append(d) + for k, v in sorted(package.items()): + if not v: + continue + if k in ('icon', 'icons', 'icons_src', 'ipfsCIDv1', 'name', 'srcnameSha256'): + continue + if k == 'antiFeatures': + d[k] = sorted(v.keys()) + continue + d[k] = v + + json_name = 'index-v1.json' + index_file = os.path.join(repodir, json_name) + with open(index_file, 'w') as fp: + if common.options.pretty: + json.dump(output, fp, default=_index_encoder_default, indent=2) + else: + json.dump(output, fp, default=_index_encoder_default) + + if common.options.nosign: + _copy_to_local_copy_dir(repodir, index_file) + logging.debug(_('index-v1 must have a signature, use `fdroid signindex` to create it!')) + else: + signindex.config = common.config + signindex.sign_index(repodir, json_name) + + +def _copy_to_local_copy_dir(repodir, f): + local_copy_dir = common.config.get('local_copy_dir', '') + if os.path.exists(local_copy_dir): + destdir = os.path.join(local_copy_dir, repodir) + if not os.path.exists(destdir): + os.mkdir(destdir) + shutil.copy2(f, destdir, follow_symlinks=False) + elif local_copy_dir: + raise FDroidException(_('"local_copy_dir" {path} does not exist!') + .format(path=local_copy_dir)) + + +def sort_package_versions(packages, signer_fingerprints): + """Sort to ensure a deterministic order for package versions in the index file. + + This sort-order also expresses + installation preference to the clients. + (First in this list = first to install) + + Parameters + ---------- + packages + list of packages which need to be sorted before but into index file. 
+ """ + GROUP_DEV_SIGNED = 1 + GROUP_FDROID_SIGNED = 2 + GROUP_OTHER_SIGNED = 3 + + def v1_sort_keys(package): + packageName = package.get('packageName', None) + + signer = package.get('signer', None) + + dev_signer = common.metadata_find_developer_signature(packageName) + group = GROUP_OTHER_SIGNED + if dev_signer and dev_signer == signer: + group = GROUP_DEV_SIGNED + else: + fdroid_signer = signer_fingerprints.get(packageName, {}).get('signer') + if fdroid_signer and fdroid_signer == signer: + group = GROUP_FDROID_SIGNED + + versionCode = None + if package.get('versionCode', None): + versionCode = -package['versionCode'] + + return packageName, group, signer, versionCode + + packages.sort(key=v1_sort_keys) + + +def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): + """Aka index.jar aka index.xml.""" + doc = Document() + + def addElement(name, value, doc, parent): + el = doc.createElement(name) + el.appendChild(doc.createTextNode(value)) + parent.appendChild(el) + + def addElementNonEmpty(name, value, doc, parent): + if not value: + return + addElement(name, value, doc, parent) + + def addElementIfInApk(name, apk, key, doc, parent): + if key not in apk: + return + value = str(apk[key]) + addElement(name, value, doc, parent) + + def addElementCheckLocalized(name, app, key, doc, parent, default=''): + """Fill in field from metadata or localized block. + + For name/summary/description, they can come only from the app source, + or from a dir in fdroiddata. They can be entirely missing from the + metadata file if there is localized versions. This will fetch those + from the localized version if its not available in the metadata file. + + Attributes should be alpha-sorted, so they must be added in + alpha- sort order. + + """ + el = doc.createElement(name) + value = app.get(key) + lkey = key[:1].lower() + key[1:] + localized = app.get('localized') + if not value and localized: + for lang in [DEFAULT_LOCALE] + [x for x in localized.keys()]: + if not lang.startswith('en'): + continue + if lang in localized: + value = localized[lang].get(lkey) + if value: + break + if not value and localized and len(localized) > 1: + lang = list(localized.keys())[0] + value = localized[lang].get(lkey) + if not value: + value = default + if not value and name == 'name' and app.get('AutoName'): + value = app['AutoName'] + el.appendChild(doc.createTextNode(value)) + parent.appendChild(el) + + root = doc.createElement("fdroid") + doc.appendChild(root) + + repoel = doc.createElement("repo") + repoel.setAttribute("icon", repodict['icon']) + if 'maxage' in repodict: + repoel.setAttribute("maxage", str(repodict['maxage'])) + repoel.setAttribute("name", repodict['name']) + pubkey, repo_pubkey_fingerprint = extract_pubkey() + repoel.setAttribute("pubkey", pubkey.decode('utf-8')) + repoel.setAttribute("timestamp", '%d' % repodict['timestamp'].timestamp()) + repoel.setAttribute("url", repodict['address']) + repoel.setAttribute("version", str(repodict['version'])) + + addElement('description', repodict['description'], doc, repoel) + # index v0 only supports a list of URL strings for additional mirrors + for mirror in repodict.get('mirrors', []): + url = mirror['url'] + if url != repodict['address']: + addElement('mirror', url, doc, repoel) + + root.appendChild(repoel) + + for command in ('install', 'uninstall'): + for packageName in requestsdict[command]: + element = doc.createElement(command) + root.appendChild(element) + element.setAttribute('packageName', packageName) + + for appid, app_dict in 
apps.items(): + app = metadata.App(app_dict) + + if app.get('Disabled') is not None: + continue + + # Get a list of the apks for this app... + apklist = [] + name_from_apk = None + apksbyversion = collections.defaultdict(lambda: []) + for apk in apks: + if apk.get('versionCode') and apk.get('packageName') == appid: + apksbyversion[apk['versionCode']].append(apk) + if name_from_apk is None: + name_from_apk = apk.get('name') + for versionCode, apksforver in apksbyversion.items(): + fdroid_signer = signer_fingerprints.get(appid, {}).get('signer') + fdroid_signed_apk = None + name_match_apk = None + for x in apksforver: + if fdroid_signer and x.get('signer', None) == fdroid_signer: + fdroid_signed_apk = x + if common.apk_release_filename.match(x.get('apkName', '')): + name_match_apk = x + # choose which of the available versions is most + # suiteable for index v0 + if fdroid_signed_apk: + apklist.append(fdroid_signed_apk) + elif name_match_apk: + apklist.append(name_match_apk) + else: + apklist.append(apksforver[0]) + + if len(apklist) == 0: + continue + + apel = doc.createElement("application") + apel.setAttribute("id", app.id) + root.appendChild(apel) + + addElement('id', app.id, doc, apel) + if app.added: + addElement('added', app.added.strftime('%Y-%m-%d'), doc, apel) + if app.lastUpdated: + addElement('lastupdated', app.lastUpdated.strftime('%Y-%m-%d'), doc, apel) + + addElementCheckLocalized('name', app, 'Name', doc, apel, name_from_apk) + addElementCheckLocalized('summary', app, 'Summary', doc, apel) + + if app.icon: + addElement('icon', app.icon, doc, apel) + + addElementCheckLocalized('desc', app, 'Description', doc, apel, + 'No description available') + + addElement('license', app.License, doc, apel) + if app.Categories: + addElement('categories', ','.join(app.Categories), doc, apel) + # We put the first (primary) category in LAST, which will have + # the desired effect of making clients that only understand one + # category see that one. + addElement('category', app.Categories[0], doc, apel) + addElement('web', app.WebSite, doc, apel) + addElement('source', app.SourceCode, doc, apel) + addElement('tracker', app.IssueTracker, doc, apel) + addElementNonEmpty('changelog', app.Changelog, doc, apel) + addElementNonEmpty('author', app.AuthorName, doc, apel) + addElementNonEmpty('email', app.AuthorEmail, doc, apel) + addElementNonEmpty('donate', app.Donate, doc, apel) + addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel) + addElementNonEmpty('litecoin', app.Litecoin, doc, apel) + addElementNonEmpty('openCollective', app.OpenCollective, doc, apel) + + # These elements actually refer to the current version (i.e. which + # one is recommended. They are historically mis-named, and need + # changing, but stay like this for now to support existing clients. + addElement('marketversion', app.CurrentVersion, doc, apel) + addElement('marketvercode', str(app.CurrentVersionCode), doc, apel) + + if app.Provides: + pv = app.Provides.split(',') + addElementNonEmpty('provides', ','.join(pv), doc, apel) + if app.RequiresRoot: + addElement('requirements', 'root', doc, apel) + + # Sort the APK list into version order, just so the web site + # doesn't have to do any work by default... 
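
# The preference used above (F-Droid-signed APK first, then one whose file
# name matches the release pattern, then whatever is left) parallels the
# grouping in sort_package_versions(). A self-contained sketch of that kind of
# group-then-newest ordering, using made-up APK dicts and signer values:
SAMPLE_APKS = [
    {'apkName': 'app_101.apk', 'versionCode': 101, 'signer': 'devkey'},
    {'apkName': 'app_102.apk', 'versionCode': 102, 'signer': 'fdroidkey'},
    {'apkName': 'app_102_upstream.apk', 'versionCode': 102, 'signer': 'devkey'},
]

def illustrative_sort_key(apk, dev_signer='devkey', fdroid_signer='fdroidkey'):
    # smaller group number wins: developer-signed, then F-Droid-signed, then others
    if apk['signer'] == dev_signer:
        group = 1
    elif apk['signer'] == fdroid_signer:
        group = 2
    else:
        group = 3
    # negate versionCode so newer versions sort first within a group
    return group, -apk['versionCode']

for sample_apk in sorted(SAMPLE_APKS, key=illustrative_sort_key):
    print(sample_apk['apkName'])
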
+ apklist = sorted(apklist, key=lambda apk: apk['versionCode'], reverse=True) + + antiFeatures = list(app.AntiFeatures) + if 'antiFeatures' in apklist[0]: + antiFeatures.extend(apklist[0]['antiFeatures']) + if antiFeatures: + afout = sorted(set(antiFeatures)) + addElementNonEmpty('antifeatures', ','.join(afout), doc, apel) + + # Check for duplicates - they will make the client unhappy... + for i in range(len(apklist) - 1): + first = apklist[i] + second = apklist[i + 1] + if first['versionCode'] == second['versionCode'] \ + and first['sig'] == second['sig']: + if first['hash'] == second['hash']: + raise FDroidException('"{0}/{1}" and "{0}/{2}" are exact duplicates!'.format( + repodir, first['apkName'], second['apkName'])) + else: + raise FDroidException('duplicates: "{0}/{1}" - "{0}/{2}"'.format( + repodir, first['apkName'], second['apkName'])) + + current_version_code = 0 + current_version_file = None + for apk in apklist: + file_extension = common.get_file_extension(apk['apkName']) + # find the APK for the "Current Version" + if current_version_code < app.CurrentVersionCode: + current_version_file = apk['apkName'] + if current_version_code < apk['versionCode']: + current_version_code = apk['versionCode'] + + apkel = doc.createElement("package") + apel.appendChild(apkel) + + versionName = apk.get('versionName') + if not versionName: + for build in app.get('Builds', []): + if ( + build['versionCode'] == apk['versionCode'] + and 'versionName' in build + ): + versionName = build['versionName'] + break + if versionName: + addElement('version', versionName, doc, apkel) + + addElement('versioncode', str(apk['versionCode']), doc, apkel) + addElement('apkname', apk['apkName'], doc, apkel) + addElementIfInApk('srcname', apk, 'srcname', doc, apkel) + + hashel = doc.createElement("hash") + hashel.setAttribute('type', 'sha256') + hashel.appendChild(doc.createTextNode(apk['hash'])) + apkel.appendChild(hashel) + + addElement('size', str(apk['size']), doc, apkel) + addElementIfInApk('sdkver', apk, + 'minSdkVersion', doc, apkel) + addElementIfInApk('targetSdkVersion', apk, + 'targetSdkVersion', doc, apkel) + addElementIfInApk('maxsdkver', apk, + 'maxSdkVersion', doc, apkel) + addElementIfInApk('obbMainFile', apk, + 'obbMainFile', doc, apkel) + addElementIfInApk('obbMainFileSha256', apk, + 'obbMainFileSha256', doc, apkel) + addElementIfInApk('obbPatchFile', apk, + 'obbPatchFile', doc, apkel) + addElementIfInApk('obbPatchFileSha256', apk, + 'obbPatchFileSha256', doc, apkel) + if 'added' in apk: + addElement('added', apk['added'].strftime('%Y-%m-%d'), doc, apkel) + + if file_extension == 'apk': # sig is required for APKs, but only APKs + addElement('sig', apk['sig'], doc, apkel) + + old_permissions = set() + sorted_permissions = sorted(apk['uses-permission']) + for perm in sorted_permissions: + perm_name = perm[0] + if perm_name.startswith("android.permission."): + perm_name = perm_name[19:] + old_permissions.add(perm_name) + addElementNonEmpty('permissions', ','.join(sorted(old_permissions)), doc, apkel) + + for permission in sorted_permissions: + permel = doc.createElement('uses-permission') + if permission[1] is not None: + permel.setAttribute('maxSdkVersion', '%d' % permission[1]) + apkel.appendChild(permel) + permel.setAttribute('name', permission[0]) + for permission_sdk_23 in sorted(apk['uses-permission-sdk-23']): + permel = doc.createElement('uses-permission-sdk-23') + if permission_sdk_23[1] is not None: + permel.setAttribute('maxSdkVersion', '%d' % permission_sdk_23[1]) + apkel.appendChild(permel) 
+ permel.setAttribute('name', permission_sdk_23[0]) + if 'nativecode' in apk: + addElement('nativecode', ','.join(sorted(apk['nativecode'])), doc, apkel) + addElementNonEmpty('features', ','.join(sorted(apk['features'])), doc, apkel) + + if current_version_file is not None \ + and common.config['make_current_version_link'] \ + and repodir == 'repo': # only create these + namefield = common.config['current_version_name_source'] + name = app.get(namefield) + if not name and namefield == 'Name': + name = app.get('localized', {}).get(DEFAULT_LOCALE, {}).get('name') + if not name: + name = app.id + sanitized_name = re.sub(b'''[ '"&%?+=/]''', b'', str(name).encode('utf-8')) + apklinkname = sanitized_name + os.path.splitext(current_version_file)[1].encode('utf-8') + current_version_path = os.path.join(repodir, current_version_file).encode('utf-8', 'surrogateescape') + if os.path.islink(apklinkname): + os.remove(apklinkname) + os.symlink(current_version_path, apklinkname) + # also symlink gpg signature, if it exists + for extension in (b'.asc', b'.sig'): + sigfile_path = current_version_path + extension + if os.path.exists(sigfile_path): + siglinkname = apklinkname + extension + if os.path.islink(siglinkname): + os.remove(siglinkname) + os.symlink(sigfile_path, siglinkname) + + if sys.version_info.minor >= 13: + # Python 3.13 changed minidom so it no longer converts " to an XML entity. + # https://github.com/python/cpython/commit/154477be722ae5c4e18d22d0860e284006b09c4f + # This just puts back the previous implementation, with black code format. + import inspect + import xml.dom.minidom + + def _write_data(writer, text, attr): # pylint: disable=unused-argument + if text: + text = ( + text.replace('&', '&') + .replace('<', '<') + .replace('"', '"') + .replace('>', '>') + ) + writer.write(text) + + argnames = tuple(inspect.signature(xml.dom.minidom._write_data).parameters) + if argnames == ('writer', 'text', 'attr'): + xml.dom.minidom._write_data = _write_data + else: + logging.warning('Failed to monkey patch minidom for index.xml support!') + + if common.options.pretty: + output = doc.toprettyxml(encoding='utf-8') + else: + output = doc.toxml(encoding='utf-8') + + with open(os.path.join(repodir, 'index.xml'), 'wb') as f: + f.write(output) + + if 'repo_keyalias' in common.config \ + or (common.options.nosign and 'repo_pubkey' in common.config): + + if common.options.nosign: + logging.info(_("Creating unsigned index in preparation for signing")) + else: + logging.info(_("Creating signed index with this key (SHA256):")) + logging.info("%s" % repo_pubkey_fingerprint) + + # Create a jar of the index... + jar_output = 'index_unsigned.jar' if common.options.nosign else 'index.jar' + p = FDroidPopen(['jar', 'cf', jar_output, 'index.xml'], cwd=repodir) + if p.returncode != 0: + raise FDroidException("Failed to create {0}".format(jar_output)) + + # Sign the index... + signed = os.path.join(repodir, 'index.jar') + if common.options.nosign: + _copy_to_local_copy_dir(repodir, os.path.join(repodir, jar_output)) + # Remove old signed index if not signing + if os.path.exists(signed): + os.remove(signed) + else: + signindex.config = common.config + signindex.sign_jar(signed, use_old_algs=True) + + # Copy the repo icon into the repo directory... 
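
# The monkey patch above restores the pre-3.13 behaviour of minidom, which
# escaped double quotes in text nodes as XML entities; Python 3.13+ writes them
# literally, which would change the bytes of index.xml. A small sketch, not
# used by fdroid itself, to see which behaviour the local interpreter has:
import sys
import xml.dom.minidom

demo_doc = xml.dom.minidom.Document()
demo_el = demo_doc.createElement('description')
demo_el.appendChild(demo_doc.createTextNode('a "quoted" word'))
demo_doc.appendChild(demo_el)
print(sys.version_info[:2], demo_doc.toxml())
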
+ icon_dir = os.path.join(repodir, 'icons') + repo_icon = common.config.get('repo_icon', common.default_config['repo_icon']) + iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon)) + if os.path.exists(repo_icon): + shutil.copyfile(common.config['repo_icon'], iconfilename) + else: + logging.warning(_('repo_icon "repo/icons/%s" does not exist, generating placeholder.') + % repo_icon) + os.makedirs(os.path.dirname(iconfilename), exist_ok=True) + try: + import qrcode + + qrcode.make(common.config['repo_url']).save(iconfilename) + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + _( + 'The "qrcode" Python package is not installed (e.g. apt-get install python3-qrcode)!' + ) + ) from e + except Exception: + exampleicon = os.path.join(common.get_examples_dir(), + common.default_config['repo_icon']) + shutil.copy(exampleicon, iconfilename) + + +def extract_pubkey(): + """Extract and return the repository's public key from the keystore. + + Returns + ------- + public key in hex + repository fingerprint + """ + if 'repo_pubkey' in common.config: + pubkey = unhexlify(common.config['repo_pubkey']) + elif 'keystorepass' in common.config: + env_vars = {'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': common.config['keystorepass']} + p = FDroidPopenBytes([common.config['keytool'], '-exportcert', + '-alias', common.config['repo_keyalias'], + '-keystore', common.config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS'] + + list(common.config['smartcardoptions']), + envs=env_vars, output=False, stderr_to_stdout=False) + if p.returncode != 0 or len(p.output) < 20: + msg = "Failed to get repo pubkey!" + if common.config['keystore'] == 'NONE': + msg += ' Is your crypto smartcard plugged in?' + raise FDroidException(msg) + pubkey = p.output + else: + raise FDroidException(_('Neither "repo_pubkey" nor "keystorepass" set in config.yml')) + + repo_pubkey_fingerprint = common.get_cert_fingerprint(pubkey) + return hexlify(pubkey), repo_pubkey_fingerprint + + +def add_mirrors_to_repodict(repo_section, repodict): + """Convert config into final dict of mirror metadata for the repo. + + Internally and in index-v2, mirrors is a list of dicts, but it can + be specified in the config as a string or list of strings. Also, + index v0 and v1 use a list of URL strings as the data structure. + + The first entry is traditionally the primary mirror and canonical + URL. 'mirrors' should not be present in the index if there is + only the canonical URL, and no other mirrors. + + The metadata items for each mirror entry are sorted by key to + ensure minimum diffs in the index files. + + """ + mirrors_config = common.config.get('mirrors', []) + if type(mirrors_config) not in (list, tuple): + mirrors_config = [mirrors_config] + + mirrors_yml = Path(f'config/{MIRRORS_CONFIG_NAME}.yml') + if mirrors_yml.exists(): + if mirrors_config: + raise FDroidException( + _('mirrors set twice, in config.yml and {path}!').format( + path=mirrors_yml + ) + ) + with mirrors_yml.open() as fp: + mirrors_config = yaml.load(fp) + if not isinstance(mirrors_config, list): + msg = _('{path} is not list, but a {datatype}!') + raise TypeError( + msg.format(path=mirrors_yml, datatype=type(mirrors_config).__name__) + ) + + if type(mirrors_config) not in (list, tuple, set): + msg = 'In config.yml, mirrors: is not list, but a {datatype}!' 
+ raise TypeError(msg.format(datatype=type(mirrors_config).__name__)) + + mirrorcheckfailed = False + mirrors = [] + urls = set() + for mirror in mirrors_config: + if isinstance(mirror, str): + mirror = {'url': mirror} + elif not isinstance(mirror, dict): + logging.error( + _('Bad entry type "{mirrortype}" in mirrors config: {mirror}').format( + mirrortype=type(mirror), mirror=mirror + ) + ) + mirrorcheckfailed = True + continue + config_url = mirror['url'] + base = os.path.basename(urllib.parse.urlparse(config_url).path.rstrip('/')) + if common.config.get('nonstandardwebroot') is not True and base != 'fdroid': + logging.error(_("mirror '%s' does not end with 'fdroid'!") % config_url) + mirrorcheckfailed = True + # must end with / or urljoin strips a whole path segment + if config_url.endswith('/'): + mirror['url'] = urllib.parse.urljoin(config_url, repo_section) + else: + mirror['url'] = urllib.parse.urljoin(config_url + '/', repo_section) + mirrors.append(mirror) + if mirror['url'] in urls: + mirrorcheckfailed = True + logging.error( + _('Duplicate entry "%s" in mirrors config!') % mirror['url'] + ) + urls.add(mirror['url']) + for mirror in common.config.get('servergitmirrors', []): + for url in get_mirror_service_urls(mirror): + mirrors.append({'url': url + '/' + repo_section}) + if mirrorcheckfailed: + raise FDroidException(_("Malformed repository mirrors.")) + + if not mirrors: + return + + repodict['mirrors'] = [] + canonical_url = repodict['address'] + found_primary = False + errors = 0 + for mirror in mirrors: + if canonical_url == mirror['url']: + found_primary = True + mirror['isPrimary'] = True + sortedmirror = dict() + for k in sorted(mirror.keys()): + sortedmirror[k] = mirror[k] + repodict['mirrors'].insert(0, sortedmirror) + elif mirror.get('isPrimary'): + errors += 1 + logging.error( + _('Mirror config for {url} contains "isPrimary" key!').format( + url=mirror['url'] + ) + ) + else: + repodict['mirrors'].append(mirror) + + if errors: + raise FDroidException(_('"isPrimary" key should not be added to mirrors!')) + + if repodict['mirrors'] and not found_primary: + repodict['mirrors'].insert(0, {'isPrimary': True, 'url': repodict['address']}) + + +def get_mirror_service_urls(mirror): + """Get direct URLs from git service for use by fdroidclient. + + Via 'servergitmirrors', fdroidserver can create and push a mirror + to certain well known git services like GitLab or GitHub. This + will always use the 'master' branch since that is the default + branch in git. The files are then accessible via alternate URLs, + where they are served in their raw format via a CDN rather than + from git. + + Both of the GitLab URLs will work with F-Droid, but only the + GitLab Pages will work in the browser This is because the "raw" + URLs are not served with the correct mime types, so any index.html + which is put in the repo will not be rendered. Putting an + index.html file in the repo root is a common way for to make + information about the repo available to end user. 
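
# The trailing-slash handling above is needed because urllib.parse.urljoin
# drops the final path segment of a base URL that does not end in '/'. A
# two-line check (placeholder URLs) makes the difference obvious:
from urllib.parse import urljoin

print(urljoin('https://example.org/fdroid', 'repo'))   # -> https://example.org/repo
print(urljoin('https://example.org/fdroid/', 'repo'))  # -> https://example.org/fdroid/repo
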
+ + """ + url = mirror['url'] + if url.startswith('git@'): + url = re.sub(r'^git@([^:]+):(.+)', r'https://\1/\2', url) + + segments = url.split("/") + + if segments[4].endswith('.git'): + segments[4] = segments[4][:-4] + + hostname = segments[2] + user = segments[3] + repo = segments[4] + branch = "master" + folder = "fdroid" + + urls = [] + if hostname == "github.com": + # Github-like RAW segments "https://raw.githubusercontent.com/user/repo/branch/folder" + segments[2] = "raw.githubusercontent.com" + segments.extend([branch, folder]) + urls.append('/'.join(segments)) + elif hostname == "gitlab.com": + git_mirror_path = os.path.join('git-mirror', folder) + if ( + mirror.get('index_only') + or common.get_dir_size(git_mirror_path) <= common.GITLAB_COM_PAGES_MAX_SIZE + ): + # Gitlab-like Pages segments "https://user.gitlab.io/repo/folder" + gitlab_pages = ["https:", "", user + ".gitlab.io", repo, folder] + urls.append('/'.join(gitlab_pages)) + else: + logging.warning( + _( + 'Skipping GitLab Pages mirror because the repo is too large (>%.2fGB)!' + ) + % (common.GITLAB_COM_PAGES_MAX_SIZE / 1000000000) + ) + # GitLab Raw "https://gitlab.com/user/repo/-/raw/branch/folder" + gitlab_raw = segments + ['-', 'raw', branch, folder] + urls.append('/'.join(gitlab_raw)) + # GitLab Artifacts "https://user.gitlab.io/-/repo/-/jobs/job_id/artifacts/public/folder" + job_id = os.getenv('CI_JOB_ID') + try: + int(job_id) + gitlab_artifacts = [ + "https:", + "", + user + ".gitlab.io", + '-', + repo, + '-', + 'jobs', + job_id, + 'artifacts', + 'public', + folder, + ] + urls.append('/'.join(gitlab_artifacts)) + except (TypeError, ValueError): + pass # no Job ID to use, ignore + + return urls + + +def download_repo_index(url_str, etag=None, verify_fingerprint=True, timeout=600): + """Download and verifies index v1 file, then returns its data. + + Use the versioned functions to be sure you are getting the + expected data format. + + """ + return download_repo_index_v1(url_str, etag, verify_fingerprint, timeout) + + +def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout=600): + """Download and verifies index v1 file, then returns its data. + + Downloads the repository index from the given :param url_str and + verifies the repository's fingerprint if :param verify_fingerprint + is not False. + + Raises + ------ + VerificationException() if the repository could not be verified + + Returns + ------- + A tuple consisting of: + - The index in JSON v1 format or None if the index did not change + - The new eTag as returned by the HTTP request + + """ + from . 
import net + + url = urllib.parse.urlsplit(url_str) + + fingerprint = None + if verify_fingerprint: + query = urllib.parse.parse_qs(url.query) + if 'fingerprint' not in query: + raise VerificationException(_("No fingerprint in URL.")) + fingerprint = query['fingerprint'][0] + + if url.path.endswith('/index-v1.jar'): + path = url.path[:-13].rstrip('/') + else: + path = url.path.rstrip('/') + + url = urllib.parse.SplitResult(url.scheme, url.netloc, path + '/index-v1.jar', '', '') + download, new_etag = net.http_get(url.geturl(), etag, timeout) + + if download is None: + return None, new_etag + + with tempfile.NamedTemporaryFile() as fp: + fp.write(download) + fp.flush() + index, public_key, public_key_fingerprint = get_index_from_jar( + fp.name, fingerprint, allow_deprecated=True + ) + index["repo"]["pubkey"] = hexlify(public_key).decode() + index["repo"]["fingerprint"] = public_key_fingerprint + index["apps"] = [metadata.App(app) for app in index["apps"]] + return index, new_etag + + +def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=None): + """Download and verifies index v2 file, then returns its data. + + Downloads the repository index from the given :param url_str and + verifies the repository's fingerprint if :param verify_fingerprint + is not False. In order to verify the data, the fingerprint must + be provided as part of the URL. + + Raises + ------ + VerificationException() if the repository could not be verified + + Returns + ------- + A tuple consisting of: + - The index in JSON v2 format or None if the index did not change + - The new eTag as returned by the HTTP request + + """ + from . import net + + etag # etag is unused but needs to be there to keep the same API as the earlier functions. + + url = urllib.parse.urlsplit(url_str) + + if timeout is not None: + logging.warning('"timeout" argument of download_repo_index_v2() is deprecated!') + + fingerprint = None + if verify_fingerprint: + query = urllib.parse.parse_qs(url.query) + if 'fingerprint' not in query: + raise VerificationException(_("No fingerprint in URL.")) + fingerprint = query['fingerprint'][0] + + if url.path.endswith('/entry.jar') or url.path.endswith('/index-v2.json'): + path = url.path.rsplit('/', 1)[0] + else: + path = url.path.rstrip('/') + url = urllib.parse.SplitResult(url.scheme, url.netloc, path, '', '') + + mirrors = common.get_mirrors(url, 'entry.jar') + f = net.download_using_mirrors(mirrors) + entry, public_key, fingerprint = get_index_from_jar(f, fingerprint) + + sha256 = entry['index']['sha256'] + mirrors = common.get_mirrors(url, entry['index']['name'][1:]) + f = net.download_using_mirrors(mirrors) + with open(f, 'rb') as fp: + index = fp.read() + if sha256 != hashlib.sha256(index).hexdigest(): + raise VerificationException( + _("SHA-256 of {url} does not match entry!").format(url=url) + ) + return json.loads(index), None + + +def get_index_from_jar(jarfile, fingerprint=None, allow_deprecated=False): + """Return the data, public key and fingerprint from an index JAR with one JSON file. + + The F-Droid index files always contain a single data file and a + JAR Signature. Since index-v1, the data file is always JSON. + That single data file is named the same as the JAR file. + + Parameters + ---------- + fingerprint is the SHA-256 fingerprint of signing key. Only + hex digits count, all other chars will can be discarded. 
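
# Hypothetical client-side use of download_repo_index_v2() defined above; the
# URL and fingerprint are placeholders and a network connection is required.
repo_url = 'https://example.org/fdroid/repo?fingerprint=' + 'AB' * 32
index_data, _etag = download_repo_index_v2(repo_url)
print(len(index_data.get('packages', {})), 'packages in index')
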
+ + Raises + ------ + VerificationException() if the repository could not be verified + + """ + logging.debug(_('Verifying index signature:')) + + if allow_deprecated: + common.verify_deprecated_jar_signature(jarfile) + else: + common.verify_jar_signature(jarfile) + + with zipfile.ZipFile(jarfile) as jar: + public_key, public_key_fingerprint = get_public_key_from_jar(jar) + if fingerprint is not None: + fingerprint = re.sub(r'[^0-9A-F]', r'', fingerprint.upper()) + if fingerprint != public_key_fingerprint: + raise VerificationException( + _("The repository's fingerprint does not match.") + ) + for f in jar.namelist(): + if not f.startswith('META-INF/'): + jsonfile = f + break + data = json.loads(jar.read(jsonfile)) + return data, public_key, public_key_fingerprint + + +def get_public_key_from_jar(jar): + """Get the public key and its fingerprint from a JAR file. + + Raises + ------ + VerificationException() if the JAR was not signed exactly once + + Parameters + ---------- + jar + a zipfile.ZipFile object + + Returns + ------- + the public key from the jar and its fingerprint + """ + # extract certificate from jar + certs = [n for n in jar.namelist() if common.SIGNATURE_BLOCK_FILE_REGEX.match(n)] + if len(certs) < 1: + raise VerificationException(_("Found no signing certificates for repository.")) + if len(certs) > 1: + raise VerificationException(_("Found multiple signing certificates for repository.")) + + # extract public key from certificate + public_key = common.get_certificate(jar.read(certs[0])) + public_key_fingerprint = common.get_cert_fingerprint(public_key).replace(' ', '') + + return public_key, public_key_fingerprint + + +def make_altstore(apps, apks, config, repodir, pretty=False): + """Assemble altstore-index.json for iOS (.ipa) apps. + + builds index files based on: + https://faq.altstore.io/distribute-your-apps/make-a-source + https://faq.altstore.io/distribute-your-apps/updating-apps + """ + if not any(Path(repodir).glob('*.ipa')): + # no IPA files present in repo, nothing to do here, exiting early + return + + indent = 2 if pretty else None + # for now alt-store support is english only + for lang in ['en']: + + # prepare minimal altstore index + idx = { + 'name': config['repo_name'], + "apps": [], + "news": [], + } + + # add optional values if available + # idx["subtitle"] F-Droid doesn't have a corresponding value + if config.get("repo_description"): + idx['description'] = config['repo_description'] + if (Path(repodir) / 'icons' / config['repo_icon']).exists(): + idx['iconURL'] = f"{config['repo_url']}/icons/{config['repo_icon']}" + # idx["headerURL"] F-Droid doesn't have a corresponding value + # idx["website"] F-Droid doesn't have a corresponding value + # idx["patreonURL"] F-Droid doesn't have a corresponding value + # idx["tintColor"] F-Droid doesn't have a corresponding value + # idx["featuredApps"] = [] maybe mappable to F-Droids what's new? 
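
# Shape of the minimal AltStore index assembled above, before any apps are
# appended; the values here are illustrative placeholders, the real ones come
# from config.yml (repo_name, repo_description, repo_url, repo_icon).
import json

example_idx = {
    'name': 'My F-Droid Repo',
    'description': 'Example repo description',
    'iconURL': 'https://example.org/fdroid/repo/icons/icon.png',
    'apps': [],
    'news': [],
}
print(json.dumps(example_idx, indent=2))
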
+ + # assemble "apps" + for packageName, app in apps.items(): + app_name = app.get("Name") or app.get("AutoName") + icon_url = "{}{}".format( + config['repo_url'], + app.get('iconv2', {}).get(DEFAULT_LOCALE, {}).get('name', ''), + ) + screenshot_urls = [ + "{}{}".format(config["repo_url"], s["name"]) + for s in app.get("screenshots", {}) + .get("phone", {}) + .get(DEFAULT_LOCALE, {}) + ] + + a = { + "name": app_name, + 'bundleIdentifier': packageName, + 'developerName': app.get("AuthorName") or f"{app_name} team", + 'iconURL': icon_url, + "localizedDescription": "", + 'appPermissions': { + "entitlements": set(), + "privacy": {}, + }, + 'versions': [], + } + + if app.get('summary'): + a['subtitle'] = app['summary'] + # a["tintColor"] F-Droid doesn't have a corresponding value + # a["category"] F-Droid doesn't have a corresponding value + # a['patreon'] F-Droid doesn't have a corresponding value + a["screenshots"] = screenshot_urls + + # populate 'versions' + for apk in apks: + last4 = apk.get('apkName', '').lower()[-4:] + if apk['packageName'] == packageName and last4 == '.ipa': + v = { + "version": apk["versionName"], + "date": apk["added"].isoformat(), + "downloadURL": f"{config['repo_url']}/{apk['apkName']}", + "size": apk['size'], + } + + # v['localizedDescription'] maybe what's new text? + v["minOSVersion"] = apk["ipa_MinimumOSVersion"] + v["maxOSVersion"] = apk["ipa_DTPlatformVersion"] + + # writing this spot here has the effect that always the + # permissions of the latest processed permissions list used + a['appPermissions']['privacy'] = apk['ipa_permissions'] + a['appPermissions']['entitlements'] = list(apk['ipa_entitlements']) + + a['versions'].append(v) + + if len(a['versions']) > 0: + idx['apps'].append(a) + + with open(Path(repodir) / 'altstore-index.json', "w", encoding="utf-8") as f: + json.dump(idx, f, indent=indent) diff --git a/fdroidserver/init.py b/fdroidserver/init.py index 26bc897d..39b18c1a 100644 --- a/fdroidserver/init.py +++ b/fdroidserver/init.py @@ -1,9 +1,8 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # -# update.py - part of the FDroid server tools +# init.py - part of the FDroid server tools # Copyright (C) 2010-2013, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2013-2014 Daniel Martí # Copyright (C) 2013 Hans-Christoph Steiner # # This program is free software: you can redistribute it and/or modify @@ -19,206 +18,281 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import hashlib +import glob +import logging import os import re import shutil import socket -import subprocess import sys -from optparse import OptionParser - -import common -from common import FDroidPopen, BuildException +from argparse import ArgumentParser +from . 
import _, common +from .exception import FDroidException config = {} -options = None -def write_to_config(key, value): - '''write a key/value to the local config.py''' - with open('config.py', 'r') as f: - data = f.read() - pattern = key + '\s*=.*' - repl = key + ' = "' + value + '"' + +def disable_in_config(key, value): + """Write a key/value to the local config.yml, then comment it out.""" + import yaml + + with open(common.CONFIG_FILE) as fp: + data = fp.read() + pattern = r'\n[\s#]*' + key + r':.*' + repl = '\n#' + yaml.dump({key: value}, default_flow_style=False) data = re.sub(pattern, repl, data) - with open('config.py', 'w') as f: - f.writelines(data) - - -def genpassword(): - '''generate a random password for when generating keys''' - h = hashlib.sha256() - h.update(os.urandom(16)) # salt - h.update(bytes(socket.getfqdn())) - return h.digest().encode('base64').strip() - - -def genkey(keystore, repo_keyalias, password, keydname): - '''generate a new keystore with a new key in it for signing repos''' - print('Generating a new key in "' + keystore + '"...') - p = FDroidPopen(['keytool', '-genkey', - '-keystore', keystore, '-alias', repo_keyalias, - '-keyalg', 'RSA', '-keysize', '4096', - '-sigalg', 'SHA256withRSA', - '-validity', '10000', - '-storepass', password, '-keypass', password, - '-dname', keydname]) - if p.returncode != 0: - raise BuildException("Failed to generate key", p.stdout, p.stderr) - # now show the lovely key that was just generated - p = subprocess.Popen(['keytool', '-list', '-v', - '-keystore', keystore, '-alias', repo_keyalias], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - output = p.communicate(password)[0] - print(output.lstrip().strip() + '\n\n') + with open(common.CONFIG_FILE, 'w') as fp: + fp.writelines(data) def main(): - - global options, config + global config # Parse command line... 
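
# What disable_in_config() above does to the config text, shown on an
# in-memory string instead of the real config.yml:
import re
import yaml

config_text = 'keystorepass: secret\nkeypass: secret\n'
key, value = 'keypass', 'never used with smartcard'
pattern = r'\n[\s#]*' + key + r':.*'
repl = '\n#' + yaml.dump({key: value}, default_flow_style=False)
print(re.sub(pattern, repl, config_text))
# keystorepass: secret
# #keypass: never used with smartcard
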
- parser = OptionParser() - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("-d", "--distinguished-name", default=None, - help="X.509 'Distiguished Name' used when generating keys") - parser.add_option("--keystore", default=None, - help="Path to the keystore for the repo signing key") - parser.add_option("--repo-keyalias", default=None, - help="Alias of the repo signing key in the keystore") - (options, args) = parser.parse_args() + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "-d", + "--distinguished-name", + default=None, + help=_("X.509 'Distinguished Name' used when generating keys"), + ) + parser.add_argument( + "--keystore", + default=None, + help=_("Path to the keystore for the repo signing key"), + ) + parser.add_argument( + "--repo-keyalias", + default=None, + help=_("Alias of the repo signing key in the keystore"), + ) + parser.add_argument( + "--android-home", + default=None, + help=_("Path to the Android SDK (sometimes set in ANDROID_HOME)"), + ) + parser.add_argument( + "--no-prompt", + action="store_true", + default=False, + help=_("Do not prompt for Android SDK path, just fail"), + ) + options = common.parse_args(parser) - # find root install prefix - tmp = os.path.dirname(sys.argv[0]) - if os.path.basename(tmp) == 'bin': - prefix = os.path.dirname(tmp) - examplesdir = prefix + '/share/doc/fdroidserver/examples' - else: - # we're running straight out of the git repo - prefix = tmp - examplesdir = prefix + common.set_console_logging(options.verbose, options.color) fdroiddir = os.getcwd() + test_config = dict() + examplesdir = common.get_examples_dir() + common.fill_config_defaults(test_config) - if not os.path.exists('config.py') and not os.path.exists('repo'): - # 'metadata' and 'tmp' are created in fdroid - os.mkdir('repo') - shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir) - shutil.copyfile(os.path.join(examplesdir, 'config.sample.py'), 'config.py') - os.chmod('config.py', 0o0600) - else: - print('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...') - sys.exit() - - # now that we have a local config.py, read configuration... - config = common.read_config(options) - - # track down where the Android SDK is - if os.path.isdir(config['sdk_path']): - print('Using "' + config['sdk_path'] + '" for the Android SDK') - sdk_path = config['sdk_path'] - elif 'ANDROID_HOME' in os.environ.keys(): - sdk_path = os.environ['ANDROID_HOME'] - else: - default_sdk_path = '/opt/android-sdk' - while True: - s = raw_input('Enter the path to the Android SDK (' + default_sdk_path + '): ') - if re.match('^\s*$', s) != None: - sdk_path = default_sdk_path - else: - sdk_path = s - if os.path.isdir(os.path.join(sdk_path, 'build-tools')): - break - else: - print('"' + s + '" does not contain the Android SDK! 
Try again...') - if os.path.isdir(sdk_path): - write_to_config('sdk_path', sdk_path) - - # try to find a working aapt, in all the recent possible paths - build_tools = os.path.join(sdk_path, 'build-tools') - aaptdirs = [] - aaptdirs.append(os.path.join(build_tools, config['build_tools'])) - aaptdirs.append(build_tools) - for f in sorted(os.listdir(build_tools), reverse=True): - if os.path.isdir(os.path.join(build_tools, f)): - aaptdirs.append(os.path.join(build_tools, f)) - for d in aaptdirs: - if os.path.isfile(os.path.join(d, 'aapt')): - aapt = os.path.join(d, 'aapt') - break - if os.path.isfile(aapt): - dirname = os.path.basename(os.path.dirname(aapt)) - if dirname == 'build-tools': - # this is the old layout, before versioned build-tools - write_to_config('build_tools', '') + # track down where the Android SDK is, the default is to use the path set + # in ANDROID_HOME if that exists, otherwise None + if options.android_home is not None: + test_config['sdk_path'] = options.android_home + elif not common.test_sdk_exists(test_config): + # if neither --android-home nor the default sdk_path + # exist, prompt the user using platform-specific default + # and if the user leaves it blank, ignore and move on. + default_sdk_path = '' + if sys.platform in ('win32', 'cygwin'): + p = os.path.join( + os.getenv('USERPROFILE'), 'AppData', 'Local', 'Android', 'android-sdk' + ) + elif sys.platform == 'darwin': + # on OSX, Homebrew is common and has an easy path to detect + p = '/usr/local/opt/android-sdk' + elif os.path.isdir('/usr/lib/android-sdk'): + # if the Debian packages are installed, suggest them + p = '/usr/lib/android-sdk' else: - write_to_config('build_tools', dirname) + p = '/opt/android-sdk' + if os.path.exists(p): + default_sdk_path = p + test_config['sdk_path'] = default_sdk_path - # track down where the Android NDK is - ndk_path = '/opt/android-ndk' - if os.path.isdir(config['ndk_path']): - ndk_path = config['ndk_path'] - elif 'ANDROID_NDK' in os.environ.keys(): - print('using ANDROID_NDK') - ndk_path = os.environ['ANDROID_NDK'] - if os.path.isdir(ndk_path): - write_to_config('ndk_path', ndk_path) - # the NDK is optional so we don't prompt the user for it if its not found + if not common.test_sdk_exists(test_config): + del test_config['sdk_path'] + while not options.no_prompt: + try: + s = input( + _('Enter the path to the Android SDK (%s) here:\n> ') + % default_sdk_path + ) + except KeyboardInterrupt: + print('') + sys.exit(1) + if re.match(r'^\s*$', s) is not None: + test_config['sdk_path'] = default_sdk_path + else: + test_config['sdk_path'] = s + if common.test_sdk_exists(test_config): + break + default_sdk_path = '' + + if test_config.get('sdk_path') and not common.test_sdk_exists(test_config): + raise FDroidException( + _("Android SDK not found at {path}!").format(path=test_config['sdk_path']) + ) + + if not os.path.exists(common.CONFIG_FILE): + # 'metadata' and 'tmp' are created in fdroid + if not os.path.exists('repo'): + os.mkdir('repo') + example_config_yml = os.path.join(examplesdir, common.CONFIG_FILE) + if os.path.exists(example_config_yml): + shutil.copyfile(example_config_yml, common.CONFIG_FILE) + else: + from pkg_resources import get_distribution + + versionstr = get_distribution('fdroidserver').version + if not versionstr: + versionstr = 'master' + with open(common.CONFIG_FILE, 'w') as fp: + fp.write('# see https://gitlab.com/fdroid/fdroidserver/blob/') + fp.write(versionstr) + fp.write(f'/examples/{common.CONFIG_FILE}\n') + os.chmod(common.CONFIG_FILE, 0o0600) + # If 
android_home is None, test_config['sdk_path'] will be used and + # "$ANDROID_HOME" may be used if the env var is set up correctly. + # If android_home is not None, the path given from the command line + # will be directly written in the config. + if 'sdk_path' in test_config: + common.write_to_config(test_config, 'sdk_path', options.android_home) + else: + logging.warning( + 'Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...' + ) + logging.info('Try running `fdroid init` in an empty directory.') + raise FDroidException('Repository already exists.') + + # now that we have a local config.yml, read configuration... + config = common.read_config() + + # the NDK is optional and there may be multiple versions of it, so it's + # left for the user to configure # find or generate the keystore for the repo signing key. First try the - # path written in the default config.py. Then check if the user has + # path written in the default config.yml. Then check if the user has # specified a path from the command line, which will trump all others. - # Otherwise, create ~/.local/share/fdroidserver and stick it in there. + # Otherwise, create ~/.local/share/fdroidserver and stick it in there. If + # keystore is set to NONE, that means that Java will look for keys in a + # Hardware Security Module aka Smartcard. keystore = config['keystore'] if options.keystore: - if os.path.isfile(options.keystore): + keystore = os.path.abspath(options.keystore) + if options.keystore == 'NONE': keystore = options.keystore - write_to_config('keystore', keystore) else: - print('"' + options.keystore + '" does not exist or is not a file!') - sys.exit(1) + keystore = os.path.abspath(options.keystore) + if not os.path.exists(keystore): + logging.info( + '"' + keystore + '" does not exist, creating a new keystore there.' 
+ ) + common.write_to_config(test_config, 'keystore', keystore) + repo_keyalias = None + keydname = None if options.repo_keyalias: repo_keyalias = options.repo_keyalias - write_to_config('repo_keyalias', repo_keyalias) + common.write_to_config(test_config, 'repo_keyalias', repo_keyalias) if options.distinguished_name: keydname = options.distinguished_name - write_to_config('keydname', keydname) - if not os.path.isfile(keystore): - # no existing or specified keystore, generate the whole thing - keystoredir = os.path.join(os.getenv('HOME'), - '.local', 'share', 'fdroidserver') - if not os.path.exists(keystoredir): - os.makedirs(keystoredir, mode=0o700) - keystore = os.path.join(keystoredir, 'keystore.jks') - write_to_config('keystore', keystore) - password = genpassword() - write_to_config('keystorepass', password) - write_to_config('keypass', password) - if not options.repo_keyalias: - repo_keyalias = socket.getfqdn() - write_to_config('repo_keyalias', repo_keyalias) - if not options.distinguished_name: - keydname = 'CN=' + repo_keyalias + ', OU=F-Droid' - write_to_config('keydname', keydname) - genkey(keystore, repo_keyalias, password, keydname) + common.write_to_config(test_config, 'keydname', keydname) + if keystore == 'NONE': # we're using a smartcard + common.write_to_config( + test_config, 'repo_keyalias', '1' + ) # seems to be the default + disable_in_config('keypass', 'never used with smartcard') + common.write_to_config( + test_config, + 'smartcardoptions', + ( + '-storetype PKCS11 ' + + '-providerClass sun.security.pkcs11.SunPKCS11 ' + + '-providerArg opensc-fdroid.cfg' + ), + ) + # find opensc-pkcs11.so + if not os.path.exists('opensc-fdroid.cfg'): + if os.path.exists('/usr/lib/opensc-pkcs11.so'): + opensc_so = '/usr/lib/opensc-pkcs11.so' + elif os.path.exists('/usr/lib64/opensc-pkcs11.so'): + opensc_so = '/usr/lib64/opensc-pkcs11.so' + else: + files = glob.glob( + '/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so' + ) + if len(files) > 0: + opensc_so = files[0] + else: + opensc_so = '/usr/lib/opensc-pkcs11.so' + logging.warning( + 'No OpenSC PKCS#11 module found, ' + + 'install OpenSC then edit "opensc-fdroid.cfg"!' + ) + with open('opensc-fdroid.cfg', 'w') as f: + f.write('name = OpenSC\nlibrary = ') + f.write(opensc_so) + f.write('\n') + logging.info( + "Repo setup using a smartcard HSM. Please edit keystorepass and repo_keyalias in config.yml." + ) + logging.info( + "If you want to generate a new repo signing key in the HSM you can do that with 'fdroid update " + "--create-key'." 
+ ) + elif os.path.exists(keystore): + to_set = ['keystorepass', 'keypass', 'repo_keyalias', 'keydname'] + if repo_keyalias: + to_set.remove('repo_keyalias') + if keydname: + to_set.remove('keydname') + logging.warning( + '\n' + + _('Using existing keystore "{path}"').format(path=keystore) + + '\n' + + _('Now set these in config.yml:') + + ' ' + + ', '.join(to_set) + + '\n' + ) + else: + password = common.genpassword() + c = dict(test_config) + c['keystorepass'] = password + c['keypass'] = password + c['repo_keyalias'] = repo_keyalias or socket.getfqdn() + c['keydname'] = 'CN=' + c['repo_keyalias'] + ', OU=F-Droid' + common.write_to_config(test_config, 'keystorepass', password) + common.write_to_config(test_config, 'keypass', password) + common.write_to_config(test_config, 'repo_keyalias', c['repo_keyalias']) + common.write_to_config(test_config, 'keydname', c['keydname']) + common.genkeystore(c) - print('Built repo based in "' + fdroiddir + '"') - print('with this config:') - print(' Android SDK:\t\t\t' + sdk_path) - print(' Android SDK Build Tools:\t' + os.path.dirname(aapt)) - print(' Android NDK (optional):\t' + ndk_path) - print(' Keystore for signing key:\t' + keystore) - print('\nTo complete the setup, add your APKs to "' + - os.path.join(fdroiddir, 'repo') + '"' + -''' + msg = '\n' + msg += _('Built repo based in "%s" with this config:') % fdroiddir + msg += '\n\n Android SDK:\t\t\t' + config['sdk_path'] + msg += '\n ' + _('Keystore for signing key:\t') + keystore + if repo_keyalias is not None: + msg += '\n Alias for key in store:\t' + repo_keyalias + msg += '\n\n' + msg += ( + _( + """To complete the setup, add your APKs to "%s" then run "fdroid update -c; fdroid update". You might also want to edit -"config.py" to set the URL, repo name, and more. You should also set up -a signing key. +"config.yml" to set the URL, repo name, and more. You should also set up +a signing key (a temporary one might have been automatically generated). -For more info: https://f-droid.org/manual/fdroid.html#Simple-Binary-Repository -and https://f-droid.org/manual/fdroid.html#Signing -''') +For more info: https://f-droid.org/docs/Setup_an_F-Droid_App_Repo +and https://f-droid.org/docs/Signing_Process""" + ) + % os.path.join(fdroiddir, 'repo') + ) + if not options.quiet: + # normally, INFO is only shown with --verbose, but show this unless --quiet + logger = logging.getLogger() + logger.setLevel(logging.INFO) + logger.info(msg) + logging.shutdown() diff --git a/fdroidserver/install.py b/fdroidserver/install.py index 85d6e34a..8c1dc948 100644 --- a/fdroidserver/install.py +++ b/fdroidserver/install.py @@ -1,9 +1,8 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # -# verify.py - part of the FDroid server tools +# install.py - part of the FDroid server tools # Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2013-2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,98 +17,396 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
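
# Roughly the keytool invocation that keystore creation boils down to, based on
# the flags used by the removed genkey() shown above; the real code path is
# common.genkeystore(), which also handles passwords and error reporting. The
# keystore name, alias, password, and dname here are placeholders.
import subprocess

subprocess.run([
    'keytool', '-genkey',
    '-keystore', 'keystore.jks', '-alias', 'repokey',
    '-keyalg', 'RSA', '-keysize', '4096',
    '-sigalg', 'SHA256withRSA', '-validity', '10000',
    '-storepass', 'example-password', '-keypass', 'example-password',
    '-dname', 'CN=example.org, OU=F-Droid',
], check=True)
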
-import sys -import os import glob -from optparse import OptionParser, OptionError +import locale +import logging +import os +import sys +import termios +import tty +from argparse import ArgumentParser, BooleanOptionalAction +from pathlib import Path +from urllib.parse import urlencode, urlparse, urlunparse -import common -from common import FDroidPopen +import defusedxml.ElementTree as XMLElementTree + +from . import _, common, github, index, net +from .exception import FDroidException + +DEFAULT_IPFS_GATEWAYS = ("https://gateway.ipfs.io/ipfs/",) +MAVEN_CENTRAL_MIRRORS = [ + { + "url": "https://repo1.maven.org/maven2/", + "dnsA": ["199.232.16.209"], + "worksWithoutSNI": True, + }, + { + "url": "https://repo.maven.apache.org/maven2/", + "dnsA": ["199.232.16.215"], + "worksWithoutSNI": True, + }, + { + "url": "https://maven-central-asia.storage-download.googleapis.com/maven2/", + }, + { + "url": "https://maven-central-eu.storage-download.googleapis.com/maven2/", + }, + { + "url": "https://maven-central.storage-download.googleapis.com/maven2/", + }, +] + + +# pylint: disable=unused-argument +def download_apk(appid='org.fdroid.fdroid', privacy_mode=False): + """Download an APK from F-Droid via the first mirror that works.""" + url = urlunparse( + urlparse(common.FDROIDORG_MIRRORS[0]['url'])._replace( + query=urlencode({'fingerprint': common.FDROIDORG_FINGERPRINT}) + ) + ) + + data, _ignored = index.download_repo_index_v2(url) + app = data.get('packages', dict()).get(appid) + preferred_version = None + for version in app['versions'].values(): + if not preferred_version: + # if all else fails, use the first one + preferred_version = version + if not version.get('releaseChannels'): + # prefer APK in default release channel + preferred_version = version + break + + mirrors = common.append_filename_to_mirrors( + preferred_version['file']['name'][1:], common.FDROIDORG_MIRRORS + ) + ipfsCIDv1 = preferred_version['file'].get('ipfsCIDv1') + if ipfsCIDv1: + for gateway in DEFAULT_IPFS_GATEWAYS: + mirrors.append({'url': os.path.join(gateway, ipfsCIDv1)}) + f = net.download_using_mirrors(mirrors) + if f and os.path.exists(f): + versionCode = preferred_version['manifest']['versionCode'] + f = Path(f) + return str(f.rename(f.with_stem(f'{appid}_{versionCode}')).resolve()) + + +def download_fdroid_apk(privacy_mode=False): # pylint: disable=unused-argument + """Directly download the current F-Droid APK and verify it. + + This downloads the "download button" link, which is the version + that is best tested for new installs. 
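
# The fingerprint query parameter is attached with urllib.parse as above; a
# stand-alone version of that one-liner with a placeholder mirror URL and
# fingerprint:
from urllib.parse import urlencode, urlparse, urlunparse

base = 'https://mirror.example.org/fdroid/repo'
with_fp = urlunparse(urlparse(base)._replace(query=urlencode({'fingerprint': 'AB' * 32})))
print(with_fp)  # https://mirror.example.org/fdroid/repo?fingerprint=ABAB...AB
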
+ + """ + mirror = common.FDROIDORG_MIRRORS[0] + mirror['url'] = urlunparse(urlparse(mirror['url'])._replace(path='F-Droid.apk')) + return net.download_using_mirrors([mirror]) + + +def download_fdroid_apk_from_github(privacy_mode=False): + """Download F-Droid.apk from F-Droid's GitHub Releases.""" + if common.config and not privacy_mode: + token = common.config.get('github_token') + else: + token = None + gh = github.GithubApi(token, 'https://github.com/f-droid/fdroidclient') + latest_apk = gh.get_latest_apk() + filename = os.path.basename(latest_apk) + return net.download_file(latest_apk, os.path.join(common.get_cachedir(), filename)) + + +def download_fdroid_apk_from_ipns(privacy_mode=False): + """Download the F-Droid APK from an IPNS repo.""" + cid = 'k51qzi5uqu5dl4hbcksbdmplanu9n4hivnqsupqe6vzve1pdbeh418ssptldd3' + mirrors = [ + {"url": f"https://ipfs.io/ipns/{cid}/F-Droid.apk"}, + ] + if not privacy_mode: + mirrors.append({"url": f"https://{cid}.ipns.dweb.link/F-Droid.apk"}) + return net.download_using_mirrors(mirrors) + + +def download_fdroid_apk_from_maven(privacy_mode=False): + """Download F-Droid.apk from Maven Central and official mirrors.""" + path = 'org/fdroid/fdroid/F-Droid' + if privacy_mode: + mirrors = MAVEN_CENTRAL_MIRRORS[:2] # skip the Google servers + else: + mirrors = MAVEN_CENTRAL_MIRRORS + metadata = net.download_using_mirrors( + common.append_filename_to_mirrors( + os.path.join(path, 'maven-metadata.xml'), mirrors + ) + ) + version = XMLElementTree.parse(metadata).getroot().findall('*.//latest')[0].text + mirrors = common.append_filename_to_mirrors( + os.path.join(path, version, f'F-Droid-{version}.apk'), mirrors + ) + return net.download_using_mirrors(mirrors) + + +def install_fdroid_apk(privacy_mode=False): + """Download and install F-Droid.apk using all tricks we can muster. + + By default, this first tries to fetch the official install APK + which is offered when someone clicks the "download" button on + https://f-droid.org/. Then it will try all the mirrors and + methods until it gets something successful, or runs out of + options. + + There is privacy_mode which tries to download from mirrors first, + so that this downloads from a mirror that has many different kinds + of files available, thereby breaking the clear link to F-Droid. + + Returns + ------- + None for success or the error message. 
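
# The "try each method until one works" strategy described above relies on
# Python's for/else: the else branch only runs when no break happened, i.e.
# when every method raised. The bare shape of that pattern, detached from the
# real download functions:
def first_that_works(methods):
    for method in methods:
        try:
            result = method()
            break
        except Exception as e:
            print('failed:', e)
    else:
        return None  # every method failed
    return result

print(first_that_works([lambda: 1 / 0, lambda: 'ok']))  # prints the error, then 'ok'
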
+ + """ + country_code = locale.getlocale()[0].split('_')[-1] + if privacy_mode is None and country_code in ('CN', 'HK', 'IR', 'TM'): + logging.warning( + _('Privacy mode was enabled based on your locale ({country_code}).').format( + country_code=country_code + ) + ) + privacy_mode = True + + if privacy_mode or not (common.config and common.config.get('jarsigner')): + download_methods = [ + download_fdroid_apk_from_maven, + download_fdroid_apk_from_ipns, + download_fdroid_apk_from_github, + ] + else: + download_methods = [ + download_apk, + download_fdroid_apk_from_maven, + download_fdroid_apk_from_github, + download_fdroid_apk_from_ipns, + download_fdroid_apk, + ] + for method in download_methods: + try: + f = method(privacy_mode=privacy_mode) + break + except Exception as e: + logging.info(e) + else: + return _('F-Droid.apk could not be downloaded from any known source!') + + fingerprint = common.apk_signer_fingerprint(f) + if fingerprint.upper() != common.FDROIDORG_FINGERPRINT: + return _('{path} has the wrong fingerprint ({fingerprint})!').format( + path=f, fingerprint=fingerprint + ) + install_apk(f) + + +def install_apk(f): + if common.config and common.config.get('apksigner'): + # TODO this should always verify, but that requires APK sig verification in Python #94 + logging.info(_('Verifying package {path} with apksigner.').format(path=f)) + common.verify_apk_signature(f) + if common.config and common.config.get('adb'): + if devices(): + install_apks_to_devices([f]) + os.remove(f) + else: + os.remove(f) + return _('No devices found for `adb install`! Please plug one in.') -options = None -config = None def devices(): - p = FDroidPopen(["adb", "devices"]) + """Get the list of device serials for use with adb commands.""" + p = common.SdkToolsPopen(['adb', "devices"]) if p.returncode != 0: - raise Exception("An error occured when finding devices: %s" % p.stderr) - lines = p.stdout.splitlines() - if lines[0].startswith('* daemon not running'): - lines = lines[2:] - if len(lines) < 3: - return [] - lines = lines[1:-1] - return [l.split()[0] for l in lines] + raise FDroidException("An error occured when finding devices: %s" % p.output) + serials = list() + for line in p.output.splitlines(): + columns = line.strip().split("\t", maxsplit=1) + if len(columns) == 2: + serial, status = columns + if status == 'device': + serials.append(serial) + else: + d = {'serial': serial, 'status': status} + logging.warning(_('adb reports {serial} is "{status}"!'.format(**d))) + return serials + + +def install_apks_to_devices(apks): + """Install the list of APKs to all Android devices reported by `adb devices`.""" + for apk in apks: + # Get device list each time to avoid device not found errors + devs = devices() + if not devs: + raise FDroidException(_("No attached devices found")) + logging.info(_("Installing %s...") % apk) + for dev in devs: + logging.info( + _("Installing '{apkfilename}' on {dev}...").format( + apkfilename=apk, dev=dev + ) + ) + p = common.SdkToolsPopen(['adb', "-s", dev, "install", apk]) + fail = "" + for line in p.output.splitlines(): + if line.startswith("Failure"): + fail = line[9:-1] + if not fail: + continue + + if fail == "INSTALL_FAILED_ALREADY_EXISTS": + logging.warning( + _('"{apkfilename}" is already installed on {dev}.').format( + apkfilename=apk, dev=dev + ) + ) + else: + raise FDroidException( + _("Failed to install '{apkfilename}' on {dev}: {error}").format( + apkfilename=apk, dev=dev, error=fail + ) + ) + + +def read_char(): + """Read input from the terminal prompt one 
char at a time.""" + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(fd) + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + + +def strtobool(val): + """Convert a localized string representation of truth to True or False.""" + return val.lower() in ('', 'y', 'yes', _('yes'), _('true')) # '' is pressing Enter + + +def prompt_user(yes, msg): + """Prompt user for yes/no, supporting Enter and Esc as accepted answers.""" + run_install = yes + if yes is None and sys.stdout.isatty(): + print(msg, end=' ', flush=True) + answer = '' + while True: + in_char = read_char() + if in_char == '\r': # Enter key + break + if not in_char.isprintable(): + sys.exit(1) + print(in_char, end='', flush=True) + answer += in_char + run_install = strtobool(answer) + print() + return run_install def main(): + parser = ArgumentParser( + usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]" + ) + common.setup_global_opts(parser) + parser.add_argument( + "appid", + nargs='*', + help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), + ) + parser.add_argument( + "-a", + "--all", + action="store_true", + default=False, + help=_("Install all signed applications available"), + ) + parser.add_argument( + "-p", + "--privacy-mode", + action=BooleanOptionalAction, + default=None, + help=_("Download F-Droid.apk using mirrors that leak less to the network"), + ) + parser.add_argument( + "-y", + "--yes", + action="store_true", + default=None, + help=_("Automatic yes to all prompts."), + ) + parser.add_argument( + "-n", + "--no", + action="store_false", + dest='yes', + help=_("Automatic no to all prompts."), + ) + options = common.parse_args(parser) - global options, config + common.set_console_logging(options.verbose, options.color) + logging.captureWarnings(True) # for SNIMissingWarning - # Parse command line... - parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("-a", "--all", action="store_true", default=False, - help="Install all signed applications available") - (options, args) = parser.parse_args() + common.get_config() - if not args and not options.all: - raise OptionError("If you really want to install all the signed apps, use --all", "all") - - config = common.read_config(options) + if not options.appid and not options.all: + run_install = prompt_user( + options.yes, + _('Would you like to download and install F-Droid.apk via adb? (YES/no)'), + ) + if run_install: + sys.exit(install_fdroid_apk(options.privacy_mode)) + sys.exit(1) output_dir = 'repo' - if not os.path.isdir(output_dir): - print "No signed output directory - nothing to do" - sys.exit(0) + if (options.appid or options.all) and not os.path.isdir(output_dir): + logging.error(_("No signed output directory - nothing to do")) + run_install = prompt_user( + options.yes, + _('Would you like to download the app(s) from f-droid.org? 
(YES/no)'), + ) + if run_install: + for appid in options.appid: + f = download_apk(appid) + install_apk(f) + sys.exit(install_fdroid_apk(options.privacy_mode)) + sys.exit(1) - if args: + if options.appid: + vercodes = common.read_pkg_args(options.appid, True) + common.get_metadata_files(vercodes) # only check appids + apks = {appid: None for appid in vercodes} - vercodes = common.read_pkg_args(args, True) - apks = { appid : None for appid in vercodes } - - # Get the signed apk with the highest vercode + # Get the signed APK with the highest vercode for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))): - - appid, vercode = common.apknameinfo(apkfile) + try: + appid, vercode = common.publishednameinfo(apkfile) + except FDroidException: + continue if appid not in apks: continue if vercodes[appid] and vercode not in vercodes[appid]: continue apks[appid] = apkfile - for appid, apk in apks.iteritems(): + for appid, apk in apks.items(): if not apk: - raise Exception("No signed apk available for %s" % appid) + raise FDroidException(_("No signed APK available for %s") % appid) + install_apks_to_devices(apks.values()) - else: + elif options.all: + apks = { + common.publishednameinfo(apkfile)[0]: apkfile + for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))) + } + install_apks_to_devices(apks.values()) - apks = { common.apknameinfo(apkfile)[0] : apkfile for apkfile in - sorted(glob.glob(os.path.join(output_dir, '*.apk'))) } + logging.info('\n' + _('Finished')) - for appid, apk in apks.iteritems(): - # Get device list each time to avoid device not found errors - devs = devices() - if not devs: - raise Exception("No attached devices found") - print "Installing %s..." % apk - for dev in devs: - print "Installing %s on %s..." % (apk, dev) - p = FDroidPopen(["adb", "-s", dev, "install", apk ]) - fail= "" - for line in p.stdout.splitlines(): - if line.startswith("Failure"): - fail = line[9:-1] - if fail: - if fail == "INSTALL_FAILED_ALREADY_EXISTS": - print "%s is already installed on %s." % (apk, dev) - else: - raise Exception("Failed to install %s on %s: %s" % ( - apk, dev, fail)) - - print "\nFinished" if __name__ == "__main__": main() - diff --git a/fdroidserver/lint.py b/fdroidserver/lint.py index 3def6ff7..99b1a392 100644 --- a/fdroidserver/lint.py +++ b/fdroidserver/lint.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # -# rewritemeta.py - part of the FDroid server tool -# Copyright (C) 2010-12, Ciaran Gultnieks, ciaran@ciarang.com +# lint.py - part of the FDroid server tool +# Copyright (C) 2013-2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -17,71 +16,1255 @@ # You should have received a copy of the GNU Affero General Public Licen # along with this program. If not, see . -from optparse import OptionParser -import common, metadata +import difflib +import platform +import re +import sys +import urllib.parse +from argparse import ArgumentParser +from pathlib import Path + +from fdroidserver._yaml import yaml + +from . import _, common, metadata, rewritemeta config = None -options = None -appid = None -def warn(message): - global appid - if appid: - print "%s:" % appid - appid = None - print(' %s' % message) +def enforce_https(domain): + return ( + re.compile( + r'^http://([^/]*\.)?' 
+ re.escape(domain) + r'(/.*)?', re.IGNORECASE + ), + domain + " URLs should always use https://", + ) + + +https_enforcings = [ + enforce_https('github.com'), + enforce_https('gitlab.com'), + enforce_https('bitbucket.org'), + enforce_https('apache.org'), + enforce_https('google.com'), + enforce_https('git.code.sf.net'), + enforce_https('svn.code.sf.net'), + enforce_https('anongit.kde.org'), + enforce_https('savannah.nongnu.org'), + enforce_https('git.savannah.nongnu.org'), + enforce_https('download.savannah.nongnu.org'), + enforce_https('savannah.gnu.org'), + enforce_https('git.savannah.gnu.org'), + enforce_https('download.savannah.gnu.org'), + enforce_https('github.io'), + enforce_https('gitlab.io'), + enforce_https('githubusercontent.com'), +] + + +def forbid_shortener(domain): + return ( + re.compile(r'https?://[^/]*' + re.escape(domain) + r'/.*'), + _("URL shorteners should not be used"), + ) + + +http_url_shorteners = [ + forbid_shortener('1url.com'), + forbid_shortener('adf.ly'), + forbid_shortener('bc.vc'), + forbid_shortener('bit.do'), + forbid_shortener('bit.ly'), + forbid_shortener('bitly.com'), + forbid_shortener('budurl.com'), + forbid_shortener('buzurl.com'), + forbid_shortener('cli.gs'), + forbid_shortener('cur.lv'), + forbid_shortener('cutt.us'), + forbid_shortener('db.tt'), + forbid_shortener('filoops.info'), + forbid_shortener('goo.gl'), + forbid_shortener('is.gd'), + forbid_shortener('ity.im'), + forbid_shortener('j.mp'), + forbid_shortener('l.gg'), + forbid_shortener('lnkd.in'), + forbid_shortener('moourl.com'), + forbid_shortener('ow.ly'), + forbid_shortener('para.pt'), + forbid_shortener('po.st'), + forbid_shortener('q.gs'), + forbid_shortener('qr.ae'), + forbid_shortener('qr.net'), + forbid_shortener('rdlnk.com'), + forbid_shortener('scrnch.me'), + forbid_shortener('short.nr'), + forbid_shortener('sn.im'), + forbid_shortener('snipurl.com'), + forbid_shortener('su.pr'), + forbid_shortener('t.co'), + forbid_shortener('tiny.cc'), + forbid_shortener('tinyarrows.com'), + forbid_shortener('tinyurl.com'), + forbid_shortener('tr.im'), + forbid_shortener('tweez.me'), + forbid_shortener('twitthis.com'), + forbid_shortener('twurl.nl'), + forbid_shortener('tyn.ee'), + forbid_shortener('u.bb'), + forbid_shortener('u.to'), + forbid_shortener('ur1.ca'), + forbid_shortener('urlof.site'), + forbid_shortener('v.gd'), + forbid_shortener('vzturl.com'), + forbid_shortener('x.co'), + forbid_shortener('xrl.us'), + forbid_shortener('yourls.org'), + forbid_shortener('zip.net'), + forbid_shortener('✩.ws'), + forbid_shortener('➡.ws'), +] + +http_checks = ( + https_enforcings + + http_url_shorteners + + [ + ( + re.compile(r'^(?!https?://)[^/]+'), + _("URL must start with https:// or http://"), + ), + ( + re.compile(r'^https://(github|gitlab)\.com(/[^/]+){2,3}\.git'), + _("Appending .git is not necessary"), + ), + ( + re.compile( + r'^https://[^/]*(github|gitlab|bitbucket|rawgit|githubusercontent)\.[a-zA-Z]+/([^/]+/){2,3}(master|main)/' + ), + _( + "Use /HEAD instead of /master or /main to point at a file in the default branch" + ), + ), + ] +) + +regex_checks = { + 'WebSite': http_checks, + 'SourceCode': http_checks, + 'Repo': https_enforcings, + 'UpdateCheckMode': https_enforcings, + 'IssueTracker': http_checks + + [ + (re.compile(r'.*github\.com/[^/]+/[^/]+/*$'), _("/issues is missing")), + (re.compile(r'.*gitlab\.com/[^/]+/[^/]+/*$'), _("/issues is missing")), + ], + 'Donate': http_checks + + [ + ( + re.compile(r'.*liberapay\.com'), + _("Liberapay donation methods belong in the Liberapay: 
field"), + ), + ( + re.compile(r'.*opencollective\.com'), + _("OpenCollective donation methods belong in the OpenCollective: field"), + ), + ], + 'Changelog': http_checks, + 'Author Name': [ + (re.compile(r'^\s'), _("Unnecessary leading space")), + (re.compile(r'.*\s$'), _("Unnecessary trailing space")), + ], + 'Summary': [ + ( + re.compile(r'.*\b(free software|open source)\b.*', re.IGNORECASE), + _("No need to specify that the app is Free Software"), + ), + ( + re.compile( + r'.*((your|for).*android|android.*(app|device|client|port|version))', + re.IGNORECASE, + ), + _("No need to specify that the app is for Android"), + ), + (re.compile(r'.*[a-z0-9][.!?]( |$)'), _("Punctuation should be avoided")), + (re.compile(r'^\s'), _("Unnecessary leading space")), + (re.compile(r'.*\s$'), _("Unnecessary trailing space")), + ], + 'Description': https_enforcings + + http_url_shorteners + + [ + (re.compile(r'\s*[*#][^ .]'), _("Invalid bulleted list")), + ( + re.compile(r'https://f-droid.org/[a-z][a-z](_[A-Za-z]{2,4})?/'), + _("Locale included in f-droid.org URL"), + ), + (re.compile(r'^\s'), _("Unnecessary leading space")), + (re.compile(r'.*\s$'), _("Unnecessary trailing space")), + ( + re.compile( + r'.*<(applet|base|body|button|embed|form|head|html|iframe|img|input|link|object|picture|script|source|style|svg|video).*', + re.IGNORECASE, + ), + _("Forbidden HTML tags"), + ), + ( + re.compile(r""".*\s+src=["']javascript:.*"""), + _("Javascript in HTML src attributes"), + ), + ], +} + +# config keys that are currently ignored by lint, but could be supported. +ignore_config_keys = ( + 'github_releases', + 'java_paths', +) + +bool_keys = ( + 'allow_disabled_algorithms', + 'androidobservatory', + 'build_server_always', + 'deploy_process_logs', + 'keep_when_not_allowed', + 'make_current_version_link', + 'nonstandardwebroot', + 'per_app_repos', + 'refresh_scanner', + 'scan_binary', + 'sync_from_local_copy_dir', +) + +check_config_keys = ( + 'ant', + 'apk_signing_key_block_list', + 'archive', + 'archive_description', + 'archive_icon', + 'archive_name', + 'archive_older', + 'archive_url', + 'archive_web_base_url', + 'awsbucket', + 'awsbucket_index_only', + 'binary_transparency_remote', + 'cachedir', + 'char_limits', + 'current_version_name_source', + 'git_mirror_size_limit', + 'github_token', + 'gpghome', + 'gpgkey', + 'gradle', + 'identity_file', + 'install_list', + 'java_paths', + 'keyaliases', + 'keydname', + 'keypass', + 'keystore', + 'keystorepass', + 'lint_licenses', + 'local_copy_dir', + 'mirrors', + 'mvn3', + 'ndk_paths', + 'path_to_custom_rclone_config', + 'rclone_config', + 'repo', + 'repo_description', + 'repo_icon', + 'repo_key_sha256', + 'repo_keyalias', + 'repo_maxage', + 'repo_name', + 'repo_pubkey', + 'repo_url', + 'repo_web_base_url', + 'scanner_signature_sources', + 'sdk_path', + 'servergitmirrors', + 'serverwebroot', + 'smartcardoptions', + 'sync_from_local_copy_dir', + 'uninstall_list', + 'virustotal_apikey', +) + +locale_pattern = re.compile(r"[a-z]{2,3}(-([A-Z][a-zA-Z]+|\d+|[a-z]+))*") + +versioncode_check_pattern = re.compile(r"(\\d|\[(0-9|\\d)_?(a-fA-F)?])[+]") + +ANTIFEATURES_KEYS = None +ANTIFEATURES_PATTERN = None +CATEGORIES_KEYS = list() + + +def load_antiFeatures_config(): + """Lazy loading, since it might read a lot of files.""" + global ANTIFEATURES_KEYS, ANTIFEATURES_PATTERN + k = common.ANTIFEATURES_CONFIG_NAME + if not ANTIFEATURES_KEYS or k not in common.config: + common.config[k] = common.load_localized_config(k, 'repo') + ANTIFEATURES_KEYS = 
sorted(common.config[k].keys()) + ANTIFEATURES_PATTERN = ','.join(ANTIFEATURES_KEYS) + + +def load_categories_config(): + """Lazy loading, since it might read a lot of files.""" + global CATEGORIES_KEYS + k = common.CATEGORIES_CONFIG_NAME + if not CATEGORIES_KEYS: + if config and k in config: + CATEGORIES_KEYS = config[k] + else: + config[k] = common.load_localized_config(k, 'repo') + CATEGORIES_KEYS = list(config[k].keys()) + + +def check_regexes(app): + for f, checks in regex_checks.items(): + for m, r in checks: + v = app.get(f) + t = metadata.fieldtype(f) + if t == metadata.TYPE_MULTILINE: + for line in v.splitlines(): + if m.match(line): + yield "%s at line '%s': %s" % (f, line, r) + else: + if v is None: + continue + if m.match(v): + yield "%s '%s': %s" % (f, v, r) + + +def get_lastbuild(builds): + lowest_vercode = -1 + lastbuild = None + for build in builds: + if not build.disable: + vercode = build.versionCode + if lowest_vercode == -1 or vercode < lowest_vercode: + lowest_vercode = vercode + if not lastbuild or build.versionCode > lastbuild.versionCode: + lastbuild = build + return lastbuild + + +def check_update_check_data_url(app): # noqa: D403 + """UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs.""" + if app.UpdateCheckData and app.UpdateCheckMode == 'HTTP': + urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') + for url in (urlcode, urlver): + if url != '.': + parsed = urllib.parse.urlparse(url) + if not parsed.scheme or not parsed.netloc: + yield _('UpdateCheckData not a valid URL: {url}').format(url=url) + if parsed.scheme != 'https': + yield _('UpdateCheckData must use HTTPS URL: {url}').format(url=url) + + +def check_update_check_data_int(app): # noqa: D403 + """UpdateCheckData regex must match integers.""" + if app.UpdateCheckData: + urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') + # codeex can be empty as well + if codeex and not versioncode_check_pattern.search(codeex): + yield _( + f'UpdateCheckData must match the versionCode as integer (\\d or [0-9]): {codeex}' + ) + + +def check_vercode_operation(app): + if not app.VercodeOperation: + return + invalid_ops = [] + for op in app.VercodeOperation: + if not common.VERCODE_OPERATION_RE.match(op): + invalid_ops += op + if invalid_ops: + yield _('Invalid VercodeOperation: {invalid_ops}').format( + invalid_ops=invalid_ops + ) + + +def check_ucm_tags(app): + lastbuild = get_lastbuild(app.get('Builds', [])) + if ( + lastbuild is not None + and lastbuild.commit + and app.UpdateCheckMode == 'RepoManifest' + and not lastbuild.commit.startswith('unknown') + and lastbuild.versionCode == app.CurrentVersionCode + and not lastbuild.forcevercode + and any(s in lastbuild.commit for s in '.,_-/') + ): + yield _( + "Last used commit '{commit}' looks like a tag, but UpdateCheckMode is '{ucm}'" + ).format(commit=lastbuild.commit, ucm=app.UpdateCheckMode) + + +def check_char_limits(app): + limits = config['char_limits'] + + if len(app.Summary) > limits['summary']: + yield _("Summary of length {length} is over the {limit} char limit").format( + length=len(app.Summary), limit=limits['summary'] + ) + + if len(app.Description) > limits['description']: + yield _("Description of length {length} is over the {limit} char limit").format( + length=len(app.Description), limit=limits['description'] + ) + + +def check_old_links(app): + usual_sites = [ + 'github.com', + 'gitlab.com', + 'bitbucket.org', + ] + old_sites = [ + 'gitorious.org', + 'code.google.com', + ] + if any(s in app.Repo for s in 
usual_sites): + for f in ['WebSite', 'SourceCode', 'IssueTracker', 'Changelog']: + v = app.get(f) + if any(s in v for s in old_sites): + yield _("App is in '{repo}' but has a link to {url}").format( + repo=app.Repo, url=v + ) + + +def check_useless_fields(app): + if app.UpdateCheckName == app.id: + yield _("UpdateCheckName is set to the known application ID, it can be removed") + + +filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)') + + +def check_checkupdates_ran(app): + if filling_ucms.match(app.UpdateCheckMode): + if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == 0: + yield _( + "UpdateCheckMode is set but it looks like checkupdates hasn't been run yet." + ) + + +def check_empty_fields(app): + if not app.Categories: + yield _("Categories are not set") + + +def check_categories(app): + """App uses 'Categories' key and parsed config uses 'categories' key.""" + for categ in app.Categories: + if categ not in CATEGORIES_KEYS: + yield _("Categories '%s' is not valid" % categ) + + +def check_duplicates(app): + links_seen = set() + for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']: + v = app.get(f) + if not v: + continue + v = v.lower() + if v in links_seen: + yield _("Duplicate link in '{field}': {url}").format(field=f, url=v) + else: + links_seen.add(v) + + name = common.get_app_display_name(app) + if app.Summary and name: + if app.Summary.lower() == name.lower(): + yield _("Summary '%s' is just the app's name") % app.Summary + + if app.Summary and app.Description and len(app.Description) == 1: + if app.Summary.lower() == app.Description[0].lower(): + yield _("Description '%s' is just the app's summary") % app.Summary + + seenlines = set() + for line in app.Description.splitlines(): + if len(line) < 1: + continue + if line in seenlines: + yield _("Description has a duplicate line") + seenlines.add(line) + + +desc_url = re.compile(r'(^|[^[])\[([^ ]+)( |\]|$)') + + +def check_mediawiki_links(app): + wholedesc = ' '.join(app.Description) + for um in desc_url.finditer(wholedesc): + url = um.group(1) + for m, r in http_checks: + if m.match(url): + yield _("URL {url} in Description: {error}").format(url=url, error=r) + + +def check_builds(app): + supported_flags = set(metadata.build_flags) + # needed for YAML and JSON + for build in app.get('Builds', []): + if build.disable: + if build.disable.startswith('Generated by import.py'): + yield _( + "Build generated by `fdroid import` - remove disable line once ready" + ) + continue + for s in ['master', 'main', 'origin', 'HEAD', 'default', 'trunk']: + if build.commit and build.commit.startswith(s): + yield _( + "Branch '{branch}' used as commit in build '{versionName}'" + ).format(branch=s, versionName=build.versionName) + for srclib in build.srclibs: + if '@' in srclib: + ref = srclib.split('@')[1].split('/')[0] + if ref.startswith(s): + yield _( + "Branch '{branch}' used as commit in srclib '{srclib}'" + ).format(branch=s, srclib=srclib) + else: + yield ( + _('srclibs missing name and/or @') + + ' (srclibs: ' + + srclib + + ')' + ) + for key in build.keys(): + if key not in supported_flags: + yield _('%s is not an accepted build field') % key + + +def check_files_dir(app): + dir_path = Path('metadata') / app.id + if not dir_path.is_dir(): + return + files = set() + for path in dir_path.iterdir(): + name = path.name + if not ( + path.is_file() or name == 'signatures' or locale_pattern.fullmatch(name) + ): + yield _("Found non-file at %s") % path + continue + files.add(name) + + used = { + 'signatures', + 
} + for build in app.get('Builds', []): + for fname in build.patch: + if fname not in files: + yield _("Unknown file '{filename}' in build '{versionName}'").format( + filename=fname, versionName=build.versionName + ) + else: + used.add(fname) + + for name in files.difference(used): + if locale_pattern.fullmatch(name): + continue + yield _("Unused file at %s") % (dir_path / name) + + +def check_format(app): + if common.options.format and not rewritemeta.proper_format(app): + yield _("Run rewritemeta to fix formatting") + + +def check_license_tag(app): + """Ensure all license tags contain only valid/approved values. + + It is possible to disable license checking by setting a null or empty value, + e.g. `lint_licenses: ` or `lint_licenses: []` + + """ + if 'lint_licenses' in config: + lint_licenses = config['lint_licenses'] + if lint_licenses is None: + return + else: + lint_licenses = APPROVED_LICENSES + if app.License not in lint_licenses: + if lint_licenses == APPROVED_LICENSES: + yield _( + 'Unexpected license tag "{}"! Only use FSF or OSI ' + 'approved tags from https://spdx.org/license-list' + ).format(app.License) + else: + yield _( + 'Unexpected license tag "{}"! Only use license tags ' + 'configured in your config file' + ).format(app.License) + + +def check_extlib_dir(apps): + dir_path = Path('build/extlib') + extlib_files = set() + for path in dir_path.glob('**/*'): + if path.is_file(): + extlib_files.add(path.relative_to(dir_path)) + + used = set() + for app in apps: + if app.Disabled: + continue + archive_policy = common.calculate_archive_policy( + app, common.config['archive_older'] + ) + builds = [build for build in app.Builds if not build.disable] + + for i in range(len(builds)): + build = builds[i] + for path in build.extlibs: + path = Path(path) + if path not in extlib_files: + # Don't show error on archived versions + if i >= len(builds) - archive_policy: + yield _( + "{appid}: Unknown extlib {path} in build '{versionName}'" + ).format(appid=app.id, path=path, versionName=build.versionName) + else: + used.add(path) + + for path in extlib_files.difference(used): + if path.name not in [ + '.gitignore', + 'source.txt', + 'origin.txt', + 'md5.txt', + 'LICENSE', + 'LICENSE.txt', + 'COPYING', + 'COPYING.txt', + 'NOTICE', + 'NOTICE.txt', + ]: + yield _("Unused extlib at %s") % (dir_path / path) + + +def check_app_field_types(app): + """Check the fields have valid data types.""" + for field in app.keys(): + v = app.get(field) + t = metadata.fieldtype(field) + if v is None: + continue + elif field == 'Builds': + if not isinstance(v, list): + yield ( + _( + "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" + ).format( + appid=app.id, + field=field, + type='list', + fieldtype=v.__class__.__name__, + ) + ) + elif t == metadata.TYPE_LIST and not isinstance(v, list): + yield ( + _( + "{appid}: {field} must be a '{type}', but it is a '{fieldtype}!'" + ).format( + appid=app.id, + field=field, + type='list', + fieldtype=v.__class__.__name__, + ) + ) + elif t == metadata.TYPE_STRING and type(v) not in (str, bool, dict): + yield ( + _( + "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" + ).format( + appid=app.id, + field=field, + type='str', + fieldtype=v.__class__.__name__, + ) + ) + elif t == metadata.TYPE_STRINGMAP and not isinstance(v, dict): + yield ( + _( + "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" 
+ ).format( + appid=app.id, + field=field, + type='dict', + fieldtype=v.__class__.__name__, + ) + ) + elif t == metadata.TYPE_INT and not isinstance(v, int): + yield ( + _( + "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" + ).format( + appid=app.id, + field=field, + type='int', + fieldtype=v.__class__.__name__, + ) + ) + + +def check_antiFeatures(app): + """Check the Anti-Features keys match those declared in the config.""" + pattern = ANTIFEATURES_PATTERN + msg = _("'{value}' is not a valid {field} in {appid}. Regex pattern: {pattern}") + + field = 'AntiFeatures' # App entries use capitalized CamelCase + for value in app.get(field, []): + if value not in ANTIFEATURES_KEYS: + yield msg.format(value=value, field=field, appid=app.id, pattern=pattern) + + field = 'antifeatures' # Build entries use all lowercase + for build in app.get('Builds', []): + build_antiFeatures = build.get(field, []) + for value in build_antiFeatures: + if value not in ANTIFEATURES_KEYS: + yield msg.format( + value=value, field=field, appid=app.id, pattern=pattern + ) + + +def check_for_unsupported_metadata_files(basedir=""): + """Check whether any non-metadata files are in metadata/.""" + basedir = Path(basedir) + global config + + if not (basedir / 'metadata').exists(): + return False + return_value = False + for f in (basedir / 'metadata').iterdir(): + if f.is_dir(): + if not Path(str(f) + '.yml').exists(): + print(_('"%s/" has no matching metadata file!') % f) + return_value = True + elif f.suffix == '.yml': + packageName = f.stem + if not common.is_valid_package_name(packageName): + print( + '"' + + packageName + + '" is an invalid package name!\n' + + 'https://developer.android.com/studio/build/application-id' + ) + return_value = True + else: + print( + _( + '"{path}" is not a supported file format (use: metadata/*.yml)' + ).format(path=f.relative_to(basedir)) + ) + return_value = True + + return return_value + + +def check_current_version_code(app): + """Check that the CurrentVersionCode is currently available.""" + if app.get('ArchivePolicy') == 0: + return + cv = app.get('CurrentVersionCode') + if cv is not None and cv == 0: + return + + builds = app.get('Builds') + active_builds = 0 + min_versionCode = None + if builds: + for build in builds: + vc = build['versionCode'] + if min_versionCode is None or min_versionCode > vc: + min_versionCode = vc + if not build.get('disable'): + active_builds += 1 + if cv == build['versionCode']: + break + if active_builds == 0: + return # all builds are disabled + if cv is not None and cv < min_versionCode: + yield ( + _( + 'CurrentVersionCode {cv} is less than oldest build entry {versionCode}' + ).format(cv=cv, versionCode=min_versionCode) + ) + + +def check_updates_expected(app): + """Check if update checking makes sense.""" + if (app.get('NoSourceSince') or app.get('ArchivePolicy') == 0) and not all( + app.get(key, 'None') == 'None' for key in ('AutoUpdateMode', 'UpdateCheckMode') + ): + yield _( + 'App has NoSourceSince or ArchivePolicy "0 versions" or 0 but AutoUpdateMode or UpdateCheckMode are not None' + ) + + +def check_updates_ucm_http_aum_pattern(app): # noqa: D403 + """AutoUpdateMode with UpdateCheckMode: HTTP must have a pattern.""" + if app.UpdateCheckMode == "HTTP" and app.AutoUpdateMode == "Version": + yield _("AutoUpdateMode with UpdateCheckMode: HTTP must have a pattern.") + + +def check_certificate_pinned_binaries(app): + keys = app.get('AllowedAPKSigningKeys') + known_keys = common.config.get('apk_signing_key_block_list', []) + if keys: 
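All of the check_* functions in this module follow one convention: they take a single parsed app and yield human-readable warning strings, which lets lint treat every check as an interchangeable generator. A hypothetical extra check written in that style (the function name, rule, and sample data are invented for illustration; the real checks use the _() gettext wrapper and App attribute access):

```python
def check_summary_not_empty(app):
    """Hypothetical lint check: warn when an app has no Summary."""
    if not app.get('Summary'):
        yield "Summary is not set"

# lint drives every registered check the same way:
app = {'Summary': ''}
for warn in check_summary_not_empty(app):
    print("org.example.app: %s" % warn)
```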
+ if known_keys: + for key in keys: + if key in known_keys: + yield _('Known debug key is used in AllowedAPKSigningKeys: ') + key + return + if app.get('Binaries') is not None: + yield _( + 'App has Binaries but does not have corresponding AllowedAPKSigningKeys to pin certificate.' + ) + return + builds = app.get('Builds') + if builds is None: + return + for build in builds: + if build.get('binary') is not None: + yield _( + 'App version has binary but does not have corresponding AllowedAPKSigningKeys to pin certificate.' + ) + return + + +def lint_config(arg): + path = Path(arg) + passed = True + + mirrors_name = f'{common.MIRRORS_CONFIG_NAME}.yml' + config_name = f'{common.CONFIG_CONFIG_NAME}.yml' + categories_name = f'{common.CATEGORIES_CONFIG_NAME}.yml' + antifeatures_name = f'{common.ANTIFEATURES_CONFIG_NAME}.yml' + + yamllintresult = common.run_yamllint(path) + if yamllintresult: + print(yamllintresult) + passed = False + + with path.open() as fp: + data = yaml.load(fp) + common.config_type_check(arg, data) + + if path.name == mirrors_name: + import pycountry + + valid_country_codes = [c.alpha_2 for c in pycountry.countries] + for mirror in data: + code = mirror.get('countryCode') + if code and code not in valid_country_codes: + passed = False + msg = _( + '{path}: "{code}" is not a valid ISO_3166-1 alpha-2 country code!' + ).format(path=str(path), code=code) + if code.upper() in valid_country_codes: + m = [code.upper()] + else: + m = difflib.get_close_matches( + code.upper(), valid_country_codes, 2, 0.5 + ) + if m: + msg += ' ' + msg += _('Did you mean {code}?').format(code=', '.join(sorted(m))) + print(msg) + elif path.name == config_name and path.parent.name != 'config': + valid_keys = set(tuple(common.default_config) + bool_keys + check_config_keys) + for key in ignore_config_keys: + if key in valid_keys: + valid_keys.remove(key) + for key in data: + if key not in valid_keys: + passed = False + msg = _("ERROR: {key} not a valid key!").format(key=key) + m = difflib.get_close_matches(key.lower(), valid_keys, 2, 0.5) + if m: + msg += ' ' + msg += _('Did you mean {code}?').format(code=', '.join(sorted(m))) + print(msg) + continue + + if key in bool_keys: + t = bool + else: + t = type(common.default_config.get(key, "")) + + show_error = False + if t is str: + if type(data[key]) not in (str, list, dict): + passed = False + show_error = True + elif type(data[key]) != t: + passed = False + show_error = True + if show_error: + print( + _("ERROR: {key}'s value should be of type {t}!").format( + key=key, t=t.__name__ + ) + ) + elif path.name in (config_name, categories_name, antifeatures_name): + for key in data: + if path.name == config_name and key not in ('archive', 'repo'): + passed = False + print( + _('ERROR: {key} in {path} is not "archive" or "repo"!').format( + key=key, path=path + ) + ) + allowed_keys = ['name'] + if path.name in [config_name, antifeatures_name]: + allowed_keys.append('description') + # only for source strings currently + if path.parent.name == 'config': + allowed_keys.append('icon') + for subkey in data[key]: + if subkey not in allowed_keys: + passed = False + print( + _( + 'ERROR: {key}:{subkey} in {path} is not in allowed keys: {allowed_keys}!' + ).format( + key=key, + subkey=subkey, + path=path, + allowed_keys=', '.join(allowed_keys), + ) + ) + + return passed + def main(): - - global config, options, appid + global config # Parse command line... 
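The "Did you mean …?" hints produced by lint_config come straight from difflib.get_close_matches, which returns up to n entries from the allowed set whose similarity to the bad value is at least the given cutoff (0.5 here). A self-contained illustration of the same call, with invented misspellings:

```python
import difflib

valid_keys = ['repo_url', 'repo_name', 'repo_icon', 'archive_older', 'keystore']

for typo in ('repo_nme', 'keystre', 'totally_unknown'):
    # Up to 2 suggestions, minimum similarity ratio 0.5, as in lint_config.
    matches = difflib.get_close_matches(typo, valid_keys, 2, 0.5)
    if matches:
        print(f"ERROR: {typo} not a valid key! Did you mean {', '.join(sorted(matches))}?")
    else:
        print(f"ERROR: {typo} not a valid key!")
```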
- parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - (options, args) = parser.parse_args() + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "-f", + "--format", + action="store_true", + default=False, + help=_("Also warn about formatting issues, like rewritemeta -l"), + ) + parser.add_argument( + '--force-yamllint', + action="store_true", + default=False, + help=_( + "When linting the entire repository yamllint is disabled by default. " + "This option forces yamllint regardless." + ), + ) + parser.add_argument( + "appid", nargs='*', help=_("application ID of file to operate on") + ) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - config = common.read_config(options) + config = common.read_config() + load_antiFeatures_config() + load_categories_config() - # Get all apps... - allapps = metadata.read_metadata(xref=False) - apps = common.read_app_args(args, allapps, False) + if options.force_yamllint: + import yamllint # throw error if it is not installed - for app in apps: - appid = app['id'] - lastcommit = '' + yamllint # make pyflakes ignore this - for build in app['builds']: - if 'commit' in build and 'disable' not in build: - lastcommit = build['commit'] + paths = list() + for arg in options.appid: + if ( + arg == common.CONFIG_FILE + or Path(arg).parent.name == 'config' + or Path(arg).parent.parent.name == 'config' # localized + ): + paths.append(arg) - if (app['Update Check Mode'] == 'RepoManifest' and - any(s in lastcommit for s in ('.', ',', '_', '-', '/'))): - warn("Last used commit '%s' looks like a tag, but Update Check Mode is RepoManifest" % lastcommit) + failed = 0 + if paths: + for path in paths: + options.appid.remove(path) + if not lint_config(path): + failed += 1 + # an empty list of appids means check all apps, avoid that if files were given + if not options.appid: + sys.exit(failed) - summ_chars = len(app['Summary']) - if summ_chars > config['char_limits']['Summary']: - warn("Summary of length %s is over the %i char limit" % ( - summ_chars, config['char_limits']['Summary'])) + if not lint_metadata(options): + failed += 1 - if app['Summary']: - lastchar = app['Summary'][-1] - if any(lastchar==c for c in ['.', ',', '!', '?']): - warn("Summary should not end with a %s" % lastchar) + if failed: + sys.exit(failed) - desc_chars = 0 - for line in app['Description']: - desc_chars += len(line) - if desc_chars > config['char_limits']['Description']: - warn("Description of length %s is over the %i char limit" % ( - desc_chars, config['char_limits']['Description'])) - if not appid: - print +def lint_metadata(options): + apps = common.read_app_args(options.appid) - print "Finished." + anywarns = check_for_unsupported_metadata_files() + + apps_check_funcs = [] + if not options.appid: + # otherwise it finds tons of unused extlibs + apps_check_funcs.append(check_extlib_dir) + for check_func in apps_check_funcs: + for warn in check_func(apps.values()): + anywarns = True + print(warn) + + for appid, app in apps.items(): + if app.Disabled: + continue + + # only run yamllint when linting individual apps. 
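Positional arguments in the new main() are split into config files (sent to lint_config) and application IDs (sent to lint_metadata) purely by path shape: the top-level config file itself, anything directly under config/, or one level deeper for localized config. A reduced sketch of that test, assuming 'config.yml' is the value of common.CONFIG_FILE:

```python
from pathlib import Path

CONFIG_FILE = 'config.yml'  # assumed value of common.CONFIG_FILE

def is_config_path(arg):
    p = Path(arg)
    return (
        arg == CONFIG_FILE
        or p.parent.name == 'config'           # e.g. config/mirrors.yml
        or p.parent.parent.name == 'config'    # localized, e.g. config/de/antifeatures.yml
    )

for arg in ('config.yml', 'config/mirrors.yml', 'config/de/antifeatures.yml', 'org.example.app'):
    print(arg, is_config_path(arg))
```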
+ if options.appid or options.force_yamllint: + # run yamllint on app metadata + ymlpath = Path('metadata') / (appid + '.yml') + if ymlpath.is_file(): + yamllintresult = common.run_yamllint(ymlpath) + if yamllintresult: + print(yamllintresult) + + # run yamllint on srclib metadata + srclibs = set() + for build in app.get('Builds', []): + for srclib in build.srclibs: + name, _ref, _number, _subdir = common.parse_srclib_spec(srclib) + srclibs.add(name + '.yml') + for srclib in srclibs: + srclibpath = Path('srclibs') / srclib + if srclibpath.is_file(): + if platform.system() == 'Windows': + # Handle symlink on Windows + symlink = srclibpath.read_text() + if symlink in srclibs: + continue + elif (srclibpath.parent / symlink).is_file(): + srclibpath = srclibpath.parent / symlink + yamllintresult = common.run_yamllint(srclibpath) + if yamllintresult: + print(yamllintresult) + + app_check_funcs = [ + check_app_field_types, + check_antiFeatures, + check_regexes, + check_update_check_data_url, + check_update_check_data_int, + check_vercode_operation, + check_ucm_tags, + check_char_limits, + check_old_links, + check_checkupdates_ran, + check_useless_fields, + check_empty_fields, + check_categories, + check_duplicates, + check_mediawiki_links, + check_builds, + check_files_dir, + check_format, + check_license_tag, + check_current_version_code, + check_updates_expected, + check_updates_ucm_http_aum_pattern, + check_certificate_pinned_binaries, + ] + + for check_func in app_check_funcs: + for warn in check_func(app): + anywarns = True + print("%s: %s" % (appid, warn)) + + return not anywarns + + +# A compiled, public domain list of official SPDX license tags. generated +# using: `python3 -m spdx_license_list print --filter-fsf-or-osi` Only contains +# licenes approved by either FSF to be free/libre software or OSI to be open +# source +APPROVED_LICENSES = [ + '0BSD', + 'AAL', + 'AFL-1.1', + 'AFL-1.2', + 'AFL-2.0', + 'AFL-2.1', + 'AFL-3.0', + 'AGPL-3.0-only', + 'AGPL-3.0-or-later', + 'APL-1.0', + 'APSL-1.0', + 'APSL-1.1', + 'APSL-1.2', + 'APSL-2.0', + 'Apache-1.0', + 'Apache-1.1', + 'Apache-2.0', + 'Artistic-1.0', + 'Artistic-1.0-Perl', + 'Artistic-1.0-cl8', + 'Artistic-2.0', + 'BSD-1-Clause', + 'BSD-2-Clause', + 'BSD-2-Clause-Patent', + 'BSD-3-Clause', + 'BSD-3-Clause-Clear', + 'BSD-3-Clause-LBNL', + 'BSD-4-Clause', + 'BSL-1.0', + 'BitTorrent-1.1', + 'CAL-1.0', + 'CAL-1.0-Combined-Work-Exception', + 'CATOSL-1.1', + 'CC-BY-4.0', + 'CC-BY-SA-4.0', + 'CC0-1.0', + 'CDDL-1.0', + 'CECILL-2.0', + 'CECILL-2.1', + 'CECILL-B', + 'CECILL-C', + 'CNRI-Python', + 'CPAL-1.0', + 'CPL-1.0', + 'CUA-OPL-1.0', + 'ClArtistic', + 'Condor-1.1', + 'ECL-1.0', + 'ECL-2.0', + 'EFL-1.0', + 'EFL-2.0', + 'EPL-1.0', + 'EPL-2.0', + 'EUDatagrid', + 'EUPL-1.1', + 'EUPL-1.2', + 'Entessa', + 'FSFAP', + 'FTL', + 'Fair', + 'Frameworx-1.0', + 'GFDL-1.1-only', + 'GFDL-1.1-or-later', + 'GFDL-1.2-only', + 'GFDL-1.2-or-later', + 'GFDL-1.3-only', + 'GFDL-1.3-or-later', + 'GPL-2.0-only', + 'GPL-2.0-or-later', + 'GPL-3.0-only', + 'GPL-3.0-or-later', + 'HPND', + 'IJG', + 'IPA', + 'IPL-1.0', + 'ISC', + 'Imlib2', + 'Intel', + 'LGPL-2.0-only', + 'LGPL-2.0-or-later', + 'LGPL-2.1-only', + 'LGPL-2.1-or-later', + 'LGPL-3.0-only', + 'LGPL-3.0-or-later', + 'LPL-1.0', + 'LPL-1.02', + 'LPPL-1.2', + 'LPPL-1.3a', + 'LPPL-1.3c', + 'LiLiQ-P-1.1', + 'LiLiQ-R-1.1', + 'LiLiQ-Rplus-1.1', + 'MIT', + 'MIT-0', + 'MPL-1.0', + 'MPL-1.1', + 'MPL-2.0', + 'MPL-2.0-no-copyleft-exception', + 'MS-PL', + 'MS-RL', + 'MirOS', + 'Motosoto', + 'MulanPSL-2.0', + 'Multics', + 
'NASA-1.3', + 'NCSA', + 'NGPL', + 'NOSL', + 'NPL-1.0', + 'NPL-1.1', + 'NPOSL-3.0', + 'NTP', + 'Naumen', + 'Nokia', + 'OCLC-2.0', + 'ODbL-1.0', + 'OFL-1.0', + 'OFL-1.1', + 'OFL-1.1-RFN', + 'OFL-1.1-no-RFN', + 'OGTSL', + 'OLDAP-2.3', + 'OLDAP-2.7', + 'OLDAP-2.8', + 'OSET-PL-2.1', + 'OSL-1.0', + 'OSL-1.1', + 'OSL-2.0', + 'OSL-2.1', + 'OSL-3.0', + 'OpenSSL', + 'PHP-3.0', + 'PHP-3.01', + 'PostgreSQL', + 'Python-2.0', + 'QPL-1.0', + 'RPL-1.1', + 'RPL-1.5', + 'RPSL-1.0', + 'RSCPL', + 'Ruby', + 'SGI-B-2.0', + 'SISSL', + 'SMLNJ', + 'SPL-1.0', + 'SimPL-2.0', + 'Sleepycat', + 'UCL-1.0', + 'UPL-1.0', + 'Unicode-DFS-2016', + 'Unlicense', + 'VSL-1.0', + 'Vim', + 'W3C', + 'WTFPL', + 'Watcom-1.0', + 'X11', + 'XFree86-1.1', + 'Xnet', + 'YPL-1.1', + 'ZPL-2.0', + 'ZPL-2.1', + 'Zend-2.0', + 'Zimbra-1.3', + 'Zlib', + 'gnuplot', + 'iMatix', + 'xinetd', +] + +# an F-Droid addition, until we can enforce a better option +APPROVED_LICENSES.append("PublicDomain") if __name__ == "__main__": main() - diff --git a/fdroidserver/looseversion.py b/fdroidserver/looseversion.py new file mode 100644 index 00000000..c2a32213 --- /dev/null +++ b/fdroidserver/looseversion.py @@ -0,0 +1,300 @@ +# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +# -------------------------------------------- +# +# 1. This LICENSE AGREEMENT is between the Python Software Foundation +# ("PSF"), and the Individual or Organization ("Licensee") accessing and +# otherwise using this software ("Python") in source or binary form and +# its associated documentation. +# +# 2. Subject to the terms and conditions of this License Agreement, PSF hereby +# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +# analyze, test, perform and/or display publicly, prepare derivative works, +# distribute, and otherwise use Python alone or in any derivative version, +# provided, however, that PSF's License Agreement and PSF's notice of copyright, +# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +# All Rights Reserved" are retained in Python alone or in any derivative version +# prepared by Licensee. +# +# 3. In the event Licensee prepares a derivative work that is based on +# or incorporates Python or any part thereof, and wants to make +# the derivative work available to others as provided herein, then +# Licensee hereby agrees to include in any such work a brief summary of +# the changes made to Python. +# +# 4. PSF is making Python available to Licensee on an "AS IS" +# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +# INFRINGE ANY THIRD PARTY RIGHTS. +# +# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. +# +# 6. This License Agreement will automatically terminate upon a material +# breach of its terms and conditions. +# +# 7. Nothing in this License Agreement shall be deemed to create any +# relationship of agency, partnership, or joint venture between PSF and +# Licensee. 
This License Agreement does not grant permission to use PSF +# trademarks or trade name in a trademark sense to endorse or promote +# products or services of Licensee, or any third party. +# +# 8. By copying, installing or otherwise using Python, Licensee +# agrees to be bound by the terms and conditions of this License +# Agreement. +# +# SPDX-License-Identifier: Python-2.0 +# +# downloaded from: +# https://github.com/effigies/looseversion/blob/e1a5a176a92dc6825deda4205c1be6d05e9ed352/src/looseversion/__init__.py + +"""Provides classes to represent module version numbers (one class for +each style of version numbering). There are currently two such classes +implemented: StrictVersion and LooseVersion. + +Every version number class implements the following interface: + * the 'parse' method takes a string and parses it to some internal + representation; if the string is an invalid version number, + 'parse' raises a ValueError exception + * the class constructor takes an optional string argument which, + if supplied, is passed to 'parse' + * __str__ reconstructs the string that was passed to 'parse' (or + an equivalent string -- ie. one that will generate an equivalent + version number instance) + * __repr__ generates Python code to recreate the version number instance + * _cmp compares the current instance with either another instance + of the same class or a string (which will be parsed to an instance + of the same class, thus must follow the same rules) +""" +import re +import sys + +__license__ = "Python License 2.0" + +# The rules according to Greg Stein: +# 1) a version number has 1 or more numbers separated by a period or by +# sequences of letters. If only periods, then these are compared +# left-to-right to determine an ordering. +# 2) sequences of letters are part of the tuple for comparison and are +# compared lexicographically +# 3) recognize the numeric components may have leading zeroes +# +# The LooseVersion class below implements these rules: a version number +# string is split up into a tuple of integer and string components, and +# comparison is a simple tuple comparison. This means that version +# numbers behave in a predictable and obvious way, but a way that might +# not necessarily be how people *want* version numbers to behave. There +# wouldn't be a problem if people could stick to purely numeric version +# numbers: just split on period and compare the numbers as tuples. +# However, people insist on putting letters into their version numbers; +# the most common purpose seems to be: +# - indicating a "pre-release" version +# ('alpha', 'beta', 'a', 'b', 'pre', 'p') +# - indicating a post-release patch ('p', 'pl', 'patch') +# but of course this can't cover all version number schemes, and there's +# no way to know what a programmer means without asking him. +# +# The problem is what to do with letters (and other non-numeric +# characters) in a version number. The current implementation does the +# obvious and predictable thing: keep them as strings and compare +# lexically within a tuple comparison. This has the desired effect if +# an appended letter sequence implies something "post-release": +# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". +# +# However, if letters in a version number imply a pre-release version, +# the "obvious" thing isn't correct. Eg. you would expect that +# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison +# implemented here, this just isn't so. +# +# Two possible solutions come to mind. 
The first is to tie the +# comparison algorithm to a particular set of semantic rules, as has +# been done in the StrictVersion class above. This works great as long +# as everyone can go along with bondage and discipline. Hopefully a +# (large) subset of Python module programmers will agree that the +# particular flavor of bondage and discipline provided by StrictVersion +# provides enough benefit to be worth using, and will submit their +# version numbering scheme to its domination. The free-thinking +# anarchists in the lot will never give in, though, and something needs +# to be done to accommodate them. +# +# Perhaps a "moderately strict" version class could be implemented that +# lets almost anything slide (syntactically), and makes some heuristic +# assumptions about non-digits in version number strings. This could +# sink into special-case-hell, though; if I was as talented and +# idiosyncratic as Larry Wall, I'd go ahead and implement a class that +# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is +# just as happy dealing with things like "2g6" and "1.13++". I don't +# think I'm smart enough to do it right though. +# +# In any case, I've coded the test suite for this module (see +# ../test/test_version.py) specifically to fail on things like comparing +# "1.2a2" and "1.2". That's not because the *code* is doing anything +# wrong, it's because the simple, obvious design doesn't match my +# complicated, hairy expectations for real-world version numbers. It +# would be a snap to fix the test suite to say, "Yep, LooseVersion does +# the Right Thing" (ie. the code matches the conception). But I'd rather +# have a conception that matches common notions about version numbers. + + +if sys.version_info >= (3,): + + class _Py2Int(int): + """Integer object that compares < any string""" + + def __gt__(self, other): + if isinstance(other, str): + return False + return super().__gt__(other) + + def __lt__(self, other): + if isinstance(other, str): + return True + return super().__lt__(other) + +else: + _Py2Int = int + + +class LooseVersion(object): + """Version numbering for anarchists and software realists. + Implements the standard interface for version number classes as + described above. A version number consists of a series of numbers, + separated by either periods or strings of letters. When comparing + version numbers, the numeric components will be compared + numerically, and the alphabetic components lexically. The following + are all valid version numbers, in no particular order: + + 1.5.1 + 1.5.2b2 + 161 + 3.10a + 8.02 + 3.4j + 1996.07.12 + 3.2.pl0 + 3.1.1.6 + 2g6 + 11g + 0.960923 + 2.2beta29 + 1.13++ + 5.5.kw + 2.0b1pl0 + + In fact, there is no such thing as an invalid version number under + this scheme; the rules for comparison are simple and predictable, + but may not always give the results you want (for some definition + of "want"). 
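The pitfall described in the comments above is easy to reproduce with plain sequence comparison, which is effectively what this class does once a version string is split into numeric and alphabetic components (the tuples below are written out by hand to mirror that split):

```python
# Hand-built splits, mirroring what the parser below produces:
#   "1.5.1"   -> (1, 5, 1)
#   "1.5.2a2" -> (1, 5, 2, 'a', 2)
#   "1.5.2"   -> (1, 5, 2)
print((1, 5, 1) < (1, 5, 2, 'a', 2))   # True, as you would hope
print((1, 5, 2, 'a', 2) < (1, 5, 2))   # False: the pre-release sorts after the release
```

Neither comparison here ever pits an int against a str; that mixed case is what raises TypeError on Python 3 and is the reason for _Py2Int and the LooseVersion2 subclass further down.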
+ """ + + component_re = re.compile(r"(\d+ | [a-z]+ | \.)", re.VERBOSE) + + def __init__(self, vstring=None): + if vstring: + self.parse(vstring) + + def __eq__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return NotImplemented + return c == 0 + + def __lt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return NotImplemented + return c < 0 + + def __le__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return NotImplemented + return c <= 0 + + def __gt__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return NotImplemented + return c > 0 + + def __ge__(self, other): + c = self._cmp(other) + if c is NotImplemented: + return NotImplemented + return c >= 0 + + def parse(self, vstring): + # I've given up on thinking I can reconstruct the version string + # from the parsed tuple -- so I just store the string here for + # use by __str__ + self.vstring = vstring + components = [x for x in self.component_re.split(vstring) if x and x != "."] + for i, obj in enumerate(components): + try: + components[i] = int(obj) + except ValueError: + pass + + self.version = components + + def __str__(self): + return self.vstring + + def __repr__(self): + return "LooseVersion ('%s')" % str(self) + + def _cmp(self, other): + other = self._coerce(other) + if other is NotImplemented: + return NotImplemented + + if self.version == other.version: + return 0 + if self.version < other.version: + return -1 + if self.version > other.version: + return 1 + return NotImplemented + + @classmethod + def _coerce(cls, other): + if isinstance(other, cls): + return other + elif isinstance(other, str): + return cls(other) + elif "distutils" in sys.modules: + # Using this check to avoid importing distutils and suppressing the warning + try: + from distutils.version import LooseVersion as deprecated + except ImportError: + return NotImplemented + if isinstance(other, deprecated): + return cls(str(other)) + return NotImplemented + + +class LooseVersion2(LooseVersion): + """LooseVersion variant that restores Python 2 semantics + + In Python 2, comparing LooseVersions where paired components could be string + and int always resulted in the string being "greater". In Python 3, this produced + a TypeError. + """ + + def parse(self, vstring): + # I've given up on thinking I can reconstruct the version string + # from the parsed tuple -- so I just store the string here for + # use by __str__ + self.vstring = vstring + components = [x for x in self.component_re.split(vstring) if x and x != "."] + for i, obj in enumerate(components): + try: + components[i] = _Py2Int(obj) + except ValueError: + pass + + self.version = components diff --git a/fdroidserver/metadata.py b/fdroidserver/metadata.py index 552a338e..0d9195be 100644 --- a/fdroidserver/metadata.py +++ b/fdroidserver/metadata.py @@ -1,8 +1,9 @@ -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # -# common.py - part of the FDroid server tools +# metadata.py - part of the FDroid server tools # Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2013-2014 Daniel Martí +# Copyright (C) 2017-2018 Michael Pöhn # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -17,739 +18,1267 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
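A quick usage sketch for the looseversion module added above, assuming fdroidserver is importable: LooseVersion keeps Python 3 semantics, so comparing a numeric component against an alphabetic one raises TypeError, while LooseVersion2 restores the Python 2 rule that string components compare greater than integers:

```python
from fdroidserver.looseversion import LooseVersion, LooseVersion2

# Components line up type-wise, so both classes agree here.
print(LooseVersion('1.5.1') < LooseVersion('1.5.2b2'))        # True

# 'beta' meets an int component -> TypeError under Python 3 semantics.
try:
    LooseVersion('2.2beta29') < LooseVersion('2.2.1')
except TypeError as e:
    print('LooseVersion:', e)

# LooseVersion2 keeps the old ordering: the string component compares greater.
print(LooseVersion2('2.2beta29') > LooseVersion2('2.2.1'))    # True
```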
-import os, re, glob -import cgi +import logging +import math +import os +import platform +import re +from collections import OrderedDict +from pathlib import Path -class MetaDataException(Exception): - def __init__(self, value): - self.value = value +import ruamel.yaml - def __str__(self): - return repr(self.value) +from . import _, common +from ._yaml import yaml +from .exception import MetaDataException -# Designates a metadata field type and checks that it matches -# -# 'name' - The long name of the field type -# 'matching' - List of possible values or regex expression -# 'sep' - Separator to use if value may be a list -# 'fields' - Metadata fields (Field:Value) of this type -# 'attrs' - Build attributes (attr=value) of this type -# -class FieldType(): - def __init__(self, name, matching, sep, fields, attrs): +srclibs = None +warnings_action = None + +# validates usernames based on a loose collection of rules from GitHub, GitLab, +# Liberapay and issuehunt. This is mostly to block abuse. +VALID_USERNAME_REGEX = re.compile(r'^[a-z\d](?:[a-z\d/._-]){0,38}$', re.IGNORECASE) + + +def _warn_or_exception(value, cause=None): + """Output warning or Exception depending on -W.""" + if warnings_action == 'ignore': + pass + elif warnings_action == 'error': + if cause: + raise MetaDataException(value) from cause + else: + raise MetaDataException(value) + else: + logging.warning(value) + + +yaml_app_field_order = [ + 'Disabled', + 'AntiFeatures', + 'Categories', + 'License', + 'AuthorName', + 'AuthorEmail', + 'AuthorWebSite', + 'WebSite', + 'SourceCode', + 'IssueTracker', + 'Translation', + 'Changelog', + 'Donate', + 'Liberapay', + 'OpenCollective', + 'Bitcoin', + 'Litecoin', + '\n', + 'Name', + 'AutoName', + 'Summary', + 'Description', + '\n', + 'RequiresRoot', + '\n', + 'RepoType', + 'Repo', + 'Binaries', + '\n', + 'Builds', + '\n', + 'AllowedAPKSigningKeys', + '\n', + 'MaintainerNotes', + '\n', + 'ArchivePolicy', + 'AutoUpdateMode', + 'UpdateCheckMode', + 'UpdateCheckIgnore', + 'VercodeOperation', + 'UpdateCheckName', + 'UpdateCheckData', + 'CurrentVersion', + 'CurrentVersionCode', + '\n', + 'NoSourceSince', +] + + +yaml_app_fields = [x for x in yaml_app_field_order if x != '\n'] + + +class App(dict): + def __init__(self, copydict=None): + if copydict: + super().__init__(copydict) + return + super().__init__() + + self.Disabled = None + self.AntiFeatures = dict() + self.Provides = None + self.Categories = [] + self.License = 'Unknown' + self.AuthorName = None + self.AuthorEmail = None + self.AuthorWebSite = None + self.WebSite = '' + self.SourceCode = '' + self.IssueTracker = '' + self.Translation = '' + self.Changelog = '' + self.Donate = None + self.Liberapay = None + self.OpenCollective = None + self.Bitcoin = None + self.Litecoin = None + self.Name = None + self.AutoName = '' + self.Summary = '' + self.Description = '' + self.RequiresRoot = False + self.RepoType = '' + self.Repo = '' + self.Binaries = None + self.AllowedAPKSigningKeys = [] + self.MaintainerNotes = '' + self.ArchivePolicy = None + self.AutoUpdateMode = 'None' + self.UpdateCheckMode = 'None' + self.UpdateCheckIgnore = None + self.VercodeOperation = [] + self.UpdateCheckName = None + self.UpdateCheckData = None + self.CurrentVersion = '' + self.CurrentVersionCode = None + self.NoSourceSince = '' + + self.id = None + self.metadatapath = None + self.Builds = [] + self.added = None + self.lastUpdated = None + + def __getattr__(self, name): + if name in self: + return self[name] + else: + raise AttributeError("No such attribute: " + 
name) + + def __setattr__(self, name, value): + self[name] = value + + def __delattr__(self, name): + if name in self: + del self[name] + else: + raise AttributeError("No such attribute: " + name) + + +TYPE_STRING = 2 +TYPE_BOOL = 3 +TYPE_LIST = 4 +TYPE_SCRIPT = 5 +TYPE_MULTILINE = 6 +TYPE_BUILD = 7 +TYPE_INT = 8 +TYPE_STRINGMAP = 9 + +fieldtypes = { + 'Description': TYPE_MULTILINE, + 'MaintainerNotes': TYPE_MULTILINE, + 'Categories': TYPE_LIST, + 'AntiFeatures': TYPE_STRINGMAP, + 'RequiresRoot': TYPE_BOOL, + 'AllowedAPKSigningKeys': TYPE_LIST, + 'Builds': TYPE_BUILD, + 'VercodeOperation': TYPE_LIST, + 'CurrentVersionCode': TYPE_INT, + 'ArchivePolicy': TYPE_INT, +} + + +def fieldtype(name): + name = name.replace(' ', '') + if name in fieldtypes: + return fieldtypes[name] + return TYPE_STRING + + +# In the order in which they are laid out on files +build_flags = [ + 'versionName', + 'versionCode', + 'disable', + 'commit', + 'timeout', + 'subdir', + 'submodules', + 'sudo', + 'init', + 'patch', + 'gradle', + 'maven', + 'output', + 'binary', + 'srclibs', + 'oldsdkloc', + 'encoding', + 'forceversion', + 'forcevercode', + 'rm', + 'extlibs', + 'prebuild', + 'androidupdate', + 'target', + 'scanignore', + 'scandelete', + 'build', + 'buildjni', + 'ndk', + 'preassemble', + 'gradleprops', + 'antcommands', + 'postbuild', + 'novcheck', + 'antifeatures', +] + + +class Build(dict): + def __init__(self, copydict=None): + super().__init__() + self.disable = '' + self.commit = None + self.timeout = None + self.subdir = None + self.submodules = False + self.sudo = '' + self.init = '' + self.patch = [] + self.gradle = [] + self.maven = None + self.output = None + self.binary = None + self.srclibs = [] + self.oldsdkloc = False + self.encoding = None + self.forceversion = False + self.forcevercode = False + self.rm = [] + self.extlibs = [] + self.prebuild = '' + self.androidupdate = [] + self.target = None + self.scanignore = [] + self.scandelete = [] + self.build = '' + self.buildjni = [] + self.ndk = None + self.preassemble = [] + self.gradleprops = [] + self.antcommands = [] + self.postbuild = '' + self.novcheck = False + self.antifeatures = dict() + if copydict: + super().__init__(copydict) + return + + def __getattr__(self, name): + if name in self: + return self[name] + else: + raise AttributeError("No such attribute: " + name) + + def __setattr__(self, name, value): + self[name] = value + + def __delattr__(self, name): + if name in self: + del self[name] + else: + raise AttributeError("No such attribute: " + name) + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_dict(node) + + def build_method(self): + for f in ['maven', 'gradle']: + if self.get(f): + return f + if self.output: + return 'raw' + return 'ant' + + # like build_method, but prioritize output= + def output_method(self): + if self.output: + return 'raw' + for f in ['maven', 'gradle']: + if self.get(f): + return f + return 'ant' + + def ndk_path(self) -> str: + """Return the path string of the first configured NDK or an empty string.""" + ndk = self.ndk + if isinstance(ndk, list): + ndk = self.ndk[0] + path = common.config['ndk_paths'].get(ndk) + if path and not isinstance(path, str): + raise TypeError('NDK path is not string') + if path: + return path + for vsn, path in common.config['ndk_paths'].items(): + if not vsn.endswith("_orig") and path and os.path.basename(path) == ndk: + return path + return '' + + +flagtypes = { + 'versionCode': TYPE_INT, + 'extlibs': TYPE_LIST, + 'srclibs': TYPE_LIST, + 'patch': 
TYPE_LIST, + 'rm': TYPE_LIST, + 'buildjni': TYPE_LIST, + 'preassemble': TYPE_LIST, + 'androidupdate': TYPE_LIST, + 'scanignore': TYPE_LIST, + 'scandelete': TYPE_LIST, + 'gradle': TYPE_LIST, + 'antcommands': TYPE_LIST, + 'gradleprops': TYPE_LIST, + 'sudo': TYPE_SCRIPT, + 'init': TYPE_SCRIPT, + 'prebuild': TYPE_SCRIPT, + 'build': TYPE_SCRIPT, + 'postbuild': TYPE_SCRIPT, + 'submodules': TYPE_BOOL, + 'oldsdkloc': TYPE_BOOL, + 'forceversion': TYPE_BOOL, + 'forcevercode': TYPE_BOOL, + 'novcheck': TYPE_BOOL, + 'antifeatures': TYPE_STRINGMAP, + 'timeout': TYPE_INT, +} + + +def flagtype(name): + if name in flagtypes: + return flagtypes[name] + return TYPE_STRING + + +class FieldValidator: + """Designate App metadata field types and checks that it matches. + + 'name' - The long name of the field type + 'matching' - List of possible values or regex expression + 'sep' - Separator to use if value may be a list + 'fields' - Metadata fields (Field:Value) of this type + """ + + def __init__(self, name, matching, fields): self.name = name self.matching = matching - if type(matching) is str: - self.compiled = re.compile(matching) - self.sep = sep + self.compiled = re.compile(matching) self.fields = fields - self.attrs = attrs - def _assert_regex(self, values, appid): + def check(self, v, appid): + if not v: + return + if type(v) == list: + values = v + else: + values = [v] for v in values: if not self.compiled.match(v): - raise MetaDataException("'%s' is not a valid %s in %s. " - % (v, self.name, appid) + - "Regex pattern: %s" % (self.matching)) - - def _assert_list(self, values, appid): - for v in values: - if v not in self.matching: - raise MetaDataException("'%s' is not a valid %s in %s. " - % (v, self.name, appid) + - "Possible values: %s" % (", ".join(self.matching))) - - def check(self, value, appid): - if type(value) is not str or not value: - return - if self.sep is not None: - values = value.split(self.sep) - else: - values = [value] - if type(self.matching) is list: - self._assert_list(values, appid) - else: - self._assert_regex(values, appid) + _warn_or_exception( + _( + "'{value}' is not a valid {field} in {appid}. 
Regex pattern: {pattern}" + ).format( + value=v, field=self.name, appid=appid, pattern=self.matching + ) + ) # Generic value types valuetypes = { - 'int' : FieldType("Integer", - r'^[1-9][0-9]*$', None, - [ 'FlattrID' ], - [ 'vercode' ]), + FieldValidator("Liberapay", + VALID_USERNAME_REGEX, + ['Liberapay']), - 'http' : FieldType("HTTP link", - r'^http[s]?://', None, - [ "Web Site", "Source Code", "Issue Tracker", "Donate" ], []), + FieldValidator("Open Collective", + VALID_USERNAME_REGEX, + ['OpenCollective']), - 'bitcoin' : FieldType("Bitcoin address", - r'^[a-zA-Z0-9]{27,34}$', None, - [ "Bitcoin" ], - [ ]), + FieldValidator("HTTP link", + r'^http[s]?://', + ["WebSite", "SourceCode", "IssueTracker", "Translation", "Changelog", "Donate"]), - 'litecoin' : FieldType("Litecoin address", - r'^L[a-zA-Z0-9]{33}$', None, - [ "Litecoin" ], - [ ]), + FieldValidator("Email", + r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', + ["AuthorEmail"]), - 'dogecoin' : FieldType("Dogecoin address", - r'^D[a-zA-Z0-9]{33}$', None, - [ "Dogecoin" ], - [ ]), + FieldValidator("Bitcoin address", + r'^(bc1|[13])[a-zA-HJ-NP-Z0-9]{25,39}$', + ["Bitcoin"]), - 'Bool' : FieldType("Boolean", - ['Yes', 'No'], None, - [ "Requires Root" ], - [ ]), + FieldValidator("Litecoin address", + r'^([LM3][a-km-zA-HJ-NP-Z1-9]{26,33}|ltc1[a-z0-9]{39})$', + ["Litecoin"]), - 'bool' : FieldType("Boolean", - ['yes', 'no'], None, - [ ], - [ 'submodules', 'oldsdkloc', 'forceversion', 'forcevercode', - 'fixtrans', 'fixapos', 'novcheck' ]), + FieldValidator("Repo Type", + r'^(git|git-svn|svn|hg|bzr|srclib)$', + ["RepoType"]), - 'Repo Type' : FieldType("Repo Type", - [ 'git', 'git-svn', 'svn', 'hg', 'bzr', 'srclib' ], None, - [ "Repo Type" ], - [ ]), + FieldValidator("Binaries", + r'^http[s]?://', + ["Binaries"]), - 'archive' : FieldType("Archive Policy", - r'^[0-9]+ versions$', None, - [ "Archive Policy" ], - [ ]), + FieldValidator("AllowedAPKSigningKeys", + r'^[a-fA-F0-9]{64}$', + ["AllowedAPKSigningKeys"]), - 'antifeatures' : FieldType("Anti-Feature", - [ "Ads", "Tracking", "NonFreeNet", "NonFreeDep", "NonFreeAdd", "UpstreamNonFree" ], ',', - [ "AntiFeatures" ], - [ ]), + FieldValidator("Auto Update Mode", + r"^(Version.*|None)$", + ["AutoUpdateMode"]), - 'autoupdatemodes' : FieldType("Auto Update Mode", - r"^(Version .+|None)$", None, - [ "Auto Update Mode" ], - [ ]), - - 'updatecheckmodes' : FieldType("Update Check Mode", - r"^(Tags|RepoManifest|RepoManifest/.+|RepoTrunk|HTTP|Static|None)$", None, - [ "Update Check Mode" ], - [ ]) + FieldValidator("Update Check Mode", + r"^(Tags|Tags .+|RepoManifest|RepoManifest/.+|HTTP|Static|None)$", + ["UpdateCheckMode"]) } + # Check an app's metadata information for integrity errors -def check_metadata(info): - for k, t in valuetypes.iteritems(): - for field in t.fields: - if field in info: - t.check(info[field], info['id']) - if k == 'Bool': - info[field] = info[field] == "Yes" - for build in info['builds']: - for attr in t.attrs: - if attr in build: - t.check(build[attr], info['id']) - if k == 'bool': - build[attr] = build[attr] == "yes" - elif k == 'bool': - build[attr] = False - -# Formatter for descriptions. Create an instance, and call parseline() with -# each line of the description source from the metadata. At the end, call -# end() and then text_plain, text_wiki and text_html will contain the result. 
-class DescriptionFormatter: - stNONE = 0 - stPARA = 1 - stUL = 2 - stOL = 3 - bold = False - ital = False - state = stNONE - text_plain = '' - text_wiki = '' - text_html = '' - linkResolver = None - def __init__(self, linkres): - self.linkResolver = linkres - def endcur(self, notstates=None): - if notstates and self.state in notstates: - return - if self.state == self.stPARA: - self.endpara() - elif self.state == self.stUL: - self.endul() - elif self.state == self.stOL: - self.endol() - def endpara(self): - self.text_plain += '\n' - self.text_html += '
</p>
' - self.state = self.stNONE - def endul(self): - self.text_html += '' - self.state = self.stNONE - def endol(self): - self.text_html += '' - self.state = self.stNONE - - def formatted(self, txt, html): - formatted = '' - if html: - txt = cgi.escape(txt) - while True: - index = txt.find("''") - if index == -1: - return formatted + txt - formatted += txt[:index] - txt = txt[index:] - if txt.startswith("'''"): - if html: - if self.bold: - formatted += '' - else: - formatted += '' - self.bold = not self.bold - txt = txt[3:] - else: - if html: - if self.ital: - formatted += '' - else: - formatted += '' - self.ital = not self.ital - txt = txt[2:] +def check_metadata(app): + for v in valuetypes: + for k in v.fields: + v.check(app[k], app.id) - def linkify(self, txt): - linkified_plain = '' - linkified_html = '' - while True: - index = txt.find("[") - if index == -1: - return (linkified_plain + self.formatted(txt, False), linkified_html + self.formatted(txt, True)) - linkified_plain += self.formatted(txt[:index], False) - linkified_html += self.formatted(txt[:index], True) - txt = txt[index:] - if txt.startswith("[["): - index = txt.find("]]") - if index == -1: - raise MetaDataException("Unterminated ]]") - url = txt[2:index] - if self.linkResolver: - url, urltext = self.linkResolver(url) - else: - urltext = url - linkified_html += '' + cgi.escape(urltext) + '' - linkified_plain += urltext - txt = txt[index+2:] - else: - index = txt.find("]") - if index == -1: - raise MetaDataException("Unterminated ]") - url = txt[1:index] - index2 = url.find(' ') - if index2 == -1: - urltxt = url - else: - urltxt = url[index2 + 1:] - url = url[:index2] - linkified_html += '' + cgi.escape(urltxt) + '' - linkified_plain += urltxt - if urltxt != url: - linkified_plain += ' (' + url + ')' - txt = txt[index+1:] +def parse_yaml_srclib(metadatapath): + thisinfo = {'RepoType': '', 'Repo': '', 'Subdir': None, 'Prepare': None} - def addtext(self, txt): - p, h = self.linkify(txt) - self.text_plain += p - self.text_html += h - - def parseline(self, line): - self.text_wiki += "%s\n" % line - if not line: - self.endcur() - elif line.startswith('*'): - self.endcur([self.stUL]) - if self.state != self.stUL: - self.text_html += '
<ul>' - self.state = self.stUL - self.text_html += '<li>' - self.text_plain += '*' - self.addtext(line[1:]) - self.text_html += '</li>' - elif line.startswith('#'): - self.endcur([self.stOL]) - if self.state != self.stOL: - self.text_html += '<ol>' - self.state = self.stOL - self.text_html += '<li>' - self.text_plain += '*' #TODO: lazy - put the numbers in! - self.addtext(line[1:]) - self.text_html += '</li>' - else: - self.endcur([self.stPARA]) - if self.state == self.stNONE: - self.text_html += '<p>
      ' - self.state = self.stPARA - elif self.state == self.stPARA: - self.text_html += ' ' - self.text_plain += ' ' - self.addtext(line) - - def end(self): - self.endcur() - -# Parse multiple lines of description as written in a metadata file, returning -# a single string in plain text format. -def description_plain(lines, linkres): - ps = DescriptionFormatter(linkres) - for line in lines: - ps.parseline(line) - ps.end() - return ps.text_plain - -# Parse multiple lines of description as written in a metadata file, returning -# a single string in wiki format. Used for the Maintainer Notes field as well, -# because it's the same format. -def description_wiki(lines): - ps = DescriptionFormatter(None) - for line in lines: - ps.parseline(line) - ps.end() - return ps.text_wiki - -# Parse multiple lines of description as written in a metadata file, returning -# a single string in HTML format. -def description_html(lines,linkres): - ps = DescriptionFormatter(linkres) - for line in lines: - ps.parseline(line) - ps.end() - return ps.text_html - -def parse_srclib(metafile, **kw): - - thisinfo = {} - if metafile and not isinstance(metafile, file): - metafile = open(metafile, "r") - - # Defaults for fields that come from metadata - thisinfo['Repo Type'] = '' - thisinfo['Repo'] = '' - thisinfo['Subdir'] = None - thisinfo['Prepare'] = None - thisinfo['Srclibs'] = None - thisinfo['Update Project'] = None - - if metafile is None: + if not metadatapath.exists(): + _warn_or_exception( + _("Invalid scrlib metadata: '{file}' does not exist").format( + file=metadatapath + ) + ) return thisinfo - for line in metafile: - line = line.rstrip('\r\n') - if not line or line.startswith("#"): - continue - + with metadatapath.open("r", encoding="utf-8") as f: try: - field, value = line.split(':',1) - except ValueError: - raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line) + data = yaml.load(f) + if type(data) is not dict: + if platform.system() == 'Windows': + # Handle symlink on Windows + symlink = metadatapath.parent / metadatapath.read_text(encoding='utf-8') + if symlink.is_file(): + with symlink.open("r", encoding="utf-8") as s: + data = yaml.load(s) + if type(data) is not dict: + raise ruamel.yaml.YAMLError( + _('{file} is blank or corrupt!').format(file=metadatapath) + ) + except ruamel.yaml.YAMLError as e: + _warn_or_exception(_("Invalid srclib metadata: could not " + "parse '{file}'") + .format(file=metadatapath) + '\n' + + common.run_yamllint(metadatapath, indent=4), + cause=e) + return thisinfo - if field == "Subdir": - thisinfo[field] = value.split(',') + for key in data: + if key not in thisinfo: + _warn_or_exception( + _("Invalid srclib metadata: unknown key '{key}' in '{file}'").format( + key=key, file=metadatapath + ) + ) + return thisinfo else: - thisinfo[field] = value + if key == 'Subdir': + if isinstance(data[key], str): + thisinfo[key] = data[key].split(',') + elif isinstance(data[key], list): + thisinfo[key] = data[key] + elif data[key] is None: + thisinfo[key] = [''] + elif key == 'Prepare' or flagtype(key) == TYPE_SCRIPT: + if isinstance(data[key], list): + thisinfo[key] = data[key] + else: + thisinfo[key] = [data[key]] if data[key] else [] + else: + thisinfo[key] = str(data[key] or '') return thisinfo -# Read all metadata. Returns a list of 'app' objects (which are dictionaries as -# returned by the parse_metadata function. 
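An illustrative sketch of the srclib format handled by the new parse_yaml_srclib above; the file name and values are hypothetical, and it assumes the fdroidserver package is importable:

    from pathlib import Path
    from fdroidserver import metadata

    # srclibs/Example.yml containing, for instance:
    #   RepoType: git
    #   Repo: https://example.com/example.git
    #   Subdir: library
    #   Prepare: ./gradlew clean
    srclib = metadata.parse_yaml_srclib(Path('srclibs/Example.yml'))
    # -> {'RepoType': 'git', 'Repo': 'https://example.com/example.git',
    #     'Subdir': ['library'], 'Prepare': ['./gradlew clean']}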
-def read_metadata(xref=True, package=None): - apps = [] - for basedir in ('metadata', 'tmp'): - if not os.path.exists(basedir): - os.makedirs(basedir) - for metafile in sorted(glob.glob(os.path.join('metadata', '*.txt'))): - if package is None or metafile == os.path.join('metadata', package + '.txt'): - try: - appinfo = parse_metadata(metafile) - except Exception, e: - raise MetaDataException("Problem reading metadata file %s: - %s" % (metafile, str(e))) - check_metadata(appinfo) - apps.append(appinfo) - if xref: - # Parse all descriptions at load time, just to ensure cross-referencing - # errors are caught early rather than when they hit the build server. - def linkres(link): - for app in apps: - if app['id'] == link: - return ("fdroid.app:" + link, "Dummy name - don't know yet") - raise MetaDataException("Cannot resolve app id " + link) - for app in apps: - try: - description_html(app['Description'], linkres) - except Exception, e: - raise MetaDataException("Problem with description of " + app['id'] + - " - " + str(e)) +def read_srclibs(): + """Read all srclib metadata. + + The information read will be accessible as metadata.srclibs, which is a + dictionary, keyed on srclib name, with the values each being a dictionary + in the same format as that returned by the parse_yaml_srclib function. + + A MetaDataException is raised if there are any problems with the srclib + metadata. + """ + global srclibs + + # They were already loaded + if srclibs is not None: + return + + srclibs = {} + + srclibs_dir = Path('srclibs') + srclibs_dir.mkdir(exist_ok=True) + + for metadatapath in sorted(srclibs_dir.glob('*.yml')): + srclibs[metadatapath.stem] = parse_yaml_srclib(metadatapath) + + +def read_metadata(appid_to_vercode={}, sort_by_time=False): + """Return a list of App instances sorted newest first. + + This reads all of the metadata files in a 'data' repository, then + builds a list of App instances from those files. The list is + sorted based on creation time, newest first. Most of the time, + the newer files are the most interesting. + + appid_to_vercode is a dict with appids a keys and versionCodes as values. + + """ + # Always read the srclibs before the apps, since they can use a srlib as + # their source repository. + read_srclibs() + + apps = OrderedDict() + + for basedir in ('metadata', 'tmp'): + Path(basedir).mkdir(exist_ok=True) + + if appid_to_vercode: + metadatafiles = common.get_metadata_files(appid_to_vercode) + else: + metadatafiles = list(Path('metadata').glob('*.yml')) + list( + Path('.').glob('.fdroid.yml') + ) + + if sort_by_time: + entries = ((path.stat().st_mtime, path) for path in metadatafiles) + metadatafiles = [] + for _ignored, path in sorted(entries, reverse=True): + metadatafiles.append(path) + else: + # most things want the index alpha sorted for stability + metadatafiles = sorted(metadatafiles) + + for metadatapath in metadatafiles: + appid = metadatapath.stem + if appid != '.fdroid' and not common.is_valid_package_name(appid): + _warn_or_exception( + _("{appid} from {path} is not a valid Java Package Name!").format( + appid=appid, path=metadatapath + ) + ) + if appid in apps: + _warn_or_exception( + _("Found multiple metadata files for {appid}").format(appid=appid) + ) + app = parse_metadata(metadatapath) + check_metadata(app) + apps[app.id] = app return apps -# Get the type expected for a given metadata field. 
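A rough usage sketch of the new read_metadata() above, assuming fdroidserver is installed and the working directory contains a metadata/ folder; the appid is hypothetical:

    from fdroidserver import common, metadata

    common.read_config()                 # load config.yml, if present
    apps = metadata.read_metadata()      # dict keyed on appid, alpha sorted
    app = apps.get('com.example.app')    # hypothetical appid
    if app and app.get('Builds'):
        # Builds are sorted by versionCode, so the last entry is the newest
        print(app['Builds'][-1]['versionCode'])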
-def metafieldtype(name): - if name in ['Description', 'Maintainer Notes']: - return 'multiline' - if name == 'Build Version': - return 'build' - if name == 'Build': - return 'buildv2' - if name == 'Use Built': - return 'obsolete' - return 'string' -# Parse metadata for a single application. -# -# 'metafile' - the filename to read. The package id for the application comes -# from this filename. Pass None to get a blank entry. -# -# Returns a dictionary containing all the details of the application. There are -# two major kinds of information in the dictionary. Keys beginning with capital -# letters correspond directory to identically named keys in the metadata file. -# Keys beginning with lower case letters are generated in one way or another, -# and are not found verbatim in the metadata. -# -# Known keys not originating from the metadata are: -# -# 'id' - the application's package ID -# 'builds' - a list of dictionaries containing build information -# for each defined build -# 'comments' - a list of comments from the metadata file. Each is -# a tuple of the form (field, comment) where field is -# the name of the field it preceded in the metadata -# file. Where field is None, the comment goes at the -# end of the file. Alternatively, 'build:version' is -# for a comment before a particular build version. -# 'descriptionlines' - original lines of description as formatted in the -# metadata file. -# -def parse_metadata(metafile): +def parse_metadata(metadatapath): + """Parse metadata file, also checking the source repo for .fdroid.yml. - def parse_buildline(lines): - value = "".join(lines) - parts = [p.replace("\\,", ",") - for p in re.split(r"(?//antifeatures/_.txt + metadata///antifeatures/.txt + + └── metadata/ + └── / + ├── en-US/ + │ └── antifeatures/ + │ ├── 123_Ads.txt -> "includes ad lib" + │ ├── 123_Tracking.txt -> "standard suspects" + │ └── NoSourceSince.txt -> "it vanished" + │ + └── zh-CN/ + └── antifeatures/ + └── 123_Ads.txt -> "包括广告库" + + Gets parsed into the metadata data structure: + + AntiFeatures: + NoSourceSince: + en-US: it vanished + Builds: + - versionCode: 123 + antifeatures: + Ads: + en-US: includes ad lib + zh-CN: 包括广告库 + Tracking: + en-US: standard suspects + + """ + app_dir = Path('metadata', app['id']) + if not app_dir.is_dir(): + return + af_dup_msg = _('Duplicate Anti-Feature declaration at {path} was ignored!') + + if app.get('AntiFeatures'): + app_has_AntiFeatures = True + else: + app_has_AntiFeatures = False + + has_versionCode = re.compile(r'^-?[0-9]+_.*') + has_antifeatures_from_app = set() + for build in app.get('Builds', []): + antifeatures = build.get('antifeatures') + if antifeatures: + has_antifeatures_from_app.add(build['versionCode']) + + for f in sorted(app_dir.glob('*/antifeatures/*.txt')): + path = f.as_posix() + left = path.index('/', 9) # 9 is length of "metadata/" + right = path.index('/', left + 1) + locale = path[left + 1 : right] + description = f.read_text() + if has_versionCode.match(f.stem): + i = f.stem.index('_') + versionCode = int(f.stem[:i]) + antifeature = f.stem[i + 1 :] + if versionCode in has_antifeatures_from_app: + logging.error(af_dup_msg.format(path=f)) continue - if line.startswith("#"): - curcomments.append(line) + if 'Builds' not in app: + app['Builds'] = [] + found = False + for build in app['Builds']: + # loop though builds again, there might be duplicate versionCodes + if versionCode == build['versionCode']: + found = True + if 'antifeatures' not in build: + build['antifeatures'] = dict() + if antifeature not in 
build['antifeatures']: + build['antifeatures'][antifeature] = dict() + build['antifeatures'][antifeature][locale] = description + if not found: + app['Builds'].append( + { + 'versionCode': versionCode, + 'antifeatures': { + antifeature: {locale: description}, + }, + } + ) + elif app_has_AntiFeatures: + logging.error(af_dup_msg.format(path=f)) + continue + else: + if 'AntiFeatures' not in app: + app['AntiFeatures'] = dict() + if f.stem not in app['AntiFeatures']: + app['AntiFeatures'][f.stem] = dict() + app['AntiFeatures'][f.stem][locale] = f.read_text() + + +def _normalize_type_int(k, v): + """Normalize anything that can be reliably converted to an integer.""" + if isinstance(v, int) and not isinstance(v, bool): + return v + if v is None: + return None + if isinstance(v, str): + try: + return int(v) + except ValueError: + pass + msg = _('{build_flag} must be an integer, found: {value}') + _warn_or_exception(msg.format(build_flag=k, value=v)) + + +def _normalize_type_string(v): + """Normalize any data to TYPE_STRING. + + YAML 1.2's booleans are all lowercase. + + Things like versionName are strings, but without quotes can be + numbers. Like "versionName: 1.0" would be a YAML float, but + should be a string. + + SHA-256 values are string values, but YAML 1.2 can interpret some + unquoted values as decimal ints. This converts those to a string + if they are over 50 digits. In the wild, the longest 0 padding on + a SHA-256 key fingerprint I found was 8 zeros. + + """ + if isinstance(v, bool): + if v: + return 'true' + return 'false' + if isinstance(v, float): + # YAML 1.2 values for NaN, Inf, and -Inf + if math.isnan(v): + return '.nan' + if math.isinf(v): + if v > 0: + return '.inf' + return '-.inf' + if v and isinstance(v, int): + if math.log10(v) > 50: # only if the int has this many digits + return '%064d' % v + return str(v) + + +def _normalize_type_stringmap(k, v): + """Normalize any data to TYPE_STRINGMAP. + + The internal representation of this format is a dict of dicts, + where the outer dict's keys are things like tag names of + Anti-Features, the inner dict's keys are locales, and the ultimate + values are human readable text. + + Metadata entries like AntiFeatures: can be written in many + forms, including a simple one-entry string, a list of strings, + a dict with keys and descriptions as values, or a dict with + localization. + + Returns + ------- + A dictionary with string keys, where each value is either a string + message or a dict with locale keys and string message values. 
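    Illustrative examples (hypothetical values; assumes common.DEFAULT_LOCALE
    is 'en-US'):

        'Ads'                           -> {'Ads': {}}
        ['Ads', 'Tracking']             -> {'Ads': {}, 'Tracking': {}}
        {'Ads': 'includes ad lib'}      -> {'Ads': {'en-US': 'includes ad lib'}}
        {'Ads': {'zh-CN': '包括广告库'}}  -> returned unchanged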
+ + """ + if v is None: + return dict() + if isinstance(v, str) or isinstance(v, int) or isinstance(v, float): + return {_normalize_type_string(v): dict()} + if isinstance(v, list) or isinstance(v, tuple) or isinstance(v, set): + retdict = dict() + for i in v: + if isinstance(i, dict): + # transitional format + if len(i) != 1: + _warn_or_exception( + _( + "'{value}' is not a valid {field}, should be {pattern}" + ).format(field=k, value=v, pattern='key: value') + ) + afname = _normalize_type_string(next(iter(i))) + desc = _normalize_type_string(next(iter(i.values()))) + retdict[afname] = {common.DEFAULT_LOCALE: desc} + else: + retdict[_normalize_type_string(i)] = {} + return retdict + + retdict = dict() + for af, afdict in v.items(): + key = _normalize_type_string(af) + if afdict: + if isinstance(afdict, dict): + retdict[key] = afdict + else: + retdict[key] = {common.DEFAULT_LOCALE: _normalize_type_string(afdict)} + else: + retdict[key] = dict() + + return retdict + + +def _normalize_type_list(k, v): + """Normalize any data to TYPE_LIST, which is always a list of strings.""" + if isinstance(v, dict): + msg = _('{build_flag} must be list or string, found: {value}') + _warn_or_exception(msg.format(build_flag=k, value=v)) + elif type(v) not in (list, tuple, set): + v = [v] + return [_normalize_type_string(i) for i in v] + + +def post_parse_yaml_metadata(yamldata): + """Convert human-readable metadata data structures into consistent data structures. + + "Be conservative in what is written out, be liberal in what is parsed." + https://en.wikipedia.org/wiki/Robustness_principle + + This also handles conversions that make metadata YAML behave + something like StrictYAML. Specifically, a field should have a + fixed value type, regardless of YAML 1.2's type auto-detection. + + TODO: None values should probably be treated as the string 'null', + since YAML 1.2 uses that for nulls + + """ + for k, v in yamldata.items(): + _fieldtype = fieldtype(k) + if _fieldtype == TYPE_LIST: + if v or v == 0: + yamldata[k] = _normalize_type_list(k, v) + elif _fieldtype == TYPE_INT: + # ArchivePolicy used to require " versions" in the value. + if k == 'ArchivePolicy' and isinstance(v, str): + v = v.split(' ', maxsplit=1)[0] + v = _normalize_type_int(k, v) + if v or v == 0: + yamldata[k] = v + elif _fieldtype == TYPE_STRING: + if v or v == 0: + yamldata[k] = _normalize_type_string(v) + elif _fieldtype == TYPE_STRINGMAP: + if v or v == 0: # TODO probably want just `if v:` + yamldata[k] = _normalize_type_stringmap(k, v) + elif _fieldtype == TYPE_BOOL: + yamldata[k] = bool(v) + else: + if type(v) in (float, int): + yamldata[k] = str(v) + + builds = [] + for build in yamldata.get('Builds', []): + for k, v in build.items(): + if v is None: continue - try: - field, value = line.split(':',1) - except ValueError: - raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line) - # Translate obsolete fields... 
- if field == 'Market Version': - field = 'Current Version' - if field == 'Market Version Code': - field = 'Current Version Code' + _flagtype = flagtype(k) + if _flagtype == TYPE_STRING: + if v or v == 0: + build[k] = _normalize_type_string(v) + elif _flagtype == TYPE_INT: + v = _normalize_type_int(k, v) + if v or v == 0: + build[k] = v + elif _flagtype in (TYPE_LIST, TYPE_SCRIPT): + if v or v == 0: + build[k] = _normalize_type_list(k, v) + elif _flagtype == TYPE_STRINGMAP: + if v or v == 0: + build[k] = _normalize_type_stringmap(k, v) + elif _flagtype == TYPE_BOOL: + build[k] = bool(v) - fieldtype = metafieldtype(field) - if fieldtype not in ['build', 'buildv2']: - add_comments(field) - if fieldtype == 'multiline': - mode = 1 - thisinfo[field] = [] - if value: - raise MetaDataException("Unexpected text on same line as " + field + " in " + metafile.name) - elif fieldtype == 'string': - if field == 'Category' and thisinfo['Categories'] == 'None': - thisinfo['Categories'] = value.replace(';',',') - thisinfo[field] = value - elif fieldtype == 'build': - if value.endswith("\\"): - mode = 2 - buildlines = [value[:-1]] + builds.append(build) + + if builds: + yamldata['Builds'] = sorted(builds, key=lambda build: build['versionCode']) + + no_source_since = yamldata.get("NoSourceSince") + # do not overwrite the description if it is there + if no_source_since and not yamldata.get('AntiFeatures', {}).get('NoSourceSince'): + if 'AntiFeatures' not in yamldata: + yamldata['AntiFeatures'] = dict() + yamldata['AntiFeatures']['NoSourceSince'] = { + common.DEFAULT_LOCALE: no_source_since + } + + +def _format_multiline(value): + """TYPE_MULTILINE with newlines in them are saved as YAML literal strings.""" + if '\n' in value: + return ruamel.yaml.scalarstring.preserve_literal(str(value)) + return str(value) + + +def _format_list(value): + """TYPE_LIST should not contain null values.""" + return [v for v in value if v] + + +def _format_script(value): + """TYPE_SCRIPT with one value are converted to YAML string values.""" + value = [v for v in value if v] + if len(value) == 1: + return value[0] + return value + + +def _format_stringmap(appid, field, stringmap, versionCode=None): + """Format TYPE_STRINGMAP taking into account localized files in the metadata dir. + + If there are any localized versions on the filesystem already, + then move them all there. Otherwise, keep them in the .yml file. + + The directory for the localized files that is named after the + field is all lower case, following the convention set by Fastlane + metadata, and used by fdroidserver. + + """ + app_dir = Path('metadata', appid) + try: + next(app_dir.glob('*/%s/*.txt' % field.lower())) + files = [] + overwrites = [] + for name, descdict in stringmap.items(): + for locale, desc in descdict.items(): + outdir = app_dir / locale / field.lower() + if versionCode: + filename = '%d_%s.txt' % (versionCode, name) else: - thisinfo['builds'].append(parse_buildline([value])) - add_comments('build:' + thisinfo['builds'][-1]['version']) - elif fieldtype == 'buildv2': - curbuild = {} - vv = value.split(',') - if len(vv) != 2: - raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}'. - format(value, metafile.name)) - curbuild['version'] = vv[0] - curbuild['vercode'] = vv[1] - buildlines = [] - mode = 3 - elif fieldtype == 'obsolete': - pass # Just throw it away! 
- else: - raise MetaDataException("Unrecognised field type for " + field + " in " + metafile.name) - elif mode == 1: # Multiline field - if line == '.': - mode = 0 - else: - thisinfo[field].append(line) - elif mode == 2: # Line continuation mode in Build Version - if line.endswith("\\"): - buildlines.append(line[:-1]) - else: - buildlines.append(line) - thisinfo['builds'].append( - parse_buildline(buildlines)) - add_comments('build:' + thisinfo['builds'][-1]['version']) - mode = 0 - add_comments(None) - - # Mode at end of file should always be 0... - if mode == 1: - raise MetaDataException(field + " not terminated in " + metafile.name) - elif mode == 2: - raise MetaDataException("Unterminated continuation in " + metafile.name) - elif mode == 3: - raise MetaDataException("Unterminated build in " + metafile.name) - - if not thisinfo['Description']: - thisinfo['Description'].append('No description available') - - for build in thisinfo['builds']: - build['type'] = get_build_type(build) - - return thisinfo - -# Write a metadata file. -# -# 'dest' - The path to the output file -# 'app' - The app data -def write_metadata(dest, app): - - def writecomments(key): - written = 0 - for pf, comment in app['comments']: - if pf == key: - mf.write("%s\n" % comment) - written += 1 - #if options.verbose and written > 0: - #print "...writing comments for " + (key if key else 'EOF') - - def writefield(field, value=None): - writecomments(field) - if value is None: - value = app[field] - mf.write("%s:%s\n" % (field, value)) - - mf = open(dest, 'w') - if app['Disabled']: - writefield('Disabled') - if app['AntiFeatures']: - writefield('AntiFeatures') - if app['Provides']: - writefield('Provides') - writefield('Categories') - writefield('License') - writefield('Web Site') - writefield('Source Code') - writefield('Issue Tracker') - if app['Donate']: - writefield('Donate') - if app['FlattrID']: - writefield('FlattrID') - if app['Bitcoin']: - writefield('Bitcoin') - if app['Litecoin']: - writefield('Litecoin') - if app['Dogecoin']: - writefield('Dogecoin') - mf.write('\n') - if app['Name']: - writefield('Name') - if app['Auto Name']: - writefield('Auto Name') - writefield('Summary') - writefield('Description', '') - for line in app['Description']: - mf.write("%s\n" % line) - mf.write('.\n') - mf.write('\n') - if app['Requires Root']: - writefield('Requires Root', 'Yes') - mf.write('\n') - if app['Repo Type']: - writefield('Repo Type') - writefield('Repo') - mf.write('\n') - for build in app['builds']: - writecomments('build:' + build['version']) - mf.write("Build:%s,%s\n" % ( build['version'], build['vercode'])) - - # This defines the preferred order for the build items - as in the - # manual, they're roughly in order of application. 
- keyorder = ['disable', 'commit', 'subdir', 'submodules', 'init', - 'gradle', 'maven', 'oldsdkloc', 'target', 'compilesdk', - 'update', 'encoding', 'forceversion', 'forcevercode', 'rm', - 'fixtrans', 'fixapos', 'extlibs', 'srclibs', 'patch', - 'prebuild', 'scanignore', 'scandelete', 'build', 'buildjni', - 'preassemble', 'bindir', 'antcommand', 'novcheck'] - - def write_builditem(key, value): - if key in ['version', 'vercode', 'origlines', 'type']: - return - if key in valuetypes['bool'].attrs: - if not value: - return - value = 'yes' - #if options.verbose: - #print "...writing {0} : {1}".format(key, value) - outline = ' %s=' % key - outline += '&& \\\n '.join([s.lstrip() for s in value.split('&& ')]) - outline += '\n' - mf.write(outline) - - for key in keyorder: - if key in build: - write_builditem(key, build[key]) - for key, value in build.iteritems(): - if not key in keyorder: - write_builditem(key, value) - mf.write('\n') - - if 'Maintainer Notes' in app: - writefield('Maintainer Notes', '') - for line in app['Maintainer Notes']: - mf.write("%s\n" % line) - mf.write('.\n') - mf.write('\n') + filename = '%s.txt' % name + outfile = outdir / filename + files.append(str(outfile)) + if outfile.exists(): + if desc != outfile.read_text(): + overwrites.append(str(outfile)) + else: + if not outfile.parent.exists(): + outfile.parent.mkdir(parents=True) + outfile.write_text(desc) + if overwrites: + _warn_or_exception( + _( + 'Conflicting "{field}" definitions between .yml and localized files:' + ).format(field=field) + + '\n' + + '\n'.join(sorted(overwrites)) + ) + logging.warning( + _('Moving Anti-Features declarations to localized files:') + + '\n' + + '\n'.join(sorted(files)) + ) + return + except StopIteration: + pass + make_list = True + outlist = [] + for name in sorted(stringmap): + outlist.append(name) + descdict = stringmap.get(name) + if descdict and any(descdict.values()): + make_list = False + break + if make_list: + return sorted(outlist, key=str.lower) + return stringmap - if app['Archive Policy']: - writefield('Archive Policy') - writefield('Auto Update Mode') - writefield('Update Check Mode') - if app['Vercode Operation']: - writefield('Vercode Operation') - if 'Update Check Data' in app: - writefield('Update Check Data') - if app['Current Version']: - writefield('Current Version') - writefield('Current Version Code') - mf.write('\n') - if app['No Source Since']: - writefield('No Source Since') - mf.write('\n') - writecomments(None) - mf.close() +def _del_duplicated_NoSourceSince(app): + # noqa: D403 NoSourceSince is the word. + """NoSourceSince gets auto-added to AntiFeatures, but can also be manually added.""" + key = 'NoSourceSince' + if key in app: + no_source_since = app.get(key) + af_no_source_since = app.get('AntiFeatures', dict()).get(key) + if af_no_source_since == {common.DEFAULT_LOCALE: no_source_since}: + del app['AntiFeatures'][key] +def _builds_to_yaml(app): + """Reformat Builds: flags for output to YAML 1.2. + + This will strip any flag/value that is not set or is empty. + TYPE_BOOL fields are removed when they are false. 0 is valid + value, it should not be stripped, so there are special cases to + handle that. 
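    An illustrative example with hypothetical flag values:

        {'versionCode': 100, 'timeout': 0, 'submodules': False, 'rm': []}
        # serializes with only the integer fields kept, roughly:
        #   - versionCode: 100
        #     timeout: 0
        # since the false boolean and the empty list are stripped, while 0 is preserved.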
+ + """ + builds = ruamel.yaml.comments.CommentedSeq() + for build in app.get('Builds', []): + b = ruamel.yaml.comments.CommentedMap() + for field in build_flags: + v = build.get(field) + if v is None or v is False or v == '' or v == dict() or v == list(): + continue + _flagtype = flagtype(field) + if _flagtype == TYPE_MULTILINE: + v = _format_multiline(v) + elif _flagtype == TYPE_LIST: + v = _format_list(v) + elif _flagtype == TYPE_SCRIPT: + v = _format_script(v) + elif _flagtype == TYPE_STRINGMAP: + v = _format_stringmap(app['id'], field, v, build['versionCode']) + + if v or v == 0: + b[field] = v + + builds.append(b) + + # insert extra empty lines between build entries + for i in range(1, len(builds)): + builds.yaml_set_comment_before_after_key(i, 'bogus') + builds.ca.items[i][1][-1].value = '\n' + + return builds + + +def _app_to_yaml(app): + cm = ruamel.yaml.comments.CommentedMap() + insert_newline = False + for field in yaml_app_field_order: + if field == '\n': + # next iteration will need to insert a newline + insert_newline = True + else: + value = app.get(field) + if value or field in ('Builds', 'ArchivePolicy'): + _fieldtype = fieldtype(field) + if field == 'Builds': + if app.get('Builds'): + cm.update({field: _builds_to_yaml(app)}) + elif field == 'Categories': + cm[field] = sorted(value, key=str.lower) + elif field == 'AntiFeatures': + v = _format_stringmap(app['id'], field, value) + if v: + cm[field] = v + elif field == 'AllowedAPKSigningKeys': + value = [str(i).lower() for i in value] + if len(value) == 1: + cm[field] = value[0] + else: + cm[field] = value + elif field == 'ArchivePolicy': + if value is None: + continue + cm[field] = value + elif _fieldtype == TYPE_MULTILINE: + v = _format_multiline(value) + if v: + cm[field] = v + elif _fieldtype == TYPE_SCRIPT: + v = _format_script(value) + if v: + cm[field] = v + else: + if value: + cm[field] = value + + if insert_newline: + # we need to prepend a newline in front of this field + insert_newline = False + # inserting empty lines is not supported so we add a + # bogus comment and over-write its value + cm.yaml_set_comment_before_after_key(field, 'bogus') + cm.ca.items[field][1][-1].value = '\n' + return cm + + +def write_yaml(mf, app): + """Write metadata in yaml format. + + This requires the 'rt' round trip dumper to maintain order and needs + custom indent settings, so it needs to instantiate its own YAML + instance. Therefore, this function deliberately avoids using any of + the common YAML parser setups. 
+ + Parameters + ---------- + mf + active file discriptor for writing + app + app metadata to written to the YAML file + + """ + _del_duplicated_NoSourceSince(app) + yaml_app = _app_to_yaml(app) + yamlmf = ruamel.yaml.YAML(typ='rt') + yamlmf.indent(mapping=2, sequence=4, offset=2) + yamlmf.dump(yaml_app, stream=mf) + + +def write_metadata(metadatapath, app): + metadatapath = Path(metadatapath) + if metadatapath.suffix == '.yml': + with metadatapath.open('w') as mf: + return write_yaml(mf, app) + + _warn_or_exception(_('Unknown metadata format: %s') % metadatapath) + + +def add_metadata_arguments(parser): + """Add common command line flags related to metadata processing.""" + parser.add_argument( + "-W", + choices=['error', 'warn', 'ignore'], + default='error', + help=_("force metadata errors (default) to be warnings, or to be ignored."), + ) diff --git a/fdroidserver/mirror.py b/fdroidserver/mirror.py new file mode 100644 index 00000000..b06df3b1 --- /dev/null +++ b/fdroidserver/mirror.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python3 + +import ipaddress +import logging +import os +import posixpath +import socket +import subprocess +import sys +import urllib.parse +from argparse import ArgumentParser + +from . import _, common, index, update + + +def _run_wget(path, urls, verbose=False): + if verbose: + verbose = '--verbose' + else: + verbose = '--no-verbose' + + if not urls: + return + logging.debug(_('Running wget in {path}').format(path=path)) + cwd = os.getcwd() + os.makedirs(path, exist_ok=True) + os.chdir(path) + urls_file = '.fdroid-mirror-wget-input-file' + with open(urls_file, 'w') as fp: + for url in urls: + fp.write(url.split('?')[0] + '\n') # wget puts query string in the filename + subprocess.call( + [ + 'wget', + verbose, + '--continue', + '--user-agent="fdroid mirror"', + '--input-file=' + urls_file, + ] + ) + os.remove(urls_file) + os.chdir(cwd) # leave the working env the way we found it + + +def main(): + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "url", + nargs='?', + help=_( + 'Base URL to mirror, can include the index signing key ' + + 'using the query string: ?fingerprint=' + ), + ) + parser.add_argument( + "--all", + action='store_true', + default=False, + help=_("Mirror the full repo and archive, all file types."), + ) + parser.add_argument( + "--archive", + action='store_true', + default=False, + help=_("Also mirror the full archive section"), + ) + parser.add_argument( + "--build-logs", + action='store_true', + default=False, + help=_("Include the build logs in the mirror"), + ) + parser.add_argument( + "--pgp-signatures", + action='store_true', + default=False, + help=_("Include the PGP signature .asc files in the mirror"), + ) + parser.add_argument( + "--src-tarballs", + action='store_true', + default=False, + help=_("Include the source tarballs in the mirror"), + ) + parser.add_argument( + "--output-dir", default=None, help=_("The directory to write the mirror to") + ) + options = common.parse_args(parser) + + common.set_console_logging(options.verbose, options.color) + + if options.all: + options.archive = True + options.build_logs = True + options.pgp_signatures = True + options.src_tarballs = True + + if options.url is None: + logging.error(_('A URL is required as an argument!') + '\n') + parser.print_help() + sys.exit(1) + + scheme, hostname, path, params, query, fragment = urllib.parse.urlparse(options.url) + fingerprint = urllib.parse.parse_qs(query).get('fingerprint') + + def _append_to_url_path(*args): + """Append 
the list of path components to URL, keeping the rest the same.""" + newpath = posixpath.join(path, *args) + return urllib.parse.urlunparse( + (scheme, hostname, newpath, params, query, fragment) + ) + + if fingerprint: + config = common.read_config() + if not ('jarsigner' in config or 'apksigner' in config): + logging.error( + _('Java JDK not found! Install in standard location or set java_paths!') + ) + sys.exit(1) + + def _get_index(section, etag=None): + url = _append_to_url_path(section) + data, etag = index.download_repo_index(url, etag=etag) + return data, etag, _append_to_url_path(section, 'index-v1.jar') + + else: + + def _get_index(section, etag=None): + import io + import json + import zipfile + + from . import net + + url = _append_to_url_path(section, 'index-v1.jar') + content, etag = net.http_get(url) + with zipfile.ZipFile(io.BytesIO(content)) as zip: + jsoncontents = zip.open('index-v1.json').read() + data = json.loads(jsoncontents.decode('utf-8')) + return data, etag, None # no verified index file to return + + ip = None + try: + ip = ipaddress.ip_address(hostname) + except ValueError: + pass + if hostname == 'f-droid.org' or ( + ip is not None and hostname in socket.gethostbyname_ex('f-droid.org')[2] + ): + logging.error( + _( + 'This command should never be used to mirror f-droid.org! ' + 'A full copy requires more than 600GB.' + ) + ) + sys.exit(1) + + path = path.rstrip('/') + if path.endswith('repo') or path.endswith('archive'): + logging.warning( + _('Do not include "{path}" in URL!').format(path=path.split('/')[-1]) + ) + elif not path.endswith('fdroid'): + logging.warning( + _('{url} does not end with "fdroid", check the URL path!').format( + url=options.url + ) + ) + + icondirs = ['icons'] + for density in update.screen_densities: + icondirs.append('icons-' + density) + + if options.output_dir: + basedir = options.output_dir + else: + basedir = os.path.join(os.getcwd(), hostname, path.strip('/')) + os.makedirs(basedir, exist_ok=True) + + if options.archive: + sections = ('repo', 'archive') + else: + sections = ('repo',) + + for section in sections: + sectiondir = os.path.join(basedir, section) + + urls = [] + data, etag, index_url = _get_index(section) + if index_url: + urls.append(index_url) + + os.makedirs(sectiondir, exist_ok=True) + os.chdir(sectiondir) + for icondir in icondirs: + os.makedirs(os.path.join(sectiondir, icondir), exist_ok=True) + + for packageName, packageList in data['packages'].items(): + for package in packageList: + to_fetch = [] + keys = ['apkName'] + if options.src_tarballs: + keys.append('srcname') + for k in keys: + if k in package: + to_fetch.append(package[k]) + elif k == 'apkName': + logging.error( + _('{appid} is missing {name}').format( + appid=package['packageName'], name=k + ) + ) + for f in to_fetch: + if not os.path.exists(f) or ( + f.endswith('.apk') and os.path.getsize(f) != package['size'] + ): + urls.append(_append_to_url_path(section, f)) + if options.pgp_signatures: + urls.append(_append_to_url_path(section, f + '.asc')) + if options.build_logs and f.endswith('.apk'): + urls.append( + _append_to_url_path(section, f[:-4] + '.log.gz') + ) + + _run_wget(sectiondir, urls, options.verbose) + + for app in data['apps']: + localized = app.get('localized') + if localized: + for locale, d in localized.items(): + urls = [] + components = (section, app['packageName'], locale) + for k in update.GRAPHIC_NAMES: + f = d.get(k) + if f: + filepath_tuple = components + (f,) + urls.append(_append_to_url_path(*filepath_tuple)) + 
_run_wget(os.path.join(basedir, *components), urls, options.verbose) + for k in update.SCREENSHOT_DIRS: + urls = [] + filelist = d.get(k) + if filelist: + components = (section, app['packageName'], locale, k) + for f in filelist: + filepath_tuple = components + (f,) + urls.append(_append_to_url_path(*filepath_tuple)) + _run_wget( + os.path.join(basedir, *components), + urls, + options.verbose, + ) + + urls = dict() + for app in data['apps']: + if 'icon' not in app: + logging.error( + _('no "icon" in {appid}').format(appid=app['packageName']) + ) + continue + icon = app['icon'] + for icondir in icondirs: + url = _append_to_url_path(section, icondir, icon) + if icondir not in urls: + urls[icondir] = [] + urls[icondir].append(url) + + for icondir in icondirs: + if icondir in urls: + _run_wget( + os.path.join(basedir, section, icondir), + urls[icondir], + options.verbose, + ) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/net.py b/fdroidserver/net.py new file mode 100644 index 00000000..fe097fd5 --- /dev/null +++ b/fdroidserver/net.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 +# +# net.py - part of the FDroid server tools +# Copyright (C) 2015 Hans-Christoph Steiner +# Copyright (C) 2022 FC Stegerman +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import copy +import logging +import os +import random +import tempfile +import time +import urllib + +import requests +import urllib3 +from requests.adapters import HTTPAdapter, Retry + +from . import _, common + +logger = logging.getLogger(__name__) + +HEADERS = {'User-Agent': 'F-Droid'} + + +def download_file(url, local_filename=None, dldir='tmp', retries=3, backoff_factor=0.1): + """Try hard to download the file, including retrying on failures. + + This has two retry cycles, one inside of the requests session, the + other provided by this function. The requests retry logic applies + to failed DNS lookups, socket connections and connection timeouts, + never to requests where data has made it to the server. This + handles ChunkedEncodingError during transfer in its own retry + loop. This can result in more retries than are specified in the + retries parameter. 
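    A hypothetical usage sketch (URL and paths are made up):

        path = download_file('https://example.com/fdroid/repo/foo.apk', dldir='tmp')
        # path == 'tmp/foo.apk' once the transfer has completed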
+ + """ + filename = urllib.parse.urlparse(url).path.split('/')[-1] + if local_filename is None: + local_filename = os.path.join(dldir, filename) + for i in range(retries + 1): + if retries: + max_retries = Retry(total=retries - i, backoff_factor=backoff_factor) + adapter = HTTPAdapter(max_retries=max_retries) + session = requests.Session() + session.mount('http://', adapter) + session.mount('https://', adapter) + else: + session = requests + # the stream=True parameter keeps memory usage low + r = session.get( + url, stream=True, allow_redirects=True, headers=HEADERS, timeout=300 + ) + r.raise_for_status() + try: + with open(local_filename, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + f.write(chunk) + f.flush() + return local_filename + except requests.exceptions.ChunkedEncodingError as err: + if i == retries: + raise err + logger.warning('Download interrupted, retrying...') + time.sleep(backoff_factor * 2**i) + raise ValueError("retries must be >= 0") + + +def download_using_mirrors(mirrors, local_filename=None): + """Try to download the file from any working mirror. + + Download the file that all URLs in the mirrors list point to, + trying all the tricks, starting with the most private methods + first. The list of mirrors is converted into a list of mirror + configurations to try, in order that the should be attempted. + + This builds mirror_configs_to_try using all possible combos to + try. If a mirror is marked with worksWithoutSNI: True, then this + logic will try it twice: first without SNI, then again with SNI. + + """ + mirrors = common.parse_list_of_dicts(mirrors) + mirror_configs_to_try = [] + for mirror in mirrors: + mirror_configs_to_try.append(mirror) + if mirror.get('worksWithoutSNI'): + m = copy.deepcopy(mirror) + del m['worksWithoutSNI'] + mirror_configs_to_try.append(m) + + if not local_filename: + for mirror in mirrors: + filename = urllib.parse.urlparse(mirror['url']).path.split('/')[-1] + if filename: + break + if filename: + local_filename = os.path.join(common.get_cachedir(), filename) + else: + local_filename = tempfile.mkstemp(prefix='fdroid-') + + timeouts = (2, 10, 100) + last_exception = None + for timeout in timeouts: + for mirror in mirror_configs_to_try: + last_exception = None + urllib3.util.ssl_.HAS_SNI = not mirror.get('worksWithoutSNI') + try: + # the stream=True parameter keeps memory usage low + r = requests.get( + mirror['url'], + stream=True, + allow_redirects=False, + headers=HEADERS, + # add jitter to the timeout to be less predictable + timeout=timeout + random.randint(0, timeout), # nosec B311 + ) + if r.status_code != 200: + raise requests.exceptions.HTTPError(r.status_code, response=r) + with open(local_filename, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + f.write(chunk) + f.flush() + return local_filename + except ( + ConnectionError, + requests.exceptions.ChunkedEncodingError, + requests.exceptions.ConnectionError, + requests.exceptions.ContentDecodingError, + requests.exceptions.HTTPError, + requests.exceptions.SSLError, + requests.exceptions.StreamConsumedError, + requests.exceptions.Timeout, + requests.exceptions.UnrewindableBodyError, + ) as e: + last_exception = e + logger.debug(_('Retrying failed download: %s') % str(e)) + # if it hasn't succeeded by now, then give up and raise last exception + if last_exception: + raise last_exception + + +def http_get(url, etag=None, timeout=600): + """Download the content 
from the given URL by making a GET request. + + If an ETag is given, it will do a HEAD request first, to see if the content changed. + + Parameters + ---------- + url + The URL to download from. + etag + The last ETag to be used for the request (optional). + + Returns + ------- + A tuple consisting of: + - The raw content that was downloaded or None if it did not change + - The new eTag as returned by the HTTP request + """ + # TODO disable TLS Session IDs and TLS Session Tickets + # (plain text cookie visible to anyone who can see the network traffic) + if etag: + r = requests.head(url, headers=HEADERS, timeout=timeout) + r.raise_for_status() + if 'ETag' in r.headers and etag == r.headers['ETag']: + return None, etag + + r = requests.get(url, headers=HEADERS, timeout=timeout) + r.raise_for_status() + + new_etag = None + if 'ETag' in r.headers: + new_etag = r.headers['ETag'] + + return r.content, new_etag diff --git a/fdroidserver/nightly.py b/fdroidserver/nightly.py new file mode 100644 index 00000000..372390ea --- /dev/null +++ b/fdroidserver/nightly.py @@ -0,0 +1,612 @@ +#!/usr/bin/env python3 +"""Set up an app build for a nightly build repo.""" +# +# nightly.py - part of the FDroid server tools +# Copyright (C) 2017 Hans-Christoph Steiner +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import base64 +import datetime +import hashlib +import inspect +import logging +import os +import platform +import shutil +import ssl +import subprocess +import sys +import tempfile +from argparse import ArgumentParser +from typing import Optional +from urllib.parse import urlparse + +import git +import paramiko +import yaml + +from . import _, common +from .exception import VCSException + +# hard coded defaults for Android ~/.android/debug.keystore files +# https://developers.google.com/android/guides/client-auth +KEYSTORE_FILE = os.path.join(os.getenv('HOME'), '.android', 'debug.keystore') +PASSWORD = 'android' # nosec B105 standard hardcoded password for debug keystores +KEY_ALIAS = 'androiddebugkey' +DISTINGUISHED_NAME = 'CN=Android Debug,O=Android,C=US' + +# standard suffix for naming fdroid git repos +NIGHTLY = '-nightly' + + +def _get_keystore_secret_var(keystore: str) -> str: + """Get keystore secret as base64. + + Parameters + ---------- + keystore + The path of the keystore. + + Returns + ------- + base64_secret + The keystore secret as base64 string. + """ + with open(keystore, 'rb') as fp: + return base64.standard_b64encode(fp.read()).decode('ascii') + + +def _ssh_key_from_debug_keystore(keystore: Optional[str] = None) -> str: + """Convert a debug keystore to an SSH private key. + + This leaves the original keystore file in place. + + Parameters + ---------- + keystore + The keystore to convert to a SSH private key. + + Returns + ------- + key_path + The SSH private key file path in the temporary directory. 
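    A hypothetical usage sketch:

        ssh_key = _ssh_key_from_debug_keystore()  # defaults to ~/.android/debug.keystore
        # ssh_key is something like
        # '/tmp/.abc12345/debug_keystore_<fingerprint>_id_rsa', with the
        # matching public key written next to it as ssh_key + '.pub'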
+ """ + if keystore is None: + # set this here so it can be overridden in the tests + # TODO convert this to a class to get rid of this nonsense + keystore = KEYSTORE_FILE + tmp_dir = tempfile.mkdtemp(prefix='.') + privkey = os.path.join(tmp_dir, '.privkey') + key_pem = os.path.join(tmp_dir, '.key.pem') + p12 = os.path.join(tmp_dir, '.keystore.p12') + _config = dict() + common.fill_config_defaults(_config) + subprocess.check_call( + [ + _config['keytool'], + '-importkeystore', + '-srckeystore', + keystore, + '-srcalias', + KEY_ALIAS, + '-srcstorepass', + PASSWORD, + '-srckeypass', + PASSWORD, + '-destkeystore', + p12, + '-destalias', + KEY_ALIAS, + '-deststorepass', + PASSWORD, + '-destkeypass', + PASSWORD, + '-deststoretype', + 'PKCS12', + ], + env={'LC_ALL': 'C.UTF-8'}, + ) + subprocess.check_call( + [ + 'openssl', + 'pkcs12', + '-in', + p12, + '-out', + key_pem, + '-passin', + 'pass:' + PASSWORD, + '-passout', + 'pass:' + PASSWORD, + ], + env={'LC_ALL': 'C.UTF-8'}, + ) + + # OpenSSL 3.0 changed the default output format from PKCS#1 to + # PKCS#8, which paramiko does not support. + # https://www.openssl.org/docs/man3.0/man1/openssl-rsa.html#traditional + # https://github.com/paramiko/paramiko/issues/1015 + openssl_rsa_cmd = ['openssl', 'rsa'] + if ssl.OPENSSL_VERSION_INFO[0] >= 3: + openssl_rsa_cmd += ['-traditional'] + subprocess.check_call( + openssl_rsa_cmd + + [ + '-in', + key_pem, + '-out', + privkey, + '-passin', + 'pass:' + PASSWORD, + ], + env={'LC_ALL': 'C.UTF-8'}, + ) + os.remove(key_pem) + os.remove(p12) + os.chmod(privkey, 0o600) # os.umask() should cover this, but just in case + + rsakey = paramiko.RSAKey.from_private_key_file(privkey) + fingerprint = ( + base64.b64encode(hashlib.sha256(rsakey.asbytes()).digest()) + .decode('ascii') + .rstrip('=') + ) + ssh_private_key_file = os.path.join( + tmp_dir, 'debug_keystore_' + fingerprint.replace('/', '_') + '_id_rsa' + ) + shutil.move(privkey, ssh_private_key_file) + + pub = rsakey.get_name() + ' ' + rsakey.get_base64() + ' ' + ssh_private_key_file + with open(ssh_private_key_file + '.pub', 'w') as fp: + fp.write(pub) + + logging.info(_('\nSSH public key to be used as deploy key:') + '\n' + pub) + + return ssh_private_key_file + + +def get_repo_base_url( + clone_url: str, repo_git_base: str, force_type: Optional[str] = None +) -> str: + """Generate the base URL for the F-Droid repository. + + Parameters + ---------- + clone_url + The URL to clone the Git repository. + repo_git_base + The project path of the Git repository at the Git forge. + force_type + The Git forge of the project. + + Returns + ------- + repo_base_url + The base URL of the F-Droid repository. + """ + if force_type is None: + force_type = urlparse(clone_url).netloc + if force_type == 'gitlab.com': + return clone_url + '/-/raw/master/fdroid' + if force_type == 'github.com': + return 'https://raw.githubusercontent.com/%s/master/fdroid' % repo_git_base + print(_('ERROR: unsupported git host "%s", patches welcome!') % force_type) + sys.exit(1) + + +def clone_git_repo(clone_url, git_mirror_path): + """Clone a git repo into the given path, failing if a password is required. + + If GitPython's safe mode is present, this will use that. Otherwise, + this includes a very limited version of the safe mode just to ensure + this won't hang on password prompts. 
+ + https://github.com/gitpython-developers/GitPython/pull/2029 + + """ + logging.debug(_('cloning {url}').format(url=clone_url)) + try: + sig = inspect.signature(git.Repo.clone_from) + if 'safe' in sig.parameters: + git.Repo.clone_from(clone_url, git_mirror_path, safe=True) + else: + git.Repo.clone_from( + clone_url, + git_mirror_path, + env={ + 'GIT_ASKPASS': '/bin/true', + 'SSH_ASKPASS': '/bin/true', + 'GIT_USERNAME': 'u', + 'GIT_PASSWORD': 'p', + 'GIT_HTTP_USERNAME': 'u', + 'GIT_HTTP_PASSWORD': 'p', + 'GIT_SSH': '/bin/false', # for git < 2.3 + 'GIT_TERMINAL_PROMPT': '0', + }, + ) + except git.exc.GitCommandError as e: + logging.warning(_('WARNING: only public git repos are supported!')) + raise VCSException(f'git clone {clone_url} failed:', str(e)) from e + + +def main(): + """Deploy to F-Droid repository or generate SSH private key from keystore. + + The behaviour of this function is influenced by the configuration file as + well as command line parameters. + + Raises + ------ + :exc:`~fdroidserver.exception.VCSException` + If the nightly Git repository could not be cloned during an attempt to + deploy. + """ + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "--keystore", + default=KEYSTORE_FILE, + help=_("Specify which debug keystore file to use."), + ) + parser.add_argument( + "--show-secret-var", + action="store_true", + default=False, + help=_("Print the secret variable to the terminal for easy copy/paste"), + ) + parser.add_argument( + "--keep-private-keys", + action="store_true", + default=False, + help=_("Do not remove the private keys generated from the keystore"), + ) + parser.add_argument( + "--no-deploy", + action="store_true", + default=False, + help=_("Do not deploy the new files to the repo"), + ) + parser.add_argument( + "--file", + default='app/build/outputs/apk/*.apk', + help=_('The file to be included in the repo (path or glob)'), + ) + parser.add_argument( + "--no-checksum", + action="store_true", + default=False, + help=_("Don't use rsync checksums"), + ) + archive_older_unset = -1 + parser.add_argument( + "--archive-older", + type=int, + default=archive_older_unset, + help=_("Set maximum releases in repo before older ones are archived"), + ) + # TODO add --with-btlog + options = common.parse_args(parser) + + # force a tighter umask since this writes private key material + umask = os.umask(0o077) + + if 'CI' in os.environ: + v = os.getenv('DEBUG_KEYSTORE') + debug_keystore = None + if v: + debug_keystore = base64.b64decode(v) + if not debug_keystore: + logging.error(_('DEBUG_KEYSTORE is not set or the value is incomplete')) + sys.exit(1) + os.makedirs(os.path.dirname(KEYSTORE_FILE), exist_ok=True) + if os.path.exists(KEYSTORE_FILE): + logging.warning(_('overwriting existing {path}').format(path=KEYSTORE_FILE)) + with open(KEYSTORE_FILE, 'wb') as fp: + fp.write(debug_keystore) + + repo_basedir = os.path.join(os.getcwd(), 'fdroid') + repodir = os.path.join(repo_basedir, 'repo') + cibase = os.getcwd() + os.makedirs(repodir, exist_ok=True) + + # the 'master' branch is hardcoded in fdroidserver/deploy.py + if 'CI_PROJECT_PATH' in os.environ and 'CI_PROJECT_URL' in os.environ: + # we are in GitLab CI + repo_git_base = os.getenv('CI_PROJECT_PATH') + NIGHTLY + clone_url = os.getenv('CI_PROJECT_URL') + NIGHTLY + repo_base = get_repo_base_url( + clone_url, repo_git_base, force_type='gitlab.com' + ) + servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base + deploy_key_url = ( + 
f'{clone_url}/-/settings/repository#js-deploy-keys-settings' + ) + git_user_name = os.getenv('GITLAB_USER_NAME') + git_user_email = os.getenv('GITLAB_USER_EMAIL') + elif 'TRAVIS_REPO_SLUG' in os.environ: + # we are in Travis CI + repo_git_base = os.getenv('TRAVIS_REPO_SLUG') + NIGHTLY + clone_url = 'https://github.com/' + repo_git_base + repo_base = get_repo_base_url( + clone_url, repo_git_base, force_type='github.com' + ) + servergitmirror = 'git@github.com:' + repo_git_base + deploy_key_url = ( + f'https://github.com/{repo_git_base}/settings/keys' + + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' + ) + git_user_name = repo_git_base + git_user_email = os.getenv('USER') + '@' + platform.node() + elif ( + 'CIRCLE_REPOSITORY_URL' in os.environ + and 'CIRCLE_PROJECT_USERNAME' in os.environ + and 'CIRCLE_PROJECT_REPONAME' in os.environ + ): + # we are in Circle CI + repo_git_base = ( + os.getenv('CIRCLE_PROJECT_USERNAME') + + '/' + + os.getenv('CIRCLE_PROJECT_REPONAME') + + NIGHTLY + ) + clone_url = os.getenv('CIRCLE_REPOSITORY_URL') + NIGHTLY + repo_base = get_repo_base_url( + clone_url, repo_git_base, force_type='github.com' + ) + servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base + deploy_key_url = ( + f'https://github.com/{repo_git_base}/settings/keys' + + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' + ) + git_user_name = os.getenv('CIRCLE_USERNAME') + git_user_email = git_user_name + '@' + platform.node() + elif 'GITHUB_ACTIONS' in os.environ: + # we are in Github actions + repo_git_base = os.getenv('GITHUB_REPOSITORY') + NIGHTLY + clone_url = os.getenv('GITHUB_SERVER_URL') + '/' + repo_git_base + repo_base = get_repo_base_url( + clone_url, repo_git_base, force_type='github.com' + ) + servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base + deploy_key_url = ( + f'https://github.com/{repo_git_base}/settings/keys' + + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' + ) + git_user_name = os.getenv('GITHUB_ACTOR') + git_user_email = git_user_name + '@' + platform.node() + else: + print(_('ERROR: unsupported CI type, patches welcome!')) + sys.exit(1) + + repo_url = repo_base + '/repo' + git_mirror_path = os.path.join(repo_basedir, 'git-mirror') + git_mirror_fdroiddir = os.path.join(git_mirror_path, 'fdroid') + git_mirror_repodir = os.path.join(git_mirror_fdroiddir, 'repo') + git_mirror_metadatadir = os.path.join(git_mirror_fdroiddir, 'metadata') + if not os.path.isdir(git_mirror_repodir): + clone_git_repo(clone_url, git_mirror_path) + if not os.path.isdir(git_mirror_repodir): + os.makedirs(git_mirror_repodir, mode=0o755) + if os.path.exists('LICENSE'): + shutil.copy2('LICENSE', git_mirror_path) + + mirror_git_repo = git.Repo.init(git_mirror_path) + writer = mirror_git_repo.config_writer() + writer.set_value('user', 'name', git_user_name) + writer.set_value('user', 'email', git_user_email) + writer.release() + for remote in mirror_git_repo.remotes: + mirror_git_repo.delete_remote(remote) + + readme_path = os.path.join(git_mirror_path, 'README.md') + readme = ''' +# {repo_git_base} + +This is an app repository for nightly versions. +You can use it with the [F-Droid](https://f-droid.org/) Android app. 
+ +[![{repo_url}]({repo_url}/icons/icon.png)](https://fdroid.link/#{repo_url}) + +Last updated: {date}'''.format( + repo_git_base=repo_git_base, + repo_url=repo_url, + date=datetime.datetime.now(datetime.timezone.utc).strftime( + '%Y-%m-%d %H:%M:%S UTC' + ), + ) + with open(readme_path, 'w') as fp: + fp.write(readme) + mirror_git_repo.git.add(all=True) + mirror_git_repo.index.commit("update README") + + mirror_git_repo.git.add(all=True) + mirror_git_repo.index.commit("update repo/website icon") + + os.chdir(repo_basedir) + if os.path.isdir(git_mirror_repodir): + common.local_rsync(options, [git_mirror_repodir + '/'], 'repo/') + if os.path.isdir(git_mirror_metadatadir): + common.local_rsync(options, [git_mirror_metadatadir + '/'], 'metadata/') + + ssh_private_key_file = _ssh_key_from_debug_keystore() + # this is needed for GitPython to find the SSH key + ssh_dir = os.path.join(os.getenv('HOME'), '.ssh') + os.makedirs(ssh_dir, exist_ok=True) + ssh_config = os.path.join(ssh_dir, 'config') + logging.debug(_('adding IdentityFile to {path}').format(path=ssh_config)) + with open(ssh_config, 'a') as fp: + fp.write('\n\nHost *\n\tIdentityFile %s\n' % ssh_private_key_file) + + if options.archive_older == archive_older_unset: + fdroid_size = common.get_dir_size(git_mirror_fdroiddir) + max_size = common.GITLAB_COM_PAGES_MAX_SIZE + if fdroid_size < max_size: + options.archive_older = 20 + else: + options.archive_older = 3 + print( + 'WARNING: repo is %s over the GitLab Pages limit (%s)' + % (fdroid_size - max_size, max_size) + ) + print('Setting --archive-older to 3') + + config = { + 'identity_file': ssh_private_key_file, + 'repo_name': repo_git_base, + 'repo_url': repo_url, + 'repo_description': 'Nightly builds from %s' % git_user_email, + 'archive_name': repo_git_base + ' archive', + 'archive_url': repo_base + '/archive', + 'archive_description': 'Old nightly builds that have been archived.', + 'archive_older': options.archive_older, + 'servergitmirrors': [{"url": servergitmirror}], + 'keystore': KEYSTORE_FILE, + 'repo_keyalias': KEY_ALIAS, + 'keystorepass': PASSWORD, + 'keypass': PASSWORD, + 'keydname': DISTINGUISHED_NAME, + 'make_current_version_link': False, + } + with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp: + yaml.dump(config, fp, default_flow_style=False) + os.chmod(common.CONFIG_FILE, 0o600) + config = common.read_config() + common.assert_config_keystore(config) + + logging.debug( + _( + 'Run over {cibase} to find -debug.apk. 
and skip repo_basedir {repo_basedir}' + ).format(cibase=cibase, repo_basedir=repo_basedir) + ) + + for root, dirs, files in os.walk(cibase): + for d in ('.git', '.gradle'): + if d in dirs: + dirs.remove(d) + if root == cibase and 'fdroid' in dirs: + dirs.remove('fdroid') + + for f in files: + if f.endswith('-debug.apk'): + apkfilename = os.path.join(root, f) + logging.debug( + _('Stripping mystery signature from {apkfilename}').format( + apkfilename=apkfilename + ) + ) + destapk = os.path.join(repodir, os.path.basename(f)) + os.chmod(apkfilename, 0o644) + logging.debug( + _( + 'Resigning {apkfilename} with provided debug.keystore' + ).format(apkfilename=os.path.basename(apkfilename)) + ) + common.sign_apk(apkfilename, destapk, KEY_ALIAS) + + if options.verbose: + logging.debug(_('attempting bare SSH connection to test deploy key:')) + try: + subprocess.check_call( + [ + 'ssh', + '-Tvi', + ssh_private_key_file, + '-oIdentitiesOnly=yes', + '-oStrictHostKeyChecking=no', + servergitmirror.split(':')[0], + ] + ) + except subprocess.CalledProcessError: + pass + + app_url = clone_url[: -len(NIGHTLY)] + template = dict() + template['AuthorName'] = clone_url.split('/')[4] + template['AuthorWebSite'] = '/'.join(clone_url.split('/')[:4]) + template['Categories'] = ['nightly'] + template['SourceCode'] = app_url + template['IssueTracker'] = app_url + '/issues' + template['Summary'] = 'Nightly build of ' + urlparse(app_url).path[1:] + template['Description'] = template['Summary'] + with open('template.yml', 'w') as fp: + yaml.dump(template, fp) + + subprocess.check_call( + ['fdroid', 'update', '--rename-apks', '--create-metadata', '--verbose'], + cwd=repo_basedir, + ) + common.local_rsync( + options, [repo_basedir + '/metadata/'], git_mirror_metadatadir + '/' + ) + mirror_git_repo.git.add(all=True) + mirror_git_repo.index.commit("update app metadata") + + if not options.no_deploy: + try: + cmd = ['fdroid', 'deploy', '--verbose', '--no-keep-git-mirror-archive'] + subprocess.check_call(cmd, cwd=repo_basedir) + except subprocess.CalledProcessError: + logging.error( + _('cannot publish update, did you set the deploy key?') + + '\n' + + deploy_key_url + ) + sys.exit(1) + + if not options.keep_private_keys: + os.remove(KEYSTORE_FILE) + if shutil.rmtree.avoids_symlink_attacks: + shutil.rmtree(os.path.dirname(ssh_private_key_file)) + + else: + if not os.path.isfile(options.keystore): + androiddir = os.path.dirname(options.keystore) + if not os.path.exists(androiddir): + os.mkdir(androiddir) + logging.info(_('created {path}').format(path=androiddir)) + logging.error( + _('{path} does not exist! 
Create it by running:').format( + path=options.keystore + ) + + '\n keytool -genkey -v -keystore ' + + options.keystore + + ' -storepass android \\' + + '\n -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\' + + '\n -dname "CN=Android Debug,O=Android,C=US"' + ) + sys.exit(1) + ssh_dir = os.path.join(os.getenv('HOME'), '.ssh') + privkey = _ssh_key_from_debug_keystore(options.keystore) + if os.path.exists(ssh_dir): + ssh_private_key_file = os.path.join(ssh_dir, os.path.basename(privkey)) + shutil.move(privkey, ssh_private_key_file) + shutil.move(privkey + '.pub', ssh_private_key_file + '.pub') + if shutil.rmtree.avoids_symlink_attacks: + shutil.rmtree(os.path.dirname(privkey)) + + if options.show_secret_var: + debug_keystore = _get_keystore_secret_var(options.keystore) + print( + _('\n{path} encoded for the DEBUG_KEYSTORE secret variable:').format( + path=options.keystore + ) + ) + print(debug_keystore) + + os.umask(umask) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/publish.py b/fdroidserver/publish.py index 85349fb1..42945166 100644 --- a/fdroidserver/publish.py +++ b/fdroidserver/publish.py @@ -1,9 +1,9 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # # publish.py - part of the FDroid server tools # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013 Daniel Martí +# Copyright (C) 2013-2014 Daniel Martí +# Copyright (C) 2021 Felix C. Stegerman # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,155 +18,461 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import sys -import os -import shutil -import subprocess -import md5 -import glob -from optparse import OptionParser +"""Sign APKs using keys or via reproducible builds signature copying. -import common, metadata -from common import BuildException +This command takes unsigned APKs and signs them. It looks for +unsigned APKs in the unsigned/ directory and puts successfully signed +APKs into the repo/ directory. The default is to run in a kind of +batch mode, where it will only quit on certain kinds of errors. It +mostly reports success by moving an APK from unsigned/ to repo/ + +""" + +import glob +import hashlib +import json +import logging +import os +import re +import shutil +import sys +import time +import zipfile +from argparse import ArgumentParser +from collections import OrderedDict +from gettext import ngettext + +from . import _, common, metadata +from .common import FDroidPopen +from .exception import BuildException, FDroidException config = None -options = None +start_timestamp = time.gmtime() + + +def publish_source_tarball(apkfilename, unsigned_dir, output_dir): + """Move the source tarball into the output directory...""" + tarfilename = apkfilename[:-4] + '_src.tar.gz' + tarfile = os.path.join(unsigned_dir, tarfilename) + if os.path.exists(tarfile): + shutil.move(tarfile, os.path.join(output_dir, tarfilename)) + logging.debug('...published %s', tarfilename) + else: + logging.debug('...no source tarball for %s', apkfilename) + + +def key_alias(appid): + """No summary. + + Get the alias which F-Droid uses to indentify the singing key + for this App in F-Droids keystore. + """ + if config and 'keyaliases' in config and appid in config['keyaliases']: + # For this particular app, the key alias is overridden... 
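# Illustrative sketch (not part of this patch): the alias scheme used at
# this point. F-Droid identifies an app's signing key by the first 8 hex
# digits of the MD5 of its application ID, or of the text after '@' when
# config['keyaliases'] overrides it. 'org.example.app' is hypothetical.
import hashlib

appid = 'org.example.app'
alias = hashlib.md5(appid.encode('utf-8')).hexdigest()[:8]  # nosec - not security related
print(alias)  # 8-character alias used in the F-Droid keystore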
+ keyalias = config['keyaliases'][appid] + if keyalias.startswith('@'): + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(keyalias[1:].encode('utf-8')) + keyalias = m.hexdigest()[:8] + return keyalias + else: + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(appid.encode('utf-8')) + return m.hexdigest()[:8] + + +def read_fingerprints_from_keystore(): + """Obtain a dictionary containing all singning-key fingerprints which are managed by F-Droid, grouped by appid.""" + env_vars = {'LC_ALL': 'C.UTF-8', 'FDROID_KEY_STORE_PASS': config['keystorepass']} + cmd = [ + config['keytool'], + '-list', + '-v', + '-keystore', + config['keystore'], + '-storepass:env', + 'FDROID_KEY_STORE_PASS', + ] + if config['keystore'] == 'NONE': + cmd += config['smartcardoptions'] + p = FDroidPopen(cmd, envs=env_vars, output=False) + if p.returncode != 0: + raise FDroidException('could not read keystore {}'.format(config['keystore'])) + + realias = re.compile('Alias name: (?P.+)' + os.linesep) + resha256 = re.compile(r'\s+SHA256: (?P[:0-9A-F]{95})' + os.linesep) + fps = {} + for block in p.output.split(('*' * 43) + os.linesep + '*' * 43): + s_alias = realias.search(block) + s_sha256 = resha256.search(block) + if s_alias and s_sha256: + sigfp = s_sha256.group('sha256').replace(':', '').lower() + fps[s_alias.group('alias')] = sigfp + return fps + + +def sign_sig_key_fingerprint_list(jar_file): + """Sign the list of app-signing key fingerprints. + + This is used primaryily by fdroid update to determine which APKs + where built and signed by F-Droid and which ones were + manually added by users. + """ + cmd = [config['jarsigner']] + cmd += '-keystore', config['keystore'] + cmd += '-storepass:env', 'FDROID_KEY_STORE_PASS' + cmd += '-digestalg', 'SHA1' + cmd += '-sigalg', 'SHA1withRSA' + cmd += jar_file, config['repo_keyalias'] + if config['keystore'] == 'NONE': + cmd += config['smartcardoptions'] + else: # smardcards never use -keypass + cmd += '-keypass:env', 'FDROID_KEY_PASS' + env_vars = { + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config.get('keypass', ""), + } + p = common.FDroidPopen(cmd, envs=env_vars) + if p.returncode != 0: + raise FDroidException("Failed to sign '{}'!".format(jar_file)) + + +def store_publish_signer_fingerprints(appids, indent=None): + """Store list of all signing-key fingerprints for given appids to HD. + + This list will later on be needed by fdroid update. 
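# Illustrative sketch (not part of this patch): how the keytool listing is
# mapped to alias -> signer fingerprint by read_fingerprints_from_keystore()
# above. The sample is a trimmed, hypothetical excerpt of
# `keytool -list -v` output; the named groups 'alias' and 'sha256' match
# the .group() calls used above.
import os
import re

sample = (
    'Alias name: 1a2b3c4d' + os.linesep
    + '  SHA256: AA:BB:CC:DD:EE:FF:00:11:22:33:44:55:66:77:88:99:'
    'AA:BB:CC:DD:EE:FF:00:11:22:33:44:55:66:77:88:99' + os.linesep
)
realias = re.compile('Alias name: (?P<alias>.+)' + os.linesep)
resha256 = re.compile(r'\s+SHA256: (?P<sha256>[:0-9A-F]{95})' + os.linesep)
alias = realias.search(sample).group('alias')
sigfp = resha256.search(sample).group('sha256').replace(':', '').lower()
print({alias: sigfp})  # {'1a2b3c4d': 'aabbccdd...'}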
+ """ + if not os.path.exists('repo'): + os.makedirs('repo') + data = OrderedDict() + fps = read_fingerprints_from_keystore() + for appid in sorted(appids): + alias = key_alias(appid) + if alias in fps: + data[appid] = {'signer': fps[key_alias(appid)]} + + jar_file = os.path.join('repo', 'signer-index.jar') + output = json.dumps(data, indent=indent) + with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar: + jar.writestr('signer-index.json', output) + with open(os.path.join('repo', 'signer-index.json'), 'w') as fp: + fp.write(output) + sign_sig_key_fingerprint_list(jar_file) + + +def status_update_json(generatedKeys, signedApks): + """Output a JSON file with metadata about this run.""" + logging.debug(_('Outputting JSON')) + output = common.setup_status_output(start_timestamp) + output['apksigner'] = shutil.which(config.get('apksigner', '')) + output['jarsigner'] = shutil.which(config.get('jarsigner', '')) + output['keytool'] = shutil.which(config.get('keytool', '')) + if generatedKeys: + output['generatedKeys'] = generatedKeys + if signedApks: + output['signedApks'] = signedApks + common.write_status_json(output) + + +def check_for_key_collisions(allapps): + """Make sure there's no collision in keyaliases from apps. + + It was suggested at + https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit + that a package could be crafted, such that it would use the same signing + key as an existing app. While it may be theoretically possible for such a + colliding package ID to be generated, it seems virtually impossible that + the colliding ID would be something that would be a) a valid package ID, + and b) a sane-looking ID that would make its way into the repo. + Nonetheless, to be sure, before publishing we check that there are no + collisions, and refuse to do any publishing if that's the case. + + Parameters + ---------- + allapps + a dict of all apps to process + + Returns + ------- + a list of all aliases corresponding to allapps + """ + allaliases = [] + for appid in allapps: + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(appid.encode('utf-8')) + keyalias = m.hexdigest()[:8] + if keyalias in allaliases: + logging.error(_("There is a keyalias collision - publishing halted")) + sys.exit(1) + allaliases.append(keyalias) + return allaliases + + +def create_key_if_not_existing(keyalias): + """Ensure a signing key with the given keyalias exists. + + Returns + ------- + boolean + True if a new key was created, False otherwise + """ + # See if we already have a key for this application, and + # if not generate one... 
+ env_vars = { + 'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config.get('keypass', ""), + } + cmd = [ + config['keytool'], + '-list', + '-alias', + keyalias, + '-keystore', + config['keystore'], + '-storepass:env', + 'FDROID_KEY_STORE_PASS', + ] + if config['keystore'] == 'NONE': + cmd += config['smartcardoptions'] + p = FDroidPopen(cmd, envs=env_vars) + if p.returncode != 0: + logging.info("Key does not exist - generating...") + cmd = [ + config['keytool'], + '-genkey', + '-keystore', + config['keystore'], + '-alias', + keyalias, + '-keyalg', + 'RSA', + '-keysize', + '2048', + '-validity', + '10000', + '-storepass:env', + 'FDROID_KEY_STORE_PASS', + '-dname', + config['keydname'], + ] + if config['keystore'] == 'NONE': + cmd += config['smartcardoptions'] + else: + cmd += '-keypass:env', 'FDROID_KEY_PASS' + p = FDroidPopen(cmd, envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to generate key", p.output) + return True + else: + return False + def main(): - - global config, options + global config # Parse command line... - parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - (options, args) = parser.parse_args() + parser = ArgumentParser( + usage="%(prog)s [options] " "[APPID[:VERCODE] [APPID[:VERCODE] ...]]" + ) + common.setup_global_opts(parser) + parser.add_argument( + "-e", + "--error-on-failed", + action="store_true", + default=False, + help=_("When signing or verifying fails, exit with an error code."), + ) + parser.add_argument( + "appid", + nargs='*', + help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), + ) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - config = common.read_config(options) + config = common.read_config() + + if not ('jarsigner' in config and 'keytool' in config): + logging.critical( + _('Java JDK not found! Install in standard location or set java_paths!') + ) + sys.exit(1) + + common.assert_config_keystore(config) log_dir = 'logs' if not os.path.isdir(log_dir): - print "Creating log directory" + logging.info(_("Creating log directory")) os.makedirs(log_dir) tmp_dir = 'tmp' if not os.path.isdir(tmp_dir): - print "Creating temporary directory" + logging.info(_("Creating temporary directory")) os.makedirs(tmp_dir) output_dir = 'repo' if not os.path.isdir(output_dir): - print "Creating output directory" + logging.info(_("Creating output directory")) os.makedirs(output_dir) unsigned_dir = 'unsigned' if not os.path.isdir(unsigned_dir): - print "No unsigned directory - nothing to do" - sys.exit(0) + logging.warning(_("No unsigned directory - nothing to do")) + sys.exit(1) + binaries_dir = os.path.join(unsigned_dir, 'binaries') + + if not config['keystore'] == "NONE" and not os.path.exists(config['keystore']): + logging.error("Config error - missing '{0}'".format(config['keystore'])) + sys.exit(1) - # It was suggested at https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit - # that a package could be crafted, such that it would use the same signing - # key as an existing app. 
While it may be theoretically possible for such a - # colliding package ID to be generated, it seems virtually impossible that - # the colliding ID would be something that would be a) a valid package ID, - # and b) a sane-looking ID that would make its way into the repo. - # Nonetheless, to be sure, before publishing we check that there are no - # collisions, and refuse to do any publishing if that's the case... allapps = metadata.read_metadata() - vercodes = common.read_pkg_args(args, True) - allaliases = [] - for app in allapps: - m = md5.new() - m.update(app['id']) - keyalias = m.hexdigest()[:8] - if keyalias in allaliases: - print "There is a keyalias collision - publishing halted" - sys.exit(1) - allaliases.append(keyalias) - if options.verbose: - print "{0} apps, {0} key aliases".format(len(allapps), len(allaliases)) + vercodes = common.read_pkg_args(options.appid, True) + common.get_metadata_files(vercodes) # only check appids + signed_apks = dict() + generated_keys = dict() + allaliases = check_for_key_collisions(allapps) + logging.info( + ngettext( + '{0} app, {1} key aliases', '{0} apps, {1} key aliases', len(allapps) + ).format(len(allapps), len(allaliases)) + ) - # Process any apks that are waiting to be signed... - for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))): - - appid, vercode = common.apknameinfo(apkfile) + failed = 0 + # Process any APKs or ZIPs that are waiting to be signed... + for apkfile in sorted( + glob.glob(os.path.join(unsigned_dir, '*.apk')) + + glob.glob(os.path.join(unsigned_dir, '*.zip')) + ): + appid, vercode = common.publishednameinfo(apkfile) apkfilename = os.path.basename(apkfile) if vercodes and appid not in vercodes: continue if appid in vercodes and vercodes[appid]: if vercode not in vercodes[appid]: continue - print "Processing " + apkfile + logging.info(_("Processing {apkfilename}").format(apkfilename=apkfile)) + + # There ought to be valid metadata for this app, otherwise why are we + # trying to publish it? + if appid not in allapps: + logging.error( + "Unexpected {0} found in unsigned directory".format(apkfilename) + ) + sys.exit(1) + app = allapps[appid] + + build = None + for b in app.get("Builds", ()): + if b.get("versionCode") == vercode: + build = b + if app.Binaries or (build and build.binary): + # It's an app where we build from source, and verify the apk + # contents against a developer's binary, and then publish their + # version if everything checks out. + # The binary should already have been retrieved during the build + # process. + + srcapk = re.sub(r'\.apk$', '.binary.apk', apkfile) + srcapk = srcapk.replace(unsigned_dir, binaries_dir) + + if not os.path.isfile(srcapk): + logging.error( + "...reference binary missing - publish skipped: '{refpath}'".format( + refpath=srcapk + ) + ) + failed += 1 + else: + # Compare our unsigned one with the downloaded one... + compare_result = common.verify_apks(srcapk, apkfile, tmp_dir) + if compare_result: + logging.error( + "...verification failed - publish skipped : {result}".format( + result=compare_result + ) + ) + failed += 1 + else: + # Success! So move the downloaded file to the repo, and remove + # our built version. 
+ shutil.move(srcapk, os.path.join(output_dir, apkfilename)) + os.remove(apkfile) + + publish_source_tarball(apkfilename, unsigned_dir, output_dir) + logging.info('Published ' + apkfilename) + + elif apkfile.endswith('.zip'): + # OTA ZIPs built by fdroid do not need to be signed by jarsigner, + # just to be moved into place in the repo + shutil.move(apkfile, os.path.join(output_dir, apkfilename)) + publish_source_tarball(apkfilename, unsigned_dir, output_dir) + logging.info('Published ' + apkfilename) - # Figure out the key alias name we'll use. Only the first 8 - # characters are significant, so we'll use the first 8 from - # the MD5 of the app's ID and hope there are no collisions. - # If a collision does occur later, we're going to have to - # come up with a new alogrithm, AND rename all existing keys - # in the keystore! - if appid in config['keyaliases']: - # For this particular app, the key alias is overridden... - keyalias = config['keyaliases'][appid] - if keyalias.startswith('@'): - m = md5.new() - m.update(keyalias[1:]) - keyalias = m.hexdigest()[:8] else: - m = md5.new() - m.update(appid) - keyalias = m.hexdigest()[:8] - print "Key alias: " + keyalias + # It's a 'normal' app, i.e. we sign and publish it... + skipsigning = False - # See if we already have a key for this application, and - # if not generate one... - p = subprocess.Popen(['keytool', '-list', - '-alias', keyalias, '-keystore', config['keystore'], - '-storepass', config['keystorepass']], stdout=subprocess.PIPE) - output = p.communicate()[0] - if p.returncode !=0: - print "Key does not exist - generating..." - p = subprocess.Popen(['keytool', '-genkey', - '-keystore', config['keystore'], '-alias', keyalias, - '-keyalg', 'RSA', '-keysize', '2048', - '-validity', '10000', - '-storepass', config['keystorepass'], - '-keypass', config['keypass'], - '-dname', config['keydname']], stdout=subprocess.PIPE) - output = p.communicate()[0] - print output - if p.returncode != 0: - raise BuildException("Failed to generate key") + # First we handle signatures for this app from local metadata + signingfiles = common.metadata_find_developer_signing_files(appid, vercode) + if signingfiles: + # There's a signature of the app developer present in our + # metadata. This means we're going to prepare both a locally + # signed APK and a version signed with the developers key. - # Sign the application... - p = subprocess.Popen(['jarsigner', '-keystore', config['keystore'], - '-storepass', config['keystorepass'], - '-keypass', config['keypass'], '-sigalg', - 'MD5withRSA', '-digestalg', 'SHA1', - apkfile, keyalias], stdout=subprocess.PIPE) - output = p.communicate()[0] - print output - if p.returncode != 0: - raise BuildException("Failed to sign application") + signature_file, _ignored, manifest, v2_files = signingfiles - # Zipalign it... - p = subprocess.Popen([os.path.join(config['sdk_path'],'tools','zipalign'), - '-v', '4', apkfile, - os.path.join(output_dir, apkfilename)], - stdout=subprocess.PIPE) - output = p.communicate()[0] - print output - if p.returncode != 0: - raise BuildException("Failed to align application") - os.remove(apkfile) + with open(signature_file, 'rb') as f: + devfp = common.signer_fingerprint_short( + common.get_certificate(f.read()) + ) + devsigned = '{}_{}_{}.apk'.format(appid, vercode, devfp) + devsignedtmp = os.path.join(tmp_dir, devsigned) - # Move the source tarball into the output directory... 
- tarfilename = apkfilename[:-4] + '_src.tar.gz' - shutil.move(os.path.join(unsigned_dir, tarfilename), - os.path.join(output_dir, tarfilename)) + common.apk_implant_signatures(apkfile, devsignedtmp, manifest=manifest) + if common.verify_apk_signature(devsignedtmp): + shutil.move(devsignedtmp, os.path.join(output_dir, devsigned)) + else: + os.remove(devsignedtmp) + logging.error('...verification failed - skipping: %s', devsigned) + skipsigning = True + failed += 1 - print 'Published ' + apkfilename + # Now we sign with the F-Droid key. + if not skipsigning: + keyalias = key_alias(appid) + logging.info("Key alias: " + keyalias) + + if create_key_if_not_existing(keyalias): + generated_keys[appid] = keyalias + + signed_apk_path = os.path.join(output_dir, apkfilename) + if os.path.exists(signed_apk_path): + raise BuildException( + _( + "Refusing to sign '{path}', file exists in both {dir1} and {dir2} folder." + ).format(path=apkfilename, dir1=unsigned_dir, dir2=output_dir) + ) + + # Sign the application... + common.sign_apk(apkfile, signed_apk_path, keyalias) + if appid not in signed_apks: + signed_apks[appid] = [] + signed_apks[appid].append({"keyalias": keyalias, "filename": apkfile}) + + publish_source_tarball(apkfilename, unsigned_dir, output_dir) + logging.info('Published ' + apkfilename) + + store_publish_signer_fingerprints(allapps.keys()) + status_update_json(generated_keys, signed_apks) + logging.info('published list signing-key fingerprints') + + if failed: + logging.error(_('%d APKs failed to be signed or verified!') % failed) + if options.error_on_failed: + sys.exit(failed) if __name__ == "__main__": main() - diff --git a/fdroidserver/readmeta.py b/fdroidserver/readmeta.py new file mode 100644 index 00000000..b3ef7c3b --- /dev/null +++ b/fdroidserver/readmeta.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +# +# readmeta.py - part of the FDroid server tools +# Copyright (C) 2014 Daniel Martí +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from argparse import ArgumentParser + +from . import common, metadata + + +def main(): + parser = ArgumentParser() + common.setup_global_opts(parser) + metadata.add_metadata_arguments(parser) + options = parser.parse_args() + metadata.warnings_action = options.W + common.read_config() + + metadata.read_metadata() + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/rewritemeta.py b/fdroidserver/rewritemeta.py index e463cf14..4bbe810d 100644 --- a/fdroidserver/rewritemeta.py +++ b/fdroidserver/rewritemeta.py @@ -1,7 +1,7 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # # rewritemeta.py - part of the FDroid server tools +# This cleans up the original .yml metadata file format. # Copyright (C) 2010-12, Ciaran Gultnieks, ciaran@ciarang.com # # This program is free software: you can redistribute it and/or modify @@ -17,35 +17,97 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. 
If not, see . -import os -from optparse import OptionParser -import common, metadata +import io +import logging +import shutil +import tempfile +from argparse import ArgumentParser +from pathlib import Path + +from . import _, common, metadata config = None -options = None + + +def proper_format(app): + s = io.StringIO() + # TODO: currently reading entire file again, should reuse first + # read in metadata.py + cur_content = Path(app.metadatapath).read_text(encoding='utf-8') + if Path(app.metadatapath).suffix == '.yml': + metadata.write_yaml(s, app) + content = s.getvalue() + s.close() + return content == cur_content + + +def remove_blank_flags_from_builds(builds): + """Remove unset entries from Builds so they are not written out.""" + if not builds: + return list() + newbuilds = list() + for build in builds: + new = dict() + for k in metadata.build_flags: + v = build.get(k) + # 0 is valid value, it should not be stripped + if v is None or v is False or v == '' or v == dict() or v == list(): + continue + new[k] = v + newbuilds.append(new) + return newbuilds + def main(): + global config - global config, options + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument( + "-l", + "--list", + action="store_true", + default=False, + help=_("List files that would be reformatted (dry run)"), + ) + parser.add_argument( + "appid", nargs='*', help=_("application ID of file to operate on") + ) + metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - # Parse command line... - parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - (options, args) = parser.parse_args() + config = common.read_config() - config = common.read_config(options) + apps = common.read_app_args(options.appid) - # Get all apps... - allapps = metadata.read_metadata(xref=False) - apps = common.read_app_args(args, allapps, False) + for appid, app in apps.items(): + path = Path(app.metadatapath) + if path.suffix == '.yml': + logging.info(_("Rewriting '{appid}'").format(appid=appid)) + else: + logging.warning(_('Cannot rewrite "{path}"').format(path=path)) + continue - for app in apps: - print "Writing " + app['id'] - metadata.write_metadata(os.path.join('metadata', app['id'])+'.txt', app) + if options.list: + if not proper_format(app): + print(path) + continue + + # TODO these should be moved to metadata.write_yaml() + builds = remove_blank_flags_from_builds(app.get('Builds')) + if builds: + app['Builds'] = builds + + # rewrite to temporary file before overwriting existing + # file in case there's a bug in write_metadata + with tempfile.TemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) / path.name + metadata.write_metadata(tmp_path, app) + shutil.move(tmp_path, path) + + logging.debug(_("Finished")) - print "Finished." if __name__ == "__main__": main() - diff --git a/fdroidserver/scanner.py b/fdroidserver/scanner.py index 64cebfb2..f28e3803 100644 --- a/fdroidserver/scanner.py +++ b/fdroidserver/scanner.py @@ -1,5 +1,4 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- +#!/usr/bin/env python3 # # scanner.py - part of the FDroid server tools # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com @@ -17,96 +16,2988 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
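# Illustrative sketch (not part of this patch): what
# remove_blank_flags_from_builds() in rewritemeta.py above keeps and
# drops. The build dict and its keys are hypothetical (the real function
# also limits keys to metadata.build_flags); note that 0 survives while
# None, False, '', {} and [] are stripped.
build = {
    'versionCode': 7,
    'commit': 'v1.2.3',
    'timeout': 0,         # kept: 0 is a valid value
    'subdir': '',         # dropped
    'gradle': [],         # dropped
    'submodules': False,  # dropped
}
cleaned = {
    k: v
    for k, v in build.items()
    if not (v is None or v is False or v == '' or v == dict() or v == list())
}
print(cleaned)  # {'versionCode': 7, 'commit': 'v1.2.3', 'timeout': 0}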
+import itertools +import json +import logging import os +import re +import sys import traceback -from optparse import OptionParser -import common, metadata -from common import BuildException -from common import VCSException +import urllib.parse +import urllib.request +import zipfile +from argparse import ArgumentParser +from dataclasses import dataclass, field, fields +from datetime import datetime, timedelta, timezone +from enum import IntEnum +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import Union + +try: + import magic +except ImportError: + import puremagic as magic + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + +from . import _, common, metadata, scanner +from .exception import BuildException, ConfigurationException, VCSException + + +@dataclass +class MessageStore: + infos: list = field(default_factory=list) + warnings: list = field(default_factory=list) + errors: list = field(default_factory=list) + + +MAVEN_URL_REGEX = re.compile( + r"""\smaven\s*(?:{.*?(?:setUrl|url)|\(\s*(?:url)?)\s*=?\s*(?:uri|URI|Uri\.create)?\(?\s*["']?([^\s"']+)["']?[^})]*[)}]""", + re.DOTALL, +) + +DEPFILE = { + "Cargo.toml": ["Cargo.lock"], + "pubspec.yaml": ["pubspec.lock"], + "package.json": ["package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lock"], +} + +SCANNER_CACHE_VERSION = 1 + +DEFAULT_CATALOG_PREFIX_REGEX = re.compile( + r'''defaultLibrariesExtensionName\s*=\s*['"](\w+)['"]''' +) +GRADLE_CATALOG_FILE_REGEX = re.compile( + r'''(?:create\()?['"]?(\w+)['"]?\)?\s*\{[^}]*from\(files\(['"]([^"]+)['"]\)\)''' +) +VERSION_CATALOG_REGEX = re.compile(r'versionCatalogs\s*\{') + +APK_SIGNING_BLOCK_IDS = { + # https://source.android.com/docs/security/features/apksigning/v2#apk-signing-block + # 0x7109871a: 'APK signature scheme v2', + # https://source.android.com/docs/security/features/apksigning/v3#apk-signing-block + # 0xf05368c0: 'APK signature scheme v3', + # See "Security metadata in early 2018" + # https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html + 0x2146444E: 'Google Play Signature aka "Frosting"', + # 0x42726577: 'Verity padding', + # 0x6DFF800D: 'Source stamp V2 X509 cert', + # JSON with some metadata, used by Chinese company Meituan + 0x71777777: 'Meituan payload', + # Dependencies metadata generated by Gradle and encrypted by Google Play. + # '...The data is compressed, encrypted by a Google Play signing key...' + # https://developer.android.com/studio/releases/gradle-plugin#dependency-metadata + 0x504B4453: 'Dependency metadata', +} + + +class ExitCode(IntEnum): + NONFREE_CODE = 1 + + +class GradleVersionCatalog: + """Parse catalog from libs.versions.toml. + + https://docs.gradle.org/current/userguide/platforms.html + """ + + def __init__(self, catalog): + self.version = { + alias: self.get_version(version) + for alias, version in catalog.get("versions", {}).items() + } + self.libraries = { + self.alias_to_accessor(alias): self.library_to_coordinate(library) + for alias, library in catalog.get("libraries", {}).items() + } + self.plugins = { + self.alias_to_accessor(alias): self.plugin_to_coordinate(plugin) + for alias, plugin in catalog.get("plugins", {}).items() + } + self.bundles = { + self.alias_to_accessor(alias): self.bundle_to_coordinates(bundle) + for alias, bundle in catalog.get("bundles", {}).items() + } + + @staticmethod + def alias_to_accessor(alias: str) -> str: + """Covert alias to accessor. 
+ + https://docs.gradle.org/current/userguide/platforms.html#sub:mapping-aliases-to-accessors + Alias is used to define a lib in catalog. Accessor is used to access it. + """ + return alias.replace("-", ".").replace("_", ".") + + def get_version(self, version: Union[dict, str]) -> str: + if isinstance(version, str): + return version + ref = version.get("ref") + if ref: + return self.version.get(ref, "") + return ( + version.get("prefer", "") + or version.get("require", "") + or version.get("strictly", "") + ) + + def library_to_coordinate(self, library: Union[dict, str]) -> str: + """Generate the Gradle dependency coordinate from catalog.""" + if isinstance(library, str): + return library + module = library.get("module") + if not module: + group = library.get("group") + name = library.get("name") + if group and name: + module = f"{group}:{name}" + else: + return "" + + version = library.get("version") + if version: + return f"{module}:{self.get_version(version)}" + else: + return module + + def plugin_to_coordinate(self, plugin: Union[dict, str]) -> str: + """Generate the Gradle plugin coordinate from catalog.""" + if isinstance(plugin, str): + return plugin + id = plugin.get("id") + if not id: + return "" + + version = plugin.get("version") + if version: + return f"{id}:{self.get_version(version)}" + else: + return id + + def bundle_to_coordinates(self, bundle: list[str]) -> list[str]: + """Generate the Gradle dependency bundle coordinate from catalog.""" + coordinates = [] + for alias in bundle: + library = self.libraries.get(self.alias_to_accessor(alias)) + if library: + coordinates.append(library) + return coordinates + + def get_coordinate(self, accessor: str) -> list[str]: + """Get the Gradle coordinate from the catalog with an accessor.""" + if accessor.startswith("plugins."): + return [ + self.plugins.get(accessor[8:].removesuffix(".asLibraryDependency"), "") + ] + if accessor.startswith("bundles."): + return self.bundles.get(accessor[8:], []) + return [self.libraries.get(accessor, "")] + + +def get_catalogs(root: str) -> dict[str, GradleVersionCatalog]: + """Get all Gradle dependency catalogs from settings.gradle[.kts]. + + Returns a dict with the extension and the corresponding catalog. + The extension is used as the prefix of the accessor to access libs in the catalog. 
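# Illustrative sketch (not part of this patch): how a catalog entry in
# gradle/libs.versions.toml resolves to a Maven coordinate, mirroring
# alias_to_accessor(), get_version() and library_to_coordinate() above.
# The inline TOML is a minimal, hypothetical catalog.
import sys

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib

catalog = tomllib.loads(
    '[versions]\n'
    'coroutines = "1.8.1"\n'
    '[libraries]\n'
    'kotlinx-coroutines-core = '
    '{ module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", '
    'version.ref = "coroutines" }\n'
)
alias = 'kotlinx-coroutines-core'
accessor = alias.replace('-', '.').replace('_', '.')  # used in Gradle as libs.kotlinx.coroutines.core
lib = catalog['libraries'][alias]
version = catalog['versions'][lib['version']['ref']]
print(accessor, '->', '%s:%s' % (lib['module'], version))
# kotlinx.coroutines.core -> org.jetbrains.kotlinx:kotlinx-coroutines-core:1.8.1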
+ """ + root = Path(root) + catalogs = {} + default_prefix = "libs" + catalog_files_m = [] + + def find_block_end(s, start): + pat = re.compile("[{}]") + depth = 1 + for m in pat.finditer(s, pos=start): + if m.group() == "{": + depth += 1 + else: + depth -= 1 + if depth == 0: + return m.start() + else: + return -1 + + groovy_file = root / "settings.gradle" + kotlin_file = root / "settings.gradle.kts" + if groovy_file.is_file(): + gradle_file = groovy_file + elif kotlin_file.is_file(): + gradle_file = kotlin_file + else: + return {} + + s = gradle_file.read_text(encoding="utf-8") + version_catalogs_m = VERSION_CATALOG_REGEX.search(s) + if version_catalogs_m: + start = version_catalogs_m.end() + end = find_block_end(s, start) + catalog_files_m = GRADLE_CATALOG_FILE_REGEX.finditer(s, start, end) + + m_default = DEFAULT_CATALOG_PREFIX_REGEX.search(s) + if m_default: + default_prefix = m_default.group(1) + default_catalog_file = Path(root) / "gradle/libs.versions.toml" + if default_catalog_file.is_file(): + with default_catalog_file.open("rb") as f: + catalogs[default_prefix] = GradleVersionCatalog(tomllib.load(f)) + for m in catalog_files_m: + catalog_file = Path(root) / m.group(2).replace("$rootDir/", "") + if catalog_file.is_file(): + with catalog_file.open("rb") as f: + catalogs[m.group(1)] = GradleVersionCatalog(tomllib.load(f)) + return catalogs + + +def get_gradle_compile_commands(build): + compileCommands = [ + 'alias', + 'api', + 'apk', + 'classpath', + 'compile', + 'compileOnly', + 'id', + 'implementation', + 'provided', + 'runtimeOnly', + ] + buildTypes = ['', 'release'] + if build.gradle and build.gradle != ['yes']: + flavors = common.calculate_gradle_flavor_combination(build.gradle) + else: + flavors = [''] + + return [''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)] + + +def get_gradle_compile_commands_without_catalog(build): + return [ + re.compile(rf'''\s*{c}.*\s*\(?['"].*['"]''', re.IGNORECASE) + for c in get_gradle_compile_commands(build) + ] + + +def get_gradle_compile_commands_with_catalog(build, prefix): + return [ + re.compile(rf'\s*{c}.*\s*\(?{prefix}\.([a-z0-9.]+)', re.IGNORECASE) + for c in get_gradle_compile_commands(build) + ] + + +def get_embedded_classes(apkfile, depth=0): + """Get the list of Java classes embedded into all DEX files. 
+ + :return: set of Java classes names as string + """ + if depth > 10: # zipbomb protection + return {_('Max recursion depth in ZIP file reached: %s') % apkfile} + + archive_regex = re.compile(r'.*\.(aab|aar|apk|apks|jar|war|xapk|zip)$') + class_regex = re.compile(r'classes.*\.dex') + classes = set() + + try: + with TemporaryDirectory() as tmp_dir, zipfile.ZipFile(apkfile, 'r') as apk_zip: + for info in apk_zip.infolist(): + # apk files can contain apk files, again + with apk_zip.open(info) as apk_fp: + if zipfile.is_zipfile(apk_fp): + classes = classes.union(get_embedded_classes(apk_fp, depth + 1)) + if not archive_regex.search(info.filename): + classes.add( + 'ZIP file without proper file extension: %s' + % info.filename + ) + continue + + with apk_zip.open(info.filename) as fp: + file_magic = fp.read(3) + if file_magic == b'dex': + if not class_regex.search(info.filename): + classes.add('DEX file with fake name: %s' % info.filename) + apk_zip.extract(info, tmp_dir) + run = common.SdkToolsPopen( + ["dexdump", '{}/{}'.format(tmp_dir, info.filename)], + output=False, + ) + classes = classes.union( + set(re.findall(r'[A-Z]+((?:\w+\/)+\w+)', run.output)) + ) + except zipfile.BadZipFile as ex: + return {_('Problem with ZIP file: %s, error %s') % (apkfile, ex)} + + return classes + + +def _datetime_now(): + """Get datetime.now(), using this funciton allows mocking it for testing.""" + return datetime.now(timezone.utc) + + +def _scanner_cachedir(): + """Get `Path` to fdroidserver cache dir.""" + cfg = common.get_config() + if not cfg: + raise ConfigurationException('config not initialized') + if "cachedir_scanner" not in cfg: + raise ConfigurationException("could not load 'cachedir_scanner' from config") + cachedir = Path(cfg["cachedir_scanner"]) + cachedir.mkdir(exist_ok=True, parents=True) + return cachedir + + +class SignatureDataMalformedException(Exception): + pass + + +class SignatureDataOutdatedException(Exception): + pass + + +class SignatureDataCacheMissException(Exception): + pass + + +class SignatureDataNoDefaultsException(Exception): + pass + + +class SignatureDataVersionMismatchException(Exception): + pass + + +class SignatureDataController: + def __init__(self, name, filename, url): + self.name = name + self.filename = filename + self.url = url + # by default we assume cache is valid indefinitely + self.cache_duration = timedelta(days=999999) + self.data = {} + + def check_data_version(self): + if self.data.get("version") != SCANNER_CACHE_VERSION: + raise SignatureDataVersionMismatchException() + + def check_last_updated(self): + """Check if the last_updated value is ok and raise an exception if expired or inaccessible. 
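# Illustrative sketch (not part of this patch): the staleness test this
# method performs. A cache file is still fresh while last_updated plus
# cache_duration lies in the future; the one-day-old timestamp and
# two-day window below are hypothetical.
from datetime import datetime, timedelta, timezone

cache_duration = timedelta(days=2)
last_updated = (datetime.now(timezone.utc) - timedelta(days=1)).timestamp()

delta = (
    datetime.fromtimestamp(last_updated, timezone.utc) + cache_duration
) - datetime.now(timezone.utc)
print('still fresh' if delta > timedelta(seconds=0) else 'outdated')  # still fresh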
+ + :raises SignatureDataMalformedException: when timestamp value is + inaccessible or not parse-able + :raises SignatureDataOutdatedException: when timestamp is older then + `self.cache_duration` + """ + last_updated = self.data.get("last_updated", None) + if last_updated: + try: + last_updated = datetime.fromtimestamp(last_updated, timezone.utc) + except ValueError as e: + raise SignatureDataMalformedException() from e + except TypeError as e: + raise SignatureDataMalformedException() from e + delta = (last_updated + self.cache_duration) - scanner._datetime_now() + if delta > timedelta(seconds=0): + logging.debug( + _('next {name} cache update due in {time}').format( + name=self.filename, time=delta + ) + ) + else: + raise SignatureDataOutdatedException() + + def fetch(self): + try: + self.fetch_signatures_from_web() + self.write_to_cache() + except Exception as e: + raise Exception( + _("downloading scanner signatures from '{}' failed").format(self.url) + ) from e + + def load(self): + try: + try: + self.load_from_cache() + self.verify_data() + self.check_last_updated() + except SignatureDataCacheMissException: + self.load_from_defaults() + except (SignatureDataOutdatedException, SignatureDataNoDefaultsException): + self.fetch_signatures_from_web() + self.write_to_cache() + except ( + SignatureDataMalformedException, + SignatureDataVersionMismatchException, + ) as e: + logging.critical( + _( + "scanner cache is malformed! You can clear it with: '{clear}'" + ).format( + clear='rm -r {}'.format(common.get_config()['cachedir_scanner']) + ) + ) + raise e + + def load_from_defaults(self): + raise SignatureDataNoDefaultsException() + + def load_from_cache(self): + sig_file = scanner._scanner_cachedir() / self.filename + if not sig_file.exists(): + raise SignatureDataCacheMissException() + with open(sig_file) as f: + self.set_data(json.load(f)) + + def write_to_cache(self): + sig_file = scanner._scanner_cachedir() / self.filename + with open(sig_file, "w", encoding="utf-8") as f: + json.dump(self.data, f, indent=2) + logging.debug("write '{}' to cache".format(self.filename)) + + def verify_data(self): + """Clean and validate `self.data`. + + Right now this function does just a basic key sanitation. 
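# Illustrative sketch (not part of this patch): the shape of a cache file
# as written by write_to_cache() above and pruned here. The 'FooTracker'
# signature and the stray 'comment' key are hypothetical; only the keys
# in valid_keys survive.
import time

cache_entry = {
    'version': 1,             # must equal SCANNER_CACHE_VERSION
    'timestamp': time.time(),
    'last_updated': time.time(),
    'cache_duration': 86400,  # optional, in seconds
    'signatures': {
        'FooTracker': {'gradle_signatures': [r'com\.foo\.tracker']},
    },
    'comment': 'anything else gets dropped',
}
valid_keys = ['timestamp', 'last_updated', 'version', 'signatures', 'cache_duration']
cleaned = {k: v for k, v in cache_entry.items() if k in valid_keys}
print(sorted(cleaned))  # ['cache_duration', 'last_updated', 'signatures', 'timestamp', 'version']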
+ """ + self.check_data_version() + valid_keys = [ + 'timestamp', + 'last_updated', + 'version', + 'signatures', + 'cache_duration', + ] + + for k in list(self.data.keys()): + if k not in valid_keys: + del self.data[k] + + def set_data(self, new_data): + self.data = new_data + if 'cache_duration' in new_data: + self.cache_duration = timedelta(seconds=new_data['cache_duration']) + + def fetch_signatures_from_web(self): + if not self.url.startswith("https://"): + raise Exception(_("can't open non-https url: '{};".format(self.url))) + logging.debug(_("downloading '{}'").format(self.url)) + with urllib.request.urlopen(self.url) as f: # nosec B310 scheme filtered above + self.set_data(json.load(f)) + self.data['last_updated'] = scanner._datetime_now().timestamp() + + +class ExodusSignatureDataController(SignatureDataController): + def __init__(self): + super().__init__( + 'Exodus signatures', + 'exodus.json', + 'https://reports.exodus-privacy.eu.org/api/trackers', + ) + self.cache_duration = timedelta(days=1) # refresh exodus cache after one day + self.has_trackers_json_key = True + + def fetch_signatures_from_web(self): + logging.debug(_("downloading '{}'").format(self.url)) + + data = { + "signatures": {}, + "timestamp": scanner._datetime_now().timestamp(), + "last_updated": scanner._datetime_now().timestamp(), + "version": SCANNER_CACHE_VERSION, + } + + if not self.url.startswith("https://"): + raise Exception(_("can't open non-https url: '{};".format(self.url))) + with urllib.request.urlopen(self.url) as f: # nosec B310 scheme filtered above + trackerlist = json.load(f) + if self.has_trackers_json_key: + trackerlist = trackerlist["trackers"].values() + for tracker in trackerlist: + if tracker.get('code_signature'): + data["signatures"][tracker["name"]] = { + "name": tracker["name"], + "warn_code_signatures": [tracker["code_signature"]], + # exodus also provides network signatures, unused atm. + # "network_signatures": [tracker["network_signature"]], + "AntiFeatures": ["Tracking"], # TODO + "license": "NonFree", # We assume all trackers in exodus + # are non-free, although free + # trackers like piwik, acra, + # etc. might be listed by exodus + # too. 
+ } + self.set_data(data) + + +class EtipSignatureDataController(ExodusSignatureDataController): + def __init__(self): + super().__init__() + self.name = 'ETIP signatures' + self.filename = 'etip.json' + self.url = 'https://etip.exodus-privacy.eu.org/api/trackers/?format=json' + self.has_trackers_json_key = False + + +class SUSSDataController(SignatureDataController): + def __init__(self): + super().__init__( + 'SUSS', 'suss.json', 'https://fdroid.gitlab.io/fdroid-suss/suss.json' + ) + + def load_from_defaults(self): + self.set_data(json.loads(SUSS_DEFAULT)) + + +class ScannerTool: + refresh_allowed = True + + def __init__(self): + # we could add support for loading additional signature source + # definitions from config.yml here + + self.scanner_data_lookup() + + options = common.get_options() + options_refresh_scanner = ( + hasattr(options, "refresh_scanner") + and options.refresh_scanner + and ScannerTool.refresh_allowed + ) + if options_refresh_scanner or common.get_config().get('refresh_scanner'): + self.refresh() + + self.load() + self.compile_regexes() + + def scanner_data_lookup(self): + sigsources = common.get_config().get('scanner_signature_sources', []) + logging.debug( + "scanner is configured to use signature data from: '{}'".format( + "', '".join(sigsources) + ) + ) + self.sdcs = [] + for i, source_url in enumerate(sigsources): + if source_url.lower() == 'suss': + self.sdcs.append(SUSSDataController()) + elif source_url.lower() == 'exodus': + self.sdcs.append(ExodusSignatureDataController()) + elif source_url.lower() == 'etip': + self.sdcs.append(EtipSignatureDataController()) + else: + u = urllib.parse.urlparse(source_url) + if u.scheme != 'https' or u.path == "": + raise ConfigurationException( + "Invalid 'scanner_signature_sources' configuration: '{}'. " + "Has to be a valid HTTPS-URL or match a predefined " + "constants: 'suss', 'exodus'".format(source_url) + ) + self.sdcs.append( + SignatureDataController( + source_url, + '{}_{}'.format(i, os.path.basename(u.path)), + source_url, + ) + ) + + def load(self): + for sdc in self.sdcs: + sdc.load() + + def compile_regexes(self): + self.regexs = { + 'err_code_signatures': {}, + 'err_gradle_signatures': {}, + 'warn_code_signatures': {}, + 'warn_gradle_signatures': {}, + } + for sdc in self.sdcs: + for signame, sigdef in sdc.data.get('signatures', {}).items(): + for sig in sigdef.get('code_signatures', []): + self.regexs['err_code_signatures'][sig] = re.compile( + '.*' + sig, re.IGNORECASE + ) + for sig in sigdef.get('gradle_signatures', []): + self.regexs['err_gradle_signatures'][sig] = re.compile( + '.*' + sig, re.IGNORECASE + ) + for sig in sigdef.get('warn_code_signatures', []): + self.regexs['warn_code_signatures'][sig] = re.compile( + '.*' + sig, re.IGNORECASE + ) + for sig in sigdef.get('warn_gradle_signatures', []): + self.regexs['warn_gradle_signatures'][sig] = re.compile( + '.*' + sig, re.IGNORECASE + ) + + def refresh(self): + for sdc in self.sdcs: + sdc.fetch_signatures_from_web() + sdc.write_to_cache() + + def add(self, new_controller: SignatureDataController): + self.sdcs.append(new_controller) + self.compile_regexes() + + +# TODO: change this from singleton instance to dependency injection +# use `_get_tool()` instead of accessing this directly +_SCANNER_TOOL = None + + +def _get_tool(): + """Lazy loading function for getting a ScannerTool instance. + + ScannerTool initialization need to access `common.config` values. Those are only available after initialization through `common.read_config()`. 
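# Illustrative sketch (not part of this patch): how compile_regexes()
# above turns one signature definition into the patterns scan_source()
# later matches against build.gradle lines. The 'com\.foo\.analytics'
# signature and the gradle line below are hypothetical.
import re

sigdef = {'gradle_signatures': [r'com\.foo\.analytics']}
err_gradle_signatures = {
    sig: re.compile('.*' + sig, re.IGNORECASE)
    for sig in sigdef['gradle_signatures']
}
line = "    implementation 'com.foo.analytics:sdk:3.2.1'"
hits = [sig for sig, rx in err_gradle_signatures.items() if rx.match(line)]
print(hits)  # ['com\\.foo\\.analytics']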
So this factory assumes config was called at an erlier point in time. + """ + if not scanner._SCANNER_TOOL: + scanner._SCANNER_TOOL = ScannerTool() + return scanner._SCANNER_TOOL + + +def scan_binary(apkfile): + """Scan output of dexdump for known non-free classes.""" + logging.info(_('Scanning APK with dexdump for known non-free classes.')) + result = get_embedded_classes(apkfile) + problems, warnings = 0, 0 + for classname in result: + for suspect, regexp in _get_tool().regexs['warn_code_signatures'].items(): + if regexp.match(classname): + logging.debug("Warning: found class '%s'" % classname) + warnings += 1 + for suspect, regexp in _get_tool().regexs['err_code_signatures'].items(): + if regexp.match(classname): + logging.debug("Problem: found class '%s'" % classname) + problems += 1 + + logging.info(_('Scanning APK for extra signing blocks.')) + a = common.get_androguard_APK(str(apkfile)) + a.parse_v2_v3_signature() + for b in a._v2_blocks: + if b in APK_SIGNING_BLOCK_IDS: + logging.debug( + f"Problem: found extra signing block '{APK_SIGNING_BLOCK_IDS[b]}'" + ) + problems += 1 + + if warnings: + logging.warning( + _("Found {count} warnings in {filename}").format( + count=warnings, filename=apkfile + ) + ) + if problems: + logging.critical( + _("Found {count} problems in {filename}").format( + count=problems, filename=apkfile + ) + ) + return problems + + +def scan_source(build_dir, build=metadata.Build(), json_per_build=None): + """Scan the source code in the given directory (and all subdirectories). + + Returns + ------- + the number of fatal problems encountered. + + """ + count = 0 + + if not json_per_build: + json_per_build = MessageStore() + + def suspects_found(s): + for n, r in _get_tool().regexs['err_gradle_signatures'].items(): + if r.match(s): + yield n + + allowed_repos = [ + re.compile(r'^https://' + re.escape(repo) + r'/*') + for repo in [ + 'repo1.maven.org/maven2', # mavenCentral() + 'jitpack.io', + 'www.jitpack.io', + 'repo.maven.apache.org/maven2', + 'oss.jfrog.org/artifactory/oss-snapshot-local', + 'central.sonatype.com/repository/maven-snapshots', + 'oss.sonatype.org/content/repositories/snapshots', + 'oss.sonatype.org/content/repositories/releases', + 'oss.sonatype.org/content/groups/public', + 'oss.sonatype.org/service/local/staging/deploy/maven2', + 's01.oss.sonatype.org/content/repositories/snapshots', + 's01.oss.sonatype.org/content/repositories/releases', + 's01.oss.sonatype.org/content/groups/public', + 's01.oss.sonatype.org/service/local/staging/deploy/maven2', + 'clojars.org/repo', # Clojure free software libs + 'repo.clojars.org', # Clojure free software libs + 's3.amazonaws.com/repo.commonsware.com', # CommonsWare + 'plugins.gradle.org/m2', # Gradle plugin repo + 'maven.google.com', # google() + ] + ] + [ + re.compile(r'^file://' + re.escape(repo) + r'/*') + for repo in [ + '/usr/share/maven-repo', # local repo on Debian installs + ] + ] + + scanignore, scanignore_not_found_paths = common.getpaths_map( + build_dir, build.scanignore + ) + scandelete, scandelete_not_found_paths = common.getpaths_map( + build_dir, build.scandelete + ) + + scanignore_worked = set() + scandelete_worked = set() + + def toignore(path_in_build_dir): + for k, paths in scanignore.items(): + for p in paths: + if path_in_build_dir.startswith(p): + scanignore_worked.add(k) + return True + return False + + def todelete(path_in_build_dir): + for k, paths in scandelete.items(): + for p in paths: + if path_in_build_dir.startswith(p): + scandelete_worked.add(k) + return True + return False 
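# Illustrative sketch (not part of this patch): the prefix matching done
# by toignore()/todelete() above. scanignore maps each scanignore entry
# from the metadata to the paths it expanded to; the entry and the test
# paths below are hypothetical.
scanignore = {'libs/precompiled': ['libs/precompiled']}
scanignore_worked = set()

def toignore(path_in_build_dir):
    for k, paths in scanignore.items():
        for p in paths:
            if path_in_build_dir.startswith(p):
                scanignore_worked.add(k)
                return True
    return False

print(toignore('libs/precompiled/foo.so'))    # True, and the entry is marked as used
print(toignore('src/main/jni/foo.c'))         # False
print(scanignore.keys() - scanignore_worked)  # set(): no unused entries to report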
+ + def ignoreproblem(what, path_in_build_dir, json_per_build): + """No summary. + + Parameters + ---------- + what: string + describing the problem, will be printed in log messages + path_in_build_dir + path to the file relative to `build`-dir + + Returns + ------- + 0 as we explicitly ignore the file, so don't count an error + + """ + msg = 'Ignoring %s at %s' % (what, path_in_build_dir) + logging.info(msg) + if json_per_build is not None: + json_per_build.infos.append([msg, path_in_build_dir]) + return 0 + + def removeproblem(what, path_in_build_dir, filepath, json_per_build): + """No summary. + + Parameters + ---------- + what: string + describing the problem, will be printed in log messages + path_in_build_dir + path to the file relative to `build`-dir + filepath + Path (relative to our current path) to the file + + Returns + ------- + 0 as we deleted the offending file + + """ + msg = 'Removing %s at %s' % (what, path_in_build_dir) + logging.info(msg) + if json_per_build is not None: + json_per_build.infos.append([msg, path_in_build_dir]) + try: + os.remove(filepath) + except FileNotFoundError: + # File is already gone, nothing to do. + # This can happen if we find multiple problems in one file that is setup for scandelete + # I.e. build.gradle files containig multiple unknown maven repos. + pass + return 0 + + def warnproblem(what, path_in_build_dir, json_per_build): + """No summary. + + Parameters + ---------- + what: string + describing the problem, will be printed in log messages + path_in_build_dir + path to the file relative to `build`-dir + + Returns + ------- + 0, as warnings don't count as errors + + """ + if toignore(path_in_build_dir): + return 0 + logging.warning('Found %s at %s' % (what, path_in_build_dir)) + if json_per_build is not None: + json_per_build.warnings.append([what, path_in_build_dir]) + return 0 + + def handleproblem(what, path_in_build_dir, filepath, json_per_build): + """Dispatches to problem handlers (ignore, delete, warn). + + Or returns 1 for increasing the error count. + + Parameters + ---------- + what: string + describing the problem, will be printed in log messages + path_in_build_dir + path to the file relative to `build`-dir + filepath + Path (relative to our current path) to the file + + Returns + ------- + 0 if the problem was ignored/deleted/is only a warning, 1 otherwise + + """ + options = common.get_options() + if toignore(path_in_build_dir): + return ignoreproblem(what, path_in_build_dir, json_per_build) + if todelete(path_in_build_dir): + return removeproblem(what, path_in_build_dir, filepath, json_per_build) + if 'src/test' in path_in_build_dir or '/test/' in path_in_build_dir: + return warnproblem(what, path_in_build_dir, json_per_build) + if options and 'json' in vars(options) and options.json: + json_per_build.errors.append([what, path_in_build_dir]) + if options and ( + options.verbose or not ('json' in vars(options) and options.json) + ): + logging.error('Found %s at %s' % (what, path_in_build_dir)) + return 1 + + def is_executable(path): + return os.path.exists(path) and os.access(path, os.X_OK) + + textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) # fmt: skip + + def is_binary(path): + d = None + with open(path, 'rb') as f: + d = f.read(1024) + return bool(d.translate(None, textchars)) + + # False positives patterns for files that are binary and executable. 
+ safe_paths = [ + re.compile(r) + for r in [ + r".*/drawable[^/]*/.*\.png$", # png drawables + r".*/mipmap[^/]*/.*\.png$", # png mipmaps + ] + ] + + def is_image_file(path): + try: + mimetype = magic.from_file(path, mime=True) + if mimetype and mimetype.startswith('image/'): + return True + except Exception as e: + logging.info(e) + + def safe_path(path_in_build_dir): + for sp in safe_paths: + if sp.match(path_in_build_dir): + return True + return False + + def is_used_by_gradle_without_catalog(line): + return any( + command.match(line) + for command in get_gradle_compile_commands_without_catalog(build) + ) + + def is_used_by_gradle_with_catalog(line, prefix): + for m in ( + command.match(line) + for command in get_gradle_compile_commands_with_catalog(build, prefix) + ): + if m: + return m + + all_catalogs = {} + # Iterate through all files in the source code + for root, dirs, files in os.walk(build_dir, topdown=True): + # It's topdown, so checking the basename is enough + for ignoredir in ('.hg', '.git', '.svn', '.bzr'): + if ignoredir in dirs: + dirs.remove(ignoredir) + + if "settings.gradle" in files or "settings.gradle.kts" in files: + all_catalogs[str(root)] = get_catalogs(root) + + for curfile in files: + if curfile in ['.DS_Store']: + continue + + # Path (relative) to the file + filepath = os.path.join(root, curfile) + + if os.path.islink(filepath): + continue + + path_in_build_dir = os.path.relpath(filepath, build_dir) + + if curfile in ('gradle-wrapper.jar', 'gradlew', 'gradlew.bat'): + removeproblem(curfile, path_in_build_dir, filepath, json_per_build) + elif curfile.endswith('.apk'): + removeproblem( + _('Android APK file'), path_in_build_dir, filepath, json_per_build + ) + + elif curfile.endswith('.a'): + count += handleproblem( + _('static library'), path_in_build_dir, filepath, json_per_build + ) + elif curfile.endswith('.aar'): + count += handleproblem( + _('Android AAR library'), + path_in_build_dir, + filepath, + json_per_build, + ) + elif curfile.endswith('.class'): + count += handleproblem( + _('Java compiled class'), + path_in_build_dir, + filepath, + json_per_build, + ) + elif curfile.endswith('.dex'): + count += handleproblem( + _('Android DEX code'), path_in_build_dir, filepath, json_per_build + ) + elif curfile.endswith('.gz') or curfile.endswith('.tgz'): + count += handleproblem( + _('gzip file archive'), path_in_build_dir, filepath, json_per_build + ) + # We use a regular expression here to also match versioned shared objects like .so.0.0.0 + elif re.match(r'.*\.so(\..+)*$', curfile): + count += handleproblem( + _('shared library'), path_in_build_dir, filepath, json_per_build + ) + elif curfile.endswith('.zip'): + count += handleproblem( + _('ZIP file archive'), path_in_build_dir, filepath, json_per_build + ) + elif curfile.endswith('.jar'): + for name in suspects_found(curfile): + count += handleproblem( + 'usual suspect \'%s\'' % name, + path_in_build_dir, + filepath, + json_per_build, + ) + count += handleproblem( + _('Java JAR file'), path_in_build_dir, filepath, json_per_build + ) + elif curfile.endswith('.wasm'): + count += handleproblem( + _('WebAssembly binary file'), + path_in_build_dir, + filepath, + json_per_build, + ) + + elif curfile.endswith('.java'): + if not os.path.isfile(filepath): + continue + with open(filepath, 'r', errors='replace') as f: + for line in f: + if 'DexClassLoader' in line: + count += handleproblem( + 'DexClassLoader', + path_in_build_dir, + filepath, + json_per_build, + ) + break + + elif curfile.endswith('.gradle') or 
curfile.endswith('.gradle.kts'): + catalog_path = str(build_dir) + # Find the longest path of dir that the curfile is in + for p in all_catalogs: + if os.path.commonpath([root, p]) == p: + catalog_path = p + catalogs = all_catalogs.get(catalog_path, {}) + + if not os.path.isfile(filepath): + continue + with open(filepath, 'r', errors='replace') as f: + lines = f.readlines() + for i, line in enumerate(lines): + if is_used_by_gradle_without_catalog(line): + for name in suspects_found(line): + count += handleproblem( + f"usual suspect '{name}'", + path_in_build_dir, + filepath, + json_per_build, + ) + for prefix, catalog in catalogs.items(): + m = is_used_by_gradle_with_catalog(line, prefix) + if not m: + continue + accessor = m[1] + coordinates = catalog.get_coordinate(accessor) + for coordinate in coordinates: + for name in suspects_found(coordinate): + count += handleproblem( + f"usual suspect '{prefix}.{accessor}: {name}'", + path_in_build_dir, + filepath, + json_per_build, + ) + noncomment_lines = [ + line for line in lines if not common.gradle_comment.match(line) + ] + no_comments = re.sub( + r'/\*.*?\*/', '', ''.join(noncomment_lines), flags=re.DOTALL + ) + for url in MAVEN_URL_REGEX.findall(no_comments): + if not any(r.match(url) for r in allowed_repos): + count += handleproblem( + 'unknown maven repo \'%s\'' % url, + path_in_build_dir, + filepath, + json_per_build, + ) + + elif os.path.splitext(path_in_build_dir)[1] in ['', '.bin', '.out', '.exe']: + if is_binary(filepath): + count += handleproblem( + 'binary', path_in_build_dir, filepath, json_per_build + ) + + elif curfile in DEPFILE: + d = root + while d.startswith(str(build_dir)): + for lockfile in DEPFILE[curfile]: + if os.path.isfile(os.path.join(d, lockfile)): + break + else: + d = os.path.dirname(d) + continue + break + else: + count += handleproblem( + _('dependency file without lock'), + path_in_build_dir, + filepath, + json_per_build, + ) + + elif is_executable(filepath): + if is_binary(filepath) and not ( + safe_path(path_in_build_dir) or is_image_file(filepath) + ): + warnproblem( + _('executable binary, possibly code'), + path_in_build_dir, + json_per_build, + ) + + for p in scanignore_not_found_paths: + logging.error(_("Non-exist scanignore path: %s") % p) + count += 1 + + for p in scanignore: + if p not in scanignore_worked: + logging.error(_('Unused scanignore path: %s') % p) + count += 1 + + for p in scandelete_not_found_paths: + logging.error(_("Non-exist scandelete path: %s") % p) + count += 1 + + for p in scandelete: + if p not in scandelete_worked: + logging.error(_('Unused scandelete path: %s') % p) + count += 1 + + return count -config = None -options = None def main(): + parser = ArgumentParser( + usage="%(prog)s [options] [(APPID[:VERCODE] | path/to.apk) ...]" + ) + common.setup_global_opts(parser) + parser.add_argument( + "appid", + nargs='*', + help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help=_("Force scan of disabled apps and builds."), + ) + parser.add_argument( + "--json", action="store_true", default=False, help=_("Output JSON to stdout.") + ) + parser.add_argument( + "-r", + "--refresh", + dest="refresh_scanner", + action="store_true", + default=False, + help=_("fetch the latest version of signatures from the web"), + ) + parser.add_argument( + "-e", + "--exit-code", + action="store_true", + default=False, + help=_("Exit with a non-zero code if problems were found"), + ) + 
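As a rough sketch of what this argument setup produces (only a subset of the flags defined above, with a made-up application ID), the flags land on the parsed options object like ordinary argparse booleans:

    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument("appid", nargs='*')
    parser.add_argument("--json", action="store_true", default=False)
    parser.add_argument("-r", "--refresh", dest="refresh_scanner", action="store_true", default=False)
    parser.add_argument("-e", "--exit-code", action="store_true", default=False)

    opts = parser.parse_args(["--json", "org.example.app:1001"])
    print(opts.appid)            # ['org.example.app:1001']
    print(opts.json)             # True
    print(opts.exit_code)        # False -> exit status stays 0 even if problems are found
    print(opts.refresh_scanner)  # False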
metadata.add_metadata_arguments(parser) + options = common.parse_args(parser) + metadata.warnings_action = options.W - global config, options + json_output = dict() + if options.json: + if options.verbose: + logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) + else: + logging.getLogger().setLevel(logging.ERROR) - # Parse command line... - parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("--nosvn", action="store_true", default=False, - help="Skip svn repositories - for test purposes, because they are too slow.") - (options, args) = parser.parse_args() + # initialize/load configuration values + common.get_config() - config = common.read_config(options) + probcount = 0 - # Get all apps... - allapps = metadata.read_metadata() - apps = common.read_app_args(args, allapps, True) + appids = [] + for apk in options.appid: + if os.path.isfile(apk): + count = scanner.scan_binary(apk) + if count > 0: + logging.warning( + _('Scanner found {count} problems in {apk}').format( + count=count, apk=apk + ) + ) + probcount += count + else: + appids.append(apk) - problems = [] + if not appids: + if options.exit_code and probcount > 0: + sys.exit(ExitCode.NONFREE_CODE) + if options.refresh_scanner: + _get_tool() + return + + apps = common.read_app_args(appids, allow_version_codes=True) build_dir = 'build' if not os.path.isdir(build_dir): - print "Creating build directory" + logging.info("Creating build directory") os.makedirs(build_dir) srclib_dir = os.path.join(build_dir, 'srclib') extlib_dir = os.path.join(build_dir, 'extlib') - for app in apps: + for appid, app in apps.items(): + json_per_appid = dict() - if app['Disabled']: - print "Skipping %s: disabled" % app['id'] + if app.Disabled and not options.force: + logging.info(_("Skipping {appid}: disabled").format(appid=appid)) + json_per_appid['disabled'] = MessageStore().infos.append( + 'Skipping: disabled' + ) continue - if not app['builds']: - print "Skipping %s: no builds specified" % app['id'] - continue - elif options.nosvn and app['Repo Type'] == 'svn': - continue - - print "Processing " + app['id'] try: + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + else: + build_dir = os.path.join('build', appid) - build_dir = 'build/' + app['id'] + if app.get('Builds'): + logging.info(_("Processing {appid}").format(appid=appid)) + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + else: + logging.info( + _( + "{appid}: no builds specified, running on current source state" + ).format(appid=appid) + ) + json_per_build = MessageStore() + json_per_appid['current-source-state'] = json_per_build + count = scan_source(build_dir, json_per_build=json_per_build) + if count > 0: + logging.warning( + _('Scanner found {count} problems in {appid}:').format( + count=count, appid=appid + ) + ) + probcount += count + app['Builds'] = [] - # Set up vcs interface and make sure we have the latest code... 
- vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + for build in app.get('Builds', []): + json_per_build = MessageStore() + json_per_appid[build.versionCode] = json_per_build - for thisbuild in app['builds']: + if build.disable and not options.force: + logging.info( + "...skipping version %s - %s" + % (build.versionName, build.get('disable', build.commit[1:])) + ) + continue - if 'disable' in thisbuild: - print ("..skipping version " + thisbuild['version'] + " - " + - thisbuild.get('disable', thisbuild['commit'][1:])) - else: - print "..scanning version " + thisbuild['version'] + logging.info("...scanning version " + build.versionName) + # Prepare the source code... + common.prepare_source( + vcs, app, build, build_dir, srclib_dir, extlib_dir, False + ) - # Prepare the source code... - root_dir, _ = common.prepare_source(vcs, app, thisbuild, - build_dir, srclib_dir, extlib_dir, False) - - # Do the scan... - buildprobs = common.scan_source(build_dir, root_dir, thisbuild) - for problem in buildprobs: - problems.append(problem + - ' in ' + app['id'] + ' ' + thisbuild['version']) + count = scan_source(build_dir, build, json_per_build=json_per_build) + if count > 0: + logging.warning( + _( + 'Scanner found {count} problems in {appid}:{versionCode}:' + ).format( + count=count, appid=appid, versionCode=build.versionCode + ) + ) + probcount += count except BuildException as be: - msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be) - problems.append(msg) + logging.warning( + 'Could not scan app %s due to BuildException: %s' % (appid, be) + ) + probcount += 1 except VCSException as vcse: - msg = "VCS error while scanning app %s: %s" % (app['id'], vcse) - problems.append(msg) + logging.warning('VCS error while scanning app %s: %s' % (appid, vcse)) + probcount += 1 except Exception: - msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc()) - problems.append(msg) + logging.warning( + 'Could not scan app %s due to unknown error: %s' + % (appid, traceback.format_exc()) + ) + probcount += 1 + + for k, v in json_per_appid.items(): + if len(v.errors) or len(v.warnings) or len(v.infos): + json_output[appid] = { + k: dict((field.name, getattr(v, field.name)) for field in fields(v)) + for k, v in json_per_appid.items() + } + break + + logging.info(_("Finished")) + if options.json: + print(json.dumps(json_output)) + elif probcount or options.verbose: + print(_("%d problems found") % probcount) - print "Finished:" - for problem in problems: - print problem - print str(len(problems)) + ' problems.' 
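The --json report assembled above nests one message store per versionCode (plus a 'current-source-state' entry) under each application ID and serializes it with dataclasses.fields(). A small sketch of that serialization, assuming MessageStore is a dataclass with infos/warnings/errors lists (its definition lies outside this hunk) and using invented values:

    from dataclasses import dataclass, field, fields

    @dataclass
    class MessageStore:  # assumed shape, mirroring the attributes used above
        infos: list = field(default_factory=list)
        warnings: list = field(default_factory=list)
        errors: list = field(default_factory=list)

    per_build = MessageStore(errors=[['shared library', 'libs/libfoo.so']])
    json_output = {
        'org.example.app': {
            1001: {f.name: getattr(per_build, f.name) for f in fields(per_build)}
        }
    }
    print(json_output)
    # {'org.example.app': {1001: {'infos': [], 'warnings': [], 'errors': [['shared library', 'libs/libfoo.so']]}}}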
if __name__ == "__main__": main() + +SUSS_DEFAULT = r'''{ + "cache_duration": 86400, + "signatures": { + "com.amazon.device.ads": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/amazon/device/ads" + ], + "description": "an interface for views used to retrieve and display Amazon ads.", + "license": "NonFree" + }, + "com.amazon.device.associates": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/amazon/device/associates" + ], + "description": "library for Amazon\u2019s affiliate marketing program.", + "license": "NonFree" + }, + "com.amazon.device.iap": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/amazon/device/iap" + ], + "description": "allows an app to present, process, and fulfill purchases of digital content and subscriptions within your app.", + "license": "NonFree" + }, + "com.amazonaws": { + "code_signatures": [ + "com/amazonaws/AbortedException", + "com/amazonaws/AmazonClientException", + "com/amazonaws/AmazonServiceException$ErrorType", + "com/amazonaws/AmazonServiceException", + "com/amazonaws/AmazonWebServiceClient", + "com/amazonaws/AmazonWebServiceRequest", + "com/amazonaws/AmazonWebServiceResponse", + "com/amazonaws/async", + "com/amazonaws/auth", + "com/amazonaws/ClientConfiguration", + "com/amazonaws/cognito", + "com/amazonaws/DefaultRequest", + "com/amazonaws/event", + "com/amazonaws/handlers", + "com/amazonaws/http", + "com/amazonaws/HttpMethod", + "com/amazonaws/internal", + "com/amazonaws/logging", + "com/amazonaws/metrics", + "com/amazonaws/mobile", + "com/amazonaws/mobileconnectors", + "com/amazonaws/Protocol", + "com/amazonaws/regions", + "com/amazonaws/RequestClientOptions$Marker", + "com/amazonaws/RequestClientOptions", + "com/amazonaws/Request", + "com/amazonaws/ResponseMetadata", + "com/amazonaws/Response", + "com/amazonaws/retry", + "com/amazonaws/SDKGlobalConfiguration", + "com/amazonaws/ServiceNameFactory", + "com/amazonaws/services", + "com/amazonaws/transform", + "com/amazonaws/util" + ], + "gradle_signatures": [ + "com.amazonaws:amazon-kinesis-aggregator", + "com.amazonaws:amazon-kinesis-connectors", + "com.amazonaws:amazon-kinesis-deaggregator", + "com.amazonaws:aws-android-sdk-apigateway-core", + "com.amazonaws:aws-android-sdk-auth-core", + "com.amazonaws:aws-android-sdk-auth-facebook", + "com.amazonaws:aws-android-sdk-auth-google", + "com.amazonaws:aws-android-sdk-auth-ui", + "com.amazonaws:aws-android-sdk-auth-userpools", + "com.amazonaws:aws-android-sdk-cognito", + "com.amazonaws:aws-android-sdk-cognitoauth", + "com.amazonaws:aws-android-sdk-cognitoidentityprovider-asf", + "com.amazonaws:aws-android-sdk-comprehend", + "com.amazonaws:aws-android-sdk-core", + "com.amazonaws:aws-android-sdk-ddb", + "com.amazonaws:aws-android-sdk-ddb-document", + "com.amazonaws:aws-android-sdk-iot", + "com.amazonaws:aws-android-sdk-kinesis", + "com.amazonaws:aws-android-sdk-kinesisvideo", + "com.amazonaws:aws-android-sdk-kinesisvideo-archivedmedia", + "com.amazonaws:aws-android-sdk-kms", + "com.amazonaws:aws-android-sdk-lambda", + "com.amazonaws:aws-android-sdk-lex", + "com.amazonaws:aws-android-sdk-location", + "com.amazonaws:aws-android-sdk-logs", + "com.amazonaws:aws-android-sdk-mobileanalytics", + "com.amazonaws:aws-android-sdk-mobile-client", + "com.amazonaws:aws-android-sdk-pinpoint", + "com.amazonaws:aws-android-sdk-polly", + "com.amazonaws:aws-android-sdk-rekognition", + "com.amazonaws:aws-android-sdk-s3", + "com.amazonaws:aws-android-sdk-ses", + 
"com.amazonaws:aws-android-sdk-sns", + "com.amazonaws:aws-android-sdk-sqs", + "com.amazonaws:aws-android-sdk-textract", + "com.amazonaws:aws-android-sdk-transcribe", + "com.amazonaws:aws-android-sdk-translate", + "com.amazonaws:dynamodb-key-diagnostics-library", + "com.amazonaws:DynamoDBLocal", + "com.amazonaws:dynamodb-lock-client", + "com.amazonaws:ivs-broadcast", + "com.amazonaws:ivs-player", + "com.amazonaws:kinesis-storm-spout" + ], + "license": "NonFree", + "name": "AmazonAWS" + }, + "com.android.billingclient": { + "code_signatures": [ + "com/android/billingclient" + ], + "documentation": [ + "https://developer.android.com/google/play/billing/integrate" + ], + "gradle_signatures": [ + "com.android.billingclient", + "com.google.androidbrowserhelper:billing", + "com.anjlab.android.iab.v3:library", + "com.github.penn5:donations", + "me.proton.core:payment-iap" + ], + "license": "NonFree", + "name": "BillingClient" + }, + "com.android.installreferrer": { + "anti_features": [ + "NonFreeDep", + "NonFreeNet" + ], + "code_signatures": [ + "com/android/installreferrer" + ], + "documentation": [ + "https://developer.android.com/google/play/installreferrer/library" + ], + "gradle_signatures": [ + "com.android.installreferrer" + ], + "license": "NonFree", + "name": "Play Install Referrer Library" + }, + "com.anychart": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/anychart" + ], + "description": "a data visualization library for easily creating interactive charts in Android apps.", + "license": "NonFree" + }, + "com.appboy": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/appboy" + ], + "description": "Targets customers based on personal interests, location, past purchases, and more; profiles users, segments audiences, and utilizes analytics for targeted advertisements.", + "license": "NonFree" + }, + "com.appbrain": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/appbrain" + ], + "description": "See Exodus Privacy.", + "license": "NonFree" + }, + "com.applause.android": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/applause/android" + ], + "description": "crowd-sourced testing. See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.applovin": { + "anti_features": [ + "Ads" + ], + "code_signatures": [ + "com/applovin" + ], + "description": "a mobile advertising technology company that enables brands to create mobile marketing campaigns that are fueled by data. 
Primary targets games.", + "license": "NonFree" + }, + "com.appsflyer": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/appsflyer" + ], + "description": "a mobile & attribution analytics platform.", + "license": "NonFree" + }, + "com.apptentive": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/apptentive" + ], + "description": "See Exodus Privacy.", + "license": "NonFree" + }, + "com.apptimize": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/apptimize" + ], + "description": "See Exodus Privacy and Crunchbase.", + "license": "NonFree" + }, + "com.askingpoint": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/askingpoint" + ], + "description": "complete mobile user engagement solution (power local, In-application evaluations and audits, input, user support, mobile reviews and informing).", + "license": "NonFree" + }, + "com.baidu.mobstat": { + "code_signatures": [ + "com/baidu/mobstat" + ], + "documentation": [ + "https://mtj.baidu.com/web/sdk/index" + ], + "gradle_signatures": [ + "com.baidu.mobstat" + ], + "license": "NonFree", + "name": "\u767e\u5ea6\u79fb\u52a8\u7edf\u8ba1SDK" + }, + "com.batch": { + "anti_features": [ + "Ads", + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/batch" + ], + "description": "mobile engagement platform to execute CRM tactics over iOS, Android & mobile websites.", + "license": "NonFree" + }, + "com.bosch.mtprotocol": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/bosch/mtprotocol" + ], + "description": "simplify and manage use of Bosch GLM and PLR laser rangefinders with Bluetooth connectivity.", + "license": "NonFree" + }, + "com.bugsee.library.Bugsee": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/bugsee/library/Bugsee" + ], + "description": "see video, network and logs that led to bugs and crashes in live apps. No need to reproduce intermittent bugs. With Bugsee, all the crucial data is always there.", + "license": "NonFree" + }, + "com.bugsense": { + "code_signatures": [ + "com/bugsense" + ], + "documentation": [ + "https://github.com/bugsense/docs/blob/master/android.md" + ], + "gradle_signatures": [ + "com.bugsense" + ], + "license": "NonFree", + "name": "BugSense" + }, + "com.chartboost.sdk": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/chartboost/sdk" + ], + "description": "create customized interstitial and video ads, promote new games, and swap traffic with one another. 
For more details, see Wikipedia.", + "license": "NonFree" + }, + "com.cloudrail": { + "code_signature": [ + "com/cloudrail" + ], + "documentation": [ + "https://cloudrail.com/" + ], + "gradle_signatures": [ + "com.cloudrail" + ], + "license": "NonFree", + "name": "CloudRail" + }, + "com.comscore.analytics": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/comscore" + ], + "description": "See Wikipedia for details.", + "license": "NonFree" + }, + "com.crashlytics.sdk.android": { + "code_signatures": [ + "com/crashlytics" + ], + "documentation": [ + "https://firebase.google.com/docs/crashlytics" + ], + "gradle_signatures": [ + "crashlytics" + ], + "license": "NonFree", + "name": "Firebase Crashlytics" + }, + "com.crittercism": { + "code_signatures": [ + "com/crittercism" + ], + "documentation": [ + "https://github.com/crittercism/crittercism-unity-android" + ], + "gradle_signatures": [ + "com.crittercism" + ], + "license": "NonFree", + "name": "Crittercism Plugin for Unity Crash Reporting" + }, + "com.criware": { + "anti_features": [ + "NonFreeComp", + "NonFreeAssets" + ], + "code_signatures": [ + "com/criware" + ], + "description": "audio and video solutions that can be integrated with popular game engines.", + "license": "NonFree" + }, + "com.deezer.sdk": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/deezer/sdk" + ], + "description": "a closed-source API for the Deezer music streaming service.", + "license": "NonFree" + }, + "com.dynamicyield": { + "anti_features": [ + "Ads", + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/dynamicyield" + ], + "description": "targeted advertising. Tracks user via location (GPS, WiFi, location data). Collects PII, profiling. See Exodus Privacy for more details.", + "license": "NonFree" + }, + "com.dynatrace.android.app": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/dynatrace/android/app" + ], + "description": "See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.ensighten": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/ensighten" + ], + "description": "organizations can leverage first-party customer data and profiles to fuel omni-channel action and insight using their existing technology investments. 
See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.epicgames.mobile.eossdk": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/epicgames/mobile/eossdk" + ], + "description": "integrate games with Epic Account Services and Epic Games Store", + "license": "NonFree" + }, + "com.facebook.android": { + "code_signatures": [ + "com/facebook/AccessToken", + "com/facebook/AccessTokenCache", + "com/facebook/AccessTokenManager", + "com/facebook/AccessTokenSource", + "com/facebook/AccessTokenTracker", + "com/facebook/all/All", + "com/facebook/appevents/aam/MetadataIndexer", + "com/facebook/appevents/aam/MetadataMatcher", + "com/facebook/appevents/aam/MetadataRule", + "com/facebook/appevents/aam/MetadataViewObserver", + "com/facebook/appevents/AccessTokenAppIdPair", + "com/facebook/appevents/AnalyticsUserIDStore", + "com/facebook/appevents/AppEvent", + "com/facebook/appevents/AppEventCollection", + "com/facebook/appevents/AppEventDiskStore", + "com/facebook/appevents/AppEventQueue", + "com/facebook/appevents/AppEventsConstants", + "com/facebook/appevents/AppEventsLogger", + "com/facebook/appevents/AppEventsLoggerImpl", + "com/facebook/appevents/AppEventsManager", + "com/facebook/appevents/AppEventStore", + "com/facebook/appevents/cloudbridge/AppEventsCAPIManager", + "com/facebook/appevents/cloudbridge/AppEventsConversionsAPITransformer", + "com/facebook/appevents/cloudbridge/AppEventsConversionsAPITransformerWebRequests", + "com/facebook/appevents/codeless/CodelessLoggingEventListener", + "com/facebook/appevents/codeless/CodelessManager", + "com/facebook/appevents/codeless/CodelessMatcher", + "com/facebook/appevents/codeless/internal/Constants", + "com/facebook/appevents/codeless/internal/EventBinding", + "com/facebook/appevents/codeless/internal/ParameterComponent", + "com/facebook/appevents/codeless/internal/PathComponent", + "com/facebook/appevents/codeless/internal/SensitiveUserDataUtils", + "com/facebook/appevents/codeless/internal/UnityReflection", + "com/facebook/appevents/codeless/internal/ViewHierarchy", + "com/facebook/appevents/codeless/RCTCodelessLoggingEventListener", + "com/facebook/appevents/codeless/ViewIndexer", + "com/facebook/appevents/codeless/ViewIndexingTrigger", + "com/facebook/appevents/eventdeactivation/EventDeactivationManager", + "com/facebook/appevents/FacebookSDKJSInterface", + "com/facebook/appevents/FlushReason", + "com/facebook/appevents/FlushResult", + "com/facebook/appevents/FlushStatistics", + "com/facebook/appevents/iap/InAppPurchaseActivityLifecycleTracker", + "com/facebook/appevents/iap/InAppPurchaseAutoLogger", + "com/facebook/appevents/iap/InAppPurchaseBillingClientWrapper", + "com/facebook/appevents/iap/InAppPurchaseEventManager", + "com/facebook/appevents/iap/InAppPurchaseLoggerManager", + "com/facebook/appevents/iap/InAppPurchaseManager", + "com/facebook/appevents/iap/InAppPurchaseSkuDetailsWrapper", + "com/facebook/appevents/iap/InAppPurchaseUtils", + "com/facebook/appevents/integrity/BlocklistEventsManager", + "com/facebook/appevents/integrity/IntegrityManager", + "com/facebook/appevents/integrity/MACARuleMatchingManager", + "com/facebook/appevents/integrity/ProtectedModeManager", + "com/facebook/appevents/integrity/RedactedEventsManager", + "com/facebook/appevents/internal/ActivityLifecycleTracker", + "com/facebook/appevents/InternalAppEventsLogger", + "com/facebook/appevents/internal/AppEventsLoggerUtility", + "com/facebook/appevents/internal/AppEventUtility", + 
"com/facebook/appevents/internal/AutomaticAnalyticsLogger", + "com/facebook/appevents/internal/Constants", + "com/facebook/appevents/internal/FileDownloadTask", + "com/facebook/appevents/internal/HashUtils", + "com/facebook/appevents/internal/SessionInfo", + "com/facebook/appevents/internal/SessionLogger", + "com/facebook/appevents/internal/SourceApplicationInfo", + "com/facebook/appevents/internal/ViewHierarchyConstants", + "com/facebook/appevents/ml/Model", + "com/facebook/appevents/ml/ModelManager", + "com/facebook/appevents/ml/MTensor", + "com/facebook/appevents/ml/Operator", + "com/facebook/appevents/ml/Utils", + "com/facebook/appevents/ondeviceprocessing/OnDeviceProcessingManager", + "com/facebook/appevents/ondeviceprocessing/RemoteServiceParametersHelper", + "com/facebook/appevents/ondeviceprocessing/RemoteServiceWrapper", + "com/facebook/appevents/PersistedEvents", + "com/facebook/appevents/restrictivedatafilter/RestrictiveDataManager", + "com/facebook/appevents/SessionEventsState", + "com/facebook/appevents/suggestedevents/FeatureExtractor", + "com/facebook/appevents/suggestedevents/PredictionHistoryManager", + "com/facebook/appevents/suggestedevents/SuggestedEventsManager", + "com/facebook/appevents/suggestedevents/SuggestedEventViewHierarchy", + "com/facebook/appevents/suggestedevents/ViewObserver", + "com/facebook/appevents/suggestedevents/ViewOnClickListener", + "com/facebook/appevents/UserDataStore", + "com/facebook/applinks/AppLinkData", + "com/facebook/applinks/AppLinks", + "com/facebook/applinks/FacebookAppLinkResolver", + "com/facebook/AuthenticationToken", + "com/facebook/AuthenticationTokenCache", + "com/facebook/AuthenticationTokenClaims", + "com/facebook/AuthenticationTokenHeader", + "com/facebook/AuthenticationTokenManager", + "com/facebook/AuthenticationTokenTracker", + "com/facebook/bolts/AggregateException", + "com/facebook/bolts/AndroidExecutors", + "com/facebook/bolts/AppLink", + "com/facebook/bolts/AppLinkResolver", + "com/facebook/bolts/AppLinks", + "com/facebook/bolts/BoltsExecutors", + "com/facebook/bolts/CancellationToken", + "com/facebook/bolts/CancellationTokenRegistration", + "com/facebook/bolts/CancellationTokenSource", + "com/facebook/bolts/Continuation", + "com/facebook/bolts/ExecutorException", + "com/facebook/bolts/Task", + "com/facebook/bolts/TaskCompletionSource", + "com/facebook/bolts/UnobservedErrorNotifier", + "com/facebook/bolts/UnobservedTaskException", + "com/facebook/CallbackManager", + "com/facebook/common/Common", + "com/facebook/core/Core", + "com/facebook/CurrentAccessTokenExpirationBroadcastReceiver", + "com/facebook/CustomTabActivity", + "com/facebook/CustomTabMainActivity", + "com/facebook/devicerequests/internal/DeviceRequestsHelper", + "com/facebook/FacebookActivity", + "com/facebook/FacebookAuthorizationException", + "com/facebook/FacebookBroadcastReceiver", + "com/facebook/FacebookButtonBase", + "com/facebook/FacebookCallback", + "com/facebook/FacebookContentProvider", + "com/facebook/FacebookDialog", + "com/facebook/FacebookDialogException", + "com/facebook/FacebookException", + "com/facebook/FacebookGraphResponseException", + "com/facebook/FacebookOperationCanceledException", + "com/facebook/FacebookRequestError", + "com/facebook/FacebookSdk", + "com/facebook/FacebookSdkNotInitializedException", + "com/facebook/FacebookSdkVersion", + "com/facebook/FacebookServiceException", + "com/facebook/gamingservices/cloudgaming/AppToUserNotificationSender", + "com/facebook/gamingservices/cloudgaming/CloudGameLoginHandler", + 
"com/facebook/gamingservices/cloudgaming/DaemonReceiver", + "com/facebook/gamingservices/cloudgaming/DaemonRequest", + "com/facebook/gamingservices/cloudgaming/GameFeaturesLibrary", + "com/facebook/gamingservices/cloudgaming/InAppAdLibrary", + "com/facebook/gamingservices/cloudgaming/InAppPurchaseLibrary", + "com/facebook/gamingservices/cloudgaming/internal/SDKAnalyticsEvents", + "com/facebook/gamingservices/cloudgaming/internal/SDKConstants", + "com/facebook/gamingservices/cloudgaming/internal/SDKLogger", + "com/facebook/gamingservices/cloudgaming/internal/SDKMessageEnum", + "com/facebook/gamingservices/cloudgaming/internal/SDKShareIntentEnum", + "com/facebook/gamingservices/cloudgaming/PlayableAdsLibrary", + "com/facebook/gamingservices/ContextChooseDialog", + "com/facebook/gamingservices/ContextCreateDialog", + "com/facebook/gamingservices/ContextSwitchDialog", + "com/facebook/gamingservices/CustomUpdate", + "com/facebook/gamingservices/FriendFinderDialog", + "com/facebook/gamingservices/GameRequestDialog", + "com/facebook/gamingservices/GamingContext", + "com/facebook/gamingservices/GamingGroupIntegration", + "com/facebook/gamingservices/GamingImageUploader", + "com/facebook/gamingservices/GamingPayload", + "com/facebook/gamingservices/GamingServices", + "com/facebook/gamingservices/GamingVideoUploader", + "com/facebook/gamingservices/internal/DateFormatter", + "com/facebook/gamingservices/internal/GamingMediaUploader", + "com/facebook/gamingservices/internal/TournamentJoinDialogURIBuilder", + "com/facebook/gamingservices/internal/TournamentScoreType", + "com/facebook/gamingservices/internal/TournamentShareDialogURIBuilder", + "com/facebook/gamingservices/internal/TournamentSortOrder", + "com/facebook/gamingservices/model/ContextChooseContent", + "com/facebook/gamingservices/model/ContextCreateContent", + "com/facebook/gamingservices/model/ContextSwitchContent", + "com/facebook/gamingservices/model/CustomUpdateContent", + "com/facebook/gamingservices/OpenGamingMediaDialog", + "com/facebook/gamingservices/Tournament", + "com/facebook/gamingservices/TournamentConfig", + "com/facebook/gamingservices/TournamentFetcher", + "com/facebook/gamingservices/TournamentJoinDialog", + "com/facebook/gamingservices/TournamentShareDialog", + "com/facebook/gamingservices/TournamentUpdater", + "com/facebook/GraphRequest", + "com/facebook/GraphRequestAsyncTask", + "com/facebook/GraphRequestBatch", + "com/facebook/GraphResponse", + "com/facebook/HttpMethod", + "com/facebook/internal/AnalyticsEvents", + "com/facebook/internal/AppCall", + "com/facebook/internal/AttributionIdentifiers", + "com/facebook/internal/BoltsMeasurementEventListener", + "com/facebook/internal/BundleJSONConverter", + "com/facebook/internal/CallbackManagerImpl", + "com/facebook/internal/CollectionMapper", + "com/facebook/internal/CustomTab", + "com/facebook/internal/CustomTabUtils", + "com/facebook/internal/DialogFeature", + "com/facebook/internal/DialogPresenter", + "com/facebook/internal/FacebookDialogBase", + "com/facebook/internal/FacebookDialogFragment", + "com/facebook/internal/FacebookGamingAction", + "com/facebook/internal/FacebookInitProvider", + "com/facebook/internal/FacebookRequestErrorClassification", + "com/facebook/internal/FacebookSignatureValidator", + "com/facebook/internal/FacebookWebFallbackDialog", + "com/facebook/internal/FeatureManager", + "com/facebook/internal/FetchedAppGateKeepersManager", + "com/facebook/internal/FetchedAppSettings", + "com/facebook/internal/FetchedAppSettingsManager", + 
"com/facebook/internal/FileLruCache", + "com/facebook/internal/FragmentWrapper", + "com/facebook/internal/gatekeeper/GateKeeper", + "com/facebook/internal/gatekeeper/GateKeeperRuntimeCache", + "com/facebook/internal/ImageDownloader", + "com/facebook/internal/ImageRequest", + "com/facebook/internal/ImageResponse", + "com/facebook/internal/ImageResponseCache", + "com/facebook/internal/InstagramCustomTab", + "com/facebook/internal/InstallReferrerUtil", + "com/facebook/internal/instrument/anrreport/ANRDetector", + "com/facebook/internal/instrument/anrreport/ANRHandler", + "com/facebook/internal/instrument/crashreport/CrashHandler", + "com/facebook/internal/instrument/crashshield/AutoHandleExceptions", + "com/facebook/internal/instrument/crashshield/CrashShieldHandler", + "com/facebook/internal/instrument/crashshield/NoAutoExceptionHandling", + "com/facebook/internal/instrument/errorreport/ErrorReportData", + "com/facebook/internal/instrument/errorreport/ErrorReportHandler", + "com/facebook/internal/instrument/ExceptionAnalyzer", + "com/facebook/internal/instrument/InstrumentData", + "com/facebook/internal/instrument/InstrumentManager", + "com/facebook/internal/instrument/InstrumentUtility", + "com/facebook/internal/instrument/threadcheck/ThreadCheckHandler", + "com/facebook/internal/InternalSettings", + "com/facebook/internal/LockOnGetVariable", + "com/facebook/internal/Logger", + "com/facebook/internal/logging/dumpsys/EndToEndDumper", + "com/facebook/internal/Mutable", + "com/facebook/internal/NativeAppCallAttachmentStore", + "com/facebook/internal/NativeProtocol", + "com/facebook/internal/PlatformServiceClient", + "com/facebook/internal/ProfileInformationCache", + "com/facebook/internal/qualityvalidation/Excuse", + "com/facebook/internal/qualityvalidation/ExcusesForDesignViolations", + "com/facebook/internal/security/CertificateUtil", + "com/facebook/internal/security/OidcSecurityUtil", + "com/facebook/internal/ServerProtocol", + "com/facebook/internal/SmartLoginOption", + "com/facebook/internal/UrlRedirectCache", + "com/facebook/internal/Utility", + "com/facebook/internal/Validate", + "com/facebook/internal/WebDialog", + "com/facebook/internal/WorkQueue", + "com/facebook/LegacyTokenHelper", + "com/facebook/LoggingBehavior", + "com/facebook/login/CodeChallengeMethod", + "com/facebook/login/CustomTabLoginMethodHandler", + "com/facebook/login/CustomTabPrefetchHelper", + "com/facebook/login/DefaultAudience", + "com/facebook/login/DeviceAuthDialog", + "com/facebook/login/DeviceAuthMethodHandler", + "com/facebook/login/DeviceLoginManager", + "com/facebook/login/GetTokenClient", + "com/facebook/login/GetTokenLoginMethodHandler", + "com/facebook/login/InstagramAppLoginMethodHandler", + "com/facebook/login/KatanaProxyLoginMethodHandler", + "com/facebook/login/Login", + "com/facebook/login/LoginBehavior", + "com/facebook/login/LoginClient", + "com/facebook/login/LoginConfiguration", + "com/facebook/login/LoginFragment", + "com/facebook/login/LoginLogger", + "com/facebook/login/LoginManager", + "com/facebook/login/LoginMethodHandler", + "com/facebook/login/LoginResult", + "com/facebook/login/LoginStatusClient", + "com/facebook/login/LoginTargetApp", + "com/facebook/login/NativeAppLoginMethodHandler", + "com/facebook/login/NonceUtil", + "com/facebook/login/PKCEUtil", + "com/facebook/login/StartActivityDelegate", + "com/facebook/LoginStatusCallback", + "com/facebook/login/WebLoginMethodHandler", + "com/facebook/login/WebViewLoginMethodHandler", + "com/facebook/login/widget/DeviceLoginButton", + 
"com/facebook/login/widget/LoginButton", + "com/facebook/login/widget/ProfilePictureView", + "com/facebook/login/widget/ToolTipPopup", + "com/facebook/messenger/Messenger", + "com/facebook/messenger/MessengerThreadParams", + "com/facebook/messenger/MessengerUtils", + "com/facebook/messenger/ShareToMessengerParams", + "com/facebook/messenger/ShareToMessengerParamsBuilder", + "com/facebook/Profile", + "com/facebook/ProfileCache", + "com/facebook/ProfileManager", + "com/facebook/ProfileTracker", + "com/facebook/ProgressNoopOutputStream", + "com/facebook/ProgressOutputStream", + "com/facebook/RequestOutputStream", + "com/facebook/RequestProgress", + "com/facebook/share/internal/CameraEffectFeature", + "com/facebook/share/internal/CameraEffectJSONUtility", + "com/facebook/share/internal/GameRequestValidation", + "com/facebook/share/internal/LegacyNativeDialogParameters", + "com/facebook/share/internal/MessageDialogFeature", + "com/facebook/share/internal/NativeDialogParameters", + "com/facebook/share/internal/ResultProcessor", + "com/facebook/share/internal/ShareConstants", + "com/facebook/share/internal/ShareContentValidation", + "com/facebook/share/internal/ShareDialogFeature", + "com/facebook/share/internal/ShareFeedContent", + "com/facebook/share/internal/ShareInternalUtility", + "com/facebook/share/internal/ShareStoryFeature", + "com/facebook/share/internal/VideoUploader", + "com/facebook/share/internal/WebDialogParameters", + "com/facebook/share/model/AppGroupCreationContent", + "com/facebook/share/model/CameraEffectArguments", + "com/facebook/share/model/CameraEffectTextures", + "com/facebook/share/model/GameRequestContent", + "com/facebook/share/model/ShareCameraEffectContent", + "com/facebook/share/model/ShareContent", + "com/facebook/share/model/ShareHashtag", + "com/facebook/share/model/ShareLinkContent", + "com/facebook/share/model/ShareMedia", + "com/facebook/share/model/ShareMediaContent", + "com/facebook/share/model/ShareMessengerActionButton", + "com/facebook/share/model/ShareMessengerURLActionButton", + "com/facebook/share/model/ShareModel", + "com/facebook/share/model/ShareModelBuilder", + "com/facebook/share/model/SharePhoto", + "com/facebook/share/model/SharePhotoContent", + "com/facebook/share/model/ShareStoryContent", + "com/facebook/share/model/ShareVideo", + "com/facebook/share/model/ShareVideoContent", + "com/facebook/share/Share", + "com/facebook/share/ShareApi", + "com/facebook/share/ShareBuilder", + "com/facebook/share/Sharer", + "com/facebook/share/widget/GameRequestDialog", + "com/facebook/share/widget/MessageDialog", + "com/facebook/share/widget/SendButton", + "com/facebook/share/widget/ShareButton", + "com/facebook/share/widget/ShareButtonBase", + "com/facebook/share/widget/ShareDialog", + "com/facebook/UserSettingsManager", + "com/facebook/WebDialog" + ], + "documentation": [ + "https://developers.facebook.com/docs/android" + ], + "gradle_signatures": [ + "com.facebook.android" + ], + "license": "NonFree", + "name": "Facebook Android SDK" + }, + "com.flurry.android": { + "code_signature": [ + "com/flurry" + ], + "documentation": [ + "https://www.flurry.com/" + ], + "gradle_signatures": [ + "com.flurry.android" + ], + "license": "NonFree", + "name": "Flurry Android SDK" + }, + "com.garmin.android.connectiq": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/garmin/android/apps/connectmobile/connectiq" + ], + "description": "SDK to build unique wearable experiences leveraging Garmin device sensors and features.", + "license": "NonFree" + }, 
+ "com.garmin.connectiq": { + "code_signatures": [ + "com/garmin/android/connectiq" + ], + "documentation": [ + "https://developer.garmin.com/connect-iq/core-topics/mobile-sdk-for-android/" + ], + "gradle_signatures": [ + "com.garmin.connectiq:ciq-companion-app-sdk" + ], + "license": "NonFree", + "name": "Connect IQ Mobile SDK for Android" + }, + "com.garmin.fit": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/garmin/fit" + ], + "description": "SDK to access the Garmin Fit.", + "license": "NonFree" + }, + "com.geetest": { + "code_signatures": [ + "com/geetest" + ], + "documentation": [ + "https://docs.geetest.com/" + ], + "gradle_signatures": [ + "com.geetest" + ], + "license": "NonFree", + "name": "GeeTest" + }, + "com.github.junrar": { + "code_signatures": [ + "com/github/junrar" + ], + "documentation": [ + "https://github.com/junrar/junrar" + ], + "gradle_signatures": [ + "com.github.junrar:junrar" + ], + "license": "NonFree", + "name": "Junrar" + }, + "com.github.omicronapps.7-Zip-JBinding-4Android": { + "documentation": [ + "https://github.com/omicronapps/7-Zip-JBinding-4Android" + ], + "gradle_signatures": [ + "com.github.omicronapps:7-Zip-JBinding-4Android" + ], + "license": "NonFree", + "name": "7-Zip-JBinding-4Android" + }, + "com.google.ads": { + "code_signatures": [ + "com/google/ads" + ], + "documentation": [ + "https://developers.google.com/interactive-media-ads/docs/sdks/android/client-side" + ], + "gradle_signatures": [ + "com.google.ads", + "com.google.android.exoplayer:extension-ima", + "androidx.media3:media3-exoplayer-ima" + ], + "license": "NonFree", + "name": "IMA SDK for Android" + }, + "com.google.android.apps.auto.sdk": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/google/android/apps/auto/sdk" + ], + "description": "Framework to develop apps for Android Auto", + "license": "NonFree" + }, + "com.google.android.gcm": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/google/android/gcm" + ], + "description": "Google Cloud Messaging is a mobile notification service developed by Google that enables third-party application developers to send notification data or information from developer-run servers to app.", + "license": "NonFree" + }, + "com.google.android.gms": { + "code_signatures": [ + "com/google/android/gms" + ], + "documentation": [ + "https://www.android.com/gms/" + ], + "gradle_signatures": [ + "com.google.android.gms(?!.(oss-licenses-plugin|strict-version-matcher-plugin))", + "com.google.android.ump", + "androidx.core:core-google-shortcuts", + "androidx.credentials:credentials-play-services-auth", + "androidx.media3:media3-cast", + "androidx.media3:media3-datasource-cronet", + "androidx.wear:wear-remote-interactions", + "androidx.work:work-gcm", + "com.google.android.exoplayer:extension-cast", + "com.google.android.exoplayer:extension-cronet", + "com.evernote:android-job", + "com.cloudinary:cloudinary-android.*:2\\.[12]\\.", + "com.pierfrancescosoffritti.androidyoutubeplayer:chromecast-sender", + "com.yayandroid:locationmanager", + "(?Home channels for mobile apps.", + "license": "NonFree" + }, + "com.google.android.play": { + "anti_features": [ + "NonFreeDep", + "NonFreeNet" + ], + "code_signatures": [ + "com/google/android/play/core" + ], + "documentation": [ + "https://developer.android.com/guide/playcore" + ], + "gradle_signatures": [ + "com.google.android.play:app-update", + "com.google.android.play:asset-delivery", + "com.google.android.play:core.*", + 
"com.google.android.play:feature-delivery", + "com.google.android.play:review", + "androidx.navigation:navigation-dynamic-features", + "com.github.SanojPunchihewa:InAppUpdater", + "com.suddenh4x.ratingdialog:awesome-app-rating" + ], + "license": "NonFree", + "name": "Google Play Core" + }, + "com.google.android.play.appupdate": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/google/android/play/appupdate" + ], + "description": "manages operations that allow an app to initiate its own updates.", + "license": "NonFree" + }, + "com.google.android.play.integrity": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/google/android/play/integrity" + ], + "description": "helps you check that interactions and server requests are coming from your genuine app binary running on a genuine Android device.", + "license": "NonFree" + }, + "com.google.android.play.review": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/google/android/play/review" + ], + "description": "lets you prompt users to submit Play Store ratings and reviews without the inconvenience of leaving your app or game.", + "license": "NonFree" + }, + "com.google.android.vending": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/google/android/vending/(?!licensing|expansion)" + ], + "description": "the Google Play Store app and its libaries, parts are FOSS and get vendored in libs as they are", + "documentation": [ + "https://github.com/google/play-licensing/tree/master/lvl_library/src/main", + "https://github.com/googlearchive/play-apk-expansion/tree/master/zip_file/src/com/google/android/vending/expansion/zipfile", + "https://github.com/googlearchive/play-apk-expansion/tree/master/apkx_library/src/com/google/android/vending/expansion/downloader" + ], + "license": "NonFree" + }, + "com.google.android.wearable": { + "code_signatures": [ + "com/google/android/wearable/(?!compat/WearableActivityController)" + ], + "description": "an API for the Android Wear platform, note that androidx.wear:wear has a stub https://android.googlesource.com/platform/frameworks/support/+/refs/heads/androidx-master-release/wear/wear/src/androidTest/java/com/google/android/wearable/compat/WearableActivityController.java#26", + "gradle_signatures": [ + "com.google.android.support:wearable", + "com.google.android.wearable:wearable" + ], + "license": "NonFree" + }, + "com.google.android.youtube.player": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/google/android/youtube/player" + ], + "description": "enables you to easily play YouTube videos and display thumbnails of YouTube videos in your Android application.", + "license": "NonFree" + }, + "com.google.mlkit": { + "code_signatures": [ + "com/google/mlkit" + ], + "documentation": [ + "https://developers.google.com/ml-kit" + ], + "gradle_signatures": [ + "com.google.mlkit", + "io.github.g00fy2.quickie" + ], + "license": "NonFree", + "name": "ML Kit" + }, + "com.google.vr": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/google/vr" + ], + "description": "enables Daydream and Cardboard app development on Android.", + "license": "NonFree" + }, + "com.heapanalytics": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/heapanalytics" + ], + "description": "automatically captures every web, mobile, and cloud interaction: clicks, submits, transactions, emails, and more. 
Retroactively analyze your data without writing code.", + "license": "NonFree" + }, + "com.heyzap": { + "code_signatures": [ + "com/heyzap" + ], + "documentation": [ + "https://www.digitalturbine.com/" + ], + "license": "NonFree", + "name": "Heyzap" + }, + "com.huawei.hms": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/huawei/hms" + ], + "description": "Huawei's pendant to GMS (Google Mobile Services)", + "license": "NonFree" + }, + "com.hypertrack": { + "code_signatures": [ + "com/hypertrack/(?!hyperlog)" + ], + "documentation": [ + "https://github.com/hypertrack/sdk-android" + ], + "gradle_signatures": [ + "com.hypertrack(?!:hyperlog)" + ], + "gradle_signatures_negative_examples": [ + "com.hypertrack:hyperlog" + ], + "license": "NonFree", + "name": "HyperTrack SDK for Android" + }, + "com.instabug": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/instabug" + ], + "description": "In-App Feedback and Bug Reporting for Mobile Apps.", + "license": "NonFree" + }, + "com.kiddoware.kidsplace.sdk": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/kiddoware/kidsplace/sdk" + ], + "description": "parental control", + "license": "NonFree" + }, + "com.kochava.android.tracker": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/kochava/android/tracker" + ], + "description": "provides holistic, unbiased measurement for precise, real-time visualization of app performance through the funnel. See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.mapbox": { + "MaintainerNotes": "It seems that all libs in https://github.com/mapbox/mapbox-java is fully FOSS\nsince 3.0.0.\n", + "documentation": [ + "https://docs.mapbox.com/android/java/overview/", + "https://github.com/mapbox/mapbox-java" + ], + "gradle_signatures": [ + "com\\.mapbox(?!\\.mapboxsdk:mapbox-sdk-(services|geojson|turf):([3-5]))" + ], + "gradle_signatures_negative_examples": [ + "com.mapbox.mapboxsdk:mapbox-sdk-services:5.0.0", + "com.github.johan12345:mapbox-events-android:a21c324501", + "implementation(\"com.github.johan12345.AnyMaps:anymaps-mapbox:$anyMapsVersion\")" + ], + "gradle_signatures_positive_examples": [ + "com.mapbox.mapboxsdk:mapbox-android-plugin-annotation-v7:0.6.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-annotation-v8:0.7.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-localization-v7:0.7.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-locationlayer:0.4.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-markerview-v8:0.3.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-places-v8:0.9.0", + "com.mapbox.mapboxsdk:mapbox-android-plugin-scalebar-v8:0.2.0", + "com.mapbox.mapboxsdk:mapbox-android-sdk:7.3.0" + ], + "license": "NonFree", + "name": "Mapbox Java SDK" + }, + "com.microblink": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet", + "Tracking" + ], + "code_signatures": [ + "com/microblink" + ], + "description": "verify users at scale and automate your document-based workflow with computer vision tech built for a remote world.", + "license": "NonFree" + }, + "com.microsoft.band": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/microsoft/band" + ], + "description": "library to access the Microsoft Band smartwatch.", + "license": "NonFree" + }, + "com.mopub.mobileads": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/mopub/mobileads" + ], + "description": "ad framework run by Twitter until 1/2022, then sold to 
AppLovin.", + "license": "NonFree" + }, + "com.newrelic.agent": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/newrelic/agent" + ], + "description": "delivering full-stack visibility and analytics to enterprises around the world. See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.onesignal": { + "code_signatures": [ + "com/onesignal" + ], + "documentation": [ + "https://github.com/OneSignal/OneSignal-Android-SDK" + ], + "gradle_signatures": [ + "com.onesignal:OneSignal" + ], + "license": "NonFree", + "name": "OneSignal Android Push Notification Plugin" + }, + "com.optimizely": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/optimizely" + ], + "description": "part of the comScore, Inc. market research community, a leading global market research effort that studies and reports on Internet trends and behavior.", + "license": "NonFree" + }, + "com.paypal.sdk": { + "code_signatures": [ + "com/paypal" + ], + "documentation": [ + "https://github.com/paypal/PayPal-Android-SDK", + "https://github.com/paypal/android-checkout-sdk" + ], + "gradle_signatures": [ + "com.paypal" + ], + "license": "NonFree", + "name": "PayPal Android SDK" + }, + "com.pushwoosh": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/pushwoosh" + ], + "description": "mobile analytics under the cover of push messaging.", + "license": "NonFree" + }, + "com.quantcast.measurement.service": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/quantcast/measurement/service" + ], + "description": "processes real-time data at the intersection of commerce and culture, providing useful, actionable insights for brands and publishers. See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.revenuecat.purchases": { + "code_signatures": [ + "com/revenuecat/purchases" + ], + "documentation": [ + "https://www.revenuecat.com/" + ], + "gradle_signatures": [ + "com.revenuecat.purchases" + ], + "license": "NonFree", + "name": "RevenueCat Purchases" + }, + "com.samsung.accessory": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/samsung/accessory" + ], + "description": "provides a stable environment in which you can use a variety features by connecting accessories to your mobile device.", + "license": "NonFree" + }, + "com.samsung.android.sdk.look": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/samsung/android/sdk/look" + ], + "description": "offers specialized widgets and service components for extended functions of the Samsung Android devices.", + "license": "NonFree" + }, + "com.sendbird.android": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet", + "Tracking" + ], + "code_signatures": [ + "com/sendbird/android" + ], + "description": "an easy-to-use Chat API, native Chat SDKs, and a fully-managed chat platform on the backend means faster time-to-market.", + "license": "NonFree" + }, + "com.smaato.soma": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/smaato/soma" + ], + "description": "a mobile ad platform that includes video ads.", + "license": "NonFree" + }, + "com.spotify.sdk": { + "anti_features": [ + "NonFreeComp", + "NonFreeNet" + ], + "code_signatures": [ + "com/spotify/sdk" + ], + "description": "allows your application to interact with the Spotify app service. 
(Note that while the SDK repo claims Apache license, the code is not available there)", + "license": "NonFree" + }, + "com.startapp.android": { + "anti_features": [ + "Ads", + "Tracking", + "NonFreeComp" + ], + "code_signatures": [ + "com/startapp" + ], + "description": "partly quite intrusive ad network.", + "license": "NonFree" + }, + "com.telerik.android": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "com/telerik/android" + ], + "description": "offers high quality Xamarin Forms UI components and Visual Studio item templates to enable every developer.", + "license": "NonFree" + }, + "com.tencent.bugly": { + "code_signatures": [ + "com/tencent/bugly" + ], + "documentation": [ + "https://bugly.qq.com/" + ], + "gradle_signatures": [ + "com.tencent.bugly" + ], + "license": "NonFree", + "name": "Bugly Android SDK" + }, + "com.tencent.mapsdk": { + "anti_features": [ + "NonFreeNet" + ], + "code_signatures": [ + "com/tencent/tencentmap" + ], + "description": "giving access to Tencent Maps.", + "license": "NonFree" + }, + "com.tenjin.android.TenjinSDK": { + "anti_features": [ + "Tracking" + ], + "code_signatures": [ + "com/tenjin/android/TenjinSDK" + ], + "description": "a marketing platform designed for mobile that features analytics, automated aggregation, and direct data visualization with direct SQL access.", + "license": "NonFree" + }, + "com.umeng.umsdk": { + "code_signatures": [ + "com/umeng" + ], + "documentation": [ + "https://developer.umeng.com/docs/119267/detail/118584" + ], + "gradle_signatures": [ + "com.umeng" + ], + "license": "NonFree", + "name": "Umeng SDK" + }, + "com.wei.android.lib": { + "code_signatures": [ + "com/wei/android/lib/fingerprintidentify" + ], + "documentation": [ + "https://github.com/uccmawei/FingerprintIdentify" + ], + "gradle_signatures": [ + "com.wei.android.lib:fingerprintidentify", + "com.github.uccmawei:FingerprintIdentify" + ], + "gradle_signatures_positive_examples": [ + "implementation \"com.github.uccmawei:fingerprintidentify:${safeExtGet(\"fingerprintidentify\", \"1.2.6\")}\"" + ], + "license": "NonFree", + "name": "FingerprintIdentify" + }, + "com.yandex.android": { + "code_signatures": [ + "com/yandex/android/(?!:authsdk)" + ], + "gradle_signatures": [ + "com\\.yandex\\.android(?!:authsdk)" + ], + "gradle_signatures_negative_examples": [ + "com.yandex.android:authsdk" + ], + "license": "NonFree", + "name": "Yandex SDK" + }, + "com.yandex.metrica": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "com/yandex/metrica" + ], + "description": "a mobile attribution and analytics platform developed by Yandex. It is free, real-time and has no data limits restriction. See Crunchbase and Exodus Privacy.", + "license": "NonFree" + }, + "com.yandex.mobile.ads": { + "anti_features": [ + "Ads", + "NonFreeComp" + ], + "code_signatures": [ + "com/yandex/mobile/ads" + ], + "description": "See Exodus Privacy.", + "license": "NonFree" + }, + "de.epgpaid": { + "anti_features": [ + "NonFreeComp" + ], + "code_signatures": [ + "de/epgpaid" + ], + "description": "access paid EPG (Electronic Program Guide, for TV) data (after payment, of course). 
Part of TVBrowser.", + "license": "NonFree" + }, + "de.innosystec.unrar": { + "code_signatures": [ + "de/innosystec/unrar" + ], + "description": "java unrar util", + "license": "NonFree" + }, + "firebase": { + "code_signatures": [ + "com/google/firebase" + ], + "documentation": [ + "https://www.firebase.com" + ], + "gradle_signatures": [ + "com(\\.google)?\\.firebase[.:](?!firebase-jobdispatcher|geofire-java)", + "com.microsoft.appcenter:appcenter-push" + ], + "gradle_signatures_negative_examples": [ + " compile 'com.firebase:firebase-jobdispatcher:0.8.4'", + "implementation 'com.firebase:geofire-java:3.0.0'", + " compile 'com.firebaseui:firebase-ui-auth:3.1.3'", + "com.firebaseui:firebase-ui-database", + "com.firebaseui:firebase-ui-storage", + "com.github.axet:android-firebase-fake", + "com.github.b3er.rxfirebase:firebase-database", + "com.github.b3er.rxfirebase:firebase-database-kotlin", + "com.segment.analytics.android.integrations:firebase" + ], + "gradle_signatures_positive_examples": [ + "\tcompile 'com.google.firebase:firebase-crash:11.0.8'", + "\tcompile 'com.google.firebase:firebase-core:11.0.8'", + "com.firebase:firebase-client-android:2.5.2", + "com.google.firebase.crashlytics", + "com.google.firebase.firebase-perf", + "com.google.firebase:firebase-ads", + "com.google.firebase:firebase-analytics", + "com.google.firebase:firebase-appindexing", + "com.google.firebase:firebase-auth", + "com.google.firebase:firebase-config", + "com.google.firebase:firebase-core", + "com.google.firebase:firebase-crash", + "com.google.firebase:firebase-crashlytics", + "com.google.firebase:firebase-database", + "com.google.firebase:firebase-dynamic-links", + "com.google.firebase:firebase-firestore", + "com.google.firebase:firebase-inappmessaging", + "com.google.firebase:firebase-inappmessaging-display", + "com.google.firebase:firebase-messaging", + "com.google.firebase:firebase-ml-natural-language", + "com.google.firebase:firebase-ml-natural-language-smart-reply-model", + "com.google.firebase:firebase-ml-vision", + "com.google.firebase:firebase-perf", + "com.google.firebase:firebase-plugins", + "com.google.firebase:firebase-storage" + ], + "license": "NonFree", + "name": "Firebase" + }, + "google-maps": { + "anti_features": [ + "NonFreeDep", + "NonFreeNet" + ], + "api_key_ids": [ + "com\\.google\\.android\\.geo\\.API_KEY", + "com\\.google\\.android\\.maps\\.v2\\.API_KEY" + ], + "documentation": [ + "https://developers.google.com/maps/documentation/android-sdk/overview" + ], + "license": "NonFree", + "name": "Google Maps" + }, + "io.fabric.sdk.android": { + "anti_features": [ + "NonFreeComp", + "Tracking" + ], + "code_signatures": [ + "io/fabric/sdk/android" + ], + "description": "Framework to integrate services. Provides e.g. crash reports and analytics. 
Acquired by Google in 2017.",
+            "license": "NonFree"
+        },
+        "io.github.sinaweibosdk": {
+            "code_signatures": [
+                "com/sina"
+            ],
+            "documentation": [
+                "https://github.com/sinaweibosdk/weibo_android_sdk"
+            ],
+            "gradle_signatures": [
+                "io.github.sinaweibosdk"
+            ],
+            "license": "NonFree",
+            "name": "SinaWeiboSDK"
+        },
+        "io.intercom": {
+            "anti_features": [
+                "NonFreeComp",
+                "NonFreeNet"
+            ],
+            "code_signatures": [
+                "io/intercom"
+            ],
+            "description": "engage customers with email, push, and in\u2011app messages and support them with an integrated knowledge base and help desk.",
+            "license": "NonFree"
+        },
+        "io.objectbox": {
+            "code_signatures": [
+                "io/objectbox"
+            ],
+            "documentation": [
+                "https://objectbox.io/faq/#license-pricing"
+            ],
+            "gradle_signatures": [
+                "io.objectbox:objectbox-gradle-plugin"
+            ],
+            "license": "NonFree",
+            "name": "ObjectBox Database"
+        },
+        "me.pushy": {
+            "code_signatures": [
+                "me/pushy"
+            ],
+            "documentation": [
+                "https://pushy.me/"
+            ],
+            "gradle_signatures": [
+                "me.pushy"
+            ],
+            "license": "NonFree",
+            "name": "Pushy"
+        },
+        "org.gradle.toolchains.foojay-resolver-convention": {
+            "documentation": [
+                "https://github.com/gradle/foojay-toolchains"
+            ],
+            "gradle_signatures": [
+                "org.gradle.toolchains.foojay-resolver"
+            ],
+            "license": "Apache-2.0",
+            "name": "Foojay Toolchains Plugin"
+        },
+        "org.mariuszgromada.math": {
+            "code_signatures": [
+                "org/mariuszgromada/math/mxparser/parsertokens/SyntaxStringBuilder",
+                "org/mariuszgromada/math/mxparser/CalcStepRecord",
+                "org/mariuszgromada/math/mxparser/CalcStepsRegister",
+                "org/mariuszgromada/math/mxparser/License",
+                "org/mariuszgromada/math/mxparser/CloneCache",
+                "org/mariuszgromada/math/mxparser/ElementAtTheEnd",
+                "org/mariuszgromada/math/mxparser/CompilationDetails",
+                "org/mariuszgromada/math/mxparser/CompiledElement"
+            ],
+            "documentation": [
+                "https://mathparser.org",
+                "https://mathparser.org/mxparser-license/"
+            ],
+            "gradle_signatures": [
+                "org.mariuszgromada.math:MathParser.org-mXparser:[5-9]"
+            ],
+            "license": "NonFree",
+            "name": "mXparser"
+        },
+        "tornaco.android.sec": {
+            "anti_features": [
+                "NonFreeComp"
+            ],
+            "code_signatures": [
+                "tornaco/android/sec"
+            ],
+            "description": "proprietary part of the Thanox application",
+            "license": "NonFree"
+        }
+    },
+    "timestamp": 1747829076.702502,
+    "version": 1,
+    "last_updated": 1750710966.431471
+}'''
diff --git a/fdroidserver/server.py b/fdroidserver/server.py
deleted file mode 100644
index 43ea05fd..00000000
--- a/fdroidserver/server.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-#
-# server.py - part of the FDroid server tools
-# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
- -import sys -import os -import subprocess -from optparse import OptionParser -import common - -config = None -options = None - - -def main(): - - global config, options - - # Parse command line... - parser = OptionParser() - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - (options, args) = parser.parse_args() - - config = common.read_config(options) - - if len(args) != 1: - print "Specify a single command" - sys.exit(1) - - if args[0] != 'init' and args[0] != 'update': - print "The only commands currently supported are 'init' and 'update'" - sys.exit(1) - - serverwebroot = config['serverwebroot'].rstrip('/').replace('//', '/') - host, fdroiddir = serverwebroot.split(':') - serverrepobase = os.path.basename(fdroiddir) - if 'nonstandardwebroot' in config and config['nonstandardwebroot'] == True: - standardwebroot = False - else: - standardwebroot = True - if serverrepobase != 'fdroid' and standardwebroot: - print('ERROR: serverwebroot does not end with "fdroid", ' - + 'perhaps you meant one of these:\n\t' - + serverwebroot.rstrip('/') + '/fdroid\n\t' - + serverwebroot.rstrip('/').rstrip(serverrepobase) + 'fdroid') - sys.exit(1) - - repodirs = ['repo'] - if config['archive_older'] != 0: - repodirs.append('archive') - - for repodir in repodirs: - if args[0] == 'init': - if subprocess.call(['ssh', '-v', host, - 'mkdir -p', fdroiddir + '/' + repodir]) != 0: - sys.exit(1) - elif args[0] == 'update': - index = os.path.join(repodir, 'index.xml') - indexjar = os.path.join(repodir, 'index.jar') - if subprocess.call(['rsync', '-u', '-v', '-r', '--delete', - '--exclude', index, '--exclude', indexjar, - repodir, config['serverwebroot']]) != 0: - sys.exit(1) - if subprocess.call(['rsync', '-u', '-v', '-r', '--delete', - index, - config['serverwebroot'] + '/' + repodir]) != 0: - sys.exit(1) - if subprocess.call(['rsync', '-u', '-v', '-r', '--delete', - indexjar, - config['serverwebroot'] + '/' + repodir]) != 0: - sys.exit(1) - - sys.exit(0) - -if __name__ == "__main__": - main() diff --git a/fdroidserver/signatures.py b/fdroidserver/signatures.py new file mode 100644 index 00000000..00c9d264 --- /dev/null +++ b/fdroidserver/signatures.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2017, Michael Poehn +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import logging +import os +import re +import sys +from argparse import ArgumentParser + +from . 
import _, common
+from .exception import FDroidException
+
+
+def extract_signature(apkpath):
+    if not os.path.exists(apkpath):
+        raise FDroidException("APK file does not exist '{}'".format(apkpath))
+    if not common.verify_apk_signature(apkpath):
+        raise FDroidException("no valid signature in '{}'".format(apkpath))
+    logging.debug('signature okay: %s', apkpath)
+
+    appid, vercode, _ignored = common.get_apk_id(apkpath)
+    sigdir = common.metadata_get_sigdir(appid, vercode)
+    if not os.path.exists(sigdir):
+        os.makedirs(sigdir)
+    common.apk_extract_signatures(apkpath, sigdir)
+
+    return sigdir
+
+
+def extract(options):
+    # Create tmp dir if missing…
+    tmp_dir = 'tmp'
+    if not os.path.exists(tmp_dir):
+        os.mkdir(tmp_dir)
+
+    if not options.APK or len(options.APK) <= 0:
+        logging.critical(_('no APK supplied'))
+        sys.exit(1)
+
+    # iterate over the supplied APKs, download and extract them…
+    httpre = re.compile(r'https?:\/\/')
+    for apk in options.APK:
+        try:
+            if os.path.isfile(apk):
+                sigdir = extract_signature(apk)
+                logging.info(
+                    _("Fetched signatures for '{apkfilename}' -> '{sigdir}'").format(
+                        apkfilename=apk, sigdir=sigdir
+                    )
+                )
+            elif httpre.match(apk):
+                if apk.startswith('https') or options.no_check_https:
+                    try:
+                        from . import net
+
+                        tmp_apk = os.path.join(tmp_dir, 'signed.apk')
+                        net.download_file(apk, tmp_apk)
+                        sigdir = extract_signature(tmp_apk)
+                        logging.info(
+                            _(
+                                "Fetched signatures for '{apkfilename}' -> '{sigdir}'"
+                            ).format(apkfilename=apk, sigdir=sigdir)
+                        )
+                    finally:
+                        if tmp_apk and os.path.exists(tmp_apk):
+                            os.remove(tmp_apk)
+                else:
+                    logging.warning(
+                        _(
+                            'refusing to download via insecure HTTP connection '
+                            '(use HTTPS or specify --no-check-https): {apkfilename}'
+                        ).format(apkfilename=apk)
+                    )
+        except FDroidException as e:
+            logging.warning(
+                _("Failed fetching signatures for '{apkfilename}': {error}").format(
+                    apkfilename=apk, error=e
+                )
+            )
+            if e.detail:
+                logging.debug(e.detail)
+
+
+def main():
+    parser = ArgumentParser()
+    common.setup_global_opts(parser)
+    parser.add_argument(
+        "APK", nargs='*', help=_("signed APK, either a file-path or HTTPS URL.")
+    )
+    parser.add_argument("--no-check-https", action="store_true", default=False)
+    options = common.parse_args(parser)
+    common.set_console_logging(options.verbose, options.color)
+    common.read_config()
+
+    extract(options)
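The new signatures module can also be used as a library rather than through the `fdroid signatures` command it implements. The following is only an illustrative sketch, not part of the patch: it assumes a repo directory with a valid config.yml, and 'repo/app.apk' is a hypothetical path to a signed APK.

    from fdroidserver import common, signatures
    from fdroidserver.exception import FDroidException

    common.read_config()  # load config.yml, just like main() does
    try:
        # verifies the APK signature, then extracts the signature files into
        # the per-app directory returned by common.metadata_get_sigdir()
        sigdir = signatures.extract_signature('repo/app.apk')
        print('signature files written to', sigdir)
    except FDroidException as e:
        # raised when the file is missing or its signature does not verify
        print('could not extract signatures:', e)

diff --git a/fdroidserver/signindex.py b/fdroidserver/signindex.py new file mode 100644 index 00000000..47cd5ec2 --- /dev/null +++ b/fdroidserver/signindex.py @@ -0,0 +1,227 @@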
+#!/usr/bin/env python3
+#
+# signindex.py - part of the FDroid server tools
+# Copyright (C) 2015, Ciaran Gultnieks, ciaran@ciarang.com
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import json
+import logging
+import os
+import time
+import zipfile
+from argparse import ArgumentParser
+
+from . import _, common, metadata
+from .exception import FDroidException
+
+config = None
+start_timestamp = time.gmtime()
+
+
+def sign_jar(jar, use_old_algs=False):
+    """Sign a JAR file with the best available algorithm.
+
+    The current signing method uses apksigner to sign the JAR so that
+    it will automatically select algorithms that are compatible with
+    Android SDK 23, which added the most recent algorithms:
+    https://developer.android.com/reference/java/security/Signature
+
+    This signing method then inherits the default signing
+    algorithm settings, since Java and Android both maintain those.
+    That helps avoid a repeat of being stuck on an old signing
+    algorithm.  That means specifically that this call to apksigner
+    does not specify any of the algorithms.
+
+    The old indexes must be signed by SHA1withRSA, otherwise they will
+    no longer be compatible with old Androids.
+
+    This method requires a properly initialized config object.
+
+    """
+    if use_old_algs:
+        # This does use old hashing algorithms, i.e. SHA1, but that's not
+        # broken yet for file verification.  This could be set to SHA256,
+        # but then Android < 4.3 would not be able to verify it.
+        # https://code.google.com/p/android/issues/detail?id=38321
+        args = [
+            config['jarsigner'],
+            '-keystore',
+            config['keystore'],
+            '-storepass:env',
+            'FDROID_KEY_STORE_PASS',
+            '-digestalg',
+            'SHA1',
+            '-sigalg',
+            'SHA1withRSA',
+            jar,
+            config['repo_keyalias'],
+        ]
+        if config['keystore'] == 'NONE':
+            args += config['smartcardoptions']
+        else:  # smartcards never use -keypass
+            args += ['-keypass:env', 'FDROID_KEY_PASS']
+    else:
+        # https://developer.android.com/studio/command-line/apksigner
+        args = [
+            config['apksigner'],
+            'sign',
+            '--min-sdk-version',
+            '23',  # enable all current algorithms
+            '--max-sdk-version',
+            '24',  # avoid future incompatible algorithms
+            # disable all APK signature types, only use JAR sigs aka v1
+            '--v1-signing-enabled',
+            'true',
+            '--v2-signing-enabled',
+            'false',
+            '--v3-signing-enabled',
+            'false',
+            '--v4-signing-enabled',
+            'false',
+            '--ks',
+            config['keystore'],
+            '--ks-pass',
+            'env:FDROID_KEY_STORE_PASS',
+            '--ks-key-alias',
+            config['repo_keyalias'],
+        ]
+        if config['keystore'] == 'NONE':
+            args += common.get_apksigner_smartcardoptions(config['smartcardoptions'])
+        else:  # smartcards never use --key-pass
+            args += ['--key-pass', 'env:FDROID_KEY_PASS']
+        args += [jar]
+    env_vars = {
+        'FDROID_KEY_STORE_PASS': config['keystorepass'],
+        'FDROID_KEY_PASS': config.get('keypass', ""),
+    }
+    p = common.FDroidPopen(args, envs=env_vars)
+    if not use_old_algs and p.returncode != 0:
+        # workaround for apksigner v30 on f-droid.org publish server
+        v4 = args.index("--v4-signing-enabled")
+        del args[v4 + 1]
+        del args[v4]
+        p = common.FDroidPopen(args, envs=env_vars)
+    if p.returncode != 0:
+        raise FDroidException("Failed to sign %s: %s" % (jar, p.output))
+
+
+def sign_index(repodir, json_name):
+    """Sign a data file like entry.json to make a signed JAR like entry.jar.
+
+    A data file like index-v1.json holds the unsigned data.  That file
+    is then put into a JAR and signed by the
+    signing process.  This is a bit different from sign_jar, which is
+    used for index.jar: that creates index.xml then puts that in an
+    index_unsigned.jar, then that file is signed.
+
+    This also checks to make sure that the JSON files are intact
+    before signing them.  Broken JSON files should never be signed, so
+    taking some extra time and failing hard is the preferred
+    option.
This signing process can happen on an entirely separate + machine and file tree, so this ensures that nothing got broken + during transfer. + + """ + json_file = os.path.join(repodir, json_name) + with open(json_file, encoding="utf-8") as fp: + data = json.load(fp) + if json_name == 'entry.json': + index_file = os.path.join(repodir, data['index']['name'].lstrip('/')) + sha256 = common.sha256sum(index_file) + if sha256 != data['index']['sha256']: + raise FDroidException( + _('%s has bad SHA-256: %s') % (index_file, sha256) + ) + with open(index_file) as fp: + index = json.load(fp) + if not isinstance(index, dict): + raise FDroidException(_('%s did not produce a dict!') % index_file) + elif json_name == 'index-v1.json': + [metadata.App(app) for app in data["apps"]] + + name, ext = common.get_extension(json_name) + jar_file = os.path.join(repodir, name + '.jar') + with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar: + jar.write(json_file, json_name) + + if json_name in ('index.xml', 'index-v1.json'): + sign_jar(jar_file, use_old_algs=True) + else: + sign_jar(jar_file) + + +def status_update_json(signed): + """Output a JSON file with metadata about this run.""" + logging.debug(_('Outputting JSON')) + output = common.setup_status_output(start_timestamp) + if signed: + output['signed'] = signed + common.write_status_json(output) + + +def main(): + global config + + parser = ArgumentParser() + common.setup_global_opts(parser) + common.parse_args(parser) + + config = common.read_config() + + if 'jarsigner' not in config: + raise FDroidException( + _( + 'Java jarsigner not found! Install in standard location or set java_paths!' + ) + ) + + repodirs = ['repo'] + if config['archive_older'] != 0: + repodirs.append('archive') + + signed = [] + for output_dir in repodirs: + if not os.path.isdir(output_dir): + raise FDroidException("Missing output directory '" + output_dir + "'") + + unsigned = os.path.join(output_dir, 'index_unsigned.jar') + if os.path.exists(unsigned): + sign_jar(unsigned) + index_jar = os.path.join(output_dir, 'index.jar') + os.rename(unsigned, index_jar) + logging.info('Signed index in ' + output_dir) + signed.append(index_jar) + + json_name = 'index-v1.json' + index_file = os.path.join(output_dir, json_name) + if os.path.exists(index_file): + sign_index(output_dir, json_name) + logging.info('Signed ' + index_file) + signed.append(index_file) + + json_name = 'entry.json' + index_file = os.path.join(output_dir, json_name) + if os.path.exists(index_file): + sign_index(output_dir, json_name) + logging.info('Signed ' + index_file) + signed.append(index_file) + + if not signed: + logging.info(_("Nothing to do")) + status_update_json(signed) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/stats.py b/fdroidserver/stats.py deleted file mode 100644 index c7bfde39..00000000 --- a/fdroidserver/stats.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- -# -# stats.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import sys -import os -import re -import time -import traceback -import glob -from optparse import OptionParser -import paramiko -import common, metadata -import socket -import subprocess - -def carbon_send(key, value): - s = socket.socket() - s.connect((config['carbon_host'], config['carbon_port'])) - msg = '%s %d %d\n' % (key, value, int(time.time())) - s.sendall(msg) - s.close() - -options = None -config = None - -def main(): - - global options, config - - # Parse command line... - parser = OptionParser() - parser.add_option("-v", "--verbose", action="store_true", default=False, - help="Spew out even more information than normal") - parser.add_option("-d", "--download", action="store_true", default=False, - help="Download logs we don't have") - parser.add_option("--nologs", action="store_true", default=False, - help="Don't do anything logs-related") - (options, args) = parser.parse_args() - - config = common.read_config(options) - - if not config['update_stats']: - print "Stats are disabled - check your configuration" - sys.exit(1) - - # Get all metadata-defined apps... - metaapps = metadata.read_metadata(options.verbose) - - statsdir = 'stats' - logsdir = os.path.join(statsdir, 'logs') - datadir = os.path.join(statsdir, 'data') - if not os.path.exists(statsdir): - os.mkdir(statsdir) - if not os.path.exists(logsdir): - os.mkdir(logsdir) - if not os.path.exists(datadir): - os.mkdir(datadir) - - if options.download: - # Get any access logs we don't have... - ssh = None - ftp = None - try: - print 'Retrieving logs' - ssh = paramiko.SSHClient() - ssh.load_system_host_keys() - ssh.connect('f-droid.org', username='fdroid', timeout=10, - key_filename=config['webserver_keyfile']) - ftp = ssh.open_sftp() - ftp.get_channel().settimeout(60) - print "...connected" - - ftp.chdir('logs') - files = ftp.listdir() - for f in files: - if f.startswith('access-') and f.endswith('.log.gz'): - - destpath = os.path.join(logsdir, f) - destsize = ftp.stat(f).st_size - if (not os.path.exists(destpath) or - os.path.getsize(destpath) != destsize): - print "...retrieving " + f - ftp.get(f, destpath) - except Exception: - traceback.print_exc() - sys.exit(1) - finally: - #Disconnect - if ftp is not None: - ftp.close() - if ssh is not None: - ssh.close() - - knownapks = common.KnownApks() - unknownapks = [] - - if not options.nologs: - # Process logs - if options.verbose: - print 'Processing logs...' - apps = {} - logexpr = '(?P[.:0-9a-fA-F]+) - - \[(?P

      Filter SMS and show them in a fake app + souch.smsbypass.9.png + In order to keep away curious eyes, SMS-bypass filters incoming SMS messages +before they reach your inbox. Based on bughunter2.smsfilter. + +Features: + +* Discrete fake app "Battery level": Long tap on Battery percentage will show SMS. +* Filter incoming SMS specified address: redirect the SMS to SMS-bypass messages list; remove SMS arrival sound or vibration; show a discreet notification icon (battery level); vibrate if checked in settings +* Add contact from contact list +* Export messages to a text file + GPL-3.0-only + Phone & SMS + Phone & SMS + https://gitlab.com/souch/SMSbypass + https://gitlab.com/souch/SMSbypass/tree/HEAD + https://gitlab.com/souch/SMSbypass/issues + http://rodolphe.souchaud.free.fr/donate + 0.9 + 9 + + 0.9 + 9 + souch.smsbypass_9.apk + 80b0ae68a1189baa3ee6717092e3dbf1a4210165f7f7e5f2f9616bd63a2ec01d + 81295 + 8 + 18 + 2018-04-26 + e50c99753cd45e2736d52cb49be07581 + READ_CONTACTS,READ_EXTERNAL_STORAGE,RECEIVE_SMS,SEND_SMS,VIBRATE,WRITE_EXTERNAL_STORAGE + + + + info.zwanenburg.caffeinetile + 2018-10-10 + 2018-10-10 + Caffeine Tile + Test app for extracting icons when an XML one is default + No description available + Unknown + Development + Development + + + + + 4 + + 1.3 + 4 + info.zwanenburg.caffeinetile_4.apk + dbbdd7deadb038862f426b71efe4a64df8c3edf25d669e935f349510e16f65db + 11740 + 24 + 25 + 2018-10-10 + 03f9b2f848d22fd1d8d1331e8b1b486d + WAKE_LOCK + + + + duplicate.permisssions + 2017-12-22 + 2017-12-22 + Duplicate Permisssions + Test app for all possible <uses-permissions> + duplicate.permisssions.9999999.png + No description available + Unknown + tests + tests + + + + + 9999999 + + 9999999 + duplicate.permisssions_9999999.apk + 8367857fe75f85321ce2c344b34804d0bc193707f6ba03710d025d9030803434 + 27446 + 18 + 27 + 2017-12-22 + 056c9f1554c40ba59a2103009c82b420 + ACCESS_NETWORK_STATE,ACCESS_WIFI_STATE,CHANGE_WIFI_MULTICAST_STATE,INTERNET,READ_EXTERNAL_STORAGE,WRITE_EXTERNAL_STORAGE + + + + + + + fake.ota.update + 2016-03-10 + 2016-03-10 + fake.ota.update_1234 + Tests whether OTA ZIP files are being include + F-Droid can make use of system privileges or permissions to +install, update and remove applications on its own. The only way to obtain those +privileges is to become a system app. + +This is where the Privileged Extension comes in - being a separate app and much +smaller, it can be installed as a system app and communicate with the main app +via AIDL IPC. + +This has several advantages: + +* Reduced disk usage in the system partition +* System updates don't remove F-Droid +* The process of installing into system via root is safer + +This is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed +using TWRP or other Android recovery that can flash updates to the system from +the /data/data/org.fdroid.fdroid folder on the /data partition. The standalone +APK is called F-Droid Privileged Extension. 
+ Apache-2.0 + System + System + https://f-droid.org + https://gitlab.com/fdroid/privileged-extension + https://gitlab.com/fdroid/privileged-extension/issues + https://f-droid.org/about + 0.2.1 + 2000 + + 897a92a + 1234 + fake.ota.update_1234.zip + 897a92a4ccff4f415f6ba275b2af16d4ecaee60a983b215bddcb9f8964e7a24c + 233 + 2016-03-10 + + + + org.maxsdkversion + 2025-02-17 + 2025-02-17 + maxSdkVersion + Test setting maxSdkVersion in an APK + org.maxsdkversion.4.png + No description available + Unknown + + + + + 4 + + 1.0.3 + 4 + org.maxsdkversion_4.apk + 877d582369d2840fc0d6892e44feaaad21419b0e35af42f22b3e127bcd08274d + 12768 + 14 + 19 + 25 + 2025-02-17 + 1a5e67bcef6b2d6242f2d36982b54589 + CAMERA + android.hardware.camera.front + + + + no.min.target.sdk + 2018-10-10 + 2018-10-10 + No minSdkVersion or targetSdkVersion + An APK without any <uses-sdk> block in AndroidManifest.xml + no.min.target.sdk.987.png + No description available + Unknown + Development + Development + + + + + 987 + + 1.2-fake + 987 + no.min.target.sdk_987.apk + e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c + 14102 + 3 + 2018-10-10 + b4964fd759edaa54e65bb476d0276880 + READ_EXTERNAL_STORAGE,READ_PHONE_STATE,WRITE_EXTERNAL_STORAGE + + + + obb.main.oldversion + 2013-12-31 + 2013-12-31 + OBB Main Old Version + + obb.main.oldversion.1444412523.png + No description available + GPL-3.0-only + Development + Development + + https://github.com/eighthave/urzip + + 1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk + + 99999999 + + 0.1 + 1444412523 + obb.main.oldversion_1444412523.apk + c5f149e526f89c05c62923bdb7bb1e2be5673c46ec85143f41e514340631449c + 14323 + 4 + 18 + main.1434483388.obb.main.oldversion.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + 2013-12-31 + eb41d4d6082bb3e81c3d58dbf7fc7332 + ACCESS_NETWORK_STATE,ACCESS_WIFI_STATE,BLUETOOTH,BLUETOOTH_ADMIN,CHANGE_NETWORK_STATE,CHANGE_WIFI_MULTICAST_STATE,CHANGE_WIFI_STATE,INTERNET,NFC,RECEIVE_BOOT_COMPLETED + + + + + + + obb.main.twoversions + 2015-10-12 + 2016-06-20 + OBB Main Two Versions + + obb.main.twoversions.1101617.png + No description available + GPL-3.0-only + Development + Development + + https://github.com/eighthave/urzip + + 1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk + + 99999999 + + 0.1 + 1101617 + obb.main.twoversions_1101617.apk + obb.main.twoversions_1101617_src.tar.gz + 9bc74566f089ef030ac33e7fbd99d92f1a38f363fb499fed138d9e7b774e821c + 11481 + 4 + 18 + main.1101615.obb.main.twoversions.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + 2016-06-20 + b4964fd759edaa54e65bb476d0276880 + + + 0.1 + 1101615 + obb.main.twoversions_1101615.apk + 7b0b7b9ba248e15751a16e3a0e01e1e24cbb673686c38422030cb75d5c33f0bb + 11480 + 4 + 18 + main.1101615.obb.main.twoversions.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + 2016-01-01 + b4964fd759edaa54e65bb476d0276880 + + + 0.1 + 1101613 + obb.main.twoversions_1101613.apk + cce97a52ff18d843185be7f22ecb1a557c36b7a9f8ba07a8be94e328e00b35dc + 11477 + 4 + 18 + main.1101613.obb.main.twoversions.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + 2015-10-12 + b4964fd759edaa54e65bb476d0276880 + + + + obb.mainpatch.current + 2016-04-23 + 2017-06-01 + OBB Main/Patch Current + + obb.mainpatch.current.1619.png + No description available + GPL-3.0-only + Development + Development + + https://github.com/eighthave/urzip + + 1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk + + 99999999 + + 0.1 + 1619 + obb.mainpatch.current_1619.apk + 
eda5fc3ecfdac3252717e36bdbc9820865baeef162264af9ba5db7364f0e7a0c + 11479 + 4 + 18 + main.1619.obb.mainpatch.current.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + patch.1619.obb.mainpatch.current.obb + d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7 + 2016-04-23 + b4964fd759edaa54e65bb476d0276880 + + + + com.politedroid + 2017-06-23 + 2017-06-23 + Polite Droid + Calendar tool + com.politedroid.6.png + Activates silent mode during calendar events. + GPL-3.0-only + Multimedia,Security,Time + Multimedia + + https://github.com/miguelvps/PoliteDroid + https://github.com/miguelvps/PoliteDroid/issues + 1.5 + 6 + KnownVuln,NoSourceSince,NonFreeAssets,NonFreeNet + + 1.5 + 6 + com.politedroid_6.apk + 70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d + 16578 + 14 + 21 + 2017-06-23 + b4964fd759edaa54e65bb476d0276880 + READ_CALENDAR,RECEIVE_BOOT_COMPLETED + + + 1.4 + 5 + com.politedroid_5.apk + 5bdbfa071cca4b8d05ced41d6b28763595d6e8096cca5bbf0f9253c9a2622e5d + 18817 + 3 + 10 + 2017-06-23 + b4964fd759edaa54e65bb476d0276880 + READ_CALENDAR,RECEIVE_BOOT_COMPLETED + + + 1.3 + 4 + com.politedroid_4.apk + c809bdff83715fbf919f3840ee09869b038e209378b906e135ee40d3f0e1f075 + 18489 + 3 + 2017-06-23 + b4964fd759edaa54e65bb476d0276880 + READ_CALENDAR,READ_EXTERNAL_STORAGE,READ_PHONE_STATE,RECEIVE_BOOT_COMPLETED,WRITE_EXTERNAL_STORAGE + + + 1.2 + 3 + com.politedroid_3.apk + 665d03d61ebc642289fda697f71a59305b0202b16cafc5ffdae91cbe91f0b25d + 17552 + 3 + 2017-06-23 + b4964fd759edaa54e65bb476d0276880 + READ_CALENDAR,READ_EXTERNAL_STORAGE,READ_PHONE_STATE,RECEIVE_BOOT_COMPLETED,WRITE_EXTERNAL_STORAGE + + + + info.guardianproject.urzip + 2016-06-23 + 2016-06-23 + title + 一个实用工具,获取已安装在您的设备上的应用的有关信息 + info.guardianproject.urzip.100.png + It’s Urzip 是一个获得已安装 APK 相关信息的实用工具。它从您的设备上已安装的所有应用开始,一键触摸即可显示 APK 的指纹,并且提供到达 virustotal.com 和 androidobservatory.org 的快捷链接,让您方便地了解特定 APK 的档案。它还可以让您导出签名证书和生成 ApkSignaturePin Pin 文件供 TrustedIntents 库使用。 + +★ Urzip 支持下列语言: Deutsch, English, español, suomi, 日本語, 한국어, Norsk, português (Portugal), Русский, Slovenščina, Türkçe +没看到您的语言?帮忙翻译本应用吧: +https://www.transifex.com/projects/p/urzip + +★ 致用户:我们还缺少你喜欢的功能?发现了一个 bug?请告诉我们!我们乐于听取您的意见。请发送电子邮件至: support@guardianproject.info 或者加入我们的聊天室 https://guardianproject.info/contact + + GPL-3.0-only + Development,GuardianProject,1,2.0 + Development + https://dev.guardianproject.info/projects/urzip + https://github.com/guardianproject/urzip + https://dev.guardianproject.info/projects/urzip/issues + 1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk + f-droid-just-testing + + 2147483647 + + 0.1 + 100 + urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk + 15c0ec72c74a3791f42cdb43c57df0fb11a4dbb656851bbb8cf05b26a8372789 + 11471 + 4 + 18 + 2016-06-23 + b4964fd759edaa54e65bb476d0276880 + + + diff --git a/tests/repo/info.zwanenburg.caffeinetile_4.apk b/tests/repo/info.zwanenburg.caffeinetile_4.apk new file mode 100644 index 00000000..bc54a73e Binary files /dev/null and b/tests/repo/info.zwanenburg.caffeinetile_4.apk differ diff --git a/tests/repo/main.1101613.obb.main.twoversions.obb b/tests/repo/main.1101613.obb.main.twoversions.obb new file mode 100644 index 00000000..421376db --- /dev/null +++ b/tests/repo/main.1101613.obb.main.twoversions.obb @@ -0,0 +1 @@ +dummy diff --git a/tests/repo/main.1101615.obb.main.twoversions.obb b/tests/repo/main.1101615.obb.main.twoversions.obb new file mode 100644 index 00000000..421376db --- /dev/null +++ b/tests/repo/main.1101615.obb.main.twoversions.obb @@ -0,0 +1 @@ +dummy diff --git 
a/tests/repo/main.1434483388.obb.main.oldversion.obb b/tests/repo/main.1434483388.obb.main.oldversion.obb new file mode 100644 index 00000000..421376db --- /dev/null +++ b/tests/repo/main.1434483388.obb.main.oldversion.obb @@ -0,0 +1 @@ +dummy diff --git a/tests/repo/main.1619.obb.mainpatch.current.obb b/tests/repo/main.1619.obb.mainpatch.current.obb new file mode 100644 index 00000000..421376db --- /dev/null +++ b/tests/repo/main.1619.obb.mainpatch.current.obb @@ -0,0 +1 @@ +dummy diff --git a/tests/repo/no.min.target.sdk_987.apk b/tests/repo/no.min.target.sdk_987.apk new file mode 100644 index 00000000..27f8de12 Binary files /dev/null and b/tests/repo/no.min.target.sdk_987.apk differ diff --git a/tests/repo/obb.main.oldversion_1444412523.apk b/tests/repo/obb.main.oldversion_1444412523.apk new file mode 100644 index 00000000..b9c7d7fd Binary files /dev/null and b/tests/repo/obb.main.oldversion_1444412523.apk differ diff --git a/tests/repo/obb.main.twoversions_1101613.apk b/tests/repo/obb.main.twoversions_1101613.apk new file mode 100644 index 00000000..259d0903 Binary files /dev/null and b/tests/repo/obb.main.twoversions_1101613.apk differ diff --git a/tests/repo/obb.main.twoversions_1101615.apk b/tests/repo/obb.main.twoversions_1101615.apk new file mode 100644 index 00000000..0d82052e Binary files /dev/null and b/tests/repo/obb.main.twoversions_1101615.apk differ diff --git a/tests/repo/obb.main.twoversions_1101617.apk b/tests/repo/obb.main.twoversions_1101617.apk new file mode 100644 index 00000000..202d6a03 Binary files /dev/null and b/tests/repo/obb.main.twoversions_1101617.apk differ diff --git a/tests/repo/obb.main.twoversions_1101617_src.tar.gz b/tests/repo/obb.main.twoversions_1101617_src.tar.gz new file mode 100644 index 00000000..3e086c60 Binary files /dev/null and b/tests/repo/obb.main.twoversions_1101617_src.tar.gz differ diff --git a/tests/repo/obb.mainpatch.current/en-US/featureGraphic.png b/tests/repo/obb.mainpatch.current/en-US/featureGraphic.png new file mode 100644 index 00000000..656e749b Binary files /dev/null and b/tests/repo/obb.mainpatch.current/en-US/featureGraphic.png differ diff --git a/tests/repo/obb.mainpatch.current/en-US/icon.png b/tests/repo/obb.mainpatch.current/en-US/icon.png new file mode 100644 index 00000000..4a42916d Binary files /dev/null and b/tests/repo/obb.mainpatch.current/en-US/icon.png differ diff --git a/tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png b/tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png new file mode 100644 index 00000000..39b0ac17 Binary files /dev/null and b/tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png differ diff --git a/tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png b/tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png new file mode 100644 index 00000000..00ce9d0c Binary files /dev/null and b/tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png differ diff --git a/tests/repo/obb.mainpatch.current_1619.apk b/tests/repo/obb.mainpatch.current_1619.apk new file mode 100644 index 00000000..23cf8232 Binary files /dev/null and b/tests/repo/obb.mainpatch.current_1619.apk differ diff --git a/tests/repo/obb.mainpatch.current_1619_another-release-key.apk b/tests/repo/obb.mainpatch.current_1619_another-release-key.apk new file mode 100644 index 00000000..1a494fe2 Binary files /dev/null and 
b/tests/repo/obb.mainpatch.current_1619_another-release-key.apk differ diff --git a/tests/repo/org.maxsdkversion_4.apk b/tests/repo/org.maxsdkversion_4.apk new file mode 100644 index 00000000..39578861 Binary files /dev/null and b/tests/repo/org.maxsdkversion_4.apk differ diff --git a/tests/repo/org.videolan.vlc/en-US/icon.png b/tests/repo/org.videolan.vlc/en-US/icon.png new file mode 100644 index 00000000..0516b951 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/icon.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png new file mode 100644 index 00000000..7aba9dd3 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png new file mode 100644 index 00000000..92f8da5d Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot15.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot15.png new file mode 100644 index 00000000..79aec826 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot15.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot18.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot18.png new file mode 100644 index 00000000..116d66c1 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot18.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot20.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot20.png new file mode 100644 index 00000000..9ca048e7 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot20.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot22.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot22.png new file mode 100644 index 00000000..c61893a3 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot22.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png new file mode 100644 index 00000000..dc4c85a0 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png new file mode 100644 index 00000000..fd86418e Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png new file mode 100644 index 00000000..9ad6569e Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png new file mode 100644 index 00000000..8d32cdd8 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png differ diff --git 
a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png new file mode 100644 index 00000000..01d505a6 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png new file mode 100644 index 00000000..22fa4656 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png new file mode 100644 index 00000000..2346e1de Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png new file mode 100644 index 00000000..e7407579 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png new file mode 100644 index 00000000..2c3a76cf Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png new file mode 100644 index 00000000..05a3704b Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png new file mode 100644 index 00000000..05a3704b Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png new file mode 100644 index 00000000..a896762c Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png new file mode 100644 index 00000000..c9afc66c Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png new file mode 100644 index 00000000..266aff87 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png new file mode 100644 index 00000000..c8fad2f7 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png 
b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png new file mode 100644 index 00000000..c85fbc01 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png new file mode 100644 index 00000000..9c712af1 Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png differ diff --git a/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot8.png b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot8.png new file mode 100644 index 00000000..de12807c Binary files /dev/null and b/tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot8.png differ diff --git a/tests/repo/patch.1619.obb.mainpatch.current.obb b/tests/repo/patch.1619.obb.mainpatch.current.obb new file mode 100644 index 00000000..421376db --- /dev/null +++ b/tests/repo/patch.1619.obb.mainpatch.current.obb @@ -0,0 +1 @@ +dummy diff --git a/tests/repo/souch.smsbypass_9.apk b/tests/repo/souch.smsbypass_9.apk new file mode 100644 index 00000000..ce96f299 Binary files /dev/null and b/tests/repo/souch.smsbypass_9.apk differ diff --git a/tests/repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk b/tests/repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk new file mode 100644 index 00000000..0851e066 Binary files /dev/null and b/tests/repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk differ diff --git a/tests/repo/v1.v2.sig_1020.apk b/tests/repo/v1.v2.sig_1020.apk new file mode 100644 index 00000000..006ff764 Binary files /dev/null and b/tests/repo/v1.v2.sig_1020.apk differ diff --git a/tests/run-tests b/tests/run-tests new file mode 100755 index 00000000..a790d4c7 --- /dev/null +++ b/tests/run-tests @@ -0,0 +1,27 @@ +#!/bin/bash + +set -e # quit script on error + +echo_header() { + { echo -e "==============================================================================\n$1"; } 2>/dev/null +} + +#------------------------------------------------------------------------------# + +if [ ! -d tests ]; then + cd .. +fi + +set -x # show each command as it is executed + +#------------------------------------------------------------------------------# +echo_header "run commit hooks" + +test -x ./hooks/pre-commit && ./hooks/pre-commit + +#------------------------------------------------------------------------------# +echo_header "run unit tests" + +python3 -m unittest -v + +echo SUCCESS diff --git a/tests/shared_test_code.py b/tests/shared_test_code.py new file mode 100644 index 00000000..3e34900b --- /dev/null +++ b/tests/shared_test_code.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2017, Michael Poehn +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import os +import sys +import tempfile +import unittest +import unittest.mock +from pathlib import Path + +GP_FINGERPRINT = 'B7C2EEFD8DAC7806AF67DFCD92EB18126BC08312A7F2D6F3862E46013C7A6135' + + +class VerboseFalseOptions: + verbose = False + + +class TmpCwd: + """Context-manager for temporarily changing the current working directory.""" + + def __init__(self, new_cwd): + self.new_cwd = new_cwd + + def __enter__(self): + self.orig_cwd = os.getcwd() + os.chdir(self.new_cwd) + + def __exit__(self, a, b, c): + os.chdir(self.orig_cwd) + + +class TmpPyPath: + """Context-manager for temporarily adding a directory to Python path.""" + + def __init__(self, additional_path): + self.additional_path = additional_path + + def __enter__(self): + sys.path.append(self.additional_path) + + def __exit__(self, a, b, c): + sys.path.remove(self.additional_path) + + +def mock_open_to_str(mock): + """For accessing all data written into a unittest.mock.mock_open() instance as a string.""" + + return "".join( + [x.args[0] for x in mock.mock_calls if str(x).startswith("call().write(")] + ) + + +def mkdtemp(): + if sys.version_info < (3, 10): # ignore_cleanup_errors was added in 3.10 + return tempfile.TemporaryDirectory() + else: + return tempfile.TemporaryDirectory(ignore_cleanup_errors=True) + + +def mkdir_testfiles(localmodule, test): + """Keep the test files in a labeled test dir for easy reference""" + testroot = Path(localmodule) / '.testfiles' + testroot.mkdir(exist_ok=True) + testdir = testroot / unittest.TestCase.id(test) + testdir.mkdir(exist_ok=True) + return Path(tempfile.mkdtemp(dir=testdir)) + + +def mock_urlopen(status=200, body=None): + resp = unittest.mock.MagicMock() + resp.getcode.return_value = status + resp.read.return_value = body + resp.__enter__.return_value = resp + return unittest.mock.Mock(return_value=resp) diff --git a/tests/signindex/guardianproject-v1.jar b/tests/signindex/guardianproject-v1.jar new file mode 100644 index 00000000..59edc87c Binary files /dev/null and b/tests/signindex/guardianproject-v1.jar differ diff --git a/tests/signindex/guardianproject.jar b/tests/signindex/guardianproject.jar new file mode 100644 index 00000000..946c69ac Binary files /dev/null and b/tests/signindex/guardianproject.jar differ diff --git a/tests/signindex/testy.jar b/tests/signindex/testy.jar new file mode 100644 index 00000000..6d7dd359 Binary files /dev/null and b/tests/signindex/testy.jar differ diff --git a/tests/signindex/unsigned.jar b/tests/signindex/unsigned.jar new file mode 100644 index 00000000..b62c930a Binary files /dev/null and b/tests/signindex/unsigned.jar differ diff --git a/tests/source-files/OtakuWorld/build.gradle b/tests/source-files/OtakuWorld/build.gradle new file mode 100644 index 00000000..cfbc1450 --- /dev/null +++ b/tests/source-files/OtakuWorld/build.gradle @@ -0,0 +1,166 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+buildscript { + ext.kotlin_version = "1.6.21" + ext.latestAboutLibsRelease = "10.3.0" + + ext.coroutinesVersion = "1.6.2" + + ext.coroutinesCore = "org.jetbrains.kotlinx:kotlinx-coroutines-core:$coroutinesVersion" + ext.coroutinesAndroid = "org.jetbrains.kotlinx:kotlinx-coroutines-android:$coroutinesVersion" + ext.coroutinesRX = "org.jetbrains.kotlinx:kotlinx-coroutines-rx2:$coroutinesVersion" + + ext.gson = 'com.google.code.gson:gson:2.9.0' + + ext.glideVersion = "4.13.1" + ext.glide = "com.github.bumptech.glide:glide:$glideVersion" + ext.glideCompiler = "com.github.bumptech.glide:compiler:$glideVersion" + + ext.paging_version = "3.1.1" + + ext.rxkotlin = "io.reactivex.rxjava2:rxkotlin:2.4.0" + ext.rxandroid = "io.reactivex.rxjava2:rxandroid:2.1.1" + ext.rxbinding = 'com.jakewharton.rxbinding2:rxbinding:2.2.0' + ext.rxbindingKotlin = 'com.jakewharton.rxbinding2:rxbinding-kotlin:2.2.0' + + ext.androidCore = 'androidx.core:core-ktx:1.8.0' + ext.appCompat = 'androidx.appcompat:appcompat:1.4.2' + ext.material = 'com.google.android.material:material:1.6.0-beta01' + + ext.preference = "androidx.preference:preference-ktx:1.2.0" + + ext.recyclerview = 'androidx.recyclerview:recyclerview:1.2.1' + ext.constraintlayout = 'androidx.constraintlayout:constraintlayout:2.1.4' + ext.swiperefresh = 'androidx.swiperefreshlayout:swiperefreshlayout:1.1.0' + + ext.jsoup = 'org.jsoup:jsoup:1.15.1' + + ext.crashlytics = 'com.google.firebase:firebase-crashlytics:18.2.8' + ext.analytics = 'com.google.firebase:firebase-analytics:20.1.0' + ext.play_services = 'com.google.android.gms:play-services-auth:20.1.0' + + ext.exoplayer_version = "2.16.1" + ext.room_version = "2.4.2" + + ext.nav_version = "2.4.2" + + def koin_version = "3.0.2" + + // Koin main features for Android (Scope,ViewModel ...) + ext.koinAndroid = "io.insert-koin:koin-android:$koin_version" + // Koin Android - experimental builder extensions + ext.koinAndroidExt = "io.insert-koin:koin-android-ext:$koin_version" + + ext.lottieVersion = "4.2.2" + + ext.coil = "2.1.0" + + ext.jetpack = "1.2.0-rc01" + + ext.accompanist = "0.24.11-rc" + + ext.composeUi = "androidx.compose.ui:ui:$jetpack" + // Tooling support (Previews, etc.) + ext.composeUiTooling = "androidx.compose.ui:ui-tooling:$jetpack" + // Foundation (Border, Background, Box, Image, Scroll, shapes, animations, etc.) 
+ ext.composeFoundation = "androidx.compose.foundation:foundation:$jetpack" + // Material Design + ext.composeMaterial = "androidx.compose.material:material:$jetpack" + ext.materialYou = "androidx.compose.material3:material3:1.0.0-alpha13" + // Material design icons + ext.composeMaterialIconsCore = "androidx.compose.material:material-icons-core:$jetpack" + ext.composeMaterialIconsExtended = "androidx.compose.material:material-icons-extended:$jetpack" + // Integration with activities + ext.composeActivity = 'androidx.activity:activity-compose:1.4.0' + // Integration with ViewModels + ext.composeLifecycle = 'androidx.lifecycle:lifecycle-viewmodel-compose:2.4.1' + // Integration with observables + ext.composeRuntimeLivedata = "androidx.compose.runtime:runtime-livedata:$jetpack" + ext.composeRuntimeRxjava2 = "androidx.compose.runtime:runtime-rxjava2:$jetpack" + ext.composeMaterialThemeAdapter = "com.google.android.material:compose-theme-adapter:1.1.11" + ext.composeMaterial3ThemeAdapter = "com.google.android.material:compose-theme-adapter-3:1.0.11" + ext.landscapistGlide = "com.github.skydoves:landscapist-glide:1.5.2" + ext.composeConstraintLayout = "androidx.constraintlayout:constraintlayout-compose:1.0.1" + ext.composeAnimation = "androidx.compose.animation:animation:$jetpack" + ext.materialPlaceholder = "com.google.accompanist:accompanist-placeholder-material:$accompanist" + ext.drawablePainter = "com.google.accompanist:accompanist-drawablepainter:$accompanist" + ext.permissions = "com.google.accompanist:accompanist-permissions:$accompanist" + ext.uiUtil = "androidx.compose.ui:ui-util:$jetpack" + ext.coilCompose = "io.coil-kt:coil-compose:$coil" + ext.navCompose = "androidx.navigation:navigation-compose:$nav_version" + ext.navMaterial = "com.google.accompanist:accompanist-navigation-material:$accompanist" + ext.navAnimation = "com.google.accompanist:accompanist-navigation-animation:$accompanist" + + ext.swipeRefresh = "com.google.accompanist:accompanist-swiperefresh:$accompanist" + ext.systemUiController = "com.google.accompanist:accompanist-systemuicontroller:$accompanist" + + ext.inset = "com.google.accompanist:accompanist-insets:$accompanist" + // If using insets-ui + ext.insetUi = "com.google.accompanist:accompanist-insets-ui:$accompanist" + + ext.datastore = "androidx.datastore:datastore:1.0.0" + ext.datastorePref = "androidx.datastore:datastore-preferences:1.0.0" + + ext { + jakepurple13Tools = [ + helpfultools: [ + Deps.gsonutils, + Deps.helpfulutils, + Deps.loggingutils, + Deps.dragswipe, + Deps.funutils, + Deps.rxutils + ] + ] + room = [ + room: [ + "androidx.room:room-runtime:$room_version", + "androidx.room:room-ktx:$room_version", + "androidx.room:room-rxjava2:$room_version" + ] + ] + + koin = [koin: [koinAndroid, koinAndroidExt]] + + compose = [ + compose: [ + composeUi, composeUiTooling, composeFoundation, composeMaterial, + composeMaterialIconsCore, composeMaterialIconsExtended, + composeActivity, composeLifecycle, + composeRuntimeLivedata, composeRuntimeRxjava2, + composeMaterialThemeAdapter, composeMaterial3ThemeAdapter, + landscapistGlide, coilCompose, + composeConstraintLayout, permissions, + materialPlaceholder, drawablePainter, uiUtil, + materialYou, + inset, insetUi, + navCompose, navMaterial, navAnimation, + swipeRefresh, systemUiController + ] + ] + + firebaseCrash = [ crash: [crashlytics, analytics] ] + + datastore = [ datastore: [datastore, datastorePref] ] + } + + repositories { + google() + gradlePluginPortal() + mavenCentral() + } + dependencies { + classpath 
'com.android.tools.build:gradle:7.1.2' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + classpath 'com.google.gms:google-services:4.3.10' + classpath 'com.google.firebase:firebase-crashlytics-gradle:2.8.1' + classpath "com.mikepenz.aboutlibraries.plugin:aboutlibraries-plugin:10.3.0" + classpath "org.jetbrains.kotlin:kotlin-serialization:$kotlin_version" + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + classpath "androidx.navigation:navigation-safe-args-gradle-plugin:$nav_version" + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/tests/source-files/Zillode/syncthing-silk/build.gradle b/tests/source-files/Zillode/syncthing-silk/build.gradle new file mode 100644 index 00000000..873a64c7 --- /dev/null +++ b/tests/source-files/Zillode/syncthing-silk/build.gradle @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2015 OpenSilk Productions LLC + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +// Top-level build file where you can add configuration options common to all sub-projects/modules. + +buildscript { + repositories { + mavenCentral() + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:1.1.3' + classpath 'me.tatarka:gradle-retrolambda:2.5.0' + classpath 'org.robolectric:robolectric-gradle-plugin:1.0.1' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + mavenCentral() + jcenter() + maven { url '../../m2/repository' } + maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } + } +} + +// Build config +ext.compileSdkVersion = 22 +ext.buildToolsVersion = "22.0.1" + +// defaultConfig +ext.targetSdkVersion = 22 + +ext.supportLibVersion = "22.1.1" +ext.dagger2Version = "2.0" +ext.rxAndroidVersion = "0.23.0" +ext.timberVersion = "2.5.0" +ext.commonsLangVersion = "3.3.2" +ext.butterKnifeVersion = "6.0.0" +ext.commonsIoVersion = "2.4" +ext.gsonVersion = "2.3" + +def gitSha() { + return 'git rev-parse --short HEAD'.execute().text.trim() +} + +def getDebugVersionSuffix() { + return "${gitSha()}".isEmpty() ? "-SNAPSHOT" : "-SNAPSHOT-${gitSha()}" +} diff --git a/tests/source-files/at.bitfire.davdroid/build.gradle b/tests/source-files/at.bitfire.davdroid/build.gradle new file mode 100644 index 00000000..5bac8b01 --- /dev/null +++ b/tests/source-files/at.bitfire.davdroid/build.gradle @@ -0,0 +1,108 @@ +/* + * Copyright (c) Ricki Hirner (bitfire web engineering). + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the GNU Public License v3.0 + * which accompanies this distribution, and is available at + * http://www.gnu.org/licenses/gpl.html + */ + +apply plugin: 'com.android.application' +apply plugin: 'kotlin-android' +apply plugin: 'kotlin-android-extensions' +apply plugin: 'org.jetbrains.dokka-android' + +android { + compileSdkVersion 27 + buildToolsVersion '27.0.1' + + defaultConfig { + applicationId "at.bitfire.davdroid" + + versionCode 197 + buildConfigField "long", "buildTime", System.currentTimeMillis() + "L" + buildConfigField "boolean", "customCerts", "true" + + minSdkVersion 19 // Android 4.4 + targetSdkVersion 27 // Android 8.1 + + // when using this, make sure that notification icons are real bitmaps + vectorDrawables.useSupportLibrary = true + } + + flavorDimensions "type" + + productFlavors { + standard { + versionName "1.9.8.1-ose" + + buildConfigField "boolean", "customCerts", "true" + buildConfigField "boolean", "customCertsUI", "true" + } + } + + buildTypes { + debug { + minifyEnabled false + } + release { + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } + + lintOptions { + disable 'GoogleAppIndexingWarning' // we don't need Google indexing, thanks + disable 'ImpliedQuantity', 'MissingQuantity' // quantities from Transifex may vary + disable 'MissingTranslation', 'ExtraTranslation' // translations from Transifex are not always up to date + disable "OnClick" // doesn't recognize Kotlin onClick methods + disable 'Recycle' // doesn't understand Lombok's @Cleanup + disable 'RtlEnabled' + disable 'RtlHardcoded' + disable 'Typos' + } + packagingOptions { + exclude 'META-INF/DEPENDENCIES' + exclude 'META-INF/LICENSE' + } + + defaultConfig { + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + } +} + +dependencies { + compile project(':cert4android') + compile project(':dav4android') + compile project(':ical4android') + compile project(':vcard4android') + + compile "org.jetbrains.kotlin:kotlin-stdlib-jre7:$kotlin_version" + + compile 'com.android.support:appcompat-v7:27.0.2' + compile 'com.android.support:cardview-v7:27.0.2' + compile 'com.android.support:design:27.0.2' + compile 'com.android.support:preference-v14:27.0.2' + + compile 'com.github.yukuku:ambilwarna:2.0.1' + + compile 'com.squareup.okhttp3:logging-interceptor:3.9.1' + compile 'commons-io:commons-io:2.6' + compile 'dnsjava:dnsjava:2.1.8' + compile 'org.apache.commons:commons-lang3:3.6' + compile 'org.apache.commons:commons-collections4:4.1' + + // for tests + //noinspection GradleDynamicVersion + androidTestCompile('com.android.support.test:runner:+') { + exclude group: 'com.android.support', module: 'support-annotations' + } + //noinspection GradleDynamicVersion + androidTestCompile('com.android.support.test:rules:+') { + exclude group: 'com.android.support', module: 'support-annotations' + } + androidTestCompile 'junit:junit:4.12' + androidTestCompile 'com.squareup.okhttp3:mockwebserver:3.9.1' + + testCompile 'junit:junit:4.12' + testCompile 'com.squareup.okhttp3:mockwebserver:3.9.1' +} diff --git a/tests/source-files/catalog.test/app/build.gradle b/tests/source-files/catalog.test/app/build.gradle new file mode 100644 index 00000000..72c9d184 --- /dev/null +++ b/tests/source-files/catalog.test/app/build.gradle @@ -0,0 +1,2 @@ +implementation libs.bundles.firebase +implementation libs.play.service.ads diff --git 
a/tests/source-files/catalog.test/build.gradle.kts b/tests/source-files/catalog.test/build.gradle.kts new file mode 100644 index 00000000..5572706f --- /dev/null +++ b/tests/source-files/catalog.test/build.gradle.kts @@ -0,0 +1,5 @@ +plugins { + alias(libs.plugins.google.services) + alias(libs.plugins.firebase.crashlytics) + alias(projectLibs.plugins.firebase.crashlytics) +} diff --git a/tests/source-files/catalog.test/buildSrc/build.gradle.kts b/tests/source-files/catalog.test/buildSrc/build.gradle.kts new file mode 100644 index 00000000..40eeaa54 --- /dev/null +++ b/tests/source-files/catalog.test/buildSrc/build.gradle.kts @@ -0,0 +1,9 @@ +plugins { + alias(libs.plugins.google.services) + alias(libs.plugins.firebase.crashlytics) + alias(projectLibs.plugins.firebase.crashlytics) +} + +dependencies { + implementation(libs.plugins.androidApplication.asLibraryDependency) +} diff --git a/tests/source-files/catalog.test/buildSrc/settings.gradle.kts b/tests/source-files/catalog.test/buildSrc/settings.gradle.kts new file mode 100644 index 00000000..98644daf --- /dev/null +++ b/tests/source-files/catalog.test/buildSrc/settings.gradle.kts @@ -0,0 +1,22 @@ +pluginManagement { + repositories { + gradlePluginPortal() + } +} + +dependencyResolutionManagement { + repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) + repositories { + google() + mavenCentral() + } + + versionCatalogs { + create("libs") { + from(files("../gradle/libs.versions.toml")) + } + } +} + +rootProject.name = "buildSrc" +rootProject.buildFileName = "buildSrc.gradle.kts" diff --git a/tests/source-files/catalog.test/buildSrc2/build.gradle.kts b/tests/source-files/catalog.test/buildSrc2/build.gradle.kts new file mode 100644 index 00000000..5572706f --- /dev/null +++ b/tests/source-files/catalog.test/buildSrc2/build.gradle.kts @@ -0,0 +1,5 @@ +plugins { + alias(libs.plugins.google.services) + alias(libs.plugins.firebase.crashlytics) + alias(projectLibs.plugins.firebase.crashlytics) +} diff --git a/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts b/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts new file mode 100644 index 00000000..98644daf --- /dev/null +++ b/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts @@ -0,0 +1,22 @@ +pluginManagement { + repositories { + gradlePluginPortal() + } +} + +dependencyResolutionManagement { + repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) + repositories { + google() + mavenCentral() + } + + versionCatalogs { + create("libs") { + from(files("../gradle/libs.versions.toml")) + } + } +} + +rootProject.name = "buildSrc" +rootProject.buildFileName = "buildSrc.gradle.kts" diff --git a/tests/source-files/catalog.test/core/build.gradle b/tests/source-files/catalog.test/core/build.gradle new file mode 100644 index 00000000..72c9d184 --- /dev/null +++ b/tests/source-files/catalog.test/core/build.gradle @@ -0,0 +1,2 @@ +implementation libs.bundles.firebase +implementation libs.play.service.ads diff --git a/tests/source-files/catalog.test/gradle/libs.versions.toml b/tests/source-files/catalog.test/gradle/libs.versions.toml new file mode 100644 index 00000000..9fb3707f --- /dev/null +++ b/tests/source-files/catalog.test/gradle/libs.versions.toml @@ -0,0 +1,18 @@ +[versions] +firebase = "1.1.1" +gms = "1.2.1" +androidGradlePlugin = "8.12.0" + +[libraries] +firebase-crash = { module = "com.google.firebase:firebase-crash", version.ref = "firebase" } +firebase_core = { module = "com.google.firebase:firebase-core", version = "2.2.2" } +"play.service.ads" = { 
module = "com.google.android.gms:play-services-ads", version.ref = "gms" } +jacoco = "org.jacoco:org.jacoco.core:0.8.7" + +[plugins] +google-services = { id = "com.google.gms.google-services", version.ref = "gms" } +firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase" } +androidApplication = { id = "com.android.application", version.ref = "androidGradlePlugin" } + +[bundles] +firebase = ["firebase-crash", "firebase_core"] diff --git a/tests/source-files/catalog.test/libs.versions.toml b/tests/source-files/catalog.test/libs.versions.toml new file mode 100644 index 00000000..666a0f7f --- /dev/null +++ b/tests/source-files/catalog.test/libs.versions.toml @@ -0,0 +1,15 @@ +[versions] +firebase = "1.1.1" +gms = "1.2.1" + +[libraries] +firebase-crash = { module = "com.google.firebase:firebase-crash", version.ref = "firebase" } +firebase_core = { module = "com.google.firebase:firebase-core", version = "2.2.2" } +"play.service.ads" = { module = "com.google.android.gms:play-services-ads", version.ref = "gms"} + +[plugins] +google-services = { id = "com.google.gms.google-services", version.ref = "gms" } +firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase" } + +[bundles] +firebase = ["firebase-crash", "firebase_core"] diff --git a/tests/source-files/catalog.test/settings.gradle.kts b/tests/source-files/catalog.test/settings.gradle.kts new file mode 100644 index 00000000..fd9ba80c --- /dev/null +++ b/tests/source-files/catalog.test/settings.gradle.kts @@ -0,0 +1,14 @@ +dependencyResolutionManagement { + repositories { + mavenCentral() + } + defaultLibrariesExtensionName = "projectLibs" + versionCatalogs { + create("libs") { + from(files("./libs.versions.toml")) + } + create("anotherLibs") { + from(files("$rootDir/libs.versions.toml")) + } + } +} diff --git a/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle b/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle new file mode 100644 index 00000000..02955512 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle @@ -0,0 +1,2 @@ +configurations.maybeCreate("default") +artifacts.add("default", file('avenginekit.aar')) \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/build.gradle b/tests/source-files/cn.wildfirechat.chat/build.gradle new file mode 100644 index 00000000..acc41375 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/build.gradle @@ -0,0 +1,41 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + + repositories { + google() + jcenter() + mavenCentral() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.4.2' + classpath 'com.github.dcendents:android-maven-gradle-plugin:2.1' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + maven { + url "http://developer.huawei.com/repo/" + } + + maven { url 'https://jitpack.io' } + maven { url 'https://dl.bintray.com/jenly/maven' } + } + configurations { + all { + resolutionStrategy { + //force "android.arch.lifecycle:runtime:1.1.1" + } + } + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/tests/source-files/cn.wildfirechat.chat/chat/build.gradle b/tests/source-files/cn.wildfirechat.chat/chat/build.gradle new file mode 100644 index 00000000..f2503356 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/chat/build.gradle @@ -0,0 +1,115 @@ +apply plugin: 'com.android.application' + +android { + signingConfigs { + wfc { + keyAlias 'wfc' + keyPassword 'wildfirechat' + storeFile file('../wfc.keystore') + storePassword 'wildfirechat' + } + } + compileSdkVersion 28 + aaptOptions.cruncherEnabled = false + aaptOptions.useNewCruncher = false + defaultConfig { + applicationId "cn.wildfirechat.chat" + minSdkVersion 16 + targetSdkVersion 28 // when targetSdkVersion is above 23, a FileProvider must be used + versionCode 23 + versionName "0.6.9" + multiDexEnabled true + javaCompileOptions { + annotationProcessorOptions { + includeCompileClasspath true + } + } + signingConfig signingConfigs.wfc + +// buildConfigField("String", "BuglyId", '"34490ba79f"') + + ndk { + abiFilters "armeabi-v7a", 'x86', 'x86_64' // ,'armeabi', 'arm64-v8a', 'x86', 'x86_64' + } + } + buildTypes { + release { + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + signingConfig signingConfigs.wfc + } + debug { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + signingConfig signingConfigs.wfc + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + lintOptions { + abortOnError false + } + sourceSets { + main { + // wfc kit start + jniLibs.srcDirs += ['libs', 'kit/libs'] + res.srcDirs += ['kit/src/main/res', 'kit/src/main/res-av'] + assets.srcDirs += ['kit/src/main/assets'] + java.srcDirs += ['kit/src/main/java'] + // wfc kit end + } + } + productFlavors { + } + + compileOptions { + sourceCompatibility 1.8 + targetCompatibility 1.8 + } +} + +dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation files('libs/TencentLocationSDK_v4.9.7.12_r247861_161205_1104.jar') + implementation files('libs/TencentMapSDK_Raster_v_1.2.7_51ae0e7.jar') + implementation files('libs/TencentSearch1.1.3.jar') + + implementation 'com.tencent.bugly:crashreport:2.8.6.0' + implementation 'com.tencent.bugly:nativecrashreport:3.6.0.1' + implementation 'com.lqr.adapter:library:1.0.2' + implementation 'com.jaeger.statusbaruitl:library:1.3.5' + implementation project(':push') + + // wfc kit start + implementation fileTree(include: ['*.jar'], dir: 'kit/libs') + implementation 'androidx.appcompat:appcompat:1.1.0-beta01' + implementation 'com.google.android.material:material:1.1.0-alpha10' + implementation 'cjt.library.wheel:camera:1.1.9' + implementation 'com.kyleduo.switchbutton:library:1.4.4' + implementation 
'com.squareup.okhttp3:okhttp:3.11.0' + implementation 'com.squareup.okio:okio:1.14.0' + implementation 'com.jakewharton:butterknife:10.2.0' + annotationProcessor 'com.jakewharton:butterknife-compiler:10.2.0' + implementation 'com.github.bumptech.glide:glide:4.8.0' + annotationProcessor 'com.github.bumptech.glide:compiler:4.8.0' + implementation 'com.github.chrisbanes:PhotoView:2.3.0' + implementation 'org.webrtc:google-webrtc:1.0.21929' + implementation 'com.afollestad.material-dialogs:core:0.9.6.0' + implementation 'q.rorbin:badgeview:1.1.3' + implementation 'com.google.code.gson:gson:2.8.5' + + // ViewModel and LiveData + def lifecycle_version = '2.2.0-alpha05' + implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" + + implementation project(':client') + implementation project(':avenginekit') + implementation project(':emojilibrary') + implementation project(':imagepicker') + + implementation 'com.king.zxing:zxing-lite:1.1.1' + implementation 'androidx.swiperefreshlayout:swiperefreshlayout:1.0.0' + // kit wfc end +} diff --git a/tests/source-files/cn.wildfirechat.chat/client/build.gradle b/tests/source-files/cn.wildfirechat.chat/client/build.gradle new file mode 100644 index 00000000..ce41d062 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/client/build.gradle @@ -0,0 +1,57 @@ +apply plugin: 'com.android.library' +apply plugin: 'com.github.dcendents.android-maven' + +group = 'com.github.wildfirechat' + +android { + compileSdkVersion 28 + + + defaultConfig { + minSdkVersion 16 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + // testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + ndk { + // TODO: change this for your application if needed + moduleName = "mmnet" + //abiFilter "armeabi" // drop the armeabi ABI; armeabi-v7a is backward compatible with armeabi. + abiFilter "armeabi-v7a" + abiFilter "arm64-v8a" + abiFilter "x86" + abiFilter "x86_64" + } + } + + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + + + sourceSets { + main { + jniLibs.srcDirs = ['libs'] + } + } + + lintOptions { + abortOnError false + } + compileOptions { + targetCompatibility 1.8 + sourceCompatibility 1.8 + } +} + + +dependencies { + api project(':mars-core-release') + def lifecycle_version = '2.0.0-beta01' + implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" +} diff --git a/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml b/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml new file mode 100644 index 00000000..0c056938 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle b/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle new file mode 100755 index 00000000..50ea5f5a --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle @@ -0,0 +1,34 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + + defaultConfig { + minSdkVersion 16 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + + android { + lintOptions { + abortOnError false + } + } + +} 
+ +dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation 'androidx.appcompat:appcompat:1.0.0-beta01' +} \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle b/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle new file mode 100644 index 00000000..42020666 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle @@ -0,0 +1,42 @@ +def checkExecResult(execResult) { + if (execResult) { + if (execResult.getExitValue() != 0) { + throw new GradleException('Non-zero exit value: ' + execResult.getExitValue()) + } + + } else { + throw new GradleException('Returned a null execResult object') + } +} + +task buildLibrariesForAndroid(type: Exec) { + workingDir '../' + + def sdkDir = System.env.ANDROID_HOME + def ndkDir = System.env.ANDROID_NDK_HOME + + if (rootProject.file("local.properties").exists()) { + Properties properties = new Properties() + properties.load(project.rootProject.file('local.properties').newDataInputStream()) + sdkDir = properties.getProperty('sdk.dir') + ndkDir = properties.getProperty('ndk.dir') + } + + def path = System.env.PATH + + def envMap = [ + 'ANDROID_HOME' : sdkDir, + 'ANDROID_NDK_HOME': ndkDir, + '_ARCH_' : 'armeabi', + 'PATH' : ndkDir, + ] + environment envMap + + print envMap + + commandLine 'python', 'build_android.py', '2', 'armeabi' + + doLast { + checkExecResult(execResult) + } +} diff --git a/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle b/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle new file mode 100755 index 00000000..66b153a1 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle @@ -0,0 +1,30 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + defaultConfig { + minSdkVersion 16 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + lintOptions { + abortOnError false + } +} + +dependencies { + implementation 'androidx.appcompat:appcompat:1.0.0-beta01' + implementation 'com.github.chrisbanes.photoview:library:1.2.4' + implementation 'com.github.bumptech.glide:glide:4.8.0' +} diff --git a/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle b/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle new file mode 100644 index 00000000..65e8c7fe --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle @@ -0,0 +1,2 @@ +configurations.maybeCreate("default") +artifacts.add("default", file('mars-core-release.aar')) \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/push/build.gradle b/tests/source-files/cn.wildfirechat.chat/push/build.gradle new file mode 100644 index 00000000..26f5cbe2 --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/push/build.gradle @@ -0,0 +1,55 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + + defaultConfig { + minSdkVersion 16 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + manifestPlaceholders = [ + + MI_APP_ID : "2882303761517722456", + MI_APP_KEY : "5731772292456", + + HMS_APP_ID : "100221325", + + MEIZU_APP_ID : "113616", + MEIZU_APP_KEY: 
"fcd886f51c144b45b87a67a28e2934d1", + + VIVO_APP_ID : "12918", + VIVO_APP_KEY : "c42feb05-de6c-427d-af55-4f902d9e0a75" + ] + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility 1.8 + targetCompatibility 1.8 + } + +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + + implementation 'com.huawei.android.hms:push:2.5.3.305' + implementation 'com.huawei.android.hms:base:2.5.3.305' + + implementation 'androidx.appcompat:appcompat:1.0.0-beta01' + implementation project(':client') + implementation 'com.meizu.flyme.internet:push-internal:3.4.2@aar' + + def lifecycle_version = '2.2.0-alpha05' + implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" +} diff --git a/tests/source-files/cn.wildfirechat.chat/settings.gradle b/tests/source-files/cn.wildfirechat.chat/settings.gradle new file mode 100644 index 00000000..e98e916c --- /dev/null +++ b/tests/source-files/cn.wildfirechat.chat/settings.gradle @@ -0,0 +1,7 @@ +include ':client', + ':push', + ':chat', + ':mars-core-release', + ':emojilibrary', + ':imagepicker', + ':avenginekit' diff --git a/tests/source-files/com.anpmech.launcher/app/build.gradle b/tests/source-files/com.anpmech.launcher/app/build.gradle new file mode 100644 index 00000000..97d2e4fc --- /dev/null +++ b/tests/source-files/com.anpmech.launcher/app/build.gradle @@ -0,0 +1,76 @@ +/* + * Copyright 2015-2017 Hayai Software + * Copyright 2018 The KeikaiLauncher Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. + */ +plugins { + id 'com.android.application' + id 'pl.allegro.tech.build.axion-release' version '1.8.1' +} + +scmVersion { + tag { + prefix = '' + } +} + +/** + * Takes version {@code major.minor.patch[-suffix]} and returns numeric versionCode based on it + * Example: {@code 1.2.3-SNAPSHOT} will return {@code 1002003} + */ +static int versionCode(String versionName) { + def matcher = (versionName =~ /(\d+)\.(\d+)\.(\d+).*/) + return matcher.matches() ? 
+ matcher.collect { version, major, minor, patch -> + major.toInteger() * 10000 + minor.toInteger() * 100 + patch.toInteger() + }.head() : + -1 +} + +def androidVersion = [ + name: scmVersion.version, + code: versionCode(scmVersion.version), +] + +android { + compileSdkVersion 28 + defaultConfig { + applicationId 'com.anpmech.launcher' + minSdkVersion 15 + targetSdkVersion 28 + versionName androidVersion.name + versionCode androidVersion.code + } + lintOptions { + abortOnError false + } + buildTypes { + all { + buildConfigField("String", "GITHUB_USER", "\"KeikaiLauncher\"") + buildConfigField("String", "GITHUB_PROJECT", "\"KeikaiLauncher\"") + } + release { + minifyEnabled true + shrinkResources true + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.txt' + } + debug { + versionNameSuffix "-debug" + } + } + dependencies { + implementation 'com.android.support:support-annotations:28.0.0' + } +} + +dependencies { +} diff --git a/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml b/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..77c4e22f --- /dev/null +++ b/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/source-files/com.anpmech.launcher/build.gradle b/tests/source-files/com.anpmech.launcher/build.gradle new file mode 100644 index 00000000..a92bf663 --- /dev/null +++ b/tests/source-files/com.anpmech.launcher/build.gradle @@ -0,0 +1,45 @@ +/* + * Copyright 2015-2017 Hayai Software + * Copyright 2018 The KeikaiLauncher Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Top-level build file where you can add configuration options common to all sub-projects/modules. +buildscript { + repositories { + jcenter() + google() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.2.1' + } +} + +allprojects { + repositories { + jcenter() + google() + } +} + +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath 'org.owasp:dependency-check-gradle:5.2.4' + } +} +apply plugin: 'org.owasp.dependencycheck' +dependencyCheck { + format='JSON' +} diff --git a/tests/source-files/com.anpmech.launcher/settings.gradle b/tests/source-files/com.anpmech.launcher/settings.gradle new file mode 100644 index 00000000..4d775aa7 --- /dev/null +++ b/tests/source-files/com.anpmech.launcher/settings.gradle @@ -0,0 +1,16 @@ +/* + * Copyright 2015-2017 Hayai Software + * Copyright 2018 The KeikaiLauncher Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND + * either express or implied. See the License for the specific language governing permissions and + * limitations under the License. + */ + +include ':app' diff --git a/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle b/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle new file mode 100644 index 00000000..bb55589e --- /dev/null +++ b/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle @@ -0,0 +1,232 @@ +apply plugin: 'com.android.application' +apply plugin: 'com.google.gms.google-services' +apply plugin: 'kotlin-android' + +apply from: '../jacoco.gradle' +apply from: '../ktlint.gradle' +apply from: '../detekt.gradle' +apply from: '../checkstyle.gradle' +apply from: '../sonarcube.gradle' + +def localPropertiesFile = rootProject.file("local.properties") +def localProperties = new Properties() + +if (!localPropertiesFile.exists()) { + localProperties.setProperty("RELEASE_STORE_PASSWORD", "") + localProperties.setProperty("RELEASE_KEY_PASSWORD", "") + localProperties.setProperty("RELEASE_KEY_ALIAS", "") + localProperties.setProperty("RELEASE_STORE_FILE", "keystore.jks") + Writer writer = new FileWriter(localPropertiesFile, false) + localProperties.store(writer, "empty, as creating the file is done manually via gpg") + writer.close() + + file(project(':app').projectDir.path + "/keystore.jks").text = "" +} + +localProperties.load(new FileInputStream(localPropertiesFile)) + +android { + compileSdkVersion 30 + // compileSdkVersion "android-S" + + signingConfigs { + googleplay { + keyAlias localProperties['RELEASE_KEY_ALIAS'] + keyPassword localProperties['RELEASE_KEY_PASSWORD'] + storeFile file(localProperties['RELEASE_STORE_FILE']) + storePassword localProperties['RELEASE_STORE_PASSWORD'] + } + } + + defaultConfig { + minSdkVersion 24 + targetSdkVersion 30 + // minSdkVersion "S" + // targetSdkVersion "S" + + applicationId "com.github.jameshnsears.quoteunquote" + + versionCode 73 + versionName "2.5.2" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + testInstrumentationRunnerArguments clearPackageData: 'true' + + javaCompileOptions { + annotationProcessorOptions { + arguments += ["room.schemaLocation": + "$projectDir/schemas".toString()] + } + } + } + + packagingOptions { + exclude "**/module-info.class" + exclude 'LICENSE' + exclude 'README.md' + } + + lintOptions { + abortOnError true + warningsAsErrors false + checkAllWarnings = true + xmlReport false + htmlReport true + } + + buildTypes { + def gitHash = { -> + def stdout = new ByteArrayOutputStream() + exec { + commandLine 'git', 'rev-parse', '--short=8', 'HEAD' + standardOutput = stdout + } + return stdout.toString().trim() + } + + release { + minifyEnabled true + shrinkResources true + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + + buildConfigField("String", "GIT_HASH", "\"$gitHash\"") + buildConfigField("String", "DATABASE_QUOTATIONS", "\"quotations.db.prod\"") + } + debug { + testCoverageEnabled true + buildConfigField("String", "GIT_HASH", "\"$gitHash\"") + buildConfigField("String", "DATABASE_QUOTATIONS", "\"quotations.db.dev\"") + } + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility 
JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = JavaVersion.VERSION_1_8.toString() + } + + flavorDimensions 'Version' + productFlavors { + 'googleplay' { + dimension 'Version' + versionNameSuffix "-googleplay" + signingConfig signingConfigs.googleplay + } + 'googleplayS' { + dimension 'Version' + versionNameSuffix "-googleplay-S" + signingConfig signingConfigs.googleplay + } + 'fdroid' { + dimension 'Version' + versionNameSuffix "-fdroid" + isDefault true + } + 'fdroidS' { + dimension 'Version' + versionNameSuffix "-fdroid-S" + } + } + + sourceSets { + androidTest { + assets.srcDirs += files("$projectDir/schemas".toString()) + } + fdroid { + assets.srcDirs = ['src/main/assets'] + java.srcDirs = ['src/main/java', 'src/fdroid/java'] + } + fdroidS { + assets.srcDirs = ['src/main/assets'] + java.srcDirs = ['src/main/java', 'src/fdroid/java'] + } + googleplay { + assets.srcDirs = ['src/main/assets'] + java.srcDirs = ['src/main/java'] + } + googleplayS { + assets.srcDirs = ['src/main/assets'] + java.srcDirs = ['src/main/java'] + } + } + + testOptions { + // will make tests run very slowly on the emulator/device + affects coverage # + // execution 'ANDROIDX_TEST_ORCHESTRATOR' + + animationsDisabled true + unitTests { + includeAndroidResources = true + returnDefaultValues = true + all { + maxHeapSize = "1024m" + jacoco { + includeNoLocationClasses = true + excludes = ['jdk.internal.*'] + } + } + } + } + + buildFeatures { + viewBinding = true + } +} + +dependencies { + androidTestImplementation "androidx.arch.core:core-testing:2.1.0" + androidTestImplementation 'androidx.room:room-testing:2.3.0' + androidTestImplementation 'androidx.test:core:1.4.0-beta01' + androidTestImplementation 'androidx.test.ext:junit:1.1.2' + androidTestImplementation 'androidx.test:rules:1.3.0' + androidTestImplementation 'androidx.test:runner:1.3.0' + androidTestImplementation 'io.mockk:mockk-android:1.11.0' + + annotationProcessor 'androidx.room:room-compiler:2.3.0' + + debugImplementation 'androidx.fragment:fragment-testing:1.3.4' + debugImplementation 'androidx.test:core:1.4.0-beta01' + debugImplementation 'com.squareup.leakcanary:leakcanary-android:2.7' + + implementation 'androidx.activity:activity:1.2.3' + implementation 'androidx.fragment:fragment:1.3.4' + implementation 'androidx.constraintlayout:constraintlayout:2.0.4' + implementation 'androidx.core:core-ktx:1.5.0' + fdroidSImplementation 'androidx.core:core-ktx:1.6.0-beta02' + googleplaySImplementation 'androidx.core:core-ktx:1.6.0-beta02' + implementation 'androidx.legacy:legacy-support-v4:1.0.0' + implementation 'androidx.lifecycle:lifecycle-common-java8:2.3.1' + implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0' + implementation 'androidx.lifecycle:lifecycle-viewmodel-ktx:2.3.1' + implementation 'androidx.multidex:multidex:2.0.1' + implementation 'androidx.room:room-guava:2.3.0' + implementation 'androidx.room:room-runtime:2.3.0' + implementation 'androidx.room:room-rxjava2:2.3.0' + implementation 'com.google.android.material:material:1.3.0' + implementation 'com.jakewharton.rxbinding2:rxbinding:2.2.0' + implementation 'com.jakewharton.timber:timber:4.7.1' + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation 'io.reactivex.rxjava2:rxandroid:2.1.1' + implementation 'io.reactivex.rxjava2:rxjava:2.2.21' + implementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.5.10' + + implementation project(path: ':cloudLib') + implementation project(path: ':utilsLib') + + testImplementation 
'androidx.arch.core:core-testing:2.1.0' + testImplementation 'androidx.room:room-testing:2.3.0' + testImplementation 'androidx.test:core-ktx:1.3.0' + testImplementation 'androidx.test.ext:junit:1.1.2' + testImplementation 'androidx.test:rules:1.3.0' + testImplementation 'com.google.guava:guava:30.1.1-jre' + testImplementation 'io.mockk:mockk:1.11.0' + testImplementation 'junit:junit:4.13.2' + testImplementation 'org.robolectric:robolectric:4.5.1' +} + +repositories { + mavenCentral() +} diff --git a/tests/source-files/com.github.shadowsocks/core/build.gradle.kts b/tests/source-files/com.github.shadowsocks/core/build.gradle.kts new file mode 100644 index 00000000..b9708b02 --- /dev/null +++ b/tests/source-files/com.github.shadowsocks/core/build.gradle.kts @@ -0,0 +1,94 @@ +import com.android.build.gradle.internal.tasks.factory.dependsOn + +plugins { + id("com.android.library") + id("org.mozilla.rust-android-gradle.rust-android") + kotlin("android") + kotlin("kapt") + id("kotlin-parcelize") +} + +setupCore() + +android { + defaultConfig { + consumerProguardFiles("proguard-rules.pro") + + externalNativeBuild.ndkBuild { + abiFilters("armeabi-v7a", "arm64-v8a", "x86", "x86_64") + arguments("-j${Runtime.getRuntime().availableProcessors()}") + } + + kapt.arguments { + arg("room.incremental", true) + arg("room.schemaLocation", "$projectDir/schemas") + } + } + + externalNativeBuild.ndkBuild.path("src/main/jni/Android.mk") + + sourceSets.getByName("androidTest") { + assets.setSrcDirs(assets.srcDirs + files("$projectDir/schemas")) + } +} + +cargo { + module = "src/main/rust/shadowsocks-rust" + libname = "sslocal" + targets = listOf("arm", "arm64", "x86", "x86_64") + profile = findProperty("CARGO_PROFILE")?.toString() ?: currentFlavor + extraCargoBuildArguments = listOf("--bin", libname!!) 
+ featureSpec.noDefaultBut(arrayOf( + "stream-cipher", + "aead-cipher-extra", + "logging", + "local-flow-stat", + "local-dns")) + exec = { spec, toolchain -> + spec.environment("RUST_ANDROID_GRADLE_LINKER_WRAPPER_PY", "$projectDir/$module/../linker-wrapper.py") + spec.environment("RUST_ANDROID_GRADLE_TARGET", "target/${toolchain.target}/$profile/lib$libname.so") + } +} + +tasks.whenTaskAdded { + when (name) { + "mergeDebugJniLibFolders", "mergeReleaseJniLibFolders" -> dependsOn("cargoBuild") + } +} + +tasks.register("cargoClean") { + executable("cargo") // cargo.cargoCommand + args("clean") + workingDir("$projectDir/${cargo.module}") +} +tasks.clean.dependsOn("cargoClean") + +dependencies { + val coroutinesVersion = "1.5.2" + val roomVersion = "2.3.0" + val workVersion = "2.7.0-beta01" + + api(project(":plugin")) + api("androidx.core:core-ktx:1.6.0") + // https://android-developers.googleblog.com/2019/07/android-q-beta-5-update.html + api("androidx.drawerlayout:drawerlayout:1.1.1") + api("androidx.fragment:fragment-ktx:1.3.6") + api("com.google.android.material:material:1.4.0") + + api("androidx.lifecycle:lifecycle-livedata-core-ktx:$lifecycleVersion") + api("androidx.preference:preference:1.1.1") + api("androidx.room:room-runtime:$roomVersion") + api("androidx.work:work-multiprocess:$workVersion") + api("androidx.work:work-runtime-ktx:$workVersion") + api("com.google.android.gms:play-services-oss-licenses:17.0.0") + api("com.google.code.gson:gson:2.8.8") + api("com.google.firebase:firebase-analytics-ktx:19.0.1") + api("com.google.firebase:firebase-crashlytics:18.2.1") + api("com.jakewharton.timber:timber:5.0.1") + api("dnsjava:dnsjava:3.4.1") + api("org.jetbrains.kotlinx:kotlinx-coroutines-android:$coroutinesVersion") + api("org.jetbrains.kotlinx:kotlinx-coroutines-play-services:$coroutinesVersion") + kapt("androidx.room:room-compiler:$roomVersion") + androidTestImplementation("androidx.room:room-testing:$roomVersion") + androidTestImplementation("androidx.test.ext:junit-ktx:1.1.3") +} diff --git a/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts b/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts new file mode 100644 index 00000000..874ea857 --- /dev/null +++ b/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts @@ -0,0 +1,28 @@ +plugins { + id("com.android.application") + id("com.google.android.gms.oss-licenses-plugin") + id("com.google.gms.google-services") + id("com.google.firebase.crashlytics") + kotlin("android") + id("kotlin-parcelize") +} + +setupApp() + +android.defaultConfig.applicationId = "com.github.shadowsocks" + +dependencies { + val cameraxVersion = "1.0.1" + + implementation("androidx.browser:browser:1.3.0") + implementation("androidx.camera:camera-camera2:$cameraxVersion") + implementation("androidx.camera:camera-lifecycle:$cameraxVersion") + implementation("androidx.camera:camera-view:1.0.0-alpha28") + implementation("androidx.constraintlayout:constraintlayout:2.1.0") + implementation("androidx.lifecycle:lifecycle-runtime-ktx:$lifecycleVersion") + implementation("com.google.mlkit:barcode-scanning:17.0.0") + implementation("com.google.zxing:core:3.4.1") + implementation("com.takisoft.preferencex:preferencex-simplemenu:1.1.0") + implementation("com.twofortyfouram:android-plugin-api-for-locale:1.0.4") + implementation("me.zhanghai.android.fastscroll:library:1.1.7") +} diff --git a/tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml b/tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml new 
file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml b/tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/com.infomaniak.mail/settings.gradle b/tests/source-files/com.infomaniak.mail/settings.gradle new file mode 100644 index 00000000..bb9b1161 --- /dev/null +++ b/tests/source-files/com.infomaniak.mail/settings.gradle @@ -0,0 +1,44 @@ +pluginManagement { + repositories { + gradlePluginPortal() + google() + mavenCentral() + } +} + +dependencyResolutionManagement { + repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) + repositories { + google() + mavenCentral() + maven { url 'https://jitpack.io' } + } + versionCatalogs { + create("core") { from(files("Core/gradle/core.versions.toml")) } + } +} + +rootProject.name = 'Infomaniak Mail' +include ':app', + ':Core:AppIntegrity', + ':Core:Auth', + ':Core:Avatar', + ':Core:Coil', + ':Core:Compose:Basics', + ':Core:Compose:Margin', + ':Core:Compose:MaterialThemeFromXml', + ':Core:CrossAppLogin', + ':Core:CrossAppLoginUI', + ':Core:FragmentNavigation', + ':Core:Legacy', + ':Core:Legacy:AppLock', + ':Core:Legacy:BugTracker', + ':Core:Legacy:Confetti', + ':Core:Legacy:Stores', + ':Core:Matomo', + ':Core:MyKSuite', + ':Core:Network', + ':Core:Network:Models', + ':Core:Sentry', + ':EmojiComponents', + ':HtmlCleaner' diff --git a/tests/source-files/com.integreight.onesheeld/build.gradle b/tests/source-files/com.integreight.onesheeld/build.gradle new file mode 100644 index 00000000..4aa9de97 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/build.gradle @@ -0,0 +1,16 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:2.3.2' + classpath 'com.google.gms:google-services:3.0.0' + } +} + +allprojects { + repositories { + jcenter() + } +} diff --git a/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..c88a02a7 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Wed Mar 15 14:07:53 EET 2017 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip diff --git a/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle b/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle new file mode 100644 index 00000000..83e327f8 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle @@ -0,0 +1,22 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 25 + buildToolsVersion "25.0.3" + + defaultConfig { + minSdkVersion 9 + targetSdkVersion 17 + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } +} + +dependencies { + compile 'com.android.support:support-v4:25.1.0' +} diff --git a/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml new file mode 100644 index 00000000..3178f4f4 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml @@ -0,0 +1,9 @@ + + + + + + \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle b/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle new file mode 100644 index 00000000..60559586 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle @@ -0,0 +1,129 @@ +buildscript { + repositories { + jcenter() + maven { url 'https://maven.fabric.io/public' } + } + + dependencies { + classpath 'io.fabric.tools:gradle:1.+' + } +} +apply plugin: 'com.android.application' +apply plugin: 'io.fabric' + +repositories { + jcenter() + maven { url 'https://maven.fabric.io/public' } + maven { url "https://jitpack.io" } +} + + +android { + compileSdkVersion 25 + buildToolsVersion "25.0.3" + + defaultConfig { + applicationId "com.integreight.onesheeld" + minSdkVersion 9 + targetSdkVersion 25 + versionCode 170521 + versionName "1.9.0" + archivesBaseName = "1Sheeld.v$versionName.$versionCode" + buildConfigField "long", "TIMESTAMP", System.currentTimeMillis() + "L" + } + + buildTypes.all { + ext.enableCrashlytics = isCrashlyticsPropertiesAvailable() + } + + buildTypes { + debug { + versionNameSuffix getWorkingBranchSuffix() + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + release { + minifyEnabled true + debuggable false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + + packagingOptions { + exclude 'META-INF/LICENSE' + exclude 'META-INF/NOTICE' + exclude 'META-INF/LICENSE.txt' + exclude 'META-INF/NOTICE.txt' + } + + lintOptions { + abortOnError false + } + + useLibrary 'org.apache.http.legacy' +} + 
+dependencies { + compile project(':localeapi') + compile project(':pullToRefreshlibrary') + compile project(':quickReturnHeader') + compile project(':pagerIndicator') + compile fileTree(dir: 'libs', include: ['*.jar']) + compile 'com.android.support:support-v4:25.3.1' + compile 'com.facebook.android:facebook-android-sdk:4.5.0' + compile 'com.google.android.gms:play-services-analytics:10.0.1' + compile 'com.google.android.gms:play-services-location:10.0.1' + compile 'com.google.android.gms:play-services-auth:10.0.1' + compile 'com.google.android.gms:play-services-vision:10.0.1' + compile 'com.loopj.android:android-async-http:1.4.9' + compile 'com.snappydb:snappydb-lib:0.5.0' + compile 'com.esotericsoftware.kryo:kryo:2.24.0' + compile 'com.github.hotchemi:android-rate:0.5.0' + compile('com.crashlytics.sdk.android:crashlytics:2.6.8@aar') { + transitive = true; + } + + compile('com.google.android.gms:play-services-identity:10.0.1') { + transitive = true; + } + compile('com.google.api-client:google-api-client-android:1.22.0') { + exclude group: 'org.apache.httpcomponents' + } + compile('com.google.apis:google-api-services-gmail:v1-rev48-1.22.0') { + exclude group: 'org.apache.httpcomponents' + } + compile 'org.twitter4j:twitter4j-core:4.0.4' + compile 'org.twitter4j:twitter4j-async:4.0.4' + compile 'org.twitter4j:twitter4j-stream:4.0.4' + compile 'cz.msebera.android:httpclient:4.4.1.1' + compile 'net.sf.supercsv:super-csv:2.4.0' + compile 'com.github.amlcurran.showcaseview:library:5.4.3' + compile 'com.github.emanzanoaxa:RippleEffect:52ea2a0ab6' + compile 'com.drewnoakes:metadata-extractor:2.8.1' + compile 'com.integreight.onesheeld:sdk:2.2.0' + compile 'com.google.firebase:firebase-core:10.0.1' + compile 'com.google.firebase:firebase-messaging:10.0.1' +} + +def isCrashlyticsPropertiesAvailable() { + return new File("./oneSheeld/fabric.properties").exists() +} + +def getWorkingBranchSuffix() { + def workingBranchSuffix = "" + try { + def workingBranch = "git --git-dir=${rootDir}/.git --work-tree=${rootDir} rev-parse --abbrev-ref HEAD".execute().text.trim() + workingBranchSuffix = (workingBranch != "") ? 
" - branch:" + workingBranch : "" + } + catch (all) { + } + return workingBranchSuffix +} + +def isGoogleServicesFileAvailable() { + return new File("./oneSheeld/google-services.json").exists() +} + +if (isGoogleServicesFileAvailable()) { + apply plugin: 'com.google.gms.google-services' +} \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml new file mode 100644 index 00000000..05350254 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml @@ -0,0 +1,280 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle b/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle new file mode 100644 index 00000000..254932bd --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle @@ -0,0 +1,22 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 25 + buildToolsVersion "25.0.3" + + defaultConfig { + minSdkVersion 9 + targetSdkVersion 9 + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } +} + +dependencies { + compile 'com.android.support:support-v4:25.1.0' +} diff --git a/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml new file mode 100644 index 00000000..4314f1b3 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml @@ -0,0 +1,9 @@ + + + + + + diff --git a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle new file mode 100644 index 00000000..7db12afa --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle @@ -0,0 +1,20 @@ +apply plugin: 'com.android.library' +android { + compileSdkVersion 25 + buildToolsVersion "25.0.3" + + defaultConfig { + minSdkVersion 9 + targetSdkVersion 9 + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } +} + +dependencies { +} \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml new file mode 100644 index 00000000..c3db5673 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml @@ -0,0 +1,11 @@ + + + + + + + + \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle new file mode 100644 index 00000000..83e327f8 --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle @@ -0,0 
+1,22 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 25 + buildToolsVersion "25.0.3" + + defaultConfig { + minSdkVersion 9 + targetSdkVersion 17 + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } +} + +dependencies { + compile 'com.android.support:support-v4:25.1.0' +} diff --git a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml new file mode 100644 index 00000000..235aef0b --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml @@ -0,0 +1,11 @@ + + + + + + \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/settings.gradle b/tests/source-files/com.integreight.onesheeld/settings.gradle new file mode 100644 index 00000000..fe8d1fea --- /dev/null +++ b/tests/source-files/com.integreight.onesheeld/settings.gradle @@ -0,0 +1,5 @@ +include ':pagerIndicator' +include ':pullToRefreshlibrary' +include ':quickReturnHeader' +include ':localeapi' +include ':oneSheeld' diff --git a/tests/source-files/com.jens.automation2/app/build.gradle b/tests/source-files/com.jens.automation2/app/build.gradle new file mode 100644 index 00000000..c2ffeda9 --- /dev/null +++ b/tests/source-files/com.jens.automation2/app/build.gradle @@ -0,0 +1,78 @@ +plugins { + id 'com.android.application' +} + +android { + compileSdkVersion 29 + + defaultConfig { + applicationId "com.jens.automation2" + minSdkVersion 16 + compileSdkVersion 29 + buildToolsVersion '29.0.2' + useLibrary 'org.apache.http.legacy' + versionCode 96 + versionName "1.6.21" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + lintOptions { + checkReleaseBuilds false + abortOnError false + } + + flavorDimensions "version" + + productFlavors + { + googlePlayFlavor + { + dimension "version" +// applicationIdSuffix ".googlePlay" + versionNameSuffix "-googlePlay" + targetSdkVersion 29 + } + + fdroidFlavor + { + dimension "version" +// applicationIdSuffix ".fdroid" + versionNameSuffix "-fdroid" + targetSdkVersion 28 + } + + apkFlavor + { + dimension "version" +// applicationIdSuffix ".apk" + versionNameSuffix "-apk" + targetSdkVersion 28 + } + } +} + +dependencies { + googlePlayFlavorImplementation 'com.google.firebase:firebase-appindexing:19.2.0' + googlePlayFlavorImplementation 'com.google.android.gms:play-services-location:17.1.0' + + apkFlavorImplementation 'com.google.firebase:firebase-appindexing:19.2.0' + apkFlavorImplementation 'com.google.android.gms:play-services-location:17.1.0' + + implementation 'androidx.appcompat:appcompat:1.2.0' + implementation 'com.google.android.material:material:1.3.0' + testImplementation 'junit:junit:4.+' + androidTestImplementation 'androidx.test.ext:junit:1.1.2' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' +} \ No newline at end of file diff --git a/tests/source-files/com.jens.automation2/build.gradle b/tests/source-files/com.jens.automation2/build.gradle new file mode 100644 index 00000000..1ed6b0cf --- /dev/null +++ 
b/tests/source-files/com.jens.automation2/build.gradle @@ -0,0 +1,77 @@ +plugins { + id 'com.android.application' +} + +android { + compileSdkVersion 29 + + defaultConfig { + applicationId "com.jens.automation2" + minSdkVersion 16 + compileSdkVersion 29 + buildToolsVersion '29.0.2' + useLibrary 'org.apache.http.legacy' + versionCode 105 + versionName "1.6.34" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + lintOptions { + checkReleaseBuilds false + abortOnError false + } + + flavorDimensions "version" + + productFlavors + { + googlePlayFlavor + { + dimension "version" +// applicationIdSuffix ".googlePlay" + versionNameSuffix "-googlePlay" + targetSdkVersion 29 + } + + fdroidFlavor + { + dimension "version" +// applicationIdSuffix ".fdroid" + versionNameSuffix "-fdroid" + targetSdkVersion 28 + } + + apkFlavor + { + dimension "version" +// applicationIdSuffix ".apk" + versionNameSuffix "-apk" + targetSdkVersion 28 + } + } +} + +dependencies { + + + + implementation 'com.linkedin.dexmaker:dexmaker:2.25.0' + + implementation 'androidx.appcompat:appcompat:1.2.0' + implementation 'com.google.android.material:material:1.3.0' + testImplementation 'junit:junit:4.+' + androidTestImplementation 'androidx.test.ext:junit:1.1.2' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' +} diff --git a/tests/source-files/com.kunzisoft.testcase/build.gradle b/tests/source-files/com.kunzisoft.testcase/build.gradle new file mode 100644 index 00000000..13cc4bc2 --- /dev/null +++ b/tests/source-files/com.kunzisoft.testcase/build.gradle @@ -0,0 +1,81 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 27 + defaultConfig { + applicationId "com.kunzisoft.fdroidtest.applicationidsuffix" + minSdkVersion 14 + targetSdkVersion 27 + versionCode 1 + versionName "1.0" + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + } + buildTypes { + release { + minifyEnabled = false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + + dexOptions { + } + + flavorDimensions "tier" + productFlavors { + libre { + applicationIdSuffix = ".libre" + versionNameSuffix "-libre" + buildConfigField "boolean", "FULL_VERSION", "true" + buildConfigField "boolean", "CLOSED_STORE", "false" + // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix.libre + // Version code : 1 + // Version name : 1.0-libre + } + pro { + applicationIdSuffix = ".pro" + versionCode 20180430 + versionName "20180430-pro" + buildConfigField "boolean", "FULL_VERSION", "true" + buildConfigField "boolean", "CLOSED_STORE", "true" + // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix.pro + // Version code : 20180430 + // Version name : 20180430-pro + } + free { + versionNameSuffix "-free" + buildConfigField "boolean", "FULL_VERSION", "false" + buildConfigField "boolean", "CLOSED_STORE", "true" + // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix + // Version code : 1 + // Version name : 1.0-free + } + underscore { + applicationIdSuffix = ".underscore" + versionCode 2018_04_30 + versionName "20180430-underscore" + buildConfigField "boolean", "FULL_VERSION", "true" + buildConfigField "boolean", "CLOSED_STORE", "true" + // ApplicationId : 
com.kunzisoft.fdroidtest.applicationidsuffix.underscore + // Version code : 2018_04_30 + // Version name : 20180430-underscore + } + underscore_first { + applicationIdSuffix = ".underscore_first" + versionCode _04_30 + buildConfigField "boolean", "FULL_VERSION", "true" + buildConfigField "boolean", "CLOSED_STORE", "true" + // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first + // Version code : 1 + // Version name : 1.0 + } + } +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation 'com.android.support:appcompat-v7:27.1.1' + implementation 'com.android.support.constraint:constraint-layout:1.1.0' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'com.android.support.test:runner:1.0.2' + androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' +} diff --git a/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts b/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts new file mode 100644 index 00000000..261cfe2f --- /dev/null +++ b/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts @@ -0,0 +1,118 @@ +import java.io.FileInputStream +import java.util.Properties + +plugins { + alias(libs.plugins.android.application) + alias(libs.plugins.kotlin.android) + alias(libs.plugins.google.services) + alias(libs.plugins.firebase.crashlytics) + alias(libs.plugins.ksp) +} +android { + namespace = "com.lolo.io.onelist" + + val versionPropsFile = file("../version.properties") + var versionCodeCI: Int? = null + if (versionPropsFile.canRead()) { + val versionProps = Properties() + versionProps.load(FileInputStream(versionPropsFile)) + val v = versionProps["VERSION_CODE"] + versionCodeCI = (versionProps["VERSION_CODE"] as String).toInt() + } + + + defaultConfig { + multiDexEnabled = true + applicationId = "com.lolo.io.onelist" + compileSdk = 34 + minSdk = 23 + targetSdk = 34 + versionCode = versionCodeCI ?: 19 + versionName = "1.4.2" + vectorDrawables.useSupportLibrary = true + } + + androidResources { + generateLocaleConfig = true + } + + buildFeatures { + viewBinding = true + buildConfig = true + } + + + ksp { + arg("room.schemaLocation", "$projectDir/schemas") + } + + buildTypes { + getByName("debug") { + applicationIdSuffix = ".debug" + versionNameSuffix = "-DEBUG" + resValue("string", "app_name", "1ListDev") + } + getByName("release") { + isMinifyEnabled = true + isShrinkResources = true + + proguardFiles( + getDefaultProguardFile("proguard-android-optimize.txt"), + "proguard-rules.pro" + ) + resValue("string", "app_name", "1List") + } + } + + compileOptions { + sourceCompatibility = JavaVersion.VERSION_17 + targetCompatibility = JavaVersion.VERSION_17 + } + +} +repositories { + google() + mavenCentral() + maven { url = uri("https://jitpack.io") } +} + +dependencies { + + // android + implementation(libs.androidx.core.splashscreen) + implementation(libs.androidx.preference.ktx) + implementation(libs.androidx.lifecycle.extensions) + implementation(libs.androidx.legacy.support.v4) + implementation(libs.androidx.appcompat) + + // android - design + implementation(libs.constraint.layout) + implementation(libs.androidx.recyclerview) + implementation(libs.flexbox) + implementation(libs.material) + implementation(libs.androidx.swiperefreshlayout) + + // kotlin + implementation(libs.kotlinx.coroutines.core) + implementation(libs.kotlin.stdlib.jdk7) + + // firebase + implementation(libs.firebase.crashlytics) + + // koin di + implementation(libs.koin.android) + 
implementation(libs.koin.androidx.navigation) + + // room + implementation(libs.androidx.room.runtime) + implementation(libs.androidx.room.ktx) + ksp(libs.androidx.room.compiler) + + // json + implementation(libs.gson) + + // other libs + implementation(libs.whatsnew) + implementation(libs.storage) + implementation(libs.advrecyclerview) +} diff --git a/tests/source-files/com.lolo.io.onelist/build.gradle.kts b/tests/source-files/com.lolo.io.onelist/build.gradle.kts new file mode 100644 index 00000000..baad9726 --- /dev/null +++ b/tests/source-files/com.lolo.io.onelist/build.gradle.kts @@ -0,0 +1,11 @@ +plugins { + alias(libs.plugins.android.application) apply false + alias(libs.plugins.kotlin.android) apply false + alias(libs.plugins.google.services) apply false + alias(libs.plugins.firebase.crashlytics) apply false + alias(libs.plugins.ksp) apply false +} + +tasks.register("clean", Delete::class) { + delete(rootProject.layout.buildDirectory) +} \ No newline at end of file diff --git a/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml b/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml new file mode 100644 index 00000000..d6cf9869 --- /dev/null +++ b/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml @@ -0,0 +1,58 @@ +[versions] +advrecyclerview = "1.0.0" +appcompat = "1.6.1" +constraint-layout = "2.0.4" +crashlytics = "18.6.2" +firebase-crashlytics-gradle-plugin = "2.9.9" +flexbox = "3.0.0" +gson = "2.5.6" +kotlin = "1.9.20" +kotlin-coroutines = "1.6.4" +legacy-support-v4 = "1.0.0" +lifecycle-extensions = "2.2.0" +material = "1.11.0" +preference-ktx = "1.2.1" +recyclerview = "1.3.2" +splashscreen ="1.0.1" +koin ="3.5.0" +room="2.6.1" +storage = "1.5.5" +swiperefreshlayout = "1.1.0" +whatsnew = "0.1.7" +ksp-plugin="1.9.20-1.0.14" + +# plugins versions +android-application-plugin="8.3.0" +kotlin-android-plugin="1.9.22" +google-services-plugin = "4.4.1" + +[libraries] +advrecyclerview = { module = "com.h6ah4i.android.widget.advrecyclerview:advrecyclerview", version.ref = "advrecyclerview" } +androidx-appcompat = { module = "androidx.appcompat:appcompat", version.ref = "appcompat" } +androidx-core-splashscreen = { module = "androidx.core:core-splashscreen", version.ref = "splashscreen" } +androidx-legacy-support-v4 = { module = "androidx.legacy:legacy-support-v4", version.ref = "legacy-support-v4" } +androidx-lifecycle-extensions = { module = "androidx.lifecycle:lifecycle-extensions", version.ref = "lifecycle-extensions" } +androidx-preference-ktx = { module = "androidx.preference:preference-ktx", version.ref = "preference-ktx" } +androidx-recyclerview = { module = "androidx.recyclerview:recyclerview", version.ref = "recyclerview" } +androidx-room-runtime = { group = "androidx.room", name = "room-runtime", version.ref = "room" } +androidx-room-ktx = { group = "androidx.room", name = "room-ktx", version.ref = "room" } +androidx-room-compiler = { group = "androidx.room", name = "room-compiler", version.ref = "room" } +androidx-swiperefreshlayout = { module = "androidx.swiperefreshlayout:swiperefreshlayout", version.ref = "swiperefreshlayout" } +constraint-layout = { module = "com.android.support.constraint:constraint-layout", version.ref = "constraint-layout" } +firebase-crashlytics = { module = "com.google.firebase:firebase-crashlytics", version.ref = "crashlytics" } +flexbox = { module = "com.google.android.flexbox:flexbox", version.ref = "flexbox" } +gson = { module = "org.immutables:gson", version.ref = "gson" } +koin-android = { module = 
"io.insert-koin:koin-android", version.ref = "koin" } +koin-androidx-navigation = { module = "io.insert-koin:koin-androidx-navigation", version.ref = "koin" } +kotlin-stdlib-jdk7 = { module = "org.jetbrains.kotlin:kotlin-stdlib-jdk7", version.ref = "kotlin" } +kotlinx-coroutines-core = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref = "kotlin-coroutines" } +material = { module = "com.google.android.material:material", version.ref = "material" } +storage = { module = "com.anggrayudi:storage", version.ref = "storage" } +whatsnew = { module = "io.github.tonnyl:whatsnew", version.ref = "whatsnew" } + +[plugins] +ksp = { id = "com.google.devtools.ksp", version.ref = "ksp-plugin" } +android-application = { id = "com.android.application", version.ref = "android-application-plugin" } +kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin-android-plugin" } +google-services = { id = "com.google.gms.google-services", version.ref = "google-services-plugin" } +firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase-crashlytics-gradle-plugin" } \ No newline at end of file diff --git a/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..e411586a --- /dev/null +++ b/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/tests/source-files/com.lolo.io.onelist/settings.gradle b/tests/source-files/com.lolo.io.onelist/settings.gradle new file mode 100644 index 00000000..533aeeeb --- /dev/null +++ b/tests/source-files/com.lolo.io.onelist/settings.gradle @@ -0,0 +1,9 @@ +pluginManagement { + repositories { + google() + mavenCentral() + gradlePluginPortal() + } +} + +include 'app' diff --git a/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt new file mode 100644 index 00000000..f11ffa13 --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt @@ -0,0 +1 @@ +The copyleft libre software Nextcloud Android app, gives you access to all the files in your Nextcloud.\n\nFeatures:\n* Easy, modern interface, suited to the theme of your server\n* Upload files to your Nextcloud server\n* Share them with others\n* Keep your favorite files and folders synced\n* Search across all folders on your server\n* Auto Upload for photos and videos taken by your device\n* Keep up to date with notifications\n* Multi-account support\n* Secure access to your data with fingerprint or PIN\n* Integration with DAVdroid for easy setup of calendar & Contacts synchronization\n\nPlease report all issues at https://github.com/nextcloud/android/issues and discuss this app at https://help.nextcloud.com/c/clients/android\n\nNew to Nextcloud? Nextcloud is a private file sync & share and communication server. It is libre software, and you can host it yourself or pay a company to do it for you. 
That way, you are in control of your photos, your calendar and contact data, your documents and everything else.\n\nCheck out Nextcloud at https://nextcloud.com \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt new file mode 100644 index 00000000..69b7d99b --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt @@ -0,0 +1 @@ +The Nextcloud Android app gives you access to all your files in your Nextcloud \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt new file mode 100644 index 00000000..9928b03a --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt @@ -0,0 +1 @@ +Nextcloud \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt new file mode 100644 index 00000000..8593fc4f --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt @@ -0,0 +1 @@ +The Open Source Nextcloud Android app allows you to access all your files on your Nextcloud.\nThis is a dev version of the official Nextcloud app and includes brand-new, untested features which might lead to instabilities and data loss. The app is designed for users willing to test the new features and to report bugs if they occur. Do not use it for your productive work!\n\nThe dev version can be installed alongside the official Nextcloud app which is available at F-Droid, too. Once a day it is checked if the source code was updated, so there can be longer pauses between builds. diff --git a/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt new file mode 100644 index 00000000..42b3df3e --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt @@ -0,0 +1 @@ +The Nextcloud Dev app is a development snapshot and can be installed parallel. diff --git a/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt new file mode 100644 index 00000000..dc9e6199 --- /dev/null +++ b/tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt @@ -0,0 +1 @@ +Nextcloud Dev \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client/build.gradle b/tests/source-files/com.nextcloud.client/build.gradle new file mode 100644 index 00000000..528f6123 --- /dev/null +++ b/tests/source-files/com.nextcloud.client/build.gradle @@ -0,0 +1,253 @@ +// Gradle build file +// +// This project was started in Eclipse and later moved to Android Studio. In the transition, both IDEs were supported. 
+// Due to this, the files layout is not the usual in new projects created with Android Studio / gradle. This file +// merges declarations usually split in two separates build.gradle file, one for global settings of the project in +// its root folder, another one for the app module in subfolder of root. + +buildscript { + repositories { + jcenter() + maven { + url 'https://oss.sonatype.org/content/repositories/snapshots/' + } + google() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.0.1' + classpath 'com.google.gms:google-services:3.0.0' + } +} + +apply plugin: 'com.android.application' +apply plugin: 'checkstyle' +apply plugin: 'pmd' +apply plugin: 'findbugs' + +configurations.all { + // check for updates every build + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} + +ext { + supportLibraryVersion = '26.1.0' + googleLibraryVersion = '11.2.2' + + travisBuild = System.getenv("TRAVIS") == "true" + + // allows for -Dpre-dex=false to be set + preDexEnabled = "true".equals(System.getProperty("pre-dex", "true")) +} + +repositories { + jcenter() + maven { url "https://jitpack.io" } + maven { url 'https://oss.sonatype.org/content/repositories/snapshots/' } + google() + + flatDir { + dirs 'libs' + } +} + +android { + lintOptions { + abortOnError false + htmlReport true + htmlOutput file("$project.buildDir/reports/lint/lint.html") + disable 'MissingTranslation' + } + + dexOptions { + javaMaxHeapSize "4g" + } + + compileSdkVersion 26 + buildToolsVersion '26.0.2' + + defaultConfig { + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + + // arguments to be passed to functional tests + testInstrumentationRunnerArgument "TEST_USER", "\"$System.env.OCTEST_APP_USERNAME\"" + testInstrumentationRunnerArgument "TEST_PASSWORD", "\"$System.env.OCTEST_APP_PASSWORD\"" + testInstrumentationRunnerArgument "TEST_SERVER_URL", "\"$System.env.OCTEST_SERVER_BASE_URL\"" + + multiDexEnabled true + + versionCode = 20000099 + versionName = "2.0.0" + + // adapt structure from Eclipse to Gradle/Android Studio expectations; + // see http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Configuring-the-Structure + + flavorDimensions "default" + + productFlavors { + // used for f-droid + generic { + applicationId 'com.nextcloud.client' + dimension "default" + } + + gplay { + applicationId 'com.nextcloud.client' + dimension "default" + } + + modified { + // structure is: + // domain tld + // domain name + // .client + applicationId 'com.custom.client' + dimension "default" + } + + versionDev { + applicationId "com.nextcloud.android.beta" + dimension "default" + versionCode 20171223 + versionName "20171223" + } + } + + configurations { + modifiedCompile + } + } + + + // adapt structure from Eclipse to Gradle/Android Studio expectations; + // see http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Configuring-the-Structure + + dexOptions { + // Skip pre-dexing when running on Travis CI or when disabled via -Dpre-dex=false. 
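+        // e.g. pre-dexing can be skipped for a local build by passing the JVM system
+        // property on the command line, something like: gradle -Dpre-dex=false assembleGenericDebug
+        // (the exact task name depends on the flavor being built)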
+ preDexLibraries = preDexEnabled && !travisBuild + } + + packagingOptions { + exclude 'META-INF/LICENSE.txt' + exclude 'META-INF/LICENSE' + } + + task checkstyle(type: Checkstyle) { + configFile = file("${rootProject.projectDir}/checkstyle.xml") + configProperties.checkstyleSuppressionsPath = file("${project.rootDir}/config/quality/checkstyle/suppressions.xml").absolutePath + source 'src' + include '**/*.java' + exclude '**/gen/**' + classpath = files() + } + + task pmd(type: Pmd) { + ruleSetFiles = files("${project.rootDir}/pmd-ruleset.xml") + ignoreFailures = false + ruleSets = [] + + source 'src' + include '**/*.java' + exclude '**/gen/**' + + reports { + xml.enabled = false + html.enabled = true + xml { + destination = file("$project.buildDir/reports/pmd/pmd.xml") + } + html { + destination = file("$project.buildDir/reports/pmd/pmd.html") + } + } + } + + task findbugs(type: FindBugs) { + ignoreFailures = false + effort = "max" + reportLevel = "high" + classes = files("$project.buildDir/intermediates/classes") + excludeFilter = new File("${project.rootDir}/findbugs-filter.xml") + source 'src' + include '**/*.java' + exclude '**/gen/**' + + reports { + xml.enabled = false + html.enabled = true + html { + destination = file("$project.buildDir/reports/findbugs/findbugs.html") + } + } + classpath = files() + } + check.dependsOn 'checkstyle', 'findbugs', 'pmd', 'lint' + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } +} + +dependencies { + /// dependencies for app building + implementation 'com.android.support:multidex:1.0.2' + implementation 'com.github.nextcloud:android-library:1.0.33' + versionDevImplementation 'com.github.nextcloud:android-library:master-SNAPSHOT' // use always latest master + implementation "com.android.support:support-v4:${supportLibraryVersion}" + implementation "com.android.support:design:${supportLibraryVersion}" + implementation 'com.jakewharton:disklrucache:2.0.2' + implementation "com.android.support:appcompat-v7:${supportLibraryVersion}" + implementation "com.android.support:cardview-v7:${supportLibraryVersion}" + implementation "com.android.support:exifinterface:${supportLibraryVersion}" + implementation 'com.github.tobiasKaminsky:android-floating-action-button:1.10.2' + implementation 'com.github.albfernandez:juniversalchardet:v2.0.0' + implementation 'com.google.code.findbugs:annotations:2.0.1' + implementation 'commons-io:commons-io:2.5' + implementation 'com.github.evernote:android-job:v1.2.0' + implementation 'com.jakewharton:butterknife:8.5.1' + annotationProcessor 'com.jakewharton:butterknife-compiler:8.5.1' + implementation 'org.greenrobot:eventbus:3.0.0' + implementation 'com.googlecode.ez-vcard:ez-vcard:0.10.2' + implementation 'org.lukhnos:nnio:0.2' + // uncomment for gplay, modified + // implementation "com.google.firebase:firebase-messaging:${googleLibraryVersion}" + // implementation "com.google.android.gms:play-services-base:${googleLibraryVersion}" + // implementation "com.google.android.gms:play-services-gcm:${googleLibraryVersion}" + // implementation "com.google.firebase:firebase-core:${googleLibraryVersion}" + implementation 'org.parceler:parceler-api:1.1.6' + annotationProcessor 'org.parceler:parceler:1.1.6' + implementation 'com.github.bumptech.glide:glide:3.7.0' + implementation 'com.caverock:androidsvg:1.2.1' + implementation "com.android.support:support-annotations:${supportLibraryVersion}" + + /// dependencies for local unit tests + testImplementation 
'junit:junit:4.12' + testImplementation 'org.mockito:mockito-core:1.10.19' + /// dependencies for instrumented tests + // JUnit4 Rules + androidTestImplementation 'com.android.support.test:rules:1.0.1' + // Android JUnit Runner + androidTestImplementation 'com.android.support.test:runner:1.0.1' + + // Espresso core + androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1' + // UIAutomator - for cross-app UI tests, and to grant screen is turned on in Espresso tests + //androidTestImplementation 'com.android.support.test.uiautomator:uiautomator-v18:2.1.2' + // fix conflict in dependencies; see http://g.co/androidstudio/app-test-app-conflict for details + //androidTestImplementation "com.android.support:support-annotations:${supportLibraryVersion}" + implementation 'org.jetbrains:annotations:15.0' +} + +configurations.all { + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} + +tasks.withType(Test) { + /// increased logging for tests + testLogging { + events "passed", "skipped", "failed" + } +} + +// uncomment for gplay, modified (must be at the bottom) +//apply plugin: 'com.google.gms.google-services' diff --git a/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt new file mode 100644 index 00000000..f11ffa13 --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt @@ -0,0 +1 @@ +The copyleft libre software Nextcloud Android app, gives you access to all the files in your Nextcloud.\n\nFeatures:\n* Easy, modern interface, suited to the theme of your server\n* Upload files to your Nextcloud server\n* Share them with others\n* Keep your favorite files and folders synced\n* Search across all folders on your server\n* Auto Upload for photos and videos taken by your device\n* Keep up to date with notifications\n* Multi-account support\n* Secure access to your data with fingerprint or PIN\n* Integration with DAVdroid for easy setup of calendar & Contacts synchronization\n\nPlease report all issues at https://github.com/nextcloud/android/issues and discuss this app at https://help.nextcloud.com/c/clients/android\n\nNew to Nextcloud? Nextcloud is a private file sync & share and communication server. It is libre software, and you can host it yourself or pay a company to do it for you. 
That way, you are in control of your photos, your calendar and contact data, your documents and everything else.\n\nCheck out Nextcloud at https://nextcloud.com \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt new file mode 100644 index 00000000..69b7d99b --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt @@ -0,0 +1 @@ +The Nextcloud Android app gives you access to all your files in your Nextcloud \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt new file mode 100644 index 00000000..9928b03a --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt @@ -0,0 +1 @@ +Nextcloud \ No newline at end of file diff --git a/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt new file mode 100644 index 00000000..8593fc4f --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt @@ -0,0 +1 @@ +The Open Source Nextcloud Android app allows you to access all your files on your Nextcloud.\nThis is a dev version of the official Nextcloud app and includes brand-new, untested features which might lead to instabilities and data loss. The app is designed for users willing to test the new features and to report bugs if they occur. Do not use it for your productive work!\n\nThe dev version can be installed alongside the official Nextcloud app which is available at F-Droid, too. Once a day it is checked if the source code was updated, so there can be longer pauses between builds. diff --git a/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt new file mode 100644 index 00000000..42b3df3e --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt @@ -0,0 +1 @@ +The Nextcloud Dev app is a development snapshot and can be installed parallel. 
diff --git a/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt new file mode 100644 index 00000000..dc9e6199 --- /dev/null +++ b/tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt @@ -0,0 +1 @@ +Nextcloud Dev \ No newline at end of file diff --git a/tests/source-files/com.seafile.seadroid2/app/build.gradle b/tests/source-files/com.seafile.seadroid2/app/build.gradle new file mode 100644 index 00000000..55813267 --- /dev/null +++ b/tests/source-files/com.seafile.seadroid2/app/build.gradle @@ -0,0 +1,122 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion rootProject.ext.compileSdkVersion + + defaultConfig { + applicationId 'com.seafile.seadroid2' + minSdkVersion rootProject.ext.minSdkVersion + targetSdkVersion rootProject.ext.targetSdkVersion + versionCode 93 + versionName "2.2.18" + multiDexEnabled true + resValue "string", "authorities", applicationId + '.cameraupload.provider' + resValue "string", "account_type", "com.seafile.seadroid2.account.api2" + buildConfigField "String", "ACCOUNT_TYPE", '"com.seafile.seadroid2.account.api2"' + ndk { + abiFilters 'armeabi', 'armeabi-v7a', 'x86' + } + } + + lintOptions { + abortOnError false + disable 'MissingTranslation' + } + + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + signingConfigs { + debug { +// def props = new Properties() +// props.load(new FileInputStream(project.file("debugkey.properties"))) +// storeFile project.file(props.keyStore) +// storePassword props.keyStorePassword +// keyAlias props.keyAlias +// keyPassword props.keyAliasPassword + } + release { +// Signing code for manual signing +// storeFile file(System.console().readLine("\n\$ Enter keystore path: ")) +// storePassword System.console().readPassword("\n\$ Enter keystore password: ").toString() +// keyAlias System.console().readLine("\n\$ Enter key alias: ") +// keyPassword System.console().readPassword("\n\$ Enter key password: ").toString() + + def props = new Properties() + props.load(new FileInputStream(project.file("key.properties"))) + storeFile project.file(props.keyStore) + storePassword props.keyStorePassword + keyAlias props.keyAlias + keyPassword props.keyAliasPassword + } + } + + buildTypes { + debug { + debuggable true + applicationIdSuffix ".debug" + resValue "string", "authorities", defaultConfig.applicationId + '.debug.cameraupload.provider' + resValue "string", "account_type", "com.seafile.seadroid2.debug.account.api2" + buildConfigField "String", "ACCOUNT_TYPE", '"com.seafile.seadroid2.debug.account.api2"' + signingConfig signingConfigs.debug + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' + applicationVariants.all { variant -> + variant.outputs.all { output -> + if (output.outputFile != null && output.outputFile.name.endsWith('.apk')) { + if (variant.name == "debug") + outputFileName = "seafile-debug-" + defaultConfig.versionName + ".apk" + } + } + } + } + release { + signingConfig signingConfigs.release + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' + applicationVariants.all { variant -> + variant.outputs.all { output -> + if (output.outputFile != null && output.outputFile.name.endsWith('.apk')) { + if (variant.name == "release") { + outputFileName = 
"seafile-" + defaultConfig.versionName + ".apk" + } + } + } + } + } + } + + allprojects { + repositories { + maven { url 'https://jitpack.io' } + } + } + dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation "com.android.support:appcompat-v7:${rootProject.ext.supportLibVersion}" + implementation "com.android.support:design:${rootProject.ext.supportLibVersion}" + implementation 'com.github.JakeWharton:ViewPagerIndicator:2.4.1' + implementation 'com.github.kevinsawicki:http-request:6.0' + implementation 'commons-io:commons-io:2.4' + implementation 'com.google.guava:guava:18.0' + implementation 'com.nostra13.universalimageloader:universal-image-loader:1.9.3' + implementation 'com.cocosw:bottomsheet:1.3.1' + implementation 'com.commit451:PhotoView:1.2.4' + implementation 'com.joanzapata.iconify:android-iconify-material-community:2.2.1' + testImplementation 'junit:junit:4.12' + testImplementation 'org.robolectric:robolectric:3.0' + implementation 'com.madgag.spongycastle:core:1.54.0.0' + implementation 'com.madgag.spongycastle:prov:1.54.0.0' + implementation 'com.shuyu:gsyVideoPlayer-java:2.1.0' + implementation 'com.shuyu:gsyVideoPlayer-ex_so:2.1.0' + implementation 'com.squareup.okhttp3:okhttp:3.9.1' + + implementation 'com.yydcdut:markdown-processor:0.1.3' + implementation 'ren.qinc.edit:lib:0.0.5'//editor undo redo + implementation 'com.github.tiagohm.MarkdownView:library:0.19.0' + } +} + diff --git a/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts b/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts new file mode 100644 index 00000000..88c03a08 --- /dev/null +++ b/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts @@ -0,0 +1,306 @@ +import com.android.build.api.transform.* +import com.android.build.api.variant.VariantInfo +import com.android.utils.FileUtils +import org.gradle.internal.os.OperatingSystem +import org.aspectj.bridge.IMessage +import org.aspectj.bridge.MessageHandler +import org.aspectj.tools.ajc.Main + +plugins { + id("com.android.application") + kotlin("android") + kotlin("kapt") +} + +dependencies { + implementation(project(":cats")) + implementation(project(":relay")) + + implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.7.0") + + // these two are required for logging within the relay module. todo remove? 
+ implementation("org.slf4j:slf4j-api:1.7.36") + implementation("com.noveogroup.android:android-logger:1.3.6") + + implementation("androidx.core:core-ktx:1.8.0") + implementation("androidx.legacy:legacy-support-v4:1.0.0") + implementation("androidx.annotation:annotation:1.3.0") // For @Nullable/@NonNull + implementation("androidx.appcompat:appcompat:1.4.2") + implementation("androidx.emoji2:emoji2:1.1.0") + implementation("androidx.preference:preference-ktx:1.2.0") // preference fragment & al + implementation("androidx.legacy:legacy-preference-v14:1.0.0") // styling for the fragment + implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.4.1") + implementation("androidx.lifecycle:lifecycle-common-java8:2.4.1") + implementation("androidx.sharetarget:sharetarget:1.2.0-rc01") + + implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.6.2") + + implementation("com.github.bumptech.glide:glide:4.13.2") + kapt("com.github.bumptech.glide:compiler:4.13.2") + implementation("com.squareup.okhttp3:okhttp:4.10.0") + + val roomVersion = "2.4.2" + implementation("androidx.room:room-runtime:$roomVersion") + annotationProcessor("androidx.room:room-compiler:$roomVersion") + kapt("androidx.room:room-compiler:$roomVersion") + + implementation("org.yaml:snakeyaml:1.30") + + implementation("org.bouncycastle:bcpkix-jdk15on:1.70") + + // needed for thread-safe date formatting as SimpleDateFormat isn"t thread-safe + // the alternatives, including apache commons and threetenabp, seem to be much slower + // todo perhaps replace with core library desugaring, if it"s fast + implementation("net.danlew:android.joda:2.10.14") + + implementation("org.greenrobot:eventbus:3.3.1") + + debugImplementation("org.aspectj:aspectjrt:1.9.9.1") + debugImplementation("com.squareup.leakcanary:leakcanary-android:2.9.1") + + testImplementation("org.junit.jupiter:junit-jupiter:5.8.2") + testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.2") +} + +tasks.withType { + options.encoding = "UTF-8" +} + +android { + compileSdk = 31 + + defaultConfig { + versionCode = 1_08_01 + versionName = "1.8.1" + + minSdk = 21 + targetSdk = 31 + buildConfigField("String", "VERSION_BANNER", "\"" + versionBanner() + "\"") + + vectorDrawables.useSupportLibrary = true + + javaCompileOptions { + annotationProcessorOptions { + arguments["room.schemaLocation"] = "$projectDir/schemas" + arguments["room.incremental"] = "true" + } + } + + kotlinOptions { + freeCompilerArgs = listOf( + "-language-version", "1.7", + "-api-version", "1.7") + jvmTarget = "11" + } + } + + signingConfigs { + create("dev") { + try { + storeFile = file(project.properties["devStorefile"] as String) + storePassword = project.properties["devStorePassword"] as String + keyAlias = project.properties["devKeyAlias"] as String + keyPassword = project.properties["devKeyPassword"] as String + } catch (e: Exception) { + project.logger.warn("WARNING: Set the values devStorefile, devStorePassword, " + + "devKeyAlias, and devKeyPassword " + + "in ~/.gradle/gradle.properties to sign the release.") + } + } + } + + buildTypes { + getByName("debug") { + applicationIdSuffix = ".debug" + versionNameSuffix = "-debug" + } + + getByName("release") { + isMinifyEnabled = true + proguardFiles(getDefaultProguardFile("proguard-android-optimize.txt"), + "proguard-rules.pro", + "../cats/proguard-rules.pro") + // kotlinx-coroutines-core debug-only artifact + // see https://github.com/Kotlin/kotlinx.coroutines#avoiding-including-the-debug-infrastructure-in-the-resulting-apk + packagingOptions { 
+ resources.excludes += "DebugProbesKt.bin" + } + } + + create("dev") { + initWith(getByName("release")) + matchingFallbacks += listOf("release") + applicationIdSuffix = ".dev" + versionNameSuffix = "-dev" + signingConfig = signingConfigs.getByName("dev") + } + + compileOptions { + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 + } + } + + buildFeatures { + viewBinding = true + } +} + +fun versionBanner(): String { + val os = org.apache.commons.io.output.ByteArrayOutputStream() + project.exec { + commandLine = "git describe --long".split(" ") + standardOutput = os + } + return String(os.toByteArray()).trim() +} + +//////////////////////////////////////////////////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////// cats +//////////////////////////////////////////////////////////////////////////////////////////////////// + +// ajc gets hold of some files such as R.jar, and on Windows it leads to errors such as: +// The process cannot access the file because it is being used by another process +// to avoid these, weave in a process, which `javaexec` will helpfully launch for us. + +fun weave(classPath: Iterable, aspectPath: Iterable, input: Iterable, output: File) { + val runInAProcess = OperatingSystem.current().isWindows + val bootClassPath = android.bootClasspath + + println(if (runInAProcess) ":: weaving in a process..." else ":: weaving...") + println(":: boot class path: $bootClassPath") + println(":: class path: $classPath") + println(":: aspect path: $aspectPath") + println(":: input: $input") + println(":: output: $output") + + val arguments = listOf("-showWeaveInfo", + "-1.8", + "-preserveAllLocals", + "-bootclasspath", bootClassPath.asArgument, + "-classpath", classPath.asArgument, + "-aspectpath", aspectPath.asArgument, + "-inpath", input.asArgument, + "-d", output.absolutePath) + + if (runInAProcess) { + javaexec { + classpath = weaving + main = "org.aspectj.tools.ajc.Main" + args = arguments + } + } else { + val handler = MessageHandler(true) + Main().run(arguments.toTypedArray(), handler) + + val log = project.logger + for (message in handler.getMessages(null, true)) { + when (message.kind) { + IMessage.DEBUG -> log.debug("DEBUG " + message.message, message.thrown) + IMessage.INFO -> log.info("INFO: " + message.message, message.thrown) + IMessage.WARNING -> log.warn("WARN: " + message.message, message.thrown) + IMessage.FAIL, + IMessage.ERROR, + IMessage.ABORT -> log.error("ERROR: " + message.message, message.thrown) + } + } + } +} + +// the only purpose of the following is to get a hold of aspectjtools jar +// this jar is already on build script classpath, but that classpath is impossible to get +// see https://discuss.gradle.org/t/how-do-i-determine-buildscript-classpath/37973/3 + +val weaving: Configuration by configurations.creating + +dependencies { + weaving("org.aspectj:aspectjtools:1.9.9.1") +} + +// historical note: the problem with weaving Kotlin and Java in-place is that: +// * Java is compiled by task compileDebugJavaWithJavac +// * gradle can run either one of these tasks, or both of them +// * compileDebugJavaWithJavac depends on compileDebugKotlin +// * weaving Kotlin requires Java classes +// +// a transformation is a poorly advertised feature that works on merged code, and also has its own +// inputs and outputs, so this fixes all of our problems... 
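+// (this transform is hooked up near the bottom of this file via
+// android.registerTransform(TransformCats()); note that the Transform API used here
+// has been deprecated in later versions of the Android Gradle plugin, so this approach
+// is tied to the plugin version this project builds with)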
+ + class TransformCats : Transform() { + override fun getName(): String = TransformCats::class.simpleName!! + + override fun getInputTypes() = setOf(QualifiedContent.DefaultContentType.CLASSES) + + // only look for annotations in app classes + // transformation will consume these and put woven classes in the output dir + override fun getScopes() = mutableSetOf(QualifiedContent.Scope.PROJECT) + + // but also have the rest on our class path + // these will not be touched by the transformation + override fun getReferencedScopes() = mutableSetOf(QualifiedContent.Scope.SUB_PROJECTS, + QualifiedContent.Scope.EXTERNAL_LIBRARIES) + + override fun isIncremental() = false + + // only run on debug builds + override fun applyToVariant(variant: VariantInfo) = variant.isDebuggable + + override fun transform(invocation: TransformInvocation) { + if (!invocation.isIncremental) { + invocation.outputProvider.deleteAll() + } + + val output = invocation.outputProvider.getContentLocation(name, outputTypes, + scopes, Format.DIRECTORY) + if (output.isDirectory) FileUtils.deleteDirectoryContents(output) + FileUtils.mkdirs(output) + + val input = mutableListOf() + val classPath = mutableListOf() + val aspectPath = mutableListOf() + + invocation.inputs.forEach { source -> + source.directoryInputs.forEach { dir -> + input.add(dir.file) + classPath.add(dir.file) + } + + source.jarInputs.forEach { jar -> + input.add(jar.file) + classPath.add(jar.file) + } + } + + invocation.referencedInputs.forEach { source -> + source.directoryInputs.forEach { dir -> + classPath.add(dir.file) + } + + source.jarInputs.forEach { jar -> + classPath.add(jar.file) + // this used to read `if (jar.name == ":cats") ...`, + // but with android gradle plugin 4.2.0 jar names contain garbage + // this is a very simple but a bit fragile workaround. todo improve + if (jar.file.directoriesInsideRootProject().contains("cats")) { + aspectPath.add(jar.file) + } + } + + } + + weave(classPath, aspectPath, input, output) + } +} + +android.registerTransform(TransformCats()) + +val Iterable.asArgument get() = joinToString(File.pathSeparator) + +fun File.directoriesInsideRootProject() = sequence { + var file = this@directoriesInsideRootProject + while (true) { + yield(file.name) + file = file.parentFile ?: break + if (file == rootProject.projectDir) break + } +} diff --git a/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml b/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml new file mode 100644 index 00000000..0e1ff29b --- /dev/null +++ b/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml @@ -0,0 +1,1023 @@ + + +]> + + + + + + + + + Weechat-Android + com.ubergeek42.WeechatAndroid + + + + Relay host is not set + Relay password is not set + SSH host is not set + SSH password is not set + SSH private key is not set + + + + Server unexpectedly closed connection while connecting. + Wrong password or connection type? 
+ + Could not resolve address %s + + Error: %s + Buffer lost some hot messages because of new lines + There are no hot buffers for now + Not connected + Buffer list empty + Activity not found for intent %s + + + + Upload file + Tab + Upload + Send + + Cancel search + Search up + Search down + More + + Go to bottom + + Fetch more lines + Fetching lines… + + search + + filter + + Open drawer + Close drawer + + + + + + Search + + Users + + Hotlist + Show hot message + + Close + + Connect + Disconnect + Stop connecting + + Settings + + Filter lines + + Dark theme + + Attach image + Attach media + Attach file + Attach image + Attach media + Take photo + + Debug + Sync hotlist + Die + + + + Prefix + Message + Both + + Regex + Case sensitive + + + + With timestamps + Without timestamps + Messages only + + + + + + Connection Status + Hotlist + + Waiting for network + Will connect in %d seconds… + Connecting now… + Connected to %s + + + + %d message + %d messages + + +  in %d buffer +  in %d buffers + + + + + New message in %2$s + %1$d new messages in %2$s + + + + (user unknown) + (users unknown) + + (message not fetched) + + (message not fetched) + (%d messages not fetched) + + + + Me + + Reply + + + + + + + + %1$s (%2$s) + + %1$d user + %1$d users + + %s (away) + + + Copy + Select text + + Paste + + + Permission required + + To take photos, app needs write access to public storage + OK + + + + + + Issued to:

      + %1$s
      +
      + Issued by:
      + %2$s
      +
      + Validity period:
      + Issued on: %3$s
      + Expires on: %4$s
      +
      + SHA-256 fingerprint:
      + %5$s + ]]> + Unknown + + Reject + Back to safety + + + + + Invalid hostname + + %1$s + but the certificate is only valid for the following hosts: %2$s + ]]> + Note that Android P and beyond does not fall back to Common Name (CN) validation. + Subject Alternative Name (SAN) must be used instead. + Learn more + ]]> +       %s + ]]> +       (none) + ]]> + + + + + Certificate expired + + This certificate is no longer valid. + Please make sure that the device date is correct. + + + + + Certificate not yet valid + + This certificate will be valid in the future. + Please make sure that the device date is correct. + + + + + Untrusted certificate + + This certificate isn’t trusted by Android, but you can still connect. + The app will remember the selected certificate + and trust it and any certificates signed by it. + + + Accept selected + + + + + Certificate not pinned + + This server is trusted by Android, + but a setting requires you to confirm that you trust it as well. + The app will remember the selected certificate + and trust it and any certificates signed by it. + + + Pin selected + + + + + + + Unknown + + + + + Unknown server + %1$s has never been encountered. +
      +
      %2$s key SHA256 fingerprint: +
      %3$s + ]]>
      + + Accept server key + Reject + + + + + Server changed key + ⚠ Warning: it’s possible that someone is trying to hijack this connection! +
      +
      Server at %1$s is known, but its key doesn’t match the key you previously accepted. +
      +
      %2$s key SHA256 fingerprint: +
      %3$s +
      +
      If you want to continue, please clear known hosts in preferences. + ]]>
      + + + + + + + + + + Clear + Paste + Choose file + %1$s (%2$s) + set + not set + Clipboard is empty + + password + + Save + Default + Discard changes? + Cancel + Discard + + None + Unknown + + Invalid number + No spaces allowed in hostnames + + + + + + Connection + + Connection type + + Plain connection + WeeChat SSL + SSH tunnel + WebSocket + WebSocket (SSL) + + + + + WebSocket path + + + + + + SSL settings + + + Require certificate pins + + Prompt to confirm that you trust the server, even if the system trusts it + + Clear certificates + No trusted certificates + One trusted certificate + + One trusted certificate + %s trusted certificates + + Clear certificates? + Clear + Cancel + Certificates cleared + Could not clear certificates + + + Client certificate + + PKCS #12 file containing private key and client certificate + + Certificate was stored inside security hardware + + Certificate was stored inside software key store + + Certificate was stored inside key store + + Certificate forgotten + + + Server is asking for a client certificate but none is set. + Wanted: %1$s certificate issued by: %2$s + + Server is asking for a client certificate but the one we have doesn’t fit. + Wanted: %1$s certificate issued by: %2$s + + + + SSH tunnel settings + + SSH host + + SSH port + + SSH username + + Authentication method + + Password + Key + + + Password + + Private key + Ed25519, ECDSA, RSA or DSA key + + %s key was stored inside security hardware + + %s key was stored inside software key store + + %s key was stored inside key store + + %1$s key was stored inside the app. + \n + \nThe key couldn’t be stored in the key store: %2$s + + Key forgotten + + Clear known hosts + No entries + + %s entry + %s entries + + Clear known hosts? + Cancel + Clear + Known hosts cleared + + + Failed to authenticate with password + + Failed to authenticate with key + + + + Relay + + Relay host + + Relay port + + Relay password + + + + Handshake settings + + Handshake + + Compatibility + Modern & fast + Modern + + + In compatibility mode, the password isn’t hashed. This is the fastest method. + This method is required if using WeeChat < 2.9, but works on the later versions as well. + +

      Modern & fast handshake limits algorithms to the SHA-2 family. + +

      Modern handshake also includes PBKDF2. + These algorithms can be very slow, depending on the number of iterations. + +

      Password hashing offers little to no benefit if the connection is encrypted. + Learn more + ]]> + + + + Synchronization settings + + + Only sync open buffers + + Can significantly reduce traffic and battery usage, + but hotlist updates will only happen once 5 minutes + + + Sync buffer read status + + Mark buffers as read in WeeChat when you read them in this app + + + Number of lines to fetch + + The number of lines requested when opening a buffer + or when you press the “Load more lines” button (%s) + + + Number of lines to fetch for search + + When starting a new search, unless already fetched, + the app will request up to this many lines from WeeChat (%s) + + + These settings take effect after reconnection. + Note that due to filtering the number of lines actually shown + might be less than the number of loaded lines. + Also note that due to WeeChat’s limitations + the app has to re-fetch all lines every times it requests more lines. + + + + Miscellaneous + + Reconnect on connection loss + + Connect on system boot + + + + Ping settings + + + Enable ping + + Periodically check that the relay connection is still alive when idle + + + Idle time + + Number of seconds to wait before sending a ping when the connection is idle (%s) + + + Ping timeout + + Number of seconds to wait before closing an unresponsive connection (%s) + + + + + + Buffer list + + + Sort buffer list + + Sort by number of highlights/private messages/unread messages + + + Hide non-conversation buffers + + E.g. server buffers and plugin buffers + + + Hide hidden buffers + + Hide buffers hidden with /buffer hide + + + Show buffer filter + + Filter matches full buffer names and reveals matching hidden buffers + + + System gesture exclusion zone + + On Android Q, the left side of the screen is reserved for the back gesture. + Enable this to have a small area in the bottom of the screen + where you can open the buffer list normally. + + + + + + Look & feel + + Text size + + + Hide action bar + + Hide action bar when the keyboard is open or when scrolling up + + + Filter messages + + Hide messages filtered by WeeChat (e.g. irc_smart_filter) + + + Prefix alignment + + Left aligned + Right aligned + Timestamp aligned + No alignment + + + + Maximum width of prefix + + In terms of letters; longer nicknames will be cut (%s) + + + Enclose nicknames + + Enclose the nicknames in < and > + + Timestamp format + %s (default: HH:mm:ss) + Invalid timestamp format + + + Buffer font + Default + Import + Imported: %s + + + Non-monospace fonts will not work well with alignment. + Import fonts from the dialog, + or put them into one of the following locations:%1$s + + + + + + Theme + + @string/pref__theme__theme__system + Dark + Light + + + Set by battery saver + + Theme switch + Show theme switch in the menu + + Light color scheme + Dark color scheme + Error loading color scheme %s + Not set + Error + Import + Imported: %s + + Learn more +
      +
      Import color schemes from the dialogs + or put them into the following location:%1$s + ]]>
      + + + Dim down non-human lines + + Display joins/quits in a faint color, as set in the color scheme + + + + + + Buttons + + Show tab button + + Show send button + + Show paperclip button + + Paperclip button short tap + + @string/pref__buttons__paperclip__actions__content_images + @string/pref__buttons__paperclip__actions__content_media + @string/pref__buttons__paperclip__actions__content_anything + @string/pref__buttons__paperclip__actions__mediastore_images + @string/pref__buttons__paperclip__actions__mediastore_media + @string/pref__buttons__paperclip__actions__camera + + + Paperclip button long tap + + @string/pref__buttons__paperclip__actions__none + @string/pref__buttons__paperclip__actions__content_images + @string/pref__buttons__paperclip__actions__content_media + @string/pref__buttons__paperclip__actions__content_anything + @string/pref__buttons__paperclip__actions__mediastore_images + @string/pref__buttons__paperclip__actions__mediastore_media + @string/pref__buttons__paperclip__actions__camera + + + Disabled + + System: attach images + + System: attach images and videos + + System: attach any files + + Gallery: attach images + + Gallery: attach images and videos + + Take photo + + + When the paperclip button gets hidden to provide more space for the input field, + you can still attach files via overflow menu. + + + Volume buttons change text size + + If set, volume buttons will change text size instead of volume + + + + + + Notifications + + + Enable notifications + + Notify about hot messages such as private messages or highlights + + Notification sound + + Vibration + + Notification light + + + + + + Media preview + + Enabled + + Never + On Wi-Fi only + On unmetered networks only + Always + + + Context + Disabled everywhere + Enabled for %s + + Chat + Paste dialog + Notifications + + + + Insecure requests + + Allow + Rewrite as HTTPS + Disallow + + + +
      ⚠ Warning: the app is accessing the web directly. + A malicious person could craft a website to learn your IP address and other data. + To prevent the app from accessing websites you don’t know, + remove the strategy for the wildcard host “*” or set it to “none”. + Learn more + ]]>
      + + + Strategies + + Defines the ways images are fetched from individual websites, and some filters. + \n + \n%1$s; %2$s; + \n + \n%3$s + Error + Message filter set + Message filter not set + line filters not set + + %d line filter set + %d line filters set + + No strategies loaded + Strategies: %s + +"# don’t look for links in the part +# of the message that matches the +# following regex. this prevents +# the app from showing broken links +# in matrix clients’ quotes, e.g. +# <nick "http://broken.co"> message +#message filter: +# ^<[^ ]{1,16} \".{1,33}\">\\s + +line filters: +# don’t display thumbnails for any +# lines that match the following regex +- regex: '^(?:Title: |[↑^] )' + +# don’t display thumbnails +# for any lines from bot +#- nicks: [bot] + +# don’t display thumbnails +# for any lines from bot +# that also math the given regex +#- nicks: [bot] +# regex: ^<\\S+>\\s + +strategies: +- name: skip pastebins + type: none + hosts: + - pastebin.com + - bpa.st + - dpaste.com + - termbin.com + +- name: skip site banners + type: none + hosts: + - github.com + - gist.github.com + - stackoverflow.com + - '*.stackexchange.com' + - twitch.tv + - '*.twitch.tv' + +#- name: skip the rest, including redirects +# type: none +# hosts: ['*'] + +- name: try the rest + type: any + hosts: ['*'] + +- name: youtube + type: image + hosts: [www.youtube.com, m.youtube.com, youtube.com, youtu.be] + regex: (?i)^https?://(?:(?:www\\.|m\\.)?youtube\\.com/watch\\?v=|youtu\\.be/)([A-Za-z0-9_-]+) + small: https://img.youtube.com/vi/$1/mqdefault.jpg + big: https://img.youtube.com/vi/$1/hqdefault.jpg + +- name: i.imgur + type: image + hosts: [i.imgur.com] + regex: (?i)^https?://i\\.imgur\\.com/([A-Za-z0-9]+) + small: https://i.imgur.com/$1m.jpg + big: https://i.imgur.com/$1h.jpg + +- name: imgur/gallery + type: any + hosts: [imgur.com, www.imgur.com] + regex: (?i)^https?://(?:www\\.)?imgur\\.com/gallery/(.*) + sub: https://imgur.com/a/$1 + +- name: 9gag + type: image + hosts: [9gag.com, img-9gag-fun.9cache.com] + regex: (?i)^https?://(?:9gag\\.com/gag|img-9gag-fun\\.9cache\\.com/photo)/([^_]+) + small: https://images-cdn.9gag.com/photo/$1_700b.jpg + big: https://images-cdn.9gag.com/photo/$1_700b.jpg + +- name: mobile.twitter + type: any + hosts: [mobile.twitter.com] + regex: (?i)^https?://mobile\\.twitter\\.com/(.*) + sub: https://twitter.com/$1 + +- name: common + type: any + regex: (?i)^https?://(.+) + sub: https://$1 + hosts: + - '*.wikipedia.org' + - gfycat.com + - imgur.com + +- name: reddit + type: any + hosts: [v.redd.it, reddit.com, www.reddit.com, old.reddit.com] + body size: 196608 +" + + Advanced + + Download size limit + %s MB + + + Disk cache + + %s MB; takes effect on restart + + + Success cooldown + + %s hours. The app will consider successfully fetched image available, + either from cache or the web, + for the specified amount of time. + + Thumbnail width + + Minimum thumbnail height + + Maximum thumbnail height + + + + + + File sharing + + Accept from other apps + + Text only + Text, images and videos + Everything + + + Direct share + + Disabled + Up to one buffer + Up to two buffers + Up to three buffers + Up to four buffers + + + Upload URL + + File field + + Regex + + +
      curl -s --user user:pass \\ +
        --header \'Additional: Header\' \\ +
        --form additional=field \\ +
        --form file=@file.ext \\ +
        https://example.com | perl -nle \\ +
        \'m#^https://\\S+#; print $1//$&\'
      +
      +
      If the regular expression is set, it is used to find the URL in the response body; + either the first capture group is used, or the whole match. + Learn more + ]]>
      + + Advanced + + Additional headers + + Additional fields + + Authentication + + None + Basic + + + User + + Password + + Remember uploads for + %s hours + + + + + + About + + Weechat-Android %s + + created by ubergeek42 + + build ID %s + Unknown version ID + + + \nThis project uses the following libraries: + \n + \n    • Android Logger by Noveo Group + \n    • AspectJ by Eclipse Foundation + \n    • Bouncy Castle + \n    • EventBus by greenrobot + \n    • Glide by Bump Technologies + \n    • Hugo by Jake Wharton + \n    • Java-WebSocket by Nathan Rajlich + \n    • joda-time-android by Daniel Lew + \n    • JSch by JCraft + \n    • LeakCanary by Square, Inc + \n    • Mockito by Szczepan Faber and friends + \n    • nv-websocket-client by Takahiko Kawasaki + \n    • OkHttp by Square, Inc + \n    • SLF4J by QOS.ch + \n    • SnakeYAML by Andrey Somov + \n    • sshlib by ConnectBot + \n + \nPlease create an issue on GitHub + if you find a bug or have a feature request. + \n + \n + diff --git a/tests/source-files/de.varengold.activeTAN/build.gradle b/tests/source-files/de.varengold.activeTAN/build.gradle new file mode 100644 index 00000000..e950be92 --- /dev/null +++ b/tests/source-files/de.varengold.activeTAN/build.gradle @@ -0,0 +1,115 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion versions.compileSdk + defaultConfig { + versionCode 34 + versionName "2021-06-30" + + // Requires API level 23 (Android 6.0) to use Android keystore system for cryptography. + minSdkVersion 23 + targetSdkVersion versions.targetSdk + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + vectorDrawables.useSupportLibrary = true + + javaCompileOptions { + annotationProcessorOptions { + // Export database schema history as JSON files. + arguments = ["room.schemaLocation": "$projectDir/schemas".toString()] + } + } + sourceSets { + // Include database schema history for migration testing. + androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) + } + } + + buildFeatures { + viewBinding = true + } + + buildTypes { + debug { + // Don't mess with the release versions during debugging, so use a different appId. + applicationIdSuffix ".debug" + debuggable true + } + release { + minifyEnabled true + shrinkResources true + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + + flavorDimensions "client", "environment" + productFlavors { + prod { + dimension "environment" + } + + qs { + dimension "environment" + // To be able to install a second app variant, we must change the applicationId. + // Otherwise it would not be possible to use the same device for testing and production. 
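+            // e.g. the resulting applicationIds would look like "de.efdis.activeTAN.QS"
+            // or "de.varengold.activeTAN.QS", depending on the client flavor chosen below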
+ applicationIdSuffix ".QS" + } + + EFD { + // Demo portal efdis-online.de (extern) / dailybuild.efdis-online.de (intern) + dimension "client" + applicationId "de.efdis.activeTAN" + } + + VAR { + dimension "client" + applicationId "de.varengold.activeTAN" + } + + } + compileOptions { + // ZXing uses Java 8 language features from the core library + coreLibraryDesugaringEnabled true + + targetCompatibility JavaVersion.VERSION_1_8 + sourceCompatibility JavaVersion.VERSION_1_8 + } + +} + +tasks.withType(JavaCompile) { + options.deprecation = true +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + + implementation project(':material-design-icons') + implementation project(":barcodescanner") + + implementation 'com.google.android.material:material:1.3.0' + + implementation 'androidx.appcompat:appcompat:1.3.0' + implementation 'androidx.biometric:biometric:1.1.0' + + implementation 'androidx.constraintlayout:constraintlayout:2.0.4' + implementation 'androidx.recyclerview:recyclerview:1.2.1' + + coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.5' + implementation 'com.google.zxing:core:' + versions.zxing + + def room_version = '2.3.0' + implementation "androidx.room:room-runtime:$room_version" + annotationProcessor "androidx.room:room-compiler:$room_version" + androidTestImplementation "androidx.room:room-testing:$room_version" + + testImplementation 'junit:junit:4.13.1' + + androidTestImplementation 'androidx.test.ext:junit:1.1.2' + androidTestImplementation 'androidx.test:runner:1.3.0' + androidTestImplementation 'androidx.test:rules:1.3.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' + androidTestImplementation 'androidx.test.espresso:espresso-contrib:3.3.0' + androidTestImplementation 'androidx.test.espresso:espresso-intents:3.3.0' +} diff --git a/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts b/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts new file mode 100644 index 00000000..14767fa8 --- /dev/null +++ b/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts @@ -0,0 +1,97 @@ +plugins { + id("com.android.application") version "4.1.2" + kotlin("android") version "1.4.30" +} + +android { + compileSdkVersion(30) + buildToolsVersion("30.0.3") + + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = JavaVersion.VERSION_1_8.toString() + freeCompilerArgs = listOf("-Xallow-result-return-type") // enables use of kotlin.Result + } + + defaultConfig { + applicationId = "dev.patrickgold.florisboard" + minSdkVersion(23) + targetSdkVersion(30) + versionCode(29) + versionName("0.3.10") + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + } + + buildFeatures { + viewBinding = true + } + + buildTypes { + named("debug").configure { + applicationIdSuffix = ".debug" + versionNameSuffix = "-debug" + + resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_debug") + resValue("mipmap", "floris_app_icon_round", "@mipmap/ic_app_icon_debug_round") + resValue("string", "floris_app_name", "FlorisBoard Debug") + } + + create("beta") // Needed because by default the "beta" BuildType does not exist + named("beta").configure { + applicationIdSuffix = ".beta" + versionNameSuffix = "-beta01" + proguardFiles.add(getDefaultProguardFile("proguard-android-optimize.txt")) + + resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_beta") + resValue("mipmap", 
"floris_app_icon_round", "@mipmap/ic_app_icon_beta_round") + resValue("string", "floris_app_name", "FlorisBoard Beta") + } + + named("release").configure { + proguardFiles.add(getDefaultProguardFile("proguard-android-optimize.txt")) + + resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_release") + resValue("mipmap", "floris_app_icon_round", "@mipmap/ic_app_icon_release_round") + resValue("string", "floris_app_name", "@string/app_name") + } + } + + testOptions { + unitTests { + isIncludeAndroidResources = true + } + } + + lintOptions { + isAbortOnError = false + } +} + +dependencies { + implementation("androidx.activity", "activity-ktx", "1.2.1") + implementation("androidx.appcompat", "appcompat", "1.2.0") + implementation("androidx.core", "core-ktx", "1.3.2") + implementation("androidx.fragment", "fragment-ktx", "1.3.0") + implementation("androidx.preference", "preference-ktx", "1.1.1") + implementation("androidx.constraintlayout", "constraintlayout", "2.0.4") + implementation("androidx.lifecycle", "lifecycle-service", "2.2.0") + implementation("com.google.android", "flexbox", "2.0.1") // requires jcenter as of version 2.0.1 + implementation("com.squareup.moshi", "moshi-kotlin", "1.11.0") + implementation("com.squareup.moshi", "moshi-adapters", "1.11.0") + implementation("com.google.android.material", "material", "1.3.0") + implementation("org.jetbrains.kotlinx", "kotlinx-coroutines-android", "1.4.2") + implementation("com.jaredrummler", "colorpicker", "1.1.0") + implementation("com.jakewharton.timber", "timber", "4.7.1") + implementation("com.nambimobile.widgets", "expandable-fab", "1.0.2") + + testImplementation("junit", "junit", "4.13.1") + testImplementation("org.mockito", "mockito-inline", "3.7.7") + testImplementation("org.robolectric", "robolectric", "4.5.1") + androidTestImplementation("androidx.test.ext", "junit", "1.1.2") + androidTestImplementation("androidx.test.espresso", "espresso-core", "3.3.0") +} diff --git a/tests/source-files/eu.siacs.conversations/build.gradle b/tests/source-files/eu.siacs.conversations/build.gradle new file mode 100644 index 00000000..1762892b --- /dev/null +++ b/tests/source-files/eu.siacs.conversations/build.gradle @@ -0,0 +1,128 @@ +// Top-level build file where you can add configuration options common to all +// sub-projects/modules. 
+buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:2.3.3' + } +} + +apply plugin: 'com.android.application' + +repositories { + jcenter() + mavenCentral() + maven { + url 'https://maven.google.com' + } +} + +configurations { + playstoreCompile + freeCompile +} + +ext { + supportLibVersion = '27.0.2' +} + +dependencies { + compile project(':libs:MemorizingTrustManager') + playstoreCompile 'com.google.android.gms:play-services-gcm:11.6.2' + compile 'org.sufficientlysecure:openpgp-api:10.0' + compile 'com.soundcloud.android:android-crop:1.0.1@aar' + compile "com.android.support:support-v13:$supportLibVersion" + compile "com.android.support:appcompat-v7:$supportLibVersion" + compile "com.android.support:support-emoji:$supportLibVersion" + freeCompile "com.android.support:support-emoji-bundled:$supportLibVersion" + compile 'org.bouncycastle:bcmail-jdk15on:1.52' + compile 'org.jitsi:org.otr4j:0.22' + compile 'org.gnu.inet:libidn:1.15' + compile 'com.google.zxing:core:3.2.1' + compile 'com.google.zxing:android-integration:3.2.1' + compile 'de.measite.minidns:minidns-hla:0.2.4' + compile 'de.timroes.android:EnhancedListView:0.3.4' + compile 'me.leolin:ShortcutBadger:1.1.19@aar' + compile 'com.kyleduo.switchbutton:library:1.2.8' + compile 'org.whispersystems:signal-protocol-java:2.6.2' + compile 'com.makeramen:roundedimageview:2.3.0' + compile "com.wefika:flowlayout:0.4.1" + compile 'net.ypresto.androidtranscoder:android-transcoder:0.2.0' + +} + +ext { + travisBuild = System.getenv("TRAVIS") == "true" + preDexEnabled = System.getProperty("pre-dex", "true") +} + +android { + compileSdkVersion 26 + buildToolsVersion "26.0.2" + + defaultConfig { + minSdkVersion 14 + targetSdkVersion 25 + versionCode 245 + versionName "1.23.1" + archivesBaseName += "-$versionName" + applicationId "eu.siacs.conversations" + } + + dexOptions { + // Skip pre-dexing when running on Travis CI or when disabled via -Dpre-dex=false. 
+        preDexLibraries = preDexEnabled && !travisBuild
+        jumboMode true
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_7
+        targetCompatibility JavaVersion.VERSION_1_7
+    }
+
+    productFlavors {
+        playstore
+        free
+    }
+
+
+    if(new File("signing.properties").exists()) {
+        Properties props = new Properties()
+        props.load(new FileInputStream(file("signing.properties")))
+
+        signingConfigs {
+            release {
+                storeFile file(props['keystore'])
+                storePassword props['keystore.password']
+                keyAlias props['keystore.alias']
+                keyPassword props['keystore.password']
+            }
+        }
+        buildTypes.release.signingConfig = signingConfigs.release
+    }
+
+    lintOptions {
+        disable 'MissingTranslation', 'InvalidPackage', 'MissingQuantity', 'AppCompatResource'
+    }
+
+    subprojects {
+
+        afterEvaluate {
+            if (getPlugins().hasPlugin('android') ||
+                    getPlugins().hasPlugin('android-library')) {
+
+                configure(android.lintOptions) {
+                    disable 'AndroidGradlePluginVersion', 'MissingTranslation'
+                }
+            }
+
+        }
+    }
+
+    packagingOptions {
+        exclude 'META-INF/BCKEY.DSA'
+        exclude 'META-INF/BCKEY.SF'
+    }
+}
diff --git a/tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt b/tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt
new file mode 100644
index 00000000..29057c14
--- /dev/null
+++ b/tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt
@@ -0,0 +1 @@
+Conversations
\ No newline at end of file
diff --git a/tests/source-files/fdroid/fdroidclient/AndroidManifest.xml b/tests/source-files/fdroid/fdroidclient/AndroidManifest.xml
new file mode 100644
index 00000000..bd84256b
--- /dev/null
+++ b/tests/source-files/fdroid/fdroidclient/AndroidManifest.xml
@@ -0,0 +1,484 @@
diff --git a/tests/source-files/fdroid/fdroidclient/build.gradle b/tests/source-files/fdroid/fdroidclient/build.gradle
new file mode 100644
index 00000000..8971f235
--- /dev/null
+++ b/tests/source-files/fdroid/fdroidclient/build.gradle
@@ -0,0 +1,233 @@
+apply plugin: 'com.android.application'
+
+if ( !hasProperty( 'sourceDeps' ) ) {
+
+    logger.info "Setting up *binary* dependencies for F-Droid (if you'd prefer to build from source, pass the -PsourceDeps argument to gradle while building)."
+
+    repositories {
+        jcenter()
+
+        // This is here until we sort out all dependencies from mavenCentral/jcenter. Once all of
+        // the dependencies below have been sorted out, this can be removed.
+ flatDir { + dirs 'libs/binaryDeps' + } + } + + dependencies { + + compile 'com.android.support:support-v4:22.1.0', + 'com.android.support:appcompat-v7:22.1.0', + 'com.android.support:support-annotations:22.1.0', + + 'org.thoughtcrime.ssl.pinning:AndroidPinning:1.0.0', + 'com.nostra13.universalimageloader:universal-image-loader:1.9.4', + 'com.google.zxing:core:3.2.0', + 'eu.chainfire:libsuperuser:1.0.0.201504231659', + + // We use a slightly modified spongycastle, see + // openkeychain/spongycastle with some changes on top of 1.51.0.0 + 'com.madgag.spongycastle:pkix:1.51.0.0', + 'com.madgag.spongycastle:prov:1.51.0.0', + 'com.madgag.spongycastle:core:1.51.0.0' + + // Upstream doesn't have a binary on mavenCentral/jcenter yet: + // https://github.com/kolavar/android-support-v4-preferencefragment/issues/13 + compile(name: 'support-v4-preferencefragment-release', ext: 'aar') + + // Fork for F-Droid, including support for https. Not merged into upstream + // yet (seems to be a little unsupported as of late), so not using mavenCentral/jcenter. + compile(name: 'nanohttpd-2.1.0') + + // Upstream doesn't have a binary on mavenCentral. + compile(name: 'zipsigner') + + // Project semi-abandoned, 3.4.1 is from 2011 and we use trunk from 2013 + compile(name: 'jmdns') + + androidTestCompile 'commons-io:commons-io:2.2' + } + +} else { + + logger.info "Setting up *source* dependencies for F-Droid (because you passed in the -PsourceDeps argument to gradle while building)." + + repositories { + jcenter() + } + + dependencies { + compile project(':extern:AndroidPinning') + compile project(':extern:UniversalImageLoader:library') + compile project(':extern:libsuperuser:libsuperuser') + compile project(':extern:nanohttpd:core') + compile project(':extern:jmdns') + compile project(':extern:zipsigner') + compile project(':extern:zxing-core') + compile( project(':extern:support-v4-preferencefragment') ) { + exclude module: 'support-v4' + } + + // Until the android team updates the gradle plugin version from 0.10.0 to + // a newer version, we can't build this from source with our gradle version + // of 1.0.0. They use API's which have been moved in the newer plugin. + // So yes, this is a little annoying that our "source dependencies" include + // a bunch of binaries from jcenter - but the ant build file (which is the + // one used to build F-Droid which is distributed on https://f-droid.org + // builds these from source - well - not support-v4). + // + // If the android team gets the build script working with the newer plugin, + // then you can find the relevant portions of the ../build.gradle file that + // include magic required to make it work at around about the v0.78 git tag. + // They have since been removed to clean up the build file. + compile 'com.android.support:support-v4:22.1.0', + 'com.android.support:appcompat-v7:22.1.0', + 'com.android.support:support-annotations:22.1.0' + + androidTestCompile 'commons-io:commons-io:2.2' + } + +} + +task cleanBinaryDeps(type: Delete) { + + enabled = project.hasProperty('sourceDeps') + description = "Removes all .jar and .aar files from F-Droid/libs/. Requires the sourceDeps property to be set (\"gradle -PsourceDeps cleanBinaryDeps\")" + + delete fileTree('libs/binaryDeps') { + include '*.aar' + include '*.jar' + } +} + +task binaryDeps(type: Copy, dependsOn: ':F-Droid:prepareReleaseDependencies') { + + enabled = project.hasProperty('sourceDeps') + description = "Copies .jar and .aar files from subproject dependencies in extern/ to F-Droid/libs. 
Requires the sourceDeps property to be set (\"gradle -PsourceDeps binaryDeps\")" + + from ('../extern/' ) { + include 'support-v4-preferencefragment/build/outputs/aar/support-v4-preferencefragment-release.aar', + 'nanohttpd/core/build/libs/nanohttpd-2.1.0.jar', + 'zipsigner/build/libs/zipsigner.jar', + 'jmdns/build/libs/jmdns.jar', + 'Support/v4/build/libs/support-v4.jar' + } + + into 'libs/binaryDeps' + + includeEmptyDirs false + + eachFile { FileCopyDetails details -> + // Don't copy to a sub folder such as libs/binaryDeps/Project/build/outputs/aar/project.aar, but + // rather libs/binaryDeps/project.aar. + details.path = details.name + } + +} + +android { + compileSdkVersion 21 + buildToolsVersion '22.0.1' + + defaultConfig { + + flavorDimensions "default" + + productFlavors { + devVersion { + applicationId "org.fdroid.fdroid.dev" + dimension "default" + versionCode 949 + versionName "0.95-dev" + } + } + + } + + sourceSets { + main { + manifest.srcFile 'AndroidManifest.xml' + java.srcDirs = ['src'] + resources.srcDirs = ['src'] + aidl.srcDirs = ['src'] + renderscript.srcDirs = ['src'] + res.srcDirs = ['res'] + assets.srcDirs = ['assets'] + } + + androidTest.setRoot('test') + androidTest { + manifest.srcFile 'test/AndroidManifest.xml' + java.srcDirs = ['test/src'] + resources.srcDirs = ['test/src'] + aidl.srcDirs = ['test/src'] + renderscript.srcDirs = ['test/src'] + res.srcDirs = ['test/res'] + assets.srcDirs = ['test/assets'] + } + } + + buildTypes { + release { + minifyEnabled false + } + buildTypes { + debug { + debuggable true + } + } + } + + compileOptions { + compileOptions.encoding = "UTF-8" + + // Use Java 1.7, requires minSdk 8 + sourceCompatibility JavaVersion.VERSION_1_7 + targetCompatibility JavaVersion.VERSION_1_7 + } + + lintOptions { + checkReleaseBuilds false + abortOnError false + } + + // Enable all Android lint warnings + gradle.projectsEvaluated { + tasks.withType(JavaCompile) { + options.compilerArgs << "-Xlint:all" + } + } + +} + +// This person took the example code below from another blogpost online, however +// I lost the reference to it: +// http://stackoverflow.com/questions/23297562/gradle-javadoc-and-android-documentation +android.applicationVariants.all { variant -> + + task("generate${variant.name}Javadoc", type: Javadoc) { + title = "$name $version API" + description "Generates Javadoc for F-Droid." + source = variant.javaCompile.source + + def sdkDir + Properties properties = new Properties() + File localProps = project.rootProject.file('local.properties') + if (localProps.exists()) { + properties.load(localProps.newDataInputStream()) + sdkDir = properties.getProperty('sdk.dir') + } else { + sdkDir = System.getenv('ANDROID_HOME') + } + if (!sdkDir) { + throw new ProjectConfigurationException("Cannot find android sdk. 
Make sure sdk.dir is defined in local.properties or the environment variable ANDROID_HOME is set.", null) + } + + ext.androidJar = "${sdkDir}/platforms/${android.compileSdkVersion}/android.jar" + classpath = files(variant.javaCompile.classpath.files) + files(ext.androidJar) + options.links("http://docs.oracle.com/javase/7/docs/api/"); + options.links("http://d.android.com/reference/"); + exclude '**/BuildConfig.java' + exclude '**/R.java' + } +} diff --git a/tests/source-files/firebase-allowlisted/app/build.gradle b/tests/source-files/firebase-allowlisted/app/build.gradle new file mode 100644 index 00000000..e97e2316 --- /dev/null +++ b/tests/source-files/firebase-allowlisted/app/build.gradle @@ -0,0 +1,5 @@ +dependencies { + compile 'com.firebase:firebase-jobdispatcher:0.8.4' + + testCompile 'junit:junit:4.12' +} diff --git a/tests/source-files/firebase-allowlisted/build.gradle b/tests/source-files/firebase-allowlisted/build.gradle new file mode 100644 index 00000000..e52ed0ac --- /dev/null +++ b/tests/source-files/firebase-allowlisted/build.gradle @@ -0,0 +1,14 @@ +buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:2.3.3' + } +} + +allprojects { + repositories { + jcenter() + } +} diff --git a/tests/source-files/firebase-suspect/app/build.gradle b/tests/source-files/firebase-suspect/app/build.gradle new file mode 100644 index 00000000..440d75a6 --- /dev/null +++ b/tests/source-files/firebase-suspect/app/build.gradle @@ -0,0 +1,5 @@ +dependencies { + compile 'com.google.firebase:firebase-core:11.6.2' + + testCompile 'junit:junit:4.12' +} diff --git a/tests/source-files/firebase-suspect/build.gradle b/tests/source-files/firebase-suspect/build.gradle new file mode 100644 index 00000000..47295bbb --- /dev/null +++ b/tests/source-files/firebase-suspect/build.gradle @@ -0,0 +1,11 @@ +buildscript { + dependencies { + classpath 'com.android.tools.build:gradle:2.3.3' + } +} + +allprojects { + repositories { + maven { url "https://maven.google.com" } + } +} diff --git a/tests/source-files/flavor.test/build.gradle b/tests/source-files/flavor.test/build.gradle new file mode 100644 index 00000000..2c958bdc --- /dev/null +++ b/tests/source-files/flavor.test/build.gradle @@ -0,0 +1,15 @@ +dependenies { + /// dependencies for app building + fossImplementation 'com.android.support:multidex:1.0.2' + implementation 'com.github.nextcloud:android-library:1.0.33' + devImplementation 'com.github.nextcloud:android-library:master-SNAPSHOT' // use always latest master + implementation "com.android.support:support-v4:${supportLibraryVersion}" + prodImplementation "com.android.support:design:${supportLibraryVersion}" + gplayImplementation 'com.jakewharton:disklrucache:2.0.2' + implementation "com.android.support:appcompat-v7:${supportLibraryVersion}" + gplayProdImplementation "com.android.support:cardview-v7:${supportLibraryVersion}" + implementation "com.android.support:exifinterface:${supportLibraryVersion}" + fossDevImplementation 'com.github.tobiasKaminsky:android-floating-action-button:1.10.2' + gplayDevImplementation 'com.github.albfernandez:juniversalchardet:v2.0.0' + fossProdImplementation 'com.google.code.findbugs:annotations:2.0.1' +} diff --git a/tests/source-files/info.guardianproject.ripple/build.gradle b/tests/source-files/info.guardianproject.ripple/build.gradle new file mode 100644 index 00000000..5062b208 --- /dev/null +++ b/tests/source-files/info.guardianproject.ripple/build.gradle @@ -0,0 +1,18 @@ +buildscript { + repositories { + maven { 
url 'file:///usr/share/maven-repo' } + maven { url 'https://maven.google.com' } + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:2.2.2' + } +} + +allprojects { + repositories { + maven { url 'file:///usr/share/maven-repo' } + maven { url 'https://maven.google.com' } + jcenter() + } +} diff --git a/tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml b/tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/flutter/pubspec.lock b/tests/source-files/lockfile.test/flutter/pubspec.lock new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/flutter/pubspec.yaml b/tests/source-files/lockfile.test/flutter/pubspec.yaml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/javascript/package.json b/tests/source-files/lockfile.test/javascript/package.json new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/javascript/yarn.lock b/tests/source-files/lockfile.test/javascript/yarn.lock new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/rust/subdir/Cargo.lock b/tests/source-files/lockfile.test/rust/subdir/Cargo.lock new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/rust/subdir/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir/Cargo.toml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/lockfile.test/rust/subdir2/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir2/Cargo.toml new file mode 100644 index 00000000..e69de29b diff --git a/tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle b/tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle new file mode 100644 index 00000000..e2d1dd8c --- /dev/null +++ b/tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle @@ -0,0 +1,248 @@ +apply plugin: 'com.android.application' +apply plugin: 'witness' +apply plugin: 'jacoco' +apply plugin: 'com.github.kt3k.coveralls' + +dependencies { + // NOTE: Always use fixed version codes not dynamic ones, e.g. 
0.7.3 instead of 0.7.+, see README for more information + // NOTE: libraries are pinned to a specific build, see below + + // from local Android SDK + compile 'com.android.support:support-v4:22.1.1' + compile 'com.android.support:appcompat-v7:22.1.1' + compile 'com.android.support:recyclerview-v7:22.1.0' + compile 'com.android.support:cardview-v7:22.1.0' + + // Unit tests in the local JVM with Robolectric + // https://developer.android.com/training/testing/unit-testing/local-unit-tests.html + // https://github.com/nenick/AndroidStudioAndRobolectric + // http://www.vogella.com/tutorials/Robolectric/article.html + testCompile 'junit:junit:4.12' + testCompile 'org.robolectric:robolectric:3.0-rc3' + + // UI testing with Espresso + androidTestCompile 'com.android.support.test:runner:0.3' + androidTestCompile 'com.android.support.test:rules:0.3' + androidTestCompile 'com.android.support.test.espresso:espresso-core:2.2' + androidTestCompile ('com.android.support.test.espresso:espresso-contrib:2.2') { + exclude group: 'com.android.support', module: 'appcompat' + exclude group: 'com.android.support', module: 'support-v4' + exclude module: 'recyclerview-v7' + } + + // Temporary workaround for bug: https://code.google.com/p/android-test-kit/issues/detail?id=136 + // from https://github.com/googlesamples/android-testing/blob/master/build.gradle#L21 + configurations.all { + resolutionStrategy.force 'com.android.support:support-annotations:22.1.1' + } + + // JCenter etc. + compile 'com.eftimoff:android-patternview:1.0.1@aar' + compile 'com.journeyapps:zxing-android-embedded:2.3.0@aar' + compile 'com.journeyapps:zxing-android-integration:2.3.0@aar' + compile 'com.google.zxing:core:3.2.0' + compile 'com.jpardogo.materialtabstrip:library:1.0.9' + compile 'com.getbase:floatingactionbutton:1.9.0' + compile 'org.commonjava.googlecode.markdown4j:markdown4j:2.2-cj-1.0' + compile 'com.splitwise:tokenautocomplete:1.3.3@aar' + compile 'se.emilsjolander:stickylistheaders:2.6.0' + compile 'org.sufficientlysecure:html-textview:1.1' + compile 'com.mikepenz.materialdrawer:library:2.8.2@aar' + compile 'com.mikepenz.iconics:library:0.9.1@aar' + compile 'com.mikepenz.iconics:octicons-typeface:2.2.0@aar' + compile 'com.mikepenz.iconics:meteocons-typeface:1.1.1@aar' + compile 'com.mikepenz.iconics:community-material-typeface:1.0.0@aar' + compile 'com.nispok:snackbar:2.10.8' + + // libs as submodules + compile project(':extern:openpgp-api-lib:openpgp-api') + compile project(':extern:openkeychain-api-lib:openkeychain-intents') + compile project(':extern:spongycastle:core') + compile project(':extern:spongycastle:pg') + compile project(':extern:spongycastle:pkix') + compile project(':extern:spongycastle:prov') + compile project(':extern:minidns') + compile project(':extern:KeybaseLib:Lib') + compile project(':extern:safeslinger-exchange') +} + +// Output of ./gradlew -q calculateChecksums +// Comment out the libs referenced as git submodules! 
+dependencyVerification { + verify = [ + 'com.android.support:support-v4:1e2e4d35ac7fd30db5ce3bc177b92e4d5af86acef2ef93e9221599d733346f56', + 'com.android.support:appcompat-v7:9a2355537c2f01cf0b95523605c18606b8d824017e6e94a05c77b0cfc8f21c96', + 'com.android.support:recyclerview-v7:522d323079a29bcd76173bd9bc7535223b4af3e5eefef9d9287df1f9e54d0c10', + 'com.android.support:cardview-v7:8dc99af71fec000baa4470c3907755264f15f816920861bc015b2babdbb49807', + 'com.eftimoff:android-patternview:cec80e7265b8d8278b3c55b5fcdf551e4600ac2c8bf60d8dd76adca538af0b1e', + 'com.journeyapps:zxing-android-embedded:702a4f58154dbd9baa80f66b6a15410f7a4d403f3e73b66537a8bfb156b4b718', + 'com.journeyapps:zxing-android-integration:562737821b6d34c899b6fd2234ce0a8a31e02ff1fd7c59f6211961ce9767c7c8', + 'com.google.zxing:core:7fe5a8ff437635a540e56317649937b768b454795ce999ed5f244f83373dee7b', + 'com.jpardogo.materialtabstrip:library:c6ef812fba4f74be7dc4a905faa4c2908cba261a94c13d4f96d5e67e4aad4aaa', + 'com.getbase:floatingactionbutton:052aa2a94e49e5dccc97cb99f2add87e8698b84859f0e3ac181100c0bc7640ca', + 'org.commonjava.googlecode.markdown4j:markdown4j:e952e825d29e1317d96f79f346bfb6786c7c5eef50bd26e54a80823704b62e13', + 'com.splitwise:tokenautocomplete:20bee71cc59b3828eb000b684d46ddf738efd56b8fee453a509cd16fda42c8cb', + 'se.emilsjolander:stickylistheaders:8c05981ec5725be33f7cee5e68c13f3db49cd5c75f1aaeb04024920b1ef96ad4', + 'org.sufficientlysecure:html-textview:ca24b1522be88378634093815ce9ff1b4920c72e7513a045a7846e14069ef988', + 'com.mikepenz.materialdrawer:library:970317ed1a3cb96317f7b8d62ff592b3103eb46dfd68d9b244e7143623dc6d7a', + 'com.mikepenz.iconics:library:4698a36ee4c2af765d0a85779c61474d755b90d66a59020105b6760a8a909e9e', + 'com.mikepenz.iconics:octicons-typeface:67ed7d456a9ce5f5307b85f955797bfb3dd674e2f6defb31c6b8bbe2ede290be', + 'com.mikepenz.iconics:meteocons-typeface:39a8a9e70cd8287cdb119af57a672a41dd09240dba6697f5a0dbda1ccc33298b', + 'com.mikepenz.iconics:community-material-typeface:f1c5afee5f0f10d66beb3ed0df977246a02a9c46de4e05d7c0264bcde53b6b7f', + 'com.nispok:snackbar:80bebc8e5d8b3d728cd5f2336e2d0c1cc2a6b7dc4b55d36acd6b75a78265590a', +// 'OpenKeychain.extern:openpgp-api-lib:f05a9215cdad3a6597e4c5ece6fcec92b178d218195a3e88d2c0937c48dd9580', +// 'OpenKeychain.extern:openkeychain-api-lib:50f6ebb5452d3fdc7be137ccf857a0ff44d55539fcb7b91baef495766ed7f429', +// 'com.madgag.spongycastle:core:df8fcc028a95ac5ffab3b78c9163f5cfa672e41cd50128ca55d458b6cfbacf4b', +// 'com.madgag.spongycastle:pg:160b345b10a2c92dc731453eec87037377f66a8e14a0648d404d7b193c4e380d', +// 'com.madgag.spongycastle:pkix:0b4f3301ea12dd9f25d71770e6ea9f75e0611bf53062543e47be5bc15340a7e4', +// 'com.madgag.spongycastle:prov:7325942e0b39f5fb35d6380818eed4b826e7dfc7570ad35b696d778049d8c36a', +// 'OpenKeychain.extern:minidns:77b1786d29469e3b21f9404827cab811edc857cd68bc732cd57f11307c332eae', +// 'OpenKeychain.extern.KeybaseLib:Lib:c91cda4a75692d8664644cd17d8ac962ce5bc0e266ea26673a639805f1eccbdf', +// 'OpenKeychain.extern:safeslinger-exchange:d222721bb35408daaab9f46449364b2657112705ee571d7532f81cbeb9c4a73f', +// 'OpenKeychain.extern.snackbar:lib:52357426e5275412e2063bdf6f0e6b957a3ea74da45e0aef35d22d9afc542e23', + 'com.android.support:support-annotations:7bc07519aa613b186001160403bcfd68260fa82c61cc7e83adeedc9b862b94ae', + ] +} + +android { + compileSdkVersion rootProject.ext.compileSdkVersion + buildToolsVersion rootProject.ext.buildToolsVersion + + defaultConfig { + minSdkVersion 15 + targetSdkVersion 22 + versionCode 32300 + versionName "3.2.3" + applicationId 
"org.sufficientlysecure.keychain" + // the androidjunitrunner is broken regarding coverage, see here: + // https://code.google.com/p/android/issues/detail?id=170607 + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + // this workaround runner fixes the coverage problem, BUT doesn't work + // with android studio single test execution. use it to generate coverage + // data, but keep the other one otherwis + // testInstrumentationRunner "org.sufficientlysecure.keychain.JacocoWorkaroundJUnitRunner" + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_7 + targetCompatibility JavaVersion.VERSION_1_7 + } + + buildTypes { + release { + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + + // Reference them in the java files with e.g. BuildConfig.ACCOUNT_TYPE. + buildConfigField "String", "ACCOUNT_TYPE", "\"org.sufficientlysecure.keychain.account\"" + + // Reference them in .xml files. + resValue "string", "account_type", "org.sufficientlysecure.keychain.account" + } + + debug { + applicationIdSuffix ".debug" + + // Reference them in the java files with e.g. BuildConfig.ACCOUNT_TYPE. + buildConfigField "String", "ACCOUNT_TYPE", "\"org.sufficientlysecure.keychain.debug.account\"" + + // Reference them in .xml files. + resValue "string", "account_type", "org.sufficientlysecure.keychain.debug.account" + + // Enable code coverage (Jacoco) + testCoverageEnabled true + } + } + + /* + * To sign release build, create file gradle.properties in ~/.gradle/ with this content: + * + * signingStoreLocation=/home/key.store + * signingStorePassword=xxx + * signingKeyAlias=alias + * signingKeyPassword=xxx + */ + if (project.hasProperty('signingStoreLocation') && + project.hasProperty('signingStorePassword') && + project.hasProperty('signingKeyAlias') && + project.hasProperty('signingKeyPassword')) { + println "Found sign properties in gradle.properties! 
Signing build…" + + signingConfigs { + release { + storeFile file(signingStoreLocation) + storePassword signingStorePassword + keyAlias signingKeyAlias + keyPassword signingKeyPassword + } + } + + buildTypes.release.signingConfig = signingConfigs.release + } else { + buildTypes.release.signingConfig = null + } + + // NOTE: Lint is disabled because it slows down builds, + // to enable it comment out the code at the bottom of this build.gradle + lintOptions { + // Do not abort build if lint finds errors + abortOnError false + + checkAllWarnings true + htmlReport true + htmlOutput file('lint-report.html') + } + + // Disable preDexing, causes com.android.dx.cf.iface.ParseException: bad class file magic (cafebabe) or version (0034.0000) on some systems + dexOptions { + preDexLibraries = false + } + + packagingOptions { + exclude 'LICENSE.txt' + } +} + +// apply plugin: 'spoon' + +task jacocoTestReport(type:JacocoReport) { + group = "Reporting" + description = "Generate Jacoco coverage reports" + + classDirectories = fileTree( + dir: "${buildDir}/intermediates/classes/debug", + excludes: ['**/R.class', + '**/R$*.class', + '**/*$ViewInjector*.*', + '**/BuildConfig.*', + '**/Manifest*.*'] + ) + + sourceDirectories = files("${buildDir.parent}/src/main/java") + additionalSourceDirs = files([ + "${buildDir}/generated/source/buildConfig/debug", + "${buildDir}/generated/source/r/debug" + ]) + executionData = files([ + "${buildDir}/jacoco/testDebug.exec", + "${buildDir}/outputs/code-coverage/connected/coverage.ec" + ]) + + reports { + xml.enabled = true + html.enabled = true + } +} + +// Fix for: No report file available: [/home/travis/build/open-keychain/open-keychain/OpenKeychain/build/reports/cobertura/coverage.xml, /home/travis/build/open-keychain/open-keychain/OpenKeychain/build/reports/jacoco/test/jacocoTestReport.xml] +coveralls { + jacocoReportPath 'build/reports/jacoco/jacocoTestReport/jacocoTestReport.xml' +} + +// NOTE: This disables Lint! +tasks.whenTaskAdded { task -> + if (task.name.contains('lint')) { + task.enabled = false + } +} + diff --git a/tests/source-files/open-keychain/open-keychain/build.gradle b/tests/source-files/open-keychain/open-keychain/build.gradle new file mode 100644 index 00000000..9543e384 --- /dev/null +++ b/tests/source-files/open-keychain/open-keychain/build.gradle @@ -0,0 +1,48 @@ +buildscript { + repositories { + jcenter() + } + + dependencies { + // NOTE: Always use fixed version codes not dynamic ones, e.g. 0.7.3 instead of 0.7.+, see README for more information + classpath 'com.android.tools.build:gradle:1.2.3' + classpath files('gradle-witness.jar') + // bintray dependency to satisfy dependency of openpgp-api lib + classpath 'com.novoda:bintray-release:0.2.7' + + classpath 'org.kt3k.gradle.plugin:coveralls-gradle-plugin:2.0.1' + // classpath 'com.stanfy.spoon:spoon-gradle-plugin:1.0.2' + } +} + +allprojects { + repositories { + jcenter() + } +} + +task wrapper(type: Wrapper) { + gradleVersion = '2.4' +} + +subprojects { + tasks.withType(Test) { + maxParallelForks = 1 + } +} + +// Ignore tests for external spongycastle +project(':extern:spongycastle') { + subprojects { + // Need to re-apply the plugin here otherwise the test property below can't be set. + apply plugin: 'java' + test.enabled = false + } +} + +// SDK Version and Build Tools used by all subprojects +// See http://tools.android.com/tech-docs/new-build-system/tips#TOC-Controlling-Android-properties-of-all-your-modules-from-the-main-project. 
+ext { + compileSdkVersion = 22 + buildToolsVersion = '22.0.1' +} diff --git a/tests/source-files/org.mozilla.rocket/app/build.gradle b/tests/source-files/org.mozilla.rocket/app/build.gradle new file mode 100644 index 00000000..f05d2899 --- /dev/null +++ b/tests/source-files/org.mozilla.rocket/app/build.gradle @@ -0,0 +1,414 @@ +apply plugin: 'com.android.application' +apply plugin: 'kotlin-android' +apply plugin: 'kotlin-android-extensions' +apply plugin: 'kotlin-kapt' +apply plugin: 'com.google.android.gms.oss-licenses-plugin' +apply from: '../buildSrc/pmd.gradle' +apply from: '../buildSrc/checkstyle.gradle' +apply from: '../buildSrc/findbugs.gradle' +apply from: 'buildscripts/l10n.gradle' + + +android { + compileSdkVersion Versions.compile_sdk + buildToolsVersion Versions.build_tools + defaultConfig { + applicationId "org.mozilla" + minSdkVersion Versions.min_sdk + targetSdkVersion Versions.target_sdk + versionCode Versions.version_code + versionName Versions.version_name + if (SystemEnv.auto_screenshot == "1") { + testInstrumentationRunner "org.mozilla.focus.test.runner.ScreenshotTestRunner" + testInstrumentationRunnerArguments clearPackageData: 'true' + } else { + // general UI test, using notAnnotation to filter out auto screenshot classes + testInstrumentationRunner "org.mozilla.focus.test.runner.CustomTestRunner" + testInstrumentationRunnerArguments clearPackageData: 'true', notAnnotation: 'org.mozilla.focus.annotation.ScreengrabOnly,android.support.test.filters.FlakyTest' + } + testInstrumentationRunnerArgument 'disableAnalytics', 'true' + + multiDexEnabled true + + vectorDrawables { + useSupportLibrary false + generatedDensities = [] + } + + def bitrise_build_number = System.getenv("BITRISE_BUILD_NUMBER") + if (bitrise_build_number?.trim()) { + versionCode bitrise_build_number.toInteger() + versionNameSuffix "(" + bitrise_build_number + ")" + } + + // used by Room, to test migrations + javaCompileOptions { + annotationProcessorOptions { + arguments = ["room.schemaLocation": "$projectDir/schemas".toString()] + } + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + } + + dexOptions { + preDexLibraries true + } + + bundle { + language { + enableSplit = false + } + density { + enableSplit = false + } + abi { + enableSplit = true + } + } + + // We have a three dimensional build configuration: + // BUILD TYPE (debug, release) + + buildTypes { + release { + minifyEnabled true + shrinkResources true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + matchingFallbacks = ["firebase"] + } + debug { + def userName = System.getenv("USER") + applicationIdSuffix ".debug." + userName + versionNameSuffix applicationIdSuffix + matchingFallbacks = ["firebase_no_op"] + } + // Use a separate buildType for coverage: testCoverageEnabled produces slower code (4-5x slower + // in places that I've benchmarked), and more importantly seems to break debugging with Android Studio + // for some developers (i.e. variables can't be inspected or seen). 
+ coverage { + initWith debug + applicationIdSuffix ".coverage" + testCoverageEnabled true + matchingFallbacks = ["debug", "firebase_no_op"] + } + // special build type to develop Firebase related stuff + firebase { + initWith debug + applicationIdSuffix ".debug.firebase" + + versionNameSuffix applicationIdSuffix + matchingFallbacks = ["debug", "firebase"] + } + } + + testBuildType "firebase" + + testOptions { + animationsDisabled = true + unitTests.returnDefaultValues = true + unitTests.includeAndroidResources = true + execution 'ANDROID_TEST_ORCHESTRATOR' + } + + // used by Room, to test migrations + sourceSets { + androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) + } + + flavorDimensions "product", "engine" + + productFlavors { + focus { + resConfigs Localization.KEPT_LOCALE + dimension "product" + + applicationIdSuffix ".rocket" + } + + preview { + dimension "product" + applicationId "gro.allizom.zelda.beta" + applicationIdSuffix "" + versionNameSuffix ".nightly" + } + + // We can build with two engines: webkit or gecko + webkit { + dimension "engine" + } + + } + + variantFilter { variant -> + def flavors = variant.flavors*.name + // We only need a nightly release for now + if (flavors.contains("preview") && variant.buildType.name != "release") { + setIgnore(true) + } + } + + sourceSets { + test { + resources { + // Make the default asset folder available as test resource folder. Robolectric seems + // to fail to read assets for our setup. With this we can just read the files directly + // and do not need to rely on Robolectric. + srcDir "${projectDir}/src/main/assets/" + } + } + + preview { + res.srcDir 'src/preview/res' + } + + // used by Room, to test migrations + androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) + } +} + +repositories { + flatDir { + dirs 'libs' + } + mavenCentral() +} + +dependencies { + implementation project(':telemetry-annotation') + kapt project(':telemetry-compiler') + + implementation project(':third_party:subsampling-scale-image-view') + implementation project(':third_party:glide:annotation') + implementation project(':third_party:glide:library') + kapt "com.github.bumptech.glide:compiler:${Versions.glide}" + + implementation project(':firebase') + implementation project(':feature-tabs') + implementation project(':HttpRequest') + implementation project(':httptask') + implementation project(':urlutils') + implementation project(':fileutils') + implementation project(':icon') + implementation project(':logger') + implementation project(':threadutils') + implementation project(':cachedrequestloader') + implementation project(':permissionhandler') + + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${Versions.kotlin}" + + // We didn't use CustomTabs so far. This is a build hack to force Android-Components to use + // same version of support library as we are. Android-Components depends on CustomTabs which + // version will be override by this. + // We can get rid of this once Android-Components' issue #404 has been resolve. 
+ implementation "com.android.support:customtabs:${Versions.support}" + implementation "com.android.support:support-v4:${Versions.support}" + implementation "com.android.support:appcompat-v7:${Versions.support}" + implementation "com.android.support:design:${Versions.support}" + implementation "com.android.support:cardview-v7:${Versions.support}" + implementation "com.android.support:recyclerview-v7:${Versions.support}" + implementation "com.android.support.constraint:constraint-layout:${Versions.constraint}" + implementation "android.arch.work:work-runtime:${Versions.arch_work}" + + + // Architecture components + implementation "android.arch.lifecycle:extensions:${Versions.lifecycle}" + implementation "android.arch.lifecycle:common-java8:${Versions.lifecycle}" + implementation "android.arch.persistence.room:runtime:${Versions.room}" + implementation "android.arch.navigation:navigation-fragment:${Versions.navigation}" + kapt "android.arch.persistence.room:compiler:${Versions.room}" + + implementation("com.google.code.findbugs:annotations:${Versions.findbugs}", { + // We really only need the SuppressFBWarnings annotation, everything else can be ignored. + // Without this we get weird failures due to dependencies. + transitive = false + }) + + implementation "org.mozilla.components:browser-session:${Versions.android_components}" + implementation "org.mozilla.components:service-telemetry:${Versions.android_components}" + implementation "org.mozilla.components:browser-domains:${Versions.android_components}" + implementation "org.mozilla.components:ui-autocomplete:${Versions.android_components}" + + implementation "com.adjust.sdk:adjust-android:${Versions.adjust}" + implementation "com.google.android.gms:play-services-analytics:${Versions.firebase}" // Required by Adjust + // Required by Adjust + + implementation "com.airbnb.android:lottie:${Versions.lottie}" + + testImplementation "junit:junit:${Versions.junit}" + testImplementation "org.robolectric:robolectric:${Versions.robolectric}" + testImplementation "org.mockito:mockito-core:${Versions.mockito}" + + androidTestImplementation("com.android.support.test.espresso:espresso-core:${Versions.espresso}", { + exclude group: 'com.android.support', module: 'support-annotations' + }) + androidTestImplementation "com.android.support.test:runner:${Versions.test_runner}" + androidTestImplementation "com.android.support.test.espresso:espresso-idling-resource:${Versions.espresso}" + androidTestImplementation "com.android.support:support-annotations:${Versions.support}" + androidTestImplementation "com.android.support.test.uiautomator:uiautomator-v18:${Versions.uiautomator}" + androidTestImplementation "com.squareup.okhttp3:mockwebserver:${Versions.mockwebserver}" + androidTestImplementation "android.arch.persistence.room:testing:${Versions.room}" + androidTestImplementation "android.arch.core:core-testing:${Versions.arch_core}" + androidTestImplementation("com.android.support.test.espresso:espresso-contrib:${Versions.espresso}", { + exclude group: 'com.android.support', module: 'appcompat' + exclude group: 'com.android.support', module: 'support-v4' + exclude module: 'recyclerview-v7' + }) + androidTestImplementation "com.android.support.test.espresso:espresso-web:${Versions.espresso}" + androidTestImplementation "com.android.support.test.espresso:espresso-intents:${Versions.espresso}" + androidTestImplementation "tools.fastlane:screengrab:${Versions.fastlane_screengrab}" + androidTestImplementation "com.jraska:falcon:${Versions.jraska_falcon}" + 
androidTestUtil "com.android.support.test:orchestrator:${Versions.test_runner}" + + // LeakCanary + debugImplementation "com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}" + coverageImplementation "com.squareup.leakcanary:leakcanary-android-no-op:${Versions.leakcanary}" + releaseImplementation "com.squareup.leakcanary:leakcanary-android-no-op:${Versions.leakcanary}" + firebaseImplementation "com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}" + + implementation project(':bhaskar') + implementation project(':newspoint') + implementation project(':partnerrepository') +} + +// ------------------------------------------------------------------------------------------------- +// LeakCanary - Ensure the no-op dependency is always used in JVM tests. +// ------------------------------------------------------------------------------------------------- + +configurations.all { config -> + if (config.name.contains('UnitTest') || config.name.contains('AndroidTest')) { + config.resolutionStrategy.eachDependency { details -> + if (details.requested.group == 'com.squareup.leakcanary' && details.requested.name == 'leakcanary-android') { + details.useTarget(group: details.requested.group, name: 'leakcanary-android-no-op', version: details.requested.version) + } + } + } +} + +// ------------------------------------------------------------------------------------------------- +// Generate blocklists +// ------------------------------------------------------------------------------------------------- + +def blockListOutputDir = 'src/webkit/res/raw' + +task buildBlocklists(type: Copy) { + from('../shavar-prod-lists') { + include '*.json' + } + into blockListOutputDir + + // Android can't handle dashes in the filename, so we need to rename: + rename 'disconnect-blacklist.json', 'blocklist.json' + rename 'disconnect-entitylist.json', 'entitylist.json' + // google_mapping.json already has an expected name +} + +clean.doLast { + file(blockListOutputDir).deleteDir() +} + +tasks.whenTaskAdded { task -> + def name = task.name + if (name.contains("generate") && name.contains("Webkit") && name.contains("Resources")) { + task.dependsOn buildBlocklists + } +} + +// ------------------------------------------------------------------------------------------------- +// Adjust: Read token from environment variable (Only release builds) +// ------------------------------------------------------------------------------------------------- + +android.applicationVariants.all { variant -> + def variantName = variant.getName() + + print(variantName + ": ") + + // release and nightly will have Adjust. just nightly will use sandbox environment. 
+ if (variantName.contains("Release")) { + def token = System.getenv("ADJUST_TOKEN_FOCUS") ?: null + + if (token != null) { + buildConfigField 'String', 'ADJUST_TOKEN', '"' + token + '"' + if (variantName.contains("preview")) { + buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'com.adjust.sdk.AdjustConfig.ENVIRONMENT_SANDBOX' + } else if (variantName.contains("focus")) { + buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'com.adjust.sdk.AdjustConfig.ENVIRONMENT_PRODUCTION' + } else { + buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' + } + println "Added adjust token set from environment variable" + + def tracker = System.getenv("ADJUST_SIDELOAD_TRACKER") ?: null + if (tracker != null) { + buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', '"' + tracker + '"' + } else { + buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' + logger.error(variant.getName() + ": Not setting adjust default tracker (environment variable not set)") + } + } else { + buildConfigField 'String', 'ADJUST_TOKEN', 'null' + buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' + buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' + println("Not setting adjust token (environment variable not set)") + } + } else { + buildConfigField 'String', 'ADJUST_TOKEN', 'null' + buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' + buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' + + println("Not setting adjust token (Not a focus release build)") + } + if (variant.buildType.name == "release" || variant.buildType.name == "firebase") { + variant.assemble.doFirst { + if (SystemEnv.google_app_id == null || SystemEnv.default_web_client_id == null || + SystemEnv.firebase_database_url == null || SystemEnv.gcm_defaultSenderId == null || + SystemEnv.google_api_key == null || SystemEnv.google_crash_reporting_api_key == null || + SystemEnv.project_id == null) { + logger.warn("If you want to enable Firebase, please follow the steps:") + logger.warn("1. Download google-services.json and put it in the folder where you run below command.") + logger.warn("2. 
Run 'python./tools/firebase/firebase_setup.py' and follow the steps.\n") + } + } + } +} + +tasks.whenTaskAdded { task -> + if (name.contains("compile")) { + task.dependsOn generatePreviewLocaleList + task.dependsOn generateFocusLocaleList + } +} + +tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { + kotlinOptions { + kotlinOptions.allWarningsAsErrors = true + } +} + + + +afterEvaluate { + check.dependsOn 'findbugs', 'pmd', 'checkstyle', 'checkTelemetryDocDirty', 'ktlint' +} + +task checkTelemetryDocDirty() { + + doLast { + Process p = Runtime.getRuntime().exec("git diff ./docs/events.md"); + p.waitFor(); + + BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); + + StringBuilder sb = new StringBuilder(); + String line = ""; + while ((line = reader.readLine()) != null) { + sb.append(line + "\n"); + } + if (sb.length() > 0) { + throw new GradleException("events.md is drity, please commit the change first.\n" + sb.toString()) + } + } +} diff --git a/tests/source-files/org.noise_planet.noisecapture/app/build.gradle b/tests/source-files/org.noise_planet.noisecapture/app/build.gradle new file mode 100644 index 00000000..158712bd --- /dev/null +++ b/tests/source-files/org.noise_planet.noisecapture/app/build.gradle @@ -0,0 +1,116 @@ +apply plugin: 'com.android.application' + +def getCheckedOutGitCommitHash() { + 'git rev-parse --verify --short HEAD'.execute().text.trim() +} + +def getAvailableLocales() { + new File("app/src/main/res").list(new FilenameFilter() { + @Override + boolean accept(File dir, String name) { + return name.startsWith("values-") && new File(new File(dir,name), "strings.xml").exists(); + } + }).collect() { fold -> fold.substring("values-".length())}.join(",") +} + +android { + compileSdkVersion 30 + buildToolsVersion '30.0.0' + def signingFilePath = System.getProperty("user.home") + "/.idea/signing.gradle" + if (new File(signingFilePath).exists()) { + apply from: signingFilePath + } + defaultConfig { + applicationId "org.noise_planet.noisecapture" + minSdkVersion 15 + targetSdkVersion 30 + versionCode 55 + versionName "1.2.19" + // Store build date in apk + buildConfigField "long", "TIMESTAMP", System.currentTimeMillis() + "L" + buildConfigField "String", "GITHASH", "\"${getCheckedOutGitCommitHash().toString()}\"" + buildConfigField "String", "SUPPORTEDLOCALES", "\"${getAvailableLocales()}\"" + // Enabling multidex support. 
+ multiDexEnabled false + + vectorDrawables.useSupportLibrary = true + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + dexOptions { + javaMaxHeapSize "4g" + } + lintOptions { + abortOnError false + disable 'MissingTranslation' + } + buildTypes { + release { + minifyEnabled true + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + if (new File(signingFilePath).exists()) { + signingConfig signingConfigs.release + } + } + debug { + debuggable true + if (new File(signingFilePath).exists()) { + signingConfig signingConfigs.debug + } + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_7 + targetCompatibility JavaVersion.VERSION_1_7 + } + testOptions { + unitTests { + includeAndroidResources = true + } + } +} + +// For using the MPAndroidChart package +// https://github.com/PhilJay/MPAndroidChart +// Apache License, Version 2.0 +task listrepos { + doLast { + println "Repositories:" + project.repositories.each { println "Name: " + it.name + "; url: " + it.url } + } +} + +repositories { + mavenLocal() +} + +dependencies { + implementation 'com.github.PhilJay:MPAndroidChart:v2.2.5' + implementation 'org.slf4j:slf4j-simple:1.7.12' + implementation name: 'org/noise-planet/jwarble/0.2.3/jwarble-0.2.3' + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.10' + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.10' + + // multithreaded FFT for realtime visualisation of spectrum only + implementation 'com.github.wendykierp:JTransforms:3.1' + implementation 'org.apache.commons:commons-math3:3.5' + implementation 'androidx.appcompat:appcompat:1.0.0' + implementation 'com.google.android.material:material:1.0.0' + implementation 'androidx.vectordrawable:vectordrawable:1.0.0' + implementation 'com.nhaarman.supertooltips:library:3.0.0' + //compile 'com.android.support:multidex:1.0.0' + // Testing-only dependencies + // Force usage of support annotations in the test app, since it is internally used by the runner module. 
+ implementation 'androidx.constraintlayout:constraintlayout:1.1.3' + androidTestImplementation 'androidx.annotation:annotation:1.0.0' + androidTestImplementation 'androidx.test.ext:junit:1.1.1' + androidTestImplementation 'androidx.test:rules:1.1.1' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.0' + // unit test + testImplementation group: 'org.robolectric', name: 'robolectric', version: '4.3.1' + testImplementation 'junit:junit:4.12' + testImplementation group: 'com.googlecode.soundlibs', name: 'jorbis', version: '0.0.17.4' + implementation project(':sosfilter') +} + diff --git a/tests/source-files/org.noise_planet.noisecapture/settings.gradle b/tests/source-files/org.noise_planet.noisecapture/settings.gradle new file mode 100644 index 00000000..3af2006a --- /dev/null +++ b/tests/source-files/org.noise_planet.noisecapture/settings.gradle @@ -0,0 +1 @@ +include 'sosfilter', 'app' diff --git a/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle b/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle new file mode 100644 index 00000000..ab7b0caa --- /dev/null +++ b/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle @@ -0,0 +1,31 @@ +apply plugin: 'java' +apply plugin: 'idea' +apply plugin: 'java' + +group = 'org.orbisgis' +version = '1.0-SNAPSHOT' + + +description = """Signal processing, A-weighting and third-octave bands filtering""" + +compileJava { + sourceCompatibility = 1.7 + targetCompatibility = 1.7 +} +repositories { + mavenCentral() +} + +dependencies { + compile group: 'org.slf4j', name: 'slf4j-api', version:'1.7.12' + compile group: 'com.github.wendykierp', name: 'JTransforms', version:'3.1' + testCompile group: 'org.slf4j', name: 'slf4j-simple', version:'1.7.12' + testCompile group: 'junit', name: 'junit', version:'4.12' +} + +// Copy resource for unit tests +task copyTestResources(type: Copy) { + from "${projectDir}/src/test/resources" + into "${buildDir}/classes/test" +} +processTestResources.dependsOn copyTestResources diff --git a/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts b/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts new file mode 100644 index 00000000..cb7d1d02 --- /dev/null +++ b/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts @@ -0,0 +1,47 @@ +import java.net.URI + +buildscript { + dependencies { + classpath("com.android.tools.build:gradle:8.6.0") + } +} + +plugins { + alias(libs.plugins.android.application) apply false + alias(libs.plugins.android.library) apply false + alias(libs.plugins.kotlin.android) apply false + alias(libs.plugins.kotlin.jvm) apply false + alias(libs.plugins.sonarqube) + alias(libs.plugins.ktlint) + alias(libs.plugins.ksp) + alias(libs.plugins.roborazzi) apply false + alias(libs.plugins.gms) apply false +} + +sonarqube { + properties { + property("sonar.projectKey", "org.piepmeyer.gauguin") + property("sonar.organization", "meikpiep") + property("sonar.verbose", "true") + property("sonar.host.url", "https://sonarcloud.io") + } +} + +tasks.sonar { + onlyIf("There is no property 'buildserver'") { + project.hasProperty("buildserver") + } + dependsOn(":gauguin-app:lint") +} + +allprojects { + repositories { + mavenCentral() + google() + maven { url = URI("https://jitpack.io") } + } +} + +subprojects { + apply(plugin = "org.jlleitschuh.gradle.ktlint") +} diff --git a/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml b/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml new file mode 100644 index 00000000..7159985c 
--- /dev/null +++ b/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml @@ -0,0 +1,91 @@ +[versions] + +kotlin = "1.9.23" +koin = "3.5.6" +koin-annotations="1.3.1" +kotest = "5.9.1" +kotest-extensions = "1.3.0" +kotlin-coroutines = "1.8.1" +android-gradle-plugin = "8.6.0" +androidUiTestingUtils = "2.3.3" +roborazzi = "1.26.0" + +[libraries] + +kotlin-coroutines-core = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-core", version.ref = "kotlin-coroutines" } +kotlin-coroutines-debug = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-debug", version.ref = "kotlin-coroutines" } +kotlin-serialization = { group = "org.jetbrains.kotlinx", name = "kotlinx-serialization-json", version = "1.6.3" } + +logging-kotlin = { group = "io.github.oshai", name = "kotlin-logging-jvm", version = "6.0.9" } +logging-slf = { group = "org.slf4j", name = "slf4j-api", version = "2.0.13" } +logging-logback-android = { group = "com.github.tony19", name = "logback-android", version = "3.0.0" } +logging-logback-kotlin = { group = "ch.qos.logback", name = "logback-classic", version = "1.5.6" } + +android-material = { group = "com.google.android.material", name = "material", version = "1.12.0" } + +androidx-annotation = { group = "androidx.annotation", name = "annotation", version = "1.8.2" } +androidx-ktx = { group = "androidx.core", name = "core-ktx", version = "1.13.1" } +androidx-constraintlayout = { group = "androidx.constraintlayout", name = "constraintlayout", version = "2.1.4" } +androidx-drawerlayout = { group = "androidx.drawerlayout", name = "drawerlayout", version = "1.2.0" } +androidx-fragment = { group = "androidx.fragment", name = "fragment-ktx", version = "1.8.3" } +androidx-gridlayout = { group = "androidx.gridlayout", name = "gridlayout", version = "1.0.0" } +androidx-lifecycle-runtime = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version = "2.8.5" } +androidx-lifecycle-viewmodel = { group = "androidx.lifecycle", name = "lifecycle-viewmodel-ktx", version = "2.8.5" } +androidx-preference = { group = "androidx.preference", name = "preference-ktx", version = "1.2.1" } +androidx-recyclerview = { group = "androidx.recyclerview", name = "recyclerview", version = "1.3.2" } +androidx-transition = { group = "androidx.transition", name = "transition", version = "1.5.1" } +androidx-window = { group = "androidx.window", name = "window", version = "1.3.0" } +androidx-window-core = { group = "androidx.window", name = "window-core", version = "1.3.0" } + +androidx-test-junit-ktx = { group = "androidx.test.ext", name = "junit-ktx", version = "1.2.1" } +androidx-test-rules = { group = "androidx.test", name = "rules", version = "1.6.1" } +androidx-test-runner = { group = "androidx.test", name = "runner", version = "1.6.2" } + +koin-core = { group = "io.insert-koin", name = "koin-core", version.ref = "koin" } +koin-annotations = { group = "io.insert-koin", name = "koin-annotations", version.ref = "koin-annotations" } +koin-ksp-compiler = { group = "io.insert-koin", name = "koin-ksp-compiler", version.ref = "koin-annotations" } +koin-test = { group = "io.insert-koin", name = "koin-test", version.ref = "koin" } +koin-android = { group = "io.insert-koin", name = "koin-android", version.ref = "koin" } + +kotest-runner = { group = "io.kotest", name = "kotest-runner-junit5", version.ref = "kotest" } +kotest-assertions = { group = "io.kotest", name = "kotest-assertions-core", version.ref = "kotest" } +kotest-parametrizedtests = { group = "io.kotest", name = 
"kotest-framework-datatest", version.ref = "kotest" } +kotest-koin = { group = "io.kotest.extensions", name = "kotest-extensions-koin", version.ref = "kotest-extensions" } + +test-mockk = { group = "io.mockk", name = "mockk", version = "1.13.11" } + +androiduitestingutils-utils = { group = "com.github.sergio-sastre.AndroidUiTestingUtils", name = "utils", version.ref = "androidUiTestingUtils" } +androiduitestingutils-robolectric = { group = "com.github.sergio-sastre.AndroidUiTestingUtils", name = "robolectric", version.ref = "androidUiTestingUtils" } +roboelectric = { group = "org.robolectric", name = "robolectric", version = "4.13" } +roborazzi = { group = "io.github.takahirom.roborazzi", name = "roborazzi", version.ref = "roborazzi" } +roborazzi-junit = { group = "io.github.takahirom.roborazzi", name = "roborazzi-junit-rule", version.ref = "roborazzi" } +junit-vintage-engine = { group = "org.junit.vintage", name = "junit-vintage-engine", version = "5.10.3" } + +thirdparty-konfetti = { group = "nl.dionsegijn", name = "konfetti-xml", version = "2.0.4" } +#thirdparty-ferriswheel = { group = "ru.github.igla", name = "ferriswheel", version = "1.2" } +thirdparty-navigationdrawer = { group = "com.mikepenz", name = "materialdrawer", version = "9.0.2" } +thirdparty-balloon = { group = "com.github.skydoves", name = "balloon", version = "1.6.7" } +thirdparty-vico = { group = "com.patrykandpatrick.vico", name = "views", version = "2.0.0-alpha.25" } +thirdparty-androidplot = { group = "com.androidplot", name = "androidplot-core", version = "1.5.11" } +thirdparty-leakcanary = { group = "com.squareup.leakcanary", name = "leakcanary-android", version = "2.14" } + +[plugins] + +android-application = { id = "com.android.application", version.ref = "android-gradle-plugin" } +android-library = { id = "com.android.library", version.ref = "android-gradle-plugin" } +kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" } +kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" } +sonarqube = { id = "org.sonarqube", version = "5.0.0.4638" } +ktlint = { id = "org.jlleitschuh.gradle.ktlint", version = "12.1.1" } +ksp = { id = "com.google.devtools.ksp", version = "1.9.23-1.0.20" } +roborazzi = { id = "io.github.takahirom.roborazzi", version.ref = "roborazzi" } +gms = { id = "com.google.gms.google-services", version = "1" } + +[bundles] + +logging = ["logging-kotlin", "logging-slf"] +kotest = ["kotest-runner", "kotest-assertions", "kotest-parametrizedtests", "kotest-koin"] +koin = ["koin-core", "koin-annotations", "koin-ksp-compiler"] +androidx-test = ["androidx-test-junit-ktx", "androidx-test-rules", "androidx-test-runner"] +screenshotTests = ["androiduitestingutils-utils", "androiduitestingutils-robolectric", "roboelectric", "roborazzi", "roborazzi-junit", "junit-vintage-engine"] + diff --git a/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts b/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts new file mode 100644 index 00000000..46f4acda --- /dev/null +++ b/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts @@ -0,0 +1,24 @@ +pluginManagement { + repositories { + google() + mavenCentral() + gradlePluginPortal() + } +} + +dependencyResolutionManagement { + versionCatalogs { + create("libs") { + from(files("libs.versions.toml")) + } + } +} + +plugins { + id("org.gradle.toolchains.foojay-resolver-convention") version ("0.8.0") +} + +rootProject.name = "gauguin" + +include(":gauguin-app") +include(":gauguin-core") diff --git 
a/tests/source-files/org.tasks/app/build.gradle.kts b/tests/source-files/org.tasks/app/build.gradle.kts new file mode 100644 index 00000000..23b0524d --- /dev/null +++ b/tests/source-files/org.tasks/app/build.gradle.kts @@ -0,0 +1,225 @@ +import com.android.build.gradle.api.ApplicationVariant + +plugins { + id("com.android.application") + id("checkstyle") + id("io.fabric") + id("com.cookpad.android.licensetools") + kotlin("android") +} + +repositories { + jcenter() + google() + maven(url = "https://jitpack.io") +} + +android { + bundle { + language { + enableSplit = false + } + } + + dexOptions { + javaMaxHeapSize = "2g" + } + + lintOptions { + setLintConfig(file("lint.xml")) + textOutput("stdout") + textReport = true + } + + compileSdkVersion(Versions.targetSdk) + + defaultConfig { + testApplicationId = "org.tasks.test" + applicationId = "org.tasks" + versionCode = 651 + versionName = "7.6.1" + targetSdkVersion(Versions.targetSdk) + minSdkVersion(Versions.minSdk) + multiDexEnabled = true + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + + javaCompileOptions { + annotationProcessorOptions { + arguments["room.schemaLocation"] = "$projectDir/schemas" + } + } + } + + signingConfigs { + create("release") { + val tasksKeyAlias: String? by project + val tasksStoreFile: String? by project + val tasksStorePassword: String? by project + val tasksKeyPassword: String? by project + + keyAlias = tasksKeyAlias + storeFile = file(tasksStoreFile?: "none") + storePassword = tasksStorePassword + keyPassword = tasksKeyPassword + } + } + + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + + @Suppress("LocalVariableName") + buildTypes { + getByName("debug") { + val tasks_mapbox_key_debug: String? by project + val tasks_google_key_debug: String? by project + + applicationIdSuffix = ".debug" + resValue("string", "mapbox_key", tasks_mapbox_key_debug ?: "") + resValue("string", "google_key", tasks_google_key_debug ?: "") + isTestCoverageEnabled = true + } + getByName("release") { + val tasks_mapbox_key: String? by project + val tasks_google_key: String? 
by project + + resValue("string", "mapbox_key", tasks_mapbox_key ?: "") + resValue("string", "google_key", tasks_google_key ?: "") + isMinifyEnabled = true + proguardFiles(getDefaultProguardFile("proguard-android.txt"), "proguard.pro") + signingConfig = signingConfigs.getByName("release") + } + } + + applicationVariants.all(object : Action { + override fun execute(variant: ApplicationVariant) { + variant.resValue("string", "app_package", variant.applicationId) + } + }) + + flavorDimensions("store") + + productFlavors { + create("generic") { + setDimension("store") + proguardFile("generic.pro") + } + create("googleplay") { + setDimension("store") + } + create("amazon") { + setDimension("store") + } + } + + viewBinding { + isEnabled = true + } + + dataBinding { + isEnabled = true + } + + packagingOptions { + exclude("META-INF/*.kotlin_module") + } +} + +configure { + configFile = project.file("google_checks.xml") + toolVersion = "8.16" +} + +configurations.all { + exclude(group = "com.google.guava", module = "guava-jdk5") + exclude(group = "org.apache.httpcomponents", module = "httpclient") + exclude(group = "com.google.http-client", module = "google-http-client-apache") + resolutionStrategy { + force("com.squareup.okhttp3:okhttp:" + Versions.okhttp) + } +} + +val googleplayImplementation by configurations +val amazonImplementation by configurations + +dependencies { + implementation("com.gitlab.bitfireAT:dav4jvm:1.0") + implementation("com.gitlab.bitfireAT:ical4android:be6d515db8") { + exclude(group = "org.threeten", module = "threetenbp") + } + implementation("com.gitlab.bitfireAT:cert4android:1488e39a66") + + annotationProcessor("com.google.dagger:dagger-compiler:${Versions.dagger}") + implementation("com.google.dagger:dagger:${Versions.dagger}") + + implementation("androidx.room:room-rxjava2:${Versions.room}") + annotationProcessor("androidx.room:room-compiler:${Versions.room}") + implementation("androidx.lifecycle:lifecycle-extensions:2.1.0") + implementation("io.reactivex.rxjava2:rxandroid:2.1.1") + implementation("androidx.paging:paging-runtime:2.1.1") + + annotationProcessor("com.jakewharton:butterknife-compiler:${Versions.butterknife}") + implementation("com.jakewharton:butterknife:${Versions.butterknife}") + + debugImplementation("com.facebook.flipper:flipper:${Versions.flipper}") + debugImplementation("com.facebook.flipper:flipper-network-plugin:${Versions.flipper}") + debugImplementation("com.facebook.soloader:soloader:0.8.0") + + debugImplementation("com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}") + + implementation("org.jetbrains.kotlin:kotlin-stdlib:${Versions.kotlin}") + implementation("io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:2.0.0") + implementation("androidx.multidex:multidex:2.0.1") + implementation("me.saket:better-link-movement-method:2.2.0") + implementation("com.squareup.okhttp3:okhttp:${Versions.okhttp}") + implementation("com.google.code.gson:gson:2.8.5") + implementation("com.github.rey5137:material:1.2.5") + implementation("com.nononsenseapps:filepicker:4.2.1") + implementation("com.google.android.material:material:1.1.0-rc01") + implementation("androidx.annotation:annotation:1.1.0") + implementation("androidx.constraintlayout:constraintlayout:2.0.0-beta4") + implementation("androidx.swiperefreshlayout:swiperefreshlayout:1.0.0") + implementation("com.jakewharton.timber:timber:4.7.1") + implementation("com.jakewharton.threetenabp:threetenabp:1.2.1") + implementation("com.google.guava:guava:27.1-android") + 
implementation("com.jakewharton:process-phoenix:2.0.0") + implementation("com.google.android.apps.dashclock:dashclock-api:2.0.0") + implementation("com.twofortyfouram:android-plugin-api-for-locale:1.0.2") + implementation("com.rubiconproject.oss:jchronic:0.2.6") { + isTransitive = false + } + implementation("org.scala-saddle:google-rfc-2445:20110304") { + isTransitive = false + } + implementation("com.wdullaer:materialdatetimepicker:4.0.1") + implementation("me.leolin:ShortcutBadger:1.1.22@aar") + implementation("com.google.apis:google-api-services-tasks:v1-rev59-1.25.0") + implementation("com.google.apis:google-api-services-drive:v3-rev188-1.25.0") + implementation("com.google.api-client:google-api-client-android:1.30.7") + implementation("androidx.work:work-runtime:${Versions.work}") + implementation("com.mapbox.mapboxsdk:mapbox-android-sdk:7.3.0") + implementation("com.mapbox.mapboxsdk:mapbox-sdk-services:4.6.0") + + googleplayImplementation("com.crashlytics.sdk.android:crashlytics:${Versions.crashlytics}") + googleplayImplementation("com.google.firebase:firebase-analytics:${Versions.firebase}") + googleplayImplementation("com.google.android.gms:play-services-location:17.0.0") + googleplayImplementation("com.google.android.gms:play-services-maps:17.0.0") + googleplayImplementation("com.google.android.libraries.places:places:2.1.0") + googleplayImplementation("com.android.billingclient:billing:1.2.2") + + amazonImplementation(fileTree(mapOf("dir" to "libs", "include" to listOf("*.jar")))) + amazonImplementation("com.crashlytics.sdk.android:crashlytics:${Versions.crashlytics}") + amazonImplementation("com.google.firebase:firebase-core:${Versions.firebase}") + + androidTestAnnotationProcessor("com.google.dagger:dagger-compiler:${Versions.dagger}") + androidTestAnnotationProcessor("com.jakewharton:butterknife-compiler:${Versions.butterknife}") + androidTestImplementation("com.google.dexmaker:dexmaker-mockito:1.2") + androidTestImplementation("com.natpryce:make-it-easy:4.0.1") + androidTestImplementation("androidx.test:runner:1.2.0") + androidTestImplementation("androidx.test:rules:1.2.0") + androidTestImplementation("androidx.test.ext:junit:1.1.1") + androidTestImplementation("androidx.annotation:annotation:1.1.0") +} + +apply(mapOf("plugin" to "com.google.gms.google-services")) diff --git a/tests/source-files/org.tasks/build.gradle b/tests/source-files/org.tasks/build.gradle new file mode 100644 index 00000000..2edd2b70 --- /dev/null +++ b/tests/source-files/org.tasks/build.gradle @@ -0,0 +1,13 @@ + +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath 'org.owasp:dependency-check-gradle:1.3.2.1' + } +} +apply plugin: 'org.owasp.dependencycheck' +dependencyCheck { + format='JSON' +} diff --git a/tests/source-files/org.tasks/build.gradle.kts b/tests/source-files/org.tasks/build.gradle.kts new file mode 100644 index 00000000..f766cea2 --- /dev/null +++ b/tests/source-files/org.tasks/build.gradle.kts @@ -0,0 +1,26 @@ +buildscript { + repositories { + jcenter() + google() + maven("https://maven.fabric.io/public") + } + + dependencies { + classpath("com.android.tools.build:gradle:3.6.0-rc01") + classpath("com.google.gms:google-services:4.3.3") + // https://docs.fabric.io/android/changelog.html#fabric-gradle-plugin + classpath("io.fabric.tools:gradle:1.31.2") + classpath("com.github.ben-manes:gradle-versions-plugin:0.27.0") + classpath("com.cookpad.android.licensetools:license-tools-plugin:1.7.0") + 
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:${Versions.kotlin}") + } +} + +plugins { + id("com.github.ben-manes.versions") version "0.21.0" +} + +tasks.getByName("wrapper") { + gradleVersion = "5.6.4" + distributionType = Wrapper.DistributionType.ALL +} diff --git a/tests/source-files/org.tasks/buildSrc/build.gradle.kts b/tests/source-files/org.tasks/buildSrc/build.gradle.kts new file mode 100644 index 00000000..c39a297b --- /dev/null +++ b/tests/source-files/org.tasks/buildSrc/build.gradle.kts @@ -0,0 +1,7 @@ +plugins { + `kotlin-dsl` +} + +repositories { + jcenter() +} \ No newline at end of file diff --git a/tests/source-files/org.tasks/settings.gradle.kts b/tests/source-files/org.tasks/settings.gradle.kts new file mode 100644 index 00000000..15a801b1 --- /dev/null +++ b/tests/source-files/org.tasks/settings.gradle.kts @@ -0,0 +1 @@ +include(":app") diff --git a/tests/source-files/osmandapp/osmand/build.gradle b/tests/source-files/osmandapp/osmand/build.gradle new file mode 100644 index 00000000..854dddaf --- /dev/null +++ b/tests/source-files/osmandapp/osmand/build.gradle @@ -0,0 +1,321 @@ +apply plugin: 'com.android.application' + +// Global Parameters accepted +// APK_NUMBER_VERSION - version number of apk +// APK_VERSION - build number like #9999Z, for dev builds appended to app_version like 2.0.0 in no_translate.xml) +// flavor Z : M=-master, D=-design, B=-Blackberry, MD=-main-default, MQA=-main-qt-arm, MQDA=-main-qt-default-arm, S=-sherpafy +// TARGET_APP_NAME - app name +// APP_EDITION - date stamp of builds +// APP_FEATURES - features +play_market +gps_status -parking_plugin -blackberry -free_version -amazon + + +// 1. To be done Filter fonts +// +// +// +// +// +// +// Less important + +android { + compileSdkVersion 21 + buildToolsVersion "21.1.2" + + signingConfigs { + development { + storeFile file("../keystores/debug.keystore") + storePassword "android" + keyAlias "androiddebugkey" + keyPassword "android" + } + + publishing { + storeFile file("/var/lib/jenkins/osmand_key") + storePassword System.getenv("OSMAND_APK_PASSWORD") + keyAlias "osmand" + keyPassword System.getenv("OSMAND_APK_PASSWORD") + } + } + + defaultConfig { + minSdkVersion 9 + targetSdkVersion 21 + + versionCode System.getenv("APK_NUMBER_VERSION") ? System.getenv("APK_NUMBER_VERSION").toInteger() : versionCode + //versionName already assigned in code + //versionName System.getenv("APK_VERSION")? 
System.getenv("APK_VERSION").toString(): versionName + } + + lintOptions { + lintConfig file("lint.xml") + abortOnError false + warningsAsErrors false + } + + // This is from OsmAndCore_android.aar - for some reason it's not inherited + aaptOptions { + // Don't compress any embedded resources + noCompress "qz" + } + + dexOptions { + jumboMode = true + } + + sourceSets { + main { + manifest.srcFile "AndroidManifest.xml" + jni.srcDirs = [] + jniLibs.srcDirs = ["libs"] + aidl.srcDirs = ["src"] + java.srcDirs = ["src"] + resources.srcDirs = ["src"] + renderscript.srcDirs = ["src"] + res.srcDirs = ["res"] + assets.srcDirs = ["assets"] + } + free { + manifest.srcFile "AndroidManifest-free.xml" + } + + legacy { + jniLibs.srcDirs = ["libgnustl"] + } + } + + flavorDimensions "version", "coreversion", "abi" + productFlavors { + // ABI + armv7 { + flavorDimension "abi" + ndk { + abiFilter "armeabi-v7a" + } + } + armv5 { + flavorDimension "abi" + ndk { + abiFilter "armeabi" + } + } + x86 { + flavorDimension "abi" + ndk { + abiFilter "x86" + } + } + mips { + flavorDimension "abi" + ndk { + abiFilter "mips" + } + } + fat { + flavorDimension "abi" + } + + // Version + free { + flavorDimension "version" + applicationId "net.osmand" + } + full { + flavorDimension "version" + applicationId "net.osmand.plus" + } + + // CoreVersion + legacy { + flavorDimension "coreversion" + } + + qtcore { + flavorDimension "coreversion" + } + + qtcoredebug { + flavorDimension "coreversion" + } + } + + buildTypes { + debug { + // proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' + // minifyEnabled true + // proguardFiles 'proguard-project.txt' + signingConfig signingConfigs.development + } + release { + // proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' + // minifyEnabled true + //proguardFiles 'proguard-project.txt' + signingConfig signingConfigs.publishing + } + } +} + +def replaceNoTranslate(line) { + if (line.contains("\"app_name\"") && System.getenv("TARGET_APP_NAME")) { + return line.replaceAll(">[^<]*<", ">" + System.getenv("TARGET_APP_NAME") + "<") + } + if (line.contains("\"app_edition\"") && System.getenv("APP_EDITION")) { + return line.replaceAll(">[^<]*<", ">" + System.getenv("APP_EDITION") + "<") + } + if (line.contains("\"app_version\"") && System.getenv("APK_VERSION")) { + return line.replaceAll(">[^<]*<", ">" + System.getenv("APK_VERSION") + "<") + } + if (line.contains("\"app_version\"") && System.getenv("APK_VERSION_SUFFIX")) { + // appends build number to version number for dev builds + return line.replaceAll("[^<]*<", ">" + System.getenv("APP_FEATURES") + "<") + } + return line; +} + +task updateNoTranslate(type: Copy) { + from('.') { + include 'no_translate.xml' + filter { + line -> replaceNoTranslate(line); + } + } + into 'res/values/' +} + +task collectVoiceAssets(type: Sync) { + from "../../resources/voice" + into "assets/voice" + include "**/*.p" +} + +task collectHelpContentsAssets(type: Sync) { + from "../../help/help" + into "assets/help" + include "*.html" + include "images/**/*.png" + + from "assets/" + into "assets/help" + include "style.css" +} + +task collectRoutingResources(type: Sync) { + from "../../resources/routing" + into "src/net/osmand/router" + include "*.xml" +} + +task collectMiscResources(type: Copy) { + into "src/net/osmand/osm" + from("../../resources/obf_creation") { + include "rendering_types.xml" + } + from("../../resources/poi") { + include "poi_types.xml" + } +} + +task 
collectRenderingStylesResources(type: Sync) { + from "../../resources/rendering_styles" + into "src/net/osmand/render" + include "*.xml" +} + +task collectRegionsInfoResources(type: Copy) { + from "../../resources/countries-info" + into "src/net/osmand/map" + include "regions.ocbf" +} + +task copyStyleIcons(type: Copy) { + from "../../resources/rendering_styles/style-icons/" + into "res/" + include "**/*.png" +} + +task collectExternalResources << {} +collectExternalResources.dependsOn collectVoiceAssets, + collectHelpContentsAssets, + collectRoutingResources, + collectRenderingStylesResources, + collectRegionsInfoResources, + collectMiscResources, + copyStyleIcons, + updateNoTranslate +// tasks.whenTaskAdded { task -> +// if (task.name.startsWith("generate") && task.name.endsWith("Resources")) { +// task.dependsOn collectExternalResources +// } +// } + +// Legacy core build +import org.apache.tools.ant.taskdefs.condition.Os + +task buildOsmAndCore(type: Exec) { + description "Build Legacy OsmAndCore" + + if (!Os.isFamily(Os.FAMILY_WINDOWS)) { + commandLine "bash", file("./old-ndk-build.sh").getAbsolutePath() + } else { + commandLine "cmd", "/c", "echo", "Not supported" + } +} + +task cleanupDuplicatesInCore() { + dependsOn buildOsmAndCore + // doesn't work for legacy debug builds + doLast { + file("libgnustl/armeabi").mkdirs() + file("libs/armeabi/libgnustl_shared.so").renameTo(file("libgnustl/armeabi/libgnustl_shared.so")) + file("libgnustl/armeabi-v7a").mkdirs() + file("libs/armeabi-v7a/libgnustl_shared.so").renameTo(file("libgnustl/armeabi-v7a/libgnustl_shared.so")) + file("libgnustl/mips").mkdirs() + file("libs/mips/libgnustl_shared.so").renameTo(file("libgnustl/mips/libgnustl_shared.so")) + file("libgnustl/x86").mkdirs() + file("libs/x86/libgnustl_shared.so").renameTo(file("libgnustl/x86/libgnustl_shared.so")) + } +} +tasks.withType(JavaCompile) { + compileTask -> compileTask.dependsOn << [collectExternalResources, buildOsmAndCore, cleanupDuplicatesInCore] +} + +clean.dependsOn 'cleanNoTranslate' + +task cleanNoTranslate() { + delete ('res/values/no_translate.xml') +} + +repositories { + ivy { + name = "OsmAndBinariesIvy" + url = "http://builder.osmand.net" + layout "pattern", { + artifact "ivy/[organisation]/[module]/[revision]/[artifact]-[revision].[ext]" + } + } + // mavenCentral() +} + +dependencies { + compile project(path: ":OsmAnd-java", configuration: "android") + compile project(":eclipse-compile:appcompat") + compile fileTree( + dir: "libs", + include: ["*.jar"], + exclude: [ + "QtAndroid-bundled.jar", + "QtAndroidAccessibility-bundled.jar", + "OsmAndCore_android.jar", + "OsmAndCore_wrapper.jar"]) + // compile "com.github.ksoichiro:android-observablescrollview:1.5.0" + // compile "com.android.support:appcompat-v7:21.0.3" + // compile "com.github.shell-software:fab:1.0.5" + legacyCompile "net.osmand:OsmAndCore_android:0.1-SNAPSHOT@jar" + qtcoredebugCompile "net.osmand:OsmAndCore_androidNativeDebug:0.1-SNAPSHOT@aar" + qtcoredebugCompile "net.osmand:OsmAndCore_android:0.1-SNAPSHOT@aar" + qtcoreCompile "net.osmand:OsmAndCore_androidNativeRelease:0.1-SNAPSHOT@aar" + qtcoreCompile "net.osmand:OsmAndCore_android:0.1-SNAPSHOT@aar" +} diff --git a/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..a3185c1e --- /dev/null +++ b/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Sep 01 10:23:06 EEST 
2014 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip diff --git a/tests/source-files/realm/react-native/android/build.gradle b/tests/source-files/realm/react-native/android/build.gradle new file mode 100644 index 00000000..d6244d2b --- /dev/null +++ b/tests/source-files/realm/react-native/android/build.gradle @@ -0,0 +1,409 @@ +buildscript { + repositories { + google() + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.1.4' + classpath 'de.undercouch:gradle-download-task:1.2' + } +} + +allprojects { + repositories { + google() + jcenter() + mavenLocal() + maven { + // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm + url "$projectDir/../../tests/react-test-app/node_modules/react-native/android" + } + } +} + +apply plugin: 'com.android.library' +apply plugin: 'maven' +apply plugin: 'signing' +apply plugin: 'de.undercouch.download' + +import de.undercouch.gradle.tasks.download.Download +import org.apache.tools.ant.taskdefs.condition.Os +import org.apache.tools.ant.filters.ReplaceTokens + +// We download various C++ open-source dependencies into downloads. +// We then copy both the downloaded code and our custom makefiles and headers into third-party-ndk. +// After that we build native code from src/main/jni with module path pointing at third-party-ndk. + + +ext.coreVersion = getDependenciesVersion("REALM_CORE_VERSION").trim() +ext.syncVersion = getDependenciesVersion("REALM_SYNC_VERSION").trim() +def currentVersion = getDependenciesVersion("VERSION").trim() +println "Realm Core Version: $ext.coreVersion" +println "Realm Sync Version: $ext.syncVersion" + +def downloadsDir = new File("$projectDir/downloads") +def jscDownloadDir = new File("$projectDir/src/main/jni/jsc") +def coreDownloadDir = new File("$projectDir/src/main/jni") +def publishDir = new File("$projectDir/../../android/") +// to build with sync run: ./gradlew assembleDebug -PbuildWithSync=true +ext.buildSync = project.hasProperty('buildWithSync') ? 
project.getProperty('buildWithSync').toBoolean() : true + +task generateVersionClass(type: Copy) { + from 'src/main/templates/Version.java' + into 'build/generated-src/main/java/io/realm/react' + filter(ReplaceTokens, tokens: [version: currentVersion]) + outputs.upToDateWhen { false } +} + +task createNativeDepsDirectories { + downloadsDir.mkdirs() +} + +task downloadJSCHeaders(type: Download) { + def jscAPIBaseURL = 'https://svn.webkit.org/repository/webkit/!svn/bc/174650/trunk/Source/JavaScriptCore/API/' + def jscHeaderFiles = ['JSBase.h', 'JSContextRef.h', 'JSObjectRef.h', 'JSRetainPtr.h', 'JSStringRef.h', 'JSValueRef.h', 'WebKitAvailability.h'] + + def output = new File(jscDownloadDir, 'JavaScriptCore') + output.mkdirs() + src(jscHeaderFiles.collect { headerName -> "$jscAPIBaseURL$headerName" }) + onlyIfNewer true + overwrite false + dest output + } + +task downloadRealmCore(type: Download) { + if (project.buildSync) { + src "https://static.realm.io/downloads/sync/realm-sync-android-${project.syncVersion}.tar.gz" + } else { + src "https://static.realm.io/downloads/core/realm-core-android-${project.coreVersion}.tar.gz" + } + onlyIfNewer true + overwrite true + if (project.buildSync) { + dest new File(downloadsDir, "realm-core-android-${project.syncVersion}.tar.gz") + } else { + dest new File(downloadsDir, "realm-core-android-${project.coreVersion}.tar.gz") + } +} + +task prepareRealmCore(dependsOn: downloadRealmCore, type:Copy) { + from tarTree(downloadRealmCore.dest) + into "$coreDownloadDir/core" + rename { String fileName -> + fileName.replace("-arm-", "-armeabi-") + } +} + +task downloadOpenSSL_x86(type: Download) { + src "https://static.realm.io/downloads/openssl/1.0.2k/Android/x86/openssl-release-1.0.2k-Android-x86.tar.gz" + onlyIfNewer true + overwrite true + dest new File(downloadsDir, "openssl-release-1.0.2k-Android-x86.tar.gz") +} + +task prepareOpenSSL_x86(dependsOn: downloadOpenSSL_x86, type:Copy) { + from tarTree(downloadOpenSSL_x86.dest) + into "$coreDownloadDir/core" +} + +task downloadOpenSSL_arm(type: Download) { + src "https://static.realm.io/downloads/openssl/1.0.2k/Android/armeabi-v7a/openssl-release-1.0.2k-Android-armeabi-v7a.tar.gz" + onlyIfNewer true + overwrite true + dest new File(downloadsDir, "openssl-release-1.0.2k-Android-armeabi-v7a.tar.gz") +} + +task prepareOpenSSL_arm(dependsOn: downloadOpenSSL_arm, type:Copy) { + from tarTree(downloadOpenSSL_arm.dest) + into "$coreDownloadDir/core" + rename { String fileName -> + fileName.replace("-arm-", "-armeabi-") + } +} + +def getDependenciesVersion(keyName) { + def inputFile = new File(buildscript.sourceFile.getParent() + "/../../dependencies.list") + def line + inputFile.withReader { reader -> + while ((line = reader.readLine())!=null) { + def (key, value) = line.tokenize('=') + if (keyName == key) { + return value + } + } + + throw new GradleException("${keyName} not found in dependencies.list.") + } +} + +def getNdkBuildName() { + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + return "ndk-build.cmd" + } else { + return "ndk-build" + } +} + +def findNdkBuildFullPath() { + // we allow to provide full path to ndk-build tool + if (hasProperty('ndk.command')) { + return property('ndk.command') + } + // or just a path to the containing directory + if (hasProperty('ndk.path')) { + def ndkDir = property('ndk.path') + return new File(ndkDir, getNdkBuildName()).getAbsolutePath() + } + if (System.getenv('ANDROID_NDK') != null) { + def ndkDir = System.getenv('ANDROID_NDK') + return new File(ndkDir, 
getNdkBuildName()).getAbsolutePath() + } + if (System.getenv('ANDROID_NDK_HOME') != null) { + def ndkDir = System.getenv('ANDROID_NDK_HOME') + return new File(ndkDir, getNdkBuildName()).getAbsolutePath() + } + def ndkDir = android.hasProperty('plugin') ? android.plugin.ndkFolder : + plugins.getPlugin('com.android.library').sdkHandler.getNdkFolder() + if (ndkDir) { + return new File(ndkDir, getNdkBuildName()).getAbsolutePath() + } + return null +} + +def checkNdkVersion(ndkBuildFullPath) { + def ndkPath = new File(ndkBuildFullPath).getParent() + def detectedNdkVersion + def releaseFile = new File(ndkPath, 'RELEASE.TXT') + def propertyFile = new File(ndkPath, 'source.properties') + if (releaseFile.isFile()) { + detectedNdkVersion = releaseFile.text.trim().split()[0].split('-')[0] + } else if (propertyFile.isFile()) { + detectedNdkVersion = getValueFromPropertiesFile(propertyFile, 'Pkg.Revision') + if (detectedNdkVersion == null) { + throw new GradleException("Failed to obtain the NDK version information from ${ndkPath}/source.properties") + } + } else { + throw new GradleException("Neither ${releaseFile.getAbsolutePath()} nor ${propertyFile.getAbsolutePath()} is a file.") + } + if (detectedNdkVersion != project.ndkVersion) { + throw new GradleException("Your NDK version: ${detectedNdkVersion}." + + " Realm JNI must be compiled with the version ${project.ndkVersion} of NDK.") + } +} + +static def getValueFromPropertiesFile(File propFile, String key) { + if (!propFile.isFile() || !propFile.canRead()) { + return null + } + def prop = new Properties() + def reader = propFile.newReader() + try { + prop.load(reader) + } finally { + reader.close() + } + return prop.get(key) +} + +def getNdkBuildFullPath() { + def ndkBuildFullPath = findNdkBuildFullPath() + if (ndkBuildFullPath == null) { + throw new GradleScriptException( + "ndk-build binary cannot be found, check if you've set " + + "\$ANDROID_NDK environment variable correctly or if ndk.dir is " + + "setup in local.properties", + null) + } + if (!new File(ndkBuildFullPath).canExecute()) { + throw new GradleScriptException( + "ndk-build binary " + ndkBuildFullPath + " doesn't exist or isn't executable.\n" + + "Check that the \$ANDROID_NDK environment variable, or ndk.dir in local.proerties, is set correctly.\n" + + "(On Windows, make sure you escape backslashes in local.properties or use forward slashes, e.g. C:\\\\ndk or C:/ndk rather than C:\\ndk)", + null) + } + + checkNdkVersion(ndkBuildFullPath); + + return ndkBuildFullPath +} + +task buildReactNdkLib(dependsOn: [downloadJSCHeaders,prepareRealmCore,prepareOpenSSL_x86,prepareOpenSSL_arm], type: Exec) { + inputs.files('src/main/jni') + outputs.dir("$buildDir/realm-react-ndk/all") + commandLine getNdkBuildFullPath(), + '-e', + project.buildSync ? 'BUILD_TYPE_SYNC=1' : 'BUILD_TYPE_SYNC=0', + 'NDK_PROJECT_PATH=null', + "NDK_APPLICATION_MK=$projectDir/src/main/jni/Application.mk", + 'NDK_OUT=' + temporaryDir, + "NDK_LIBS_OUT=$buildDir/realm-react-ndk/all", + '-C', file('src/main/jni').absolutePath, + 'NDK_LOG=1', + 'NDK_DEBUG=' + (DEBUG_BUILD.toBoolean() ? 
'1' : '0'), + '--jobs', Runtime.runtime.availableProcessors(), + 'V=1' +} + +task cleanReactNdkLib(type: Exec) { + commandLine getNdkBuildFullPath(), + '-C', file('src/main/jni').absolutePath, + 'clean' +} + +task packageReactNdkLibs(dependsOn: buildReactNdkLib, type: Copy) { + from "$buildDir/realm-react-ndk/all" + exclude '**/libjsc.so' + exclude '**/gdbserver' + exclude '**/gdb.setup' + into "$buildDir/realm-react-ndk/exported" +} + +android { + compileSdkVersion 26 + + defaultConfig { + minSdkVersion 16 + targetSdkVersion 26 + } + + sourceSets.main { + java.srcDir "$buildDir/generated-src/main/java" + jni.srcDirs = [] + jniLibs.srcDir "$buildDir/realm-react-ndk/exported" + res.srcDirs = ['src/main/res/devsupport', 'src/main/res/shell'] + } + + tasks.withType(JavaCompile) { + compileTask -> compileTask.dependsOn generateVersionClass, packageReactNdkLibs + } + + clean.dependsOn cleanReactNdkLib + + lintOptions { + abortOnError false + } +} + +task publishAndroid(dependsOn: [generateVersionClass, packageReactNdkLibs], type: Sync) { + // Copy task can only have one top level + into "$publishDir" + + // copy java source + into ('/src/main') { + from "$projectDir/src/main", "$buildDir/generated-src/main" + exclude '**/jni/**', '**/templates/**' + } + + // add compiled shared object + into ('/src/main/jniLibs') { + from "$buildDir/realm-react-ndk/exported/" + } + + // copy gradle wrapper files + FileTree gradleWrapper = fileTree(projectDir).include('gradlew*').include('gradle/**') + into ('/') { + from gradleWrapper + } + + // copy and rename template build.gradle + into ('/') { + from "$projectDir/publish_android_template" + rename { String fileName -> + 'build.gradle' + } + } + + // copy analytics script + into ('/') { + from "$projectDir/analytics_template" + rename { String fileName -> + 'analytics.gradle' + } + } +} + +// publishing into maven local + +def configureRealmReactNativePom(def pom) { + pom.project { + name POM_NAME + artifactId POM_ARTIFACT_ID + packaging POM_PACKAGING + description POM_DESCRIPTION + url 'https://github.com/realm/realm-js' + + issueManagement { + system 'github' + url 'https://github.com/realm/realm-js/issues' + } + + scm { + url 'scm:https://github.com/realm/realm-js' + connection 'scm:git@github.com:realm/realm-js.git' + developerConnection 'scm:git@github.com:realm/realm-js.git' + } + + licenses { + license { + name 'The Apache Software License, Version 2.0' + url 'http://www.apache.org/licenses/LICENSE-2.0.txt' + distribution 'repo' + } + } + } +} + +afterEvaluate { project -> + task androidSourcesJar(type: Jar) { + classifier = 'sources' + from android.sourceSets.main.java.srcDirs + include '**/*.java' + } + + android.libraryVariants.all { variant -> + def name = variant.name.capitalize() + task "jar${name}"(type: Jar, dependsOn: variant.javaCompile) { + from variant.javaCompile.destinationDir + } + } + + artifacts { + archives androidSourcesJar + } + + version = currentVersion + group = GROUP + + signing { + required { false } + sign configurations.archives + } + + task installArchives(type: Upload) { + configuration = configurations.archives + repositories.mavenDeployer { + beforeDeployment { + MavenDeployment deployment -> signing.signPom(deployment) + } + + repository url: "file://${System.properties['user.home']}/.m2/repository" + configureRealmReactNativePom pom + } + } +} + +def dependencyType = "implementation" +def providedDependencyType = "compileOnly" +try { + project.getConfigurations().getByName("implementation") +} catch 
(UnknownConfigurationException e) { + // Pre 3.0 Android Gradle Plugin + dependencyType = "compile" + providedDependencyType = "provided" +} + +project.dependencies { + add(providedDependencyType, 'com.squareup.okhttp3:okhttp:3.9.0') + add(providedDependencyType, 'com.facebook.react:react-native:+') + add(dependencyType, 'org.nanohttpd:nanohttpd:2.2.0') +} diff --git a/tests/source-files/se.manyver/android/app/build.gradle b/tests/source-files/se.manyver/android/app/build.gradle new file mode 100644 index 00000000..1c77f965 --- /dev/null +++ b/tests/source-files/se.manyver/android/app/build.gradle @@ -0,0 +1,272 @@ +apply plugin: "com.android.application" + +import com.android.build.OutputFile + +/** + * The react.gradle file registers a task for each build variant (e.g. bundleDebugJsAndAssets + * and bundleReleaseJsAndAssets). + * These basically call `react-native bundle` with the correct arguments during the Android build + * cycle. By default, bundleDebugJsAndAssets is skipped, as in debug/dev mode we prefer to load the + * bundle directly from the development server. Below you can see all the possible configurations + * and their defaults. If you decide to add a configuration block, make sure to add it before the + * `apply from: "../../node_modules/react-native/react.gradle"` line. + * + * project.ext.react = [ + * // the name of the generated asset file containing your JS bundle + * bundleAssetName: "index.android.bundle", + * + * // the entry file for bundle generation + * entryFile: "index.android.js", + * + * // https://facebook.github.io/react-native/docs/performance#enable-the-ram-format + * bundleCommand: "ram-bundle", + * + * // whether to bundle JS and assets in debug mode + * bundleInDebug: false, + * + * // whether to bundle JS and assets in release mode + * bundleInRelease: true, + * + * // whether to bundle JS and assets in another build variant (if configured). + * // See http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Build-Variants + * // The configuration property can be in the following formats + * // 'bundleIn${productFlavor}${buildType}' + * // 'bundleIn${buildType}' + * // bundleInFreeDebug: true, + * // bundleInPaidRelease: true, + * // bundleInBeta: true, + * + * // whether to disable dev mode in custom build variants (by default only disabled in release) + * // for example: to disable dev mode in the staging build type (if configured) + * devDisabledInStaging: true, + * // The configuration property can be in the following formats + * // 'devDisabledIn${productFlavor}${buildType}' + * // 'devDisabledIn${buildType}' + * + * // the root of your project, i.e. where "package.json" lives + * root: "../../", + * + * // where to put the JS bundle asset in debug mode + * jsBundleDirDebug: "$buildDir/intermediates/assets/debug", + * + * // where to put the JS bundle asset in release mode + * jsBundleDirRelease: "$buildDir/intermediates/assets/release", + * + * // where to put drawable resources / React Native assets, e.g. the ones you use via + * // require('./image.png')), in debug mode + * resourcesDirDebug: "$buildDir/intermediates/res/merged/debug", + * + * // where to put drawable resources / React Native assets, e.g. 
the ones you use via + * // require('./image.png')), in release mode + * resourcesDirRelease: "$buildDir/intermediates/res/merged/release", + * + * // by default the gradle tasks are skipped if none of the JS files or assets change; this means + * // that we don't look at files in android/ or ios/ to determine whether the tasks are up to + * // date; if you have any other folders that you want to ignore for performance reasons (gradle + * // indexes the entire tree), add them here. Alternatively, if you have JS files in android/ + * // for example, you might want to remove it from here. + * inputExcludes: ["android/**", "ios/**"], + * + * // override which node gets called and with what additional arguments + * nodeExecutableAndArgs: ["node"], + * + * // supply additional arguments to the packager + * extraPackagerArgs: [] + * ] + */ + +project.ext.vectoricons = [ + iconFontNames: [ 'MaterialIcons.ttf', 'MaterialCommunityIcons.ttf' ] +] +project.ext.react = [ + entryFile: "index.android.js", + enableHermes: false, // clean and rebuild if changing + hermesCommand: "../../node_modules/hermes-engine/%OS-BIN%/hermes", +] +apply from: "../../node_modules/react-native-vector-icons/fonts.gradle" +apply from: "../../node_modules/react-native/react.gradle" + +/** + * Set this to true to create two separate APKs instead of one: + * - An APK that only works on ARM devices + * - An APK that only works on x86 devices + * The advantage is the size of the APK is reduced by about 4MB. + * Upload all the APKs to the Play Store and people will download + * the correct one based on the CPU architecture of their device. + */ +def enableSeparateBuildPerCPUArchitecture = false + +/** + * Run Proguard to shrink the Java bytecode in release builds. + */ +def enableProguardInReleaseBuilds = false + +/** + * The preferred build flavor of JavaScriptCore. + * + * For example, to use the international variant, you can use: + * `def jscFlavor = 'org.webkit:android-jsc-intl:+'` + * + * The international variant includes ICU i18n library and necessary data + * allowing to use e.g. `Date.toLocaleString` and `String.localeCompare` that + * give correct results when using with locales other than en-US. Note that + * this variant is about 6MiB larger per architecture than default. + */ +def jscFlavor = 'org.webkit:android-jsc:+' + +/** + * Whether to enable the Hermes VM. + * + * This should be set on project.ext.react and mirrored here. If it is not set + * on project.ext.react, JavaScript will not be compiled to Hermes Bytecode + * and the benefits of using Hermes will therefore be sharply reduced. 
+ */ +def enableHermes = project.ext.react.get("enableHermes", false); + +android { + compileSdkVersion rootProject.ext.compileSdkVersion + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + flavorDimensions "store" + + defaultConfig { + versionCode 78 + versionName "0.1911.27-beta" + applicationId "se.manyver" + minSdkVersion rootProject.ext.minSdkVersion + targetSdkVersion rootProject.ext.targetSdkVersion + missingDimensionStrategy "RNN.reactNativeVersion", "reactNative60" + ndk { + abiFilters "armeabi-v7a", "arm64-v8a" // , "x86", "x86_64" + } + aaptOptions { + ignoreAssetsPattern '!.svn:!.git:!.ds_store:!*.scc:!CVS:!thumbs.db:!picasa.ini:!*~' + } + vectorDrawables.useSupportLibrary = true + } + + // dexOptions { + // javaMaxHeapSize "4g" + // } + + productFlavors { + indie { + dimension "store" + } + + googlePlay { + dimension "store" + versionNameSuffix "-googlePlay" + targetSdkVersion rootProject.ext.targetSdkVersionForGooglePlay + } + } + + signingConfigs { + // debug { + // storeFile file('debug.keystore') + // storePassword 'android' + // keyAlias 'androiddebugkey' + // keyPassword 'android' + // } + release { + if (project.hasProperty('MYAPP_RELEASE_STORE_FILE')) { + storeFile file(MYAPP_RELEASE_STORE_FILE) + storePassword MYAPP_RELEASE_STORE_PASSWORD + keyAlias MYAPP_RELEASE_KEY_ALIAS + keyPassword MYAPP_RELEASE_KEY_PASSWORD + } + } + } + + splits { + abi { + reset() + enable enableSeparateBuildPerCPUArchitecture + universalApk false // If true, also generate a universal APK + include "armeabi-v7a", "arm64-v8a"//, "x86", "x86_64" + } + } + + buildTypes { + debug { + signingConfig signingConfigs.debug + } + release { + // Caution! In production, you need to generate your own keystore file. + // see https://facebook.github.io/react-native/docs/signed-apk-android. + // signingConfig signingConfigs.debug + minifyEnabled enableProguardInReleaseBuilds + proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro" + if (project.hasProperty('MYAPP_RELEASE_STORE_FILE')) { + signingConfig signingConfigs.release + } + } + } + + // applicationVariants are e.g. 
debug, release + applicationVariants.all { variant -> + variant.outputs.each { output -> + // For each separate APK per architecture, set a unique version code as described here: + // https://developer.android.com/studio/build/configure-apk-splits.html + def versionCodes = ["armeabi-v7a": 1, "arm64-v8a": 2] //, "x86":3, "x86_64":4] + def abi = output.getFilter(OutputFile.ABI) + if (abi != null) { // null for the universal-debug, universal-release variants + output.versionCodeOverride = + versionCodes.get(abi) * 1048576 + defaultConfig.versionCode + } + } + } +} + +def acraVersion = '5.3.0' + +dependencies { + implementation "ch.acra:acra-core:$acraVersion" + implementation "ch.acra:acra-mail:$acraVersion" + implementation "ch.acra:acra-dialog:$acraVersion" + implementation project(':nodejs-mobile-react-native') + implementation project(':@react-native-community_async-storage') + implementation project(':react-native-bluetooth-socket-bridge') + implementation project(':react-native-bluetooth-status') + implementation project(':react-native-dialogs') + implementation project(':react-native-vector-icons') + implementation project(':react-native-os-staltz') + implementation project(':react-native-randombytes') + implementation project(':react-native-image-crop-picker') + implementation project(':react-native-navigation') + implementation project(':react-native-android-local-notification') + implementation project(':react-native-android-wifi') + implementation project(':react-native-has-internet') + implementation project(':react-native-flag-secure-android') + implementation project(':react-native-orientation-locker') + implementation project(':react-native-fs') + implementation project(':react-native-splash-screen') + implementation project(':@react-native-community_viewpager') + implementation fileTree(dir: "libs", include: ["*.jar"]) + implementation 'androidx.appcompat:appcompat:1.0.2' + implementation("com.facebook.react:react-native:+") { + force = true + } + implementation 'com.facebook.fresco:animated-gif:1.3.0' + + if (enableHermes) { + def hermesPath = "../../node_modules/hermes-engine/android/"; + debugImplementation files(hermesPath + "hermes-debug.aar") + releaseImplementation files(hermesPath + "hermes-release.aar") + } else { + implementation jscFlavor + } +} + +// Run this once to be able to run the application with BUCK +// puts all compile dependencies into folder libs for BUCK to use +task copyDownloadableDepsToLibs(type: Copy) { + from configurations.compile + into 'libs' +} + +apply from: file("../../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesAppBuildGradle(project) \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/build.gradle b/tests/source-files/se.manyver/android/build.gradle new file mode 100644 index 00000000..de8f5e77 --- /dev/null +++ b/tests/source-files/se.manyver/android/build.gradle @@ -0,0 +1,64 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + ext { + minSdkVersion = 21 + targetSdkVersion = 26 + targetSdkVersionForGooglePlay = 28 + compileSdkVersion = 28 + } + repositories { + google() + jcenter() + mavenLocal() + mavenCentral() + } + dependencies { + classpath("com.android.tools.build:gradle:3.4.2") + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + mavenCentral() + mavenLocal() + maven { + // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm + url("$rootDir/../node_modules/react-native/android") + } + maven { + // Android JSC is installed from npm + url("$rootDir/../node_modules/jsc-android/dist") + } + maven { + url 'https://maven.google.com' + } + maven { + url "https://jitpack.io" + } + google() + jcenter() + } +} + +subprojects { subproject -> + afterEvaluate { + if ((subproject.plugins.hasPlugin('android') || subproject.plugins.hasPlugin('android-library'))) { + android { + variantFilter { variant -> + def names = variant.flavors*.name + if (names.contains("reactNative51")) setIgnore(true) + if (names.contains("reactNative55")) setIgnore(true) + if (names.contains("reactNative56")) setIgnore(true) + if (names.contains("reactNative57")) setIgnore(true) + if (names.contains("reactNative57_5")) setIgnore(true) + if (names.contains("reactNative57WixFork")) setIgnore(true) + if (names.contains("reactNative59")) setIgnore(true) + } + } + } + } +} diff --git a/tests/source-files/se.manyver/android/gradle.properties b/tests/source-files/se.manyver/android/gradle.properties new file mode 100644 index 00000000..33fd4ac9 --- /dev/null +++ b/tests/source-files/se.manyver/android/gradle.properties @@ -0,0 +1,22 @@ +# Project-wide Gradle settings. + +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. + +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html + +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +# Default value: -Xmx10248m -XX:MaxPermSize=256m +# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 + +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true + +android.useAndroidX=true +android.enableJetifier=true +org.gradle.jvmargs=-Xmx4608M \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..a14cb8c9 --- /dev/null +++ b/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.5-all.zip \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/settings.gradle b/tests/source-files/se.manyver/android/settings.gradle new file mode 100644 index 00000000..6a4c91ed --- /dev/null +++ b/tests/source-files/se.manyver/android/settings.gradle @@ -0,0 +1,40 @@ +rootProject.name = 'Manyverse' +apply from: file("../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesSettingsGradle(settings) +include ':@react-native-community_async-storage' +project(':@react-native-community_async-storage').projectDir = new File(rootProject.projectDir, '../node_modules/@react-native-community/async-storage/android') +include ':react-native-bluetooth-status' +project(':react-native-bluetooth-status').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-bluetooth-status/android') +include ':react-native-bluetooth-socket-bridge' +project(':react-native-bluetooth-socket-bridge').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-bluetooth-socket-bridge/android') +include ':react-native-image-crop-picker' +project(':react-native-image-crop-picker').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-image-crop-picker/android') +include ':nodejs-mobile-react-native' +project(':nodejs-mobile-react-native').projectDir = new File(rootProject.projectDir, '../node_modules/nodejs-mobile-react-native/android') +include ':react-native-dialogs' +project(':react-native-dialogs').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-dialogs/android') +include ':react-native-vector-icons' +project(':react-native-vector-icons').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-vector-icons/android') +include ':react-native-os-staltz' +project(':react-native-os-staltz').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-os-staltz/android') +include ':react-native-randombytes' +project(':react-native-randombytes').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-randombytes/android') +include ':react-native-navigation' +project(':react-native-navigation').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-navigation/lib/android/app/') +include ':react-native-android-local-notification' +project(':react-native-android-local-notification').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-android-local-notification/android') +include ':react-native-android-wifi' +project(':react-native-android-wifi').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-android-wifi/android') +include ':react-native-has-internet' 
+project(':react-native-has-internet').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-has-internet/android') +include ':react-native-flag-secure-android' +project(':react-native-flag-secure-android').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-flag-secure-android/android') +include ':react-native-orientation-locker', ':app' +project(':react-native-orientation-locker').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-orientation-locker/android') +include ':react-native-fs' +project(':react-native-fs').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-fs/android') +include ':react-native-splash-screen' +project(':react-native-splash-screen').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-splash-screen/android') +include ':@react-native-community_viewpager' +project(':@react-native-community_viewpager').projectDir = new File(rootProject.projectDir, '../node_modules/@react-native-community/viewpager/android') + +include ':app' diff --git a/tests/source-files/se.manyver/app.json b/tests/source-files/se.manyver/app.json new file mode 100644 index 00000000..8931f1dd --- /dev/null +++ b/tests/source-files/se.manyver/app.json @@ -0,0 +1,4 @@ +{ + "name": "Manyverse", + "displayName": "Manyverse" +} diff --git a/tests/source-files/se.manyver/index.android.js b/tests/source-files/se.manyver/index.android.js new file mode 100644 index 00000000..aac66bea --- /dev/null +++ b/tests/source-files/se.manyver/index.android.js @@ -0,0 +1,17 @@ +/* Copyright (C) 2018-2019 The Manyverse Authors. + * + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +import 'react-native-ssb-shims'; +import {run} from 'cycle-native-navigation'; +import { + screens, + drivers, + welcomeLayout, + defaultNavOptions, +} from './lib/frontend/index'; +// import './snoopy'; // Log and debug the React Native JS<-->Native Bridge + +run(screens, drivers, welcomeLayout, defaultNavOptions); diff --git a/tests/source-files/se.manyver/package.json b/tests/source-files/se.manyver/package.json new file mode 100644 index 00000000..08af1994 --- /dev/null +++ b/tests/source-files/se.manyver/package.json @@ -0,0 +1,135 @@ +{ + "name": "manyverse", + "version": "0.1911.27-beta", + "private": true, + "scripts": { + "postinstall": "patch-package", + "lib": "tsc", + "clean-bundler": "watchman watch-del-all && rm -rf $TMPDIR/react-*", + "clean-android": "adb uninstall se.manyver && cd android && ./gradlew clean", + "full-clean": "npm run clean-android && npm run clean-bundler && rm -rf node_modules && rm -rf lib", + "propagate-replacements": "propagate-replacement-fields --field=react-native", + "build-backend": "./tools/build-backend && ./tools/minify-backend", + "build-android-assets": "npm run lib && npm run propagate-replacements && npm run build-backend", + "build-android-debug": "npm run build-android-assets && react-native run-android --variant=indieDebug", + "build-android-release": "npm run build-android-assets && cd android && ./gradlew assembleRelease && cd ..", + "start": "npm run lib && npm run propagate-replacements && react-native start", + "psdr": "./tools/print-service-desk-report.js", + "test-e2e-android": "./tools/test-e2e-android", + "changelog": "npm run update-repo-changelog && npm run update-dat-latest-readme", + "update-repo-changelog": "./tools/update-repo-changelog.js", + "update-dat-latest-readme": "./tools/update-dat-latest-readme.js", + "echo-ssb-post": "./tools/echo-ssb-post.js", + "update-version": "./tools/update-version.js", + "dat-release": "./tools/dat-release", + "commit-release": "./tools/commit-release", + "release": "npm run update-version && npm run clean-bundler && npm run clean-android && npm run build-android-release && npm run test-e2e-android && npm run changelog && npm run commit-release && npm run dat-release && npm run echo-ssb-post" + }, + "dependencies": { + "@cycle/isolate": "5.1.0", + "@cycle/react": "2.6.0", + "@cycle/run": "5.3.0", + "@cycle/state": "1.3.0", + "@react-native-community/viewpager": "3.1.0", + "@types/node": "~12.7.5", + "@types/react": "16.9.x", + "@types/react-native": "0.60.23", + "@types/react-native-vector-icons": "6.4.3", + "buffer": "5.4.3", + "color-hash": "1.0.3", + "cycle-native-alert": "1.1.0", + "cycle-native-android-local-notification": "1.1.0", + "cycle-native-asyncstorage": "2.0.0", + "cycle-native-clipboard": "1.0.0", + "cycle-native-keyboard": "1.2.0", + "cycle-native-linking": "1.1.0", + "cycle-native-navigation": "6.1.0", + "cycle-native-share": "1.1.0", + "cycle-native-toastandroid": "1.1.0", + "mdast-normalize-react-native": "3.2.x", + "nodejs-mobile-react-native": "0.5.0", + "path": "~0.12.7", + "promisify-tuple": "1.0.0", + "pull-flat-list": "2.10.0", + "pull-pushable": "2.2.0", + "pull-stream": "3.6.14", + "pull-thenable": "1.0.0", + "react": "16.9.0", + "react-human-time": "^1.1.0", + "react-markdown": "4.0.2", + "react-native": "0.61.5", + "react-native-android-local-notification": "3.0.0", + "react-native-android-wifi": "0.0.41", + "react-native-bluetooth-socket-bridge": "1.2.0", + "react-native-bluetooth-status": "1.3.0", + "react-native-dialogs": "1.1.0", + 
"react-native-flag-secure-android": "1.0.2", + "react-native-floating-action": "1.19.1", + "react-native-fs": "~2.16.2", + "react-native-has-internet": "4.0.0", + "react-native-image-crop-picker": "~0.26.1", + "react-native-image-view": "~2.1.6", + "react-native-navigation": "4.0.2", + "react-native-orientation-locker": "1.1.7", + "react-native-popup-menu": "0.15.6", + "react-native-splash-screen": "^3.2.0", + "react-native-ssb-client": "7.0.0", + "react-native-ssb-shims": "4.6.0", + "react-native-swiper": "1.5.14", + "react-native-vector-icons": "6.6.0", + "react-propify-methods": "16.3.1", + "react-xstream-hoc": "1.0.0", + "remark": "~9.0.0", + "remark-gemoji-to-emoji": "1.1.0", + "remark-images-to-ssb-serve-blobs": "2.1.0-1", + "remark-linkify-regex": "1.0.0", + "remark-ssb-mentions": "~2.0.0", + "rn-viewpager": "1.2.9", + "ssb-cached-about": "~1.0.0", + "ssb-conn-query": "~0.4.4", + "ssb-ref": "2.13.9", + "ssb-room": "~1.1.1", + "ssb-serve-blobs": "2.1.0", + "ssb-threads": "3.6.0", + "ssb-typescript": "1.4.0", + "xstream": "11.11.0", + "xstream-backoff": "1.0", + "xstream-between": "1.0", + "xstream-from-callback": "1.0", + "xstream-from-pull-stream": "1.1", + "xstream-sample": "1.0" + }, + "devDependencies": { + "@babel/core": "~7.7.2", + "@babel/runtime": "~7.7.2", + "add-stream": "~1.0.0", + "conventional-changelog": "~3.1.15", + "husky": "^3.1.0", + "into-stream": "~5.1.1", + "jase": "1.2.0", + "left-pad": "1.3.0", + "metro-react-native-babel-preset": "^0.56.0", + "patch-package": "6.2.0", + "prettier": "~1.19.1", + "pretty-quick": "~2.0.1", + "propagate-replacement-fields": "1.2.0", + "react-native-version": "3.2.0", + "rn-snoopy": "2.0.2", + "tslint": "~5.20.1", + "typescript": "~3.7.2" + }, + "optionalDependencies": { + "appium": "1.14.0", + "tap-spec": "5.0.0", + "tape": "~4.9.1", + "wd": "1.11.3" + }, + "husky": { + "hooks": { + "pre-commit": "pretty-quick --staged --pattern \"**/*.*(ts|tsx|js|jsx)\"" + } + }, + "react-native": { + "os": "react-native-os-staltz" + } +} diff --git a/tests/source-files/se.manyver/react-native.config.js b/tests/source-files/se.manyver/react-native.config.js new file mode 100644 index 00000000..26adf2e4 --- /dev/null +++ b/tests/source-files/se.manyver/react-native.config.js @@ -0,0 +1,18 @@ +module.exports = { + dependencies: { + 'nodejs-mobile-react-native': { + // Ignored because we need to set this up manually in order to + // call some APIs of this library directly in our MainActivity.java + platforms: { + android: null, + }, + }, + 'react-native-bluetooth-socket-bridge': { + // This package needs some config passed as arguments to the constructor + // so we need to "link" it manually in MainApplication.java + platforms: { + android: null, + }, + }, + }, +}; diff --git a/tests/source-files/ut.ewh.audiometrytest/app/build.gradle b/tests/source-files/ut.ewh.audiometrytest/app/build.gradle new file mode 100644 index 00000000..640b6678 --- /dev/null +++ b/tests/source-files/ut.ewh.audiometrytest/app/build.gradle @@ -0,0 +1,41 @@ +apply plugin: 'android' + +android { + compileSdkVersion 21 + buildToolsVersion "21.1.1" + + defaultConfig { + minSdkVersion 8 + targetSdkVersion 21 + versionCode 14 + versionName "1.65" + } + signingConfigs{ + releaseSign{ + storeFile file("/Users/reecestevens/keys/keystore.jks") + //storePassword System.console().readLine("\nKeystore password: ") + storePassword System.getenv("KSTOREPWD") + keyAlias "AppKey" + //keyPassword System.console().readLine("\nKey password: ") + keyPassword System.getenv("KEYPWD") + } + } + 
buildTypes { + release { + minifyEnabled true; + debuggable false + signingConfig signingConfigs.releaseSign + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' + } + } +} + +repositories { + maven { url "https://jitpack.io" } +} + +dependencies { + compile 'com.android.support:appcompat-v7:21.0.2' + compile 'com.github.PhilJay:MPAndroidChart:v2.0.9' + compile fileTree(dir: 'libs', include: ['*.jar']) +} diff --git a/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml b/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..1ef6bb12 --- /dev/null +++ b/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/source-files/ut.ewh.audiometrytest/build.gradle b/tests/source-files/ut.ewh.audiometrytest/build.gradle new file mode 100644 index 00000000..a90b6488 --- /dev/null +++ b/tests/source-files/ut.ewh.audiometrytest/build.gradle @@ -0,0 +1,29 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. + +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath 'com.android.tools.build:gradle:1.0.0-rc4' + } +} + +allprojects { + repositories { + mavenCentral() + } +} + +buildscript { + repositories { + mavenCentral() + } + dependencies { + classpath 'org.owasp:dependency-check-gradle:1.3.2.1' + } +} +apply plugin: 'org.owasp.dependencycheck' +dependencyCheck { + format='JSON' +} diff --git a/tests/source-files/ut.ewh.audiometrytest/settings.gradle b/tests/source-files/ut.ewh.audiometrytest/settings.gradle new file mode 100644 index 00000000..e7b4def4 --- /dev/null +++ b/tests/source-files/ut.ewh.audiometrytest/settings.gradle @@ -0,0 +1 @@ +include ':app' diff --git a/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..a7a1a8ca --- /dev/null +++ b/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Wed Jan 30 10:59:12 CET 2019 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100755 index 00000000..ba18caa6 --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +import os +import shutil +import unittest +from pathlib import Path +from unittest import mock + +import fdroidserver +from fdroidserver import common, signindex + +from .shared_test_code import GP_FINGERPRINT, mkdtemp + +basedir = Path(__file__).parent + + +class ApiTest(unittest.TestCase): + """Test the public API in the base "fdroidserver" module + + This is mostly a smokecheck to make sure the public API as + declared in fdroidserver/__init__.py is working. The functions + are all implemented in other modules, with their own tests. 
+ + """ + + def setUp(self): + os.chdir(basedir) + + self._td = mkdtemp() + self.testdir = self._td.name + + common.config = None + config = common.read_config() + config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') + common.config = config + signindex.config = config + + def tearDown(self): + self._td.cleanup() + + def test_download_repo_index_no_fingerprint(self): + with self.assertRaises(fdroidserver.VerificationException): + fdroidserver.download_repo_index("http://example.org") + + @mock.patch('fdroidserver.net.http_get') + def test_download_repo_index_url_parsing(self, mock_http_get): + """Test whether it is trying to download the right file + + This passes the URL back via the etag return value just as a + hack to check which URL was actually attempted. + + """ + mock_http_get.side_effect = lambda url, etag, timeout: (None, url) + repo_url = 'https://example.org/fdroid/repo' + index_url = 'https://example.org/fdroid/repo/index-v1.jar' + for url in (repo_url, index_url): + _ignored, etag_set_to_url = fdroidserver.download_repo_index( + url, verify_fingerprint=False + ) + self.assertEqual(index_url, etag_set_to_url) + + @mock.patch('fdroidserver.net.http_get') + def test_download_repo_index_v1_url_parsing(self, mock_http_get): + """Test whether it is trying to download the right file + + This passes the URL back via the etag return value just as a + hack to check which URL was actually attempted. + + """ + mock_http_get.side_effect = lambda url, etag, timeout: (None, url) + repo_url = 'https://example.org/fdroid/repo' + index_url = 'https://example.org/fdroid/repo/index-v1.jar' + for url in (repo_url, index_url): + _ignored, etag_set_to_url = fdroidserver.download_repo_index_v1( + url, verify_fingerprint=False + ) + self.assertEqual(index_url, etag_set_to_url) + + @mock.patch('fdroidserver.net.download_using_mirrors') + def test_download_repo_index_v2(self, mock_download_using_mirrors): + """Basically a copy of IndexTest.test_download_repo_index_v2""" + mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( + self.testdir, 'repo', os.path.basename(mirrors[0]['url']) + ) + os.chdir(self.testdir) + signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') + os.mkdir('repo') + shutil.copy(basedir / 'repo' / 'entry.json', 'repo') + shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') + signindex.sign_index('repo', 'entry.json') + repo_url = 'https://fake.url/fdroid/repo' + entry_url = 'https://fake.url/fdroid/repo/entry.jar' + index_url = 'https://fake.url/fdroid/repo/index-v2.json' + fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT + slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT + for url in (repo_url, entry_url, index_url, fingerprint_url, slash_url): + data, _ignored = fdroidserver.download_repo_index_v2( + url, verify_fingerprint=False + ) + self.assertEqual(['repo', 'packages'], list(data)) + self.assertEqual( + 'My First F-Droid Repo Demo', data['repo']['name']['en-US'] + ) diff --git a/tests/test_build.py b/tests/test_build.py new file mode 100755 index 00000000..578837ed --- /dev/null +++ b/tests/test_build.py @@ -0,0 +1,1092 @@ +#!/usr/bin/env python3 + +import os +import shutil +import sys +import tempfile +import textwrap +import unittest +from pathlib import Path +from unittest import mock + +import yaml + +import fdroidserver.build +import fdroidserver.common + +from .shared_test_code import TmpCwd, mkdtemp + + +class FakeProcess: + output = 'fake output' + returncode = 0 + + 
def __init__(self, args, **kwargs): + print('FakeFDroidPopen', args, kwargs) + + +class Options: + keep_when_not_allowed = False + + +class BuildTest(unittest.TestCase): + '''fdroidserver/build.py''' + + def setUp(self): + self.basedir = str(Path(__file__).parent) + os.chdir(self.basedir) + fdroidserver.common.config = None + fdroidserver.build.config = None + fdroidserver.build.options = None + self._td = mkdtemp() + self.testdir = self._td.name + + def tearDown(self): + os.chdir(self.basedir) + self._td.cleanup() + + def create_fake_android_home(self, d): + os.makedirs(os.path.join(d, 'build-tools'), exist_ok=True) + os.makedirs(os.path.join(d, 'platform-tools'), exist_ok=True) + os.makedirs(os.path.join(d, 'tools'), exist_ok=True) + + @unittest.skipIf(sys.byteorder == 'big', "androguard is not ported to big-endian") + def test_get_apk_metadata(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.build.config = config + try: + config['aapt'] = fdroidserver.common.find_sdk_tools_cmd('aapt') + except fdroidserver.exception.FDroidException: + pass # aapt is not required if androguard is present + + testcases = [ + ( + 'repo/obb.main.twoversions_1101613.apk', + 'obb.main.twoversions', + 1101613, + '0.1', + None, + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_1.apk', + 'org.bitbucket.tickytacky.mirrormirror', + 1, + '1.0', + None, + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_2.apk', + 'org.bitbucket.tickytacky.mirrormirror', + 2, + '1.0.1', + None, + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_3.apk', + 'org.bitbucket.tickytacky.mirrormirror', + 3, + '1.0.2', + None, + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'org.bitbucket.tickytacky.mirrormirror', + 4, + '1.0.3', + None, + ), + ( + 'org.dyndns.fules.ck_20.apk', + 'org.dyndns.fules.ck', + 20, + 'v1.6pre2', + [ + 'arm64-v8a', + 'armeabi', + 'armeabi-v7a', + 'mips', + 'mips64', + 'x86', + 'x86_64', + ], + ), + ('urzip.apk', 'info.guardianproject.urzip', 100, '0.1', None), + ('urzip-badcert.apk', 'info.guardianproject.urzip', 100, '0.1', None), + ('urzip-badsig.apk', 'info.guardianproject.urzip', 100, '0.1', None), + ('urzip-release.apk', 'info.guardianproject.urzip', 100, '0.1', None), + ( + 'urzip-release-unsigned.apk', + 'info.guardianproject.urzip', + 100, + '0.1', + None, + ), + ('repo/com.politedroid_3.apk', 'com.politedroid', 3, '1.2', None), + ('repo/com.politedroid_4.apk', 'com.politedroid', 4, '1.3', None), + ('repo/com.politedroid_5.apk', 'com.politedroid', 5, '1.4', None), + ('repo/com.politedroid_6.apk', 'com.politedroid', 6, '1.5', None), + ( + 'repo/duplicate.permisssions_9999999.apk', + 'duplicate.permisssions', + 9999999, + '', + None, + ), + ( + 'repo/info.zwanenburg.caffeinetile_4.apk', + 'info.zwanenburg.caffeinetile', + 4, + '1.3', + None, + ), + ( + 'repo/obb.main.oldversion_1444412523.apk', + 'obb.main.oldversion', + 1444412523, + '0.1', + None, + ), + ( + 'repo/obb.mainpatch.current_1619_another-release-key.apk', + 'obb.mainpatch.current', + 1619, + '0.1', + None, + ), + ( + 'repo/obb.mainpatch.current_1619.apk', + 'obb.mainpatch.current', + 1619, + '0.1', + None, + ), + ( + 'repo/obb.main.twoversions_1101613.apk', + 'obb.main.twoversions', + 1101613, + '0.1', + None, + ), + ( + 'repo/obb.main.twoversions_1101615.apk', + 'obb.main.twoversions', + 1101615, + '0.1', + None, + ), + ( + 'repo/obb.main.twoversions_1101617.apk', + 'obb.main.twoversions', + 1101617, + '0.1', + None, + ), + ( + 'repo/urzip-; Рахма́, 
[rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', + 'info.guardianproject.urzip', + 100, + '0.1', + None, + ), + ] + for apkfilename, appid, versionCode, versionName, nativecode in testcases: + app = fdroidserver.metadata.App() + app.id = appid + build = fdroidserver.metadata.Build() + build.buildjni = ['yes'] if nativecode else build.buildjni + build.versionCode = versionCode + build.versionName = versionName + vc, vn = fdroidserver.build.get_metadata_from_apk(app, build, apkfilename) + self.assertEqual(versionCode, vc) + self.assertEqual(versionName, vn) + + @mock.patch('fdroidserver.common.get_apk_id') + @mock.patch('fdroidserver.build.FDroidPopen') + @mock.patch('fdroidserver.common.is_debuggable_or_testOnly', lambda f: False) + @mock.patch('fdroidserver.common.get_native_code', lambda f: 'x86') + @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') + def test_build_local_maven(self, fake_FDroidPopen, fake_get_apk_id): + """Test build_local() with a maven project""" + + # pylint: disable=unused-argument + def _side_effect(cmd, cwd=None): + p = mock.MagicMock() + p.output = '[INFO] fake apkbuilder target/no.apk' + with open(os.path.join(self.testdir, 'target', 'no.apk'), 'w') as fp: + fp.write('placeholder') + p.returncode = 0 + return p + + fake_FDroidPopen.side_effect = _side_effect + os.chdir(self.testdir) + os.mkdir('target') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.build.config = config + fdroidserver.build.options = mock.Mock() + fdroidserver.build.options.scan_binary = False + fdroidserver.build.options.notarball = True + fdroidserver.build.options.skipscan = False + + app = fdroidserver.metadata.App() + app.id = 'mocked.app.id' + build = fdroidserver.metadata.Build() + build.commit = '1.0' + build.versionCode = 1 + build.versionName = '1.0' + fake_get_apk_id.side_effect = lambda f: ( + app.id, + build.versionCode, + build.versionName, + ) + vcs = mock.Mock() + + build.maven = 'yes@..' 
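+        # build_local() is called twice below to cover both forms of the maven
+        # field: 'yes@..' (with a directory suffix) and plain 'yes'.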
+ fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=self.testdir, + output_dir=self.testdir, + log_dir=os.getcwd(), + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=True, + refresh=False, + ) + + build.maven = 'yes' + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=self.testdir, + output_dir=self.testdir, + log_dir=os.getcwd(), + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=True, + refresh=False, + ) + + @mock.patch('sdkmanager.build_package_list', lambda use_net: None) + def test_build_local_ndk(self): + """Test if `fdroid build` detects installed NDKs and auto-installs when missing""" + with tempfile.TemporaryDirectory() as testdir, TmpCwd( + testdir + ), tempfile.TemporaryDirectory() as sdk_path: + config = {'ndk_paths': {}, 'sdk_path': sdk_path} + fdroidserver.common.config = config + fdroidserver.build.config = config + fdroidserver.build.options = mock.Mock() + fdroidserver.build.options.scan_binary = False + fdroidserver.build.options.notarball = True + fdroidserver.build.options.skipscan = True + + app = fdroidserver.metadata.App() + app.id = 'mocked.app.id' + build = fdroidserver.metadata.Build() + build.commit = '1.0' + build.output = app.id + '.apk' + build.versionCode = 1 + build.versionName = '1.0' + build.ndk = 'r21e' # aka 21.4.7075529 + ndk_version = '21.4.7075529' + ndk_dir = Path(config['sdk_path']) / 'ndk' / ndk_version + vcs = mock.Mock() + + def make_fake_apk(output, build): + with open(build.output, 'w') as fp: + fp.write('APK PLACEHOLDER') + return output + + # pylint: disable=unused-argument + def fake_sdkmanager_install(to_install, android_home=None): + ndk_dir.mkdir(parents=True) + self.assertNotEqual(ndk_version, to_install) # converts r21e to version + with (ndk_dir / 'source.properties').open('w') as fp: + fp.write('Pkg.Revision = %s\n' % ndk_version) + + # use "as _ignored" just to make a pretty layout + with mock.patch( + 'fdroidserver.common.replace_build_vars', wraps=make_fake_apk + ) as _ignored, mock.patch( + 'fdroidserver.common.get_native_code', return_value='x86' + ) as _ignored, mock.patch( + 'fdroidserver.common.get_apk_id', + return_value=(app.id, build.versionCode, build.versionName), + ) as _ignored, mock.patch( + 'fdroidserver.common.sha256sum', + return_value='ad7ce5467e18d40050dc51b8e7affc3e635c85bd8c59be62de32352328ed467e', + ) as _ignored, mock.patch( + 'fdroidserver.common.is_debuggable_or_testOnly', + return_value=False, + ) as _ignored, mock.patch( + 'fdroidserver.build.FDroidPopen', FakeProcess + ) as _ignored, mock.patch( + 'sdkmanager.install', wraps=fake_sdkmanager_install + ) as _ignored, mock.patch( + 'fdroidserver.common.get_source_date_epoch', lambda f: '1234567890' + ) as _ignored: + _ignored # silence the linters + with self.assertRaises( + fdroidserver.exception.FDroidException, + msg="No NDK setup, `fdroid build` should fail with error", + ): + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=testdir, + output_dir=testdir, + log_dir=None, + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=False, + refresh=False, + ) + # now run `fdroid build --onserver` + print('now run `fdroid build --onserver`') + self.assertFalse(ndk_dir.exists()) + self.assertFalse('r21e' in config['ndk_paths']) + self.assertFalse(ndk_version in config['ndk_paths']) + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=testdir, + output_dir=testdir, + log_dir=os.getcwd(), + srclib_dir=None, + 
extlib_dir=None, + tmp_dir=None, + force=False, + onserver=True, + refresh=False, + ) + self.assertTrue(ndk_dir.exists()) + self.assertTrue(os.path.exists(config['ndk_paths'][ndk_version])) + # All paths in the config must be strings, never pathlib.Path instances + self.assertIsInstance(config['ndk_paths'][ndk_version], str) + + @mock.patch('sdkmanager.build_package_list', lambda use_net: None) + @mock.patch('fdroidserver.build.FDroidPopen', FakeProcess) + @mock.patch('fdroidserver.common.get_native_code', lambda _ignored: 'x86') + @mock.patch('fdroidserver.common.is_debuggable_or_testOnly', lambda _ignored: False) + @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') + @mock.patch( + 'fdroidserver.common.sha256sum', + lambda f: 'ad7ce5467e18d40050dc51b8e7affc3e635c85bd8c59be62de32352328ed467e', + ) + def test_build_local_ndk_some_installed(self): + """Test if `fdroid build` detects installed NDKs and auto-installs when missing""" + with tempfile.TemporaryDirectory() as testdir, TmpCwd( + testdir + ), tempfile.TemporaryDirectory() as sdk_path: + ndk_r24 = os.path.join(sdk_path, 'ndk', '24.0.8215888') + os.makedirs(ndk_r24) + with open(os.path.join(ndk_r24, 'source.properties'), 'w') as fp: + fp.write('Pkg.Revision = 24.0.8215888\n') + config = {'ndk_paths': {'r24': ndk_r24}, 'sdk_path': sdk_path} + fdroidserver.common.config = config + fdroidserver.build.config = config + fdroidserver.build.options = mock.Mock() + fdroidserver.build.options.scan_binary = False + fdroidserver.build.options.notarball = True + fdroidserver.build.options.skipscan = True + + app = fdroidserver.metadata.App() + app.id = 'mocked.app.id' + build = fdroidserver.metadata.Build() + build.commit = '1.0' + build.output = app.id + '.apk' + build.versionCode = 1 + build.versionName = '1.0' + build.ndk = 'r21e' # aka 21.4.7075529 + ndk_version = '21.4.7075529' + ndk_dir = Path(config['sdk_path']) / 'ndk' / ndk_version + vcs = mock.Mock() + + def make_fake_apk(output, build): + with open(build.output, 'w') as fp: + fp.write('APK PLACEHOLDER') + return output + + # pylint: disable=unused-argument + def fake_sdkmanager_install(to_install, android_home=None): + ndk_dir.mkdir(parents=True) + self.assertNotEqual(ndk_version, to_install) # converts r21e to version + with (ndk_dir / 'source.properties').open('w') as fp: + fp.write('Pkg.Revision = %s\n' % ndk_version) + + # use "as _ignored" just to make a pretty layout + with mock.patch( + 'fdroidserver.common.replace_build_vars', wraps=make_fake_apk + ) as _ignored, mock.patch( + 'fdroidserver.common.get_apk_id', + return_value=(app.id, build.versionCode, build.versionName), + ) as _ignored, mock.patch( + 'sdkmanager.install', wraps=fake_sdkmanager_install + ) as _ignored: + _ignored # silence the linters + self.assertFalse(ndk_dir.exists()) + self.assertFalse('r21e' in config['ndk_paths']) + self.assertFalse(ndk_version in config['ndk_paths']) + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=testdir, + output_dir=testdir, + log_dir=os.getcwd(), + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=True, + refresh=False, + ) + self.assertTrue(ndk_dir.exists()) + self.assertTrue(os.path.exists(config['ndk_paths'][ndk_version])) + + @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') + def test_build_local_clean(self): + """Test if `fdroid build` cleans ant and gradle build products""" + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) 
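+        # One config dict is shared by the common and build modules below; the
+        # mocked options skip source tarball creation and binary scanning.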
+ fdroidserver.common.config = config + fdroidserver.build.config = config + fdroidserver.build.options = mock.Mock() + fdroidserver.build.options.scan_binary = False + fdroidserver.build.options.notarball = True + fdroidserver.build.options.skipscan = False + + app = fdroidserver.metadata.App() + app.id = 'mocked.app.id' + build = fdroidserver.metadata.Build() + build.commit = '1.0' + build.output = app.id + '.apk' + build.scandelete = ['baz.so'] + build.scanignore = ['foo.aar'] + build.versionCode = 1 + build.versionName = '1.0' + vcs = mock.Mock() + + os.mkdir('reports') + os.mkdir('target') + + for f in ('baz.so', 'foo.aar', 'gradle-wrapper.jar'): + with open(f, 'w') as fp: + fp.write('placeholder') + self.assertTrue(os.path.exists(f)) + + os.mkdir('build') + os.mkdir('build/reports') + with open('build.gradle', 'w', encoding='utf-8') as fp: + fp.write('// placeholder') + + os.mkdir('bin') + os.mkdir('gen') + with open('build.xml', 'w', encoding='utf-8') as fp: + fp.write( + textwrap.dedent( + """ + + + + """ + ) + ) + + def make_fake_apk(output, build): + with open(build.output, 'w') as fp: + fp.write('APK PLACEHOLDER') + return output + + with mock.patch('fdroidserver.common.replace_build_vars', wraps=make_fake_apk): + with mock.patch('fdroidserver.common.get_native_code', return_value='x86'): + with mock.patch( + 'fdroidserver.common.get_apk_id', + return_value=(app.id, build.versionCode, build.versionName), + ): + with mock.patch( + 'fdroidserver.common.is_debuggable_or_testOnly', + return_value=False, + ): + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=self.testdir, + output_dir=self.testdir, + log_dir=None, + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=False, + refresh=False, + ) + + self.assertTrue(os.path.exists('foo.aar')) + self.assertTrue(os.path.isdir('build')) + self.assertTrue(os.path.isdir('reports')) + self.assertTrue(os.path.isdir('target')) + self.assertFalse(os.path.exists('baz.so')) + self.assertFalse(os.path.exists('bin')) + self.assertFalse(os.path.exists('build/reports')) + self.assertFalse(os.path.exists('gen')) + self.assertFalse(os.path.exists('gradle-wrapper.jar')) + + def test_scan_with_extlib(self): + os.chdir(self.testdir) + os.mkdir("build") + + config = fdroidserver.common.read_config() + config['sdk_path'] = os.getenv('ANDROID_HOME') + config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} + fdroidserver.common.config = config + app = fdroidserver.metadata.App() + app.id = 'com.gpl.rpg.AndorsTrail' + build = fdroidserver.metadata.Build() + build.commit = 'master' + build.androidupdate = ['no'] + os.makedirs("extlib/android") + # write a fake binary jar file the scanner should definitely error on + with open('extlib/android/android-support-v4r11.jar', 'wb') as file: + file.write( + b'PK\x03\x04\x14\x00\x08\x00\x08\x00-\x0eiA\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x04\x00META-INF/\xfe\xca\x00\x00\x03\x00PK\x07\x08\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00' + ) + + class FakeVcs: + # no need to change to the correct commit here + def gotorevision(self, rev, refresh=True): + pass + + def getsrclib(self): + return None + + def deinitsubmodules(self): + pass + + # Test we trigger a scanner error without extlibs + build.extlibs = [] + os.makedirs('build/libs') + shutil.copy('extlib/android/android-support-v4r11.jar', 'build/libs') + fdroidserver.common.prepare_source( + FakeVcs(), app, build, "build", "ignore", "extlib" + ) + count = fdroidserver.scanner.scan_source("build", 
build) + self.assertEqual(1, count, "Should produce a scanner error without extlib") + + # Now try again as an extlib + build.extlibs = ['android/android-support-v4r11.jar'] + fdroidserver.common.prepare_source( + FakeVcs(), app, build, "build", "ignore", "extlib" + ) + count = fdroidserver.scanner.scan_source("build", build) + self.assertEqual(0, count, "Shouldn't error on jar from extlib") + + def test_failed_verifies_are_not_in_unsigned(self): + os.chdir(self.testdir) + sdk_path = os.path.join(self.testdir, 'android-sdk') + self.create_fake_android_home(sdk_path) + with open(fdroidserver.common.CONFIG_FILE, 'w') as fp: + yaml.dump({'sdk_path': sdk_path, 'keep_when_not_allowed': True}, fp) + os.chmod(fdroidserver.common.CONFIG_FILE, 0o600) + fdroidserver.build.config = fdroidserver.common.read_config() + + os.mkdir('metadata') + appid = 'info.guardianproject.checkey' + metadata_file = os.path.join('metadata', appid + '.yml') + shutil.copy(os.path.join(self.basedir, metadata_file), 'metadata') + with open(metadata_file) as fp: + app = fdroidserver.metadata.App(yaml.safe_load(fp)) + app['RepoType'] = 'git' + app[ + 'Binaries' + ] = 'https://example.com/fdroid/repo/info.guardianproject.checkey_%v.apk' + build = fdroidserver.metadata.Build( + { + 'versionCode': 123, + 'versionName': '1.2.3', + 'commit': '1.2.3', + 'disable': False, + } + ) + app['Builds'] = [build] + fdroidserver.metadata.write_metadata(metadata_file, app) + + os.makedirs(os.path.join('unsigned', 'binaries')) + production_result = os.path.join( + 'unsigned', '%s_%d.apk' % (appid, build['versionCode']) + ) + production_compare_file = os.path.join( + 'unsigned', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) + ) + os.makedirs(os.path.join('tmp', 'binaries')) + test_result = os.path.join('tmp', '%s_%d.apk' % (appid, build['versionCode'])) + test_compare_file = os.path.join( + 'tmp', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) + ) + with mock.patch( + 'fdroidserver.common.force_exit', lambda *args: None + ) as a, mock.patch( + 'fdroidserver.common.get_android_tools_version_log', lambda: 'fake' + ) as b, mock.patch( + 'fdroidserver.common.FDroidPopen', FakeProcess + ) as c, mock.patch( + 'fdroidserver.build.FDroidPopen', FakeProcess + ) as d, mock.patch( + 'fdroidserver.build.trybuild', lambda *args: True + ) as e, mock.patch( + 'fdroidserver.net.download_file', lambda *args, **kwargs: None + ) as f: + a, b, c, d, e, f # silence linters' "unused" warnings + + with mock.patch('sys.argv', ['fdroid build', appid]): + # successful comparison + open(production_result, 'w').close() + open(production_compare_file, 'w').close() + with mock.patch('fdroidserver.common.verify_apks', lambda *args: None): + fdroidserver.build.main() + self.assertTrue(os.path.exists(production_result)) + self.assertTrue(os.path.exists(production_compare_file)) + # failed comparison + open(production_result, 'w').close() + open(production_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: 'failed' + ): + fdroidserver.build.main() + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + + with mock.patch('sys.argv', ['fdroid build', '--test', appid]): + # successful comparison + open(test_result, 'w').close() + open(test_compare_file, 'w').close() + with mock.patch('fdroidserver.common.verify_apks', lambda *args: None): + fdroidserver.build.main() + self.assertTrue(os.path.exists(test_result)) + 
self.assertTrue(os.path.exists(test_compare_file)) + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + # failed comparison + open(test_result, 'w').close() + open(test_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: 'failed' + ): + fdroidserver.build.main() + self.assertTrue(os.path.exists(test_result)) + self.assertFalse(os.path.exists(test_compare_file)) + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + + def test_failed_allowedapksigningkeys_are_not_in_unsigned(self): + os.chdir(self.testdir) + os.mkdir('metadata') + appid = 'info.guardianproject.checkey' + metadata_file = os.path.join('metadata', appid + '.yml') + shutil.copy(os.path.join(self.basedir, metadata_file), 'metadata') + with open(metadata_file) as fp: + app = fdroidserver.metadata.App(yaml.safe_load(fp)) + app['RepoType'] = 'git' + app[ + 'Binaries' + ] = 'https://example.com/fdroid/repo/info.guardianproject.checkey_%v.apk' + build = fdroidserver.metadata.Build( + { + 'versionCode': 123, + 'versionName': '1.2.3', + 'commit': '1.2.3', + 'disable': False, + } + ) + app['Builds'] = [build] + expected_key = 'a' * 64 + bogus_key = 'b' * 64 + app['AllowedAPKSigningKeys'] = [expected_key] + fdroidserver.metadata.write_metadata(metadata_file, app) + + os.makedirs(os.path.join('unsigned', 'binaries')) + production_result = os.path.join( + 'unsigned', '%s_%d.apk' % (appid, build['versionCode']) + ) + production_compare_file = os.path.join( + 'unsigned', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) + ) + os.makedirs(os.path.join('tmp', 'binaries')) + test_result = os.path.join('tmp', '%s_%d.apk' % (appid, build['versionCode'])) + test_compare_file = os.path.join( + 'tmp', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) + ) + with mock.patch( + 'fdroidserver.common.force_exit', lambda *args: None + ) as a, mock.patch( + 'fdroidserver.common.get_android_tools_version_log', lambda: 'fake' + ) as b, mock.patch( + 'fdroidserver.common.FDroidPopen', FakeProcess + ) as c, mock.patch( + 'fdroidserver.build.FDroidPopen', FakeProcess + ) as d, mock.patch( + 'fdroidserver.build.trybuild', lambda *args: True + ) as e, mock.patch( + 'fdroidserver.net.download_file', lambda *args, **kwargs: None + ) as f: + a, b, c, d, e, f # silence linters' "unused" warnings + + with mock.patch('sys.argv', ['fdroid build', appid]): + # successful comparison, successful signer + open(production_result, 'w').close() + open(production_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: None + ) as g, mock.patch( + 'fdroidserver.common.apk_signer_fingerprint', + lambda *args: expected_key, + ) as h: + g, h + fdroidserver.build.main() + self.assertTrue(os.path.exists(production_result)) + self.assertTrue(os.path.exists(production_compare_file)) + # successful comparison, failed signer + open(production_result, 'w').close() + open(production_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: None + ) as g, mock.patch( + 'fdroidserver.common.apk_signer_fingerprint', + lambda *args: bogus_key, + ) as h: + g, h + fdroidserver.build.main() + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + # failed comparison + open(production_result, 'w').close() + open(production_compare_file, 'w').close() + with mock.patch( + 
'fdroidserver.common.verify_apks', lambda *args: 'failed' + ): + fdroidserver.build.main() + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + + with mock.patch('sys.argv', ['fdroid build', '--test', appid]): + # successful comparison, successful signer + open(test_result, 'w').close() + open(test_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: None + ) as g, mock.patch( + 'fdroidserver.common.apk_signer_fingerprint', + lambda *args: expected_key, + ) as h: + g, h + fdroidserver.build.main() + self.assertTrue(os.path.exists(test_result)) + self.assertTrue(os.path.exists(test_compare_file)) + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + # successful comparison, failed signer + open(test_result, 'w').close() + open(test_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: None + ) as g, mock.patch( + 'fdroidserver.common.apk_signer_fingerprint', + lambda *args: bogus_key, + ) as h: + g, h + fdroidserver.build.main() + self.assertTrue(os.path.exists(test_result)) + self.assertFalse(os.path.exists(test_compare_file)) + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + # failed comparison + open(test_result, 'w').close() + open(test_compare_file, 'w').close() + with mock.patch( + 'fdroidserver.common.verify_apks', lambda *args: 'failed' + ): + fdroidserver.build.main() + self.assertTrue(os.path.exists(test_result)) + self.assertFalse(os.path.exists(test_compare_file)) + self.assertFalse(os.path.exists(production_result)) + self.assertFalse(os.path.exists(production_compare_file)) + + @mock.patch('fdroidserver.vmtools.get_build_vm') + @mock.patch('fdroidserver.vmtools.get_clean_builder') + @mock.patch('paramiko.SSHClient') + @mock.patch('subprocess.check_output') + def test_build_server_cmdline( + self, + subprocess_check_output, + paramiko_SSHClient, + fdroidserver_vmtools_get_clean_builder, + fdroidserver_vmtools_get_build_vm, + ): + """Test command line flags passed to the buildserver""" + global cmdline_args + test_flag = ['', False] + + def _exec_command(args): + flag = test_flag[0] + if test_flag[1]: + self.assertTrue(flag in args, flag + ' should be present') + else: + self.assertFalse(flag in args, flag + ' should not be present') + + os.chdir(self.testdir) + os.mkdir('tmp') + + chan = mock.MagicMock() + chan.exec_command = _exec_command + chan.recv_exit_status = lambda: 0 + transport = mock.MagicMock() + transport.open_session = mock.Mock(return_value=chan) + sshs = mock.MagicMock() + sshs.get_transport = mock.Mock(return_value=transport) + paramiko_SSHClient.return_value = sshs + subprocess_check_output.return_value = ( + b'0123456789abcdef0123456789abcdefcafebabe' + ) + fdroidserver_vmtools_get_clean_builder.side_effect = lambda s: { + 'hostname': 'example.com', + 'idfile': '/path/to/id/file', + 'port': 123, + 'user': 'fake', + } + fdroidserver.common.config = {'sdk_path': '/fake/android/sdk/path'} + fdroidserver.build.options = mock.MagicMock() + vcs = mock.Mock() + vcs.getsrclib = mock.Mock(return_value=None) + app = fdroidserver.metadata.App() + app['metadatapath'] = 'metadata/fake.id.yml' + app['id'] = 'fake.id' + app['RepoType'] = 'git' + build = fdroidserver.metadata.Build( + { + 'versionCode': 123, + 'versionName': '1.2.3', + 'commit': '1.2.3', + 'disable': False, + } + ) + app['Builds'] = 
[build] + + test_flag = ('--on-server', True) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + self.assertTrue(fdroidserver_vmtools_get_build_vm.called) + + for force in (True, False): + test_flag = ('--force', force) + fdroidserver.build.build_server(app, build, vcs, '', '', '', force) + + fdroidserver.build.options.notarball = True + test_flag = ('--no-tarball', True) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + fdroidserver.build.options.notarball = False + test_flag = ('--no-tarball', False) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + + fdroidserver.build.options.skipscan = False + test_flag = ('--scan-binary', True) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + fdroidserver.build.options.skipscan = True + test_flag = ('--scan-binary', False) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + test_flag = ('--skip-scan', True) + fdroidserver.build.build_server(app, build, vcs, '', '', '', False) + + @mock.patch('fdroidserver.vmtools.get_build_vm') + @mock.patch('fdroidserver.vmtools.get_clean_builder') + @mock.patch('paramiko.SSHClient') + @mock.patch('subprocess.check_output') + @mock.patch('fdroidserver.common.getsrclib') + @mock.patch('fdroidserver.common.prepare_source') + @mock.patch('fdroidserver.build.build_local') + @mock.patch('fdroidserver.common.get_android_tools_version_log', lambda: 'versions') + @mock.patch('fdroidserver.common.deploy_build_log_with_rsync', lambda a, b, c: None) + def test_build_server_no_local_prepare( + self, + build_build_local, + common_prepare_source, + common_getsrclib, + subprocess_check_output, + paramiko_SSHClient, + fdroidserver_vmtools_get_clean_builder, + fdroidserver_vmtools_get_build_vm, # pylint: disable=unused-argument + ): + """srclibs Prepare: should only be executed in the buildserver""" + + def _exec_command(args): + print('chan.exec_command', args) + + def _getsrclib( + spec, + srclib_dir, + basepath=False, + raw=False, + prepare=True, + preponly=False, + refresh=True, + build=None, + ): + # pylint: disable=unused-argument + name, ref = spec.split('@') + libdir = os.path.join(srclib_dir, name) + os.mkdir(libdir) + self.assertFalse(prepare, 'Prepare: scripts should never run on host') + return name, None, libdir # TODO + + os.chdir(self.testdir) + + chan = mock.MagicMock() + chan.exec_command = _exec_command + chan.recv_exit_status = lambda: 0 + transport = mock.MagicMock() + transport.open_session = mock.Mock(return_value=chan) + sshs = mock.MagicMock() + sshs.get_transport = mock.Mock(return_value=transport) + paramiko_SSHClient.return_value = sshs + subprocess_check_output.return_value = ( + b'0123456789abcdef0123456789abcdefcafebabe' + ) + fdroidserver_vmtools_get_clean_builder.side_effect = lambda s: { + 'hostname': 'example.com', + 'idfile': '/path/to/id/file', + 'port': 123, + 'user': 'fake', + } + + fdroidserver.metadata.srclibs = { + 'flutter': { + 'RepoType': 'git', + 'Repo': 'https://github.com/flutter/flutter', + } + } + os.mkdir('srclibs') + with open('srclibs/flutter.yml', 'w') as fp: + yaml.dump(fdroidserver.metadata.srclibs, fp) + common_getsrclib.side_effect = _getsrclib + + options = mock.MagicMock() + options.force = False + options.notarball = True + options.onserver = False + options.refresh = False + options.scan_binary = False + options.server = True + options.skipscan = True + options.test = False + options.verbose = True + fdroidserver.build.options = options + fdroidserver.build.config = 
{'sdk_path': '/fake/android/sdk/path'} + + vcs = mock.Mock() + vcs.getsrclib = mock.Mock(return_value=None) + app = fdroidserver.metadata.App() + app['metadatapath'] = 'metadata/fake.id.yml' + app['id'] = 'fake.id' + app['RepoType'] = 'git' + spec = 'flutter@v1.7.8' + build = fdroidserver.metadata.Build( + { + 'versionCode': 123, + 'versionName': '1.2.3', + 'commit': '1.2.3', + 'disable': False, + 'srclibs': [spec], + } + ) + app['Builds'] = [build] + + build_dir = 'build' + srclib_dir = os.path.join(build_dir, 'srclib') + extlib_dir = os.path.join(build_dir, 'extlib') + os.mkdir('tmp') + os.mkdir(build_dir) + os.mkdir(srclib_dir) + + fdroidserver.build.trybuild( + app, + build, + build_dir, + 'unsigned', + 'logs', + None, + srclib_dir, + extlib_dir, + 'tmp', + 'repo', + vcs, + options.test, + options.server, + options.force, + options.onserver, + options.refresh, + ) + + common_getsrclib.assert_called_once_with( + spec, srclib_dir, basepath=True, prepare=False + ) + common_prepare_source.assert_not_called() + build_build_local.assert_not_called() + + def test_keep_when_not_allowed_default(self): + self.assertFalse(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_config_true(self): + fdroidserver.build.config = {'keep_when_not_allowed': True} + self.assertTrue(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_config_false(self): + fdroidserver.build.config = {'keep_when_not_allowed': False} + self.assertFalse(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_options_true(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = True + self.assertTrue(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_options_false(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = False + self.assertFalse(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_options_true_override_config(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = True + fdroidserver.build.config = {'keep_when_not_allowed': False} + self.assertTrue(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_options_default_does_not_override(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = False + fdroidserver.build.config = {'keep_when_not_allowed': True} + self.assertTrue(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_all_true(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = True + fdroidserver.build.config = {'keep_when_not_allowed': True} + self.assertTrue(fdroidserver.build.keep_when_not_allowed()) + + def test_keep_when_not_allowed_all_false(self): + fdroidserver.build.options = Options + fdroidserver.build.options.keep_when_not_allowed = False + fdroidserver.build.config = {'keep_when_not_allowed': False} + self.assertFalse(fdroidserver.build.keep_when_not_allowed()) diff --git a/tests/test_checkupdates.py b/tests/test_checkupdates.py new file mode 100755 index 00000000..107caf29 --- /dev/null +++ b/tests/test_checkupdates.py @@ -0,0 +1,707 @@ +#!/usr/bin/env python3 + +import os +import platform +import shutil +import tempfile +import time +import unittest +from pathlib import Path +from unittest import mock + +import git + +import fdroidserver +import fdroidserver.checkupdates + +basedir = 
Path(__file__).parent + + +class CheckupdatesTest(unittest.TestCase): + '''fdroidserver/checkupdates.py''' + + def setUp(self): + os.chdir(basedir) + self.testdir = tempfile.TemporaryDirectory( + str(time.time()), self._testMethodName + '_' + ) + + def tearDown(self): + self.testdir.cleanup() + + def test_autoupdatemode_no_suffix(self): + fdroidserver.checkupdates.config = {} + + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersion = '1.1.8-fdroid' + app.CurrentVersionCode = 10108 + app.UpdateCheckMode = 'HTTP' + app.AutoUpdateMode = 'Version %v' + + build = fdroidserver.metadata.Build() + build.versionCode = app.CurrentVersionCode + build.versionName = app.CurrentVersion + app['Builds'].append(build) + + with mock.patch( + 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) + ): + with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): + with mock.patch('subprocess.call', lambda cmd: 0): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + build = app['Builds'][-1] + self.assertEqual(build.versionName, '1.1.9') + self.assertEqual(build.commit, '1.1.9') + + with mock.patch( + 'fdroidserver.checkupdates.check_http', lambda app: ('1.7.9', 10107) + ): + with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): + with mock.patch('subprocess.call', lambda cmd: 0): + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + build = app['Builds'][-1] + self.assertEqual(build.versionName, '1.1.9') + self.assertEqual(build.commit, '1.1.9') + + def test_autoupdatemode_suffix(self): + fdroidserver.checkupdates.config = {} + + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersion = '1.1.8-fdroid' + app.CurrentVersionCode = 10108 + app.UpdateCheckMode = 'HTTP' + app.AutoUpdateMode = r'Version +.%c-fdroid v%v_%c' + + build = fdroidserver.metadata.Build() + build.versionCode = app.CurrentVersionCode + build.versionName = app.CurrentVersion + app['Builds'].append(build) + + with mock.patch( + 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) + ): + with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): + with mock.patch('subprocess.call', lambda cmd: 0): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + build = app['Builds'][-1] + self.assertEqual(build.versionName, '1.1.9.10109-fdroid') + self.assertEqual(build.commit, 'v1.1.9_10109') + + def test_autoupdate_multi_variants(self): + fdroidserver.checkupdates.config = {} + + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersion = '1.1.8' + app.CurrentVersionCode = 101083 + app.UpdateCheckMode = 'Tags' + app.AutoUpdateMode = r'Version' + app.VercodeOperation = [ + "10*%c+1", + "10*%c+3", + ] + + build = fdroidserver.metadata.Build() + build.versionCode = app.CurrentVersionCode - 2 + build.versionName = app.CurrentVersion + build.gradle = ["arm"] + app['Builds'].append(build) + + build = fdroidserver.metadata.Build() + build.versionCode = app.CurrentVersionCode + build.versionName = app.CurrentVersion + build.gradle = ["x86"] + app['Builds'].append(build) + + with mock.patch( + 'fdroidserver.checkupdates.check_tags', + lambda app, pattern: ('1.1.9', 10109, 'v1.1.9'), + ): + with mock.patch('fdroidserver.metadata.write_metadata', 
mock.Mock()): + with mock.patch('subprocess.call', lambda cmd: 0): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + build = app['Builds'][-2] + self.assertEqual(build.versionName, '1.1.9') + self.assertEqual(build.versionCode, 101091) + self.assertEqual(build.gradle, ["arm"]) + + build = app['Builds'][-1] + self.assertEqual(build.versionName, '1.1.9') + self.assertEqual(build.versionCode, 101093) + self.assertEqual(build.gradle, ["x86"]) + + self.assertEqual(app.CurrentVersion, '1.1.9') + self.assertEqual(app.CurrentVersionCode, 101093) + + def test_checkupdates_app_http(self): + fdroidserver.checkupdates.config = {} + + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersionCode = 10108 + app.UpdateCheckMode = 'HTTP' + app.UpdateCheckData = 'mock' + + with mock.patch( + 'fdroidserver.checkupdates.check_http', lambda app: (None, 'bla') + ): + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + with mock.patch( + 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) + ): + with mock.patch( + 'fdroidserver.metadata.write_metadata', mock.Mock() + ) as wrmock: + with mock.patch('subprocess.call', lambda cmd: 0): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + wrmock.assert_called_with(app.metadatapath, app) + + def test_checkupdates_app_tags(self): + fdroidserver.checkupdates.config = {} + + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersion = '1.1.8' + app.CurrentVersionCode = 10108 + app.UpdateCheckMode = 'Tags' + app.AutoUpdateMode = 'Version' + + build = fdroidserver.metadata.Build() + build.versionCode = app.CurrentVersionCode + build.versionName = app.CurrentVersion + app['Builds'].append(build) + + with mock.patch( + 'fdroidserver.checkupdates.check_tags', + lambda app, pattern: (None, 'bla', None), + ): + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + with mock.patch( + 'fdroidserver.checkupdates.check_tags', + lambda app, pattern: ('1.1.9', 10109, 'v1.1.9'), + ): + with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): + with mock.patch('subprocess.call', lambda cmd: 0): + fdroidserver.checkupdates.checkupdates_app(app, auto=True) + + build = app['Builds'][-1] + self.assertEqual(build.versionName, '1.1.9') + self.assertEqual(build.commit, 'v1.1.9') + + def test_check_http(self): + app = fdroidserver.metadata.App() + app.id = 'loop.starts.shooting' + app.metadatapath = 'metadata/' + app.id + '.yml' + app.CurrentVersionCode = 10108 + app.UpdateCheckMode = 'HTTP' + app.UpdateCheckData = r'https://a.net/b.txt|c(.*)|https://d.net/e.txt|v(.*)' + app.UpdateCheckIgnore = 'beta' + + respmock = mock.Mock() + respmock.read = lambda: 'v1.1.9\nc10109'.encode('utf-8') + with mock.patch('urllib.request.urlopen', lambda a, b, c: respmock): + vername, vercode = fdroidserver.checkupdates.check_http(app) + self.assertEqual(vername, '1.1.9') + self.assertEqual(vercode, 10109) + + def test_check_http_blocks_unknown_schemes(self): + app = fdroidserver.metadata.App() + for scheme in ('file', 'ssh', 'http', ';pwn'): + app.id = scheme + faked = scheme + '://fake.url/for/testing/scheme' + app.UpdateCheckData = faked + '|ignored|' + faked + '|ignored' + app.metadatapath = 'metadata/' + app.id + '.yml' + with 
self.assertRaises(fdroidserver.exception.FDroidException):
+                fdroidserver.checkupdates.check_http(app)
+
+    def test_check_http_ignore(self):
+        app = fdroidserver.metadata.App()
+        app.id = 'loop.starts.shooting'
+        app.metadatapath = 'metadata/' + app.id + '.yml'
+        app.CurrentVersionCode = 10108
+        app.UpdateCheckMode = 'HTTP'
+        app.UpdateCheckData = r'https://a.net/b.txt|c(.*)|https://d.net/e.txt|v(.*)'
+        app.UpdateCheckIgnore = 'beta'
+
+        respmock = mock.Mock()
+        respmock.read = lambda: 'v1.1.9-beta\nc10109'.encode('utf-8')
+        with mock.patch('urllib.request.urlopen', lambda a, b, c: respmock):
+            vername, vercode = fdroidserver.checkupdates.check_http(app)
+        self.assertEqual(vername, None)
+
+    def test_check_tags_data(self):
+        app = fdroidserver.metadata.App()
+        app.id = 'loop.starts.shooting'
+        app.metadatapath = 'metadata/' + app.id + '.yml'
+        app.RepoType = 'git'
+        app.CurrentVersionCode = 10108
+        app.UpdateCheckMode = 'Tags'
+        app.UpdateCheckData = r'b.txt|c(.*)|e.txt|v(.*)'
+
+        vcs = mock.Mock()
+        vcs.latesttags.return_value = ['1.1.9', '1.1.8']
+        with mock.patch(
+            'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109'
+        ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch(
+            'fdroidserver.common.getvcs', return_value=vcs
+        ):
+            _ignored  # silence the linters
+            mock_path.is_file.return_value = True
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '1.1.9')
+            self.assertEqual(vercode, 10109)
+
+        app.UpdateCheckData = r'b.txt|c(.*)|.|v(.*)'
+        with mock.patch(
+            'pathlib.Path.read_text', lambda a: 'v1.1.0\nc10109'
+        ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch(
+            'fdroidserver.common.getvcs', return_value=vcs
+        ):
+            _ignored  # silence the linters
+            mock_path.is_file.return_value = True
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '1.1.0')
+            self.assertEqual(vercode, 10109)
+
+        app.UpdateCheckData = r'b.txt|c(.*)||'
+        with mock.patch(
+            'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109'
+        ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch(
+            'fdroidserver.common.getvcs', return_value=vcs
+        ):
+            _ignored  # silence the linters
+            mock_path.is_file.return_value = True
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '1.1.9')
+            self.assertEqual(vercode, 10109)
+
+        vcs.latesttags.return_value = ['Android-1.1.0', '1.1.8']
+        app.UpdateCheckData = r'b.txt|c(.*)||Android-([\d.]+)'
+        with mock.patch(
+            'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109'
+        ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch(
+            'fdroidserver.common.getvcs', return_value=vcs
+        ):
+            _ignored  # silence the linters
+            mock_path.is_file.return_value = True
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '1.1.0')
+            self.assertEqual(vercode, 10109)
+
+        app.UpdateCheckData = r'|\+(\d+)||Android-([\d.]+)'
+        vcs.latesttags.return_value = ['Android-1.1.0+1']
+        with mock.patch('fdroidserver.common.getvcs', return_value=vcs):
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '1.1.0')
+            self.assertEqual(vercode, 1)
+
+        app.UpdateCheckData = '|||'
+        vcs.latesttags.return_value = ['2']
+        with mock.patch('fdroidserver.common.getvcs', return_value=vcs):
+            vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None)
+            self.assertEqual(vername, '2')
self.assertEqual(vercode, 2) + + def _get_test_git_repos(self): + testdir = self.testdir.name + os.chdir(testdir) + os.mkdir('metadata') + for f in (basedir / 'metadata').glob('*.yml'): + shutil.copy(f, 'metadata') + git_repo = git.Repo.init(testdir) + with git_repo.config_writer() as cw: + cw.set_value('user', 'name', 'Foo Bar') + cw.set_value('user', 'email', 'foo@bar.com') + git_repo.git.add(all=True) + git_repo.index.commit("all metadata files") + + git_remote_upstream = os.path.join(testdir, 'git_remote_upstream') + upstream_repo = git.Repo.init(git_remote_upstream, bare=True) + with upstream_repo.config_writer() as cw: + cw.set_value('receive', 'advertisePushOptions', True) + git_repo.create_remote('upstream', 'file://' + git_remote_upstream) + + git_remote_origin = os.path.join(testdir, 'git_remote_origin') + origin_repo = git.Repo.init(git_remote_origin, bare=True) + with origin_repo.config_writer() as cw: + cw.set_value('receive', 'advertisePushOptions', True) + git_repo.create_remote('origin', 'file://' + git_remote_origin) + + return git_repo, origin_repo, upstream_repo + + def test_get_changes_versus_ref(self): + def _make_commit_new_app(git_repo, metadata_file): + app = fdroidserver.metadata.App() + fdroidserver.metadata.write_metadata(metadata_file, app) + git_repo.git.add(metadata_file) + git_repo.git.commit(metadata_file, message=f'changed {metadata_file}') + + git_repo, origin_repo, upstream_repo = self._get_test_git_repos() + for remote in git_repo.remotes: + remote.push(git_repo.active_branch) + appid = 'com.testvalue' + metadata_file = f'metadata/{appid}.yml' + + # set up remote branch with change to app + git_repo.git.checkout('-b', appid) + _make_commit_new_app(git_repo, metadata_file) + git_repo.remotes.origin.push(appid) + + # reset local branch and there should be differences + upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) + git_repo.git.reset(upstream_main) + self.assertTrue( + fdroidserver.checkupdates.get_changes_versus_ref( + git_repo, f'origin/{appid}', metadata_file + ) + ) + # make new commit that matches the previous, different commit, no diff + _make_commit_new_app(git_repo, metadata_file) + self.assertFalse( + fdroidserver.checkupdates.get_changes_versus_ref( + git_repo, f'origin/{appid}', metadata_file + ) + ) + + def test_push_commits(self): + git_repo, origin_repo, upstream_repo = self._get_test_git_repos() + for remote in git_repo.remotes: + remote.push(git_repo.active_branch) + self.assertEqual(git_repo.head, upstream_repo.head) + self.assertEqual(origin_repo.head, upstream_repo.head) + # pretend that checkupdates ran but didn't create any new commits + fdroidserver.checkupdates.push_commits() + + appid = 'org.adaway' + self.assertNotIn(appid, git_repo.branches) + self.assertNotIn(appid, origin_repo.branches) + self.assertNotIn(appid, upstream_repo.branches) + self.assertNotIn('checkupdates', git_repo.branches) + + # now make commit + app = fdroidserver.metadata.read_metadata({appid: -1})[appid] + build = fdroidserver.metadata.Build() + build.versionName = 'fake' + build.versionCode = 999999999 + app.Builds.append(build) + metadata_file = 'metadata/%s.yml' % appid + fdroidserver.metadata.write_metadata(metadata_file, app) + git_repo.index.add(metadata_file) + git_repo.index.commit('changed ' + appid) + + # and push the new commit to the dynamic branch + fdroidserver.checkupdates.push_commits() + self.assertIn(appid, git_repo.branches) + self.assertIn(appid, git_repo.remotes.origin.refs) + 
self.assertNotIn('checkupdates', git_repo.branches) + self.assertNotIn(appid, git_repo.remotes.upstream.refs) + + def test_push_commits_verbose(self): + class Options: + verbose = True + + fdroidserver.checkupdates.options = Options + repos = self._get_test_git_repos() + git_repo = repos[0] + git_repo.remotes.origin.push(git_repo.active_branch) + git_repo.remotes.upstream.push(git_repo.active_branch) + + # make commit + appid = 'org.adaway' + app = fdroidserver.metadata.read_metadata({appid: -1})[appid] + build = fdroidserver.metadata.Build() + build.versionName = 'fake' + build.versionCode = 999999999 + app.Builds.append(build) + metadata_file = 'metadata/%s.yml' % appid + fdroidserver.metadata.write_metadata(metadata_file, app) + git_repo.index.add(metadata_file) + git_repo.index.commit('changed ' + appid) + + # and push the new commit to the dynamic branch + fdroidserver.checkupdates.push_commits() + self.assertIn(appid, git_repo.branches) + self.assertIn(appid, git_repo.remotes.origin.refs) + + def test_prune_empty_appid_branches(self): + git_repo, origin_repo, upstream_repo = self._get_test_git_repos() + for remote in git_repo.remotes: + remote.push(git_repo.active_branch) + self.assertEqual(git_repo.head, upstream_repo.head) + self.assertEqual(origin_repo.head, upstream_repo.head) + + appid = 'org.adaway' + git_repo.create_head(appid, force=True) + git_repo.remotes.origin.push(appid, force=True) + self.assertIn(appid, git_repo.branches) + self.assertIn(appid, origin_repo.branches) + self.assertIn(appid, git_repo.remotes.origin.refs) + self.assertNotIn(appid, git_repo.remotes.upstream.refs) + fdroidserver.checkupdates.prune_empty_appid_branches() + self.assertNotIn(appid, origin_repo.branches) + self.assertNotIn(appid, git_repo.remotes.origin.refs) + self.assertNotIn(appid, git_repo.remotes.upstream.refs) + + @mock.patch('sys.exit') + @mock.patch('fdroidserver.metadata.read_metadata') + def test_merge_requests_flag(self, read_metadata, sys_exit): + def _sys_exit(return_code=0): + self.assertNotEqual(return_code, 0) + raise fdroidserver.exception.FDroidException('sys.exit() ran') + + def _read_metadata(a=None, b=None): + raise StopIteration('read_metadata() ran, test is successful') + + appid = 'com.example' + # read_metadata.return_value = dict() # {appid: dict()} + read_metadata.side_effect = _read_metadata + sys_exit.side_effect = _sys_exit + + # set up clean git repo + os.chdir(self.testdir.name) + git_repo = git.Repo.init() + open('foo', 'w').close() + git_repo.git.add(all=True) + git_repo.index.commit("all files") + + with mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request']): + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.checkupdates.main() + sys_exit.assert_called() + + sys_exit.reset_mock() + with mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request', appid]): + with self.assertRaises(StopIteration): + fdroidserver.checkupdates.main() + sys_exit.assert_not_called() + + @unittest.skipIf( + platform.system() == 'Darwin', + 'It is difficult to configure the base system for this test.', + ) + def test_get_upstream_main_branch(self): + os.chdir(self.testdir.name) + testvalue = 'foo' + git_repo = git.Repo.init('.', initial_branch=testvalue) + + open('foo', 'w').close() + git_repo.git.add(all=True) + git_repo.index.commit("all files") + git_repo.create_remote('upstream', os.getcwd()).fetch() + + branch = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) + self.assertEqual( + f'upstream/{testvalue}', + branch, + 
f'The default branch should be called {testvalue}!', + ) + + def test_get_upstream_main_branch_git_config(self): + os.chdir(self.testdir.name) + testvalue = 'foo' + git_repo = git.Repo.init('.', initial_branch=testvalue) + with git_repo.config_writer() as cw: + cw.set_value('init', 'defaultBranch', testvalue) + + open('foo', 'w').close() + git_repo.git.add(all=True) + git_repo.index.commit("all files") + git_repo.git.branch('somethingelse') # make another remote branch + git_repo.create_remote('upstream', os.getcwd()).fetch() + + branch = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) + self.assertEqual( + f'upstream/{testvalue}', + branch, + f'The default branch should be called {testvalue}!', + ) + + def test_checkout_appid_branch_does_not_exist(self): + appid = 'com.example' + os.chdir(self.testdir.name) + git_repo = git.Repo.init('.') + open('foo', 'w').close() + git_repo.git.add(all=True) + git_repo.index.commit("all files") + # --merge-request assumes remotes called 'origin' and 'upstream' + git_repo.create_remote('origin', os.getcwd()).fetch() + git_repo.create_remote('upstream', os.getcwd()).fetch() + self.assertNotIn(appid, git_repo.heads) + fdroidserver.checkupdates.checkout_appid_branch(appid) + self.assertIn(appid, git_repo.heads) + + def test_checkout_appid_branch_exists(self): + appid = 'com.example' + + upstream_dir = os.path.join(self.testdir.name, 'upstream_git') + os.mkdir(upstream_dir) + upstream_repo = git.Repo.init(upstream_dir) + (Path(upstream_dir) / 'README').write_text('README') + upstream_repo.git.add(all=True) + upstream_repo.index.commit("README") + upstream_repo.create_head(appid) + + local_dir = os.path.join(self.testdir.name, 'local_git') + git.Repo.clone_from(upstream_dir, local_dir) + os.chdir(local_dir) + git_repo = git.Repo.init('.') + # --merge-request assumes remotes called 'origin' and 'upstream' + git_repo.create_remote('upstream', upstream_dir).fetch() + + self.assertNotIn(appid, git_repo.heads) + fdroidserver.checkupdates.checkout_appid_branch(appid) + self.assertIn(appid, git_repo.heads) + + def test_checkout_appid_branch_skip_bot_commit(self): + appid = 'com.example' + + upstream_dir = os.path.join(self.testdir.name, 'upstream_git') + os.mkdir(upstream_dir) + upstream_repo = git.Repo.init(upstream_dir) + (Path(upstream_dir) / 'README').write_text('README') + upstream_repo.git.add(all=True) + upstream_repo.index.commit("README") + upstream_repo.create_head(appid) + + local_dir = os.path.join(self.testdir.name, 'local_git') + git.Repo.clone_from(upstream_dir, local_dir) + os.chdir(local_dir) + git_repo = git.Repo.init('.') + # --merge-request assumes remotes called 'origin' and 'upstream' + git_repo.create_remote('upstream', upstream_dir).fetch() + + os.mkdir('metadata') + git_repo.create_head(appid, f'origin/{appid}', force=True) + git_repo.git.checkout(appid) + + # fake checkupdates-bot commit + Path(f'metadata/{appid}.yml').write_text('AutoName: Example\n') + with git_repo.config_writer() as cw: + cw.set_value('user', 'email', fdroidserver.checkupdates.BOT_EMAIL) + git_repo.git.add(all=True) + git_repo.index.commit("Example") + + # set up starting from remote branch + git_repo.remotes.origin.push(appid) + upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) + git_repo.git.checkout(upstream_main.split('/')[1]) + git_repo.delete_head(appid, force=True) + + self.assertTrue( + fdroidserver.checkupdates.checkout_appid_branch(appid), + 'This should have been true since there are only bot commits.', + ) + + def 
test_checkout_appid_branch_skip_human_edits(self): + appid = 'com.example' + + upstream_dir = os.path.join(self.testdir.name, 'upstream_git') + os.mkdir(upstream_dir) + upstream_repo = git.Repo.init(upstream_dir) + (Path(upstream_dir) / 'README').write_text('README') + upstream_repo.git.add(all=True) + upstream_repo.index.commit("README") + upstream_repo.create_head(appid) + + local_dir = os.path.join(self.testdir.name, 'local_git') + git.Repo.clone_from(upstream_dir, local_dir) + os.chdir(local_dir) + git_repo = git.Repo.init('.') + # --merge-request assumes remotes called 'origin' and 'upstream' + git_repo.create_remote('upstream', upstream_dir).fetch() + + os.mkdir('metadata') + git_repo.create_head(appid, f'origin/{appid}', force=True) + git_repo.git.checkout(appid) + + with git_repo.config_writer() as cw: + cw.set_value('user', 'email', fdroidserver.checkupdates.BOT_EMAIL) + + # fake checkupdates-bot commit + Path(f'metadata/{appid}.yml').write_text('AutoName: Example\n') + git_repo.git.add(all=True) + git_repo.index.commit("Example") + + # fake commit added on top by a human + Path(f'metadata/{appid}.yml').write_text('AutoName: Example\nName: Foo\n') + with git_repo.config_writer() as cw: + cw.set_value('user', 'email', 'human@bar.com') + git_repo.git.add(all=True) + git_repo.index.commit("Example") + + # set up starting from remote branch + git_repo.remotes.origin.push(appid) + upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) + git_repo.git.reset(upstream_main.split('/')[1]) + + self.assertFalse( + fdroidserver.checkupdates.checkout_appid_branch(appid), + 'This should have been false since there are human edits.', + ) + + @mock.patch('git.remote.Remote.push') + @mock.patch('sys.exit') + @mock.patch('fdroidserver.common.read_app_args') + @mock.patch('fdroidserver.checkupdates.checkupdates_app') + def test_merge_requests_branch( + self, checkupdates_app, read_app_args, sys_exit, push + ): + def _sys_exit(return_code=0): + self.assertEqual(return_code, 0) + + def _checkupdates_app(app, auto, commit): # pylint: disable=unused-argument + os.mkdir('metadata') + Path(f'metadata/{app["packageName"]}.yml').write_text('AutoName: Example') + git_repo.git.add(all=True) + git_repo.index.commit("Example") + + def _read_app_args(apps=[]): + appid = apps[0] + return {appid: {'packageName': appid}} + + appid = 'com.example' + read_app_args.side_effect = _read_app_args + checkupdates_app.side_effect = _checkupdates_app + sys_exit.side_effect = _sys_exit + + # set up clean git repo + os.chdir(self.testdir.name) + git_repo = git.Repo.init() + open('foo', 'w').close() + git_repo.git.add(all=True) + git_repo.index.commit("all files") + # --merge-request assumes remotes called 'origin' and 'upstream' + git_repo.create_remote('origin', os.getcwd()).fetch() + git_repo.create_remote('upstream', os.getcwd()).fetch() + + self.assertNotIn(appid, git_repo.heads) + with mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request', appid]): + fdroidserver.checkupdates.main() + push.assert_called_once() + sys_exit.assert_called_once() + self.assertIn(appid, git_repo.heads) + + def test_push_commits_invalid_branch_name(self): + git_repo, origin_repo, upstream_repo = self._get_test_git_repos() + for remote in git_repo.remotes: + remote.push(git_repo.active_branch) + self.assertEqual(git_repo.head, upstream_repo.head) + self.assertEqual(origin_repo.head, upstream_repo.head) + # pretend that checkupdates ran but didn't create any new commits + fdroidserver.checkupdates.push_commits('') 
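The UpdateCheckData values exercised above follow a pipe-separated, four-field layout: where to find the versionCode, a regex for it, where to find the versionName, and a regex for that. The sketch below is only a reading aid for those test expectations, assuming that layout; it is not fdroidserver's actual parser, and the helper name and the read_text callback are made up for illustration.

import re

def check_tags_sketch(update_check_data, tag, read_text):
    """Illustrative only: combine the four UpdateCheckData fields as the tests above expect."""
    code_file, code_regex, name_file, name_regex = update_check_data.split('|')
    # versionCode: apply the regex to a file from the tag checkout, or to the tag name itself
    code_haystack = read_text(code_file) if code_file else tag
    m = re.search(code_regex, code_haystack) if code_regex else None
    vercode = int(m.group(1)) if m else int(tag)
    # versionName: same idea; a completely empty pair means "use the tag name as-is"
    if name_file or name_regex:
        name_haystack = read_text(name_file) if name_file else tag
        m = re.search(name_regex, name_haystack)
        vername = m.group(1) if m else None
    else:
        vername = tag
    return vername, vercode

# check_tags_sketch(r'b.txt|c(.*)|e.txt|v(.*)', '1.1.9', lambda f: 'v1.1.9\nc10109')
# returns ('1.1.9', 10109), matching the expectations in test_check_tags_data above.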
diff --git a/tests/test_common.py b/tests/test_common.py new file mode 100755 index 00000000..3110b446 --- /dev/null +++ b/tests/test_common.py @@ -0,0 +1,3653 @@ +#!/usr/bin/env python3 + +import difflib +import glob +import gzip +import importlib +import json +import logging +import os +import re +import shutil +import subprocess +import sys +import tempfile +import textwrap +import time +import unittest +from argparse import ArgumentParser +from datetime import datetime, timezone +from pathlib import Path +from unittest import mock +from zipfile import BadZipFile, ZipFile + +import git +import ruamel.yaml + +import fdroidserver +import fdroidserver.common +import fdroidserver.metadata +import fdroidserver.signindex +from fdroidserver._yaml import config_dump, yaml, yaml_dumper +from fdroidserver.common import ANTIFEATURES_CONFIG_NAME, CATEGORIES_CONFIG_NAME +from fdroidserver.exception import ( + FDroidException, + MetaDataException, + VCSException, + VerificationException, +) +from fdroidserver.looseversion import LooseVersion + +from .shared_test_code import TmpCwd, mkdir_testfiles, mkdtemp + +basedir = Path(__file__).parent + + +def _mock_common_module_options_instance(): + """Helper method to deal with difficult visibility of the module-level options.""" + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.verbose = False + + +class SetUpTearDownMixin: + """A mixin with no tests in it for shared setUp and tearDown.""" + + def setUp(self): + logging.basicConfig(level=logging.DEBUG) + logger = logging.getLogger('androguard.axml') + logger.setLevel(logging.INFO) # tame the axml debug messages + os.chdir(basedir) + + self.verbose = '-v' in sys.argv or '--verbose' in sys.argv + fdroidserver.common.set_console_logging(self.verbose) + + # these are declared as None at the top of the module file + fdroidserver.common.config = None + fdroidserver.common.options = None + fdroidserver.metadata.srclibs = None + + self.testdir = mkdir_testfiles(basedir, self) + + def tearDown(self): + fdroidserver.common.config = None + fdroidserver.common.options = None + os.chdir(basedir) + if os.path.exists(self.testdir): + shutil.rmtree(self.testdir) + + +class CommonTest(SetUpTearDownMixin, unittest.TestCase): + '''fdroidserver/common.py''' + + def test_yaml_1_2(self): + """Return a ruamel.yaml instance that supports YAML 1.2 + + There should be no "Norway Problem", and other things like this: + https://yaml.org/spec/1.2.2/ext/changes/ + + YAML 1.2 says "underlines _ cannot be used within numerical + values", but ruamel.yaml seems to ignore that. 1_0 should be a + string, but it is read as a 10. 
+ + """ + os.chdir(self.testdir) + yaml12file = Path('YAML 1.2.yml') + yaml12file.write_text('[true, no, 0b010, 010, 0o10, "\\/"]', encoding='utf-8') + with yaml12file.open() as fp: + self.assertEqual( + [True, 'no', 2, 10, 8, '/'], + yaml.load(fp), + ) + + def test_parse_human_readable_size(self): + for k, v in ( + (9827, 9827), + (123.456, 123), + ('123b', 123), + ('1.2', 1), + ('10.43 KiB', 10680), + ('11GB', 11000000000), + ('59kb', 59000), + ('343.1 mb', 343100000), + ('99.9GiB', 107266808217), + ('1MB', 1000000), + ): + self.assertEqual(fdroidserver.common.parse_human_readable_size(k), v) + for v in ((12, 123), '0xfff', [], None, '12,123', '123GG', '982374bb', self): + with self.assertRaises(ValueError): + fdroidserver.common.parse_human_readable_size(v) + + def test_assert_config_keystore(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with self.assertRaises(FDroidException): + fdroidserver.common.assert_config_keystore({}) + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + c = { + 'repo_keyalias': 'localhost', + 'keystore': 'keystore.jks', + 'keystorepass': '12345', + 'keypass': '12345', + } + with open('keystore.jks', 'w'): + pass + fdroidserver.common.assert_config_keystore(c) + + def _set_build_tools(self): + build_tools = os.path.join( + fdroidserver.common.config['sdk_path'], 'build-tools' + ) + if os.path.exists(build_tools): + for f in sorted(os.listdir(build_tools), reverse=True): + versioned = os.path.join(build_tools, f) + if os.path.isdir(versioned) and os.path.isfile( + os.path.join(versioned, 'apksigner') + ): + break + return True + else: + print('no build-tools found: ' + build_tools) + return False + + def _find_all(self): + tools = ['aapt', 'adb', 'jarsigner'] + if os.path.exists(os.path.join(os.getenv('ANDROID_HOME'), 'tools', 'android')): + tools.append('android') + for cmd in tools: + try: + path = fdroidserver.common.find_sdk_tools_cmd(cmd) + self.assertTrue(os.path.exists(path)) + self.assertTrue(os.path.isfile(path)) + except fdroidserver.exception.FDroidException: + pass + + @unittest.skipUnless(os.getenv('ANDROID_HOME'), "Needs ANDROID_HOME env var") + def test_find_sdk_tools_cmd(self): + fdroidserver.common.config = dict() + # TODO add this once everything works without sdk_path set in config + # self._find_all() + sdk_path = os.getenv('ANDROID_HOME') + if os.path.exists(sdk_path): + fdroidserver.common.config['sdk_path'] = sdk_path + build_tools = os.path.join(sdk_path, 'build-tools') + if self._set_build_tools() or os.path.exists('/usr/bin/aapt'): + self._find_all() + else: + print('no build-tools found: ' + build_tools) + + def test_find_java_root_path(self): + os.chdir(self.testdir) + + all_pathlists = [ + ( + [ # Debian + '/usr/lib/jvm/java-1.5.0-gcj-5-amd64', + '/usr/lib/jvm/java-8-openjdk-amd64', + '/usr/lib/jvm/java-1.8.0-openjdk-amd64', + ], + '/usr/lib/jvm/java-8-openjdk-amd64', + ), + ( + [ # OSX + '/Library/Java/JavaVirtualMachines/jdk1.8.0_202.jdk', + '/Library/Java/JavaVirtualMachines/jdk1.8.0_45.jdk', + '/System/Library/Java/JavaVirtualMachines/jdk1.7.0_80.jdk', + ], + '/Library/Java/JavaVirtualMachines/jdk1.8.0_202.jdk', + ), + ] + + for pathlist, choice in all_pathlists: + # strip leading / to make relative paths to test without root + pathlist = [p[1:] for p in pathlist] + + # create test file used in common._add_java_paths_to_config() + for p in pathlist: + if p.startswith('/System') or p.startswith('/Library'): + _dir = os.path.join(p, 'Contents', 'Home', 'bin') + else: + _dir = os.path.join(p, 
'bin') + os.makedirs(_dir) + open(os.path.join(_dir, 'javac'), 'w').close() + + config = dict() + config['java_paths'] = dict() + fdroidserver.common._add_java_paths_to_config(pathlist, config) + self.assertEqual(config['java_paths']['8'], choice[1:]) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_is_debuggable_or_testOnly(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + + # these are set debuggable + for apkfile in ('urzip.apk', 'urzip-badsig.apk', 'urzip-badcert.apk'): + self.assertTrue( + fdroidserver.common.is_debuggable_or_testOnly(str(basedir / apkfile)), + "debuggable APK state was not properly parsed!", + ) + + # these are set NOT debuggable + testfiles = 'urzip-release.apk', 'urzip-release-unsigned.apk', 'v2.only.sig_2.apk' + for apkfile in testfiles: + self.assertFalse( + fdroidserver.common.is_debuggable_or_testOnly(apkfile), + "debuggable APK state was not properly parsed!", + ) + + VALID_STRICT_PACKAGE_NAMES = [ + "An.stop", + "SpeedoMeterApp.main", + "a2dp.Vol", + "au.com.darkside.XServer", + "click.dummer.UartSmartwatch", + "com.Bisha.TI89EmuDonation", + "com.MarcosDiez.shareviahttp", + "com.Pau.ImapNotes2", + "com.app.Zensuren", + "com.darshancomputing.BatteryIndicator", + "com.geecko.QuickLyric", + "com.genonbeta.TrebleShot", + "com.gpl.rpg.AndorsTrail", + "com.hobbyone.HashDroid", + "com.moez.QKSMS", + "com.platypus.SAnd", + "com.prhlt.aemus.Read4SpeechExperiments", + "de.syss.MifareClassicTool", + "org.fdroid.fdroid", + "org.f_droid.fdr0ID", + ] + + def test_is_valid_package_name(self): + for name in self.VALID_STRICT_PACKAGE_NAMES + [ + "_SpeedoMeterApp.main", + "05041684efd9b16c2888b1eddbadd0359f655f311b89bdd1737f560a10d20fb8", + ]: + self.assertTrue( + fdroidserver.common.is_valid_package_name(name), + "{0} should be a valid package name".format(name), + ) + for name in [ + "0rg.fdroid.fdroid", + ".f_droid.fdr0ID", + "trailingdot.", + "org.fdroid/fdroid", + "/org.fdroid.fdroid", + ]: + self.assertFalse( + fdroidserver.common.is_valid_package_name(name), + "{0} should not be a valid package name".format(name), + ) + + def test_is_strict_application_id(self): + """see also tests/valid-package-names/""" + for name in self.VALID_STRICT_PACKAGE_NAMES: + self.assertTrue( + fdroidserver.common.is_strict_application_id(name), + "{0} should be a strict application id".format(name), + ) + for name in [ + "0rg.fdroid.fdroid", + ".f_droid.fdr0ID", + "oneword", + "trailingdot.", + "cafebabe", + "org.fdroid/fdroid", + "/org.fdroid.fdroid", + "_SpeedoMeterApp.main", + "05041684efd9b16c2888b1eddbadd0359f655f311b89bdd1737f560a10d20fb8", + ]: + self.assertFalse( + fdroidserver.common.is_strict_application_id(name), + "{0} should not be a strict application id".format(name), + ) + + def test_prepare_sources(self): + testint = 99999999 + teststr = 'FAKE_STR_FOR_TESTING' + + shutil.copytree( + os.path.join(basedir, 'source-files'), + os.path.join(self.testdir, 'source-files'), + ) + + fdroidclient_testdir = os.path.join( + self.testdir, 'source-files', 'fdroid', 'fdroidclient' + ) + + config = dict() + config['sdk_path'] = os.getenv('ANDROID_HOME') + config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} + fdroidserver.common.config = config + app = fdroidserver.metadata.App() + app.id = 'org.fdroid.froid' + build = fdroidserver.metadata.Build() + build.commit = 'master' + build.forceversion = True + build.forcevercode = True + build.gradle = ['yes'] + build.target 
= 'android-' + str(testint) + build.versionName = teststr + build.versionCode = testint + + class FakeVcs: + # no need to change to the correct commit here + def gotorevision(self, rev, refresh=True): + pass + + # no srclib info needed, but it could be added... + def getsrclib(self): + return None + + def deinitsubmodules(self): + pass + + fdroidserver.common.prepare_source(FakeVcs(), app, build, + fdroidclient_testdir, fdroidclient_testdir, fdroidclient_testdir) + + fdroidclient_testdir = Path(fdroidclient_testdir) + build_gradle = fdroidclient_testdir / 'build.gradle' + filedata = build_gradle.read_text(encoding='utf-8') + self.assertIsNotNone( + re.search(r"\s+compileSdkVersion %s\s+" % testint, filedata) + ) + + androidmanifest_xml = fdroidclient_testdir / 'AndroidManifest.xml' + filedata = androidmanifest_xml.read_text(encoding='utf-8') + self.assertIsNone(re.search('android:debuggable', filedata)) + self.assertIsNotNone( + re.search('android:versionName="%s"' % build.versionName, filedata) + ) + self.assertIsNotNone( + re.search('android:versionCode="%s"' % build.versionCode, filedata) + ) + + @unittest.skipIf(os.name == 'nt', "`fdroid build` assumes POSIX scripting") + def test_prepare_sources_with_prebuild_subdir(self): + app_build_dir = os.path.join(self.testdir, 'build', 'com.example') + shutil.copytree( + basedir / 'source-files' / 'fdroid' / 'fdroidclient', + app_build_dir, + ) + + subdir = 'baz/bar' + subdir_path = Path(app_build_dir) / subdir + subdir_path.mkdir(parents=True, exist_ok=True) + build_gradle = subdir_path / 'build.gradle' + build_gradle.write_text('// just a test placeholder', encoding='utf-8') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + _mock_common_module_options_instance() + + srclibname = 'FakeSrcLib' + srclib_testdir = os.path.join(self.testdir, 'build', 'srclib') + os.makedirs(os.path.join(srclib_testdir, srclibname, 'testdirshouldexist')) + fdroidserver.metadata.srclibs = { + srclibname: { + 'RepoType': 'git', + 'Repo': 'https://example.com/foo/fakesrclib', + 'Subdir': None, + 'Prepare': None, + } + } + + app = fdroidserver.metadata.App() + app.id = 'app.has.srclibs' + build = fdroidserver.metadata.Build() + build.commit = 'master' + build.gradle = ['yes'] + build.prebuild = ['test -d $$FakeSrcLib$$/testdirshouldexist'] # actual test condition + build.srclibs = [srclibname + '@1.2.3'] + build.subdir = subdir + build.versionCode = 0xCAFE + build.versionName = 'vCAFE' + + class FakeVcs: + # no need to change to the correct commit here + def gotorevision(self, rev, refresh=True): + pass + + # no srclib info needed, but it could be added... 
+ def getsrclib(self): + return None + + def deinitsubmodules(self): + pass + + fdroidserver.common.prepare_source(FakeVcs(), app, build, + app_build_dir, srclib_testdir, app_build_dir, + onserver=True, refresh=False) # do not clone in this test + + def test_prepare_sources_refresh(self): + _mock_common_module_options_instance() + packageName = 'org.fdroid.ci.test.app' + os.chdir(self.testdir) + os.mkdir('build') + os.mkdir('metadata') + + # use a local copy if available to avoid hitting the network + tmprepo = os.path.join(basedir, 'tmp', 'importer') + if os.path.exists(tmprepo): + git_url = tmprepo + else: + git_url = 'https://gitlab.com/fdroid/ci-test-app.git' + + metadata = dict() + metadata['Description'] = 'This is just a test app' + metadata['RepoType'] = 'git' + metadata['Repo'] = git_url + with open(os.path.join('metadata', packageName + '.yml'), 'w') as fp: + yaml_dumper.dump(metadata, fp) + + gitrepo = os.path.join(self.testdir, 'build', packageName) + vcs0 = fdroidserver.common.getvcs('git', git_url, gitrepo) + vcs0.gotorevision('0.3', refresh=True) + vcs1 = fdroidserver.common.getvcs('git', git_url, gitrepo) + vcs1.gotorevision('0.3', refresh=False) + + def test_setup_vcs_srclib(self): + app = fdroidserver.metadata.App( + { + 'RepoType': 'srclib', + 'Repo': 'TransportsRennes', + } + ) + srclib = { + 'RepoType': 'git', + 'Repo': 'https://github.com/ybonnel/TransportsRennes', + } + fdroidserver.metadata.srclibs = {'TransportsRennes': srclib} + vcs, build_dir = fdroidserver.common.setup_vcs(app) + self.assertIsNotNone(vcs) + self.assertEqual(build_dir, Path('build/srclib/TransportsRennes')) + + def test_getvcs_srclib(self): + vcstype = 'srclib' + remote = 'TransportsRennes' + local = 'build/srclib/' + remote + fdroidserver.metadata.srclibs = { + remote: { + 'RepoType': 'git', + 'Repo': 'https://github.com/ybonnel/TransportsRennes', + } + } + self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, remote, local)) + self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, Path(remote), local)) + self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, remote, Path(local))) + self.assertIsNotNone(fdroidserver.common.getvcs( + vcstype, Path(remote), Path(local) + )) + with self.assertRaises(VCSException): + fdroidserver.common.getvcs(vcstype, remote, 'bad') + with self.assertRaises(VCSException): + fdroidserver.common.getvcs(vcstype, remote, Path('bad')) + with self.assertRaises(VCSException): + fdroidserver.common.getvcs(vcstype, Path(remote), 'bad') + with self.assertRaises(VCSException): + fdroidserver.common.getvcs(vcstype, Path(remote), Path('bad')) + + def test_fdroid_popen_stderr_redirect(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + _mock_common_module_options_instance() + + commands = ['sh', '-c', 'echo stdout message && echo stderr message 1>&2'] + + p = fdroidserver.common.FDroidPopen(commands) + self.assertEqual(p.output, 'stdout message\nstderr message\n') + + p = fdroidserver.common.FDroidPopen(commands, stderr_to_stdout=False) + self.assertEqual(p.output, 'stdout message\n') + + def test_signjar(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') + fdroidserver.common.config = config + fdroidserver.signindex.config = config + + sourcedir = os.path.join(basedir, 'signindex') + for f in ('testy.jar', 'guardianproject.jar'): + sourcefile = os.path.join(sourcedir, f) + testfile = 
os.path.join(self.testdir, f) + shutil.copy(sourcefile, self.testdir) + fdroidserver.signindex.sign_jar(testfile, use_old_algs=True) + # these should be resigned, and therefore different + self.assertNotEqual( + open(sourcefile, 'rb').read(), open(testfile, 'rb').read() + ) + + def test_verify_apk_signature(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + fdroidserver.common.config = config + + self.assertTrue(fdroidserver.common.verify_apk_signature('bad-unicode-πÇÇ现代通用字-български-عربي1.apk')) + if 'apksigner' in fdroidserver.common.config: # apksigner considers MD5 signatures valid + self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_1.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_2.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_3.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_4.apk')) + else: + self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_1.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_2.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_3.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_4.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('org.dyndns.fules.ck_20.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('urzip.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-badcert.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-badsig.apk')) + self.assertTrue(fdroidserver.common.verify_apk_signature('urzip-release.apk')) + self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-release-unsigned.apk')) + + def test_verify_old_apk_signature(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') + fdroidserver.common.config = config + + try: + fdroidserver.common.verify_deprecated_jar_signature('bad-unicode-πÇÇ现代通用字-български-عربي1.apk') + fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_1.apk') + fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_2.apk') + fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_3.apk') + fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_4.apk') + fdroidserver.common.verify_deprecated_jar_signature('org.dyndns.fules.ck_20.apk') + fdroidserver.common.verify_deprecated_jar_signature('urzip.apk') + fdroidserver.common.verify_deprecated_jar_signature('urzip-release.apk') + except VerificationException: + self.fail("failed to jarsigner failed to verify an old apk") + self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-badcert.apk') + self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-badsig.apk') + self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-release-unsigned.apk') + + def test_verify_jar_signature(self): + """Sign entry.jar and make sure it validates""" + config = 
fdroidserver.common.read_config() + config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') + config['keystore'] = os.path.join(basedir, 'keystore.jks') + config['repo_keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + fdroidserver.common.config = config + fdroidserver.signindex.config = config + repo_dir = Path(self.testdir) / 'repo' + repo_dir.mkdir() + shutil.copy('repo/entry.json', repo_dir) + shutil.copy('repo/index-v2.json', repo_dir) + os.chdir(self.testdir) + fdroidserver.signindex.sign_index('repo', 'entry.json') + fdroidserver.common.verify_jar_signature('repo/entry.jar') + + def test_verify_jar_signature_fails(self): + """Test verify_jar_signature fails on unsigned and deprecated algorithms""" + config = fdroidserver.common.read_config() + config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') + fdroidserver.common.config = config + source_dir = os.path.join(basedir, 'signindex') + for f in ('unsigned.jar', 'testy.jar', 'guardianproject.jar', 'guardianproject-v1.jar'): + testfile = os.path.join(source_dir, f) + with self.assertRaises(fdroidserver.index.VerificationException): + fdroidserver.common.verify_jar_signature(testfile) + + def test_verify_deprecated_jar_signature(self): + config = fdroidserver.common.read_config() + config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') + fdroidserver.common.config = config + source_dir = os.path.join(basedir, 'signindex') + for f in ('testy.jar', 'guardianproject.jar'): + testfile = os.path.join(source_dir, f) + fdroidserver.common.verify_deprecated_jar_signature(testfile) + + testfile = os.path.join(source_dir, 'unsigned.jar') + with self.assertRaises(fdroidserver.index.VerificationException): + fdroidserver.common.verify_deprecated_jar_signature(testfile) + + def test_verify_apks(self): + config = fdroidserver.common.read_config() + fdroidserver.common.config = config + _mock_common_module_options_instance() + + sourceapk = os.path.join(basedir, 'urzip.apk') + + copyapk = os.path.join(self.testdir, 'urzip-copy.apk') + shutil.copy(sourceapk, copyapk) + self.assertTrue(fdroidserver.common.verify_apk_signature(copyapk)) + self.assertIsNone( + fdroidserver.common.verify_apks(sourceapk, copyapk, self.testdir) + ) + + unsignedapk = os.path.join(self.testdir, 'urzip-unsigned.apk') + with ZipFile(sourceapk, 'r') as apk: + with ZipFile(unsignedapk, 'w') as testapk: + for info in apk.infolist(): + if not info.filename.startswith('META-INF/'): + testapk.writestr(info, apk.read(info.filename)) + self.assertIsNone( + fdroidserver.common.verify_apks(sourceapk, unsignedapk, self.testdir) + ) + + twosigapk = os.path.join(self.testdir, 'urzip-twosig.apk') + otherapk = ZipFile(os.path.join(basedir, 'urzip-release.apk'), 'r') + with ZipFile(sourceapk, 'r') as apk: + with ZipFile(twosigapk, 'w') as testapk: + for info in apk.infolist(): + testapk.writestr(info, apk.read(info.filename)) + if info.filename.startswith('META-INF/'): + testapk.writestr(info.filename, otherapk.read(info.filename)) + otherapk.close() + self.assertFalse(fdroidserver.common.verify_apk_signature(twosigapk)) + self.assertIsNone(fdroidserver.common.verify_apks(sourceapk, twosigapk, self.testdir)) + + def test_get_certificate_with_chain_sandisk(self): + """Test that APK signatures with a cert chain are parsed like apksigner. 
+ + SanDisk signs their APKs with a X.509 certificate chain of + trust, so there are actually three certificates + included. apksigner only cares about the certificate in the + chain that actually signs the manifest. + + The correct value comes from: + apksigner verify --print-certs 883cbdae7aeb2e4b122e8ee8d89966c7062d0d49107a130235fa220a5b994a79.apk + + """ + cert = fdroidserver.common.get_certificate( + signature_block_file=Path('SANAPPSI.RSA').read_bytes(), + signature_file=Path('SANAPPSI.SF').read_bytes(), + ) + self.assertEqual( + 'ea0abbf2a142e4b167405d516b2cc408c4af4b29cd50ba281aa4470d4aab3e53', + fdroidserver.common.signer_fingerprint(cert), + ) + + def test_write_to_config(self): + """Test that config items can be added without messing up config.yml. + + The '_orig' key are where the original string values of paths + are stored. Paths have tilde expansion and env vars replaced + in fill_config_defaults(). + + """ + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """\ + # abc + # test: 'example value' + a_path: ~/android-sdk + + # comment + do_not_touch: good value + a_path: "!!!" + + key: "123" # inline""" + ) + ) + + config = {'key': 111, 'a_path_orig': '~/android-sdk'} + fdroidserver.common.write_to_config(config, 'key') + fdroidserver.common.write_to_config(config, 'a_path') + fdroidserver.common.write_to_config(config, 'test', value='test value') + fdroidserver.common.write_to_config(config, 'new_key', value='new') + + with open(fdroidserver.common.CONFIG_FILE) as fp: + self.assertEqual( + fp.read(), + textwrap.dedent( + """\ + # abc + test: test value + a_path: ~/android-sdk + + # comment + do_not_touch: good value + + key: 111 + new_key: new + """ + ), + ) + + def test_write_to_config_when_empty(self): + os.chdir(self.testdir) + config_yml = Path(fdroidserver.common.CONFIG_FILE) + config_yml.write_text('', encoding='utf-8') + fdroidserver.common.write_to_config({}, 'key', 'val') + self.assertEqual(config_yml.read_text(), 'key: val\n') + + def test_apk_name_regex(self): + good = [ + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_-123456.apk', + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456_abcdef0.apk', + 'urzip_-123456.apk', + 'a0_0.apk', + 'Z0_0.apk', + 'a0_0_abcdef0.apk', + 'a_a_a_a_0_abcdef0.apk', + 'a_____0.apk', + 'a_____123456_abcdef0.apk', + 'org.fdroid.fdroid_123456.apk', + # valid, but "_99999" is part of packageName rather than versionCode + 'org.fdroid.fdroid_99999_123456.apk', + # should be valid, but I can't figure out the regex since \w includes digits + # 'πÇÇπÇÇ现代汉语通用字българскиعربي1234ö_0_123bafd.apk', + ] + for name in good: + m = fdroidserver.common.APK_NAME_REGEX.match(name) + self.assertIsNotNone(m) + self.assertIn(m.group(2), ('-123456', '0', '123456')) + self.assertIn(m.group(3), ('abcdef0', None)) + + bad = [ + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456_abcdefg.apk', + 'urzip-_-198274.apk', + 'urzip-_0_123bafd.apk', + 'no spaces allowed_123.apk', + '0_0.apk', + '0_0_abcdef0.apk', + ] + for name in bad: + self.assertIsNone(fdroidserver.common.APK_NAME_REGEX.match(name)) + + def test_standard_file_name_regex(self): + good = [ + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_-123456.mp3', + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456.mov', + 'Document_-123456.pdf', + 'WTF_0.MOV', + 'Z0_0.ebk', + 'a_a_a_a_0.txt', + 'org.fdroid.fdroid.privileged.ota_123456.zip', + 'πÇÇπÇÇ现代汉语通用字българскиعربي1234ö_0.jpeg', + 'a_____0.PNG', + # valid, but "_99999" is part of packageName rather than versionCode + 'a_____99999_123456.zip', + 
'org.fdroid.fdroid_99999_123456.zip', + ] + for name in good: + m = fdroidserver.common.STANDARD_FILE_NAME_REGEX.match(name) + self.assertIsNotNone(m) + self.assertIn(m.group(2), ('-123456', '0', '123456')) + + bad = [ + 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_abcdefg.JPEG', + 'urzip-_-198274.zip', + 'urzip-_123bafd.pdf', + 'no spaces allowed_123.foobar', + 'a_____0.', + ] + for name in bad: + self.assertIsNone(fdroidserver.common.STANDARD_FILE_NAME_REGEX.match(name)) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_apk_signer_fingerprint(self): + + # fingerprints fetched with: keytool -printcert -file ____.RSA + testapks = (('repo/obb.main.oldversion_1444412523.apk', + '818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1'), + ('repo/obb.main.twoversions_1101613.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6'), + ('repo/obb.main.twoversions_1101617.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6')) + + for apkfile, keytoolcertfingerprint in testapks: + self.assertEqual(keytoolcertfingerprint, + fdroidserver.common.apk_signer_fingerprint(apkfile)) + + def test_find_apksigner_system_package_default_path(self): + """apksigner should be automatically used from the PATH""" + usr_bin_apksigner = '/usr/bin/apksigner' + if not os.path.isfile(usr_bin_apksigner): + self.skipTest('SKIPPING since %s is not installed!' % usr_bin_apksigner) + with mock.patch.dict(os.environ, clear=True): + os.environ['PATH'] = '/usr/local/bin:/usr/bin:/bin' + config = {} + fdroidserver.common.find_apksigner(config) + self.assertEqual(usr_bin_apksigner, config.get('apksigner')) + + def test_find_apksigner_config_overrides(self): + """apksigner should come from config before any auto-detection""" + os.chdir(self.testdir) + android_home = os.path.join(self.testdir, 'ANDROID_HOME') + do_not_use = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') + os.makedirs(os.path.dirname(do_not_use)) + with open(do_not_use, 'w') as fp: + fp.write('#!/bin/sh\ndate\n') + os.chmod(do_not_use, 0o0755) # nosec B103 + apksigner = os.path.join(self.testdir, 'apksigner') + config = {'apksigner': apksigner} + with mock.patch.dict(os.environ, clear=True): + os.environ['ANDROID_HOME'] = android_home + os.environ['PATH'] = '%s:/usr/local/bin:/usr/bin:/bin' % android_home + fdroidserver.common.find_apksigner(config) + self.assertEqual(apksigner, config.get('apksigner')) + + def test_find_apksigner_prefer_path(self): + """apksigner should come from PATH before ANDROID_HOME""" + os.chdir(self.testdir) + apksigner = os.path.join(self.testdir, 'apksigner') + with open(apksigner, 'w') as fp: + fp.write('#!/bin/sh\ndate\n') + os.chmod(apksigner, 0o0755) # nosec B103 + + android_home = os.path.join(self.testdir, 'ANDROID_HOME') + do_not_use = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') + os.makedirs(os.path.dirname(do_not_use)) + with open(do_not_use, 'w') as fp: + fp.write('#!/bin/sh\ndate\n') + os.chmod(do_not_use, 0o0755) # nosec B103 + + config = {'sdk_path': android_home} + with mock.patch.dict(os.environ, clear=True): + os.environ['ANDROID_HOME'] = android_home + os.environ['PATH'] = '%s:/usr/local/bin:/usr/bin:/bin' % os.path.dirname(apksigner) + fdroidserver.common.find_apksigner(config) + self.assertEqual(apksigner, config.get('apksigner')) + + def test_find_apksigner_prefer_newest(self): + """apksigner should be the newest available in ANDROID_HOME""" + os.chdir(self.testdir) + android_home = 
os.path.join(self.testdir, 'ANDROID_HOME') + + apksigner = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') + os.makedirs(os.path.dirname(apksigner)) + with open(apksigner, 'w') as fp: + fp.write('#!/bin/sh\necho 30.0.3\n') + os.chmod(apksigner, 0o0755) # nosec B103 + + do_not_use = os.path.join(android_home, 'build-tools', '29.0.3', 'apksigner') + os.makedirs(os.path.dirname(do_not_use)) + with open(do_not_use, 'w') as fp: + fp.write('#!/bin/sh\necho 29.0.3\n') + os.chmod(do_not_use, 0o0755) # nosec B103 + + config = {'sdk_path': android_home} + with mock.patch.dict(os.environ, clear=True): + os.environ['PATH'] = '/fake/path/to/avoid/conflicts' + fdroidserver.common.find_apksigner(config) + self.assertEqual(apksigner, config.get('apksigner')) + + def test_find_apksigner_system_package_android_home(self): + """Test that apksigner v30 or newer is found""" + os.chdir(self.testdir) + android_home = os.getenv('ANDROID_HOME') + if not android_home or not os.path.isdir(android_home): + self.skipTest('SKIPPING since ANDROID_HOME (%s) is not a dir!' % android_home) + build_tools = glob.glob(os.path.join(android_home, 'build-tools', '*', 'apksigner')) + if not build_tools: + self.skipTest('SKIPPING since ANDROID_HOME (%s) build-tools has no apksigner!' % android_home) + min_version = fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION + version = '0' + for bt in sorted(build_tools): + v = bt.split('/')[-2] + if v == 'debian': + continue + if LooseVersion(version) < LooseVersion(v): + version = v + if LooseVersion(version) < LooseVersion(min_version): + self.skipTest('SKIPPING since build-tools %s or higher is required!' % min_version) + fdroidserver.common.config = {'sdk_path': android_home} + with mock.patch.dict(os.environ, clear=True): + os.environ['PATH'] = '/fake/path/to/avoid/conflicts' + config = fdroidserver.common.read_config() + fdroidserver.common.find_apksigner(config) + self.assertEqual( + os.path.join(android_home, 'build-tools'), + os.path.dirname(os.path.dirname(config.get('apksigner'))), + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_sign_apk(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + if 'apksigner' not in config: + self.skipTest('SKIPPING test_sign_apk, apksigner not installed!') + + config['keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keystore'] = os.path.join(basedir, 'keystore.jks') + fdroidserver.common.config = config + fdroidserver.signindex.config = config + + unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') + signed = os.path.join(self.testdir, 'urzip-release.apk') + shutil.copy(os.path.join(basedir, 'urzip-release-unsigned.apk'), self.testdir) + + self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) + + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + self.assertTrue(os.path.isfile(signed)) + self.assertFalse(os.path.isfile(unsigned)) + self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) + + # now sign an APK with minSdkVersion >= 18 + unsigned = os.path.join(self.testdir, 'duplicate.permisssions_9999999-unsigned.apk') + signed = os.path.join(self.testdir, 'duplicate.permisssions_9999999.apk') + shutil.copy( + os.path.join(basedir, 'repo', 'duplicate.permisssions_9999999.apk'), + os.path.join(unsigned), + ) + 
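+        # the APK copied from repo/ above is already signed, so first strip its
+        # existing v1 (JAR) signature data to get an unsigned file, then re-sign
+        # it with sign_apk() below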
fdroidserver.common.apk_strip_v1_signatures(unsigned, strip_manifest=True) + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + self.assertTrue(os.path.isfile(signed)) + self.assertFalse(os.path.isfile(unsigned)) + self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) + self.assertEqual('18', fdroidserver.common.get_androguard_APK(signed).get_min_sdk_version()) + + shutil.copy(os.path.join(basedir, 'minimal_targetsdk_30_unsigned.apk'), self.testdir) + unsigned = os.path.join(self.testdir, 'minimal_targetsdk_30_unsigned.apk') + signed = os.path.join(self.testdir, 'minimal_targetsdk_30.apk') + + self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + + self.assertTrue(os.path.isfile(signed)) + self.assertFalse(os.path.isfile(unsigned)) + self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) + # verify it has a v2 signature + self.assertTrue(fdroidserver.common.get_androguard_APK(signed).is_signed_v2()) + + shutil.copy(os.path.join(basedir, 'no_targetsdk_minsdk30_unsigned.apk'), self.testdir) + unsigned = os.path.join(self.testdir, 'no_targetsdk_minsdk30_unsigned.apk') + signed = os.path.join(self.testdir, 'no_targetsdk_minsdk30_signed.apk') + + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) + self.assertTrue(fdroidserver.common.get_androguard_APK(signed).is_signed_v2()) + + shutil.copy(os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk'), self.testdir) + unsigned = os.path.join(self.testdir, 'no_targetsdk_minsdk1_unsigned.apk') + signed = os.path.join(self.testdir, 'no_targetsdk_minsdk1_signed.apk') + + self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + + self.assertTrue(os.path.isfile(signed)) + self.assertFalse(os.path.isfile(unsigned)) + self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) + + @unittest.skipIf(os.getuid() == 0, 'This is meaningless when run as root') + def test_sign_apk_fail(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + if 'apksigner' not in config: + self.skipTest('SKIPPING test_sign_apk_fail, apksigner not installed!') + + config['keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keystore'] = os.path.join(basedir, 'keystore.jks') + fdroidserver.common.config = config + fdroidserver.signindex.config = config + + unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') + signed = os.path.join(self.testdir, 'urzip-release.apk') + shutil.copy(os.path.join(basedir, 'urzip-release-unsigned.apk'), self.testdir) + + os.chmod(unsigned, 0o000) + with self.assertRaises(fdroidserver.exception.BuildException): + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + os.chmod(unsigned, 0o777) # nosec B103 + self.assertTrue(os.path.isfile(unsigned)) + self.assertFalse(os.path.isfile(signed)) + + def test_sign_apk_corrupt(self): + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + if 'apksigner' not in config: + self.skipTest('SKIPPING test_sign_apk_corrupt, apksigner not installed!') + + config['keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 
'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keystore'] = os.path.join(basedir, 'keystore.jks') + fdroidserver.common.config = config + fdroidserver.signindex.config = config + + unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') + signed = os.path.join(self.testdir, 'urzip-release.apk') + with open(unsigned, 'w') as fp: + fp.write('this is a corrupt APK') + + with self.assertRaises(fdroidserver.exception.BuildException): + fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) + self.assertTrue(os.path.isfile(unsigned)) + self.assertFalse(os.path.isfile(signed)) + + @unittest.skipUnless( + os.path.exists('tests/SystemWebView-repack.apk'), "file too big for sdist" + ) + def test_resign_apk(self): + """When using apksigner, it should resign signed APKs""" + _mock_common_module_options_instance() + config = fdroidserver.common.read_config() + if 'apksigner' not in config: + self.skipTest('SKIPPING test_resign_apk, apksigner not installed!') + if sys.byteorder == 'big': + self.skipTest('SKIPPING androguard is not ported to big-endian') + + config['keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keystore'] = os.path.join(basedir, 'keystore.jks') + fdroidserver.common.config = config + fdroidserver.signindex.config = config + + os.chdir(self.testdir) + os.mkdir('unsigned') + os.mkdir('repo') + + for apk in ( + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'v2.only.sig_2.apk', + 'SystemWebView-repack.apk', + ): + original = os.path.join(basedir, apk) + unsigned = os.path.join('unsigned', apk) + resign = os.path.join('repo', apk) + shutil.copy(original, unsigned) + fdroidserver.common.sign_apk(unsigned, resign, config['keyalias']) + self.assertTrue( + fdroidserver.common.verify_apk_signature(resign), apk + " verifies" + ) + self.assertTrue(os.path.isfile(resign)) + self.assertFalse(os.path.isfile(unsigned)) + self.assertNotEqual( + fdroidserver.common.get_first_signer_certificate(original), + fdroidserver.common.get_first_signer_certificate(resign) + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_apk_id(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + self._set_build_tools() + try: + config['aapt'] = fdroidserver.common.find_sdk_tools_cmd('aapt') + except fdroidserver.exception.FDroidException: + pass # aapt is not required if androguard is present + + testcases = [ + ('repo/obb.main.twoversions_1101613.apk', 'obb.main.twoversions', 1101613, '0.1'), + ('org.bitbucket.tickytacky.mirrormirror_1.apk', 'org.bitbucket.tickytacky.mirrormirror', 1, '1.0'), + ('org.bitbucket.tickytacky.mirrormirror_2.apk', 'org.bitbucket.tickytacky.mirrormirror', 2, '1.0.1'), + ('org.bitbucket.tickytacky.mirrormirror_3.apk', 'org.bitbucket.tickytacky.mirrormirror', 3, '1.0.2'), + ('org.bitbucket.tickytacky.mirrormirror_4.apk', 'org.bitbucket.tickytacky.mirrormirror', 4, '1.0.3'), + ('org.dyndns.fules.ck_20.apk', 'org.dyndns.fules.ck', 20, 'v1.6pre2'), + ('issue-1128-min-sdk-30-poc.apk', 'org.fdroid.ci', 1, '1.0'), + ('issue-1128-poc1.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), + ('issue-1128-poc2.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), + ('issue-1128-poc3a.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), + ('issue-1128-poc3b.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), + ('urzip.apk', 
'info.guardianproject.urzip', 100, '0.1'), + ('urzip-badcert.apk', 'info.guardianproject.urzip', 100, '0.1'), + ('urzip-badsig.apk', 'info.guardianproject.urzip', 100, '0.1'), + ('urzip-release.apk', 'info.guardianproject.urzip', 100, '0.1'), + ('urzip-release-unsigned.apk', 'info.guardianproject.urzip', 100, '0.1'), + ('repo/com.politedroid_3.apk', 'com.politedroid', 3, '1.2'), + ('repo/com.politedroid_4.apk', 'com.politedroid', 4, '1.3'), + ('repo/com.politedroid_5.apk', 'com.politedroid', 5, '1.4'), + ('repo/com.politedroid_6.apk', 'com.politedroid', 6, '1.5'), + ('repo/duplicate.permisssions_9999999.apk', 'duplicate.permisssions', 9999999, ''), + ('repo/info.zwanenburg.caffeinetile_4.apk', 'info.zwanenburg.caffeinetile', 4, '1.3'), + ('repo/obb.main.oldversion_1444412523.apk', 'obb.main.oldversion', 1444412523, '0.1'), + ('repo/obb.mainpatch.current_1619_another-release-key.apk', 'obb.mainpatch.current', 1619, '0.1'), + ('repo/obb.mainpatch.current_1619.apk', 'obb.mainpatch.current', 1619, '0.1'), + ('repo/obb.main.twoversions_1101613.apk', 'obb.main.twoversions', 1101613, '0.1'), + ('repo/obb.main.twoversions_1101615.apk', 'obb.main.twoversions', 1101615, '0.1'), + ('repo/obb.main.twoversions_1101617.apk', 'obb.main.twoversions', 1101617, '0.1'), + ('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', 'info.guardianproject.urzip', 100, '0.1'), + ] + for apkfilename, appid, versionCode, versionName in testcases: + a, vc, vn = fdroidserver.common.get_apk_id(apkfilename) + self.assertEqual(appid, a, 'androguard appid parsing failed for ' + apkfilename) + self.assertEqual(versionName, vn, 'androguard versionName parsing failed for ' + apkfilename) + self.assertEqual(versionCode, vc, 'androguard versionCode parsing failed for ' + apkfilename) + if 'aapt' in config: + a, vc, vn = fdroidserver.common.get_apk_id_aapt(apkfilename) + self.assertEqual(appid, a, 'aapt appid parsing failed for ' + apkfilename) + self.assertEqual(versionCode, vc, 'aapt versionCode parsing failed for ' + apkfilename) + self.assertEqual(versionName, vn, 'aapt versionName parsing failed for ' + apkfilename) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_apk_id_bad_apk(self): + """get_apk_id should never return None on error, only raise exceptions""" + with self.assertRaises(KeyError): + fdroidserver.common.get_apk_id('Norway_bouvet_europe_2.obf.zip') + shutil.copy('Norway_bouvet_europe_2.obf.zip', self.testdir) + os.chdir(self.testdir) + with ZipFile('Norway_bouvet_europe_2.obf.zip', 'a') as zipfp: + zipfp.writestr('AndroidManifest.xml', 'not a manifest') + with self.assertRaises(KeyError): + fdroidserver.common.get_apk_id('Norway_bouvet_europe_2.obf.zip') + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_apk_id_bad_path(self): + with self.assertRaises(FDroidException): + fdroidserver.common.get_apk_id('nope') + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_apk_id_api_call(self): + self.assertEqual( + ('info.guardianproject.urzip', 100, '0.1'), + fdroidserver.common.get_apk_id('urzip.apk'), + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_apk_id_bad_zip(self): + os.chdir(self.testdir) + badzip = 'badzip.apk' + with open(badzip, 'w') as fp: + fp.write('not a ZIP') + with self.assertRaises(BadZipFile): + fdroidserver.common.get_apk_id(badzip) + + def test_get_apk_id_aapt_regex(self): + files = 
glob.glob(os.path.join(basedir, 'build-tools', '[1-9]*.*', '*.txt')) + self.assertNotEqual(0, len(files)) + for f in files: + appid, versionCode = os.path.splitext(os.path.basename(f))[0][12:].split('_') + with open(f, encoding='utf-8') as fp: + m = fdroidserver.common.APK_ID_TRIPLET_REGEX.match(fp.read()) + if m: + self.assertEqual(appid, m.group(1)) + self.assertEqual(versionCode, m.group(2)) + else: + self.fail('could not parse aapt output: {}'.format(f)) + + def test_get_native_code(self): + testcases = [ + ('repo/obb.main.twoversions_1101613.apk', []), + ('org.bitbucket.tickytacky.mirrormirror_1.apk', []), + ('org.bitbucket.tickytacky.mirrormirror_2.apk', []), + ('org.bitbucket.tickytacky.mirrormirror_3.apk', []), + ('org.bitbucket.tickytacky.mirrormirror_4.apk', []), + ('org.dyndns.fules.ck_20.apk', ['arm64-v8a', 'armeabi', 'armeabi-v7a', 'mips', 'mips64', 'x86', 'x86_64']), + ('urzip.apk', []), + ('urzip-badcert.apk', []), + ('urzip-badsig.apk', []), + ('urzip-release.apk', []), + ('urzip-release-unsigned.apk', []), + ('repo/com.politedroid_3.apk', []), + ('repo/com.politedroid_4.apk', []), + ('repo/com.politedroid_5.apk', []), + ('repo/com.politedroid_6.apk', []), + ('repo/duplicate.permisssions_9999999.apk', []), + ('repo/info.zwanenburg.caffeinetile_4.apk', []), + ('repo/obb.main.oldversion_1444412523.apk', []), + ('repo/obb.mainpatch.current_1619_another-release-key.apk', []), + ('repo/obb.mainpatch.current_1619.apk', []), + ('repo/obb.main.twoversions_1101613.apk', []), + ('repo/obb.main.twoversions_1101615.apk', []), + ('repo/obb.main.twoversions_1101617.apk', []), + ('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', []), + ] + for apkfilename, native_code in testcases: + nc = fdroidserver.common.get_native_code(apkfilename) + self.assertEqual(native_code, nc) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_sdkversions_androguard(self): + """This is a sanity test that androguard isn't broken""" + + def get_minSdkVersion(apkfile): + apk = fdroidserver.common.get_androguard_APK(apkfile) + return fdroidserver.common.get_min_sdk_version(apk) + + def get_targetSdkVersion(apkfile): + apk = fdroidserver.common.get_androguard_APK(apkfile) + return apk.get_effective_target_sdk_version() + + self.assertEqual(4, get_minSdkVersion('bad-unicode-πÇÇ现代通用字-български-عربي1.apk')) + self.assertEqual(30, get_minSdkVersion('issue-1128-min-sdk-30-poc.apk')) + self.assertEqual(29, get_minSdkVersion('issue-1128-poc1.apk')) + self.assertEqual(29, get_minSdkVersion('issue-1128-poc2.apk')) + self.assertEqual(23, get_minSdkVersion('issue-1128-poc3a.apk')) + self.assertEqual(23, get_minSdkVersion('issue-1128-poc3b.apk')) + self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_1.apk')) + self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_2.apk')) + self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_3.apk')) + self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_4.apk')) + self.assertEqual(7, get_minSdkVersion('org.dyndns.fules.ck_20.apk')) + self.assertEqual(4, get_minSdkVersion('urzip.apk')) + self.assertEqual(4, get_minSdkVersion('urzip-badcert.apk')) + self.assertEqual(4, get_minSdkVersion('urzip-badsig.apk')) + self.assertEqual(4, get_minSdkVersion('urzip-release.apk')) + self.assertEqual(4, get_minSdkVersion('urzip-release-unsigned.apk')) + self.assertEqual(27, get_minSdkVersion('v2.only.sig_2.apk')) + self.assertEqual(3, 
get_minSdkVersion('repo/com.politedroid_3.apk')) + self.assertEqual(3, get_minSdkVersion('repo/com.politedroid_4.apk')) + self.assertEqual(3, get_minSdkVersion('repo/com.politedroid_5.apk')) + self.assertEqual(14, get_minSdkVersion('repo/com.politedroid_6.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.main.oldversion_1444412523.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.mainpatch.current_1619_another-release-key.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.mainpatch.current_1619.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101613.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101615.apk')) + self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101617.apk')) + self.assertEqual(4, get_minSdkVersion('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk')) + + self.assertEqual(30, get_targetSdkVersion('minimal_targetsdk_30_unsigned.apk')) + self.assertEqual(1, get_targetSdkVersion('no_targetsdk_minsdk1_unsigned.apk')) + self.assertEqual(30, get_targetSdkVersion('no_targetsdk_minsdk30_unsigned.apk')) + + def test_apk_release_name(self): + appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('com.serwylo.lexica_905.apk') + self.assertEqual(appid, 'com.serwylo.lexica') + self.assertEqual(vercode, 905) + self.assertEqual(sigfp, None) + + appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('com.serwylo.lexica_905_c82e0f6.apk') + self.assertEqual(appid, 'com.serwylo.lexica') + self.assertEqual(vercode, 905) + self.assertEqual(sigfp, 'c82e0f6') + + appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('beverly_hills-90210.apk') + self.assertEqual(appid, None) + self.assertEqual(vercode, None) + self.assertEqual(sigfp, None) + + def test_metadata_find_developer_signature(self): + sig = fdroidserver.common.metadata_find_developer_signature('org.smssecure.smssecure') + self.assertEqual('b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', sig) + + def test_parse_xml(self): + manifest = Path('source-files/fdroid/fdroidclient/AndroidManifest.xml') + parsed = fdroidserver.common.parse_xml(manifest) + self.assertIsNotNone(parsed) + self.assertEqual(str(type(parsed)), "<class 'xml.etree.ElementTree.Element'>") + + def test_parse_androidmanifests(self): + app = fdroidserver.metadata.App() + app.id = 'org.fdroid.fdroid' + paths = [ + Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), + Path('source-files/fdroid/fdroidclient/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('0.94-test', 940, 'org.fdroid.fdroid'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + app.AutoName = 'android-chat' + app.RepoType = 'git' + url = 'https://github.com/wildfirechat/android-chat.git' + app.SourceCode = url.rstrip('.git') + app.Repo = url + paths = [ + Path('source-files/cn.wildfirechat.chat/avenginekit/build.gradle'), + Path('source-files/cn.wildfirechat.chat/build.gradle'), + Path('source-files/cn.wildfirechat.chat/client/build.gradle'), + Path('source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml'), + Path('source-files/cn.wildfirechat.chat/emojilibrary/build.gradle'), + Path('source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle'), + Path('source-files/cn.wildfirechat.chat/imagepicker/build.gradle'), + Path('source-files/cn.wildfirechat.chat/mars-core-release/build.gradle'), + Path('source-files/cn.wildfirechat.chat/push/build.gradle'), +
Path('source-files/cn.wildfirechat.chat/settings.gradle'), + Path('source-files/cn.wildfirechat.chat/chat/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('0.6.9', 23, 'cn.wildfirechat.chat'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + app.Repo = 'https://github.com/Integreight/1Sheeld-Android-App' + paths = [ + Path('source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml'), + Path('source-files/com.integreight.onesheeld/pagerIndicator/build.gradle'), + Path('source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml'), + Path('source-files/com.integreight.onesheeld/oneSheeld/build.gradle'), + Path('source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml'), + Path('source-files/com.integreight.onesheeld/localeapi/build.gradle'), + Path('source-files/com.integreight.onesheeld/build.gradle'), + Path('source-files/com.integreight.onesheeld/settings.gradle'), + Path('source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml'), + Path('source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle'), + Path('source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml'), + Path('source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.9.0', 170521, 'com.integreight.onesheeld'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + app.id = 'dev.patrickgold.florisboard' + paths = [ + Path('source-files/dev.patrickgold.florisboard/app/build.gradle.kts'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('0.3.10', 29, 'dev.patrickgold.florisboard'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + app.id = 'com.ubergeek42.WeechatAndroid' + paths = [ + Path('source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts'), + Path('source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.8.1', 10801, None), + fdroidserver.common.parse_androidmanifests(paths, app)) + + def test_parse_androidmanifests_ignore(self): + app = fdroidserver.metadata.App() + app.id = 'org.fdroid.fdroid' + app.UpdateCheckIgnore = '-test' + paths = [ + Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), + Path('source-files/fdroid/fdroidclient/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('Ignore', None, 'org.fdroid.fdroid'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + def test_parse_androidmanifests_with_flavor(self): + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['devVersion'] + app['Builds'] = [build] + app.id = 'org.fdroid.fdroid.dev' + paths = [ + Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), + Path('source-files/fdroid/fdroidclient/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('0.95-dev', 949, 'org.fdroid.fdroid.dev'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['free'] + app['Builds'] = [build] + app.id = 'eu.siacs.conversations' + paths 
= [ + Path('source-files/eu.siacs.conversations/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.23.1', 245, 'eu.siacs.conversations'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['generic'] + app['Builds'] = [build] + app.id = 'com.nextcloud.client' + paths = [ + Path('source-files/com.nextcloud.client/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('2.0.0', 20000099, 'com.nextcloud.client'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['versionDev'] + app['Builds'] = [build] + app.id = 'com.nextcloud.android.beta' + paths = [ + Path('source-files/com.nextcloud.client/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('20171223', 20171223, 'com.nextcloud.android.beta'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['standard'] + app['Builds'] = [build] + app.id = 'at.bitfire.davdroid' + paths = [ + Path('source-files/at.bitfire.davdroid/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.9.8.1-ose', 197, 'at.bitfire.davdroid'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['libre'] + app['Builds'] = [build] + app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.libre' + paths = [ + Path('source-files/com.kunzisoft.testcase/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.0-libre', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix.libre'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['pro'] + app['Builds'] = [build] + app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.pro' + paths = [ + Path('source-files/com.kunzisoft.testcase/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('20180430-pro', 20180430, 'com.kunzisoft.fdroidtest.applicationidsuffix.pro'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['free'] + app['Builds'] = [build] + app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix' + paths = [ + Path('source-files/com.kunzisoft.testcase/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.0-free', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['underscore'] + app['Builds'] = [build] + app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore' + paths = [ + Path('source-files/com.kunzisoft.testcase/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('20180430-underscore', 20180430, 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = 
fdroidserver.metadata.Build() + build.gradle = ['underscore_first'] + app['Builds'] = [build] + app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first' + paths = [ + Path('source-files/com.kunzisoft.testcase/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.0', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['fdroid'] + app['Builds'] = [build] + app.id = 'com.github.jameshnsears.quoteunquote' + paths = [ + Path('source-files/com.github.jameshnsears.quoteunquote/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('2.5.2-fdroid', 73, 'com.github.jameshnsears.quoteunquote'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['fdroidFlavor'] + app['Builds'] = [build] + app.id = 'com.jens.automation2' + paths = [ + Path('source-files/com.jens.automation2/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('1.6.34-fdroid', 105, 'com.jens.automation2'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + app = fdroidserver.metadata.App() + build = fdroidserver.metadata.Build() + build.gradle = ['VAR', 'prod'] + app['Builds'] = [build] + app.id = 'de.varengold.activeTAN' + paths = [ + Path('source-files/de.varengold.activeTAN/build.gradle'), + ] + for path in paths: + self.assertTrue(os.path.isfile(path)) + self.assertEqual(('2021-06-30', 34, 'de.varengold.activeTAN'), + fdroidserver.common.parse_androidmanifests(paths, app)) + + def test_parse_srclib_spec_good(self): + self.assertEqual(fdroidserver.common.parse_srclib_spec('osmand-external-skia@android/oreo'), + ('osmand-external-skia', 'android/oreo', None, None)) + self.assertEqual(fdroidserver.common.parse_srclib_spec('1:appcompat@v7'), + ('appcompat', 'v7', '1', None)) + self.assertEqual(fdroidserver.common.parse_srclib_spec('1:Support/v7/appcompat@android-4.4_r1.1'), + ('Support', 'android-4.4_r1.1', '1', 'v7/appcompat')) + + def test_parse_srclib_spec_many_ats(self): + self.assertEqual( + fdroidserver.common.parse_srclib_spec('foo@@v2'), ('foo', '@v2', None, None) + ) + self.assertEqual( + fdroidserver.common.parse_srclib_spec('bar@2@f'), ('bar', '2@f', None, None) + ) + + def test_parse_srclib_spec_none(self): + with self.assertRaises(MetaDataException): + fdroidserver.common.parse_srclib_spec(None) + + def test_parse_srclib_spec_no_ref(self): + with self.assertRaises(MetaDataException): + fdroidserver.common.parse_srclib_spec('no-ref') + with self.assertRaises(MetaDataException): + fdroidserver.common.parse_srclib_spec('noref@') + + def test_parse_srclib_spec_no_name(self): + with self.assertRaises(MetaDataException): + fdroidserver.common.parse_srclib_spec('@ref') + + def test_remove_signing_keys(self): + shutil.copytree( + os.path.join(basedir, 'source-files'), + os.path.join(self.testdir, 'source-files'), + ) + os.chdir(self.testdir) + with_signingConfigs = [ + 'source-files/com.seafile.seadroid2/app/build.gradle', + 'source-files/eu.siacs.conversations/build.gradle', + 'source-files/info.guardianproject.ripple/build.gradle', + 'source-files/open-keychain/open-keychain/build.gradle', + 'source-files/open-keychain/open-keychain/OpenKeychain/build.gradle', + 
'source-files/org.tasks/app/build.gradle.kts', + 'source-files/osmandapp/osmand/build.gradle', + 'source-files/ut.ewh.audiometrytest/app/build.gradle', + ] + for f in with_signingConfigs: + build_dir = os.path.join(*f.split(os.sep)[:2]) + if not os.path.isdir(build_dir): + continue + fdroidserver.common.remove_signing_keys(build_dir) + fromfile = os.path.join(basedir, f) + with open(f) as fp: + content = fp.read() + if 'signingConfig' in content: + with open(f) as fp: + b = fp.readlines() + with open(fromfile) as fp: + a = fp.readlines() + diff = difflib.unified_diff(a, b, fromfile, f) + sys.stdout.writelines(diff) + self.assertFalse(True) + do_not_modify = [ + 'source-files/Zillode/syncthing-silk/build.gradle', + 'source-files/at.bitfire.davdroid/build.gradle', + 'source-files/com.kunzisoft.testcase/build.gradle', + 'source-files/com.nextcloud.client/build.gradle', + 'source-files/fdroid/fdroidclient/build.gradle', + 'source-files/firebase-suspect/app/build.gradle', + 'source-files/firebase-suspect/build.gradle', + 'source-files/firebase-allowlisted/app/build.gradle', + 'source-files/firebase-allowlisted/build.gradle', + 'source-files/org.mozilla.rocket/app/build.gradle', + 'source-files/realm/react-native/android/build.gradle', + 'triple-t-2/build/org.piwigo.android/app/build.gradle', + ] + for f in do_not_modify: + build_dir = os.path.join(*f.split(os.sep)[:2]) + if not os.path.isdir(build_dir): + continue + fdroidserver.common.remove_signing_keys(build_dir) + fromfile = os.path.join(basedir, f) + with open(fromfile) as fp: + a = fp.readlines() + with open(f) as fp: + b = fp.readlines() + diff = list(difflib.unified_diff(a, b, fromfile, f)) + self.assertEqual(0, len(diff), 'This file should not have been modified:\n' + ''.join(diff)) + + def test_calculate_math_string(self): + self.assertEqual(1234, + fdroidserver.common.calculate_math_string('1234')) + self.assertEqual((1 + 1) * 2, + fdroidserver.common.calculate_math_string('(1 + 1) * 2')) + self.assertEqual((1 - 1) * 2 + 3 * 1 - 1, + fdroidserver.common.calculate_math_string('(1 - 1) * 2 + 3 * 1 - 1')) + self.assertEqual(0 - 12345, + fdroidserver.common.calculate_math_string('0 - 12345')) + self.assertEqual(0xffff, + fdroidserver.common.calculate_math_string('0xffff')) + self.assertEqual(0xcafe * 123, + fdroidserver.common.calculate_math_string('0xcafe * 123')) + self.assertEqual(-1, + fdroidserver.common.calculate_math_string('-1')) + with self.assertRaises(SyntaxError): + fdroidserver.common.calculate_math_string('__import__("urllib")') + with self.assertRaises(SyntaxError): + fdroidserver.common.calculate_math_string('self') + with self.assertRaises(SyntaxError): + fdroidserver.common.calculate_math_string('Ox9()') + with self.assertRaises(SyntaxError): + fdroidserver.common.calculate_math_string('1+1; print(1)') + with self.assertRaises(SyntaxError): + fdroidserver.common.calculate_math_string('1-1 # no comment') + + def test_calculate_IPFS_cid_with_no_tool(self): + fdroidserver.common.config = {'ipfs_cid': None} + self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('urzip.apk')) + self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('FileDoesNotExist')) + + @unittest.skipUnless(shutil.which('ipfs_cid'), 'calculate_IPFS_cid needs ipfs_cid') + def test_calculate_IPFS_cid(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('FileDoesNotExist')) + self.assertEqual( + 
fdroidserver.common.calculate_IPFS_cid('urzip.apk'), + "bafybeigmtgrwyvj77jaflje2rf533haeqtpu2wtwsctryjusjnsawacsam", + ) + + def test_deploy_build_log_with_rsync_with_id_file(self): + + mocklogcontent = bytes( + textwrap.dedent( + """\ + build started + building... + build completed + profit!""" + ), + 'utf-8', + ) + + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = False + fdroidserver.common.config = {} + fdroidserver.common.config['serverwebroot'] = [ + {'url': 'example.com:/var/www/fdroid/'}, + {'url': 'example.com:/var/www/fbot/'}, + ] + fdroidserver.common.config['deploy_process_logs'] = True + fdroidserver.common.config['identity_file'] = 'ssh/id_rsa' + + assert_subprocess_call_iteration = 0 + + def assert_subprocess_call(cmd): + nonlocal assert_subprocess_call_iteration + logging.debug(cmd) + if assert_subprocess_call_iteration == 0: + self.assertListEqual(['rsync', + '--archive', + '--delete-after', + '--safe-links', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ssh/id_rsa', + cmd[6], + 'example.com:/var/www/fdroid/repo/'], + cmd) + self.assertTrue(cmd[6].endswith('/com.example.app_4711.log.gz')) + with gzip.open(cmd[6], 'r') as f: + self.assertTrue(f.read(), mocklogcontent) + elif assert_subprocess_call_iteration == 1: + self.assertListEqual(['rsync', + '--archive', + '--delete-after', + '--safe-links', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ssh/id_rsa', + cmd[6], + 'example.com:/var/www/fbot/repo/'], + cmd) + self.assertTrue(cmd[6].endswith('/com.example.app_4711.log.gz')) + with gzip.open(cmd[6], 'r') as f: + self.assertTrue(f.read(), mocklogcontent) + else: + self.fail('unexpected subprocess.call invocation ({})' + .format(assert_subprocess_call_iteration)) + assert_subprocess_call_iteration += 1 + return 0 + + with mock.patch('subprocess.call', + side_effect=assert_subprocess_call): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + fdroidserver.common.deploy_build_log_with_rsync( + 'com.example.app', 4711, mocklogcontent) + + expected_log_path = os.path.join(tmpdir, 'repo', 'com.example.app_4711.log.gz') + self.assertTrue(os.path.isfile(expected_log_path)) + with gzip.open(expected_log_path, 'r') as f: + self.assertEqual(f.read(), mocklogcontent) + + def test_deploy_status_json(self): + os.chdir(self.testdir) + fakesubcommand = 'fakesubcommand' + fake_timestamp = 1234567890 + fakeserver = 'example.com:/var/www/fbot/' + expected_dir = os.path.join(self.testdir, fakeserver.replace(':', ''), 'repo', 'status') + + fdroidserver.common.options = mock.Mock() + fdroidserver.common.config = {} + fdroidserver.common.config['serverwebroot'] = [{'url': fakeserver}] + fdroidserver.common.config['identity_file'] = 'ssh/id_rsa' + + def assert_subprocess_call(cmd): + dest_path = os.path.join(self.testdir, cmd[-1].replace(':', '')) + if not os.path.exists(dest_path): + os.makedirs(dest_path) + return subprocess.run(cmd[:-1] + [dest_path]).returncode + + with mock.patch('subprocess.call', side_effect=assert_subprocess_call): + with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]): + output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp)) + self.assertFalse(os.path.exists(os.path.join(expected_dir, 'running.json'))) + with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]): + fdroidserver.common.write_status_json(output) + self.assertFalse(os.path.exists(os.path.join(expected_dir, fakesubcommand + '.json'))) + + 
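# With deploy_process_logs enabled below, setup_status_output() and write_status_json() should deploy running.json and the per-subcommand JSON into the mocked serverwebroot status dir. +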
fdroidserver.common.config['deploy_process_logs'] = True + + output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp)) + expected_path = os.path.join(expected_dir, 'running.json') + self.assertTrue(os.path.isfile(expected_path)) + with open(expected_path) as fp: + data = json.load(fp) + self.assertEqual(fake_timestamp * 1000, data['startTimestamp']) + self.assertFalse('endTimestamp' in data) + + testvalue = 'asdfasd' + output['testvalue'] = testvalue + + fdroidserver.common.write_status_json(output) + expected_path = os.path.join(expected_dir, fakesubcommand + '.json') + self.assertTrue(os.path.isfile(expected_path)) + with open(expected_path) as fp: + data = json.load(fp) + self.assertEqual(fake_timestamp * 1000, data['startTimestamp']) + self.assertTrue('endTimestamp' in data) + self.assertEqual(testvalue, output.get('testvalue')) + + def test_string_is_integer(self): + self.assertTrue(fdroidserver.common.string_is_integer('0x10')) + self.assertTrue(fdroidserver.common.string_is_integer('010')) + self.assertTrue(fdroidserver.common.string_is_integer('123')) + self.assertFalse(fdroidserver.common.string_is_integer('0xgg')) + self.assertFalse(fdroidserver.common.string_is_integer('01g')) + self.assertFalse(fdroidserver.common.string_is_integer('o123')) + + def test_version_code_string_to_int(self): + self.assertEqual(16, fdroidserver.common.version_code_string_to_int('0x10')) + self.assertEqual(198712389, fdroidserver.common.version_code_string_to_int('198712389')) + self.assertEqual(8, fdroidserver.common.version_code_string_to_int('0o10')) + self.assertEqual(10, fdroidserver.common.version_code_string_to_int('010')) + self.assertEqual(123, fdroidserver.common.version_code_string_to_int('0000123')) + self.assertEqual(-42, fdroidserver.common.version_code_string_to_int('-42')) + + def test_getsrclibvcs(self): + fdroidserver.metadata.srclibs = {'somelib': {'RepoType': 'git'}, + 'yeslib': {'RepoType': 'hg'}, + 'nolib': {'RepoType': 'git-svn'}} + self.assertEqual(fdroidserver.common.getsrclibvcs('somelib'), 'git') + self.assertEqual(fdroidserver.common.getsrclibvcs('yeslib'), 'hg') + self.assertEqual(fdroidserver.common.getsrclibvcs('nolib'), 'git-svn') + with self.assertRaises(VCSException): + fdroidserver.common.getsrclibvcs('nonexistentlib') + + def test_getsrclib_not_found(self): + fdroidserver.common.config = {'sdk_path': '', + 'java_paths': {}} + fdroidserver.metadata.srclibs = {} + + with self.assertRaisesRegex(VCSException, 'srclib SDL not found.'): + fdroidserver.common.getsrclib('SDL@release-2.0.3', 'srclib') + + def test_getsrclib_gotorevision_raw(self): + fdroidserver.common.config = {'sdk_path': '', + 'java_paths': {}} + fdroidserver.metadata.srclibs = {'SDL': {'RepoType': 'git', + 'Repo': ''}} + + vcs = mock.Mock() + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + os.makedirs(os.path.join('srclib', 'SDL')) + with mock.patch('fdroidserver.common.getvcs', return_value=vcs): + ret = fdroidserver.common.getsrclib('SDL', 'srclib', raw=True) + self.assertEqual(vcs.srclib, ('SDL', None, 'srclib/SDL')) + self.assertEqual(ret, vcs) + + def test_getsrclib_gotorevision_ref(self): + fdroidserver.common.config = {'sdk_path': '', + 'java_paths': {}} + fdroidserver.metadata.srclibs = {'ACRA': {'RepoType': 'git', + 'Repo': 'https://github.com/ACRA/acra.git', + 'Subdir': None, + 'Prepare': None}} + + vcs = mock.Mock() + skm = mock.Mock() + dfm = mock.Mock() + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + 
os.makedirs(os.path.join('srclib', 'ACRA')) + with mock.patch('fdroidserver.common.getvcs', return_value=vcs): + with mock.patch('fdroidserver.common.remove_signing_keys', skm): + with mock.patch('fdroidserver.common.remove_debuggable_flags', dfm): + ret = fdroidserver.common.getsrclib('ACRA@acra-4.6.2', 'srclib') + self.assertEqual(vcs.srclib, ('ACRA', None, 'srclib/ACRA')) + vcs.gotorevision.assert_called_once_with('acra-4.6.2', True) + skm.assert_called_once_with('srclib/ACRA') + dfm.assert_called_once_with('srclib/ACRA') + self.assertEqual(ret, ('ACRA', None, 'srclib/ACRA')) + + def test_run_yamllint_wellformed(self): + try: + import yamllint.config + + yamllint.config # make pyflakes ignore this + except ImportError: + self.skipTest('yamllint not installed') + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('wellformed.yml', 'w') as f: + f.write( + textwrap.dedent( + '''\ + yaml: + file: + - for + - test + purposeses: true + ''' + ) + ) + result = fdroidserver.common.run_yamllint('wellformed.yml') + self.assertEqual(result, '') + + def test_run_yamllint_malformed(self): + try: + import yamllint.config + + yamllint.config # make pyflakes ignore this + except ImportError: + self.skipTest('yamllint not installed') + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('malformed.yml', 'w') as f: + f.write( + textwrap.dedent( + '''\ + yaml: + - that + fails + - test + ''' + ) + ) + result = fdroidserver.common.run_yamllint('malformed.yml') + self.assertIsNotNone(result) + self.assertNotEqual(result, '') + + def test_with_no_config(self): + """It should set defaults if no config file is found""" + os.chdir(self.testdir) + self.assertFalse(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertIsNotNone(config.get('char_limits')) + + def test_with_zero_size_config(self): + """It should set defaults if config file has nothing in it""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('') + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertIsNotNone(config.get('char_limits')) + + def test_with_config_yml(self): + """Make sure it is possible to use config.yml alone.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('apksigner: yml') + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertEqual('yml', config.get('apksigner')) + + def test_with_config_yml_utf8(self): + """Make sure it is possible to use config.yml in UTF-8 encoding.""" + os.chdir(self.testdir) + teststr = '/πÇÇ现代通用字-български-عربي1/ö/yml' + fdroidserver.common.write_config_file('apksigner: ' + teststr) + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertEqual(teststr, config.get('apksigner')) + + def test_with_config_yml_utf8_as_ascii(self): + """Make sure it is possible to use config.yml Unicode encoded as ASCII.""" + os.chdir(self.testdir) + teststr = '/πÇÇ现代通用字-български-عربي1/ö/yml' + with open(fdroidserver.common.CONFIG_FILE, 'w', encoding='utf-8') as fp: + config_dump({'apksigner': teststr}, fp) + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertEqual(teststr, config.get('apksigner')) + + def test_with_config_yml_with_env_var(self): + """Make sure it is possible to use config.yml alone.""" + 
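# a value written as a mapping like {'env': 'SECRET'} should resolve to that environment variable's value when the key is read +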
os.chdir(self.testdir) + with mock.patch.dict(os.environ): + os.environ['SECRET'] = 'mysecretpassword' # nosec B105 + fdroidserver.common.write_config_file("""keypass: {'env': 'SECRET'}\n""") + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertEqual(os.getenv('SECRET', 'fail'), config.get('keypass')) + + def test_with_config_yml_is_dict(self): + os.chdir(self.testdir) + Path(fdroidserver.common.CONFIG_FILE).write_text('apksigner = /bin/apksigner') + with self.assertRaises(TypeError): + fdroidserver.common.read_config() + + def test_with_config_yml_is_not_mixed_type(self): + os.chdir(self.testdir) + Path(fdroidserver.common.CONFIG_FILE).write_text('k: v\napksigner = /bin/apk') + with self.assertRaises(ruamel.yaml.scanner.ScannerError): + fdroidserver.common.read_config() + + def test_config_repo_url(self): + """repo_url ends in /repo, archive_url ends in /archive.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """\ + repo_url: https://MyFirstFDroidRepo.org/fdroid/repo + archive_url: https://MyFirstFDroidRepo.org/fdroid/archive + """ + ) + ) + config = fdroidserver.common.read_config() + self.assertEqual( + 'https://MyFirstFDroidRepo.org/fdroid/repo', config.get('repo_url') + ) + self.assertEqual( + 'https://MyFirstFDroidRepo.org/fdroid/archive', config.get('archive_url') + ) + + def test_config_repo_url_extra_slash(self): + """repo_url ends in /repo, archive_url ends in /archive.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('repo_url: https://MyFirstFDroidRepo.org/fdroid/repo/') + with self.assertRaises(FDroidException): + fdroidserver.common.read_config() + + def test_config_repo_url_not_repo(self): + """repo_url ends in /repo, archive_url ends in /archive.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('repo_url: https://MyFirstFDroidRepo.org/fdroid/foo') + with self.assertRaises(FDroidException): + fdroidserver.common.read_config() + + def test_config_archive_url_extra_slash(self): + """repo_url ends in /repo, archive_url ends in /archive.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('archive_url: https://MyFirstFDroidRepo.org/fdroid/archive/') + with self.assertRaises(FDroidException): + fdroidserver.common.read_config() + + def test_config_archive_url_not_repo(self): + """repo_url ends in /repo, archive_url ends in /archive.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('archive_url: https://MyFirstFDroidRepo.org/fdroid/foo') + with self.assertRaises(FDroidException): + fdroidserver.common.read_config() + + def test_write_to_config_yml(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file('apksigner: yml') + os.chmod(fdroidserver.common.CONFIG_FILE, 0o0600) + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = fdroidserver.common.read_config() + self.assertFalse('keypass' in config) + self.assertEqual('yml', config.get('apksigner')) + fdroidserver.common.write_to_config(config, 'keypass', 'mysecretpassword') + fdroidserver.common.config = None + config = fdroidserver.common.read_config() + self.assertEqual('mysecretpassword', config['keypass']) + + def test_config_dict_with_int_keys(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """ + java_paths: + 8: /usr/lib/jvm/java-8-openjdk + """ + ) + ) + self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) + config = 
fdroidserver.common.read_config() + self.assertEqual('/usr/lib/jvm/java-8-openjdk', config['java_paths']['8']) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_lazy_load_env_vars(self): + """Test that environment variables in config.yml are lazily loaded. + + It shouldn't throw errors when reading the config if the environment variables are + not set. It should throw errors when the variables are read from the config. + """ + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """ + serverwebroot: {env: serverwebroot} + servergitmirrors: + - url: {env: mirror1} + - url: {env: mirror2} + keypass: {env: keypass} + keystorepass: {env: keystorepass} + """ + ) + ) + with self.assertNoLogs(level=logging.ERROR): + config = fdroidserver.common.read_config() + + # KeyError should be raised if a key is not in the config.yml + with self.assertRaises(KeyError): + config['gpghome'] + + self.assertEqual(config.get('gpghome', 'gpg'), 'gpg') + os.environ.update({key: f"{key}supersecret" for key in ["serverwebroot", "mirror1", "mirror2", "keystorepass"]}) + self.assertEqual(config['keystorepass'], 'keystorepasssupersecret') + self.assertEqual(config['serverwebroot'], [{'url': 'serverwebrootsupersecret/'}]) + self.assertEqual(config['servergitmirrors'], [{'url': 'mirror1supersecret'}, {'url': 'mirror2supersecret'}]) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_lazy_load_env_vars_not_set(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file('keypass: {env: keypass}') + fdroidserver.common.read_config() + with self.assertLogs(level=logging.ERROR) as lw: + fdroidserver.common.config['keypass'] + self.assertTrue('is not set' in lw.output[0]) + self.assertEqual(1, len(lw.output)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_test_sdk_exists_fails_on_bad_sdk_path(self): + config = {'sdk_path': 'nothinghere'} + self.assertFalse(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_test_sdk_exists_fails_on_empty(self): + self.assertFalse(fdroidserver.common.test_sdk_exists(dict())) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_test_sdk_exists_fails_on_non_existent(self): + config = {'sdk_path': os.path.join(self.testdir, 'non_existent')} + self.assertFalse(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_test_sdk_exists_fails_on_file(self): + f = os.path.join(self.testdir, 'testfile') + open(f, 'w').close() + config = {'sdk_path': f} + self.assertFalse(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) + def test_test_sdk_exists_valid_apksigner_in_config(self): + apksigner = os.path.join( + self.testdir, + 'build-tools', + fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION, + 'apksigner', + ) + os.makedirs(os.path.dirname(apksigner)) + with open(apksigner, 'w') as fp: + fp.write('#!/bin/sh\ndate\n') + os.chmod(apksigner, 0o0755) # nosec B103 + config = {'apksigner': apksigner} + self.assertTrue(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) + def test_test_sdk_exists_old_apksigner_in_config(self): + apksigner = os.path.join(self.testdir, 'build-tools', '28.0.0', 'apksigner') + os.makedirs(os.path.dirname(apksigner))
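+ # this apksigner comes from a build-tools version below MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION, so test_sdk_exists() is expected to reject it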
+ with open(apksigner, 'w') as fp: + fp.write('#!/bin/sh\ndate\n') + os.chmod(apksigner, 0o0755) # nosec B103 + config = {'apksigner': apksigner} + self.assertFalse(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) + def test_test_sdk_exists_with_valid_apksigner(self): + apksigner = ( + Path(self.testdir) + / 'build-tools' + / fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION + / 'apksigner' + ) + apksigner.parent.mkdir(parents=True) + apksigner.write_text('#!/bin/sh\ndate\n') + apksigner.chmod(0o0755) + config = {'sdk_path': self.testdir} + self.assertTrue(fdroidserver.common.test_sdk_exists(config)) + + @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) + def test_test_sdk_exists_with_old_apksigner(self): + apksigner = Path(self.testdir) / 'build-tools' / '17.0.0' / 'apksigner' + apksigner.parent.mkdir(parents=True) + apksigner.write_text('#!/bin/sh\ndate\n') + apksigner.chmod(0o0755) + config = {'sdk_path': self.testdir} + self.assertFalse(fdroidserver.common.test_sdk_exists(config)) + + def test_loading_config_buildserver_yml(self): + """Smoke check to make sure this file is properly parsed""" + os.chdir(self.testdir) + shutil.copy( + os.path.join(basedir, '..', 'buildserver', 'config.buildserver.yml'), + fdroidserver.common.CONFIG_FILE, + ) + fdroidserver.common.read_config() + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_with_env_string(self): + """Test whether env works in keys with string values.""" + os.chdir(self.testdir) + testvalue = 'this is just a test' + Path('config.yml').write_text('keypass: {env: foo}') + os.environ['foo'] = testvalue + self.assertEqual(testvalue, fdroidserver.common.get_config()['keypass']) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_with_env_path(self): + """Test whether env works in keys with path values.""" + os.chdir(self.testdir) + path = 'user@server:/path/to/bar/' + os.environ['foo'] = path + Path('config.yml').write_text('serverwebroot: {env: foo}') + self.assertEqual( + [{'url': path}], + fdroidserver.common.get_config()['serverwebroot'], + ) + + def test_setup_status_output(self): + os.chdir(self.testdir) + start_timestamp = time.gmtime() + subcommand = 'test' + + fakecmd = ['fdroid ' + subcommand, '--option'] + sys.argv = fakecmd + fdroidserver.common.config = dict() + fdroidserver.common.setup_status_output(start_timestamp) + with open(os.path.join('repo', 'status', 'running.json')) as fp: + data = json.load(fp) + self.assertFalse(os.path.exists('.git')) + self.assertFalse('fdroiddata' in data) + self.assertEqual(fakecmd, data['commandLine']) + self.assertEqual(subcommand, data['subcommand']) + + def test_setup_status_output_in_git_repo(self): + os.chdir(self.testdir) + logging.getLogger('git.cmd').setLevel(logging.INFO) + git_repo = git.Repo.init(self.testdir) + file_in_git = 'README.md' + with open(file_in_git, 'w') as fp: + fp.write('this is just a test') + git_repo.git.add(all=True) + git_repo.index.commit("update README") + + start_timestamp = time.gmtime() + fakecmd = ['fdroid test2', '--option'] + sys.argv = fakecmd + fdroidserver.common.config = dict() + fdroidserver.common.setup_status_output(start_timestamp) + with open(os.path.join('repo', 'status', 'running.json')) as fp: + data = json.load(fp) + self.assertTrue(os.path.exists('.git')) + self.assertIsNotNone(re.match(r'[0-9a-f]{40}', data['fdroiddata']['commitId']), + 'Must be a valid git SHA1 commit 
ID!') + self.assertFalse(data['fdroiddata']['isDirty']) + self.assertEqual(fakecmd, data['commandLine']) + + self.assertEqual([], + data['fdroiddata']['untrackedFiles']) + dirtyfile = 'dirtyfile' + with open(dirtyfile, 'w', encoding='utf-8') as fp: + fp.write('this is just a test') + with open(file_in_git, 'a', encoding='utf-8') as fp: + fp.write('\nappend some stuff') + self.assertEqual([], + data['fdroiddata']['modifiedFiles']) + fdroidserver.common.setup_status_output(start_timestamp) + with open(os.path.join('repo', 'status', 'running.json')) as fp: + data = json.load(fp) + self.assertTrue(data['fdroiddata']['isDirty']) + self.assertEqual([file_in_git], + data['fdroiddata']['modifiedFiles']) + self.assertEqual([dirtyfile, 'repo/status/running.json'], + data['fdroiddata']['untrackedFiles']) + + def test_get_app_display_name(self): + testvalue = 'WIN!' + for app in [ + {'Name': testvalue}, + {'AutoName': testvalue}, + {'id': testvalue}, + {'id': 'a', 'localized': {'de-AT': {'name': testvalue}}}, + {'id': 'a', 'localized': { + 'de-AT': {'name': 'nope'}, + 'en-US': {'name': testvalue}, + }}, + {'AutoName': 'ignore me', 'Name': testvalue, 'id': 'nope'}, + {'AutoName': testvalue, 'id': 'nope'}]: + self.assertEqual(testvalue, fdroidserver.common.get_app_display_name(app)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_get_android_tools_versions(self): + sdk_path = os.path.join(basedir, 'get_android_tools_versions/android-sdk') + config = { + 'ndk_paths': {'r10e': os.path.join(sdk_path, '..', 'android-ndk-r10e')}, + 'sdk_path': sdk_path, + } + fdroidserver.common.config = config + fdroidserver.common.fill_config_defaults(config) + components = fdroidserver.common.get_android_tools_versions() + expected = ( + ('../android-ndk-r10e', 'r10e'), + ('ndk-bundle', '21.4.7075529'), + ('ndk/11.2.2725575', '11.2.2725575'), + ('ndk/17.2.4988734', '17.2.4988734'), + ('ndk/21.3.6528147', '21.3.6528147'), + ('patcher/v4', '1'), + ('platforms/android-30', '3'), + ('skiaparser/1', '6'), + ('tools', '26.1.1'), + ) + self.assertSequenceEqual(expected, sorted(components)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_get_android_tools_versions_no_ndk(self): + with tempfile.TemporaryDirectory() as tmpdir: + sdk_path = Path(tmpdir) / 'get_android_tools_versions' + shutil.copytree( + os.path.join(basedir, 'get_android_tools_versions'), sdk_path + ) + shutil.rmtree(sdk_path / 'android-ndk-r10e') + shutil.rmtree(sdk_path / 'android-sdk/ndk') + shutil.rmtree(sdk_path / 'android-sdk/ndk-bundle') + fdroidserver.common.config = {'sdk_path': str(sdk_path)} + components = fdroidserver.common.get_android_tools_versions() + expected = ( + ('android-sdk/patcher/v4', '1'), + ('android-sdk/platforms/android-30', '3'), + ('android-sdk/skiaparser/1', '6'), + ('android-sdk/tools', '26.1.1'), + ) + self.assertSequenceEqual(expected, sorted(components)) + + def test_read_pkg_args(self): + allow_vercodes = False + self.assertEqual( + {'org.fdroid.fdroid': []}, + fdroidserver.common.read_pkg_args(['org.fdroid.fdroid'], allow_vercodes), + ) + self.assertNotEqual( + {'com.example': [123456]}, + fdroidserver.common.read_pkg_args(['com.example:123456'], allow_vercodes), + ) + + allow_vercodes = True + self.assertEqual( + {'org.fdroid.fdroid': []}, + fdroidserver.common.read_pkg_args(['org.fdroid.fdroid'], allow_vercodes), + ) + self.assertEqual( + {'com.example': [123456]}, + fdroidserver.common.read_pkg_args(['com.example:123456'], allow_vercodes), + ) + 
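# with allow_vercodes, APK filenames like org.debian_kit_6.apk should also be parsed into appid and versionCode +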
self.assertEqual( + {'org.debian_kit': [6]}, + fdroidserver.common.read_pkg_args(['org.debian_kit_6.apk'], allow_vercodes), + ) + appid_versionCode_pairs = ( + 'org.fdroid.fdroid:1', + 'com.example:12345', + 'com.example:67890', + ) + self.assertEqual( + {'com.example': [12345, 67890], 'org.fdroid.fdroid': [1]}, + fdroidserver.common.read_pkg_args(appid_versionCode_pairs, allow_vercodes), + ) + appid_versionCode_pairs = ( + 'com.example:67890', + 'org.c_base.c_beam_29.apk', + ) + self.assertEqual( + {'com.example': [67890], 'org.c_base.c_beam': [29]}, + fdroidserver.common.read_pkg_args(appid_versionCode_pairs, allow_vercodes), + ) + + def test_read_pkg_args_errors(self): + allow_vercodes = True + with self.assertRaises(FDroidException): + fdroidserver.common.read_pkg_args(['org.fdroid.fdroid:'], allow_vercodes), + with self.assertRaises(FDroidException): + fdroidserver.common.read_pkg_args(['org.fdroid.fdroid:foo'], allow_vercodes), + + def test_apk_strip_v1_signatures(self): + before = os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk') + after = os.path.join(self.testdir, 'after.apk') + shutil.copy(before, after) + fdroidserver.common.apk_strip_v1_signatures(after, strip_manifest=False) + + def test_metadata_find_developer_signing_files(self): + appid = 'org.smssecure.smssecure' + + self.assertIsNone( + fdroidserver.common.metadata_find_developer_signing_files(appid, 133) + ) + + vc = '135' + self.assertEqual( + ( + os.path.join('metadata', appid, 'signatures', vc, '28969C09.RSA'), + os.path.join('metadata', appid, 'signatures', vc, '28969C09.SF'), + os.path.join('metadata', appid, 'signatures', vc, 'MANIFEST.MF'), + None, + ), + fdroidserver.common.metadata_find_developer_signing_files(appid, vc), + ) + + vc = '134' + self.assertEqual( + ( + os.path.join('metadata', appid, 'signatures', vc, '28969C09.RSA'), + os.path.join('metadata', appid, 'signatures', vc, '28969C09.SF'), + os.path.join('metadata', appid, 'signatures', vc, 'MANIFEST.MF'), + None, + ), + fdroidserver.common.metadata_find_developer_signing_files(appid, vc), + ) + + @mock.patch('sdkmanager.build_package_list', lambda use_net: None) + def test_auto_install_ndk(self): + """Test all possible field data types for build.ndk""" + fdroidserver.common.config = {'sdk_path': self.testdir} + sdk_path = self.testdir + build = fdroidserver.metadata.Build() + + none_entry = mock.Mock() + with mock.patch('sdkmanager.install', none_entry): + fdroidserver.common.auto_install_ndk(build) + none_entry.assert_not_called() + + empty_list = mock.Mock() + build.ndk = [] + with mock.patch('sdkmanager.install', empty_list): + fdroidserver.common.auto_install_ndk(build) + empty_list.assert_not_called() + + release_entry = mock.Mock() + build.ndk = 'r21e' + with mock.patch('sdkmanager.install', release_entry): + fdroidserver.common.auto_install_ndk(build) + release_entry.assert_called_once_with('ndk;r21e', sdk_path) + + revision_entry = mock.Mock() + build.ndk = '21.4.7075529' + with mock.patch('sdkmanager.install', revision_entry): + fdroidserver.common.auto_install_ndk(build) + revision_entry.assert_called_once_with('ndk;21.4.7075529', sdk_path) + + list_entry = mock.Mock() + calls = [] + build.ndk = ['r10e', '11.0.2655954', 'r12b', 'r21e'] + for n in build.ndk: + calls.append(mock.call(f'ndk;{n}', sdk_path)) + with mock.patch('sdkmanager.install', list_entry): + fdroidserver.common.auto_install_ndk(build) + list_entry.assert_has_calls(calls) + + @unittest.skipIf(importlib.util.find_spec('sdkmanager') is None, 'needs sdkmanager') + 
@mock.patch('sdkmanager.build_package_list', lambda use_net: None) + @mock.patch('sdkmanager._install_zipball_from_cache', lambda a, b: None) + @mock.patch('sdkmanager._generate_package_xml', lambda a, b, c: None) + def test_auto_install_ndk_mock_dl(self): + """Test NDK installs by actually calling sdkmanager""" + import importlib.metadata + + import sdkmanager + + sdkmanager_version = LooseVersion(importlib.metadata.version('sdkmanager')) + if sdkmanager_version < LooseVersion('0.6.4'): + raise unittest.SkipTest('needs fdroid sdkmanager >= 0.6.4') + + fdroidserver.common.config = {'sdk_path': 'placeholder'} + build = fdroidserver.metadata.Build() + url = 'https://dl.google.com/android/repository/android-ndk-r24-linux.zip' + path = sdkmanager.get_cachedir() / os.path.basename(url) + sdkmanager.packages = { + ('ndk', '24.0.8215888'): url, + ('ndk', 'r24'): url, + } + build.ndk = 'r24' + firstrun = mock.Mock() + with mock.patch('sdkmanager.download_file', firstrun): + fdroidserver.common.auto_install_ndk(build) + firstrun.assert_called_once_with(url, path) + build.ndk = '24.0.8215888' + secondrun = mock.Mock() + with mock.patch('sdkmanager.download_file', secondrun): + fdroidserver.common.auto_install_ndk(build) + secondrun.assert_called_once_with(url, path) + + @unittest.skip("This test downloads and unzips a 1GB file.") + def test_install_ndk(self): + """NDK r10e is a special case since it's missing source.properties""" + config = {'sdk_path': self.testdir} + fdroidserver.common.config = config + fdroidserver.common._install_ndk('r10e') + r10e = os.path.join(self.testdir, 'ndk', 'r10e') + self.assertEqual('r10e', fdroidserver.common.get_ndk_version(r10e)) + fdroidserver.common.fill_config_defaults(config) + self.assertEqual({'r10e': r10e}, config['ndk_paths']) + + def test_fill_config_defaults(self): + """Test the auto-detection of NDKs installed in standard paths""" + ndk_bundle = os.path.join(self.testdir, 'ndk-bundle') + os.makedirs(ndk_bundle) + with open(os.path.join(ndk_bundle, 'source.properties'), 'w') as fp: + fp.write('Pkg.Desc = Android NDK\nPkg.Revision = 17.2.4988734\n') + config = {'sdk_path': self.testdir} + fdroidserver.common.fill_config_defaults(config) + self.assertEqual({'17.2.4988734': ndk_bundle}, config['ndk_paths']) + + r21e = os.path.join(self.testdir, 'ndk', '21.4.7075529') + os.makedirs(r21e) + with open(os.path.join(r21e, 'source.properties'), 'w') as fp: + fp.write('Pkg.Desc = Android NDK\nPkg.Revision = 21.4.7075529\n') + config = {'sdk_path': self.testdir} + fdroidserver.common.fill_config_defaults(config) + self.assertEqual( + {'17.2.4988734': ndk_bundle, '21.4.7075529': r21e}, + config['ndk_paths'], + ) + + r10e = os.path.join(self.testdir, 'ndk', 'r10e') + os.makedirs(r10e) + with open(os.path.join(r10e, 'RELEASE.TXT'), 'w') as fp: + fp.write('r10e-rc4 (64-bit)\n') + config = {'sdk_path': self.testdir} + fdroidserver.common.fill_config_defaults(config) + self.assertEqual( + {'r10e': r10e, '17.2.4988734': ndk_bundle, '21.4.7075529': r21e}, + config['ndk_paths'], + ) + + @unittest.skipIf(not os.path.isdir('/usr/lib/jvm/default-java'), 'uses Debian path') + def test_fill_config_defaults_java(self): + """Test the auto-detection of Java installed in standard paths""" + config = {'sdk_path': self.testdir} + fdroidserver.common.fill_config_defaults(config) + java_paths = [] + # use presence of javac to make sure it's a JDK, not just a JRE + for f in glob.glob('/usr/lib/jvm/java-*-openjdk-*/bin/javac'): + jdk = os.path.dirname(os.path.dirname(f)) + if not
os.path.islink(jdk): + java_paths.append(jdk) + self.assertEqual( + len(java_paths), + len(config['java_paths']) + ) + for f in config['java_paths'].values(): + self.assertTrue(f in java_paths) + self.assertTrue(isinstance(f, str)) # paths in config must be str + + @mock.patch.dict(os.environ, clear=True) + def test_sdk_path_in_config_must_be_strings(self): + """All paths in config must be strings, and never pathlib.Path instances""" + os.environ['PATH'] = '/usr/bin:/usr/sbin' + config = {'sdk_path': Path('/opt/android-sdk')} + fdroidserver.common.fill_config_defaults(config) + build = fdroidserver.metadata.Build() + with self.assertRaises(TypeError): + fdroidserver.common.set_FDroidPopen_env(build=build) + + @mock.patch.dict(os.environ, clear=True) + def test_ndk_paths_in_config_must_be_strings(self): + """All paths in config must be strings, and never pathlib.Path instances""" + fdroidserver.common.config = { + 'ndk_paths': {'r21d': Path('/opt/android-sdk/ndk/r21d')} + } + build = fdroidserver.metadata.Build() + build.ndk = 'r21d' + os.environ['PATH'] = '/usr/bin:/usr/sbin' + with self.assertRaises(TypeError): + fdroidserver.common.set_FDroidPopen_env(build=build) + + @mock.patch.dict(os.environ, clear=True) + def test_FDroidPopen_envs_paths_can_be_pathlib(self): + _mock_common_module_options_instance() + os.environ['PATH'] = '/usr/bin:/usr/sbin' + envs = {'PATHLIB': Path('/pathlib/path'), 'STRING': '/string/path'} + p = fdroidserver.common.FDroidPopen(['/bin/sh', '-c', 'export'], envs=envs) + self.assertIn('/string/path', p.output) + self.assertIn('/pathlib/path', p.output) + + def test_vcs_git_latesttags(self): + tags = [ + "1.1.1", + "2.2.2", + "v3.0", + "0.0.4", + "0.5.0-beta", + "666(6)", + "seven", + ] + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + repo = git.Repo.init(Path.cwd()) + f = Path("test") + date = 10 ** 9 + for tag in tags: + date += 1 + f.write_text(tag) + repo.index.add([str(f)]) + repo.index.commit(tag, commit_date=str(date) + " +0000") + repo.create_tag(tag) + + vcs = fdroidserver.common.vcs_git(None, Path.cwd()) + self.assertEqual(vcs.latesttags(), tags[::-1]) + + def test_vcs_git_getref(self): + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + repo = git.Repo.init(Path.cwd()) + tag = "1.1.1" + f = Path("test") + f.write_text(tag) + repo.index.add([str(f)]) + repo.index.commit("foo") + repo.create_tag(tag) + + vcs = fdroidserver.common.vcs_git(None, Path.cwd()) + + self.assertIsNotNone(vcs.getref("1.1.1")) + self.assertIsNone(vcs.getref("invalid")) + + def test_get_release_filename(self): + app = fdroidserver.metadata.App() + app.id = 'test.app' + build = fdroidserver.metadata.Build() + build.versionCode = 123 + + build.output = 'build/apk/*' + self.assertEqual( + fdroidserver.common.get_release_filename(app, build), + "%s_%s.apk" % (app.id, build.versionCode), + ) + + build.output = 'build/apk/*.zip' + self.assertEqual( + fdroidserver.common.get_release_filename(app, build), + "%s_%s.zip" % (app.id, build.versionCode), + ) + + build.output = 'build/apk/*.apk' + self.assertEqual( + fdroidserver.common.get_release_filename(app, build), + "%s_%s.apk" % (app.id, build.versionCode), + ) + + build.output = 'build/apk/*.apk' + self.assertEqual( + fdroidserver.common.get_release_filename(app, build, 'exe'), + "%s_%s.exe" % (app.id, build.versionCode), + ) + + def test_no_zero_length_ndk_path_prefixes(self): + fdroidserver.common.config = {'ndk_paths': {}} + build = fdroidserver.metadata.Build() + + with mock.patch.dict(os.environ, 
clear=True): + os.environ['PATH'] = '/usr/bin:/usr/sbin' + fdroidserver.common.set_FDroidPopen_env(build=build) + self.assertNotIn('', os.getenv('PATH').split(os.pathsep)) + + def test_is_repo_file(self): + is_repo_file = fdroidserver.common.is_repo_file + self.assertFalse(is_repo_file('does-not-exist')) + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + Path('repo').mkdir() + repo_files = [ + 'repo/com.example.test.helloworld_1.apk', + 'repo/com.politedroid_6.apk', + 'repo/duplicate.permisssions_9999999.apk', + 'repo/fake.ota.update_1234.zip', + 'repo/info.guardianproject.index-v1.jar_123.apk', + 'repo/info.zwanenburg.caffeinetile_4.apk', + 'repo/main.1101613.obb.main.twoversions.obb', + ] + index_files = [ + 'repo/entry.jar', + 'repo/entry.json', + 'repo/index-v1.jar', + 'repo/index-v1.json', + 'repo/index-v2.json', + 'repo/index.css', + 'repo/index.html', + 'repo/index.jar', + 'repo/index.png', + 'repo/index.xml', + ] + for f in repo_files + index_files: + open(f, 'w').close() + + repo_dirs = [ + 'repo/com.politedroid', + 'repo/info.guardianproject.index-v1.jar', + 'repo/status', + ] + for d in repo_dirs: + os.mkdir(d) + + for f in repo_files: + self.assertTrue(os.path.exists(f), f + ' was created') + self.assertTrue(is_repo_file(f), f + ' is repo file') + + for f in index_files: + self.assertTrue(os.path.exists(f), f + ' was created') + self.assertFalse(is_repo_file(f), f + ' is repo file') + gpg_signed = [ + 'repo/entry.json', + 'repo/index-v1.json', + 'repo/index-v2.json', + ] + self.assertEqual( + (f in gpg_signed or is_repo_file(f, for_gpg_signing=False)), + is_repo_file(f, for_gpg_signing=True), + f + ' gpg signable?', + ) + + for d in repo_dirs: + self.assertTrue(os.path.exists(d), d + ' was created') + self.assertFalse(is_repo_file(d), d + ' not repo file') + + def test_get_apksigner_smartcardoptions(self): + os.chdir(self.testdir) + with open(fdroidserver.common.CONFIG_FILE, 'w', encoding='utf-8') as fp: + d = { + 'smartcardoptions': '-storetype PKCS11' + ' -providerName SunPKCS11-OpenSC' + ' -providerClass sun.security.pkcs11.SunPKCS11' + ' -providerArg opensc-fdroid.cfg' + } + config_dump(d, fp) + config = fdroidserver.common.read_config() + fdroidserver.common.config = config + self.assertTrue(isinstance(d['smartcardoptions'], str)) + self.assertTrue(isinstance(config['smartcardoptions'], list)) + self.assertEqual( + [ + '--ks-type', + 'PKCS11', + '--provider-class', + 'sun.security.pkcs11.SunPKCS11', + '--provider-arg', + 'opensc-fdroid.cfg', + ], + fdroidserver.common.get_apksigner_smartcardoptions( + config['smartcardoptions'] + ), + ) + + def test_get_smartcardoptions_list(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """ + smartcardoptions: + - -storetype + - PKCS11 + - -providerName + - SunPKCS11-OpenSC + - -providerClass + - sun.security.pkcs11.SunPKCS11 + - -providerArg + - opensc-fdroid.cfg + """ + ) + ) + config = fdroidserver.common.read_config() + fdroidserver.common.config = config + self.assertTrue(isinstance(config['smartcardoptions'], list)) + self.assertEqual( + [ + '-storetype', + 'PKCS11', + '-providerName', + 'SunPKCS11-OpenSC', + '-providerClass', + 'sun.security.pkcs11.SunPKCS11', + '-providerArg', + 'opensc-fdroid.cfg', + ], + config['smartcardoptions'], + ) + + def test_get_smartcardoptions_spaces(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + textwrap.dedent( + """ + smartcardoptions: | + -storetype PKCS11 + -providerClass sun.security.pkcs11.SunPKCS11 + 
-providerArg /etc/pkcs11_java.cfg + """ + ) + ) + config = fdroidserver.common.read_config() + fdroidserver.common.config = config + self.assertTrue(isinstance(config['smartcardoptions'], list)) + self.assertEqual( + [ + '-storetype', + 'PKCS11', + '-providerClass', + 'sun.security.pkcs11.SunPKCS11', + '-providerArg', + '/etc/pkcs11_java.cfg', + ], + config['smartcardoptions'], + ) + + def test_load_localized_config(self): + """It should load""" + antiFeatures = fdroidserver.common.load_localized_config( + ANTIFEATURES_CONFIG_NAME, 'repo' + ) + self.assertEqual( + [ + 'Ads', + 'DisabledAlgorithm', + 'KnownVuln', + 'NSFW', + 'NoSourceSince', + 'NonFreeAdd', + 'NonFreeAssets', + 'NonFreeDep', + 'NonFreeNet', + 'Tracking', + ], + list(antiFeatures.keys()), + ) + self.assertEqual( + ['de', 'en-US', 'fa', 'ro', 'zh-rCN'], + list(antiFeatures['Ads']['description'].keys()), + ) + self.assertEqual( + ['en-US'], + list(antiFeatures['NoSourceSince']['description'].keys()), + ) + # it should have copied the icon files into place + for v in antiFeatures.values(): + p = Path(os.path.dirname(__file__) + '/repo' + v['icon']['en-US']['name']) + self.assertTrue(p.exists()) + + def test_load_localized_config_categories(self): + """It should load""" + categories = fdroidserver.common.load_localized_config( + CATEGORIES_CONFIG_NAME, 'repo' + ) + self.assertEqual( + [ + 'Time', + 'Development', + 'GuardianProject', + 'Multimedia', + 'Phone & SMS', + 'Security', + 'System', + ], + list(categories.keys()), + ) + self.assertEqual(['en-US'], list(categories['GuardianProject']['name'].keys())) + + def test_load_localized_config_copy_icon(self): + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('System:\n icon: system.png') + source_file = 'config/system.png' + Path(source_file).write_text('placeholder') + time.sleep(0.01) # ensure reliable failure if mtime isn't preserved + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + dest_file = f'repo/icons/{os.path.basename(source_file)}' + self.assertEqual(os.path.getsize(source_file), os.path.getsize(dest_file)) + self.assertEqual(os.path.getmtime(source_file), os.path.getmtime(dest_file)) + + def test_load_localized_config_copy_unchanged(self): + """The destination file should only change if the source file did.""" + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('System:\n icon: system.png') + source_file = 'config/system.png' + Path(source_file).write_text('placeholder') + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + delta = 0.01 + time.sleep(delta) # ensure reliable failure if file isn't preserved + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + dest_file = f'repo/icons/{os.path.basename(source_file)}' + self.assertAlmostEqual( + os.path.getctime(source_file), os.path.getctime(dest_file), delta=delta + ) + + def test_load_localized_config_copy_over_dest(self): + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('System:\n icon: system.png') + source_file = Path('config/system.png') + dest_file = Path(f'repo/icons/{os.path.basename(source_file)}') + source_file.write_text('placeholder') + dest_file.parent.mkdir(parents=True) + dest_file.write_text('different contents') + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + self.assertEqual(os.path.getsize(source_file), os.path.getsize(dest_file)) + + def test_load_localized_config_0_file(self): + 
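"""A zero-length config/categories.yml should raise a TypeError.""" +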
os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('') + with self.assertRaises(TypeError): + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + + def test_load_localized_config_string(self): + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('this is a string') + with self.assertRaises(TypeError): + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + + def test_load_localized_config_list(self): + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('- System') + with self.assertRaises(TypeError): + fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') + + def test_config_type_check_config_yml_dict(self): + fdroidserver.common.config_type_check('config.yml', dict()) + + def test_config_type_check_config_yml_list(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config.yml', list()) + + def test_config_type_check_config_yml_set(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config.yml', set()) + + def test_config_type_check_config_yml_str(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config.yml', str()) + + def test_config_type_check_mirrors_list(self): + fdroidserver.common.config_type_check('config/mirrors.yml', list()) + + def test_config_type_check_mirrors_dict(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config/mirrors.yml', dict()) + + def test_config_type_check_mirrors_set(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config/mirrors.yml', set()) + + def test_config_type_check_mirrors_str(self): + with self.assertRaises(TypeError): + fdroidserver.common.config_type_check('config/mirrors.yml', str()) + + def test_config_serverwebroot_str(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + """serverwebroot: 'foo@example.com:/var/www'""" + ) + self.assertEqual( + [{'url': 'foo@example.com:/var/www/'}], + fdroidserver.common.read_config()['serverwebroot'], + ) + + def test_config_serverwebroot_list(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + """serverwebroot:\n - foo@example.com:/var/www""" + ) + self.assertEqual( + [{'url': 'foo@example.com:/var/www/'}], + fdroidserver.common.read_config()['serverwebroot'], + ) + + def test_config_serverwebroot_dict(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file( + """serverwebroot:\n - url: 'foo@example.com:/var/www'""" + ) + self.assertEqual( + [{'url': 'foo@example.com:/var/www/'}], + fdroidserver.common.read_config()['serverwebroot'], + ) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_serverwebroot_list_of_dicts_env(self): + os.chdir(self.testdir) + url = 'foo@example.com:/var/www/' + os.environ['serverwebroot'] = url + fdroidserver.common.write_config_file( + textwrap.dedent( + """\ + serverwebroot: + - url: {env: serverwebroot} + index_only: true + """ + ) + ) + self.assertEqual( + [{'url': url, 'index_only': True}], + fdroidserver.common.read_config()['serverwebroot'], + ) + + def test_expand_env_dict_fake_str(self): + testvalue = '"{env: foo}"' + self.assertEqual(testvalue, fdroidserver.common.expand_env_dict(testvalue)) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_expand_env_dict_good(self): + name = 'foo' + value = 'bar' + 
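# expand_env_dict({'env': 'foo'}) should return the value of the foo environment variable +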
os.environ[name] = value + self.assertEqual(value, fdroidserver.common.expand_env_dict({'env': name})) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_expand_env_dict_bad_dict(self): + with self.assertRaises(TypeError): + fdroidserver.common.expand_env_dict({'env': 'foo', 'foo': 'bar'}) + + def test_parse_list_of_dicts_str(self): + s = 'foo@example.com:/var/www' + mirrors = yaml.load("""'%s'""" % s) + self.assertEqual( + [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + def test_parse_list_of_dicts_list(self): + s = 'foo@example.com:/var/www' + mirrors = yaml.load("""- '%s'""" % s) + self.assertEqual( + [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + def test_parse_list_of_dicts_dict(self): + s = 'foo@example.com:/var/www' + mirrors = yaml.load("""- url: '%s'""" % s) + self.assertEqual( + [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH'), 'foo': 'bar'}, clear=True) + def test_parse_list_of_dicts_env_str(self): + mirrors = yaml.load('{env: foo}') + self.assertEqual( + [{'url': 'bar'}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + def test_parse_list_of_dicts_env_list(self): + s = 'foo@example.com:/var/www' + mirrors = yaml.load("""- '%s'""" % s) + self.assertEqual( + [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + def test_parse_list_of_dicts_env_dict(self): + s = 'foo@example.com:/var/www' + mirrors = yaml.load("""- url: '%s'""" % s) + self.assertEqual( + [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) + ) + + def test_KnownApks_recordapk(self): + """Test that added dates are being fetched from the index. + + There are more related tests in tests/run-tests. + + """ + now = datetime.now(timezone.utc) + knownapks = fdroidserver.common.KnownApks() + for apkName in knownapks.apks: + knownapks.recordapk(apkName, default_date=now) + for added in knownapks.apks.values(): + self.assertNotEqual(added, now) + + def test_KnownApks_recordapk_new(self): + """Test that new added dates work, and are not replaced later. + + There are more related tests in tests/run-tests. 
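+ Once an added date is recorded, a later recordapk() call must not overwrite it.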
+ + """ + now = datetime.now(timezone.utc) + knownapks = fdroidserver.common.KnownApks() + fake_apk = 'fake.apk' + knownapks.recordapk(fake_apk, default_date=now) + for apk, added in knownapks.apks.items(): + if apk == fake_apk: + self.assertEqual(added, now) + else: + self.assertNotEqual(added, now) + knownapks.recordapk(fake_apk, default_date=datetime.now(timezone.utc)) + self.assertEqual(knownapks.apks[fake_apk], now) + + def test_get_mirrors_fdroidorg(self): + mirrors = fdroidserver.common.get_mirrors( + 'https://f-droid.org/repo', 'entry.jar' + ) + self.assertEqual( + 'https://f-droid.org/repo/entry.jar', + mirrors[0]['url'], + ) + + def test_get_mirrors_other(self): + self.assertEqual( + [{'url': 'https://example.com/fdroid/repo/index-v2.json'}], + fdroidserver.common.get_mirrors( + 'https://example.com/fdroid/repo', 'index-v2.json' + ), + ) + + def test_append_filename_to_mirrors(self): + filename = 'test.apk' + url = 'https://example.com/fdroid/repo' + mirrors = [{'url': url}] + self.assertEqual( + [{'url': url + '/' + filename}], + fdroidserver.common.append_filename_to_mirrors(filename, mirrors), + ) + + def test_append_filename_to_mirrors_full(self): + filename = 'test.apk' + mirrors = fdroidserver.common.FDROIDORG_MIRRORS + for mirror in fdroidserver.common.append_filename_to_mirrors(filename, mirrors): + self.assertTrue(mirror['url'].endswith('/' + filename)) + + def test_get_source_date_epoch(self): + git_repo = git.Repo.init(self.testdir) + Path('README').write_text('file to commit') + git_repo.git.add(all=True) + git_repo.index.commit("README") + self.assertEqual( + git_repo.git.log(n=1, pretty='%ct'), + fdroidserver.common.get_source_date_epoch(self.testdir), + ) + + def test_get_source_date_epoch_no_scm(self): + self.assertIsNone(fdroidserver.common.get_source_date_epoch(self.testdir)) + + def test_get_source_date_epoch_not_git(self): + """Test when build_dir is not a git repo, e.g. 
hg, svn, etc.""" + appid = 'com.example' + build_dir = Path(self.testdir) / 'build' / appid + fdroiddata = build_dir.parent.parent + (fdroiddata / 'metadata').mkdir() + build_dir.mkdir(parents=True) + os.chdir(build_dir) + git_repo = git.Repo.init(fdroiddata) # fdroiddata is always a git repo + with (fdroiddata / f'metadata/{appid}.yml').open('w') as fp: + fp.write('AutoName: Example App\n') + git_repo.git.add(all=True) + git_repo.index.commit("update README") + self.assertEqual( + git.repo.Repo(fdroiddata).git.log(n=1, pretty='%ct'), + fdroidserver.common.get_source_date_epoch(build_dir), + ) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_set_FDroidPopen_env_with_app(self): + """Test SOURCE_DATE_EPOCH in FDroidPopen when build_dir is a git repo.""" + os.chdir(self.testdir) + app = fdroidserver.metadata.App() + app.id = 'com.example' + build_dir = Path(self.testdir) / 'build' / app.id + git_repo = git.Repo.init(build_dir) + Path('README').write_text('file to commit') + git_repo.git.add(all=True) + now = datetime.now(timezone.utc) + git_repo.index.commit("README", commit_date=now) + fdroidserver.common.set_FDroidPopen_env(app) + p = fdroidserver.common.FDroidPopen(['printenv', 'SOURCE_DATE_EPOCH']) + self.assertEqual(int(p.output), int(now.timestamp())) + + def test_calculate_gradle_flavor_combination(self): + flavors = ['aa', 'BB', 'δδ'] + combinations = ['aaBBΔδ', 'aaBB', 'aaΔδ', 'aa', 'BBΔδ', 'BB', 'δδ', ''] + self.assertEqual(fdroidserver.common.calculate_gradle_flavor_combination(flavors), combinations) + + +APKS_WITH_JAR_SIGNATURES = ( + ( + 'SpeedoMeterApp.main_1.apk', + '2e6b3126fb7e0db6a9d4c2a06df690620655454d6e152cf244cc9efe9787a77d', + ), + ( + 'apk.embedded_1.apk', + '764f0eaac0cdcde35023658eea865c4383ab580f9827c62fdd3daf9e654199ee', + ), + ( + 'bad-unicode-πÇÇ现代通用字-български-عربي1.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'issue-1128-poc3a.apk', + '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', + ), + ( + 'issue-1128-poc3b.apk', + '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', + ), + ( + 'janus.apk', + 'ebb0fedf1942a099b287c3db00ff732162152481abb2b6c7cbcdb2ba5894a768', + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_1.apk', + 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_2.apk', + 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_3.apk', + 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', + ), + ( + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', + ), + ( + 'org.dyndns.fules.ck_20.apk', + '9326a2cc1a2f148202bc7837a0af3b81200bd37fd359c9e13a2296a71d342056', + ), + ( + 'org.sajeg.fallingblocks_3.apk', + '033389681f4288fdb3e72a28058c8506233ca50de75452ab6c9c76ea1ca2d70f', + ), + ( + 'repo/com.example.test.helloworld_1.apk', + 'c3a5ca5465a7585a1bda30218ae4017083605e3576867aa897d724208d99696c', + ), + ( + 'repo/com.politedroid_3.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/com.politedroid_4.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/com.politedroid_5.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/com.politedroid_6.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( 
+ 'repo/duplicate.permisssions_9999999.apk', + '659e1fd284549f70d13fb02c620100e27eeea3420558cce62b0f5d4cf2b77d84', + ), + ( + 'repo/info.zwanenburg.caffeinetile_4.apk', + '51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e', + ), + ( + 'repo/no.min.target.sdk_987.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/obb.main.oldversion_1444412523.apk', + '818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1', + ), + ( + 'repo/obb.main.twoversions_1101613.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/obb.main.twoversions_1101615.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/obb.main.twoversions_1101617.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/obb.mainpatch.current_1619.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/obb.mainpatch.current_1619_another-release-key.apk', + 'ce9e200667f02d96d49891a2e08a3c178870e91853d61bdd33ef5f0b54701aa5', + ), + ( + 'repo/souch.smsbypass_9.apk', + 'd3aec784b1fd71549fc22c999789122e3639895db6bd585da5835fbe3db6985c', + ), + ( + 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'repo/v1.v2.sig_1020.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'urzip-release.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), + ( + 'urzip.apk', + '7eabd8c15de883d1e82b5df2fd4f7f769e498078e9ad6dc901f0e96db77ceac3', + ), +) +APKS_WITHOUT_JAR_SIGNATURES = ( + ( + 'issue-1128-poc1.apk', # APK v3 Signature only + '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', + ), + ( + 'issue-1128-poc2.apk', # APK v3 Signature only + '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', + ), + ( + 'issue-1128-min-sdk-30-poc.apk', # APK v3 Signature only + '09350d5f3460a8a0ea5cf6b68ccd296a58754f7e683ba6aa08c19be8353504f3', + ), + ( + 'v2.only.sig_2.apk', + '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + ), +) + + +class SignerExtractionTest(unittest.TestCase): + """Test extraction of the signer certificate from JARs and APKs + + These fingerprints can be confirmed with: + apksigner verify --print-certs foo.apk | grep SHA-256 + keytool -printcert -file ____.RSA + """ + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + + self.apksigner = shutil.which('apksigner') + self.keytool = shutil.which('keytool') + + def tearDown(self): + self._td.cleanup() + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_first_signer_certificate_with_jars(self): + for jar in ( + 'signindex/guardianproject-v1.jar', + 'signindex/guardianproject.jar', + 'signindex/testy.jar', + ): + outdir = os.path.join(self.testdir, jar[:-4].replace('/', '_')) + os.mkdir(outdir) + fdroidserver.common.apk_extract_signatures(jar, outdir) + certs = glob.glob(os.path.join(outdir, '*.RSA')) + with open(certs[0], 'rb') as fp: + self.assertEqual( + fdroidserver.common.get_certificate(fp.read()), + fdroidserver.common.get_first_signer_certificate(jar), + ) + + @unittest.skip("slow and only needed when adding to APKS_WITH_JAR_SIGNATURES") + def test_vs_keytool(self): + if not self.keytool: + self.skipTest('requires keytool to run') + pat = re.compile(r'[0-9A-F:]{95}') + cmd = [self.keytool, 
'-printcert', '-jarfile'] + for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: + o = subprocess.check_output(cmd + [apk], text=True) + try: + self.assertEqual( + fingerprint, + pat.search(o).group().replace(':', '').lower(), + ) + except AttributeError as e: + print(e, o) + + @unittest.skip("slow and only needed when adding to APKS_WITH_JAR_SIGNATURES") + def test_vs_apksigner(self): + if not self.apksigner: + self.skipTest('requires apksigner to run') + pat = re.compile(r'\s[0-9a-f]{64}\s') + cmd = [self.apksigner, 'verify', '--print-certs'] + for apk, fingerprint in APKS_WITH_JAR_SIGNATURES + APKS_WITHOUT_JAR_SIGNATURES: + output = subprocess.check_output(cmd + [apk], text=True) + self.assertEqual( + fingerprint, + pat.search(output).group().strip(), + apk + " should have matching signer fingerprints", + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_apk_signer_fingerprint_with_v1_apks(self): + for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: + self.assertEqual( + fingerprint, + fdroidserver.common.apk_signer_fingerprint(apk), + f'apk_signer_fingerprint should match stored fingerprint for {apk}', + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_apk_signer_fingerprint_without_v1_apks(self): + for apk, fingerprint in APKS_WITHOUT_JAR_SIGNATURES: + self.assertEqual( + fingerprint, + fdroidserver.common.apk_signer_fingerprint(apk), + f'apk_signer_fingerprint should match stored fingerprint for {apk}', + ) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_get_first_signer_certificate_with_unsigned_jar(self): + self.assertIsNone( + fdroidserver.common.get_first_signer_certificate('signindex/unsigned.jar') + ) + + def test_apk_extract_fingerprint(self): + """Test extraction of JAR signatures (does not cover APK v2+ extraction).""" + for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: + outdir = os.path.join(self.testdir, apk[:-4].replace('/', '_')) + os.mkdir(outdir) + try: + fdroidserver.common.apk_extract_signatures(apk, outdir) + except fdroidserver.apksigcopier.APKSigCopierError: + # nothing to test here when this error is thrown + continue + v1_certs = [str(cert) for cert in Path(outdir).glob('*.[DR]SA')] + cert = fdroidserver.common.get_certificate( + signature_block_file=Path(v1_certs[0]).read_bytes(), + signature_file=Path(v1_certs[0][:-4] + '.SF').read_bytes(), + ) + self.assertEqual( + fingerprint, + fdroidserver.common.signer_fingerprint(cert), + ) + apkobject = fdroidserver.common.get_androguard_APK(apk, skip_analysis=True) + v2_certs = apkobject.get_certificates_der_v2() + if v2_certs: + if v1_certs: + self.assertEqual(len(v1_certs), len(v2_certs)) + self.assertEqual( + fingerprint, + fdroidserver.common.signer_fingerprint(v2_certs[0]), + ) + v3_certs = apkobject.get_certificates_der_v3() + if v3_certs: + if v2_certs: + self.assertEqual(len(v2_certs), len(v3_certs)) + self.assertEqual( + fingerprint, + fdroidserver.common.signer_fingerprint(v3_certs[0]), + ) + + def test_signature_block_file_regex(self): + for apkpath, fingerprint in APKS_WITH_JAR_SIGNATURES: + with ZipFile(apkpath, 'r') as apk: + cert_files = [ + n + for n in apk.namelist() + if fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(n) + ] + self.assertEqual(1, len(cert_files)) + + def test_signature_block_file_regex_malicious(self): + apkpath = os.path.join(self.testdir, 'malicious.apk') + with ZipFile(apkpath, 'w') as apk: + apk.writestr('META-INF/MANIFEST.MF', 
'this is fake sig data') + apk.writestr('META-INF/CERT.SF\n', 'this is fake sig data') + apk.writestr('META-INF/AFTER.SF', 'this is fake sig data') + apk.writestr('META-INF/CERT.RSA\n', 'this is fake sig data') + apk.writestr('META-INF/AFTER.RSA', 'this is fake sig data') + with ZipFile(apkpath, 'r') as apk: + self.assertEqual( + ['META-INF/AFTER.RSA'], + [ + n + for n in apk.namelist() + if fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(n) + ], + ) + + +class IgnoreApksignerV33Test(SetUpTearDownMixin, unittest.TestCase): + """apksigner v33 should be entirely ignored + + https://gitlab.com/fdroid/fdroidserver/-/issues/1253 + """ + + BAD_VERSIONS = [ + '33.0.0-rc1', + '33.0.0-rc2', + '33.0.0-rc3', + '33.0.0-rc4', + '33.0.0', + '33.0.1', + '33.0.2', + '33.0.3', + ] + + def setUp(self): + super().setUp() + self.config = {'sdk_path': self.testdir} + + def _create_fake_build_tools(self, version): + for v in self.BAD_VERSIONS + [version]: + apksigner = os.path.join(self.testdir, 'build-tools', v, 'apksigner') + os.makedirs(os.path.dirname(apksigner)) + with open(apksigner, 'w') as fp: + fp.write(f'#!/bin/sh\necho {v}[\n') + os.chmod(apksigner, 0o0755) # nosec B103 + + def test_find_apksigner_choose_version_32_over_any_33(self): + good = '32.0.0' + self._create_fake_build_tools(good) + with mock.patch.dict(os.environ, clear=True): + os.environ['PATH'] = '/fake/path/to/avoid/conflicts' + fdroidserver.common.find_apksigner(self.config) + self.assertEqual( + os.path.join(self.testdir, 'build-tools', good, 'apksigner'), + self.config.get('apksigner'), + ) + + def test_find_apksigner_choose_no_version_over_any_33(self): + """apksigner v33 should be entirely ignored""" + self._create_fake_build_tools('29.0.0') # too old a version + with mock.patch.dict(os.environ, clear=True): + os.environ['PATH'] = '/fake/path/to/avoid/conflicts' + fdroidserver.common.find_apksigner(self.config) + self.assertIsNone(self.config.get('apksigner')) + + +class ConfigOptionsScopeTest(unittest.TestCase): + """Test assumptions about variable scope for "config" and "options". + + The ancient architecture of config and options in fdroidserver has + weird issues around unexpected scope, like there are cases where + the global config is not the same as the module-level config, and + more. + + This is about describing what is happening, it is not about + documenting behaviors that are good design. The config and options + handling should really be refactored into a well-known, workable + Pythonic pattern. + + """ + + def setUp(self): + # these are declared as None at the top of the module file + fdroidserver.common.config = None + fdroidserver.common.options = None + + def tearDown(self): + fdroidserver.common.config = None + fdroidserver.common.options = None + if 'config' in globals(): + global config + del config + if 'options' in globals(): + global options + del options + + def test_parse_args(self): + """Test that options is properly set up at the module-level and not global.""" + self.assertFalse('options' in globals()) + self.assertIsNone(fdroidserver.common.options) + parser = ArgumentParser() + fdroidserver.common.setup_global_opts(parser) + with mock.patch('sys.argv', ['$0']): + o = fdroidserver.common.parse_args(parser) + self.assertEqual(o, fdroidserver.common.options) + + # No function should set options as a global, and the global + # keyword does not create the variable. 
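+ # Accessing the bare name below must therefore raise NameError.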
+ global options + with self.assertRaises(NameError): + options + self.assertFalse('options' in globals()) + + def test_parse_args_without_args(self): + """Test that the parsing function works fine when there are no args.""" + parser = ArgumentParser() + fdroidserver.common.setup_global_opts(parser) + with mock.patch('sys.argv', ['$0']): + o = fdroidserver.common.parse_args(parser) + self.assertFalse(o.verbose) + + def test_parse_args_with_args(self): + parser = ArgumentParser() + fdroidserver.common.setup_global_opts(parser) + with mock.patch('sys.argv', ['$0', '-v']): + o = fdroidserver.common.parse_args(parser) + self.assertTrue(o.verbose) + + def test_get_config(self): + """Show how the module-level variables are initialized.""" + self.assertTrue('config' not in vars() and 'config' not in globals()) + self.assertIsNone(fdroidserver.common.config) + config = fdroidserver.common.read_config() + self.assertIsNotNone(fdroidserver.common.config) + self.assertTrue(isinstance(config, dict)) + self.assertEqual(config, fdroidserver.common.config) + + def test_get_config_global(self): + """Test assumptions about variable scope using global keyword.""" + global config + self.assertTrue('config' not in vars() and 'config' not in globals()) + self.assertIsNone(fdroidserver.common.config) + c = fdroidserver.common.read_config() + self.assertIsNotNone(fdroidserver.common.config) + self.assertTrue(isinstance(c, dict)) + self.assertEqual(c, fdroidserver.common.config) + self.assertTrue( + 'config' not in vars() and 'config' not in globals(), + "The config should not be set in the global context, only module-level.", + ) + + +class UnsafePermissionsTest(SetUpTearDownMixin, unittest.TestCase): + def setUp(self): + config = dict() + fdroidserver.common.find_apksigner(config) + if not config.get('apksigner'): + self.skipTest('SKIPPING, apksigner not installed!') + + super().setUp() + os.chdir(self.testdir) + fdroidserver.common.write_config_file('keypass: {env: keypass}') + os.chmod(fdroidserver.common.CONFIG_FILE, 0o666) # nosec B103 + + def test_config_perm_no_warning(self): + fdroidserver.common.write_config_file('keystore: foo.jks') + with self.assertNoLogs(level=logging.WARNING): + fdroidserver.common.read_config() + + def test_config_perm_keypass_warning(self): + fdroidserver.common.write_config_file('keypass: supersecret') + with self.assertLogs(level=logging.WARNING) as lw: + fdroidserver.common.read_config() + self.assertTrue('unsafe' in lw.output[0]) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + def test_config_perm_env_warning(self): + os.environ['keypass'] = 'supersecret' + fdroidserver.common.write_config_file('keypass: {env: keypass}') + with self.assertLogs(level=logging.WARNING) as lw: + fdroidserver.common.read_config() + self.assertTrue('unsafe' in lw.output[0]) + self.assertEqual(1, len(lw.output)) + + +class GetHeadCommitIdTest(unittest.TestCase): + """Test and compare two methods of getting the commit ID.""" + + def setUp(self): + self._td = mkdtemp() + self.testdir = self._td.name + os.chdir(self.testdir) + logging.getLogger('git.cmd').setLevel(logging.INFO) + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + @unittest.skipUnless((basedir.parent / '.git').exists(), 'Needs a working git repo') + def test_get_head_commit_id_compare(self): + """Run on this git repo to get some real world noise in there.""" + git_dir = basedir.parent + self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_dir)) + + def 
test_get_head_commit_id_error_bare_repo(self): + """Error because it is an empty, bare git repo.""" + git_repo = git.Repo.init(self.testdir) + self.assertIsNone(fdroidserver.common.get_head_commit_id(git_repo)) + + def test_get_head_commit_id_error_no_repo(self): + """Error because there is no .git/ dir.""" + with self.assertLogs('root', level=logging.DEBUG): + self.assertIsNone(fdroidserver.common.get_head_commit_id(self.testdir)) + + def test_get_head_commit_id_detached_and_branch(self): + """Fetching commit ID must work from detached HEADs and branches.""" + git_repo = git.Repo.init(self.testdir) + Path('README').write_text('this is just a test') + git_repo.git.add(all=True) + git_repo.index.commit("add README") + Path('LICENSE').write_text('free!') + git_repo.git.add(all=True) + git_repo.index.commit("add LICENSE") + self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) + # detached HEAD + git_repo.git.checkout('HEAD^') + self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) + # on a branch with a new commits + git_repo.git.checkout('test', b=True) + Path('foo.py').write_text('print("code!")') + git_repo.git.add(all=True) + git_repo.index.commit("add code") + self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) diff --git a/tests/test_deploy.py b/tests/test_deploy.py new file mode 100755 index 00000000..d7de7545 --- /dev/null +++ b/tests/test_deploy.py @@ -0,0 +1,1226 @@ +#!/usr/bin/env python3 + +import configparser +import os +import shutil +import tempfile +import unittest +from pathlib import Path +from unittest import mock + +import git + +import fdroidserver + +from .shared_test_code import TmpCwd, VerboseFalseOptions, mkdtemp + +basedir = Path(__file__).parent +FILES = basedir + + +def _mock_rclone_config_file(cmd, text): # pylint: disable=unused-argument + """Mock output from rclone 1.60.1 but with nonexistent conf file.""" + return "Configuration file doesn't exist, but rclone will use this path:\n/nonexistent/rclone.conf\n" + + +class DeployTest(unittest.TestCase): + '''fdroidserver/deploy.py''' + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + + fdroidserver.common.options = mock.Mock() + fdroidserver.deploy.config = {} + + def tearDown(self): + self._td.cleanup() + + def test_update_serverwebroots_bad_None(self): + with self.assertRaises(TypeError): + fdroidserver.deploy.update_serverwebroots(None, 'repo') + + def test_update_serverwebroots_bad_int(self): + with self.assertRaises(TypeError): + fdroidserver.deploy.update_serverwebroots(9, 'repo') + + def test_update_serverwebroots_bad_float(self): + with self.assertRaises(TypeError): + fdroidserver.deploy.update_serverwebroots(1.0, 'repo') + + def test_update_serverwebroots(self): + """rsync works with file paths, so this test uses paths for the URLs""" + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir() + fake_apk = repo / 'fake.apk' + with fake_apk.open('w') as fp: + fp.write('not an APK, but has the right filename') + url0 = Path('url0/fdroid') + url0.mkdir(parents=True) + url1 = Path('url1/fdroid') + url1.mkdir(parents=True) + + # setup parameters for this test run + fdroidserver.common.options.identity_file = None + fdroidserver.deploy.config['make_current_version_link'] = False + + dest_apk0 = url0 / fake_apk + dest_apk1 = url1 / fake_apk + self.assertFalse(dest_apk0.is_file()) + self.assertFalse(dest_apk1.is_file()) + fdroidserver.deploy.update_serverwebroots( + [ + {'url': str(url0)}, + {'url': str(url1)}, + 
], + str(repo), + ) + self.assertTrue(dest_apk0.is_file()) + self.assertTrue(dest_apk1.is_file()) + + def test_update_serverwebroots_url_does_not_end_with_fdroid(self): + with self.assertRaises(SystemExit): + fdroidserver.deploy.update_serverwebroots([{'url': 'url'}], 'repo') + + def test_update_serverwebroots_bad_ssh_url(self): + with self.assertRaises(SystemExit): + fdroidserver.deploy.update_serverwebroots( + [{'url': 'f@b.ar::/path/to/fdroid'}], 'repo' + ) + + def test_update_serverwebroots_unsupported_ssh_url(self): + with self.assertRaises(SystemExit): + fdroidserver.deploy.update_serverwebroots([{'url': 'ssh://nope'}], 'repo') + + @unittest.skipUnless(shutil.which('rclone'), 'requires rclone') + def test_update_remote_storage_with_rclone(self): + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir(parents=True, exist_ok=True) + + fake_apk = repo / 'another_fake.apk' + with fake_apk.open('w') as fp: + fp.write('not an APK, but has the right filename') + fake_index = repo / fdroidserver.common.INDEX_FILES[0] + with fake_index.open('w') as fp: + fp.write('not an index, but has the right filename') + + # write out rclone config for test use + rclone_config = configparser.ConfigParser() + rclone_config.add_section("test-local-config") + rclone_config.set("test-local-config", "type", "local") + + rclone_config_path = Path('rclone_config_path') + rclone_config_path.mkdir(parents=True, exist_ok=True) + rclone_file = rclone_config_path / 'rclone.conf' + with open(rclone_file, 'w') as configfile: + rclone_config.write(configfile) + + # setup parameters for this test run + awsbucket = 'test_bucket_folder' + fdroidserver.deploy.config['awsbucket'] = awsbucket + fdroidserver.deploy.config['rclone_config'] = 'test-local-config' + fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file) + fdroidserver.common.options = VerboseFalseOptions + + # write out destination path + destination = Path(f'{awsbucket}/fdroid') + destination.mkdir(parents=True, exist_ok=True) + dest_apk = Path(destination) / fake_apk + dest_index = Path(destination) / fake_index + self.assertFalse(dest_apk.is_file()) + self.assertFalse(dest_index.is_file()) + repo_section = str(repo) + fdroidserver.deploy.update_remote_storage_with_rclone(repo_section, awsbucket) + self.assertTrue(dest_apk.is_file()) + self.assertTrue(dest_index.is_file()) + + @unittest.skipUnless(shutil.which('rclone'), 'requires rclone') + def test_update_remote_storage_with_rclone_in_index_only_mode(self): + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir(parents=True, exist_ok=True) + + fake_apk = repo / 'another_fake.apk' + with fake_apk.open('w') as fp: + fp.write('not an APK, but has the right filename') + fake_index = repo / fdroidserver.common.INDEX_FILES[0] + with fake_index.open('w') as fp: + fp.write('not an index, but has the right filename') + + # write out rclone config for test use + rclone_config = configparser.ConfigParser() + rclone_config.add_section("test-local-config") + rclone_config.set("test-local-config", "type", "local") + + rclone_config_path = Path('rclone_config_path') + rclone_config_path.mkdir(parents=True, exist_ok=True) + rclone_file = rclone_config_path / 'rclone.conf' + with open(rclone_file, 'w') as configfile: + rclone_config.write(configfile) + + # setup parameters for this test run + awsbucket = 'test_bucket_folder' + fdroidserver.deploy.config['awsbucket'] = awsbucket + fdroidserver.deploy.config['rclone_config'] = 'test-local-config' + 
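# use the rclone.conf written above for this test run +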
fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file) + fdroidserver.common.options = VerboseFalseOptions + + # write out destination path + destination = Path(f'{awsbucket}/fdroid') + destination.mkdir(parents=True, exist_ok=True) + dest_apk = Path(destination) / fake_apk + dest_index = Path(destination) / fake_index + self.assertFalse(dest_apk.is_file()) + self.assertFalse(dest_index.is_file()) + repo_section = str(repo) + fdroidserver.deploy.update_remote_storage_with_rclone( + repo_section, awsbucket, is_index_only=True + ) + self.assertFalse(dest_apk.is_file()) + self.assertTrue(dest_index.is_file()) + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + def test_update_remote_storage_with_rclone_awsbucket_no_env_vars(self): + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + def test_update_remote_storage_with_rclone_awsbucket_no_AWS_SECRET_ACCESS_KEY(self): + os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + def test_update_remote_storage_with_rclone_awsbucket_no_AWS_ACCESS_KEY_ID(self): + os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + @mock.patch('subprocess.call') + def test_update_remote_storage_with_rclone_awsbucket_env_vars(self, mock_call): + awsbucket = 'test_bucket_folder' + os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' + os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 + + def _mock_subprocess_call(cmd): + self.assertEqual( + cmd[:5], + [ + 'rclone', + 'sync', + '--delete-after', + '--config', + '.fdroid-deploy-rclone.conf', + ], + ) + return 0 + + mock_call.side_effect = _mock_subprocess_call + fdroidserver.deploy.config = {'awsbucket': awsbucket} + fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) + mock_call.assert_called() + + @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + @mock.patch('subprocess.call') + def test_update_remote_storage_with_rclone_mock_awsbucket(self, mock_call): + awsbucket = 'test_bucket_folder' + os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' + os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 + self.last_cmd = None + + def _mock_subprocess_call(cmd): + self.last_cmd = cmd + return 0 + + mock_call.side_effect = _mock_subprocess_call + + fdroidserver.deploy.config = {'awsbucket': awsbucket} + fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) + self.maxDiff = None + self.assertEqual( + self.last_cmd, + [ + 'rclone', + 'sync', + '--delete-after', + '--config', + '.fdroid-deploy-rclone.conf', + 'repo', + f'AWS-S3-US-East-1:{awsbucket}/fdroid/repo', + ], + ) + + 
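# The test above covers the awsbucket-with-only-env-vars path, where deploy is
+ # expected to use the embedded .fdroid-deploy-rclone.conf; the tests below
+ # exercise explicit rclone_config setups. +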
@mock.patch('subprocess.check_output', _mock_rclone_config_file) + @mock.patch('subprocess.call') + def test_update_remote_storage_with_rclone_mock_rclone_config(self, mock_call): + awsbucket = 'test_bucket_folder' + self.last_cmd = None + + def _mock_subprocess_call(cmd): + self.last_cmd = cmd + return 0 + + mock_call.side_effect = _mock_subprocess_call + + fdroidserver.deploy.config = { + 'awsbucket': awsbucket, + 'rclone_config': 'test_local_config', + } + fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) + self.maxDiff = None + self.assertEqual( + self.last_cmd, + [ + 'rclone', + 'sync', + '--delete-after', + 'repo', + 'test_local_config:test_bucket_folder/fdroid/repo', + ], + ) + + @mock.patch('subprocess.check_output', _mock_rclone_config_file) + @mock.patch('subprocess.call') + def test_update_remote_storage_with_rclone_mock_default_user_path(self, mock_call): + self.last_cmd = None + + def _mock_subprocess_call(cmd): + self.last_cmd = cmd + return 0 + + mock_call.side_effect = _mock_subprocess_call + + os.chdir(self.testdir) + config_name = 'test_local_config' + Path('rclone.conf').write_text('placeholder, contents ignored') + + awsbucket = 'test_bucket_folder' + fdroidserver.deploy.config['awsbucket'] = awsbucket + fdroidserver.deploy.config['rclone_config'] = config_name + fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) + self.maxDiff = None + self.assertEqual( + self.last_cmd, + [ + 'rclone', + 'sync', + '--delete-after', + '--config', + fdroidserver.deploy.EMBEDDED_RCLONE_CONF, + 'repo', + f'{config_name}:{awsbucket}/fdroid/repo', + ], + ) + + def test_update_serverwebroot(self): + """rsync works with file paths, so this test uses paths for the URLs""" + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir(parents=True) + fake_apk = repo / 'fake.apk' + with fake_apk.open('w') as fp: + fp.write('not an APK, but has the right filename') + fake_index = repo / fdroidserver.common.INDEX_FILES[0] + with fake_index.open('w') as fp: + fp.write('not an index, but has the right filename') + url = Path('url') + url.mkdir() + + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.identity_file = None + fdroidserver.deploy.config['make_current_version_link'] = False + + dest_apk = Path(url) / fake_apk + dest_index = Path(url) / fake_index + self.assertFalse(dest_apk.is_file()) + self.assertFalse(dest_index.is_file()) + + fdroidserver.deploy.update_serverwebroot({'url': str(url)}, 'repo') + self.assertTrue(dest_apk.is_file()) + self.assertTrue(dest_index.is_file()) + + def test_update_serverwebroot_in_index_only_mode(self): + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir() + fake_apk = repo / 'fake.apk' + with fake_apk.open('w') as fp: + fp.write('not an APK, but has the right filename') + fake_index = repo / fdroidserver.common.INDEX_FILES[0] + with fake_index.open('w') as fp: + fp.write('not an index, but has the right filename') + url = Path('url') + url.mkdir() + + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.deploy.config['make_current_version_link'] = False + + dest_apk = Path(url) / fake_apk + dest_index = Path(url) / fake_index + self.assertFalse(dest_apk.is_file()) + self.assertFalse(dest_index.is_file()) + + fdroidserver.deploy.update_serverwebroot( + {'url': str(url), 'index_only': True}, 'repo' + ) + 
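# index_only deploys must ship the index files but never the APKs +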
self.assertFalse(dest_apk.is_file()) + self.assertTrue(dest_index.is_file()) + + @mock.patch.dict(os.environ, clear=True) + def test_update_serverwebroot_no_rsync_error(self): + os.environ['PATH'] = self.testdir + os.chdir(self.testdir) + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.deploy.update_serverwebroot('serverwebroot', 'repo') + + def test_update_serverwebroot_make_cur_version_link(self): + self.maxDiff = None + + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.no_checksum = True + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = True + fdroidserver.common.options.index_only = False + fdroidserver.deploy.config = {'make_current_version_link': True} + url = "example.com:/var/www/fdroid" + repo_section = 'repo' + + # setup function for asserting subprocess.call invocations + call_iteration = 0 + + def update_server_webroot_call(cmd): + nonlocal call_iteration + if call_iteration == 0: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + '--exclude', + 'repo/altstore-index.json', + '--exclude', + 'repo/altstore-index.json.asc', + '--exclude', + 'repo/entry.jar', + '--exclude', + 'repo/entry.json', + '--exclude', + 'repo/entry.json.asc', + '--exclude', + 'repo/index-v1.jar', + '--exclude', + 'repo/index-v1.json', + '--exclude', + 'repo/index-v1.json.asc', + '--exclude', + 'repo/index-v2.json', + '--exclude', + 'repo/index-v2.json.asc', + '--exclude', + 'repo/index.css', + '--exclude', + 'repo/index.html', + '--exclude', + 'repo/index.jar', + '--exclude', + 'repo/index.png', + '--exclude', + 'repo/index.xml', + '--exclude', + 'repo/signer-index.jar', + '--exclude', + 'repo/signer-index.json', + '--exclude', + 'repo/signer-index.json.asc', + 'repo', + 'example.com:/var/www/fdroid', + ], + ) + elif call_iteration == 1: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + 'repo', + url, + ], + ) + elif call_iteration == 2: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + 'Sym.apk', + 'Sym.apk.asc', + 'Sym.apk.sig', + 'example.com:/var/www/fdroid', + ], + ) + else: + self.fail('unexpected subprocess.call invocation') + call_iteration += 1 + return 0 + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + os.mkdir('repo') + os.symlink('repo/com.example.sym.apk', 'Sym.apk') + os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc') + os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig') + with mock.patch('subprocess.call', side_effect=update_server_webroot_call): + fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section) + self.assertEqual(call_iteration, 3, 'expected 3 invocations of subprocess.call') + + def test_update_serverwebroot_make_cur_version_link_in_index_only_mode(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.no_checksum = True + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = True + fdroidserver.common.options.identity_file = None + fdroidserver.deploy.config['make_current_version_link'] = True + url = "example.com:/var/www/fdroid" + repo_section = 'repo' + + # setup function for asserting subprocess.call invocations + call_iteration 
= 0 + + def update_server_webroot_call(cmd): + nonlocal call_iteration + if call_iteration == 0: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + 'repo/altstore-index.json', + 'repo/altstore-index.json.asc', + 'repo/entry.jar', + 'repo/entry.json', + 'repo/entry.json.asc', + 'repo/index-v1.jar', + 'repo/index-v1.json', + 'repo/index-v1.json.asc', + 'repo/index-v2.json', + 'repo/index-v2.json.asc', + 'repo/index.css', + 'repo/index.html', + 'repo/index.jar', + 'repo/index.png', + 'repo/index.xml', + 'repo/signer-index.jar', + 'repo/signer-index.json', + 'repo/signer-index.json.asc', + 'example.com:/var/www/fdroid/repo/', + ], + ) + elif call_iteration == 1: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + 'repo', + url, + ], + ) + elif call_iteration == 2: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--quiet', + 'Sym.apk', + 'Sym.apk.asc', + 'Sym.apk.sig', + 'example.com:/var/www/fdroid', + ], + ) + else: + self.fail('unexpected subprocess.call invocation') + call_iteration += 1 + return 0 + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + os.mkdir(repo_section) + os.symlink('repo/com.example.sym.apk', 'Sym.apk') + os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc') + os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig') + + fake_files = fdroidserver.common.INDEX_FILES + for filename in fake_files: + fake_file = Path(repo_section) / filename + with fake_file.open('w') as fp: + fp.write('not a real one, but has the right filename') + + with mock.patch('subprocess.call', side_effect=update_server_webroot_call): + fdroidserver.deploy.update_serverwebroot( + {'url': url, 'index_only': True}, repo_section + ) + self.assertEqual(call_iteration, 1, 'expected 1 invocations of subprocess.call') + + def test_update_serverwebroot_with_id_file(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.no_checksum = True + fdroidserver.common.options.verbose = True + fdroidserver.common.options.quiet = False + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.index_only = False + fdroidserver.deploy.config = {'identity_file': './id_rsa'} + url = "example.com:/var/www/fdroid" + repo_section = 'archive' + + # setup function for asserting subprocess.call invocations + call_iteration = 0 + + def update_server_webroot_call(cmd): + nonlocal call_iteration + if call_iteration == 0: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--verbose', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + + fdroidserver.deploy.config['identity_file'], + '--exclude', + 'archive/altstore-index.json', + '--exclude', + 'archive/altstore-index.json.asc', + '--exclude', + 'archive/entry.jar', + '--exclude', + 'archive/entry.json', + '--exclude', + 'archive/entry.json.asc', + '--exclude', + 'archive/index-v1.jar', + '--exclude', + 'archive/index-v1.json', + '--exclude', + 'archive/index-v1.json.asc', + '--exclude', + 'archive/index-v2.json', + '--exclude', + 'archive/index-v2.json.asc', + '--exclude', + 'archive/index.css', + '--exclude', + 'archive/index.html', + '--exclude', + 'archive/index.jar', + '--exclude', + 'archive/index.png', + '--exclude', + 'archive/index.xml', + 'archive', + url, + ], + ) + elif 
call_iteration == 1: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--verbose', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + + fdroidserver.deploy.config['identity_file'], + 'archive', + url, + ], + ) + else: + self.fail('unexpected subprocess.call invocation') + call_iteration += 1 + return 0 + + with mock.patch('subprocess.call', side_effect=update_server_webroot_call): + fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section) + self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call') + + def test_update_serverwebroot_with_id_file_in_index_only_mode(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.no_chcksum = False + fdroidserver.common.options.verbose = True + fdroidserver.common.options.quiet = False + fdroidserver.common.options.identity_file = None + fdroidserver.deploy.config['identity_file'] = './id_rsa' + fdroidserver.deploy.config['make_current_version_link'] = False + url = "example.com:/var/www/fdroid" + repo_section = 'archive' + + # setup function for asserting subprocess.call invocations + call_iteration = 0 + + def update_server_webroot_call(cmd): + nonlocal call_iteration + if call_iteration == 0: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--verbose', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + + fdroidserver.deploy.config['identity_file'], + 'archive/altstore-index.json', + 'archive/altstore-index.json.asc', + 'archive/entry.jar', + 'archive/entry.json', + 'archive/entry.json.asc', + 'archive/index-v1.jar', + 'archive/index-v1.json', + 'archive/index-v1.json.asc', + 'archive/index-v2.json', + 'archive/index-v2.json.asc', + 'archive/index.css', + 'archive/index.html', + 'archive/index.jar', + 'archive/index.png', + 'archive/index.xml', + "example.com:/var/www/fdroid/archive/", + ], + ) + elif call_iteration == 1: + self.assertListEqual( + cmd, + [ + 'rsync', + '--archive', + '--delete-after', + '--safe-links', + '--verbose', + '-e', + 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + + fdroidserver.deploy.config['identity_file'], + "example.com:/var/www/fdroid/archive/", + ], + ) + else: + self.fail('unexpected subprocess.call invocation') + call_iteration += 1 + return 0 + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with mock.patch('subprocess.call', side_effect=update_server_webroot_call): + os.mkdir(repo_section) + fake_files = fdroidserver.common.INDEX_FILES + for filename in fake_files: + fake_file = Path(repo_section) / filename + with fake_file.open('w') as fp: + fp.write('not a real one, but has the right filename') + + fdroidserver.deploy.update_serverwebroot( + {'url': url, 'index_only': True}, repo_section + ) + self.assertEqual(call_iteration, 1, 'expected 1 invocations of subprocess.call') + + @unittest.skipIf( + not os.getenv('VIRUSTOTAL_API_KEY'), 'VIRUSTOTAL_API_KEY is not set' + ) + def test_upload_to_virustotal(self): + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.verbose = True + virustotal_apikey = os.getenv('VIRUSTOTAL_API_KEY') + fdroidserver.deploy.upload_to_virustotal('repo', virustotal_apikey) + + def test_remote_hostname_regex(self): + for remote_url, name in ( + ('git@github.com:guardianproject/fdroid-repo', 'github'), + ('git@gitlab.com:guardianproject/fdroid-repo', 'gitlab'), + ('https://github.com:guardianproject/fdroid-repo', 'github'), + 
('https://gitlab.com/guardianproject/fdroid-repo', 'gitlab'), + ('https://salsa.debian.org/foo/repo', 'salsa'), + ): + self.assertEqual( + name, fdroidserver.deploy.REMOTE_HOSTNAME_REGEX.sub(r'\1', remote_url) + ) + + def test_update_servergitmirrors(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.no_keep_git_mirror_archive = False + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = True + + config = {} + fdroidserver.common.fill_config_defaults(config) + fdroidserver.deploy.config = config + + os.chdir(self.testdir) + + repo_section = 'repo' + initial_branch = fdroidserver.deploy.GIT_BRANCH + + remote_repo = Path(self.testdir) / 'remote' + remote_repo.mkdir(parents=True) + remote_git_repo = git.Repo.init( + remote_repo, initial_branch=initial_branch, bare=True + ) + fdroidserver.deploy.config["servergitmirrors"] = [{"url": str(remote_repo)}] + + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir(parents=True) + fake_apk = 'Sym.apk' + fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] + for filename in fake_files: + fake_file = repo / filename + with fake_file.open('w') as fp: + fp.write('not a real one, but has the right filename') + + fdroidserver.deploy.update_servergitmirrors( + fdroidserver.deploy.config["servergitmirrors"], repo_section + ) + + verify_repo = remote_git_repo.clone( + Path(self.testdir) / 'verify', + ) + + for filename in fake_files: + remote_file = f"fdroid/{repo_section}/{filename}" + + self.assertIsNotNone(verify_repo.working_tree_dir) + if verify_repo.working_tree_dir is not None: + self.assertTrue( + (Path(verify_repo.working_tree_dir) / remote_file).exists() + ) + + def test_update_servergitmirrors_in_index_only_mode(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.no_keep_git_mirror_archive = False + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = True + + config = {} + fdroidserver.common.fill_config_defaults(config) + fdroidserver.deploy.config = config + + os.chdir(self.testdir) + + repo_section = 'repo' + initial_branch = fdroidserver.deploy.GIT_BRANCH + + remote_repo = Path(self.testdir) / 'remote' + remote_repo.mkdir(parents=True) + remote_git_repo = git.Repo.init( + remote_repo, initial_branch=initial_branch, bare=True + ) + fdroidserver.deploy.config["servergitmirrors"] = [ + {"url": str(remote_repo), "index_only": True} + ] + + os.chdir(self.testdir) + repo = Path('repo') + repo.mkdir(parents=True) + fake_apk = 'Sym.apk' + fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] + for filename in fake_files: + fake_file = repo / filename + with fake_file.open('w') as fp: + fp.write('not a real one, but has the right filename') + + fdroidserver.deploy.update_servergitmirrors( + fdroidserver.deploy.config["servergitmirrors"], repo_section + ) + + verify_repo = remote_git_repo.clone( + Path(self.testdir) / 'verify', + ) + + for filename in fdroidserver.common.INDEX_FILES: + remote_file = f"fdroid/{repo_section}/{filename}" + + self.assertIsNotNone(verify_repo.working_tree_dir) + if verify_repo.working_tree_dir is not None: + self.assertTrue( + (Path(verify_repo.working_tree_dir) / remote_file).exists() + ) + + # Should not have the APK file + remote_file = f"fdroid/{repo_section}/{fake_apk}" + if verify_repo.working_tree_dir is not 
None: + self.assertFalse( + (Path(verify_repo.working_tree_dir) / remote_file).exists() + ) + + def test_upload_to_servergitmirror_in_index_only_mode(self): + # setup parameters for this test run + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.identity_file = None + fdroidserver.common.options.no_keep_git_mirror_archive = False + fdroidserver.common.options.verbose = False + fdroidserver.common.options.quiet = True + fdroidserver.common.options.identity_file = None + + config = {} + fdroidserver.common.fill_config_defaults(config) + fdroidserver.deploy.config = config + + repo_section = 'repo' + initial_branch = fdroidserver.deploy.GIT_BRANCH + + os.chdir(self.testdir) + + local_git_repo_path = Path(self.testdir) / 'local' + local_git_repo = git.Repo.init( + local_git_repo_path, initial_branch=initial_branch + ) + + fdroid_dir = local_git_repo_path / 'fdroid' + repo_dir = fdroid_dir / repo_section + repo_dir.mkdir(parents=True) + fake_apk = 'Sym.apk' + fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] + for filename in fake_files: + fake_file = repo_dir / filename + with fake_file.open('w') as fp: + fp.write('not a real one, but has the right filename') + + # The remote repo must be a bare repo to allow being pushed to + remote_git_repo_dir = Path(self.testdir) / 'remote' + remote_git_repo = git.Repo.init( + remote_git_repo_dir, initial_branch=initial_branch, bare=True + ) + + mirror_config = {"url": str(remote_git_repo_dir), "index_only": True} + enabled_remotes = [] + ssh_cmd = 'ssh -oBatchMode=yes' + fdroidserver.deploy.upload_to_servergitmirror( + mirror_config=mirror_config, + local_repo=local_git_repo, + enabled_remotes=enabled_remotes, + repo_section=repo_section, + is_index_only=mirror_config['index_only'], + fdroid_dir=str(fdroid_dir), + git_mirror_path=str(local_git_repo_path), + ssh_cmd=ssh_cmd, + progress=git.RemoteProgress(), + ) + + verify_repo = remote_git_repo.clone( + Path(self.testdir) / 'verify', + ) + + for filename in fdroidserver.common.INDEX_FILES: + remote_file = f"fdroid/{repo_section}/{filename}" + + self.assertIsNotNone(verify_repo.working_tree_dir) + if verify_repo.working_tree_dir is not None: + self.assertTrue( + (Path(verify_repo.working_tree_dir) / remote_file).exists() + ) + + # Should not have the APK file + remote_file = f"fdroid/{repo_section}/{fake_apk}" + if verify_repo.working_tree_dir is not None: + self.assertFalse( + (Path(verify_repo.working_tree_dir) / remote_file).exists() + ) + + +class GitHubReleasesTest(unittest.TestCase): + def test_find_release_infos(self): + self.maxDiff = None + + index_mock = b""" + { + "packages": { + "com.example.app": { + "versions": { + "2e6f263c1927506015bfc98bce0818247836f2e7fe29a04e1af2b33c97848750": { + "file": { + "name": "/com.example.app_123.apk" + }, + "whatsNew": { + "en-US": "fake what's new" + }, + "manifest": { + "versionName": "1.2.3", + "versionCode": "123" + } + }, + "8a6f263c8327506015bfc98bce0815247836f2e7fe29a04e1af2bffa6409998d": { + "file": { + "name": "/com.example.app_100.apk" + }, + "manifest": { + "versionName": "1.0-alpha", + "versionCode": "123" + }, + "releaseChannels": ["alpha"] + } + } + }, + "another.app": { + "versions": { + "30602ffc19a7c0601bbfa93bce00082c78a6f2ddfe29a04e1af253fc9f84eda0": { + "file": { + "name": "/another.app_1.apk" + }, + "manifest": { + "versionName": "1", + "versionCode": "1" + } + } + } + }, + "fildered.app": { + "versions": { + "93ae02fc19a7c0601adfa93bce0443fc78a6f2ddfe3df04e1af093fca9a1ff09": { + "file": { + "name": 
"/another.app_1.apk" + }, + "manifest": { + "versionName": "1", + "versionCode": "1" + } + } + } + } + } + } + """ + with unittest.mock.patch( + "fdroidserver.deploy.open", unittest.mock.mock_open(read_data=index_mock) + ): + release_infos = fdroidserver.deploy.find_release_infos( + "fake_path", + Path('fake_repo'), + ["com.example.app", "another.app"], + ) + + self.assertDictEqual( + release_infos, + { + "another.app": { + "1": { + "files": [Path('fake_repo') / "another.app_1.apk"], + "hasReleaseChannels": False, + "whatsNew": None, + }, + }, + "com.example.app": { + "1.0-alpha": { + "files": [ + Path("fake_repo") / "com.example.app_100.apk", + ], + "hasReleaseChannels": True, + "whatsNew": None, + }, + "1.2.3": { + "files": [ + Path("fake_repo") / "com.example.app_123.apk", + ], + "hasReleaseChannels": False, + "whatsNew": "fake what's new", + }, + }, + }, + ) + + def test_upload_to_github_releases(self): + gh_config = [ + { + "projectUrl": "https://github.com/example/app", + "packageNames": ["com.example.app", "another.app"], + }, + { + "projectUrl": "https://github.com/custom/app", + "packageNames": ["more.custom.app"], + "token": "custom_token", + }, + ] + + fri_mock = unittest.mock.Mock(return_value="fri_result") + urr_mock = unittest.mock.Mock() + with unittest.mock.patch( + "fdroidserver.deploy.find_release_infos", fri_mock + ), unittest.mock.patch( + "fdroidserver.deploy.upload_to_github_releases_repo", urr_mock + ), tempfile.TemporaryDirectory() as tmpdir: + with open(Path(tmpdir) / "index-v2.json", "w") as f: + f.write("") + + fdroidserver.deploy.upload_to_github_releases( + tmpdir, gh_config, "fake_global_token" + ) + + fri_mock.assert_called_once_with( + Path(tmpdir) / "index-v2.json", + Path(tmpdir), + ["com.example.app", "another.app", "more.custom.app"], + ) + + self.maxDiff = None + self.assertListEqual( + urr_mock.call_args_list, + [ + unittest.mock.call( + { + "projectUrl": "https://github.com/example/app", + "packageNames": ["com.example.app", "another.app"], + }, + "fri_result", + "fake_global_token", + ), + unittest.mock.call( + { + "projectUrl": "https://github.com/custom/app", + "packageNames": ["more.custom.app"], + "token": "custom_token", + }, + "fri_result", + "fake_global_token", + ), + ], + ) + + +class Test_UploadToGithubReleasesRepo(unittest.TestCase): + def setUp(self): + self.repo_conf = { + "projectUrl": "https://github.com/example/app", + "packageNames": ["com.example.app", "com.example.altapp", "another.app"], + } + self.release_infos = { + "com.example.app": { + "1.0.0": { + "files": [ + Path("fake_repo") / "com.example.app_100100.apk", + ], + "hasReleaseChannels": False, + "whatsNew": "what's new com.example.app 1.0.0", + }, + "1.0.0-beta1": { + "files": [ + Path("fake_repo") / "com.example.app_100007.apk", + ], + "hasReleaseChannels": True, + "whatsNew": None, + }, + }, + "com.example.altapp": { + "1.0.0": { + "files": [ + Path("fake_repo") / "com.example.altapp_100100.apk", + Path("fake_repo") / "com.example.altapp_100100.apk.asc", + Path("fake_repo") / "com.example.altapp_100100.apk.idsig", + ], + "whatsNew": "what's new com.example.altapp 1.0.0", + }, + }, + } + + self.api = unittest.mock.Mock() + self.api.list_unreleased_tags = lambda: ["1.0.0", "1.0.0-beta1"] + self.api_constructor = unittest.mock.Mock(return_value=self.api) + + def test_global_token(self): + with unittest.mock.patch("fdroidserver.github.GithubApi", self.api_constructor): + fdroidserver.deploy.upload_to_github_releases_repo( + self.repo_conf, + self.release_infos, + 
"global_token", + ) + + self.api_constructor.assert_called_once_with( + "global_token", "https://github.com/example/app" + ) + + self.assertListEqual( + self.api.create_release.call_args_list, + [ + unittest.mock.call( + "1.0.0", + [ + Path("fake_repo/com.example.app_100100.apk"), + Path("fake_repo/com.example.altapp_100100.apk"), + Path("fake_repo/com.example.altapp_100100.apk.asc"), + Path("fake_repo/com.example.altapp_100100.apk.idsig"), + ], + "what's new com.example.app 1.0.0", + ), + ], + ) + + def test_local_token(self): + self.repo_conf["token"] = "local_token" # nosec B105 + with unittest.mock.patch("fdroidserver.github.GithubApi", self.api_constructor): + fdroidserver.deploy.upload_to_github_releases_repo( + self.repo_conf, + self.release_infos, + "global_token", + ) + + self.api_constructor.assert_called_once_with( + "local_token", "https://github.com/example/app" + ) + + self.assertListEqual( + self.api.create_release.call_args_list, + [ + unittest.mock.call( + "1.0.0", + [ + Path("fake_repo/com.example.app_100100.apk"), + Path("fake_repo/com.example.altapp_100100.apk"), + Path("fake_repo/com.example.altapp_100100.apk.asc"), + Path("fake_repo/com.example.altapp_100100.apk.idsig"), + ], + "what's new com.example.app 1.0.0", + ), + ], + ) diff --git a/tests/test_exception.py b/tests/test_exception.py new file mode 100755 index 00000000..01a6cd46 --- /dev/null +++ b/tests/test_exception.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 + +import unittest + +import fdroidserver + + +class ExceptionTest(unittest.TestCase): + '''fdroidserver/exception.py''' + + def test_FDroidException(self): + try: + raise fdroidserver.exception.FDroidException() + except fdroidserver.exception.FDroidException as e: + str(e) + + try: + raise fdroidserver.exception.FDroidException(9) + except fdroidserver.exception.FDroidException as e: + str(e) + + try: + raise fdroidserver.exception.FDroidException(-123.12234) + except fdroidserver.exception.FDroidException as e: + str(e) + + try: + raise fdroidserver.exception.FDroidException("this is a string") + except fdroidserver.exception.FDroidException as e: + str(e) + + try: + raise fdroidserver.exception.FDroidException(['one', 'two', 'three']) + except fdroidserver.exception.FDroidException as e: + str(e) + + try: + raise fdroidserver.exception.FDroidException(('one', 'two', 'three')) + except fdroidserver.exception.FDroidException as e: + str(e) diff --git a/tests/test_github.py b/tests/test_github.py new file mode 100755 index 00000000..f30ce0bb --- /dev/null +++ b/tests/test_github.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +import unittest +import unittest.mock + +import fdroidserver + +from .shared_test_code import mock_urlopen + + +class GithubApiTest(unittest.TestCase): + def test__init(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + self.assertEqual(api._api_token, 'faketoken') + self.assertEqual(api._repo_path, 'fakerepopath') + + def test__req(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + r = api._req('https://fakeurl', data='fakedata') + self.assertEqual(r.full_url, 'https://fakeurl') + self.assertEqual(r.data, "fakedata") + self.assertDictEqual( + r.headers, + { + 'Accept': 'application/vnd.github+json', + 'Authorization': 'Bearer faketoken', + 'X-github-api-version': '2022-11-28', + }, + ) + + def test_list_released_tags(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + uomock = mock_urlopen( + body='[{"tag_name": "fake"}, {"tag_name": "double_fake"}]' + ) + 
with unittest.mock.patch("urllib.request.urlopen", uomock): + result = api.list_released_tags() + self.assertListEqual(result, ['fake', 'double_fake']) + + def test_list_unreleased_tags(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + + api.list_all_tags = unittest.mock.Mock(return_value=[1, 2, 3, 4]) + api.list_released_tags = unittest.mock.Mock(return_value=[1, 2]) + + result = api.list_unreleased_tags() + + self.assertListEqual(result, [3, 4]) + + def test_tag_exists(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + uomock = mock_urlopen(body='[{"ref": "refs/tags/fake_tag"}]') + with unittest.mock.patch("urllib.request.urlopen", uomock): + result = api.tag_exists('fake_tag') + self.assertTrue(result) + + def test_tag_exists_failure(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + + uomock = mock_urlopen(body='[{"error": "failure"}]') + + with unittest.mock.patch("urllib.request.urlopen", uomock): + success = api.tag_exists('fake_tag') + + self.assertFalse(success) + + def test_list_all_tags(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + + uomock = mock_urlopen( + body='[{"ref": "refs/tags/fake"}, {"ref": "refs/tags/double_fake"}]' + ) + + with unittest.mock.patch("urllib.request.urlopen", uomock): + result = api.list_all_tags() + + self.assertListEqual(result, ['fake', 'double_fake']) + + def test_create_release(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + + uomock = mock_urlopen(body='{"id": "fakeid"}') + api.tag_exists = lambda x: True + api._create_release_asset = unittest.mock.Mock() + + with unittest.mock.patch("urllib.request.urlopen", uomock): + success = api.create_release('faketag', ['file_a', 'file_b'], body="bdy") + self.assertTrue(success) + + req = uomock.call_args_list[0][0][0] + self.assertEqual(1, len(uomock.call_args_list)) + self.assertEqual(2, len(uomock.call_args_list[0])) + self.assertEqual(1, len(uomock.call_args_list[0][0])) + self.assertEqual( + req.full_url, + 'https://api.github.com/repos/fakerepopath/releases', + ) + self.assertEqual(req.data, b'{"tag_name": "faketag", "body": "bdy"}') + self.assertListEqual( + api._create_release_asset.call_args_list, + [ + unittest.mock.call('fakeid', 'file_a'), + unittest.mock.call('fakeid', 'file_b'), + ], + ) + + def test__create_release_asset(self): + api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') + uomock = mock_urlopen() + + with unittest.mock.patch( + 'fdroidserver.github.open', + unittest.mock.mock_open(read_data=b"fake_content"), + ), unittest.mock.patch("urllib.request.urlopen", uomock): + success = api._create_release_asset('fake_id', 'fake_file') + + self.assertTrue(success) + + req = uomock.call_args_list[0][0][0] + self.assertEqual(1, len(uomock.call_args_list)) + self.assertEqual(2, len(uomock.call_args_list[0])) + self.assertEqual(1, len(uomock.call_args_list[0][0])) + self.assertEqual( + req.full_url, + 'https://uploads.github.com/repos/fakerepopath/releases/fake_id/assets?name=fake_file', + ) + self.assertDictEqual( + req.headers, + { + "Accept": "application/vnd.github+json", + 'Authorization': 'Bearer faketoken', + 'Content-type': 'application/octet-stream', + 'X-github-api-version': '2022-11-28', + }, + ) + self.assertEqual(req.data, b'fake_content') diff --git a/tests/test_gpgsign.py b/tests/test_gpgsign.py new file mode 100755 index 00000000..84634874 --- /dev/null +++ b/tests/test_gpgsign.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import json 
+import os +import shutil +import tempfile +import unittest +from pathlib import Path +from unittest.mock import MagicMock, patch + +from fdroidserver import common, gpgsign + +basedir = Path(__file__).parent + + +class GpgsignTest(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + os.chdir(self.tempdir.name) + self.repodir = Path('repo') + self.repodir.mkdir() + + gpgsign.config = None + config = common.read_config() + config['verbose'] = True + config['gpghome'] = str((basedir / 'gnupghome').resolve()) + config['gpgkey'] = '1DBA2E89' + gpgsign.config = config + + def tearDown(self): + self.tempdir.cleanup() + + @patch('sys.argv', ['fdroid gpgsign', '--verbose']) + @patch('fdroidserver.gpgsign.FDroidPopen') + def test_sign_index(self, FDroidPopen): + """This skips running gpg because its hard to setup in a test env""" + index_v1_json = 'repo/index-v1.json' + shutil.copy(basedir / index_v1_json, 'repo') + shutil.copy(basedir / 'SpeedoMeterApp.main_1.apk', 'repo') + + def _side_effect(gpg): + f = gpg[-1] + sig = gpg[3] + self.assertTrue(sig.startswith(f)) + open(sig, 'w').close() + p = MagicMock() + p.returncode = 0 + return p + + FDroidPopen.side_effect = _side_effect + gpgsign.main() + self.assertTrue(FDroidPopen.called) + self.assertTrue((self.repodir / 'index-v1.json').exists()) + self.assertTrue((self.repodir / 'index-v1.json.asc').exists()) + self.assertTrue((self.repodir / 'SpeedoMeterApp.main_1.apk.asc').exists()) + self.assertFalse((self.repodir / 'index.jar.asc').exists()) + # smoke check status JSON + with (self.repodir / 'status/gpgsign.json').open() as fp: + data = json.load(fp) + self.assertIn('index-v1.json', data['signed']) diff --git a/tests/test_import_subcommand.py b/tests/test_import_subcommand.py new file mode 100755 index 00000000..530e10fb --- /dev/null +++ b/tests/test_import_subcommand.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python3 + +import logging +import os +import shutil +import sys +import tempfile +import unittest +from pathlib import Path +from unittest import mock + +import git +import requests +import yaml + +import fdroidserver +import fdroidserver.import_subcommand + +from .shared_test_code import TmpCwd, VerboseFalseOptions, mkdtemp + +basedir = Path(__file__).parent +logging.basicConfig(level=logging.DEBUG) + + +class ImportTest(unittest.TestCase): + '''fdroid import''' + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + def test_get_all_gradle_and_manifests(self): + """Test whether the function works with relative and absolute paths""" + a = fdroidserver.import_subcommand.get_all_gradle_and_manifests( + Path('source-files/cn.wildfirechat.chat') + ) + paths = [ + 'avenginekit/build.gradle', + 'build.gradle', + 'chat/build.gradle', + 'client/build.gradle', + 'client/src/main/AndroidManifest.xml', + 'emojilibrary/build.gradle', + 'gradle/build_libraries.gradle', + 'imagepicker/build.gradle', + 'mars-core-release/build.gradle', + 'push/build.gradle', + 'settings.gradle', + ] + paths = [Path('source-files/cn.wildfirechat.chat') / path for path in paths] + self.assertEqual(sorted(paths), sorted(a)) + + abspath = basedir / 'source-files/realm' + p = fdroidserver.import_subcommand.get_all_gradle_and_manifests(abspath) + self.assertEqual(1, len(p)) + self.assertTrue(p[0].is_relative_to(abspath)) + + def test_get_gradle_subdir(self): + subdirs = { + 'cn.wildfirechat.chat': 'chat', + 'com.anpmech.launcher': 'app', + 
'org.tasks': 'app', + 'ut.ewh.audiometrytest': 'app', + 'org.noise_planet.noisecapture': 'app', + } + for k, v in subdirs.items(): + build_dir = Path('source-files') / k + paths = fdroidserver.import_subcommand.get_all_gradle_and_manifests( + build_dir + ) + logging.info(paths) + subdir = fdroidserver.import_subcommand.get_gradle_subdir(build_dir, paths) + self.assertEqual(v, str(subdir)) + + def test_import_gitlab(self): + with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): + # FDroidPopen needs some config to work + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + + url = 'https://gitlab.com/fdroid/ci-test-app' + r = requests.head(url, timeout=300) + if r.status_code != 200: + print("ERROR", url, 'unreachable (', r.status_code, ')') + print('Skipping ImportTest!') + return + + fdroidserver.common.options = VerboseFalseOptions + app = fdroidserver.import_subcommand.get_app_from_url(url) + fdroidserver.import_subcommand.clone_to_tmp_dir(app) + self.assertEqual(app.RepoType, 'git') + self.assertEqual(app.Repo, 'https://gitlab.com/fdroid/ci-test-app.git') + + def test_get_app_from_url(self): + with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): + testdir = Path(testdir) + (testdir / 'tmp').mkdir() + tmp_importer = testdir / 'tmp/importer' + data = ( + ( + 'cn.wildfirechat.chat', + 'https://github.com/wildfirechat/android-chat', + '0.6.9', + 23, + ), + ( + 'com.anpmech.launcher', + 'https://github.com/KeikaiLauncher/KeikaiLauncher', + 'Unknown', + None, + ), + ( + 'ut.ewh.audiometrytest', + 'https://github.com/ReeceStevens/ut_ewh_audiometer_2014', + '1.65', + 14, + ), + ) + for appid, url, vn, vc in data: + shutil.rmtree( + tmp_importer, + onerror=fdroidserver.import_subcommand.handle_retree_error_on_windows, + ) + shutil.copytree(basedir / 'source-files' / appid, tmp_importer) + + app = fdroidserver.import_subcommand.get_app_from_url(url) + with mock.patch( + 'fdroidserver.common.getvcs', + lambda a, b, c: fdroidserver.common.vcs(url, testdir), + ), mock.patch( + 'fdroidserver.common.vcs.gotorevision', lambda s, rev: None + ), mock.patch( + 'shutil.rmtree', lambda a, onerror=None: None + ): + build_dir = fdroidserver.import_subcommand.clone_to_tmp_dir(app) + self.assertEqual('git', app.RepoType) + self.assertEqual(url, app.Repo) + self.assertEqual(url, app.SourceCode) + logging.info(build_dir) + paths = fdroidserver.import_subcommand.get_all_gradle_and_manifests( + build_dir + ) + self.assertNotEqual(paths, []) + ( + versionName, + versionCode, + package, + ) = fdroidserver.common.parse_androidmanifests(paths, app) + self.assertEqual(vn, versionName) + self.assertEqual(vc, versionCode) + self.assertEqual(appid, package) + + def test_bad_urls(self): + for url in ( + 'asdf', + 'file://thing.git', + 'https:///github.com/my/project', + 'git:///so/many/slashes', + 'ssh:/notabug.org/missing/a/slash', + 'git:notabug.org/missing/some/slashes', + 'https//github.com/bar/baz', + ): + with self.assertRaises(ValueError): + fdroidserver.import_subcommand.get_app_from_url(url) + + @mock.patch('sys.argv', ['fdroid import', '-u', 'https://example.com/mystery/url']) + @mock.patch('fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a: None) + def test_unrecognized_url(self): + """Test whether error is thrown when the RepoType was not found. + + clone_to_tmp_dir is mocked out to prevent this test from using + the network, if it gets past the code that throws the error. 
+ + """ + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.import_subcommand.main() + + @mock.patch('sys.argv', ['fdroid import', '-u', 'https://fake/git/url.git']) + @mock.patch( + 'fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a, r: Path('td') + ) + def test_main_local_git(self): + os.chdir(self.testdir) + git.Repo.init('td') + Path('td/build.gradle').write_text( + 'android { defaultConfig { applicationId "com.example" } }' + ) + fdroidserver.import_subcommand.main() + with open('metadata/com.example.yml') as fp: + data = yaml.safe_load(fp) + self.assertEqual(data['Repo'], sys.argv[2]) + self.assertEqual(data['RepoType'], 'git') + self.assertEqual(1, len(data['Builds'])) diff --git a/tests/test_index.py b/tests/test_index.py new file mode 100755 index 00000000..c8ff5cbe --- /dev/null +++ b/tests/test_index.py @@ -0,0 +1,918 @@ +#!/usr/bin/env python3 + +import copy +import datetime +import glob +import json +import os +import shutil +import tempfile +import unittest +import zipfile +from pathlib import Path +from unittest.mock import patch + +import requests +import yaml + +import fdroidserver +from fdroidserver import common, index, publish, signindex, update + +from .shared_test_code import GP_FINGERPRINT, TmpCwd, mkdtemp + +basedir = Path(__file__).parent + + +class Options: + nosign = True + pretty = False + verbose = False + + +class IndexTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + # TODO something should remove cls.index_v1_jar, but it was + # causing the tests to be flaky. There seems to be something + # that is running the background somehow, maybe sign_index() + # exits before jarsigner actually finishes? + cls.index_v1_jar = basedir / 'repo' / 'index-v1.jar' + + def setUp(self): + (basedir / common.CONFIG_FILE).chmod(0o600) + os.chdir(basedir) # so read_config() can find config.yml + + common.config = None + common.options = Options + config = common.read_config() + config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') + common.config = config + signindex.config = config + update.config = config + + self._td = mkdtemp() + self.testdir = self._td.name + + def tearDown(self): + self._td.cleanup() + + def _sign_test_index_v1_jar(self): + if not self.index_v1_jar.exists(): + signindex.sign_index(self.index_v1_jar.parent, 'index-v1.json') + + def test_get_public_key_from_jar_succeeds(self): + source_dir = basedir / 'signindex' + for f in ('testy.jar', 'guardianproject.jar'): + testfile = os.path.join(source_dir, f) + jar = zipfile.ZipFile(testfile) + _, fingerprint = index.get_public_key_from_jar(jar) + # comparing fingerprints should be sufficient + if f == 'testy.jar': + self.assertEqual( + fingerprint, + '818E469465F96B704E27BE2FEE4C63AB' + + '9F83DDF30E7A34C7371A4728D83B0BC1', + ) + if f == 'guardianproject.jar': + self.assertTrue(fingerprint == GP_FINGERPRINT) + + def test_get_public_key_from_jar_fails(self): + source_dir = basedir / 'signindex' + testfile = os.path.join(source_dir, 'unsigned.jar') + jar = zipfile.ZipFile(testfile) + with self.assertRaises(index.VerificationException): + index.get_public_key_from_jar(jar) + + def test_download_repo_index_no_fingerprint(self): + with self.assertRaises(index.VerificationException): + index.download_repo_index("http://example.org") + + def test_download_repo_index_no_jar(self): + with self.assertRaises(requests.exceptions.RequestException): + index.download_repo_index("http://example.org?fingerprint=nope") + + def test_get_repo_key_fingerprint(self): + 
self._sign_test_index_v1_jar() + pubkey, fingerprint = index.extract_pubkey() + ( + data, + public_key, + public_key_fingerprint, + ) = index.get_index_from_jar( + 'repo/index-v1.jar', fingerprint, allow_deprecated=True + ) + self.assertIsNotNone(data) + self.assertIsNotNone(public_key) + self.assertIsNotNone(public_key_fingerprint) + + def test_get_index_from_jar_with_bad_fingerprint(self): + pubkey, fingerprint = index.extract_pubkey() + fingerprint = fingerprint[:-1] + 'G' + with self.assertRaises(fdroidserver.exception.VerificationException): + index.get_index_from_jar( + 'repo/index-v1.jar', fingerprint, allow_deprecated=True + ) + + def test_get_index_from_jar_with_chars_to_be_stripped(self): + self._sign_test_index_v1_jar() + fingerprint = 'NOOOO F4 9A F3 F1 1E FD DF 20 DF FD 70 F5 E3 11 7B 99 76 67 41 67 AD CA 28 0E 6B 19 32 A0 60 1B 26 F6' + index.get_index_from_jar( + 'repo/index-v1.jar', fingerprint, allow_deprecated=True + ) + + @patch('requests.head') + def test_download_repo_index_same_etag(self, head): + url = 'http://example.org?fingerprint=test' + etag = '"4de5-54d840ce95cb9"' + + head.return_value.headers = {'ETag': etag} + data, new_etag = index.download_repo_index(url, etag=etag) + + self.assertIsNone(data) + self.assertEqual(etag, new_etag) + + @patch('requests.get') + @patch('requests.head') + def test_download_repo_index_new_etag(self, head, get): + url = 'http://example.org?fingerprint=' + GP_FINGERPRINT + etag = '"4de5-54d840ce95cb9"' + + # fake HTTP answers + head.return_value.headers = {'ETag': 'new_etag'} + get.return_value.headers = {'ETag': 'new_etag'} + get.return_value.status_code = 200 + testfile = os.path.join('signindex', 'guardianproject-v1.jar') + with open(testfile, 'rb') as file: + get.return_value.content = file.read() + + data, new_etag = index.download_repo_index(url, etag=etag) + + # assert that the index was retrieved properly + self.assertEqual('Guardian Project Official Releases', data['repo']['name']) + self.assertEqual(GP_FINGERPRINT, data['repo']['fingerprint']) + self.assertTrue(len(data['repo']['pubkey']) > 500) + self.assertEqual(10, len(data['apps'])) + self.assertEqual(10, len(data['packages'])) + self.assertEqual('new_etag', new_etag) + + @patch('fdroidserver.net.http_get') + def test_download_repo_index_url_parsing(self, mock_http_get): + """Test whether it is trying to download the right file + + This passes the URL back via the etag return value just as a + hack to check which URL was actually attempted. 
+ + """ + mock_http_get.side_effect = lambda url, etag, timeout: (None, url) + repo_url = 'https://fake.url/fdroid/repo' + index_url = 'https://fake.url/fdroid/repo/index-v1.jar' + fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT + slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT + for url in (repo_url, index_url, fingerprint_url, slash_url): + ilist = index.download_repo_index(url, verify_fingerprint=False) + self.assertEqual(index_url, ilist[1]) # etag item used to return URL + + @patch('fdroidserver.net.download_using_mirrors') + def test_download_repo_index_v2(self, mock_download_using_mirrors): + mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( + self.testdir, 'repo', os.path.basename(mirrors[0]['url']) + ) + os.chdir(self.testdir) + signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') + os.mkdir('repo') + shutil.copy(basedir / 'repo' / 'entry.json', 'repo') + shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') + signindex.sign_index('repo', 'entry.json') + repo_url = 'https://fake.url/fdroid/repo' + entry_url = 'https://fake.url/fdroid/repo/entry.jar' + index_url = 'https://fake.url/fdroid/repo/index-v2.json' + fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT + slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT + for url in (repo_url, entry_url, index_url, fingerprint_url, slash_url): + data, _ignored = index.download_repo_index_v2(url, verify_fingerprint=False) + self.assertEqual(['repo', 'packages'], list(data.keys())) + self.assertEqual( + 'My First F-Droid Repo Demo', data['repo']['name']['en-US'] + ) + + @patch('fdroidserver.net.download_using_mirrors') + def test_download_repo_index_v2_bad_fingerprint(self, mock_download_using_mirrors): + mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( + self.testdir, 'repo', os.path.basename(mirrors[0]['url']) + ) + os.chdir(self.testdir) + signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') + os.mkdir('repo') + shutil.copy(basedir / 'repo' / 'entry.json', 'repo') + shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') + signindex.sign_index('repo', 'entry.json') + bad_fp = '0123456789001234567890012345678900123456789001234567890012345678' + bad_fp_url = 'https://fake.url/fdroid/repo?fingerprint=' + bad_fp + with self.assertRaises(fdroidserver.exception.VerificationException): + data, _ignored = index.download_repo_index_v2(bad_fp_url) + + @patch('fdroidserver.net.download_using_mirrors') + def test_download_repo_index_v2_entry_verify(self, mock_download_using_mirrors): + def download_using_mirrors_def(mirrors): + f = os.path.join(tempfile.mkdtemp(), os.path.basename(mirrors[0]['url'])) + Path(f).write_text('not the entry.jar file contents') + return f + + mock_download_using_mirrors.side_effect = download_using_mirrors_def + url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT + with self.assertRaises(fdroidserver.exception.VerificationException): + data, _ignored = index.download_repo_index_v2(url) + + @patch('fdroidserver.net.download_using_mirrors') + def test_download_repo_index_v2_index_verify(self, mock_download_using_mirrors): + def download_using_mirrors_def(mirrors): + f = os.path.join(tempfile.mkdtemp(), os.path.basename(mirrors[0]['url'])) + Path(f).write_text('not the index-v2.json file contents') + return f + + mock_download_using_mirrors.side_effect = download_using_mirrors_def + os.chdir(self.testdir) + 
signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') + os.mkdir('repo') + shutil.copy(basedir / 'repo' / 'entry.json', 'repo') + shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') + signindex.sign_index('repo', 'entry.json') + url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT + with self.assertRaises(fdroidserver.exception.VerificationException): + data, _ignored = index.download_repo_index_v2(url) + + def test_sort_package_versions(self): + i = [ + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_134.apk', + 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 134, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_134_b30bb97.apk', + 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', + 'versionCode': 134, + }, + { + 'packageName': 'b075b32b4ef1e8a869e00edb136bd48e34a0382b85ced8628f164d1199584e4e' + }, + { + 'packageName': '43af70d1aca437c2f9974c4634cc5abe45bdc4d5d71529ac4e553488d3bb3ff6' + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_135_b30bb97.apk', + 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', + 'versionCode': 135, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_135.apk', + 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 135, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_133.apk', + 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 133, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-weird-version.apk', + 'signer': '99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff', + 'versionCode': 133, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-custom.apk', + 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', + 'versionCode': 133, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-new-custom.apk', + 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', + 'versionCode': 135, + }, + ] + + o = [ + { + 'packageName': '43af70d1aca437c2f9974c4634cc5abe45bdc4d5d71529ac4e553488d3bb3ff6' + }, + { + 'packageName': 'b075b32b4ef1e8a869e00edb136bd48e34a0382b85ced8628f164d1199584e4e' + }, + # app test data + # # packages with reproducible developer signature + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_135_b30bb97.apk', + 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', + 'versionCode': 135, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_134_b30bb97.apk', + 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', + 'versionCode': 134, + }, + # # packages build and signed by fdroid + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_135.apk', + 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 135, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_134.apk', + 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 134, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'org.smssecure.smssecure_133.apk', + 'signer': 
'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', + 'versionCode': 133, + }, + # # packages signed with unkown keys + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-new-custom.apk', + 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', + 'versionCode': 135, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-custom.apk', + 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', + 'versionCode': 133, + }, + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-weird-version.apk', + 'signer': '99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff', + 'versionCode': 133, + }, + ] + + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystorepass'] = '123456' + publish.config['keypass'] = '123456' + publish.config['keystore'] = os.path.join(os.getcwd(), 'dummy-keystore.jks') + publish.config['repo_keyalias'] = 'repokey' + + testsmetadir = os.path.join(os.getcwd(), 'metadata') + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + shutil.copytree(testsmetadir, 'metadata') + sigkeyfps = { + "org.smssecure.smssecure": { + "signer": "b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6" + } + } + os.mkdir('repo') + jarfile = 'repo/signer-index.jar' + with zipfile.ZipFile(jarfile, 'w', zipfile.ZIP_DEFLATED) as jar: + jar.writestr('signer-index.json', json.dumps(sigkeyfps)) + publish.sign_sig_key_fingerprint_list(jarfile) + common.write_config_file('') + + index.sort_package_versions(i, common.load_publish_signer_fingerprints()) + self.maxDiff = None + self.assertEqual(json.dumps(i, indent=2), json.dumps(o, indent=2)) + + # and test it still works with get_first_signer_certificate + outdir = os.path.join(self.testdir, 'index-signer-fingerprints') + os.mkdir(outdir) + common.apk_extract_signatures(jarfile, outdir) + certs = glob.glob(os.path.join(outdir, '*.RSA')) + with open(certs[0], 'rb') as fp: + self.assertEqual( + common.get_certificate(fp.read()), + common.get_first_signer_certificate(jarfile), + ) + + def test_make_v0_repo_only(self): + os.chdir(self.testdir) + os.mkdir('repo') + repo_icons_dir = os.path.join('repo', 'icons') + self.assertFalse(os.path.isdir(repo_icons_dir)) + repodict = { + 'address': 'https://example.com/fdroid/repo', + 'description': 'This is just a test', + 'icon': 'blahblah', + 'name': 'test', + 'timestamp': datetime.datetime.now(), + 'version': 12, + } + requestsdict = {'install': [], 'uninstall': []} + common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' + index.make_v0({}, [], 'repo', repodict, requestsdict, {}) + self.assertTrue(os.path.isdir(repo_icons_dir)) + self.assertTrue( + os.path.exists( + os.path.join(repo_icons_dir, common.default_config['repo_icon']) + ) + ) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) + + def test_make_v0(self): + os.chdir(self.testdir) + os.mkdir('metadata') + os.mkdir('repo') + metadatafile = 'metadata/info.zwanenburg.caffeinetile.yml' + shutil.copy(os.path.join(basedir, metadatafile), metadatafile) + repo_icons_dir = os.path.join('repo', 'icons') + self.assertFalse(os.path.isdir(repo_icons_dir)) + repodict = { + 'address': 'https://example.com/fdroid/repo', + 'description': 'This is just a test', + 'icon': 'blahblah', + 'mirrors': [ + {'isPrimary': True, 'url': 'https://example.com/fdroid/repo'}, + {'extra': 'data', 'url': 'http://one/fdroid/repo'}, + {'url': 
'http://two/fdroid/repo'}, + ], + 'name': 'test', + 'timestamp': datetime.datetime.now(), + 'version': 12, + } + app = fdroidserver.metadata.parse_metadata(metadatafile) + app['icon'] = 'info.zwanenburg.caffeinetile.4.xml' + app['CurrentVersionCode'] = 4 + apps = {app.id: app} + orig_apps = copy.deepcopy(apps) + apk = { + 'hash': 'dbbdd7deadb038862f426b71efe4a64df8c3edf25d669e935f349510e16f65db', + 'hashType': 'sha256', + 'uses-permission': [['android.permission.WAKE_LOCK', None]], + 'uses-permission-sdk-23': [], + 'features': [], + 'icons_src': { + '160': 'res/drawable/ic_coffee_on.xml', + '-1': 'res/drawable/ic_coffee_on.xml', + }, + 'icons': {'160': 'info.zwanenburg.caffeinetile.4.xml'}, + 'antiFeatures': ['KnownVuln'], + 'packageName': 'info.zwanenburg.caffeinetile', + 'versionCode': 4, + 'name': 'Caffeine Tile', + 'versionName': '1.3', + 'minSdkVersion': 24, + 'targetSdkVersion': 25, + 'sig': '03f9b2f848d22fd1d8d1331e8b1b486d', + 'signer': '51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e', + 'size': 11740, + 'apkName': 'info.zwanenburg.caffeinetile_4.apk', + 'icon': 'info.zwanenburg.caffeinetile.4.xml', + 'added': datetime.datetime.fromtimestamp(1539122400), + } + requestsdict = {'install': [], 'uninstall': []} + common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' + common.config['make_current_version_link'] = True + index.make_v0(apps, [apk], 'repo', repodict, requestsdict, {}) + self.assertTrue(os.path.isdir(repo_icons_dir)) + self.assertTrue( + os.path.exists( + os.path.join(repo_icons_dir, common.default_config['repo_icon']) + ) + ) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) + self.assertEqual(orig_apps, apps, "apps was modified when building the index") + + def test_v0_invalid_config_exception(self): + """Index v0 needs additional config values when using --nosign + + index.xml aka Index v0 includes the full repo public key in + the XML itself. So when running `fdroid update --nosign`, + there needs to be either repo_pubkey or a full keystore config + present. 
+ + """ + os.chdir(self.testdir) + os.mkdir('repo') + repo_icons_dir = os.path.join('repo', 'icons') + self.assertFalse(os.path.isdir(repo_icons_dir)) + repodict = { + 'address': 'https://example.com/fdroid/repo', + 'description': 'This is just a test', + 'icon': 'blahblah', + 'name': 'test', + 'timestamp': datetime.datetime.now(), + 'version': 12, + } + requestsdict = {'install': [], 'uninstall': []} + + common.options.nosign = False + with self.assertRaises(fdroidserver.exception.FDroidException): + index.make_v0({}, [], 'repo', repodict, requestsdict, {}) + + common.options.nosign = True + with self.assertRaises(fdroidserver.exception.FDroidException): + index.make_v0({}, [], 'repo', repodict, requestsdict, {}) + + common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' + self.assertFalse(os.path.exists(os.path.join('repo', 'index.xml'))) + self.assertFalse(os.path.exists(os.path.join('repo', 'index_unsigned.jar'))) + self.assertFalse(os.path.exists(os.path.join('repo', 'index.jar'))) + index.make_v0({}, [], 'repo', repodict, requestsdict, {}) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) + self.assertTrue(os.path.exists(os.path.join('repo', 'index_unsigned.jar'))) + self.assertFalse(os.path.exists(os.path.join('repo', 'index.jar'))) + + def test_make_v1_with_mirrors(self): + os.chdir(self.testdir) + os.mkdir('repo') + repodict = { + 'address': 'https://example.com/fdroid/repo', + 'mirrors': [ + {'isPrimary': True, 'url': 'https://example.com/fdroid/repo'}, + {'extra': 'data', 'url': 'http://one/fdroid/repo'}, + {'url': 'http://two/fdroid/repo'}, + ], + } + index.make_v1({}, [], 'repo', repodict, {}, {}) + index_v1 = Path('repo/index-v1.json') + self.assertTrue(index_v1.exists()) + with index_v1.open() as fp: + self.assertEqual( + json.load(fp)['repo']['mirrors'], + ['http://one/fdroid/repo', 'http://two/fdroid/repo'], + ) + + def test_github_get_mirror_service_urls(self): + for url in [ + 'git@github.com:foo/bar', + 'git@github.com:foo/bar.git', + 'https://github.com/foo/bar', + 'https://github.com/foo/bar.git', + ]: + self.assertEqual( + ['https://raw.githubusercontent.com/foo/bar/master/fdroid'], + index.get_mirror_service_urls({"url": url}), + ) + + @patch.dict(os.environ, clear=True) + def test_gitlab_get_mirror_service_urls(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + git_mirror_path = Path('git-mirror/fdroid') + git_mirror_path.mkdir(parents=True) + ci_job_id = '12345678' + artifacts_url = ( + 'https://group.gitlab.io/-/project/-/jobs/%s/artifacts/public/fdroid' + % ci_job_id + ) + with (git_mirror_path / 'placeholder').open('w') as fp: + fp.write(' ') + for url in [ + 'git@gitlab.com:group/project', + 'git@gitlab.com:group/project.git', + 'https://gitlab.com/group/project', + 'https://gitlab.com/group/project.git', + ]: + with patch('fdroidserver.common.GITLAB_COM_PAGES_MAX_SIZE', 1000): + expected = [ + 'https://group.gitlab.io/project/fdroid', + 'https://gitlab.com/group/project/-/raw/master/fdroid', + ] + self.assertEqual( + expected, + index.get_mirror_service_urls({"url": url}), + ) + with patch.dict(os.environ, clear=True): + os.environ['CI_JOB_ID'] = ci_job_id + self.assertEqual( + expected + [artifacts_url], + index.get_mirror_service_urls({"url": url}), + ) + with patch('fdroidserver.common.GITLAB_COM_PAGES_MAX_SIZE', 10): + expected = [ + 'https://gitlab.com/group/project/-/raw/master/fdroid', + ] + self.assertEqual( + expected, + index.get_mirror_service_urls({"url": url}), + ) + with 
patch.dict(os.environ, clear=True): + os.environ['CI_JOB_ID'] = ci_job_id + self.assertEqual( + expected + [artifacts_url], + index.get_mirror_service_urls({"url": url}), + ) + + def test_make_website(self): + os.chdir(self.testdir) + os.mkdir('metadata') + os.mkdir('repo') + + repodict = { + 'address': 'https://example.com/fdroid/repo', + 'description': 'This is just a test', + 'icon': 'blahblah', + 'name': 'test', + 'timestamp': datetime.datetime.now(), + 'version': 12, + } + + common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' + + index.make_website([], "repo", repodict) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.html'))) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.css'))) + self.assertTrue(os.path.exists(os.path.join('repo', 'index.png'))) + + try: + from html5print import CSSBeautifier, HTMLBeautifier + except ImportError: + print('WARNING: skipping rest of test since html5print is missing!') + return + + with open(os.path.join("repo", "index.html")) as f: + html = f.read() + pretty_html = HTMLBeautifier.beautify(html) + self.maxDiff = None + self.assertEqual(html, pretty_html) + + with open(os.path.join("repo", "index.css")) as f: + css = f.read() + pretty_css = CSSBeautifier.beautify(css) + self.maxDiff = None + self.assertEqual(css, pretty_css) + + def test_sort_package_versions_with_invalid(self): + i = [ + { + 'packageName': 'org.smssecure.smssecure', + 'apkName': 'smssecure-custom.fake', + 'signer': None, + 'versionCode': 11111, + } + ] + + index.sort_package_versions(i, common.load_publish_signer_fingerprints()) + + def test_package_metadata(self): + """A smoke check and format check of index.package_metadata()""" + + def _kn(key): + return key[0].lower() + key[1:] + + apps = fdroidserver.metadata.read_metadata() + update.insert_localized_app_metadata(apps) + + # smoke check all metadata files + for appid, app in apps.items(): + metadata = index.package_metadata(app, 'repo') + for k in ('Description', 'Name', 'Summary', 'video'): + if app.get(k): + self.assertTrue(isinstance(metadata[_kn(k)], dict)) + for k in ('AuthorWebSite', 'IssueTracker', 'Translation', 'WebSite'): + if app.get(k): + self.assertTrue(isinstance(metadata[_kn(k)], str)) + + # make sure these known values were properly parsed and included + appid = 'info.guardianproject.urzip' + app = apps[appid] + metadata = index.package_metadata(app, 'repo') + # files + self.assertEqual( + os.path.getsize(f'repo/{appid}/en-US/featureGraphic.png'), + metadata['featureGraphic']['en-US']['size'], + ) + self.assertEqual( + os.path.getsize(f'repo/{appid}/en-US/icon.png'), + metadata['icon']['en-US']['size'], + ) + # localized strings + self.assertEqual({'en-US': 'title'}, metadata['name']) + self.assertEqual({'en-US': 'video'}, metadata['video']) + # strings + self.assertEqual( + 'https://dev.guardianproject.info/projects/urzip', + metadata['webSite'], + ) + + def test_add_mirrors_to_repodict(self): + """Test based on the contents of tests/config.yml""" + repodict = {'address': common.config['repo_url']} + index.add_mirrors_to_repodict('repo', repodict) + self.assertEqual( + repodict['mirrors'], + [ + {'isPrimary': True, 'url': 'https://MyFirstFDroidRepo.org/fdroid/repo'}, + {'url': 'http://foobarfoobarfoobar.onion/fdroid/repo'}, + {'url': 'https://foo.bar/fdroid/repo'}, + ], + ) + + def test_custom_config_yml_with_mirrors(self): + """Test based on custom contents of config.yml""" + os.chdir(self.testdir) + repo_url = 'https://example.com/fdroid/repo' + c = {'repo_url': 
repo_url, 'mirrors': ['http://one/fdroid']} + with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp: + yaml.dump(c, fp) + common.config = None + common.read_config() + repodict = {'address': common.config['repo_url']} + index.add_mirrors_to_repodict('repo', repodict) + self.assertEqual( + repodict['mirrors'], + [ + {'url': 'https://example.com/fdroid/repo', 'isPrimary': True}, + {'url': 'http://one/fdroid/repo'}, + ], + ) + + def test_no_mirrors_config(self): + common.config = dict() + repodict = {'address': 'https://example.com/fdroid/repo'} + index.add_mirrors_to_repodict('repo', repodict) + self.assertFalse('mirrors' in repodict) + + def test_add_metadata_to_canonical_in_mirrors_config(self): + """It is possible to add extra metadata to the canonical URL""" + common.config = { + 'repo_url': 'http://one/fdroid/repo', + 'mirrors': [ + {'url': 'http://one/fdroid', 'extra': 'data'}, + {'url': 'http://two/fdroid'}, + ], + } + repodict = {'address': common.config['repo_url']} + index.add_mirrors_to_repodict('repo', repodict) + self.assertEqual( + repodict['mirrors'], + [ + {'extra': 'data', 'isPrimary': True, 'url': 'http://one/fdroid/repo'}, + {'url': 'http://two/fdroid/repo'}, + ], + ) + + def test_duplicate_primary_in_mirrors_config(self): + """There can be only one primary mirror aka canonical URL""" + common.config = { + 'repo_url': 'http://one/fdroid', + 'mirrors': [ + {'url': 'http://one/fdroid', 'countryCode': 'SA'}, + {'url': 'http://two/fdroid'}, + {'url': 'http://one/fdroid'}, + ], + } + repodict = {'address': common.config['repo_url']} + with self.assertRaises(fdroidserver.exception.FDroidException): + index.add_mirrors_to_repodict('repo', repodict) + + def test_bad_type_in_mirrors_config(self): + for i in (1, 2.3, b'asdf'): + common.config = {'mirrors': i} + repodict = dict() + with self.assertRaises(fdroidserver.exception.FDroidException): + index.add_mirrors_to_repodict('repo', repodict) + + def test_load_mirrors_config_from_file(self): + # empty the dict for *.config, see setUp() + for k in sorted(common.config.keys()): + del common.config[k] + + os.chdir(self.testdir) + os.mkdir('config') + primary = 'https://primary.com/fdroid/repo' + mirror = 'https://mirror.com/fdroid' + with open('config/mirrors.yml', 'w') as fp: + yaml.dump([{'url': mirror}], fp) + repodict = {'address': primary} + index.add_mirrors_to_repodict('repo', repodict) + self.assertEqual( + repodict['mirrors'], + [ + {'isPrimary': True, 'url': primary}, + {'url': mirror + '/repo'}, + ], + ) + + def test_error_when_load_mirrors_from_config_and_file(self): + # empty the dict for *.config, see setUp() + for k in sorted(common.config.keys()): + del common.config[k] + + os.chdir(self.testdir) + os.mkdir('config') + with open('config/mirrors.yml', 'w') as fp: + yaml.dump([{'url': 'https://foo.com'}], fp) + repodict = { + 'address': 'https://foo.com', + 'mirrors': {'url': 'http://two/fdroid/repo'}, + } + with self.assertRaises(fdroidserver.exception.FDroidException): + index.add_mirrors_to_repodict('repo', repodict) + + def test_erroneous_isPrimary_in_mirrors_config(self): + """There can be only one primary mirror aka canonical URL""" + common.config = { + 'repo_url': 'http://one/fdroid', + 'mirrors': [ + {'url': 'http://one/fdroid', 'countryCode': 'SA'}, + {'url': 'http://two/fdroid', 'isPrimary': True}, + ], + } + repodict = {'address': common.config['repo_url']} + with self.assertRaises(fdroidserver.exception.FDroidException): + index.add_mirrors_to_repodict('repo', repodict) + + +class 
AltstoreIndexTest(unittest.TestCase): + def test_make_altstore(self): + self.maxDiff = None + + apps = { + "app.fake": { + "AutoName": "Fake App", + "AuthorName": "Fake Author", + "iconv2": {"en_US": "fake_icon.png"}, + } + } + apks = [ + { + "packageName": "app.fake", + "apkName": "app.fake_123.ipa", + "versionName": "v123", + "added": datetime.datetime(2000, 2, 2, 2, 2, 2), + "size": 123, + "ipa_MinimumOSVersion": "10.0", + "ipa_DTPlatformVersion": "12.0", + "ipa_permissions": [ + "NSCameraUsageDescription", + "NSDocumentsFolderUsageDescription", + ], + "ipa_entitlements": [ + "com.apple.developer.team-identifier", + "com.apple.developer.web-browser", + "keychain-access-groups", + ], + }, + ] + config = { + "repo_icon": "fake_repo_icon.png", + "repo_name": "fake_repo", + "repo_url": "gopher://fake-repo.com/fdroid/repo", + } + + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + repodir = Path(tmpdir) / 'repo' + repodir.mkdir() + (repodir / "fake.ipa").touch() + + fdroidserver.index.make_altstore( + apps, + apks, + config, + repodir, + True, + ) + + with open(repodir / "altstore-index.json", 'r') as f: + self.assertDictEqual( + { + "apps": [ + { + "appPermissions": { + "entitlements": [ + 'com.apple.developer.team-identifier', + 'com.apple.developer.web-browser', + 'keychain-access-groups', + ], + 'privacy': [ + 'NSCameraUsageDescription', + 'NSDocumentsFolderUsageDescription', + ], + }, + 'bundleIdentifier': 'app.fake', + 'developerName': 'Fake Author', + 'iconURL': 'gopher://fake-repo.com/fdroid/repo', + 'localizedDescription': '', + 'name': 'Fake App', + 'screenshots': [], + 'versions': [ + { + 'date': '2000-02-02T02:02:02', + 'downloadURL': 'gopher://fake-repo.com/fdroid/repo/app.fake_123.ipa', + 'maxOSVersion': '12.0', + 'minOSVersion': '10.0', + 'size': 123, + 'version': 'v123', + } + ], + }, + ], + 'name': 'fake_repo', + 'news': [], + }, + json.load(f), + ) diff --git a/tests/test_init.py b/tests/test_init.py new file mode 100755 index 00000000..a038493b --- /dev/null +++ b/tests/test_init.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import os +import pathlib +import shutil +import sys +import unittest + +import fdroidserver.common +import fdroidserver.init + +from .shared_test_code import mkdtemp + +basedir = pathlib.Path(__file__).parent + + +class InitTest(unittest.TestCase): + '''fdroidserver/init.py''' + + def setUp(self): + fdroidserver.common.config = None + fdroidserver.init.config = None + self._td = mkdtemp() + self.testdir = self._td.name + os.chdir(self.testdir) + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + def test_disable_in_config(self): + test = 'mysupersecrets' + configfile = pathlib.Path(fdroidserver.common.CONFIG_FILE) + configfile.write_text(f'keystore: NONE\nkeypass: {test}\n', encoding='utf-8') + configfile.chmod(0o600) + config = fdroidserver.common.read_config() + self.assertEqual('NONE', config['keystore']) + self.assertEqual(test, config['keypass']) + fdroidserver.init.disable_in_config('keypass', 'comment') + self.assertIn('#keypass:', configfile.read_text()) + fdroidserver.common.config = None + config = fdroidserver.common.read_config() + self.assertIsNone(config.get('keypass')) + + @unittest.skipIf(os.name == 'nt', "calling main() like this hangs on Windows") + def test_main_in_empty_dir(self): + """Test that `fdroid init` will find apksigner and add it to the config""" + + shutil.copy(basedir / 'keystore.jks', self.testdir) + + bindir = os.path.join(os.getcwd(), 'bin') + os.mkdir(bindir) + apksigner = 
os.path.join(bindir, 'apksigner') + open(apksigner, 'w').close() + os.chmod(apksigner, 0o755) # nosec B103 + + sys.argv = ['fdroid init', '--keystore', 'keystore.jks', '--repo-keyalias=sova'] + with unittest.mock.patch.dict(os.environ, {'PATH': bindir}): + fdroidserver.init.main() + self.assertEqual(apksigner, fdroidserver.init.config.get('apksigner')) diff --git a/tests/test_install.py b/tests/test_install.py new file mode 100755 index 00000000..aa239d4d --- /dev/null +++ b/tests/test_install.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python3 + +import os +import textwrap +import unittest +from pathlib import Path +from unittest.mock import Mock, patch + +import fdroidserver +from fdroidserver import common, install +from fdroidserver.exception import BuildException, FDroidException + + +@unittest.skipIf(os.uname().machine == 's390x', 'adb is not ported to s390x') +class InstallTest(unittest.TestCase): + '''fdroidserver/install.py''' + + def tearDown(self): + common.config = None + + def test_devices(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + try: + config['adb'] = fdroidserver.common.find_sdk_tools_cmd('adb') + except FDroidException as e: + self.skipTest(f'Skipping test because: {e}') + self.assertTrue(os.path.exists(config['adb'])) + self.assertTrue(os.path.isfile(config['adb'])) + devices = fdroidserver.install.devices() + self.assertIsInstance(devices, list, 'install.devices() did not return a list!') + for device in devices: + self.assertIsInstance(device, str) + + def test_devices_fail(self): + common.config = dict() + common.fill_config_defaults(common.config) + common.config['adb'] = '/bin/false' + with self.assertRaises(FDroidException): + fdroidserver.install.devices() + + def test_devices_fail_nonexistent(self): + """This is mostly just to document this strange difference in behavior""" + common.config = dict() + common.fill_config_defaults(common.config) + common.config['adb'] = '/nonexistent' + with self.assertRaises(BuildException): + fdroidserver.install.devices() + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_none(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = 'List of devices attached\n\n' + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual([], fdroidserver.install.devices()) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_one(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = 'List of devices attached\n05995813\tdevice\n\n' + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual(['05995813'], fdroidserver.install.devices()) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_many(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = textwrap.dedent( + """* daemon not running; starting now at tcp:5037 + * daemon started successfully + List of devices attached + RZCT809FTQM device + 05995813 device + emulator-5556 device + emulator-5554 unauthorized + 0a388e93 no permissions (missing udev rules? 
user is in the plugdev group); see [http://developer.android.com/tools/device.html] + 986AY133QL device + 09301JEC215064 device + 015d165c3010200e device + 4DCESKVGUC85VOTO device + + """ + ) + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual( + [ + 'RZCT809FTQM', + '05995813', + 'emulator-5556', + '986AY133QL', + '09301JEC215064', + '015d165c3010200e', + '4DCESKVGUC85VOTO', + ], + fdroidserver.install.devices(), + ) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_error(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = textwrap.dedent( + """* daemon not running. starting it now on port 5037 * + * daemon started successfully * + ** daemon still not running + error: cannot connect to daemon + """ + ) + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual([], fdroidserver.install.devices()) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_no_permissions(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = textwrap.dedent( + """List of devices attached + ???????????????? no permissions + """ + ) + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual([], fdroidserver.install.devices()) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_unauthorized(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = textwrap.dedent( + """List of devices attached + aeef5e4e unauthorized + """ + ) + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual([], fdroidserver.install.devices()) + + @patch('fdroidserver.common.SdkToolsPopen') + def test_devices_with_mock_no_permissions_with_serial(self, mock_SdkToolsPopen): + p = Mock() + mock_SdkToolsPopen.return_value = p + p.output = textwrap.dedent( + """List of devices attached + 4DCESKVGUC85VOTO no permissions (missing udev rules? 
user is in the plugdev group); see [http://developer.android.com/tools/device.html] + + """ + ) + p.returncode = 0 + common.config = dict() + common.fill_config_defaults(common.config) + self.assertEqual([], fdroidserver.install.devices()) + + @staticmethod + def _download_raise(privacy_mode): + raise Exception('fake failed download') + + @patch('fdroidserver.install.download_apk') + @patch('fdroidserver.install.download_fdroid_apk') + @patch('fdroidserver.install.download_fdroid_apk_from_github') + @patch('fdroidserver.install.download_fdroid_apk_from_ipns') + @patch('fdroidserver.install.download_fdroid_apk_from_maven') + def test_install_fdroid_apk_privacy_mode_true( + self, maven, ipns, github, download_fdroid_apk, download_apk + ): + download_apk.side_effect = self._download_raise + download_fdroid_apk.side_effect = self._download_raise + github.side_effect = self._download_raise + ipns.side_effect = self._download_raise + maven.side_effect = self._download_raise + fdroidserver.common.config = {'jarsigner': 'fakepath'} + install.install_fdroid_apk(privacy_mode=True) + download_apk.assert_not_called() + download_fdroid_apk.assert_not_called() + github.assert_called_once() + ipns.assert_called_once() + maven.assert_called_once() + + @patch('fdroidserver.install.download_apk') + @patch('fdroidserver.install.download_fdroid_apk') + @patch('fdroidserver.install.download_fdroid_apk_from_github') + @patch('fdroidserver.install.download_fdroid_apk_from_ipns') + @patch('fdroidserver.install.download_fdroid_apk_from_maven') + def test_install_fdroid_apk_privacy_mode_false( + self, maven, ipns, github, download_fdroid_apk, download_apk + ): + download_apk.side_effect = self._download_raise + download_fdroid_apk.side_effect = self._download_raise + github.side_effect = self._download_raise + ipns.side_effect = self._download_raise + maven.side_effect = self._download_raise + fdroidserver.common.config = {'jarsigner': 'fakepath'} + install.install_fdroid_apk(privacy_mode=False) + download_apk.assert_called_once() + download_fdroid_apk.assert_called_once() + github.assert_called_once() + ipns.assert_called_once() + maven.assert_called_once() + + @patch('fdroidserver.install.download_apk') + @patch('fdroidserver.install.download_fdroid_apk') + @patch('fdroidserver.install.download_fdroid_apk_from_github') + @patch('fdroidserver.install.download_fdroid_apk_from_ipns') + @patch('fdroidserver.install.download_fdroid_apk_from_maven') + @patch('locale.getlocale', lambda: ('zh_CN', 'UTF-8')) + def test_install_fdroid_apk_privacy_mode_locale_auto( + self, maven, ipns, github, download_fdroid_apk, download_apk + ): + download_apk.side_effect = self._download_raise + download_fdroid_apk.side_effect = self._download_raise + github.side_effect = self._download_raise + ipns.side_effect = self._download_raise + maven.side_effect = self._download_raise + fdroidserver.common.config = {'jarsigner': 'fakepath'} + install.install_fdroid_apk(privacy_mode=None) + download_apk.assert_not_called() + download_fdroid_apk.assert_not_called() + github.assert_called_once() + ipns.assert_called_once() + maven.assert_called_once() + + @patch('fdroidserver.net.download_using_mirrors', lambda m: 'testvalue') + def test_download_fdroid_apk_smokecheck(self): + self.assertEqual('testvalue', install.download_fdroid_apk()) + + @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') + def test_download_fdroid_apk(self): + f = install.download_fdroid_apk() + self.assertTrue(Path(f).exists()) + + 
@unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') + def test_download_fdroid_apk_from_maven(self): + f = install.download_fdroid_apk_from_maven() + self.assertTrue(Path(f).exists()) + + @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') + def test_download_fdroid_apk_from_ipns(self): + f = install.download_fdroid_apk_from_ipns() + self.assertTrue(Path(f).exists()) + + @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') + def test_download_fdroid_apk_from_github(self): + f = install.download_fdroid_apk_from_github() + self.assertTrue(Path(f).exists()) diff --git a/tests/test_integration.py b/tests/test_integration.py new file mode 100755 index 00000000..2cdf19d9 --- /dev/null +++ b/tests/test_integration.py @@ -0,0 +1,1691 @@ +import configparser +import itertools +import os +import platform +import re +import shlex +import shutil +import subprocess +import sys +import threading +import unittest +from datetime import datetime, timezone +from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer +from pathlib import Path + +try: + from androguard.core.bytecodes.apk import get_apkid # androguard <4 +except ModuleNotFoundError: + from androguard.core.apk import get_apkid + +from fdroidserver._yaml import yaml, yaml_dumper + +from .shared_test_code import mkdir_testfiles, VerboseFalseOptions + +# TODO: port generic tests that use index.xml to index-v2 (test that +# explicitly test index-v0 should still use index.xml) + + +basedir = Path(__file__).parent +FILES = basedir + +try: + WORKSPACE = Path(os.environ["WORKSPACE"]) +except KeyError: + WORKSPACE = basedir.parent + +from fdroidserver import common +from fdroidserver import deploy + +conf = {"sdk_path": os.getenv("ANDROID_HOME", "")} +common.find_apksigner(conf) +USE_APKSIGNER = "apksigner" in conf + + +def docker_exists(): + try: + subprocess.check_output(["docker", "info"]) + except Exception: + return False + else: + return True + + +@unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') +class IntegrationTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + try: + cls.fdroid_cmd = shlex.split(os.environ["fdroid"]) + except KeyError: + cls.fdroid_cmd = [WORKSPACE / "fdroid"] + + os.environ.update( + { + "GIT_AUTHOR_NAME": "Test", + "GIT_AUTHOR_EMAIL": "no@mail", + "GIT_COMMITTER_NAME": "Test", + "GIT_COMMITTER_EMAIL": "no@mail", + "GIT_ALLOW_PROTOCOL": "file:https", + } + ) + + def setUp(self): + self.prev_cwd = Path() + self.testdir = mkdir_testfiles(WORKSPACE, self) + self.tmp_repo_root = self.testdir / "fdroid" + self.tmp_repo_root.mkdir(parents=True) + deploy.config = {} + os.chdir(self.tmp_repo_root) + + def tearDown(self): + os.chdir(self.prev_cwd) + shutil.rmtree(self.testdir) + + def assert_run(self, *args, **kwargs): + proc = subprocess.run(*args, **kwargs) + self.assertEqual(proc.returncode, 0) + return proc + + def assert_run_fail(self, *args, **kwargs): + proc = subprocess.run(*args, **kwargs) + self.assertNotEqual(proc.returncode, 0) + return proc + + @staticmethod + def update_yaml(path, items, replace=False): + """Update a .yml file, e.g. 
config.yml, with the given items.""" + doc = {} + if not replace: + try: + with open(path) as f: + doc = yaml.load(f) + except FileNotFoundError: + pass + doc.update(items) + with open(path, "w") as f: + yaml_dumper.dump(doc, f) + + @staticmethod + def remove_lines(path, unwanted_strings): + """Remove the lines in the path that contain the unwanted strings.""" + + def contains_unwanted(line, unwanted_strings): + for str in unwanted_strings: + if str in line: + return True + return False + + with open(path) as f: + filtered = [ + line for line in f if not contains_unwanted(line, unwanted_strings) + ] + + with open(path, "w") as f: + for line in filtered: + f.write(line) + + @staticmethod + def copy_apks_into_repo(): + def to_skip(name): + for str in [ + "unaligned", + "unsigned", + "badsig", + "badcert", + "bad-unicode", + "janus.apk", + ]: + if str in name: + return True + return False + + for f in FILES.glob("*.apk"): + if not to_skip(f.name): + appid, versionCode, _ignored = get_apkid(f) + shutil.copy( + f, + Path("repo") / common.get_release_apk_filename(appid, versionCode), + ) + + @staticmethod + def create_fake_android_home(path): + (path / "tools").mkdir() + (path / "platform-tools").mkdir() + (path / "build-tools/34.0.0").mkdir(parents=True) + (path / "build-tools/34.0.0/aapt").touch() + + def fdroid_init_with_prebuilt_keystore(self, keystore_path=FILES / "keystore.jks"): + self.assert_run( + self.fdroid_cmd + + ["init", "--keystore", keystore_path, "--repo-keyalias", "sova"] + ) + self.update_yaml( + common.CONFIG_FILE, + { + "keystorepass": "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=", + "keypass": "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=", + }, + ) + + @unittest.skipUnless(USE_APKSIGNER, "requires apksigner") + def test_run_process_when_building_and_signing_are_on_separate_machines(self): + shutil.copy(FILES / "keystore.jks", "keystore.jks") + self.fdroid_init_with_prebuilt_keystore("keystore.jks") + self.update_yaml( + common.CONFIG_FILE, + { + "make_current_version_link": True, + "keydname": "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US", + }, + ) + + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") + Path("unsigned").mkdir() + shutil.copy( + FILES / "urzip-release-unsigned.apk", + "unsigned/info.guardianproject.urzip_100.apk", + ) + + self.assert_run(self.fdroid_cmd + ["publish", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["update", "--verbose", "--nosign"]) + self.assert_run(self.fdroid_cmd + ["signindex", "--verbose"]) + + self.assertIn( + '', + Path("repo/index.xml").read_text(), + ) + self.assertTrue(Path("repo/index.jar").is_file()) + self.assertTrue(Path("repo/index-v1.jar").is_file()) + apkcache = Path("tmp/apkcache.json") + self.assertTrue(apkcache.is_file()) + self.assertTrue(apkcache.stat().st_size > 0) + self.assertTrue(Path("urzip.apk").is_symlink()) + + def test_utf8_metadata(self): + self.fdroid_init_with_prebuilt_keystore() + self.update_yaml( + common.CONFIG_FILE, + { + "repo_description": "获取已安装在您的设备上的应用的", + "mirrors": ["https://foo.bar/fdroid", "http://secret.onion/fdroid"], + }, + ) + shutil.copy(FILES / "urzip.apk", "repo") + shutil.copy(FILES / "bad-unicode-πÇÇ现代通用字-български-عربي1.apk", "repo") + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") + + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assert_run(self.fdroid_cmd + ["update"]) + + def test_copy_git_import_and_run_fdroid_scanner_on_it(self): + url = 
"https://gitlab.com/fdroid/ci-test-app.git" + Path("metadata").mkdir() + self.update_yaml( + "metadata/org.fdroid.ci.test.app.yml", + { + "AutoName": "Just A Test", + "WebSite": None, + "Builds": [ + { + "versionName": "0.3", + "versionCode": 300, + "commit": "0.3", + "subdir": "app", + "gradle": ["yes"], + } + ], + "Repo": url, + "RepoType": "git", + }, + ) + + self.assert_run(["git", "clone", url, "build/org.fdroid.ci.test.app"]) + self.assert_run( + self.fdroid_cmd + ["scanner", "org.fdroid.ci.test.app", "--verbose"] + ) + + @unittest.skipUnless(shutil.which("gpg"), "requires command line gpg") + def test_copy_repo_generate_java_gpg_keys_update_and_gpgsign(self): + """Needs tricks to make gpg-agent run in a test harness.""" + self.fdroid_init_with_prebuilt_keystore() + shutil.copytree(FILES / "repo", "repo", dirs_exist_ok=True) + for dir in ["config", "metadata"]: + shutil.copytree(FILES / dir, dir) + # gpg requires a short path to the socket to talk to gpg-agent + gnupghome = (WORKSPACE / '.testfiles/gnupghome').resolve() + shutil.rmtree(gnupghome, ignore_errors=True) + shutil.copytree(FILES / "gnupghome", gnupghome) + os.chmod(gnupghome, 0o700) + self.update_yaml( + common.CONFIG_FILE, + { + "install_list": "org.adaway", + "uninstall_list": ["com.android.vending", "com.facebook.orca"], + "gpghome": str(gnupghome), + "gpgkey": "CE71F7FB", + "mirrors": [ + "http://foobarfoobarfoobar.onion/fdroid", + "https://foo.bar/fdroid", + ], + }, + ) + self.assert_run( + self.fdroid_cmd + ["update", "--verbose", "--pretty"], + env=os.environ | {"LC_MESSAGES": "C.UTF-8"}, + ) + index_xml = Path("repo/index.xml").read_text() + self.assertIn("" in line) + with open("repo/index.xml") as f: + repo_cnt = sum(1 for line in f if "" in line) + if USE_APKSIGNER: + self.assertEqual(archive_cnt, 2) + self.assertEqual(repo_cnt, 10) + else: + # This will fail when jarsigner allows MD5 for APK signatures + self.assertEqual(archive_cnt, 5) + self.assertEqual(repo_cnt, 7) + + @unittest.skipIf(USE_APKSIGNER, "runs only without apksigner") + def test_per_app_archive_policy(self): + self.fdroid_init_with_prebuilt_keystore() + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") + for f in FILES.glob("repo/com.politedroid_[0-9].apk"): + shutil.copy(f, "repo") + self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) + + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 4) + self.assertEqual(archive_cnt, 0) + self.assertIn("com.politedroid_3.apk", repo) + self.assertIn("com.politedroid_4.apk", repo) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_6.apk", repo) + self.assertTrue(Path("repo/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + + # enable one app in the repo + self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in 
archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertEqual(archive_cnt, 3) + self.assertIn("com.politedroid_6.apk", repo) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_5.apk", archive) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) + + # remove all apps from the repo + self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 0}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 0) + self.assertEqual(archive_cnt, 4) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_5.apk", archive) + self.assertIn("com.politedroid_6.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_6.apk").is_file()) + self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) + + # move back one from archive to the repo + self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertEqual(archive_cnt, 3) + self.assertIn("com.politedroid_6.apk", repo) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_5.apk", archive) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) + self.assertFalse(Path("archive/com.politedroid_6.apk").exists()) + + # set an earlier version as CVC and test that it's the only one not archived + self.update_yaml("metadata/com.politedroid.yml", {"CurrentVersionCode": 5}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertEqual(archive_cnt, 3) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_6.apk", archive) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + 
self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_6.apk").is_file()) + + def test_moving_old_apks_to_and_from_the_archive(self): + self.fdroid_init_with_prebuilt_keystore() + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") + self.remove_lines("metadata/com.politedroid.yml", ["ArchivePolicy:"]) + for f in FILES.glob("repo/com.politedroid_[0-9].apk"): + shutil.copy(f, "repo") + self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) + + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + self.assertEqual(repo_cnt, 3) + self.assertIn("com.politedroid_4.apk", repo) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_6.apk", repo) + self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(archive_cnt, 1) + self.assertIn("com.politedroid_3.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + + self.update_yaml(common.CONFIG_FILE, {"archive_older": 1}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertIn("com.politedroid_6.apk", repo) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(archive_cnt, 3) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_5.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) + + # disabling deletes from the archive + metadata_path = Path("metadata/com.politedroid.yml") + metadata = metadata_path.read_text() + metadata = re.sub( + "versionCode: 4", "versionCode: 4\n disable: testing deletion", metadata + ) + metadata_path.write_text(metadata) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertIn("com.politedroid_6.apk", repo) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(archive_cnt, 2) + self.assertIn("com.politedroid_3.apk", archive) + self.assertNotIn("com.politedroid_4.apk", archive) + self.assertIn("com.politedroid_5.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertFalse(Path("archive/com.politedroid_4.apk").exists()) + self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) + + # disabling deletes from the repo, and promotes one from the archive + metadata = re.sub( + "versionCode: 6", "versionCode: 6\n disable: testing deletion", metadata + ) + 
metadata_path.write_text(metadata) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + self.assertEqual(repo_cnt, 1) + self.assertIn("com.politedroid_5.apk", repo) + self.assertNotIn("com.politedroid_6.apk", repo) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(archive_cnt, 1) + self.assertIn("com.politedroid_3.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertFalse(Path("archive/com.politedroid_6.apk").exists()) + + def test_that_verify_can_succeed_and_fail(self): + Path("tmp").mkdir() + Path("unsigned").mkdir() + shutil.copy(FILES / "repo/com.politedroid_6.apk", "tmp") + shutil.copy(FILES / "repo/com.politedroid_6.apk", "unsigned") + self.assert_run( + self.fdroid_cmd + + ["verify", "--reuse-remote-apk", "--verbose", "com.politedroid"] + ) + # force a fail + shutil.copy( + FILES / "repo/com.politedroid_5.apk", "unsigned/com.politedroid_6.apk" + ) + self.assert_run_fail( + self.fdroid_cmd + + ["verify", "--reuse-remote-apk", "--verbose", "com.politedroid"] + ) + + def test_allowing_disabled_signatures_in_repo_and_archive(self): + self.fdroid_init_with_prebuilt_keystore() + self.update_yaml( + common.CONFIG_FILE, {"allow_disabled_algorithms": True, "archive_older": 3} + ) + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") + self.update_yaml( + "metadata/info.guardianproject.urzip.yml", + {"Summary": "good test version of urzip"}, + replace=True, + ) + self.update_yaml( + "metadata/org.bitbucket.tickytacky.mirrormirror.yml", + {"Summary": "good MD5 sig, disabled algorithm"}, + replace=True, + ) + for f in Path("metadata").glob("*.yml"): + self.remove_lines(f, ["ArchivePolicy:"]) + for f in itertools.chain( + FILES.glob("urzip-badsig.apk"), + FILES.glob("org.bitbucket.tickytacky.mirrormirror_[0-9].apk"), + FILES.glob("repo/com.politedroid_[0-9].apk"), + ): + shutil.copy(f, "repo") + + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 6) + self.assertEqual(archive_cnt, 2) + self.assertIn("com.politedroid_4.apk", repo) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_6.apk", repo) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) + self.assertNotIn("urzip-badsig.apk", repo) + self.assertNotIn("urzip-badsig.apk", archive) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + self.assertTrue( + Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() + ) + self.assertTrue( + 
Path("repo/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() + ) + self.assertTrue( + Path("repo/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() + ) + self.assertTrue( + Path("repo/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() + ) + self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) + + if not USE_APKSIGNER: + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + repo = Path("repo/index.xml").read_text() + repo_cnt = sum(1 for line in repo.splitlines() if "" in line) + archive = Path("archive/index.xml").read_text() + archive_cnt = sum(1 for line in archive.splitlines() if "" in line) + self.assertEqual(repo_cnt, 3) + self.assertEqual(archive_cnt, 5) + self.assertIn("com.politedroid_4.apk", repo) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_6.apk", repo) + self.assertNotIn("urzip-badsig.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", archive) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", archive) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", archive) + self.assertIn("com.politedroid_3.apk", archive) + self.assertNotIn("urzip-badsig.apk", archive) + self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + self.assertTrue( + Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() + ) + self.assertTrue( + Path("archive/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() + ) + self.assertTrue( + Path("archive/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() + ) + self.assertTrue( + Path("archive/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() + ) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) + + # test unarchiving when disabled_algorithms are allowed again + self.update_yaml(common.CONFIG_FILE, {"allow_disabled_algorithms": True}) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + with open("archive/index.xml") as f: + archive_cnt = sum(1 for line in f if "" in line) + with open("repo/index.xml") as f: + repo_cnt = sum(1 for line in f if "" in line) + self.assertEqual(repo_cnt, 6) + self.assertEqual(archive_cnt, 2) + self.assertIn("com.politedroid_4.apk", repo) + self.assertIn("com.politedroid_5.apk", repo) + self.assertIn("com.politedroid_6.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", repo) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", repo) + self.assertNotIn("urzip-badsig.apk", repo) + self.assertIn("com.politedroid_3.apk", archive) + self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) + self.assertNotIn("urzip-badsig.apk", archive) + self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) + self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) + self.assertTrue( + Path("repo/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() + ) + self.assertTrue( + Path("repo/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() + ) + self.assertTrue( + Path("repo/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() + ) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertTrue( 
+ Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() + ) + self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) + + def test_rename_apks_with_fdroid_update_rename_apks_opt_nosign_opt_for_speed(self): + self.fdroid_init_with_prebuilt_keystore() + self.update_yaml( + common.CONFIG_FILE, + { + "keydname": "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US" + }, + ) + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") + shutil.copy( + FILES / "urzip.apk", + "repo/asdfiuhk urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234 ö.apk", + ) + self.assert_run( + self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] + ) + self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) + index_xml = Path("repo/index.xml").read_text() + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) + self.assertIn("info.guardianproject.urzip_100.apk", index_xml) + + shutil.copy(FILES / "urzip-release.apk", "repo") + self.assert_run( + self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] + ) + self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) + self.assertTrue( + Path("repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() + ) + index_xml = Path("repo/index.xml").read_text() + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) + self.assertIn("info.guardianproject.urzip_100.apk", index_xml) + self.assertIn("info.guardianproject.urzip_100_b4964fd.apk", index_v1_json) + self.assertNotIn("info.guardianproject.urzip_100_b4964fd.apk", index_xml) + + shutil.copy(FILES / "urzip-release.apk", "repo") + self.assert_run( + self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] + ) + self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) + self.assertTrue( + Path("repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() + ) + self.assertTrue( + Path("duplicates/repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() + ) + index_xml = Path("repo/index.xml").read_text() + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) + self.assertIn("info.guardianproject.urzip_100.apk", index_xml) + self.assertIn("info.guardianproject.urzip_100_b4964fd.apk", index_v1_json) + self.assertNotIn("info.guardianproject.urzip_100_b4964fd.apk", index_xml) + + def test_for_added_date_being_set_correctly_for_repo_and_archive(self): + self.fdroid_init_with_prebuilt_keystore() + self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) + Path("metadata").mkdir() + Path("archive").mkdir() + shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") + shutil.copy(FILES / "repo/index-v2.json", "repo") + shutil.copy(FILES / "repo/com.politedroid_5.apk", "archive") + shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") + + # TODO: the timestamp of the oldest apk in the file should be used, even + # if that doesn't exist anymore + self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) + + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + timestamp = int(datetime(2017, 6, 23, tzinfo=timezone.utc).timestamp()) * 1000 + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertIn(f'"added": {timestamp}', index_v1_json) + # the archive will have the added timestamp for the app and for the apk, + # 
both need to be there + with open("archive/index-v1.json") as f: + count = sum(1 for line in f if f'"added": {timestamp}' in line) + self.assertEqual(count, 2) + + def test_whatsnew_from_fastlane_without_cvc_set(self): + self.fdroid_init_with_prebuilt_keystore() + Path("metadata/com.politedroid/en-US/changelogs").mkdir(parents=True) + shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") + shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") + self.remove_lines("metadata/com.politedroid.yml", ["CurrentVersion:"]) + Path("metadata/com.politedroid/en-US/changelogs/6.txt").write_text( + "whatsnew test" + ) + self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertIn("whatsnew test", index_v1_json) + + def test_metadata_checks(self): + Path("repo").mkdir() + shutil.copy(FILES / "urzip.apk", "repo") + # this should fail because there is no metadata + self.assert_run_fail(self.fdroid_cmd + ["build"]) + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/org.smssecure.smssecure.yml", "metadata") + self.assert_run(self.fdroid_cmd + ["readmeta"]) + + def test_ensure_commands_that_dont_need_the_jdk_work_without_a_jdk_configured(self): + Path("repo").mkdir() + Path("metadata").mkdir() + self.update_yaml( + "metadata/fake.yml", + { + "License": "GPL-2.0-only", + "Summary": "Yup still fake", + "Categories": ["Internet"], + "Description": "this is fake", + }, + ) + # fake that no JDKs are available + self.update_yaml( + common.CONFIG_FILE, + {"categories": ["Internet"], "java_paths": {}}, + replace=True, + ) + local_copy_dir = self.testdir / "local_copy_dir/fdroid" + (local_copy_dir / "repo").mkdir(parents=True) + self.update_yaml( + common.CONFIG_FILE, {"local_copy_dir": str(local_copy_dir.resolve())} + ) + + subprocess.run(self.fdroid_cmd + ["checkupdates", "--allow-dirty"]) + if shutil.which("gpg"): + self.assert_run(self.fdroid_cmd + ["gpgsign"]) + self.assert_run(self.fdroid_cmd + ["lint"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assert_run(self.fdroid_cmd + ["rewritemeta", "fake"]) + self.assert_run(self.fdroid_cmd + ["deploy"]) + self.assert_run(self.fdroid_cmd + ["scanner"]) + + # run these to get their output, but the are not setup, so don't fail + subprocess.run(self.fdroid_cmd + ["build"]) + subprocess.run(self.fdroid_cmd + ["import"]) + subprocess.run(self.fdroid_cmd + ["install", "-n"]) + + def test_config_checks_of_local_copy_dir(self): + self.assert_run(self.fdroid_cmd + ["init"]) + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + local_copy_dir = (self.testdir / "local_copy_dir/fdroid").resolve() + local_copy_dir.mkdir(parents=True) + self.assert_run( + self.fdroid_cmd + ["deploy", "--local-copy-dir", local_copy_dir] + ) + self.assert_run( + self.fdroid_cmd + + ["deploy", "--local-copy-dir", local_copy_dir, "--verbose"] + ) + + # this should fail because thisisnotanabsolutepath is not an absolute path + self.assert_run_fail( + self.fdroid_cmd + ["deploy", "--local-copy-dir", "thisisnotanabsolutepath"] + ) + # this should fail because the path doesn't end with "fdroid" + self.assert_run_fail( + self.fdroid_cmd + + [ + "deploy", + "--local-copy-dir", + "/tmp/IReallyDoubtThisPathExistsasdfasdf", # nosec B108 + ] + ) + # this should fail because the dirname path does not exist + self.assert_run_fail( + self.fdroid_cmd + + [ + "deploy", + "--local-copy-dir", + 
"/tmp/IReallyDoubtThisPathExistsasdfasdf/fdroid", # nosec B108 + ] + ) + + def test_setup_a_new_repo_from_scratch_using_android_home_and_do_a_local_sync(self): + self.fdroid_init_with_prebuilt_keystore() + self.copy_apks_into_repo() + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assertIn(" 0) + + def test_check_duplicate_files_are_properly_handled_by_fdroid_update(self): + self.fdroid_init_with_prebuilt_keystore() + Path("metadata").mkdir() + shutil.copy(FILES / "metadata/obb.mainpatch.current.yml", "metadata") + shutil.copy(FILES / "repo/obb.mainpatch.current_1619.apk", "repo") + shutil.copy( + FILES / "repo/obb.mainpatch.current_1619_another-release-key.apk", "repo" + ) + self.assert_run(self.fdroid_cmd + ["update", "--pretty"]) + index_xml = Path("repo/index.xml").read_text() + index_v1_json = Path("repo/index-v1.json").read_text() + self.assertNotIn( + "obb.mainpatch.current_1619_another-release-key.apk", index_xml + ) + self.assertIn("obb.mainpatch.current_1619.apk", index_xml) + self.assertIn("obb.mainpatch.current_1619.apk", index_v1_json) + self.assertIn( + "obb.mainpatch.current_1619_another-release-key.apk", index_v1_json + ) + # die if there are exact duplicates + shutil.copy(FILES / "repo/obb.mainpatch.current_1619.apk", "repo/duplicate.apk") + self.assert_run_fail(self.fdroid_cmd + ["update"]) + + def test_setup_new_repo_from_scratch_using_android_home_env_var_putting_apks_in_repo_first( + self, + ): + Path("repo").mkdir() + self.copy_apks_into_repo() + self.fdroid_init_with_prebuilt_keystore() + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assertIn(" 0) + + def test_setup_a_new_repo_manually_and_generate_a_keystore(self): + self.assertFalse(Path("keystore.p12").exists()) + # this should fail because this repo has no keystore + self.assert_run_fail(self.fdroid_cmd + ["update"]) + self.assert_run(self.fdroid_cmd + ["update", "--create-key"]) + self.assertTrue(Path("keystore.p12").is_file()) + self.copy_apks_into_repo() + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assertIn(" 0) + + def test_setup_a_new_repo_from_scratch_generate_a_keystore_then_add_apk_and_update( + self, + ): + self.assert_run(self.fdroid_cmd + ["init", "--keystore", "keystore.p12"]) + self.assertTrue(Path("keystore.p12").is_file()) + self.copy_apks_into_repo() + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) + self.assert_run(self.fdroid_cmd + ["readmeta"]) + self.assertIn(" 0) + self.assertIn(" 0) + + # now set fake repo_keyalias + self.update_yaml(common.CONFIG_FILE, {"repo_keyalias": "fake"}) + # this should fail because this repo has a bad repo_keyalias + self.assert_run_fail(self.fdroid_cmd + ["update"]) + + # this should fail because a keystore is already there + self.assert_run_fail(self.fdroid_cmd + ["update", "--create-key"]) + + # now actually create the key with the existing settings + Path("keystore.jks").unlink() + self.assert_run(self.fdroid_cmd + ["update", "--create-key"]) + self.assertTrue(Path("keystore.jks").is_file()) + + def test_setup_a_new_repo_from_scratch_using_android_home_env_var_with_git_mirror( + self, + ): + server_git_mirror = self.testdir / "server_git_mirror" + server_git_mirror.mkdir() + self.assert_run( + ["git", "-C", server_git_mirror, "init", "--initial-branch", 
"master"] + ) + self.assert_run( + [ + "git", + "-C", + server_git_mirror, + "config", + "receive.denyCurrentBranch", + "updateInstead", + ] + ) + + self.fdroid_init_with_prebuilt_keystore() + self.update_yaml( + common.CONFIG_FILE, + {"archive_older": 3, "servergitmirrors": str(server_git_mirror)}, + ) + for f in FILES.glob("repo/com.politedroid_[345].apk"): + shutil.copy(f, "repo") + self.assert_run(self.fdroid_cmd + ["update", "--create-metadata"]) + self.assert_run(self.fdroid_cmd + ["deploy"]) + git_mirror = Path("git-mirror") + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_3.apk").is_file()) + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file()) + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file()) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_3.apk").is_file() + ) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file() + ) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file() + ) + (git_mirror / ".git/test-stamp").write_text(str(datetime.now())) + + # add one more APK to trigger archiving + shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") + self.assert_run(self.fdroid_cmd + ["update"]) + self.assert_run(self.fdroid_cmd + ["deploy"]) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertFalse((git_mirror / "fdroid/archive/com.politedroid_3.apk").exists()) + self.assertFalse( + (server_git_mirror / "fdroid/archive/com.politedroid_3.apk").exists() + ) + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file()) + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file()) + self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_6.apk").is_file()) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file() + ) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file() + ) + self.assertTrue( + (server_git_mirror / "fdroid/repo/com.politedroid_6.apk").is_file() + ) + before = sum( + f.stat().st_size for f in (git_mirror / ".git").glob("**/*") if f.is_file() + ) + + self.update_yaml(common.CONFIG_FILE, {"git_mirror_size_limit": "60kb"}) + self.assert_run(self.fdroid_cmd + ["update"]) + self.assert_run(self.fdroid_cmd + ["deploy"]) + self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) + self.assertFalse( + (server_git_mirror / "fdroid/archive/com.politedroid_3.apk").exists() + ) + after = sum( + f.stat().st_size for f in (git_mirror / ".git").glob("**/*") if f.is_file() + ) + self.assertFalse((git_mirror / ".git/test-stamp").exists()) + self.assert_run(["git", "-C", git_mirror, "gc"]) + self.assert_run(["git", "-C", server_git_mirror, "gc"]) + self.assertGreater(before, after) + + def test_sign_binary_repo_in_offline_box_then_publishing_from_online_box(self): + offline_root = self.testdir / "offline_root" + offline_root.mkdir() + local_copy_dir = self.testdir / "local_copy_dir/fdroid" + local_copy_dir.mkdir(parents=True) + online_root = self.testdir / "online_root" + online_root.mkdir() + server_web_root = self.testdir / "server_web_root/fdroid" + server_web_root.mkdir(parents=True) + + # create offline binary transparency log + (offline_root / "binary_transparency").mkdir() + os.chdir(offline_root / "binary_transparency") + self.assert_run(["git", "init", "--initial-branch", "master"]) + + # fake git remote server for binary transparency log + binary_transparency_remote = self.testdir / 
"binary_transparency_remote" + binary_transparency_remote.mkdir() + + # fake git remote server for repo mirror + server_git_mirror = self.testdir / "server_git_mirror" + server_git_mirror.mkdir() + os.chdir(server_git_mirror) + self.assert_run(["git", "init", "--initial-branch", "master"]) + self.assert_run(["git", "config", "receive.denyCurrentBranch", "updateInstead"]) + + os.chdir(offline_root) + self.fdroid_init_with_prebuilt_keystore() + shutil.copytree(FILES / "repo", "repo", dirs_exist_ok=True) + shutil.copytree(FILES / "metadata", "metadata") + Path("unsigned").mkdir() + shutil.copy(FILES / "urzip-release-unsigned.apk", "unsigned") + self.update_yaml( + common.CONFIG_FILE, + { + "archive_older": 3, + "mirrors": [ + "http://foo.bar/fdroid", + "http://asdflkdsfjafdsdfhkjh.onion/fdroid", + ], + "servergitmirrors": str(server_git_mirror), + "local_copy_dir": str(local_copy_dir), + }, + ) + self.assert_run(self.fdroid_cmd + ["update", "--pretty"]) + index_xml = Path("repo/index.xml").read_text() + self.assertIn("", index_xml) + mirror_cnt = sum(1 for line in index_xml.splitlines() if "" in line) + self.assertEqual(mirror_cnt, 2) + + archive_xml = Path("archive/index.xml").read_text() + self.assertIn("/fdroid/archive", archive_xml) + mirror_cnt = sum(1 for line in archive_xml.splitlines() if "" in line) + self.assertEqual(mirror_cnt, 2) + + os.chdir("binary_transparency") + proc = self.assert_run( + ["git", "rev-list", "--count", "HEAD"], capture_output=True + ) + self.assertEqual(int(proc.stdout), 1) + os.chdir(offline_root) + self.assert_run(self.fdroid_cmd + ["deploy", "--verbose"]) + self.assertTrue( + Path(local_copy_dir / "unsigned/urzip-release-unsigned.apk").is_file() + ) + self.assertIn( + "', + } + + anywarns = False + for warn in fdroidserver.lint.check_regexes(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + def test_source_urls(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + + app = { + 'Name': 'My App', + 'Summary': 'just a placeholder', + 'Description': 'This app does all sorts of useful stuff', + } + good_urls = [ + 'https://github.com/Matteljay/mastermindy-android', + 'https://gitlab.com/origin/master', + 'https://gitlab.com/group/subgroup/masterthing', + 'https://raw.githubusercontent.com/Seva-coder/Finder/HEAD/ChangeLog.txt', + 'https://github.com/scoutant/blokish/blob/HEAD/README.md#changelog', + 'https://git.ieval.ro/?p=fonbot.git;a=blob;f=Changes;hb=HEAD', + 'https://htmlpreview.github.io/?https://github.com/YasuakiHonda/Maxima-on-Android-AS/blob/HEAD/app/src/main/assets/About_MoA/index.html', + '', + ] + + anywarns = False + for url in good_urls: + app['SourceCode'] = url + for warn in fdroidserver.lint.check_regexes(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + bad_urls = [ + 'github.com/my/proj', + 'http://github.com/not/secure', + 'https://github.com/foo/bar.git', + 'https://gitlab.com/group/subgroup/project.git', + 'https://raw.githubusercontent.com/Seva-coder/Finder/master/ChangeLog.txt', + 'https://github.com/scoutant/blokish/blob/master/README.md#changelog', + 'http://htmlpreview.github.io/?https://github.com/my/project/blob/HEAD/index.html', + 'http://fdroid.gitlab.io/fdroid-website', + ] + logging.debug('bad urls:') + for url in bad_urls: + anywarns = False + app['SourceCode'] = url + for warn in fdroidserver.lint.check_regexes(app): + anywarns = True + logging.debug(warn) + 
self.assertTrue(anywarns, url + " does not fail lint!") + + def test_check_app_field_types(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + + app = fdroidserver.metadata.App() + app.id = 'fake.app' + app.Name = 'Bad App' + app.Summary = 'We pwn you' + app.Description = 'These are some back' + + fields = { + 'Categories': { + 'good': [ + ['Sports & Health'], + ['Multimedia', 'Graphics'], + ], + 'bad': [ + 'Science & Education', + 'Multimedia,Graphics', + ], + }, + 'WebSite': { + 'good': [ + 'https://homepage.com', + ], + 'bad': [ + [], + [ + 'nope', + ], + 29, + ], + }, + } + + for field, values in fields.items(): + for bad in values['bad']: + anywarns = False + app[field] = bad + for warn in fdroidserver.lint.check_app_field_types(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + for good in values['good']: + anywarns = False + app[field] = good + for warn in fdroidserver.lint.check_app_field_types(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + def test_check_vercode_operation(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + + app = fdroidserver.metadata.App() + app.Name = 'Bad App' + app.Summary = 'We pwn you' + app.Description = 'These are some back' + + good_fields = [ + '6%c', + '%c - 1', + '%c + 10', + '%c*10', + '%c*10 + 3', + '%c*10 + 8', + '%c + 2 ', + '%c + 3', + '%c + 7', + ] + bad_fields = [ + 'open("/etc/passwd")', + '%C + 1', + '%%c * 123', + '123 + %%', + '%c % 7', + ] + + anywarns = False + for good in good_fields: + app.VercodeOperation = [good] + for warn in fdroidserver.lint.check_vercode_operation(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + for bad in bad_fields: + anywarns = False + app.VercodeOperation = [bad] + for warn in fdroidserver.lint.check_vercode_operation(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + def test_check_license_tag_no_custom_pass(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + + app = fdroidserver.metadata.App() + app.License = "GPL-3.0-or-later" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + def test_check_license_tag_no_custom_fail(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + + app = fdroidserver.metadata.App() + app.License = "Adobe-2006" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + def test_check_license_tag_with_custom_pass(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + config['lint_licenses'] = ['fancy-license', 'GPL-3.0-or-later'] + + app = fdroidserver.metadata.App() + app.License = "fancy-license" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + def test_check_license_tag_with_custom_fail(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config 
+ fdroidserver.lint.config = config + config['lint_licenses'] = ['fancy-license', 'GPL-3.0-or-later'] + + app = fdroidserver.metadata.App() + app.License = "Apache-2.0" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + def test_check_license_tag_with_custom_empty(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + config['lint_licenses'] = [] + + app = fdroidserver.metadata.App() + app.License = "Apache-2.0" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + def test_check_license_tag_disabled(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.lint.config = config + config['lint_licenses'] = None + + app = fdroidserver.metadata.App() + app.License = "Apache-2.0" + + anywarns = False + for warn in fdroidserver.lint.check_license_tag(app): + anywarns = True + logging.debug(warn) + self.assertFalse(anywarns) + + def test_check_categories_in_config(self): + fdroidserver.lint.config = { + fdroidserver.common.CATEGORIES_CONFIG_NAME: ['InConfig'] + } + fdroidserver.lint.load_categories_config() + app = fdroidserver.metadata.App({'Categories': ['InConfig']}) + self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) + + def test_check_categories_not_in_config(self): + fdroidserver.lint.config = dict() + fdroidserver.lint.load_categories_config() + app = fdroidserver.metadata.App({'Categories': ['NotInConfig']}) + self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) + + def test_check_categories_empty_is_error(self): + fdroidserver.lint.config = {fdroidserver.common.CATEGORIES_CONFIG_NAME: []} + fdroidserver.lint.load_categories_config() + app = fdroidserver.metadata.App({'Categories': ['something']}) + self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) + + def test_check_categories_old_hardcoded_not_defined(self): + fdroidserver.lint.config = { + fdroidserver.common.CATEGORIES_CONFIG_NAME: ['foo', 'bar'] + } + fdroidserver.lint.load_categories_config() + app = fdroidserver.metadata.App({'Categories': ['Writing']}) + self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) + + def test_check_categories_from_config_yml(self): + """In config.yml, categories is a list.""" + os.chdir(self.testdir) + fdroidserver.common.write_config_file('categories: [foo, bar]\n') + fdroidserver.lint.config = fdroidserver.common.read_config() + fdroidserver.lint.load_categories_config() + self.assertEqual(fdroidserver.lint.CATEGORIES_KEYS, ['foo', 'bar']) + app = fdroidserver.metadata.App({'Categories': ['bar']}) + self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) + + def test_check_categories_from_config_categories_yml(self): + """In config/categories.yml, categories is a localized STRINGMAP dict.""" + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('{foo: {name: foo}, bar: {name: bar}}') + fdroidserver.lint.config = fdroidserver.common.read_config() + fdroidserver.lint.load_categories_config() + self.assertEqual(fdroidserver.lint.CATEGORIES_KEYS, ['foo', 'bar']) + app = fdroidserver.metadata.App({'Categories': ['bar']}) + self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) + + def 
test_lint_config_basic_mirrors_yml(self): + os.chdir(self.testdir) + with Path('mirrors.yml').open('w') as fp: + config_dump([{'url': 'https://example.com/fdroid/repo'}], fp) + self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml')) + + def test_lint_config_mirrors_yml_kenya_countryCode(self): + os.chdir(self.testdir) + with Path('mirrors.yml').open('w') as fp: + config_dump( + [{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'KE'}], fp + ) + self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml')) + + def test_lint_config_mirrors_yml_invalid_countryCode(self): + """WV is "indeterminately reserved" so it should never be used.""" + os.chdir(self.testdir) + with Path('mirrors.yml').open('w') as fp: + config_dump( + [{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'WV'}], fp + ) + self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) + + def test_lint_config_mirrors_yml_alpha3_countryCode(self): + """Only ISO 3166-1 alpha 2 are supported""" + os.chdir(self.testdir) + with Path('mirrors.yml').open('w') as fp: + config_dump( + [{'url': 'https://de.com/fdroid/repo', 'countryCode': 'DEU'}], fp + ) + self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) + + def test_lint_config_mirrors_yml_one_invalid_countryCode(self): + """WV is "indeterminately reserved" so it should never be used.""" + os.chdir(self.testdir) + with Path('mirrors.yml').open('w') as fp: + config_dump( + [ + {'url': 'https://bar.com/fdroid/repo', 'countryCode': 'BA'}, + {'url': 'https://foo.com/fdroid/repo', 'countryCode': 'FO'}, + {'url': 'https://wv.com/fdroid/repo', 'countryCode': 'WV'}, + ], + fp, + ) + self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) + + def test_lint_config_bad_mirrors_yml_dict(self): + os.chdir(self.testdir) + Path('mirrors.yml').write_text('baz: [foo, bar]\n') + with self.assertRaises(TypeError): + fdroidserver.lint.lint_config('mirrors.yml') + + def test_lint_config_bad_mirrors_yml_float(self): + os.chdir(self.testdir) + Path('mirrors.yml').write_text('1.0\n') + with self.assertRaises(TypeError): + fdroidserver.lint.lint_config('mirrors.yml') + + def test_lint_config_bad_mirrors_yml_int(self): + os.chdir(self.testdir) + Path('mirrors.yml').write_text('1\n') + with self.assertRaises(TypeError): + fdroidserver.lint.lint_config('mirrors.yml') + + def test_lint_config_bad_mirrors_yml_str(self): + os.chdir(self.testdir) + Path('mirrors.yml').write_text('foo\n') + with self.assertRaises(TypeError): + fdroidserver.lint.lint_config('mirrors.yml') + + def test_lint_invalid_config_keys(self): + os.chdir(self.testdir) + os.mkdir('config') + config_yml = fdroidserver.common.CONFIG_FILE + with open(f'config/{config_yml}', 'w', encoding='utf-8') as fp: + fp.write('repo:\n invalid_key: test\n') + self.assertFalse(fdroidserver.lint.lint_config(f'config/{config_yml}')) + + def test_lint_invalid_localized_config_keys(self): + os.chdir(self.testdir) + Path('config/en').mkdir(parents=True) + Path('config/en/antiFeatures.yml').write_text('NonFreeNet:\n icon: test.png\n') + self.assertFalse(fdroidserver.lint.lint_config('config/en/antiFeatures.yml')) + + def test_check_certificate_pinned_binaries_empty(self): + fdroidserver.common.config = {} + app = fdroidserver.metadata.App() + app.AllowedAPKSigningKeys = [ + 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' + ] + self.assertEqual( + [], + list(fdroidserver.lint.check_certificate_pinned_binaries(app)), + "when the config is empty, any signing key should be allowed", + ) + + def 
test_lint_known_debug_keys_no_match(self): + fdroidserver.common.config = { + "apk_signing_key_block_list": "a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc" + } + app = fdroidserver.metadata.App() + app.AllowedAPKSigningKeys = [ + '2fd4fd5f54babba4bcb21237809bb653361d0d2583c80964ec89b28a26e9539e' + ] + self.assertEqual( + [], + list(fdroidserver.lint.check_certificate_pinned_binaries(app)), + "A signing key that does not match one in the config should be allowed", + ) + + def test_lint_known_debug_keys(self): + fdroidserver.common.config = { + 'apk_signing_key_block_list': 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' + } + app = fdroidserver.metadata.App() + app.AllowedAPKSigningKeys = [ + 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' + ] + for warn in fdroidserver.lint.check_certificate_pinned_binaries(app): + anywarns = True + logging.debug(warn) + self.assertTrue(anywarns) + + +class LintAntiFeaturesTest(unittest.TestCase): + def setUp(self): + os.chdir(basedir) + fdroidserver.common.config = dict() + fdroidserver.lint.ANTIFEATURES_KEYS = None + fdroidserver.lint.load_antiFeatures_config() + + def test_check_antiFeatures_empty(self): + app = fdroidserver.metadata.App() + self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) + + def test_check_antiFeatures_empty_AntiFeatures(self): + app = fdroidserver.metadata.App() + app['AntiFeatures'] = [] + self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) + + def test_check_antiFeatures(self): + app = fdroidserver.metadata.App() + app['AntiFeatures'] = ['Ads', 'Tracking'] + self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) + + def test_check_antiFeatures_fails_one(self): + app = fdroidserver.metadata.App() + app['AntiFeatures'] = ['Ad'] + self.assertEqual(1, len(list(fdroidserver.lint.check_antiFeatures(app)))) + + def test_check_antiFeatures_fails_many(self): + app = fdroidserver.metadata.App() + app['AntiFeatures'] = ['Adss', 'Tracker', 'NoSourceSince', 'FAKE', 'NonFree'] + self.assertEqual(4, len(list(fdroidserver.lint.check_antiFeatures(app)))) + + def test_check_antiFeatures_build_empty(self): + app = fdroidserver.metadata.App() + app['Builds'] = [{'antifeatures': []}] + self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) + + def test_check_antiFeatures_build(self): + app = fdroidserver.metadata.App() + app['Builds'] = [{'antifeatures': ['Tracking']}] + self.assertEqual(0, len(list(fdroidserver.lint.check_antiFeatures(app)))) + + def test_check_antiFeatures_build_fail(self): + app = fdroidserver.metadata.App() + app['Builds'] = [{'antifeatures': ['Ads', 'Tracker']}] + self.assertEqual(1, len(list(fdroidserver.lint.check_antiFeatures(app)))) + + +class ConfigYmlTest(LintTest): + """Test data formats used in config.yml. + + lint.py uses print() and not logging so hacks are used to control + the output when running in the test runner. 
+ + """ + + def setUp(self): + super().setUp() + self.config_yml = Path(self.testdir) / fdroidserver.common.CONFIG_FILE + + def test_config_yml_int(self): + self.config_yml.write_text('repo_maxage: 1\n') + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_int_bad(self): + self.config_yml.write_text('repo_maxage: "1"\n') + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_str(self): + self.config_yml.write_text('sdk_path: /opt/android-sdk\n') + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_str_list(self): + self.config_yml.write_text('serverwebroot: [server1, server2]\n') + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_str_list_of_dicts(self): + self.config_yml.write_text( + textwrap.dedent( + """\ + serverwebroot: + - url: 'me@b.az:/srv/fdroid' + index_only: true + """ + ) + ) + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_str_list_of_dicts_env(self): + """serverwebroot can be str, list of str, or list of dicts.""" + self.config_yml.write_text('serverwebroot: {env: ANDROID_HOME}\n') + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_str_env(self): + self.config_yml.write_text('sdk_path: {env: ANDROID_HOME}\n') + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_str_bad(self): + self.config_yml.write_text('sdk_path: 1.0\n') + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_bool(self): + self.config_yml.write_text("deploy_process_logs: true\n") + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_bool_bad(self): + self.config_yml.write_text('deploy_process_logs: 2342fe23\n') + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + def test_config_yml_dict(self): + self.config_yml.write_text("keyaliases: {com.example: '@com.foo'}\n") + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_dict_bad(self): + self.config_yml.write_text('keyaliases: 2342fe23\n') + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_bad_key_name(self): + self.config_yml.write_text('keyalias: 2342fe23\n') + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_bad_value_for_all_keys(self): + """Check all config keys with a bad value.""" + for key in fdroidserver.lint.check_config_keys: + if key in fdroidserver.lint.bool_keys: + value = 'foobar' + else: + value = 'false' + self.config_yml.write_text(f'{key}: {value}\n') + self.assertFalse( + fdroidserver.lint.lint_config(self.config_yml), + f'{key} should fail on value of "{value}"', + ) + + def test_config_yml_keyaliases(self): + self.config_yml.write_text( + textwrap.dedent( + """\ + keyaliases: + com.example: myalias + com.foo: '@com.example' + """ + ) + ) + self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def 
test_config_yml_keyaliases_bad_str(self): + """The keyaliases: value is a dict not a str.""" + self.config_yml.write_text("keyaliases: '@com.example'\n") + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) + + @mock.patch('builtins.print', mock.Mock()) # hide error message + def test_config_yml_keyaliases_bad_list(self): + """The keyaliases: value is a dict not a list.""" + self.config_yml.write_text( + textwrap.dedent( + """\ + keyaliases: + - com.example: myalias + """ + ) + ) + self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100755 index 00000000..68984088 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 + +import os +import pkgutil +import tempfile +import textwrap +import unittest +from unittest import mock + +import fdroidserver.__main__ + +from .shared_test_code import TmpCwd, TmpPyPath + + +class MainTest(unittest.TestCase): + '''this tests fdroid.py''' + + def test_COMMANDS_check(self): + """make sure the built in sub-command defs didn't change unintentionally""" + self.assertListEqual( + [x for x in fdroidserver.__main__.COMMANDS], + [ + 'build', + 'init', + 'publish', + 'gpgsign', + 'update', + 'deploy', + 'verify', + 'checkupdates', + 'import', + 'install', + 'readmeta', + 'rewritemeta', + 'lint', + 'scanner', + 'signindex', + 'btlog', + 'signatures', + 'nightly', + 'mirror', + ], + ) + + def test_call_init(self): + co = mock.Mock() + with mock.patch('sys.argv', ['', 'init', '-h']): + with mock.patch('fdroidserver.init.main', co): + with mock.patch('sys.exit') as exit_mock: + fdroidserver.__main__.main() + # note: this is sloppy, if `init` changes + # this might need changing too + exit_mock.assert_called_once_with(0) + co.assert_called_once_with() + + def test_call_deploy(self): + co = mock.Mock() + with mock.patch('sys.argv', ['', 'deploy', '-h']): + with mock.patch('fdroidserver.deploy.main', co): + with mock.patch('sys.exit') as exit_mock: + fdroidserver.__main__.main() + # note: this is sloppy, if `deploy` changes + # this might need changing too + exit_mock.assert_called_once_with(0) + co.assert_called_once_with() + + def test_find_plugins(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy1.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + main = lambda: 'all good'""" + ) + ) + with TmpPyPath(tmpdir): + plugins = fdroidserver.__main__.find_plugins() + self.assertIn('testy1', plugins.keys()) + self.assertEqual(plugins['testy1']['summary'], 'ttt') + self.assertEqual( + __import__( + plugins['testy1']['name'], None, None, ['testy1'] + ).main(), + 'all good', + ) + + def test_main_plugin_lambda(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy2.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + main = lambda: print('all good')""" + ) + ) + with TmpPyPath(tmpdir): + with mock.patch('sys.argv', ['', 'testy2']): + with mock.patch('sys.exit') as exit_mock: + fdroidserver.__main__.main() + exit_mock.assert_called_once_with(0) + + def test_main_plugin_def(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy3.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + def main(): + print('all good')""" + ) + ) + with TmpPyPath(tmpdir): + with mock.patch('sys.argv', ['', 'testy3']): + with mock.patch('sys.exit') as exit_mock: + 
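# a def-style plugin main() should be discovered and dispatched like a built-in subcommand, exiting with 0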
fdroidserver.__main__.main() + exit_mock.assert_called_once_with(0) + + def test_main_broken_plugin(self): + """making sure broken plugins get their exceptions through""" + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy4.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + def main(): + raise Exception("this plugin is broken")""" + ) + ) + with TmpPyPath(tmpdir): + with mock.patch('sys.argv', ['', 'testy4']): + with self.assertRaisesRegex(Exception, "this plugin is broken"): + fdroidserver.__main__.main() + + def test_main_malicious_plugin(self): + """The purpose of this test is to make sure code in plugins + doesn't get executed unintentionally. + """ + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy5.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + raise Exception("this plugin is malicious") + def main(): + print("evil things")""" + ) + ) + with TmpPyPath(tmpdir): + with mock.patch('sys.argv', ['', 'lint']): + with mock.patch('sys.exit') as exit_mock: + fdroidserver.__main__.main() + # note: this is sloppy, if `lint` changes + # this might need changing too + exit_mock.assert_called_once_with(0) + + def test_main_prevent_plugin_override(self): + """making sure build-in subcommands cannot be overridden by plugins""" + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_signatures.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + def main(): + raise("plugin overrides don't get prevent!")""" + ) + ) + with TmpPyPath(tmpdir): + with mock.patch('sys.argv', ['', 'signatures']): + with mock.patch('sys.exit') as exit_mock: + fdroidserver.__main__.main() + # note: this is sloppy, if `signatures` changes + # this might need changing too + self.assertEqual(exit_mock.call_count, 2) + + def test_preparse_plugin_lookup_bad_name(self): + self.assertRaises( + ValueError, + fdroidserver.__main__.preparse_plugin, + "some.package", + "/non/existent/module/path", + ) + + def test_preparse_plugin_lookup_bad_path(self): + self.assertRaises( + ValueError, + fdroidserver.__main__.preparse_plugin, + "fake_module_name", + "/non/existent/module/path", + ) + + def test_preparse_plugin_lookup_summary_missing(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy6.py', 'w') as f: + f.write("main = lambda: print('all good')") + with TmpPyPath(tmpdir): + p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] + module_dir = p[0][0].path + module_name = p[0][1] + self.assertRaises( + NameError, + fdroidserver.__main__.preparse_plugin, + module_name, + module_dir, + ) + + def test_preparse_plugin_lookup_module_file(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with open('fdroid_testy7.py', 'w') as f: + f.write( + textwrap.dedent( + """\ + fdroid_summary = "ttt" + main = lambda: pritn('all good')""" + ) + ) + with TmpPyPath(tmpdir): + p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] + module_path = p[0][0].path + module_name = p[0][1] + d = fdroidserver.__main__.preparse_plugin(module_name, module_path) + self.assertDictEqual(d, {'name': 'fdroid_testy7', 'summary': 'ttt'}) + + def test_preparse_plugin_lookup_module_dir(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + os.mkdir(os.path.join(tmpdir, 'fdroid_testy8')) + with open('fdroid_testy8/__main__.py', 'w') as f: + f.write( + textwrap.dedent( + """\ 
+ fdroid_summary = "ttt" + main = lambda: print('all good')""" + ) + ) + with open('fdroid_testy8/__init__.py', 'w') as f: + pass + with TmpPyPath(tmpdir): + p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] + module_path = p[0][0].path + module_name = p[0][1] + d = fdroidserver.__main__.preparse_plugin(module_name, module_path) + self.assertDictEqual(d, {'name': 'fdroid_testy8', 'summary': 'ttt'}) diff --git a/tests/test_metadata.py b/tests/test_metadata.py new file mode 100755 index 00000000..84040024 --- /dev/null +++ b/tests/test_metadata.py @@ -0,0 +1,2427 @@ +#!/usr/bin/env python3 + +import copy +import io +import os +import random +import shutil +import tempfile +import textwrap +import unittest +from collections import OrderedDict +from pathlib import Path +from unittest import mock + +import ruamel.yaml + +import fdroidserver +from fdroidserver import metadata +from fdroidserver._yaml import yaml +from fdroidserver.common import DEFAULT_LOCALE +from fdroidserver.exception import MetaDataException + +from .shared_test_code import TmpCwd, mkdtemp + +basedir = Path(__file__).parent + + +def _get_mock_mf(s): + mf = io.StringIO(s) + mf.name = 'mock_filename.yaml' + return mf + + +class MetadataTest(unittest.TestCase): + '''fdroidserver/metadata.py''' + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + fdroidserver.metadata.warnings_action = 'error' + + def tearDown(self): + # auto-generated dirs by functions, not tests, so they are not always cleaned up + self._td.cleanup() + try: + os.rmdir("srclibs") + except OSError: + pass + try: + os.rmdir("tmp") + except OSError: + pass + + def test_fieldtypes_key_exist(self): + for k in fdroidserver.metadata.fieldtypes: + self.assertIn(k, fdroidserver.metadata.yaml_app_fields) + + def test_build_flagtypes_key_exist(self): + for k in fdroidserver.metadata.flagtypes: + self.assertIn(k, fdroidserver.metadata.build_flags) + + def test_FieldValidator_BitcoinAddress(self): + validator = None + for vali in fdroidserver.metadata.valuetypes: + if vali.name == 'Bitcoin address': + validator = vali + break + self.assertIsNotNone(validator, "could not find 'Bitcoin address' validator") + + # some valid addresses (P2PKH, P2SH, Bech32) + self.assertIsNone( + validator.check('1BrrrrErsrWetrTrnrrrrm4GFg7xJaNVN2', 'fake.app.id') + ) + self.assertIsNone( + validator.check('3JrrrrWrEZr3rNrrvrecrnyirrnqRhWNLy', 'fake.app.id') + ) + self.assertIsNone( + validator.check('bc1qar0srrr7xrkvr5lr43lrdnwrre5rgtrzrf5rrq', 'fake.app.id') + ) + + # some invalid addresses + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '21BvMrSYsrWrtrrlL5A10mlGFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '5Hrgr3ur5rGLrfKrrrrrrHSrqJrroGrrzrQrrrrrrLNrsrDrrrA', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '92rr46rUrgTrrromrVrirW6r1rrrdrerrdbJrrrhrCsYrrrrrrc', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'K1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'L1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'tb1qw5r8drrejxrrg4y5rrrrrraryrrrrwrkxrjrsx', + 'fake.app.id', + ) + + def 
test_FieldValidator_LitecoinAddress(self): + validator = None + for vali in fdroidserver.metadata.valuetypes: + if vali.name == 'Litecoin address': + validator = vali + break + self.assertIsNotNone(validator, "could not find 'Litecoin address' validator") + + # some valid addresses (L, M, 3, segwit) + self.assertIsNone( + validator.check('LgeGrrrrJAxyXprrPrrBrrX5Qrrrrrrrrd', 'fake.app.id') + ) + self.assertIsNone( + validator.check('MrrrrrrrJAxyXpanPtrrRAX5QHxvUJo8id', 'fake.app.id') + ) + self.assertIsNone(validator.check('3rereVr9rAryrranrrrrrAXrrHx', 'fake.app.id')) + self.assertIsNone( + validator.check( + 'ltc1q7euacwhn6ef99vcfa57mute92q572aqsc4c2j5', 'fake.app.id' + ) + ) + + # some invalid addresses (various special use/testnet addresses, invalid chars) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '21BvMrSYsrWrtrrrn5Au4l4GFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '5Hrgr3ur5rGLrfKrrrrrr1SrqJrroGrrzrQrrrrrrLNrsrDrrrA', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + '92rr46rUrgTrrromrVrirW6r1rrrdrerrdbJrrrhrCsYrrrrrrc', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'K1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'L0000rSYsrWrtrrrn5Au4m4GFr7rrarrN2', + 'fake.app.id', + ) + self.assertRaises( + fdroidserver.exception.MetaDataException, + validator.check, + 'tb1qw5r8drrejxrrg4y5rrrrrraryrrrrwrkxrjrsx', + 'fake.app.id', + ) + + def test_valid_funding_yml_regex(self): + """Check the regex can find all the cases""" + with (basedir / 'funding-usernames.yaml').open() as fp: + data = yaml.load(fp) + + for k, entries in data.items(): + for entry in entries: + m = fdroidserver.metadata.VALID_USERNAME_REGEX.match(entry) + if k == 'custom': + pass + elif k == 'bad': + self.assertIsNone( + m, 'this is an invalid %s username: {%s}' % (k, entry) + ) + else: + self.assertIsNotNone( + m, 'this is a valid %s username: {%s}' % (k, entry) + ) + + @mock.patch('git.Repo', mock.Mock()) + @mock.patch('logging.error') + def test_read_metadata(self, logging_error): + """Read specified metadata files included in tests/, compare to stored output""" + + self.maxDiff = None + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.metadata.warnings_action = None + + apps = fdroidserver.metadata.read_metadata() + for appid in ( + 'app.with.special.build.params', + 'org.smssecure.smssecure', + 'org.adaway', + 'org.videolan.vlc', + 'com.politedroid', + ): + savepath = Path('metadata/dump') / (appid + '.yaml') + frommeta = dict(apps[appid]) + self.assertTrue(appid in apps) + with savepath.open('r') as f: + from_yaml = yaml.load(f) + self.assertEqual(frommeta, from_yaml) + # comment above assert and uncomment below to update test + # files when new metadata fields are added + # with savepath.open('w') as fp: + # yaml.default_flow_style = False + # yaml.register_class(metadata.Build) + # yaml.dump(frommeta, fp) + + # errors are printed when .yml overrides localized + logging_error.assert_called() + self.assertEqual(3, len(logging_error.call_args_list)) + + @mock.patch('git.Repo', mock.Mock()) + def test_metadata_overrides_dot_fdroid_yml(self): + """Fields in metadata files should override anything in .fdroid.yml.""" + app = 
metadata.parse_metadata('metadata/info.guardianproject.urzip.yml') + self.assertEqual(app['Summary'], '一个实用工具,获取已安装在您的设备上的应用的有关信息') + + def test_dot_fdroid_yml_works_without_git(self): + """Parsing should work if .fdroid.yml is present and it is not a git repo.""" + os.chdir(self.testdir) + yml = Path('metadata/test.yml') + yml.parent.mkdir() + with yml.open('w') as fp: + fp.write('Repo: https://example.com/not/git/or/anything') + fdroid_yml = Path('build/test/.fdroid.yml') + fdroid_yml.parent.mkdir(parents=True) + with fdroid_yml.open('w') as fp: + fp.write('OpenCollective: test') + metadata.parse_metadata(yml) # should not throw an exception + + @mock.patch('git.Repo', mock.Mock()) + @mock.patch('logging.error') + def test_rewrite_yaml_fakeotaupdate(self, logging_error): + with tempfile.TemporaryDirectory() as testdir: + testdir = Path(testdir) + fdroidserver.common.config = {'accepted_formats': ['yml']} + fdroidserver.metadata.warnings_action = None + + # rewrite metadata + allapps = fdroidserver.metadata.read_metadata() + for appid, app in allapps.items(): + if appid == 'fake.ota.update': + fdroidserver.metadata.write_metadata( + testdir / (appid + '.yml'), app + ) + + # assert rewrite result + self.maxDiff = None + file_name = 'fake.ota.update.yml' + self.assertEqual( + (testdir / file_name).read_text(encoding='utf-8'), + (Path('metadata-rewrite-yml') / file_name).read_text(encoding='utf-8'), + ) + + # errors are printed when .yml overrides localized + logging_error.assert_called() + self.assertEqual(3, len(logging_error.call_args_list)) + + @mock.patch('git.Repo', mock.Mock()) + def test_rewrite_yaml_fdroidclient(self): + with tempfile.TemporaryDirectory() as testdir: + testdir = Path(testdir) + fdroidserver.common.config = {'accepted_formats': ['yml']} + + # rewrite metadata + allapps = fdroidserver.metadata.read_metadata() + for appid, app in allapps.items(): + if appid == 'org.fdroid.fdroid': + fdroidserver.metadata.write_metadata( + testdir / (appid + '.yml'), app + ) + + # assert rewrite result + self.maxDiff = None + file_name = 'org.fdroid.fdroid.yml' + self.assertEqual( + (testdir / file_name).read_text(encoding='utf-8'), + (Path('metadata-rewrite-yml') / file_name).read_text(encoding='utf-8'), + ) + + @mock.patch('git.Repo', mock.Mock()) + def test_rewrite_yaml_special_build_params(self): + """Test rewriting a plain YAML metadata file without localized files.""" + os.chdir(self.testdir) + os.mkdir('metadata') + appid = 'app.with.special.build.params' + file_name = Path('metadata/%s.yml' % appid) + shutil.copy(basedir / file_name, file_name) + + # rewrite metadata + allapps = fdroidserver.metadata.read_metadata({appid: -1}) + for appid, app in allapps.items(): + metadata.write_metadata(file_name, app) + + # assert rewrite result + self.maxDiff = None + self.assertEqual( + file_name.read_text(), + (basedir / 'metadata-rewrite-yml' / file_name.name).read_text(), + ) + + def test_normalize_type_string(self): + """TYPE_STRING currently has some quirky behavior.""" + self.assertEqual('123456', metadata._normalize_type_string(123456)) + self.assertEqual('1.0', metadata._normalize_type_string(1.0)) + self.assertEqual('0', metadata._normalize_type_string(0)) + self.assertEqual('0.0', metadata._normalize_type_string(0.0)) + self.assertEqual('0.1', metadata._normalize_type_string(0.1)) + self.assertEqual('[]', metadata._normalize_type_string(list())) + self.assertEqual('{}', metadata._normalize_type_string(dict())) + self.assertEqual('false', metadata._normalize_type_string(False)) + 
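# booleans should be normalized to lowercase YAML-style strings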
self.assertEqual('true', metadata._normalize_type_string(True)) + + def test_normalize_type_string_sha256(self): + """SHA-256 values are TYPE_STRING, which YAML can parse as decimal ints.""" + for v in range(1, 1000): + s = '%064d' % (v * (10**51)) + self.assertEqual(s, metadata._normalize_type_string(yaml.load(s))) + + def test_normalize_type_stringmap_none(self): + self.assertEqual(dict(), metadata._normalize_type_stringmap('key', None)) + + def test_normalize_type_stringmap_empty_list(self): + self.assertEqual(dict(), metadata._normalize_type_stringmap('AntiFeatures', [])) + + def test_normalize_type_stringmap_simple_list_format(self): + self.assertEqual( + {'Ads': {}, 'Tracking': {}}, + metadata._normalize_type_stringmap('AntiFeatures', ['Ads', 'Tracking']), + ) + + def test_normalize_type_int(self): + """TYPE_INT should be an int whenever possible.""" + self.assertEqual(0, metadata._normalize_type_int('key', 0)) + self.assertEqual(1, metadata._normalize_type_int('key', 1)) + self.assertEqual(-5, metadata._normalize_type_int('key', -5)) + self.assertEqual(0, metadata._normalize_type_int('key', '0')) + self.assertEqual(1, metadata._normalize_type_int('key', '1')) + self.assertEqual(-5, metadata._normalize_type_int('key', '-5')) + self.assertEqual( + 12345678901234567890, + metadata._normalize_type_int('key', 12345678901234567890), + ) + + def test_normalize_type_int_fails(self): + with self.assertRaises(MetaDataException): + metadata._normalize_type_int('key', '1a') + with self.assertRaises(MetaDataException): + metadata._normalize_type_int('key', 1.1) + with self.assertRaises(MetaDataException): + metadata._normalize_type_int('key', True) + + def test_normalize_type_list(self): + """TYPE_LIST is always a list of strings, no matter what YAML thinks.""" + k = 'placeholder' + self.assertEqual(['1.0'], metadata._normalize_type_list(k, 1.0)) + self.assertEqual(['1234567890'], metadata._normalize_type_list(k, 1234567890)) + self.assertEqual(['false'], metadata._normalize_type_list(k, False)) + self.assertEqual(['true'], metadata._normalize_type_list(k, True)) + self.assertEqual(['foo'], metadata._normalize_type_list(k, 'foo')) + self.assertEqual([], metadata._normalize_type_list(k, list())) + self.assertEqual([], metadata._normalize_type_list(k, tuple())) + self.assertEqual([], metadata._normalize_type_list(k, set())) + self.assertEqual(['0', '1', '2'], metadata._normalize_type_list(k, {0, 1, 2})) + self.assertEqual( + ['a', 'b', 'c', '0', '0.0'], + metadata._normalize_type_list(k, yaml.load('[a, b, c, 0, 0.0]')), + ) + self.assertEqual( + ['1', '1.0', 's', 'true', '{}'], + metadata._normalize_type_list(k, yaml.load('[1, 1.0, s, true, {}]')), + ) + self.assertEqual( + ['1', '1.0', 's', 'true', '{}'], + metadata._normalize_type_list(k, (1, 1.0, 's', True, dict())), + ) + + def test_normalize_type_list_fails(self): + with self.assertRaises(MetaDataException): + metadata._normalize_type_list('placeholder', dict()) + + def test_post_parse_yaml_metadata(self): + yamldata = dict() + metadata.post_parse_yaml_metadata(yamldata) + + yamldata[ + 'AllowedAPKSigningKeys' + ] = 'c03dac71394d6c26766f1b04d3e31cfcac5d03b55d8aa40cc9b9fa6b74354c66' + metadata.post_parse_yaml_metadata(yamldata) + + def test_post_parse_yaml_metadata_ArchivePolicy_int(self): + for i in range(20): + yamldata = {'ArchivePolicy': i} + metadata.post_parse_yaml_metadata(yamldata) + self.assertEqual(i, yamldata['ArchivePolicy']) + + def test_post_parse_yaml_metadata_ArchivePolicy_string(self): + for i in range(20): + yamldata = 
{'ArchivePolicy': '%d' % i} + metadata.post_parse_yaml_metadata(yamldata) + self.assertEqual(i, yamldata['ArchivePolicy']) + + def test_post_parse_yaml_metadata_ArchivePolicy_versions(self): + """Test that the old format still works.""" + for i in range(20): + yamldata = {'ArchivePolicy': '%d versions' % i} + metadata.post_parse_yaml_metadata(yamldata) + self.assertEqual(i, yamldata['ArchivePolicy']) + + def test_post_parse_yaml_metadata_fails(self): + yamldata = {'AllowedAPKSigningKeys': {'bad': 'dict-placement'}} + with self.assertRaises(MetaDataException): + metadata.post_parse_yaml_metadata(yamldata) + + def test_post_parse_yaml_metadata_0padding_sha256(self): + """SHA-256 values are strings, but YAML 1.2 will read some as decimal ints.""" + v = '0027293472934293872934729834729834729834729834792837487293847926' + yamldata = yaml.load('AllowedAPKSigningKeys: ' + v) + metadata.post_parse_yaml_metadata(yamldata) + self.assertEqual(yamldata['AllowedAPKSigningKeys'], [v]) + + def test_post_parse_yaml_metadata_builds(self): + yamldata = OrderedDict() + builds = [] + yamldata['Builds'] = builds + build = OrderedDict() + builds.append(build) + + build['versionCode'] = 1.1 + self.assertRaises( + fdroidserver.exception.MetaDataException, + fdroidserver.metadata.post_parse_yaml_metadata, + yamldata, + ) + + build['versionCode'] = '1a' + self.assertRaises( + fdroidserver.exception.MetaDataException, + fdroidserver.metadata.post_parse_yaml_metadata, + yamldata, + ) + + build['versionCode'] = 1 + build['versionName'] = 1 + fdroidserver.metadata.post_parse_yaml_metadata(yamldata) + self.assertNotEqual(1, yamldata['Builds'][0]['versionName']) + self.assertEqual('1', yamldata['Builds'][0]['versionName']) + self.assertEqual(1, yamldata['Builds'][0]['versionCode']) + + build['versionName'] = 1.0 + fdroidserver.metadata.post_parse_yaml_metadata(yamldata) + self.assertNotEqual(1.0, yamldata['Builds'][0]['versionName']) + self.assertEqual('1.0', yamldata['Builds'][0]['versionName']) + + build['commit'] = 1.0 + fdroidserver.metadata.post_parse_yaml_metadata(yamldata) + self.assertNotEqual(1.0, yamldata['Builds'][0]['commit']) + self.assertEqual('1.0', yamldata['Builds'][0]['commit']) + + teststr = '98234fab134b' + build['commit'] = teststr + fdroidserver.metadata.post_parse_yaml_metadata(yamldata) + self.assertEqual(teststr, yamldata['Builds'][0]['commit']) + + testcommitid = 1234567890 + build['commit'] = testcommitid + fdroidserver.metadata.post_parse_yaml_metadata(yamldata) + self.assertNotEqual(testcommitid, yamldata['Builds'][0]['commit']) + self.assertEqual('1234567890', yamldata['Builds'][0]['commit']) + + def test_read_metadata_sort_by_time(self): + with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): + testdir = Path(testdir) + metadatadir = testdir / 'metadata' + metadatadir.mkdir() + + randomlist = [] + randomapps = list((basedir / 'metadata').glob('*.yml')) + random.shuffle(randomapps) + i = 1 + for f in randomapps: + shutil.copy(f, metadatadir) + new = metadatadir / f.name + stat = new.stat() + os.utime(new, (stat.st_ctime, stat.st_mtime + i)) + # prepend new item so newest is always first + randomlist = [f.stem] + randomlist + i += 1 + allapps = fdroidserver.metadata.read_metadata(sort_by_time=True) + allappids = [] + for appid, app in allapps.items(): + allappids.append(appid) + self.assertEqual(randomlist, allappids) + + def test_parse_yaml_metadata_0size_file(self): + self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf(''))) + + def 
test_parse_yaml_metadata_empty_dict_file(self): + self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf('{}'))) + + def test_parse_yaml_metadata_empty_string_file(self): + self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf('""'))) + + def test_parse_yaml_metadata_fail_on_root_list(self): + with self.assertRaises(MetaDataException): + metadata.parse_yaml_metadata(_get_mock_mf('-')) + with self.assertRaises(MetaDataException): + metadata.parse_yaml_metadata(_get_mock_mf('[]')) + with self.assertRaises(MetaDataException): + metadata.parse_yaml_metadata(_get_mock_mf('- AutoName: fake')) + + def test_parse_yaml_metadata_type_list_str(self): + v = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' + mf = _get_mock_mf('AllowedAPKSigningKeys: "%s"' % v) + self.assertEqual( + v, + metadata.parse_yaml_metadata(mf)['AllowedAPKSigningKeys'][0], + ) + + def test_parse_yaml_metadata_type_list_build_str(self): + mf = _get_mock_mf('Builds: [{versionCode: 1, rm: s}]') + self.assertEqual( + metadata.parse_yaml_metadata(mf), + {'Builds': [{'rm': ['s'], 'versionCode': 1}]}, + ) + + def test_parse_yaml_metadata_app_type_list_fails(self): + mf = _get_mock_mf('AllowedAPKSigningKeys: {t: f}') + with self.assertRaises(MetaDataException): + metadata.parse_yaml_metadata(mf) + + def test_parse_yaml_metadata_build_type_list_fails(self): + mf = _get_mock_mf('Builds: [{versionCode: 1, rm: {bad: dict-placement}}]') + with self.assertRaises(MetaDataException): + metadata.parse_yaml_metadata(mf) + + def test_parse_yaml_metadata_unknown_app_field(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AutoName: F-Droid + RepoType: git + Builds: [] + bad: value""" + ) + ) + mf.name = 'mock_filename.yaml' + with self.assertRaises(MetaDataException): + fdroidserver.metadata.parse_yaml_metadata(mf) + + def test_parse_yaml_metadata_unknown_build_flag(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AutoName: F-Droid + RepoType: git + Builds: + - bad: value""" + ) + ) + mf.name = 'mock_filename.yaml' + with self.assertRaises(MetaDataException): + fdroidserver.metadata.parse_yaml_metadata(mf) + + @mock.patch('logging.warning') + @mock.patch('logging.error') + def test_parse_yaml_metadata_continue_on_warning(self, _error, _warning): + """When errors are disabled, parsing should provide something that can work. + + When errors are disabled, then it should try to give data that + lets something happen. A zero-length file is valid for + operation, it just declares a Application ID as "known" and + nothing else. This example gives a list as the base in the + .yml file, which is unparsable, so it gives a warning message + and carries on with a blank dict. 
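+ Both a warning and an error are expected here, which the two mock assertions below verify.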
+ + """ + fdroidserver.metadata.warnings_action = None + mf = _get_mock_mf('[AntiFeatures: Tracking]') + self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict()) + _warning.assert_called_once() + _error.assert_called_once() + + def test_parse_localized_antifeatures(self): + """Unit test based on reading files included in the test repo.""" + app = dict() + app['id'] = 'app.with.special.build.params' + metadata.parse_localized_antifeatures(app) + self.maxDiff = None + self.assertEqual( + app, + { + 'AntiFeatures': { + 'Ads': {'en-US': 'please no'}, + 'NoSourceSince': {'en-US': 'no activity\n'}, + }, + 'Builds': [ + { + 'versionCode': 50, + 'antifeatures': { + 'Ads': { + 'en-US': 'includes ad lib\n', + 'zh-CN': '包括广告图书馆\n', + }, + 'Tracking': {'en-US': 'standard suspects\n'}, + }, + }, + { + 'versionCode': 49, + 'antifeatures': { + 'Tracking': {'zh-CN': 'Text from zh-CN/49_Tracking.txt'}, + }, + }, + ], + 'id': app['id'], + }, + ) + + def test_parse_localized_antifeatures_passthrough(self): + """Test app values are cleanly passed through if no localized files.""" + before = { + 'id': 'placeholder', + 'AntiFeatures': {'NonFreeDep': {}}, + 'Builds': [{'versionCode': 999, 'antifeatures': {'zero': {}, 'one': {}}}], + } + after = copy.deepcopy(before) + with tempfile.TemporaryDirectory() as testdir: + os.chdir(testdir) + os.mkdir('metadata') + os.mkdir(os.path.join('metadata', after['id'])) + metadata.parse_localized_antifeatures(after) + self.assertEqual(before, after) + + def test_parse_metadata_antifeatures_NoSourceSince(self): + """Test that NoSourceSince gets added as an Anti-Feature.""" + os.chdir(self.testdir) + yml = Path('metadata/test.yml') + yml.parent.mkdir() + with yml.open('w') as fp: + fp.write('AntiFeatures: Ads\nNoSourceSince: gone\n') + app = metadata.parse_metadata(yml) + self.assertEqual( + app['AntiFeatures'], {'Ads': {}, 'NoSourceSince': {DEFAULT_LOCALE: 'gone'}} + ) + + @mock.patch('logging.error') + def test_yml_overrides_localized_antifeatures(self, logging_error): + """Definitions in .yml files should override the localized versions.""" + app = metadata.parse_metadata('metadata/app.with.special.build.params.yml') + + self.assertEqual(app['AntiFeatures'], {'Tracking': {}}) + + self.assertEqual(49, app['Builds'][-3]['versionCode']) + self.assertEqual( + app['Builds'][-3]['antifeatures'], + {'Tracking': {DEFAULT_LOCALE: 'Uses the Facebook SDK.'}}, + ) + + self.assertEqual(50, app['Builds'][-2]['versionCode']) + self.assertEqual( + app['Builds'][-2]['antifeatures'], + { + 'Ads': { + 'en-US': 'includes ad lib\n', + 'zh-CN': '包括广告图书馆\n', + }, + 'Tracking': {'en-US': 'standard suspects\n'}, + }, + ) + # errors are printed when .yml overrides localized + logging_error.assert_called() + self.assertEqual(3, len(logging_error.call_args_list)) + + def test_parse_yaml_srclib_corrupt_file(self): + with tempfile.TemporaryDirectory() as testdir: + testdir = Path(testdir) + srclibfile = testdir / 'srclib/mock.yml' + srclibfile.parent.mkdir() + with srclibfile.open('w') as fp: + fp.write( + textwrap.dedent( + """ + - RepoType: git + - Repo: https://github.com/realm/realm-js.git + """ + ) + ) + with self.assertRaises(MetaDataException): + fdroidserver.metadata.parse_yaml_srclib(srclibfile) + + def test_write_yaml_with_placeholder_values(self): + mf = io.StringIO() + + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app.SourceCode = "https://gitlab.com/fdroid/fdroidclient.git" + app.IssueTracker = "https://gitlab.com/fdroid/fdroidclient/issues" + app.RepoType = 
'git' + app.Repo = 'https://gitlab.com/fdroid/fdroidclient.git' + app.AutoUpdateMode = 'None' + app.UpdateCheckMode = 'Tags' + build = fdroidserver.metadata.Build() + build.versionName = 'Unknown' # taken from fdroidserver/import.py + build.versionCode = 0 # taken from fdroidserver/import.py + build.disable = 'Generated by import.py ...' + build.commit = 'Unknown' + build.gradle = ['yes'] + app['Builds'] = [build] + + fdroidserver.metadata.write_yaml(mf, app) + + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + SourceCode: https://gitlab.com/fdroid/fdroidclient.git + IssueTracker: https://gitlab.com/fdroid/fdroidclient/issues + + RepoType: git + Repo: https://gitlab.com/fdroid/fdroidclient.git + + Builds: + - versionName: Unknown + versionCode: 0 + disable: Generated by import.py ... + commit: Unknown + gradle: + - yes + + AutoUpdateMode: None + UpdateCheckMode: Tags + """ + ), + ) + + def test_parse_yaml_metadata_prebuild_list(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AutoName: F-Droid + RepoType: git + Builds: + - versionCode: 1 + versionName: v0.1.0 + sudo: + - apt-get update + - apt-get install -y whatever + - sed -i -e 's/> /a/file + build: + - ./gradlew someSpecialTask + - sed -i 'd/that wrong config/' gradle.properties + - ./gradlew compile + """ + ) + ) + mf.name = 'mock_filename.yaml' + mf.seek(0) + result = fdroidserver.metadata.parse_yaml_metadata(mf) + self.maxDiff = None + self.assertDictEqual( + result, + { + 'AutoName': 'F-Droid', + 'RepoType': 'git', + 'Builds': [ + { + 'versionCode': 1, + 'versionName': 'v0.1.0', + 'sudo': [ + "apt-get update", + "apt-get install -y whatever", + "sed -i -e 's/> /a/file", + ], + 'build': [ + "./gradlew someSpecialTask", + "sed -i 'd/that wrong config/' gradle.properties", + "./gradlew compile", + ], + } + ], + }, + ) + + def test_parse_yaml_metadata_prebuild_strings(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AutoName: F-Droid + RepoType: git + Builds: + - versionCode: 1 + versionName: v0.1.0 + sudo: |- + apt-get update && apt-get install -y whatever && sed -i -e 's/> /a/file + build: |- + ./gradlew someSpecialTask && sed -i 'd/that wrong config/' gradle.properties && ./gradlew compile + """ + ) + ) + mf.name = 'mock_filename.yaml' + mf.seek(0) + result = fdroidserver.metadata.parse_yaml_metadata(mf) + self.maxDiff = None + self.assertDictEqual( + result, + { + 'AutoName': 'F-Droid', + 'RepoType': 'git', + 'Builds': [ + { + 'versionCode': 1, + 'versionName': 'v0.1.0', + 'sudo': [ + "apt-get update && " + "apt-get install -y whatever && " + "sed -i -e 's/> /a/file" + ], + 'build': [ + "./gradlew someSpecialTask && " + "sed -i 'd/that wrong config/' gradle.properties && " + "./gradlew compile" + ], + } + ], + }, + ) + + def test_parse_yaml_metadata_prebuild_string(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AutoName: F-Droid + RepoType: git + Builds: + - versionCode: 1 + versionName: v0.1.0 + prebuild: |- + a && b && sed -i 's,a,b,' + """ + ) + ) + mf.name = 'mock_filename.yaml' + mf.seek(0) + result = fdroidserver.metadata.parse_yaml_metadata(mf) + self.assertDictEqual( + result, + { + 'AutoName': 'F-Droid', + 'RepoType': 'git', + 'Builds': [ + { + 'versionCode': 1, + 'versionName': 'v0.1.0', + 'prebuild': ["a && b && sed -i 's,a,b,'"], + } + ], + }, + ) + + def test_parse_yaml_provides_should_be_ignored(self): + mf = io.StringIO( + textwrap.dedent( + """\ + Provides: this.is.deprecated + AutoName: F-Droid + RepoType: git + Builds: + - 
versionCode: 1 + versionName: v0.1.0 + prebuild: |- + a && b && sed -i 's,a,b,' + """ + ) + ) + mf.name = 'mock_filename.yaml' + mf.seek(0) + result = fdroidserver.metadata.parse_yaml_metadata(mf) + self.assertNotIn('Provides', result) + self.assertNotIn('provides', result) + + def test_parse_yaml_app_antifeatures_dict(self): + nonfreenet = 'free it!' + tracking = 'so many' + mf = io.StringIO( + textwrap.dedent( + f""" + AntiFeatures: + Tracking: {tracking} + NonFreeNet: {nonfreenet} + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + { + 'AntiFeatures': { + 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet}, + 'Tracking': {DEFAULT_LOCALE: tracking}, + } + }, + ) + + def test_parse_yaml_metadata_build_antifeatures_old_style(self): + mf = _get_mock_mf( + textwrap.dedent( + """ + AntiFeatures: + - Ads + Builds: + - versionCode: 123 + antifeatures: + - KnownVuln + - NonFreeAssets + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + { + 'AntiFeatures': {'Ads': {}}, + 'Builds': [ + { + 'antifeatures': {'KnownVuln': {}, 'NonFreeAssets': {}}, + 'versionCode': 123, + } + ], + }, + ) + + def test_parse_yaml_metadata_antifeatures_sort(self): + """All data should end up sorted, to minimize diffs in the index files.""" + self.assertEqual( + metadata.parse_yaml_metadata( + _get_mock_mf( + textwrap.dedent( + """ + Builds: + - versionCode: 123 + antifeatures: + KnownVuln: + es: 2nd + az: zero + en-US: first + Tracking: + NonFreeAssets: + AntiFeatures: + NonFreeDep: + Ads: + sw: 2nd + zh-CN: 3rd + de: 1st + """ + ) + ) + ), + { + 'AntiFeatures': { + 'Ads': {'de': '1st', 'sw': '2nd', 'zh-CN': '3rd'}, + 'NonFreeDep': {}, + }, + 'Builds': [ + { + 'antifeatures': { + 'KnownVuln': {'az': 'zero', 'en-US': 'first', 'es': '2nd'}, + 'NonFreeAssets': {}, + 'Tracking': {}, + }, + 'versionCode': 123, + } + ], + }, + ) + + def test_parse_yaml_app_antifeatures_str(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: Tracking')), + {'AntiFeatures': {'Tracking': {}}}, + ) + + def test_parse_yaml_app_antifeatures_bool(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: true')), + {'AntiFeatures': {'true': {}}}, + ) + + def test_parse_yaml_app_antifeatures_float_nan(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: .nan')), + {'AntiFeatures': {'.nan': {}}}, + ) + + def test_parse_yaml_app_antifeatures_float_inf(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: .inf')), + {'AntiFeatures': {'.inf': {}}}, + ) + + def test_parse_yaml_app_antifeatures_float_negative_inf(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: -.inf')), + {'AntiFeatures': {'-.inf': {}}}, + ) + + def test_parse_yaml_app_antifeatures_int(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: 1')), + {'AntiFeatures': {'1': {}}}, + ) + + def test_parse_yaml_app_antifeatures_float(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: 1.0')), + {'AntiFeatures': {'1.0': {}}}, + ) + + def test_parse_yaml_app_antifeatures_list_float(self): + self.assertEqual( + metadata.parse_yaml_metadata(io.StringIO('AntiFeatures:\n - 1.0\n')), + {'AntiFeatures': {'1.0': {}}}, + ) + + def test_parse_yaml_app_antifeatures_dict_float(self): + mf = io.StringIO('AntiFeatures:\n 0.0: too early\n') + self.assertEqual( + metadata.parse_yaml_metadata(mf), + {'AntiFeatures': {'0.0': {'en-US': 'too early'}}}, + ) + + def 
test_parse_yaml_app_antifeatures_dict_float_fail_value(self): + mf = io.StringIO('AntiFeatures:\n NoSourceSince: 1.0\n') + self.assertEqual( + metadata.parse_yaml_metadata(mf), + {'AntiFeatures': {'NoSourceSince': {'en-US': '1.0'}}}, + ) + + def test_parse_yaml_metadata_type_stringmap_old_list(self): + mf = _get_mock_mf( + textwrap.dedent( + """ + AntiFeatures: + - Ads + - Tracking + """ + ) + ) + self.assertEqual( + {'AntiFeatures': {'Ads': {}, 'Tracking': {}}}, + metadata.parse_yaml_metadata(mf), + ) + + def test_parse_yaml_app_antifeatures_dict_no_value(self): + mf = io.StringIO( + textwrap.dedent( + """\ + AntiFeatures: + Tracking: + NonFreeNet: + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + {'AntiFeatures': {'NonFreeNet': {}, 'Tracking': {}}}, + ) + + def test_parse_yaml_metadata_type_stringmap_transitional(self): + """Support a transitional format, where users just append a text""" + ads = 'Has ad lib in it.' + tracking = 'opt-out reports with ACRA' + mf = _get_mock_mf( + textwrap.dedent( + f""" + AntiFeatures: + - Ads: {ads} + - Tracking: {tracking} + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + { + 'AntiFeatures': { + 'Ads': {DEFAULT_LOCALE: ads}, + 'Tracking': {DEFAULT_LOCALE: tracking}, + } + }, + ) + + def test_parse_yaml_app_antifeatures_dict_mixed_values(self): + ads = 'true' + tracking = 'many' + nonfreenet = '1' + mf = io.StringIO( + textwrap.dedent( + f""" + AntiFeatures: + Ads: {ads} + Tracking: {tracking} + NonFreeNet: {nonfreenet} + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + { + 'AntiFeatures': { + 'Ads': {DEFAULT_LOCALE: ads}, + 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet}, + 'Tracking': {DEFAULT_LOCALE: tracking}, + } + }, + ) + + def test_parse_yaml_app_antifeatures_stringmap_full(self): + ads = 'watching' + tracking = 'many' + nonfreenet = 'pipes' + nonfreenet_zh = '非免费网络' + self.maxDiff = None + mf = io.StringIO( + textwrap.dedent( + f""" + AntiFeatures: + Ads: + {DEFAULT_LOCALE}: {ads} + Tracking: + {DEFAULT_LOCALE}: {tracking} + NonFreeNet: + {DEFAULT_LOCALE}: {nonfreenet} + zh-CN: {nonfreenet_zh} + """ + ) + ) + self.assertEqual( + metadata.parse_yaml_metadata(mf), + { + 'AntiFeatures': { + 'Ads': {DEFAULT_LOCALE: ads}, + 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet, 'zh-CN': nonfreenet_zh}, + 'Tracking': {DEFAULT_LOCALE: tracking}, + } + }, + ) + + def test_parse_yaml_build_type_int_fail(self): + mf = io.StringIO('Builds: [{versionCode: 1a}]') + with self.assertRaises(MetaDataException): + fdroidserver.metadata.parse_yaml_metadata(mf) + + def test_parse_yaml_int_strict_typing_fails(self): + """Things that cannot be preserved when parsing as YAML.""" + mf = io.StringIO('Builds: [{versionCode: 1, rm: 0xf}]') + self.assertEqual( + {'Builds': [{'rm': ['15'], 'versionCode': 1}]}, # 15 != 0xf + fdroidserver.metadata.parse_yaml_metadata(mf), + ) + mf = io.StringIO('Builds: [{versionCode: 1, rm: 0x010}]') + self.assertEqual( + {'Builds': [{'rm': ['16'], 'versionCode': 1}]}, # 16 != 0x010 + fdroidserver.metadata.parse_yaml_metadata(mf), + ) + mf = io.StringIO('Builds: [{versionCode: 1, rm: 0o015}]') + self.assertEqual( + {'Builds': [{'rm': ['13'], 'versionCode': 1}]}, # 13 != 0o015 + fdroidserver.metadata.parse_yaml_metadata(mf), + ) + mf = io.StringIO('Builds: [{versionCode: 1, rm: 10_000}]') + self.assertEqual( + {'Builds': [{'rm': ['10000'], 'versionCode': 1}]}, # 10000 != 10_000 + fdroidserver.metadata.parse_yaml_metadata(mf), + ) + + def test_write_yaml_1_line_scripts_as_string(self): + mf = 
io.StringIO() + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app['Builds'] = [] + build = fdroidserver.metadata.Build() + build.versionCode = 102030 + build.versionName = 'v1.2.3' + build.sudo = ["chmod +rwx /opt"] + build.init = ["sed -i -e 'g/what/ever/' /some/file"] + build.prebuild = ["sed -i 'd/that wrong config/' gradle.properties"] + build.build = ["./gradlew compile"] + app['Builds'].append(build) + fdroidserver.metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + sudo: chmod +rwx /opt + init: sed -i -e 'g/what/ever/' /some/file + prebuild: sed -i 'd/that wrong config/' gradle.properties + build: ./gradlew compile + + AutoUpdateMode: None + UpdateCheckMode: None + """ + ), + ) + + def test_write_yaml_1_line_scripts_as_list(self): + mf = io.StringIO() + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app['Builds'] = [] + build = fdroidserver.metadata.Build() + build.versionCode = 102030 + build.versionName = 'v1.2.3' + build.sudo = ["chmod +rwx /opt"] + build.init = ["sed -i -e 'g/what/ever/' /some/file"] + build.prebuild = ["sed -i 'd/that wrong config/' gradle.properties"] + build.build = ["./gradlew compile"] + app['Builds'].append(build) + fdroidserver.metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + sudo: chmod +rwx /opt + init: sed -i -e 'g/what/ever/' /some/file + prebuild: sed -i 'd/that wrong config/' gradle.properties + build: ./gradlew compile + + AutoUpdateMode: None + UpdateCheckMode: None + """ + ), + ) + + def test_write_yaml_multiline_scripts_from_list(self): + mf = io.StringIO() + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app['Builds'] = [] + build = fdroidserver.metadata.Build() + build.versionCode = 102030 + build.versionName = 'v1.2.3' + build.sudo = [ + "apt-get update", + "apt-get install -y whatever", + "sed -i -e 's/> /a/file"] + build.build = [ + "./gradlew someSpecialTask", + "sed -i 'd/that wrong config/' gradle.properties", + "./gradlew compile", + ] + app['Builds'].append(build) + fdroidserver.metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + sudo: + - apt-get update + - apt-get install -y whatever + - sed -i -e 's/> /a/file + build: + - ./gradlew someSpecialTask + - sed -i 'd/that wrong config/' gradle.properties + - ./gradlew compile + + AutoUpdateMode: None + UpdateCheckMode: None + """ + ), + ) + + def test_write_yaml_multiline_scripts_from_string(self): + mf = io.StringIO() + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app['Builds'] = [] + build = fdroidserver.metadata.Build() + build.versionCode = 102030 + build.versionName = 'v1.2.3' + build.sudo = [ + "apt-get update", + "apt-get install -y whatever", + "sed -i -e 's/> /a/file"] + build.build = [ + "./gradlew someSpecialTask", + "sed -i 'd/that wrong config/' gradle.properties", + "./gradlew compile", + ] + app['Builds'].append(build) + fdroidserver.metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + sudo: + - apt-get update 
+ - apt-get install -y whatever + - sed -i -e 's/> /a/file + build: + - ./gradlew someSpecialTask + - sed -i 'd/that wrong config/' gradle.properties + - ./gradlew compile + + AutoUpdateMode: None + UpdateCheckMode: None + """ + ), + ) + + def test_write_yaml_build_antifeatures(self): + mf = io.StringIO() + app = metadata.App( + { + 'License': 'Apache-2.0', + 'Builds': [ + metadata.Build( + { + 'versionCode': 102030, + 'versionName': 'v1.2.3', + 'gradle': ['yes'], + 'antifeatures': { + 'a': {}, + 'b': {'de': 'Probe', 'en-US': 'test'}, + }, + } + ), + ], + 'id': 'placeholder', + } + ) + metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + License: Apache-2.0 + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + gradle: + - yes + antifeatures: + a: {} + b: + de: Probe + en-US: test + """ + ), + ) + + def test_write_yaml_build_antifeatures_old_style(self): + mf = io.StringIO() + app = metadata.App( + { + 'License': 'Apache-2.0', + 'Builds': [ + metadata.Build( + { + 'versionCode': 102030, + 'versionName': 'v1.2.3', + 'gradle': ['yes'], + 'antifeatures': {'b': {}, 'a': {}}, + } + ), + ], + 'id': 'placeholder', + } + ) + metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + License: Apache-2.0 + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + gradle: + - yes + antifeatures: + - a + - b + """ + ), + ) + + def test_write_yaml_make_sure_provides_does_not_get_written(self): + mf = io.StringIO() + app = fdroidserver.metadata.App() + app.Categories = ['None'] + app.Provides = 'this.is.deprecated' + app['Builds'] = [] + build = fdroidserver.metadata.Build() + build.versionCode = 102030 + build.versionName = 'v1.2.3' + build.gradle = ['yes'] + app['Builds'].append(build) + fdroidserver.metadata.write_yaml(mf, app) + mf.seek(0) + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + Categories: + - None + License: Unknown + + Builds: + - versionName: v1.2.3 + versionCode: 102030 + gradle: + - yes + + AutoUpdateMode: None + UpdateCheckMode: None + """ + ), + ) + + def test_parse_yaml_srclib_unknown_key(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with Path('test.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + RepoType: git + Repo: https://example.com/test.git + Evil: I should not be here. 
+ ''' + ) + ) + with self.assertRaisesRegex( + MetaDataException, + "Invalid srclib metadata: unknown key 'Evil' in 'test.yml'", + ): + fdroidserver.metadata.parse_yaml_srclib(Path('test.yml')) + + def test_parse_yaml_srclib_does_not_exists(self): + with self.assertRaisesRegex( + MetaDataException, + "Invalid scrlib metadata: " + r"'non(/|\\)existent-test-srclib.yml' " + "does not exist", + ): + fdroidserver.metadata.parse_yaml_srclib( + Path('non/existent-test-srclib.yml') + ) + + def test_parse_yaml_srclib_simple(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with Path('simple.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + # this should be simple + RepoType: git + Repo: https://git.host/repo.git + ''' + ) + ) + srclib = fdroidserver.metadata.parse_yaml_srclib(Path('simple.yml')) + self.assertDictEqual( + { + 'Repo': 'https://git.host/repo.git', + 'RepoType': 'git', + 'Subdir': None, + 'Prepare': None, + }, + srclib, + ) + + def test_parse_yaml_srclib_simple_with_blanks(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with Path('simple.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + # this should be simple + + RepoType: git + + Repo: https://git.host/repo.git + + Subdir: + + Prepare: + ''' + ) + ) + srclib = fdroidserver.metadata.parse_yaml_srclib(Path('simple.yml')) + self.assertDictEqual( + { + 'Repo': 'https://git.host/repo.git', + 'RepoType': 'git', + 'Subdir': [''], + 'Prepare': [], + }, + srclib, + ) + + def test_parse_yaml_srclib_Changelog_cketti(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + with Path('Changelog-cketti.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + RepoType: git + Repo: https://github.com/cketti/ckChangeLog + + Subdir: library,ckChangeLog/src/main + Prepare: "[ -f project.properties ] || echo 'source.dir=java' > ant.properties && echo -e 'android.library=true\\\\ntarget=android-19' > project.properties" + ''' + ) + ) + srclib = fdroidserver.metadata.parse_yaml_srclib( + Path('Changelog-cketti.yml') + ) + self.assertDictEqual( + srclib, + { + 'Repo': 'https://github.com/cketti/ckChangeLog', + 'RepoType': 'git', + 'Subdir': ['library', 'ckChangeLog/src/main'], + 'Prepare': [ + "[ -f project.properties ] || echo 'source.dir=java' > " + "ant.properties && echo -e " + "'android.library=true\\ntarget=android-19' > project.properties" + ], + }, + ) + + def test_read_srclibs_yml_subdir_list(self): + fdroidserver.metadata.srclibs = None + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + Path('srclibs').mkdir() + with Path('srclibs/with-list.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + # this should be simple + RepoType: git + Repo: https://git.host/repo.git + + Subdir: + - This is your last chance. + - After this, there is no turning back. + - You take the blue pill—the story ends, + - you wake up in your bed + - and believe whatever you want to believe. + - You take the red pill—you stay in Wonderland + - and I show you how deep the rabbit-hole goes. + Prepare: + There is a difference between knowing the path + and walking the path. 
+ ''' + ) + ) + fdroidserver.metadata.read_srclibs() + self.maxDiff = None + self.assertDictEqual( + fdroidserver.metadata.srclibs, + { + 'with-list': { + 'RepoType': 'git', + 'Repo': 'https://git.host/repo.git', + 'Subdir': [ + 'This is your last chance.', + 'After this, there is no turning back.', + 'You take the blue pill—the story ends,', + 'you wake up in your bed', + 'and believe whatever you want to believe.', + 'You take the red pill—you stay in Wonderland', + 'and I show you how deep the rabbit-hole goes.', + ], + 'Prepare': [ + 'There is a difference between knowing the path ' + 'and walking the path.' + ], + } + }, + ) + + def test_read_srclibs_yml_prepare_list(self): + fdroidserver.metadata.srclibs = None + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + Path('srclibs').mkdir() + with Path('srclibs/with-list.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + # this should be simple + RepoType: git + Repo: https://git.host/repo.git + + Subdir: + Prepare: + - Many + - invalid + - commands + - here. + ''' + ) + ) + fdroidserver.metadata.read_srclibs() + self.maxDiff = None + self.assertDictEqual( + fdroidserver.metadata.srclibs, + { + 'with-list': { + 'RepoType': 'git', + 'Repo': 'https://git.host/repo.git', + 'Subdir': [''], + 'Prepare': [ + 'Many', + 'invalid', + 'commands', + 'here.', + ], + } + }, + ) + + def test_read_srclibs(self): + fdroidserver.metadata.srclibs = None + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + Path('srclibs').mkdir() + with Path('srclibs/simple.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + RepoType: git + Repo: https://git.host/repo.git + ''' + ) + ) + with Path('srclibs/simple-wb.yml').open('w', encoding='utf-8') as f: + f.write( + textwrap.dedent( + '''\ + # this should be simple + RepoType: git + Repo: https://git.host/repo.git + + Subdir: + Prepare: + ''' + ) + ) + fdroidserver.metadata.read_srclibs() + self.assertDictEqual( + fdroidserver.metadata.srclibs, + { + 'simple-wb': { + 'RepoType': 'git', + 'Repo': 'https://git.host/repo.git', + 'Subdir': [''], + 'Prepare': [], + }, + 'simple': { + 'RepoType': 'git', + 'Repo': 'https://git.host/repo.git', + 'Subdir': None, + 'Prepare': None, + }, + }, + ) + + def test_build_ndk_path(self): + with tempfile.TemporaryDirectory(prefix='android-sdk-') as sdk_path: + config = {'ndk_paths': {}, 'sdk_path': sdk_path} + fdroidserver.common.config = config + + build = fdroidserver.metadata.Build() + build.ndk = 'r10e' + self.assertEqual('', build.ndk_path()) + + correct = '/fake/path/ndk/r21b' + config['ndk_paths'] = {'r21b': correct} + self.assertEqual('', build.ndk_path()) + config['ndk_paths'] = {'r10e': correct} + self.assertEqual(correct, build.ndk_path()) + + r10e = '/fake/path/ndk/r10e' + r22b = '/fake/path/ndk/r22e' + config['ndk_paths'] = {'r10e': r10e, 'r22b': r22b} + self.assertEqual(r10e, build.ndk_path()) + + build.ndk = ['r10e', 'r22b'] + self.assertEqual(r10e, build.ndk_path()) + + build.ndk = ['r22b', 'r10e'] + self.assertEqual(r22b, build.ndk_path()) + + def test_build_ndk_path_only_accepts_str(self): + """Paths in the config must be strings, never pathlib.Path instances""" + config = {'ndk_paths': {'r24': Path('r24')}} + fdroidserver.common.config = config + build = fdroidserver.metadata.Build() + build.ndk = 'r24' + with self.assertRaises(TypeError): + build.ndk_path() + + def test_del_duplicated_NoSourceSince(self): + app = { + 'AntiFeatures': {'Ads': {}, 'NoSourceSince': {DEFAULT_LOCALE: '1.0'}}, + 
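# the top-level NoSourceSince value duplicates the localized Anti-Feature entry above, so the helper should drop the Anti-Feature copy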
'NoSourceSince': '1.0', + } + metadata._del_duplicated_NoSourceSince(app) + self.assertEqual(app, {'AntiFeatures': {'Ads': {}}, 'NoSourceSince': '1.0'}) + + def test_check_manually_extended_NoSourceSince(self): + app = { + 'AntiFeatures': {'NoSourceSince': {DEFAULT_LOCALE: '1.0', 'de': '1,0'}}, + 'NoSourceSince': '1.0', + } + metadata._del_duplicated_NoSourceSince(app) + self.assertEqual( + app, + { + 'AntiFeatures': {'NoSourceSince': {DEFAULT_LOCALE: '1.0', 'de': '1,0'}}, + 'NoSourceSince': '1.0', + }, + ) + + def test_make_sure_nosourcesince_does_not_get_written(self): + appid = 'com.politedroid' + app = metadata.read_metadata({appid: -1})[appid] + builds = app['Builds'] + app['Builds'] = [copy.deepcopy(builds[0])] + mf = io.StringIO() + metadata.write_yaml(mf, app) + mf.seek(0) + self.maxDiff = None + self.assertEqual( + mf.read(), + textwrap.dedent( + """\ + AntiFeatures: + - NonFreeNet + Categories: + - Multimedia + - Security + - Time + License: GPL-3.0-only + SourceCode: https://github.com/miguelvps/PoliteDroid + IssueTracker: https://github.com/miguelvps/PoliteDroid/issues + + AutoName: Polite Droid + Summary: Calendar tool + Description: Activates silent mode during calendar events. + + RepoType: git + Repo: https://github.com/miguelvps/PoliteDroid.git + + Builds: + - versionName: '1.2' + versionCode: 3 + commit: 6a548e4b19 + target: android-10 + antifeatures: + - KnownVuln + - NonFreeAssets + + ArchivePolicy: 4 + AutoUpdateMode: Version v%v + UpdateCheckMode: Tags + CurrentVersion: '1.5' + CurrentVersionCode: 6 + + NoSourceSince: '1.5' + """ + ), + ) + + def test_app_to_yaml_smokecheck(self): + self.assertTrue( + isinstance(metadata._app_to_yaml(dict()), ruamel.yaml.comments.CommentedMap) + ) + + def test_app_to_yaml_build_list_empty(self): + app = metadata.App({'Builds': [metadata.Build({'rm': []})]}) + self.assertEqual(dict(), metadata._app_to_yaml(app)['Builds'][0]) + + def test_app_to_yaml_build_list_one(self): + app = metadata.App({'Builds': [metadata.Build({'rm': ['one']})]}) + self.assertEqual({'rm': ['one']}, metadata._app_to_yaml(app)['Builds'][0]) + + def test_app_to_yaml_build_list_two(self): + app = metadata.App({'Builds': [metadata.Build({'rm': ['1', '2']})]}) + self.assertEqual({'rm': ['1', '2']}, metadata._app_to_yaml(app)['Builds'][0]) + + def test_app_to_yaml_build_list(self): + app = metadata.App({'Builds': [metadata.Build({'rm': ['b2', 'NO1']})]}) + self.assertEqual({'rm': ['b2', 'NO1']}, metadata._app_to_yaml(app)['Builds'][0]) + + def test_app_to_yaml_AllowedAPKSigningKeys_two(self): + cm = metadata._app_to_yaml(metadata.App({'AllowedAPKSigningKeys': ['b', 'A']})) + self.assertEqual(['b', 'a'], cm['AllowedAPKSigningKeys']) + + def test_app_to_yaml_AllowedAPKSigningKeys_one(self): + cm = metadata._app_to_yaml(metadata.App({'AllowedAPKSigningKeys': ['One']})) + self.assertEqual('one', cm['AllowedAPKSigningKeys']) + + def test_app_to_yaml_int_hex(self): + cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 0xFF})) + self.assertEqual(255, cm['CurrentVersionCode']) + + def test_app_to_yaml_int_underscore(self): + cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 1_2_3})) + self.assertEqual(123, cm['CurrentVersionCode']) + + def test_app_to_yaml_int_0(self): + """Document that 0 values fail to make it through.""" + # TODO it should be possible to use `CurrentVersionCode: 0` + cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 0})) + self.assertFalse('CurrentVersionCode' in cm) + + def test_format_multiline(self): + 
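# plain single-line strings should pass through _format_multiline unchanged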
self.assertEqual(metadata._format_multiline('description'), 'description') + + def test_format_multiline_empty(self): + self.assertEqual(metadata._format_multiline(''), '') + + def test_format_multiline_newline_char(self): + self.assertEqual(metadata._format_multiline('one\\ntwo'), 'one\\ntwo') + + def test_format_multiline_newlines(self): + self.assertEqual( + metadata._format_multiline( + textwrap.dedent( + """ + one + two + three + """ + ) + ), + '\none\ntwo\nthree\n', + ) + + def test_format_list_empty(self): + self.assertEqual(metadata._format_list(['', None]), list()) + + def test_format_list_one_empty(self): + self.assertEqual(metadata._format_list(['foo', None]), ['foo']) + + def test_format_list_two(self): + self.assertEqual(metadata._format_list(['2', '1']), ['2', '1']) + + def test_format_list_newline(self): + self.assertEqual(metadata._format_list(['one\ntwo']), ['one\ntwo']) + + def test_format_list_newline_char(self): + self.assertEqual(metadata._format_list(['one\\ntwo']), ['one\\ntwo']) + + def test_format_script_empty(self): + self.assertEqual(metadata._format_script(['', None]), list()) + + def test_format_script_newline(self): + self.assertEqual(metadata._format_script(['one\ntwo']), 'one\ntwo') + + def test_format_script_newline_char(self): + self.assertEqual(metadata._format_script(['one\\ntwo']), 'one\\ntwo') + + def test_format_stringmap_empty(self): + self.assertEqual( + metadata._format_stringmap('🔥', 'test', dict()), + list(), + ) + + def test_format_stringmap_one_list(self): + self.assertEqual( + metadata._format_stringmap('🔥', 'test', {'Tracking': {}, 'Ads': {}}), + ['Ads', 'Tracking'], + ) + + def test_format_stringmap_one_list_empty_desc(self): + self.assertEqual( + metadata._format_stringmap('🔥', 'test', {'NonFree': {}, 'Ads': {'en': ''}}), + ['Ads', 'NonFree'], + ) + + def test_format_stringmap_three_list(self): + self.assertEqual( + metadata._format_stringmap('🔥', 'test', {'B': {}, 'A': {}, 'C': {}}), + ['A', 'B', 'C'], + ) + + def test_format_stringmap_two_dict(self): + self.assertEqual( + metadata._format_stringmap('🔥', 'test', {'1': {'uz': 'a'}, '2': {}}), + {'1': {'uz': 'a'}, '2': {}}, + ) + + def test_format_stringmap_three_locales(self): + self.assertEqual( + metadata._format_stringmap( + '🔥', 'test', {'AF': {'uz': 'a', 'ko': 'b', 'zh': 'c'}} + ), + {'AF': {'ko': 'b', 'uz': 'a', 'zh': 'c'}}, + ) + + def test_format_stringmap_move_build_antifeatures_to_filesystem(self): + os.chdir(self.testdir) + appid = 'a' + yml = Path('metadata/a.yml') + yml.parent.mkdir() + self.assertEqual( + metadata._format_stringmap( + appid, 'antifeatures', {'AF': {'uz': 'a', 'ko': 'b', 'zh': 'c'}} + ), + {'AF': {'ko': 'b', 'uz': 'a', 'zh': 'c'}}, + ) + + def test_format_stringmap_app_antifeatures_conflict(self): + """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" + os.chdir(self.testdir) + appid = 'a' + field = 'AntiFeatures' + locale = 'ko' + yml = Path('metadata/a.yml') + antifeatures_ko = yml.parent / appid / locale / field.lower() + antifeatures_ko.mkdir(parents=True) + afname = 'Anti-🔥' + (antifeatures_ko / (afname + '.txt')).write_text('SOMETHING ELSE') + with self.assertRaises(MetaDataException): + metadata._format_stringmap( + appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}} + ) + + def test_format_stringmap_app_antifeatures_conflict_same_contents(self): + """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" + os.chdir(self.testdir) + appid = 'a' + field = 'AntiFeatures' + locale = 'ko' + yml = 
Path('metadata/a.yml') + antifeatures_ko = yml.parent / appid / locale / field.lower() + antifeatures_ko.mkdir(parents=True) + afname = 'Anti-🔥' + (antifeatures_ko / (afname + '.txt')).write_text('b') + metadata._format_stringmap( + appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}} + ) + + def test_format_stringmap_build_antifeatures_conflict(self): + """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" + os.chdir(self.testdir) + appid = 'a' + field = 'antifeatures' + locale = 'ko' + versionCode = 123 + yml = Path('metadata/a.yml') + antifeatures_ko = yml.parent / appid / locale / field.lower() + antifeatures_ko.mkdir(parents=True) + afname = 'Anti-🔥' + with (antifeatures_ko / ('%d_%s.txt' % (versionCode, afname))).open('w') as fp: + fp.write('SOMETHING ELSE') + with self.assertRaises(MetaDataException): + metadata._format_stringmap( + appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}}, versionCode + ) + + def test_app_to_yaml_one_category(self): + """Categories does not get simplified to string when outputting YAML.""" + self.assertEqual( + metadata._app_to_yaml({'Categories': ['one']}), + {'Categories': ['one']}, + ) + + def test_app_to_yaml_categories(self): + """Sort case-insensitive before outputting YAML.""" + self.assertEqual( + metadata._app_to_yaml({'Categories': ['c', 'a', 'B']}), + {'Categories': ['a', 'B', 'c']}, + ) + + def test_builds_to_yaml_gradle_yes(self): + app = {'Builds': [{'versionCode': 0, 'gradle': ['yes']}]} + self.assertEqual( + metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['yes']}] + ) + + def test_builds_to_yaml_gradle_off(self): + app = {'Builds': [{'versionCode': 0, 'gradle': ['off']}]} + self.assertEqual( + metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['off']}] + ) + + def test_builds_to_yaml_gradle_true(self): + app = {'Builds': [{'versionCode': 0, 'gradle': ['true']}]} + self.assertEqual( + metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['true']}] + ) + + def test_builds_to_yaml_gradle_false(self): + app = {'Builds': [{'versionCode': 0, 'gradle': ['false']}]} + self.assertEqual( + metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['false']}] + ) + + def test_builds_to_yaml_stripped(self): + self.assertEqual( + metadata._builds_to_yaml( + { + 'Builds': [ + metadata.Build({'versionCode': 0, 'rm': [None], 'init': ['']}) + ] + } + ), + [{'versionCode': 0}], + ) + + def test_builds_to_yaml(self): + """Include one of each flag type with a valid value.""" + app = { + 'Builds': [ + metadata.Build( + { + 'versionCode': 0, + 'gradle': ['free'], + 'rm': ['0', '2'], + 'submodules': True, + 'timeout': 0, + 'init': ['false', 'two'], + } + ) + ] + } + # check that metadata.Build() inited flag values + self.assertEqual(app['Builds'][0]['scanignore'], list()) + # then unchanged values should be removed by _builds_to_yaml + self.assertEqual( + metadata._builds_to_yaml(app), + [ + { + 'versionCode': 0, + 'gradle': ['free'], + 'rm': ['0', '2'], + 'submodules': True, + 'timeout': 0, + 'init': ['false', 'two'], + } + ], + ) + + +class PostMetadataParseTest(unittest.TestCase): + """Test the functions that post process the YAML input. + + The following series of "post_metadata_parse" tests map out the + current state of automatic type conversion in the YAML post + processing. They are not necessary a statement of how things + should be, but more to surface the details of it functions. 
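+
+ As one concrete illustration drawn from the cases below: the int 123456
+ is kept as an int by int-typed fields, becomes '123456' in string-typed
+ fields, and becomes ['123456'] in list- and script-typed flags.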
+ + """ + + def setUp(self): + fdroidserver.metadata.warnings_action = 'error' + + def _post_metadata_parse_app_int(self, from_yaml, expected): + app = {'ArchivePolicy': from_yaml} + metadata.post_parse_yaml_metadata(app) + return {'ArchivePolicy': expected}, app + + def _post_metadata_parse_app_list(self, from_yaml, expected): + app = {'AllowedAPKSigningKeys': from_yaml} + metadata.post_parse_yaml_metadata(app) + return {'AllowedAPKSigningKeys': expected}, app + + def _post_metadata_parse_app_string(self, from_yaml, expected): + app = {'Repo': from_yaml} + metadata.post_parse_yaml_metadata(app) + return {'Repo': expected}, app + + def _post_metadata_parse_build_bool(self, from_yaml, expected): + tested_key = 'submodules' + app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} + post = copy.deepcopy(app) + metadata.post_parse_yaml_metadata(post) + del app['Builds'][0]['versionCode'] + del post['Builds'][0]['versionCode'] + for build in post['Builds']: + for k in list(build): + if k != tested_key: + del build[k] + app['Builds'][0][tested_key] = expected + return app, post + + def _post_metadata_parse_build_int(self, from_yaml, expected): + tested_key = 'versionCode' + app = {'Builds': [{'versionCode': from_yaml}]} + post = copy.deepcopy(app) + metadata.post_parse_yaml_metadata(post) + for build in post['Builds']: + for k in list(build): + if k != tested_key: + del build[k] + app['Builds'][0][tested_key] = expected + return app, post + + def _post_metadata_parse_build_list(self, from_yaml, expected): + tested_key = 'rm' + app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} + post = copy.deepcopy(app) + metadata.post_parse_yaml_metadata(post) + del app['Builds'][0]['versionCode'] + del post['Builds'][0]['versionCode'] + for build in post['Builds']: + for k in list(build): + if k != tested_key: + del build[k] + app['Builds'][0][tested_key] = expected + return app, post + + def _post_metadata_parse_build_script(self, from_yaml, expected): + tested_key = 'build' + app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} + post = copy.deepcopy(app) + metadata.post_parse_yaml_metadata(post) + del app['Builds'][0]['versionCode'] + del post['Builds'][0]['versionCode'] + for build in post['Builds']: + for k in list(build): + if k != tested_key: + del build[k] + app['Builds'][0][tested_key] = expected + return app, post + + def _post_metadata_parse_build_string(self, from_yaml, expected): + tested_key = 'commit' + app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} + post = copy.deepcopy(app) + metadata.post_parse_yaml_metadata(post) + del app['Builds'][0]['versionCode'] + del post['Builds'][0]['versionCode'] + for build in post['Builds']: + for k in list(build): + if k != tested_key: + del build[k] + app['Builds'][0][tested_key] = expected + return app, post + + def test_post_metadata_parse_none(self): + """Run None aka YAML null or blank through the various field and flag types.""" + self.assertEqual(*self._post_metadata_parse_app_int(None, None)) + self.assertEqual(*self._post_metadata_parse_app_list(None, None)) + self.assertEqual(*self._post_metadata_parse_app_string(None, None)) + self.assertEqual(*self._post_metadata_parse_build_bool(None, None)) + self.assertEqual(*self._post_metadata_parse_build_int(None, None)) + self.assertEqual(*self._post_metadata_parse_build_list(None, None)) + self.assertEqual(*self._post_metadata_parse_build_script(None, None)) + self.assertEqual(*self._post_metadata_parse_build_string(None, None)) + + def 
test_post_metadata_parse_int(self): + """Run the int 123456 through the various field and flag types.""" + self.assertEqual(*self._post_metadata_parse_app_int(123456, 123456)) + self.assertEqual(*self._post_metadata_parse_app_list(123456, ['123456'])) + self.assertEqual(*self._post_metadata_parse_app_string(123456, '123456')) + self.assertEqual(*self._post_metadata_parse_build_bool(123456, True)) + self.assertEqual(*self._post_metadata_parse_build_int(123456, 123456)) + self.assertEqual(*self._post_metadata_parse_build_list(123456, ['123456'])) + self.assertEqual(*self._post_metadata_parse_build_script(123456, ['123456'])) + self.assertEqual(*self._post_metadata_parse_build_string(123456, '123456')) + + def test_post_metadata_parse_sha256(self): + """Run a SHA-256 that YAML calls an int through the various types. + + The current f-droid.org signer set has SHA-256 values with a + maximum of two leading zeros, but this will handle more. + + """ + str_sha256 = '0000000000000498456908409534729834729834729834792837487293847926' + sha256 = yaml.load('a: ' + str_sha256)['a'] + self.assertEqual(*self._post_metadata_parse_app_int(sha256, int(str_sha256))) + self.assertEqual(*self._post_metadata_parse_app_list(sha256, [str_sha256])) + self.assertEqual(*self._post_metadata_parse_app_string(sha256, str_sha256)) + self.assertEqual(*self._post_metadata_parse_build_bool(sha256, True)) + self.assertEqual(*self._post_metadata_parse_build_int(sha256, sha256)) + self.assertEqual(*self._post_metadata_parse_build_list(sha256, [str_sha256])) + self.assertEqual(*self._post_metadata_parse_build_script(sha256, [str_sha256])) + self.assertEqual(*self._post_metadata_parse_build_string(sha256, str_sha256)) + + def test_post_metadata_parse_int_0(self): + """Run the int 0 through the various field and flag types.""" + self.assertEqual(*self._post_metadata_parse_app_int(0, 0)) + self.assertEqual(*self._post_metadata_parse_app_list(0, ['0'])) + self.assertEqual(*self._post_metadata_parse_app_string(0, '0')) + self.assertEqual(*self._post_metadata_parse_build_bool(0, False)) + self.assertEqual(*self._post_metadata_parse_build_int(0, 0)) + self.assertEqual(*self._post_metadata_parse_build_list(0, ['0'])) + self.assertEqual(*self._post_metadata_parse_build_script(0, ['0'])) + self.assertEqual(*self._post_metadata_parse_build_string(0, '0')) + + def test_post_metadata_parse_float_0_0(self): + """Run the float 0.0 through the various field and flag types.""" + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(0.0, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(0.0, ['0.0'])) + self.assertEqual(*self._post_metadata_parse_app_string(0.0, '0.0')) + self.assertEqual(*self._post_metadata_parse_build_bool(0.0, False)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(0.0, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(0.0, ['0.0'])) + self.assertEqual(*self._post_metadata_parse_build_script(0.0, ['0.0'])) + self.assertEqual(*self._post_metadata_parse_build_string(0.0, '0.0')) + + def test_post_metadata_parse_float_0_1(self): + """Run the float 0.1 through the various field and flag types.""" + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(0.1, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(0.1, ['0.1'])) + self.assertEqual(*self._post_metadata_parse_app_string(0.1, '0.1')) + self.assertEqual(*self._post_metadata_parse_build_bool(0.1, True)) + with 
self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(0.1, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(0.1, ['0.1'])) + self.assertEqual(*self._post_metadata_parse_build_script(0.1, ['0.1'])) + self.assertEqual(*self._post_metadata_parse_build_string(0.1, '0.1')) + + def test_post_metadata_parse_float_1_0(self): + """Run the float 1.0 through the various field and flag types.""" + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(1.0, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(1.0, ['1.0'])) + self.assertEqual(*self._post_metadata_parse_app_string(1.0, '1.0')) + self.assertEqual(*self._post_metadata_parse_build_bool(1.0, True)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(1.0, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(1.0, ['1.0'])) + self.assertEqual(*self._post_metadata_parse_build_script(1.0, ['1.0'])) + self.assertEqual(*self._post_metadata_parse_build_string(1.0, '1.0')) + + def test_post_metadata_parse_empty_list(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(list(), MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(list(), list())) + self.assertEqual(*self._post_metadata_parse_app_string(list(), list())) + self.assertEqual(*self._post_metadata_parse_build_bool(list(), False)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(list(), MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(list(), list())) + self.assertEqual(*self._post_metadata_parse_build_script(list(), list())) + self.assertEqual(*self._post_metadata_parse_build_string(list(), list())) + + def test_post_metadata_parse_set_of_1(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int({1}, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list({1}, ['1'])) + self.assertEqual(*self._post_metadata_parse_app_string({1}, '{1}')) + self.assertEqual(*self._post_metadata_parse_build_bool({1}, True)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int({1}, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list({1}, ['1'])) + self.assertEqual(*self._post_metadata_parse_build_script({1}, ['1'])) + self.assertEqual(*self._post_metadata_parse_build_string({1}, '{1}')) + + def test_post_metadata_parse_empty_dict(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(dict(), MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(dict(), dict())) + self.assertEqual(*self._post_metadata_parse_app_string(dict(), dict())) + self.assertEqual(*self._post_metadata_parse_build_bool(dict(), False)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(dict(), MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(dict(), dict())) + self.assertEqual(*self._post_metadata_parse_build_script(dict(), dict())) + self.assertEqual(*self._post_metadata_parse_build_string(dict(), dict())) + + def test_post_metadata_parse_list_int_string(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int([1, 'a'], MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list([1, 'a'], ['1', 'a'])) + self.assertEqual(*self._post_metadata_parse_app_string([1, 'a'], "[1, 'a']")) + 
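+ # a non-empty list is truthy, so bool-typed build flags come out as True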
self.assertEqual(*self._post_metadata_parse_build_bool([1, 'a'], True)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int([1, 'a'], MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list([1, 'a'], ['1', 'a'])) + self.assertEqual(*self._post_metadata_parse_build_script([1, 'a'], ['1', 'a'])) + self.assertEqual(*self._post_metadata_parse_build_string([1, 'a'], "[1, 'a']")) + + def test_post_metadata_parse_dict_int_string(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int({'k': 1}, MetaDataException) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_list({'k': 1}, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_string({'k': 1}, "{'k': 1}")) + self.assertEqual(*self._post_metadata_parse_build_bool({'k': 1}, True)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int({'k': 1}, MetaDataException) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_list({'k': 1}, MetaDataException) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_script({'k': 1}, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_string({'k': 1}, "{'k': 1}")) + + def test_post_metadata_parse_false(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(False, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(False, ['false'])) + self.assertEqual(*self._post_metadata_parse_app_string(False, 'false')) + self.assertEqual(*self._post_metadata_parse_build_bool(False, False)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(False, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(False, ['false'])) + self.assertEqual(*self._post_metadata_parse_build_script(False, ['false'])) + self.assertEqual(*self._post_metadata_parse_build_string(False, 'false')) + + def test_post_metadata_parse_true(self): + with self.assertRaises(MetaDataException): + self._post_metadata_parse_app_int(True, MetaDataException) + self.assertEqual(*self._post_metadata_parse_app_list(True, ['true'])) + self.assertEqual(*self._post_metadata_parse_app_string(True, 'true')) + self.assertEqual(*self._post_metadata_parse_build_bool(True, True)) + with self.assertRaises(MetaDataException): + self._post_metadata_parse_build_int(True, MetaDataException) + self.assertEqual(*self._post_metadata_parse_build_list(True, ['true'])) + self.assertEqual(*self._post_metadata_parse_build_script(True, ['true'])) + self.assertEqual(*self._post_metadata_parse_build_string(True, 'true')) diff --git a/tests/test_net.py b/tests/test_net.py new file mode 100755 index 00000000..beacd9af --- /dev/null +++ b/tests/test_net.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python3 + +import os +import random +import socket +import tempfile +import threading +import time +import unittest +from pathlib import Path +from unittest.mock import MagicMock, patch + +import requests + +from fdroidserver import net + + +class RetryServer: + """A stupid simple HTTP server that can fail to connect. + + Proxy settings via environment variables can interfere with this + test. The requests library will automatically pick up proxy + settings from environment variables. Proxy settings can force the + local connection over the proxy, which might not support that, + then this fails with an error like 405 or others. 
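+
+ For that reason, the tests below that use this server patch os.environ
+ with clear=True, so no proxy settings from the host environment can
+ leak into the requests session.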
+ + """ + + def __init__(self, port=None, failures=3): + self.port = port + if self.port is None: + self.port = random.randint(1024, 65535) # nosec B311 + self.failures = failures + self.stop_event = threading.Event() + threading.Thread(target=self.run_fake_server).start() + + def stop(self): + self.stop_event.set() + + def run_fake_server(self): + addr = ('localhost', self.port) + # localhost might not be a valid name for all families, use the first available + family = socket.getaddrinfo(addr[0], addr[1], type=socket.SOCK_STREAM)[0][0] + server_sock = socket.create_server(addr, family=family) + server_sock.listen(5) + server_sock.settimeout(5) + time.sleep(0.001) # wait for it to start + + while not self.stop_event.is_set(): + self.failures -= 1 + conn = None + try: + conn, address = server_sock.accept() + conn.settimeout(5) + except TimeoutError: + break + if self.failures > 0: + conn.close() + continue + conn.recv(8192) # request ignored + self.reply = b"""HTTP/1.1 200 OK + Date: Mon, 26 Feb 2024 09:00:14 GMT + Connection: close + Content-Type: text/html + + Hello World! + """ + self.reply = self.reply.replace(b' ', b'') # dedent + conn.sendall(self.reply) + conn.shutdown(socket.SHUT_RDWR) + conn.close() + + self.stop_event.wait(timeout=1) + server_sock.shutdown(socket.SHUT_RDWR) + server_sock.close() + + +class NetTest(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + os.chdir(self.tempdir.name) + Path('tmp').mkdir() + + def tearDown(self): + self.tempdir.cleanup() + + @patch('requests.get') + def test_download_file_url_parsing(self, requests_get): + # pylint: disable=unused-argument + def _get(url, stream, allow_redirects, headers, timeout): + return MagicMock() + + requests_get.side_effect = _get + f = net.download_file('https://f-droid.org/repo/entry.jar', retries=0) + requests_get.assert_called() + self.assertTrue(os.path.exists(f)) + self.assertEqual('tmp/entry.jar', f) + + f = net.download_file( + 'https://d-05.example.com/custom/com.downloader.aegis-3175421.apk?_fn=QVBLUHVyZV92My4xNy41NF9hcGtwdXJlLmNvbS5hcGs&_p=Y29tLmFwa3B1cmUuYWVnb24&am=6avvTpfJ1dMl9-K6JYKzQw&arg=downloader%3A%2F%2Fcampaign%2F%3Futm_medium%3Ddownloader%26utm_source%3Daegis&at=1652080635&k=1f6e58465df3a441665e585719ab0b13627a117f&r=https%3A%2F%2Fdownloader.com%2Fdownloader-app.html%3Ficn%3Daegis%26ici%3Dimage_qr&uu=http%3A%2F%2F172.16.82.1%2Fcustom%2Fcom.downloader.aegis-3175421.apk%3Fk%3D3fb9c4ae0be578206f6a1c330736fac1627a117f', + retries=0, + ) + self.assertTrue(requests_get.called) + self.assertTrue(os.path.exists(f)) + self.assertEqual('tmp/com.downloader.aegis-3175421.apk', f) + + @patch.dict(os.environ, clear=True) + def test_download_file_retries(self): + server = RetryServer() + f = net.download_file('http://localhost:%d/f.txt' % server.port) + # strip the HTTP headers and compare the reply + self.assertEqual(server.reply.split(b'\n\n')[1], Path(f).read_bytes()) + server.stop() + + @patch.dict(os.environ, clear=True) + def test_download_file_retries_not_forever(self): + """The retry logic should eventually exit with an error.""" + server = RetryServer(failures=5) + with self.assertRaises(requests.exceptions.ConnectionError): + net.download_file('http://localhost:%d/f.txt' % server.port) + server.stop() + + @unittest.skipIf(os.getenv('CI'), 'FIXME this fails mysteriously only in GitLab CI') + @patch.dict(os.environ, clear=True) + def test_download_using_mirrors_retries(self): + server = RetryServer() + f = net.download_using_mirrors( + [ + 'https://fake.com/f.txt', 
# 404 or 301 Redirect + 'https://httpbin.org/status/403', + 'https://httpbin.org/status/500', + 'http://localhost:1/f.txt', # ConnectionError + 'http://localhost:%d/should-succeed' % server.port, + ], + ) + # strip the HTTP headers and compare the reply + self.assertEqual(server.reply.split(b'\n\n')[1], Path(f).read_bytes()) + server.stop() + + @patch.dict(os.environ, clear=True) + def test_download_using_mirrors_retries_not_forever(self): + """The retry logic should eventually exit with an error.""" + server = RetryServer(failures=5) + with self.assertRaises(requests.exceptions.ConnectionError): + net.download_using_mirrors(['http://localhost:%d/' % server.port]) + server.stop() diff --git a/tests/test_nightly.py b/tests/test_nightly.py new file mode 100755 index 00000000..fb1614b7 --- /dev/null +++ b/tests/test_nightly.py @@ -0,0 +1,372 @@ +#!/usr/bin/env python3 + +import os +import platform +import shutil +import subprocess +import tempfile +import time +import unittest +from pathlib import Path +from unittest.mock import patch + +import requests +import yaml + +from fdroidserver import common, exception, index, nightly + +DEBUG_KEYSTORE = '/u3+7QAAAAIAAAABAAAAAQAPYW5kcm9pZGRlYnVna2V5AAABNYhAuskAAAK8MIICuDAOBgorBgEEASoCEQEBBQAEggKkqRnFlhidQmVff83bsAeewXPIsF0jiymzJnvrnUAQtCK0MV9uZonu37Mrj/qKLn56mf6QcvEoKvpCstZxzftgYYpAHWMVLM+hy2Z707QZEHlY7Ukppt8DItj+dXkeqGt7f8KzOb2AQwDbt9lm1fJb+MefLowTaubtvrLMcKIne43CbCu2D8HyN7RPWpEkVetA2Qgr5W4sa3tIUT80afqo9jzwJjKCspuxY9A1M8EIM3/kvyLo2B9r0cuWwRjYZXJ6gmTYI2ARNz0KQnCZUok14NDg+mZTb1B7AzRfb0lfjbA6grbzuAL+WaEpO8/LgGfuOh7QBZBT498TElOaFfQ9toQWA79wAmrQCm4OoFukpPIy2m/l6VjJSmlK5Q+CMOl/Au7OG1sUUCTvPaIr0XKnsiwDJ7a71n9garnPWHkvuWapSRCzCNgaUoGQjB+fTMJFFrwT8P1aLfM6onc3KNrDStoQZuYe5ngCLlNS56bENkVGvJBfdkboxtHZjqDXXON9jWGSOI527J3o2D5sjSVyx3T9XPrsL4TA/nBtdU+c/+M6aoASZR2VymzAKdMrGfj9kE5GXp8vv2vkJj9+OJ4Jm5yeczocc/Idtojjb1yg+sq1yY8kAQxgezpY1rpgi2jF3tSN01c23DNvAaSJLJX2ZuH8sD40ACc80Y1Qp1nUTdpwBZUeaeNruBwx4PHU8GnC71FwtiUpwNs0OoSl0pgDUJ3ODC5bs8B5QmW1wu1eg7I4mMSmCsNGW6VN3sFcu+WEqnmTxPoZombdFZKxsr2oq359Nn4bJ6Uc9PBz/sXsns7Zx1vND/oK/Jv5Y269UVAMeKX/eGpfnxzagW3tqGbOu12C2p9Azo5VxiU2fG/tmk2PjaG5hV/ywReco7I6C1p8OWM2fwAAAAEABVguNTA5AAAB6TCCAeUwggFOoAMCAQICBE89gTUwDQYJKoZIhvcNAQEFBQAwNzELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0FuZHJvaWQxFjAUBgNVBAMTDUFuZHJvaWQgRGVidWcwHhcNMTIwMjE2MjIyMDM3WhcNNDIwMjA4MjIyMDM3WjA3MQswCQYDVQQGEwJVUzEQMA4GA1UEChMHQW5kcm9pZDEWMBQGA1UEAxMNQW5kcm9pZCBEZWJ1ZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA3AKU7S7JXhUjEwxWP1/LPHXieh61SaA/+xbpqsPA+yjGz1sAcGAyuG6bjNAVm56pq7nkjJzicX7Wi83nUBo58DEC/quxOLdy0C4PEOSAeTnTT1RJIwMDvOgiL1GFCErvQ7gCH6zuAID/JRFbN6nIkhDjs2DYnSBl7aJJf8wCLc0CAwEAATANBgkqhkiG9w0BAQUFAAOBgQAoq/TJffA0l+ZGf89xndmHdxrO6qi+TzSlByvLZ4eFfCovTh1iO+Edrd5V1yXGLxyyvdsadMAFZT8SaxMrP5xxhJ0nra0APWYLpA96M//auMhQBWPgqPntwgvEZuEH7f0kdItjBJ39yijbG8xfgwid6XqNUo0TDDkp/wNWKpJ9tJe+2PrGw1NAvrgSydoH2j8DI1Eq' +DEBUG_KEYSTORE_KEY_FILE_NAME = ( + 'debug_keystore_QW+xRCJDGHXyyFtgCW8QRajj+6uYmsLwGWpCfYqYQ5M_id_rsa' +) + +AOSP_TESTKEY_DEBUG_KEYSTORE = 
'/u3+7QAAAAIAAAABAAAAAQAPYW5kcm9pZGRlYnVna2V5AAABejjuIU0AAAUBMIIE/TAOBgorBgEEASoCEQEBBQAEggTpvqhdBtq9D3jRUZGnhKLbFH1LMtCKqwGg25ETAEhvK1GVRNuWAHAUUedCnarjgeUy/zx9OsHuZq18KjUI115kWq/jxkf00fIg7wrOmXoyJf5Dbc7NGKjU64rRmppQEkJ417Lq4Uola9EBJ/WweEu6UTjTn5HcNl4mVloWKMBKNPkVfhZhAkXUyjiZ9rCVHMjLOVKG5vyTWZLwXpYR00Xz6VyzSunTyDza5oUOT/Fh7Gw74V7iNHANydkBHmH+UJ100p0vNPRFvt/3ABfMjkNbRXKNERnyN7NeBmCAOceuXjme/n0XLUidP9/NYk1yAmRJgUnauKD6UPSZYaUPuNSSdf4dD5fCQ7OVDq95e7vmqRDfrKUoWmtpndN7hbVl+OHVZXk2ngvXbvoS+F7ShsEfbq7+c37dnOcVrIlrY+wlOWX2jN42T+AkGt3AfA8zdIPdNgLGk64Op+aP4vGyLQqbuUEzOTNG9uExjGlamogPKFf93GAF83xv7AChYLR/9H+B1E955FL58bRuYOXVWJfLRsO/jyjXsilhBggo3VD1omRuOp98AkKP+P9JXCTswK7IZgvbMK3GB6QIzD20vlT0eK6JGLeWE7cXVn6oT26zvnqAjJ94PjS+YckMOExhqwCivPp1VaX6JzpQ1wr52OsGDUvconcjYrBEHBiY+UnMUk0Wj4mhZlJd1lpybZcWZ3vhTIlM0uMt4udl7t+zsgZ6BW97/pkGaa+QoxeTvgNlHGYyDYp8hveM3bCLXTHULw8mXUHxOJawq/J3E6vZ5/h2nzfmQmWtZtBOGWCkq+gKusTFUsHghjvHsPcQ2+EVfMcePBb/FKvtzSgH59C3iNOHE29l3ceSqccgxlxfStzbf+QkP7gxGVGZ8rLnCn3s8WzkGHZE4LtS0Zm3Y+hV5igrClk940YZP1hmilt2y7adPE4gCyQjb44JXgc3/NxlkZJcmeZTfAGxMXT8HG6Use/Kti114phsF7GDrqk1kPbB51Hr3xF1NAJUWP3csg3jgTS3E6jgD5XjPPG9BEDE2MwnBlUUMe3TC8TIWkK+AlwjlsDr5B9nqy2Fevv62+k5Adplw+fsQ8VzZREZF+MllWO3vtkD6srdx9h4vPD3dp5urFCFXNRaoD3SMDk27z3EVCQZ4bPL5PsVpB/ZBotLGkUZ0yi+5oC+u7ByP1ihMXMsRgvXbQpyOonEqDy84EZiIPWbyzGd0tEAXLz3mMh1x/IqZ1wxyDT/vkxhNCFqlBNlRW6GbMN2cng4A9Cigj9eNu9ptL1tdgFTxwndjoNRQMJ0NAc6WnsQ1UeIu8nMsa8/kLDtnVFLVmPQv2ZBUM4mxLrwC1mxOiQrWBW2XJ1OIheimSkLHfQOef1mIH3Z0cBuLBKGkRYGaXiZ6RX7po+ch0WFGjBef3e3uczl1mT5WGKdIG4x1+aRAtJHL+9K7Z6wzG0ygoamdiX2Fd0xBrWjTU72DzYbceqc+uHrbcLKDa5w0ENhyYK0+XEzG5fXHjFgmawY1D7xZQOJZO3jxStcv+xzoiTnNSrIxbxog/0Fez/WhMM9H6gV4eeDjMWEg79cJLugCBNwqmp3Yoe5EDU2TxQlLT53tye3Aji3FbocuDWjLI3Jc5VDxd7lrbzeIbFzSNpoFG8DSgjSiq41WJVeuzXxmdl7HM4zQpGRAAAAAQAFWC41MDkAAASsMIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAeFw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waMqOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzURNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97szI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkwHQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZAFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZaJ6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4YLCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe+ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwrsBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE21IJawTAEXnf52TqT7diFUlWRSnQ==' +AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME = ( + 'debug_keystore_k47SVrA85+oMZAexHc62PkgvIgO8TJBYN00U82xSlxc_id_rsa' +) + +basedir = 
Path(__file__).parent +testroot = basedir.with_name('.testfiles') + + +class Options: + allow_disabled_algorithms = False + clean = False + delete_unknown = False + nosign = False + pretty = True + rename_apks = False + verbose = False + + +@unittest.skipUnless( + platform.system() == 'Linux', + 'skipping test_nightly, it currently only works GNU/Linux', +) +class NightlyTest(unittest.TestCase): + path = os.environ['PATH'] + + def setUp(self): + common.config = None + nightly.config = None + testroot.mkdir(exist_ok=True) + os.chdir(basedir) + self.tempdir = tempfile.TemporaryDirectory( + str(time.time()), self._testMethodName + '_', testroot + ) + self.testdir = Path(self.tempdir.name) + self.home = self.testdir / 'home' + self.home.mkdir() + self.dot_android = self.home / '.android' + nightly.KEYSTORE_FILE = str(self.dot_android / 'debug.keystore') + + def tearDown(self): + self.tempdir.cleanup() + try: + os.rmdir(testroot) + except OSError: # other test modules might have left stuff around + pass + + def _copy_test_debug_keystore(self): + self.dot_android.mkdir() + shutil.copy( + basedir / 'aosp_testkey_debug.keystore', + self.dot_android / 'debug.keystore', + ) + + def _copy_debug_apk(self): + outputdir = Path('app/build/output/apk/debug') + outputdir.mkdir(parents=True) + shutil.copy(basedir / 'urzip.apk', outputdir / 'urzip-debug.apk') + + def test_get_repo_base_url(self): + for clone_url, repo_git_base, result in [ + ( + 'https://github.com/onionshare/onionshare-android-nightly', + 'onionshare/onionshare-android-nightly', + 'https://raw.githubusercontent.com/onionshare/onionshare-android-nightly/master/fdroid', + ), + ( + 'https://gitlab.com/fdroid/fdroidclient-nightly', + 'fdroid/fdroidclient-nightly', + 'https://gitlab.com/fdroid/fdroidclient-nightly/-/raw/master/fdroid', + ), + ]: + url = nightly.get_repo_base_url(clone_url, repo_git_base) + self.assertEqual(result, url) + r = requests.head(os.path.join(url, 'repo/index-v1.jar'), timeout=300) + # gitlab.com often returns 403 Forbidden from their cloudflare restrictions + self.assertTrue(r.status_code in (200, 403), 'should not be a redirect') + + def test_get_keystore_secret_var(self): + self.assertEqual( + AOSP_TESTKEY_DEBUG_KEYSTORE, + nightly._get_keystore_secret_var(basedir / 'aosp_testkey_debug.keystore'), + ) + + @patch.dict(os.environ, clear=True) + def test_ssh_key_from_debug_keystore(self): + os.environ['HOME'] = str(self.home) + os.environ['PATH'] = self.path + ssh_private_key_file = nightly._ssh_key_from_debug_keystore( + basedir / 'aosp_testkey_debug.keystore' + ) + with open(ssh_private_key_file) as fp: + self.assertIn('-----BEGIN RSA PRIVATE KEY-----', fp.read()) + with open(ssh_private_key_file + '.pub') as fp: + self.assertEqual(fp.read(8), 'ssh-rsa ') + shutil.rmtree(os.path.dirname(ssh_private_key_file)) + + @patch.dict(os.environ, clear=True) + @patch('sys.argv', ['fdroid nightly', '--verbose']) + def test_main_empty_dot_android(self): + """Test that it exits with an error when ~/.android is empty""" + os.environ['HOME'] = str(self.home) + os.environ['PATH'] = self.path + with self.assertRaises(SystemExit) as cm: + nightly.main() + self.assertEqual(cm.exception.code, 1) + + @patch.dict(os.environ, clear=True) + @patch('sys.argv', ['fdroid nightly', '--verbose']) + def test_main_empty_dot_ssh(self): + """Test that it does not create ~/.ssh if it does not exist + + Careful! If the test env is wrong, it can mess up the local + SSH setup. 
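+
+ The assertions below check that ~/.ssh is absent both before and after
+ nightly.main() runs.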
+ + """ + dot_ssh = self.home / '.ssh' + self._copy_test_debug_keystore() + os.environ['HOME'] = str(self.home) + os.environ['PATH'] = self.path + self.assertFalse(dot_ssh.exists()) + nightly.main() + self.assertFalse(dot_ssh.exists()) + + @patch.dict(os.environ, clear=True) + @patch('sys.argv', ['fdroid nightly', '--verbose']) + def test_main_on_user_machine(self): + """Test that `fdroid nightly` runs on the user's machine + + Careful! If the test env is wrong, it can mess up the local + SSH setup. + + """ + dot_ssh = self.home / '.ssh' + dot_ssh.mkdir() + self._copy_test_debug_keystore() + os.environ['HOME'] = str(self.home) + os.environ['PATH'] = self.path + nightly.main() + self.assertTrue((dot_ssh / AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME).exists()) + self.assertTrue( + (dot_ssh / (AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME + '.pub')).exists() + ) + + @patch('fdroidserver.common.vcs_git.git', lambda args, e: common.PopenResult(1)) + @patch('sys.argv', ['fdroid nightly', '--verbose']) + def test_private_or_non_existent_git_mirror(self): + """Test that this exits with an error when the git mirror repo won't work + + Careful! If the test environment is setup wrong, it can mess + up local files in ~/.ssh or ~/.android. + + """ + os.chdir(self.testdir) + with patch.dict( + os.environ, + { + 'CI': 'true', + 'CI_PROJECT_PATH': 'thisshouldneverexist/orthistoo', + 'CI_PROJECT_URL': 'https://gitlab.com/thisshouldneverexist/orthistoo', + 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, + 'GITLAB_USER_NAME': 'username', + 'GITLAB_USER_EMAIL': 'username@example.com', + 'HOME': str(self.testdir), + 'PATH': os.getenv('PATH'), + }, + clear=True, + ): + with self.assertRaises(exception.VCSException): + nightly.main() + + def test_clone_git_repo(self): + os.chdir(self.testdir) + common.options = Options + d = 'fakeappid' + nightly.clone_git_repo('https://gitlab.com/fdroid/ci-test-tiny-repo.git', d) + self.assertTrue(os.path.isdir(Path(d) / '.git')) + + def test_clone_git_repo_fails_on_gitlab_password_prompt(self): + os.chdir(self.testdir) + common.options = Options + d = 'shouldnotbecreated' + with self.assertRaises(exception.VCSException): + nightly.clone_git_repo(f'https://gitlab.com/{d}/{d}.git', d) + self.assertFalse(os.path.isdir(Path(d))) + + def test_clone_git_repo_fails_on_github_password_prompt(self): + os.chdir(self.testdir) + common.options = Options + d = 'shouldnotbecreated' + with self.assertRaises(exception.VCSException): + nightly.clone_git_repo(f'https://github.com/{d}/{d}.git', d) + self.assertFalse(os.path.isdir(Path(d))) + + def _put_fdroid_in_args(self, args): + """Find fdroid command that belongs to this source code tree""" + fdroid = os.path.join(basedir.parent, 'fdroid') + if not os.path.exists(fdroid): + fdroid = os.getenv('fdroid') + return [fdroid] + args[1:] + + @patch('sys.argv', ['fdroid nightly', '--verbose']) + @patch('platform.node', lambda: 'example.com') + def test_github_actions(self): + """Careful! 
If the test env is bad, it'll mess up the local SSH setup + + https://docs.github.com/en/actions/learn-github-actions/environment-variables + + """ + + called = [] + orig_check_call = subprocess.check_call + os.chdir(self.testdir) + os.makedirs('fdroid/git-mirror/fdroid/repo') # fake this to avoid cloning + self._copy_test_debug_keystore() + self._copy_debug_apk() + + def _subprocess_check_call(args, cwd=None, env=None): + if os.path.basename(args[0]) in ('keytool', 'openssl'): + orig_check_call(args, cwd=cwd, env=env) + elif args[:2] == ['fdroid', 'update']: + orig_check_call(self._put_fdroid_in_args(args), cwd=cwd, env=env) + else: + called.append(args[:2]) + return + + with patch.dict( + os.environ, + { + 'CI': 'true', + 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, + 'GITHUB_ACTIONS': 'true', + 'GITHUB_ACTOR': 'username', + 'GITHUB_REPOSITORY': 'f-droid/test', + 'GITHUB_SERVER_URL': 'https://github.com', + 'HOME': str(self.testdir), + 'PATH': os.getenv('PATH'), + 'fdroid': os.getenv('fdroid', ''), + }, + clear=True, + ): + self.assertTrue(testroot == Path.home().parent) + with patch('subprocess.check_call', _subprocess_check_call): + try: + nightly.main() + except exception.BuildException as e: + if "apksigner not found" in e.value: + self.skipTest("skipping, apksigner not found due to fake $HOME") + else: + raise + + self.assertEqual(called, [['ssh', '-Tvi'], ['fdroid', 'deploy']]) + git_url = 'git@github.com:f-droid/test-nightly' + mirror_url = index.get_mirror_service_urls({"url": git_url})[0] + expected = { + 'archive_description': 'Old nightly builds that have been archived.', + 'archive_name': 'f-droid/test-nightly archive', + 'archive_older': 20, + 'archive_url': mirror_url + '/archive', + 'keydname': 'CN=Android Debug,O=Android,C=US', + 'keypass': 'android', + 'keystore': nightly.KEYSTORE_FILE, + 'keystorepass': 'android', + 'make_current_version_link': False, + 'repo_description': 'Nightly builds from username@example.com', + 'repo_keyalias': 'androiddebugkey', + 'repo_name': 'f-droid/test-nightly', + 'repo_url': mirror_url + '/repo', + 'servergitmirrors': [{"url": git_url}], + } + with open(common.CONFIG_FILE) as fp: + config = yaml.safe_load(fp) + # .ssh is random tmpdir set in nightly.py, so test basename only + self.assertEqual( + os.path.basename(config['identity_file']), + DEBUG_KEYSTORE_KEY_FILE_NAME, + ) + del config['identity_file'] + self.assertEqual(expected, config) + + @patch('sys.argv', ['fdroid nightly', '--verbose']) + def test_gitlab_ci(self): + """Careful! 
If the test env is bad, it can mess up the local SSH setup""" + called = [] + orig_check_call = subprocess.check_call + os.chdir(self.testdir) + os.makedirs('fdroid/git-mirror/fdroid/repo') # fake this to avoid cloning + self._copy_test_debug_keystore() + self._copy_debug_apk() + + def _subprocess_check_call(args, cwd=None, env=None): + if os.path.basename(args[0]) in ('keytool', 'openssl'): + orig_check_call(args, cwd=cwd, env=env) + elif args[:2] == ['fdroid', 'update']: + orig_check_call(self._put_fdroid_in_args(args), cwd=cwd, env=env) + else: + called.append(args[:2]) + return + + with patch.dict( + os.environ, + { + 'CI': 'true', + 'CI_PROJECT_PATH': 'fdroid/test', + 'CI_PROJECT_URL': 'https://gitlab.com/fdroid/test', + 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, + 'GITLAB_USER_NAME': 'username', + 'GITLAB_USER_EMAIL': 'username@example.com', + 'HOME': str(self.testdir), + 'PATH': os.getenv('PATH'), + 'fdroid': os.getenv('fdroid', ''), + }, + clear=True, + ): + self.assertTrue(testroot == Path.home().parent) + with patch('subprocess.check_call', _subprocess_check_call): + try: + nightly.main() + except exception.BuildException as e: + if "apksigner not found" in e.value: + self.skipTest("skipping, apksigner not found due to fake $HOME") + else: + raise + + self.assertEqual(called, [['ssh', '-Tvi'], ['fdroid', 'deploy']]) + expected = { + 'archive_description': 'Old nightly builds that have been archived.', + 'archive_name': 'fdroid/test-nightly archive', + 'archive_older': 20, + 'archive_url': 'https://gitlab.com/fdroid/test-nightly/-/raw/master/fdroid/archive', + 'keydname': 'CN=Android Debug,O=Android,C=US', + 'keypass': 'android', + 'keystore': nightly.KEYSTORE_FILE, + 'keystorepass': 'android', + 'make_current_version_link': False, + 'repo_description': 'Nightly builds from username@example.com', + 'repo_keyalias': 'androiddebugkey', + 'repo_name': 'fdroid/test-nightly', + 'repo_url': 'https://gitlab.com/fdroid/test-nightly/-/raw/master/fdroid/repo', + 'servergitmirrors': [{"url": 'git@gitlab.com:fdroid/test-nightly'}], + } + with open(common.CONFIG_FILE) as fp: + config = yaml.safe_load(fp) + # .ssh is random tmpdir set in nightly.py, so test basename only + self.assertEqual( + os.path.basename(config['identity_file']), + DEBUG_KEYSTORE_KEY_FILE_NAME, + ) + del config['identity_file'] + self.assertEqual(expected, config) diff --git a/tests/test_publish.py b/tests/test_publish.py new file mode 100755 index 00000000..82c670d7 --- /dev/null +++ b/tests/test_publish.py @@ -0,0 +1,413 @@ +#!/usr/bin/env python3 + +# +# command which created the keystore used in this test case: +# +# $ for ALIAS in repokey a163ec9b d2d51ff2 dc3b169e 78688a0f; \ +# do keytool -genkey -keystore dummy-keystore.jks \ +# -alias $ALIAS -keyalg 'RSA' -keysize '2048' \ +# -validity '10000' -storepass 123456 -storetype jks \ +# -keypass 123456 -dname 'CN=test, OU=F-Droid'; done +# + +import json +import os +import pathlib +import shutil +import sys +import tempfile +import unittest +from unittest import mock + +from fdroidserver import common, metadata, publish, signatures +from fdroidserver._yaml import yaml +from fdroidserver.exception import FDroidException + +from .shared_test_code import VerboseFalseOptions, mkdtemp + +basedir = pathlib.Path(__file__).parent + + +class PublishTest(unittest.TestCase): + '''fdroidserver/publish.py''' + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + + def tearDown(self): + self._td.cleanup() + os.chdir(basedir) + + def 
test_key_alias(self): + publish.config = {} + self.assertEqual('a163ec9b', publish.key_alias('com.example.app')) + self.assertEqual('d2d51ff2', publish.key_alias('com.example.anotherapp')) + self.assertEqual('dc3b169e', publish.key_alias('org.test.testy')) + self.assertEqual('78688a0f', publish.key_alias('org.org.org')) + + self.assertEqual('ee8807d2', publish.key_alias("org.schabi.newpipe")) + self.assertEqual('b53c7e11', publish.key_alias("de.grobox.liberario")) + + publish.config = { + 'keyaliases': {'yep.app': '@org.org.org', 'com.example.app': '1a2b3c4d'} + } + self.assertEqual('78688a0f', publish.key_alias('yep.app')) + self.assertEqual('1a2b3c4d', publish.key_alias('com.example.app')) + + def test_read_fingerprints_from_keystore(self): + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystorepass'] = '123456' + publish.config['keypass'] = '123456' + publish.config['keystore'] = 'dummy-keystore.jks' + + expected = { + '78688a0f': '277655a6235bc6b0ef2d824396c51ba947f5ebc738c293d887e7083ff338af82', + 'd2d51ff2': 'fa3f6a017541ee7fe797be084b1bcfbf92418a7589ef1f7fdeb46741b6d2e9c3', + 'dc3b169e': '6ae5355157a47ddcc3834a71f57f6fb5a8c2621c8e0dc739e9ddf59f865e497c', + 'a163ec9b': 'd34f678afbaa8f2fa6cc0edd6f0c2d1d2e2e9eb08bea521b24c740806016bff4', + 'repokey': 'c58460800c7b250a619c30c13b07b7359a43e5af71a4352d86c58ae18c9f6d41', + } + result = publish.read_fingerprints_from_keystore() + self.maxDiff = None + self.assertEqual(expected, result) + + def test_store_and_load_signer_fingerprints(self): + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystorepass'] = '123456' + publish.config['keypass'] = '123456' + publish.config['keystore'] = os.path.join(basedir, 'dummy-keystore.jks') + publish.config['repo_keyalias'] = 'repokey' + + appids = [ + 'com.example.app', + 'net.unavailable', + 'org.test.testy', + 'com.example.anotherapp', + 'org.org.org', + ] + + os.chdir(self.testdir) + common.write_config_file('') + + publish.store_publish_signer_fingerprints(appids, indent=2) + + self.maxDiff = None + expected = { + "com.example.anotherapp": { + "signer": "fa3f6a017541ee7fe797be084b1bcfbf92418a7589ef1f7fdeb46741b6d2e9c3" + }, + "com.example.app": { + "signer": "d34f678afbaa8f2fa6cc0edd6f0c2d1d2e2e9eb08bea521b24c740806016bff4" + }, + "org.org.org": { + "signer": "277655a6235bc6b0ef2d824396c51ba947f5ebc738c293d887e7083ff338af82" + }, + "org.test.testy": { + "signer": "6ae5355157a47ddcc3834a71f57f6fb5a8c2621c8e0dc739e9ddf59f865e497c" + }, + } + self.assertEqual(expected, common.load_publish_signer_fingerprints()) + + with open(common.CONFIG_FILE) as fp: + config = yaml.load(fp) + self.assertEqual( + 'c58460800c7b250a619c30c13b07b7359a43e5af71a4352d86c58ae18c9f6d41', + config['repo_key_sha256'], + ) + + def test_store_and_load_signer_fingerprints_with_missmatch(self): + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystorepass'] = '123456' + publish.config['keypass'] = '123456' + publish.config['keystore'] = os.path.join(basedir, 'dummy-keystore.jks') + publish.config['repo_keyalias'] = 'repokey' + publish.config['repo_key_sha256'] = 'bad bad bad bad bad bad bad bad bad bad' + + os.chdir(self.testdir) + publish.store_publish_signer_fingerprints({}, indent=2) + with self.assertRaises(FDroidException): + common.load_publish_signer_fingerprints() + + def test_reproducible_binaries_process(self): + 
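+ # Build up a repo where the metadata carries a Binaries: URL and a
+ # matching reference APK sits in unsigned/binaries/, then run
+ # `fdroid publish` over it.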
common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystore'] = 'keystore.jks' + publish.config['repo_keyalias'] = 'sova' + publish.config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + publish.config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + shutil.copy('keystore.jks', self.testdir) + os.mkdir(os.path.join(self.testdir, 'repo')) + metadata_dir = os.path.join(self.testdir, 'metadata') + os.mkdir(metadata_dir) + shutil.copy(os.path.join('metadata', 'com.politedroid.yml'), metadata_dir) + with open(os.path.join(metadata_dir, 'com.politedroid.yml'), 'a') as fp: + fp.write('\nBinaries: https://placeholder/foo%v.apk\n') + os.mkdir(os.path.join(self.testdir, 'unsigned')) + shutil.copy( + 'repo/com.politedroid_6.apk', os.path.join(self.testdir, 'unsigned') + ) + os.mkdir(os.path.join(self.testdir, 'unsigned', 'binaries')) + shutil.copy( + 'repo/com.politedroid_6.apk', + os.path.join( + self.testdir, 'unsigned', 'binaries', 'com.politedroid_6.binary.apk' + ), + ) + + os.chdir(self.testdir) + with mock.patch.object(sys, 'argv', ['fdroid fakesubcommand']): + publish.main() + + def test_check_for_key_collisions(self): + from fdroidserver.metadata import App + + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + + randomappids = [ + "org.fdroid.fdroid", + "a.b.c", + "u.v.w.x.y.z", + "lpzpkgqwyevnmzvrlaazhgardbyiyoybyicpmifkyrxkobljoz", + "vuslsm.jlrevavz.qnbsenmizhur.lprwbjiujtu.ekiho", + "w.g.g.w.p.v.f.v.gvhyz", + "nlozuqer.ufiinmrbjqboogsjgmpfks.dywtpcpnyssjmqz", + ] + allapps = {} + for appid in randomappids: + allapps[appid] = App() + allaliases = publish.check_for_key_collisions(allapps) + self.assertEqual(len(randomappids), len(allaliases)) + + allapps = {'tof.cv.mpp': App(), 'j6mX276h': App()} + self.assertEqual(publish.key_alias('tof.cv.mpp'), publish.key_alias('j6mX276h')) + self.assertRaises(SystemExit, publish.check_for_key_collisions, allapps) + + def test_create_key_if_not_existing(self): + try: + import jks + import jks.util + except ImportError: + self.skipTest("pyjks not installed") + common.config = {} + common.fill_config_defaults(common.config) + publish.config = common.config + publish.config['keystorepass'] = '123456' + publish.config['keypass'] = '654321' + publish.config['keystore'] = "keystore.jks" + publish.config[ + 'keydname' + ] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' + os.chdir(self.testdir) + keystore = jks.KeyStore.new("jks", []) + keystore.save(publish.config['keystore'], publish.config['keystorepass']) + + self.assertTrue(publish.create_key_if_not_existing("newalias")) + # The second time we try that, a new key should not be created + self.assertFalse(publish.create_key_if_not_existing("newalias")) + self.assertTrue(publish.create_key_if_not_existing("anotheralias")) + + keystore = jks.KeyStore.load( + publish.config['keystore'], publish.config['keystorepass'] + ) + self.assertCountEqual(keystore.private_keys, ["newalias", "anotheralias"]) + for alias, pk in keystore.private_keys.items(): + self.assertFalse(pk.is_decrypted()) + pk.decrypt(publish.config['keypass']) + self.assertTrue(pk.is_decrypted()) + self.assertEqual(jks.util.RSA_ENCRYPTION_OID, pk.algorithm_oid) + + def test_status_update_json(self): + common.config = {} + publish.config = {} + with tempfile.TemporaryDirectory() as tmpdir: + os.chdir(tmpdir) + with mock.patch('sys.argv', ['fdroid publish', '']): + publish.status_update_json([], 
[]) + with open('repo/status/publish.json') as fp: + data = json.load(fp) + self.assertTrue('apksigner' in data) + + publish.config = { + 'apksigner': 'apksigner', + } + publish.status_update_json([], []) + with open('repo/status/publish.json') as fp: + data = json.load(fp) + self.assertEqual( + shutil.which(publish.config['apksigner']), data['apksigner'] + ) + + publish.config = {} + common.fill_config_defaults(publish.config) + publish.status_update_json([], []) + with open('repo/status/publish.json') as fp: + data = json.load(fp) + self.assertEqual(publish.config.get('apksigner'), data['apksigner']) + self.assertEqual(publish.config['jarsigner'], data['jarsigner']) + self.assertEqual(publish.config['keytool'], data['keytool']) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_sign_then_implant_signature(self): + os.chdir(self.testdir) + + common.options = VerboseFalseOptions + config = common.read_config() + if 'apksigner' not in config: + self.skipTest( + 'SKIPPING test_sign_then_implant_signature, apksigner not installed!' + ) + config['repo_keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + shutil.copy(basedir / 'keystore.jks', self.testdir) + config['keystore'] = 'keystore.jks' + config['keydname'] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' + publish.config = config + common.config = config + + app = metadata.App() + app.id = 'org.fdroid.ci' + versionCode = 1 + build = metadata.Build( + { + 'versionCode': versionCode, + 'versionName': '1.0', + } + ) + app.Builds = [build] + os.mkdir('metadata') + metadata.write_metadata(os.path.join('metadata', '%s.yml' % app.id), app) + + os.mkdir('unsigned') + testapk = basedir / 'no_targetsdk_minsdk1_unsigned.apk' + unsigned = os.path.join('unsigned', common.get_release_filename(app, build)) + signed = os.path.join('repo', common.get_release_filename(app, build)) + shutil.copy(testapk, unsigned) + + # sign the unsigned APK + self.assertTrue(os.path.exists(unsigned)) + self.assertFalse(os.path.exists(signed)) + with mock.patch( + 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] + ): + publish.main() + self.assertFalse(os.path.exists(unsigned)) + self.assertTrue(os.path.exists(signed)) + + with mock.patch('sys.argv', ['fdroid signatures', signed]): + signatures.main() + self.assertTrue( + os.path.exists( + os.path.join( + 'metadata', 'org.fdroid.ci', 'signatures', '1', 'MANIFEST.MF' + ) + ) + ) + os.remove(signed) + + # implant the signature into the unsigned APK + shutil.copy(testapk, unsigned) + self.assertTrue(os.path.exists(unsigned)) + self.assertFalse(os.path.exists(signed)) + with mock.patch( + 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] + ): + publish.main() + self.assertFalse(os.path.exists(unsigned)) + self.assertTrue(os.path.exists(signed)) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + def test_exit_on_error(self): + """Exits properly on errors, with and without --error-on-failed. + + `fdroid publish` runs on the signing server and does large + batches. In that case, it shouldn't exit after a single + failure since it should try to complete the whole batch. For + CI and other use cases, there is --error-on-failed to force it + to exit after a failure. 
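+
+ The steps below first publish and record the signature normally, then
+ append junk to the stored MANIFEST.MF so implanting the signature fails:
+ without the flag `fdroid publish` should still complete, and with
+ --error-on-failed it should exit with code 1.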
+ + """ + + class Options: + error_on_failed = True + verbose = False + + os.chdir(self.testdir) + + common.options = Options + config = common.read_config() + if 'apksigner' not in config: + self.skipTest('SKIPPING test_error_on_failed, apksigner not installed!') + config['repo_keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + shutil.copy(basedir / 'keystore.jks', self.testdir) + config['keystore'] = 'keystore.jks' + config['keydname'] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' + publish.config = config + common.config = config + + app = metadata.App() + app.id = 'org.fdroid.ci' + versionCode = 1 + build = metadata.Build( + { + 'versionCode': versionCode, + 'versionName': '1.0', + } + ) + app.Builds = [build] + os.mkdir('metadata') + metadata.write_metadata(os.path.join('metadata', '%s.yml' % app.id), app) + + os.mkdir('unsigned') + testapk = basedir / 'no_targetsdk_minsdk1_unsigned.apk' + unsigned = os.path.join('unsigned', common.get_release_filename(app, build)) + signed = os.path.join('repo', common.get_release_filename(app, build)) + shutil.copy(testapk, unsigned) + + # sign the unsigned APK + self.assertTrue(os.path.exists(unsigned)) + self.assertFalse(os.path.exists(signed)) + with mock.patch( + 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] + ): + publish.main() + self.assertFalse(os.path.exists(unsigned)) + self.assertTrue(os.path.exists(signed)) + + with mock.patch('sys.argv', ['fdroid signatures', signed]): + signatures.main() + mf = os.path.join('metadata', 'org.fdroid.ci', 'signatures', '1', 'MANIFEST.MF') + self.assertTrue(os.path.exists(mf)) + os.remove(signed) + + with open(mf, 'a') as fp: + fp.write('appended to break signature') + + # implant the signature into the unsigned APK + shutil.copy(testapk, unsigned) + self.assertTrue(os.path.exists(unsigned)) + self.assertFalse(os.path.exists(signed)) + apk_id = '%s:%d' % (app.id, versionCode) + + # by default, it should complete without exiting + with mock.patch('sys.argv', ['fdroid publish', apk_id]): + publish.main() + + # --error-on-failed should make it exit + with mock.patch('sys.argv', ['fdroid publish', '--error-on-failed', apk_id]): + with self.assertRaises(SystemExit) as e: + publish.main() + self.assertEqual(e.exception.code, 1) diff --git a/tests/test_rewritemeta.py b/tests/test_rewritemeta.py new file mode 100755 index 00000000..4dcdd03f --- /dev/null +++ b/tests/test_rewritemeta.py @@ -0,0 +1,257 @@ +#!/usr/bin/env python3 + +import os +import tempfile +import textwrap +import unittest +from pathlib import Path +from unittest import mock + +from fdroidserver import metadata, rewritemeta + +from .shared_test_code import TmpCwd, mkdtemp + +basedir = Path(__file__).parent + + +class RewriteMetaTest(unittest.TestCase): + '''fdroidserver/publish.py''' + + def setUp(self): + os.chdir(basedir) + metadata.warnings_action = 'error' + self._td = mkdtemp() + self.testdir = self._td.name + + def tearDown(self): + self._td.cleanup() + + def test_remove_blank_flags_from_builds_com_politedroid_3(self): + """Unset fields in Builds: entries should be removed.""" + appid = 'com.politedroid' + app = metadata.read_metadata({appid: -1})[appid] + builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) + self.assertEqual( + builds[0], + { + 'versionName': '1.2', + 'versionCode': 3, + 'commit': '6a548e4b19', + 'target': 'android-10', + 'antifeatures': { + 'KnownVuln': {}, + 
'NonFreeAssets': {}, + }, + }, + ) + + def test_remove_blank_flags_from_builds_com_politedroid_4(self): + """Unset fields in Builds: entries should be removed.""" + appid = 'com.politedroid' + app = metadata.read_metadata({appid: -1})[appid] + builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) + self.assertEqual( + builds[1], + { + 'versionName': '1.3', + 'versionCode': 4, + 'commit': 'ad865b57bf3ac59580f38485608a9b1dda4fa7dc', + 'target': 'android-15', + }, + ) + + def test_remove_blank_flags_from_builds_org_adaway_52(self): + """Unset fields in Builds: entries should be removed.""" + appid = 'org.adaway' + app = metadata.read_metadata({appid: -1})[appid] + builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) + self.assertEqual( + builds[-1], + { + 'buildjni': ['yes'], + 'commit': 'v3.0', + 'gradle': ['yes'], + 'preassemble': ['renameExecutables'], + 'subdir': 'AdAway', + 'versionCode': 52, + 'versionName': '3.0', + }, + ) + + def test_remove_blank_flags_from_builds_no_builds(self): + """Unset fields in Builds: entries should be removed.""" + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds(None), + list(), + ) + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds(dict()), + list(), + ) + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds(list()), + list(), + ) + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds(set()), + list(), + ) + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds(tuple()), + list(), + ) + + def test_remove_blank_flags_from_builds_0_is_a_value(self): + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds([{'versionCode': 0}]), + [{'versionCode': 0}], + ) + + def test_remove_blank_flags_from_builds_values_to_purge(self): + self.assertEqual( + rewritemeta.remove_blank_flags_from_builds( + [ + { + 'antifeatures': dict(), + 'forceversion': False, + 'init': None, + 'rm': '', + 'scandelete': list(), + 'versionCode': 0, + }, + {'antifeatures': list(), 'versionCode': 1}, + {'antifeatures': '', 'versionCode': 2}, + ] + ), + [{'versionCode': 0}, {'versionCode': 1}, {'versionCode': 2}], + ) + + @mock.patch('sys.argv', ['fdroid rewritemeta', 'a']) + def test_rewrite_no_builds(self): + os.chdir(self.testdir) + Path('metadata').mkdir() + with Path('metadata/a.yml').open('w') as f: + f.write('AutoName: a') + rewritemeta.main() + self.assertEqual( + Path('metadata/a.yml').read_text(encoding='utf-8'), + textwrap.dedent( + '''\ + License: Unknown + + AutoName: a + + AutoUpdateMode: None + UpdateCheckMode: None + ''' + ), + ) + + @mock.patch('sys.argv', ['fdroid rewritemeta', 'a']) + def test_rewrite_empty_build_field(self): + os.chdir(self.testdir) + Path('metadata').mkdir() + with Path('metadata/a.yml').open('w') as fp: + fp.write( + textwrap.dedent( + """ + License: Apache-2.0 + Builds: + - versionCode: 4 + versionName: a + rm: + """ + ) + ) + rewritemeta.main() + self.assertEqual( + Path('metadata/a.yml').read_text(encoding='utf-8'), + textwrap.dedent( + '''\ + License: Apache-2.0 + + Builds: + - versionName: a + versionCode: 4 + + AutoUpdateMode: None + UpdateCheckMode: None + ''' + ), + ) + + def test_remove_blank_flags_from_builds_app_with_special_build_params(self): + appid = 'app.with.special.build.params' + app = metadata.read_metadata({appid: -1})[appid] + builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) + self.assertEqual( + builds[-1], + { + 'versionName': '2.1.2', + 'versionCode': 51, + 'disable': 'Labelled as pre-release, so skipped', + }, + 
) + + def test_remove_blank_flags_from_builds_app_with_special_build_params_af(self): + """Unset fields in Builds: entries should be removed.""" + appid = 'app.with.special.build.params' + app = metadata.read_metadata({appid: -1})[appid] + builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) + self.assertEqual( + builds[-2], + { + 'antifeatures': { + 'Ads': {'en-US': 'includes ad lib\n', 'zh-CN': '包括广告图书馆\n'}, + 'Tracking': {'en-US': 'standard suspects\n'}, + }, + 'commit': '2.1.1', + 'maven': '2', + 'patch': [ + 'manifest-ads.patch', + 'mobilecore.patch', + ], + 'srclibs': ['FacebookSDK@sdk-version-3.0.2'], + 'versionCode': 50, + 'versionName': '2.1.1-c', + }, + ) + + @mock.patch('sys.argv', ['fdroid rewritemeta', 'a', 'b']) + def test_rewrite_scenario_trivial(self): + with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + Path('metadata').mkdir() + with Path('metadata/a.yml').open('w') as f: + f.write('AutoName: a') + with Path('metadata/b.yml').open('w') as f: + f.write('AutoName: b') + + rewritemeta.main() + + self.assertEqual( + Path('metadata/a.yml').read_text(encoding='utf-8'), + textwrap.dedent( + '''\ + License: Unknown + + AutoName: a + + AutoUpdateMode: None + UpdateCheckMode: None + ''' + ), + ) + + self.assertEqual( + Path('metadata/b.yml').read_text(encoding='utf-8'), + textwrap.dedent( + '''\ + License: Unknown + + AutoName: b + + AutoUpdateMode: None + UpdateCheckMode: None + ''' + ), + ) diff --git a/tests/test_scanner.py b/tests/test_scanner.py new file mode 100755 index 00000000..8da5d5cb --- /dev/null +++ b/tests/test_scanner.py @@ -0,0 +1,928 @@ +#!/usr/bin/env python3 + +import logging +import os +import pathlib +import re +import shutil +import sys +import tempfile +import textwrap +import unittest +import uuid +import zipfile +from dataclasses import asdict +from datetime import datetime, timedelta, timezone +from unittest import mock + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib +import yaml + +import fdroidserver.build +import fdroidserver.common +import fdroidserver.exception +import fdroidserver.metadata +import fdroidserver.scanner + +from .shared_test_code import TmpCwd, mkdtemp, mock_open_to_str + +basedir = pathlib.Path(__file__).parent + + +def _dexdump_found(): + """Find if dexdump is available in the PATH or in an Android SDK install. + + This must be run after common.config is setup. + + """ + try: + dexdump = fdroidserver.common.find_sdk_tools_cmd("dexdump") + logging.debug('Found dexdump: %s', dexdump) + return dexdump is not None + except fdroidserver.exception.FDroidException: + pass + return False + + +# Always use built-in default rules so changes in downloaded rules don't break tests. 
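+# (The class-level patch below replaces SUSSDataController.load with
+# load_from_defaults, so every test in this class runs against the bundled
+# rules rather than anything downloaded or cached.)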
+@mock.patch( + 'fdroidserver.scanner.SUSSDataController.load', + fdroidserver.scanner.SUSSDataController.load_from_defaults, +) +class ScannerTest(unittest.TestCase): + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + fdroidserver.scanner.ScannerTool.refresh_allowed = False + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + def test_scan_source_files(self): + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.json = False + projects = { + 'OtakuWorld': 2, + 'Zillode': 1, + 'cn.wildfirechat.chat': 4, + 'com.github.shadowsocks': 9, + 'com.integreight.onesheeld': 17, + 'com.jens.automation2': 3, + 'firebase-suspect': 1, + 'org.mozilla.rocket': 2, + 'org.piepmeyer.gauguin': 1, + 'org.tasks': 3, + 'realm': 1, + 'se.manyver': 3, + 'lockfile.test': 1, + 'com.lolo.io.onelist': 6, + 'catalog.test': 22, + } + for d in (basedir / 'source-files').iterdir(): + build = fdroidserver.metadata.Build() + should = projects.get(d.name, 0) + if should > 0: + with self.assertLogs(level=logging.ERROR): + fatal_problems = fdroidserver.scanner.scan_source(d, build) + else: + with self.assertNoLogs(): + fatal_problems = fdroidserver.scanner.scan_source(d, build) + self.assertEqual( + should, fatal_problems, f'{d} should have {should} errors!' + ) + + def test_get_gradle_compile_commands_without_catalog(self): + test_files = [ + ('source-files/fdroid/fdroidclient/build.gradle', 'yes', 15), + ('source-files/com.nextcloud.client/build.gradle', 'generic', 24), + ('source-files/com.kunzisoft.testcase/build.gradle', 'libre', 3), + ('source-files/cn.wildfirechat.chat/chat/build.gradle', 'yes', 30), + ('source-files/org.tasks/app/build.gradle.kts', 'generic', 41), + ('source-files/at.bitfire.davdroid/build.gradle', 'standard', 15), + ('source-files/se.manyver/android/app/build.gradle', 'indie', 26), + ('source-files/osmandapp/osmand/build.gradle', 'free', 2), + ('source-files/eu.siacs.conversations/build.gradle', 'free', 21), + ('source-files/org.mozilla.rocket/app/build.gradle', 'focus', 40), + ('source-files/com.jens.automation2/app/build.gradle', 'fdroidFlavor', 5), + ('source-files/flavor.test/build.gradle', ['foss', 'prod'], 7), + ] + + for f, flavor, count in test_files: + i = 0 + build = fdroidserver.metadata.Build() + if isinstance(flavor, list): + build.gradle = flavor + else: + build.gradle = [flavor] + regexs = fdroidserver.scanner.get_gradle_compile_commands_without_catalog( + build + ) + with open(f, encoding='utf-8') as fp: + for line in fp.readlines(): + for regex in regexs: + m = regex.match(line) + if m: + i += 1 + self.assertEqual(count, i) + + def test_get_gradle_compile_commands_with_catalog(self): + test_files = [ + ('source-files/com.lolo.io.onelist/build.gradle.kts', 'yes', 5), + ('source-files/com.lolo.io.onelist/app/build.gradle.kts', 'yes', 26), + ('source-files/catalog.test/build.gradle.kts', 'yes', 3), + ('source-files/catalog.test/app/build.gradle', 'yes', 2), + ] + + for f, flavor, count in test_files: + i = 0 + build = fdroidserver.metadata.Build() + build.gradle = [flavor] + regexs = fdroidserver.scanner.get_gradle_compile_commands_with_catalog( + build, "libs" + ) + with open(f, encoding='utf-8') as fp: + for line in fp.readlines(): + for regex in regexs: + m = regex.match(line) + if m: + i += 1 + self.assertEqual(count, i) + + def test_catalog(self): + accessor_coordinate_pairs = { + 'firebase.crash': ['com.google.firebase:firebase-crash:1.1.1'], + 'firebase.core': 
['com.google.firebase:firebase-core:2.2.2'], + 'play.service.ads': ['com.google.android.gms:play-services-ads:1.2.1'], + 'jacoco': ['org.jacoco:org.jacoco.core:0.8.7'], + 'plugins.google.services': ['com.google.gms.google-services:1.2.1'], + 'plugins.firebase.crashlytics': ['com.google.firebase.crashlytics:1.1.1'], + 'bundles.firebase': [ + 'com.google.firebase:firebase-crash:1.1.1', + 'com.google.firebase:firebase-core:2.2.2', + ], + 'plugins.androidApplication.asLibraryDependency': [ + 'com.android.application:8.12.0' + ], + } + with open('source-files/catalog.test/gradle/libs.versions.toml', 'rb') as f: + catalog = fdroidserver.scanner.GradleVersionCatalog(tomllib.load(f)) + for accessor, coordinate in accessor_coordinate_pairs.items(): + self.assertEqual(catalog.get_coordinate(accessor), coordinate) + + def test_get_catalogs(self): + test_files = [ + ('source-files/com.lolo.io.onelist/', 1), + ('source-files/catalog.test/', 3), + ('source-files/org.piepmeyer.gauguin/', 1), + ('source-files/com.infomaniak.mail/', 2), + ] + + for root, count in test_files: + self.assertEqual(count, len(fdroidserver.scanner.get_catalogs(root))) + + def test_scan_source_files_sneaky_maven(self): + """Check for sneaking in banned maven repos""" + os.chdir(self.testdir) + fdroidserver.scanner.config = None + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.json = True + with open('build.gradle', 'w', encoding='utf-8') as fp: + fp.write( + textwrap.dedent( + """ + maven { + "https://jitpack.io" + url 'https://maven.fabric.io/public' + } + maven { + "https://maven.google.com" + setUrl('https://evilcorp.com/maven') + } + """ + ) + ) + with self.assertLogs(level=logging.ERROR): + count = fdroidserver.scanner.scan_source(self.testdir) + self.assertEqual(2, count, 'there should be this many errors') + + def test_scan_source_file_types(self): + """Build product files are not allowed, test they are detected + + This test runs as if `fdroid build` running to test the + difference between absolute and relative paths. 
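+
+        In short: the placeholder files are created while the CWD is the
+        absolute build dir, then scan_source() is called from the parent
+        test dir with the relative path 'build/fake.app', so both path
+        forms are covered.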
+ + """ + build_dir = os.path.join('build', 'fake.app') + abs_build_dir = os.path.join(self.testdir, build_dir) + os.makedirs(abs_build_dir, exist_ok=True) + os.chdir(abs_build_dir) + + fdroidserver.scanner.config = None + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.json = True + + keep = [ + 'arg.jar', + 'ascii.out', + 'baz.so', + 'classes.dex', + 'sqlcipher.aar', + 'static.a', + 'src/test/resources/classes.dex', + ] + remove = ['gradle-wrapper.jar', 'gradlew', 'gradlew.bat'] + os.makedirs('src/test/resources', exist_ok=True) + for f in keep + remove: + with open(f, 'w') as fp: + fp.write('placeholder') + self.assertTrue(os.path.exists(f)) + binaries = ['binary.out', 'fake.png', 'snippet.png'] + with open('binary.out', 'wb') as fp: + fp.write(b'\x00\x00') + fp.write(uuid.uuid4().bytes) + shutil.copyfile('binary.out', 'fake.png') + os.chmod('fake.png', 0o755) # nosec B103 + with open('snippet.png', 'wb') as fp: + fp.write( + b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x000\x00\x00' + b'\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x04sB' + b'IT\x08\x08\x08\x08|\x08d\x88\x00\x00\x00\tpHYs\x00\x00\n' + b'a\x00\x00\na\x01\xfc\xccJ%\x00\x00\x00\x19tEXtSoftware' + ) + os.chmod('snippet.png', 0o755) # nosec B103 + + # run scanner as if from `fdroid build` + os.chdir(self.testdir) + json_per_build = fdroidserver.scanner.MessageStore() + with self.assertLogs(level=logging.ERROR): + count = fdroidserver.scanner.scan_source( + build_dir, json_per_build=json_per_build + ) + self.assertEqual(6, count, 'there should be this many errors') + os.chdir(build_dir) + + for f in keep + binaries: + self.assertTrue(os.path.exists(f), f + ' should still be there') + for f in remove: + self.assertFalse(os.path.exists(f), f + ' should have been removed') + + json_per_build_asdict = asdict(json_per_build) + files = dict() + for section in ('errors', 'infos', 'warnings'): + files[section] = [] + for msg, f in json_per_build_asdict[section]: + files[section].append(f) + + self.assertFalse( + 'ascii.out' in files['errors'], 'ASCII .out file is not an error' + ) + self.assertFalse( + 'snippet.png' in files['errors'], 'executable valid image is not an error' + ) + + self.assertTrue('arg.jar' in files['errors'], 'all JAR files are errors') + self.assertTrue('baz.so' in files['errors'], 'all .so files are errors') + self.assertTrue( + 'binary.out' in files['errors'], 'a binary .out file is an error' + ) + self.assertTrue( + 'classes.dex' in files['errors'], 'all classes.dex files are errors' + ) + self.assertTrue('sqlcipher.aar' in files['errors'], 'all AAR files are errors') + self.assertTrue('static.a' in files['errors'], 'all .a files are errors') + + self.assertTrue( + 'fake.png' in files['warnings'], + 'a random binary that is executable that is not an image is a warning', + ) + self.assertTrue( + 'src/test/resources/classes.dex' in files['warnings'], + 'suspicious file but in a test dir is a warning', + ) + + for f in remove: + self.assertTrue( + f in files['infos'], '%s should be removed with an info message' % f + ) + + def test_build_local_scanner(self): + """`fdroid build` calls scanner functions, test them here""" + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.build.config = config + fdroidserver.build.options = mock.Mock() + fdroidserver.build.options.json = False + fdroidserver.build.options.scan_binary = False + fdroidserver.build.options.notarball = True + 
fdroidserver.build.options.skipscan = False + fdroidserver.common.options = fdroidserver.build.options + + app = fdroidserver.metadata.App() + app.id = 'mocked.app.id' + build = fdroidserver.metadata.Build() + build.commit = '1.0' + build.output = app.id + '.apk' + build.scanignore = ['baz.so', 'foo.aar'] + build.versionCode = 1 + build.versionName = '1.0' + vcs = mock.Mock() + + for f in ('baz.so', 'foo.aar', 'gradle-wrapper.jar'): + with open(f, 'w') as fp: + fp.write('placeholder') + self.assertTrue(os.path.exists(f)) + + with open('build.xml', 'w', encoding='utf-8') as fp: + fp.write( + textwrap.dedent( + """ + + + + """ + ) + ) + + def make_fake_apk(output, build): + with open(build.output, 'w') as fp: + fp.write('APK PLACEHOLDER') + return output + + with mock.patch('fdroidserver.common.replace_build_vars', wraps=make_fake_apk): + with mock.patch('fdroidserver.common.get_native_code', return_value='x86'): + with ( + mock.patch( + 'fdroidserver.common.get_apk_id', + return_value=(app.id, build.versionCode, build.versionName), + ), + mock.patch( + 'fdroidserver.common.get_source_date_epoch', + lambda f: '1234567890', + ), + ): + with mock.patch( + 'fdroidserver.common.is_debuggable_or_testOnly', + return_value=False, + ): + fdroidserver.build.build_local( + app, + build, + vcs, + build_dir=self.testdir, + output_dir=self.testdir, + log_dir=None, + srclib_dir=None, + extlib_dir=None, + tmp_dir=None, + force=False, + onserver=False, + refresh=False, + ) + self.assertTrue(os.path.exists('baz.so')) + self.assertTrue(os.path.exists('foo.aar')) + self.assertFalse(os.path.exists('gradle-wrapper.jar')) + + def test_gradle_maven_url_regex(self): + """Check the regex can find all the cases""" + with open(basedir / 'gradle-maven-blocks.yaml') as fp: + data = yaml.safe_load(fp) + + urls = [] + for entry in data: + found = False + for m in fdroidserver.scanner.MAVEN_URL_REGEX.findall(entry): + urls.append(m) + found = True + self.assertTrue(found, 'this block should produce a URL:\n' + entry) + self.assertEqual(len(data), len(urls), 'each data example should produce a URL') + + def test_scan_gradle_file_with_multiple_problems(self): + """Check that the scanner can handle scandelete with gradle files with multiple problems""" + os.chdir(self.testdir) + fdroidserver.scanner.config = None + fdroidserver.common.options = mock.Mock() + build = fdroidserver.metadata.Build() + build.scandelete = ['build.gradle'] + with open('build.gradle', 'w', encoding='utf-8') as fp: + fp.write( + textwrap.dedent( + """ + maven { + url 'https://maven.fabric.io/public' + } + maven { + url 'https://evilcorp.com/maven' + } + """ + ) + ) + count = fdroidserver.scanner.scan_source(self.testdir, build) + self.assertFalse(os.path.exists("build.gradle")) + self.assertEqual(0, count, 'there should be this many errors') + + def test_get_embedded_classes(self): + config = dict() + fdroidserver.common.config = config + fdroidserver.common.fill_config_defaults(config) + if not _dexdump_found(): + self.skipTest('Some Debian arches lack dexdump') + for f in ( + 'apk.embedded_1.apk', + 'bad-unicode-πÇÇ现代通用字-български-عربي1.apk', + 'janus.apk', + 'minimal_targetsdk_30_unsigned.apk', + 'no_targetsdk_minsdk1_unsigned.apk', + 'org.bitbucket.tickytacky.mirrormirror_1.apk', + 'org.bitbucket.tickytacky.mirrormirror_2.apk', + 'org.bitbucket.tickytacky.mirrormirror_3.apk', + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'org.dyndns.fules.ck_20.apk', + 'SpeedoMeterApp.main_1.apk', + 'urzip.apk', + 'urzip-badcert.apk', + 'urzip-badsig.apk', + 
'urzip-release.apk', + 'urzip-release-unsigned.apk', + 'repo/com.example.test.helloworld_1.apk', + 'repo/com.politedroid_3.apk', + 'repo/com.politedroid_4.apk', + 'repo/com.politedroid_5.apk', + 'repo/com.politedroid_6.apk', + 'repo/duplicate.permisssions_9999999.apk', + 'repo/info.zwanenburg.caffeinetile_4.apk', + 'repo/no.min.target.sdk_987.apk', + 'repo/obb.main.oldversion_1444412523.apk', + 'repo/obb.mainpatch.current_1619_another-release-key.apk', + 'repo/obb.mainpatch.current_1619.apk', + 'repo/obb.main.twoversions_1101613.apk', + 'repo/obb.main.twoversions_1101615.apk', + 'repo/obb.main.twoversions_1101617.apk', + 'repo/souch.smsbypass_9.apk', + 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', + 'repo/v1.v2.sig_1020.apk', + ): + self.assertNotEqual( + set(), + fdroidserver.scanner.get_embedded_classes(f), + 'should return results for ' + f, + ) + + def test_get_embedded_classes_empty_archives(self): + config = dict() + fdroidserver.common.config = config + fdroidserver.common.fill_config_defaults(config) + for f in ( + 'Norway_bouvet_europe_2.obf.zip', + 'repo/fake.ota.update_1234.zip', + ): + self.assertEqual( + set(), + fdroidserver.scanner.get_embedded_classes(f), + 'should return not results for ' + f, + ) + + def test_get_embedded_classes_secret_apk(self): + """Try to hide an APK+DEX in an APK and see if we can find it""" + config = dict() + fdroidserver.common.config = config + fdroidserver.common.fill_config_defaults(config) + if not _dexdump_found(): + self.skipTest('Some Debian arches lack dexdump') + apk = 'urzip.apk' + mapzip = 'Norway_bouvet_europe_2.obf.zip' + secretfile = os.path.join( + basedir, 'org.bitbucket.tickytacky.mirrormirror_1.apk' + ) + with tempfile.TemporaryDirectory() as tmpdir: + shutil.copy(apk, tmpdir) + shutil.copy(mapzip, tmpdir) + os.chdir(tmpdir) + with zipfile.ZipFile(mapzip, 'a') as zipfp: + zipfp.write(secretfile, 'secretapk') + with zipfile.ZipFile(apk) as readfp: + with readfp.open('classes.dex') as cfp: + zipfp.writestr('secretdex', cfp.read()) + with zipfile.ZipFile(apk, 'a') as zipfp: + zipfp.write(mapzip) + + cls = fdroidserver.scanner.get_embedded_classes(apk) + self.assertTrue( + 'org/bitbucket/tickytacky/mirrormirror/MainActivity' in cls, + 'this should find the classes in the hidden, embedded APK', + ) + self.assertTrue( + 'DEX file with fake name: secretdex' in cls, + 'badly named embedded DEX fils should throw an error', + ) + self.assertTrue( + 'ZIP file without proper file extension: secretapk' in cls, + 'badly named embedded ZIPs should throw an error', + ) + + +class Test_scan_binary(unittest.TestCase): + def setUp(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.common.options = mock.Mock() + + if not _dexdump_found(): + self.skipTest('Some Debian arches lack dexdump') + + fdroidserver.scanner._SCANNER_TOOL = mock.Mock() + fdroidserver.scanner._SCANNER_TOOL.regexs = {} + fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { + "java/lang/Object": re.compile( + r'.*java/lang/Object', re.IGNORECASE | re.UNICODE + ) + } + fdroidserver.scanner._SCANNER_TOOL.regexs['warn_code_signatures'] = {} + + def test_code_signature_match(self): + apkfile = os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk') + with self.assertLogs(level=logging.CRITICAL): + problems = fdroidserver.scanner.scan_binary(apkfile) + self.assertEqual( + 1, + problems, + "Did not find expected code signature '{}' in binary '{}'".format( + 
fdroidserver.scanner._SCANNER_TOOL.regexs[ + 'err_code_signatures' + ].values(), + apkfile, + ), + ) + + def test_bottom_level_embedded_apk_code_signature(self): + apkfile = os.path.join(basedir, 'apk.embedded_1.apk') + fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { + "org/bitbucket/tickytacky/mirrormirror/MainActivity": re.compile( + r'.*org/bitbucket/tickytacky/mirrormirror/MainActivity', + re.IGNORECASE | re.UNICODE, + ) + } + + with self.assertLogs(level=logging.CRITICAL): + problems = fdroidserver.scanner.scan_binary(apkfile) + self.assertEqual( + 1, + problems, + "Did not find expected code signature '{}' in binary '{}'".format( + fdroidserver.scanner._SCANNER_TOOL.regexs[ + 'err_code_signatures' + ].values(), + apkfile, + ), + ) + + def test_top_level_signature_embedded_apk_present(self): + apkfile = os.path.join(basedir, 'apk.embedded_1.apk') + fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { + "org/fdroid/ci/BuildConfig": re.compile( + r'.*org/fdroid/ci/BuildConfig', re.IGNORECASE | re.UNICODE + ) + } + with self.assertLogs(level=logging.CRITICAL): + problems = fdroidserver.scanner.scan_binary(apkfile) + self.assertEqual( + 1, + problems, + "Did not find expected code signature '{}' in binary '{}'".format( + fdroidserver.scanner._SCANNER_TOOL.regexs[ + 'err_code_signatures' + ].values(), + apkfile, + ), + ) + + +class Test_SignatureDataController(unittest.TestCase): + def test_init(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + self.assertEqual(sdc.name, 'nnn') + self.assertEqual(sdc.filename, 'fff.yml') + self.assertEqual(sdc.cache_duration, timedelta(999999)) + self.assertDictEqual(sdc.data, {}) + + # check_last_updated + def test_check_last_updated_ok(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.data['last_updated'] = datetime.now(timezone.utc).timestamp() + sdc.check_last_updated() + + def test_check_last_updated_exception_cache_outdated(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.cache_duration = timedelta(days=7) + sdc.data['last_updated'] = ( + datetime.now(timezone.utc) - timedelta(days=30) + ).timestamp() + with self.assertRaises(fdroidserver.scanner.SignatureDataOutdatedException): + sdc.check_last_updated() + + def test_check_last_updated_exception_not_string(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.data['last_updated'] = 'sepp' + with self.assertRaises(fdroidserver.scanner.SignatureDataMalformedException): + sdc.check_last_updated() + + def test_check_last_updated_exception_not_iso_formatted_string(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.data['last_updated'] = '01/09/2002 10:11' + with self.assertRaises(fdroidserver.scanner.SignatureDataMalformedException): + sdc.check_last_updated() + + def test_check_last_updated_no_exception_missing_when_last_updated_not_set(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.check_last_updated() + + # check_data_version + def test_check_data_version_ok(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.data['version'] = fdroidserver.scanner.SCANNER_CACHE_VERSION + 
sdc.check_data_version() + + def test_check_data_version_exception(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + with self.assertRaises( + fdroidserver.scanner.SignatureDataVersionMismatchException + ): + sdc.check_data_version() + + def test_load_ok(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + func_lfc = mock.Mock() + func_vd = mock.Mock() + func_clu = mock.Mock() + with ( + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_cache', + func_lfc, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.verify_data', + func_vd, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.check_last_updated', + func_clu, + ), + ): + sdc.load() + func_lfc.assert_called_once_with() + func_vd.assert_called_once_with() + func_clu.assert_called_once_with() + + def test_load_initial_cache_miss(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + func_lfc = mock.Mock( + side_effect=fdroidserver.scanner.SignatureDataCacheMissException + ) + func_lfd = mock.Mock() + with ( + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_cache', + func_lfc, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_defaults', + func_lfd, + ), + ): + sdc.load() + func_lfc.assert_called_once_with() + func_lfd.assert_called_once_with() + + def test_load_cache_auto_refresh(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + func_lfc = mock.Mock() + func_vd = mock.Mock() + func_clu = mock.Mock( + side_effect=fdroidserver.scanner.SignatureDataOutdatedException() + ) + func_fsfw = mock.Mock() + func_wtc = mock.Mock() + with ( + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_cache', + func_lfc, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.verify_data', + func_vd, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.check_last_updated', + func_clu, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.fetch_signatures_from_web', + func_fsfw, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.write_to_cache', + func_wtc, + ), + ): + sdc.load() + func_lfc.assert_called_once_with() + func_vd.assert_called_once_with() + func_clu.assert_called_once_with() + func_fsfw.assert_called_once_with() + func_wtc.assert_called_once_with() + + def test_load_try_web_when_no_defaults(self): + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + func_lfc = mock.Mock( + side_effect=fdroidserver.scanner.SignatureDataCacheMissException() + ) + func_lfd = mock.Mock( + side_effect=fdroidserver.scanner.SignatureDataNoDefaultsException() + ) + func_fsfw = mock.Mock() + func_wtc = mock.Mock() + with ( + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_cache', + func_lfc, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.load_from_defaults', + func_lfd, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.fetch_signatures_from_web', + func_fsfw, + ), + mock.patch( + 'fdroidserver.scanner.SignatureDataController.write_to_cache', + func_wtc, + ), + ): + sdc.load() + func_lfc.assert_called_once_with() + func_lfd.assert_called_once_with() + func_fsfw.assert_called_once_with() + func_wtc.assert_called_once_with() + + 
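+    # Taken together, the load() tests above walk the fallback chain that
+    # these mocks describe: load_from_cache(), then load_from_defaults() on
+    # a cache miss, then fetch_signatures_from_web() plus write_to_cache()
+    # when there are no defaults or the cached data is outdated.
+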
@unittest.skipIf( + sys.version_info < (3, 9, 0), + "mock_open doesn't allow easy access to written data in older python versions", + ) + def test_write_to_cache(self): + open_func = mock.mock_open() + sdc = fdroidserver.scanner.SignatureDataController( + 'nnn', 'fff.yml', 'https://example.com/test.json' + ) + sdc.data = {"mocked": "data"} + + with ( + mock.patch("builtins.open", open_func), + mock.patch( + "fdroidserver.scanner._scanner_cachedir", + return_value=pathlib.Path('.'), + ), + ): + sdc.write_to_cache() + + open_func.assert_called_with(pathlib.Path('fff.yml'), 'w', encoding="utf-8") + self.assertEqual(mock_open_to_str(open_func), """{\n "mocked": "data"\n}""") + + +class Test_ScannerTool(unittest.TestCase): + def setUp(self): + fdroidserver.common.options = None + fdroidserver.common.config = None + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + fdroidserver.scanner.ScannerTool.refresh_allowed = True + + def tearDown(self): + fdroidserver.common.options = None + fdroidserver.common.config = None + os.chdir(basedir) + self._td.cleanup() + + def test_load(self): + st = mock.Mock() + st.sdcs = [mock.Mock(), mock.Mock()] + fdroidserver.scanner.ScannerTool.load(st) + st.sdcs[0].load.assert_called_once_with() + st.sdcs[1].load.assert_called_once_with() + + def test_refresh_no_options_or_config(self): + """This simulates what happens when running something like scan_source()""" + with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: + fdroidserver.scanner.ScannerTool() + refresh.assert_not_called() + + def test_refresh_true(self): + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.refresh_scanner = True + with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: + fdroidserver.scanner.ScannerTool() + refresh.assert_called_once() + + def test_refresh_false(self): + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.refresh_scanner = False + with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: + fdroidserver.scanner.ScannerTool() + refresh.assert_not_called() + + def test_refresh_from_config(self): + os.chdir(self.testdir) + fdroidserver.common.write_config_file('refresh_scanner: true\n') + with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: + fdroidserver.scanner.ScannerTool() + refresh.assert_called_once() + + def test_refresh_options_overrides_config(self): + fdroidserver.common.options = mock.Mock() + fdroidserver.common.options.refresh_scanner = True + os.chdir(self.testdir) + fdroidserver.common.write_config_file('refresh_scanner: false\n') + with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: + fdroidserver.scanner.ScannerTool() + refresh.assert_called_once() + + +class Test_main(unittest.TestCase): + def setUp(self): + self.args = ["com.example.app", "local/additional.apk", "another.apk"] + self.exit_func = mock.Mock() + self.read_app_args_func = mock.Mock(return_value={}) + self.scan_binary_func = mock.Mock(return_value=0) + + def test_parsing_appid(self): + """This test verifies that app id get parsed correctly + (doesn't test how they get processed) + """ + self.args = ["com.example.app"] + with ( + tempfile.TemporaryDirectory() as tmpdir, + TmpCwd(tmpdir), + mock.patch("sys.exit", self.exit_func), + mock.patch("sys.argv", ["fdroid scanner", *self.args]), + mock.patch("fdroidserver.common.read_app_args", self.read_app_args_func), + mock.patch("fdroidserver.scanner.scan_binary", self.scan_binary_func), + ): + 
fdroidserver.scanner.main() + + self.exit_func.assert_not_called() + self.read_app_args_func.assert_called_once_with( + ['com.example.app'], allow_version_codes=True + ) + self.scan_binary_func.assert_not_called() + + def test_parsing_apkpath(self): + """This test verifies that apk paths get parsed correctly + (doesn't test how they get processed) + """ + self.args = ["local.application.apk"] + with ( + tempfile.TemporaryDirectory() as tmpdir, + TmpCwd(tmpdir), + mock.patch("sys.exit", self.exit_func), + mock.patch("sys.argv", ["fdroid scanner", *self.args]), + mock.patch("fdroidserver.common.read_app_args", self.read_app_args_func), + mock.patch("fdroidserver.scanner.scan_binary", self.scan_binary_func), + ): + pathlib.Path(self.args[0]).touch() + fdroidserver.scanner.main() + + self.exit_func.assert_not_called() + self.read_app_args_func.assert_not_called() + self.scan_binary_func.assert_called_once_with('local.application.apk') diff --git a/tests/test_signatures.py b/tests/test_signatures.py new file mode 100755 index 00000000..4f7bd105 --- /dev/null +++ b/tests/test_signatures.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +import hashlib +import os +import sys +import unittest +from tempfile import TemporaryDirectory + +from fdroidserver import common, signatures + +from .shared_test_code import TmpCwd + +basedir = os.path.dirname(__file__) + + +class SignaturesTest(unittest.TestCase): + def setUp(self): + common.config = None + config = common.read_config() + config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') + common.config = config + + @unittest.skipIf(sys.byteorder == 'big', "androguard is not ported to big-endian") + def test_main(self): + class OptionsFixture: + APK = [os.path.join(basedir, 'repo', 'com.politedroid_3.apk')] + + with TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): + signatures.extract(OptionsFixture) + + # check if extracted signatures are where they are supposed to be + # also verify weather if extracted file contain what they should + filesAndHashes = ( + ( + os.path.join( + 'metadata', 'com.politedroid', 'signatures', '3', 'MANIFEST.MF' + ), + '7dcd83f0c41a75457fd2311bf3c4578f80d684362d74ba8dc52838d353f31cf2', + ), + ( + os.path.join( + 'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.RSA' + ), + '883ef3d5a6e0bf69d2a58d9e255a7930f08a49abc38e216ed054943c99c8fdb4', + ), + ( + os.path.join( + 'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.SF' + ), + '99fbb3211ef5d7c1253f3a7ad4836eadc9905103ce6a75916c40de2831958284', + ), + ) + for path, checksum in filesAndHashes: + self.assertTrue( + os.path.isfile(path), + f'check whether {path!r} was extracted correctly.', + ) + with open(path, 'rb') as f: + self.assertEqual(hashlib.sha256(f.read()).hexdigest(), checksum) diff --git a/tests/test_signindex.py b/tests/test_signindex.py new file mode 100755 index 00000000..21d54585 --- /dev/null +++ b/tests/test_signindex.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 + +import json +import os +import shutil +import subprocess +import tempfile +import unittest +from pathlib import Path +from unittest.mock import patch + +from fdroidserver import apksigcopier, common, exception, signindex, update + + +class Options: + allow_disabled_algorithms = False + clean = False + delete_unknown = False + nosign = False + pretty = True + rename_apks = False + verbose = False + + +class SignindexTest(unittest.TestCase): + basedir = Path(__file__).resolve().parent + + def setUp(self): + signindex.config = None + config = common.read_config() + config['jarsigner'] = 
common.find_sdk_tools_cmd('jarsigner') + config['verbose'] = True + config['keystore'] = str(self.basedir / 'keystore.jks') + config['repo_keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + signindex.config = config + + self.tempdir = tempfile.TemporaryDirectory() + os.chdir(self.tempdir.name) + self.repodir = Path('repo') + self.repodir.mkdir() + + def tearDown(self): + self.tempdir.cleanup() + + def test_sign_index(self): + shutil.copy(str(self.basedir / 'repo/index-v1.json'), 'repo') + signindex.sign_index(str(self.repodir), 'index-v1.json') + self.assertTrue((self.repodir / 'index-v1.jar').exists()) + self.assertTrue((self.repodir / 'index-v1.json').exists()) + + def test_sign_index_corrupt(self): + with open('repo/index-v1.json', 'w') as fp: + fp.write('corrupt JSON!') + with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): + signindex.sign_index(str(self.repodir), 'index-v1.json') + + def test_sign_entry(self): + entry = 'repo/entry.json' + v2 = 'repo/index-v2.json' + shutil.copy(self.basedir / entry, entry) + shutil.copy(self.basedir / v2, v2) + signindex.sign_index(self.repodir, 'entry.json') + self.assertTrue((self.repodir / 'entry.jar').exists()) + + def test_sign_entry_corrupt(self): + """sign_index should exit with error if entry.json is bad JSON""" + entry = 'repo/entry.json' + with open(entry, 'w') as fp: + fp.write('{') + with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): + signindex.sign_index(self.repodir, 'entry.json') + self.assertFalse((self.repodir / 'entry.jar').exists()) + + def test_sign_entry_corrupt_leave_entry_jar(self): + """sign_index should not touch existing entry.jar if entry.json is corrupt""" + existing = 'repo/entry.jar' + testvalue = "Don't touch!" 
+ with open(existing, 'w') as fp: + fp.write(testvalue) + with open('repo/entry.json', 'w') as fp: + fp.write('{') + with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): + signindex.sign_index(self.repodir, 'entry.json') + with open(existing) as fp: + self.assertEqual(testvalue, fp.read()) + + def test_sign_corrupt_index_v2_json(self): + """sign_index should exit with error if index-v2.json JSON is corrupt""" + with open('repo/index-v2.json', 'w') as fp: + fp.write('{"key": "not really an index"') + good_entry = { + "timestamp": 1676583021000, + "version": 20002, + "index": { + "name": "/index-v2.json", + "sha256": common.sha256sum('repo/index-v2.json'), + "size": os.path.getsize('repo/index-v2.json'), + "numPackages": 0, + }, + } + with open('repo/entry.json', 'w') as fp: + json.dump(good_entry, fp) + with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): + signindex.sign_index(self.repodir, 'entry.json') + self.assertFalse((self.repodir / 'entry.jar').exists()) + + def test_sign_index_v2_corrupt_sha256(self): + """sign_index should exit with error if SHA-256 of file in entry is wrong""" + entry = 'repo/entry.json' + v2 = 'repo/index-v2.json' + shutil.copy(self.basedir / entry, entry) + shutil.copy(self.basedir / v2, v2) + with open(v2, 'a') as fp: + fp.write(' ') + with self.assertRaises(exception.FDroidException, msg='error on bad SHA-256'): + signindex.sign_index(self.repodir, 'entry.json') + self.assertFalse((self.repodir / 'entry.jar').exists()) + + def test_signindex(self): + if common.find_apksigner({}) is None: # TODO remove me for buildserver-bullseye + self.skipTest('SKIPPING test_signindex, apksigner not installed!') + os.mkdir('archive') + metadata = Path('metadata') + metadata.mkdir() + with (metadata / 'info.guardianproject.urzip.yml').open('w') as fp: + fp.write('# placeholder') + shutil.copy(str(self.basedir / 'urzip.apk'), 'repo') + index_files = [] + for f in ( + 'entry.jar', + 'entry.json', + 'index-v1.jar', + 'index-v1.json', + 'index-v2.json', + 'index.jar', + 'index.xml', + ): + for section in (Path('repo'), Path('archive')): + path = section / f + self.assertFalse(path.exists(), '%s should not exist yet!' % path) + index_files.append(path) + common.options = Options + with patch('sys.argv', ['fdroid update']): + update.main() + with patch('sys.argv', ['fdroid signindex', '--verbose']): + signindex.main() + for f in index_files: + self.assertTrue(f.exists(), '%s should exist!' 
% f) + self.assertFalse(os.path.exists('index-v2.jar')) # no JAR version of this file + + # index.jar aka v0 must by signed by SHA1withRSA + f = 'repo/index.jar' + common.verify_deprecated_jar_signature(f) + self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) + cp = subprocess.run( + ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE + ) + self.assertTrue(b'SHA1withRSA' in cp.stdout) + + # index-v1.jar must by signed by SHA1withRSA + f = 'repo/index-v1.jar' + common.verify_deprecated_jar_signature(f) + self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) + cp = subprocess.run( + ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE + ) + self.assertTrue(b'SHA1withRSA' in cp.stdout) + + # entry.jar aka index v2 must by signed by a modern algorithm + f = 'repo/entry.jar' + common.verify_deprecated_jar_signature(f) + self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) + cp = subprocess.run( + ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE + ) + self.assertFalse(b'SHA1withRSA' in cp.stdout) diff --git a/tests/test_update.py b/tests/test_update.py new file mode 100755 index 00000000..623f48cc --- /dev/null +++ b/tests/test_update.py @@ -0,0 +1,2467 @@ +#!/usr/bin/env python3 + +import copy +import glob +import hashlib +import json +import logging +import os +import random +import shutil +import string +import subprocess +import sys +import textwrap +import time +import unittest +import zipfile +from binascii import hexlify +from datetime import datetime +from pathlib import Path +from unittest import mock + +import git +import yaml + +try: + # these were moved in androguard 4.0 + from androguard.core.apk import APK +except ImportError: + from androguard.core.bytecodes.apk import APK + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +try: + from yaml import CFullLoader as FullLoader +except ImportError: + try: + # FullLoader is available from PyYaml 5.1+, as we don't load user + # controlled data here, it's okay to fall back the unsafe older + # Loader + from yaml import FullLoader + except ImportError: + from yaml import Loader as FullLoader + +from PIL import PngImagePlugin + +import fdroidserver.common +import fdroidserver.exception +import fdroidserver.metadata +import fdroidserver.update +from fdroidserver.common import CATEGORIES_CONFIG_NAME +from fdroidserver.looseversion import LooseVersion + +from .shared_test_code import TmpCwd, mkdtemp + +DONATION_FIELDS = ('Donate', 'Liberapay', 'OpenCollective') + +logging.getLogger(PngImagePlugin.__name__).setLevel(logging.INFO) +basedir = Path(__file__).parent + + +class Options: + allow_disabled_algorithms = False + clean = False + delete_unknown = False + nosign = False + pretty = True + rename_apks = False + verbose = False + + +@unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') +class UpdateTest(unittest.TestCase): + '''fdroid update''' + + def setUp(self): + os.chdir(basedir) + self._td = mkdtemp() + self.testdir = self._td.name + + fdroidserver.common.config = None + fdroidserver.common.options = None + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + def test_insert_store_metadata(self): + os.chdir(self.testdir) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + repo_dir = basedir / 'repo' + os.mkdir('metadata') + for packageName in ( + 'obb.mainpatch.current', + 'org.videolan.vlc', + ): + 
shutil.copytree( + repo_dir / packageName, os.path.join('repo', packageName) + ) + for packageName in ( + 'info.guardianproject.checkey', + 'info.guardianproject.urzip', + 'org.smssecure.smssecure', + ): + shutil.copytree( + basedir / 'metadata' / packageName, + os.path.join('metadata', packageName), + ) + for packageName in ( + 'com.nextcloud.client', + 'com.nextcloud.client.dev', + 'eu.siacs.conversations', + ): + shutil.copytree( + basedir / 'source-files' / packageName, + os.path.join(self.testdir, 'build', packageName), + ) + + testfilename = 'icon_yAfSvPRJukZzMMfUzvbYqwaD1XmHXNtiPBtuPVHW-6s=.png' + testfile = repo_dir / 'org.videolan.vlc/en-US/icon.png' + cpdir = os.path.join('metadata', 'org.videolan.vlc', 'en-US') + cpfile = os.path.join(cpdir, testfilename) + os.makedirs(cpdir, exist_ok=True) + shutil.copy(testfile, cpfile) + shutil.copystat(testfile, cpfile) + + apps = dict() + for packageName in ( + 'info.guardianproject.urzip', + 'org.videolan.vlc', + 'obb.mainpatch.current', + 'com.nextcloud.client', + 'com.nextcloud.client.dev', + 'eu.siacs.conversations', + ): + apps[packageName] = fdroidserver.metadata.App() + apps[packageName]['id'] = packageName + apps[packageName]['CurrentVersionCode'] = 0xCAFEBEEF + + apps['info.guardianproject.urzip']['CurrentVersionCode'] = 100 + + buildnextcloudclient = fdroidserver.metadata.Build() + buildnextcloudclient.gradle = ['generic'] + apps['com.nextcloud.client']['Builds'] = [buildnextcloudclient] + + buildnextclouddevclient = fdroidserver.metadata.Build() + buildnextclouddevclient.gradle = ['versionDev'] + apps['com.nextcloud.client.dev']['Builds'] = [buildnextclouddevclient] + + build_conversations = fdroidserver.metadata.Build() + build_conversations.gradle = ['free'] + apps['eu.siacs.conversations']['Builds'] = [build_conversations] + + fdroidserver.update.insert_localized_app_metadata(apps) + fdroidserver.update.ingest_screenshots_from_repo_dir(apps) + + appdir = Path('repo/info.guardianproject.urzip/en-US') + self.assertTrue( + os.path.isfile( + os.path.join( + appdir, 'icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png' + ) + ) + ) + self.assertTrue( + os.path.isfile( + os.path.join( + appdir, + 'featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png', + ) + ) + ) + + self.assertEqual(6, len(apps)) + for packageName, app in apps.items(): + self.assertIn('localized', app, packageName) + self.assertIn('en-US', app['localized']) + self.assertEqual(1, len(app['localized'])) + if packageName == 'info.guardianproject.urzip': + self.assertEqual(7, len(app['localized']['en-US'])) + self.assertEqual('full description\n', app['localized']['en-US']['description']) + self.assertEqual('title', app['localized']['en-US']['name']) + self.assertEqual('short description', app['localized']['en-US']['summary']) + self.assertEqual('video', app['localized']['en-US']['video']) + self.assertEqual('icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png', + app['localized']['en-US']['icon']) + self.assertEqual('featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png', + app['localized']['en-US']['featureGraphic']) + self.assertEqual('100\n', app['localized']['en-US']['whatsNew']) + elif packageName == 'org.videolan.vlc': + self.assertEqual(testfilename, app['localized']['en-US']['icon']) + self.assertEqual(9, len(app['localized']['en-US']['phoneScreenshots'])) + self.assertEqual(15, len(app['localized']['en-US']['sevenInchScreenshots'])) + elif packageName == 'obb.mainpatch.current': + 
self.assertEqual('icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png', + app['localized']['en-US']['icon']) + self.assertEqual('featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png', + app['localized']['en-US']['featureGraphic']) + self.assertEqual(1, len(app['localized']['en-US']['phoneScreenshots'])) + self.assertEqual(1, len(app['localized']['en-US']['sevenInchScreenshots'])) + elif packageName == 'com.nextcloud.client': + self.assertEqual('Nextcloud', app['localized']['en-US']['name']) + self.assertEqual(1073, len(app['localized']['en-US']['description'])) + self.assertEqual(78, len(app['localized']['en-US']['summary'])) + elif packageName == 'com.nextcloud.client.dev': + self.assertEqual('Nextcloud Dev', app['localized']['en-US']['name']) + self.assertEqual(586, len(app['localized']['en-US']['description'])) + self.assertEqual(78, len(app['localized']['en-US']['summary'])) + elif packageName == 'eu.siacs.conversations': + self.assertEqual('Conversations', app['localized']['en-US']['name']) + + def test_insert_fastlane_default_txt_changelog(self): + """Test that Fastlane's default.txt is handled properly + + https://docs.fastlane.tools/actions/supply/#changelogs-whats-new + """ + os.chdir(self.testdir) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + app = fdroidserver.metadata.App() + app.id = 'com.example.app' + changelogs_dir = 'build/%s/metadata/en-US/changelogs' % app.id + os.makedirs(changelogs_dir) + with open(os.path.join(changelogs_dir, 'default.txt'), 'w') as fp: + fp.write('default') + with open(os.path.join(changelogs_dir, '42.txt'), 'w') as fp: + fp.write('42') + apps = {app.id: app} + build = fdroidserver.metadata.Build() + build.versionCode = 42 + app['Builds'] = [build] + + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) + + app.CurrentVersionCode = 1 + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) + + app.CurrentVersionCode = 10000 + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) + + app.CurrentVersionCode = 42 + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual('42', apps[app.id]['localized']['en-US']['whatsNew']) + + def test_fastlane_with_subdir(self): + """Test if fastlane in simple one-level subdir is found.""" + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + app = fdroidserver.metadata.App() + app.id = 'com.example.app' + build_dir = f'build/{app.id}' + flavor = 'flavor' + subdir = 'subproject' + apps = {app.id: app} + build = fdroidserver.metadata.Build() + build.versionCode = 42 + build.gradle = [flavor] + build.subdir = subdir + app['Builds'] = [build] + + first_value = 'first' + first_dir = Path(f'{build_dir}/src/{flavor}/fastlane/metadata/android/en-US') + first_dir.mkdir(parents=True) + (first_dir / 'title.txt').write_text(first_value) + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual(first_value, apps[app.id]['localized']['en-US']['name']) + + second_value = 'second' + second_dir = Path(f'{build_dir}/{subdir}/fastlane/metadata/android/en-US') + second_dir.mkdir(parents=True) + (second_dir / 'title.txt').write_text(second_value) + fdroidserver.update.insert_localized_app_metadata(apps) + 
self.assertEqual(second_value, apps[app.id]['localized']['en-US']['name']) + + def test_fastlane_with_schildichat(self): + """Test if fastlane is found in this tangle of dirs and symlinks. + + https://github.com/SchildiChat/schildichat-android-next/tree/sc_v0.10.3-ex_25_6_2 + """ + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + app = fdroidserver.metadata.App() + app.id = 'chat.schildi.android' + build_dir = f'build/{app.id}' + flavors = ['fdroid', 'sc', 'default'] + subdir = 'app' + apps = {app.id: app} + build = fdroidserver.metadata.Build() + build.versionCode = 42 + build.gradle = flavors + build.subdir = subdir + app['Builds'] = [build] + + wrong_value = 'wrong' + wrong_dir = Path(f'{build_dir}/upstream_infra/fastlane/metadata/android/en-US') + wrong_dir.mkdir(parents=True) + (wrong_dir / 'title.txt').write_text(wrong_value) + + right_value = 'right' + right_dir = Path(f'{build_dir}/metadata/en-US') + right_dir.mkdir(parents=True) + (right_dir / 'title.txt').write_text(right_value) + _fastlane = Path('.fastlane/metadata') + _fastlane.mkdir(parents=True) + os.symlink('../../metadata', _fastlane / 'android') + os.symlink('.fastlane', 'fastlane') + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual(right_value, apps[app.id]['localized']['en-US']['name']) + + def test_fastlane_with_multi_level_subdir(self): + """Test if fastlane in multi-level subdir is found.""" + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + app = fdroidserver.metadata.App() + app.id = 'org.videolan.vlc' + build_dir = f'build/{app.id}' + subdir = 'application/app' + apps = {app.id: app} + build = fdroidserver.metadata.Build() + build.versionCode = 42 + build.gradle = ['yes'] + build.subdir = subdir + app['Builds'] = [build] + + first_value = 'first' + first_dir = Path(f'{build_dir}/{subdir}/fastlane/metadata/android/en-US') + first_dir.mkdir(parents=True) + (first_dir / 'title.txt').write_text(first_value) + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual(first_value, apps[app.id]['localized']['en-US']['name']) + + # I'm not sure that it is correct behavior for this path to + # override the above path, but it is how it is working now. It + # seems to me it should be the other way around, but that is + # really hard to implement using the current algorithm. 
+ second_value = 'second' + second_dir = Path(f'{build_dir}/fastlane/metadata/android/en-US') + second_dir.mkdir(parents=True) + (second_dir / 'title.txt').write_text(second_value) + fdroidserver.update.insert_localized_app_metadata(apps) + self.assertEqual(second_value, apps[app.id]['localized']['en-US']['name']) + + def test_name_title_scraping(self): + """metadata file --> fdroiddata localized files --> fastlane/triple-t in app source --> APK""" + shutil.copytree(basedir, self.testdir, dirs_exist_ok=True) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(self.testdir) + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + apps = fdroidserver.metadata.read_metadata() + apps['info.guardianproject.urzip']['CurrentVersionCode'] = 100 + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + fdroidserver.update.insert_localized_app_metadata(apps) + fdroidserver.update.ingest_screenshots_from_repo_dir(apps) + fdroidserver.update.apply_info_from_latest_apk(apps, apks) + app = apps['info.guardianproject.urzip'] + self.assertIsNone(app.Name) + self.assertTrue('localized' in app) + self.assertEqual('title', app['localized']['en-US']['name']) + self.assertEqual('100\n', app['localized']['en-US']['whatsNew']) + app = apps['org.videolan.vlc'] + self.assertIsNone(app.Name) + self.assertTrue('localized' in app) + self.assertFalse('name' in app['localized']['en-US']) + app = apps['info.guardianproject.checkey'] + self.assertEqual('Checkey the app!', app.Name) + self.assertTrue('localized' in app) + self.assertEqual('Checkey: info on local apps', app['localized']['en-US']['name']) + self.assertEqual('Checkey: ローカルアプリの情報', app['localized']['ja-JP']['name']) + app = apps['org.adaway'] + self.assertIsNone(app.Name) + self.assertFalse('localized' in app) + app = apps['obb.main.twoversions'] + self.assertIsNone(app.Name) + self.assertFalse('localized' in app) + + def test_insert_missing_app_names_from_apks(self): + """en-US serves as the final, default, fallback value with index-v1""" + testvalue = 'TESTVALUE!' 
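+        # The fixtures below pair each apps[] entry with apks[] entries of
+        # the same packageName; the assertions then check that an APK's name
+        # only fills in a missing localized en-US name (taken from the
+        # highest versionCode), while an explicit Name is left untouched.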
+ apps = { + 'none': {}, + 'name': {'Name': testvalue}, + 'onlyapk': {'Name': None}, + 'autoname': {'AutoName': 'autoname', 'Name': None}, + 'onlylocalized': {'localized': {'en-US': {'name': testvalue}}}, + 'non_en_us_localized': {'localized': {'de-AT': {'name': 'leiwand'}}}, + 'apks': {}, + } + apks = [ + {'packageName': 'none', 'name': '', 'versionCode': 1}, + {'packageName': 'name', 'name': 'fromapk', 'versionCode': 1}, + {'packageName': 'onlyapk', 'name': testvalue, 'versionCode': 1}, + {'packageName': 'autoname', 'name': testvalue, 'versionCode': 1}, + {'packageName': 'onlylocalized', 'name': 'fromapk', 'versionCode': 1}, + {'packageName': 'non_en_us_localized', 'name': testvalue, 'versionCode': 0xcafe}, + {'packageName': 'apks', 'name': 'fromapk1', 'versionCode': 1}, + {'packageName': 'apks', 'name': 'fromapk2', 'versionCode': 2}, + {'packageName': 'apks', 'name': testvalue, 'versionCode': 3}, + ] + fdroidserver.common.options = Options + fdroidserver.update.insert_missing_app_names_from_apks(apps, apks) + for appid, app in apps.items(): + if appid == 'none': + self.assertIsNone(app.get('Name')) + self.assertIsNone(app.get('localized')) + elif appid == 'onlyapk': + self.assertIsNone(app.get('Name')) + self.assertEqual(testvalue, app['localized']['en-US']['name']) + elif appid == 'autoname': + self.assertIsNone(app.get('Name')) + self.assertEqual(testvalue, app['localized']['en-US']['name']) + elif appid == 'onlylocalized': + self.assertIsNone(app.get('Name')) + self.assertEqual(testvalue, app['localized']['en-US']['name']) + elif appid == 'non_en_us_localized': + self.assertIsNone(app.get('Name')) + self.assertEqual(testvalue, app['localized']['en-US']['name']) + elif appid == 'name': + self.assertEqual(testvalue, app['Name']) + self.assertIsNone(app.get('localized')) + elif appid == 'apks': + self.assertIsNone(app.get('Name')) + self.assertEqual(testvalue, app['localized']['en-US']['name']) + + def test_insert_missing_app_names_from_apks_from_repo(self): + os.chdir(self.testdir) + shutil.copytree(basedir, self.testdir, dirs_exist_ok=True) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + apps = fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + + appid = 'info.guardianproject.checkey' + testapps = {appid: copy.copy(apps[appid])} + self.assertEqual('Checkey the app!', testapps[appid]['Name']) + del testapps[appid]['Name'] + fdroidserver.update.insert_missing_app_names_from_apks(testapps, apks) + self.assertIsNone(testapps[appid].get('Name')) + + repoapps = fdroidserver.update.prepare_apps(apps, apks, 'repo') + fdroidserver.update.insert_missing_app_names_from_apks(repoapps, apks) + self.assertIsNone(repoapps['com.politedroid']['Name']) + self.assertEqual('Polite Droid', + repoapps['com.politedroid']['localized']['en-US']['name']) + self.assertEqual('Duplicate Permisssions', repoapps['duplicate.permisssions']['Name']) + self.assertEqual('Caffeine Tile', repoapps['info.zwanenburg.caffeinetile']['Name']) + self.assertEqual('No minSdkVersion or targetSdkVersion', repoapps['no.min.target.sdk']['Name']) + self.assertIsNone(repoapps['obb.main.oldversion'].get('Name')) + 
self.assertEqual('OBB Main Old Version', + repoapps['obb.main.oldversion']['localized']['en-US']['name']) + self.assertIsNone(repoapps['obb.main.twoversions'].get('Name')) + self.assertEqual('OBB Main Two Versions', + repoapps['obb.main.twoversions']['localized']['en-US']['name']) + self.assertIsNone(repoapps['souch.smsbypass'].get('Name')) + self.assertEqual('Battery level', + repoapps['souch.smsbypass']['localized']['en-US']['name']) + self.assertIsNone(repoapps['info.guardianproject.urzip'].get('Name')) + self.assertEqual('title', + repoapps['info.guardianproject.urzip']['localized']['en-US']['name']) + self.assertIsNone(repoapps['obb.mainpatch.current'].get('Name')) + + del repoapps['info.guardianproject.urzip']['localized'] + fdroidserver.update.insert_missing_app_names_from_apks(repoapps, apks) + self.assertEqual('urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234', + repoapps['info.guardianproject.urzip']['localized']['en-US']['name']) + + def test_insert_triple_t_metadata(self): + importer = basedir / 'tmp/importer' + packageName = 'org.fdroid.ci.test.app' + if not os.path.isdir(importer): + logging.warning('skipping test_insert_triple_t_metadata, test_import.py must run first!') + return + packageDir = os.path.join(self.testdir, 'build', packageName) + shutil.copytree(importer, packageDir) + + # always use the same commit so these tests work when ci-test-app.git is updated + repo = git.Repo(packageDir) + for remote in repo.remotes: + remote.fetch() + repo.git.reset('--hard', 'b9e5d1a0d8d6fc31d4674b2f0514fef10762ed4f') + repo.git.clean('-fdx') + + os.mkdir(os.path.join(self.testdir, 'metadata')) + metadata = dict() + metadata['Description'] = 'This is just a test app' + with open(os.path.join(self.testdir, 'metadata', packageName + '.yml'), 'w') as fp: + yaml.dump(metadata, fp) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(self.testdir) + + apps = fdroidserver.metadata.read_metadata() + fdroidserver.update.copy_triple_t_store_metadata(apps) + + # TODO ideally, this would compare the whole dict like in test_metadata.test_read_metadata() + correctlocales = [ + 'ar', 'ast_ES', 'az', 'ca', 'ca_ES', 'cs-CZ', 'cs_CZ', 'da', + 'da-DK', 'de', 'de-DE', 'el', 'en-US', 'es', 'es-ES', 'es_ES', 'et', + 'fi', 'fr', 'fr-FR', 'he_IL', 'hi-IN', 'hi_IN', 'hu', 'id', 'it', + 'it-IT', 'it_IT', 'iw-IL', 'ja', 'ja-JP', 'kn_IN', 'ko', 'ko-KR', + 'ko_KR', 'lt', 'nb', 'nb_NO', 'nl', 'nl-NL', 'no', 'pl', 'pl-PL', + 'pl_PL', 'pt', 'pt-BR', 'pt-PT', 'pt_BR', 'ro', 'ro_RO', 'ru-RU', + 'ru_RU', 'sv-SE', 'sv_SE', 'te', 'tr', 'tr-TR', 'uk', 'uk_UA', 'vi', + 'vi_VN', 'zh-CN', 'zh_CN', 'zh_TW', + ] + locales = sorted(apps['org.fdroid.ci.test.app']['localized']) + self.assertEqual(correctlocales, locales) + + def test_insert_triple_t_1_graphics(self): + packageName = 'de.wivewa.dialer' + shutil.copytree(basedir / 'triple-t-1-graphics', self.testdir, dirs_exist_ok=True) + os.chdir(self.testdir) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + + apps = fdroidserver.metadata.read_metadata() + fdroidserver.update.copy_triple_t_store_metadata(apps) + + os.chdir(os.path.join('repo', packageName)) + self.assertTrue(os.path.exists(os.path.join('en-US', 'icon.png'))) + self.assertTrue(os.path.exists(os.path.join('en-US', 'featureGraphic.png'))) + self.assertTrue(os.path.exists(os.path.join('en-US', 'phoneScreenshots', '1.png'))) 
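+
+ # A rough summary of what these triple-t tests exercise:
+ # copy_triple_t_store_metadata() appears to read Gradle Play Publisher
+ # ("triple-t") store listings from the app source checkouts under
+ # build/<packageName>/ and copy the text into
+ # apps[<packageName>]['localized'] and the graphics into
+ # repo/<packageName>/<locale>/, which is what the assertions on
+ # en-US/icon.png, featureGraphic.png and the screenshot directories
+ # verify across the different fixture layouts.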
+ + def test_insert_triple_t_2_metadata(self): + packageName = 'org.piwigo.android' + shutil.copytree(basedir / 'triple-t-2', self.testdir, dirs_exist_ok=True) + os.chdir(self.testdir) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + + apps = fdroidserver.metadata.read_metadata() + self.assertTrue(packageName in apps) + fdroidserver.update.copy_triple_t_store_metadata(apps) + correctlocales = ['de-DE', 'en-US', 'fr-FR', 'kn-IN'] + app = apps[packageName] + self.assertEqual('android@piwigo.org', app['authorEmail']) + self.assertEqual('https://www.piwigo.org', app['authorWebSite']) + locales = sorted(list(app['localized'].keys())) + self.assertEqual(correctlocales, locales) + kn_IN = app['localized']['kn-IN'] + self.assertTrue('description' in kn_IN) + self.assertTrue('name' in kn_IN) + self.assertTrue('summary' in kn_IN) + en_US = app['localized']['en-US'] + self.assertTrue('whatsNew' in en_US) + + os.chdir(os.path.join('repo', packageName)) + self.assertTrue(os.path.exists(os.path.join('en-US', 'icon.png'))) + self.assertTrue(os.path.exists(os.path.join('en-US', 'featureGraphic.png'))) + self.assertTrue(os.path.exists(os.path.join('en-US', 'phoneScreenshots', '01_Login.jpg'))) + self.assertTrue(os.path.exists(os.path.join('en-US', 'sevenInchScreenshots', '01_Login.png'))) + self.assertFalse(os.path.exists(os.path.join('de-DE', 'icon.png'))) + self.assertFalse(os.path.exists(os.path.join('de-DE', 'featureGraphic.png'))) + self.assertFalse(os.path.exists(os.path.join('de-DE', 'phoneScreenshots', '01_Login.jpg'))) + self.assertFalse(os.path.exists(os.path.join('de-DE', 'sevenInchScreenshots', '01_Login.png'))) + + def test_insert_triple_t_anysoftkeyboard(self): + packages = ('com.anysoftkeyboard.languagepack.dutch', 'com.menny.android.anysoftkeyboard') + names = ('Dutch for AnySoftKeyboard', 'AnySoftKeyboard') + + shutil.copytree(basedir / 'triple-t-anysoftkeyboard', self.testdir, dirs_exist_ok=True) + os.chdir(self.testdir) + + for packageName, name in zip(packages, names): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + + apps = fdroidserver.metadata.read_metadata() + self.assertTrue(packageName in apps) + fdroidserver.update.copy_triple_t_store_metadata(apps) + app = apps[packageName] + self.assertEqual(app['localized']['en-US']['name'], name) + + def test_insert_triple_t_multiple_metadata(self): + namespace = 'ch.admin.bag.covidcertificate.' 
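+ # Two apps that appear to share one upstream source tree: the loop below
+ # checks that copy_triple_t_store_metadata() assigns each listing to its
+ # own package rather than mixing up the names.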
+ packages = ('verifier', 'wallet') + names = dict(verifier='COVID Certificate Check', wallet='COVID Certificate') + + shutil.copytree(basedir / 'triple-t-multiple', self.testdir, dirs_exist_ok=True) + os.chdir(self.testdir) + + for p in packages: + packageName = namespace + p + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + + apps = fdroidserver.metadata.read_metadata() + self.assertTrue(packageName in apps) + fdroidserver.update.copy_triple_t_store_metadata(apps) + app = apps[packageName] + self.assertEqual(app['localized']['en-US']['name'], names[p]) + + def test_insert_triple_t_flutter(self): + packageName = 'fr.emersion.goguma' + + shutil.copytree(basedir / 'triple-t-flutter', self.testdir, dirs_exist_ok=True) + os.chdir(self.testdir) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + + apps = fdroidserver.metadata.read_metadata() + self.assertTrue(packageName in apps) + fdroidserver.update.copy_triple_t_store_metadata(apps) + app = apps[packageName] + self.assertEqual(app['authorWebSite'], 'https://emersion.fr') + self.assertEqual(app['localized']['en-US']['name'], 'Goguma') + self.assertEqual(app['localized']['en-US']['summary'], 'An IRC client for mobile devices') + + def testBadGetsig(self): + """getsig() should still be able to fetch the fingerprint of bad signatures""" + # config needed to use jarsigner and keytool + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + apkfile = 'urzip-badsig.apk' + sig = fdroidserver.update.getsig(apkfile) + self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', + "python sig should be: " + str(sig)) + + apkfile = 'urzip-badcert.apk' + sig = fdroidserver.update.getsig(apkfile) + self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', + "python sig should be: " + str(sig)) + + def test_getsig(self): + # config needed to use jarsigner and keytool + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + sig = fdroidserver.update.getsig('urzip-release-unsigned.apk') + self.assertIsNone(sig) + + good_fingerprint = 'b4964fd759edaa54e65bb476d0276880' + + apkpath = 'urzip-release.apk' # v1 only + sig = fdroidserver.update.getsig(apkpath) + self.assertEqual(good_fingerprint, sig, + 'python sig was: ' + str(sig)) + + apkpath = 'repo/v1.v2.sig_1020.apk' + sig = fdroidserver.update.getsig(apkpath) + self.assertEqual(good_fingerprint, sig, + 'python sig was: ' + str(sig)) + # check that v1 and v2 have the same certificate + apkobject = APK(apkpath) + cert_encoded = apkobject.get_certificates_der_v2()[0] + self.assertEqual(good_fingerprint, sig, + hashlib.md5(hexlify(cert_encoded)).hexdigest()) # nosec just used as ID for signing key + + filename = 'v2.only.sig_2.apk' + with zipfile.ZipFile(filename) as z: + self.assertTrue('META-INF/MANIFEST.MF' in z.namelist(), 'META-INF/MANIFEST.MF required') + for f in z.namelist(): + # ensure there are no v1 signature files + self.assertIsNone(fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(f)) + sig = fdroidserver.update.getsig(filename) + self.assertEqual(good_fingerprint, sig, + "python sig was: " + str(sig)) + + def testScanApksAndObbs(self): + os.chdir(self.testdir) + 
shutil.copytree(basedir / 'repo', 'repo') + shutil.copytree(basedir / 'metadata', 'metadata') + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + apps = fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + self.assertEqual(len(apks), 18) + apk = apks[1] + self.assertEqual(apk['packageName'], 'com.politedroid') + self.assertEqual(apk['versionCode'], 3) + self.assertEqual(apk['minSdkVersion'], 3) + self.assertIsNone(apk.get('targetSdkVersion')) + self.assertFalse('maxSdkVersion' in apk) + apk = apks[8] + self.assertEqual(apk['packageName'], 'obb.main.oldversion') + self.assertEqual(apk['versionCode'], 1444412523) + self.assertEqual(apk['minSdkVersion'], 4) + self.assertEqual(apk['targetSdkVersion'], 18) + self.assertFalse('maxSdkVersion' in apk) + + fdroidserver.update.insert_obbs('repo', apps, apks) + for apk in apks: + if apk['packageName'] == 'obb.mainpatch.current': + self.assertEqual(apk.get('obbMainFile'), 'main.1619.obb.mainpatch.current.obb') + self.assertEqual(apk.get('obbPatchFile'), 'patch.1619.obb.mainpatch.current.obb') + elif apk['packageName'] == 'obb.main.oldversion': + self.assertEqual(apk.get('obbMainFile'), 'main.1434483388.obb.main.oldversion.obb') + self.assertIsNone(apk.get('obbPatchFile')) + elif apk['packageName'] == 'obb.main.twoversions': + self.assertIsNone(apk.get('obbPatchFile')) + if apk['versionCode'] == 1101613: + self.assertEqual(apk.get('obbMainFile'), 'main.1101613.obb.main.twoversions.obb') + elif apk['versionCode'] == 1101615: + self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') + elif apk['versionCode'] == 1101617: + self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') + else: + self.assertTrue(False) + elif apk['packageName'] == 'info.guardianproject.urzip': + self.assertIsNone(apk.get('obbMainFile')) + self.assertIsNone(apk.get('obbPatchFile')) + + def test_apkcache_json(self): + """test the migration from pickle to json""" + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apkcache = fdroidserver.update.get_cache() + self.assertEqual(2, len(apkcache)) + self.assertEqual(fdroidserver.update.METADATA_VERSION, apkcache["METADATA_VERSION"]) + self.assertEqual(fdroidserver.update.options.allow_disabled_algorithms, + apkcache['allow_disabled_algorithms']) + apks, cachechanged = fdroidserver.update.process_apks(apkcache, 'repo', knownapks, False) + fdroidserver.update.write_cache(apkcache) + + fdroidserver.update.options.clean = False + read_from_json = fdroidserver.update.get_cache() + self.assertEqual(20, len(read_from_json)) + for f in glob.glob('repo/*.apk'): + 
self.assertTrue(os.path.basename(f) in read_from_json) + + fdroidserver.update.options.clean = True + reset = fdroidserver.update.get_cache() + self.assertEqual(2, len(reset)) + + def test_scan_repo_files(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + + os.chdir(self.testdir) + os.mkdir('repo') + filename = 'Norway_bouvet_europe_2.obf.zip' + shutil.copy(basedir / filename, 'repo') + knownapks = fdroidserver.common.KnownApks() + files, fcachechanged = fdroidserver.update.scan_repo_files(dict(), 'repo', knownapks, False) + self.assertTrue(fcachechanged) + + info = files[0] + self.assertEqual(filename, info['apkName']) + self.assertEqual(datetime, type(info['added'])) + self.assertEqual(os.path.getsize(os.path.join('repo', filename)), info['size']) + self.assertEqual( + '531190bdbc07e77d5577249949106f32dac7f62d38d66d66c3ae058be53a729d', + info['hash'], + ) + + def test_read_added_date_from_all_apks(self): + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.common.options = Options + apps = fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) + fdroidserver.update.read_added_date_from_all_apks(apps, apks) + + def test_apply_info_from_latest_apk(self): + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + apps = fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) + fdroidserver.update.apply_info_from_latest_apk(apps, apks) + + def test_scan_apk(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(basedir) + + if 'apksigner' in config: + apk_info = fdroidserver.update.scan_apk('v2.only.sig_2.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), 'v2-only') + self.assertEqual(apk_info.get('versionCode'), 2) + else: + print('WARNING: skipping v2-only test since apksigner cannot be found') + apk_info = fdroidserver.update.scan_apk('repo/v1.v2.sig_1020.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), 'v1+2') + self.assertEqual(apk_info.get('versionCode'), 1020) + + apk_info = fdroidserver.update.scan_apk('repo/souch.smsbypass_9.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), '0.9') + + apk_info = fdroidserver.update.scan_apk('repo/duplicate.permisssions_9999999.apk') + self.assertEqual(apk_info.get('versionName'), '') + self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable/ic_launcher.png', + '-1': 'res/drawable/ic_launcher.png'}) + + apk_info = fdroidserver.update.scan_apk('org.dyndns.fules.ck_20.apk') + self.assertEqual(apk_info['icons_src'], {'240': 'res/drawable-hdpi-v4/icon_launcher.png', + '120': 'res/drawable-ldpi-v4/icon_launcher.png', + '160': 
'res/drawable-mdpi-v4/icon_launcher.png', + '-1': 'res/drawable-mdpi-v4/icon_launcher.png'}) + self.assertEqual(apk_info['icons'], {}) + self.assertEqual(apk_info['features'], []) + self.assertEqual(apk_info['antiFeatures'], dict()) + self.assertEqual(apk_info['versionName'], 'v1.6pre2') + self.assertEqual(apk_info['hash'], + '897486e1f857c6c0ee32ccbad0e1b8cd82f6d0e65a44a23f13f852d2b63a18c8') + self.assertEqual(apk_info['packageName'], 'org.dyndns.fules.ck') + self.assertEqual(apk_info['versionCode'], 20) + self.assertEqual(apk_info['size'], 132453) + self.assertEqual(apk_info['nativecode'], + ['arm64-v8a', 'armeabi', 'armeabi-v7a', 'mips', 'mips64', 'x86', 'x86_64']) + self.assertEqual(apk_info['minSdkVersion'], 7) + self.assertEqual(apk_info['sig'], '9bf7a6a67f95688daec75eab4b1436ac') + self.assertEqual(apk_info['hashType'], 'sha256') + self.assertEqual(apk_info['targetSdkVersion'], 8) + + apk_info = fdroidserver.update.scan_apk('org.bitbucket.tickytacky.mirrormirror_4.apk') + self.assertEqual(apk_info.get('versionName'), '1.0.3') + self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable-mdpi/mirror.png', + '-1': 'res/drawable-mdpi/mirror.png'}) + + apk_info = fdroidserver.update.scan_apk('repo/info.zwanenburg.caffeinetile_4.apk') + self.assertEqual(apk_info.get('versionName'), '1.3') + self.assertEqual(apk_info['icons_src'], {}) + + apk_info = fdroidserver.update.scan_apk('repo/com.politedroid_6.apk') + self.assertEqual(apk_info.get('versionName'), '1.5') + self.assertEqual(apk_info['icons_src'], {'120': 'res/drawable-ldpi-v4/icon.png', + '160': 'res/drawable-mdpi-v4/icon.png', + '240': 'res/drawable-hdpi-v4/icon.png', + '320': 'res/drawable-xhdpi-v4/icon.png', + '-1': 'res/drawable-mdpi-v4/icon.png'}) + + apk_info = fdroidserver.update.scan_apk('SpeedoMeterApp.main_1.apk') + self.assertEqual(apk_info.get('versionName'), '1.0') + self.assertEqual(apk_info['icons_src'], {}) + + def test_scan_apk_no_min_target(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + apk_info = fdroidserver.update.scan_apk('repo/no.min.target.sdk_987.apk') + self.maxDiff = None + expected = { + 'icons': {}, + 'icons_src': {'-1': 'res/drawable/ic_launcher.png', + '160': 'res/drawable/ic_launcher.png'}, + 'name': 'No minSdkVersion or targetSdkVersion', + 'signer': '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + 'hashType': 'sha256', + 'packageName': 'no.min.target.sdk', + 'features': [], + 'antiFeatures': dict(), + 'size': 14102, + 'sig': 'b4964fd759edaa54e65bb476d0276880', + 'versionName': '1.2-fake', + 'uses-permission-sdk-23': [], + 'hash': 'e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c', + 'versionCode': 987, + 'minSdkVersion': 3, + 'uses-permission': [ + fdroidserver.update.UsesPermission(name='android.permission.WRITE_EXTERNAL_STORAGE', + maxSdkVersion=None), + fdroidserver.update.UsesPermission(name='android.permission.READ_PHONE_STATE', + maxSdkVersion=None), + fdroidserver.update.UsesPermission(name='android.permission.READ_EXTERNAL_STORAGE', + maxSdkVersion=None), + ], + } + if config.get('ipfs_cid'): + expected['ipfsCIDv1'] = 'bafybeidwxseoagnew3gtlasttqovl7ciuwxaud5a5p4a5pzpbrfcfj2gaa' + + self.assertDictEqual(apk_info, expected) + + def test_scan_apk_no_sig(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(basedir) + if 
os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + with self.assertRaises(fdroidserver.exception.BuildException): + fdroidserver.update.scan_apk('urzip-release-unsigned.apk') + + def test_scan_apk_bad_zip(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(self.testdir) + os.mkdir('repo') + apkfile = 'repo/badzip_1.apk' + with open(apkfile, 'w') as fp: + fp.write('this is not a zip file') + with self.assertRaises(fdroidserver.exception.BuildException): + fdroidserver.update.scan_apk(apkfile) + + @unittest.skipUnless( + os.path.exists('tests/SystemWebView-repack.apk'), "file too big for sdist" + ) + def test_scan_apk_bad_icon_id(self): + """Some APKs can produce an exception when extracting the icon + + This kind of parsing exception should be reported then ignored + so that working APKs can be included in the index. There are + so many weird things that make it into APKs, that does not + automatically disqualify them from inclusion. For example: + + ValueError: invalid literal for int() with base 16: '<0x801FF, type 0x07>' + + The test APK was made from: + https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1018#note_690565333 + It was then stripped down by doing: + + * mkdir SystemWebView + * cd SystemWebView/ + * unzip ../SystemWebView.apk + * rm -rf META-INF/ lib assets/icudtl.dat assets/stored-locales/ + * jar cf ../SystemWebView-repack.apk * + """ + # reset the state, perhaps this should be in setUp() + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + with mkdtemp() as tmpdir, TmpCwd(tmpdir): + os.mkdir('repo') + apkfile = 'repo/SystemWebView-repack.apk' + shutil.copy(basedir / os.path.basename(apkfile), apkfile) + fdroidserver.update.scan_apk(apkfile) + + def test_scan_apk_bad_namespace_in_manifest(self): + """Some APKs can produce an exception when parsing the AndroidManifest.xml + + This kind of parsing exception should be reported then ignored + so that working APKs can be included in the index. There are + so many weird things that make it into APKs, that does not + automatically disqualify them from inclusion. 
+ + This APK has elements with messed up namespaces: + + + """ + # reset the state, perhaps this should be in setUp() + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + with mkdtemp() as tmpdir, TmpCwd(tmpdir): + os.mkdir('repo') + apkfile = 'repo/org.sajeg.fallingblocks_3.apk' + shutil.copy(basedir / os.path.basename(apkfile), apkfile) + fdroidserver.update.scan_apk(apkfile) + + def test_process_apk(self): + def _build_yaml_representer(dumper, data): + '''Creates a YAML representation of a Build instance''' + return dumper.represent_dict(data) + + os.chdir(self.testdir) + shutil.copytree(basedir, 'tests') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir("tests") + + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + for icon_dir in fdroidserver.update.get_all_icon_dirs('repo'): + if not os.path.exists(icon_dir): + os.makedirs(icon_dir) + + knownapks = fdroidserver.common.KnownApks() + apkList = ['../urzip.apk', '../org.dyndns.fules.ck_20.apk'] + + for apkName in apkList: + _, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', knownapks, + False) + # Don't care about the date added to the repo and relative apkName + self.assertEqual(datetime, type(apk['added'])) + del apk['added'] + del apk['apkName'] + + # ensure that icons have been extracted properly + if apkName == '../urzip.apk': + self.assertEqual(apk['icon'], 'info.guardianproject.urzip.100.png') + if apkName == '../org.dyndns.fules.ck_20.apk': + self.assertEqual(apk['icon'], 'org.dyndns.fules.ck.20.png') + for density in fdroidserver.update.screen_densities: + icon_path = os.path.join( + fdroidserver.update.get_icon_dir('repo', density), apk['icon'] + ) + self.assertTrue(os.path.isfile(icon_path)) + self.assertTrue(os.path.getsize(icon_path) > 1) + + savepath = os.path.join('metadata', 'apk', apk['packageName'] + '.yaml') + # Uncomment to save APK metadata + # with open(savepath, 'w') as f: + # yaml.add_representer(fdroidserver.metadata.Build, _build_yaml_representer) + # yaml.dump(apk, f, default_flow_style=False) + + # CFullLoader doesn't always work + # https://github.com/yaml/pyyaml/issues/266#issuecomment-559116876 + TestLoader = FullLoader + try: + testyaml = '- !!python/object/new:fdroidserver.update.UsesPermission\n - test\n - null' + from_yaml = yaml.load(testyaml, Loader=TestLoader) # nosec B506 + except yaml.constructor.ConstructorError: + from yaml import UnsafeLoader as TestLoader + + with open(savepath, 'r') as f: + from_yaml = yaml.load(f, Loader=TestLoader) # nosec B506 + self.maxDiff = None + if not config.get('ipfs_cid'): + del from_yaml['ipfsCIDv1'] # handle when ipfs_cid is not installed + self.assertEqual(apk, from_yaml) + + def test_process_apk_signed_by_disabled_algorithms(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + 
fdroidserver.update.options.verbose = True + fdroidserver.update.options.delete_unknown = True + + knownapks = fdroidserver.common.KnownApks() + + with mkdtemp() as tmptestsdir, TmpCwd(tmptestsdir): + os.mkdir('repo') + os.mkdir('archive') + # setup the repo, create icons dirs, etc. + fdroidserver.update.process_apks({}, 'repo', knownapks) + fdroidserver.update.process_apks({}, 'archive', knownapks) + + disabledsigs = ['org.bitbucket.tickytacky.mirrormirror_2.apk'] + for apkName in disabledsigs: + shutil.copy(basedir / apkName, + os.path.join(tmptestsdir, 'repo')) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=True, + archive_bad_sig=False) + self.assertFalse(skip) + self.assertIsNotNone(apk) + self.assertTrue(cachechanged) + self.assertFalse(os.path.exists(os.path.join('archive', apkName))) + self.assertTrue(os.path.exists(os.path.join('repo', apkName))) + + if os.path.exists('/usr/bin/apksigner') or 'apksigner' in config: + print('SKIPPING: apksigner installed and it allows MD5 signatures') + return + + javac = config['jarsigner'].replace('jarsigner', 'javac') + v = subprocess.check_output([javac, '-version'], stderr=subprocess.STDOUT)[6:-1].decode('utf-8') + if LooseVersion(v) < LooseVersion('1.8.0_132'): + print('SKIPPING: running tests with old Java (' + v + ')') + return + + # this test only works on systems with fully updated Java/jarsigner + # that has MD5 listed in jdk.jar.disabledAlgorithms in java.security + # https://blogs.oracle.com/java-platform-group/oracle-jre-will-no-longer-trust-md5-signed-code-by-default + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=True) + self.assertTrue(skip) + self.assertIsNone(apk) + self.assertFalse(cachechanged) + self.assertTrue(os.path.exists(os.path.join('archive', apkName))) + self.assertFalse(os.path.exists(os.path.join('repo', apkName))) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'archive', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=False) + self.assertFalse(skip) + self.assertIsNotNone(apk) + self.assertTrue(cachechanged) + self.assertTrue(os.path.exists(os.path.join('archive', apkName))) + self.assertFalse(os.path.exists(os.path.join('repo', apkName))) + + # ensure that icons have been moved to the archive as well + for density in fdroidserver.update.screen_densities: + icon_path = os.path.join(fdroidserver.update.get_icon_dir('archive', density), + apk['icon']) + self.assertTrue(os.path.isfile(icon_path)) + self.assertTrue(os.path.getsize(icon_path) > 1) + + badsigs = ['urzip-badcert.apk', 'urzip-badsig.apk', 'urzip-release-unsigned.apk', ] + for apkName in badsigs: + shutil.copy(basedir / apkName, + os.path.join(self.testdir, 'repo')) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=False) + self.assertTrue(skip) + self.assertIsNone(apk) + self.assertFalse(cachechanged) + + def test_process_invalid_apk(self): + os.chdir(basedir) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + 
fdroidserver.update.options.delete_unknown = False + + knownapks = fdroidserver.common.KnownApks() + apk = 'fake.ota.update_1234.zip' # this is not an APK, scanning should fail + (skip, apk, cachechanged) = fdroidserver.update.process_apk({}, apk, 'repo', knownapks, + False) + + self.assertTrue(skip) + self.assertIsNone(apk) + self.assertFalse(cachechanged) + + def test_get_apks_without_allowed_signatures(self): + """Test when no AllowedAPKSigningKeys is specified""" + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + + app = fdroidserver.metadata.App() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) + apkfile = 'v1.v2.sig_1020.apk' + self.assertIn( + apkfile, + os.listdir('repo'), + f'{apkfile} was archived or otherwise removed from "repo"', + ) + (skip, apk, cachechanged) = fdroidserver.update.process_apk( + {}, apkfile, 'repo', knownapks, False + ) + + r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) + self.assertIsNone(r) + + def test_get_apks_without_allowed_signatures_allowed(self): + """Test when the APK matches the specified AllowedAPKSigningKeys""" + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + + app = fdroidserver.metadata.App( + { + 'AllowedAPKSigningKeys': '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6' + } + ) + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) + apkfile = 'v1.v2.sig_1020.apk' + (skip, apk, cachechanged) = fdroidserver.update.process_apk( + {}, apkfile, 'repo', knownapks, False + ) + + r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) + self.assertIsNone(r) + + def test_get_apks_without_allowed_signatures_blocked(self): + """Test when the APK does not match any specified AllowedAPKSigningKeys""" + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + + app = fdroidserver.metadata.App( + { + 'AllowedAPKSigningKeys': 'fa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edead' + } + ) + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) + apkfile = 'v1.v2.sig_1020.apk' + (skip, apk, cachechanged) = fdroidserver.update.process_apk( + {}, apkfile, 'repo', knownapks, False + ) + + r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) + self.assertEqual(apkfile, r) + + def test_update_with_AllowedAPKSigningKeys(self): + """Test that APKs without allowed signatures get deleted.""" + os.chdir(self.testdir) + os.mkdir('repo') + testapk = os.path.join('repo', 'com.politedroid_6.apk') + shutil.copy(basedir / testapk, testapk) + os.mkdir('metadata') + metadatafile = os.path.join('metadata', 'com.politedroid.yml') + + # Copy and manipulate metadata file + shutil.copy(basedir / metadatafile, metadatafile) + 
with open(metadatafile, 'a') as fp: + fp.write( + '\n\nAllowedAPKSigningKeys: 32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6\n' + ) + + # Set up options + fdroidserver.common.options = Options + config = fdroidserver.common.read_config() + if 'apksigner' not in config: # TODO remove me for buildserver-bullseye + self.skipTest('SKIPPING test_update_with_AllowedAPKSigningKeys, apksigner not installed!') + config['repo_keyalias'] = 'sova' + config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' + config['keystore'] = os.path.join(basedir, 'keystore.jks') + + self.assertTrue(os.path.exists(testapk)) + + # Test for non-deletion + with mock.patch('sys.argv', ['fdroid update', '--delete-unknown']): + fdroidserver.update.main() + self.assertTrue(os.path.exists(testapk)) + + # Copy and manipulate metadata file again + shutil.copy(basedir / metadatafile, metadatafile) + with open(metadatafile, 'a') as fp: + fp.write( + '\n\nAllowedAPKSigningKeys: fa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edead\n' + ) + + # Test for deletion + with mock.patch('sys.argv', ['fdroid update', '--delete-unknown']): + fdroidserver.update.main() + self.assertFalse(os.path.exists(testapk)) + + def test_translate_per_build_anti_features(self): + os.chdir(self.testdir) + shutil.copytree(basedir / 'repo', 'repo') + shutil.copytree(basedir / 'metadata', 'metadata') + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + + apps = fdroidserver.metadata.read_metadata() + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + fdroidserver.update.translate_per_build_anti_features(apps, apks) + self.assertEqual(len(apks), 18) + foundtest = False + for apk in apks: + if apk['packageName'] == 'com.politedroid' and apk['versionCode'] == 3: + antiFeatures = apk.get('antiFeatures') + self.assertTrue('KnownVuln' in antiFeatures) + self.assertEqual(2, len(antiFeatures)) + foundtest = True + self.assertTrue(foundtest) + + def test_create_metadata_from_template(self): + os.chdir(self.testdir) + os.mkdir('repo') + os.mkdir('metadata') + shutil.copy(basedir / 'urzip.apk', 'repo') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + self.assertEqual(1, len(apks)) + apk = apks[0] + + testfile = 'metadata/info.guardianproject.urzip.yml' + # create empty 0 byte .yml file, run read_metadata, it should work + open(testfile, 'a').close() + apps = fdroidserver.metadata.read_metadata() + self.assertEqual(1, len(apps)) + os.remove(testfile) + + # test using internal template + apps = fdroidserver.metadata.read_metadata() + self.assertEqual(0, len(apps)) + fdroidserver.update.create_metadata_from_template(apk) + 
self.assertTrue(os.path.exists(testfile)) + apps = fdroidserver.metadata.read_metadata() + self.assertEqual(1, len(apps)) + for app in apps.values(): + self.assertEqual('urzip', app['Name']) + self.assertEqual(1, len(app['Categories'])) + break + + # test using external template.yml + os.remove(testfile) + self.assertFalse(os.path.exists(testfile)) + shutil.copy(basedir.with_name('examples') / 'template.yml', self.testdir) + fdroidserver.update.create_metadata_from_template(apk) + self.assertTrue(os.path.exists(testfile)) + apps = fdroidserver.metadata.read_metadata() + self.assertEqual(1, len(apps)) + for app in apps.values(): + self.assertEqual('urzip', app['Name']) + self.assertEqual(1, len(app['Categories'])) + self.assertEqual('Internet', app['Categories'][0]) + break + with open(testfile) as fp: + data = yaml.load(fp, Loader=SafeLoader) + self.assertEqual('urzip', data['Name']) + self.assertEqual('urzip', data['Summary']) + + def test_has_known_vulnerability(self): + good = [ + 'org.bitbucket.tickytacky.mirrormirror_1.apk', + 'org.bitbucket.tickytacky.mirrormirror_2.apk', + 'org.bitbucket.tickytacky.mirrormirror_3.apk', + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'org.dyndns.fules.ck_20.apk', + 'urzip.apk', + 'urzip-badcert.apk', + 'urzip-badsig.apk', + 'urzip-release.apk', + 'urzip-release-unsigned.apk', + 'repo/com.politedroid_3.apk', + 'repo/com.politedroid_4.apk', + 'repo/com.politedroid_5.apk', + 'repo/com.politedroid_6.apk', + 'repo/obb.main.oldversion_1444412523.apk', + 'repo/obb.mainpatch.current_1619_another-release-key.apk', + 'repo/obb.mainpatch.current_1619.apk', + 'repo/obb.main.twoversions_1101613.apk', + 'repo/obb.main.twoversions_1101615.apk', + 'repo/obb.main.twoversions_1101617.apk', + 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', + ] + for f in good: + self.assertFalse(fdroidserver.update.has_known_vulnerability(f)) + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.update.has_known_vulnerability('janus.apk') + + def test_get_apk_icon_when_src_is_none(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + + # pylint: disable=protected-access + icons_src = fdroidserver.update._get_apk_icons_src('urzip-release.apk', None) + self.assertFalse(icons_src) + + def test_strip_and_copy_image(self): + in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' + out_file = os.path.join(self.testdir, 'icon.png') + with self.assertLogs(level=logging.DEBUG): + fdroidserver.update._strip_and_copy_image(in_file, out_file) + self.assertTrue(os.path.exists(out_file)) + + def test_strip_and_copy_image_bad_filename(self): + in_file = basedir / 'corrupt-featureGraphic.png' + out_file = os.path.join(self.testdir, 'corrupt-featureGraphic.png') + with self.assertLogs(level=logging.DEBUG): + fdroidserver.update._strip_and_copy_image(in_file, out_file) + self.assertFalse(os.path.exists(out_file)) + + def test_strip_and_copy_image_unchanged(self): + in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' + out_file = os.path.join(self.testdir, 'icon.png') + shutil.copy2(in_file, out_file) + ctime = os.path.getctime(out_file) + delta = 0.01 + time.sleep(delta) # ensure reliable failure if file isn't preserved + with self.assertLogs(level=logging.DEBUG): # suppress log output + fdroidserver.update._strip_and_copy_image(in_file, out_file) + self.assertAlmostEqual(ctime, os.path.getctime(out_file), 
delta=delta) + + def test_strip_and_copy_image_in_file_ctime_changed(self): + out_file = os.path.join(self.testdir, 'icon.png') + with open(out_file, 'w') as fp: + fp.write('to be replaced') + size = os.path.getsize(out_file) + delta = 0.01 + time.sleep(delta) # ensure reliable failure when testing ctime + src_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' + in_file = os.path.join(self.testdir, 'in-icon.png') + shutil.copy(src_file, in_file) + time.sleep(delta) # ensure reliable failure when testing ctime + with self.assertLogs(level=logging.DEBUG): # suppress log output + fdroidserver.update._strip_and_copy_image(in_file, out_file) + self.assertNotEqual(size, os.path.getsize(out_file)) + self.assertTrue(os.path.getctime(in_file) <= os.path.getctime(out_file)) + # _strip_and_copy_image syncs mtime from in_file to out_file + self.assertAlmostEqual( + os.path.getmtime(in_file), os.path.getmtime(out_file), delta=delta + ) + + def test_strip_and_copy_image_in_file_mtime_changed(self): + in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' + out_file = os.path.join(self.testdir, 'icon.png') + shutil.copy(in_file, out_file) + os.utime(out_file, (12345, 12345)) # set atime/mtime to something old + with self.assertLogs(level=logging.DEBUG): # suppress log output + fdroidserver.update._strip_and_copy_image(in_file, out_file) + delta = 0.01 + self.assertNotAlmostEqual( + os.path.getctime(in_file), os.path.getctime(out_file), delta=delta + ) + # _strip_and_copy_image syncs mtime from in_file to out_file + self.assertAlmostEqual( + os.path.getmtime(in_file), os.path.getmtime(out_file), delta=delta + ) + + def test_create_metadata_from_template_empty_keys(self): + apk = {'packageName': 'rocks.janicerand'} + with mkdtemp() as tmpdir, TmpCwd(tmpdir): + os.mkdir('metadata') + with open('template.yml', 'w') as f: + f.write( + textwrap.dedent( + '''\ + Disabled: + License: + AuthorName: + AuthorEmail: + AuthorWebSite: + WebSite: + SourceCode: + IssueTracker: + Translation: + Changelog: + Donate: + Bitcoin: + Litecoin: + Name: + AutoName: + Summary: + RequiresRoot: + RepoType: + Repo: + Binaries: + Builds: + ArchivePolicy: + AutoUpdateMode: + UpdateCheckMode: + UpdateCheckIgnore: + VercodeOperation: + UpdateCheckName: + UpdateCheckData: + CurrentVersion: + CurrentVersionCode: + NoSourceSince: + ''' + ) + ) + fdroidserver.update.create_metadata_from_template(apk) + with open(os.path.join('metadata', 'rocks.janicerand.yml')) as f: + metadata_content = yaml.load(f, Loader=SafeLoader) + self.maxDiff = None + self.assertDictEqual( + metadata_content, + { + 'ArchivePolicy': None, + 'AuthorEmail': '', + 'AuthorName': '', + 'AuthorWebSite': '', + 'AutoName': 'rocks.janicerand', + 'AutoUpdateMode': '', + 'Binaries': '', + 'Bitcoin': '', + 'Builds': None, + 'Changelog': '', + 'CurrentVersion': '', + 'CurrentVersionCode': None, + 'Disabled': '', + 'Donate': '', + 'IssueTracker': '', + 'License': '', + 'Litecoin': '', + 'Name': 'rocks.janicerand', + 'NoSourceSince': '', + 'Repo': '', + 'RepoType': '', + 'RequiresRoot': None, + 'SourceCode': '', + 'Summary': 'rocks.janicerand', + 'Translation': '', + 'UpdateCheckData': '', + 'UpdateCheckIgnore': '', + 'UpdateCheckMode': '', + 'UpdateCheckName': '', + 'VercodeOperation': None, + 'WebSite': '', + }, + ) + + def test_insert_funding_yml_donation_links(self): + os.chdir(self.testdir) + os.mkdir('build') + content = textwrap.dedent( + """ + community_bridge: '' + custom: [LINK1, LINK2] + github: USERNAME + issuehunt: 
USERNAME + ko_fi: USERNAME + liberapay: USERNAME + open_collective: USERNAME + otechie: USERNAME + patreon: USERNAME + """ + ) + app = fdroidserver.metadata.App() + app.id = 'fake.app.id' + apps = {app.id: app} + os.mkdir(os.path.join('build', app.id)) + fdroidserver.update.insert_funding_yml_donation_links(apps) + for field in DONATION_FIELDS: + self.assertFalse(app.get(field)) + with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: + fp.write(content) + + fdroidserver.update.insert_funding_yml_donation_links(apps) + for field in DONATION_FIELDS: + self.assertIsNotNone(app.get(field), field) + self.assertEqual('LINK1', app.get('Donate')) + self.assertEqual('USERNAME', app.get('Liberapay')) + self.assertEqual('USERNAME', app.get('OpenCollective')) + + app['Donate'] = 'keepme' + app['Liberapay'] = 'keepme' + app['OpenCollective'] = 'keepme' + fdroidserver.update.insert_funding_yml_donation_links(apps) + for field in DONATION_FIELDS: + self.assertEqual('keepme', app.get(field)) + + def test_insert_funding_yml_donation_links_one_at_a_time(self): + """Exercise the FUNDING.yml code one entry at a time""" + os.chdir(self.testdir) + os.mkdir('build') + + app = fdroidserver.metadata.App() + app.id = 'fake.app.id' + apps = {app.id: app} + os.mkdir(os.path.join('build', app.id)) + fdroidserver.update.insert_funding_yml_donation_links(apps) + for field in DONATION_FIELDS: + self.assertIsNone(app.get(field)) + + content = textwrap.dedent( + """ + community_bridge: 'blah-de-blah' + github: USERNAME + issuehunt: USERNAME + ko_fi: USERNAME + liberapay: USERNAME + open_collective: USERNAME + patreon: USERNAME + """ + ) + for line in content.split('\n'): + if not line: + continue + app = fdroidserver.metadata.App() + app.id = 'fake.app.id' + apps = {app.id: app} + with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: + fp.write(line) + data = yaml.load(line, Loader=SafeLoader) + fdroidserver.update.insert_funding_yml_donation_links(apps) + if 'liberapay' in data: + self.assertEqual(data['liberapay'], app.get('Liberapay')) + elif 'open_collective' in data: + self.assertEqual(data['open_collective'], app.get('OpenCollective')) + else: + for v in data.values(): + self.assertEqual(app.get('Donate', '').split('/')[-1], v) + + def test_insert_funding_yml_donation_links_with_corrupt_file(self): + os.chdir(self.testdir) + os.mkdir('build') + app = fdroidserver.metadata.App() + app.id = 'fake.app.id' + apps = {app.id: app} + os.mkdir(os.path.join('build', app.id)) + with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: + fp.write( + textwrap.dedent( + """ + opencollective: foo + custom: [] + liberapay: : + """ + ) + ) + fdroidserver.update.insert_funding_yml_donation_links(apps) + for field in DONATION_FIELDS: + self.assertIsNone(app.get(field)) + + def test_sanitize_funding_yml(self): + with open(basedir / 'funding-usernames.yaml') as fp: + data = yaml.load(fp, Loader=SafeLoader) + for k, entries in data.items(): + for entry in entries: + if k in 'custom': + m = fdroidserver.update.sanitize_funding_yml_entry(entry) + else: + m = fdroidserver.update.sanitize_funding_yml_name(entry) + if k == 'bad': + self.assertIsNone(m) + else: + self.assertIsNotNone(m) + self.assertIsNone(fdroidserver.update.sanitize_funding_yml_entry('foo\nbar')) + self.assertIsNone(fdroidserver.update.sanitize_funding_yml_entry( + ''.join(chr(random.randint(65, 90)) for _ in range(2049)))) # nosec B311 + + # not recommended but valid entries + 
self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(12345)) + self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(5.0)) + self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(' WhyIncludeWhitespace ')) + self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(['first', 'second'])) + + def test_set_localized_text_entry(self): + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + + files = { + 'full-description.txt': 'description', + 'short-description.txt': 'summary', + 'title.txt': 'name', + 'video-url.txt': 'video', + } + + for f, key in files.items(): + limit = config['char_limits'][key] + with open(f, 'w') as fp: + fp.write(''.join(random.choice(string.ascii_letters) for i in range(limit + 100))) # nosec B311 + locale = 'ru_US' + app = dict() + fdroidserver.update._set_localized_text_entry(app, locale, key, f) + self.assertEqual(limit, len(app['localized'][locale][key])) + + f = 'badlink-' + f + os.symlink('/path/to/nowhere', f) + app = dict() + fdroidserver.update._set_localized_text_entry(app, locale, key, f) + self.assertIsNone(app['localized'].get(locale, {}).get(key)) + + def test_set_author_entry(self): + os.chdir(self.testdir) + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + + f = 'contact-website.txt' + key = 'author' + url = 'https://f-droid.org/' + limit = config['char_limits']['author'] + with open(f, 'w') as fp: + fp.write(url) + fp.write('\n') + app = dict() + fdroidserver.update._set_author_entry(app, key, f) + self.assertEqual(url, app[key]) + + f = 'limits.txt' + key = 'author' + limit = config['char_limits']['author'] + for key in ('authorEmail', 'authorPhone', 'authorWebSite'): + with open(f, 'w') as fp: + fp.write(''.join(random.choice(string.ascii_letters) for i in range(limit + 100))) # nosec B311 + app = dict() + fdroidserver.update._set_author_entry(app, key, f) + self.assertEqual(limit, len(app[key])) + + f = 'badlink.txt' + os.symlink('/path/to/nowhere', f) + app = dict() + fdroidserver.update._set_author_entry(app, key, f) + self.assertIsNone(app.get(key)) + + def test_status_update_json(self): + fdroidserver.common.config = {} + fdroidserver.update.config = {} + fdroidserver.update.options = Options + with mkdtemp() as tmpdir: + os.chdir(tmpdir) + with mock.patch('sys.argv', ['fdroid update', '']): + fdroidserver.update.status_update_json({}, [], []) + with open('repo/status/update.json') as fp: + data = json.load(fp) + self.assertTrue('apksigner' in data) + + fdroidserver.update.config = { + 'apksigner': 'apksigner', + } + fdroidserver.update.status_update_json({}, [], []) + with open('repo/status/update.json') as fp: + data = json.load(fp) + self.assertEqual(shutil.which(fdroidserver.update.config['apksigner']), data['apksigner']) + + fdroidserver.update.config = {} + fdroidserver.common.fill_config_defaults(fdroidserver.update.config) + fdroidserver.update.status_update_json({}, [], []) + with open('repo/status/update.json') as fp: + data = json.load(fp) + self.assertEqual(fdroidserver.update.config.get('apksigner'), data['apksigner']) + self.assertEqual(fdroidserver.update.config['jarsigner'], data['jarsigner']) + self.assertEqual(fdroidserver.update.config['keytool'], data['keytool']) + + def test_scan_metadata_androguard(self): + + def 
_create_apkmetadata_object(apkName): + """Create an empty apk metadata object.""" + apk = {} + apk['apkName'] = apkName + apk['uses-permission'] = [] + apk['uses-permission-sdk-23'] = [] + apk['features'] = [] + apk['icons_src'] = {} + return apk + + apkList = [ + ( + 'org.dyndns.fules.ck_20.apk', + { + 'apkName': 'org.dyndns.fules.ck_20.apk', + 'uses-permission': [ + fdroidserver.update.UsesPermission( + name='android.permission.BIND_INPUT_METHOD', + maxSdkVersion=None, + ), + fdroidserver.update.UsesPermission( + name='android.permission.READ_EXTERNAL_STORAGE', + maxSdkVersion=None, + ), + fdroidserver.update.UsesPermission( + name='android.permission.VIBRATE', maxSdkVersion=None + ), + ], + 'uses-permission-sdk-23': [], + 'features': [], + 'icons_src': { + '240': 'res/drawable-hdpi-v4/icon_launcher.png', + '120': 'res/drawable-ldpi-v4/icon_launcher.png', + '160': 'res/drawable-mdpi-v4/icon_launcher.png', + '-1': 'res/drawable-mdpi-v4/icon_launcher.png', + }, + 'packageName': 'org.dyndns.fules.ck', + 'versionCode': 20, + 'versionName': 'v1.6pre2', + 'minSdkVersion': 7, + 'name': 'Compass Keyboard', + 'targetSdkVersion': 8, + 'nativecode': [ + 'arm64-v8a', + 'armeabi', + 'armeabi-v7a', + 'mips', + 'mips64', + 'x86', + 'x86_64', + ], + }, + ) + ] + + for apkfile, apkaapt in apkList: + apkandroguard = _create_apkmetadata_object(apkfile) + fdroidserver.update.scan_apk_androguard(apkandroguard, apkfile) + + self.maxDiff = None + self.assertEqual(apkaapt, apkandroguard) + + def test_exclude_disabled_apks(self): + os.chdir(self.testdir) + os.mkdir('repo') + testapk = os.path.join('repo', 'com.politedroid_6.apk') + testapk_new = os.path.join('repo', 'Politedroid-1.5.apk') + shutil.copy(basedir / testapk, testapk_new) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.common.options = Options + fdroidserver.update.options = fdroidserver.common.options + fdroidserver.update.options.clean = True + + app = fdroidserver.metadata.App() + app.id = 'com.politedroid' + apps = {app.id: app} + build = fdroidserver.metadata.Build() + build.versionCode = 6 + build.disable = "disabled" + app['Builds'] = [build] + + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False, apps) + self.assertEqual([], apks) + + def test_archive_old_apks_ArchivePolicy_0(self): + app = fdroidserver.metadata.App() + app.id = 'test' + app.ArchivePolicy = 0 + apps = {app.id: app} + with self.assertLogs(level='DEBUG') as cm: + fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) + self.assertEqual(cm.output, [ + "DEBUG:root:Checking archiving for test - apks:0, keepversions:0, archapks:0" + ]) + + def test_archive_old_apks(self): + app = fdroidserver.metadata.App() + app.id = 'test' + app.VercodeOperation = ['%c+1', '%c+2', '%c+3', '%c+4'] + apps = {app.id: app} + with self.assertLogs(level='DEBUG') as cm: + fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) + self.assertEqual(cm.output, [ + "DEBUG:root:Checking archiving for test - apks:0, keepversions:12, archapks:0" + ]) + + app = fdroidserver.metadata.App() + app.id = 'org.smssecure.smssecure' + app.CurrentVersionCode = 135 + apps = {app.id: app} + with self.assertLogs(level='DEBUG') as cm: + fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) + self.assertEqual(cm.output, [ + "DEBUG:root:Checking archiving for org.smssecure.smssecure - 
apks:0, keepversions:6, archapks:0"
+ ])
+
+ def test_categories_txt_is_removed_by_delete_unknown(self):
+ """categories.txt used to be a part of this system; now it's nothing."""
+ os.chdir(self.testdir)
+ fdroidserver.common.write_config_file(
+ 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n'
+ )
+
+ categories_txt = Path('repo/categories.txt')
+ categories_txt.parent.mkdir()
+ categories_txt.write_text('placeholder')
+
+ self.assertTrue(categories_txt.exists())
+ with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']):
+ fdroidserver.update.main()
+ self.assertFalse(categories_txt.exists())
+
+ def test_no_blank_auto_defined_categories(self):
+ """When no app has Categories, there should be no definitions in the repo."""
+ os.chdir(self.testdir)
+ os.mkdir('metadata')
+ os.mkdir('repo')
+ fdroidserver.common.write_config_file(
+ 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n'
+ )
+
+ testapk = os.path.join('repo', 'com.politedroid_6.apk')
+ shutil.copy(basedir / testapk, testapk)
+ Path('metadata/com.politedroid.yml').write_text('Name: Polite')
+
+ with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']):
+ fdroidserver.update.main()
+ with open('repo/index-v2.json') as fp:
+ index = json.load(fp)
+ self.assertNotIn(CATEGORIES_CONFIG_NAME, index['repo'])
+
+ def test_auto_defined_categories(self):
+ """Repos that don't define categories in config/ should use auto-generated."""
+ os.chdir(self.testdir)
+ os.mkdir('metadata')
+ os.mkdir('repo')
+ fdroidserver.common.write_config_file(
+ 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n'
+ )
+
+ testapk = os.path.join('repo', 'com.politedroid_6.apk')
+ shutil.copy(basedir / testapk, testapk)
+ Path('metadata/com.politedroid.yml').write_text('Categories: [Time]')
+
+ with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']):
+ fdroidserver.update.main()
+ with open('repo/index-v2.json') as fp:
+ index = json.load(fp)
+ self.assertEqual(
+ {'Time': {'name': {'en-US': 'Time'}}},
+ index['repo'][CATEGORIES_CONFIG_NAME],
+ )
+
+ def test_categories_with_only_icon_defined(self):
+ """If categories.yml only includes the icon, the name should be added."""
+ os.chdir(self.testdir)
+ os.mkdir('config')
+ os.mkdir('metadata')
+ os.mkdir('repo')
+ fdroidserver.common.write_config_file(
+ 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n'
+ )
+ testvalue = 'Time'
+ Path('config/time.png').write_text('placeholder')
+ Path('config/categories.yml').write_text(testvalue + ': {icon: time.png}')
+
+ testapk = os.path.join('repo', 'com.politedroid_6.apk')
+ shutil.copy(basedir / testapk, testapk)
+ Path('metadata/com.politedroid.yml').write_text(f'Categories: [{testvalue}]')
+
+ with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']):
+ fdroidserver.update.main()
+ with open('repo/index-v2.json') as fp:
+ index = json.load(fp)
+ self.assertEqual(
+ {
+ 'icon': {
+ 'en-US': {
+ 'name': '/icons/time.png',
+ 'sha256': '4097889236a2af26c293033feb964c4cf118c0224e0d063fec0a89e9d0569ef2',
+ 'size': 11,
+ }
+ },
+ 'name': {'en-US': testvalue},
+ },
+ index['repo'][CATEGORIES_CONFIG_NAME][testvalue],
+ )
+
+ def test_auto_defined_categories_two_apps(self):
+ """Repos that don't define categories in config/ should use auto-generated."""
+ os.chdir(self.testdir)
+ os.mkdir('metadata')
+ os.mkdir('repo')
+ fdroidserver.common.write_config_file(
+ 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n'
+ )
+
+ testapk = os.path.join('repo', 
'com.politedroid_6.apk') + shutil.copy(basedir / testapk, testapk) + Path('metadata/com.politedroid.yml').write_text('Categories: [bar]') + testapk = os.path.join('repo', 'souch.smsbypass_9.apk') + shutil.copy(basedir / testapk, testapk) + Path('metadata/souch.smsbypass.yml').write_text('Categories: [foo, bar]') + + with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): + fdroidserver.update.main() + with open('repo/index-v2.json') as fp: + index = json.load(fp) + self.assertEqual( + {'bar': {'name': {'en-US': 'bar'}}, 'foo': {'name': {'en-US': 'foo'}}}, + index['repo'][CATEGORIES_CONFIG_NAME], + ) + + def test_auto_defined_categories_mix_into_config_categories(self): + """Repos that don't define all categories in config/ also use auto-generated.""" + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('System: {name: System Apps}') + os.mkdir('metadata') + os.mkdir('repo') + fdroidserver.common.write_config_file( + 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' + ) + + testapk = os.path.join('repo', 'com.politedroid_6.apk') + shutil.copy(basedir / testapk, testapk) + Path('metadata/com.politedroid.yml').write_text('Categories: [Time]') + testapk = os.path.join('repo', 'souch.smsbypass_9.apk') + shutil.copy(basedir / testapk, testapk) + Path('metadata/souch.smsbypass.yml').write_text('Categories: [System, Time]') + + with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): + fdroidserver.update.main() + with open('repo/index-v2.json') as fp: + index = json.load(fp) + self.assertEqual( + { + 'System': {'name': {'en-US': 'System Apps'}}, + 'Time': {'name': {'en-US': 'Time'}}, + }, + index['repo'][CATEGORIES_CONFIG_NAME], + ) + + def test_empty_categories_not_in_index(self): + """A category with no apps should be ignored, even if defined in config.""" + os.chdir(self.testdir) + os.mkdir('config') + Path('config/categories.yml').write_text('System: {name: S}\nTime: {name: T}\n') + os.mkdir('metadata') + os.mkdir('repo') + fdroidserver.common.write_config_file( + 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' + ) + + testapk = os.path.join('repo', 'com.politedroid_6.apk') + shutil.copy(basedir / testapk, testapk) + Path('metadata/com.politedroid.yml').write_text('Categories: [Time]') + + with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): + fdroidserver.update.main() + with open('repo/index-v2.json') as fp: + index = json.load(fp) + self.assertEqual( + {'Time': {'name': {'en-US': 'T'}}}, + index['repo'][CATEGORIES_CONFIG_NAME], + ) + + +class TestParseIpa(unittest.TestCase): + def test_parse_ipa(self): + self.maxDiff = None + try: + import biplist # Fedora does not have a biplist package + + biplist # silence the linters + except ImportError as e: + self.skipTest(str(e)) + ipa_path = os.path.join(basedir, 'com.fake.IpaApp_1000000000001.ipa') + result = fdroidserver.update.parse_ipa(ipa_path, 'fake_size', 'fake_sha') + self.assertDictEqual( + result, + { + 'apkName': 'com.fake.IpaApp_1000000000001.ipa', + 'hash': 'fake_sha', + 'hashType': 'sha256', + 'packageName': 'org.onionshare.OnionShare', + 'size': 'fake_size', + 'versionCode': 1000000000001, + 'versionName': '1.0.1', + 'ipa_DTPlatformVersion': '16.4', + 'ipa_MinimumOSVersion': '15.0', + 'ipa_entitlements': set(), + 'ipa_permissions': { + 'NSCameraUsageDescription': + 'Please allow access to your ' + 'camera, if you want to ' + 'create photos or videos for ' + 'direct sharing.', + 
'NSMicrophoneUsageDescription': + 'Please allow access to ' + 'your microphone, if you ' + 'want to create videos ' + 'for direct sharing.', + 'NSPhotoLibraryUsageDescription': + 'Please allow access to ' + 'your photo library, if ' + 'you want to share ' + 'photos.', + }, + 'name': 'OnionShare', + }, + ) + + +class TestUpdateVersionStringToInt(unittest.TestCase): + def test_version_string_to_int(self): + self.assertEqual( + fdroidserver.update.version_string_to_int("1.2.3"), 1000002000003 + ) + self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0003"), 3) + self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0"), 0) + self.assertEqual( + fdroidserver.update.version_string_to_int("4321.321.21"), 4321000321000021 + ) + self.assertEqual( + fdroidserver.update.version_string_to_int("18446744.073709.551615"), + 18446744073709551615, + ) + + def test_version_string_to_int_value_errors(self): + with self.assertRaises(ValueError): + fdroidserver.update.version_string_to_int("1.2.3a") + with self.assertRaises(ValueError): + fdroidserver.update.version_string_to_int("asdfasdf") + with self.assertRaises(ValueError): + fdroidserver.update.version_string_to_int("1.2.-3") + with self.assertRaises(ValueError): + fdroidserver.update.version_string_to_int("-1.2.-3") + with self.assertRaises(ValueError): + fdroidserver.update.version_string_to_int("0.0.0x3") + + +class TestScanRepoForIpas(unittest.TestCase): + def test_scan_repo_for_ipas_no_cache(self): + self.maxDiff = None + with mkdtemp() as tmpdir: + os.chdir(tmpdir) + os.mkdir("repo") + with open('repo/abc.Def_123.ipa', 'w') as f: + f.write('abc') + with open('repo/xyz.XXX_123.ipa', 'w') as f: + f.write('xyz') + + apkcache = mock.MagicMock() + repodir = "repo" + knownapks = mock.MagicMock() + + def mocked_parse(p, s, c): + # pylint: disable=unused-argument + return {'packageName': 'abc' if 'abc' in p else 'xyz'} + + with mock.patch('fdroidserver.update.parse_ipa', mocked_parse): + ipas, checkchanged = fdroidserver.update.scan_repo_for_ipas( + apkcache, repodir, knownapks + ) + + self.assertEqual(checkchanged, True) + self.assertEqual(len(ipas), 2) + package_names_in_ipas = [x['packageName'] for x in ipas] + self.assertTrue('abc' in package_names_in_ipas) + self.assertTrue('xyz' in package_names_in_ipas) + + apkcache_setter_package_name = [ + x.args[1]['packageName'] for x in apkcache.__setitem__.mock_calls + ] + self.assertTrue('abc' in apkcache_setter_package_name) + self.assertTrue('xyz' in apkcache_setter_package_name) + self.assertEqual(apkcache.__setitem__.call_count, 2) + + knownapks.recordapk.call_count = 2 + self.assertTrue( + unittest.mock.call('abc.Def_123.ipa') in knownapks.recordapk.mock_calls + ) + self.assertTrue( + unittest.mock.call('xyz.XXX_123.ipa') in knownapks.recordapk.mock_calls + ) + + +class TestParseIosScreenShotName(unittest.TestCase): + def setUp(self): + self.maxDiff = None + + def test_parse_ios_screenshot_name_atforamt_iphone8(self): + self.assertEqual( + fdroidserver.update.parse_ios_screenshot_name(Path("iPhone 8+ @ iOS 16-1.png")), + ("phoneScreenshots", "iPhone 8+", "iOS 16",), + ) + + def test_parse_ios_screenshot_name_atforamt_ipad13(self): + self.assertEqual( + fdroidserver.update.parse_ios_screenshot_name(Path("iPad Pro 12.9\" 2gen @ iOS 16-1.png")), + ("tenInchScreenshots", "iPad Pro 12.9\" 2gen", "iOS 16",), + ) + + def test_parse_ios_screenshot_name_underscoreforamt_ipad(self): + self.assertEqual( + fdroidserver.update.parse_ios_screenshot_name(Path("1_ipadPro129_1.1.png")), + 
("tenInchScreenshots", "ipadpro129", "unknown",), + ) + + def test_parse_ios_screenshot_name_underscoreforamt_iphone(self): + self.assertEqual( + fdroidserver.update.parse_ios_screenshot_name(Path("1_iphone6Plus_1.1.png")), + ("phoneScreenshots", "iphone6plus", "unknown",), + ) + + +class TestInsertLocalizedIosAppMetadata(unittest.TestCase): + + def test_insert_localized_ios_app_metadata(self): + self.maxDiff = None + + self.apps_with_packages = { + "org.fake": {} + } + + def _mock_discover(fastlane_dir): + self.assertEqual( + fastlane_dir, + Path('build/org.fake/fastlane'), + ) + return {"fake screenshots": "fake"} + + def _mock_copy(screenshots, package_name): + self.assertEqual(screenshots, {"fake screenshots": "fake"}) + self.assertEqual(package_name, "org.fake") + + with mock.patch('fdroidserver.update.discover_ios_screenshots', _mock_discover): + self.set_localized_mock = mock.Mock() + with mock.patch('fdroidserver.update.copy_ios_screenshots_to_repo', _mock_copy): + with mock.patch("fdroidserver.update._set_localized_text_entry", self.set_localized_mock): + return fdroidserver.update.insert_localized_ios_app_metadata( + self.apps_with_packages + ) + + self.assertListEqual( + self.set_localized_mock.call_args_list, + [ + mock.call({}, 'en-US', 'name', Path('build/org.fake/fastlane/metadata/en-US/name.txt')), + mock.call({}, 'en-US', 'summary', Path('build/org.fake/fastlane/metadata/en-US/subtitle.txt')), + mock.call({}, 'en-US', 'description', Path('build/org.fake/fastlane/metadata/en-US/description.txt')), + mock.call({}, 'de-DE', 'name', Path('build/org.fake/fastlane/metadata/de-DE/name.txt')), + mock.call({}, 'de-DE', 'summary', Path('build/org.fake/fastlane/metadata/de-DE/subtitle.txt')), + mock.call({}, 'de-DE', 'description', Path('build/org.fake/fastlane/metadata/de-DE/description.txt')), + ], + ) + + +class TestDiscoverIosScreenshots(unittest.TestCase): + def test_discover_ios_screenshots(self): + self.maxDiff = None + + with mkdtemp() as fastlane_dir: + fastlane_dir = Path(fastlane_dir) + (fastlane_dir / "screenshots/en-US").mkdir(parents=True) + with open(fastlane_dir / "screenshots/en-US/iPhone 8+ @ iOS 16-1.png", 'w') as f: + f.write("1") + with open(fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-1.png", "w") as f: + f.write("2") + with open(fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-2.png", "w") as f: + f.write("3") + (fastlane_dir / "screenshots/de-DE").mkdir(parents=True) + with open(fastlane_dir / "screenshots/de-DE/1_ipadPro129_1.1.png", "w") as f: + f.write("4") + + screenshots = fdroidserver.update.discover_ios_screenshots(fastlane_dir) + + self.assertDictEqual( + screenshots, + { + "en-US": { + "phoneScreenshots": [ + fastlane_dir / "screenshots/en-US/iPhone 8+ @ iOS 16-1.png", + ], + "tenInchScreenshots": [ + fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-1.png", + fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-2.png", + ], + }, + "de-DE": { + "tenInchScreenshots": [ + fastlane_dir / "screenshots/de-DE/1_ipadPro129_1.1.png", + ], + }, + }, + ) + + +class TestCopyIosScreenshotsToRepo(unittest.TestCase): + def setUp(self): + self._td = mkdtemp() + os.chdir(self._td.name) + + def tearDown(self): + os.chdir(basedir) + self._td.cleanup() + + def test_copy_ios_screenshots_to_repo(self): + self.maxDiff = None + + screenshot_dir_en = Path("build/org.fake/fastlane/screenshots/en-US") + s1 = screenshot_dir_en / "iPhone 8+ @ iOS 16-1.png" + s2 = screenshot_dir_en / "iPad Pro 12.9\" 2gen @ iOS 16-1.png" 
+ s3 = screenshot_dir_en / "iPad Pro 12.9\" 2gen @ iOS 16-2.png" + screenshot_dir_de = Path("build/org.fake/fastlane/screenshots/de-DE") + s4 = screenshot_dir_de / "1_ipadPro129_1.1.png" + + cmock = mock.Mock() + with mock.patch("fdroidserver.update._strip_and_copy_image", cmock): + fdroidserver.update.copy_ios_screenshots_to_repo( + { + "en-US": { + "phoneScreenshots": [s1], + "tenInchScreenshots": [s2, s3], + }, + "de-DE": { + "tenInchScreenshots": [s4], + }, + }, + "org.fake", + ) + + self.assertListEqual( + cmock.call_args_list, + [ + mock.call( + 'build/org.fake/fastlane/screenshots/en-US/iPhone 8+ @ iOS 16-1.png', + 'repo/org.fake/en-US/phoneScreenshots/iPhone_8+_@_iOS_16-1.png', + ), + mock.call( + 'build/org.fake/fastlane/screenshots/en-US/iPad Pro 12.9" 2gen @ iOS 16-1.png', + 'repo/org.fake/en-US/tenInchScreenshots/iPad_Pro_12.9"_2gen_@_iOS_16-1.png', + ), + mock.call( + 'build/org.fake/fastlane/screenshots/en-US/iPad Pro 12.9" 2gen @ iOS 16-2.png', + 'repo/org.fake/en-US/tenInchScreenshots/iPad_Pro_12.9"_2gen_@_iOS_16-2.png', + ), + mock.call( + 'build/org.fake/fastlane/screenshots/de-DE/1_ipadPro129_1.1.png', + 'repo/org.fake/de-DE/tenInchScreenshots/1_ipadPro129_1.1.png', + ), + ], + ) + + +class TestGetIpaIcon(unittest.TestCase): + def test_get_ipa_icon(self): + self.maxDiff = None + + with mkdtemp() as tmpdir: + tmpdir = Path(tmpdir) + (tmpdir / 'OnionBrowser.xcodeproj').mkdir() + with open(tmpdir / 'OnionBrowser.xcodeproj/project.pbxproj', "w") as f: + f.write("") + icondir = tmpdir / "fake_icon.appiconset" + icondir.mkdir() + with open(icondir / "Contents.json", "w", encoding="utf-8") as f: + f.write(""" + {"images": [ + {"scale": "2x", "size": "128x128", "filename": "nope"}, + {"scale": "1x", "size": "512x512", "filename": "nope"}, + {"scale": "1x", "size": "16x16", "filename": "nope"}, + {"scale": "1x", "size": "32x32", "filename": "yep"} + ]} + """) + + pfp = mock.Mock(return_value="fake_icon") + with mock.patch("fdroidserver.update._parse_from_pbxproj", pfp): + p = fdroidserver.update._get_ipa_icon(tmpdir) + self.assertEqual(str(icondir / "yep"), p) + + +class TestParseFromPbxproj(unittest.TestCase): + def test_parse_from_pbxproj(self): + self.maxDiff = None + + with mkdtemp() as tmpdir: + with open(Path(tmpdir) / "asdf.pbxproj", 'w', encoding="utf-8") as f: + f.write(""" + 230jfaod=flc' + ASSETCATALOG_COMPILER_APPICON_NAME = MyIcon; + cm opa1c p[m + """) + v = fdroidserver.update._parse_from_pbxproj( + Path(tmpdir) / "asdf.pbxproj", + "ASSETCATALOG_COMPILER_APPICON_NAME" + ) + self.assertEqual(v, "MyIcon") diff --git a/tests/test_vcs.py b/tests/test_vcs.py new file mode 100755 index 00000000..a007feae --- /dev/null +++ b/tests/test_vcs.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +import os +import unittest + +from git import Repo + +import fdroidserver.common +import fdroidserver.metadata + +from .shared_test_code import VerboseFalseOptions, mkdtemp + + +class VCSTest(unittest.TestCase): + """For some reason the VCS classes are in fdroidserver/common.py""" + + def setUp(self): + self._td = mkdtemp() + os.chdir(self._td.name) + + def tearDown(self): + self._td.cleanup() + + def test_remote_set_head_can_fail(self): + # First create an upstream repo with one commit + upstream_repo = Repo.init("upstream_repo") + with open(upstream_repo.working_dir + "/file", 'w') as f: + f.write("Hello World!") + upstream_repo.index.add([upstream_repo.working_dir + "/file"]) + upstream_repo.index.commit("initial commit") + commitid = upstream_repo.head.commit.hexsha + + # Now clone it 
once manually, like gitlab runner gitlab-runner sets up a repo during CI + clone1 = Repo.init("clone1") + clone1.create_remote("upstream", "file://" + upstream_repo.working_dir) + clone1.remote("upstream").fetch() + clone1.head.reference = clone1.commit(commitid) + clone1.head.reset(index=True, working_tree=True) + self.assertTrue(clone1.head.is_detached) + + # and now we want to use this clone as a source repo for fdroid build + config = {} + os.mkdir("build") + config['sdk_path'] = 'MOCKPATH' + config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} + config['java_paths'] = {'fake': 'fake'} + fdroidserver.common.config = config + app = fdroidserver.metadata.App() + app.RepoType = 'git' + app.Repo = clone1.working_dir + app.id = 'com.gpl.rpg.AndorsTrail' + build = fdroidserver.metadata.Build() + build.commit = commitid + build.androidupdate = ['no'] + vcs, build_dir = fdroidserver.common.setup_vcs(app) + # force an init of the repo, the remote head error only occurs on the second gotorevision call + + fdroidserver.common.options = VerboseFalseOptions + vcs.gotorevision(build.commit) + fdroidserver.common.prepare_source( + vcs, + app, + build, + build_dir=build_dir, + srclib_dir="ignore", + extlib_dir="ignore", + ) + self.assertTrue(os.path.isfile("build/com.gpl.rpg.AndorsTrail/file")) diff --git a/tests/test_verify.py b/tests/test_verify.py new file mode 100755 index 00000000..e5a2f7c4 --- /dev/null +++ b/tests/test_verify.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 + +import json +import os +import shutil +import sys +import tempfile +import unittest +from pathlib import Path +from unittest.mock import patch + +from fdroidserver import verify + +TEST_APP_ENTRY = { + "1539780240.3885746": { + "local": { + "file": "unsigned/com.politedroid_6.apk", + "packageName": "com.politedroid", + "sha256": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", + "timestamp": 1234567.8900000, + "versionCode": "6", + "versionName": "1.5", + }, + "remote": { + "file": "tmp/com.politedroid_6.apk", + "packageName": "com.politedroid", + "sha256": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", + "timestamp": 1234567.8900000, + "versionCode": "6", + "versionName": "1.5", + }, + "url": "https://f-droid.org/repo/com.politedroid_6.apk", + "verified": True, + } +} + +basedir = Path(__file__).parent + + +class VerifyTest(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + os.chdir(self.tempdir.name) + self.repodir = Path('repo') + self.repodir.mkdir() + self.apk_reports_json = basedir / 'org.fdroid.fdroid_1019051.apk.json' + + def tearDown(self): + self.tempdir.cleanup() + + def test_get_verified_json_creation(self): + self.assertEqual({'packages': {}}, verify.get_verified_json('does-not-exist')) + + def test_get_verified_json_existing(self): + f = 'verified.json' + reports = {'packages': {'placeholder': {}}} + with open(f, 'w') as fp: + json.dump(reports, fp) + self.assertEqual(reports, verify.get_verified_json(f)) + + def test_get_verified_json_pull_in_one_report(self): + shutil.copy(self.apk_reports_json, self.tempdir.name) + with open(self.apk_reports_json) as fp: + reports = json.load(fp) + self.assertEqual( + {'packages': {'org.fdroid.fdroid': [reports['1708238023.6572325']]}}, + verify.get_verified_json('does-not-exist'), + ) + + def test_get_verified_json_ignore_corrupt(self): + f = 'verified.json' + with open(f, 'w') as fp: + fp.write("""{"packages": {"placeholder": {""") + shutil.copy(self.apk_reports_json, self.tempdir.name) + 
with open(self.apk_reports_json) as fp: + reports = json.load(fp) + self.assertEqual( + {'packages': {'org.fdroid.fdroid': [reports['1708238023.6572325']]}}, + verify.get_verified_json(f), + ) + + def test_get_verified_json_ignore_apk_reports(self): + """When an intact verified.json exists, it should ignore the .apk.json reports.""" + f = 'verified.json' + placeholder = {'packages': {'placeholder': {}}} + with open(f, 'w') as fp: + json.dump(placeholder, fp) + shutil.copy(self.apk_reports_json, self.tempdir.name) + with open(self.apk_reports_json) as fp: + json.load(fp) + self.assertEqual(placeholder, verify.get_verified_json(f)) + + @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') + @patch('fdroidserver.common.sha256sum') + def test_write_json_report(self, sha256sum): + sha256sum.return_value = ( + '70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d' + ) + os.mkdir('tmp') + os.mkdir('unsigned') + verified_json = Path('unsigned/verified.json') + packageName = 'com.politedroid' + apk_name = packageName + '_6.apk' + remote_apk = 'tmp/' + apk_name + unsigned_apk = 'unsigned/' + apk_name + # TODO common.use apk_strip_v1_signatures() on unsigned_apk + shutil.copy(basedir / 'repo' / apk_name, remote_apk) + shutil.copy(basedir / 'repo' / apk_name, unsigned_apk) + url = TEST_APP_ENTRY['1539780240.3885746']['url'] + + self.assertFalse(verified_json.exists()) + verify.write_json_report(url, remote_apk, unsigned_apk, {}) + self.assertTrue(verified_json.exists()) + # smoke check status JSON + with verified_json.open() as fp: + firstpass = json.load(fp) + + verify.write_json_report(url, remote_apk, unsigned_apk, {}) + with verified_json.open() as fp: + secondpass = json.load(fp) + + self.assertEqual(firstpass, secondpass) + + @patch('fdroidserver.common.sha256sum') + @patch('fdroidserver.verify.write_verified_json', lambda s: s) + def test_write_json_report_appid_json(self, sha256sum): + sha256sum.return_value = ( + '70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d' + ) + os.mkdir('tmp') + os.mkdir('unsigned') + appid = 'com.politedroid' + apk_name = f'{appid}_6.apk' + remote_apk = 'tmp/' + apk_name + unsigned_apk = 'unsigned/' + apk_name + shutil.copy(basedir / 'repo' / apk_name, remote_apk) + shutil.copy(basedir / 'repo' / apk_name, unsigned_apk) + url = TEST_APP_ENTRY['1539780240.3885746']['url'] + with open(f'unsigned/{apk_name}.json', 'w') as fp: + json.dump(TEST_APP_ENTRY, fp) + + # make a fake existing report where the newer one broke verifiability + with open(f'unsigned/{appid}_16.apk.json', 'w') as fp: + json.dump( + { + "1444444444.4444444": { + 'local': {'versionCode': 16}, + 'verified': False, + }, + "1333333333.3333333": { + 'local': {'versionCode': 16}, + 'verified': True, + }, + }, + fp, + ) + + verify.write_json_report(url, remote_apk, unsigned_apk, {'fake': 'fail'}) + with open(f'unsigned/{appid}.json') as fp: + self.assertEqual( + { + 'apkReports': [ + 'unsigned/com.politedroid_6.apk.json', + 'unsigned/com.politedroid_16.apk.json', + ], + 'lastRunVerified': False, + }, + json.load(fp), + ) diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png new file mode 100644 index 00000000..0d5e3591 Binary files /dev/null and 
b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png differ diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png new file mode 100644 index 00000000..17a31d54 Binary files /dev/null and b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png differ diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png new file mode 100644 index 00000000..717be319 Binary files /dev/null and b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png differ diff --git a/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml b/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml new file mode 100644 index 00000000..a86e2c53 --- /dev/null +++ b/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml @@ -0,0 +1,25 @@ +Categories: + - Phone & SMS + - System +License: GPL-3.0-only +AuthorEmail: welefon@jolo.software +SourceCode: https://codeberg.org/wivewa/wivewa-dialer-android +IssueTracker: https://codeberg.org/wivewa/wivewa-dialer-android/issues + +AutoName: Welefon + +RepoType: git +Repo: https://codeberg.org/wivewa/wivewa-dialer-android.git + +Builds: + - versionName: 1.7.0 + versionCode: 13 + commit: 3550193fa6b6f7836876f2ca9bf5819a34eef404 + subdir: app + gradle: + - yes + +AutoUpdateMode: Version +UpdateCheckMode: Tags +CurrentVersion: 1.7.0 +CurrentVersionCode: 13 diff --git a/tests/triple-t-2/build/org.piwigo.android/app/.gitignore b/tests/triple-t-2/build/org.piwigo.android/app/.gitignore new file mode 100644 index 00000000..796b96d1 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/tests/triple-t-2/build/org.piwigo.android/app/build.gradle b/tests/triple-t-2/build/org.piwigo.android/app/build.gradle new file mode 100644 index 00000000..6b8dd5de --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/build.gradle @@ -0,0 +1,151 @@ +plugins { + id("com.android.application") + id("com.github.triplet.play") version "2.0.0" +} + +def isCi = "true" == System.getenv("CI") +def preDexEnabled = "true" == System.getProperty("pre-dex", "true") + + +if (project.file('../PiwigoSigning.properties').exists()) { + Properties props = new Properties() + props.load(new FileInputStream(file('../PiwigoSigning.properties'))) + + android { + signingConfigs { + release { + storeFile file("../piwigo_android_keystore.jks") + storePassword props['keystore.password'] + keyAlias 'publishing' + keyPassword props['key.password'] + } + localRelease { + storeFile file("${System.properties['user.home']}${File.separator}/.android_keystore_default") + storePassword props['keystore_default.password'] + keyAlias 'defaultRelease' + keyPassword props['key_default.password'] + } + + } + + buildTypes { + release { + signingConfig signingConfigs.release + } + localRelease { + signingConfig signingConfigs.localRelease + } + } + } + play { + defaultToAppBundles = true + track = 'beta' + } +} + +android { + compileSdkVersion 29 + defaultConfig { + applicationId "org.piwigo.android" + minSdkVersion 16 + targetSdkVersion 29 + versionCode 95 + versionName "0.9.5-beta" + multiDexEnabled true + } + 
buildTypes { + debug { + applicationIdSuffix ".debug" + versionNameSuffix "-debug" + } + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + flavorDimensions "default" + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + lintOptions { + disable 'InvalidPackage' + abortOnError false + } + dexOptions { + preDexLibraries = preDexEnabled && !isCi + + } + dataBinding { + enabled = true + } + testOptions { + unitTests { + returnDefaultValues = true + includeAndroidResources = true + } + } + /* Triple-T GPP is applied as plugin in all cases, so we need to configure it always */ + play { + serviceAccountCredentials = file("../upload_key.json") + } +} + +def daggerVersion = '2.23.2' +def okhttpVersion = '3.11.0' +def retrofitVersion = '2.6.1' +def assertjVersion = '1.2.0' +def acraVersion = '5.4.0' + +dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation 'androidx.appcompat:appcompat:1.1.0' + implementation 'androidx.annotation:annotation:1.1.0' + implementation 'com.google.android.material:material:1.0.0' + implementation 'androidx.cardview:cardview:1.0.0' + implementation 'com.android.support:multidex:1.0.3' + implementation "com.google.dagger:dagger:${daggerVersion}" + implementation 'androidx.lifecycle:lifecycle-extensions:2.1.0' + annotationProcessor 'androidx.lifecycle:lifecycle-compiler:2.1.0' + + annotationProcessor "com.google.dagger:dagger-compiler:${daggerVersion}" + implementation "com.google.dagger:dagger-android:${daggerVersion}" + implementation "com.google.dagger:dagger-android-support:${daggerVersion}" + annotationProcessor "com.google.dagger:dagger-android-processor:${daggerVersion}" + implementation "com.squareup.okhttp3:okhttp:${okhttpVersion}" + implementation "com.squareup.okhttp3:logging-interceptor:${okhttpVersion}" + implementation "com.squareup.retrofit2:retrofit:${retrofitVersion}" + implementation "com.squareup.retrofit2:converter-gson:${retrofitVersion}" + implementation "com.squareup.retrofit2:adapter-rxjava:${retrofitVersion}" + implementation 'com.squareup.picasso:picasso:2.5.2' + implementation 'com.jakewharton.picasso:picasso2-okhttp3-downloader:1.1.0' + implementation 'io.reactivex:rxjava:1.3.2' + implementation 'io.reactivex:rxandroid:1.2.1' + implementation 'com.google.guava:guava:24.1-jre' + annotationProcessor 'com.google.guava:guava:24.1-jre' + implementation 'org.apache.commons:commons-lang3:3.8.1' + + implementation "ch.acra:acra-mail:$acraVersion" + implementation "ch.acra:acra-dialog:$acraVersion" + + implementation 'com.github.jorgecastilloprz:fabprogresscircle:1.01@aar' + implementation "com.leinardi.android:speed-dial:3.0.0" + implementation 'com.github.tingyik90:snackprogressbar:6.1.1' + implementation 'org.greenrobot:eventbus:3.1.1' + /* Don't forget to add to string libraries if you add a library here. 
*/ + + debugImplementation 'com.squareup.leakcanary:leakcanary-android:2.0-beta-3' + + testImplementation 'junit:junit:4.12' + testImplementation 'org.robolectric:robolectric:4.3' + testImplementation("com.squareup.assertj:assertj-android:${assertjVersion}") { + exclude group: 'com.android.support' + } + testAnnotationProcessor "com.google.dagger:dagger-compiler:${daggerVersion}" + testImplementation 'androidx.arch.core:core-testing:2.1.0' + testImplementation 'org.mockito:mockito-core:2.19.0' + testImplementation 'com.google.guava:guava:24.1-jre' + testImplementation 'androidx.appcompat:appcompat:1.1.0' + testAnnotationProcessor 'com.google.guava:guava:24.1-jre' + testImplementation 'com.google.code.findbugs:jsr305:3.0.2' +} diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml new file mode 100644 index 00000000..fe7ac7bd --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml @@ -0,0 +1,5 @@ + + + org.piwigo.account_debug + + diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml new file mode 100644 index 00000000..093dda0e --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml @@ -0,0 +1,3 @@ + + Piwigo Debug + diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java b/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java new file mode 100644 index 00000000..6037e1fb --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java @@ -0,0 +1,115 @@ +/* + * Piwigo for Android + * Copyright (C) 2016-2017 Piwigo Team http://piwigo.org + * Copyright (C) 2018 Raphael Mack http://www.raphael-mack.de + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package org.piwigo; + +import android.app.Activity; +import android.app.Application; +import android.app.Service; +import android.content.Context; +import androidx.databinding.DataBindingUtil; +import androidx.multidex.MultiDex; + +import org.acra.ACRA; +import org.acra.ReportField; +import org.acra.annotation.AcraCore; +import org.acra.annotation.AcraDialog; +import org.acra.annotation.AcraMailSender; +import org.acra.data.StringFormat; +import org.piwigo.helper.DialogHelper; +import org.piwigo.helper.NetworkHelper; +import org.piwigo.helper.NotificationHelper; +import org.piwigo.internal.di.component.ApplicationComponent; +import org.piwigo.internal.di.component.BindingComponent; +import org.piwigo.internal.di.component.DaggerApplicationComponent; +import org.piwigo.internal.di.component.DaggerBindingComponent; +import org.piwigo.internal.di.module.ApplicationModule; + +import javax.inject.Inject; + +import dagger.android.AndroidInjector; +import dagger.android.DispatchingAndroidInjector; +import dagger.android.HasActivityInjector; +import dagger.android.HasServiceInjector; + +@AcraCore(reportContent = { ReportField.APP_VERSION_CODE, + ReportField.APP_VERSION_NAME, + ReportField.USER_COMMENT, + ReportField.SHARED_PREFERENCES, + ReportField.ANDROID_VERSION, + ReportField.CUSTOM_DATA, + ReportField.STACK_TRACE, + ReportField.BUILD, + ReportField.BUILD_CONFIG, + ReportField.CRASH_CONFIGURATION, + ReportField.DISPLAY + }, + alsoReportToAndroidFramework = true, + reportFormat = StringFormat.KEY_VALUE_LIST +) +@AcraMailSender(mailTo = "android@piwigo.org") +@AcraDialog(resCommentPrompt = R.string.crash_dialog_comment_prompt, + resText = R.string.crash_dialog_text) +public class PiwigoApplication extends Application implements HasActivityInjector, HasServiceInjector { + + @Inject DispatchingAndroidInjector dispatchingAndroidInjector; + @Inject DispatchingAndroidInjector dispatchingAndroidServiceInjector; + + private ApplicationComponent applicationComponent; + + @Override public void onCreate() { + super.onCreate(); + + new NetworkHelper(); + new NotificationHelper(getApplicationContext()); + new DialogHelper() +; initializeDependencyInjection(); + } + + @Override + protected void attachBaseContext(Context base) { + super.attachBaseContext(base); + MultiDex.install(base); + ACRA.init(this); + } + + @Override public AndroidInjector activityInjector() { + return dispatchingAndroidInjector; + } + + private void initializeDependencyInjection() { + applicationComponent = DaggerApplicationComponent.builder() + .applicationModule(new ApplicationModule(this)) + .build(); + applicationComponent.inject(this); + + BindingComponent bindingComponent = DaggerBindingComponent.builder() + .applicationComponent(applicationComponent) + .build(); + DataBindingUtil.setDefaultComponent(bindingComponent); + } + + /** + * Returns an {@link AndroidInjector} of {@link Service}s. 
+ */ + @Override + public AndroidInjector serviceInjector() { + return dispatchingAndroidServiceInjector; + } +} \ No newline at end of file diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt new file mode 100644 index 00000000..f0291ce8 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt @@ -0,0 +1 @@ +android@piwigo.org diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt new file mode 100644 index 00000000..40046686 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt @@ -0,0 +1 @@ +https://www.piwigo.org diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt new file mode 100644 index 00000000..beb9970b --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt @@ -0,0 +1 @@ +en-US diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt new file mode 100644 index 00000000..69efe2c6 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt @@ -0,0 +1,5 @@ +Piwigo ist eine Android-APP für die freie Open Source-Foto-Hosting-Plattform Piwigo. Mit dieser App können Sie sich selbst gehostete Galerie ansehen und Fotos von Ihrem Smart-Gerät hochladen. + +Piwigo wird von einer aktiven Community von Benutzern und Entwicklern bereitgestellt. + +Piwigo ermöglicht eine eigene Fotogalerie im Internet zu erstellen und bietet viele leistungsstarke Funktionen wie Alben, Tags, Geolokalisierung, viele Anpassungsstufen, Upload von Besuchern, Privatsphäre, Kalender oder Statistiken. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt new file mode 100644 index 00000000..4ea23371 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt @@ -0,0 +1 @@ +Greifen Sie auf die Bilder Ihrer Piwigo-Foto-Gallerie zu. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt new file mode 100644 index 00000000..e0394ea1 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt @@ -0,0 +1 @@ +Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt new file mode 100644 index 00000000..2d3e92ec --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt @@ -0,0 +1,5 @@ +Piwigo is a native Android Application for the free and open source photo hosting platform Piwigo. With this app you can browse you self-hosted gallery and upload photos from your smart device. + +Piwigo is built by an active community of users and developers. 
+ +Piwigo empowers you to create your own photo gallery on the web and includes many powerful features such as albums, tags, geolocation, many levels of customization, upload by visitors, privacy, calendar or statistics. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png new file mode 100644 index 00000000..e2d5035a Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png new file mode 100644 index 00000000..02f745b9 Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg new file mode 100644 index 00000000..66ef322e Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg new file mode 100644 index 00000000..c21b517b Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg new file mode 100644 index 00000000..5cacc9b1 Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg new file mode 100644 index 00000000..27a60e30 Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg new file mode 100644 index 00000000..6dc4cdf1 Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png 
b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png new file mode 100644 index 00000000..c86cd9fd Binary files /dev/null and b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt new file mode 100644 index 00000000..bd96f636 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt @@ -0,0 +1 @@ +Access photos in your Piwigo photo gallery. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..e0394ea1 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt new file mode 100644 index 00000000..65654165 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt @@ -0,0 +1,5 @@ +Piwigo est une application Android native pour la plate-forme d'hébergement photo gratuite et open source Piwigo. Avec cette application, vous pouvez parcourir votre galerie auto-hébergée et télécharger des photos depuis votre smartphone. + +Piwigo est développé par une communauté active d'utilisateurs et de développeurs. + +Piwigo vous permet de créer votre propre galerie de photos sur le Web et comprend de nombreuses fonctionnalités puissantes telles que des albums, des tags, la géolocalisation, de nombreux niveaux de personnalisation, le téléchargement par les visiteurs, la confidentialité, un calendrier ou de statistiques. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt new file mode 100644 index 00000000..90d5f1f5 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt @@ -0,0 +1 @@ +Accédez aux photos dans votre galerie de photos Piwigo. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt new file mode 100644 index 00000000..e0394ea1 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt @@ -0,0 +1 @@ +Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt new file mode 100644 index 00000000..2d3e92ec --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt @@ -0,0 +1,5 @@ +Piwigo is a native Android Application for the free and open source photo hosting platform Piwigo. With this app you can browse you self-hosted gallery and upload photos from your smart device. 
+ +Piwigo is built by an active community of users and developers. + +Piwigo empowers you to create your own photo gallery on the web and includes many powerful features such as albums, tags, geolocation, many levels of customization, upload by visitors, privacy, calendar or statistics. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt new file mode 100644 index 00000000..bd96f636 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt @@ -0,0 +1 @@ +Access photos in your Piwigo photo gallery. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt new file mode 100644 index 00000000..e0394ea1 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt @@ -0,0 +1 @@ +Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt new file mode 100644 index 00000000..84d85227 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt @@ -0,0 +1,7 @@ +Dies ist eine Beta-Version, mit großen Verbesserungen, aber noch nicht für den produktiven Einsatz vorgesehen. Seien Sie vorsichtig und stellen Sie sicher, ein vollständiges Backup zu haben. + +In dieser Version haben wir +- die Unterstützung für Android 4.0 und 4.1 entfernt +- neues Design und weitere Sprachen hinzugefügt +- Unterstützung der Erstellung von Alben +- automatische Korrektur der Galerie-Seite hinzugefügt (falls möglich wird automatisch HTTPS verwendet) diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt new file mode 100644 index 00000000..e2f915ca --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt @@ -0,0 +1,9 @@ +This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. + +In this version we +- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" +- adjusted the UI to new Piwigo style +- added German, French and initial Kannada translation +- support creation of albums +- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) + diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt new file mode 100644 index 00000000..699a1549 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt @@ -0,0 +1,9 @@ +This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. 
+ +Dans cette version nous avons : +- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" +- adjusted the UI to new Piwigo style +- added German, French and initial Kannada translation +- support creation of albums +- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) + diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt new file mode 100644 index 00000000..e2f915ca --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt @@ -0,0 +1,9 @@ +This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. + +In this version we +- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" +- adjusted the UI to new Piwigo style +- added German, French and initial Kannada translation +- support creation of albums +- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) + diff --git a/tests/triple-t-2/build/org.piwigo.android/build.gradle b/tests/triple-t-2/build/org.piwigo.android/build.gradle new file mode 100644 index 00000000..450ccbe8 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/build.gradle @@ -0,0 +1,21 @@ +buildscript { + repositories { + jcenter() + mavenCentral() + maven { url 'https://maven.google.com' } + google() + } + dependencies { + classpath 'com.android.tools.build:gradle:3.5.0' + } +} + +allprojects { + repositories { + google() + jcenter() + mavenCentral() + maven { url 'https://maven.google.com' } + maven { url "https://jitpack.io" } + } +} diff --git a/tests/triple-t-2/build/org.piwigo.android/settings.gradle b/tests/triple-t-2/build/org.piwigo.android/settings.gradle new file mode 100644 index 00000000..3cc36ec2 --- /dev/null +++ b/tests/triple-t-2/build/org.piwigo.android/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'Piwigo-Android' +include ':app' diff --git a/tests/triple-t-2/metadata/org.piwigo.android.yml b/tests/triple-t-2/metadata/org.piwigo.android.yml new file mode 100644 index 00000000..77aaa3eb --- /dev/null +++ b/tests/triple-t-2/metadata/org.piwigo.android.yml @@ -0,0 +1,29 @@ +Categories: + - Graphics + - Multimedia +License: GPL-3.0-or-later +AuthorName: Piwigo Mobile Apps Team +AuthorEmail: android@piwigo.org +WebSite: https://piwigo.org/ +SourceCode: https://github.com/Piwigo/Piwigo-Android +IssueTracker: https://github.com/Piwigo/Piwigo-Android/issues +Translation: https://crowdin.com/project/piwigo-android + +AutoName: Piwigo + +RepoType: git +Repo: https://github.com/Piwigo/Piwigo-Android + +Builds: + - versionName: 0.9.5-beta + versionCode: 95 + commit: v0.9.5 + subdir: app + gradle: + - yes + +AutoUpdateMode: Version v%v +UpdateCheckMode: Tags +UpdateCheckIgnore: (alpha|beta|rc|RC|dev) +CurrentVersion: 0.9.5-beta +CurrentVersionCode: 95 diff --git a/tests/triple-t-anysoftkeyboard/.gitignore b/tests/triple-t-anysoftkeyboard/.gitignore new file mode 100644 index 00000000..0aba28e9 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/.gitignore @@ -0,0 +1 @@ +!build/ diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt 
b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..6e75c228 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +Dutch for AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..7c5eeaeb --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle new file mode 100644 index 00000000..523aadb7 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle @@ -0,0 +1,77 @@ +rootProject.name = 'AnySoftKeyboard' +include ':deployment' + +include ':api' + +include ':addons:base' +include ':addons:languages' +include ':addons:languages:english', ':addons:languages:english:pack' + +//list of packs should be alphabetized-ordered +include ':addons:languages:afrikaans_oss:pack', ':addons:languages:afrikaans_oss:apk' +include ':addons:languages:armenian2:pack', ':addons:languages:armenian2:apk' +include ":addons:languages:belarusian:pack", ":addons:languages:belarusian:apk" +include ':addons:languages:brazilian:pack', ':addons:languages:brazilian:apk' +include ':addons:languages:catalan:pack', ':addons:languages:catalan:apk' +include ':addons:languages:coptic:pack', ':addons:languages:coptic:apk' +include ':addons:languages:czech:pack', ':addons:languages:czech:apk' +include ':addons:languages:english:pack', ':addons:languages:english:apk' +include ':addons:languages:esperanto:pack', ':addons:languages:esperanto:apk' +include ':addons:languages:finnish:pack', ':addons:languages:finnish:apk' +include ':addons:languages:french:pack', ':addons:languages:french:apk' +include ':addons:languages:greek:pack', ':addons:languages:greek:apk' +include ':addons:languages:hebrew:pack', ':addons:languages:hebrew:apk' +include ':addons:languages:indonesian:pack', ':addons:languages:indonesian:apk' +include ':addons:languages:italian:pack', ':addons:languages:italian:apk' +include ':addons:languages:lithuanian:pack', ':addons:languages:lithuanian:apk' +include ':addons:languages:nias:pack', ':addons:languages:nias:apk' +include ':addons:languages:ossturkish:pack', ':addons:languages:ossturkish:apk' +include ':addons:languages:persian:pack', ':addons:languages:persian:apk' +include ':addons:languages:piedmontese:pack', ':addons:languages:piedmontese:apk' +include ':addons:languages:romanian:pack', ':addons:languages:romanian:apk' +include ':addons:languages:russian2:pack', ':addons:languages:russian2:apk' +include ':addons:languages:sardinian:pack', ':addons:languages:sardinian:apk' +include ':addons:languages:serbian:pack', ':addons:languages:serbian:apk' +include ':addons:languages:spain:pack', ':addons:languages:spain:apk' +include ':addons:languages:tamazight:pack', ':addons:languages:tamazight:apk' +include ':addons:languages:arabic:pack', 
':addons:languages:arabic:apk' +include ':addons:languages:danish:pack', ':addons:languages:danish:apk' +include ':addons:languages:basque:pack', ':addons:languages:basque:apk' +include ':addons:languages:bulgarian:pack', ':addons:languages:bulgarian:apk' +include ':addons:languages:german:pack', ':addons:languages:german:apk' +include ':addons:languages:croatian:pack', ':addons:languages:croatian:apk' +include ':addons:languages:georgian:pack', ':addons:languages:georgian:apk' +include ':addons:languages:latvian:pack', ':addons:languages:latvian:apk' +include ':addons:languages:kurdish:pack', ':addons:languages:kurdish:apk' +include ':addons:languages:hungarian:pack', ':addons:languages:hungarian:apk' +include ':addons:languages:kachin:pack', ':addons:languages:kachin:apk' +include ':addons:languages:dutch:pack', ':addons:languages:dutch:apk' +include ':addons:languages:luxembourgish:pack', ':addons:languages:luxembourgish:apk' +include ':addons:languages:norwegian:pack', ':addons:languages:norwegian:apk' +include ':addons:languages:polish:pack', ':addons:languages:polish:apk' +include ':addons:languages:rusyn:pack', ':addons:languages:rusyn:apk' +include ':addons:languages:sinhala:pack', ':addons:languages:sinhala:apk' +include ':addons:languages:hindi:pack', ':addons:languages:hindi:apk' +include ':addons:languages:thai:pack', ':addons:languages:thai:apk' +include ':addons:languages:portuguese:pack', ':addons:languages:portuguese:apk' + +//list of themes should be alphabetized-ordered +include ':addons:themes' +include ':addons:themes:classic_pc:pack', ':addons:themes:classic_pc:apk' +include ':addons:themes:ics:pack', ':addons:themes:ics:apk' +include ':addons:themes:israel64:pack', ':addons:themes:israel64:apk' +include ':addons:themes:three_d:pack', ':addons:themes:three_d:apk' + +//list of quick-text should be alphabetized-ordered +include ':addons:quicktexts' +include ':addons:quicktexts:bbcodes:pack', ':addons:quicktexts:bbcodes:apk' + +include ':ime' +include ':ime:base', ':ime:base-rx', ':ime:base-test', ':ime:prefs' +include ':ime:remote', ':ime:fileprovider' +include ':ime:addons' +include ':ime:dictionaries', ':ime:dictionaries:jnidictionaryv1', ':ime:dictionaries:jnidictionaryv2', ':ime:nextword' +include ':ime:pixel', ':ime:overlay' +include 'ime:voiceime' +include ':ime:app' + diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..6e75c228 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +Dutch for AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..7c5eeaeb --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle new file mode 100644 index 
00000000..523aadb7 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle @@ -0,0 +1,77 @@ +rootProject.name = 'AnySoftKeyboard' +include ':deployment' + +include ':api' + +include ':addons:base' +include ':addons:languages' +include ':addons:languages:english', ':addons:languages:english:pack' + +//list of packs should be alphabetized-ordered +include ':addons:languages:afrikaans_oss:pack', ':addons:languages:afrikaans_oss:apk' +include ':addons:languages:armenian2:pack', ':addons:languages:armenian2:apk' +include ":addons:languages:belarusian:pack", ":addons:languages:belarusian:apk" +include ':addons:languages:brazilian:pack', ':addons:languages:brazilian:apk' +include ':addons:languages:catalan:pack', ':addons:languages:catalan:apk' +include ':addons:languages:coptic:pack', ':addons:languages:coptic:apk' +include ':addons:languages:czech:pack', ':addons:languages:czech:apk' +include ':addons:languages:english:pack', ':addons:languages:english:apk' +include ':addons:languages:esperanto:pack', ':addons:languages:esperanto:apk' +include ':addons:languages:finnish:pack', ':addons:languages:finnish:apk' +include ':addons:languages:french:pack', ':addons:languages:french:apk' +include ':addons:languages:greek:pack', ':addons:languages:greek:apk' +include ':addons:languages:hebrew:pack', ':addons:languages:hebrew:apk' +include ':addons:languages:indonesian:pack', ':addons:languages:indonesian:apk' +include ':addons:languages:italian:pack', ':addons:languages:italian:apk' +include ':addons:languages:lithuanian:pack', ':addons:languages:lithuanian:apk' +include ':addons:languages:nias:pack', ':addons:languages:nias:apk' +include ':addons:languages:ossturkish:pack', ':addons:languages:ossturkish:apk' +include ':addons:languages:persian:pack', ':addons:languages:persian:apk' +include ':addons:languages:piedmontese:pack', ':addons:languages:piedmontese:apk' +include ':addons:languages:romanian:pack', ':addons:languages:romanian:apk' +include ':addons:languages:russian2:pack', ':addons:languages:russian2:apk' +include ':addons:languages:sardinian:pack', ':addons:languages:sardinian:apk' +include ':addons:languages:serbian:pack', ':addons:languages:serbian:apk' +include ':addons:languages:spain:pack', ':addons:languages:spain:apk' +include ':addons:languages:tamazight:pack', ':addons:languages:tamazight:apk' +include ':addons:languages:arabic:pack', ':addons:languages:arabic:apk' +include ':addons:languages:danish:pack', ':addons:languages:danish:apk' +include ':addons:languages:basque:pack', ':addons:languages:basque:apk' +include ':addons:languages:bulgarian:pack', ':addons:languages:bulgarian:apk' +include ':addons:languages:german:pack', ':addons:languages:german:apk' +include ':addons:languages:croatian:pack', ':addons:languages:croatian:apk' +include ':addons:languages:georgian:pack', ':addons:languages:georgian:apk' +include ':addons:languages:latvian:pack', ':addons:languages:latvian:apk' +include ':addons:languages:kurdish:pack', ':addons:languages:kurdish:apk' +include ':addons:languages:hungarian:pack', ':addons:languages:hungarian:apk' +include ':addons:languages:kachin:pack', ':addons:languages:kachin:apk' +include ':addons:languages:dutch:pack', ':addons:languages:dutch:apk' +include ':addons:languages:luxembourgish:pack', ':addons:languages:luxembourgish:apk' +include ':addons:languages:norwegian:pack', ':addons:languages:norwegian:apk' +include ':addons:languages:polish:pack', ':addons:languages:polish:apk' +include 
':addons:languages:rusyn:pack', ':addons:languages:rusyn:apk' +include ':addons:languages:sinhala:pack', ':addons:languages:sinhala:apk' +include ':addons:languages:hindi:pack', ':addons:languages:hindi:apk' +include ':addons:languages:thai:pack', ':addons:languages:thai:apk' +include ':addons:languages:portuguese:pack', ':addons:languages:portuguese:apk' + +//list of themes should be alphabetized-ordered +include ':addons:themes' +include ':addons:themes:classic_pc:pack', ':addons:themes:classic_pc:apk' +include ':addons:themes:ics:pack', ':addons:themes:ics:apk' +include ':addons:themes:israel64:pack', ':addons:themes:israel64:apk' +include ':addons:themes:three_d:pack', ':addons:themes:three_d:apk' + +//list of quick-text should be alphabetized-ordered +include ':addons:quicktexts' +include ':addons:quicktexts:bbcodes:pack', ':addons:quicktexts:bbcodes:apk' + +include ':ime' +include ':ime:base', ':ime:base-rx', ':ime:base-test', ':ime:prefs' +include ':ime:remote', ':ime:fileprovider' +include ':ime:addons' +include ':ime:dictionaries', ':ime:dictionaries:jnidictionaryv1', ':ime:dictionaries:jnidictionaryv2', ':ime:nextword' +include ':ime:pixel', ':ime:overlay' +include 'ime:voiceime' +include ':ime:app' + diff --git a/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml b/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml new file mode 100644 index 00000000..707bb096 --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml @@ -0,0 +1,76 @@ +Categories: + - Writing +License: Apache-2.0 +SourceCode: https://github.com/AnySoftKeyboard/AnySoftKeyboard/ +IssueTracker: https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues +Donate: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=KDYBGNUNMMN94&lc=US&item_name=AnySoftKeyboard + +AutoName: AnySoftKeyboard - Dutch Language Pack +Description: |- + This app has been deprecated in favor of https://f-droid.org/en/packages/com.anysoftkeyboard.languagepack.dutch_oss/ + + This is the Dutch language pack for AnySoftKeyboard. It offers an extensive + Dutch dictionary and optimised keyboard. In order to use it, first install + https://f-droid.org/packages/com.menny.android.anysoftkeyboard – a practical input method for Android + smartphones and tablets. + + The dictionary is based on the word list from Stichting OpenTaal and has the + Quality Mark Spelling (Keurmerk Spelling) of the Dutch Language Union + (Taalunie). This language pack has been created by Stichting OpenTaal and can be + installed by all without any costs. The language-specific files in this language + pack have a dual license. Both the BSD 2-Clause License as the Creative Commons, + Attribution 4.0 (unported) apply. + + Please, help us create free and open Dutch writing tools. 
Donate tax free to our + foundation at https://www.opentaal.org/vrienden-van-opentaal + +RepoType: git +Repo: https://github.com/AnySoftKeyboard/AnySoftKeyboard.git + +Builds: + - versionName: '1.0' + versionCode: 1 + disable: wait for upstream + commit: c5efd5986ce9beec299919f7ae9f174abd33b156 + gradle: + - yes + forceversion: true + preassemble: + - :makeDictionary + + - versionName: '1.4' + versionCode: 5 + commit: dutch-1.4 + gradle: + - yes + forceversion: true + preassemble: + - :makeDictionary + + - versionName: 4.0.1396 + versionCode: 2908 + commit: 1.10-r4 + subdir: addons/languages/dutch + sudo: + - apt-get update || apt-get update + - apt-get install -t bullseye openjdk-11-jdk-headless openjdk-11-jre-headless + - update-alternatives --auto java + gradle: + - yes + output: ../../../outputs/apks/release/addons-languages-*-$$VERCODE$$.apk + prebuild: + - echo "//gradleVersion = '6.8.3'" | tee build.gradle + - sed -i -e "s calculateApplicationId(project) 'com.anysoftkeyboard.languagepack.dutch' + " ../../../gradle/apk_module.gradle + - sed -i -e '/Data\.versionCode/ c versionCode $$VERCODE$$' -e '/Data\.versionName/ + c versionName "$$VERSION$$"' ../../../gradle/versioning_apk.gradle + scanignore: + - addons/languages/*/pack/dictionary/*.gz + ndk: r14b + +MaintainerNotes: Package ID is now com.anysoftkeyboard.languagepack.dutch_oss + +AutoUpdateMode: None +UpdateCheckMode: None +CurrentVersion: 4.0.1396 +CurrentVersionCode: 2908 diff --git a/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml b/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml new file mode 100644 index 00000000..4dc8d7ac --- /dev/null +++ b/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml @@ -0,0 +1,523 @@ +Categories: + - Writing +License: Apache-2.0 +WebSite: https://anysoftkeyboard.github.io +SourceCode: https://github.com/AnySoftKeyboard/AnySoftKeyboard +IssueTracker: https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues +Translation: https://crowdin.com/project/anysoftkeyboard +Changelog: https://github.com/AnySoftKeyboard/AnySoftKeyboard/releases +Donate: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=KDYBGNUNMMN94&lc=US&item_name=AnySoftKeyboard + +AutoName: AnySoftKeyboard + +RepoType: git +Repo: https://github.com/AnySoftKeyboard/AnySoftKeyboard.git + +Builds: + - versionName: '20120528' + versionCode: 74 + commit: d6c2f7448 + srclibs: + - AnySoftKeyboard-API@78768bc479 + prebuild: sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' + project.properties + buildjni: + - yes + ndk: r12b + + - versionName: 20120814-eye-candy + versionCode: 76 + disable: doesn't build + commit: unknown - see disabled + srclibs: + - AnySoftKeyboard-API@78768bc479 + prebuild: sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' + project.properties + buildjni: + - yes + ndk: r12b + + - versionName: 20130106-eye-candy + versionCode: 85 + commit: a7723e1db5 + srclibs: + - AnySoftKeyboard-API@b21d8907 + prebuild: + - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + - wget https://softkeyboard.googlecode.com/svn-history/r544/trunk/project/dict_creation/makedict_Linux + - wget https://github.com/AnySoftKeyboard/AnySoftKeyboardTools/blob/HEAD/makedict/makedict.jar?raw=true + -O makedict.jar + - chmod +x makedict_Linux + - ./makedict_Linux 
dict/words.xml + buildjni: + - yes + ndk: r12b + + - versionName: 20130222-skinny-eye-candy + versionCode: 95 + commit: 50daad0b0a + srclibs: + - AnySoftKeyboard-API@b21d8907 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: 20130501-skinny-eye-candy-post-birthday + versionCode: 98 + commit: 6f51b8a9d + srclibs: + - AnySoftKeyboard-API@2c864957 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: '20130528' + versionCode: 102 + commit: 72fd519 + srclibs: + - AnySoftKeyboard-API@8d8a958e17 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: '20130709' + versionCode: 106 + commit: f1ba2de + srclibs: + - AnySoftKeyboard-API@30851236d7 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: '20130807' + versionCode: 107 + disable: not published + commit: unknown - see disabled + + - versionName: '20130919' + versionCode: 111 + commit: 2a68963f + srclibs: + - AnySoftKeyboard-API@30851236d7 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: '20131101' + versionCode: 114 + disable: no longer uses ant, prebuilts + commit: 82538d456f8 + srclibs: + - AnySoftKeyboard-API@08b452ff4 + - AnySoftKeyboardTools@73e9a09496 + prebuild: + - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties + - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml + - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ + res/raw/words_1.dict + buildjni: + - yes + ndk: r12b + + - versionName: 1.2.20140705 + versionCode: 140 + disable: build words dict from source, no idea what version name/code it is + commit: 1.5-r4.1 + gradle: + - yes + rm: + - res/raw/words_1.dict + + - versionName: 1.8.84 + versionCode: 1409 + commit: 1.8-r1 + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' build.gradle + gradle: + - yes + rm: + - src/main/res/raw/words_1.dict + preassemble: + - makeEnglishDictionary + + - versionName: 1.8.133 + versionCode: 1458 + disable: broken subdir + commit: 1.8-r3 + subdir: app + init: sed -i -e '/fabric/d' -e 
'/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + rm: + - app/src/main/res/raw/words_1.dict + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.146 + versionCode: 1471 + commit: 1.8-r4 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + rm: + - app/src/main/res/raw/words_1.dict + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.190 + versionCode: 1515 + disable: weird version/vc-issue + commit: 1.8-r5.1 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + rm: + - app/src/main/res/raw/words_1.dict + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.195 + versionCode: 1520 + commit: 1.8-r5.2 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + rm: + - app/src/main/res/raw/words_1.dict + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.253 + versionCode: 1578 + commit: 1.8-r6 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_1.dict + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.329 + versionCode: 1654 + commit: 1.8-r7 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_1.dict + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.401 + versionCode: 1726 + commit: 1.8-r7.1 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_1.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.503 + versionCode: 1828 + commit: 1.8-r8 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.585 + versionCode: 1910 + commit: 1.8-r9 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.735 + versionCode: 2060 + commit: 1.8-r10 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.844 + versionCode: 2169 + commit: 1.8-r11 + 
subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.1241 + versionCode: 2566 + commit: 1.8-r12 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + preassemble: + - :makeEnglishDictionary + + - versionName: 1.8.1333 + versionCode: 2658 + commit: 1.8-r12.2 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle + - sed -i -e '/crashlytics/,$d' build.gradle + ndk: r11c + preassemble: + - :makeEnglishDictionary + + - versionName: 1.9.1117 + versionCode: 3692 + commit: 1.9-r1 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryImplementation/,+2d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: + - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' -e '/crashlytics/,$d' + build.gradle + - sed -i -e 's/gradle:3.1.0-alpha06/gradle:3.0.1/' ../build.gradle + scandelete: + - buildSrc + - app/src/debug + ndk: r14b + preassemble: + - :makeEnglishDictionary + + - versionName: 1.9.1944 + versionCode: 4519 + commit: 1.9-r3-fdroid + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryImplementation/d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + -e '/crashlytics/,$d' build.gradle + scandelete: + - buildSrc + - app/src/debug + ndk: r14b + preassemble: + - :makeEnglishDictionary + + - versionName: 1.9.2055 + versionCode: 4630 + commit: 1.9-r4 + subdir: app + init: sed -i -e '/fabric/d' -e '/canaryImplementation/d' ../build.gradle build.gradle + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + -e '/crashlytics/,$d' build.gradle + scandelete: + - buildSrc + - app/src/debug + ndk: r14b + preassemble: + - :makeEnglishDictionary + + - versionName: 1.9.2445 + versionCode: 5020 + commit: 1.9-r5.1 + subdir: app + gradle: + - yes + forceversion: true + rm: + - app/src/main/res/raw/words_*.dict + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + build.gradle + scandelete: + - buildSrc + - app/src/debug + ndk: r14b + preassemble: + - :makeEnglishDictionary + + - versionName: 1.9.2629 + versionCode: 5204 + commit: 1.9-r6 + subdir: app + gradle: + - yes + forceversion: true + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + build.gradle + scandelete: + - buildSrc + ndk: r14b + + - versionName: 1.10.364 + versionCode: 5539 + commit: 1.10-r1 + subdir: app + gradle: + - yes + forceversion: true + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + 
build.gradle + scandelete: + - buildSrc + ndk: r14b + + - versionName: 1.10.606 + versionCode: 5781 + commit: 1.10-r2 + subdir: app + gradle: + - yes + forceversion: true + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + build.gradle + scandelete: + - buildSrc + ndk: r14b + + - versionName: 1.10.1109 + versionCode: 6279 + commit: e57c9cc852aefdc1ff60b024e52d4341337b3df7 + subdir: app + gradle: + - yes + forceversion: true + prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' + build.gradle + scanignore: + - buildSrc + ndk: r14b + gradleprops: + - forceVersionBuildCount=4709 + + - versionName: 1.10.1786 + versionCode: 6956 + commit: 1.10-r4 + subdir: ime/app + sudo: + - apt-get update || apt-get update + - apt-get install -t bullseye openjdk-11-jdk-headless openjdk-11-jre-headless + - update-alternatives --auto java + gradle: + - yes + prebuild: + - sed -i -e "/EnvKey/ a //gradleVersion = '6.8.3'" ../build.gradle + - sed -i -e '/Data\.versionCode/ c versionCode $$VERCODE$$' -e '/Data\.versionName/ + c versionName "$$VERSION$$"' ../../gradle/versioning_apk.gradle + scanignore: + - addons/languages/*/pack/dictionary/*.gz + ndk: r14b + +MaintainerNotes: |- + * Prior to 1.9.2629, generates english dictionary binary (src/main/res/raw/words_*.dict) from source files (under `english_dictionary` folder. Mostly AOSP word-list) + + Summary and Description have been moved to the new localizable text files: + https://f-droid.org/docs/All_About_Descriptions_Graphics_and_Screenshots + +AutoUpdateMode: None +UpdateCheckMode: None +CurrentVersion: 1.10.1786 +CurrentVersionCode: 6956 diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt new file mode 100644 index 00000000..5c15e481 --- /dev/null +++ b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt @@ -0,0 +1 @@ +https://emersion.fr diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt new file mode 100644 index 00000000..b72afe53 --- /dev/null +++ b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt @@ -0,0 +1,11 @@ +An IRC client for mobile devices. + +Goals: + +

+<ul>
+<li>Modern: support for many IRCv3 extensions, plus some special support for IRC bouncers.</li>
+<li>Easy to use: offer a simple, straightforward interface.</li>
+<li>Offline-first: users should be able to read past conversations while offline, and network disruptions should be handled transparently.</li>
+<li>Lightweight: go easy on resource usage to run smoothly on older phones and save battery power.</li>
+<li>Cross-platform: the main target platforms are Linux and Android.</li>
+</ul>
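The fr.emersion.goguma.yml metadata added just below drives its update checks with UpdateCheckData: pubspec.yaml|version:\s.+\+(\d+)|.|version:\s(.+)\+. The following is a minimal sketch of what those two regexes capture from a Flutter-style "version:" line; the pipe-splitting into four fields and the sample pubspec.yaml line are assumptions for illustration, not fdroidserver's own parsing code.

# Illustrative sketch only -- not fdroidserver code.  It shows what the two
# regexes in the UpdateCheckData line of fr.emersion.goguma.yml (below) would
# extract from a Flutter pubspec.yaml "version:" line.
import re

update_check_data = r'pubspec.yaml|version:\s.+\+(\d+)|.|version:\s(.+)\+'
# assumed layout: file | versionCode regex | second file (or .) | versionName regex
_, code_re, _, name_re = update_check_data.split('|')

sample_pubspec_line = 'version: 0.1.0+1'  # hypothetical pubspec.yaml content

version_code = re.search(code_re, sample_pubspec_line).group(1)
version_name = re.search(name_re, sample_pubspec_line).group(1)
print(version_code, version_name)  # prints: 1 0.1.0

The captured values line up with the CurrentVersion: 0.1.0 and CurrentVersionCode: 1 fields in that metadata file.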
      diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt new file mode 100644 index 00000000..98eb88a5 --- /dev/null +++ b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt @@ -0,0 +1 @@ +An IRC client for mobile devices diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..d2f3b402 --- /dev/null +++ b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +Goguma diff --git a/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml b/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml new file mode 100644 index 00000000..da1897fc --- /dev/null +++ b/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml @@ -0,0 +1,30 @@ +Categories: + - Internet +License: AGPL-3.0-only +WebSite: https://sr.ht/~emersion/goguma +SourceCode: https://git.sr.ht/~emersion/goguma +IssueTracker: https://todo.sr.ht/~emersion/goguma + +AutoName: Goguma + +RepoType: git +Repo: https://git.sr.ht/~emersion/goguma + +Builds: + - versionName: 0.1.0 + versionCode: 1 + commit: 944d2d1e000901365392e850a98ee03f5dedba32 + output: build/app/outputs/flutter-apk/app-release.apk + srclibs: + - flutter@2.10.2 + build: + - $$flutter$$/bin/flutter config --no-analytics + - $$flutter$$/bin/flutter create --org fr.emersion --project-name goguma --platforms + android --no-overwrite . + - $$flutter$$/bin/flutter build apk --release --verbose + +AutoUpdateMode: Version +UpdateCheckMode: Tags +UpdateCheckData: pubspec.yaml|version:\s.+\+(\d+)|.|version:\s(.+)\+ +CurrentVersion: 0.1.0 +CurrentVersionCode: 1 diff --git a/tests/triple-t-multiple/.gitignore b/tests/triple-t-multiple/.gitignore new file mode 100644 index 00000000..0aba28e9 --- /dev/null +++ b/tests/triple-t-multiple/.gitignore @@ -0,0 +1 @@ +!build/ diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle new file mode 100644 index 00000000..c7934558 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle @@ -0,0 +1,3 @@ +include ':common' +include ':verifier' +include ':wallet' diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..03feeb11 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +COVID Certificate Check \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..34827e66 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +COVID 
Certificate \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle new file mode 100644 index 00000000..c7934558 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle @@ -0,0 +1,3 @@ +include ':common' +include ':verifier' +include ':wallet' diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..03feeb11 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +COVID Certificate Check \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt new file mode 100644 index 00000000..34827e66 --- /dev/null +++ b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt @@ -0,0 +1 @@ +COVID Certificate \ No newline at end of file diff --git a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml new file mode 100644 index 00000000..f8999482 --- /dev/null +++ b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml @@ -0,0 +1,50 @@ +AntiFeatures: + - Tracking +Categories: + - Sports & Health +License: MPL-2.0 +AuthorName: Swiss Admin +AuthorWebSite: https://www.bit.admin.ch +SourceCode: https://github.com/admin-ch/CovidCertificate-App-Android +IssueTracker: https://github.com/admin-ch/CovidCertificate-App-Android/issues +Changelog: https://github.com/admin-ch/CovidCertificate-App-Android/releases + +AutoName: Covid Cert + +RepoType: git +Repo: https://github.com/admin-ch/CovidCertificate-App-Android +Binaries: https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v%v-%c-verifier/verifier-prod-%v-%c-signed.apk + +Builds: + - versionName: 1.2.0 + versionCode: 1200 + commit: v1.2.0-1200-verifier + subdir: verifier + submodules: true + gradle: + - prod + rm: + - sdk/sdk/testKeystore + - wallet/testKeystore + - verifier/testKeystore + prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-verifier/verifier-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt + | tr -cd '0-9')" >> gradle.properties + + - versionName: 2.0.0 + versionCode: 2000 + commit: 5a871eabf1fce16b84e4c7b97c94fd3f2a37e910 + subdir: verifier + submodules: true + gradle: + - prod + rm: + - sdk/sdk/testKeystore + - wallet/testKeystore + - verifier/testKeystore + prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-verifier/verifier-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt + | tr -cd '0-9')" >> gradle.properties + +AutoUpdateMode: Version +UpdateCheckMode: Tags ^v[\d.]+-\d+-verifier$ +CurrentVersion: 2.0.0 +CurrentVersionCode: 2000 diff --git a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml 
b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml new file mode 100644 index 00000000..3ea35008 --- /dev/null +++ b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml @@ -0,0 +1,50 @@ +AntiFeatures: + - Tracking +Categories: + - Sports & Health +License: MPL-2.0 +AuthorName: Swiss Admin +AuthorWebSite: https://www.bit.admin.ch +SourceCode: https://github.com/admin-ch/CovidCertificate-App-Android +IssueTracker: https://github.com/admin-ch/CovidCertificate-App-Android/issues +Changelog: https://github.com/admin-ch/CovidCertificate-App-Android/releases + +AutoName: Covid Cert + +RepoType: git +Repo: https://github.com/admin-ch/CovidCertificate-App-Android +Binaries: https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v%v-%c-wallet/wallet-prod-%v-%c-signed.apk + +Builds: + - versionName: 1.2.0 + versionCode: 1200 + commit: v1.2.0-1200-wallet + subdir: wallet + submodules: true + gradle: + - prod + rm: + - sdk/sdk/testKeystore + - wallet/testKeystore + - verifier/testKeystore + prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-wallet/wallet-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt + | tr -cd '0-9')" >> gradle.properties + + - versionName: 2.0.0 + versionCode: 2000 + commit: 5a871eabf1fce16b84e4c7b97c94fd3f2a37e910 + subdir: wallet + submodules: true + gradle: + - prod + rm: + - sdk/sdk/testKeystore + - wallet/testKeystore + - verifier/testKeystore + prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-wallet/wallet-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt + | tr -cd '0-9')" >> gradle.properties + +AutoUpdateMode: Version +UpdateCheckMode: Tags ^v[\d.]+-\d+-wallet$ +CurrentVersion: 2.0.0 +CurrentVersionCode: 2000 diff --git a/tests/urzip-badcert.apk b/tests/urzip-badcert.apk new file mode 100644 index 00000000..cd7dd08f Binary files /dev/null and b/tests/urzip-badcert.apk differ diff --git a/tests/urzip-badsig.apk b/tests/urzip-badsig.apk new file mode 100644 index 00000000..89e106b9 Binary files /dev/null and b/tests/urzip-badsig.apk differ diff --git a/tests/urzip-release-unsigned.apk b/tests/urzip-release-unsigned.apk new file mode 100644 index 00000000..7bc22294 Binary files /dev/null and b/tests/urzip-release-unsigned.apk differ diff --git a/tests/urzip-release.apk b/tests/urzip-release.apk new file mode 100644 index 00000000..28a03450 Binary files /dev/null and b/tests/urzip-release.apk differ diff --git a/tests/urzip.apk b/tests/urzip.apk new file mode 100644 index 00000000..ee5e5cba Binary files /dev/null and b/tests/urzip.apk differ diff --git a/tests/v2.only.sig_2.apk b/tests/v2.only.sig_2.apk new file mode 100644 index 00000000..0b1804d3 Binary files /dev/null and b/tests/v2.only.sig_2.apk differ diff --git a/tests/valid-package-names/RandomPackageNames.java b/tests/valid-package-names/RandomPackageNames.java new file mode 100644 index 00000000..80257fd8 --- /dev/null +++ b/tests/valid-package-names/RandomPackageNames.java @@ -0,0 +1,237 @@ + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileOutputStream; +import java.io.Writer; +import java.io.InputStreamReader; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import 
java.nio.IntBuffer; +import java.util.Random; + +// apt-get install libcommons-lang3-java +//import org.apache.commons.lang3.RandomStringUtils; + +public class RandomPackageNames { + + private static Writer validWriter; + private static Writer invalidWriter; + + private static final String[] py = { + "python3", "-c", + "import sys,re\n" + + "m = re.search(r'''" + // + "^(?:[a-z_]+(?:\\d*[a-zA-Z_]*)*)(?:\\.[a-z_]+(?:\\d*[a-zA-Z_]*)*)*$" + + "^[a-z_]+(?:\\d*[a-zA-Z_]*)(?:\\.[a-z_]+(?:\\d*[a-zA-Z_]*)*)*$" + + "''', sys.stdin.read())\n" + + "if m is not None:\n" + + " with open('/tmp/foo', 'w') as fp:\n" + + " fp.write(m.group() + '\\n')\n" + + "sys.exit(m is None)" + }; + + public static boolean checkAgainstPython(String packageName) + throws IOException, InterruptedException { + + ProcessBuilder pb = new ProcessBuilder(py); + Process process = pb.start(); + OutputStream output = process.getOutputStream(); + output.write(packageName.getBytes()); + output.write("\n".getBytes()); + output.flush(); + output.close(); + + int exitVal = process.waitFor(); + return exitVal == 0; + } + + private static boolean isValidJavaIdentifier(String packageName) { + if (packageName.length() == 0 || !Character.isJavaIdentifierStart(packageName.charAt(0))) { + //System.out.println("invalid first char: '" + packageName + "'"); + return false; + } + for (int codePoint : packageName.codePoints().toArray()) { + if (codePoint != 46 && !Character.isJavaIdentifierPart(codePoint)) { + //System.out.println("invalid char: '" + // + new StringBuilder().appendCodePoint(codePoint).toString() + "' " + // + codePoint); + return false; + } + } + return true; + } + + private static void write(String packageName) throws IOException { + if (isValidJavaIdentifier(packageName)) { + validWriter.write(packageName); + validWriter.write("\n"); + } else { + invalidWriter.write(packageName); + invalidWriter.write("\n"); + } + } + + private static void compare(String packageName) + throws IOException, InterruptedException { + boolean python = checkAgainstPython(packageName); + boolean java = isValidJavaIdentifier(packageName); + if (python && !java) { + System.out.println("MISMATCH: '" + packageName + "' " + + (python ? "py:✔" : "py:☹") + " " + + (java ? 
"ja:✔" : "ja:☹") + " "); + } + } + + public static void main (String[] args) + throws IOException, InterruptedException, UnsupportedEncodingException { + int[] data; + byte[] bytes; + ByteBuffer byteBuffer; + Random random = new Random(); + + validWriter = new OutputStreamWriter(new FileOutputStream("valid.txt"), "UTF-8"); + invalidWriter = new OutputStreamWriter(new FileOutputStream("invalid.txt"), "UTF-8"); + + //System.out.print("."); + + char[] validFirstLetters = new char[27]; + validFirstLetters[0] = 95; // _ + for (int i = 1; i < 27; i++) { + validFirstLetters[i] = (char) (i + 96); + } + + char[] validLetters = new char[64]; + int j = 0; + for (char c = 32; c < 123; c++) { + if ((c == 46) || (c > 47 && c < 58) || (c > 64 && c < 91) || (c > 96)) { + validLetters[j] = c; + j++; + } + } + + for (File f : new File("/home/hans/code/fdroid/fdroiddata/metadata").listFiles()) { + String name = f.getName(); + if (name.endsWith(".yml")) { + compare(name.substring(0, name.length() - 4)); + } + } + compare("SpeedoMeterApp.main"); + compare("uk.co.turtle-player"); + compare("oVPb"); + compare(" _LS"); + compare("r.vq"); + compare("r.vQ"); + compare("ra.vQ"); + compare("s.vQ"); + compare("r.tQ"); + compare("r.vR"); + compare("any.any"); + compare("org.fdroid.fdroid"); + compare("me.unfollowers.droid"); + compare("me_.unfollowers.droid"); + compare("me._unfollowers.droid"); + compare("me.unfo11llowers.droid"); + compare("me11.unfollowers.droid"); + compare("m11e.unfollowers.droid"); + compare("1me.unfollowers.droid"); + compare("me.unfollowers23.droid"); + compare("me.unfollowers.droid23d"); + compare("me.unfollowers_.droid"); + compare("me.unfollowers._droid"); + compare("me.unfollowers_._droid"); + compare("me.unfollowers.droid_"); + compare("me.unfollowers.droid32"); + compare("me.unfollowers.droid/"); + compare("me:.unfollowers.droid"); + compare(":me.unfollowers.droid"); + compare("me.unfollowers.dro;id"); + compare("me.unfollowe^rs.droid"); + compare("me.unfollowers.droid."); + compare("me.unfollowers..droid"); + compare("me.unfollowers.droid._"); + compare("me.unfollowers.11212"); + compare("me.1.unfollowers.11212"); + compare("me..unfollowers.11212"); + compare("abc"); + compare("abc."); + compare(".abc"); + + for (int i = 0; i < 300000; i++) { + String packageName; + + int count = random.nextInt(10) + 1; + byte valid = (byte) random.ints(97, 122).limit(1).toArray()[0]; + + // only valid + data = random.ints(46, 122) + .limit(count) + .filter(c -> (c == 46) || (c > 47 && c < 58) || (c > 64 && c < 91) || (c > 96)) + .toArray(); + byteBuffer = ByteBuffer.allocate(data.length); + for (int value : data) { + byteBuffer.put((byte)value); + } + if (data.length > 0) { + bytes = byteBuffer.array(); + bytes[0] = valid; + packageName = new String(byteBuffer.array(), "UTF-8"); + //System.out.println(packageName + ": " + isValidJavaIdentifier(packageName)); + compare(packageName); + write(packageName); + } + + // full US-ASCII + data = random.ints(32, 126).limit(count).toArray(); + byteBuffer = ByteBuffer.allocate(data.length); + for (int value : data) { + byteBuffer.put((byte)value); + } + bytes = byteBuffer.array(); + packageName = new String(bytes, "UTF-8"); + //System.out.println(packageName + ": " + isValidJavaIdentifier(packageName)); + compare(packageName); + write(packageName); + + // full US-ASCII with valid first letter + data = random.ints(32, 127).limit(count).toArray(); + byteBuffer = ByteBuffer.allocate(data.length * 4); + byteBuffer.asIntBuffer().put(data); + bytes = byteBuffer.array(); 
+ bytes[0] = valid; + packageName = new String(bytes, "UTF-8"); + //System.out.println(packageName + ": " + isValidJavaIdentifier(packageName)); + compare(packageName); + write(packageName); + + // full unicode + data = random.ints(32, 0xFFFD).limit(count).toArray(); + byteBuffer = ByteBuffer.allocate(data.length * 4); + byteBuffer.asIntBuffer().put(data); + packageName = new String(byteBuffer.array(), "UTF-32"); + //System.out.println(packageName + ": " + isValidJavaIdentifier(packageName)); + compare(packageName); + write(packageName); + + // full unicode with valid first letter + data = random.ints(32, 0xFFFD).limit(count).toArray(); + byteBuffer = ByteBuffer.allocate(data.length * 4); + byteBuffer.asIntBuffer().put(data); + bytes = byteBuffer.array(); + bytes[0] = 0; + bytes[1] = 0; + bytes[2] = 0; + bytes[3] = 120; + packageName = new String(bytes, "UTF-32"); + //System.out.println(packageName + ": " + isValidJavaIdentifier(packageName)); + compare(packageName); + write(packageName); + } + + validWriter.close(); + invalidWriter.close(); + } +} diff --git a/tests/valid-package-names/random-package-names b/tests/valid-package-names/random-package-names new file mode 100755 index 00000000..bd48d43e --- /dev/null +++ b/tests/valid-package-names/random-package-names @@ -0,0 +1,10 @@ +#!/bin/sh + +set -e +set -x + +export CLASSPATH=/usr/share/java/commons-lang3.jar:. + +cd $(dirname $0) +javac -classpath $CLASSPATH RandomPackageNames.java +java -classpath $CLASSPATH RandomPackageNames diff --git a/tests/valid-package-names/test.py b/tests/valid-package-names/test.py new file mode 100755 index 00000000..eb9f95e3 --- /dev/null +++ b/tests/valid-package-names/test.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import re + + +def test(packageName): + m = ANDROID_APPLICATION_ID_REGEX.match(packageName.strip()) + return m is not None + + +ANDROID_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-z_]+(?:\d*[a-zA-Z_]*)*)(?:\.[a-z_]+(?:\d*[a-zA-Z_]*)*)*$''') +valid = 0 +invalid = 0 + +test('org.fdroid.fdroid') +with open('valid.txt', encoding="utf-8") as fp: + for packageName in fp: + packageName = packageName.strip() + if not test(packageName): + valid += 1 + # print('should be valid:', packageName) + +with open('invalid.txt', encoding="utf-8") as fp: + for packageName in fp: + packageName = packageName.strip() + if test(packageName): + invalid += 1 + print('should be not valid: "' + packageName + '"') + + +print(valid, 'Java thinks is valid, but the Android regex does not') +print(invalid, 'invalid mistakes') diff --git a/updateplugin b/updateplugin deleted file mode 100755 index fe798e93..00000000 --- a/updateplugin +++ /dev/null @@ -1 +0,0 @@ -scp -r wp-fdroid/ fdroid@f-droid.org:/home/fdroid/public_html/wp-content/plugins diff --git a/wp-fdroid/AndroidManifest.xml b/wp-fdroid/AndroidManifest.xml deleted file mode 100644 index 97658a11..00000000 --- a/wp-fdroid/AndroidManifest.xml +++ /dev/null @@ -1,1639 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/wp-fdroid/android-permissions.php b/wp-fdroid/android-permissions.php deleted file mode 100644 index c59bc6d4..00000000 --- a/wp-fdroid/android-permissions.php +++ /dev/null @@ -1,114 +0,0 @@ -android_manifest_file_path = $android_manifest_file_path_in; - $this->android_strings_file_path = $android_strings_file_path_in; - $this->cache_file_path = $cache_file_path_in; - } - - // Returns an associative array with android permissions and data about them - function get_permissions_array() { - - // Check status of cache - $android_manifest_file_stat = stat($this->android_manifest_file_path); - $android_manifest_file_mtime = $android_manifest_file_stat['mtime']; - $android_strings_file_stat = stat($this->android_strings_file_path); - $android_strings_file_mtime = $android_strings_file_stat['mtime']; - $cache_file_mtime = 0; - if(file_exists($this->cache_file_path)) { - $cache_file_stat = stat($this->cache_file_path); - $cache_file_mtime = $cache_file_stat['mtime']; - } - - // If the cache is fresh, use it instead - if($android_manifest_file_mtime < $cache_file_mtime && $android_strings_file_mtime < $cache_file_mtime ) { - $cache_file_handle = fopen($this->cache_file_path, 'r'); - $cache_file_content = fread($cache_file_handle, filesize($this->cache_file_path)); - fclose($cache_file_handle); - - $permissions = unserialize($cache_file_content); - - return $permissions; - } - - // We are updating the cache, touch the file (note: race condition possible between stating the cache file above and this line...) 
- touch($this->cache_file_path); - - // Get permission raw data from XML - $manifestDoc = new DOMDocument; - $manifestDoc->load($this->android_manifest_file_path); - $manifestXpath = new DOMXPath($manifestDoc); - - $stringsDoc = new DOMDocument; - $stringsDoc->load($this->android_strings_file_path); - $stringsXpath = new DOMXPath($stringsDoc); - - $comment = ''; - foreach ($manifestXpath->query('node()') as $node) { - // Save permissions and permission groups from tags - if($node->nodeName == 'permission-group' || $node->nodeName == 'permission') { - $name = $node->attributes->getNamedItem('name')->value; - $name = substr(strrchr($name,'.'), 1); - - // Lookup the human readable title - $labelObject = $node->attributes->getNamedItem('label'); - $labelString = $name; - if( $labelObject !== NULL ) { - $labelName = substr(strrchr($labelObject->value,'/'),1); - $labelStringObject = $stringsXpath->query('//string[@name="'.$labelName.'"]'); - $labelString = ucfirst($labelStringObject->item(0)->nodeValue); - } - - // Lookup the human readable description - $descriptionObject = $node->attributes->getNamedItem('description'); - $descriptionString = '(Description missing)'; - if($descriptionObject !== NULL) { - $descriptionName = substr(strrchr($descriptionObject->value,'/'),1); - $descriptionStringObject = $stringsXpath->query('//string[@name="'.$descriptionName.'"]'); - $descriptionString = ucfirst($descriptionStringObject->item(0)->nodeValue); - } - - $permissions[$node->nodeName][$name]['label'] = stripslashes($labelString); - $permissions[$node->nodeName][$name]['description'] = stripslashes($descriptionString); - $permissions[$node->nodeName][$name]['comment'] = stripslashes(str_replace(array("\r\n", "\r", "\n", "\t", ' '), '', $comment)); - - if($node->nodeName == 'permission') { - $permissionGroupObject = $node->attributes->getNamedItem('permissionGroup'); - $permissionGroup = 'none'; - if($permissionGroupObject !== NULL) { - $permissionGroup = substr(strrchr($permissionGroupObject->value,'.'), 1); - } - - $permissions[$node->nodeName][$name]['permissionGroup'] = $permissionGroup; - $permissions[$node->nodeName][$name]['protectionLevel'] = $node->attributes->getNamedItem('protectionLevel')->value; - } - } - - // Cache descriptions from comments preceding the tags - if($node->nodeName == '#comment') { - $comment .= $node->textContent; - } - elseif($node->nodeName != '#text') { - $comment = ''; - } - } - - // Update cache with serialized permissions - $cache_file_handle = fopen($this->cache_file_path, 'w'); - fwrite($cache_file_handle, serialize($permissions)); - fclose($cache_file_handle); - - return $permissions; - } -} -?> diff --git a/wp-fdroid/readme.txt b/wp-fdroid/readme.txt deleted file mode 100644 index 0dfba563..00000000 --- a/wp-fdroid/readme.txt +++ /dev/null @@ -1,34 +0,0 @@ - -=== WP FDroid === -Contributors: CiaranG -Tags: android, repository -Requires at least: 2.9 -Tested up to: 2.9 -Stable tag: 0.1 - -Provides the ability to browse the contents of an FDroid repository. - -== Description == - -This plugin provides the ability to browse the contents of an FDroid -repository. The repository browser can be inserted into the site by -creating a page containing the 'fdroidrepo' shortcode. - -License: GPL v3 - -== Installation == - -1. Download and extract the zip file. -2. Upload the plugin directory to /wp-content/plugins on your server. -3. Go to the Plugins screen in your Wordpress Admin and activate it. - -== Frequently Asked Questions == - -= Is this finished? = - -No it isn't. 
I'm working on it. - -== Screenshots == - -1. There isn't one yet actually. - diff --git a/wp-fdroid/strings.xml b/wp-fdroid/strings.xml deleted file mode 100644 index 7b785ec5..00000000 --- a/wp-fdroid/strings.xml +++ /dev/null @@ -1,3389 +0,0 @@ - - - - - B - - KB - - MB - - GB - - TB - - PB - - %1$s%2$s - - - <untitled> - - - \u2026 - - - \u2025 - - - (No phone number) - - - (Unknown) - - - Voicemail - - - MSISDN1 - - - - Connection problem or invalid MMI code. - - Operation is restricted to fixed dialing numbers only. - - - Service was enabled. - - Service was enabled for: - - Service has been disabled. - - Registration was successful. - - Erasure was successful. - - Incorrect password. - - MMI complete. - - The old PIN you typed is not correct. - - The PUK you typed is not correct. - - The PINs you entered do not match. - - Type a PIN that is 4 to 8 numbers. - - Type a PUK that is 8 numbers or longer. - - Your SIM card is PUK-locked. Type the PUK code to unlock it. - Type PUK2 to unblock SIM card. - - - Incoming Caller ID - - Outgoing Caller ID - - Call forwarding - - Call waiting - - Call barring - - Password change - - PIN change - Calling number present - Calling number restricted - Three way calling - Rejection of undesired annoying calls - Calling number delivery - Do not disturb - - - Caller ID defaults to restricted. Next call: Restricted - - Caller ID defaults to restricted. Next call: Not restricted - - Caller ID defaults to not restricted. Next call: Restricted - - Caller ID defaults to not restricted. Next call: Not restricted - - - - Service not provisioned. - - The caller ID setting cannot be changed. - - - Restricted access changed - - Data service is blocked. - - Emergency service is blocked. - - Voice service is blocked. - - All Voice services are blocked. - - SMS service is blocked. - - Voice/Data services are blocked. - - Voice/SMS services are blocked. - - All Voice/Data/SMS services are blocked. - - - - Voice - - Data - - FAX - - SMS - - Async - - Sync - - Packet - - PAD - - - - Roaming Indicator On - Roaming Indicator Off - Roaming Indicator Flashing - Out of Neighborhood - Out of Building - Roaming - Preferred System - Roaming - Available System - Roaming - Alliance Partner - Roaming - Premium Partner - Roaming - Full Service Functionality - Roaming - Partial Service Functionality - Roaming Banner On - Roaming Banner Off - Searching for Service - - - - - {0}: Not forwarded - - {0}: {1} - - {0}: {1} after {2} seconds - - {0}: Not forwarded - - {0}: Not forwarded - - - - Feature code complete. - - Connection problem or invalid feature code. - - - OK - - A network error occurred. - - The URL could not be found. - - The site authentication scheme is not supported. - - Authentication was unsuccessful. - - Authentication via the proxy server was unsuccessful. - - The connection to the server was unsuccessful. - - The server couldn\'t communicate. Try again later. - - The connection to the server timed out. - - The page contains too many server redirects. - - The protocol is not supported. - - A secure connection could not be established. - - The page could not be opened because the URL is invalid. - - The file could not be accessed. - - The requested file was not found. - - Too many requests are being processed. Try again later. - - - - Sign-in error for %1$s - - - - Sync - - Sync - - Too many %s deletes. - - - Tablet storage is full! Delete some files to free space. - - Phone storage is full! Delete some files to free space. 
[Remainder of the deleted Android string resources: permission group names, permission labels and descriptions, lock-screen, storage/USB, account, sync and other system UI strings, plus the browser autofill regexes. The XML markup was lost in extraction and only the bare string values survive; this appears to be the plugin's bundled strings.xml, which the AndroidPermissions helper used below reads to label permissions.]

diff --git a/wp-fdroid/wp-fdroid.php b/wp-fdroid/wp-fdroid.php
deleted file mode 100644
index 792db5b4..00000000
--- a/wp-fdroid/wp-fdroid.php
+++ /dev/null
@@ -1,851 +0,0 @@
[The head of the deleted file (plugin header comment, the FDroid class declaration and the opening of its constructor) was lost in extraction; the visible content resumes inside the constructor.]
-	$this->inited=false;
-	$this->site_path=getenv('DOCUMENT_ROOT');
-	wp_register_sidebar_widget('fdroid_latest', 'FDroid Latest', 'widget_fdroidlatest');
-	}
-
-
-	// Register additional query variables.
(Handler for the 'query_vars' filter) - function queryvars($qvars) { - $qvars[]='fdfilter'; - $qvars[]='fdcategory'; - $qvars[]='fdid'; - $qvars[]='fdpage'; - $qvars[]='fdstyle'; - return $qvars; - } - - - // Lazy initialise. All non-trivial members should call this before doing anything else. - function lazyinit() { - if(!$this->inited) { - load_plugin_textdomain($this->textdom, PLUGINDIR.'/'.dirname(plugin_basename(__FILE__)), dirname(plugin_basename(__FILE__))); - - $this->inited=true; - } - } - - // Gets a required query parameter by name. - function getrequiredparam($name) { - global $wp_query; - if(!isset($wp_query->query_vars[$name])) - wp_die("Missing parameter ".$name,"Error"); - return $wp_query->query_vars[$name]; - } - - // Handler for the 'fdroidrepo' shortcode. - // $attribs - shortcode attributes - // $content - optional content enclosed between the starting and - // ending shortcode - // Returns the generated content. - function do_shortcode($attribs,$content=null) { - global $wp_query,$wp_rewrite; - $this->lazyinit(); - - // Init local query vars - foreach($this->queryvars(array()) as $qv) { - if(array_key_exists($qv,$wp_query->query_vars)) { - $query_vars[$qv] = $wp_query->query_vars[$qv]; - } else { - $query_vars[$qv] = null; - } - } - - // Santiy check query vars - if(!isset($query_vars['fdpage']) || !is_numeric($query_vars['fdpage']) || $query_vars['fdpage'] <= 0) { - $query_vars['fdpage'] = 1; - } - - $out = ''; - - if(isset($attribs['search']) && $query_vars['fdfilter']===null) { - $query_vars['fdfilter'] = ''; - } - - if($query_vars['fdcategory'] == 'All categories') { - unset($query_vars['fdcategory']); - } - - if($query_vars['fdid']!==null) { - $out.=$this->get_app($query_vars); - } else { - $out.='
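The queryvars() handler above (and the do_shortcode() handler it feeds) only takes effect once it is hooked into WordPress; those registration calls sit elsewhere in this deleted file and are not visible in this hunk. A minimal sketch of how such handlers are conventionally wired up — add_filter() and add_shortcode() are standard WordPress APIs, while the bootstrap variable name is illustrative:

// Bootstrap sketch, assuming WordPress is loaded and the FDroid class from this file exists.
$fdroid = new FDroid();

// Expose fdfilter, fdcategory, fdid, fdpage and fdstyle to WP_Query so that
// queryvars() above can read them back out of $wp_query->query_vars.
add_filter('query_vars', array($fdroid, 'queryvars'));

// Render the repository browser wherever the [fdroidrepo] shortcode appears.
add_shortcode('fdroidrepo', array($fdroid, 'do_shortcode'));
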
      '; - $out.='

      '; - $out.='

      '; - $out.=$this->makeformdata($query_vars); - $out.='
      '."\n"; - - $out.=$this->get_apps($query_vars); - } - - return $out; - } - - - // Get a URL for a full description of a license, as given by one of our - // pre-defined license abbreviations. This is a temporary function, as this - // needs to be data-driven so the same information can be used by the client, - // the web site and the documentation. - function getlicenseurl($license) { - switch($license) { - case 'MIT': - return 'http://www.gnu.org/licenses/license-list.html#X11License'; - case 'NewBSD': - return 'http://www.gnu.org/licenses/license-list.html#ModifiedBSD'; - case 'BSD': - return 'http://www.gnu.org/licenses/license-list.html#OriginalBSD'; - case 'GPLv3': - case 'GPLv3+': - return 'http://www.gnu.org/licenses/license-list.html#GNUGPL'; - case 'GPLv2': - case 'GPLv2+': - return 'http://www.gnu.org/licenses/license-list.html#GPLv2'; - case 'LGPL': - return 'http://www.gnu.org/licenses/license-list.html#LGPL'; - case 'Apache2': - return 'http://www.gnu.org/licenses/license-list.html#apache2'; - default: - return null; - } - } - - function get_app($query_vars) { - global $permissions_data; - $permissions_object = new AndroidPermissions($this->site_path.'/wp-content/plugins/wp-fdroid/AndroidManifest.xml', - $this->site_path.'/wp-content/plugins/wp-fdroid/strings.xml', - sys_get_temp_dir().'/android-permissions.cache'); - $permissions_data = $permissions_object->get_permissions_array(); - - // Get app data - $xml = simplexml_load_file($this->site_path.'/repo/index.xml'); - foreach($xml->children() as $app) { - - $attrs=$app->attributes(); - if($attrs['id']==$query_vars['fdid']) { - $apks=array();; - foreach($app->children() as $el) { - switch($el->getName()) { - case "name": - $name=$el; - break; - case "added": - $added=$el; - break; - case "icon": - $icon=$el; - break; - case "summary": - $summary=$el; - break; - case "desc": - $desc=$el; - break; - case "license": - $license=$el; - break; - case "source": - $source=$el; - break; - case "tracker": - $issues=$el; - break; - case "donate": - $donate=$el; - break; - case "web": - $web=$el; - break; - case "antifeatures"; - $antifeatures=$el; - break; - case "requirements"; - $requirements=$el; - break; - case "package": - $thisapk=array(); - foreach($el->children() as $pel) { - switch($pel->getName()) { - case "version": - $thisapk['version']=$pel; - break; - case "vercode": - $thisapk['vercode']=$pel; - break; - case "apkname": - $thisapk['apkname']=$pel; - break; - case "srcname": - $thisapk['srcname']=$pel; - break; - case "hash": - $thisapk['hash']=$pel; - break; - case "size": - $thisapk['size']=$pel; - break; - case "sdkver": - $thisapk['sdkver']=$pel; - break; - case "permissions": - $thisapk['permissions']=$pel; - break; - } - } - $apks[]=$thisapk; - - } - } - - // Generate app diff data - foreach(array_reverse($apks, true) as $key=>$apk) { - if(isset($previous)) { - // Apk size - $apks[$key]['diff']['size'] = $apk['size']-$previous['size']; - } - - // Permissions - $permissions = explode(',',$apk['permissions']); - $permissionsPrevious = isset($previous['permissions'])?explode(',',$previous['permissions']):array(); - $apks[$key]['diff']['permissions']['added'] = array_diff($permissions, $permissionsPrevious); - $apks[$key]['diff']['permissions']['removed'] = array_diff($permissionsPrevious, $permissions); - - $previous = $apk; - } - - // Output app information - $out='
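The comment on getlicenseurl() above already flags it as a temporary hard-coded switch that should become data-driven so the client, the website and the documentation can share one source. A minimal sketch of that data-driven shape, reusing exactly the abbreviations and URLs from the switch; the function name licenseurl_from_map() is illustrative and not part of the plugin:

// License abbreviation => explanation URL, mirroring the cases in getlicenseurl().
function licenseurl_from_map($license) {
    static $urls = array(
        'MIT'     => 'http://www.gnu.org/licenses/license-list.html#X11License',
        'NewBSD'  => 'http://www.gnu.org/licenses/license-list.html#ModifiedBSD',
        'BSD'     => 'http://www.gnu.org/licenses/license-list.html#OriginalBSD',
        'GPLv3'   => 'http://www.gnu.org/licenses/license-list.html#GNUGPL',
        'GPLv3+'  => 'http://www.gnu.org/licenses/license-list.html#GNUGPL',
        'GPLv2'   => 'http://www.gnu.org/licenses/license-list.html#GPLv2',
        'GPLv2+'  => 'http://www.gnu.org/licenses/license-list.html#GPLv2',
        'LGPL'    => 'http://www.gnu.org/licenses/license-list.html#LGPL',
        'Apache2' => 'http://www.gnu.org/licenses/license-list.html#apache2',
    );
    // Unknown abbreviations get no link, matching the default branch above.
    return isset($urls[$license]) ? $urls[$license] : null;
}
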
      '; - $out.='
      '; - $out.='

      '.$name.""; - $out.="
      ".$summary."

      "; - $out.="
      "; - - $out.=str_replace('href="fdroid.app:', 'href="/repository/browse/?fdid=', $desc); - - if(isset($antifeatures)) { - $antifeaturesArray = explode(',',$antifeatures); - foreach($antifeaturesArray as $antifeature) { - $antifeatureDescription = $this->get_antifeature_description($antifeature); - $out.='

      '.$antifeatureDescription['name'].'
      '; - $out.=$antifeatureDescription['description'].' more...

      '; - } - } - - $out.="

      "; - $licenseurl=$this->getlicenseurl($license); - $out.="License: "; - if($licenseurl) - $out.=''; - $out.=$license; - if($licenseurl) - $out.=''; - - if(isset($requirements)) { - $out.='
      Additional requirements: '.$requirements; - } - $out.="

      "; - - $out.="

      "; - if(strlen($web)>0) - $out.='Website: '.$web.'
      '; - if(strlen($issues)>0) - $out.='Issue Tracker: '.$issues.'
      '; - if(strlen($source)>0) - $out.='Source Code: '.$source.'
      '; - if($donate && strlen($donate)>0) - $out.='Donate: '.$donate.'
      '; - $out.="

      "; - - $out.="

      For full details and additional technical information, see "; - $out.="this application's page on the F-Droid wiki.

      "; - - $out.=''; - - $out.="

      Packages

      "; - - $out.=''; - $out.="

      Although APK downloads are available below to give "; - $out.="you the choice, you should be aware that by installing that way you "; - $out.="will not receive update notifications, and it's a less secure way "; - $out.="to download. "; - $out.="We recommend that you install the F-Droid client and use that.

      "; - - $i=0; - foreach($apks as $apk) { - $first = $i+1==count($apks); - $out.="

      Version ".$apk['version']."
      "; - $out.="Added on ".$apk['added']."
      "; - - // Is this source or binary? - $srcbuild = isset($apk['srcname']) && file_exists($this->site_path.'/repo/'.$apk['srcname']); - - $out.="

      This version is built and signed by "; - if($srcbuild) { - $out.="F-Droid, and guaranteed to correspond to the source tarball below.

      "; - } else { - $out.="the original developer.

      "; - } - $out.='download apk '; - $out.=$this->human_readable_size($apk['size']); - $diffSize = $apk['diff']['size']; - if(abs($diffSize) > 500) { - $out.=' ('; - $out.=$diffSize>0?'+':''; - $out.=$this->human_readable_size($diffSize, 1).')'; - } - if($srcbuild) { - $out.='
      source tarball '; - $out.=$this->human_readable_size(filesize($this->site_path.'/repo/'.$apk['srcname'])); - } - - if(isset($apk['permissions'])) { - // Permissions diff link - if($first == false) { - $permissionsAddedCount = count($apk['diff']['permissions']['added']); - $permissionsRemovedCount = count($apk['diff']['permissions']['removed']); - $divIdDiff='permissionsDiff'.$i; - if($permissionsAddedCount || $permissionsRemovedCount) { - $out.='
      permissions diff'; - $out.=' ('; - if($permissionsAddedCount) - $out.='+'.$permissionsAddedCount; - if($permissionsAddedCount && $permissionsRemovedCount) - $out.='/'; - if($permissionsRemovedCount) - $out.='-'.$permissionsRemovedCount; - $out.=')'; - } - else - { - $out.='
      no permission changes'; - } - } - - // Permissions list link - $permissionsListString = $this->get_permission_list_string(explode(',',$apk['permissions']), $permissions_data, $summary); - /*if($i==0) - $divStyleDisplay='block'; - else*/ - $divStyleDisplay='none'; - $divId='permissions'.$i; - $out.='
      view permissions'; - $out.=' ['.$summary.']'; - $out.='
      '; - - // Permissions list - $out.='
      '; - $out.=$permissionsListString; - $out.='
      '; - - // Permissions diff - { - $out.='
      '; - $permissionsRemoved = $apk['diff']['permissions']['removed']; - usort($permissionsRemoved, "permissions_cmp"); - - // Added permissions - if($permissionsAddedCount) { - $out.='
      ADDED

      '; - $out.=$this->get_permission_list_string($apk['diff']['permissions']['added'], $permissions_data, $summary); - } - - // Removed permissions - if($permissionsRemovedCount) { - $out.='
      REMOVED

      '; - $out.=$this->get_permission_list_string($apk['diff']['permissions']['removed'], $permissions_data, $summary); - } - - $out.='
      '; - } - } - else { - $out.='
      no extra permissions needed
      '; - } - - $out.='

      '; - $i++; - } - - $out.='
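The "permissions diff" links rendered in the per-version loop above rely on the added/removed sets that get_app() computes earlier with array_diff() over the comma-separated permission lists of consecutive APKs. The same computation in isolation, assuming inputs shaped like the permissions strings stored in index.xml; permission_diff() and the example permission names are illustrative:

// Given two comma-separated permission strings, compute which permissions a
// newer APK added and which it dropped relative to the previous one.
function permission_diff($previousCsv, $currentCsv) {
    $previous = $previousCsv === '' ? array() : explode(',', $previousCsv);
    $current  = $currentCsv  === '' ? array() : explode(',', $currentCsv);
    return array(
        'added'   => array_values(array_diff($current, $previous)),
        'removed' => array_values(array_diff($previous, $current)),
    );
}

// e.g. permission_diff('INTERNET,CAMERA', 'INTERNET,VIBRATE')
//      => added: VIBRATE, removed: CAMERA
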

      Index

      '; - - return $out; - } - } - return "

      Application not found

      "; - } - - private function get_permission_list_string($permissions, $permissions_data, &$summary) { - $out=''; - usort($permissions, "permissions_cmp"); - $permission_group_last = ''; - foreach($permissions as $permission) { - $permission_group = $permissions_data['permission'][$permission]['permissionGroup']; - if($permission_group != $permission_group_last) { - $permission_group_label = $permissions_data['permission-group'][$permission_group]['label']; - if($permission_group_label=='') $permission_group_label = 'Extra/Custom'; - $out.=''.strtoupper($permission_group_label).'
      '; - $permission_group_last = $permission_group; - } - - $out.=$this->get_permission_protection_level_icon($permissions_data['permission'][$permission]['protectionLevel']).' '; - $out.=''.$permissions_data['permission'][$permission]['label'].' ['.$permission.']
      '; - if($permissions_data['permission'][$permission]['description']) $out.=$permissions_data['permission'][$permission]['description'].'
      '; - //$out.=$permissions_data['permission'][$permission]['comment'].'
      '; - $out.='
      '; - - if(!isset($summaryCount[$permissions_data['permission'][$permission]['protectionLevel']])) - $summaryCount[$permissions_data['permission'][$permission]['protectionLevel']] = 0; - $summaryCount[$permissions_data['permission'][$permission]['protectionLevel']]++; - } - - $summary = ''; - if(isset($summaryCount)) { - foreach($summaryCount as $protectionLevel => $count) { - $summary .= $this->get_permission_protection_level_icon($protectionLevel, 'regular').' '.$count; - $summary .= ', '; - } - } - $summary = substr($summary,0,-2); - - return $out; - } - - private function get_permission_protection_level_icon($protection_level, $size='adjusted') { - $iconString = ''; - if($protection_level=='dangerous') { - $iconString .= ''; // WARNING SIGN - } - elseif($protection_level=='normal') { - $iconString .= ''; // CIRCLED LATIN SMALL LETTER I - } - elseif($protection_level=='signature') { - $iconString .= ''; // HEAVY TEARDROP-SPOKED ASTERISK - } - elseif($protection_level=='signatureOrSystem') { - $iconString .= ''; // ATOM SYMBOL - } - else { - $iconString .= ''; // GEAR - } - - return $iconString; - } - - private function human_readable_size($size, $minDiv=0) { - $si_prefix = array('bytes','kB','MB'); - $div = 1024; - - for($i=0;(abs($size) > $div && $i < count($si_prefix)) || $i<$minDiv;$i++) { - $size /= $div; - } - - return round($size,max(0,$i-1)).' '.$si_prefix[$i]; - } - - private function get_antifeature_description($antifeature) { - // Anti feature names and descriptions - $antifeatureDescription['Ads']['name'] = 'Advertising'; - $antifeatureDescription['Ads']['description'] = 'This application contains advertising.'; - $antifeatureDescription['Tracking']['name'] = 'Tracks You'; - $antifeatureDescription['Tracking']['description'] = 'This application tracks and reports your activity to somewhere.'; - $antifeatureDescription['NonFreeNet']['name'] = 'Non-Free Network Services'; - $antifeatureDescription['NonFreeNet']['description'] = 'This application promotes a non-Free network service.'; - $antifeatureDescription['NonFreeAdd']['name'] = 'Non-Free Addons'; - $antifeatureDescription['NonFreeAdd']['description'] = 'This application promotes non-Free add-ons.'; - $antifeatureDescription['NonFreeDep']['name'] = 'Non-Free Dependencies'; - $antifeatureDescription['NonFreeDep']['description'] = 'This application depends on another non-Free application.'; - $antifeatureDescription['UpstreamNonFree']['name'] = 'Upstream Non-Free'; - $antifeatureDescription['UpstreamNonFree']['description'] = 'The upstream source code is non-free.'; - - if(isset($antifeatureDescription[$antifeature])) { - return $antifeatureDescription[$antifeature]; - } - return array('name'=>$antifeature); - } - - - function get_apps($query_vars) { - - $xml = simplexml_load_file($this->site_path."/repo/index.xml"); - $matches = $this->show_apps($xml,$query_vars,$numpages); - - $out=''; - - if(($query_vars['fdfilter']===null || $query_vars['fdfilter']!='') && $numpages>0) - { - $out.='
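human_readable_size(), defined just above, repeatedly divides by 1024 until the value fits the largest prefix, with $minDiv forcing extra divisions for the APK size deltas shown next to each download. A standalone sketch of the same formatting for quick experimentation — human_size() is illustrative, and the loop bound is tightened by one so the index can never run past the prefix table:

function human_size($size, $minDiv = 0) {
    $si_prefix = array('bytes', 'kB', 'MB');
    $div = 1024;
    for ($i = 0; (abs($size) > $div && $i < count($si_prefix) - 1) || $i < $minDiv; $i++) {
        $size /= $div;
    }
    return round($size, max(0, $i - 1)) . ' ' . $si_prefix[$i];
}

echo human_size(2345678);  // "2.2 MB"
echo human_size(524);      // "524 bytes"
echo human_size(600, 1);   // "1 kB" -- minDiv=1 as used for the size deltas
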
      '; - if($query_vars['fdfilter']===null) { - - $categories = array('All categories'); - $handle = fopen(getenv('DOCUMENT_ROOT').'/repo/categories.txt', 'r'); - if ($handle) { - while (($buffer = fgets($handle, 4096)) !== false) { - $categories[] = rtrim($buffer); - } - fclose($handle); - } - - $out.='
      '; - $out.=$this->makeformdata($query_vars); - - $out.=''; - - $out.='
      '."\n"; - } - else { - $out.='Applications matching "'.sanitize_text_field($query_vars['fdfilter']).'"'; - } - $out.="
      "; - - $out.='
      '; - $out.='List | '; - $out.='Grid'; - $out.='
      '; - - $out.='
      '; - } - - if($numpages>0) { - $out.=$matches; - - $out.='

      '; - - $out.='

      '; - $out.=' Page '.$query_vars['fdpage'].' of '.$numpages.' '; - $out.='
      '; - - $out.='
      '; - if($numpages>1) { - for($i=1;$i<=$numpages;$i++) { - if($i == $query_vars['fdpage']) { - $out.=''.$i.''; - } else { - $out.=''; - $out.=$i; - $out.=''; - } - $out.=' '; - } - $out.=' '; - } - $out.='
      '; - - $out.='
      '; - if($query_vars['fdpage']!=$numpages) { - $out.='next> '; - } - $out.='
      '; - - $out.='

      '; - } else if($query_vars['fdfilter']!='') { - $out.='

      No matches

      '; - } - - return $out; - } - - - function makeformdata($query_vars) { - - $out=''; - - $out.=''; - foreach($query_vars as $name => $value) { - if($value !== null && $name != 'fdfilter' && !($name == 'fdpage' && (int)$value ==1)) - $out.=''; - } - - return $out; - } - - - function show_apps($xml,$query_vars,&$numpages) { - - $skipped=0; - $got=0; - $total=0; - - if($query_vars['fdstyle']=='grid') { - $outputter = new FDOutGrid(); - } else { - $outputter = new FDOutList(); - } - - $out = ""; - - $out.=$outputter->outputStart(); - - foreach($xml->children() as $app) { - - if($app->getName() == 'repo') continue; - $appinfo['attrs']=$app->attributes(); - $appinfo['id']=$appinfo['attrs']['id']; - foreach($app->children() as $el) { - switch($el->getName()) { - case "name": - $appinfo['name']=$el; - break; - case "icon": - $appinfo['icon']=$el; - break; - case "summary": - $appinfo['summary']=$el; - break; - case "desc": - $appinfo['description']=$el; - break; - case "license": - $appinfo['license']=$el; - break; - case "category": - $appinfo['category']=$el; - break; - } - } - - if(($query_vars['fdfilter']===null || $query_vars['fdfilter']!='' && (stristr($appinfo['name'],$query_vars['fdfilter']) || stristr($appinfo['summary'],$query_vars['fdfilter']) || stristr($appinfo['description'],$query_vars['fdfilter']))) && (!isset($query_vars['fdcategory']) || $query_vars['fdcategory'] && $query_vars['fdcategory']==$appinfo['category'])) { - if($skipped<($query_vars['fdpage']-1)*$outputter->perpage) { - $skipped++; - } else if($got<$outputter->perpage) { - $out.=$outputter->outputEntry($query_vars, $appinfo); - $got++; - } - $total++; - } - - } - - $out.=$outputter->outputEnd(); - - $numpages = ceil((float)$total/$outputter->perpage); - - return $out; - } -} - -// Class to output app entries in a detailed list format -class FDOutList -{ - var $perpage=30; - - function FDOutList() { - } - - function outputStart() { - return ''; - } - - function outputEntry($query_vars, $appinfo) { - $out=""; - $out.='
      '."\n"; - $out.=''; - $out.='
      '; - - $out.='
      '; - - $out.='
      '; - $out.='

      Details...

      '; - $out.="
      \n"; - - $out.='

      '.$appinfo['name'].""; - $out.="
      ".$appinfo['summary']."

      \n"; - - $out.="
      \n"; - $out.='
-        $out.='';
-
-        return $out;
-    }
-
-    function outputEnd() {
-        return '';
-    }
-}
-
-// Class to output app entries in a compact grid format
-class FDOutGrid
-{
-    var $perpage=80;
-
-    var $itemCount = 0;
-
-    function FDOutGrid() {
-    }
-
-    function outputStart() {
-        return "\n".''."\n";
-    }
-
-    function outputEntry($query_vars, $appinfo) {
-        $link=makelink($query_vars, array('fdid'=>$appinfo['id']));
-
-        $out='';
-
-        if($this->itemCount%4 == 0 && $this->itemCount > 0)
-        {
-            $out.=''."\n";
-        }
-
-        $out.='';
-        $out.='';
-        $out.='';
-        $out.='';
-        $out.="\n";
-
-        $this->itemCount++;
-        return $out;
-    }
-
-    function outputEnd() {
-        return ''."\n";
-    }
-}
-
-function permissions_cmp($a, $b) {
-    global $permissions_data;
-
-    $aProtectionLevel = $permissions_data['permission'][$a]['protectionLevel'];
-    $bProtectionLevel = $permissions_data['permission'][$b]['protectionLevel'];
-
-    if($aProtectionLevel != $bProtectionLevel) {
-        if(strlen($aProtectionLevel)==0) return 1;
-        if(strlen($bProtectionLevel)==0) return -1;
-
-        return strcmp($aProtectionLevel, $bProtectionLevel);
-    }
-
-    $aGroup = $permissions_data['permission'][$a]['permissionGroup'];
-    $bGroup = $permissions_data['permission'][$b]['permissionGroup'];
-
-    if($aGroup != $bGroup) {
-        return strcmp($aGroup, $bGroup);
-    }
-
-    return strcmp($a, $b);
-}
-
-// Make a link to this page, with the current query vars attached and desired params added/modified
-function makelink($query_vars, $params=array()) {
-    $link=get_permalink();
-
-    $p = array_merge($query_vars, $params);
-
-    // Page 1 is the default, don't clutter urls with it...
-    if($p['fdpage'] == 1)
-        unset($p['fdpage']);
-    // Likewise for list style...
-    if($p['fdstyle'] == 'list')
-        unset($p['fdstyle']);
-
-    $vars=linkify($p);
-    if(strlen($vars)==0)
-        return $link;
-    if(strpos($link,'?')===false)
-        $link.='?';
-    else
-        $link.='&';
-    return $link.$vars;
-}
-
-// Return the key value pairs in http-get-parameter format as a string
-function linkify($vars) {
-    $retvar = '';
-    foreach($vars as $k => $v) {
-        if($k!==null && $v!==null && $v!='')
-            $retvar .= $k.'='.$v.'&';
-    }
-    return substr($retvar,0,-1);
-}
-
-function widget_fdroidlatest($args) {
-    extract($args);
-    echo $before_widget;
-    echo $before_title . 'Latest Apps' . $after_title;
-
-    $handle = fopen(getenv('DOCUMENT_ROOT').'/repo/latestapps.dat', 'r');
-    if ($handle) {
-        while (($buffer = fgets($handle, 4096)) !== false) {
-            $app = explode("\t", $buffer);
-            echo '';
-            if(isset($app[2]) && trim($app[2])) {
-                echo '';
-            }
-            echo $app[1].'';
-            if(isset($app[3]) && trim($app[3])) {
-                echo ''.$app[3].'';
-            }
-            echo '';
-        }
-        fclose($handle);
-    }
-
-    echo $after_widget;
-}
-
-$wp_fdroid = new FDroid();
-
-
-?>