diff --git a/.bandit b/.bandit
deleted file mode 100644
index dc28620f..00000000
--- a/.bandit
+++ /dev/null
@@ -1,3 +0,0 @@
-[bandit]
-skips: B110,B404,B408,B603,B607,B322
-targets: .
diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index 43412092..00000000
--- a/.editorconfig
+++ /dev/null
@@ -1,15 +0,0 @@
-root = true
-
-[*]
-charset = utf-8
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-[**.py]
-indent_style = space
-indent_size = 4
-
-[.gitlab-ci.yml]
-indent_style = space
-indent_size = 2
diff --git a/.gitignore b/.gitignore
index ce3a0e9a..461fe410 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,17 +1,18 @@
+/config.py
+/makebs.config.py
 *~
 *.pyc
 *.class
 *.box
 TAGS
 .idea
-.ropeproject/

 # files generated by build
-/build/
-/dist/
+build/
+dist/
 env/
 ENV/
-/fdroidserver.egg-info/
+fdroidserver.egg-info/
 pylint.parseable
 /.testfiles/
 README.rst
@@ -19,19 +20,17 @@ README.rst

 # editor tmp files
 .*.swp
-.ropeproject/

 # files generated by tests
 tmp/
 /tests/repo/icons*
-/tests/repo/status
+/tests/repo/latestapps.dat

 # files used in manual testing
-/config.yml
+/config.py
 /tmp/
 /logs/
 /metadata/
-/makebs.config.py
 makebuildserver.config.py
 /tests/.fdroid.keypass.txt
 /tests/.fdroid.keystorepass.txt
@@ -41,39 +40,20 @@ makebuildserver.config.py
 /tests/OBBMainPatchCurrent.apk
 /tests/OBBMainTwoVersions.apk
 /tests/archive/categories.txt
-/tests/archive/diff/[1-9]*.json
-/tests/archive/entry.jar
-/tests/archive/entry.json
 /tests/archive/icons*
+/tests/archive/index.jar
+/tests/archive/index_unsigned.jar
+/tests/archive/index.xml
 /tests/archive/index-v1.jar
 /tests/archive/index-v1.json
-/tests/archive/index-v2.json
-/tests/archive/index.css
-/tests/archive/index.html
-/tests/archive/index.jar
-/tests/archive/index.png
-/tests/archive/index.xml
-/tests/archive/index_unsigned.jar
-/tests/metadata/org.videolan.vlc/en-US/icon*.png
-/tests/repo/diff/[1-9]*.json
-/tests/repo/index.css
-/tests/repo/index.html
 /tests/repo/index.jar
-/tests/repo/index.png
 /tests/repo/index_unsigned.jar
 /tests/repo/index-v1.jar
 /tests/repo/info.guardianproject.urzip/
 /tests/repo/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png
 /tests/repo/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png
-/tests/repo/obb.mainpatch.current/en-US/featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png
-/tests/repo/obb.mainpatch.current/en-US/icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png
-/tests/repo/org.videolan.vlc/en-US/icon_yAfSvPRJukZzMMfUzvbYqwaD1XmHXNtiPBtuPVHW-6s=.png
 /tests/urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234.apk
-/tests/virustotal/
 /unsigned/

 # generated by gettext
 locale/*/LC_MESSAGES/fdroidserver.mo
-
-# sphinx
-public/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 65510c45..1507e504 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,54 +1,12 @@
----
-# Use merge request pipelines when a merge request is open for the branch.
-# Use branch pipelines when a merge request is not open for the branch.
-# https://docs.gitlab.com/ci/yaml/workflow/#switch-between-branch-pipelines-and-merge-request-pipelines -workflow: - rules: - - if: $CI_PIPELINE_SOURCE == 'merge_request_event' - - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS - when: never - - if: $CI_COMMIT_BRANCH - - -stages: - - lint - - test # default for jobs that do not specify stage: - - deploy - - -variables: - pip: pip3 --timeout 100 --retries 10 - # speed up git checkout phase - GIT_DEPTH: 1 - - -# Run the whole test suite in an environment that is like the -# buildserver guest VM. This installs python3-babel because that is -# only used by the test suite, and not needed in the buildserver. -# -# Some extra packages are required for this test run that are not -# provided by the buildserver since they are not needed there: -# * python3-babel for compiling localization files -# * gnupg-agent for the full signing setup -# * python3-clint for fancy progress bars for users -# * python3-pycountry for linting config/mirrors.yml -buildserver run-tests: - image: registry.gitlab.com/fdroid/fdroidserver:buildserver +test: + image: registry.gitlab.com/fdroid/ci-images-server:latest script: - - apt-get update - - apt-get install gnupg-agent python3-babel python3-biplist python3-clint python3-pycountry - - ./tests/run-tests - # make sure that translations do not cause stacktraces - - cd $CI_PROJECT_DIR/locale - - for locale in *; do - test -d $locale || continue; - for cmd in `sed -n 's/.*("\(.*\)", *_.*/\1/p' $CI_PROJECT_DIR/fdroid`; do - LANGUAGE=$locale $CI_PROJECT_DIR/fdroid $cmd --help > /dev/null; - done - done + - pip3 install -e . + - cd tests + - ./complete-ci-tests -# Test that the parsing of the .yml metadata format didn't change from last +# Test that the parsing of the .txt format didn't change from last # released version. This uses the commit ID of the release tags, # rather than the release tag itself so that contributor forks do not # need to include the tags in them for this test to work. @@ -56,364 +14,145 @@ buildserver run-tests: # The COMMIT_ID should be bumped after each release, so that the list # of sed hacks needed does not continuously grow. metadata_v0: - image: registry.gitlab.com/fdroid/fdroidserver:buildserver + image: registry.gitlab.com/fdroid/ci-images-server:latest variables: - GIT_DEPTH: 1000 - RELEASE_COMMIT_ID: 50aa35772b058e76b950c01e16019c072c191b73 # after switching to `git rev-parse` + RELEASE_COMMIT_ID: 4655e2e24ebd043be6faa4adf552db391caf2be9 # 1.1a~ script: - - git fetch https://gitlab.com/fdroid/fdroidserver.git $RELEASE_COMMIT_ID + - git fetch https://gitlab.com/fdroid/fdroidserver $RELEASE_COMMIT_ID - cd tests - - export GITCOMMIT=$(git rev-parse HEAD) + - export GITCOMMIT=`git describe` - git checkout $RELEASE_COMMIT_ID - cd .. - - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git - - rm -f fdroiddata/config.yml # ignore config for this test + - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata - cd fdroiddata - ../tests/dump_internal_metadata_format.py - cd .. 
- git reset --hard - git checkout $GITCOMMIT - cd fdroiddata + - echo "accepted_formats = ('txt', 'yml')" >> config.py - ../tests/dump_internal_metadata_format.py - sed -i - -e '/ArchivePolicy:/d' - -e '/FlattrID:/d' - -e '/RequiresRoot:/d' + -e '/Liberapay:/d' + -e '/OpenCollective/d' metadata/dump_*/*.yaml - diff -uw metadata/dump_* -.apt-template: &apt-template +debian_buster: + image: debian:buster + script: + - apt-get -qy update + - apt-get -qy dist-upgrade + - apt-get -qy install --no-install-recommends + fdroidserver git gnupg python3-defusedxml python3-setuptools + - echo "deb http://deb.debian.org/debian sid main" >> /etc/apt/sources.list + - apt-get -qy update + - apt-get install -y --no-install-recommends aapt androguard android-platform-tools-base zipalign + - python3 -c 'import fdroidserver' + - python3 -c 'import androguard' + - export ANDROID_HOME=/usr/lib/android-sdk + - export LANG=C.UTF-8 + - cd tests + - ./run-tests + +# test using LTS set up with the PPA, including Recommends +ubuntu_lts: + image: ubuntu:latest + only: + - master@fdroid/fdroidserver + variables: + DEBIAN_FRONTEND: noninteractive + script: + - echo Etc/UTC > /etc/timezone + - apt-get -qy update + - apt-get -qy install gnupg + - while ! apt-key adv --keyserver hkp://pool.sks-keyservers.net --recv-key 9AAC253193B65D4DF1D0A13EEC4632C79C5E0151; do sleep 15; done + - export RELEASE=`sed -n 's,^deb [^ ][^ ]* \([a-z]*\).*,\1,p' /etc/apt/sources.list | head -1` + - echo "deb http://ppa.launchpad.net/fdroid/fdroidserver/ubuntu $RELEASE main" >> /etc/apt/sources.list + - apt-get -qy update + - apt-get -qy dist-upgrade + - apt-get -qy install --install-recommends fdroidserver git python3-defusedxml python3-setuptools + - export ANDROID_HOME=/usr/lib/android-sdk + - export LANG=C.UTF-8 + - cd tests + - ./run-tests + +# test using TrustyLTS with all depends from pypi +ubuntu_trusty_pip: + image: ubuntu:trusty + only: + - master@fdroid/fdroidserver variables: DEBIAN_FRONTEND: noninteractive LANG: C.UTF-8 - before_script: + script: - echo Etc/UTC > /etc/timezone - - echo 'APT::Install-Recommends "0";' - 'APT::Install-Suggests "0";' - 'APT::Get::Assume-Yes "true";' - 'Acquire::Retries "20";' - 'Dpkg::Use-Pty "0";' - 'quiet "1";' - >> /etc/apt/apt.conf.d/99gitlab - # Ubuntu and other distros often lack https:// support - - grep Debian /etc/issue.net - && { find /etc/apt/sources.list* -type f | xargs sed -i s,http:,https:, ; } - # The official Debian docker images ship without ca-certificates, - # TLS certificates cannot be verified until that is installed. The - # following code turns off TLS verification, and enables HTTPS, so - # at least unverified TLS is used for apt-get instead of plain - # HTTP. Once ca-certificates is installed, the CA verification is - # enabled by removing this config. This set up makes the initial - # `apt-get update` and `apt-get install` look the same as verified - # TLS to the network observer and hides the metadata. - - echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates - - apt-get update - - apt-get install ca-certificates - - rm /etc/apt/apt.conf.d/99nocacertificates - - apt-get dist-upgrade + - apt-get -qy update + - apt-get -qy dist-upgrade + - apt-get -qy install git default-jdk python3-pip python3.4-venv + - rm -rf env + - pyvenv-3.4 env + - . env/bin/activate + - echo sed -i "s/'requests.*',$/'requests',/" setup.py + - pip3 install --upgrade babel pip setuptools + - pip3 install -e . 
+ - ./setup.py compile_catalog + - ./tests/run-tests -# For jobs that only need to run when there are changes to Python files. -.python-rules-changes: &python-rules-changes - rules: - - changes: - - .gitlab-ci.yml - - fdroid - - makebuildserver - - setup.py - - fdroidserver/*.py - - tests/*.py - - -# Since F-Droid uses Debian as its default platform, from production -# servers to CI to contributor machines, it is important to know when -# changes in Debian break our stuff. This tests against the latest -# dependencies as they are included in Debian. -debian_testing: - image: debian:testing - <<: *apt-template - rules: - - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" +pip_install: + image: archlinux/base + only: + - master@fdroid/fdroidserver script: - - apt-get install - aapt - androguard - apksigner - dexdump - fdroidserver - git - gnupg - ipfs-cid - python3-biplist - python3-defusedxml - python3-libcloud - python3-pycountry - python3-setuptools - sdkmanager - - python3 -c 'import fdroidserver' - - python3 -c 'import androguard' - - python3 -c 'import sdkmanager' - - cd tests - - ./run-tests - - -# Test using latest LTS set up with the PPA, including Recommends. -ubuntu_lts_ppa: - image: ubuntu:latest - <<: *apt-template - rules: - - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" - script: - - export ANDROID_HOME=/usr/lib/android-sdk - - apt-get install gnupg - - while ! apt-key adv --keyserver keyserver.ubuntu.com --recv-key 9AAC253193B65D4DF1D0A13EEC4632C79C5E0151; do sleep 15; done - - export RELEASE=$(sed -n 's,^Suites\x3a \([a-z]*\).*,\1,p' /etc/apt/sources.list.d/*.sources | head -1) - - echo "deb http://ppa.launchpad.net/fdroid/fdroidserver/ubuntu $RELEASE main" >> /etc/apt/sources.list - - apt-get update - - apt-get dist-upgrade - - apt-get install --install-recommends - dexdump - fdroidserver - git - python3-biplist - python3-pycountry - python3-setuptools - sdkmanager - - # Test things work with a default branch other than 'master' - - git config --global init.defaultBranch thisisnotmasterormain - - - cd tests - - ./run-tests - - -# Test to see how rclone works with S3 -test_deploy_to_s3_with_rclone: - image: debian:bookworm-slim - <<: *apt-template - tags: - - saas-linux-small-amd64 # the shared runners are known to support Docker. - services: - - name: docker:dind - command: ["--tls=false"] - variables: - DOCKER_HOST: "tcp://docker:2375" - DOCKER_DRIVER: overlay2 - DOCKER_TLS_CERTDIR: "" - before_script: - # ensure minio is up before executing tests - - apt-get update - - apt-get install -y - androguard - apksigner - curl - docker.io - git - python3-venv - rclone - # This job requires working docker but will silently fail if docker is not available - - docker info - - python3 -m venv --system-site-packages test-venv - - . test-venv/bin/activate - - pip install testcontainers[minio] - - pip install . - script: - - python3 -m unittest -k test_update_remote_storage_with_rclone --verbose - rules: - - changes: - - .gitlab-ci.yml - - fdroidserver/deploy.py - - tests/test_deploy.py - - tests/test_integration.py - - -# Test using Ubuntu/jammy LTS (supported til April, 2027) with depends -# from pypi and sdkmanager. The venv is used to isolate the dist -# tarball generation environment from the clean install environment. 
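# A condensed sketch of the venv-isolation pattern described above, assuming
# only python3-venv and pip are available; the steps are the ones the job
# below runs, and the tarball glob is illustrative:
#
#   script:
#     - python3 -m venv sdist-env        # throwaway "release build" environment
#     - . sdist-env/bin/activate
#     - ./setup.py sdist                 # build the dist tarball inside the venv
#     - deactivate                       # drop back to the bare machine
#     - pip install dist/fdroidserver-*.tar.gz   # clean-install test of the tarball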
-ubuntu_jammy_pip: - image: ubuntu:jammy - <<: *apt-template - script: - - apt-get install git default-jdk-headless python3-pip python3-venv rsync - + - pacman --sync --sysupgrade --refresh --noconfirm grep python-pip python-virtualenv tar # setup venv to act as release build machine - - python3 -m venv sdist-env + - python -m venv sdist-env - . sdist-env/bin/activate - - ./setup.py sdist + - ./setup.py compile_catalog sdist - deactivate - - tar tzf dist/fdroidserver-*.tar.gz - + - tar tzf dist/fdroidserver-*.tar.gz | grep locale/de/LC_MESSAGES/fdroidserver.mo # back to bare machine to act as user's install machine - - export ANDROID_HOME=/opt/android-sdk - - $pip install sdkmanager - - sdkmanager 'build-tools;35.0.0' + - pip install dist/fdroidserver-*.tar.gz + - test -e /usr/share/locale/de/LC_MESSAGES/fdroidserver.mo + - fdroid + - fdroid readmeta + - fdroid update --help - # Install extras_require.optional from setup.py - - $pip install biplist pycountry - - - $pip install dist/fdroidserver-*.tar.gz - - tar xzf dist/fdroidserver-*.tar.gz - - cd fdroidserver-* - - export PATH=$PATH:$ANDROID_HOME/build-tools/35.0.0 - - fdroid=`which fdroid` ./tests/run-tests - - # check localization was properly installed - - LANGUAGE='de' fdroid --help | grep 'Gültige Befehle sind' - - -# Run all the various linters and static analysis tools. -hooks/pre-commit: - stage: lint - image: debian:bookworm-slim +lint_format_safety_bandit_checks: + image: alpine:3.7 variables: LANG: C.UTF-8 script: - - apt-get update - - apt-get -y install --no-install-recommends - bash - ca-certificates - dash - gcc - git - make - pycodestyle - pyflakes3 - python3-dev - python3-git - python3-nose - python3-pip - python3-yaml - - ./hooks/pre-commit - -bandit: - image: debian:bookworm-slim - <<: *python-rules-changes - <<: *apt-template - script: - - apt-get install python3-pip - - $pip install --break-system-packages bandit - - bandit -r -ii --ini .bandit - -pylint: - stage: lint - image: debian:bookworm-slim - <<: *python-rules-changes - <<: *apt-template - script: - - apt-get install pylint python3-pip - - $pip install --break-system-packages pylint-gitlab - - pylint --output-format=colorized,pylint_gitlab.GitlabCodeClimateReporter:pylint-report.json + - apk add --no-cache bash dash ca-certificates python3 + - python3 -m ensurepip + - pip3 install Babel bandit pycodestyle pyflakes 'pylint<2.0' safety + - export EXITVALUE=0 + - ./hooks/pre-commit || export EXITVALUE=1 + - bandit + -ii + -s B110,B310,B322,B404,B408,B410,B603,B607 + -x fdroidserver/dscanner.py,docker/install_agent.py,docker/drozer.py + -r $CI_PROJECT_DIR fdroid + || export EXITVALUE=1 + - safety check --full-report --ignore=38224 || export EXITVALUE=1 + - pylint --rcfile=.pylint-rcfile --output-format=colorized --reports=n fdroid makebuildserver setup.py fdroidserver/*.py tests/*.py - artifacts: - reports: - codequality: pylint-report.json - when: always - - -shellcheck: - stage: lint - image: debian:bookworm-slim - rules: - - changes: - - .gitlab-ci.yml - - hooks/install-hooks.sh - - hooks/pre-commit - - tests/run-tests - <<: *apt-template - script: - - apt-get install shellcheck - # TODO GitLab Code Quality report https://github.com/koalaman/shellcheck/issues/3155 - - shellcheck --exclude SC2046,SC2090 --severity=warning --color - hooks/install-hooks.sh - hooks/pre-commit - tests/run-tests - -# Check all the dependencies in Debian to mirror production. 
CVEs are -# generally fixed in the latest versions in pip/pypi.org, so it isn't -# so important to scan that kind of install in CI. -# https://docs.safetycli.com/safety-docs/installation/gitlab -safety: - image: debian:bookworm-slim - rules: - - if: $SAFETY_API_KEY - changes: - - .gitlab-ci.yml - - .safety-policy.yml - - pyproject.toml - - setup.py - <<: *apt-template - variables: - LANG: C.UTF-8 - script: - - apt-get install - fdroidserver - python3-biplist - python3-pip - python3-pycountry - - $pip install --break-system-packages . - - - $pip install --break-system-packages safety - - python3 -m safety --key "$SAFETY_API_KEY" --stage cicd scan - - -# TODO tests/*/*/*.yaml are not covered -yamllint: - stage: lint - image: debian:bookworm-slim - rules: - - changes: - - .gitlab-ci.yml - - .safety-policy.yml - - .yamllint - - tests/*.yml - - tests/*/*.yml - - tests/*/*/.*.yml - <<: *apt-template - variables: - LANG: C.UTF-8 - script: - - apt-get install yamllint - - yamllint - .gitlab-ci.yml - .safety-policy.yml - .yamllint - tests/*.yml - tests/*/*.yml - tests/*/*/.*.yml - - -locales: - stage: lint - image: debian:bookworm-slim - variables: - LANG: C.UTF-8 - script: - - apt-get update - - apt-get -y install --no-install-recommends - gettext - make - python3-babel - - export EXITVALUE=0 - - function set_error() { export EXITVALUE=1; printf "\x1b[31mERROR `history|tail -2|head -1|cut -b 6-500`\x1b[0m\n"; } - - make -C locale compile || set_error + tests/*.TestCase + || export EXITVALUE=1 + - apk add --no-cache gettext make + - make -C locale compile || export EXITVALUE=1 - rm -f locale/*/*/*.mo - - pybabel compile --domain=fdroidserver --directory locale 2>&1 | { grep -F "error:" && exit 1; } || true + - pybabel compile --domain=fdroidserver --directory locale 2>&1 | (grep -F "error:" && exit 1) || true - exit $EXITVALUE - -black: - stage: lint - image: debian:bookworm-slim - <<: *apt-template - script: - - apt-get install black - - black --check --diff --color $CI_PROJECT_DIR - fedora_latest: - image: fedora:39 # support ends on 2024-11-12 + image: fedora:latest + only: + - master@fdroid/fdroidserver script: # tricks to hopefully make runs more reliable - echo "timeout=600" >> /etc/dnf/dnf.conf @@ -421,434 +160,38 @@ fedora_latest: - echo "keepcache=True" >> /etc/dnf/dnf.conf - dnf -y update || dnf -y update - - dnf -y install @development-tools - diffutils - findutils + - dnf -y install findutils git gnupg - java-17-openjdk-devel - openssl + java-1.8.0-openjdk-devel python3 python3-babel - python3-matplotlib python3-pip - python3-pycountry rsync + unzip + wget which - - $pip install sdkmanager - - ./setup.py sdist + - ./setup.py compile_catalog sdist - useradd -m -c "test account" --password "fakepassword" testuser - - su testuser --login --command "cd `pwd`; $pip install --user dist/fdroidserver-*.tar.gz" + - su testuser --login --command "cd `pwd`; pip3 install --user dist/fdroidserver-*.tar.gz" - test -e ~testuser/.local/share/locale/de/LC_MESSAGES/fdroidserver.mo - - export BUILD_TOOLS_VERSION=`sed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py` + - wget --no-verbose -O tools.zip https://dl.google.com/android/repository/tools_r25.2.4-linux.zip + - unzip -q tools.zip + - rm tools.zip + - export AAPT_VERSION=`sed -n "s,^MINIMUM_AAPT_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py` + - export JAVA_HOME=/etc/alternatives/jre - export ANDROID_HOME=`pwd`/android-sdk + - mkdir $ANDROID_HOME + - mv tools $ANDROID_HOME/ - mkdir -p 
$ANDROID_HOME/licenses/ - - printf "\n8933bad161af4178b1185d1a37fbf41ea5269c55\nd56f5187479451eabf01fb78af6dfcb131a6481e\n24333f8a63b6825ea9c5514f83c2829b004d1fee" > $ANDROID_HOME/licenses/android-sdk-license + - printf "\n8933bad161af4178b1185d1a37fbf41ea5269c55\nd56f5187479451eabf01fb78af6dfcb131a6481e" > $ANDROID_HOME/licenses/android-sdk-license - printf "\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license - printf "\n79120722343a6f314e0719f863036c702b0e6b2a\n84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license-old - mkdir ~/.android - touch ~/.android/repositories.cfg - - sdkmanager "platform-tools" "build-tools;$BUILD_TOOLS_VERSION" + - echo y | $ANDROID_HOME/tools/bin/sdkmanager "platform-tools" + - echo y | $ANDROID_HOME/tools/bin/sdkmanager "build-tools;$AAPT_VERSION" - chown -R testuser . - cd tests - su testuser --login --command - "cd `pwd`; export CI=$CI ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests" - - -macOS: - tags: - - saas-macos-medium-m1 - rules: - - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" - script: - - export HOMEBREW_CURL_RETRIES=10 - - brew update > /dev/null - - brew upgrade - - brew install fdroidserver - - # Android SDK and Java JDK - - brew install --cask android-commandlinetools temurin # temurin is a JDK - - # test suite dependencies - - brew install bash coreutils gnu-sed - # TODO port tests/run-tests to POSIX and gsed, it has a couple GNU-isms like du --bytes - - export PATH="$(brew --prefix fdroidserver)/libexec/bin:$(brew --prefix coreutils)/libexec/gnubin:$PATH" - - - brew autoremove - - brew info fdroidserver - - - export BUILD_TOOLS_VERSION=`gsed -n "s,^MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py` - - export ANDROID_HOME="$(brew --prefix)/share/android-commandlinetools" - - mkdir -p "$ANDROID_HOME/licenses" - - echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55" > "$ANDROID_HOME/licenses/android-sdk-license" - - echo -e "\nd56f5187479451eabf01fb78af6dfcb131a6481e" >> "$ANDROID_HOME/licenses/android-sdk-license" - - echo -e "\n24333f8a63b6825ea9c5514f83c2829b004d1fee" >> "$ANDROID_HOME/licenses/android-sdk-license" - - $(brew --prefix)/bin/sdkmanager "build-tools;$BUILD_TOOLS_VERSION" - - - echo "macOS sticks with bash 3.x because of licenses, so avoid new bash syntax" - - /bin/bash --version - - /bin/bash -n tests/run-tests - - # test fdroidserver from git with current package's dependencies - - fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests - - -gradle: - image: debian:trixie-slim - <<: *apt-template - rules: - - changes: - - .gitlab-ci.yml - - makebuildserver - script: - - apt-get install - ca-certificates - git - python3-colorama - python3-packaging - python3-requests - - ./tests/gradle-release-checksums.py - - -# Run an actual build in a simple, faked version of the buildserver guest VM. 
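# In outline, that smoke test drives the two build modes with commands like
# these (the flags and app ids are the ones the job below uses; this is just
# the shape of the test, not an additional requirement):
#
#   script:
#     - fdroid build --verbose --latest org.fdroid.fdroid.privileged
#       # plain "user" build, as a contributor would run it
#     - fdroid build --verbose --on-server --no-tarball --latest org.fdroid.fdroid
#       # --on-server exercises the path the buildserver guest VM uses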
-fdroid build: - image: registry.gitlab.com/fdroid/fdroidserver:buildserver - rules: - - changes: - - .gitlab-ci.yml - - fdroidserver/build.py - - fdroidserver/common.py - - fdroidserver/exception.py - - fdroidserver/metadata.py - - fdroidserver/net.py - - fdroidserver/scanner.py - - fdroidserver/vmtools.py - # for the docker: job which depends on this one - - makebuildserver - - buildserver/* - cache: - key: "$CI_JOB_NAME" - paths: - - .gradle - script: - - apt-get update - - apt-get dist-upgrade - - apt-get clean - - - test -n "$fdroidserver" || source /etc/profile.d/bsenv.sh - - - ln -fsv "$CI_PROJECT_DIR" "$fdroidserver" - - # TODO remove sdkmanager install once it is included in the buildserver image - - apt-get install sdkmanager - - rm -rf "$ANDROID_HOME/tools" # TODO remove once sdkmanager can upgrade installed packages - - sdkmanager "tools" "platform-tools" "build-tools;31.0.0" - - - git ls-remote https://gitlab.com/fdroid/fdroiddata.git master - - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git - - cd fdroiddata - - for d in build logs repo tmp unsigned $home_vagrant/.android; do - test -d $d || mkdir $d; - chown -R vagrant $d; - done - - - export GRADLE_USER_HOME=$home_vagrant/.gradle - - export fdroid="sudo --preserve-env --user vagrant - env PATH=$fdroidserver:$PATH - env PYTHONPATH=$fdroidserver:$fdroidserver/examples - env PYTHONUNBUFFERED=true - env TERM=$TERM - env HOME=$home_vagrant - fdroid" - - - git -C $home_vagrant/gradlew-fdroid pull - - - chown -R vagrant $home_vagrant - - chown -R vagrant $fdroidserver/.git - - chown vagrant $fdroidserver/ - - chown -R vagrant .git - - chown vagrant . - - # try user build - - $fdroid build --verbose --latest org.fdroid.fdroid.privileged - - # try on-server build - - $fdroid build --verbose --on-server --no-tarball --latest org.fdroid.fdroid - - # each `fdroid build --on-server` run expects sudo, then uninstalls it - - if dpkg --list sudo; then echo "sudo should not be still there"; exit 1; fi - - 'if [ ! -f repo/status/running.json ]; then echo "ERROR: running.json does not exist!"; exit 1; fi' - - 'if [ ! -f repo/status/build.json ]; then echo "ERROR: build.json does not exist!"; exit 1; fi' - - -# test the plugin API and specifically the fetchsrclibs plugin, which -# is used by the `fdroid build` job. This uses a fixed commit from -# fdroiddata because that one is known to work, and this is a CI job, -# so it should be isolated from the normal churn of fdroiddata. -plugin_fetchsrclibs: - image: debian:bookworm-slim - <<: *apt-template - rules: - - changes: - - .gitlab-ci.yml - - examples/fdroid_fetchsrclibs.py - - fdroidserver/__main__.py - script: - - apt-get install - curl - git - python3-cffi - python3-matplotlib - python3-nacl - python3-paramiko - python3-pil - python3-pip - python3-pycparser - python3-venv - - python3 -m venv --system-site-packages env - - . env/bin/activate - - export PATH="$CI_PROJECT_DIR:$PATH" - - export PYTHONPATH="$CI_PROJECT_DIR/examples" - # workaround https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1003252 - - export SETUPTOOLS_USE_DISTUTILS=stdlib - - $pip install -e . 
- - fdroid | grep fetchsrclibs - - - mkdir fdroiddata - - commitid=b9e9a077d720c86ff6fff4dbb341254cc4370b1a - - curl https://gitlab.com/fdroid/fdroiddata/-/archive/${commitid}/fdroiddata-${commitid}.tar.gz - | tar -xz --directory=fdroiddata --strip-components=1 - - cd fdroiddata - - fdroid fetchsrclibs freemap.opentrail:4 --verbose - - test -d build/freemap.opentrail/.git - - test -d build/srclib/andromaps/.git - - test -d build/srclib/freemaplib/.git - - test -d build/srclib/freemaplibProj/.git - - test -d build/srclib/JCoord/.git - - test -d build/srclib/javaproj/.git - - -# test a full update and deploy cycle to gitlab.com -servergitmirrors: - image: debian:bookworm-slim - <<: *apt-template - rules: - - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" - script: - - apt-get install - default-jdk-headless - git - openssh-client - openssl - python3-cffi - python3-cryptography - python3-matplotlib - python3-nacl - python3-pil - python3-pip - python3-pycparser - python3-setuptools - python3-venv - rsync - wget - - apt-get install apksigner - - python3 -m venv --system-site-packages env - - . env/bin/activate - - export PYTHONPATH=`pwd` - - export SETUPTOOLS_USE_DISTUTILS=stdlib # https://github.com/pypa/setuptools/issues/2956 - - $pip install -e . - - mkdir /root/.ssh/ - - ./tests/key-tricks.py - - ssh-keyscan gitlab.com >> /root/.ssh/known_hosts - - test -d /tmp/fdroid/repo || mkdir -p /tmp/fdroid/repo - - cp tests/config.yml tests/keystore.jks /tmp/fdroid/ - - cp tests/repo/com.politedroid_6.apk /tmp/fdroid/repo/ - - cd /tmp/fdroid - - touch fdroid-icon.png - - printf "\nservergitmirrors\x3a 'git@gitlab.com:fdroid/ci-test-servergitmirrors-repo.git'\n" >> config.yml - - $PYTHONPATH/fdroid update --verbose --create-metadata - - $PYTHONPATH/fdroid deploy --verbose - - export DLURL=`grep -Eo 'https://gitlab.com/fdroid/ci-test-servergitmirrors-repo[^"]+' repo/index-v1.json` - - echo $DLURL - - wget $DLURL/index-v1.jar - - diff repo/index-v1.jar index-v1.jar - -Build documentation: - image: debian:bookworm-slim - <<: *python-rules-changes - <<: *apt-template - script: - - apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver - - apt purge fdroidserver - # ignore vendored files - - pydocstyle --verbose --match='(?!apksigcopier|looseversion|setup|test_).*\.py' fdroidserver - - cd docs - - sphinx-apidoc -o ./source ../fdroidserver -M -e - - PYTHONPATH=.. sphinx-autogen -o generated source/*.rst - - PYTHONPATH=.. make html - artifacts: - paths: - - docs/build/html/ - - -# this job will only run in branches called "windows" until the Windows port is complete -Windows: - tags: - - windows - rules: - - if: $CI_COMMIT_BRANCH == "windows" - script: - - Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1" - - choco install --no-progress -y git --force --params "/GitAndUnixToolsOnPath" - - choco install --no-progress -y python3 --version=3.10 - - choco install --no-progress -y jdk8 - - choco install --no-progress -y rsync - - refreshenv - - python -m pip install --upgrade babel pip setuptools - - python -m pip install -e . 
- - - $files = @(Get-ChildItem tests\test_*.py) - - foreach ($f in $files) { - write-output $f; - python -m unittest $f; - if( $LASTEXITCODE -eq 0 ) { - write-output "SUCCESS $f"; - } else { - write-output "ERROR $f failed"; - } - } - - # these are the tests that must pass - - python -m unittest -k - checkupdates - exception - import_subcommand - test_lint - test_metadata - test_rewritemeta - test_vcs - tests.test_init - tests.test_main - after_script: - - Copy-Item C:\ProgramData\chocolatey\logs\chocolatey.log - artifacts: - when: always - paths: - - "*.log" - allow_failure: - exit_codes: 1 - - -pages: - image: alpine:latest - stage: deploy - script: - - cp docs/build/html public -r # GL Pages needs the files in a directory named "public" - artifacts: - paths: - - public - needs: - - job: "Build documentation" - optional: true - rules: - - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # only publish pages on default (master) branch - - -# This job pushes the official CI docker image based on the master -# branch, so in fdroid/fdroidserver, it should only run on the master -# branch. Otherwise, tags or other branches will overwrite the docker -# image which is supposed to be what is in master. -docker: - dependencies: - - fdroid build - rules: - - if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver" - changes: - - .gitlab-ci.yml - - makebuildserver - - buildserver/* - image: docker:dind - services: - - docker:dind - variables: - RELEASE_IMAGE: $CI_REGISTRY_IMAGE:buildserver - script: - # git ref names can contain many chars that are not allowed in docker tags - - export TEST_IMAGE=$CI_REGISTRY_IMAGE:$(printf $CI_COMMIT_REF_NAME | sed 's,[^a-zA-Z0-9_.-],_,g') - - cd buildserver - - docker build -t $TEST_IMAGE --build-arg GIT_REV_PARSE_HEAD=$(git rev-parse HEAD) . - - docker tag $TEST_IMAGE $RELEASE_IMAGE - - docker tag $TEST_IMAGE ${RELEASE_IMAGE}-bookworm - - echo $CI_JOB_TOKEN | docker login -u gitlab-ci-token --password-stdin registry.gitlab.com - # This avoids filling up gitlab.com free tier accounts with unused docker images. - - if test -z "$FDROID_PUSH_DOCKER_IMAGE"; then - echo "Skipping docker push to save quota on your gitlab namespace."; - echo "If you want to enable the push, set FDROID_PUSH_DOCKER_IMAGE in"; - echo "https://gitlab.com/$CI_PROJECT_NAMESPACE/fdroidserver/-/settings/ci_cd#js-cicd-variables-settings"; - exit 0; - fi - - docker push $RELEASE_IMAGE - - docker push $RELEASE_IMAGE-bookworm - - -# PUBLISH is the signing server. It has a very minimal manual setup. -PUBLISH: - image: debian:bookworm-backports - <<: *python-rules-changes - script: - - apt-get update - - apt-get -qy upgrade - - apt-get -qy install --no-install-recommends -t bookworm-backports - androguard - apksigner - curl - default-jdk-headless - git - gpg - gpg-agent - python3-asn1crypto - python3-defusedxml - python3-git - python3-ruamel.yaml - python3-yaml - rsync - - # Run only relevant parts of the test suite, other parts will fail - # because of this minimal base setup. 
- - python3 -m unittest - tests/test_gpgsign.py - tests/test_metadata.py - tests/test_publish.py - tests/test_signatures.py - tests/test_signindex.py - - - cd tests - - mkdir archive - - mkdir unsigned - - cp urzip-release-unsigned.apk unsigned/info.guardianproject.urzip_100.apk - - grep '^key.*pass' config.yml | sed 's,\x3a ,=,' > $CI_PROJECT_DIR/variables - - sed -Ei 's,^(key.*pass|keystore)\x3a.*,\1\x3a {env\x3a \1},' config.yml - - printf '\ngpghome\x3a {env\x3a gpghome}\n' >> config.yml - - | - tee --append $CI_PROJECT_DIR/variables < FestplattenSchnitzel -Hans-Christoph Steiner diff --git a/.pylint-rcfile b/.pylint-rcfile new file mode 100644 index 00000000..4685d7f4 --- /dev/null +++ b/.pylint-rcfile @@ -0,0 +1,45 @@ +[MASTER] + +# Use multiple processes to speed up Pylint. +jobs=4 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence=HIGH,INFERENCE + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=invalid-name,missing-docstring,no-member + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[BASIC] + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_,e,f,fp + + +[ELIF] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + diff --git a/.safety-policy.yml b/.safety-policy.yml deleted file mode 100644 index ea44e7e6..00000000 --- a/.safety-policy.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- - -version: '3.0' - -scanning-settings: - max-depth: 6 - exclude: - -report: - dependency-vulnerabilities: - enabled: true - auto-ignore-in-report: - vulnerabilities: - 52495: - reason: setuptools comes from Debian - expires: '2025-01-31' - 60350: - reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40267 - expires: '2025-01-31' - 60789: - reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-40590 - expires: '2025-01-31' - 60841: - reason: GitPython comes from Debian https://security-tracker.debian.org/tracker/CVE-2023-41040 - expires: '2025-01-31' - 62044: - reason: "F-Droid doesn't fetch pip dependencies directly from hg/mercurial repositories: https://data.safetycli.com/v/62044/f17/" - expires: '2025-01-31' - 63687: - reason: Only affects Windows https://security-tracker.debian.org/tracker/CVE-2024-22190 - expires: '2026-01-31' - 67599: - reason: Only affects pip when using --extra-index-url, which is never the case in fdroidserver CI. - expires: '2026-05-31' - 70612: - reason: jinja2 is not used by fdroidserver, nor any dependencies I could find via debtree and pipdeptree. 
- expires: '2026-05-31' - 72132: - reason: We get these packages from Debian, zipp is not used in production, and its only a DoS. - expires: '2026-08-31' - 72236: - reason: setuptools is not used in production to download or install packages, they come from Debian. - expires: '2026-08-31' - -fail-scan-with-exit-code: - dependency-vulnerabilities: - enabled: true - fail-on-any-of: - cvss-severity: - - critical - - high - - medium - -security-updates: - dependency-vulnerabilities: diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..97288607 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,109 @@ + +# Use the Android base system since it provides the SDK, etc. +language: java + +matrix: + include: + - os: linux + language: android + - os: osx + osx_image: xcode9.3 + env: ANDROID_SDK_ROOT=/usr/local/share/android-sdk + env: ANDROID_HOME=/usr/local/share/android-sdk + - os: osx + osx_image: xcode9.2 + env: ANDROID_SDK_ROOT=/usr/local/share/android-sdk + env: ANDROID_HOME=/usr/local/share/android-sdk + - os: osx + osx_image: xcode8.3 + env: ANDROID_SDK_ROOT=/usr/local/share/android-sdk + env: ANDROID_HOME=/usr/local/share/android-sdk + +# On Ubuntu/trusty 14.04, the PPA is needed on to provide lots of the +# dependencies, but this then also serves as a test of the PPA, which +# is used on Windows Subsystem for Linux. +addons: + apt: + update: true + sources: + - sourceline: 'ppa:fdroid/fdroidserver' + packages: + - python3-babel + - python3-defusedxml + - python3-setuptools + - fdroidserver + +android: + components: + - android-23 # required for `fdroid build` test + - build-tools-27.0.3 # required for `fdroid build` test + licenses: + - 'android-sdk-preview-.+' + - 'android-sdk-license-.+' + +# * ensure java8 is installed since Android SDK doesn't work with Java9 +# * Java needs to be at least 1.8.0_131 to have MD5 properly disabled +# https://blogs.oracle.com/java-platform-group/oracle-jre-will-no-longer-trust-md5-signed-code-by-default +# https://opsech.io/posts/2017/Jun/09/openjdk-april-2017-security-update-131-8u131-and-md5-signed-jars.html +install: + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then + echo "Skipping Uyghur locale, this has too old a gettext to support it"; + rm -rf locale/ug; + fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then + set -x; + brew update > /dev/null; + if [ "`sw_vers -productVersion | sed 's,10\.\([0-9]*\).*,\1,'`" -gt 10 ]; then + brew upgrade python; + else + brew install python3; + fi; + brew install dash bash gnu-sed gradle jenv; + export PATH="/usr/local/opt/gnu-sed/libexec/gnubin:$PATH"; + if ! 
ruby -e 'v = `javac -version 2>&1`.split()[1].gsub("_", "."); exit Gem::Dependency.new("", "~> 1.8.0.131").match?("", v)'; then + brew cask uninstall java --force; + brew cask install caskroom/versions/java8; + fi; + brew cask install android-sdk; + + export AAPT_VERSION=`sed -n "s,^MINIMUM_AAPT_VERSION\s*=\s*['\"]\(.*\)[['\"],\1,p" fdroidserver/common.py`; + mkdir -p "$ANDROID_HOME/licenses"; + echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55" > "$ANDROID_HOME/licenses/android-sdk-license"; + echo -e "\nd56f5187479451eabf01fb78af6dfcb131a6481e" >> "$ANDROID_HOME/licenses/android-sdk-license"; + echo -e "\n84831b9409646a918e30573bab4c9c91346d8abd" > "$ANDROID_HOME/licenses/android-sdk-preview-license"; + echo y | $ANDROID_HOME/tools/bin/sdkmanager "platform-tools"; + echo y | $ANDROID_HOME/tools/bin/sdkmanager "build-tools;$AAPT_VERSION"; + echo y | $ANDROID_HOME/tools/bin/sdkmanager "platforms;android-23"; + + sudo pip3 install babel; + sudo pip3 install --quiet --editable . ; + sudo rm -rf fdroidserver.egg-info; + + ls -l /System/Library/Java/JavaVirtualMachines || true; + ls -l /Library/Java/JavaVirtualMachines || true; + echo $PATH; + echo $JAVA_HOME; + jenv versions; + /usr/libexec/java_home; + java -version; + which java; + javac -version; + which javac; + jarsigner -help; + which jarsigner; + keytool -help; + which keytool; + set +x; + fi + +# The OSX tests seem to run slower, they often timeout. So only run +# the test suite with the installed version of fdroid. +# +# Supporting pip on Ubuntu/trusty was too painful here, since it seems +# that pip installs conflict with the Ubuntu packages. +script: + - ./tests/run-tests + +after_failure: + - cd $TRAVIS_BUILD_DIR + - ls -lR | curl -F 'clbin=<-' https://clbin.com diff --git a/.vscode/extensions.json b/.vscode/extensions.json deleted file mode 100644 index f0fec078..00000000 --- a/.vscode/extensions.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "recommendations": [ - "ms-python.python", - ] -} diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index da31cd7f..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "python.formatting.blackArgs": [ - "--config=pyproject.toml" - ], - "python.formatting.provider": "black", - "python.linting.banditEnabled": true, - "python.linting.banditArgs": [ - "-ii", - "--ini=.bandit", - ], - "python.linting.enabled": true, - "python.linting.mypyArgs": [ - "--config-file=mypy.ini" - ], - "python.linting.mypyEnabled": true, - "python.linting.flake8Enabled": true, - "python.linting.pylintArgs": [ - "--rcfile=.pylint-rcfile" - ], - "python.linting.pylintEnabled": true, -} diff --git a/.weblate b/.weblate deleted file mode 100644 index cf2e653f..00000000 --- a/.weblate +++ /dev/null @@ -1,3 +0,0 @@ -[weblate] -url = https://hosted.weblate.org/api/ -translation = f-droid/fdroidserver diff --git a/.well-known/funding-manifest-urls b/.well-known/funding-manifest-urls deleted file mode 100644 index 9935b4d4..00000000 --- a/.well-known/funding-manifest-urls +++ /dev/null @@ -1 +0,0 @@ -https://f-droid.org/funding.json diff --git a/.yamllint b/.yamllint deleted file mode 100644 index 067a389e..00000000 --- a/.yamllint +++ /dev/null @@ -1,7 +0,0 @@ ---- - -extends: default -rules: - document-start: disable - line-length: disable - truthy: disable diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b61a8f2..27fec605 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,458 +1,49 @@ -# Changelog -All notable changes to this project will be documented in this file. 
+### 1.1.4 (2019-08-15) -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - -## [2.5.0] - NEXT - -### Removed - -* deploy: `awsaccesskeyid:` and `awssecretkey:` config items removed, use the - standard env vars: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. - -## [2.4.2] - 2025-06-24 - -### Fixed - -* nightly: fix bug that clones nightly repo to wrong location - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1672 -* Sync translations for all supported languages: es pl ru - -## [2.4.1] - 2025-06-23 - -### Added - -* build: Clearer error messages when working with Git. -* verify: generate .json files that list all reports - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1632 - -### Fixed - -* deploy: use master branch when working complete git-mirror repo - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1666 -* update: use ctime/mtime to control _strip_and_copy_image runs - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1665 -* update: If categories.yml only has icon:, then add name: - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1659 -* update: fix handling of Triple-T 1.0.0 graphics - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1652 -* update: never execute any VCS e.g. git - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1630 -* config: lazyload environment variables in config.yml - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1645 -* config: make localized name/description/icon optional - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1649 -* lint: add repo_key_sha256 to list of valid config keys - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1643 -* build: calculate all combinations of gradle flavors - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1638 -* build: set SOURCE_DATE_EPOCH from app's git otherwise fdroiddata metadata file - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1653 -* Sync translations for all supported languages: ca cs de fr ga ja pl pt pt_BR - pt_PT ru sq tr uk zh_Hans - -### Removed - -## [2.4.0] - 2025-03-25 - -### Added - -* lint: support the base _config.yml_. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1606 - -### Fixed - -* Expand {env: foo} config syntax to be allowed any place a string is. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1610 -* Only show "unsafe permissions on config.yml" when secrets are present. -* Standardized config files on ruamel.yaml with a YAML 1.2 data format. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1611 -* Brought back error when a package has multiple package types (e.g. xapk and - apk). https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1602 -* Reworked test suite to be entirely based on Python unittest (thanks @mindston). - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1587 -* publish/signindex/gpgsign no longer load the _qrcode_ and _requests_ modules, - and can operate without them installed. -* scanner: add bun.lock as lock file of package.json - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1615 -* index: fail if user sets mirrors:isPrimary wrong - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1617 - https://gitlab.com/fdroid/fdroidserver/-/issues/1125 -* Sync translations for all supported languages: bo ca cs de es fr ga hu it ja - ko nb_NO pl pt pt_BR pt_PT ro ru sq sr sw tr uk zh_Hans zh_Hant - -### Removed - -* checkupdates: remove auto_author: config, it is no longer used. 
-* Purge support for the long-deprecated _config.py_ config file. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1607 - - -## [2.3.5] - 2025-01-20 - -### Fixed - -* Fix issue where APKs with v1-only signatures and targetSdkVersion < 30 could - be maliciously crafted to bypass AllowedAPKSigningKeys - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1588 -* Ignore apksigner v33.x, it has bugs verifying APKs with v3/v3.1 sigs. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1593 -* Sync translations for: ca cs de es fr ga ja pt_BR pt_PT ru sq sr uk zh_Hans - -## [2.3.4] - 2024-12-12 - -### Fixed - -* Fix localhost network tests on systems with IPv6. -* lint: only error out on missing extlib on versions not archived. - -## [2.3.3] - 2024-12-11 - -### Added - -* verify: `--clean-up-verified` to delete files used when verifying an APK if - the verification was successful. - -### Fixed - -* Support Python 3.13 in the full test suite. -* Sync translations for: ca de fr ja pl ro ru sr ta -* update: only generate _index.png_ when making _index.html_, allowing the repo - operator to set a different repo icon, e.g. not the QR Code. - -## [2.3.2] - 2024-11-26 - -### Fixed - -* install: fix downloading from GitHub Releases and Maven Central. -* Sync translations for: ca fa fr pt ru sr ta zh_Hant - -## [2.3.1] - 2024-11-25 - -### Fixed - -* Sync all translations for: cs de es fr ga pt_BR ru sq zh_Hans. -* Drop use of deprecated imghdr library to support Python 3.13. -* Install biplist and pycountry by default on macOS. -* Fixed running test suite out of dist tarball. - -## [2.3.0] - 2024-11-21 - -### Added - -* YAML 1.2 as native format for all _.yml_ files, including metadata and config. -* install: will now fetch _F-Droid.apk_ and install it via `adb`. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1546 -* scanner: scan APK Signing Block for known block types like Google Play - Signature aka "Frosting". - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1555 -* Support Rclone for deploying to many different cloud services. -* deploy: support deploying to GitHub Releases. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1471 -* scanner: support libs.versions.toml - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1526 -* Consider subdir for triple-t metadata discovery in Flutter apps. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1541 -* deploy: added `index_only:` mode for mirroring the index to small hosting - locations. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1420 -* Support publishing repos in AltStore format. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1465 -* Support indexing iOS IPA app files. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1413 -* deploy: _config/mirrors.yml_ file with support for adding per-mirror metadata, - like `countryCode:`. -* Repo's categories are now set in the config files. -* lint: check syntax of config files. -* publish: `--error-on-failed` to exit when signing/verifying fails. -* scanner: `--refresh` and `refresh_config:` to control triggering a refresh of - the rule sets. -* Terminal output colorization and `--color` argument to control it. -* New languages: Catalan (ca), Irish (ga), Japanese (ja), Serbian (sr), and - Swahili (sw). -* Support donation links from `community_bridge`, `buy_me_a_coffee`. - -### Fixed - -* Use last modified time and file size for caching data about scanned APKs - instead of SHA-256 checksum. 
- https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1542 -* `repo_web_base_url:` config for generating per-app URLs for viewing in - browsers. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1178 -* `fdroid scanner` flags WebAssembly binary _.wasm_ files. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1562 -* Test suite as standard Python `unittest` setup (thanks @ghost.adh). -* scanner: error on dependency files without lock file. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1504 -* nightly: finding APKs in the wrong directory. (thanks @WrenIX) - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1512 -* `AllowedAPKSigningKeys` works with all single-signer APK signatures. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1466 -* Sync all translations for: cs de it ko pl pt pt_BR pt_PT ro ru sq tr uk - zh_Hans zh_Hant. -* Support Androguard 4.x. -* Support Python 3.12. - -### Removed - -* Drop all uses of _stats/known_apks.txt_ and the `update_stats:` config key. - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1547 -* The `maven:` field is now always a string, with `yes` as a legacy special - value. It is no longer treated like a boolean in any case. -* scanner: jcenter is no longer an allowed Maven repo. -* build: `--reset-server` removed (thanks @gotmi1k). - -## [2.2.2] - 2024-04-24 - -### Added - -* Include sdkmanager as dep in setup.py for Homebrew package. - https://github.com/Homebrew/homebrew-core/pull/164510 - -## [2.2.1] - 2023-03-09 - -### Added - -* `download_repo_index_v2()` and `download_repo_index_v2()` API functions - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1323 - -### Fixed - -* Fix OpenJDK detection on different CPU architectures - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1315 - -### Removed - -* Purge all references to `zipalign`, that is delegated to other things - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1316 -* Remove obsolete, unused `buildozer` build type - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1322 - -## [2.2.0] - 2023-02-20 - -### Added -* Support index-v2 format, localizable Anti-Features, Categories -* New entry point for repos, entry.jar, signed with modern algorithms -* New config/ subdirectory for localizable configuration -* Script entries in metadata files (init, prebuild, build, etc) now handled as - lists so they now support using && or ; in the script, and behave like - .gitlab-ci.yml and other CI YAML. 
-* GPG signatures for index-v1.json and index-v2.json -* Use default.txt as fallback changelog when inserting fastlane metadata -* scanner: F-Droid signatures now maintained in fdroid/suss -* scanner: maintain signature sources in config.yml, including Exodus Privacy -* scanner: use dexdump for class names -* scanner: directly scan APK files when given a path -* scanner: recursively scan APKs for DEX and ZIP using file magic -* signindex: validate index files before signing -* update: set ArchivePolicy based on VercodeOperation/signature -* Include IPFS CIDv1 in index-v2.json for hosting repos on IPFS -* Per-repo beta channel configuration -* Add Czech translation - -### Fixed - -* apksigner v30 or higher now required for verifying and signing APKs -* 3.9 as minimum supported Python version -* Lots of translation updates -* Better pip packaging -* nightly: big overhaul for reliable operation on all Debian/Ubuntu versions -* Improved logging, fewer confusing verbose messages -* scanner: fix detection of binary files without extension -* import: more reliable operation, including Flutter apps -* Support Java 20 and up - -### Removed -* Remove obsolete `fdroid stats` command - -## [2.1.1] - 2022-09-06 - -* gradlew-fdroid: Include latest versions and checksums -* nightly: update Raw URLs to fix breakage and avoid redirects -* signindex: gpg-sign index-v1.json and deploy it -* update: fix --use-date-from-apk when used with files (#1012) - -## [2.1] - 2022-02-22 - -For a more complete overview, see the [2.1 -milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/11) - -## [2.0.5] - 2022-09-06 - -### Fixed - -* gradlew-fdroid: Include latest versions and checksums -* nightly: add support for GitHub Actions -* nightly: update Raw URLs to fix breakage and avoid redirects -* update: fix --use-date-from-apk when used with files (#1012) -* Fix GitLab CI - -## [2.0.4] - 2022-06-29 - -### Fixed - -* deploy: ensure progress is instantiated before trying to use it -* signindex: gpg-sign index-v1.json and deploy it - [1080](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1080) - [1124](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1124) - -## [2.0.3] - 2021-07-01 - -### Fixed - -* Support AutoUpdateMode: Version without pattern - [931](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/931) - -## [2.0.2] - 2021-06-01 - -### Fixed - -* fix "ruamel round_trip_dump will be removed" - [932](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/932) - -## [2.0.1] - 2021-03-09 - -### Fixed - -* metadata: stop setting up source repo when running lint/rewritemeta -* scanner: show error if scan_binary fails to run apkanalyzer -* common: properly parse version from NDK's source.properties -* update: stop extracting and storing XML icons, they're useless -* index: raise error rather than crash on bad repo file -* update: handle large, corrupt, or inaccessible fastlane/triple-t files -* Update SPDX License List -* checkupdates: set User-Agent to make gitlab.com happy -* Run push_binary_transparency only once - -## [2.0] - 2021-01-31 - -For a more complete overview, see the [2.0 -milestone](https://gitlab.com/fdroid/fdroidserver/-/milestones/10) - -### Added -* `fdroid update` inserts donation links based on upstream's _FUNDING.yml_ - ([!754](https://gitlab.com/fdroid/fdroidserver/merge_requests/754)) -* Stable, public API for most useful functions - ([!798](https://gitlab.com/fdroid/fdroidserver/merge_requests/798)) -* Load with any YAML lib and use with the API, no more custom parser 
needed - ([!826](https://gitlab.com/fdroid/fdroidserver/merge_requests/826)) - ([!838](https://gitlab.com/fdroid/fdroidserver/merge_requests/838)) -* _config.yml_ for a safe, easy, standard configuration format - ([!663](https://gitlab.com/fdroid/fdroidserver/merge_requests/663)) -* Config options can be set from environment variables using this syntax: - `keystorepass: {env: keystorepass}` - ([!669](https://gitlab.com/fdroid/fdroidserver/merge_requests/669)) -* Add SHA256 to filename of repo graphics - ([!669](https://gitlab.com/fdroid/fdroidserver/merge_requests/669)) -* Support for srclibs metadata in YAML format - ([!700](https://gitlab.com/fdroid/fdroidserver/merge_requests/700)) -* Check srclibs and app-metadata files with yamllint - ([!721](https://gitlab.com/fdroid/fdroidserver/merge_requests/721)) -* Added plugin system for adding subcommands to `fdroid` - ([!709](https://gitlab.com/fdroid/fdroidserver/merge_requests/709)) -* `fdroid update`, `fdroid publish`, and `fdroid signindex` now work - with SmartCard HSMs, specifically the NitroKey HSM - ([!779](https://gitlab.com/fdroid/fdroidserver/merge_requests/779)) - ([!782](https://gitlab.com/fdroid/fdroidserver/merge_requests/782)) -* `fdroid update` support for Triple-T Gradle Play Publisher v2.x - ([!683](https://gitlab.com/fdroid/fdroidserver/merge_requests/683)) -* Translated into: bo de es fr hu it ko nb_NO pl pt pt_BR pt_PT ru sq tr uk - zh_Hans zh_Hant - -### Fixed -* Smoother process for signing APKs with `apksigner` - ([!736](https://gitlab.com/fdroid/fdroidserver/merge_requests/736)) - ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821)) -* `apksigner` is used by default on new repos -* All parts except _build_ and _publish_ work without the Android SDK - ([!821](https://gitlab.com/fdroid/fdroidserver/merge_requests/821)) -* Description: is now passed to clients unchanged, no HTML conversion - ([!828](https://gitlab.com/fdroid/fdroidserver/merge_requests/828)) -* Lots of improvements for scanning for proprietary code and trackers - ([!748](https://gitlab.com/fdroid/fdroidserver/merge_requests/748)) - ([!REPLACE](https://gitlab.com/fdroid/fdroidserver/merge_requests/REPLACE)) - ([!844](https://gitlab.com/fdroid/fdroidserver/merge_requests/844)) -* `fdroid mirror` now generates complete, working local mirror repos -* fix build-logs dissapearing when deploying - ([!685](https://gitlab.com/fdroid/fdroidserver/merge_requests/685)) -* do not crash when system encoding can not be retrieved - ([!671](https://gitlab.com/fdroid/fdroidserver/merge_requests/671)) -* checkupdates: UpdateCheckIngore gets properly observed now - ([!659](https://gitlab.com/fdroid/fdroidserver/merge_requests/659), - [!660](https://gitlab.com/fdroid/fdroidserver/merge_requests/660)) -* keep yaml metadata when rewrite failed - ([!658](https://gitlab.com/fdroid/fdroidserver/merge_requests/658)) -* import: `template.yml` now supports omitting values - ([!657](https://gitlab.com/fdroid/fdroidserver/merge_requests/657)) -* build: deploying buildlogs with rsync - ([!651](https://gitlab.com/fdroid/fdroidserver/merge_requests/651)) -* `fdroid init` generates PKCS12 keystores, drop Java < 8 support - ([!801](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/801)) -* Parse Version Codes specified in hex - ([!692](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/692)) -* Major refactoring on core parts of code to be more Pythonic - ([!756](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/756)) -* `fdroid init` now works when 
installed with pip - -### Removed -* Removed all support for _.txt_ and _.json_ metadata - ([!772](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/772)) -* dropped support for Debian 8 _jessie_ and 9 _stretch_ -* dropped support for Ubuntu releases older than bionic 18.04 -* dropped `fdroid server update` and `fdroid server init`, - use `fdroid deploy` -* `fdroid dscanner` was removed. - ([!711](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/711)) -* `make_current_version_link` is now off by default -* Dropped `force_build_tools` config option - ([!797](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/797)) -* Dropped `accepted_formats` config option, there is only _.yml_ now - ([!818](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/818)) -* `Provides:` was removed as a metadata field - ([!654](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/654)) -* Remove unused `latestapps.dat` - ([!794](https://gitlab.com/fdroid/fdroidserver/-/merge_requests/794)) - - -## [1.1.4] - 2019-08-15 -### Fixed * include bitcoin validation regex required by fdroiddata + * merged Debian patches to fix test suite there -## [1.1.3] - 2019-07-03 -### Fixed +### 1.1.3 (2019-07-03) + * fixed test suite when run from source tarball + * fixed test runs in Debian -## [1.1.2] - 2019-03-29 -### Fixed -* fix bug while downloading repo index - ([!636](https://gitlab.com/fdroid/fdroidserver/merge_requests/636)) +### 1.1.2 (2019-03-29) + +* fix bug while downloading repo index ([!636](https://gitlab.com/fdroid/fdroidserver/merge_requests/636)) + +### 1.1.1 (2019-02-03) -## [1.1.1] - 2019-02-03 -### Fixed * support APK Signature v2 and v3 + * all SDK Version values are output as integers in the index JSON + * take graphics from Fastlane dirs using any valid RFC5646 locale + * print warning if not running in UTF-8 encoding + * fdroid build: hide --on-server cli flag -## [1.1] - 2019-01-28 -### Fixed +### 1.1 (2019-01-28) + * a huge update with many fixes and new features: https://gitlab.com/fdroid/fdroidserver/milestones/7 -* can run without an Android SDK installed -* much more reliable operation with large binary APK collections -* sync all translations, including newly added languages: hu it ko pl pt_PT ru -* many security fixes, based on the security audit -* NoSourceSince automatically adds SourceGone Anti-Feature -* aapt scraping works with all known aapt versions -* smoother mirror setups -* much faster `fdroid update` when using androguard -[Unreleased]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.4...master -[1.1.4]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.3...1.1.4 -[1.1.3]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.2...1.1.3 -[1.1.2]: https://gitlab.com/fdroid/fdroidserver/compare/1.1.1...1.1.2 -[1.1.1]: https://gitlab.com/fdroid/fdroidserver/compare/1.1...1.1.1 -[1.1]: https://gitlab.com/fdroid/fdroidserver/tags/1.1 +* can run without an Android SDK installed + +* much more reliable operation with large binary APK collections + +* sync all translations, including newly added languages: hu it ko pl pt_PT ru + +* many security fixes, based on the security audit + +* NoSourceSince automatically adds SourceGone Anti-Feature + +* aapt scraping works with all known aapt versions + +* smoother mirror setups + +* much faster `fdroid update` when using androguard diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 226c0854..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,66 +0,0 @@ -There are many ways to contribute; you can
find out all the ways on our -[Contribute](https://f-droid.org/contribute/) page. Find out how to get -involved, including as a translator, data analyst, tester, helping others, and -much more! - -## Contributing Code - -We want more contributors and want different points of view represented. Some -parts of the code make contributing quick and easy. Other parts make it -difficult and slow, so we ask that contributors have patience. - -To submit a patch, please open a merge request on GitLab. If you are thinking of -making a large contribution, open an issue or merge request before starting -work, to get comments from the community. Someone may already be working on the -same thing, or there may be reasons why that feature isn't implemented. Once -there is agreement, the work might need to proceed asynchronously with the -core team towards the solution. - -To make it easier to review and accept your merge request, please follow these -guidelines: - -* When at all possible, include tests. These can either be added to an existing - test or be completely new. Practicing test-driven development will make it - easiest to get merged. That usually means starting your work by writing tests. - -* See [help-wanted](https://gitlab.com/fdroid/fdroidserver/-/issues/?sort=updated_desc&state=opened&label_name%5B%5D=help-wanted) - tags for things that maintainers have marked as things they want to see - merged. - -* The amount of technical debt varies widely in this code base. There are some - parts where the code is nicely isolated with good test coverage. There are - other parts that are tangled and complicated, full of technical debt, and - difficult to test. - -* The general approach is to treat the tangled and complicated parts as an - external API (albeit a bad one). That means it needs to stay unchanged as much - as possible. Changes to those parts of the code will trigger a migration, - which can require a lot of time and coordination. When there is time for large - development efforts, we refactor the code to get rid of those areas of - technical debt. - -* We use the [_black_](https://black.readthedocs.io/) code format; run `black .` to - format the code. Whenever editing code in any file, the new code should be - formatted as _black_. Some files are not yet fully in _black_ format (see - _pyproject.toml_); our goal is to opportunistically convert the code whenever - possible. As of the time of this writing, forcing the code format on all files - would be too disruptive. The officially supported _black_ version is the one - in Debian/stable. - -* Many of the tests run very fast and can be run interactively in isolation. - Some of the essential test cases run slowly because they do things like - signing files and generating signing keys. - -* Some parts of the code are difficult to test, and currently require a - relatively complete production setup in order to effectively test them. That - is mostly the code around building packages, managing the disposable VM, and - scheduling build jobs to run. - -* For user visible changes (API changes, behaviour changes, etc.), consider - adding a note in _CHANGELOG.md_. This could be a summarizing description of - the change, and could explain the grander details. Have a look through - existing entries for inspiration. Please note that this is NOT simply a copy - of git-log one-liners. Also note that security fixes get an entry in - _CHANGELOG.md_.
This file helps users get more in-depth information about what - comes with a specific release without having to sift through the higher noise - ratio in git-log. diff --git a/MANIFEST.in b/MANIFEST.in index 93307ace..8be4dabf 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include buildserver/config.buildserver.yml +include buildserver/config.buildserver.py include buildserver/provision-android-ndk include buildserver/provision-android-sdk include buildserver/provision-apt-get-install @@ -8,49 +8,43 @@ include buildserver/setup-env-vars include buildserver/Vagrantfile include CHANGELOG.md include completion/bash-completion -include examples/config.yml -include examples/fdroid_exportkeystore.py -include examples/fdroid_export_keystore_to_nitrokey.py -include examples/fdroid_extract_repo_pubkey.py -include examples/fdroid_fetchsrclibs.py -include examples/fdroid_nitrokeyimport.py +include docker/Dockerfile +include docker/drozer.py +include docker/enable_service.py +include docker/entrypoint.sh +include docker/install_agent.py +include docker/Makefile +include docker/README.md +include examples/config.py +include examples/fdroid-icon.png +include examples/makebuildserver.config.py include examples/opensc-fdroid.cfg include examples/public-read-only-s3-bucket-policy.json include examples/template.yml -include examples/Vagrantfile.yaml +include fdroid include gradlew-fdroid include LICENSE -include locale/ba/LC_MESSAGES/fdroidserver.po -include locale/bo/LC_MESSAGES/fdroidserver.po -include locale/ca/LC_MESSAGES/fdroidserver.po -include locale/cs/LC_MESSAGES/fdroidserver.po -include locale/de/LC_MESSAGES/fdroidserver.po -include locale/es/LC_MESSAGES/fdroidserver.po -include locale/fr/LC_MESSAGES/fdroidserver.po -include locale/ga/LC_MESSAGES/fdroidserver.po -include locale/hu/LC_MESSAGES/fdroidserver.po -include locale/it/LC_MESSAGES/fdroidserver.po -include locale/ja/LC_MESSAGES/fdroidserver.po -include locale/ko/LC_MESSAGES/fdroidserver.po -include locale/nb_NO/LC_MESSAGES/fdroidserver.po -include locale/pl/LC_MESSAGES/fdroidserver.po -include locale/pt/LC_MESSAGES/fdroidserver.po -include locale/pt_BR/LC_MESSAGES/fdroidserver.po -include locale/pt_PT/LC_MESSAGES/fdroidserver.po -include locale/ro/LC_MESSAGES/fdroidserver.po -include locale/ru/LC_MESSAGES/fdroidserver.po -include locale/sq/LC_MESSAGES/fdroidserver.po -include locale/sr/LC_MESSAGES/fdroidserver.po -include locale/sw/LC_MESSAGES/fdroidserver.po -include locale/tr/LC_MESSAGES/fdroidserver.po -include locale/uk/LC_MESSAGES/fdroidserver.po -include locale/zh_Hans/LC_MESSAGES/fdroidserver.po -include locale/zh_Hant/LC_MESSAGES/fdroidserver.po +include locale/bo/LC_MESSAGES/fdroidserver.mo +include locale/de/LC_MESSAGES/fdroidserver.mo +include locale/es/LC_MESSAGES/fdroidserver.mo +include locale/fr/LC_MESSAGES/fdroidserver.mo +include locale/hu/LC_MESSAGES/fdroidserver.mo +include locale/it/LC_MESSAGES/fdroidserver.mo +include locale/ko/LC_MESSAGES/fdroidserver.mo +include locale/nb_NO/LC_MESSAGES/fdroidserver.mo +include locale/pl/LC_MESSAGES/fdroidserver.mo +include locale/pt_BR/LC_MESSAGES/fdroidserver.mo +include locale/pt_PT/LC_MESSAGES/fdroidserver.mo +include locale/ru/LC_MESSAGES/fdroidserver.mo +include locale/tr/LC_MESSAGES/fdroidserver.mo +include locale/uk/LC_MESSAGES/fdroidserver.mo +include locale/zh_Hans/LC_MESSAGES/fdroidserver.mo +include locale/zh_Hant/LC_MESSAGES/fdroidserver.mo include makebuildserver include README.md -include tests/aosp_testkey_debug.keystore -include tests/apk.embedded_1.apk
+include tests/androguard_test.py include tests/bad-unicode-*.apk +include tests/build.TestCase include tests/build-tools/17.0.0/aapt-output-com.moez.QKSMS_182.txt include tests/build-tools/17.0.0/aapt-output-com.politedroid_3.txt include tests/build-tools/17.0.0/aapt-output-com.politedroid_4.txt @@ -60,10 +54,10 @@ include tests/build-tools/17.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/17.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/17.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/17.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/17.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/17.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/18.1.1/aapt-output-com.moez.QKSMS_182.txt @@ -75,10 +69,10 @@ include tests/build-tools/18.1.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/18.1.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/18.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/18.1.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/18.1.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/18.1.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/19.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -90,10 +84,10 @@ include tests/build-tools/19.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/19.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/19.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/19.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/19.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/19.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/19.1.0/aapt-output-com.moez.QKSMS_182.txt @@ -105,10 +99,10 @@ include tests/build-tools/19.1.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/19.1.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/19.1.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/19.1.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt include 
tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/19.1.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/19.1.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/20.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -120,10 +114,10 @@ include tests/build-tools/20.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/20.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/20.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/20.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/20.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/20.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/21.1.1/aapt-output-com.moez.QKSMS_182.txt @@ -135,10 +129,10 @@ include tests/build-tools/21.1.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/21.1.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/21.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/21.1.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/21.1.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/21.1.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/21.1.2/aapt-output-com.moez.QKSMS_182.txt @@ -150,10 +144,10 @@ include tests/build-tools/21.1.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/21.1.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/21.1.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/21.1.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/21.1.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/21.1.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/22.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -165,10 +159,10 @@ include tests/build-tools/22.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/22.0.0/aapt-output-info.guardianproject.urzip_100.txt include 
tests/build-tools/22.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/22.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/22.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/22.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/22.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -180,10 +174,10 @@ include tests/build-tools/22.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/22.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/22.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/22.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/22.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/22.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/23.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -195,10 +189,10 @@ include tests/build-tools/23.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/23.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/23.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/23.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/23.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/23.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -210,10 +204,10 @@ include tests/build-tools/23.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/23.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/23.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/23.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/23.0.1/aapt-output-souch.smsbypass_9.txt include 
tests/build-tools/23.0.2/aapt-output-com.moez.QKSMS_182.txt @@ -225,10 +219,10 @@ include tests/build-tools/23.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/23.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/23.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/23.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/23.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/23.0.3/aapt-output-com.moez.QKSMS_182.txt @@ -240,10 +234,10 @@ include tests/build-tools/23.0.3/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/23.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/23.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/23.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/23.0.3/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/23.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/24.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -255,10 +249,10 @@ include tests/build-tools/24.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/24.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/24.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/24.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/24.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/24.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -270,10 +264,10 @@ include tests/build-tools/24.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/24.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/24.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/24.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101617.txt -include 
tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/24.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/24.0.2/aapt-output-com.moez.QKSMS_182.txt @@ -285,10 +279,10 @@ include tests/build-tools/24.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/24.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/24.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/24.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/24.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/24.0.3/aapt-output-com.moez.QKSMS_182.txt @@ -300,10 +294,10 @@ include tests/build-tools/24.0.3/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/24.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/24.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/24.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/24.0.3/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/24.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/25.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -315,10 +309,10 @@ include tests/build-tools/25.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/25.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/25.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/25.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/25.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/25.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -330,10 +324,10 @@ include tests/build-tools/25.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/25.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/25.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/25.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt include 
tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/25.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/25.0.2/aapt-output-com.moez.QKSMS_182.txt @@ -345,10 +339,10 @@ include tests/build-tools/25.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/25.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/25.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/25.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/25.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/25.0.3/aapt-output-com.moez.QKSMS_182.txt @@ -360,10 +354,10 @@ include tests/build-tools/25.0.3/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/25.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/25.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/25.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/25.0.3/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/25.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/26.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -375,10 +369,10 @@ include tests/build-tools/26.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/26.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/26.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/26.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/26.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/26.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -390,10 +384,10 @@ include tests/build-tools/26.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/26.0.1/aapt-output-info.guardianproject.urzip_100.txt include 
tests/build-tools/26.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/26.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/26.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/26.0.2/aapt-output-com.moez.QKSMS_182.txt @@ -405,10 +399,10 @@ include tests/build-tools/26.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/26.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/26.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/26.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/26.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/26.0.3/aapt-output-com.moez.QKSMS_182.txt @@ -420,10 +414,10 @@ include tests/build-tools/26.0.3/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/26.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/26.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/26.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/26.0.3/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/26.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/27.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -435,10 +429,10 @@ include tests/build-tools/27.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/27.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/27.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/27.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/27.0.0/aapt-output-souch.smsbypass_9.txt include 
tests/build-tools/27.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -450,10 +444,10 @@ include tests/build-tools/27.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/27.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/27.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/27.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/27.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/27.0.2/aapt-output-com.moez.QKSMS_182.txt @@ -465,10 +459,10 @@ include tests/build-tools/27.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/27.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/27.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/27.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/27.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/27.0.3/aapt-output-com.moez.QKSMS_182.txt @@ -480,10 +474,10 @@ include tests/build-tools/27.0.3/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/27.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/27.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/27.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/27.0.3/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/27.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/28.0.0/aapt-output-com.moez.QKSMS_182.txt @@ -495,10 +489,10 @@ include tests/build-tools/28.0.0/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/28.0.0/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/28.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/28.0.0/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101617.txt -include 
tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.0/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/28.0.0/aapt-output-souch.smsbypass_9.txt include tests/build-tools/28.0.1/aapt-output-com.moez.QKSMS_182.txt @@ -510,10 +504,10 @@ include tests/build-tools/28.0.1/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/28.0.1/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/28.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/28.0.1/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.1/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/28.0.1/aapt-output-souch.smsbypass_9.txt include tests/build-tools/28.0.2/aapt-output-com.politedroid_3.txt @@ -524,10 +518,10 @@ include tests/build-tools/28.0.2/aapt-output-duplicate.permisssions_9999999.txt include tests/build-tools/28.0.2/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/28.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/28.0.2/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.2/aapt-output-org.droidtr.keyboard_34.txt include tests/build-tools/28.0.2/aapt-output-souch.smsbypass_9.txt include tests/build-tools/28.0.3/aapt-output-com.example.test.helloworld_1.txt @@ -540,138 +534,98 @@ include tests/build-tools/28.0.3/aapt-output-info.guardianproject.urzip_100.txt include tests/build-tools/28.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt include tests/build-tools/28.0.3/aapt-output-no.min.target.sdk_987.txt include tests/build-tools/28.0.3/aapt-output-obb.main.oldversion_1444412523.txt +include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101613.txt include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101615.txt include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101617.txt -include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt include tests/build-tools/28.0.3/aapt-output-souch.smsbypass_9.txt include tests/build-tools/generate.sh include tests/check-fdroid-apk -include tests/com.fake.IpaApp_1000000000001.ipa -include tests/config.yml -include tests/config/antiFeatures.yml -include tests/config/categories.yml -include tests/config/de/antiFeatures.yml -include tests/config/fa/antiFeatures.yml -include tests/config/ic_antifeature_ads.xml -include tests/config/ic_antifeature_disabledalgorithm.xml -include tests/config/ic_antifeature_knownvuln.xml -include tests/config/ic_antifeature_nonfreeadd.xml -include tests/config/ic_antifeature_nonfreeassets.xml -include 
tests/config/ic_antifeature_nonfreedep.xml -include tests/config/ic_antifeature_nonfreenet.xml -include tests/config/ic_antifeature_nosourcesince.xml -include tests/config/ic_antifeature_nsfw.xml -include tests/config/ic_antifeature_tracking.xml -include tests/config/ic_antifeature_upstreamnonfree.xml -include tests/config/ro/antiFeatures.yml -include tests/config/zh-rCN/antiFeatures.yml -include tests/corrupt-featureGraphic.png +include tests/common.TestCase +include tests/complete-ci-tests +include tests/config.py +include tests/description-parsing.py include tests/dummy-keystore.jks include tests/dump_internal_metadata_format.py +include tests/exception.TestCase +include tests/extra/convert_metadata_to_yaml_then_txt.sh include tests/extra/manual-vmtools-test.py -include tests/funding-usernames.yaml -include tests/get_android_tools_versions/android-ndk-r10e/RELEASE.TXT -include tests/get_android_tools_versions/android-sdk/ndk-bundle/package.xml -include tests/get_android_tools_versions/android-sdk/ndk-bundle/source.properties -include tests/get_android_tools_versions/android-sdk/ndk/11.2.2725575/source.properties -include tests/get_android_tools_versions/android-sdk/ndk/17.2.4988734/source.properties -include tests/get_android_tools_versions/android-sdk/ndk/21.3.6528147/source.properties -include tests/get_android_tools_versions/android-sdk/patcher/v4/source.properties -include tests/get_android_tools_versions/android-sdk/platforms/android-30/source.properties -include tests/get_android_tools_versions/android-sdk/skiaparser/1/source.properties -include tests/get_android_tools_versions/android-sdk/tools/source.properties +include tests/getsig/getsig.java +include tests/getsig/make.sh +include tests/getsig/run.sh include tests/gnupghome/pubring.gpg include tests/gnupghome/random_seed include tests/gnupghome/secring.gpg include tests/gnupghome/trustdb.gpg -include tests/gradle-maven-blocks.yaml -include tests/gradle-release-checksums.py +include tests/import_proxy.py +include tests/import.TestCase +include tests/index.TestCase +include tests/install.TestCase include tests/IsMD5Disabled.java -include tests/issue-1128-min-sdk-30-poc.apk -include tests/issue-1128-poc1.apk -include tests/issue-1128-poc2.apk -include tests/issue-1128-poc3a.apk -include tests/issue-1128-poc3b.apk include tests/janus.apk -include tests/key-tricks.py include tests/keystore.jks -include tests/metadata-rewrite-yml/app.with.special.build.params.yml -include tests/metadata-rewrite-yml/fake.ota.update.yml -include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml +include tests/lint.TestCase include tests/metadata/apk/info.guardianproject.urzip.yaml include tests/metadata/apk/org.dyndns.fules.ck.yaml -include tests/metadata/app.with.special.build.params.yml -include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Ads.txt -include tests/metadata/app.with.special.build.params/en-US/antifeatures/50_Tracking.txt -include tests/metadata/app.with.special.build.params/en-US/antifeatures/Ads.txt -include tests/metadata/app.with.special.build.params/en-US/antifeatures/NoSourceSince.txt -include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/49_Tracking.txt -include tests/metadata/app.with.special.build.params/zh-CN/antifeatures/50_Ads.txt -include tests/metadata/com.politedroid.yml -include tests/metadata/dump/app.with.special.build.params.yaml +include tests/metadata/app.with.special.build.params.txt +include tests/metadata/com.politedroid.txt include tests/metadata/dump/com.politedroid.yaml 
include tests/metadata/dump/org.adaway.yaml include tests/metadata/dump/org.smssecure.smssecure.yaml include tests/metadata/dump/org.videolan.vlc.yaml include tests/metadata/duplicate.permisssions.yml -include tests/metadata/fake.ota.update.yml -include tests/metadata/info.guardianproject.checkey.yml +include tests/metadata/fake.ota.update.txt include tests/metadata/info.guardianproject.checkey/en-US/description.txt -include tests/metadata/info.guardianproject.checkey/en-US/name.txt include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png include tests/metadata/info.guardianproject.checkey/en-US/summary.txt -include tests/metadata/info.guardianproject.checkey/ja-JP/name.txt -include tests/metadata/info.guardianproject.urzip.yml +include tests/metadata/info.guardianproject.checkey.txt include tests/metadata/info.guardianproject.urzip/en-US/changelogs/100.txt -include tests/metadata/info.guardianproject.urzip/en-US/changelogs/default.txt include tests/metadata/info.guardianproject.urzip/en-US/full_description.txt include tests/metadata/info.guardianproject.urzip/en-US/images/featureGraphic.png include tests/metadata/info.guardianproject.urzip/en-US/images/icon.png include tests/metadata/info.guardianproject.urzip/en-US/short_description.txt include tests/metadata/info.guardianproject.urzip/en-US/title.txt include tests/metadata/info.guardianproject.urzip/en-US/video.txt +include tests/metadata/info.guardianproject.urzip.yml include tests/metadata/info.zwanenburg.caffeinetile.yml include tests/metadata/no.min.target.sdk.yml -include tests/metadata/obb.main.oldversion.yml -include tests/metadata/obb.main.twoversions.yml -include tests/metadata/obb.mainpatch.current.yml -include tests/metadata/org.adaway.yml -include tests/metadata/org.fdroid.ci.test.app.yml -include tests/metadata/org.fdroid.fdroid.yml -include tests/metadata/org.maxsdkversion.yml -include tests/metadata/org.smssecure.smssecure.yml +include tests/metadata/obb.main.oldversion.txt +include tests/metadata/obb.mainpatch.current.txt +include tests/metadata/obb.main.twoversions.txt +include tests/metadata/org.adaway.json +include tests/metadata/org.fdroid.ci.test.app.txt +include tests/metadata/org.fdroid.fdroid.txt include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.RSA include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.SF include tests/metadata/org.smssecure.smssecure/signatures/134/MANIFEST.MF include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.RSA include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.SF include tests/metadata/org.smssecure.smssecure/signatures/135/MANIFEST.MF +include tests/metadata/org.smssecure.smssecure.txt include tests/metadata/org.videolan.vlc.yml -include tests/metadata/raw.template.yml -include tests/metadata/souch.smsbypass.yml -include tests/minimal_targetsdk_30_unsigned.apk -include tests/Norway_bouvet_europe_2.obf.zip -include tests/no_targetsdk_minsdk1_unsigned.apk -include tests/no_targetsdk_minsdk30_unsigned.apk +include tests/metadata/raw.template.txt +include tests/metadata-rewrite-yml/app.with.special.build.params.yml +include tests/metadata-rewrite-yml/fake.ota.update.yml +include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml +include tests/metadata/souch.smsbypass.txt +include tests/metadata.TestCase include tests/openssl-version-check-test.py include 
tests/org.bitbucket.tickytacky.mirrormirror_1.apk include tests/org.bitbucket.tickytacky.mirrormirror_2.apk include tests/org.bitbucket.tickytacky.mirrormirror_3.apk include tests/org.bitbucket.tickytacky.mirrormirror_4.apk include tests/org.dyndns.fules.ck_20.apk -include tests/org.sajeg.fallingblocks_3.apk +include tests/publish.TestCase +include tests/repo/categories.txt include tests/repo/com.example.test.helloworld_1.apk include tests/repo/com.politedroid_3.apk include tests/repo/com.politedroid_4.apk include tests/repo/com.politedroid_5.apk include tests/repo/com.politedroid_6.apk include tests/repo/duplicate.permisssions_9999999.apk -include tests/repo/entry.json include tests/repo/fake.ota.update_1234.zip include tests/repo/index-v1.json -include tests/repo/index-v2.json include tests/repo/index.xml include tests/repo/info.zwanenburg.caffeinetile_4.apk include tests/repo/main.1101613.obb.main.twoversions.obb @@ -680,17 +634,16 @@ include tests/repo/main.1434483388.obb.main.oldversion.obb include tests/repo/main.1619.obb.mainpatch.current.obb include tests/repo/no.min.target.sdk_987.apk include tests/repo/obb.main.oldversion_1444412523.apk -include tests/repo/obb.main.twoversions_1101613.apk -include tests/repo/obb.main.twoversions_1101615.apk -include tests/repo/obb.main.twoversions_1101617.apk -include tests/repo/obb.main.twoversions_1101617_src.tar.gz +include tests/repo/obb.mainpatch.current_1619_another-release-key.apk +include tests/repo/obb.mainpatch.current_1619.apk include tests/repo/obb.mainpatch.current/en-US/featureGraphic.png include tests/repo/obb.mainpatch.current/en-US/icon.png include tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png include tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png -include tests/repo/obb.mainpatch.current_1619.apk -include tests/repo/obb.mainpatch.current_1619_another-release-key.apk -include tests/repo/org.maxsdkversion_4.apk +include tests/repo/obb.main.twoversions_1101613.apk +include tests/repo/obb.main.twoversions_1101615.apk +include tests/repo/obb.main.twoversions_1101617.apk +include tests/repo/obb.main.twoversions_1101617_src.tar.gz include tests/repo/org.videolan.vlc/en-US/icon.png include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png @@ -702,16 +655,16 @@ include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png -include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png -include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png include 
tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png +include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png @@ -721,225 +674,50 @@ include tests/repo/souch.smsbypass_9.apk include tests/repo/urzip-*.apk include tests/repo/v1.v2.sig_1020.apk include tests/run-tests -include tests/SANAPPSI.RSA -include tests/SANAPPSI.SF -include tests/shared_test_code.py -include tests/signindex/guardianproject-v1.jar +include tests/scanner.TestCase +include tests/server.TestCase +include tests/signatures.TestCase include tests/signindex/guardianproject.jar +include tests/signindex/guardianproject-v1.jar include tests/signindex/testy.jar include tests/signindex/unsigned.jar include tests/source-files/at.bitfire.davdroid/build.gradle -include tests/source-files/catalog.test/app/build.gradle -include tests/source-files/catalog.test/build.gradle.kts -include tests/source-files/catalog.test/buildSrc/build.gradle.kts -include tests/source-files/catalog.test/buildSrc/settings.gradle.kts -include tests/source-files/catalog.test/buildSrc2/build.gradle.kts -include tests/source-files/catalog.test/buildSrc2/settings.gradle.kts -include tests/source-files/catalog.test/core/build.gradle -include tests/source-files/catalog.test/gradle/libs.versions.toml -include tests/source-files/catalog.test/libs.versions.toml -include tests/source-files/catalog.test/settings.gradle.kts -include tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle -include tests/source-files/cn.wildfirechat.chat/build.gradle -include tests/source-files/cn.wildfirechat.chat/chat/build.gradle -include tests/source-files/cn.wildfirechat.chat/client/build.gradle -include tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml -include tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle -include tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle -include tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle -include tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle -include tests/source-files/cn.wildfirechat.chat/push/build.gradle -include tests/source-files/cn.wildfirechat.chat/settings.gradle -include tests/source-files/com.anpmech.launcher/app/build.gradle -include tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml -include tests/source-files/com.anpmech.launcher/build.gradle -include tests/source-files/com.anpmech.launcher/settings.gradle -include tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle -include tests/source-files/com.github.shadowsocks/core/build.gradle.kts -include tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts -include tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml -include tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml -include tests/source-files/com.infomaniak.mail/settings.gradle -include tests/source-files/com.integreight.onesheeld/build.gradle -include tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties -include tests/source-files/com.integreight.onesheeld/localeapi/build.gradle -include tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml -include 
tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle -include tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml -include tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle -include tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml -include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle -include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml -include tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle -include tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml -include tests/source-files/com.integreight.onesheeld/settings.gradle -include tests/source-files/com.jens.automation2/app/build.gradle -include tests/source-files/com.jens.automation2/build.gradle include tests/source-files/com.kunzisoft.testcase/build.gradle -include tests/source-files/com.lolo.io.onelist/app/build.gradle.kts -include tests/source-files/com.lolo.io.onelist/build.gradle.kts -include tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml -include tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties -include tests/source-files/com.lolo.io.onelist/settings.gradle +include tests/source-files/com.nextcloud.client/build.gradle include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt -include tests/source-files/com.nextcloud.client/build.gradle include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/full_description.txt include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/short_description.txt include tests/source-files/com.nextcloud.client/src/versionDev/fastlane/metadata/android/en-US/title.txt -include tests/source-files/com.seafile.seadroid2/app/build.gradle -include tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts -include tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml -include tests/source-files/de.varengold.activeTAN/build.gradle -include tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts include tests/source-files/eu.siacs.conversations/build.gradle include tests/source-files/eu.siacs.conversations/metadata/en-US/name.txt include tests/source-files/fdroid/fdroidclient/AndroidManifest.xml include tests/source-files/fdroid/fdroidclient/build.gradle -include tests/source-files/firebase-allowlisted/app/build.gradle -include 
tests/source-files/firebase-allowlisted/build.gradle include tests/source-files/firebase-suspect/app/build.gradle include tests/source-files/firebase-suspect/build.gradle -include tests/source-files/flavor.test/build.gradle -include tests/source-files/info.guardianproject.ripple/build.gradle -include tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml -include tests/source-files/lockfile.test/flutter/pubspec.lock -include tests/source-files/lockfile.test/flutter/pubspec.yaml -include tests/source-files/lockfile.test/javascript/package.json -include tests/source-files/lockfile.test/javascript/yarn.lock -include tests/source-files/lockfile.test/rust/subdir/Cargo.lock -include tests/source-files/lockfile.test/rust/subdir/Cargo.toml -include tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml -include tests/source-files/lockfile.test/rust/subdir2/Cargo.toml +include tests/source-files/firebase-whitelisted/app/build.gradle +include tests/source-files/firebase-whitelisted/build.gradle include tests/source-files/open-keychain/open-keychain/build.gradle include tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle -include tests/source-files/org.mozilla.rocket/app/build.gradle -include tests/source-files/org.noise_planet.noisecapture/app/build.gradle -include tests/source-files/org.noise_planet.noisecapture/settings.gradle -include tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle -include tests/source-files/org.piepmeyer.gauguin/build.gradle.kts -include tests/source-files/org.piepmeyer.gauguin/libs.versions.toml -include tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts -include tests/source-files/org.tasks/app/build.gradle.kts -include tests/source-files/org.tasks/build.gradle -include tests/source-files/org.tasks/build.gradle.kts -include tests/source-files/org.tasks/buildSrc/build.gradle.kts -include tests/source-files/org.tasks/settings.gradle.kts include tests/source-files/osmandapp/osmand/build.gradle -include tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties -include tests/source-files/OtakuWorld/build.gradle -include tests/source-files/realm/react-native/android/build.gradle -include tests/source-files/se.manyver/android/app/build.gradle -include tests/source-files/se.manyver/android/build.gradle -include tests/source-files/se.manyver/android/gradle.properties -include tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties -include tests/source-files/se.manyver/android/settings.gradle -include tests/source-files/se.manyver/app.json -include tests/source-files/se.manyver/index.android.js -include tests/source-files/se.manyver/package.json -include tests/source-files/se.manyver/react-native.config.js -include tests/source-files/ut.ewh.audiometrytest/app/build.gradle -include tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml -include tests/source-files/ut.ewh.audiometrytest/build.gradle -include tests/source-files/ut.ewh.audiometrytest/settings.gradle -include tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties include tests/source-files/Zillode/syncthing-silk/build.gradle include tests/SpeedoMeterApp.main_1.apk -include tests/test_build.py -include tests/test_checkupdates.py -include tests/test_common.py -include tests/test_deploy.py -include tests/test_exception.py -include tests/test_gradlew-fdroid -include tests/test_import_subcommand.py -include tests/test_index.py -include 
tests/test_init.py -include tests/test_install.py -include tests/test_lint.py -include tests/test_main.py -include tests/test_metadata.py -include tests/test_nightly.py -include tests/test_publish.py -include tests/test_rewritemeta.py -include tests/test_scanner.py -include tests/test_signatures.py -include tests/test_signindex.py -include tests/test_update.py -include tests/test_vcs.py -include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png -include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png -include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png -include tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml -include tests/triple-t-2/build/org.piwigo.android/app/.gitignore -include tests/triple-t-2/build/org.piwigo.android/app/build.gradle -include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml -include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml -include tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt -include 
tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt -include tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt -include tests/triple-t-2/build/org.piwigo.android/build.gradle -include tests/triple-t-2/build/org.piwigo.android/settings.gradle -include tests/triple-t-2/metadata/org.piwigo.android.yml -include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt -include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt -include tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle -include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt -include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt -include tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle -include tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml -include tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml -include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt -include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt -include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt -include tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt -include tests/triple-t-flutter/metadata/fr.emersion.goguma.yml -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt -include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt -include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml -include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml +include tests/stats/known_apks.txt +include tests/testcommon.py +include tests/update.TestCase +include tests/urzip.apk include tests/urzip-badcert.apk include tests/urzip-badsig.apk -include tests/urzip-release-unsigned.apk include tests/urzip-release.apk -include tests/urzip.apk +include tests/urzip-release-unsigned.apk include tests/v2.only.sig_2.apk include tests/valid-package-names/random-package-names include tests/valid-package-names/RandomPackageNames.java 
include tests/valid-package-names/test.py -include tests/__init__.py diff --git a/README.md b/README.md index 41f725cb..0eeab45d 100644 --- a/README.md +++ b/README.md @@ -1,133 +1,109 @@ -
+ + +| CI Builds | fdroidserver | buildserver | fdroid build --all | publishing tools | +|--------------------------|:-------------:|:-----------:|:------------------:|:----------------:| +| Debian | [![fdroidserver status on Debian](https://gitlab.com/fdroid/fdroidserver/badges/master/build.svg)](https://gitlab.com/fdroid/fdroidserver/builds) | [![buildserver status](https://jenkins.debian.net/job/reproducible_setup_fdroid_build_environment/badge/icon)](https://jenkins.debian.net/job/reproducible_setup_fdroid_build_environment) | [![fdroid build all status](https://jenkins.debian.net/job/reproducible_fdroid_build_apps/badge/icon)](https://jenkins.debian.net/job/reproducible_fdroid_build_apps/) | [![fdroid test status](https://jenkins.debian.net/job/reproducible_fdroid_test/badge/icon)](https://jenkins.debian.net/job/reproducible_fdroid_test/) | +| macOS & Ubuntu/trusty | [![fdroidserver status on macOS & Ubuntu/LTS](https://travis-ci.org/fdroidtravis/fdroidserver.svg?branch=master)](https://travis-ci.org/fdroidtravis/fdroidserver) | | | | -

# F-Droid Server -### Tools for maintaining an F-Droid repository system. -
+Server for [F-Droid](https://f-droid.org), the Free Software repository system +for Android. ---- +The F-Droid server tools provide various scripts and tools that are +used to maintain the main +[F-Droid application repository](https://f-droid.org/packages). You +can use these same tools to create your own additional or alternative +repository for publishing, or to assist in creating, testing and +submitting metadata to the main repository. -## What is F-Droid Server? - -_fdroidserver_ is a suite of tools to publish and work with collections of -Android apps (APK files) and other kinds of packages. It is used to maintain -the [f-droid.org application repository](https://f-droid.org/packages). These -same tools can be used to create additional or alternative repositories for -publishing, or to assist in creating, testing and submitting metadata to the -f-droid.org repository, also known as -[_fdroiddata_](https://gitlab.com/fdroid/fdroiddata). - -For documentation, please see . - -In the beginning, _fdroidserver_ was the complete server-side setup that ran -f-droid.org. Since then, the website and other parts have been split out into -their own projects. The name for this suite of tooling has stayed -_fdroidserver_ even though it no longer contains any proper server component. +For documentation, please see , or you can +find the source for the documentation in +[fdroid/fdroid-website](https://gitlab.com/fdroid/fdroid-website). -## Installing +### What is F-Droid? -There are many ways to install _fdroidserver_, including using a range of -package managers. All of the options are documented on the website: +F-Droid is an installable catalogue of FOSS (Free and Open Source Software) +applications for the Android platform. The client makes it easy to browse, +install, and keep track of updates on your device. + + +### Installing + +There are many ways to install _fdroidserver_, they are documented on +the website: https://f-droid.org/docs/Installing_the_Server_and_Repo_Tools - -## Releases - -The production setup of _fdroidserver_ for f-droid.org is run directly from the -_master_ branch. This is put into production on an schedule (currently weekly). -So development and testing happens in the branches. We track branches using -merge requests. Therefore, there are many WIP and long-lived merge requests. - -There are also stable releases of _fdroidserver_. This is mostly intended for -running custom repositories, where the build process is separate. It can also -be useful as a simple way to get started contributing packages to _fdroiddata_, -since the stable releases are available in package managers. +All sorts of other documentation lives there as well. -## Tests +### Tests -To run the full test suite: - - tests/run-tests - -To run the tests for individual Python modules, see the `tests/test_*.py` files, e.g.: - - python -m unittest tests/test_metadata.py - -It is also possible to run individual tests: - - python -m unittest tests.test_metadata.MetadataTest.test_rewrite_yaml_special_build_params - -There is a growing test suite that has good coverage on a number of key parts of -this code base. It does not yet cover all the code, and there are some parts -where the technical debt makes it difficult to write unit tests. New tests -should be standard Python _unittest_ test cases. Whenever possible, the old -tests written in _bash_ in _tests/run-tests_ should be ported to Python. 
- -This test suite has built over time a bit haphazardly, so it is not as clean, -organized, or complete as it could be. We welcome contributions. The goal is -to move towards standard Python testing patterns and to expand the unit test -coverage. Before rearchitecting any parts of it, be sure to [contact -us](https://f-droid.org/about) to discuss the changes beforehand. +There are many components to all of the tests for the components in +this git repo. The most commonly used parts of well tested, while +some parts still lack tests. This test suite has built over time a +bit haphazardly, so it is not as clean, organized, or complete as it +could be. We welcome contributions. Before rearchitecting any parts +of it, be sure to [contact us](https://f-droid.org/about) to discuss +the changes beforehand. -### Additional tests for different linux distributions +#### `fdroid` commands -These tests are also run on various configurations through GitLab CI. This is -only enabled for `master@fdroid/fdroidserver` because it takes longer to +The test suite for all of the `fdroid` commands is in the _tests/_ +subdir. _.gitlab-ci.yml_ and _.travis.yml_ run this test suite on +various configurations. + +* _tests/complete-ci-tests_ runs _pylint_ and all tests on two + different pyvenvs +* _tests/run-tests_ runs the whole test suite +* _tests/*.TestCase_ are individual unit tests for all of the `fdroid` + commands, which can be run separately, e.g. `./update.TestCase`. + + +#### Additional tests for different linux distributions + +These tests are also run on various distributions through GitLab CI. This is +only enabled for `master@fdroid/fdroidserver` because it'll take longer to complete than the regular CI tests. Most of the time you won't need to worry -about them, but sometimes it might make sense to also run them for your merge -request. In that case you need to remove [these lines from .gitlab-ci.yml](https://gitlab.com/fdroid/fdroidserver/-/blob/0124b9dde99f9cab19c034cbc7d8cc6005a99b48/.gitlab-ci.yml#L90-91) +about them but sometimes it might make sense to also run them for your merge +request. In that case you need to remove [these lines from +.gitlab-ci.yml](https://gitlab.com/fdroid/fdroidserver/blob/master/.gitlab-ci.yml#L34-35) and push this to a new branch of your fork. Alternatively [run them locally](https://docs.gitlab.com/runner/commands/README.html#gitlab-runner-exec) like this: `gitlab-runner exec docker ubuntu_lts` +#### buildserver -## Documentation +The tests for the whole build server setup are entirely separate +because they require at least 200GB of disk space, and 8GB of +RAM. These test scripts are in the root of the project, all starting +with _jenkins-_ since they are run on https://jenkins.debian.net. -The API documentation based on the docstrings gets automatically -published [here](https://fdroid.gitlab.io/fdroidserver) on every commit -on the `master` branch. -It can be built locally via -```bash -pip install -e .[docs] -cd docs -sphinx-apidoc -o ./source ../fdroidserver -M -e -sphinx-autogen -o generated source/*.rst -make html -``` +### Drozer Scanner -To additionally lint the code call -```bash -pydocstyle fdroidserver --count -``` +There is a new feature under development that can scan any APK in a +repo, or any build, using Drozer. Drozer is a dynamic exploit +scanner, it runs an app in the emulator and runs known exploits on it. -When writing docstrings you should follow the -[numpy style guide](https://numpydoc.readthedocs.io/en/latest/format.html). 
+This setup requires specific versions of two Python modules: +_docker-py_ 1.9.0 and _requests_ older than 2.11. Other versions +might cause the docker-py connection to break with the containers. +Newer versions of docker-py might have this fixed already. +For Debian based distributions: + + apt-get install libffi-dev libssl-dev python-docker ## Translation Everything can be translated. See [Translation and Localization](https://f-droid.org/docs/Translation_and_Localization) for more info. - -
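Referring back to the Drozer scanner requirements above: where the Debian `python-docker` package is not available, the same version pins can be expressed with pip. This is a sketch under the constraints stated in that section, not part of the documented setup; the PyPI names assumed here are `docker-py` and `requests`.

```bash
# Sketch only: pin the two modules named in the Drozer scanner section above,
# i.e. docker-py 1.9.0 and any requests older than 2.11.
pip install 'docker-py==1.9.0' 'requests<2.11'
```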
- -[![](https://hosted.weblate.org/widgets/f-droid/-/287x66-white.png)](https://hosted.weblate.org/engage/f-droid) - -
-View translation status for all languages. - -[![](https://hosted.weblate.org/widgets/f-droid/-/fdroidserver/multi-auto.svg)](https://hosted.weblate.org/engage/f-droid/?utm_source=widget) - -
- -
+[![translation status](https://hosted.weblate.org/widgets/f-droid/-/fdroidserver/multi-auto.svg)](https://hosted.weblate.org/engage/f-droid/?utm_source=widget) diff --git a/buildserver/Dockerfile b/buildserver/Dockerfile deleted file mode 100644 index 27ada3f8..00000000 --- a/buildserver/Dockerfile +++ /dev/null @@ -1,74 +0,0 @@ - -FROM debian:bookworm - -ENV LANG=C.UTF-8 \ - DEBIAN_FRONTEND=noninteractive - -RUN echo Etc/UTC > /etc/timezone \ - && echo 'Acquire::Retries "20";' \ - 'APT::Get::Assume-Yes "true";' \ - 'APT::Install-Recommends "0";' \ - 'APT::Install-Suggests "0";' \ - 'Dpkg::Use-Pty "0";' \ - 'quiet "1";' \ - >> /etc/apt/apt.conf.d/99gitlab - -# provision-apt-proxy was deliberately omitted, its not relevant in Docker -COPY provision-android-ndk \ - provision-android-sdk \ - provision-apt-get-install \ - provision-buildserverid \ - provision-gradle \ - setup-env-vars \ - /opt/buildserver/ - -ARG GIT_REV_PARSE_HEAD=unspecified -LABEL org.opencontainers.image.revision=$GIT_REV_PARSE_HEAD - -# setup 'vagrant' user for compatibility -RUN useradd --create-home -s /bin/bash vagrant && echo -n 'vagrant:vagrant' | chpasswd - -# The provision scripts must be run in the same order as in Vagrantfile -# - vagrant needs openssh-client iproute2 ssh sudo -# - ansible needs python3 -# -# Debian Docker images will soon default to HTTPS for apt sources, so force it. -# https://github.com/debuerreotype/docker-debian-artifacts/issues/15 -# -# Ensure fdroidserver's dependencies are marked manual before purging -# unneeded packages, otherwise, all its dependencies get purged. -# -# The official Debian docker images ship without ca-certificates, so -# TLS certificates cannot be verified until that is installed. The -# following code temporarily turns off TLS verification, and enables -# HTTPS, so at least unverified TLS is used for apt-get instead of -# plain HTTP. Once ca-certificates is installed, the CA verification -# is enabled by removing the newly created config file. This set up -# makes the initial `apt-get update` and `apt-get install` look the -# same as verified TLS to the network observer and hides the metadata. -RUN printf "path-exclude=/usr/share/locale/*\npath-exclude=/usr/share/man/*\npath-exclude=/usr/share/doc/*\npath-include=/usr/share/doc/*/copyright\n" >/etc/dpkg/dpkg.cfg.d/01_nodoc \ - && mkdir -p /usr/share/man/man1 \ - && echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates \ - && find /etc/apt/sources.list* -type f -exec sed -i s,http:,https:, {} \; \ - && apt-get update \ - && apt-get install ca-certificates \ - && rm /etc/apt/apt.conf.d/99nocacertificates \ - && apt-get upgrade \ - && apt-get dist-upgrade \ - && apt-get install openssh-client iproute2 python3 openssh-server sudo \ - && bash /opt/buildserver/setup-env-vars /opt/android-sdk \ - && . 
/etc/profile.d/bsenv.sh \ - && bash /opt/buildserver/provision-apt-get-install https://deb.debian.org/debian \ - && bash /opt/buildserver/provision-android-sdk "tools;25.2.5" \ - && bash /opt/buildserver/provision-android-ndk /opt/android-sdk/ndk \ - && bash /opt/buildserver/provision-gradle \ - && bash /opt/buildserver/provision-buildserverid $GIT_REV_PARSE_HEAD \ - && rm -rf /vagrant/cache \ - && apt-get autoremove --purge \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Vagrant sudo setup for compatibility -RUN echo 'vagrant ALL = NOPASSWD: ALL' > /etc/sudoers.d/vagrant \ - && chmod 440 /etc/sudoers.d/vagrant \ - && sed -i -e 's/Defaults.*requiretty/#&/' /etc/sudoers diff --git a/buildserver/Vagrantfile b/buildserver/Vagrantfile index 61e3459a..392d3df6 100644 --- a/buildserver/Vagrantfile +++ b/buildserver/Vagrantfile @@ -1,41 +1,24 @@ + require 'yaml' require 'pathname' -require 'fileutils' - -configfile = { - 'boot_timeout' => 600, - 'cachedir' => File.join(ENV['HOME'], '.cache', 'fdroidserver'), - 'cpus' => 1, - 'debian_mirror' => 'https://deb.debian.org/debian/', - 'hwvirtex' => 'on', - 'memory' => 2048, - 'vm_provider' => 'virtualbox', -} srvpath = Pathname.new(File.dirname(__FILE__)).realpath -configpath = File.join(srvpath, "/Vagrantfile.yaml") -if File.exist? configpath - c = YAML.load_file(configpath) - if c and not c.empty? - c.each do |k,v| - configfile[k] = v - end - end -else - puts "Copying example file to #{configpath}" - FileUtils.cp('../examples/Vagrantfile.yaml', configpath) -end +configfile = YAML.load_file(File.join(srvpath, "/Vagrantfile.yaml")) Vagrant.configure("2") do |config| - if Vagrant.has_plugin?("vagrant-cachier") + # these two caching methods conflict, so only use one at a time + if Vagrant.has_plugin?("vagrant-cachier") and not configfile.has_key? "aptcachedir" config.cache.scope = :box config.cache.auto_detect = false config.cache.enable :apt config.cache.enable :chef end - config.vm.box = "debian/bookworm64" + config.vm.box = configfile['basebox'] + if configfile.has_key? "basebox_version" + config.vm.box_version = configfile['basebox_version'] + end if not configfile.has_key? "vm_provider" or configfile["vm_provider"] == "virtualbox" # default to VirtualBox if not set @@ -53,8 +36,6 @@ Vagrant.configure("2") do |config| libvirt.uri = "qemu:///system" libvirt.cpus = configfile["cpus"] libvirt.memory = configfile["memory"] - # Debian Vagrant image is only 20G, so allocate more - libvirt.machine_virtual_size = 1024 if configfile.has_key? "libvirt_disk_bus" libvirt.disk_bus = configfile["libvirt_disk_bus"] end @@ -67,8 +48,7 @@ Vagrant.configure("2") do |config| else synced_folder_type = '9p' end - config.vm.synced_folder './', '/vagrant', type: synced_folder_type, - SharedFoldersEnableSymlinksCreate: false + config.vm.synced_folder './', '/vagrant', type: synced_folder_type else abort("No supported VM Provider found, set vm_provider in Vagrantfile.yaml!") end @@ -80,30 +60,30 @@ Vagrant.configure("2") do |config| args: [configfile["aptproxy"]] end - config.vm.synced_folder configfile["cachedir"], '/vagrant/cache', - create: true, type: synced_folder_type - + # buildserver/ is shared to the VM's /vagrant by default so the old + # default does not need a custom mount + if configfile["cachedir"] != "buildserver/cache" + config.vm.synced_folder configfile["cachedir"], '/vagrant/cache', + create: true, type: synced_folder_type + end # Make sure dir exists to mount to, since buildserver/ is # automatically mounted as /vagrant in the guest VM. 
This is more # necessary with 9p synced folders - Dir.mkdir('cache') unless File.exist?('cache') + Dir.mkdir('cache') unless File.exists?('cache') - # Root partition needs to be resized to the new allocated space - config.vm.provision "shell", inline: <<-SHELL - growpart -v -u auto /dev/vda 1 - resize2fs /dev/vda1 - SHELL + # cache .deb packages on the host via a mount trick + if configfile.has_key? "aptcachedir" + config.vm.synced_folder configfile["aptcachedir"], "/var/cache/apt/archives", + owner: 'root', group: 'root', create: true + end - config.vm.provision "shell", name: "setup-env-vars", path: "setup-env-vars", - args: ["/opt/android-sdk"] - config.vm.provision "shell", name: "apt-get-install", path: "provision-apt-get-install", + config.vm.provision "shell", path: "setup-env-vars", + args: ["/home/vagrant/android-sdk"] + config.vm.provision "shell", path: "provision-apt-get-install", args: [configfile['debian_mirror']] - config.vm.provision "shell", name: "android-sdk", path: "provision-android-sdk" - config.vm.provision "shell", name: "android-ndk", path: "provision-android-ndk", - args: ["/opt/android-sdk/ndk"] - config.vm.provision "shell", name: "gradle", path: "provision-gradle" - config.vm.provision "shell", name: "disable-analytics", path: "provision-disable-analytics" - config.vm.provision "shell", name: "buildserverid", path: "provision-buildserverid", - args: [`git rev-parse HEAD`] + config.vm.provision "shell", path: "provision-android-sdk" + config.vm.provision "shell", path: "provision-android-ndk", + args: ["/home/vagrant/android-ndk"] + config.vm.provision "shell", path: "provision-gradle" end diff --git a/buildserver/config.buildserver.py b/buildserver/config.buildserver.py new file mode 100644 index 00000000..aeeaa0e8 --- /dev/null +++ b/buildserver/config.buildserver.py @@ -0,0 +1,17 @@ +sdk_path = "/home/vagrant/android-sdk" +ndk_paths = { + 'r10e': "/home/vagrant/android-ndk/r10e", + 'r11c': "/home/vagrant/android-ndk/r11c", + 'r12b': "/home/vagrant/android-ndk/r12b", + 'r13b': "/home/vagrant/android-ndk/r13b", + 'r14b': "/home/vagrant/android-ndk/r14b", + 'r15c': "/home/vagrant/android-ndk/r15c", + 'r16b': "/home/vagrant/android-ndk/r16b", + 'r17b': "/home/vagrant/android-ndk/r17b", + 'r18b': "/home/vagrant/android-ndk/r18b", + 'r19': "/home/vagrant/android-ndk/r19", +} +java_paths = { + '8': "/usr/lib/jvm/java-8-openjdk-amd64", +} +gradle_version_dir = "/opt/gradle/versions" diff --git a/buildserver/config.buildserver.yml b/buildserver/config.buildserver.yml deleted file mode 100644 index 944535c5..00000000 --- a/buildserver/config.buildserver.yml +++ /dev/null @@ -1,2 +0,0 @@ -sdk_path: /opt/android-sdk -gradle_version_dir: /opt/gradle/versions diff --git a/buildserver/provision-android-ndk b/buildserver/provision-android-ndk index 63f5eee7..ce5608a0 100644 --- a/buildserver/provision-android-ndk +++ b/buildserver/provision-android-ndk @@ -1,30 +1,26 @@ #!/bin/bash # -# $1 is the root dir to install the NDKs into -# $2 and after are the NDK releases to install echo $0 set -e set -x NDK_BASE=$1 -shift test -e $NDK_BASE || mkdir -p $NDK_BASE cd $NDK_BASE -for version in $@; do +if [ ! -e $NDK_BASE/r10e ]; then + 7zr x /vagrant/cache/android-ndk-r10e-linux-x86_64.bin > /dev/null + mv android-ndk-r10e r10e +fi + +for version in r11c r12b r13b r14b r15c r16b r17b r18b r19; do if [ ! 
-e ${NDK_BASE}/${version} ]; then - unzip /vagrant/cache/android-ndk-${version}-linux*.zip > /dev/null - mv android-ndk-${version} \ - `sed -En 's,^Pkg.Revision *= *(.+),\1,p' android-ndk-${version}/source.properties` + unzip /vagrant/cache/android-ndk-${version}-linux-x86_64.zip > /dev/null + mv android-ndk-${version} ${version} fi done -# allow gradle/etc to install missing NDK versions -chgrp vagrant $NDK_BASE -chmod g+w $NDK_BASE - -# ensure all users can read and execute the NDK chmod -R a+rX $NDK_BASE/ -find $NDK_BASE/ -type f -executable -exec chmod a+x -- {} + +find $NDK_BASE/ -type f -executable -print0 | xargs -0 chmod a+x diff --git a/buildserver/provision-android-sdk b/buildserver/provision-android-sdk index 19002a47..b032a45a 100644 --- a/buildserver/provision-android-sdk +++ b/buildserver/provision-android-sdk @@ -1,4 +1,5 @@ #!/bin/bash +# echo $0 set -e @@ -9,6 +10,19 @@ if [ -z $ANDROID_HOME ]; then exit 1 fi +# TODO remove the rm, this should work with an existing ANDROID_HOME +if [ ! -x $ANDROID_HOME/tools/android ]; then + rm -rf $ANDROID_HOME + mkdir ${ANDROID_HOME} + mkdir ${ANDROID_HOME}/temp + mkdir ${ANDROID_HOME}/platforms + mkdir ${ANDROID_HOME}/build-tools + cd $ANDROID_HOME + + tools=`ls -1 /vagrant/cache/tools_*.zip | sort -n | tail -1` + unzip -qq $tools +fi + # disable the repositories of proprietary stuff disabled=" @version@=1 @@ -26,96 +40,59 @@ for line in $disabled; do echo $line >> ${HOME}/.android/sites-settings.cfg done -# Include old makebuildserver cache that is a Vagrant synced_folder -# for sdkmanager to use. -cachedir=$HOME/.cache/sdkmanager -mkdir -p $cachedir -pushd $cachedir -for f in /vagrant/cache/*.zip; do - test -e $f && ln -s $f + +cd /vagrant/cache + +# make links for `android update sdk` to use and delete +blacklist="build-tools_r17-linux.zip + build-tools_r18.0.1-linux.zip + build-tools_r18.1-linux.zip + build-tools_r18.1.1-linux.zip + build-tools_r19-linux.zip + build-tools_r19.0.1-linux.zip + build-tools_r19.0.2-linux.zip + build-tools_r19.0.3-linux.zip + build-tools_r21-linux.zip + build-tools_r21.0.1-linux.zip + build-tools_r21.0.2-linux.zip + build-tools_r21.1-linux.zip + build-tools_r21.1.1-linux.zip + build-tools_r22-linux.zip + build-tools_r23-linux.zip + android-1.5_r04-linux.zip + android-1.6_r03-linux.zip + android-2.0_r01-linux.zip + android-2.0.1_r01-linux.zip" +latestm2=`ls -1 android_m2repository*.zip | sort -n | tail -1` +for f in $latestm2 android-[0-9]*.zip platform-[0-9]*.zip build-tools_r*-linux.zip; do + rm -f ${ANDROID_HOME}/temp/$f + if [[ $blacklist != *$f* ]]; then + ln -s /vagrant/cache/$f ${ANDROID_HOME}/temp/ + fi done -popd -# TODO do not preinstall 'tools' or 'platform-tools' at all, app builds don't need them -packages=" - tools;25.2.5 - platform-tools - build-tools;19.1.0 - build-tools;20.0.0 - build-tools;21.1.2 - build-tools;22.0.1 - build-tools;23.0.1 - build-tools;23.0.2 - build-tools;23.0.3 - build-tools;24.0.0 - build-tools;24.0.1 - build-tools;24.0.2 - build-tools;24.0.3 - build-tools;25.0.0 - build-tools;25.0.1 - build-tools;25.0.2 - build-tools;25.0.3 - build-tools;26.0.0 - build-tools;26.0.1 - build-tools;26.0.2 - build-tools;26.0.3 - build-tools;27.0.0 - build-tools;27.0.1 - build-tools;27.0.2 - build-tools;27.0.3 - build-tools;28.0.0 - build-tools;28.0.1 - build-tools;28.0.2 - build-tools;28.0.3 - build-tools;29.0.2 - build-tools;29.0.3 - build-tools;30.0.0 - build-tools;30.0.1 - build-tools;30.0.2 - build-tools;30.0.3 - build-tools;31.0.0 - build-tools;32.0.0 - build-tools;33.0.0 - 
platforms;android-10 - platforms;android-11 - platforms;android-12 - platforms;android-13 - platforms;android-14 - platforms;android-15 - platforms;android-16 - platforms;android-17 - platforms;android-18 - platforms;android-19 - platforms;android-20 - platforms;android-21 - platforms;android-22 - platforms;android-23 - platforms;android-24 - platforms;android-25 - platforms;android-26 - platforms;android-27 - platforms;android-28 - platforms;android-29 - platforms;android-30 - platforms;android-31 - platforms;android-32 - platforms;android-33 -" +# install all cached platforms +cached="" +for f in `ls -1 android-[0-9]*.zip platform-[0-9]*.zip`; do + sdk=`unzip -c $f "*/build.prop" | sed -n 's,^ro.build.version.sdk=,,p'` + cached=,android-${sdk}${cached} +done -if [ $# -gt 0 ]; then - echo found args - packages=$@ -fi +# install all cached build-tools +for f in `ls -1 build-tools*.zip`; do + ver=`unzip -c $f "*/source.properties" | sed -n 's,^Pkg.Revision=,,p'` + if [[ $ver == 24.0.0 ]] && [[ $f =~ .*r24\.0\.1.* ]]; then + # 24.0.1 has the wrong revision in the zip + ver=24.0.1 + fi + cached=,build-tools-${ver}${cached} +done -# temporary test of whether this script ran. It will change once -# 'tools' is no longer installed by default. -if [ ! -x $ANDROID_HOME/tools/bin/sdkmanager ]; then - mkdir -p ${ANDROID_HOME}/ - sdkmanager $packages -fi +${ANDROID_HOME}/tools/android update sdk --no-ui --all \ + --filter platform-tools,extra-android-m2repository${cached} < $ANDROID_HOME/licenses/android-sdk-license 8933bad161af4178b1185d1a37fbf41ea5269c55 d56f5187479451eabf01fb78af6dfcb131a6481e - -24333f8a63b6825ea9c5514f83c2829b004d1fee EOF cat < $ANDROID_HOME/licenses/android-sdk-preview-license @@ -134,25 +109,22 @@ cat < $ANDROID_HOME/licenses/android-sdk-preview-license EOF cat < $ANDROID_HOME/licenses/android-sdk-preview-license-old - 79120722343a6f314e0719f863036c702b0e6b2a 84831b9409646a918e30573bab4c9c91346d8abd EOF -cat < $ANDROID_HOME/licenses/intel-android-extra-license +echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout;1.0.1" +echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout-solver;1.0.1" +echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout;1.0.2" +echo y | $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout-solver;1.0.2" -d975f751698a77b662f1254ddbeed3901e976f5a -EOF - -chmod a+X $(dirname $ANDROID_HOME/) chmod -R a+rX $ANDROID_HOME/ chgrp vagrant $ANDROID_HOME chmod g+w $ANDROID_HOME find $ANDROID_HOME/ -type f -executable -print0 | xargs -0 chmod a+x # allow gradle to install newer build-tools and platforms -mkdir -p $ANDROID_HOME/{build-tools,platforms} chgrp vagrant $ANDROID_HOME/{build-tools,platforms} chmod g+w $ANDROID_HOME/{build-tools,platforms} @@ -160,8 +132,3 @@ chmod g+w $ANDROID_HOME/{build-tools,platforms} test -d $ANDROID_HOME/extras/m2repository || mkdir -p $ANDROID_HOME/extras/m2repository find $ANDROID_HOME/extras/m2repository -type d | xargs chgrp vagrant find $ANDROID_HOME/extras/m2repository -type d | xargs chmod g+w - -# allow gradle/sdkmanager to install extras;android;m2repository -test -d $ANDROID_HOME/extras/android || mkdir -p $ANDROID_HOME/extras/android -find $ANDROID_HOME/extras/android -type d | xargs chgrp vagrant -find $ANDROID_HOME/extras/android -type d | xargs chmod g+w diff --git 
a/buildserver/provision-apt-get-install b/buildserver/provision-apt-get-install index ca39c47b..a99b6871 100644 --- a/buildserver/provision-apt-get-install +++ b/buildserver/provision-apt-get-install @@ -5,12 +5,12 @@ set -e set -x debian_mirror=$1 -export DEBIAN_FRONTEND=noninteractive +export DEBIAN_FRONTEND=noninteractive printf 'APT::Install-Recommends "0";\nAPT::Install-Suggests "0";\n' \ > /etc/apt/apt.conf.d/99no-install-recommends -printf 'Acquire::Retries "20";\n' \ +printf 'APT::Acquire::Retries "20";\n' \ > /etc/apt/apt.conf.d/99acquire-retries cat < /etc/apt/apt.conf.d/99no-auto-updates @@ -22,118 +22,105 @@ EOF printf 'APT::Get::Assume-Yes "true";\n' \ > /etc/apt/apt.conf.d/99assumeyes -cat < /etc/apt/apt.conf.d/99quiet -Dpkg::Use-Pty "0"; -quiet "1"; -EOF - -cat < /etc/apt/apt.conf.d/99confdef -Dpkg::Options { "--force-confdef"; }; -EOF - -echo "man-db man-db/auto-update boolean false" | debconf-set-selections - if echo $debian_mirror | grep '^https' 2>&1 > /dev/null; then apt-get update || apt-get update - apt-get install ca-certificates + apt-get install apt-transport-https ca-certificates fi cat << EOF > /etc/apt/sources.list -deb ${debian_mirror} bookworm main -deb https://security.debian.org/debian-security bookworm-security main -deb ${debian_mirror} bookworm-updates main +deb ${debian_mirror} stretch main +deb http://security.debian.org/debian-security stretch/updates main +deb ${debian_mirror} stretch-updates main EOF -echo "deb ${debian_mirror} bookworm-backports main" > /etc/apt/sources.list.d/backports.list +echo "deb ${debian_mirror} stretch-backports main" > /etc/apt/sources.list.d/stretch-backports.list +echo "deb ${debian_mirror} testing main" > /etc/apt/sources.list.d/testing.list +printf "Package: *\nPin: release o=Debian,a=testing\nPin-Priority: -300\n" > /etc/apt/preferences.d/debian-testing + +dpkg --add-architecture i386 apt-get update || apt-get update - -# purge things that might come from the base box, but we don't want -# https://salsa.debian.org/cloud-team/debian-vagrant-images/-/tree/master/config_space/package_config -# cat config_space/package_config/* | sort -u | grep -v '[A-Z#]' - -purge=" - apt-listchanges - apt-utils - bash-completion - bind9-* - bsdextrautils - bzip2 - chrony - cloud-utils - cron - cron-daemon-common - dbus - debconf-i18n - debian-faq - dmidecode - doc-debian - fdisk - file - groff-base - inetutils-telnet - krb5-locales - less - locales - logrotate - lsof - manpages - nano - ncurses-term - netcat-traditional - pciutils - reportbug - rsyslog - tasksel - traceroute - unattended-upgrades - usrmerge - vim-* - wamerican - wget - whiptail - xz-utils -" -# clean up files packages to be purged, then purge the packages -rm -rf /var/run/dbus /var/log/unattended-upgrades -apt-get purge $purge - apt-get upgrade --download-only apt-get upgrade -# again after upgrade in case of keyring changes -apt-get update || apt-get update - packages=" - androguard/bookworm-backports - apksigner - default-jdk-headless - default-jre-headless + ant + asn1c + ant-contrib + autoconf + autoconf2.13 + automake + automake1.11 + autopoint + bison + bzr + ca-certificates-java + cmake curl - dexdump - fdroidserver + disorderfs + expect + faketime + flex + gettext + gettext-base + git-core git-svn - gnupg + gperf + javacc + libarchive-zip-perl + libexpat1-dev + libgcc1:i386 + libglib2.0-dev + liblzma-dev + libncurses5:i386 + librsvg2-bin + libsaxonb-java + libssl-dev + libstdc++6:i386 + libtool + libtool-bin + make + maven mercurial - patch - python3-magic - 
python3-packaging + nasm + openjdk-8-jre-headless + openjdk-8-jdk-headless + optipng + p7zip + pkg-config + python-gnupg + python-lxml + python-magic + python-pip + python-setuptools + python3-defusedxml + python3-git + python3-gitdb + python3-gnupg + python3-pip + python3-pyasn1 + python3-pyasn1-modules + python3-requests + python3-setuptools + python3-smmap + python3-yaml + python3-ruamel.yaml + quilt rsync - sdkmanager/bookworm-backports - sudo + scons + sqlite3 + subversion + swig unzip + xsltproc + yasm + zip + zlib1g:i386 " - apt-get install $packages --download-only apt-get install $packages -# fdroidserver comes from git, it was installed just for dependencies -apt-mark manual `apt-cache depends fdroidserver | sed -nE 's,^[| ]*Depends: ([a-z0-9 -]+),\1,p'` -apt-get purge fdroidserver - -# clean up things that will become outdated anyway -apt-get autoremove --purge -apt-get clean -rm -rf /var/lib/apt/lists/* - highestjava=`update-java-alternatives --list | sort -n | tail -1 | cut -d ' ' -f 1` update-java-alternatives --set $highestjava + +# configure headless openjdk to work without gtk accessability dependencies +sed -i -e 's@\(assistive_technologies=org.GNOME.Accessibility.AtkWrapper\)@#\1@' /etc/java-8-openjdk/accessibility.properties diff --git a/buildserver/provision-buildserverid b/buildserver/provision-buildserverid deleted file mode 100644 index f5010c39..00000000 --- a/buildserver/provision-buildserverid +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -e - -test -n "$1" - -echo "Writing buildserver ID ...ID is $1" -set -x -echo "$1" > /home/vagrant/buildserverid -# sync data before we halt() the machine, we had an empty buildserverid otherwise -sync diff --git a/buildserver/provision-disable-analytics b/buildserver/provision-disable-analytics deleted file mode 100644 index e1ec62b7..00000000 --- a/buildserver/provision-disable-analytics +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -ex - -# Flutter -# https://github.com/flutter/flutter/issues/73657 -flutter_conf=/home/vagrant/.flutter -cat < $flutter_conf -{ - "enabled": false -} -EOF -chown -R vagrant:vagrant $flutter_conf -chmod -R 0644 $flutter_conf - diff --git a/buildserver/provision-gradle b/buildserver/provision-gradle index a282a4c5..d0eaf8ea 100644 --- a/buildserver/provision-gradle +++ b/buildserver/provision-gradle @@ -10,44 +10,19 @@ vergte() { test -e /opt/gradle/versions || mkdir -p /opt/gradle/versions cd /opt/gradle/versions - -glob="/vagrant/cache/gradle-*.zip" -if compgen -G $glob; then # test if glob matches anything - f=$(ls -1 --sort=version --group-directories-first $glob | tail -1) +for f in /vagrant/cache/gradle-*.zip; do ver=`echo $f | sed 's,.*gradle-\([0-9][0-9.]*\).*\.zip,\1,'` # only use versions greater or equal 2.2.1 if vergte $ver 2.2.1 && [ ! 
-d /opt/gradle/versions/${ver} ]; then unzip -qq $f mv gradle-${ver} /opt/gradle/versions/${ver} fi -fi +done chmod -R a+rX /opt/gradle test -e /opt/gradle/bin || mkdir -p /opt/gradle/bin -git clone --depth 1 https://gitlab.com/fdroid/gradlew-fdroid.git /home/vagrant/gradlew-fdroid/ -chmod 0755 /home/vagrant/gradlew-fdroid/gradlew-fdroid -chmod -R u+rwX,a+rX,go-w /home/vagrant/gradlew-fdroid/ -ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /opt/gradle/bin/gradle -ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /usr/local/bin/ - -chown -h vagrant:vagrant /opt/gradle/bin/gradle -chown vagrant:vagrant /opt/gradle/versions +ln -fs /home/vagrant/fdroidserver/gradlew-fdroid /opt/gradle/bin/gradle +chown -h vagrant.vagrant /opt/gradle/bin/gradle +chown vagrant.vagrant /opt/gradle/versions chmod 0755 /opt/gradle/versions - -GRADLE_HOME=/home/vagrant/.gradle -test -d $GRADLE_HOME/ || mkdir $GRADLE_HOME/ -cat < $GRADLE_HOME/gradle.properties -# builds are not reused, so the daemon is a waste of time -org.gradle.daemon=false - -# set network timeouts to 10 minutes -# https://github.com/gradle/gradle/pull/3371/files -systemProp.http.connectionTimeout=600000 -systemProp.http.socketTimeout=600000 -systemProp.org.gradle.internal.http.connectionTimeout=600000 -systemProp.org.gradle.internal.http.socketTimeout=600000 -EOF - -chown -R vagrant:vagrant $GRADLE_HOME/ -chmod -R a+rX $GRADLE_HOME/ diff --git a/buildserver/setup-env-vars b/buildserver/setup-env-vars index 1c3599e9..19259266 100644 --- a/buildserver/setup-env-vars +++ b/buildserver/setup-env-vars @@ -12,16 +12,9 @@ echo "# generated on "`date` > $bsenv echo export ANDROID_HOME=$1 >> $bsenv echo 'export PATH=$PATH:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools:/opt/gradle/bin' >> $bsenv echo "export DEBIAN_FRONTEND=noninteractive" >> $bsenv -echo 'export home_vagrant=/home/vagrant' >> $bsenv -echo 'export fdroidserver=$home_vagrant/fdroidserver' >> $bsenv -echo 'export LC_ALL=C.UTF-8' >> $bsenv chmod 0644 $bsenv # make sure that SSH never hangs at a password or key prompt -mkdir -p /etc/ssh/ssh_config.d/ -cat << EOF >> /etc/ssh/ssh_config.d/fdroid -Host * - StrictHostKeyChecking yes - BatchMode yes -EOF +printf ' StrictHostKeyChecking yes' >> /etc/ssh/ssh_config +printf ' BatchMode yes' >> /etc/ssh/config diff --git a/completion/bash-completion b/completion/bash-completion index 57fcfd12..d00a767a 100644 --- a/completion/bash-completion +++ b/completion/bash-completion @@ -26,15 +26,16 @@ __fdroid_init() { (( $# >= 1 )) && __complete_${1} } -__get_appid() { - files=( metadata/*.yml ) +__by_ext() { + local ext="$1" + files=( metadata/*.$ext ) files=( ${files[@]#metadata/} ) - files=${files[@]%.yml} + files=${files[@]%.$ext} echo "$files" } __package() { - files="$(__get_appid)" + files="$(__by_ext txt) $(__by_ext yml) $(__by_ext json)" COMPREPLY=( $( compgen -W "$files" -- $cur ) ) } @@ -59,14 +60,20 @@ __apk_vercode() { } __vercode() { - if [ $prev = ":" ]; then - appid="${COMP_WORDS[COMP_CWORD-2]}" - elif [ $cur = ":" ]; then - appid=$prev - cur="" - fi - versionCodes=`sed -En 's,^ +versionCode: +([0-9]+) *$,\1,p' metadata/${appid}.yml` - COMPREPLY=( $( compgen -W "$versionCodes" -- $cur ) ) + local p v + echo $cur | IFS=':' read p v + + COMPREPLY=( $( compgen -P "${p}:" -W "$( while read line; do + if [[ "$line" == "Build Version:"* ]] + then + line="${line#*,}" + printf "${line%%,*} " + elif [[ "$line" == "Build:"* ]] + then + line="${line#*,}" + printf "${line%%,*} " + fi + done < "metadata/${p}.txt" )" -- $cur ) ) } 
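The rewritten `__vercode` above pulls version codes out of the old `.txt` metadata format by splitting `Build:` and `Build Version:` lines on commas. A minimal standalone sketch of that string surgery, assuming a hypothetical metadata line of the form `Build:1.4.1,35` (version name, then version code):

```bash
# Sketch of the extraction done in __vercode, on a hypothetical metadata line.
line="Build:1.4.1,35"
line="${line#*,}"     # drop everything up to the first comma -> "35"
echo "${line%%,*}"    # keep everything before the next comma -> prints 35
```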
__complete_options() { @@ -81,19 +88,30 @@ __complete_options() { } __complete_build() { - opts="-v -q -l -s -t -f -a" + opts="-v -q -l -s -t -f -a -w" - lopts="--verbose --quiet --latest --stop --test --server --skip-scan --scan-binary --no-tarball --force --all --no-refresh" - case "${prev}" in - :) - __vercode - return 0;; - esac + lopts="--verbose --quiet --latest --stop --test --server --reset-server --skip-scan --no-tarball --force --all --wiki --no-refresh" case "${cur}" in -*) __complete_options return 0;; - :) + *:*) + __vercode + return 0;; + *) + __package + return 0;; + esac +} + +__complete_dscanner() { + opts="-v -q -l" + lopts="--verbose --quiet --clean-after --clean-before --clean-only --init-only --latest --repo-path" + case "${cur}" in + -*) + __complete_options + return 0;; + *:) __vercode return 0;; *) @@ -109,8 +127,8 @@ __complete_gpgsign() { } __complete_install() { - opts="-v -q -a -p -n -y" - lopts="--verbose --quiet --all --color --no-color --privacy-mode --no-privacy-mode --no --yes" + opts="-v -q" + lopts="--verbose --quiet --all" case "${cur}" in -*) __complete_options @@ -125,9 +143,9 @@ __complete_install() { } __complete_update() { - opts="-c -v -q -i -I -e" - lopts="--create-metadata --verbose --quiet - --icons --pretty --clean --delete-unknown + opts="-c -v -q -b -i -I -e -w" + lopts="--create-metadata --verbose --quiet --buildreport + --icons --wiki --pretty --clean --delete-unknown --nosign --rename-apks --use-date-from-apk" case "${prev}" in -e|--editor) @@ -155,7 +173,7 @@ __complete_publish() { __complete_checkupdates() { opts="-v -q" - lopts="--verbose --quiet --auto --autoonly --commit --allow-dirty" + lopts="--verbose --quiet --auto --autoonly --commit --gplay --allow-dirty" case "${cur}" in -*) __complete_options @@ -199,8 +217,8 @@ __complete_rewritemeta() { } __complete_lint() { - opts="-v -q -f" - lopts="--verbose --quiet --force-yamllint --format" + opts="-v -q" + lopts="--verbose --quiet" case "${cur}" in -*) __complete_options @@ -251,7 +269,7 @@ __complete_btlog() { __complete_mirror() { opts="-v" - lopts="--all --archive --build-logs --color --no-color --pgp-signatures --src-tarballs --output-dir" + lopts="--archive --output-dir" __complete_options } @@ -261,6 +279,12 @@ __complete_nightly() { __complete_options } +__complete_stats() { + opts="-v -q -d" + lopts="--verbose --quiet --download" + __complete_options +} + __complete_deploy() { opts="-i -v -q" lopts="--identity-file --local-copy-dir --sync-from-local-copy-dir @@ -270,14 +294,12 @@ __complete_deploy() { __complete_signatures() { opts="-v -q" - lopts="--verbose --color --no-color --no-check-https" + lopts="--verbose --no-check-https" case "${cur}" in -*) __complete_options return 0;; esac - _filedir 'apk' - return 0 } __complete_signindex() { @@ -289,7 +311,7 @@ __complete_signindex() { __complete_init() { opts="-v -q -d" lopts="--verbose --quiet --distinguished-name --keystore - --repo-keyalias --android-home --no-prompt --color --no-color" + --repo-keyalias --android-home --no-prompt" __complete_options } @@ -298,6 +320,7 @@ btlog \ build \ checkupdates \ deploy \ +dscanner \ gpgsign \ import \ init \ @@ -311,6 +334,7 @@ rewritemeta \ scanner \ signatures \ signindex \ +stats \ update \ verify \ " diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000..9b7b4fb6 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,180 @@ +# This image is intended to be used with fdroidserver for the purpose +# of dynamic scanning of pre-built APKs during the fdroid 
build process. + +# Start with ubuntu 12.04 (i386). +FROM ubuntu:14.04 +MAINTAINER fdroid.dscanner + +ENV DROZER_URL https://github.com/mwrlabs/drozer/releases/download/2.3.4/drozer_2.3.4.deb +ENV DROZER_DEB drozer_2.3.4.deb + +ENV AGENT_URL https://github.com/mwrlabs/drozer/releases/download/2.3.4/drozer-agent-2.3.4.apk +ENV AGENT_APK drozer-agent-2.3.4.apk + +# Specially for SSH access and port redirection +ENV ROOTPASSWORD android + +# Expose ADB, ADB control and VNC ports +EXPOSE 22 +EXPOSE 5037 +EXPOSE 5554 +EXPOSE 5555 +EXPOSE 5900 +EXPOSE 5901 + +ENV DEBIAN_FRONTEND noninteractive +RUN echo "debconf shared/accepted-oracle-license-v1-1 select true" | debconf-set-selections +RUN echo "debconf shared/accepted-oracle-license-v1-1 seen true" | debconf-set-selections + +# Update packages +RUN apt-get -y update + +# Drozer packages +RUN apt-get install wget python2.7 python-dev python2.7-dev python-openssl python-twisted python-protobuf bash-completion -y + +# First, install add-apt-repository, sshd and bzip2 +RUN apt-get -y install python-software-properties bzip2 ssh net-tools + +# ubuntu 14.04 needs this too +RUN apt-get -y install software-properties-common + +# Add oracle-jdk7 to repositories +RUN add-apt-repository ppa:webupd8team/java + +# Make sure the package repository is up to date +RUN echo "deb http://archive.ubuntu.com/ubuntu trusty main universe" > /etc/apt/sources.list + +# Update apt +RUN apt-get update + +# Add drozer +RUN useradd -ms /bin/bash drozer + +# Install oracle-jdk7 +RUN apt-get -y install oracle-java7-installer + +# Install android sdk +RUN wget http://dl.google.com/android/android-sdk_r23-linux.tgz +RUN tar -xvzf android-sdk_r23-linux.tgz +RUN mv -v android-sdk-linux /usr/local/android-sdk + +# Install apache ant +RUN wget http://archive.apache.org/dist/ant/binaries/apache-ant-1.8.4-bin.tar.gz +RUN tar -xvzf apache-ant-1.8.4-bin.tar.gz +RUN mv -v apache-ant-1.8.4 /usr/local/apache-ant + +# Add android tools and platform tools to PATH +ENV ANDROID_HOME /usr/local/android-sdk +ENV PATH $PATH:$ANDROID_HOME/tools +ENV PATH $PATH:$ANDROID_HOME/platform-tools + +# Add ant to PATH +ENV ANT_HOME /usr/local/apache-ant +ENV PATH $PATH:$ANT_HOME/bin + +# Export JAVA_HOME variable +ENV JAVA_HOME /usr/lib/jvm/java-7-oracle + +# Remove compressed files. 
+RUN cd /; rm android-sdk_r23-linux.tgz && rm apache-ant-1.8.4-bin.tar.gz + +# Some preparation before update +RUN chown -R root:root /usr/local/android-sdk/ + +# Install latest android tools and system images +RUN echo "y" | android update sdk --filter platform-tool --no-ui --force +RUN echo "y" | android update sdk --filter platform --no-ui --force +RUN echo "y" | android update sdk --filter build-tools-22.0.1 --no-ui -a +RUN echo "y" | android update sdk --filter sys-img-x86-android-19 --no-ui -a +#RUN echo "y" | android update sdk --filter sys-img-x86-android-21 --no-ui -a +#RUN echo "y" | android update sdk --filter sys-img-x86-android-22 --no-ui -a +RUN echo "y" | android update sdk --filter sys-img-armeabi-v7a-android-19 --no-ui -a +#RUN echo "y" | android update sdk --filter sys-img-armeabi-v7a-android-21 --no-ui -a +#RUN echo "y" | android update sdk --filter sys-img-armeabi-v7a-android-22 --no-ui -a + +# Update ADB +RUN echo "y" | android update adb + +# Create fake keymap file +RUN mkdir /usr/local/android-sdk/tools/keymaps +RUN touch /usr/local/android-sdk/tools/keymaps/en-us + +# Run sshd +RUN apt-get install -y openssh-server +RUN mkdir /var/run/sshd +RUN echo "root:$ROOTPASSWORD" | chpasswd +RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config +RUN sed -i 's/PermitEmptyPasswords no/PermitEmptyPasswords yes/' /etc/ssh/sshd_config + +# SSH login fix. Otherwise user is kicked off after login +RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd + +ENV NOTVISIBLE "in users profile" +RUN echo "export VISIBLE=now" >> /etc/profile + +# Install socat +RUN apt-get install -y socat + +# symlink android bins +RUN ln -sv /usr/local/android-sdk/tools/android /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/emulator /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/ddms /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/scheenshot2 /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/monkeyrunner /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/monitor /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/mksdcard /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/uiautomatorviewer /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/tools/traceview /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/platform-tools/adb /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/platform-tools/fastboot /usr/local/bin/ +RUN ln -sv /usr/local/android-sdk/platform-tools/sqlite3 /usr/local/bin/ + +# Setup DROZER... +# https://labs.mwrinfosecurity.com/tools/drozer/ + +# Run as drozer user +WORKDIR /home/drozer + +# Site lists the shasums, however, I'm not sure the best way to integrate the +# checks here. No real idiomatic way for Dockerfile to do that and most of +# the examples online use chained commands but we want things to *BREAK* when +# the sha doesn't match. So far, I can't seem to reliably make Docker not +# finish the image build process. 
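On the checksum question raised in the comment above: one way to make the build actually break on a mismatch is to chain `sha256sum -c` into the same `RUN` as the download, since any non-zero exit status fails that layer and aborts the image build. A sketch only, with a placeholder checksum rather than the published value:

```bash
# Sketch only: <EXPECTED_SHA256> is a placeholder, not the real checksum.
# Chained into a single RUN so a mismatch fails the docker build.
wget -c "$DROZER_URL" \
 && echo "<EXPECTED_SHA256>  $DROZER_DEB" | sha256sum -c - \
 && dpkg -i "$DROZER_DEB"
```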
+ +# Download the console +RUN wget -c $DROZER_URL + +# Install the console +RUN dpkg -i $DROZER_DEB + +# Download agent +RUN wget -c $AGENT_URL +# Keep it version agnostic for other scripts such as install_drozer.py +RUN mv -v $AGENT_APK drozer-agent.apk + +# Port forwarding required by drozer +RUN echo 'adb forward tcp:31415 tcp:31415' >> /home/drozer/.bashrc + +# Alias for Drozer +RUN echo "alias drozer='drozer console connect'" >> /home/drozer/.bashrc + +# add extra scripting +COPY install_agent.py /home/drozer/install_agent.py +RUN chmod 755 /home/drozer/install_agent.py +COPY enable_service.py /home/drozer/enable_service.py +RUN chmod 755 /home/drozer/enable_service.py +COPY drozer.py /home/drozer/drozer.py +RUN chmod 755 /home/drozer/drozer.py + +# fix ownerships +RUN chown -R drozer.drozer /home/drozer + +RUN apt-get -y --force-yes install python-pkg-resources=3.3-1ubuntu1 +RUN apt-get -y install python-pip python-setuptools git +RUN pip install "git+https://github.com/dtmilano/AndroidViewClient.git#egg=androidviewclient" +RUN apt-get -y install python-pexpect + +# Add entrypoint +COPY entrypoint.sh /home/drozer/entrypoint.sh +RUN chmod +x /home/drozer/entrypoint.sh +ENTRYPOINT ["/home/drozer/entrypoint.sh"] diff --git a/docker/Makefile b/docker/Makefile new file mode 100644 index 00000000..eacb3268 --- /dev/null +++ b/docker/Makefile @@ -0,0 +1,48 @@ +SHELL := /bin/bash +ALIAS = "dscanner" +EXISTS := $(shell docker ps -a -q -f name=$(ALIAS)) +RUNNED := $(shell docker ps -q -f name=$(ALIAS)) +ifneq "$(RUNNED)" "" +IP := $(shell docker inspect $(ALIAS) | grep "IPAddress\"" | head -n1 | cut -d '"' -f 4) +endif +STALE_IMAGES := $(shell docker images | grep "" | awk '{print($$3)}') +EMULATOR ?= "android-19" +ARCH ?= "armeabi-v7a" + +COLON := : + +.PHONY = build clean kill info + +all: help + +help: + @echo "usage: make {help|build|clean|kill|info}" + @echo "" + @echo " help this help screen" + @echo " build create docker image" + @echo " clean remove images and containers" + @echo " kill stop running containers" + @echo " info details of running container" + +build: + @docker build -t "dscanner/fdroidserver:latest" . + +clean: kill + @docker ps -a -q | xargs -n 1 -I {} docker rm -f {} +ifneq "$(STALE_IMAGES)" "" + @docker rmi -f $(STALE_IMAGES) +endif + +kill: +ifneq "$(RUNNED)" "" + @docker kill $(ALIAS) +endif + +info: + @docker ps -a -f name=$(ALIAS) +ifneq "$(RUNNED)" "" + $(eval ADBPORT := $(shell docker port $(ALIAS) | grep '5555/tcp' | awk '{split($$3,a,"$(COLON)");print a[2]}')) + @echo -e "Use:\n adb kill-server\n adb connect $(IP):$(ADBPORT)" +else + @echo "Run container" +endif diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 00000000..9f2d657c --- /dev/null +++ b/docker/README.md @@ -0,0 +1,13 @@ +# dscanner docker image # + +Use `make help` for up-to-date instructions. 
+ +``` +usage: make {help|build|clean|kill|info} + + help this help screen + build create docker image + clean remove images and containers + kill stop running containers + info details of running container +``` diff --git a/docker/drozer.py b/docker/drozer.py new file mode 100644 index 00000000..d0546934 --- /dev/null +++ b/docker/drozer.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python2 + +import pexpect +import sys + +prompt = "dz>" +target = sys.argv[1] + +drozer = pexpect.spawn("drozer console connect") +drozer.logfile = open("/tmp/drozer_report.log", "w") + + +# start +drozer.expect(prompt) + + +def send_command(command, target): + cmd = "run {0} -a {1}".format(command, target) + drozer.sendline(cmd) + drozer.expect(prompt) + +scanners = [ + "scanner.misc.native", # Find native components included in packages + #"scanner.misc.readablefiles", # Find world-readable files in the given folder + #"scanner.misc.secretcodes", # Search for secret codes that can be used from the dialer + #"scanner.misc.sflagbinaries", # Find suid/sgid binaries in the given folder (default is /system). + #"scanner.misc.writablefiles", # Find world-writable files in the given folder + "scanner.provider.finduris", # Search for content providers that can be queried. + "scanner.provider.injection", # Test content providers for SQL injection vulnerabilities. + "scanner.provider.sqltables", # Find tables accessible through SQL injection vulnerabilities. + "scanner.provider.traversal" # Test content providers for basic directory traversal +] + +for scanner in scanners: + send_command(scanner, target) diff --git a/docker/enable_service.py b/docker/enable_service.py new file mode 100755 index 00000000..803532c9 --- /dev/null +++ b/docker/enable_service.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python2 + +from com.dtmilano.android.viewclient import ViewClient + +vc = ViewClient(*ViewClient.connectToDeviceOrExit()) + +button = vc.findViewWithText("OFF") + +if button: + (x, y) = button.getXY() + button.touch() +else: + print("Button not found. Is the app currently running?") + exit() + +print("Done!") diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100755 index 00000000..95b5ede1 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +if [[ $EMULATOR == "" ]]; then + EMULATOR="android-19" + echo "Using default emulator $EMULATOR" +fi + +if [[ $ARCH == "" ]]; then + ARCH="x86" + echo "Using default arch $ARCH" +fi +echo EMULATOR = "Requested API: ${EMULATOR} (${ARCH}) emulator." 
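The block above only falls back to `android-19`/`x86` when the variables are empty strings, and the final `echo` prints a literal `EMULATOR =` prefix before the message, which is probably unintended. A minimal sketch of the same defaulting using bash parameter expansion (`${VAR:-default}` substitutes the default when the variable is unset or empty), assuming the same default values:

```
# Equivalent default handling for the entrypoint; assumes the same defaults.
EMULATOR="${EMULATOR:-android-19}"
ARCH="${ARCH:-x86}"
echo "Requested API: ${EMULATOR} (${ARCH}) emulator."
```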
+if [[ -n $1 ]]; then + echo "Last line of file specified as non-opt/last argument:" + tail -1 $1 +fi + +# Run sshd +/usr/sbin/sshd +adb start-server + +# Detect ip and forward ADB ports outside to outside interface +ip=$(ifconfig | grep 'inet addr:'| grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $1}') +socat tcp-listen:5037,bind=$ip,fork tcp:127.0.0.1:5037 & +socat tcp-listen:5554,bind=$ip,fork tcp:127.0.0.1:5554 & +socat tcp-listen:5555,bind=$ip,fork tcp:127.0.0.1:5555 & + +# Set up and run emulator +if [[ $ARCH == *"x86"* ]] +then + EMU="x86" +else + EMU="arm" +fi + +#FASTDROID_VNC_URL="https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/fastdroid-vnc/fastdroid-vnc" +#wget -c "${FASTDROID_VNC_URL}" + +export PATH="${PATH}:/usr/local/android-sdk/tools/:/usr/local/android-sdk/platform-tools/" + +echo "no" | android create avd -f -n test -t ${EMULATOR} --abi default/${ARCH} +echo "no" | emulator64-${EMU} -avd test -noaudio -no-window -gpu off -verbose -qemu -usbdevice tablet -vnc :0 diff --git a/docker/install_agent.py b/docker/install_agent.py new file mode 100755 index 00000000..1a0f348a --- /dev/null +++ b/docker/install_agent.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python2 + +import os +from subprocess import call, check_output +from time import sleep + +FNULL = open(os.devnull, 'w') + +print("Ensuring device is online") +call("adb wait-for-device", shell=True) + +print("Installing the drozer agent") +print("If the device just came online it is likely the package manager hasn't booted.") +print("Will try multiple attempts to install.") +print("This may need tweaking depending on hardware.") + + +attempts = 0 +time_to_sleep = 30 + +while attempts < 8: + output = check_output('adb shell "pm list packages"', shell=True) + print("Checking whether the package manager is up...") + if "Could not access the Package Manager" in output: + print("Nope. Sleeping for 30 seconds and then trying again.") + sleep(time_to_sleep) + else: + break + +time_to_sleep = 5 +attempts = 0 + +while attempts < 5: + sleep(time_to_sleep) + try: + install_output = check_output("adb install /home/drozer/drozer-agent.apk", shell=True) + except Exception: + print("Failed. Trying again.") + attempts += 1 + else: + attempts += 1 + if "Error: Could not access the Package Manager" not in install_output: + break + +print("Install attempted. Checking everything worked") + +pm_list_output = check_output('adb shell "pm list packages"', shell=True) + +if "com.mwr.dz" not in pm_list_output: + print(install_output) + exit("APK didn't install properly. Exiting.") + +print("Installed ok.") + +print("Starting the drozer agent main activity: com.mwr.dz/.activities.MainActivity") +call('adb shell "am start com.mwr.dz/.activities.MainActivity"', shell=True, stdout=FNULL) + +print("Starting the service") +# start the service +call("python /home/drozer/enable_service.py", shell=True, stdout=FNULL) + +print("Forward dem ports mon.") +call("adb forward tcp:31415 tcp:31415", shell=True, stdout=FNULL) diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index d0c3cbf1..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = source -BUILDDIR = build - -# Put it first so that "make" without argument is like "make help". 
-help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 6247f7e2..00000000 --- a/docs/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index c20542de..00000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,78 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -sys.path.insert(0, os.path.abspath('../../fdroidserver')) - -# -- Project information ----------------------------------------------------- - -project = 'fdroidserver' -copyright = '2021, The F-Droid Project' -author = 'The F-Droid Project' - - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'numpydoc', - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - "sphinx.ext.intersphinx", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "pydata_sphinx_theme" - -html_theme_options = { - "gitlab_url": "https://gitlab.com/fdroid/fdroidserver", - "show_prev_next": False, - "navbar_end": ["search-field.html", "navbar-icon-links.html"], -} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -html_sidebars = { - "**": [], -} - -#html_sidebars = { -# '**': ['globaltoc.html', 'sourcelink.html', 'searchbox.html'], -# 'using/windows': ['windowssidebar.html', 'searchbox.html'], -#} - -html_split_index = True -#numpydoc_validation_checks = {"all"} - -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), -} diff --git a/docs/source/index.rst b/docs/source/index.rst deleted file mode 100644 index fcd4dfe3..00000000 --- a/docs/source/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. fdroidserver documentation master file, created by - sphinx-quickstart on Mon May 3 10:06:52 2021. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to fdroidserver's documentation! -======================================== - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - -These pages contain the autogenerated module docu based on the current `sources `_. - -Indices and tables -================== - - -* Under :ref:`modindex` the different fdroidserver modules are listed. -* In :ref:`genindex` you'll find all methods sorted alphabetically. diff --git a/examples/Vagrantfile.yaml b/examples/Vagrantfile.yaml deleted file mode 100644 index 276f0179..00000000 --- a/examples/Vagrantfile.yaml +++ /dev/null @@ -1,54 +0,0 @@ ---- - -# You may want to alter these before running ./makebuildserver - -# In the process of setting up the build server, many gigs of files -# are downloaded (Android SDK components, gradle, etc). These are -# cached so that they are not redownloaded each time. By default, -# these are stored in ~/.cache/fdroidserver -# -# cachedir: buildserver/cache - -# To specify which Debian mirror the build server VM should use, by -# default it uses http.debian.net, which auto-detects which is the -# best mirror to use. -# -# debian_mirror: https://debian.osuosl.org/debian/ - -# The amount of RAM the build server will have (default: 2048) -# memory: 3584 - -# The number of CPUs the build server will have -# cpus: 1 - -# Debian package proxy server - if you have one -# aptproxy: http://192.168.0.19:8000 - -# If this is running on an older machine or on a virtualized system, -# it can run a lot slower. If the provisioning fails with a warning -# about the timeout, extend the timeout here. (default: 600 seconds) -# -# boot_timeout: 1200 - -# By default, this whole process uses VirtualBox as the provider, but -# QEMU+KVM is also supported via the libvirt plugin to vagrant. If -# this is run within a KVM guest, then libvirt's QEMU+KVM will be used -# automatically. It can also be manually enabled by uncommenting -# below: -# -# vm_provider: libvirt - -# By default libvirt uses 'virtio' for both network and disk drivers. -# Some systems (eg. nesting VMware ESXi) do not support virtio. As a -# workaround for such rare cases, this setting allows to configure -# KVM/libvirt to emulate hardware rather than using virtio. -# -# libvirt_disk_bus: sata -# libvirt_nic_model_type: rtl8139 - -# Sometimes, it is not possible to use the 9p synced folder type with -# libvirt, like if running a KVM buildserver instance inside of a -# VMware ESXi guest. In that case, using NFS or another method is -# required. 
-# -# synced_folder_type: nfs diff --git a/examples/config.py b/examples/config.py new file mode 100644 index 00000000..5f0be273 --- /dev/null +++ b/examples/config.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python3 + +# Copy this file to config.py, then amend the settings below according to +# your system configuration. + +# Custom path to the Android SDK, defaults to $ANDROID_HOME +# sdk_path = "$ANDROID_HOME" + +# Custom paths to various versions of the Android NDK, defaults to 'r12b' set +# to $ANDROID_NDK. Most users will have the latest at $ANDROID_NDK, which is +# used by default. If a version is missing or assigned to None, it is assumed +# not installed. +# ndk_paths = { +# 'r10e': None, +# 'r11c': None, +# 'r12b': "$ANDROID_NDK", +# 'r13b': None, +# 'r14b': None, +# 'r15c': None, +# 'r16b': None, +# 'r17b': None, +# 'r18b': None, +# 'r19': None, +# } + +# Directory to store downloaded tools in (i.e. gradle versions) +# By default, these are stored in ~/.cache/fdroidserver +# cachedir = cache + +# java_paths = { +# '8': "/usr/lib/jvm/java-8-openjdk", +# } + +# Build tools version to be used +# build_tools = "25.0.2" + +# Force all build to use the above version of build -tools, good for testing +# builds without having all of the possible build-tools installed. +# force_build_tools = True + +# Command or path to binary for running Ant +# ant = "ant" + +# Command or path to binary for running maven 3 +# mvn3 = "mvn" + +# Command or path to binary for running Gradle +# Defaults to using an internal gradle wrapper (gradlew-fdroid). +# gradle = "gradle" + +# Set the maximum age (in days) of an index that a client should accept from +# this repo. Setting it to 0 or not setting it at all disables this +# functionality. If you do set this to a non-zero value, you need to ensure +# that your index is updated much more frequently than the specified interval. +# The same policy is applied to the archive repo, if there is one. +# repo_maxage = 0 + +repo_url = "https://MyFirstFDroidRepo.org/fdroid/repo" +repo_name = "My First F-Droid Repo Demo" +repo_icon = "fdroid-icon.png" +repo_description = """ +This is a repository of apps to be used with F-Droid. Applications in this +repository are either official binaries built by the original application +developers, or are binaries built from source by the admin of f-droid.org +using the tools on https://gitlab.com/u/fdroid. +""" + +# As above, but for the archive repo. +# archive_older sets the number of versions kept in the main repo, with all +# older ones going to the archive. Set it to 0, and there will be no archive +# repository, and no need to define the other archive_ values. +archive_older = 3 +archive_url = "https://f-droid.org/archive" +archive_name = "My First F-Droid Archive Demo" +archive_icon = "fdroid-icon.png" +archive_description = """ +The repository of older versions of applications from the main demo repository. +""" + +# This allows a specific kind of insecure APK to be included in the +# 'repo' section. Since April 2017, APK signatures that use MD5 are +# no longer considered valid, jarsigner and apksigner will return an +# error when verifying. `fdroid update` will move APKs with these +# disabled signatures to the archive. This option stops that +# behavior, and lets those APKs stay part of 'repo'. +# +# allow_disabled_algorithms = True + +# Normally, all apps are collected into a single app repository, like on +# https://f-droid.org. 
For certain situations, it is better to make a repo +# that is made up of APKs only from a single app. For example, an automated +# build server that publishes nightly builds. +# per_app_repos = True + +# `fdroid update` will create a link to the current version of a given app. +# This provides a static path to the current APK. To disable the creation of +# this link, uncomment this: +# make_current_version_link = False + +# By default, the "current version" link will be based on the "Name" of the +# app from the metadata. You can change it to use a different field from the +# metadata here: +# current_version_name_source = 'packageName' + +# Optionally, override home directory for gpg +# gpghome = '/home/fdroid/somewhere/else/.gnupg' + +# The ID of a GPG key for making detached signatures for apks. Optional. +# gpgkey = '1DBA2E89' + +# The key (from the keystore defined below) to be used for signing the +# repository itself. This is the same name you would give to keytool or +# jarsigner using -alias. (Not needed in an unsigned repository). +# repo_keyalias = "fdroidrepo" + +# Optionally, the public key for the key defined by repo_keyalias above can +# be specified here. There is no need to do this, as the public key can and +# will be retrieved from the keystore when needed. However, specifying it +# manually can allow some processing to take place without access to the +# keystore. +# repo_pubkey = "..." + +# The keystore to use for release keys when building. This needs to be +# somewhere safe and secure, and backed up! The best way to manage these +# sensitive keys is to use a "smartcard" (aka Hardware Security Module). To +# configure F-Droid to use a smartcard, set the keystore file using the keyword +# "NONE" (i.e. keystore = "NONE"). That makes Java find the keystore on the +# smartcard based on 'smartcardoptions' below. +# keystore = "~/.local/share/fdroidserver/keystore.jks" + +# You should not need to change these at all, unless you have a very +# customized setup for using smartcards in Java with keytool/jarsigner +# smartcardoptions = "-storetype PKCS11 -providerName SunPKCS11-OpenSC \ +# -providerClass sun.security.pkcs11.SunPKCS11 \ +# -providerArg opensc-fdroid.cfg" + +# The password for the keystore (at least 6 characters). If this password is +# different than the keypass below, it can be OK to store the password in this +# file for real use. But in general, sensitive passwords should not be stored +# in text files! +# keystorepass = "password1" + +# The password for keys - the same is used for each auto-generated key as well +# as for the repository key. You should not normally store this password in a +# file since it is a sensitive password. +# keypass = "password2" + +# The distinguished name used for all keys. +# keydname = "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US" + +# Use this to override the auto-generated key aliases with specific ones +# for particular applications. Normally, just leave it empty. +# keyaliases = {} +# keyaliases['com.example.app'] = 'example' +# You can also force an app to use the same key alias as another one, using +# the @ prefix. +# keyaliases['com.example.another.plugin'] = '@com.example.another' + + +# The full path to the root of the repository. It must be specified in +# rsync/ssh format for a remote host/path. This is used for syncing a locally +# generated repo to the server that is it hosted on. 
It must end in the +# standard public repo name of "/fdroid", but can be in up to three levels of +# sub-directories (i.e. /var/www/packagerepos/fdroid). You can include +# multiple servers to sync to by wrapping the whole thing in {} or [], and +# including the serverwebroot strings in a comma-separated list. +# +# serverwebroot = 'user@example:/var/www/fdroid' +# serverwebroot = { +# 'foo.com:/usr/share/nginx/www/fdroid', +# 'bar.info:/var/www/fdroid', +# } + +# Uncomment this option if you want to logs of builds and other processes to +# your repository server(s). Logs get published to all servers configured in +# 'serverwebroot'. For builds, only logs from build-jobs running inside a +# buildserver VM are supported. +# +# deploy_process_logs = True + +# The full URL to a git remote repository. You can include +# multiple servers to mirror to by wrapping the whole thing in {} or [], and +# including the servergitmirrors strings in a comma-separated list. +# Servers listed here will also be automatically inserted in the mirrors list. +# +# servergitmirrors = 'https://github.com/user/repo' +# servergitmirrors = { +# 'https://github.com/user/repo', +# 'https://gitlab.com/user/repo', +# } + +# Any mirrors of this repo, for example all of the servers declared in +# serverwebroot and all the servers declared in servergitmirrors, +# will automatically be used by the client. If one +# mirror is not working, then the client will try another. If the +# client has Tor enabled, then the client will prefer mirrors with +# .onion addresses. This base URL will be used for both the main repo +# and the archive, if it is enabled. So these URLs should end in the +# 'fdroid' base of the F-Droid part of the web server like serverwebroot. +# +# mirrors = ( +# 'https://foo.bar/fdroid', +# 'http://foobarfoobarfoobar.onion/fdroid', +# ) + +# optionally specify which identity file to use when using rsync or git over SSH +# +# identity_file = '~/.ssh/fdroid_id_rsa' + + +# If you are running the repo signing process on a completely offline machine, +# which provides the best security, then you can specify a folder to sync the +# repo to when running `fdroid server update`. This is most likely going to +# be a USB thumb drive, SD Card, or some other kind of removable media. Make +# sure it is mounted before running `fdroid server update`. Using the +# standard folder called 'fdroid' as the specified folder is recommended, like +# with serverwebroot. +# +# local_copy_dir = '/media/MyUSBThumbDrive/fdroid' + + +# If you are using local_copy_dir on an offline build/signing server, once the +# thumb drive has been plugged into the online machine, it will need to be +# synced to the copy on the online machine. To make that happen +# automatically, set sync_from_local_copy_dir to True: +# +# sync_from_local_copy_dir = True + + +# To upload the repo to an Amazon S3 bucket using `fdroid server +# update`. Warning, this deletes and recreates the whole fdroid/ +# directory each time. This prefers s3cmd, but can also use +# apache-libcloud. To customize how s3cmd interacts with the cloud +# provider, create a 's3cfg' file next to this file (config.py), and +# those settings will be used instead of any 'aws' variable below. +# +# awsbucket = 'myawsfdroid' +# awsaccesskeyid = 'SEE0CHAITHEIMAUR2USA' +# awssecretkey = 'yourverysecretkeywordpassphraserighthere' + + +# If you want to force 'fdroid server' to use a non-standard serverwebroot. +# This will allow you to have 'serverwebroot' entries which do not end in +# '/fdroid'. 
(Please note that some client features expect repository URLs +# to end in '/fdroid/repo'.) +# +# nonstandardwebroot = False + + +# If you want to upload the release apk file to androidobservatory.org +# +# androidobservatory = False + + +# If you want to upload the release apk file to virustotal.com +# You have to enter your profile apikey to enable the upload. +# +# virustotal_apikey = "virustotal_apikey" + + +# The build logs can be posted to a mediawiki instance, like on f-droid.org. +# wiki_protocol = "http" +# wiki_server = "server" +# wiki_path = "/wiki/" +# wiki_user = "login" +# wiki_password = "1234" + +# Keep a log of all generated index files in a git repo to provide a +# "binary transparency" log for anyone to check the history of the +# binaries that are published. This is in the form of a "git remote", +# which this machine where `fdroid update` is run has already been +# configured to allow push access (e.g. ssh key, username/password, etc) +# binary_transparency_remote = "git@gitlab.com:fdroid/binary-transparency-log.git" + +# Only set this to true when running a repository where you want to generate +# stats, and only then on the master build servers, not a development +# machine. If you want to keep the "added" and "last updated" dates for each +# app and APK in your repo, then you should enable this. +# update_stats = True + +# When used with stats, this is a list of IP addresses that are ignored for +# calculation purposes. +# stats_ignore = [] + +# Server stats logs are retrieved from. Required when update_stats is True. +# stats_server = "example.com" + +# User stats logs are retrieved from. Required when update_stats is True. +# stats_user = "bob" + +# Use the following to push stats to a Carbon instance: +# stats_to_carbon = False +# carbon_host = '0.0.0.0' +# carbon_port = 2003 + +# Set this to true to always use a build server. This saves specifying the +# --server option on dedicated secure build server hosts. +# build_server_always = True + +# By default, fdroid will use YAML .yml and the custom .txt metadata formats. It +# is also possible to have metadata in JSON by adding 'json'. +# accepted_formats = ('txt', 'yml') + +# Limit in number of characters that fields can take up +# Only the fields listed here are supported, defaults shown +# char_limits = { +# 'author': 256, +# 'name': 30, +# 'summary': 80, +# 'description': 4000, +# 'video': 256, +# 'whatsNew': 500, +# } + +# It is possible for the server operator to specify lists of apps that +# must be installed or uninstalled on the client (aka "push installs). +# If the user has opted in, or the device is already setup to respond +# to these requests, then F-Droid will automatically install/uninstall +# the packageNames listed. This is protected by the same signing key +# as the app index metadata. +# +# install_list = ( +# 'at.bitfire.davdroid', +# 'com.fsck.k9', +# 'us.replicant', +# ) +# +# uninstall_list = ( +# 'com.facebook.orca', +# 'com.android.vending', +# ) diff --git a/examples/config.yml b/examples/config.yml deleted file mode 100644 index ae4e7008..00000000 --- a/examples/config.yml +++ /dev/null @@ -1,433 +0,0 @@ ---- -# Copy this file to config.yml, then amend the settings below according to -# your system configuration. - -# Custom path to the Android SDK, defaults to $ANDROID_HOME -# sdk_path: $ANDROID_HOME - -# Paths to installed versions of the Android NDK. This will be -# automatically filled out from well known sources like -# $ANDROID_HOME/ndk-bundle and $ANDROID_HOME/ndk/*. 
If a required -# version is missing in the buildserver VM, it will be automatically -# downloaded and installed into the standard $ANDROID_HOME/ndk/ -# directory. Manually setting it here will override the auto-detected -# values. The keys can either be the "release" (e.g. r21e) or the -# "revision" (e.g. 21.4.7075529). -# -# ndk_paths: -# r10e: $ANDROID_HOME/android-ndk-r10e -# r17: "" -# 21.4.7075529: ~/Android/Ndk -# r22b: null - -# Directory to store downloaded tools in (i.e. gradle versions) -# By default, these are stored in ~/.cache/fdroidserver -# cachedir: cache - -# Specify paths to each major Java release that you want to support -# java_paths: -# 8: /usr/lib/jvm/java-8-openjdk - -# Command or path to binary for running Ant -# ant: ant - -# Command or path to binary for running maven 3 -# mvn3: mvn - -# Command or path to binary for running Gradle -# Defaults to using an internal gradle wrapper (gradlew-fdroid). -# gradle: gradle - -# Always scan the APKs produced by `fdroid build` for known non-free classes -# scan_binary: true - -# Set the maximum age (in days) of an index that a client should accept from -# this repo. Setting it to 0 or not setting it at all disables this -# functionality. If you do set this to a non-zero value, you need to ensure -# that your index is updated much more frequently than the specified interval. -# The same policy is applied to the archive repo, if there is one. -# repo_maxage: 0 - -# Canonical URL of the repositoy, needs to end in /repo. Is is used to identity -# the repo in the client, as well. -# repo_url: https://MyFirstFDroidRepo.org/fdroid/repo -# -# Base URL for per-package pages on the website of this repo, -# i.e. https://f-droid.org/packages// This should be accessible -# with a browser. Setting it to null or not setting this disables the -# feature. -# repo_web_base_url: https://MyFirstFDroidRepo.org/packages/ -# -# repo_name: My First F-Droid Repo Demo -# repo_description: >- -# This is a repository of apps to be used with F-Droid. Applications -# in this repository are either official binaries built by the -# original application developers, or are binaries built from source -# by the admin of f-droid.org using the tools on -# https://gitlab.com/fdroid. - -# As above, but for the archive repo. -# -# archive_url: https://f-droid.org/archive -# archive_web_base_url: -# archive_name: My First F-Droid Archive Demo -# archive_description: >- -# The repository of older versions of packages from the main demo repository. - -# archive_older sets the number of versions kept in the main repo, with all -# older ones going to the archive. Set it to 0, and there will be no archive -# repository, and no need to define the other archive_ values. -# -# archive_older: 3 - -# The repo's icon defaults to a file called 'icon.png' in the 'icons' -# folder for each section, e.g. repo/icons/icon.png and -# archive/icons/icon.png. To use a different filename for the icons, -# set the filename here. You must still copy it into place in -# repo/icons/ and/or archive/icons/. -# -# repo_icon: myicon.png -# archive_icon: myicon.png - -# This allows a specific kind of insecure APK to be included in the -# 'repo' section. Since April 2017, APK signatures that use MD5 are -# no longer considered valid, jarsigner and apksigner will return an -# error when verifying. `fdroid update` will move APKs with these -# disabled signatures to the archive. This option stops that -# behavior, and lets those APKs stay part of 'repo'. 
-# -# allow_disabled_algorithms: true - -# Normally, all apps are collected into a single app repository, like on -# https://f-droid.org. For certain situations, it is better to make a repo -# that is made up of APKs only from a single app. For example, an automated -# build server that publishes nightly builds. -# per_app_repos: true - -# `fdroid update` will create a link to the current version of a given app. -# This provides a static path to the current APK. To disable the creation of -# this link, uncomment this: -# make_current_version_link: false - -# By default, the "current version" link will be based on the "Name" of the -# app from the metadata. You can change it to use a different field from the -# metadata here: -# current_version_name_source: packageName - -# Optionally, override home directory for gpg -# gpghome: /home/fdroid/somewhere/else/.gnupg - -# The ID of a GPG key for making detached signatures for APKs. Optional. -# gpgkey: 1DBA2E89 - -# The key (from the keystore defined below) to be used for signing the -# repository itself. This is the same name you would give to keytool or -# jarsigner using -alias. (Not needed in an unsigned repository). -# repo_keyalias: fdroidrepo - -# Optionally, the public key for the key defined by repo_keyalias above can -# be specified here. There is no need to do this, as the public key can and -# will be retrieved from the keystore when needed. However, specifying it -# manually can allow some processing to take place without access to the -# keystore. -# repo_pubkey: ... - -# The keystore to use for release keys when building. This needs to be -# somewhere safe and secure, and backed up! The best way to manage these -# sensitive keys is to use a "smartcard" (aka Hardware Security Module). To -# configure F-Droid to use a smartcard, set the keystore file using the keyword -# "NONE" (i.e. keystore: "NONE"). That makes Java find the keystore on the -# smartcard based on 'smartcardoptions' below. -# keystore: ~/.local/share/fdroidserver/keystore.jks - -# You should not need to change these at all, unless you have a very -# customized setup for using smartcards in Java with keytool/jarsigner -# smartcardoptions: | -# -storetype PKCS11 -providerName SunPKCS11-OpenSC -# -providerClass sun.security.pkcs11.SunPKCS11 -# -providerArg opensc-fdroid.cfg - -# The password for the keystore (at least 6 characters). If this password is -# different than the keypass below, it can be OK to store the password in this -# file for real use. But in general, sensitive passwords should not be stored -# in text files! -# keystorepass: password1 - -# The password for keys - the same is used for each auto-generated key as well -# as for the repository key. You should not normally store this password in a -# file since it is a sensitive password. -# keypass: password2 - -# The distinguished name used for all keys. -# keydname: CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US - -# Use this to override the auto-generated key aliases with specific ones -# for particular applications. Normally, just leave it empty. -# -# keyaliases: -# com.example.app: example -# -# You can also force an app to use the same key alias as another one, using -# the @ prefix. -# -# keyaliases: -# com.example.another.plugin: "@com.example.another" - - -# The full path to the root of the repository. It must be specified in -# rsync/ssh format for a remote host/path. This is used for syncing a locally -# generated repo to the server that is it hosted on. 
It must end in the -# standard public repo name of "/fdroid", but can be in up to three levels of -# sub-directories (i.e. /var/www/packagerepos/fdroid). You can include -# multiple servers to sync to by wrapping the whole thing in {} or [], and -# including the serverwebroot strings in a comma-separated list. -# -# serverwebroot: user@example:/var/www/fdroid -# serverwebroot: -# - foo.com:/usr/share/nginx/www/fdroid -# - bar.info:/var/www/fdroid -# -# There is a special mode to only deploy the index file: -# -# serverwebroot: -# - url: 'me@b.az:/srv/fdroid' -# index_only: true - - -# When running fdroid processes on a remote server, it is possible to -# publish extra information about the status. Each fdroid sub-command -# can create repo/status/running.json when it starts, then a -# repo/status/.json when it completes. The builds logs -# and other processes will also get published, if they are running in -# a buildserver VM. The build logs name scheme is: -# .../repo/$APPID_$VERCODE.log.gz. These files are also pushed to all -# servers configured in 'serverwebroot'. -# -# deploy_process_logs: true - -# The full URL to a git remote repository. You can include -# multiple servers to mirror to by adding strings to a YAML list or map. -# Servers listed here will also be automatically inserted in the mirrors list. -# -# servergitmirrors: https://github.com/user/repo -# servergitmirrors: -# - https://github.com/user/repo -# - https://gitlab.com/user/repo -# -# servergitmirrors: -# - url: https://github.com/user/repo -# - url: https://gitlab.com/user/repo -# index_only: true - - -# These settings allow using `fdroid deploy` for publishing APK files from -# your repository to GitHub Releases. (You should also run `fdroid update` -# every time before deploying to GitHub releases to update index files.) Here's -# an example for this deployment automation: -# https://github.com/f-droid/fdroidclient/releases/ -# -# Currently, versions which are assigned to a release channel (e.g. alpha or -# beta releases) are ignored. -# -# In the example below, tokens are read from environment variables. Putting -# tokens directly into the config file is also supported but discouraged. It is -# highly recommended to use a "Fine-grained personal access token", which is -# restricted to the minimum required permissions, which are: -# * Metadata - read -# * Contents - read/write -# (https://github.com/settings/personal-access-tokens/new) -# -# github_token: {env: GITHUB_TOKEN} -# github_releases: -# - projectUrl: https://github.com/f-droid/fdroidclient -# packageNames: -# - org.fdroid.basic -# - org.fdroid.fdroid -# release_notes_prepend: | -# Re-post of official F-Droid App release from https://f-droid.org -# - projectUrl: https://github.com/example/app -# packageNames: com.example.app -# token: {env: GITHUB_TOKEN_EXAMPLE} - - -# Most git hosting services have hard size limits for each git repo. -# `fdroid deploy` will delete the git history when the git mirror repo -# approaches this limit to ensure that the repo will still fit when -# pushed. GitHub recommends 1GB, gitlab.com recommends 10GB. -# -# git_mirror_size_limit: 10GB - -# Any mirrors of this repo, for example all of the servers declared in -# serverwebroot and all the servers declared in servergitmirrors, -# will automatically be used by the client. If one -# mirror is not working, then the client will try another. If the -# client has Tor enabled, then the client will prefer mirrors with -# .onion addresses. 
This base URL will be used for both the main repo -# and the archive, if it is enabled. So these URLs should end in the -# 'fdroid' base of the F-Droid part of the web server like serverwebroot. -# -# mirrors: -# - https://foo.bar/fdroid -# - http://foobarfoobarfoobar.onion/fdroid -# -# Or additional metadata can also be included by adding key/value pairs: -# -# mirrors: -# - url: https://foo.bar/fdroid -# countryCode: BA -# - url: http://foobarfoobarfoobar.onion/fdroid -# -# The list of mirrors can also be maintained in config/mirrors.yml, a -# standalone YAML file in the optional configuration directory. In -# that case, mirrors: should be removed from this file (config.yml). - - -# optionally specify which identity file to use when using rsync or git over SSH -# -# identity_file: ~/.ssh/fdroid_id_rsa - - -# If you are running the repo signing process on a completely offline machine, -# which provides the best security, then you can specify a folder to sync the -# repo to when running `fdroid deploy`. This is most likely going to -# be a USB thumb drive, SD Card, or some other kind of removable media. Make -# sure it is mounted before running `fdroid deploy`. Using the -# standard folder called 'fdroid' as the specified folder is recommended, like -# with serverwebroot. -# -# local_copy_dir: /media/MyUSBThumbDrive/fdroid - - -# If you are using local_copy_dir on an offline build/signing server, once the -# thumb drive has been plugged into the online machine, it will need to be -# synced to the copy on the online machine. To make that happen -# automatically, set sync_from_local_copy_dir to True: -# -# sync_from_local_copy_dir: true - -# To deploy to an AWS S3 "bucket" in the US East region, set the -# bucket name in the config, then set the environment variables -# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY using the values from -# the AWS Management Console. See -# https://rclone.org/s3/#authentication -# -# awsbucket: myawsfdroidbucket - - -# For extended options for syncing to cloud drive and object store -# services, `fdroid deploy' wraps Rclone. Rclone is a full featured -# sync tool for a huge variety of cloud services. Set up your services -# using `rclone config`, then specify each config name to deploy the -# awsbucket: to. Using rclone_config: overrides the default AWS S3 US -# East setup, and will only sync to the services actually specified. -# -# awsbucket: myawsfdroidbucket -# rclone_config: -# - aws-sample-config -# - rclone-supported-service-config - - -# By default Rclone uses the user's default configuration file at -# ~/.config/rclone/rclone.conf To specify a custom configuration file, -# please add the full path to the configuration file as below. -# -# path_to_custom_rclone_config: /home/mycomputer/somedir/example.conf - - -# If you want to force 'fdroid server' to use a non-standard serverwebroot. -# This will allow you to have 'serverwebroot' entries which do not end in -# '/fdroid'. (Please note that some client features expect repository URLs -# to end in '/fdroid/repo'.) -# -# nonstandardwebroot: false - - -# If you want to upload the release APK file to androidobservatory.org -# -# androidobservatory: false - - -# If you want to upload the release APK file to virustotal.com -# You have to enter your profile apikey to enable the upload. 
-# -# virustotal_apikey: 9872987234982734 -# -# Or get it from an environment variable: -# -# virustotal_apikey: {env: virustotal_apikey} - - -# Keep a log of all generated index files in a git repo to provide a -# "binary transparency" log for anyone to check the history of the -# binaries that are published. This is in the form of a "git remote", -# which this machine where `fdroid update` is run has already been -# configured to allow push access (e.g. ssh key, username/password, etc) -# binary_transparency_remote: git@gitlab.com:fdroid/binary-transparency-log.git - -# Set this to true to always use a build server. This saves specifying the -# --server option on dedicated secure build server hosts. -# build_server_always: true - -# Limit in number of characters that fields can take up -# Only the fields listed here are supported, defaults shown -# char_limits: -# author: 256 -# name: 50 -# summary: 80 -# description: 4000 -# video: 256 -# whatsNew: 500 - -# It is possible for the server operator to specify lists of apps that -# must be installed or uninstalled on the client (aka "push installs). -# If the user has opted in, or the device is already setup to respond -# to these requests, then F-Droid will automatically install/uninstall -# the packageNames listed. This is protected by the same signing key -# as the app index metadata. -# -# install_list: -# - at.bitfire.davdroid -# - com.fsck.k9 -# - us.replicant -# -# uninstall_list: -# - com.facebook.orca -# - com.android.vending - -# `fdroid lint` checks licenses in metadata against a built white list. By -# default we will require license metadata to be present and only allow -# licenses approved either by FSF or OSI. We're using the standardized SPDX -# license IDs. (https://spdx.org/licenses/) -# -# We use `python3 -m spdx-license-list print --filter-fsf-or-osi` for -# generating our default list. (https://pypi.org/project/spdx-license-list) -# -# You can override our default list of allowed licenes by setting this option. -# Just supply a custom list of licene names you would like to allow. To disable -# checking licenses by the linter, assign an empty value to lint_licenses. -# -# lint_licenses: -# - Custom-License-A -# - Another-License - -# `fdroid scanner` can scan for signatures from various sources. By default -# it's configured to only use F-Droids official SUSS collection. We have -# support for these special collections: -# * 'exodus' - official exodus-privacy.org signatures -# * 'etip' - exodus privacy investigation platfrom community contributed -# signatures -# * 'suss' - official F-Droid: Suspicious or Unwanted Software Signatures -# You can also configure scanner to use custom collections of signatures here. -# They have to follow the format specified in the SUSS readme. -# (https://gitlab.com/fdroid/fdroid-suss/#cache-file-data-format) -# -# scanner_signature_sources: -# - suss -# - exodus -# - https://example.com/signatures.json - -# The scanner can use signature sources from the internet. These are -# cached locally. 
To force them to be refreshed from the network on -# every run, set this to true: -# -# refresh_scanner: true diff --git a/examples/fdroid-icon.png b/examples/fdroid-icon.png new file mode 100644 index 00000000..0c0d4173 Binary files /dev/null and b/examples/fdroid-icon.png differ diff --git a/examples/fdroid_clean_repos.py b/examples/fdroid_clean_repos.py deleted file mode 100644 index 6b19cacc..00000000 --- a/examples/fdroid_clean_repos.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# an fdroid plugin for resetting app VCSs to the latest version for the metadata - -import argparse -import logging - -from fdroidserver import _, common, metadata -from fdroidserver.exception import VCSException - -fdroid_summary = 'reset app VCSs to the latest version' - - -def main(): - parser = argparse.ArgumentParser( - usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]" - ) - common.setup_global_opts(parser) - parser.add_argument( - "appid", - nargs='*', - help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"), - ) - metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) - apps = common.read_app_args( - options.appid, allow_version_codes=True, sort_by_time=True - ) - common.read_config() - - for appid, app in apps.items(): - if "Builds" in app and len(app["Builds"]) > 0: - build = app.get('Builds')[-1] - logging.info(_("Cleaning up '{appid}' VCS").format(appid=appid)) - try: - vcs, build_dir = common.setup_vcs(app) - vcs.gotorevision(build.commit) - if build.submodules: - vcs.initsubmodules() - - except VCSException: - pass - - -if __name__ == "__main__": - main() diff --git a/examples/fdroid_export_keystore_to_nitrokey.py b/examples/fdroid_export_keystore_to_nitrokey.py deleted file mode 100644 index 6e920a78..00000000 --- a/examples/fdroid_export_keystore_to_nitrokey.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# -# an fdroid plugin for exporting a repo's keystore in standard PEM format - -import os -from argparse import ArgumentParser - -from fdroidserver import common -from fdroidserver.common import FDroidPopen -from fdroidserver.exception import BuildException - -fdroid_summary = "export the repo's keystore file to a NitroKey HSM" - - -def run(cmd, error): - envs = {'LC_ALL': 'C.UTF-8', - 'PIN': config['smartcard_pin'], - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config['keypass']} - p = FDroidPopen(cmd, envs=envs) - if p.returncode != 0: - raise BuildException(error, p.output) - - -def main(): - global config - parser = ArgumentParser() - common.setup_global_opts(parser) - common.parse_args(parser) - config = common.read_config() - destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_') - exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_') - if os.path.exists(destkeystore) or os.path.exists(exportkeystore): - raise BuildException('%s exists!' 
% exportkeystore) - run([config['keytool'], '-importkeystore', - '-srckeystore', config['keystore'], - '-srcalias', config['repo_keyalias'], - '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', - '-srckeypass:env', 'FDROID_KEY_PASS', - '-destkeystore', destkeystore, - '-deststorepass:env', 'FDROID_KEY_STORE_PASS', - '-deststoretype', 'PKCS12'], - 'Failed to convert to PKCS12!') -# run(['openssl', 'pkcs12', '-in', destkeystore, -# '-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys', -# '-out', exportkeystore, -# '-passout', 'env:FDROID_KEY_STORE_PASS'], -# 'Failed to convert to PEM!') - run(['pkcs15-init', '--delete-objects', 'privkey,pubkey', - '--id', '3', '--store-private-key', destkeystore, - '--format', 'pkcs12', '--auth-id', '3', - '--verify-pin', '--pin', 'env:PIN'], - '') - run(['pkcs15-init', '--delete-objects', 'privkey,pubkey', - '--id', '2', '--store-private-key', destkeystore, - '--format', 'pkcs12', '--auth-id', '3', - '--verify-pin', '--pin', 'env:PIN'], - '') - - -if __name__ == "__main__": - main() diff --git a/examples/fdroid_exportkeystore.py b/examples/fdroid_exportkeystore.py deleted file mode 100644 index f2a16980..00000000 --- a/examples/fdroid_exportkeystore.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# -# an fdroid plugin for exporting a repo's keystore in standard PEM format - -import os -from argparse import ArgumentParser - -from fdroidserver import common -from fdroidserver.common import FDroidPopen -from fdroidserver.exception import BuildException - -fdroid_summary = 'export the keystore in standard PEM format' - - -def main(): - parser = ArgumentParser() - common.setup_global_opts(parser) - common.parse_args(parser) - config = common.read_config() - env_vars = {'LC_ALL': 'C.UTF-8', - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config['keypass']} - destkeystore = config['keystore'].replace('.jks', '.p12').replace('/', '_') - exportkeystore = config['keystore'].replace('.jks', '.pem').replace('/', '_') - if os.path.exists(destkeystore) or os.path.exists(exportkeystore): - raise BuildException('%s exists!' 
% exportkeystore) - p = FDroidPopen([config['keytool'], '-importkeystore', - '-srckeystore', config['keystore'], - '-srcalias', config['repo_keyalias'], - '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', - '-srckeypass:env', 'FDROID_KEY_PASS', - '-destkeystore', destkeystore, - '-deststoretype', 'PKCS12', - '-deststorepass:env', 'FDROID_KEY_STORE_PASS', - '-destkeypass:env', 'FDROID_KEY_PASS'], - envs=env_vars) - if p.returncode != 0: - raise BuildException("Failed to convert to PKCS12!", p.output) - p = FDroidPopen(['openssl', 'pkcs12', '-in', destkeystore, - '-passin', 'env:FDROID_KEY_STORE_PASS', '-nokeys', - '-out', exportkeystore, - '-passout', 'env:FDROID_KEY_STORE_PASS'], - envs=env_vars) - if p.returncode != 0: - raise BuildException("Failed to convert to PEM!", p.output) - - -if __name__ == "__main__": - main() diff --git a/examples/fdroid_extract_repo_pubkey.py b/examples/fdroid_extract_repo_pubkey.py deleted file mode 100644 index cb5a895c..00000000 --- a/examples/fdroid_extract_repo_pubkey.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python3 -# -# an fdroid plugin print the repo_pubkey from a repo's keystore -# - -from argparse import ArgumentParser - -from fdroidserver import common, index - -fdroid_summary = 'export the keystore in standard PEM format' - - -def main(): - parser = ArgumentParser() - common.setup_global_opts(parser) - common.parse_args(parser) - common.read_config() - pubkey, repo_pubkey_fingerprint = index.extract_pubkey() - print('repo_pubkey = "%s"' % pubkey.decode()) - - -if __name__ == "__main__": - main() diff --git a/examples/fdroid_fetchsrclibs.py b/examples/fdroid_fetchsrclibs.py deleted file mode 100644 index aba6f7fa..00000000 --- a/examples/fdroid_fetchsrclibs.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python3 -# -# an fdroid plugin for setting up srclibs -# -# The 'fdroid build' gitlab-ci job uses --on-server, which does not -# set up the srclibs. This plugin does the missing setup. 
- -import argparse -import os -import pprint - -from fdroidserver import _, common, metadata - -fdroid_summary = 'prepare the srclibs for `fdroid build --on-server`' - - -def main(): - parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") - common.setup_global_opts(parser) - parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) - metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) - apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True) - common.read_config() - srclib_dir = os.path.join('build', 'srclib') - os.makedirs(srclib_dir, exist_ok=True) - srclibpaths = [] - for appid, app in apps.items(): - vcs, _ignored = common.setup_vcs(app) - for build in app.get('Builds', []): - vcs.gotorevision(build.commit, refresh=False) - if build.submodules: - vcs.initsubmodules() - else: - vcs.deinitsubmodules() - for lib in build.srclibs: - srclibpaths.append(common.getsrclib(lib, srclib_dir, prepare=False, build=build)) - print('Set up srclibs:') - pprint.pprint(srclibpaths) - - -if __name__ == "__main__": - main() diff --git a/examples/fdroid_nitrokeyimport.py b/examples/fdroid_nitrokeyimport.py deleted file mode 100644 index d17a6186..00000000 --- a/examples/fdroid_nitrokeyimport.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 - -from argparse import ArgumentParser - -from fdroidserver import common -from fdroidserver.common import FDroidPopen -from fdroidserver.exception import BuildException - -fdroid_summary = 'import the local keystore into a SmartCard HSM' - - -def main(): - parser = ArgumentParser() - common.setup_global_opts(parser) - common.parse_args(parser) - config = common.read_config() - env_vars = { - 'LC_ALL': 'C.UTF-8', - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config['keypass'], - 'SMARTCARD_PIN': str(config['smartcard_pin']), - } - p = FDroidPopen([config['keytool'], '-importkeystore', - '-srcalias', config['repo_keyalias'], - '-srckeystore', config['keystore'], - '-srcstorepass:env', 'FDROID_KEY_STORE_PASS', - '-srckeypass:env', 'FDROID_KEY_PASS', - '-destalias', config['repo_keyalias'], - '-destkeystore', 'NONE', - '-deststoretype', 'PKCS11', - '-providerName', 'SunPKCS11-OpenSC', - '-providerClass', 'sun.security.pkcs11.SunPKCS11', - '-providerArg', 'opensc-fdroid.cfg', - '-deststorepass:env', 'SMARTCARD_PIN', - '-J-Djava.security.debug=sunpkcs11'], - envs=env_vars) - if p.returncode != 0: - raise BuildException("Failed to import into HSM!", p.output) - - -if __name__ == "__main__": - main() diff --git a/examples/makebuildserver.config.py b/examples/makebuildserver.config.py new file mode 100644 index 00000000..cb47f95f --- /dev/null +++ b/examples/makebuildserver.config.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# +# You may want to alter these before running ./makebuildserver + +# Name of the Vagrant basebox to use, by default it will be downloaded +# from Vagrant Cloud. For release builds setup, generate the basebox +# locally using https://gitlab.com/fdroid/basebox, add it to Vagrant, +# then set this to the local basebox name. +# This defaults to "fdroid/basebox-stretch64" which will download a +# prebuilt basebox from https://app.vagrantup.com/fdroid. +# +# (If you change this value you have to supply the `--clean` option on +# your next `makebuildserver` run.) +# +# basebox = "basebox-stretch64" + +# This allows you to pin your basebox to a specific versions. 
It defaults +# the most recent basebox version which can be aumotaically verifyed by +# `makebuildserver`. +# Please note that vagrant does not support versioning of locally added +# boxes, so we can't support that either. +# +# (If you change this value you have to supply the `--clean` option on +# your next `makebuildserver` run.) +# +# basebox_version = "0.1" + +# In the process of setting up the build server, many gigs of files +# are downloaded (Android SDK components, gradle, etc). These are +# cached so that they are not redownloaded each time. By default, +# these are stored in ~/.cache/fdroidserver +# +# cachedir = 'buildserver/cache' + +# A big part of creating a new instance is downloading packages from Debian. +# This setups up a folder in ~/.cache/fdroidserver to cache the downloaded +# packages when rebuilding the build server from scratch. This requires +# that virtualbox-guest-utils is installed. +# +# apt_package_cache = True + +# The buildserver can use some local caches to speed up builds, +# especially when the internet connection is slow and/or expensive. +# If enabled, the buildserver setup will look for standard caches in +# your HOME dir and copy them to the buildserver VM. Be aware: this +# will reduce the isolation of the buildserver from your host machine, +# so the buildserver will provide an environment only as trustworthy +# as the host machine's environment. +# +# copy_caches_from_host = True + +# To specify which Debian mirror the build server VM should use, by +# default it uses http.debian.net, which auto-detects which is the +# best mirror to use. +# +# debian_mirror = 'http://ftp.uk.debian.org/debian/' + +# The amount of RAM the build server will have (default: 2048) +# memory = 3584 + +# The number of CPUs the build server will have +# cpus = 1 + +# Debian package proxy server - if you have one +# aptproxy = "http://192.168.0.19:8000" + +# If this is running on an older machine or on a virtualized system, +# it can run a lot slower. If the provisioning fails with a warning +# about the timeout, extend the timeout here. (default: 600 seconds) +# +# boot_timeout = 1200 + +# By default, this whole process uses VirtualBox as the provider, but +# QEMU+KVM is also supported via the libvirt plugin to vagrant. If +# this is run within a KVM guest, then libvirt's QEMU+KVM will be used +# automatically. It can also be manually enabled by uncommenting +# below: +# +# vm_provider = 'libvirt' + +# By default libvirt uses 'virtio' for both network and disk drivers. +# Some systems (eg. nesting VMware ESXi) do not support virtio. As a +# workaround for such rare cases, this setting allows to configure +# KVM/libvirt to emulate hardware rather than using virtio. +# +# libvirt_disk_bus = 'sata' +# libvirt_nic_model_type = 'rtl8139' + +# Sometimes, it is not possible to use the 9p synced folder type with +# libvirt, like if running a KVM buildserver instance inside of a +# VMware ESXi guest. In that case, using NFS or another method is +# required. +# +# synced_folder_type = 'nfs' diff --git a/examples/template.yml b/examples/template.yml index c9e565f6..37d72c16 100644 --- a/examples/template.yml +++ b/examples/template.yml @@ -2,11 +2,11 @@ AuthorName: . WebSite: '' Bitcoin: null Litecoin: null -Donate: null +Donation: null License: Unknown Categories: - - Internet +- Internet IssueTracker: '' SourceCode: '' @@ -15,7 +15,7 @@ Changelog: '' Name: . Summary: . Description: | - . + . 
-ArchivePolicy: 2 versions -RequiresRoot: false +Archive Policy: 2 versions +Requires Root: No diff --git a/fdroid b/fdroid index 314d2467..b316346a 100755 --- a/fdroid +++ b/fdroid @@ -1,7 +1,8 @@ #!/usr/bin/env python3 # # fdroid.py - part of the FDroid server tools -# Copyright (C) 2020 Michael Pöhn +# Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Marti # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -16,7 +17,154 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import sys +import os +import locale +import logging -import fdroidserver.__main__ +import fdroidserver.common +import fdroidserver.metadata +from fdroidserver import _ +from argparse import ArgumentError +from collections import OrderedDict -fdroidserver.__main__.main() + +commands = OrderedDict([ + ("build", _("Build a package from source")), + ("init", _("Quickly start a new repository")), + ("publish", _("Sign and place packages in the repo")), + ("gpgsign", _("Add PGP signatures using GnuPG for packages in repo")), + ("update", _("Update repo information for new packages")), + ("deploy", _("Interact with the repo HTTP server")), + ("verify", _("Verify the integrity of downloaded packages")), + ("checkupdates", _("Check for updates to applications")), + ("import", _("Add a new application from its source code")), + ("install", _("Install built packages on devices")), + ("readmeta", _("Read all the metadata files and exit")), + ("rewritemeta", _("Rewrite all the metadata files")), + ("lint", _("Warn about possible metadata errors")), + ("scanner", _("Scan the source code of a package")), + ("dscanner", _("Dynamically scan APKs post build")), + ("stats", _("Update the stats of the repo")), + ("server", _("Old, deprecated name for fdroid deploy")), + ("signindex", _("Sign indexes created using update --nosign")), + ("btlog", _("Update the binary transparency log for a URL")), + ("signatures", _("Extract signatures from APKs")), + ("nightly", _("Set up an app build for a nightly build repo")), + ("mirror", _("Download complete mirrors of small repos")), +]) + + +def print_help(): + print(_("usage: ") + _("fdroid [] [-h|--help|--version|]")) + print("") + print(_("Valid commands are:")) + for cmd, summary in commands.items(): + print(" " + cmd + ' ' * (15 - len(cmd)) + summary) + print("") + + +def main(): + + if len(sys.argv) <= 1: + print_help() + sys.exit(0) + + command = sys.argv[1] + if command not in commands: + if command in ('-h', '--help'): + print_help() + sys.exit(0) + elif command == '--version': + output = _('no version info found!') + cmddir = os.path.realpath(os.path.dirname(__file__)) + moduledir = os.path.realpath(os.path.dirname(fdroidserver.common.__file__) + '/..') + if cmddir == moduledir: + # running from git + os.chdir(cmddir) + if os.path.isdir('.git'): + import subprocess + try: + output = subprocess.check_output(['git', 'describe'], + stderr=subprocess.STDOUT, + universal_newlines=True) + except subprocess.CalledProcessError: + output = 'git commit ' + subprocess.check_output(['git', 'rev-parse', 'HEAD'], + universal_newlines=True) + elif os.path.exists('setup.py'): + import re + m = re.search(r'''.*[\s,\(]+version\s*=\s*["']([0-9a-z.]+)["'].*''', + open('setup.py').read(), flags=re.MULTILINE) + if m: + output = m.group(1) + '\n' + else: + from pkg_resources import get_distribution 
+ output = get_distribution('fdroidserver').version + '\n' + print(output), + sys.exit(0) + else: + print(_("Command '%s' not recognised.\n" % command)) + print_help() + sys.exit(1) + + verbose = any(s in sys.argv for s in ['-v', '--verbose']) + quiet = any(s in sys.argv for s in ['-q', '--quiet']) + + # Helpful to differentiate warnings from errors even when on quiet + logformat = '%(levelname)s: %(message)s' + loglevel = logging.INFO + if verbose: + loglevel = logging.DEBUG + elif quiet: + loglevel = logging.WARN + + logging.basicConfig(format=logformat, level=loglevel) + + if verbose and quiet: + logging.critical(_("Conflicting arguments: '--verbose' and '--quiet' " + "can not be specified at the same time.")) + sys.exit(1) + + # temporary workaround until server.py becomes deploy.py + if command == 'deploy': + command = 'server' + sys.argv.insert(2, 'update') + + # Trick optparse into displaying the right usage when --help is used. + sys.argv[0] += ' ' + command + + del sys.argv[1] + mod = __import__('fdroidserver.' + command, None, None, [command]) + + system_langcode, system_encoding = locale.getdefaultlocale() + if system_encoding.lower() not in ('utf-8', 'utf8'): + logging.warn(_("Encoding is set to '{enc}' fdroid might run " + "into encoding issues. Please set it to 'UTF-8' " + "for best results.".format(enc=system_encoding))) + + try: + mod.main() + # These are ours, contain a proper message and are "expected" + except (fdroidserver.common.FDroidException, + fdroidserver.metadata.MetaDataException) as e: + if verbose: + raise + else: + logging.critical(str(e)) + sys.exit(1) + except ArgumentError as e: + logging.critical(str(e)) + sys.exit(1) + except KeyboardInterrupt: + print('') + fdroidserver.common.force_exit(1) + # These should only be unexpected crashes due to bugs in the code + # str(e) often doesn't contain a reason, so just show the backtrace + except Exception as e: + logging.critical(_("Unknown exception found!")) + raise e + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/__init__.py b/fdroidserver/__init__.py index fdf64421..bad9cefa 100644 --- a/fdroidserver/__init__.py +++ b/fdroidserver/__init__.py @@ -1,78 +1,23 @@ + import gettext import glob import os import sys + # support running straight from git and standard installs rootpaths = [ os.path.realpath(os.path.join(os.path.dirname(__file__), '..')), - os.path.realpath( - os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'share') - ), + os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'share')), os.path.join(sys.prefix, 'share'), ] localedir = None for rootpath in rootpaths: - found_mo = glob.glob( - os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo') - ) - if len(found_mo) > 0: + if len(glob.glob(os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo'))) > 0: localedir = os.path.join(rootpath, 'locale') break gettext.bindtextdomain('fdroidserver', localedir) gettext.textdomain('fdroidserver') _ = gettext.gettext - - -from fdroidserver.exception import ( - FDroidException, - MetaDataException, - VerificationException, # NOQA: E402 -) - -FDroidException # NOQA: B101 -MetaDataException # NOQA: B101 -VerificationException # NOQA: B101 - -from fdroidserver.common import genkeystore as generate_keystore # NOQA: E402 -from fdroidserver.common import verify_apk_signature - -verify_apk_signature # NOQA: B101 -generate_keystore # NOQA: B101 -from fdroidserver.index import ( - download_repo_index, - 
download_repo_index_v1, - download_repo_index_v2, - get_mirror_service_urls, -) -from fdroidserver.index import make as make_index # NOQA: E402 - -download_repo_index # NOQA: B101 -download_repo_index_v1 # NOQA: B101 -download_repo_index_v2 # NOQA: B101 -get_mirror_service_urls # NOQA: B101 -make_index # NOQA: B101 -from fdroidserver.update import ( - process_apk, - process_apks, - scan_apk, - scan_repo_files, # NOQA: E402 -) - -process_apk # NOQA: B101 -process_apks # NOQA: B101 -scan_apk # NOQA: B101 -scan_repo_files # NOQA: B101 -from fdroidserver.deploy import ( - update_awsbucket, - update_servergitmirrors, - update_serverwebroot, # NOQA: E402 - update_serverwebroots, -) - -update_awsbucket # NOQA: B101 -update_servergitmirrors # NOQA: B101 -update_serverwebroots # NOQA: B101 -update_serverwebroot # NOQA: B101 diff --git a/fdroidserver/__main__.py b/fdroidserver/__main__.py deleted file mode 100755 index 71c39b2c..00000000 --- a/fdroidserver/__main__.py +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env python3 -# -# fdroidserver/__main__.py - part of the FDroid server tools -# Copyright (C) 2020 Michael Pöhn -# Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013-2014 Daniel Marti -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
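The fdroidserver/__init__.py hunk above drops the convenience re-exports (download_repo_index, make_index, update_serverwebroot, and friends), so library users would import the submodules directly instead. A minimal, hypothetical sketch of such a caller; the exact signature, return shape, and verification requirements (for example, whether the repo's signing-key fingerprint must be supplied) are assumptions and should be checked against fdroidserver.index:

from fdroidserver import index

# Hypothetical call: assumed to return the parsed index plus an ETag
# for caching; check fdroidserver.index for the real API.
repo_index, new_etag = index.download_repo_index('https://f-droid.org/repo')
print(repo_index['repo']['name'])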
- -import importlib.metadata -import logging -import os -import pkgutil -import re -import sys -from argparse import ArgumentError -from collections import OrderedDict - -import git - -import fdroidserver.common -import fdroidserver.metadata -from fdroidserver import _ - -COMMANDS = OrderedDict([ - ("build", _("Build a package from source")), - ("init", _("Quickly start a new repository")), - ("publish", _("Sign and place packages in the repo")), - ("gpgsign", _("Add PGP signatures using GnuPG for packages in repo")), - ("update", _("Update repo information for new packages")), - ("deploy", _("Interact with the repo HTTP server")), - ("verify", _("Verify the integrity of downloaded packages")), - ("checkupdates", _("Check for updates to applications")), - ("import", _("Extract application metadata from a source repository")), - ("install", _("Install built packages on devices")), - ("readmeta", _("Read all the metadata files and exit")), - ("rewritemeta", _("Rewrite all the metadata files")), - ("lint", _("Warn about possible metadata errors")), - ("scanner", _("Scan the source code of a package")), - ("signindex", _("Sign indexes created using update --nosign")), - ("btlog", _("Update the binary transparency log for a URL")), - ("signatures", _("Extract signatures from APKs")), - ("nightly", _("Set up an app build for a nightly build repo")), - ("mirror", _("Download complete mirrors of small repos")), -]) - - -def print_help(available_plugins=None): - print(_("usage: ") + _("fdroid [] [-h|--help|--version|]")) - print("") - print(_("Valid commands are:")) - for cmd, summary in COMMANDS.items(): - print(" " + cmd + ' ' * (15 - len(cmd)) + summary) - if available_plugins: - print(_('commands from plugin modules:')) - for command in sorted(available_plugins.keys()): - print(' {:15}{}'.format(command, available_plugins[command]['summary'])) - print("") - - -def preparse_plugin(module_name, module_dir): - """No summary. - - Simple regex based parsing for plugin scripts. - - So we don't have to import them when we just need the summary, - but not plan on executing this particular plugin. - """ - if '.' in module_name: - raise ValueError("No '.' 
allowed in fdroid plugin modules: '{}'" - .format(module_name)) - path = os.path.join(module_dir, module_name + '.py') - if not os.path.isfile(path): - path = os.path.join(module_dir, module_name, '__main__.py') - if not os.path.isfile(path): - raise ValueError("unable to find main plugin script " - "for module '{n}' ('{d}')" - .format(n=module_name, - d=module_dir)) - summary = None - main = None - with open(path, 'r', encoding='utf-8') as f: - re_main = re.compile(r'^(\s*def\s+main\s*\(.*\)\s*:' - r'|\s*main\s*=\s*lambda\s*:.+)$') - re_summary = re.compile(r'^\s*fdroid_summary\s*=\s["\'](?P.+)["\']$') - for line in f: - m_summary = re_summary.match(line) - if m_summary: - summary = m_summary.group('text') - if re_main.match(line): - main = True - - if summary is None: - raise NameError("could not find 'fdroid_summary' in: '{}' plugin" - .format(module_name)) - if main is None: - raise NameError("could not find 'main' function in: '{}' plugin" - .format(module_name)) - return {'name': module_name, 'summary': summary} - - -def find_plugins(): - found_plugins = [{'name': x[1], 'dir': x[0].path} for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] - plugin_infos = {} - for plugin_def in found_plugins: - command_name = plugin_def['name'][7:] - try: - plugin_infos[command_name] = preparse_plugin(plugin_def['name'], - plugin_def['dir']) - except Exception as e: - # We need to keep module lookup fault tolerant because buggy - # modules must not prevent fdroidserver from functioning - if len(sys.argv) > 1 and sys.argv[1] == command_name: - # only raise exeption when a user specifies the broken - # plugin in explicitly in command line - raise e - return plugin_infos - - -def main(): - available_plugins = find_plugins() - - if len(sys.argv) <= 1: - print_help(available_plugins=available_plugins) - sys.exit(0) - - command = sys.argv[1] - if command not in COMMANDS and command not in available_plugins: - if command in ('-h', '--help'): - print_help(available_plugins=available_plugins) - sys.exit(0) - elif command == 'server': - print(_("""ERROR: The "server" subcommand has been removed, use "deploy"!""")) - sys.exit(1) - elif command == '--version': - try: - print(importlib.metadata.version("fdroidserver")) - sys.exit(0) - except importlib.metadata.PackageNotFoundError: - pass - try: - print( - git.repo.Repo( - os.path.dirname(os.path.dirname(__file__)) - ).git.describe(always=True, tags=True) - ) - sys.exit(0) - except git.exc.InvalidGitRepositoryError: - print(_('No version information could be found.')) - sys.exit(1) - else: - print(_("Command '%s' not recognised.\n" % command)) - print_help(available_plugins=available_plugins) - sys.exit(1) - - verbose = any(s in sys.argv for s in ['-v', '--verbose']) - quiet = any(s in sys.argv for s in ['-q', '--quiet']) - - # Helpful to differentiate warnings from errors even when on quiet - logformat = '%(asctime)s %(levelname)s: %(message)s' - loglevel = logging.INFO - if verbose: - loglevel = logging.DEBUG - elif quiet: - loglevel = logging.WARN - - logging.basicConfig(format=logformat, level=loglevel) - - if verbose and quiet: - logging.critical(_("Conflicting arguments: '--verbose' and '--quiet' " - "can not be specified at the same time.")) - sys.exit(1) - - # Trick argparse into displaying the right usage when --help is used. 
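The preparse_plugin()/find_plugins() helpers above define the plugin convention: a module named fdroid_<command> on the Python path that sets a module-level fdroid_summary string and defines main(); the command name is the module name with the fdroid_ prefix stripped (the examples/fdroid_*.py files removed earlier in this patch follow the same pattern). A minimal, hypothetical plugin sketch, with the file name and behaviour invented purely for illustration:

# fdroid_hello.py -- would show up as "fdroid hello"
from argparse import ArgumentParser
from fdroidserver import common

fdroid_summary = 'print a friendly greeting'

def main():
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    common.parse_args(parser)
    print('hello from an fdroid plugin')

if __name__ == '__main__':
    main()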
- sys.argv[0] += ' ' + command - - del sys.argv[1] - if command in COMMANDS.keys(): - # import is named import_subcommand internally b/c import is reserved by Python - command = 'import_subcommand' if command == 'import' else command - mod = __import__('fdroidserver.' + command, None, None, [command]) - else: - mod = __import__(available_plugins[command]['name'], None, None, [command]) - - system_encoding = sys.getdefaultencoding() - if system_encoding is None or system_encoding.lower() not in ('utf-8', 'utf8'): - logging.warning(_("Encoding is set to '{enc}' fdroid might run " - "into encoding issues. Please set it to 'UTF-8' " - "for best results.".format(enc=system_encoding))) - - try: - mod.main() - # These are ours, contain a proper message and are "expected" - except (fdroidserver.common.FDroidException, - fdroidserver.metadata.MetaDataException) as e: - if verbose: - raise - else: - logging.critical(str(e)) - sys.exit(1) - except ArgumentError as e: - logging.critical(str(e)) - sys.exit(1) - except KeyboardInterrupt: - print('') - fdroidserver.common.force_exit(1) - # These should only be unexpected crashes due to bugs in the code - # str(e) often doesn't contain a reason, so just show the backtrace - except Exception as e: - logging.critical(_("Unknown exception found!")) - raise e - sys.exit(0) - - -if __name__ == "__main__": - main() diff --git a/fdroidserver/_yaml.py b/fdroidserver/_yaml.py deleted file mode 100644 index 260f67c0..00000000 --- a/fdroidserver/_yaml.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2025, Hans-Christoph Steiner -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -"""Standard YAML parsing and dumping. - -YAML 1.2 is the preferred format for all data files. When loading -F-Droid formats like config.yml and .yml, YAML 1.2 is -forced, and older YAML constructs should be considered an error. - -It is OK to load and dump files in other YAML versions if they are -externally defined formats, like FUNDING.yml. In those cases, these -common instances might not be appropriate to use. - -There is a separate instance for dumping based on the "round trip" aka -"rt" mode. The "rt" mode maintains order while the "safe" mode sorts -the output. Also, yaml.version is not forced in the dumper because that -makes it write out a "%YAML 1.2" header. F-Droid's formats are -explicitly defined as YAML 1.2 and meant to be human-editable. So that -header gets in the way. - -""" - -import ruamel.yaml - -yaml = ruamel.yaml.YAML(typ='safe') -yaml.version = (1, 2) - -yaml_dumper = ruamel.yaml.YAML(typ='rt') - - -def config_dump(config, fp=None): - """Dump config data in YAML 1.2 format without headers. - - This outputs YAML in a string that is suitable for use in regexps - and string replacements, as well as complete files. It is therefore - explicitly set up to avoid writing out headers and footers. - - This is modeled after PyYAML's yaml.dump(), which can dump to a file - or return a string. 
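The _yaml.py helper removed above wraps two shared ruamel.yaml instances (a YAML 1.2 "safe" loader and a round-trip dumper) plus config_dump(), which returns a header-free string when no file object is passed. A small sketch of the intended round trip, assuming the module exactly as shown in this hunk (the config keys are arbitrary examples):

from fdroidserver._yaml import yaml, config_dump

# Load with the shared YAML 1.2 "safe" instance, then dump a fragment
# without %YAML/--- markers, as config_dump() is documented to do.
config = yaml.load('repo_name: My Repo\narchive_older: 3\n')
print(config_dump(config), end='')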
- - https://yaml.dev/doc/ruamel.yaml/example/#Output_of_%60dump()%60_as_a_string - - """ - dumper = ruamel.yaml.YAML(typ='rt') - dumper.default_flow_style = False - dumper.explicit_start = False - dumper.explicit_end = False - if fp is None: - with ruamel.yaml.compat.StringIO() as fp: - dumper.dump(config, fp) - return fp.getvalue() - dumper.dump(config, fp) diff --git a/fdroidserver/apksigcopier.py b/fdroidserver/apksigcopier.py deleted file mode 100644 index f36de2eb..00000000 --- a/fdroidserver/apksigcopier.py +++ /dev/null @@ -1,1019 +0,0 @@ -#!/usr/bin/python3 -# encoding: utf-8 -# SPDX-FileCopyrightText: 2023 FC Stegerman -# SPDX-License-Identifier: GPL-3.0-or-later - -# -- ; {{{1 -# -# File : apksigcopier -# Maintainer : FC Stegerman -# Date : 2023-02-08 -# -# Copyright : Copyright (C) 2023 FC Stegerman -# Version : v1.1.1 -# License : GPLv3+ -# -# -- ; }}}1 - -""" -Copy/extract/patch android apk signatures & compare apks. - -apksigcopier is a tool for copying android APK signatures from a signed APK to -an unsigned one (in order to verify reproducible builds). - -It can also be used to compare two APKs with different signatures; this requires -apksigner. - - -CLI -=== - -$ apksigcopier extract [OPTIONS] SIGNED_APK OUTPUT_DIR -$ apksigcopier patch [OPTIONS] METADATA_DIR UNSIGNED_APK OUTPUT_APK -$ apksigcopier copy [OPTIONS] SIGNED_APK UNSIGNED_APK OUTPUT_APK -$ apksigcopier compare [OPTIONS] FIRST_APK SECOND_APK - -The following environment variables can be set to 1, yes, or true to -override the default behaviour: - -* set APKSIGCOPIER_EXCLUDE_ALL_META=1 to exclude all metadata files -* set APKSIGCOPIER_COPY_EXTRA_BYTES=1 to copy extra bytes after data (e.g. a v2 sig) -* set APKSIGCOPIER_SKIP_REALIGNMENT=1 to skip realignment of ZIP entries - - -API -=== - ->> from apksigcopier import do_extract, do_patch, do_copy, do_compare ->> do_extract(signed_apk, output_dir, v1_only=NO) ->> do_patch(metadata_dir, unsigned_apk, output_apk, v1_only=NO) ->> do_copy(signed_apk, unsigned_apk, output_apk, v1_only=NO) ->> do_compare(first_apk, second_apk, unsigned=False) - -You can use False, None, and True instead of NO, AUTO, and YES respectively. - -The following global variables (which default to False), can be set to -override the default behaviour: - -* set exclude_all_meta=True to exclude all metadata files -* set copy_extra_bytes=True to copy extra bytes after data (e.g. a v2 sig) -* set skip_realignment=True to skip realignment of ZIP entries -""" - -import glob -import json -import os -import re -import struct -import sys -import zipfile -import zlib -from collections import namedtuple -from typing import ( - Any, - BinaryIO, - Callable, - Dict, - Iterable, - Iterator, - Optional, - Tuple, - Union, -) - -__version__ = "1.1.1" -NAME = "apksigcopier" - -if sys.version_info >= (3, 8): - from typing import Literal - NoAutoYes = Literal["no", "auto", "yes"] -else: - NoAutoYes = str - -DateTime = Tuple[int, int, int, int, int, int] -NoAutoYesBoolNone = Union[NoAutoYes, bool, None] -ZipInfoDataPairs = Iterable[Tuple[zipfile.ZipInfo, bytes]] - -SIGBLOCK, SIGOFFSET = "APKSigningBlock", "APKSigningBlockOffset" -NOAUTOYES: Tuple[NoAutoYes, NoAutoYes, NoAutoYes] = ("no", "auto", "yes") -NO, AUTO, YES = NOAUTOYES -APK_META = re.compile(r"^META-INF/([0-9A-Za-z_-]+\.(SF|RSA|DSA|EC)|MANIFEST\.MF)$") -META_EXT: Tuple[str, ...] = ("SF", "RSA|DSA|EC", "MF") -COPY_EXCLUDE: Tuple[str, ...] 
= ("META-INF/MANIFEST.MF",) -DATETIMEZERO: DateTime = (1980, 0, 0, 0, 0, 0) - -################################################################################ -# -# NB: these values are all from apksigner (the first element of each tuple, same -# as APKZipInfo) or signflinger/zipflinger, except for external_attr w/ 0664 -# permissions and flag_bits 0x08, added for completeness. -# -# NB: zipflinger changed from 0666 to 0644 in commit 895ba5fba6ab84617dd67e38f456a8f96aa37ff0 -# -# https://android.googlesource.com/platform/tools/apksig -# src/main/java/com/android/apksig/internal/zip/{CentralDirectoryRecord,LocalFileRecord,ZipUtils}.java -# https://android.googlesource.com/platform/tools/base -# signflinger/src/com/android/signflinger/SignedApk.java -# zipflinger/src/com/android/zipflinger/{CentralDirectoryRecord,LocalFileHeader,Source}.java -# -################################################################################ - -VALID_ZIP_META = dict( - compresslevel=(9, 1), # best compression, best speed - create_system=(0, 3), # fat, unx - create_version=(20, 0), # 2.0, 0.0 - external_attr=(0, # N/A - 0o100644 << 16, # regular file rw-r--r-- - 0o100664 << 16, # regular file rw-rw-r-- - 0o100666 << 16), # regular file rw-rw-rw- - extract_version=(20, 0), # 2.0, 0.0 - flag_bits=(0x800, 0, 0x08, 0x808), # 0x800 = utf8, 0x08 = data_descriptor -) - -ZipData = namedtuple("ZipData", ("cd_offset", "eocd_offset", "cd_and_eocd")) - -exclude_all_meta = False # exclude all metadata files in copy_apk() -copy_extra_bytes = False # copy extra bytes after data in copy_apk() -skip_realignment = False # skip realignment of ZIP entries in copy_apk() - - -class APKSigCopierError(Exception): - """Base class for errors.""" - - -class APKSigningBlockError(APKSigCopierError): - """Something wrong with the APK Signing Block.""" - - -class NoAPKSigningBlock(APKSigningBlockError): - """APK Signing Block Missing.""" - - -class ZipError(APKSigCopierError): - """Something wrong with ZIP file.""" - - -# FIXME: is there a better alternative? -class ReproducibleZipInfo(zipfile.ZipInfo): - """Reproducible ZipInfo hack.""" - - _override: Dict[str, Any] = {} - - def __init__(self, zinfo: zipfile.ZipInfo, **override: Any) -> None: - # pylint: disable=W0231 - if override: - self._override = {**self._override, **override} - for k in self.__slots__: - if hasattr(zinfo, k): - setattr(self, k, getattr(zinfo, k)) - - def __getattribute__(self, name: str) -> Any: - if name != "_override": - try: - return self._override[name] - except KeyError: - pass - return object.__getattribute__(self, name) - - -# See VALID_ZIP_META -class APKZipInfo(ReproducibleZipInfo): - """Reproducible ZipInfo for APK files.""" - - COMPRESSLEVEL = 9 - - _override = dict( - compress_type=8, - create_system=0, - create_version=20, - date_time=DATETIMEZERO, - external_attr=0, - extract_version=20, - flag_bits=0x800, - ) - - -def noautoyes(value: NoAutoYesBoolNone) -> NoAutoYes: - """ - Turn False into NO, None into AUTO, and True into YES. 
- - >>> from apksigcopier import noautoyes, NO, AUTO, YES - >>> noautoyes(False) == NO == noautoyes(NO) - True - >>> noautoyes(None) == AUTO == noautoyes(AUTO) - True - >>> noautoyes(True) == YES == noautoyes(YES) - True - - """ - if isinstance(value, str): - if value not in NOAUTOYES: - raise ValueError("expected NO, AUTO, or YES") - return value - try: - return {False: NO, None: AUTO, True: YES}[value] - except KeyError: - raise ValueError("expected False, None, or True") # pylint: disable=W0707 - - -def is_meta(filename: str) -> bool: - """ - Check whether filename is a JAR metadata file. - - Returns whether filename is a v1 (JAR) signature file (.SF), signature block - file (.RSA, .DSA, or .EC), or manifest (MANIFEST.MF). - - See https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html - - >>> from apksigcopier import is_meta - >>> is_meta("classes.dex") - False - >>> is_meta("META-INF/CERT.SF") - True - >>> is_meta("META-INF/CERT.RSA") - True - >>> is_meta("META-INF/MANIFEST.MF") - True - >>> is_meta("META-INF/OOPS") - False - - """ - return APK_META.fullmatch(filename) is not None - - -def exclude_from_copying(filename: str) -> bool: - """ - Check whether to exclude a file during copy_apk(). - - Excludes filenames in COPY_EXCLUDE (i.e. MANIFEST.MF) by default; when - exclude_all_meta is set to True instead, excludes all metadata files as - matched by is_meta(). - - Directories are always excluded. - - >>> import apksigcopier - >>> from apksigcopier import exclude_from_copying - >>> exclude_from_copying("classes.dex") - False - >>> exclude_from_copying("foo/") - True - >>> exclude_from_copying("META-INF/") - True - >>> exclude_from_copying("META-INF/MANIFEST.MF") - True - >>> exclude_from_copying("META-INF/CERT.SF") - False - >>> exclude_from_copying("META-INF/OOPS") - False - - >>> apksigcopier.exclude_all_meta = True - >>> exclude_from_copying("classes.dex") - False - >>> exclude_from_copying("META-INF/") - True - >>> exclude_from_copying("META-INF/MANIFEST.MF") - True - >>> exclude_from_copying("META-INF/CERT.SF") - True - >>> exclude_from_copying("META-INF/OOPS") - False - - """ - return exclude_meta(filename) if exclude_all_meta else exclude_default(filename) - - -def exclude_default(filename: str) -> bool: - """ - Like exclude_from_copying(). - - Excludes directories and filenames in COPY_EXCLUDE (i.e. MANIFEST.MF). - """ - return is_directory(filename) or filename in COPY_EXCLUDE - - -def exclude_meta(filename: str) -> bool: - """Like exclude_from_copying(); excludes directories and all metadata files.""" - return is_directory(filename) or is_meta(filename) - - -def is_directory(filename: str) -> bool: - """ZIP entries with filenames that end with a '/' are directories.""" - return filename.endswith("/") - - -################################################################################ -# -# There is usually a 132-byte virtual entry at the start of an APK signed with a -# v1 signature by signflinger/zipflinger; almost certainly this is a default -# manifest ZIP entry created at initialisation, deleted (from the CD but not -# from the file) during v1 signing, and eventually replaced by a virtual entry. -# -# >>> (30 + len("META-INF/MANIFEST.MF") + -# ... len("Manifest-Version: 1.0\r\n" -# ... "Created-By: Android Gradle 7.1.3\r\n" -# ... "Built-By: Signflinger\r\n\r\n")) -# 132 -# -# NB: they could be a different size, depending on Created-By and Built-By. -# -# FIXME: could virtual entries occur elsewhere as well? 
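apksigcopier's public entry points are the do_extract/do_patch/do_copy/do_compare functions listed in the API section of the module docstring near the top of this file. A brief usage sketch with invented file names; the unsigned APK is normally a reproducible rebuild of the signed one, otherwise the transplanted signature will not verify:

from fdroidserver import apksigcopier

# Paths are invented; the calls mirror the docstring's API section.
apksigcopier.do_extract('app-signed.apk', 'sig-metadata/')                     # save signature files
apksigcopier.do_patch('sig-metadata/', 'app-unsigned.apk', 'app-patched.apk')  # re-apply them later
apksigcopier.do_copy('app-signed.apk', 'app-unsigned.apk', 'app-copied.apk')   # or do both in one step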
-# -# https://android.googlesource.com/platform/tools/base -# signflinger/src/com/android/signflinger/SignedApk.java -# zipflinger/src/com/android/zipflinger/{LocalFileHeader,ZipArchive}.java -# -################################################################################ - -def zipflinger_virtual_entry(size: int) -> bytes: - """Create zipflinger virtual entry.""" - if size < 30: - raise ValueError("Minimum size for virtual entries is 30 bytes") - return ( - # header extract_version flag_bits - b"\x50\x4b\x03\x04" b"\x00\x00" b"\x00\x00" - # compress_type (1981,1,1,1,1,2) crc32 - b"\x00\x00" b"\x21\x08\x21\x02" b"\x00\x00\x00\x00" - # compress_size file_size filename length - b"\x00\x00\x00\x00" b"\x00\x00\x00\x00" b"\x00\x00" - ) + int.to_bytes(size - 30, 2, "little") + b"\x00" * (size - 30) - - -def detect_zfe(apkfile: str) -> Optional[int]: - """ - Detect zipflinger virtual entry. - - Returns the size of the virtual entry if found, None otherwise. - - Raises ZipError if the size is less than 30 or greater than 4096, or the - data isn't all zeroes. - """ - with open(apkfile, "rb") as fh: - zfe_start = zipflinger_virtual_entry(30)[:28] # w/o len(extra) - if fh.read(28) == zfe_start: - zfe_size = 30 + int.from_bytes(fh.read(2), "little") - if not (30 <= zfe_size <= 4096): - raise ZipError("Unsupported virtual entry size") - if not fh.read(zfe_size - 30) == b"\x00" * (zfe_size - 30): - raise ZipError("Unsupported virtual entry data") - return zfe_size - return None - - -################################################################################ -# -# https://en.wikipedia.org/wiki/ZIP_(file_format) -# https://source.android.com/docs/security/features/apksigning/v2#apk-signing-block-format -# -# ================================= -# | Contents of ZIP entries | -# ================================= -# | APK Signing Block | -# | ----------------------------- | -# | | size (w/o this) uint64 LE | | -# | | ... | | -# | | size (again) uint64 LE | | -# | | "APK Sig Block 42" (16B) | | -# | ----------------------------- | -# ================================= -# | ZIP Central Directory | -# ================================= -# | ZIP End of Central Directory | -# | ----------------------------- | -# | | 0x06054b50 ( 4B) | | -# | | ... (12B) | | -# | | CD Offset ( 4B) | | -# | | ... | | -# | ----------------------------- | -# ================================= -# -################################################################################ - - -# FIXME: makes certain assumptions and doesn't handle all valid ZIP files! -# FIXME: support zip64? -# FIXME: handle utf8 filenames w/o utf8 flag (as produced by zipflinger)? -# https://android.googlesource.com/platform/tools/apksig -# src/main/java/com/android/apksig/ApkSigner.java -def copy_apk(unsigned_apk: str, output_apk: str, *, - copy_extra: Optional[bool] = None, - exclude: Optional[Callable[[str], bool]] = None, - realign: Optional[bool] = None, - zfe_size: Optional[int] = None) -> DateTime: - """ - Copy APK like apksigner would, excluding files matched by exclude_from_copying(). - - Adds a zipflinger virtual entry of zfe_size bytes if one is not already - present and zfe_size is not None. - - Returns max date_time. - - The following global variables (which default to False), can be set to - override the default behaviour: - - * set exclude_all_meta=True to exclude all metadata files - * set copy_extra_bytes=True to copy extra bytes after data (e.g. 
a v2 sig) - * set skip_realignment=True to skip realignment of ZIP entries - - The default behaviour can also be changed using the keyword-only arguments - exclude, copy_extra, and realign; these take precedence over the global - variables when not None. NB: exclude is a callable, not a bool; realign is - the inverse of skip_realignment. - - >>> import apksigcopier, os, zipfile - >>> apk = "test/apks/apks/golden-aligned-in.apk" - >>> with zipfile.ZipFile(apk, "r") as zf: - ... infos_in = zf.infolist() - >>> with tempfile.TemporaryDirectory() as tmpdir: - ... out = os.path.join(tmpdir, "out.apk") - ... apksigcopier.copy_apk(apk, out) - ... with zipfile.ZipFile(out, "r") as zf: - ... infos_out = zf.infolist() - (2017, 5, 15, 11, 28, 40) - >>> for i in infos_in: - ... print(i.filename) - META-INF/ - META-INF/MANIFEST.MF - AndroidManifest.xml - classes.dex - temp.txt - lib/armeabi/fake.so - resources.arsc - temp2.txt - >>> for i in infos_out: - ... print(i.filename) - AndroidManifest.xml - classes.dex - temp.txt - lib/armeabi/fake.so - resources.arsc - temp2.txt - >>> infos_in[2] - - >>> infos_out[0] - - >>> repr(infos_in[2:]) == repr(infos_out) - True - - """ - if copy_extra is None: - copy_extra = copy_extra_bytes - if exclude is None: - exclude = exclude_from_copying - if realign is None: - realign = not skip_realignment - with zipfile.ZipFile(unsigned_apk, "r") as zf: - infos = zf.infolist() - zdata = zip_data(unsigned_apk) - offsets = {} - with open(unsigned_apk, "rb") as fhi, open(output_apk, "w+b") as fho: - if zfe_size: - zfe = zipflinger_virtual_entry(zfe_size) - if fhi.read(zfe_size) != zfe: - fho.write(zfe) - fhi.seek(0) - for info in sorted(infos, key=lambda info: info.header_offset): - off_i = fhi.tell() - if info.header_offset > off_i: - # copy extra bytes - fho.write(fhi.read(info.header_offset - off_i)) - hdr = fhi.read(30) - if hdr[:4] != b"\x50\x4b\x03\x04": - raise ZipError("Expected local file header signature") - n, m = struct.unpack(" bytes: - align = 4096 if info.filename.endswith(".so") else 4 - old_off = 30 + n + m + info.header_offset - new_off = 30 + n + m + off_o - old_xtr = hdr[30 + n:30 + n + m] - new_xtr = b"" - while len(old_xtr) >= 4: - hdr_id, size = struct.unpack(" len(old_xtr) - 4: - break - if not (hdr_id == 0 and size == 0): - if hdr_id == 0xd935: - if size >= 2: - align = int.from_bytes(old_xtr[4:6], "little") - else: - new_xtr += old_xtr[:size + 4] - old_xtr = old_xtr[size + 4:] - if old_off % align == 0 and new_off % align != 0: - if pad_like_apksigner: - pad = (align - (new_off - m + len(new_xtr) + 6) % align) % align - xtr = new_xtr + struct.pack(" None: - while size > 0: - data = fhi.read(min(size, blocksize)) - if not data: - break - size -= len(data) - fho.write(data) - if size != 0: - raise ZipError("Unexpected EOF") - - -def extract_meta(signed_apk: str) -> Iterator[Tuple[zipfile.ZipInfo, bytes]]: - """ - Extract v1 signature metadata files from signed APK. - - Yields (ZipInfo, data) pairs. - - >>> from apksigcopier import extract_meta - >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" - >>> meta = tuple(extract_meta(apk)) - >>> [ x.filename for x, _ in meta ] - ['META-INF/RSA-2048.SF', 'META-INF/RSA-2048.RSA', 'META-INF/MANIFEST.MF'] - >>> for line in meta[0][1].splitlines()[:4]: - ... print(line.decode()) - Signature-Version: 1.0 - Created-By: 1.0 (Android) - SHA-256-Digest-Manifest: hz7AxDJU9Namxoou/kc4Z2GVRS9anCGI+M52tbCsXT0= - X-Android-APK-Signed: 2, 3 - >>> for line in meta[2][1].splitlines()[:2]: - ... 
print(line.decode()) - Manifest-Version: 1.0 - Created-By: 1.8.0_45-internal (Oracle Corporation) - - """ - with zipfile.ZipFile(signed_apk, "r") as zf_sig: - for info in zf_sig.infolist(): - if is_meta(info.filename): - yield info, zf_sig.read(info.filename) - - -def extract_differences(signed_apk: str, extracted_meta: ZipInfoDataPairs) \ - -> Optional[Dict[str, Any]]: - """ - Extract ZIP metadata differences from signed APK. - - >>> import apksigcopier as asc, pprint - >>> apk = "test/apks/apks/debuggable-boolean.apk" - >>> meta = tuple(asc.extract_meta(apk)) - >>> [ x.filename for x, _ in meta ] - ['META-INF/CERT.SF', 'META-INF/CERT.RSA', 'META-INF/MANIFEST.MF'] - >>> diff = asc.extract_differences(apk, meta) - >>> pprint.pprint(diff) - {'files': {'META-INF/CERT.RSA': {'flag_bits': 2056}, - 'META-INF/CERT.SF': {'flag_bits': 2056}, - 'META-INF/MANIFEST.MF': {'flag_bits': 2056}}} - - >>> meta[2][0].extract_version = 42 - >>> try: - ... asc.extract_differences(apk, meta) - ... except asc.ZipError as e: - ... print(e) - Unsupported extract_version - - >>> asc.validate_differences(diff) is None - True - >>> diff["files"]["META-INF/OOPS"] = {} - >>> asc.validate_differences(diff) - ".files key 'META-INF/OOPS' is not a metadata file" - >>> del diff["files"]["META-INF/OOPS"] - >>> diff["files"]["META-INF/CERT.RSA"]["compresslevel"] = 42 - >>> asc.validate_differences(diff) - ".files['META-INF/CERT.RSA'].compresslevel has an unexpected value" - >>> diff["oops"] = 42 - >>> asc.validate_differences(diff) - 'contains unknown key(s)' - - """ - differences: Dict[str, Any] = {} - files = {} - for info, data in extracted_meta: - diffs = {} - for k in VALID_ZIP_META: - if k != "compresslevel": - v = getattr(info, k) - if v != APKZipInfo._override[k]: - if v not in VALID_ZIP_META[k]: - raise ZipError(f"Unsupported {k}") - diffs[k] = v - level = _get_compresslevel(signed_apk, info, data) - if level != APKZipInfo.COMPRESSLEVEL: - diffs["compresslevel"] = level - if diffs: - files[info.filename] = diffs - if files: - differences["files"] = files - zfe_size = detect_zfe(signed_apk) - if zfe_size: - differences["zipflinger_virtual_entry"] = zfe_size - return differences or None - - -def validate_differences(differences: Dict[str, Any]) -> Optional[str]: - """ - Validate differences dict. - - Returns None if valid, error otherwise. 
- """ - if set(differences) - {"files", "zipflinger_virtual_entry"}: - return "contains unknown key(s)" - if "zipflinger_virtual_entry" in differences: - if type(differences["zipflinger_virtual_entry"]) is not int: - return ".zipflinger_virtual_entry is not an int" - if not (30 <= differences["zipflinger_virtual_entry"] <= 4096): - return ".zipflinger_virtual_entry is < 30 or > 4096" - if "files" in differences: - if not isinstance(differences["files"], dict): - return ".files is not a dict" - for name, info in differences["files"].items(): - if not is_meta(name): - return f".files key {name!r} is not a metadata file" - if not isinstance(info, dict): - return f".files[{name!r}] is not a dict" - if set(info) - set(VALID_ZIP_META): - return f".files[{name!r}] contains unknown key(s)" - for k, v in info.items(): - if v not in VALID_ZIP_META[k]: - return f".files[{name!r}].{k} has an unexpected value" - return None - - -def _get_compresslevel(apkfile: str, info: zipfile.ZipInfo, data: bytes) -> int: - if info.compress_type != 8: - raise ZipError("Unsupported compress_type") - crc = _get_compressed_crc(apkfile, info) - for level in VALID_ZIP_META["compresslevel"]: - comp = zlib.compressobj(level, 8, -15) - if zlib.crc32(comp.compress(data) + comp.flush()) == crc: - return level - raise ZipError("Unsupported compresslevel") - - -def _get_compressed_crc(apkfile: str, info: zipfile.ZipInfo) -> int: - with open(apkfile, "rb") as fh: - fh.seek(info.header_offset) - hdr = fh.read(30) - if hdr[:4] != b"\x50\x4b\x03\x04": - raise ZipError("Expected local file header signature") - n, m = struct.unpack(" None: - """ - Add v1 signature metadata to APK (removes v2 sig block, if any). - - >>> import apksigcopier as asc - >>> unsigned_apk = "test/apks/apks/golden-aligned-in.apk" - >>> signed_apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" - >>> meta = tuple(asc.extract_meta(signed_apk)) - >>> [ x.filename for x, _ in meta ] - ['META-INF/RSA-2048.SF', 'META-INF/RSA-2048.RSA', 'META-INF/MANIFEST.MF'] - >>> with zipfile.ZipFile(unsigned_apk, "r") as zf: - ... infos_in = zf.infolist() - >>> with tempfile.TemporaryDirectory() as tmpdir: - ... out = os.path.join(tmpdir, "out.apk") - ... asc.copy_apk(unsigned_apk, out) - ... asc.patch_meta(meta, out) - ... with zipfile.ZipFile(out, "r") as zf: - ... infos_out = zf.infolist() - (2017, 5, 15, 11, 28, 40) - >>> for i in infos_in: - ... print(i.filename) - META-INF/ - META-INF/MANIFEST.MF - AndroidManifest.xml - classes.dex - temp.txt - lib/armeabi/fake.so - resources.arsc - temp2.txt - >>> for i in infos_out: - ... print(i.filename) - AndroidManifest.xml - classes.dex - temp.txt - lib/armeabi/fake.so - resources.arsc - temp2.txt - META-INF/RSA-2048.SF - META-INF/RSA-2048.RSA - META-INF/MANIFEST.MF - - """ - with zipfile.ZipFile(output_apk, "r") as zf_out: - for info in zf_out.infolist(): - if is_meta(info.filename): - raise ZipError("Unexpected metadata") - with zipfile.ZipFile(output_apk, "a") as zf_out: - for info, data in extracted_meta: - if differences and "files" in differences: - more = differences["files"].get(info.filename, {}).copy() - else: - more = {} - level = more.pop("compresslevel", APKZipInfo.COMPRESSLEVEL) - zinfo = APKZipInfo(info, date_time=date_time, **more) - zf_out.writestr(zinfo, data, compresslevel=level) - - -def extract_v2_sig(apkfile: str, expected: bool = True) -> Optional[Tuple[int, bytes]]: - """ - Extract APK Signing Block and offset from APK. 
- - When successful, returns (sb_offset, sig_block); otherwise raises - NoAPKSigningBlock when expected is True, else returns None. - - >>> import apksigcopier as asc - >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" - >>> sb_offset, sig_block = asc.extract_v2_sig(apk) - >>> sb_offset - 8192 - >>> len(sig_block) - 4096 - - >>> apk = "test/apks/apks/golden-aligned-in.apk" - >>> try: - ... asc.extract_v2_sig(apk) - ... except asc.NoAPKSigningBlock as e: - ... print(e) - No APK Signing Block - - """ - cd_offset = zip_data(apkfile).cd_offset - with open(apkfile, "rb") as fh: - fh.seek(cd_offset - 16) - if fh.read(16) != b"APK Sig Block 42": - if expected: - raise NoAPKSigningBlock("No APK Signing Block") - return None - fh.seek(-24, os.SEEK_CUR) - sb_size2 = int.from_bytes(fh.read(8), "little") - fh.seek(-sb_size2 + 8, os.SEEK_CUR) - sb_size1 = int.from_bytes(fh.read(8), "little") - if sb_size1 != sb_size2: - raise APKSigningBlockError("APK Signing Block sizes not equal") - fh.seek(-8, os.SEEK_CUR) - sb_offset = fh.tell() - sig_block = fh.read(sb_size2 + 8) - return sb_offset, sig_block - - -# FIXME: OSError for APKs < 1024 bytes [wontfix] -def zip_data(apkfile: str, count: int = 1024) -> ZipData: - """ - Extract central directory, EOCD, and offsets from ZIP. - - Returns ZipData. - - >>> import apksigcopier - >>> apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" - >>> data = apksigcopier.zip_data(apk) - >>> data.cd_offset, data.eocd_offset - (12288, 12843) - >>> len(data.cd_and_eocd) - 577 - - """ - with open(apkfile, "rb") as fh: - fh.seek(-count, os.SEEK_END) - data = fh.read() - pos = data.rfind(b"\x50\x4b\x05\x06") - if pos == -1: - raise ZipError("Expected end of central directory record (EOCD)") - fh.seek(pos - len(data), os.SEEK_CUR) - eocd_offset = fh.tell() - fh.seek(16, os.SEEK_CUR) - cd_offset = int.from_bytes(fh.read(4), "little") - fh.seek(cd_offset) - cd_and_eocd = fh.read() - return ZipData(cd_offset, eocd_offset, cd_and_eocd) - - -# FIXME: can we determine signed_sb_offset? -def patch_v2_sig(extracted_v2_sig: Tuple[int, bytes], output_apk: str) -> None: - """ - Implant extracted v2/v3 signature into APK. - - >>> import apksigcopier as asc - >>> unsigned_apk = "test/apks/apks/golden-aligned-in.apk" - >>> signed_apk = "test/apks/apks/golden-aligned-v1v2v3-out.apk" - >>> meta = tuple(asc.extract_meta(signed_apk)) - >>> v2_sig = asc.extract_v2_sig(signed_apk) - >>> with tempfile.TemporaryDirectory() as tmpdir: - ... out = os.path.join(tmpdir, "out.apk") - ... date_time = asc.copy_apk(unsigned_apk, out) - ... asc.patch_meta(meta, out, date_time=date_time) - ... asc.extract_v2_sig(out, expected=False) is None - ... asc.patch_v2_sig(v2_sig, out) - ... asc.extract_v2_sig(out) == v2_sig - ... with open(signed_apk, "rb") as a, open(out, "rb") as b: - ... 
a.read() == b.read() - True - True - True - - """ - signed_sb_offset, signed_sb = extracted_v2_sig - data_out = zip_data(output_apk) - if signed_sb_offset < data_out.cd_offset: - raise APKSigningBlockError("APK Signing Block offset < central directory offset") - padding = b"\x00" * (signed_sb_offset - data_out.cd_offset) - offset = len(signed_sb) + len(padding) - with open(output_apk, "r+b") as fh: - fh.seek(data_out.cd_offset) - fh.write(padding) - fh.write(signed_sb) - fh.write(data_out.cd_and_eocd) - fh.seek(data_out.eocd_offset + offset + 16) - fh.write(int.to_bytes(data_out.cd_offset + offset, 4, "little")) - - -def patch_apk(extracted_meta: ZipInfoDataPairs, extracted_v2_sig: Optional[Tuple[int, bytes]], - unsigned_apk: str, output_apk: str, *, - differences: Optional[Dict[str, Any]] = None, - exclude: Optional[Callable[[str], bool]] = None) -> None: - """Patch extracted_meta + extracted_v2_sig (if not None) onto unsigned_apk and save as output_apk.""" - if differences and "zipflinger_virtual_entry" in differences: - zfe_size = differences["zipflinger_virtual_entry"] - else: - zfe_size = None - date_time = copy_apk(unsigned_apk, output_apk, exclude=exclude, zfe_size=zfe_size) - patch_meta(extracted_meta, output_apk, date_time=date_time, differences=differences) - if extracted_v2_sig is not None: - patch_v2_sig(extracted_v2_sig, output_apk) - - -# FIXME: support multiple signers? -def do_extract(signed_apk: str, output_dir: str, v1_only: NoAutoYesBoolNone = NO, - *, ignore_differences: bool = False) -> None: - """ - Extract signatures from signed_apk and save in output_dir. - - The v1_only parameter controls whether the absence of a v1 signature is - considered an error or not: - * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; - * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; - * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. - """ - v1_only = noautoyes(v1_only) - extracted_meta = tuple(extract_meta(signed_apk)) - if len(extracted_meta) not in (len(META_EXT), 0): - raise APKSigCopierError("Unexpected or missing metadata files in signed_apk") - for info, data in extracted_meta: - name = os.path.basename(info.filename) - with open(os.path.join(output_dir, name), "wb") as fh: - fh.write(data) - if v1_only == YES: - if not extracted_meta: - raise APKSigCopierError("Expected v1 signature") - return - expected = v1_only == NO - extracted_v2_sig = extract_v2_sig(signed_apk, expected=expected) - if extracted_v2_sig is None: - if not extracted_meta: - raise APKSigCopierError("Expected v1 and/or v2/v3 signature, found neither") - return - signed_sb_offset, signed_sb = extracted_v2_sig - with open(os.path.join(output_dir, SIGOFFSET), "w") as fh: - fh.write(str(signed_sb_offset) + "\n") - with open(os.path.join(output_dir, SIGBLOCK), "wb") as fh: - fh.write(signed_sb) - if not ignore_differences: - differences = extract_differences(signed_apk, extracted_meta) - if differences: - with open(os.path.join(output_dir, "differences.json"), "w") as fh: - json.dump(differences, fh, sort_keys=True, indent=2) - fh.write("\n") - - -# FIXME: support multiple signers? -def do_patch(metadata_dir: str, unsigned_apk: str, output_apk: str, - v1_only: NoAutoYesBoolNone = NO, *, - exclude: Optional[Callable[[str], bool]] = None, - ignore_differences: bool = False) -> None: - """ - Patch signatures from metadata_dir onto unsigned_apk and save as output_apk. 
- - The v1_only parameter controls whether the absence of a v1 signature is - considered an error or not: - * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; - * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; - * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. - """ - v1_only = noautoyes(v1_only) - extracted_meta = [] - differences = None - for pat in META_EXT: - files = [fn for ext in pat.split("|") for fn in - glob.glob(os.path.join(metadata_dir, "*." + ext))] - if len(files) != 1: - continue - info = zipfile.ZipInfo("META-INF/" + os.path.basename(files[0])) - with open(files[0], "rb") as fh: - extracted_meta.append((info, fh.read())) - if len(extracted_meta) not in (len(META_EXT), 0): - raise APKSigCopierError("Unexpected or missing files in metadata_dir") - if v1_only == YES: - extracted_v2_sig = None - else: - sigoffset_file = os.path.join(metadata_dir, SIGOFFSET) - sigblock_file = os.path.join(metadata_dir, SIGBLOCK) - if v1_only == AUTO and not os.path.exists(sigblock_file): - extracted_v2_sig = None - else: - with open(sigoffset_file, "r") as fh: - signed_sb_offset = int(fh.read()) - with open(sigblock_file, "rb") as fh: - signed_sb = fh.read() - extracted_v2_sig = signed_sb_offset, signed_sb - differences_file = os.path.join(metadata_dir, "differences.json") - if not ignore_differences and os.path.exists(differences_file): - with open(differences_file, "r") as fh: - try: - differences = json.load(fh) - except json.JSONDecodeError as e: - raise APKSigCopierError(f"Invalid differences.json: {e}") # pylint: disable=W0707 - error = validate_differences(differences) - if error: - raise APKSigCopierError(f"Invalid differences.json: {error}") - if not extracted_meta and extracted_v2_sig is None: - raise APKSigCopierError("Expected v1 and/or v2/v3 signature, found neither") - patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk, - differences=differences, exclude=exclude) - - -def do_copy(signed_apk: str, unsigned_apk: str, output_apk: str, - v1_only: NoAutoYesBoolNone = NO, *, - exclude: Optional[Callable[[str], bool]] = None, - ignore_differences: bool = False) -> None: - """ - Copy signatures from signed_apk onto unsigned_apk and save as output_apk. - - The v1_only parameter controls whether the absence of a v1 signature is - considered an error or not: - * use v1_only=NO (or v1_only=False) to only accept (v1+)v2/v3 signatures; - * use v1_only=AUTO (or v1_only=None) to automatically detect v2/v3 signatures; - * use v1_only=YES (or v1_only=True) to ignore any v2/v3 signatures. - """ - v1_only = noautoyes(v1_only) - extracted_meta = tuple(extract_meta(signed_apk)) - differences = None - if v1_only == YES: - extracted_v2_sig = None - else: - extracted_v2_sig = extract_v2_sig(signed_apk, expected=v1_only == NO) - if extracted_v2_sig is not None and not ignore_differences: - differences = extract_differences(signed_apk, extracted_meta) - patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk, - differences=differences, exclude=exclude) - -# vim: set tw=80 sw=4 sts=4 et fdm=marker : diff --git a/fdroidserver/asynchronousfilereader/__init__.py b/fdroidserver/asynchronousfilereader/__init__.py index 7ba02b69..e8aa35e5 100644 --- a/fdroidserver/asynchronousfilereader/__init__.py +++ b/fdroidserver/asynchronousfilereader/__init__.py @@ -1,8 +1,9 @@ -"""Simple thread based asynchronous file reader for Python. 
- +""" AsynchronousFileReader ====================== +Simple thread based asynchronous file reader for Python. + see https://github.com/soxofaan/asynchronousfilereader MIT License @@ -12,7 +13,6 @@ Copyright (c) 2014 Stefaan Lippens __version__ = '0.2.1' import threading - try: # Python 2 from Queue import Queue @@ -22,9 +22,10 @@ except ImportError: class AsynchronousFileReader(threading.Thread): - """Helper class to implement asynchronous reading of a file in a separate thread. - - Pushes read lines on a queue to be consumed in another thread. + """ + Helper class to implement asynchronous reading of a file + in a separate thread. Pushes read lines on a queue to + be consumed in another thread. """ def __init__(self, fd, queue=None, autostart=True): @@ -39,7 +40,9 @@ class AsynchronousFileReader(threading.Thread): self.start() def run(self): - """Read lines and put them on the queue (the body of the tread).""" + """ + The body of the tread: read lines and put them on the queue. + """ while True: line = self._fd.readline() if not line: @@ -47,10 +50,15 @@ class AsynchronousFileReader(threading.Thread): self.queue.put(line) def eof(self): - """Check whether there is no more content to expect.""" + """ + Check whether there is no more content to expect. + """ return not self.is_alive() and self.queue.empty() def readlines(self): - """Get currently available lines.""" + """ + Get currently available lines. + """ while not self.queue.empty(): yield self.queue.get() + diff --git a/fdroidserver/btlog.py b/fdroidserver/btlog.py index 7ca3ddbf..43ea2313 100755 --- a/fdroidserver/btlog.py +++ b/fdroidserver/btlog.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -"""Update the binary transparency log for a URL.""" # # btlog.py - part of the FDroid server tools # Copyright (C) 2017, Hans-Christoph Steiner @@ -27,64 +26,51 @@ # client app so its not easy for the server to distinguish this from # the F-Droid client. + import collections +import defusedxml.minidom +import git import glob +import os import json import logging -import os +import requests import shutil import tempfile import zipfile from argparse import ArgumentParser -from typing import Optional -import defusedxml.minidom -import git -import requests - -from . import _, common, deploy +from . import _ +from . import common +from . import server from .exception import FDroidException -def make_binary_transparency_log( - repodirs: collections.abc.Iterable, - btrepo: str = 'binary_transparency', - url: Optional[str] = None, - commit_title: str = 'fdroid update', -): - """Log the indexes in a standalone git repo to serve as a "binary transparency" log. +options = None - Parameters - ---------- - repodirs - The directories of the F-Droid repository to generate the binary - transparency log for. - btrepo - The path to the Git repository of the binary transparency log. - url - The URL of the F-Droid repository to generate the binary transparency - log for. - commit_title - The commit title for commits in the binary transparency log Git - repository. - Notes - ----- - Also see https://www.eff.org/deeplinks/2014/02/open-letter-to-tech-companies . - """ +def make_binary_transparency_log(repodirs, btrepo='binary_transparency', + url=None, + commit_title='fdroid update'): + '''Log the indexes in a standalone git repo to serve as a "binary + transparency" log. 
+ + see: https://www.eff.org/deeplinks/2014/02/open-letter-to-tech-companies + + ''' + logging.info('Committing indexes to ' + btrepo) if os.path.exists(os.path.join(btrepo, '.git')): gitrepo = git.Repo(btrepo) else: if not os.path.exists(btrepo): os.mkdir(btrepo) - gitrepo = git.Repo.init(btrepo, initial_branch=deploy.GIT_BRANCH) + gitrepo = git.Repo.init(btrepo) if not url: url = common.config['repo_url'].rstrip('/') with open(os.path.join(btrepo, 'README.md'), 'w') as fp: - fp.write( - """ + fp.write(""" # Binary Transparency Log for %s This is a log of the signed app index metadata. This is stored in a @@ -94,17 +80,15 @@ F-Droid repository was a publicly released file. For more info on this idea: * https://wiki.mozilla.org/Security/Binary_Transparency -""" - % url[: url.rindex('/')] # strip '/repo' - ) - gitrepo.index.add(['README.md']) +""" % url[:url.rindex('/')]) # strip '/repo' + gitrepo.index.add(['README.md', ]) gitrepo.index.commit('add README') for repodir in repodirs: cpdir = os.path.join(btrepo, repodir) if not os.path.exists(cpdir): os.mkdir(cpdir) - for f in ('index.xml', 'index-v1.json', 'index-v2.json', 'entry.json'): + for f in ('index.xml', 'index-v1.json'): repof = os.path.join(repodir, f) if not os.path.exists(repof): continue @@ -119,8 +103,8 @@ For more info on this idea: output = json.load(fp, object_pairs_hook=collections.OrderedDict) with open(dest, 'w') as fp: json.dump(output, fp, indent=2) - gitrepo.index.add([repof]) - for f in ('index.jar', 'index-v1.jar', 'entry.jar'): + gitrepo.index.add([repof, ]) + for f in ('index.jar', 'index-v1.jar'): repof = os.path.join(repodir, f) if not os.path.exists(repof): continue @@ -132,7 +116,7 @@ For more info on this idea: jarout.writestr(info, jarin.read(info.filename)) jarout.close() jarin.close() - gitrepo.index.add([repof]) + gitrepo.index.add([repof, ]) output_files = [] for root, dirs, files in os.walk(repodir): @@ -153,45 +137,27 @@ For more info on this idea: fslogfile = os.path.join(cpdir, 'filesystemlog.json') with open(fslogfile, 'w') as fp: json.dump(output, fp, indent=2) - gitrepo.index.add([os.path.join(repodir, 'filesystemlog.json')]) + gitrepo.index.add([os.path.join(repodir, 'filesystemlog.json'), ]) for f in glob.glob(os.path.join(cpdir, '*.HTTP-headers.json')): - gitrepo.index.add([os.path.join(repodir, os.path.basename(f))]) + gitrepo.index.add([os.path.join(repodir, os.path.basename(f)), ]) gitrepo.index.commit(commit_title) def main(): - """Generate or update a binary transparency log for a F-Droid repository. + global options - The behaviour of this function is influenced by the configuration file as - well as command line parameters. - - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If the specified or default Git repository does not exist. 
- - """ - parser = ArgumentParser() + parser = ArgumentParser(usage="%(prog)s [options]") common.setup_global_opts(parser) - parser.add_argument( - "--git-repo", - default=os.path.join(os.getcwd(), 'binary_transparency'), - help=_("Path to the git repo to use as the log"), - ) - parser.add_argument( - "-u", - "--url", - default='https://f-droid.org', - help=_("The base URL for the repo to log (default: https://f-droid.org)"), - ) - parser.add_argument( - "--git-remote", - default=None, - help=_("Push the log to this git remote repository"), - ) - options = common.parse_args(parser) + parser.add_argument("--git-repo", + default=os.path.join(os.getcwd(), 'binary_transparency'), + help=_("Path to the git repo to use as the log")) + parser.add_argument("-u", "--url", default='https://f-droid.org', + help=_("The base URL for the repo to log (default: https://f-droid.org)")) + parser.add_argument("--git-remote", default=None, + help=_("Push the log to this git remote repository")) + options = parser.parse_args() if options.verbose: logging.getLogger("requests").setLevel(logging.INFO) @@ -202,8 +168,7 @@ def main(): if not os.path.exists(options.git_repo): raise FDroidException( - '"%s" does not exist! Create it, or use --git-repo' % options.git_repo - ) + '"%s" does not exist! Create it, or use --git-repo' % options.git_repo) session = requests.Session() @@ -216,20 +181,14 @@ def main(): os.makedirs(tempdir, exist_ok=True) gitrepodir = os.path.join(options.git_repo, repodir) os.makedirs(gitrepodir, exist_ok=True) - for f in ( - 'entry.jar', - 'entry.json', - 'index-v1.jar', - 'index-v1.json', - 'index-v2.json', - 'index.jar', - 'index.xml', - ): + for f in ('index.jar', 'index.xml', 'index-v1.jar', 'index-v1.json'): dlfile = os.path.join(tempdir, f) dlurl = options.url + '/' + repodir + '/' + f http_headers_file = os.path.join(gitrepodir, f + '.HTTP-headers.json') - headers = {'User-Agent': 'F-Droid 0.102.3'} + headers = { + 'User-Agent': 'F-Droid 0.102.3' + } etag = None if os.path.exists(http_headers_file): with open(http_headers_file) as fp: @@ -237,9 +196,7 @@ def main(): r = session.head(dlurl, headers=headers, allow_redirects=False) if r.status_code != 200: - logging.debug( - 'HTTP Response (%d), did not download %s' % (r.status_code, dlurl) - ) + logging.debug('HTTP Response (' + str(r.status_code) + '), did not download ' + dlurl) continue if etag and etag == r.headers.get('ETag'): logging.debug('ETag matches, did not download ' + dlurl) @@ -260,11 +217,9 @@ def main(): if new_files: os.chdir(tempdirbase) - make_binary_transparency_log( - repodirs, options.git_repo, options.url, 'fdroid btlog' - ) + make_binary_transparency_log(repodirs, options.git_repo, options.url, 'fdroid btlog') if options.git_remote: - deploy.push_binary_transparency(options.git_repo, options.git_remote) + server.push_binary_transparency(options.git_repo, options.git_remote) shutil.rmtree(tempdirbase, ignore_errors=True) diff --git a/fdroidserver/build.py b/fdroidserver/build.py index 2e716c10..9052f262 100644 --- a/fdroidserver/build.py +++ b/fdroidserver/build.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -"""Build a package from source.""" # # build.py - part of the FDroid server tools # Copyright (C) 2010-2014, Ciaran Gultnieks, ciaran@ciarang.com @@ -18,70 +17,53 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
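Stepping back to the binary transparency helper above: make_binary_transparency_log() keeps a standalone git repo in which every published index file is committed, so third parties can audit what the repository served over time. A hypothetical direct invocation, mirroring the signature shown in btlog.py (the paths and URL are examples only):

from fdroidserver import btlog

# Run from the root of a tree that contains repo/; commits the current
# index files into ./binary_transparency, creating it if needed.
btlog.make_binary_transparency_log(
    ['repo'],
    btrepo='binary_transparency',
    url='https://example.org/fdroid/repo',
    commit_title='fdroid update',
)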
-import argparse -import glob -import logging import os +import shutil +import glob +import subprocess import posixpath import re -import shutil -import subprocess +import resource +import sys import tarfile -import tempfile import threading -import time import traceback -from gettext import ngettext -from pathlib import Path - +import time import requests +import tempfile +import argparse +from configparser import ConfigParser +import logging +from gettext import ngettext -from . import _, common, metadata, net, scanner, vmtools +from . import _ +from . import common +from . import net +from . import metadata +from . import scanner +from . import vmtools from .common import FDroidPopen -from .exception import BuildException, FDroidException, VCSException +from .exception import FDroidException, BuildException, VCSException try: import paramiko except ImportError: pass -buildserverid = None -ssh_channel = None - # Note that 'force' here also implies test mode. def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): """Do a build on the builder vm. - Parameters - ---------- - app - The metadata of the app to build. - build - The build of the app to build. - vcs - The version control system controller object of the app. - build_dir - The local source-code checkout directory of the app. - output_dir - The target folder for the build result. - log_dir - The directory in the VM where the build logs are getting stored. - force - Don't refresh the already cloned repository and make the build stop on - exceptions. - - Raises - ------ - :exc:`~fdroidserver.exception.BuildException` - If Paramiko is not installed, a srclib directory or srclib metadata - file is unexpectedly missing, the build process in the VM failed or - output files of the build process are missing. - :exc:`~fdroidserver.exception.FDroidException` - If the Buildserver ID could not be obtained or copying a directory to - the server failed. + :param app: app metadata dict + :param build: + :param vcs: version control system controller object + :param build_dir: local source-code checkout of app + :param output_dir: target folder for the build result + :param force: """ - global buildserverid, ssh_channel + + global buildserverid try: paramiko @@ -92,7 +74,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): else: logging.getLogger("paramiko").setLevel(logging.WARN) - sshinfo = vmtools.get_clean_builder('builder') + sshinfo = vmtools.get_clean_builder('builder', options.reset_server) output = None try: @@ -113,7 +95,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): # Open SSH connection... logging.info("Connecting to virtual machine...") sshs = paramiko.SSHClient() - sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # nosec B507 only connects to local VM + sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy()) sshs.connect(sshinfo['hostname'], username=sshinfo['user'], port=sshinfo['port'], timeout=300, look_for_keys=False, key_filename=sshinfo['idfile']) @@ -127,9 +109,9 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): # Put all the necessary files in place... ftp.chdir(homedir) + # Helper to copy the contents of a directory to the server... 
def send_dir(path): - """Copy the contents of a directory to the server.""" - logging.debug("rsyncing %s to %s" % (path, ftp.getcwd())) + logging.debug("rsyncing " + path + " to " + ftp.getcwd()) # TODO this should move to `vagrant rsync` from >= v1.5 try: subprocess.check_output(['rsync', '--recursive', '--perms', '--links', '--quiet', '--rsh=' @@ -144,20 +126,22 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): sshinfo['user'] + "@" + sshinfo['hostname'] + ":" + ftp.getcwd()], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - raise FDroidException(str(e), e.output.decode()) from e + raise FDroidException(str(e), e.output.decode()) logging.info("Preparing server for build...") serverpath = os.path.abspath(os.path.dirname(__file__)) ftp.mkdir('fdroidserver') ftp.chdir('fdroidserver') ftp.put(os.path.join(serverpath, '..', 'fdroid'), 'fdroid') + ftp.put(os.path.join(serverpath, '..', 'gradlew-fdroid'), 'gradlew-fdroid') ftp.chmod('fdroid', 0o755) # nosec B103 permissions are appropriate + ftp.chmod('gradlew-fdroid', 0o755) # nosec B103 permissions are appropriate send_dir(os.path.join(serverpath)) ftp.chdir(homedir) ftp.put(os.path.join(serverpath, '..', 'buildserver', - 'config.buildserver.yml'), 'config.yml') - ftp.chmod('config.yml', 0o600) + 'config.buildserver.py'), 'config.py') + ftp.chmod('config.py', 0o600) # Copy over the ID (head commit hash) of the fdroidserver in use... with open(os.path.join(os.getcwd(), 'tmp', 'fdroidserverid'), 'wb') as fp: @@ -218,12 +202,8 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): send_dir(lib) # Copy the metadata file too... ftp.chdir(posixpath.join(homedir, 'srclibs')) - srclibsfile = os.path.join('srclibs', name + '.yml') - if os.path.isfile(srclibsfile): - ftp.put(srclibsfile, os.path.basename(srclibsfile)) - else: - raise BuildException(_('cannot find required srclibs: "{path}"') - .format(path=srclibsfile)) + ftp.put(os.path.join('srclibs', name + '.txt'), + name + '.txt') # Copy the main app source code # (no need if it's a srclib) if (not basesrclib) and os.path.exists(build_dir): @@ -234,55 +214,51 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): # Execute the build script... logging.info("Starting build...") - ssh_channel = sshs.get_transport().open_session() - ssh_channel.get_pty() + chan = sshs.get_transport().open_session() + chan.get_pty() cmdline = posixpath.join(homedir, 'fdroidserver', 'fdroid') cmdline += ' build --on-server' if force: cmdline += ' --force --test' if options.verbose: cmdline += ' --verbose' - if options.refresh_scanner or config.get('refresh_scanner'): - cmdline += ' --refresh-scanner' if options.skipscan: cmdline += ' --skip-scan' if options.notarball: cmdline += ' --no-tarball' - if (options.scan_binary or config.get('scan_binary')) and not options.skipscan: - cmdline += ' --scan-binary' cmdline += " %s:%s" % (app.id, build.versionCode) - ssh_channel.exec_command('bash --login -c "' + cmdline + '"') # nosec B601 inputs are sanitized + chan.exec_command('bash --login -c "' + cmdline + '"') # nosec B601 inputs are sanitized # Fetch build process output ... 
try: - cmd_stdout = ssh_channel.makefile('rb', 1024) + cmd_stdout = chan.makefile('rb', 1024) output = bytes() - output += common.get_android_tools_version_log().encode() - while not ssh_channel.exit_status_ready(): + output += common.get_android_tools_version_log(build.ndk_path()).encode() + while not chan.exit_status_ready(): line = cmd_stdout.readline() if line: if options.verbose: - logging.debug("buildserver > " + str(line, 'utf-8', 'replace').rstrip()) + logging.debug("buildserver > " + str(line, 'utf-8').rstrip()) output += line else: time.sleep(0.05) for line in cmd_stdout.readlines(): if options.verbose: - logging.debug("buildserver > " + str(line, 'utf-8', 'replace').rstrip()) + logging.debug("buildserver > " + str(line, 'utf-8').rstrip()) output += line finally: cmd_stdout.close() # Check build process exit status ... logging.info("...getting exit status") - returncode = ssh_channel.recv_exit_status() + returncode = chan.recv_exit_status() if returncode != 0: if timeout_event.is_set(): message = "Timeout exceeded! Build VM force-stopped for {0}:{1}" else: message = "Build.py failed on server for {0}:{1}" raise BuildException(message.format(app.id, build.versionName), - str(output, 'utf-8', 'replace')) + None if options.verbose else str(output, 'utf-8')) # Retreive logs... toolsversion_log = common.get_toolsversion_logname(app, build) @@ -291,7 +267,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): ftp.get(toolsversion_log, os.path.join(log_dir, toolsversion_log)) logging.debug('retrieved %s', toolsversion_log) except Exception as e: - logging.warning('could not get %s from builder vm: %s' % (toolsversion_log, e)) + logging.warn('could not get %s from builder vm: %s' % (toolsversion_log, e)) # Retrieve the built files... logging.info("Retrieving build output...") @@ -300,22 +276,21 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): else: ftp.chdir(posixpath.join(homedir, 'unsigned')) apkfile = common.get_release_filename(app, build) - tarball = common.get_src_tarball_name(app.id, build.versionCode) + tarball = common.getsrcname(app, build) try: ftp.get(apkfile, os.path.join(output_dir, apkfile)) if not options.notarball: ftp.get(tarball, os.path.join(output_dir, tarball)) - except Exception as exc: + except Exception: raise BuildException( "Build failed for {0}:{1} - missing output files".format( - app.id, build.versionName), str(output, 'utf-8', 'replace')) from exc + app.id, build.versionName), None if options.verbose else str(output, 'utf-8')) ftp.close() finally: # Suspend the build server. vm = vmtools.get_build_vm('builder') - logging.info('destroying buildserver after build') - vm.destroy() + vm.suspend() # deploy logfile to repository web server if output: @@ -326,15 +301,6 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force): def force_gradle_build_tools(build_dir, build_tools): - """Manipulate build tools version used in top level gradle file. - - Parameters - ---------- - build_dir - The directory to start looking for gradle files. - build_tools - The build tools version that should be forced to use. 
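As a rough illustration of what force_gradle_build_tools() implies (the actual substitution pattern used by fdroidserver is not shown in this hunk, so the regex below is an assumption), a minimal version of the rewrite could look like this:

import os
import re


def force_build_tools_sketch(build_dir, build_tools):
    # Walk the checkout and rewrite any buildToolsVersion declaration found in
    # *.gradle files to the configured version (pattern is illustrative only).
    pattern = r'''buildToolsVersion\s+["'][^"']+["']'''
    for root, dirs, files in os.walk(build_dir):
        for filename in files:
            if not filename.endswith('.gradle'):
                continue
            path = os.path.join(root, filename)
            with open(path) as fp:
                text = fp.read()
            text = re.sub(pattern, 'buildToolsVersion "%s"' % build_tools, text)
            with open(path, 'w') as fp:
                fp.write(text)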
- """ for root, dirs, files in os.walk(build_dir): for filename in files: if not filename.endswith('.gradle'): @@ -349,7 +315,7 @@ def force_gradle_build_tools(build_dir, build_tools): def transform_first_char(string, method): - """Use method() on the first character of string.""" + """Uses method() on the first character of string.""" if len(string) == 0: return string if len(string) == 1: @@ -358,35 +324,11 @@ def transform_first_char(string, method): def get_metadata_from_apk(app, build, apkfile): - """Get the required metadata from the built APK. + """get the required metadata from the built APK - VersionName is allowed to be a blank string, i.e. '' - - Parameters - ---------- - app - The app metadata used to build the APK. - build - The build that resulted in the APK. - apkfile - The path of the APK file. - - Returns - ------- - versionCode - The versionCode from the APK or from the metadata is build.novcheck is - set. - versionName - The versionName from the APK or from the metadata is build.novcheck is - set. - - Raises - ------ - :exc:`~fdroidserver.exception.BuildException` - If native code should have been built but was not packaged, no version - information or no package ID could be found or there is a mismatch - between the package ID in the metadata and the one found in the APK. + versionName is allowed to be a blank string, i.e. '' """ + appid, versionCode, versionName = common.get_apk_id(apkfile) native_code = common.get_native_code(apkfile) @@ -406,74 +348,22 @@ def get_metadata_from_apk(app, build, apkfile): def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh): - """Do a build locally. - - Parameters - ---------- - app - The metadata of the app to build. - build - The build of the app to build. - vcs - The version control system controller object of the app. - build_dir - The local source-code checkout directory of the app. - output_dir - The target folder for the build result. - log_dir - The directory in the VM where the build logs are getting stored. - srclib_dir - The path to the srclibs directory, usually 'build/srclib'. - extlib_dir - The path to the extlibs directory, usually 'build/extlib'. - tmp_dir - The temporary directory for building the source tarball. - force - Don't refresh the already cloned repository and make the build stop on - exceptions. - onserver - Assume the build is happening inside the VM. - refresh - Enable fetching the latest refs from the VCS remote. - - Raises - ------ - :exc:`~fdroidserver.exception.BuildException` - If running a `sudo` command failed, locking the root account failed, - `sudo` couldn't be removed, cleaning the build environment failed, - skipping the scanning has been requested but `scandelete` is present, - errors occurred during scanning, running the `build` commands from the - metadata failed, building native code failed, building with the - specified build method failed, no output could be found with build - method `maven`, more or less than one APK were found with build method - `gradle`, less or more than one APKs match the `output` glob specified - in the metadata, running a `postbuild` command specified in the - metadata failed, the built APK is debuggable, the unsigned APK is not - at the expected location, the APK does not contain the expected - `versionName` and `versionCode` or undesired package names have been - found in the APK. 
- :exc:`~fdroidserver.exception.FDroidException` - If no Android NDK version could be found and the build isn't run in a - builder VM, the selected Android NDK is not a directory. - """ + """Do a build locally.""" ndk_path = build.ndk_path() if build.ndk or (build.buildjni and build.buildjni != ['no']): if not ndk_path: - logging.warning("Android NDK version '%s' could not be found!" % build.ndk) - logging.warning("Configured versions:") + logging.critical("Android NDK version '%s' could not be found!" % build.ndk or 'r12b') + logging.critical("Configured versions:") for k, v in config['ndk_paths'].items(): if k.endswith("_orig"): continue - logging.warning(" %s: %s" % (k, v)) - if onserver: - common.auto_install_ndk(build) - else: - raise FDroidException() + logging.critical(" %s: %s" % (k, v)) + raise FDroidException() elif not os.path.isdir(ndk_path): logging.critical("Android NDK '%s' is not a directory!" % ndk_path) raise FDroidException() - common.set_FDroidPopen_env(app, build) + common.set_FDroidPopen_env(build) # create ..._toolsversion.log when running in builder vm if onserver: @@ -481,8 +371,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext if build.sudo: logging.info("Running 'sudo' commands in %s" % os.getcwd()) - p = FDroidPopen(['sudo', 'DEBIAN_FRONTEND=noninteractive', - 'bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '; '.join(build.sudo)]) + p = FDroidPopen(['sudo', 'bash', '-x', '-c', build.sudo]) if p.returncode != 0: raise BuildException("Error running sudo command for %s:%s" % (app.id, build.versionName), p.output) @@ -500,10 +389,10 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext log_path = os.path.join(log_dir, common.get_toolsversion_logname(app, build)) with open(log_path, 'w') as f: - f.write(common.get_android_tools_version_log()) + f.write(common.get_android_tools_version_log(build.ndk_path())) else: if build.sudo: - logging.warning('%s:%s runs this on the buildserver with sudo:\n\t%s\nThese commands were skipped because fdroid build is not running on a dedicated build server.' + logging.warning('%s:%s runs this on the buildserver with sudo:\n\t%s' % (app.id, build.versionName, build.sudo)) # Prepare the source code... 
@@ -535,13 +424,18 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext if build.preassemble: gradletasks += build.preassemble - flavors = build.gradle - if flavors == ['yes']: - flavors = [] + flavours = build.gradle + if flavours == ['yes']: + flavours = [] - flavors_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavors]) + flavours_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavours]) - gradletasks += ['assemble' + flavors_cmd + 'Release'] + gradletasks += ['assemble' + flavours_cmd + 'Release'] + + if config['force_build_tools']: + force_gradle_build_tools(build_dir, config['build_tools']) + for name, number, libpath in srclibpaths: + force_gradle_build_tools(libpath, config['build_tools']) cmd = [config['gradle']] if build.gradleprops: @@ -550,6 +444,9 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext cmd += ['clean'] p = FDroidPopen(cmd, cwd=root_dir, envs={"GRADLE_VERSION_DIR": config['gradle_version_dir'], "CACHEDIR": config['cachedir']}) + elif bmethod == 'buildozer': + pass + elif bmethod == 'ant': logging.info("Cleaning Ant project...") p = FDroidPopen(['ant', 'clean'], cwd=root_dir) @@ -562,14 +459,15 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext def del_dirs(dl): for d in dl: - shutil.rmtree(os.path.join(root, d), ignore_errors=True) + if d in dirs: + shutil.rmtree(os.path.join(root, d)) def del_files(fl): for f in fl: if f in files: os.remove(os.path.join(root, f)) - if any(f in files for f in ['build.gradle', 'build.gradle.kts', 'settings.gradle', 'settings.gradle.kts']): + if any(f in files for f in ['build.gradle', 'settings.gradle']): # Even when running clean, gradle stores task/artifact caches in # .gradle/ as binary files. To avoid overcomplicating the scanner, # manually delete them, just like `gradle clean` should have removed @@ -580,7 +478,6 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext os.path.join('build', 'outputs'), os.path.join('build', 'reports'), os.path.join('build', 'tmp'), - os.path.join('buildSrc', 'build'), '.gradle']) del_files(['gradlew', 'gradlew.bat']) @@ -599,7 +496,6 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext else: # Scan before building... logging.info("Scanning source for common problems...") - scanner.options = options # pass verbose through count = scanner.scan_source(build_dir, build) if count > 0: if force: @@ -613,7 +509,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext if not options.notarball: # Build the source tarball right before we build the release... logging.info("Creating source tarball...") - tarname = common.get_src_tarball_name(app.id, build.versionCode) + tarname = common.getsrcname(app, build) tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz") def tarexc(t): @@ -624,13 +520,14 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext # Run a build command if one is required... if build.build: logging.info("Running 'build' commands in %s" % root_dir) - cmd = common.replace_config_vars("; ".join(build.build), build) + cmd = common.replace_config_vars(build.build, build) # Substitute source library paths into commands... 
for name, number, libpath in srclibpaths: - cmd = cmd.replace('$$' + name + '$$', os.path.join(os.getcwd(), libpath)) + libpath = os.path.relpath(libpath, root_dir) + cmd = cmd.replace('$$' + name + '$$', libpath) - p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running build command for %s:%s" % @@ -692,6 +589,73 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext bindir = os.path.join(root_dir, 'target') + elif bmethod == 'buildozer': + logging.info("Building Kivy project using buildozer...") + + # parse buildozer.spez + spec = os.path.join(root_dir, 'buildozer.spec') + if not os.path.exists(spec): + raise BuildException("Expected to find buildozer-compatible spec at {0}" + .format(spec)) + defaults = {'orientation': 'landscape', 'icon': '', + 'permissions': '', 'android.api': "19"} + bconfig = ConfigParser(defaults, allow_no_value=True) + bconfig.read(spec) + + # update spec with sdk and ndk locations to prevent buildozer from + # downloading. + loc_ndk = common.env['ANDROID_NDK'] + loc_sdk = common.env['ANDROID_SDK'] + if loc_ndk == '$ANDROID_NDK': + loc_ndk = loc_sdk + '/ndk-bundle' + + bc_ndk = None + bc_sdk = None + try: + bc_ndk = bconfig.get('app', 'android.sdk_path') + except Exception: + pass + try: + bc_sdk = bconfig.get('app', 'android.ndk_path') + except Exception: + pass + + if bc_sdk is None: + bconfig.set('app', 'android.sdk_path', loc_sdk) + if bc_ndk is None: + bconfig.set('app', 'android.ndk_path', loc_ndk) + + fspec = open(spec, 'w') + bconfig.write(fspec) + fspec.close() + + logging.info("sdk_path = %s" % loc_sdk) + logging.info("ndk_path = %s" % loc_ndk) + + p = None + # execute buildozer + cmd = ['buildozer', 'android', 'release'] + try: + p = FDroidPopen(cmd, cwd=root_dir) + except Exception: + pass + + # buidozer not installed ? clone repo and run + if (p is None or p.returncode != 0): + cmd = ['git', 'clone', 'https://github.com/kivy/buildozer.git'] + p = subprocess.Popen(cmd, cwd=root_dir, shell=False) + p.wait() + if p.returncode != 0: + raise BuildException("Distribute build failed") + + cmd = ['python', 'buildozer/buildozer/scripts/client.py', 'android', 'release'] + p = FDroidPopen(cmd, cwd=root_dir) + + # expected to fail. + # Signing will fail if not set by environnment vars (cf. p4a docs). + # But the unsigned apk will be ok. 
+ p.returncode = 0 + elif bmethod == 'gradle': logging.info("Building Gradle project...") @@ -714,16 +678,9 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext bindir = os.path.join(root_dir, 'bin') - if os.path.isdir(os.path.join(build_dir, '.git')): - commit_id = str(common.get_head_commit_id(build_dir)) - else: - commit_id = build.commit - if p is not None and p.returncode != 0: - raise BuildException("Build failed for %s:%s@%s" % (app.id, build.versionName, commit_id), - p.output) - logging.info("Successfully built version {versionName} of {appid} from {commit_id}" - .format(versionName=build.versionName, appid=app.id, commit_id=commit_id)) + raise BuildException("Build failed for %s:%s" % (app.id, build.versionName), p.output) + logging.info("Successfully built version " + build.versionName + ' of ' + app.id) omethod = build.output_method() if omethod == 'maven': @@ -747,6 +704,26 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext src = m.group(1) src = os.path.join(bindir, src) + '.apk' + elif omethod == 'buildozer': + src = None + for apks_dir in [ + os.path.join(root_dir, '.buildozer', 'android', 'platform', 'build', 'dists', bconfig.get('app', 'title'), 'bin'), + ]: + for apkglob in ['*-release-unsigned.apk', '*-unsigned.apk', '*.apk']: + apks = glob.glob(os.path.join(apks_dir, apkglob)) + + if len(apks) > 1: + raise BuildException('More than one resulting apks found in %s' % apks_dir, + '\n'.join(apks)) + if len(apks) == 1: + src = apks[0] + break + if src is not None: + break + + if src is None: + raise BuildException('Failed to find any output apks') + elif omethod == 'gradle': src = None apk_dirs = [ @@ -757,11 +734,11 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext # really old path os.path.join(root_dir, 'build', 'apk'), ] - # If we build with gradle flavors with gradle plugin >= 3.0 the APK will be in - # a subdirectory corresponding to the flavor command used, but with different + # If we build with gradle flavours with gradle plugin >= 3.0 the apk will be in + # a subdirectory corresponding to the flavour command used, but with different # capitalization. - if flavors_cmd: - apk_dirs.append(os.path.join(root_dir, 'build', 'outputs', 'apk', transform_first_char(flavors_cmd, str.lower), 'release')) + if flavours_cmd: + apk_dirs.append(os.path.join(root_dir, 'build', 'outputs', 'apk', transform_first_char(flavours_cmd, str.lower), 'release')) for apks_dir in apk_dirs: for apkglob in ['*-release-unsigned.apk', '*-unsigned.apk', '*.apk']: apks = glob.glob(os.path.join(apks_dir, apkglob)) @@ -794,55 +771,28 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext raise BuildException('No apks match %s' % globpath) src = os.path.normpath(apks[0]) - # Run a postbuild command if one is required... - if build.postbuild: - logging.info(f"Running 'postbuild' commands in {root_dir}") - cmd = common.replace_config_vars("; ".join(build.postbuild), build) - - # Substitute source library paths into commands... - for name, number, libpath in srclibpaths: - cmd = cmd.replace(f"$${name}$$", str(Path.cwd() / libpath)) - - cmd = cmd.replace('$$OUT$$', str(Path(src).resolve())) - - p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', cmd], cwd=root_dir) - - if p.returncode != 0: - raise BuildException("Error running postbuild command for " - f"{app.id}:{build.versionName}", p.output) - # Make sure it's not debuggable... 
- if common.is_debuggable_or_testOnly(src): - raise BuildException( - "%s: debuggable or testOnly set in AndroidManifest.xml" % src - ) + if common.is_apk_and_debuggable(src): + raise BuildException("APK is debuggable") # By way of a sanity check, make sure the version and version - # code in our new APK match what we expect... + # code in our new apk match what we expect... logging.debug("Checking " + src) if not os.path.exists(src): - raise BuildException("Unsigned APK is not at expected location of " + src) + raise BuildException("Unsigned apk is not at expected location of " + src) if common.get_file_extension(src) == 'apk': vercode, version = get_metadata_from_apk(app, build, src) if version != build.versionName or vercode != build.versionCode: raise BuildException(("Unexpected version/version code in output;" - " APK: '%s' / '%d', " - " Expected: '%s' / '%d'") - % (version, vercode, build.versionName, - build.versionCode)) - if (options.scan_binary or config.get('scan_binary')) and not options.skipscan: - if scanner.scan_binary(src): - raise BuildException("Found blocklisted packages in final apk!") + " APK: '%s' / '%s', " + " Expected: '%s' / '%s'") + % (version, str(vercode), build.versionName, + str(build.versionCode))) - # Copy the unsigned APK to our destination directory for further + # Copy the unsigned apk to our destination directory for further # processing (by publish.py)... - dest = os.path.join( - output_dir, - common.get_release_filename( - app, build, common.get_file_extension(src) - ) - ) + dest = os.path.join(output_dir, common.get_release_filename(app, build)) shutil.copyfile(src, dest) # Move the source tarball into the output directory... @@ -854,53 +804,23 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir, srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, test, server, force, onserver, refresh): - """Build a particular version of an application, if it needs building. - - Parameters - ---------- - app - The metadata of the app to build. - build - The build of the app to build. - build_dir - The local source-code checkout directory of the app. - output_dir - The directory where the build output will go. Usually this is the - 'unsigned' directory. - log_dir - The directory in the VM where the build logs are getting stored. - also_check_dir - An additional location for checking if the build is necessary (usually - the archive repo). - srclib_dir - The path to the srclibs directory, usually 'build/srclib'. - extlib_dir - The path to the extlibs directory, usually 'build/extlib'. - tmp_dir - The temporary directory for building the source tarball of the app to - build. - repo_dir - The repo directory - used for checking if the build is necessary. - vcs - The version control system controller object of the app to build. - test - True if building in test mode, in which case the build will always - happen, even if the output already exists. In test mode, the output - directory should be a temporary location, not any of the real ones. - server - Use buildserver VM for building. - force - Build app regardless of disabled state or scanner errors. - onserver - Assume the build is happening inside the VM. - refresh - Enable fetching the latest refs from the VCS remote. - - Returns - ------- - status - True if the build was done, False if it wasn't necessary. """ + Build a particular version of an application, if it needs building. 
+ + :param output_dir: The directory where the build output will go. Usually + this is the 'unsigned' directory. + :param repo_dir: The repo directory - used for checking if the build is + necessary. + :param also_check_dir: An additional location for checking if the build + is necessary (usually the archive repo) + :param test: True if building in test mode, in which case the build will + always happen, even if the output already exists. In test mode, the + output directory should be a temporary location, not any of the real + ones. + + :returns: True if the build was done, False if it wasn't necessary. + """ + dest_file = common.get_release_filename(app, build) dest = os.path.join(output_dir, dest_file) @@ -926,10 +846,6 @@ def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir, # grabbing the source now. vcs.gotorevision(build.commit, refresh) - # Initialise submodules if required - if build.submodules: - vcs.initsubmodules() - build_server(app, build, vcs, build_dir, output_dir, log_dir, force) else: build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh) @@ -937,43 +853,19 @@ def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir, def force_halt_build(timeout): - """Halt the currently running Vagrant VM, to be called from a Timer. - - Parameters - ---------- - timeout - The timeout in seconds. - """ + """Halt the currently running Vagrant VM, to be called from a Timer""" logging.error(_('Force halting build after {0} sec timeout!').format(timeout)) timeout_event.set() - if ssh_channel: - ssh_channel.close() vm = vmtools.get_build_vm('builder') - vm.destroy() - - -def keep_when_not_allowed(): - """Control if APKs signed by keys not in AllowedAPKSigningKeys are removed.""" - return ( - (options is not None and options.keep_when_not_allowed) - or (config is not None and config.get('keep_when_not_allowed')) - or common.default_config['keep_when_not_allowed'] - ) + vm.halt() def parse_commandline(): - """Parse the command line. + """Parse the command line. Returns options, parser.""" - Returns - ------- - options - The resulting options parsed from the command line arguments. - parser - The argument parser. - """ parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") common.setup_global_opts(parser) - parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]")) + parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) parser.add_argument("-l", "--latest", action="store_true", default=False, help=_("Build only the latest version of each package")) parser.add_argument("-s", "--stop", action="store_true", default=False, @@ -982,30 +874,28 @@ def parse_commandline(): help=_("Test mode - put output in the tmp directory only, and always build, even if the output already exists.")) parser.add_argument("--server", action="store_true", default=False, help=_("Use build server")) + parser.add_argument("--reset-server", action="store_true", default=False, + help=_("Reset and create a brand new build server, even if the existing one appears to be ok.")) # this option is internal API for telling fdroid that # it's running inside a buildserver vm. 
parser.add_argument("--on-server", dest="onserver", action="store_true", default=False, help=argparse.SUPPRESS) parser.add_argument("--skip-scan", dest="skipscan", action="store_true", default=False, help=_("Skip scanning the source code for binaries and other problems")) - parser.add_argument("--scan-binary", action="store_true", default=False, - help=_("Scan the resulting APK(s) for known non-free classes.")) + parser.add_argument("--dscanner", action="store_true", default=False, + help=_("Setup an emulator, install the APK on it and perform a Drozer scan")) parser.add_argument("--no-tarball", dest="notarball", action="store_true", default=False, help=_("Don't create a source tarball, useful when testing a build")) parser.add_argument("--no-refresh", dest="refresh", action="store_false", default=True, help=_("Don't refresh the repository, useful when testing a build with no internet connection")) - parser.add_argument("-r", "--refresh-scanner", dest="refresh_scanner", action="store_true", default=False, - help=_("Refresh and cache scanner rules and signatures from the network")) parser.add_argument("-f", "--force", action="store_true", default=False, help=_("Force build of disabled apps, and carries on regardless of scan problems. Only allowed in test mode.")) parser.add_argument("-a", "--all", action="store_true", default=False, help=_("Build all applications available")) - parser.add_argument("--keep-when-not-allowed", default=False, action="store_true", - help=argparse.SUPPRESS) parser.add_argument("-w", "--wiki", default=False, action="store_true", - help=argparse.SUPPRESS) + help=_("Update the wiki")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W # Force --stop with --on-server to get correct exit code @@ -1020,29 +910,14 @@ def parse_commandline(): options = None config = None +buildserverid = None fdroidserverid = None start_timestamp = time.gmtime() -status_output = None timeout_event = threading.Event() def main(): - """Build a package from source. - The behaviour of this function is influenced by the configuration file as - well as command line parameters. - - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If more than one local metadata file has been found, no app metadata - has been found, there are no apps to process, downloading binaries for - checking the reproducibility of a built binary failed, the built binary - is different from supplied reference binary, the reference binary is - signed with a different signing key than expected, a VCS error occured - while building an app or a different error occured while building an - app. 
- """ global options, config, buildserverid, fdroidserverid options, parser = parse_commandline() @@ -1067,10 +942,18 @@ def main(): if not options.appid and not options.all: parser.error("option %s: If you really want to build all the apps, use --all" % "all") - config = common.read_config() + config = common.read_config(options) if config['build_server_always']: options.server = True + if options.reset_server and not options.server: + parser.error("option %s: Using --reset-server without --server makes no sense" % "reset-server") + + if options.onserver or not options.server: + for d in ['build-tools', 'platform-tools', 'tools']: + if not os.path.isdir(os.path.join(config['sdk_path'], d)): + raise FDroidException(_("Android SDK '{path}' does not have '{dirname}' installed!") + .format(path=config['sdk_path'], dirname=d)) log_dir = 'logs' if not os.path.isdir(log_dir): @@ -1096,11 +979,6 @@ def main(): else: also_check_dir = None - if options.onserver: - status_output = dict() # HACK dummy placeholder - else: - status_output = common.setup_status_output(start_timestamp) - repo_dir = 'repo' build_dir = 'build' @@ -1110,57 +988,56 @@ def main(): srclib_dir = os.path.join(build_dir, 'srclib') extlib_dir = os.path.join(build_dir, 'extlib') - apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True) + # Read all app and srclib metadata + pkgs = common.read_pkg_args(options.appid, True) + allapps = metadata.read_metadata(not options.onserver, pkgs, options.refresh, sort_by_time=True) + apps = common.read_app_args(options.appid, allapps, True) for appid, app in list(apps.items()): - if (app.get('Disabled') and not options.force) or not app.get('RepoType') or not app.get('Builds', []): + if (app.Disabled and not options.force) or not app.RepoType or not app.builds: del apps[appid] if not apps: raise FDroidException("No apps to process.") # make sure enough open files are allowed to process everything - try: - import resource # not available on Windows - - soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) - if len(apps) > soft: - try: - soft = len(apps) * 2 - if soft > hard: - soft = hard - resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) - logging.debug(_('Set open file limit to {integer}') - .format(integer=soft)) - except (OSError, ValueError) as e: - logging.warning(_('Setting open file limit failed: ') + str(e)) - except ImportError: - pass + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + if len(apps) > soft: + try: + soft = len(apps) * 2 + if soft > hard: + soft = hard + resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) + logging.debug(_('Set open file limit to {integer}') + .format(integer=soft)) + except (OSError, ValueError) as e: + logging.warning(_('Setting open file limit failed: ') + str(e)) if options.latest: for app in apps.values(): - for build in reversed(app.get('Builds', [])): + for build in reversed(app.builds): if build.disable and not options.force: continue - app['Builds'] = [build] + app.builds = [build] break - if not options.onserver: - common.write_running_status_json(status_output) + if options.wiki: + import mwclient + site = mwclient.Site((config['wiki_protocol'], config['wiki_server']), + path=config['wiki_path']) + site.login(config['wiki_user'], config['wiki_password']) # Build applications... - failed_builds = [] - build_succeeded_ids = [] - status_output['failedBuilds'] = failed_builds - status_output['successfulBuildIds'] = build_succeeded_ids - # Only build for 72 hours, then stop gracefully. 
- endtime = time.time() + 72 * 60 * 60 + failed_apps = {} + build_succeeded = [] + # Only build for 36 hours, then stop gracefully. + endtime = time.time() + 36 * 60 * 60 max_build_time_reached = False for appid, app in apps.items(): first = True - for build in app.get('Builds', []): + for build in app.builds: if time.time() > endtime: max_build_time_reached = True break @@ -1169,7 +1046,7 @@ def main(): if build.timeout is None: timeout = 7200 else: - timeout = build.timeout + timeout = int(build.timeout) if options.server and timeout > 0: logging.debug(_('Setting {0} sec timeout for this build').format(timeout)) timer = threading.Timer(timeout, force_halt_build, [timeout]) @@ -1178,7 +1055,11 @@ def main(): else: timer = None + wikilog = None + build_starttime = common.get_wiki_timestamp() tools_version_log = '' + if not options.onserver: + tools_version_log = common.get_android_tools_version_log(build.ndk_path()) try: # For the first build of a particular app, we need to set up @@ -1188,7 +1069,8 @@ def main(): vcs, build_dir = common.setup_vcs(app) first = False - logging.debug("Checking %s:%s" % (appid, build.versionCode)) + logging.info("Using %s" % vcs.clientversion()) + logging.debug("Checking " + build.versionName) if trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir, srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, options.test, @@ -1201,9 +1083,9 @@ def main(): tools_version_log = ''.join(f.readlines()) os.remove(toolslog) - if url := build.binary or app.Binaries: + if app.Binaries is not None: # This is an app where we build from source, and - # verify the APK contents against a developer's + # verify the apk contents against a developer's # binary. We get that binary now, and save it # alongside our built one in the 'unsigend' # directory. 
@@ -1213,10 +1095,11 @@ def main(): "developer supplied reference " "binaries: '{path}'" .format(path=binaries_dir)) + url = app.Binaries url = url.replace('%v', build.versionName) url = url.replace('%c', str(build.versionCode)) logging.info("...retrieving " + url) - of = re.sub(r'\.apk$', '.binary.apk', common.get_release_filename(app, build)) + of = re.sub(r'.apk$', '.binary.apk', common.get_release_filename(app, build)) of = os.path.join(binaries_dir, of) try: net.download_file(url, local_filename=of) @@ -1236,12 +1119,8 @@ def main(): compare_result = \ common.verify_apks(of, unsigned_apk, tmpdir) if compare_result: - if options.test: - logging.warning(_('Keeping failed build "{apkfilename}"') - .format(apkfilename=unsigned_apk)) - else: - logging.debug('removing %s', unsigned_apk) - os.remove(unsigned_apk) + logging.debug('removing %s', unsigned_apk) + os.remove(unsigned_apk) logging.debug('removing %s', of) os.remove(of) compare_result = compare_result.split('\n') @@ -1262,136 +1141,154 @@ def main(): 'supplied reference binary ' 'successfully') - used_key = common.apk_signer_fingerprint(of) - expected_keys = app['AllowedAPKSigningKeys'] - if used_key is None: - logging.warn(_('reference binary missing ' - 'signature')) - elif len(expected_keys) == 0: - logging.warn(_('AllowedAPKSigningKeys missing ' - 'but reference binary supplied')) - elif used_key not in expected_keys: - if options.test or keep_when_not_allowed(): - logging.warning(_('Keeping failed build "{apkfilename}"') - .format(apkfilename=unsigned_apk)) - else: - logging.debug('removing %s', unsigned_apk) - os.remove(unsigned_apk) - logging.debug('removing %s', of) - os.remove(of) - raise FDroidException('supplied reference ' - 'binary signed with ' - '{signer} instead of ' - 'with {expected}'. - format(signer=used_key, - expected=expected_keys)) - else: - logging.info(_('supplied reference binary has ' - 'allowed signer {signer}'). 
- format(signer=used_key)) - - build_succeeded_ids.append([app['id'], build.versionCode]) - - if not options.onserver: - common.write_running_status_json(status_output) + build_succeeded.append(app) + wikilog = "Build succeeded" except VCSException as vcse: reason = str(vcse).split('\n', 1)[0] if options.verbose else str(vcse) logging.error("VCS error while building app %s: %s" % ( appid, reason)) if options.stop: - logging.debug("Error encountered, stopping by user request.") + logging.debug("Error encoutered, stopping by user request.") common.force_exit(1) - failed_builds.append((appid, build.versionCode)) - common.deploy_build_log_with_rsync( - appid, build.versionCode, "".join(traceback.format_exc()) - ) - if not options.onserver: - common.write_running_status_json(status_output) - + failed_apps[appid] = vcse + wikilog = str(vcse) except FDroidException as e: - tstamp = time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) with open(os.path.join(log_dir, appid + '.log'), 'a+') as f: f.write('\n\n============================================================\n') f.write('versionCode: %s\nversionName: %s\ncommit: %s\n' % (build.versionCode, build.versionName, build.commit)) f.write('Build completed at ' - + tstamp + '\n') + + common.get_wiki_timestamp() + '\n') f.write('\n' + tools_version_log + '\n') f.write(str(e)) logging.error("Could not build app %s: %s" % (appid, e)) if options.stop: - logging.debug("Error encountered, stopping by user request.") + logging.debug("Error encoutered, stopping by user request.") common.force_exit(1) - failed_builds.append((appid, build.versionCode)) - common.deploy_build_log_with_rsync( - appid, build.versionCode, "".join(traceback.format_exc()) - ) - if not options.onserver: - common.write_running_status_json(status_output) - - except Exception: + failed_apps[appid] = e + wikilog = e.get_wikitext() + except Exception as e: logging.error("Could not build app %s due to unknown error: %s" % ( appid, traceback.format_exc())) if options.stop: - logging.debug("Error encountered, stopping by user request.") + logging.debug("Error encoutered, stopping by user request.") common.force_exit(1) - failed_builds.append((appid, build.versionCode)) - common.deploy_build_log_with_rsync( - appid, build.versionCode, "".join(traceback.format_exc()) - ) - if not options.onserver: - common.write_running_status_json(status_output) + failed_apps[appid] = e + wikilog = str(e) + + if options.wiki and wikilog: + try: + # Write a page with the last build log for this version code + lastbuildpage = appid + '/lastbuild_' + build.versionCode + newpage = site.Pages[lastbuildpage] + with open(os.path.join('tmp', 'fdroidserverid')) as fp: + fdroidserverid = fp.read().rstrip() + txt = "* build session started at " + common.get_wiki_timestamp(start_timestamp) + '\n' \ + + "* this build started at " + build_starttime + '\n' \ + + "* this build completed at " + common.get_wiki_timestamp() + '\n' \ + + common.get_git_describe_link() \ + + '* fdroidserverid: [https://gitlab.com/fdroid/fdroidserver/commit/' \ + + fdroidserverid + ' ' + fdroidserverid + ']\n\n' + if buildserverid: + txt += '* buildserverid: [https://gitlab.com/fdroid/fdroidserver/commit/' \ + + buildserverid + ' ' + buildserverid + ']\n\n' + txt += tools_version_log + '\n\n' + txt += '== Build Log ==\n\n' + wikilog + newpage.save(txt, summary='Build log') + # Redirect from /lastbuild to the most recent build log + newpage = site.Pages[appid + '/lastbuild'] + newpage.save('#REDIRECT [[' + lastbuildpage + ']]', summary='Update 
redirect') + except Exception as e: + logging.error("Error while attempting to publish build log: %s" % e) if timer: timer.cancel() # kill the watchdog timer if max_build_time_reached: - status_output['maxBuildTimeReached'] = True logging.info("Stopping after global build timeout...") break - for app in build_succeeded_ids: - logging.info("success: %s" % app[0]) + for app in build_succeeded: + logging.info("success: %s" % (app.id)) if not options.verbose: - for fb in failed_builds: - logging.info('Build for app {}:{} failed'.format(*fb)) + for fa in failed_apps: + logging.info("Build for app %s failed:\n%s" % (fa, failed_apps[fa])) + + # perform a drozer scan of all successful builds + if options.dscanner and build_succeeded: + from .dscanner import DockerDriver + + docker = DockerDriver() + + try: + for app in build_succeeded: + + logging.info("Need to sign the app before we can install it.") + subprocess.call("fdroid publish {0}".format(app.id)) + + apk_path = None + + for f in os.listdir(repo_dir): + if f.endswith('.apk') and f.startswith(app.id): + apk_path = os.path.join(repo_dir, f) + break + + if not apk_path: + raise Exception("No signed APK found at path: {path}".format(path=apk_path)) + + if not os.path.isdir(repo_dir): + logging.critical("directory does not exists '{path}'".format(path=repo_dir)) + common.force_exit(1) + + logging.info("Performing Drozer scan on {0}.".format(app)) + docker.perform_drozer_scan(apk_path, app.id, repo_dir) + except Exception as e: + logging.error(str(e)) + logging.error("An exception happened. Making sure to clean up") + else: + logging.info("Scan succeeded.") + + logging.info("Cleaning up after ourselves.") + docker.clean() logging.info(_("Finished")) - if len(build_succeeded_ids) > 0: + if len(build_succeeded) > 0: logging.info(ngettext("{} build succeeded", - "{} builds succeeded", len(build_succeeded_ids)).format(len(build_succeeded_ids))) - if len(failed_builds) > 0: + "{} builds succeeded", len(build_succeeded)).format(len(build_succeeded))) + if len(failed_apps) > 0: logging.info(ngettext("{} build failed", - "{} builds failed", len(failed_builds)).format(len(failed_builds))) + "{} builds failed", len(failed_apps)).format(len(failed_apps))) - if options.server: + if options.wiki: + wiki_page_path = 'build_' + time.strftime('%s', start_timestamp) + newpage = site.Pages[wiki_page_path] + txt = '' + txt += "* command line: %s\n" % ' '.join(sys.argv) + txt += "* started at %s\n" % common.get_wiki_timestamp(start_timestamp) + txt += "* completed at %s\n" % common.get_wiki_timestamp() + if buildserverid: + txt += ('* buildserverid: [https://gitlab.com/fdroid/fdroidserver/commit/{id} {id}]\n' + .format(id=buildserverid)) + if fdroidserverid: + txt += ('* fdroidserverid: [https://gitlab.com/fdroid/fdroidserver/commit/{id} {id}]\n' + .format(id=fdroidserverid)) if os.cpu_count(): - status_output['hostOsCpuCount'] = os.cpu_count() + txt += "* host processors: %d\n" % os.cpu_count() if os.path.isfile('/proc/meminfo') and os.access('/proc/meminfo', os.R_OK): with open('/proc/meminfo') as fp: for line in fp: m = re.search(r'MemTotal:\s*([0-9].*)', line) if m: - status_output['hostProcMeminfoMemTotal'] = m.group(1) + txt += "* host RAM: %s\n" % m.group(1) break - buildserver_config = 'builder/Vagrantfile' - if os.path.isfile(buildserver_config) and os.access(buildserver_config, os.R_OK): - with open(buildserver_config) as configfile: - for line in configfile: - m = re.search(r'cpus\s*=\s*([0-9].*)', line) - if m: - status_output['guestVagrantVmCpus'] = 
m.group(1) - m = re.search(r'memory\s*=\s*([0-9].*)', line) - if m: - status_output['guestVagrantVmMemory'] = m.group(1) - - if buildserverid: - status_output['buildserver'] = {'commitId': buildserverid} - - if not options.onserver: - common.write_status_json(status_output) + txt += "* successful builds: %d\n" % len(build_succeeded) + txt += "* failed builds: %d\n" % len(failed_apps) + txt += "\n\n" + newpage.save(txt, summary='Run log') + newpage = site.Pages['build'] + newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect') # hack to ensure this exits, even is some threads are still running common.force_exit() diff --git a/fdroidserver/checkupdates.py b/fdroidserver/checkupdates.py index e7945910..d8d6af43 100644 --- a/fdroidserver/checkupdates.py +++ b/fdroidserver/checkupdates.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -"""Check for updates to applications.""" # # checkupdates.py - part of the FDroid server tools # Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com @@ -18,341 +17,298 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import configparser -import copy -import logging import os import re +import urllib.request +import urllib.error +import time import subprocess import sys -import time -import traceback -import urllib.error -import urllib.parse -import urllib.request from argparse import ArgumentParser -from pathlib import Path -from typing import Optional +import traceback +import html +from distutils.version import LooseVersion +import logging +import copy +import urllib.parse -import git - -from . import _, common, metadata, net -from .exception import ( - FDroidException, - MetaDataException, - NoSubmodulesException, - VCSException, -) - -# https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36 -BOT_EMAIL = 'fdroidci@bubu1.eu' +from . import _ +from . import common +from . import metadata +from .exception import VCSException, NoSubmodulesException, FDroidException, MetaDataException -def check_http(app: metadata.App) -> tuple[Optional[str], Optional[int]]: - """Check for a new version by looking at a document retrieved via HTTP. +# Check for a new version by looking at a document retrieved via HTTP. +# The app's Update Check Data field is used to provide the information +# required. +def check_http(app): - The app's UpdateCheckData field is used to provide the information - required. + try: - Parameters - ---------- - app - The App instance to check for updates for. + if not app.UpdateCheckData: + raise FDroidException('Missing Update Check Data') - Returns - ------- - version - The found versionName or None if the versionName should be ignored - according to UpdateCheckIgnore. - vercode - The found versionCode or None if the versionCode should be ignored - according to UpdateCheckIgnore. - - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If UpdateCheckData is missing or is an invalid URL or if there is no - match for the provided versionName or versionCode regex. 
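A small worked example of the UpdateCheckData format that check_http() parses may help here; the URL, regexes, and sample page below are made up for illustration and are not taken from any real app.

import re

# UpdateCheckData is four '|'-separated fields:
#   <versionCode URL> | <versionCode regex> | <versionName URL> | <versionName regex>
# Both URLs must be https; a versionName URL of '.' means the versionName is
# read from the same document that was fetched for the versionCode.
ucd = r'https://example.org/app/build.gradle|versionCode (\d+)|.|versionName "([^"]+)"'
urlcode, codeex, urlver, verex = ucd.split('|')

page = 'versionCode 42\nversionName "1.2.3"'  # stand-in for the fetched document
vercode = re.search(codeex, page).group(1).strip()  # -> '42'
version = re.search(verex, page).group(1)           # -> '1.2.3'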
- """ - if not app.UpdateCheckData: - raise FDroidException('Missing Update Check Data') - - urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') - parsed = urllib.parse.urlparse(urlcode) - if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https': - raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlcode)) - if urlver != '.': - parsed = urllib.parse.urlparse(urlver) + urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') + parsed = urllib.parse.urlparse(urlcode) if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https': - raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlver)) + raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlcode)) + if urlver != '.': + parsed = urllib.parse.urlparse(urlver) + if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https': + raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlcode)) - logging.debug("...requesting {0}".format(urlcode)) - req = urllib.request.Request(urlcode, None, headers=net.HEADERS) - resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above - page = resp.read().decode('utf-8') + vercode = None + if len(urlcode) > 0: + logging.debug("...requesting {0}".format(urlcode)) + req = urllib.request.Request(urlcode, None) + resp = urllib.request.urlopen(req, None, 20) + page = resp.read().decode('utf-8') - m = re.search(codeex, page) - if not m: - raise FDroidException("No RE match for versionCode") - vercode = common.version_code_string_to_int(m.group(1).strip()) + m = re.search(codeex, page) + if not m: + raise FDroidException("No RE match for version code") + vercode = m.group(1).strip() - if urlver != '.': - logging.debug("...requesting {0}".format(urlver)) - req = urllib.request.Request(urlver, None) - resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above - page = resp.read().decode('utf-8') + version = "??" + if len(urlver) > 0: + if urlver != '.': + logging.debug("...requesting {0}".format(urlver)) + req = urllib.request.Request(urlver, None) + resp = urllib.request.urlopen(req, None, 20) + page = resp.read().decode('utf-8') - m = re.search(verex, page) - if not m: - raise FDroidException("No RE match for version") - version = m.group(1) + m = re.search(verex, page) + if not m: + raise FDroidException("No RE match for version") + version = m.group(1) - if app.UpdateCheckIgnore and re.search(app.UpdateCheckIgnore, version): - logging.info("Version {version} for {appid} is ignored".format(version=version, appid=app.id)) - return (None, None) + return (version, vercode) - return (version, vercode) + except FDroidException: + msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) + return (None, msg) -def check_tags(app: metadata.App, pattern: str) -> tuple[str, int, str]: - """Check for a new version by looking at the tags in the source repo. +# Check for a new version by looking at the tags in the source repo. +# Whether this can be used reliably or not depends on +# the development procedures used by the project's developers. Use it with +# caution, because it's inappropriate for many projects. +# Returns (None, "a message") if this didn't work, or (version, vercode, tag) for +# the details of the current version. +def check_tags(app, pattern): - Whether this can be used reliably or not depends on - the development procedures used by the project's developers. 
Use it with - caution, because it's inappropriate for many projects. + try: - Parameters - ---------- - app - The App instance to check for updates for. - pattern - The pattern a tag needs to match to be considered. + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) + else: + build_dir = os.path.join('build', app.id) + repotype = app.RepoType - Returns - ------- - versionName - The highest found versionName. - versionCode - The highest found versionCode. - ref - The Git reference, commit hash or tag name, of the highest found - versionName, versionCode. + if repotype not in ('git', 'git-svn', 'hg', 'bzr'): + return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None) - Raises - ------ - :exc:`~fdroidserver.exception.MetaDataException` - If this function is not suitable for the RepoType of the app or - information is missing to perform this type of check. - :exc:`~fdroidserver.exception.FDroidException` - If no matching tags or no information whatsoever could be found. - """ - if app.RepoType == 'srclib': - build_dir = Path('build/srclib') / app.Repo - repotype = common.getsrclibvcs(app.Repo) - else: - build_dir = Path('build') / app.id - repotype = app.RepoType + if repotype == 'git-svn' and ';' not in app.Repo: + return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None) - if repotype not in ('git', 'git-svn', 'hg', 'bzr'): - raise MetaDataException(_('Tags update mode only works for git, hg, bzr and git-svn repositories currently')) + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) - if repotype == 'git-svn' and ';' not in app.Repo: - raise MetaDataException(_('Tags update mode used in git-svn, but the repo was not set up with tags')) + vcs.gotorevision(None) - # Set up vcs interface and make sure we have the latest code... 
- vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + last_build = app.get_last_build() - vcs.gotorevision(None) - - last_build = get_last_build_from_app(app) - - try_init_submodules(app, last_build, vcs) - - htag = None - hver = None - hcode = 0 - - tags = [] - if repotype == 'git': - tags = vcs.latesttags() - else: - tags = vcs.gettags() - if not tags: - raise FDroidException(_('No tags found')) - - logging.debug("All tags: " + ','.join(tags)) - if pattern: - pat = re.compile(pattern) - tags = [tag for tag in tags if pat.match(tag)] - if not tags: - raise FDroidException(_('No matching tags found')) - logging.debug("Matching tags: " + ','.join(tags)) - - if len(tags) > 5 and repotype == 'git': - tags = tags[:5] - logging.debug("Latest tags: " + ','.join(tags)) - - for tag in tags: - logging.debug("Check tag: '{0}'".format(tag)) - vcs.gotorevision(tag) try_init_submodules(app, last_build, vcs) - if app.UpdateCheckData: - filecode, codeex, filever, verex = app.UpdateCheckData.split('|') + hpak = None + htag = None + hver = None + hcode = "0" - if filecode: - filecode = build_dir / filecode - if not filecode.is_file(): - logging.debug("UpdateCheckData file {0} not found in tag {1}".format(filecode, tag)) - continue - filecontent = filecode.read_text() - else: - filecontent = tag - - vercode = tag - if codeex: - m = re.search(codeex, filecontent) - if not m: - logging.debug(f"UpdateCheckData regex {codeex} for versionCode" - f" has no match in tag {tag}") - continue - - vercode = m.group(1).strip() - - if filever: - if filever != '.': - filever = build_dir / filever - if filever.is_file(): - filecontent = filever.read_text() - else: - logging.debug("UpdateCheckData file {0} not found in tag {1}".format(filever, tag)) - else: - filecontent = tag - - version = tag - if verex: - m = re.search(verex, filecontent) - if not m: - logging.debug(f"UpdateCheckData regex {verex} for versionName" - f" has no match in tag {tag}") - continue - - version = m.group(1) - - logging.debug("UpdateCheckData found version {0} ({1})" - .format(version, vercode)) - vercode = common.version_code_string_to_int(vercode) - if vercode > hcode: - htag = tag - hcode = vercode - hver = version + tags = [] + if repotype == 'git': + tags = vcs.latesttags() else: + tags = vcs.gettags() + if not tags: + return (None, "No tags found", None) + + logging.debug("All tags: " + ','.join(tags)) + if pattern: + pat = re.compile(pattern) + tags = [tag for tag in tags if pat.match(tag)] + if not tags: + return (None, "No matching tags found", None) + logging.debug("Matching tags: " + ','.join(tags)) + + if len(tags) > 5 and repotype == 'git': + tags = tags[:5] + logging.debug("Latest tags: " + ','.join(tags)) + + for tag in tags: + logging.debug("Check tag: '{0}'".format(tag)) + vcs.gotorevision(tag) + for subdir in possible_subdirs(app): - root_dir = build_dir / subdir + if subdir == '.': + root_dir = build_dir + else: + root_dir = os.path.join(build_dir, subdir) paths = common.manifest_paths(root_dir, last_build.gradle) - version, vercode, _package = common.parse_androidmanifests(paths, app) - if version in ('Unknown', 'Ignore'): - version = tag + version, vercode, package = common.parse_androidmanifests(paths, app) if vercode: logging.debug("Manifest exists in subdir '{0}'. 
Found version {1} ({2})" .format(subdir, version, vercode)) - if vercode > hcode: + if int(vercode) > int(hcode): + hpak = package htag = tag - hcode = vercode + hcode = str(int(vercode)) hver = version - if hver: - if htag != tags[0]: - logging.warning( - "{appid}: latest tag {tag} does not contain highest version {version}".format( - appid=app.id, tag=tags[0], version=hver - ) - ) - try: - commit = vcs.getref(htag) - if commit: - return (hver, hcode, commit) - except VCSException: - pass - return (hver, hcode, htag) - raise FDroidException(_("Couldn't find any version information")) + if not hpak: + return (None, "Couldn't find package ID", None) + if hver: + return (hver, hcode, htag) + return (None, "Couldn't find any version information", None) + + except VCSException as vcse: + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) + return (None, msg, None) + except Exception: + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) + return (None, msg, None) -def check_repomanifest(app: metadata.App, branch: Optional[str] = None) -> tuple[str, int]: - """Check for a new version by looking at the AndroidManifest.xml at the HEAD of the source repo. +# Check for a new version by looking at the AndroidManifest.xml at the HEAD +# of the source repo. Whether this can be used reliably or not depends on +# the development procedures used by the project's developers. Use it with +# caution, because it's inappropriate for many projects. +# Returns (None, "a message") if this didn't work, or (version, vercode) for +# the details of the current version. +def check_repomanifest(app, branch=None): - Whether this can be used reliably or not depends on - the development procedures used by the project's developers. Use it with - caution, because it's inappropriate for many projects. + try: - Parameters - ---------- - app - The App instance to check for updates for. - branch - The VCS branch where to search for versionCode, versionName. + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) + else: + build_dir = os.path.join('build', app.id) + repotype = app.RepoType - Returns - ------- - versionName - The highest found versionName. - versionCode - The highest found versionCode. + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If no package id or no version information could be found. - """ - if app.RepoType == 'srclib': - build_dir = Path('build/srclib') / app.Repo - repotype = common.getsrclibvcs(app.Repo) - else: - build_dir = Path('build') / app.id - repotype = app.RepoType + if repotype == 'git': + if branch: + branch = 'origin/' + branch + vcs.gotorevision(branch) + elif repotype == 'git-svn': + vcs.gotorevision(branch) + elif repotype == 'hg': + vcs.gotorevision(branch) + elif repotype == 'bzr': + vcs.gotorevision(None) - # Set up vcs interface and make sure we have the latest code... 
- vcs = common.getvcs(app.RepoType, app.Repo, build_dir) + last_build = metadata.Build() + if len(app.builds) > 0: + last_build = app.builds[-1] + + try_init_submodules(app, last_build, vcs) + + hpak = None + hver = None + hcode = "0" + for subdir in possible_subdirs(app): + if subdir == '.': + root_dir = build_dir + else: + root_dir = os.path.join(build_dir, subdir) + paths = common.manifest_paths(root_dir, last_build.gradle) + version, vercode, package = common.parse_androidmanifests(paths, app) + if vercode: + logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})" + .format(subdir, version, vercode)) + if int(vercode) > int(hcode): + hpak = package + hcode = str(int(vercode)) + hver = version + + if not hpak: + return (None, "Couldn't find package ID") + if hver: + return (hver, hcode) + return (None, "Couldn't find any version information") + + except VCSException as vcse: + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) + return (None, msg) + except Exception: + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) + return (None, msg) + + +def check_repotrunk(app): + + try: + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) + else: + build_dir = os.path.join('build', app.id) + repotype = app.RepoType + + if repotype not in ('git-svn', ): + return (None, 'RepoTrunk update mode only makes sense in git-svn repositories') + + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) - if repotype == 'git': - if branch: - branch = 'origin/' + branch - vcs.gotorevision(branch) - elif repotype == 'git-svn': - vcs.gotorevision(branch) - elif repotype == 'hg': - vcs.gotorevision(branch) - elif repotype == 'bzr': vcs.gotorevision(None) - last_build = get_last_build_from_app(app) - try_init_submodules(app, last_build, vcs) - - hpak = None - hver = None - hcode = 0 - for subdir in possible_subdirs(app): - root_dir = build_dir / subdir - paths = common.manifest_paths(root_dir, last_build.gradle) - version, vercode, package = common.parse_androidmanifests(paths, app) - if vercode: - logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})" - .format(subdir, version, vercode)) - if vercode > hcode: - hpak = package - hcode = vercode - hver = version - - if not hpak: - raise FDroidException(_("Couldn't find package ID")) - if hver: - return (hver, hcode) - raise FDroidException(_("Couldn't find any version information")) + ref = vcs.getref() + return (ref, ref) + except VCSException as vcse: + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) + return (None, msg) + except Exception: + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) + return (None, msg) -def try_init_submodules(app: metadata.App, last_build: metadata.Build, vcs: common.vcs): - """Try to init submodules if the last build entry uses them. +# Check for a new version by looking at the Google Play Store. +# Returns (None, "a message") if this didn't work, or (version, None) for +# the details of the current version. 
+def check_gplay(app): + time.sleep(15) + url = 'https://play.google.com/store/apps/details?id=' + app.id + headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'} + req = urllib.request.Request(url, None, headers) + try: + resp = urllib.request.urlopen(req, None, 20) + page = resp.read().decode() + except urllib.error.HTTPError as e: + return (None, str(e.code)) + except Exception as e: + return (None, 'Failed:' + str(e)) + version = None + + m = re.search('itemprop="softwareVersion">[ ]*([^<]+)[ ]*', page) + if m: + version = html.unescape(m.group(1)) + + if version == 'Varies with device': + return (None, 'Device-variable version, cannot use this method') + + if not version: + return (None, "Couldn't find version") + return (version.strip(), None) + + +def try_init_submodules(app, last_build, vcs): + """Try to init submodules if the last build entry used them. They might have been removed from the app's repo in the meantime, so if we can't find any submodules we continue with the updates check. If there is any other error in initializing them then we stop the check. @@ -361,101 +317,47 @@ def try_init_submodules(app: metadata.App, last_build: metadata.Build, vcs: comm try: vcs.initsubmodules() except NoSubmodulesException: - logging.info("No submodules present for {}".format(_getappname(app))) - except VCSException: - logging.info("submodule broken for {}".format(_getappname(app))) + logging.info("No submodules present for {}".format(app.Name)) -def dirs_with_manifest(startdir: str): - """Find directories containing a manifest file. - - Yield all directories under startdir that contain any of the manifest - files, and thus are probably an Android project. - - Parameters - ---------- - startdir - Directory to be walked down for search - - Yields - ------ - path : :class:`pathlib.Path` or None - A directory that contains a manifest file of an Android project, None if - no directory could be found - """ +# Return all directories under startdir that contain any of the manifest +# files, and thus are probably an Android project. +def dirs_with_manifest(startdir): for root, dirs, files in os.walk(startdir): - dirs.sort() if any(m in files for m in [ - 'AndroidManifest.xml', 'pom.xml', 'build.gradle', 'build.gradle.kts']): - yield Path(root) + 'AndroidManifest.xml', 'pom.xml', 'build.gradle']): + yield root -def possible_subdirs(app: metadata.App): - """Try to find a new subdir starting from the root build_dir. +# Tries to find a new subdir starting from the root build_dir. Returns said +# subdir relative to the build dir if found, None otherwise. +def possible_subdirs(app): - Yields said subdir relative to the build dir if found, None otherwise. 
- - Parameters - ---------- - app - The app to check for subdirs - - Yields - ------ - subdir : :class:`pathlib.Path` or None - A possible subdir, None if no subdir could be found - """ if app.RepoType == 'srclib': - build_dir = Path('build/srclib') / app.Repo + build_dir = os.path.join('build', 'srclib', app.Repo) else: - build_dir = Path('build') / app.id + build_dir = os.path.join('build', app.id) - last_build = get_last_build_from_app(app) + last_build = app.get_last_build() for d in dirs_with_manifest(build_dir): m_paths = common.manifest_paths(d, last_build.gradle) package = common.parse_androidmanifests(m_paths, app)[2] - if package is not None or app.UpdateCheckName == "Ignore": - subdir = d.relative_to(build_dir) + if package is not None: + subdir = os.path.relpath(d, build_dir) logging.debug("Adding possible subdir %s" % subdir) yield subdir -def _getappname(app: metadata.App) -> str: - return common.get_app_display_name(app) +def fetch_autoname(app, tag): - -def _getcvname(app: metadata.App) -> str: - return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode) - - -def fetch_autoname(app: metadata.App, tag: str) -> Optional[str]: - """Fetch AutoName. - - Get the to be displayed name of an app from the source code and adjust the - App instance in case it is different name has been found. - - Parameters - ---------- - app - The App instance to get the AutoName for. - tag - Tag to fetch AutoName at. - - Returns - ------- - commitmsg - Commit message about the name change. None in case checking for the - name is disabled, a VCSException occured or no name could be found. - """ - if not app.RepoType or app.UpdateCheckMode in ('None', 'Static') \ - or app.UpdateCheckName == "Ignore": + if not app.RepoType or app.UpdateCheckMode in ('None', 'Static'): return None if app.RepoType == 'srclib': - build_dir = Path('build/srclib') / app.Repo + build_dir = os.path.join('build', 'srclib', app.Repo) else: - build_dir = Path('build') / app.id + build_dir = os.path.join('build', app.id) try: vcs = common.getvcs(app.RepoType, app.Repo, build_dir) @@ -463,12 +365,15 @@ def fetch_autoname(app: metadata.App, tag: str) -> Optional[str]: except VCSException: return None - last_build = get_last_build_from_app(app) + last_build = app.get_last_build() - logging.debug("...fetch auto name from " + str(build_dir)) + logging.debug("...fetch auto name from " + build_dir) new_name = None for subdir in possible_subdirs(app): - root_dir = build_dir / subdir + if subdir == '.': + root_dir = build_dir + else: + root_dir = os.path.join(build_dir, subdir) new_name = common.fetch_real_name(root_dir, last_build.gradle) if new_name is not None: break @@ -478,438 +383,204 @@ def fetch_autoname(app: metadata.App, tag: str) -> Optional[str]: if new_name != app.AutoName: app.AutoName = new_name if not commitmsg: - commitmsg = "Set autoname of {0}".format(_getappname(app)) + commitmsg = "Set autoname of {0}".format(common.getappname(app)) else: logging.debug("...couldn't get autoname") return commitmsg -def operate_vercode(operation: str, vercode: int) -> int: - """Calculate a new versionCode from a mathematical operation. +def checkupdates_app(app): - Parameters - ---------- - operation - The operation to execute to get the new versionCode. - vercode - The versionCode for replacing "%c" in the operation. - - Returns - ------- - vercode - The new versionCode obtained by executing the operation. - - Raises - ------ - :exc:`~fdroidserver.exception.MetaDataException` - If the operation is invalid. 
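To make the VercodeOperation mechanics concrete, here is a self-contained sketch of the substitution and arithmetic described above; the real code delegates the evaluation to common.calculate_math_string, the helper names below are invented for the example, and division is approximated with integer division:

    import ast
    import operator
    import re

    VERCODE_OPERATION_RE = re.compile(r'^([ 0-9/*+-]|%c)+$')
    _OPS = {ast.Add: operator.add, ast.Sub: operator.sub,
            ast.Mult: operator.mul, ast.Div: operator.floordiv}

    def _calc(node):
        # evaluate a parsed expression limited to int literals and + - * /
        if isinstance(node, ast.Expression):
            return _calc(node.body)
        if isinstance(node, ast.Constant) and isinstance(node.value, int):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in _OPS:
            return _OPS[type(node.op)](_calc(node.left), _calc(node.right))
        raise ValueError('unsupported expression')

    def apply_vercode_operation(operation, vercode):
        if not VERCODE_OPERATION_RE.match(operation):
            raise ValueError('invalid VercodeOperation: ' + operation)
        return _calc(ast.parse(operation.replace('%c', str(vercode)), mode='eval'))

    # e.g. VercodeOperation: %c*10+3  maps upstream versionCode 147 to 1473
    assert apply_vercode_operation('%c*10+3', 147) == 1473
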
- """ - if not common.VERCODE_OPERATION_RE.match(operation): - raise MetaDataException(_('Invalid VercodeOperation: {field}') - .format(field=operation)) - oldvercode = vercode - op = operation.replace("%c", str(oldvercode)) - vercode = common.calculate_math_string(op) - logging.debug("Applied vercode operation: %d -> %d" % (oldvercode, vercode)) - return vercode - - -def checkupdates_app(app: metadata.App, auto: bool, commit: bool = False) -> None: - """Check for new versions and updated name of a single app. - - Also write back changes to the metadata file and create a Git commit if - requested. - - Parameters - ---------- - app - The app to check for updates for. - - Raises - ------ - :exc:`~fdroidserver.exception.MetaDataException` - If the app has an invalid UpdateCheckMode or AutoUpdateMode. - :exc:`~fdroidserver.exception.FDroidException` - If no version information could be found, the current version is newer - than the found version, auto-update was requested but an app has no - CurrentVersionCode or (Git) commiting the changes failed. - """ # If a change is made, commitmsg should be set to a description of it. - # Only if this is set, changes will be written back to the metadata. + # Only if this is set will changes be written back to the metadata. commitmsg = None tag = None + msg = None + vercode = None + noverok = False mode = app.UpdateCheckMode if mode.startswith('Tags'): pattern = mode[5:] if len(mode) > 4 else None (version, vercode, tag) = check_tags(app, pattern) + if version == 'Unknown': + version = tag + msg = vercode elif mode == 'RepoManifest': (version, vercode) = check_repomanifest(app) + msg = vercode elif mode.startswith('RepoManifest/'): tag = mode[13:] (version, vercode) = check_repomanifest(app, tag) + msg = vercode + elif mode == 'RepoTrunk': + (version, vercode) = check_repotrunk(app) + msg = vercode elif mode == 'HTTP': (version, vercode) = check_http(app) + msg = vercode elif mode in ('None', 'Static'): - logging.debug('Checking disabled') - return + version = None + msg = 'Checking disabled' + noverok = True else: - raise MetaDataException(_('Invalid UpdateCheckMode: {mode}').format(mode=mode)) + version = None + msg = 'Invalid update check method' - if not version or not vercode: - raise FDroidException(_('no version information found')) + if version and vercode and app.VercodeOperation: + if not common.VERCODE_OPERATION_RE.match(app.VercodeOperation): + raise MetaDataException(_('Invalid VercodeOperation: {field}') + .format(field=app.VercodeOperation)) + oldvercode = str(int(vercode)) + op = app.VercodeOperation.replace("%c", oldvercode) + vercode = str(common.calculate_math_string(op)) + logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode)) - if app.VercodeOperation: - vercodes = sorted([ - operate_vercode(operation, vercode) for operation in app.VercodeOperation - ]) - else: - vercodes = [vercode] + if version and any(version.startswith(s) for s in [ + '${', # Gradle variable names + '@string/', # Strings we could not resolve + ]): + version = "Unknown" updating = False - if vercodes[-1] == app.CurrentVersionCode: - logging.debug("...up to date") - elif vercodes[-1] > app.CurrentVersionCode: - logging.debug("...updating - old vercode={0}, new vercode={1}".format( - app.CurrentVersionCode, vercodes[-1])) - app.CurrentVersion = version - app.CurrentVersionCode = vercodes[-1] - updating = True + if version is None: + logmsg = "...{0} : {1}".format(app.id, msg) + if noverok: + logging.info(logmsg) + else: + logging.warn(logmsg) + 
elif vercode == app.CurrentVersionCode: + logging.info("...up to date") else: - raise FDroidException( - _('current version is newer: old vercode={old}, new vercode={new}').format( - old=app.CurrentVersionCode, new=vercodes[-1] - ) - ) + logging.debug("...updating - old vercode={0}, new vercode={1}".format( + app.CurrentVersionCode, vercode)) + app.CurrentVersion = version + app.CurrentVersionCode = str(int(vercode)) + updating = True commitmsg = fetch_autoname(app, tag) if updating: - name = _getappname(app) - ver = _getcvname(app) + name = common.getappname(app) + ver = common.getcvname(app) logging.info('...updating to version %s' % ver) - commitmsg = 'Update CurrentVersion of %s to %s' % (name, ver) + commitmsg = 'Update CV of %s to %s' % (name, ver) - if auto: + if options.auto: mode = app.AutoUpdateMode if not app.CurrentVersionCode: - raise MetaDataException( - _("Can't auto-update app with no CurrentVersionCode") - ) + logging.warn("Can't auto-update app with no current version code: " + app.id) elif mode in ('None', 'Static'): pass - elif mode.startswith('Version'): + elif mode.startswith('Version '): pattern = mode[8:] - suffix = '' if pattern.startswith('+'): try: - suffix, pattern = pattern[1:].split(' ', 1) - except ValueError as exc: - raise MetaDataException("Invalid AutoUpdateMode: " + mode) from exc - + suffix, pattern = pattern.split(' ', 1) + except ValueError: + raise MetaDataException("Invalid AUM: " + mode) + else: + suffix = '' gotcur = False latest = None - builds = app.get('Builds', []) - - if builds: - latest = builds[-1] - if latest.versionCode == app.CurrentVersionCode: + for build in app.builds: + if int(build.versionCode) >= int(app.CurrentVersionCode): gotcur = True - elif latest.versionCode > app.CurrentVersionCode: - raise FDroidException( - _( - 'latest build recipe is newer: ' - 'old vercode={old}, new vercode={new}' - ).format(old=latest.versionCode, new=app.CurrentVersionCode) - ) + if not latest or int(build.versionCode) > int(latest.versionCode): + latest = build + + if int(latest.versionCode) > int(app.CurrentVersionCode): + logging.info("Refusing to auto update, since the latest build is newer") if not gotcur: - newbuilds = copy.deepcopy(builds[-len(vercodes):]) - - # These are either built-in or invalid in newer system versions - bookworm_blocklist = [ - 'apt-get install -y openjdk-11-jdk', - 'apt-get install openjdk-11-jdk-headless', - 'apt-get install -y openjdk-11-jdk-headless', - 'apt-get install -t stretch-backports openjdk-11-jdk-headless openjdk-11-jre-headless', - 'apt-get install -y -t stretch-backports openjdk-11-jdk-headless openjdk-11-jre-headless', - 'apt-get install -y openjdk-17-jdk', - 'apt-get install openjdk-17-jdk-headless', - 'apt-get install -y openjdk-17-jdk-headless', - 'update-alternatives --auto java', - 'update-java-alternatives -a', - ] - - for build in newbuilds: - if "sudo" in build: - if any("openjdk-11" in line for line in build["sudo"]) or any("openjdk-17" in line for line in build["sudo"]): - build["sudo"] = [line for line in build["sudo"] if line not in bookworm_blocklist] - if build["sudo"] == ['apt-get update']: - build["sudo"] = '' - - for b, v in zip(newbuilds, vercodes): - b.disable = False - b.versionCode = v - b.versionName = app.CurrentVersion + suffix.replace( - '%c', str(v) - ) - logging.info("...auto-generating build for " + b.versionName) - if tag: - b.commit = tag - else: - commit = pattern.replace('%v', app.CurrentVersion) - commit = commit.replace('%c', str(v)) - b.commit = commit - - 
app['Builds'].extend(newbuilds) - - name = _getappname(app) - ver = _getcvname(app) + newbuild = copy.deepcopy(latest) + newbuild.disable = False + newbuild.versionCode = app.CurrentVersionCode + newbuild.versionName = app.CurrentVersion + suffix + logging.info("...auto-generating build for " + newbuild.versionName) + commit = pattern.replace('%v', newbuild.versionName) + commit = commit.replace('%c', newbuild.versionCode) + newbuild.commit = commit + app.builds.append(newbuild) + name = common.getappname(app) + ver = common.getcvname(app) commitmsg = "Update %s to %s" % (name, ver) else: - raise MetaDataException( - _('Invalid AutoUpdateMode: {mode}').format(mode=mode) - ) + logging.warn('Invalid auto update mode "' + mode + '" on ' + app.id) if commitmsg: metadata.write_metadata(app.metadatapath, app) - if commit: + if options.commit: logging.info("Commiting update for " + app.metadatapath) gitcmd = ["git", "commit", "-m", commitmsg] + if 'auto_author' in config: + gitcmd.extend(['--author', config['auto_author']]) gitcmd.extend(["--", app.metadatapath]) if subprocess.call(gitcmd) != 0: raise FDroidException("Git commit failed") -def get_last_build_from_app(app: metadata.App) -> metadata.Build: - """Get the last build entry of an app.""" - if app.get('Builds'): - return app['Builds'][-1] - else: - return metadata.Build() +def update_wiki(gplaylog, locallog): + if config.get('wiki_server') and config.get('wiki_path'): + try: + import mwclient + site = mwclient.Site((config['wiki_protocol'], config['wiki_server']), + path=config['wiki_path']) + site.login(config['wiki_user'], config['wiki_password']) - -def get_upstream_main_branch(git_repo): - refs = list() - for ref in git_repo.remotes.upstream.refs: - if ref.name != 'upstream/HEAD': - refs.append(ref.name) - if len(refs) == 1: - return refs[0] - for name in ('upstream/main', 'upstream/master'): - if name in refs: - return name - try: - with git_repo.config_reader() as reader: - return 'upstream/%s' % reader.get_value('init', 'defaultBranch') - except configparser.NoSectionError: - return 'upstream/main' - - -def checkout_appid_branch(appid): - """Prepare the working branch named after the appid. - - This sets up everything for checkupdates_app() to run and add - commits. If there is an existing branch named after the appid, - and it has commits from users other than the checkupdates-bot, - then this will return False. Otherwise, it returns True. - - The checkupdates-runner must set the committer email address in - the git config. Then any commit with a committer or author that - does not match that will be considered to have human edits. 
That - email address is currently set in: - https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36 - - """ - logging.debug(f'Creating merge request branch for {appid}') - git_repo = git.Repo.init('.') - upstream_main = get_upstream_main_branch(git_repo) - for remote in git_repo.remotes: - remote.fetch() - try: - git_repo.remotes.origin.fetch(f'{appid}:refs/remotes/origin/{appid}') - except Exception as e: - logging.debug('"%s" branch not found on origin remote:\n\t%s', appid, e) - if appid in git_repo.remotes.origin.refs: - start_point = f"origin/{appid}" - for commit in git_repo.iter_commits( - f'{upstream_main}...{start_point}', right_only=True - ): - if commit.committer.email != BOT_EMAIL or commit.author.email != BOT_EMAIL: - return False - else: - start_point = upstream_main - git_repo.git.checkout('-B', appid, start_point) - git_repo.git.rebase(upstream_main, strategy_option='ours', kill_after_timeout=120) - return True - - -def get_changes_versus_ref(git_repo, ref, f): - changes = [] - for m in re.findall( - r"^[+-].*", git_repo.git.diff(f"{ref}", '--', f), flags=re.MULTILINE - ): - if not re.match(r"^(\+\+\+|---) ", m): - changes.append(m) - return changes - - -def push_commits(branch_name='checkupdates'): - """Make git branch then push commits as merge request. - - The appid is parsed from the actual file that was changed so that - only the right branch is ever updated. - - This uses the appid as the standard branch name so that there is - only ever one open merge request per-app. If multiple apps are - included in the branch, then 'checkupdates' is used as branch - name. This is to support the old way operating, e.g. in batches. - - This uses GitLab "Push Options" to create a merge request. Git - Push Options are config data that can be sent via `git push - --push-option=... origin foo`. 
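As a rough illustration of that mechanism, here is a minimal GitPython sketch mirroring the push call used further down; the remote name, branch, and merge request title are placeholders, and each entry in push_option becomes one --push-option=... argument that GitLab interprets server-side:

    import git

    repo = git.Repo('.')
    remote = repo.remotes.origin
    remote.push(
        'HEAD:refs/heads/com.example.app',
        force=True,
        set_upstream=True,
        push_option=[
            'merge_request.create',
            'merge_request.remove_source_branch',
            'merge_request.title=bot: update com.example.app',
        ],
    )
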
- - References - ---------- - * https://docs.gitlab.com/ee/user/project/push_options.html - - """ - if branch_name != "checkupdates": - if callable(getattr(git.SymbolicReference, "_check_ref_name_valid", None)): - git.SymbolicReference._check_ref_name_valid(branch_name) - - git_repo = git.Repo.init('.') - upstream_main = get_upstream_main_branch(git_repo) - files = set() - for commit in git_repo.iter_commits(f'{upstream_main}...HEAD', right_only=True): - files.update(commit.stats.files.keys()) - - files = list(files) - if len(files) == 1: - m = re.match(r'metadata/(\S+)\.yml', files[0]) - if m: - branch_name = m.group(1) # appid - if not files: - return - - # https://git-scm.com/docs/git-check-ref-format Git refname can't end with .lock - if branch_name.endswith(".lock"): - branch_name = f"{branch_name}_" - - remote = git_repo.remotes.origin - if branch_name in remote.refs: - if not get_changes_versus_ref(git_repo, f'origin/{branch_name}', files[0]): - return - - git_repo.create_head(branch_name, force=True) - push_options = [ - 'merge_request.create', - 'merge_request.remove_source_branch', - 'merge_request.title=bot: ' + git_repo.branches[branch_name].commit.summary, - 'merge_request.description=' - + '~%s checkupdates-bot run %s' % (branch_name, os.getenv('CI_JOB_URL')), - ] - - # mark as draft if there are only changes to CurrentVersion: - current_version_only = True - for m in get_changes_versus_ref(git_repo, upstream_main, files[0]): - if not re.match(r"^[-+]CurrentVersion", m): - current_version_only = False - break - if current_version_only: - push_options.append('merge_request.draft') - - progress = git.RemoteProgress() - - pushinfos = remote.push( - f"HEAD:refs/heads/{branch_name}", - progress=progress, - force=True, - set_upstream=True, - push_option=push_options, - ) - - for pushinfo in pushinfos: - logging.info(pushinfo.summary) - # Show potentially useful messages from git remote - if progress: - for line in progress.other_lines: - logging.info(line) - if pushinfo.flags & ( - git.remote.PushInfo.ERROR - | git.remote.PushInfo.REJECTED - | git.remote.PushInfo.REMOTE_FAILURE - | git.remote.PushInfo.REMOTE_REJECTED - ): - raise FDroidException( - f'{remote.url} push failed: {pushinfo.flags} {pushinfo.summary}' - ) - else: - logging.info(remote.url + ': ' + pushinfo.summary) - - -def prune_empty_appid_branches(git_repo=None, main_branch='main'): - """Remove empty branches from checkupdates-bot git remote.""" - if git_repo is None: - git_repo = git.Repo.init('.') - upstream_main = get_upstream_main_branch(git_repo) - main_branch = upstream_main.split('/')[1] - - remote = git_repo.remotes.origin - remote.update(prune=True) - merged_branches = git_repo.git().branch(remotes=True, merged=upstream_main).split() - for remote_branch in merged_branches: - if not remote_branch or '/' not in remote_branch: - continue - if remote_branch.split('/')[1] not in (main_branch, 'HEAD'): - for ref in git_repo.remotes.origin.refs: - if remote_branch == ref.name: - remote.push(':%s' % ref.remote_head, force=True) # rm remote branch - - -def status_update_json(processed: list, failed: dict) -> None: - """Output a JSON file with metadata about this run.""" - logging.debug(_('Outputting JSON')) - output = common.setup_status_output(start_timestamp) - if processed: - output['processed'] = processed - if failed: - output['failed'] = failed - common.write_status_json(output) + # Write a page with the last build log for this version code + wiki_page_path = 'checkupdates_' + time.strftime('%s', 
start_timestamp) + newpage = site.Pages[wiki_page_path] + txt = '' + txt += "* command line: " + ' '.join(sys.argv) + "\n" + txt += common.get_git_describe_link() + txt += "* started at " + common.get_wiki_timestamp(start_timestamp) + '\n' + txt += "* completed at " + common.get_wiki_timestamp() + '\n' + txt += "\n\n" + txt += common.get_android_tools_version_log() + txt += "\n\n" + if gplaylog: + txt += '== --gplay check ==\n\n' + txt += gplaylog + if locallog: + txt += '== local source check ==\n\n' + txt += locallog + newpage.save(txt, summary='Run log') + newpage = site.Pages['checkupdates'] + newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect') + except Exception as e: + logging.error(_('Error while attempting to publish log: %s') % e) config = None +options = None start_timestamp = time.gmtime() def main(): - """Check for updates for one or more apps. - The behaviour of this function is influenced by the configuration file as - well as command line parameters. - """ - global config + global config, options # Parse command line... - parser = ArgumentParser() + parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]") common.setup_global_opts(parser) - parser.add_argument("appid", nargs='*', help=_("application ID of file to operate on")) + parser.add_argument("appid", nargs='*', help=_("applicationId to check for updates")) parser.add_argument("--auto", action="store_true", default=False, help=_("Process auto-updates")) parser.add_argument("--autoonly", action="store_true", default=False, help=_("Only process apps with auto-updates")) parser.add_argument("--commit", action="store_true", default=False, help=_("Commit changes")) - parser.add_argument("--merge-request", action="store_true", default=False, - help=_("Commit changes, push, then make a merge request")) parser.add_argument("--allow-dirty", action="store_true", default=False, help=_("Run on git repo that has uncommitted changes")) + parser.add_argument("--gplay", action="store_true", default=False, + help=_("Only print differences with the Play Store")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W - config = common.read_config() + config = common.read_config(options) if not options.allow_dirty: status = subprocess.check_output(['git', 'status', '--porcelain']) @@ -917,15 +588,40 @@ def main(): logging.error(_('Build metadata git repo has uncommited changes!')) sys.exit(1) - if options.merge_request and not (options.appid and len(options.appid) == 1): - logging.error(_('--merge-request only runs on a single appid!')) - sys.exit(1) + # Get all apps... 
+ allapps = metadata.read_metadata() - apps = common.read_app_args(options.appid) + apps = common.read_app_args(options.appid, allapps, False) - processed = [] - failed = dict() - exit_code = 0 + gplaylog = '' + if options.gplay: + for appid, app in apps.items(): + gplaylog += '* ' + appid + '\n' + version, reason = check_gplay(app) + if version is None: + if reason == '404': + logging.info("{0} is not in the Play Store".format(common.getappname(app))) + else: + logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason)) + if version is not None: + stored = app.CurrentVersion + if not stored: + logging.info("{0} has no Current Version but has version {1} on the Play Store" + .format(common.getappname(app), version)) + elif LooseVersion(stored) < LooseVersion(version): + logging.info("{0} has version {1} on the Play Store, which is bigger than {2}" + .format(common.getappname(app), version, stored)) + else: + if stored != version: + logging.info("{0} has version {1} on the Play Store, which differs from {2}" + .format(common.getappname(app), version, stored)) + else: + logging.info("{0} has the same version {1} on the Play Store" + .format(common.getappname(app), version)) + update_wiki(gplaylog, None) + return + + locallog = '' for appid, app in apps.items(): if options.autoonly and app.AutoUpdateMode in ('None', 'Static'): @@ -934,33 +630,18 @@ def main(): msg = _("Processing {appid}").format(appid=appid) logging.info(msg) + locallog += '* ' + msg + '\n' try: - if options.merge_request: - if not checkout_appid_branch(appid): - msg = _("...checkupdate failed for {appid} : {error}").format( - appid=appid, - error='Open merge request with human edits, skipped.', - ) - logging.warning(msg) - failed[appid] = msg - continue - - checkupdates_app(app, options.auto, options.commit or options.merge_request) - processed.append(appid) + checkupdates_app(app) except Exception as e: msg = _("...checkupdate failed for {appid} : {error}").format(appid=appid, error=e) logging.error(msg) - logging.debug(traceback.format_exc()) - failed[appid] = str(e) - exit_code = 1 + locallog += msg + '\n' - if options.appid and options.merge_request: - push_commits() - prune_empty_appid_branches() + update_wiki(None, locallog) - status_update_json(processed, failed) - sys.exit(exit_code) + logging.info(_("Finished")) if __name__ == "__main__": diff --git a/fdroidserver/common.py b/fdroidserver/common.py index 127976c3..535a3f2a 100644 --- a/fdroidserver/common.py +++ b/fdroidserver/common.py @@ -1,16 +1,8 @@ #!/usr/bin/env python3 # # common.py - part of the FDroid server tools -# -# Copyright (C) 2010-2016, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013-2017, Daniel Martí -# Copyright (C) 2013-2021, Hans-Christoph Steiner -# Copyright (C) 2017-2018, Torsten Grote -# Copyright (C) 2017, tobiasKaminsky -# Copyright (C) 2017-2021, Michael Pöhn -# Copyright (C) 2017,2021, mimi89999 -# Copyright (C) 2019-2021, Jochen Sprickerhof -# Copyright (C) 2021, Felix C. Stegerman +# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Martí # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -25,106 +17,62 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+# common.py is imported by all modules, so do not import third-party +# libraries here as they will become a requirement for all commands. -"""Collection of functions shared by subcommands. - -This is basically the "shared library" for all the fdroid subcommands. -The contains core functionality and a number of utility functions. -This is imported by all modules, so do not import third-party -libraries here as they will become a requirement for all commands. - -Config ------- - -Parsing and using the configuration settings from config.yml is -handled here. The data format is YAML 1.2. The config has its own -supported data types: - -* Boolean (e.g. deploy_process_logs:) -* Integer (e.g. archive_older:, repo_maxage:) -* String-only (e.g. repo_name:, sdk_path:) -* Multi-String (string, list of strings, or list of dicts with - strings, e.g. serverwebroot:, mirrors:) - -String-only fields can also use a special value {env: varname}, which -is a dict with a single key 'env' and a value that is the name of the -environment variable to include. - -""" - -import ast -import base64 -import copy -import difflib -import filecmp -import glob -import gzip -import hashlib import io -import itertools -import json -import logging -import operator import os +import sys import re +import ast +import gzip import shutil -import socket +import glob import stat import subprocess -import sys -import tempfile import time +import operator +import logging +import hashlib +import socket +import base64 import zipfile -from argparse import BooleanOptionalAction -from base64 import urlsafe_b64encode +import tempfile +import json + +# TODO change to only import defusedxml once its installed everywhere +try: + import defusedxml.ElementTree as XMLElementTree +except ImportError: + import xml.etree.ElementTree as XMLElementTree # nosec this is a fallback only + from binascii import hexlify -from datetime import datetime, timedelta, timezone -from pathlib import Path +from datetime import datetime, timedelta +from distutils.version import LooseVersion from queue import Queue -from typing import List -from urllib.parse import urlparse, urlsplit, urlunparse from zipfile import ZipFile -import defusedxml.ElementTree as XMLElementTree -import git -from asn1crypto import cms +from pyasn1.codec.der import decoder, encoder +from pyasn1_modules import rfc2315 +from pyasn1.error import PyAsn1Error import fdroidserver.metadata from fdroidserver import _ -from fdroidserver._yaml import config_dump, yaml -from fdroidserver.exception import ( - BuildException, - FDroidException, - MetaDataException, - NoSubmodulesException, - VCSException, - VerificationException, -) - -from . import apksigcopier, common +from fdroidserver.exception import FDroidException, VCSException, NoSubmodulesException,\ + BuildException, VerificationException from .asynchronousfilereader import AsynchronousFileReader -from .looseversion import LooseVersion # The path to this fdroidserver distribution FDROID_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__), '..')) -# There needs to be a default, and this is the most common for software. -DEFAULT_LOCALE = 'en-US' - # this is the build-tools version, aapt has a separate version that # has to be manually set in test_aapt_version() -MINIMUM_AAPT_BUILD_TOOLS_VERSION = '26.0.0' -# 33.0.x has a bug that verifies APKs it shouldn't https://gitlab.com/fdroid/fdroidserver/-/issues/1253 -# 31.0.0 is the first version to support --v4-signing-enabled. 
-# we only require 30.0.0 for now as that's the version in buster-backports, see also signindex.py -# 26.0.2 is the first version recognizing md5 based signatures as valid again -# (as does android, so we want that) -MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION = '30.0.0' +MINIMUM_AAPT_VERSION = '26.0.0' VERCODE_OPERATION_RE = re.compile(r'^([ 0-9/*+-]|%c)+$') # A signature block file with a .DSA, .RSA, or .EC extension -SIGNATURE_BLOCK_FILE_REGEX = re.compile(r'\AMETA-INF/.*\.(DSA|EC|RSA)\Z', re.DOTALL) +SIGNATURE_BLOCK_FILE_REGEX = re.compile(r'^META-INF/.*\.(DSA|EC|RSA)$') APK_NAME_REGEX = re.compile(r'^([a-zA-Z][\w.]*)_(-?[0-9]+)_?([0-9a-f]{7})?\.apk') APK_ID_TRIPLET_REGEX = re.compile(r"^package: name='(\w[^']*)' versionCode='([^']+)' versionName='([^']*)'") STANDARD_FILE_NAME_REGEX = re.compile(r'^(\w[\w.]*)_(-?[0-9]+)\.\w+') @@ -132,77 +80,55 @@ FDROID_PACKAGE_NAME_REGEX = re.compile(r'''^[a-f0-9]+$''', re.IGNORECASE) STRICT_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-zA-Z]+(?:\d*[a-zA-Z_]*)*)(?:\.[a-zA-Z]+(?:\d*[a-zA-Z_]*)*)+$''') VALID_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-z_]+(?:\d*[a-zA-Z_]*)*)(?:\.[a-z_]+(?:\d*[a-zA-Z_]*)*)*$''', re.IGNORECASE) -ANDROID_PLUGIN_REGEX = re.compile(r'''\s*(:?apply plugin:|id)\(?\s*['"](android|com\.android\.application)['"]\s*\)?''') MAX_VERSION_CODE = 0x7fffffff # Java's Integer.MAX_VALUE (2147483647) XMLNS_ANDROID = '{http://schemas.android.com/apk/res/android}' -# https://docs.gitlab.com/ee/user/gitlab_com/#gitlab-pages -GITLAB_COM_PAGES_MAX_SIZE = 1000000000 - -# the names used for things that are configured per-repo -ANTIFEATURES_CONFIG_NAME = 'antiFeatures' -CATEGORIES_CONFIG_NAME = 'categories' -CONFIG_CONFIG_NAME = 'config' -MIRRORS_CONFIG_NAME = 'mirrors' -RELEASECHANNELS_CONFIG_NAME = "releaseChannels" -CONFIG_NAMES = ( - ANTIFEATURES_CONFIG_NAME, - CATEGORIES_CONFIG_NAME, - CONFIG_CONFIG_NAME, - MIRRORS_CONFIG_NAME, - RELEASECHANNELS_CONFIG_NAME, -) - -CONFIG_FILE = 'config.yml' - config = None options = None env = None orig_path = None -def get_default_cachedir(): - """Get a cachedir, using platformdirs for cross-platform, but works without. - - Once platformdirs is installed everywhere, this function can be - removed. 
- - """ - appname = __name__.split('.')[0] - try: - import platformdirs - - return platformdirs.user_cache_dir(appname, 'F-Droid') - except ImportError: - return str(Path.home() / '.cache' / appname) - - -# All paths in the config must be strings, never pathlib.Path instances default_config = { 'sdk_path': "$ANDROID_HOME", - 'ndk_paths': {}, - 'cachedir': get_default_cachedir(), + 'ndk_paths': { + 'r10e': None, + 'r11c': None, + 'r12b': "$ANDROID_NDK", + 'r13b': None, + 'r14b': None, + 'r15c': None, + 'r16b': None, + }, + 'cachedir': os.path.join(os.getenv('HOME'), '.cache', 'fdroidserver'), + 'build_tools': MINIMUM_AAPT_VERSION, + 'force_build_tools': False, 'java_paths': None, - 'scan_binary': False, 'ant': "ant", 'mvn3': "mvn", - 'gradle': shutil.which('gradlew-fdroid'), + 'gradle': os.path.join(FDROID_PATH, 'gradlew-fdroid'), + 'gradle_version_dir': os.path.join(os.path.join(os.getenv('HOME'), '.cache', 'fdroidserver'), 'gradle'), + 'accepted_formats': ['txt', 'yml'], 'sync_from_local_copy_dir': False, 'allow_disabled_algorithms': False, - 'keep_when_not_allowed': False, 'per_app_repos': False, - 'make_current_version_link': False, + 'make_current_version_link': True, 'current_version_name_source': 'Name', 'deploy_process_logs': False, + 'update_stats': False, + 'stats_ignore': [], + 'stats_server': None, + 'stats_user': None, + 'stats_to_carbon': False, 'repo_maxage': 0, 'build_server_always': False, - 'keystore': 'keystore.p12', + 'keystore': 'keystore.jks', 'smartcardoptions': [], 'char_limits': { 'author': 256, - 'name': 50, + 'name': 30, 'summary': 80, 'description': 4000, 'video': 256, @@ -210,129 +136,30 @@ default_config = { }, 'keyaliases': {}, 'repo_url': "https://MyFirstFDroidRepo.org/fdroid/repo", - 'repo_name': "My First F-Droid Repo Demo", - 'repo_icon': "icon.png", - 'repo_description': _("""This is a repository of apps to be used with F-Droid. Applications in this repository are either official binaries built by the original application developers, or are binaries built from source by the admin of f-droid.org using the tools on https://gitlab.com/fdroid."""), # type: ignore - 'archive_name': 'My First F-Droid Archive Demo', - 'archive_description': _('These are the apps that have been archived from the main repo.'), # type: ignore + 'repo_name': "My First FDroid Repo Demo", + 'repo_icon': "fdroid-icon.png", + 'repo_description': ''' + This is a repository of apps to be used with FDroid. Applications in this + repository are either official binaries built by the original application + developers, or are binaries built from source by the admin of f-droid.org + using the tools on https://gitlab.com/u/fdroid. + ''', 'archive_older': 0, - 'git_mirror_size_limit': 10000000000, - 'scanner_signature_sources': ['suss'], } -def get_options(): - """Return options as set up by parse_args(). - - This provides an easy way to get the global instance without - having to think about very confusing import and submodule - visibility. The code should be probably refactored so it does not - need this. If each individual option value was always passed to - functions as args, for example. - - https://docs.python.org/3/reference/import.html#submodules - - """ - return fdroidserver.common.options - - -def parse_args(parser): - """Call parser.parse_args(), store result in module-level variable and return it. - - This is needed to set up the copy of the options instance in the - fdroidserver.common module. 
A subcommand only needs to call this - if it uses functions from fdroidserver.common that expect the - "options" variable to be initialized. - - """ - fdroidserver.common.options = parser.parse_args() - return fdroidserver.common.options - - def setup_global_opts(parser): try: # the buildserver VM might not have PIL installed from PIL import PngImagePlugin - logger = logging.getLogger(PngImagePlugin.__name__) logger.setLevel(logging.INFO) # tame the "STREAM" debug messages except ImportError: pass - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - help=_("Spew out even more information than normal"), - ) - parser.add_argument( - "-q", - "--quiet", - action="store_true", - default=False, - help=_("Restrict output to warnings and errors"), - ) - parser.add_argument( - "--color", - action=BooleanOptionalAction, - default=None, - help=_("Color the log output"), - ) - - -class ColorFormatter(logging.Formatter): - - def __init__(self, msg): - logging.Formatter.__init__(self, msg) - - bright_black = "\x1b[90;20m" - yellow = "\x1b[33;20m" - red = "\x1b[31;20m" - bold_red = "\x1b[31;1m" - reset = "\x1b[0m" - - self.FORMATS = { - logging.DEBUG: bright_black + msg + reset, - logging.INFO: reset + msg + reset, # use default color - logging.WARNING: yellow + msg + reset, - logging.ERROR: red + msg + reset, - logging.CRITICAL: bold_red + msg + reset - } - - def format(self, record): - log_fmt = self.FORMATS.get(record.levelno) - formatter = logging.Formatter(log_fmt) - return formatter.format(record) - - -def set_console_logging(verbose=False, color=False): - """Globally set logging to output nicely to the console.""" - - class _StdOutFilter(logging.Filter): - def filter(self, record): - return record.levelno < logging.ERROR - - if verbose: - level = logging.DEBUG - else: - level = logging.ERROR - - if color or (color is None and sys.stdout.isatty()): - formatter = ColorFormatter - else: - formatter = logging.Formatter - - stdout_handler = logging.StreamHandler(sys.stdout) - stdout_handler.addFilter(_StdOutFilter()) - stdout_handler.setFormatter(formatter('%(message)s')) - - stderr_handler = logging.StreamHandler(sys.stderr) - stderr_handler.setLevel(logging.ERROR) - stderr_handler.setFormatter(formatter(_('ERROR: %(message)s'))) - - logging.basicConfig( - force=True, level=level, handlers=[stdout_handler, stderr_handler] - ) + parser.add_argument("-v", "--verbose", action="store_true", default=False, + help=_("Spew out even more information than normal")) + parser.add_argument("-q", "--quiet", action="store_true", default=False, + help=_("Restrict output to warnings and errors")) def _add_java_paths_to_config(pathlist, thisconfig): @@ -351,18 +178,18 @@ def _add_java_paths_to_config(pathlist, thisconfig): j = os.path.basename(d) # the last one found will be the canonical one, so order appropriately for regex in [ - r'^1\.([126-9][0-9]?)\.0\.jdk$', # OSX - r'^jdk1\.([126-9][0-9]?)\.0_[0-9]+.jdk$', # OSX and Oracle tarball - r'^jdk1\.([126-9][0-9]?)\.0_[0-9]+$', # Oracle Windows - r'^jdk([126-9][0-9]?)-openjdk$', # Arch - r'^java-([126-9][0-9]?)-openjdk$', # Arch - r'^java-([126-9][0-9]?)-jdk$', # Arch (oracle) - r'^java-1\.([126-9][0-9]?)\.0-.*$', # RedHat - r'^java-([126-9][0-9]?)-oracle$', # Debian WebUpd8 - r'^jdk-([126-9][0-9]?)-oracle-.*$', # Debian make-jpkg - r'^java-([126-9][0-9]?)-openjdk-.*$', # Debian - r'^oracle-jdk-bin-1\.([126-9][0-9]?).*$', # Gentoo (oracle) - r'^icedtea-bin-([126-9][0-9]?).*$', # Gentoo (openjdk) + r'^1\.([16-9][0-9]?)\.0\.jdk$', # 
OSX + r'^jdk1\.([16-9][0-9]?)\.0_[0-9]+.jdk$', # OSX and Oracle tarball + r'^jdk1\.([16-9][0-9]?)\.0_[0-9]+$', # Oracle Windows + r'^jdk([16-9][0-9]?)-openjdk$', # Arch + r'^java-([16-9][0-9]?)-openjdk$', # Arch + r'^java-([16-9][0-9]?)-jdk$', # Arch (oracle) + r'^java-1\.([16-9][0-9]?)\.0-.*$', # RedHat + r'^java-([16-9][0-9]?)-oracle$', # Debian WebUpd8 + r'^jdk-([16-9][0-9]?)-oracle-.*$', # Debian make-jpkg + r'^java-([16-9][0-9]?)-openjdk-[^c][^o][^m].*$', # Debian + r'^oracle-jdk-bin-1\.([17-9][0-9]?).*$', # Gentoo (oracle) + r'^icedtea-bin-([17-9][0-9]?).*$', # Gentoo (openjdk) ]: m = re.match(regex, j) if not m: @@ -373,24 +200,13 @@ def _add_java_paths_to_config(pathlist, thisconfig): def fill_config_defaults(thisconfig): - """Fill in the global config dict with relevant defaults. - - For config values that have a path that can be expanded, e.g. an - env var or a ~/, this will store the original value using "_orig" - appended to the key name so that if the config gets written out, - it will preserve the original, unexpanded string. - - """ for k, v in default_config.items(): if k not in thisconfig: - if isinstance(v, dict) or isinstance(v, list): - thisconfig[k] = v.copy() - else: - thisconfig[k] = v + thisconfig[k] = v # Expand paths (~users and $vars) def expand_path(path): - if not path or not isinstance(path, str): + if path is None: return None orig = path path = os.path.expanduser(path) @@ -399,7 +215,7 @@ def fill_config_defaults(thisconfig): return None return path - for k in ['sdk_path', 'ant', 'mvn3', 'gradle', 'keystore']: + for k in ['sdk_path', 'ant', 'mvn3', 'gradle', 'keystore', 'repo_icon']: v = thisconfig[k] exp = expand_path(v) if exp is not None: @@ -410,20 +226,19 @@ def fill_config_defaults(thisconfig): if thisconfig['java_paths'] is None: thisconfig['java_paths'] = dict() pathlist = [] - pathlist += glob.glob('/usr/lib/jvm/j*[126-9]*') - pathlist += glob.glob('/usr/java/jdk1.[126-9]*') - pathlist += glob.glob('/System/Library/Java/JavaVirtualMachines/1.[126-9][0-9]?.0.jdk') + pathlist += glob.glob('/usr/lib/jvm/j*[16-9]*') + pathlist += glob.glob('/usr/java/jdk1.[16-9]*') + pathlist += glob.glob('/System/Library/Java/JavaVirtualMachines/1.[16-9][0-9]?.0.jdk') pathlist += glob.glob('/Library/Java/JavaVirtualMachines/*jdk*[0-9]*') pathlist += glob.glob('/opt/oracle-jdk-*1.[0-9]*') pathlist += glob.glob('/opt/icedtea-*[0-9]*') if os.getenv('JAVA_HOME') is not None: pathlist.append(os.getenv('JAVA_HOME')) if os.getenv('PROGRAMFILES') is not None: - pathlist += glob.glob(os.path.join(os.getenv('PROGRAMFILES'), 'Java', 'jdk1.[126-9][0-9]?.*')) + pathlist += glob.glob(os.path.join(os.getenv('PROGRAMFILES'), 'Java', 'jdk1.[16-9][0-9]?.*')) _add_java_paths_to_config(pathlist, thisconfig) - for java_version in range(29, 6, -1): - java_version = str(java_version) + for java_version in ('14', '13', '12', '11', '10', '9', '8', '7'): if java_version not in thisconfig['java_paths']: continue java_home = thisconfig['java_paths'][java_version] @@ -438,17 +253,6 @@ def fill_config_defaults(thisconfig): if 'keytool' not in thisconfig and shutil.which('keytool'): thisconfig['keytool'] = shutil.which('keytool') - # enable apksigner by default so v2/v3 APK signatures validate - find_apksigner(thisconfig) - if not thisconfig.get('apksigner'): - logging.warning(_('apksigner not found! 
Cannot sign or verify modern APKs')) - - if 'ipfs_cid' not in thisconfig and shutil.which('ipfs_cid'): - thisconfig['ipfs_cid'] = shutil.which('ipfs_cid') - cmd = sys.argv[1] if len(sys.argv) >= 2 else '' - if cmd == 'update' and not thisconfig.get('ipfs_cid'): - logging.debug(_("ipfs_cid not found, skipping CIDv1 generation")) - for k in ['ndk_paths', 'java_paths']: d = thisconfig[k] for k2 in d.copy(): @@ -458,70 +262,6 @@ def fill_config_defaults(thisconfig): thisconfig[k][k2] = exp thisconfig[k][k2 + '_orig'] = v - ndk_paths = thisconfig.get('ndk_paths', {}) - - ndk_bundle = os.path.join(thisconfig['sdk_path'], 'ndk-bundle') - if os.path.exists(ndk_bundle): - version = get_ndk_version(ndk_bundle) - if version not in ndk_paths: - ndk_paths[version] = ndk_bundle - - ndk_dir = os.path.join(thisconfig['sdk_path'], 'ndk') - if os.path.exists(ndk_dir): - for ndk in glob.glob(os.path.join(ndk_dir, '*')): - version = get_ndk_version(ndk) - if version not in ndk_paths: - ndk_paths[version] = ndk - - if 'cachedir_scanner' not in thisconfig: - thisconfig['cachedir_scanner'] = str(Path(thisconfig['cachedir']) / 'scanner') - if 'gradle_version_dir' not in thisconfig: - thisconfig['gradle_version_dir'] = str(Path(thisconfig['cachedir']) / 'gradle') - - -def get_config(): - """Get the initalized, singleton config instance. - - config and options are intertwined in read_config(), so they have - to be here too. In the current ugly state of things, there are - multiple potential instances of config and options in use: - - * global - * module-level in the subcommand module (e.g. fdroidserver/build.py) - * module-level in fdroidserver.common - - There are some insane parts of the code that are probably - referring to multiple instances of these at different points. - This can be super confusing and maddening. - - The current intermediate refactoring step is to move all - subcommands to always get/set config and options via this function - so that there is no longer a distinction between the global and - module-level instances. Then there can be only one module-level - instance in fdroidserver.common. 
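Pulling these pieces together, a subcommand entry point would typically be wired up roughly as below; this is a sketch following the newer-style calls shown in the removed lines of checkupdates.main() earlier in this diff, and the --dry-run flag is purely illustrative:

    from argparse import ArgumentParser

    from fdroidserver import common


    def main():
        parser = ArgumentParser()
        common.setup_global_opts(parser)  # global -v/-q style options
        parser.add_argument('--dry-run', action='store_true')  # hypothetical flag
        options = common.parse_args(parser)  # also populates common.options
        config = common.read_config()        # loads config.yml once and caches it
        if options.dry_run:
            return
        # ... subcommand-specific work using config ...
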
- - """ - global config - - if config is not None: - return config - - read_config() - - # make sure these values are available in common.py even if they didn't - # declare global in a scope - common.config = config - - return config - - -def get_cachedir(): - cachedir = config and config.get('cachedir') - if cachedir and os.path.exists(cachedir): - return Path(cachedir) - else: - return Path(tempfile.mkdtemp()) - def regsub_file(pattern, repl, path): with open(path, 'rb') as f: @@ -531,457 +271,150 @@ def regsub_file(pattern, repl, path): f.write(text) -def config_type_check(path, data): - if Path(path).name == 'mirrors.yml': - expected_type = list - else: - expected_type = dict - if expected_type == dict: - if not isinstance(data, dict): - msg = _('{path} is not "key: value" dict, but a {datatype}!') - raise TypeError(msg.format(path=path, datatype=type(data).__name__)) - elif not isinstance(data, expected_type): - msg = _('{path} is not {expected_type}, but a {datatype}!') - raise TypeError( - msg.format( - path=path, - expected_type=expected_type.__name__, - datatype=type(data).__name__, - ) - ) +def read_config(opts, config_file='config.py'): + """Read the repository config - -class _Config(dict): - def __init__(self, default={}): - super(_Config, self).__init__(default) - self.loaded = {} - - def lazyget(self, key): - if key not in self.loaded: - value = super(_Config, self).__getitem__(key) - - if key == 'serverwebroot': - roots = parse_list_of_dicts(value) - rootlist = [] - for d in roots: - # since this is used with rsync, where trailing slashes have - # meaning, ensure there is always a trailing slash - rootstr = d.get('url') - if not rootstr: - logging.error('serverwebroot: has blank value!') - continue - if rootstr[-1] != '/': - rootstr += '/' - d['url'] = rootstr.replace('//', '/') - rootlist.append(d) - self.loaded[key] = rootlist - - elif key == 'servergitmirrors': - self.loaded[key] = parse_list_of_dicts(value) - - elif isinstance(value, dict) and 'env' in value and len(value) == 1: - var = value['env'] - if var in os.environ: - self.loaded[key] = os.getenv(var) - else: - logging.error( - _( - 'Environment variable {var} from {configname} is not set!' - ).format(var=value['env'], configname=key) - ) - self.loaded[key] = None - else: - self.loaded[key] = value - - return self.loaded[key] - - def __getitem__(self, key): - return self.lazyget(key) - - def get(self, key, default=None, /): - try: - return self.lazyget(key) - except KeyError: - return default - - -def read_config(): - """Read the repository config. - - The config is read from config.yml, which is in the current + The config is read from config_file, which is in the current directory when any of the repo management commands are used. If - there is a local metadata file in the git repo, then the config is + there is a local metadata file in the git repo, then config.py is not required, just use defaults. - config.yml is the preferred form because no code is executed when - reading it. config.py is deprecated and no longer supported. - - config.yml requires ASCII or UTF-8 encoding because this code does - not auto-detect the file's encoding. That is left up to the YAML - library. YAML allows ASCII, UTF-8, UTF-16, and UTF-32 encodings. - Since it is a good idea to manage config.yml (WITHOUT PASSWORDS!) - in git, it makes sense to use a globally standard encoding. 
- """ - global config + global config, options if config is not None: return config + options = opts + config = {} - if os.path.exists(CONFIG_FILE): - logging.debug(_("Reading '{config_file}'").format(config_file=CONFIG_FILE)) - with open(CONFIG_FILE, encoding='utf-8') as fp: - config = yaml.load(fp) - if not config: - config = {} - config_type_check(CONFIG_FILE, config) + if os.path.isfile(config_file): + logging.debug(_("Reading '{config_file}'").format(config_file=config_file)) + with io.open(config_file, "rb") as f: + code = compile(f.read(), config_file, 'exec') + exec(code, None, config) # nosec TODO switch to YAML file + else: + logging.warning(_("No 'config.py' found, using defaults.")) - old_config_file = 'config.py' - if os.path.exists(old_config_file): - logging.warning( - _("""Ignoring deprecated {oldfile}, use {newfile}!""").format( - oldfile=old_config_file, newfile=CONFIG_FILE - ) - ) + for k in ('mirrors', 'install_list', 'uninstall_list', 'serverwebroot', 'servergitroot'): + if k in config: + if not type(config[k]) in (str, list, tuple): + logging.warning( + _("'{field}' will be in random order! Use () or [] brackets if order is important!") + .format(field=k)) # smartcardoptions must be a list since its command line args for Popen - smartcardoptions = config.get('smartcardoptions') - if isinstance(smartcardoptions, str): - sco_items = re.sub(r'\s+', r' ', config['smartcardoptions']).split(' ') - config['smartcardoptions'] = [i.strip() for i in sco_items if i] - elif not smartcardoptions and 'keystore' in config and config['keystore'] == 'NONE': + if 'smartcardoptions' in config: + config['smartcardoptions'] = config['smartcardoptions'].split(' ') + elif 'keystore' in config and config['keystore'] == 'NONE': # keystore='NONE' means use smartcard, these are required defaults config['smartcardoptions'] = ['-storetype', 'PKCS11', '-providerName', 'SunPKCS11-OpenSC', '-providerClass', 'sun.security.pkcs11.SunPKCS11', '-providerArg', 'opensc-fdroid.cfg'] + if any(k in config for k in ["keystore", "keystorepass", "keypass"]): + st = os.stat(config_file) + if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO: + logging.warning(_("unsafe permissions on '{config_file}' (should be 0600)!") + .format(config_file=config_file)) + fill_config_defaults(config) + for k in ["repo_description", "archive_description"]: + if k in config: + config[k] = clean_description(config[k]) + + if 'serverwebroot' in config: + if isinstance(config['serverwebroot'], str): + roots = [config['serverwebroot']] + elif all(isinstance(item, str) for item in config['serverwebroot']): + roots = config['serverwebroot'] + else: + raise TypeError(_('only accepts strings, lists, and tuples')) + rootlist = [] + for rootstr in roots: + # since this is used with rsync, where trailing slashes have + # meaning, ensure there is always a trailing slash + if rootstr[-1] != '/': + rootstr += '/' + rootlist.append(rootstr.replace('//', '/')) + config['serverwebroot'] = rootlist + if 'servergitmirrors' in config: - limit = config['git_mirror_size_limit'] - config['git_mirror_size_limit'] = parse_human_readable_size(limit) + if isinstance(config['servergitmirrors'], str): + roots = [config['servergitmirrors']] + elif all(isinstance(item, str) for item in config['servergitmirrors']): + roots = config['servergitmirrors'] + else: + raise TypeError(_('only accepts strings, lists, and tuples')) + config['servergitmirrors'] = roots - if 'repo_url' in config: - if not config['repo_url'].endswith('/repo'): - raise 
FDroidException(_('repo_url needs to end with /repo')) - - if 'archive_url' in config: - if not config['archive_url'].endswith('/archive'): - raise FDroidException(_('archive_url needs to end with /archive')) - - confignames_to_delete = set() - for configname, dictvalue in config.items(): - if configname == 'java_paths': - new = dict() - for k, v in dictvalue.items(): - new[str(k)] = v - config[configname] = new - elif configname in ('ndk_paths', 'java_paths', 'char_limits', 'keyaliases'): - continue - elif isinstance(dictvalue, dict): - for k, v in dictvalue.items(): - if k != 'env': - confignames_to_delete.add(configname) - logging.error(_('Unknown entry {key} in {configname}') - .format(key=k, configname=configname)) - - for configname in confignames_to_delete: - del config[configname] - - if any(k in config and config.get(k) for k in ["keystorepass", "keypass"]): - st = os.stat(CONFIG_FILE) - if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO: - logging.warning( - _("unsafe permissions on '{config_file}' (should be 0600)!").format( - config_file=CONFIG_FILE - ) - ) - - config = _Config(config) return config -def expand_env_dict(s): - """Expand env var dict to a string value. - - {env: varName} syntax can be used to replace any string value in the - config with the value of an environment variable "varName". This - allows for secrets management when commiting the config file to a - public git repo. - - """ - if not s or type(s) not in (str, dict): - return - if isinstance(s, dict): - if 'env' not in s or len(s) > 1: - raise TypeError(_('Only accepts a single key "env"')) - var = s['env'] - s = os.getenv(var) - if not s: - logging.error( - _('Environment variable {{env: {var}}} is not set!').format(var=var) - ) - return - return os.path.expanduser(s) - - -def parse_list_of_dicts(l_of_d): - """Parse config data structure that is a list of dicts of strings. - - The value can be specified as a string, list of strings, or list of dictionary maps - where the values are strings. 
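The three accepted shapes for these mirror-style settings are easier to see in isolation. The sketch below mirrors the intent of parse_list_of_dicts, but simplified: no {env: ...} expansion and no ~ expansion, so it is an illustration of the accepted input forms rather than the real function:

def normalize_urls(value):
    # A plain string, a list of strings, or a list of dicts all end up as
    # a list of dicts with at least a 'url' key.
    if isinstance(value, str):
        return [{'url': value}]
    if all(isinstance(item, str) for item in value):
        return [{'url': item} for item in value]
    if all(isinstance(item, dict) for item in value):
        return value
    raise TypeError('only accepts strings, lists, and tuples')

assert normalize_urls('https://example.com/fdroid/repo') \
    == [{'url': 'https://example.com/fdroid/repo'}]
assert normalize_urls(['https://a/repo', 'https://b/repo']) \
    == [{'url': 'https://a/repo'}, {'url': 'https://b/repo'}]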
- - """ - if isinstance(l_of_d, str): - return [{"url": expand_env_dict(l_of_d)}] - if isinstance(l_of_d, dict): - return [{"url": expand_env_dict(l_of_d)}] - if all(isinstance(item, str) for item in l_of_d): - return [{'url': expand_env_dict(i)} for i in l_of_d] - if all(isinstance(item, dict) for item in l_of_d): - for item in l_of_d: - item['url'] = expand_env_dict(item['url']) - return l_of_d - raise TypeError(_('only accepts strings, lists, and tuples')) - - -def get_mirrors(url, filename=None): - """Get list of dict entries for mirrors, appending filename if provided.""" - # TODO use cached index if it exists - if isinstance(url, str): - url = urlsplit(url) - - if url.netloc == 'f-droid.org': - mirrors = FDROIDORG_MIRRORS - else: - mirrors = parse_list_of_dicts(url.geturl()) - - if filename: - return append_filename_to_mirrors(filename, mirrors) - else: - return mirrors - - -def append_filename_to_mirrors(filename, mirrors): - """Append the filename to all "url" entries in the mirrors dict.""" - appended = copy.deepcopy(mirrors) - for mirror in appended: - parsed = urlparse(mirror['url']) - mirror['url'] = urlunparse( - parsed._replace(path=os.path.join(parsed.path, filename)) - ) - return appended - - -def file_entry(filename, hash_value=None): - meta = {} - meta["name"] = "/" + Path(filename).as_posix().split("/", 1)[1] - meta["sha256"] = hash_value or sha256sum(filename) - meta["size"] = os.stat(filename).st_size - return meta - - -def load_localized_config(name, repodir): - """Load localized config files and put them into internal dict format. - - This will maintain the order as came from the data files, e.g - YAML. The locale comes from unsorted paths on the filesystem, so - that is separately sorted. - - """ - ret = dict() - found_config_file = False - for f in Path().glob("config/**/{name}.yml".format(name=name)): - found_config_file = True - locale = f.parts[1] - if len(f.parts) == 2: - locale = DEFAULT_LOCALE - with open(f, encoding="utf-8") as fp: - elem = yaml.load(fp) - if not isinstance(elem, dict): - msg = _('{path} is not "key: value" dict, but a {datatype}!') - raise TypeError(msg.format(path=f, datatype=type(elem).__name__)) - for afname, field_dict in elem.items(): - if afname not in ret: - ret[afname] = dict() - for key, value in field_dict.items(): - if key not in ret[afname]: - ret[afname][key] = dict() - if key == "icon": - icons_dir = os.path.join(repodir, 'icons') - if not os.path.exists(icons_dir): - os.makedirs(icons_dir, exist_ok=True) - src = os.path.join("config", value) - dest = os.path.join(icons_dir, os.path.basename(src)) - if not os.path.exists(dest) or not filecmp.cmp(src, dest): - shutil.copy2(src, dest) - ret[afname][key][locale] = file_entry( - os.path.join(icons_dir, value) - ) - else: - ret[afname][key][locale] = value - - if not found_config_file: - for f in Path().glob("config/*.yml"): - if f.stem not in CONFIG_NAMES: - msg = _('{path} is not a standard config file!').format(path=f) - m = difflib.get_close_matches(f.stem, CONFIG_NAMES, 1) - if m: - msg += ' ' - msg += _('Did you mean config/{name}.yml?').format(name=m[0]) - logging.error(msg) - - for elem in ret.values(): - for afname in elem: - elem[afname] = {locale: v for locale, v in sorted(elem[afname].items())} - return ret - - -def parse_human_readable_size(size): - units = { - 'b': 1, - 'kb': 1000, 'mb': 1000**2, 'gb': 1000**3, 'tb': 1000**4, - 'kib': 1024, 'mib': 1024**2, 'gib': 1024**3, 'tib': 1024**4, - } - try: - return int(float(size)) - except (ValueError, TypeError) as exc: - 
if type(size) != str: - raise ValueError(_('Could not parse size "{size}", wrong type "{type}"') - .format(size=size, type=type(size))) from exc - s = size.lower().replace(' ', '') - m = re.match(r'^(?P[0-9][0-9.]*) *(?P' + r'|'.join(units.keys()) + r')$', s) - if not m: - raise ValueError(_('Not a valid size definition: "{}"').format(size)) from exc - return int(float(m.group("value")) * units[m.group("unit")]) - - -def get_dir_size(path_or_str): - """Get the total size of all files in the given directory.""" - if isinstance(path_or_str, str): - path_or_str = Path(path_or_str) - return sum(f.stat().st_size for f in path_or_str.glob('**/*') if f.is_file()) - - def assert_config_keystore(config): """Check weather keystore is configured correctly and raise exception if not.""" + nosigningkey = False if 'repo_keyalias' not in config: nosigningkey = True - logging.critical(_("'repo_keyalias' not found in config.yml!")) + logging.critical(_("'repo_keyalias' not found in config.py!")) if 'keystore' not in config: nosigningkey = True - logging.critical(_("'keystore' not found in config.yml!")) - elif config['keystore'] == 'NONE': - if not config.get('smartcardoptions'): - nosigningkey = True - logging.critical(_("'keystore' is NONE and 'smartcardoptions' is blank!")) + logging.critical(_("'keystore' not found in config.py!")) elif not os.path.exists(config['keystore']): nosigningkey = True logging.critical("'" + config['keystore'] + "' does not exist!") if 'keystorepass' not in config: nosigningkey = True - logging.critical(_("'keystorepass' not found in config.yml!")) - if 'keypass' not in config and config.get('keystore') != 'NONE': + logging.critical(_("'keystorepass' not found in config.py!")) + if 'keypass' not in config: nosigningkey = True - logging.critical(_("'keypass' not found in config.yml!")) + logging.critical(_("'keypass' not found in config.py!")) if nosigningkey: raise FDroidException("This command requires a signing key, " + "you can create one using: fdroid update --create-key") -def find_apksigner(config): - """Search for the best version apksigner and adds it to the config. 
- - Returns the best version of apksigner following this algorithm: - - * use config['apksigner'] if set - * try to find apksigner in path - * find apksigner in build-tools starting from newest installed - going down to MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION - - Returns - ------- - str - path to apksigner or None if no version is found - - """ - command = 'apksigner' - if command in config: - return - - tmp = find_command(command) - if tmp is not None: - config[command] = tmp - return - - build_tools_path = os.path.join(config.get('sdk_path', ''), 'build-tools') - if not os.path.isdir(build_tools_path): - return - for f in sorted(os.listdir(build_tools_path), reverse=True): - if not os.path.isdir(os.path.join(build_tools_path, f)): - continue - try: - version = LooseVersion(f) - if version >= LooseVersion('33') and version < LooseVersion('34'): - logging.warning( - _('apksigner in build-tools;{version} passes APKs with invalid v3 signatures, ignoring.').format( - version=version - ) - ) - continue - if version < LooseVersion(MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION): - logging.debug("Local Android SDK only has outdated apksigner versions") - return - except TypeError: - continue - if os.path.exists(os.path.join(build_tools_path, f, 'apksigner')): - apksigner = os.path.join(build_tools_path, f, 'apksigner') - logging.info("Using %s " % apksigner) - config['apksigner'] = apksigner - return - - def find_sdk_tools_cmd(cmd): - """Find a working path to a tool from the Android SDK.""" + '''find a working path to a tool from the Android SDK''' + tooldirs = [] if config is not None and 'sdk_path' in config and os.path.exists(config['sdk_path']): # try to find a working path to this command, in all the recent possible paths - build_tools = os.path.join(config['sdk_path'], 'build-tools') - if os.path.isdir(build_tools): - for f in sorted(os.listdir(build_tools), reverse=True): - if os.path.isdir(os.path.join(build_tools, f)): - tooldirs.append(os.path.join(build_tools, f)) + if 'build_tools' in config: + build_tools = os.path.join(config['sdk_path'], 'build-tools') + # if 'build_tools' was manually set and exists, check only that one + configed_build_tools = os.path.join(build_tools, config['build_tools']) + if os.path.exists(configed_build_tools): + tooldirs.append(configed_build_tools) + else: + # no configed version, so hunt known paths for it + for f in sorted(os.listdir(build_tools), reverse=True): + if os.path.isdir(os.path.join(build_tools, f)): + tooldirs.append(os.path.join(build_tools, f)) + tooldirs.append(build_tools) sdk_tools = os.path.join(config['sdk_path'], 'tools') if os.path.exists(sdk_tools): tooldirs.append(sdk_tools) - tooldirs.append(os.path.join(sdk_tools, 'bin')) sdk_platform_tools = os.path.join(config['sdk_path'], 'platform-tools') if os.path.exists(sdk_platform_tools): tooldirs.append(sdk_platform_tools) - sdk_build_tools = glob.glob(os.path.join(config['sdk_path'], 'build-tools', '*.*')) - if sdk_build_tools: - tooldirs.append(sorted(sdk_build_tools)[-1]) # use most recent version - if os.path.exists('/usr/bin'): - tooldirs.append('/usr/bin') + tooldirs.append('/usr/bin') for d in tooldirs: path = os.path.join(d, cmd) - if not os.path.isfile(path): - path += '.exe' if os.path.isfile(path): if cmd == 'aapt': test_aapt_version(path) return path # did not find the command, exit with error message - test_sdk_exists(config) # ignore result so None is never returned - raise FDroidException(_("Android SDK tool {cmd} not found!").format(cmd=cmd)) + ensure_build_tools_exists(config) 
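A rough standalone sketch of the build-tools search described above: directory names are walked newest-first (reverse-sorted names, as in the code), 33.x is skipped because of its known v3-signature problem, and the search stops once it drops below the minimum supported version. The constant value and helper name here are assumptions for illustration only:

import os
from distutils.version import LooseVersion  # fdroidserver's own LooseVersion import may differ

MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION = '30.0.0'  # assumed value for this sketch

def pick_apksigner(sdk_path):
    build_tools = os.path.join(sdk_path, 'build-tools')
    if not os.path.isdir(build_tools):
        return None
    for entry in sorted(os.listdir(build_tools), reverse=True):
        path = os.path.join(build_tools, entry, 'apksigner')
        try:
            version = LooseVersion(entry)
            if LooseVersion('33') <= version < LooseVersion('34'):
                continue  # 33.x apksigner passes APKs with invalid v3 signatures
            if version < LooseVersion(MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION):
                return None  # mirrors the early return in the code above
        except TypeError:
            continue  # directory name is not a parseable version
        if os.path.exists(path):
            return path
    return None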
def test_aapt_version(aapt): - """Check whether the version of aapt is new enough.""" + '''Check whether the version of aapt is new enough''' output = subprocess.check_output([aapt, 'version'], universal_newlines=True) if output is None or output == '': logging.error(_("'{path}' failed to execute!").format(path=aapt)) @@ -994,29 +427,25 @@ def test_aapt_version(aapt): # the Debian package has the version string like "v0.2-23.0.2" too_old = False if '.' in bugfix: - if LooseVersion(bugfix) < LooseVersion(MINIMUM_AAPT_BUILD_TOOLS_VERSION): + if LooseVersion(bugfix) < LooseVersion(MINIMUM_AAPT_VERSION): too_old = True elif LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.4062713'): too_old = True if too_old: logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-{version} or newer!") - .format(aapt=aapt, version=MINIMUM_AAPT_BUILD_TOOLS_VERSION)) + .format(aapt=aapt, version=MINIMUM_AAPT_VERSION)) else: logging.warning(_('Unknown version of aapt, might cause problems: ') + output) def test_sdk_exists(thisconfig): if 'sdk_path' not in thisconfig: - # check the 'apksigner' value in the config to see if its new enough - f = thisconfig.get('apksigner', '') - if os.path.isfile(f): - sdk_path = os.path.dirname(os.path.dirname(os.path.dirname(f))) - tmpconfig = {'sdk_path': sdk_path} - find_apksigner(tmpconfig) - if os.path.exists(tmpconfig.get('apksigner', '')): - return True - logging.error(_("'sdk_path' not set in config.yml!")) - return False + if 'aapt' in thisconfig and os.path.isfile(thisconfig['aapt']): + test_aapt_version(thisconfig['aapt']) + return True + else: + logging.error(_("'sdk_path' not set in 'config.py'!")) + return False if thisconfig['sdk_path'] == default_config['sdk_path']: logging.error(_('No Android SDK found!')) logging.error(_('You can use ANDROID_HOME to set the path to your SDK, i.e.:')) @@ -1030,50 +459,42 @@ def test_sdk_exists(thisconfig): logging.critical(_("Android SDK path '{path}' is not a directory!") .format(path=thisconfig['sdk_path'])) return False - find_apksigner(thisconfig) - if not os.path.exists(thisconfig.get('apksigner', '')): - return False return True +def ensure_build_tools_exists(thisconfig): + if not test_sdk_exists(thisconfig): + raise FDroidException(_("Android SDK not found!")) + build_tools = os.path.join(thisconfig['sdk_path'], 'build-tools') + versioned_build_tools = os.path.join(build_tools, thisconfig['build_tools']) + if not os.path.isdir(versioned_build_tools): + raise FDroidException( + _("Android build-tools path '{path}' does not exist!") + .format(path=versioned_build_tools)) + + def get_local_metadata_files(): - """Get any metadata files local to an app's source repo. + '''get any metadata files local to an app's source repo This tries to ignore anything that does not count as app metdata, - including emacs cruft ending in ~ + including emacs cruft ending in ~ and the .fdroid.key*pass.txt files. - """ + ''' return glob.glob('.fdroid.[a-jl-z]*[a-rt-z]') -def read_pkg_args(appid_versionCode_pairs, allow_version_codes=False): - """No summary. 
- - Parameters - ---------- - appids - arguments in the form of multiple appid:[versionCode] strings - - Returns - ------- - a dictionary with the set of vercodes specified for each package +def read_pkg_args(appid_versionCode_pairs, allow_vercodes=False): + """ + :param appids: arguments in the form of multiple appid:[vc] strings + :returns: a dictionary with the set of vercodes specified for each package """ vercodes = {} if not appid_versionCode_pairs: return vercodes - error = False - apk_regex = re.compile(r'_(\d+)\.apk$') for p in appid_versionCode_pairs: - # Convert the apk name to a appid:versioncode pair - p = apk_regex.sub(r':\1', p) - if allow_version_codes and ':' in p: + if allow_vercodes and ':' in p: package, vercode = p.split(':') - try: - vercode = version_code_string_to_int(vercode) - except ValueError as e: - logging.error('"%s": %s' % (p, str(e))) - error = True else: package, vercode = p, None if package not in vercodes: @@ -1082,52 +503,20 @@ def read_pkg_args(appid_versionCode_pairs, allow_version_codes=False): elif vercode and vercode not in vercodes[package]: vercodes[package] += [vercode] if vercode else [] - if error: - raise FDroidException(_("Found invalid versionCodes for some apps")) - return vercodes -def get_metadata_files(vercodes): - """ - Build a list of metadata files and raise an exception for invalid appids. - - Parameters - ---------- - vercodes - versionCodes as returned by read_pkg_args() - - Returns - ------- - List - a list of corresponding metadata/*.yml files - """ - found_invalid = False - metadatafiles = [] - for appid in vercodes.keys(): - f = Path('metadata') / ('%s.yml' % appid) - if f.exists(): - metadatafiles.append(f) - else: - found_invalid = True - logging.critical(_("No such package: %s") % appid) - if found_invalid: - raise FDroidException(_("Found invalid appids in arguments")) - return metadatafiles - - -def read_app_args(appid_versionCode_pairs, allow_version_codes=False, sort_by_time=False): - """Build a list of App instances for processing. +def read_app_args(appid_versionCode_pairs, allapps, allow_vercodes=False): + """Build a list of App instances for processing On top of what read_pkg_args does, this returns the whole app metadata, but limiting the builds list to the builds matching the - appid_versionCode_pairs and vercodes specified. If no - appid_versionCode_pairs are specified, then all App and Build instances are - returned. + appid_versionCode_pairs and vercodes specified. If no appid_versionCode_pairs are specified, then + all App and Build instances are returned. 
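For reference, the mapping produced by read_pkg_args is easiest to see with concrete arguments. In the variant that accepts version codes, both 'appid:versionCode' pairs and published APK filenames collapse into the same dict; this is a small illustration of that idea, not the exact function:

import re

apk_regex = re.compile(r'_(\d+)\.apk$')

def pkg_args_to_vercodes(args):
    # 'org.example_10.apk' is first rewritten to 'org.example:10',
    # then every 'appid[:vercode]' argument is folded into one dict.
    vercodes = {}
    for arg in args:
        arg = apk_regex.sub(r':\1', arg)
        if ':' in arg:
            appid, vercode = arg.split(':')
            vercode = int(vercode)
        else:
            appid, vercode = arg, None
        vercodes.setdefault(appid, [])
        if vercode is not None and vercode not in vercodes[appid]:
            vercodes[appid].append(vercode)
    return vercodes

assert pkg_args_to_vercodes(['org.example:10', 'org.example_11.apk', 'com.other']) \
    == {'org.example': [10, 11], 'com.other': []}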
""" - vercodes = read_pkg_args(appid_versionCode_pairs, allow_version_codes) - allapps = fdroidserver.metadata.read_metadata(vercodes, sort_by_time) + + vercodes = read_pkg_args(appid_versionCode_pairs, allow_vercodes) if not vercodes: return allapps @@ -1137,6 +526,11 @@ def read_app_args(appid_versionCode_pairs, allow_version_codes=False, sort_by_ti if appid in vercodes: apps[appid] = app + if len(apps) != len(vercodes): + for p in vercodes: + if p not in allapps: + logging.critical(_("No such package: %s") % p) + raise FDroidException(_("Found invalid appids in arguments")) if not apps: raise FDroidException(_("No packages specified")) @@ -1145,10 +539,10 @@ def read_app_args(appid_versionCode_pairs, allow_version_codes=False, sort_by_ti vc = vercodes[appid] if not vc: continue - app['Builds'] = [b for b in app.get('Builds', []) if b.versionCode in vc] - if len(app.get('Builds', [])) != len(vercodes[appid]): + app.builds = [b for b in app.builds if b.versionCode in vc] + if len(app.builds) != len(vercodes[appid]): error = True - allvcs = [b.versionCode for b in app.get('Builds', [])] + allvcs = [b.versionCode for b in app.builds] for v in vercodes[appid]: if v not in allvcs: logging.critical(_("No such versionCode {versionCode} for app {appid}") @@ -1161,23 +555,40 @@ def read_app_args(appid_versionCode_pairs, allow_version_codes=False, sort_by_ti def get_extension(filename): - """Get name and extension of filename, with extension always lower case.""" base, ext = os.path.splitext(filename) if not ext: return base, '' return base, ext.lower()[1:] +def has_extension(filename, ext): + _ignored, f_ext = get_extension(filename) + return ext == f_ext + + publish_name_regex = re.compile(r"^(.+)_([0-9]+)\.(apk|zip)$") +def clean_description(description): + 'Remove unneeded newlines and spaces from a block of description text' + returnstring = '' + # this is split up by paragraph to make removing the newlines easier + for paragraph in re.split(r'\n\n', description): + paragraph = re.sub('\r', '', paragraph) + paragraph = re.sub('\n', ' ', paragraph) + paragraph = re.sub(' {2,}', ' ', paragraph) + paragraph = re.sub(r'^\s*(\w)', r'\1', paragraph) + returnstring += paragraph + '\n\n' + return returnstring.rstrip('\n') + + def publishednameinfo(filename): filename = os.path.basename(filename) m = publish_name_regex.match(filename) try: - result = (m.group(1), int(m.group(2))) - except AttributeError as exc: - raise FDroidException(_("Invalid name for published file: %s") % filename) from exc + result = (m.group(1), m.group(2)) + except AttributeError: + raise FDroidException(_("Invalid name for published file: %s") % filename) return result @@ -1186,15 +597,13 @@ apk_release_filename_with_sigfp = re.compile(r'(?P[a-zA-Z0-9_\.]+)_(?P= 2.3 git_config = [ @@ -1570,7 +843,7 @@ class vcs_git(vcs): '-c', 'core.sshCommand=/bin/false', '-c', 'url.https://.insteadOf=ssh://', ] - for domain in ('bitbucket.org', 'github.com', 'gitlab.com', 'codeberg.org'): + for domain in ('bitbucket.org', 'github.com', 'gitlab.com'): git_config.append('-c') git_config.append('url.https://u:p@' + domain + '/.insteadOf=git@' + domain + ':') git_config.append('-c') @@ -1587,28 +860,22 @@ class vcs_git(vcs): envs=envs, cwd=cwd, output=output) def checkrepo(self): - """No summary. - - If the local directory exists, but is somehow not a git repository, + """If the local directory exists, but is somehow not a git repository, git will traverse up the directory tree until it finds one that is (i.e. 
fdroidserver) and then we'll proceed to destroy it! This is called as a safety check. """ - cmd = ['git', 'rev-parse', '--show-toplevel'] - p = FDroidPopen(cmd, cwd=self.local, output=False) + + p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False) result = p.output.rstrip() - if p.returncode > 0: - raise VCSException( - f"`{' '.join(cmd)}` failed, (in '{os.path.abspath(self.local)}') {result}" - ) - if Path(result) != Path(self.local).resolve(): - raise VCSException(f"Repository mismatch ('{self.local}' != '{result}')") + if not result.endswith(self.local): + raise VCSException('Repository mismatch') def gotorevisionx(self, rev): if not os.path.exists(self.local): # Brand new checkout - p = self.git(['clone', '--', self.remote, str(self.local)]) + p = self.git(['clone', '--', self.remote, self.local]) if p.returncode != 0: self.clone_failed = True raise VCSException("Git clone failed", p.output) @@ -1618,26 +885,20 @@ class vcs_git(vcs): # Discard any working tree changes p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive', 'git', 'reset', '--hard'], cwd=self.local, output=False) - if p.returncode != 0: - logging.debug("Git submodule reset failed (ignored) {output}".format(output=p.output)) - p = FDroidPopen(['git', 'reset', '--hard'], cwd=self.local, output=False) if p.returncode != 0: raise VCSException(_("Git reset failed"), p.output) # Remove untracked files now, in case they're tracked in the target # revision (it happens!) p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive', 'git', 'clean', '-dffx'], cwd=self.local, output=False) - if p.returncode != 0: - logging.debug("Git submodule cleanup failed (ignored) {output}".format(output=p.output)) - p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False) if p.returncode != 0: raise VCSException(_("Git clean failed"), p.output) if not self.refreshed: # Get latest commits and tags from remote - p = self.git(['fetch', '--prune', '--prune-tags', '--force', 'origin'], cwd=self.local) + p = self.git(['fetch', 'origin'], cwd=self.local) if p.returncode != 0: raise VCSException(_("Git fetch failed"), p.output) - p = self.git(['fetch', '--prune', '--tags', '--force', 'origin'], output=False, cwd=self.local) + p = self.git(['fetch', '--prune', '--tags', 'origin'], output=False, cwd=self.local) if p.returncode != 0: raise VCSException(_("Git fetch failed"), p.output) # Recreate origin/HEAD as git clone would do it, in case it disappeared @@ -1645,14 +906,12 @@ class vcs_git(vcs): if p.returncode != 0: lines = p.output.splitlines() if 'Multiple remote HEAD branches' not in lines[0]: - logging.warning(_("Git remote set-head failed: \"%s\"") % p.output.strip()) - else: - branch = lines[1].split(' ')[-1] - p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--', branch], - cwd=self.local, output=False) - if p2.returncode != 0: - logging.warning(_("Git remote set-head failed: \"%s\"") - % p.output.strip() + '\n' + p2.output.strip()) + raise VCSException(_("Git remote set-head failed"), p.output) + branch = lines[1].split(' ')[-1] + p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--', branch], + cwd=self.local, output=False) + if p2.returncode != 0: + raise VCSException(_("Git remote set-head failed"), p.output + '\n' + p2.output) self.refreshed = True # origin/HEAD is the HEAD of the remote, e.g. the "default branch" on # a github repo. Most of the time this is the same as origin/master. 
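The safety check above boils down to comparing git's idea of the repository top level with the expected checkout directory before any destructive cleanup runs. A minimal sketch of that comparison using resolved paths (an assumed helper built on subprocess rather than FDroidPopen):

import subprocess
from pathlib import Path

def assert_is_expected_repo(local):
    # Ask git for the top-level working tree; if we are actually inside a
    # parent repository (e.g. fdroidserver itself), the paths will differ
    # and we must not run reset/clean commands here.
    result = subprocess.run(
        ['git', 'rev-parse', '--show-toplevel'],
        cwd=local, capture_output=True, text=True,
    )
    toplevel = result.stdout.strip()
    if result.returncode != 0:
        raise RuntimeError('git rev-parse failed in %s: %s' % (local, result.stderr))
    if Path(toplevel) != Path(local).resolve():
        raise RuntimeError("Repository mismatch ('%s' != '%s')" % (local, toplevel))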
@@ -1687,33 +946,23 @@ class vcs_git(vcs): if p.returncode != 0: raise VCSException(_("Git submodule update failed"), p.output) - def deinitsubmodules(self): - self.checkrepo() - p = FDroidPopen(['git', 'submodule', 'deinit', '--all', '--force'], cwd=self.local, output=False) - if p.returncode != 0: - raise VCSException(_("Git submodule deinit failed"), p.output) - def _gettags(self): self.checkrepo() p = FDroidPopen(['git', 'tag'], cwd=self.local, output=False) return p.output.splitlines() - def latesttags(self): - """Return a list of latest tags.""" - self.checkrepo() - return [tag.name for tag in sorted( - git.Repo(self.local).tags, - key=lambda t: t.commit.committed_date, - reverse=True - )] + tag_format = re.compile(r'tag: ([^),]*)') - def getref(self, revname='HEAD'): + def latesttags(self): self.checkrepo() - repo = git.Repo(self.local) - try: - return repo.commit(revname).hexsha - except git.BadName: - return None + p = FDroidPopen(['git', 'log', '--tags', + '--simplify-by-decoration', '--pretty=format:%d'], + cwd=self.local, output=False) + tags = [] + for line in p.output.splitlines(): + for tag in self.tag_format.findall(line): + tags.append(tag) + return tags class vcs_gitsvn(vcs): @@ -1725,9 +974,7 @@ class vcs_gitsvn(vcs): return ['git', 'svn', '--version'] def checkrepo(self): - """No summary. - - If the local directory exists, but is somehow not a git repository, + """If the local directory exists, but is somehow not a git repository, git will traverse up the directory tree until it finds one that is (i.e. fdroidserver) and then we'll proceed to destory it! This is called as a safety check. @@ -1735,11 +982,11 @@ class vcs_gitsvn(vcs): """ p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False) result = p.output.rstrip() - if Path(result) != Path(self.local).resolve(): + if not result.endswith(self.local): raise VCSException('Repository mismatch') def git(self, args, envs=dict(), cwd=None, output=True): - """Prevent git fetch/clone/submodule from hanging at the username/password prompt. + '''Prevent git fetch/clone/submodule from hanging at the username/password prompt AskPass is set to /bin/true to let the process try to connect without a username/password. @@ -1748,7 +995,7 @@ class vcs_gitsvn(vcs): (supported in git >= 2.3). This protects against CVE-2017-1000117. 
- """ + ''' git_config = [ '-c', 'core.askpass=/bin/true', '-c', 'core.sshCommand=/bin/false', @@ -1785,18 +1032,15 @@ class vcs_gitsvn(vcs): raise VCSException(_('HTTPS must be used with Subversion URLs!')) # git-svn sucks at certificate validation, this throws useful errors: - try: - import requests - r = requests.head(remote, timeout=300) - r.raise_for_status() - except Exception as e: - raise VCSException('SVN certificate pre-validation failed: ' + str(e)) from e + import requests + r = requests.head(remote) + r.raise_for_status() location = r.headers.get('location') if location and not location.startswith('https://'): raise VCSException(_('Invalid redirect to non-HTTPS: {before} -> {after} ') .format(before=remote, after=location)) - gitsvn_args.extend(['--', remote, str(self.local)]) + gitsvn_args.extend(['--', remote, self.local]) p = self.git(gitsvn_args) if p.returncode != 0: self.clone_failed = True @@ -1878,9 +1122,9 @@ class vcs_gitsvn(vcs): if os.path.isdir(d): return os.listdir(d) - def getref(self, revname='HEAD'): + def getref(self): self.checkrepo() - p = FDroidPopen(['git', 'svn', 'find-rev', revname], cwd=self.local, output=False) + p = FDroidPopen(['git', 'svn', 'find-rev', 'HEAD'], cwd=self.local, output=False) if p.returncode != 0: return None return p.output.strip() @@ -1896,17 +1140,17 @@ class vcs_hg(vcs): def gotorevisionx(self, rev): if not os.path.exists(self.local): - p = FDroidPopen(['hg', 'clone', '--ssh', '/bin/false', '--', self.remote, str(self.local)], + p = FDroidPopen(['hg', 'clone', '--ssh', '/bin/false', '--', self.remote, self.local], output=False) if p.returncode != 0: self.clone_failed = True raise VCSException("Hg clone failed", p.output) else: - p = FDroidPopen(['hg', 'status', '-uiS'], cwd=self.local, output=False) + p = FDroidPopen(['hg', 'status', '-uS'], cwd=self.local, output=False) if p.returncode != 0: raise VCSException("Hg status failed", p.output) for line in p.output.splitlines(): - if not line.startswith('? ') and not line.startswith('I '): + if not line.startswith('? 
'): raise VCSException("Unexpected output from hg status -uS: " + line) FDroidPopen(['rm', '-rf', '--', line[2:]], cwd=self.local, output=False) if not self.refreshed: @@ -1921,6 +1165,16 @@ class vcs_hg(vcs): p = FDroidPopen(['hg', 'update', '-C', '--', rev], cwd=self.local, output=False) if p.returncode != 0: raise VCSException("Hg checkout of '%s' failed" % rev, p.output) + p = FDroidPopen(['hg', 'purge', '--all'], cwd=self.local, output=False) + # Also delete untracked files, we have to enable purge extension for that: + if "'purge' is provided by the following extension" in p.output: + with open(os.path.join(self.local, '.hg', 'hgrc'), "a") as myfile: + myfile.write("\n[extensions]\nhgext.purge=\n") + p = FDroidPopen(['hg', 'purge', '--all'], cwd=self.local, output=False) + if p.returncode != 0: + raise VCSException("HG purge failed", p.output) + elif p.returncode != 0: + raise VCSException("HG purge failed", p.output) def _gettags(self): p = FDroidPopen(['hg', 'tags', '-q'], cwd=self.local, output=False) @@ -1936,7 +1190,7 @@ class vcs_bzr(vcs): return ['bzr', '--version'] def bzr(self, args, envs=dict(), cwd=None, output=True): - """Prevent bzr from ever using SSH to avoid security vulns.""" + '''Prevent bzr from ever using SSH to avoid security vulns''' envs.update({ 'BZR_SSH': 'false', }) @@ -1944,7 +1198,7 @@ class vcs_bzr(vcs): def gotorevisionx(self, rev): if not os.path.exists(self.local): - p = self.bzr(['branch', self.remote, str(self.local)], output=False) + p = self.bzr(['branch', self.remote, self.local], output=False) if p.returncode != 0: self.clone_failed = True raise VCSException("Bzr branch failed", p.output) @@ -1980,11 +1234,7 @@ def unescape_string(string): def retrieve_string(app_dir, string, xmlfiles=None): - if string.startswith('@string/'): - name = string[len('@string/'):] - elif string.startswith('${'): - return '' # Gradle variable - else: + if not string.startswith('@string/'): return unescape_string(string) if xmlfiles is None: @@ -1997,20 +1247,18 @@ def retrieve_string(app_dir, string, xmlfiles=None): if os.path.basename(root) == 'values': xmlfiles += [os.path.join(root, x) for x in files if x.endswith('.xml')] + name = string[len('@string/'):] + def element_content(element): if element.text is None: return "" s = XMLElementTree.tostring(element, encoding='utf-8', method='text') return s.decode('utf-8').strip() - for path in sorted(xmlfiles): + for path in xmlfiles: if not os.path.isfile(path): continue - try: - xml = parse_xml(path) - except (XMLElementTree.ParseError, ValueError): - logging.warning(_("Problem with xml at '{path}'").format(path=path)) - continue + xml = parse_xml(path) element = xml.find('string[@name="' + name + '"]') if element is not None: content = element_content(element) @@ -2023,36 +1271,31 @@ def retrieve_string_singleline(app_dir, string, xmlfiles=None): return retrieve_string(app_dir, string, xmlfiles).replace('\n', ' ').strip() -def manifest_paths(app_dir, flavors): - """Return list of existing files that will be used to find the highest vercode.""" - possible_manifests = \ - [Path(app_dir) / 'AndroidManifest.xml', - Path(app_dir) / 'src/main/AndroidManifest.xml', - Path(app_dir) / 'src/AndroidManifest.xml', - Path(app_dir) / 'build.gradle', - Path(app_dir) / 'build-extras.gradle', - Path(app_dir) / 'build.gradle.kts'] +def manifest_paths(app_dir, flavours): + '''Return list of existing files that will be used to find the highest vercode''' - for flavor in flavors: - if flavor == 'yes': + possible_manifests = \ + 
[os.path.join(app_dir, 'AndroidManifest.xml'), + os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'), + os.path.join(app_dir, 'src', 'AndroidManifest.xml'), + os.path.join(app_dir, 'build.gradle')] + + for flavour in flavours: + if flavour == 'yes': continue possible_manifests.append( - Path(app_dir) / 'src' / flavor / 'AndroidManifest.xml') + os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml')) - return [path for path in possible_manifests if path.is_file()] + return [path for path in possible_manifests if os.path.isfile(path)] -def fetch_real_name(app_dir, flavors): - """Retrieve the package name. Returns the name, or None if not found.""" - for path in manifest_paths(app_dir, flavors): - if not path.suffix == '.xml' or not path.is_file(): - continue - logging.debug("fetch_real_name: Checking manifest at %s" % path) - try: - xml = parse_xml(path) - except (XMLElementTree.ParseError, ValueError): - logging.warning(_("Problem with xml at '{path}'").format(path=path)) +def fetch_real_name(app_dir, flavours): + '''Retrieve the package name. Returns the name, or None if not found.''' + for path in manifest_paths(app_dir, flavours): + if not has_extension(path, 'xml') or not os.path.isfile(path): continue + logging.debug("fetch_real_name: Checking manifest at " + path) + xml = parse_xml(path) app = xml.find('application') if app is None: continue @@ -2105,11 +1348,11 @@ def remove_debuggable_flags(root_dir): os.path.join(root, 'AndroidManifest.xml')) -vcsearch_g = re.compile(r'''\b[Vv]ersionCode\s*=?\s*["'(]*([0-9][0-9_]*)["')]*''').search -vnsearch_g = re.compile(r'''\b[Vv]ersionName\s*=?\s*\(?(["'])((?:(?=(\\?))\3.)*?)\1''').search -vnssearch_g = re.compile(r'''\b[Vv]ersionNameSuffix\s*=?\s*(["'])((?:(?=(\\?))\3.)*?)\1''').search -psearch_g = re.compile(r'''\b(packageName|applicationId|namespace)\s*=*\s*["']([^"']+)["']''').search -fsearch_g = re.compile(r'''\b(applicationIdSuffix)\s*=*\s*["']([^"']+)["']''').search +vcsearch_g = re.compile(r'''.*[Vv]ersionCode\s*=?\s*["']*([0-9]+)["']*''').search +vnsearch_g = re.compile(r'''.*[Vv]ersionName\s*=?\s*(["'])((?:(?=(\\?))\3.)*?)\1.*''').search +vnssearch_g = re.compile(r'''.*[Vv]ersionNameSuffix\s*=?\s*(["'])((?:(?=(\\?))\3.)*?)\1.*''').search +psearch_g = re.compile(r'''.*(packageName|applicationId)\s*=*\s*["']([^"']+)["'].*''').search +fsearch_g = re.compile(r'''.*(applicationIdSuffix)\s*=*\s*["']([^"']+)["'].*''').search def app_matches_packagename(app, package): @@ -2122,15 +1365,12 @@ def app_matches_packagename(app, package): def parse_androidmanifests(paths, app): - """Extract some information from the AndroidManifest.xml at the given path. - + """ + Extract some information from the AndroidManifest.xml at the given path. Returns (version, vercode, package), any or all of which might be None. All values returned are strings. - - Android Studio recommends "you use UTF-8 encoding whenever possible", so - this code assumes the files use UTF-8. 
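The module-level search helpers defined just above are what pull versionCode and the application ID out of raw build.gradle lines. A quick illustration using the simpler pattern variants from this hunk, on made-up gradle lines:

import re

vcsearch = re.compile(r'''.*[Vv]ersionCode\s*=?\s*["']*([0-9]+)["']*''').search
psearch = re.compile(r'''.*(packageName|applicationId)\s*=*\s*["']([^"']+)["'].*''').search

line1 = '        versionCode 10203'
line2 = '        applicationId "org.example.app"'

m = vcsearch(line1)
assert m and m.group(1) == '10203'

m = psearch(line2)
assert m and m.group(2) == 'org.example.app'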
- https://sites.google.com/a/android.com/tools/knownissues/encoding """ + ignoreversions = app.UpdateCheckIgnore ignoresearch = re.compile(ignoreversions).search if ignoreversions else None @@ -2141,20 +1381,9 @@ def parse_androidmanifests(paths, app): max_vercode = None max_package = None - def vnsearch(line): - matches = vnsearch_g(line) - if matches and not any( - matches.group(2).startswith(s) - for s in [ - '${', # Gradle variable names - '@string/', # Strings we could not resolve - ] - ): - return matches.group(2) - return None - for path in paths: - if not path.is_file(): + + if not os.path.isfile(path): continue logging.debug(_("Parsing manifest at '{path}'").format(path=path)) @@ -2162,17 +1391,16 @@ def parse_androidmanifests(paths, app): vercode = None package = None - flavors = None + flavour = None temp_app_id = None temp_version_name = None - if len(app.get('Builds', [])) > 0 and 'gradle' in app['Builds'][-1] and app['Builds'][-1].gradle: - flavors = app['Builds'][-1].gradle + if app.builds and 'gradle' in app.builds[-1] and app.builds[-1].gradle: + flavour = app.builds[-1].gradle[-1] - if path.suffix == '.gradle' or path.name.endswith('.gradle.kts'): - with open(path, 'r', encoding='utf-8') as f: - android_plugin_file = False - inside_flavor_group = 0 - inside_required_flavor = 0 + if has_extension(path, 'gradle'): + with open(path, 'r') as f: + inside_flavour_group = 0 + inside_required_flavour = 0 for line in f: if gradle_comment.match(line): continue @@ -2183,12 +1411,12 @@ def parse_androidmanifests(paths, app): temp_app_id = matches.group(2) if "versionName" in line and not temp_version_name: - matches = vnsearch(line) + matches = vnsearch_g(line) if matches: - temp_version_name = matches + temp_version_name = matches.group(2) - if inside_flavor_group > 0: - if inside_required_flavor > 1: + if inside_flavour_group > 0: + if inside_required_flavour > 0: matches = psearch_g(line) if matches: s = matches.group(2) @@ -2203,12 +1431,11 @@ def parse_androidmanifests(paths, app): if app_matches_packagename(app, temp_app_id): package = temp_app_id - matches = vnsearch(line) + matches = vnsearch_g(line) if matches: - version = matches - + version = matches.group(2) else: - # If build.gradle contains applicationNameSuffix add it to the end of versionName + # If build.gradle contains applicationNameSuffix add it to the end of version name matches = vnssearch_g(line) if matches and temp_version_name: name_suffix = matches.group(2) @@ -2216,31 +1443,23 @@ def parse_androidmanifests(paths, app): matches = vcsearch_g(line) if matches: - vercode = version_code_string_to_int(matches.group(1)) + vercode = matches.group(1) - if inside_required_flavor > 0: if '{' in line: - inside_required_flavor += 1 + inside_required_flavour += 1 if '}' in line: - inside_required_flavor -= 1 - if inside_required_flavor == 1: - inside_required_flavor -= 1 - elif flavors: - for flavor in flavors: - if re.match(r'.*[\'"\s]{flavor}[\'"\s].*\{{.*'.format(flavor=flavor), line): - inside_required_flavor = 2 - break - if re.match(r'.*[\'"\s]{flavor}[\'"\s].*'.format(flavor=flavor), line): - inside_required_flavor = 1 - break + inside_required_flavour -= 1 + else: + if flavour and (flavour in line): + inside_required_flavour = 1 if '{' in line: - inside_flavor_group += 1 + inside_flavour_group += 1 if '}' in line: - inside_flavor_group -= 1 + inside_flavour_group -= 1 else: if "productFlavors" in line: - inside_flavor_group = 1 + inside_flavour_group = 1 if not package: matches = psearch_g(line) if matches: @@ 
-2248,41 +1467,30 @@ def parse_androidmanifests(paths, app): if app_matches_packagename(app, s): package = s if not version: - matches = vnsearch(line) + matches = vnsearch_g(line) if matches: - version = matches + version = matches.group(2) if not vercode: matches = vcsearch_g(line) if matches: - vercode = version_code_string_to_int(matches.group(1)) - if not android_plugin_file and ANDROID_PLUGIN_REGEX.match(line): - android_plugin_file = True - if android_plugin_file: - if package: - max_package = package - if version: - max_version = version - if vercode: - max_vercode = vercode - if max_package and max_version and max_vercode: - break + vercode = matches.group(1) else: try: xml = parse_xml(path) - except (XMLElementTree.ParseError, ValueError): + if "package" in xml.attrib: + s = xml.attrib["package"] + if app_matches_packagename(app, s): + package = s + if XMLNS_ANDROID + "versionName" in xml.attrib: + version = xml.attrib[XMLNS_ANDROID + "versionName"] + base_dir = os.path.dirname(path) + version = retrieve_string_singleline(base_dir, version) + if XMLNS_ANDROID + "versionCode" in xml.attrib: + a = xml.attrib[XMLNS_ANDROID + "versionCode"] + if string_is_integer(a): + vercode = a + except Exception: logging.warning(_("Problem with xml at '{path}'").format(path=path)) - continue - if "package" in xml.attrib: - s = xml.attrib["package"] - if app_matches_packagename(app, s): - package = s - if XMLNS_ANDROID + "versionName" in xml.attrib: - version = xml.attrib[XMLNS_ANDROID + "versionName"] - base_dir = os.path.dirname(path) - version = retrieve_string_singleline(base_dir, version) - if XMLNS_ANDROID + "versionCode" in xml.attrib: - vercode = version_code_string_to_int( - xml.attrib[XMLNS_ANDROID + "versionCode"]) # Remember package name, may be defined separately from version+vercode if package is None: @@ -2291,8 +1499,8 @@ def parse_androidmanifests(paths, app): logging.debug("..got package={0}, version={1}, vercode={2}" .format(package, version, vercode)) - # Always grab the package name and versionName in case they are not - # together with the highest versionCode + # Always grab the package name and version name in case they are not + # together with the highest version code if max_package is None and package is not None: max_package = package if max_version is None and version is not None: @@ -2300,7 +1508,7 @@ def parse_androidmanifests(paths, app): if vercode is not None \ and (max_vercode is None or vercode > max_vercode): - if version and (not ignoresearch or not ignoresearch(version)): + if not ignoresearch or not ignoresearch(version): if version is not None: max_version = version if vercode is not None: @@ -2314,7 +1522,7 @@ def parse_androidmanifests(paths, app): max_version = "Unknown" if max_package: - msg = _("Invalid application ID {appid}").format(appid=max_package) + msg = _("Invalid package name {0}").format(max_package) if not is_valid_package_name(max_package): raise FDroidException(msg) elif not is_strict_application_id(max_package): @@ -2324,7 +1532,7 @@ def parse_androidmanifests(paths, app): def is_valid_package_name(name): - """Check whether name is a valid fdroid package name. + """Check whether name is a valid fdroid package name APKs and manually defined package names must use a valid Java Package Name. Automatically generated package names for non-APK @@ -2336,7 +1544,7 @@ def is_valid_package_name(name): def is_strict_application_id(name): - """Check whether name is a valid Android Application ID. 
+ """Check whether name is a valid Android Application ID The Android ApplicationID is basically a Java Package Name, but with more restrictive naming rules: @@ -2345,8 +1553,6 @@ def is_strict_application_id(name): * Each segment must start with a letter. * All characters must be alphanumeric or an underscore [a-zA-Z0-9_]. - References - ---------- https://developer.android.com/studio/build/application-id """ @@ -2354,58 +1560,27 @@ def is_strict_application_id(name): and '.' in name -def parse_srclib_spec(spec): - - if type(spec) != str: - raise MetaDataException(_("can not parse scrlib spec " - "(not a string): '{}'") - .format(spec)) - - tokens = spec.split('@', 1) - if not tokens[0]: - raise MetaDataException( - _("could not parse srclib spec (no name specified): '{}'").format(spec) - ) - if len(tokens) < 2 or not tokens[1]: - raise MetaDataException( - _("could not parse srclib spec (no ref specified): '{}'").format(spec) - ) - - name = tokens[0] - ref = tokens[1] - number = None - subdir = None - - if ':' in name: - number, name = name.split(':', 1) - if '/' in name: - name, subdir = name.split('/', 1) - - return (name, ref, number, subdir) - - -def getsrclib(spec, srclib_dir, basepath=False, +def getsrclib(spec, srclib_dir, subdir=None, basepath=False, raw=False, prepare=True, preponly=False, refresh=True, build=None): """Get the specified source library. - Return the path to it. Normally this is the path to be used when + Returns the path to it. Normally this is the path to be used when referencing it, which may be a subdirectory of the actual project. If you want the base directory of the project, pass 'basepath=True'. - spec and srclib_dir are both strings, not pathlib.Path. """ number = None subdir = None - if not isinstance(spec, str): - spec = str(spec) - if not isinstance(srclib_dir, str): - spec = str(srclib_dir) if raw: name = spec ref = None else: - name, ref, number, subdir = parse_srclib_spec(spec) + name, ref = spec.split('@') + if ':' in name: + number, name = name.split(':', 1) + if '/' in name: + name, subdir = name.split('/', 1) if name not in fdroidserver.metadata.srclibs: raise VCSException('srclib ' + name + ' not found.') @@ -2415,7 +1590,7 @@ def getsrclib(spec, srclib_dir, basepath=False, sdir = os.path.join(srclib_dir, name) if not preponly: - vcs = getvcs(srclib["RepoType"], srclib["Repo"], sdir) + vcs = getvcs(srclib["Repo Type"], srclib["Repo"], sdir) vcs.srclib = (name, number, sdir) if ref: vcs.gotorevision(ref, refresh) @@ -2442,9 +1617,9 @@ def getsrclib(spec, srclib_dir, basepath=False, if prepare: if srclib["Prepare"]: - cmd = replace_config_vars("; ".join(srclib["Prepare"]), build) + cmd = replace_config_vars(srclib["Prepare"], build) - p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=libdir) + p = FDroidPopen(['bash', '-x', '-c', '--', cmd], cwd=libdir) if p.returncode != 0: raise BuildException("Error running prepare command for srclib %s" % name, p.output) @@ -2459,31 +1634,21 @@ gradle_version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True): - """Prepare the source code for a particular build. 
+ """ Prepare the source code for a particular build - Parameters - ---------- - vcs - the appropriate vcs object for the application - app - the application details from the metadata - build - the build details from the metadata - build_dir - the path to the build directory, usually 'build/app.id' - srclib_dir - the path to the source libraries directory, usually 'build/srclib' - extlib_dir - the path to the external libraries directory, usually 'build/extlib' + :param vcs: the appropriate vcs object for the application + :param app: the application details from the metadata + :param build: the build details from the metadata + :param build_dir: the path to the build directory, usually 'build/app.id' + :param srclib_dir: the path to the source libraries directory, usually 'build/srclib' + :param extlib_dir: the path to the external libraries directory, usually 'build/extlib' - Returns - ------- - root - is the root directory, which may be the same as 'build_dir' or may - be a subdirectory of it. - srclibpaths - is information on the srclibs being used + Returns the (root, srclibpaths) where: + :param root: is the root directory, which may be the same as 'build_dir' or may + be a subdirectory of it. + :param srclibpaths: is information on the srclibs being used """ + # Optionally, the actual app source can be in a subdirectory if build.subdir: root_dir = os.path.join(build_dir, build.subdir) @@ -2498,8 +1663,6 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= if build.submodules: logging.info(_("Initialising submodules")) vcs.initsubmodules() - else: - vcs.deinitsubmodules() # Check that a subdir (if we're using one) exists. This has to happen # after the checkout, since it might not exist elsewhere @@ -2508,10 +1671,10 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= # Run an init command if one is required if build.init: - cmd = replace_config_vars("; ".join(build.init), build) + cmd = replace_config_vars(build.init, build) logging.info("Running 'init' commands in %s" % root_dir) - p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-x', '-c', '--', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running init command for %s:%s" % (app.id, build.versionName), p.output) @@ -2532,7 +1695,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= if build.srclibs: logging.info("Collecting source libraries") for lib in build.srclibs: - srclibpaths.append(getsrclib(lib, srclib_dir, preponly=onserver, + srclibpaths.append(getsrclib(lib, srclib_dir, build, preponly=onserver, refresh=refresh, build=build)) for name, number, libpath in srclibpaths: @@ -2584,52 +1747,44 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= with open(path, 'w', encoding='iso-8859-1') as f: f.write(props) - flavors = [] + flavours = [] if build.build_method() == 'gradle': - flavors = build.gradle + flavours = build.gradle if build.target: n = build.target.split('-')[1] - build_gradle = os.path.join(root_dir, "build.gradle") - build_gradle_kts = build_gradle + ".kts" - if os.path.exists(build_gradle): - gradlefile = build_gradle - elif os.path.exists(build_gradle_kts): - gradlefile = build_gradle_kts - else: - raise BuildException("No gradle file found") regsub_file(r'compileSdkVersion[ =]+[0-9]+', r'compileSdkVersion %s' % n, - gradlefile) + os.path.join(root_dir, 'build.gradle')) # Remove forced 
debuggable flags remove_debuggable_flags(root_dir) - # Insert versionCode and number into the manifest if necessary + # Insert version code and number into the manifest if necessary if build.forceversion: - logging.info("Changing the versionName") - for path in manifest_paths(root_dir, flavors): + logging.info("Changing the version name") + for path in manifest_paths(root_dir, flavours): if not os.path.isfile(path): continue - if path.suffix == '.xml': + if has_extension(path, 'xml'): regsub_file(r'android:versionName="[^"]*"', r'android:versionName="%s"' % build.versionName, path) - elif path.suffix == '.gradle': + elif has_extension(path, 'gradle'): regsub_file(r"""(\s*)versionName[\s'"=]+.*""", r"""\1versionName '%s'""" % build.versionName, path) if build.forcevercode: - logging.info("Changing the versionCode") - for path in manifest_paths(root_dir, flavors): - if not path.is_file(): + logging.info("Changing the version code") + for path in manifest_paths(root_dir, flavours): + if not os.path.isfile(path): continue - if path.suffix == '.xml': + if has_extension(path, 'xml'): regsub_file(r'android:versionCode="[^"]*"', r'android:versionCode="%s"' % build.versionCode, path) - elif path.suffix == '.gradle': + elif has_extension(path, 'gradle'): regsub_file(r'versionCode[ =]+[0-9]+', r'versionCode %s' % build.versionCode, path) @@ -2665,25 +1820,19 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= if not os.path.exists(libsrc): raise BuildException("Missing extlib file {0}".format(libsrc)) shutil.copyfile(libsrc, os.path.join(libsdir, libf)) - # Add extlibs to scanignore (this is relative to the build dir root, *sigh*) - if build.subdir: - scanignorepath = os.path.join(build.subdir, 'libs', libf) - else: - scanignorepath = os.path.join('libs', libf) - if scanignorepath not in build.scanignore: - build.scanignore.append(scanignorepath) # Run a pre-build command if one is required if build.prebuild: logging.info("Running 'prebuild' commands in %s" % root_dir) - cmd = replace_config_vars("; ".join(build.prebuild), build) + cmd = replace_config_vars(build.prebuild, build) # Substitute source library paths into prebuild commands for name, number, libpath in srclibpaths: - cmd = cmd.replace('$$' + name + '$$', os.path.join(os.getcwd(), libpath)) + libpath = os.path.relpath(libpath, root_dir) + cmd = cmd.replace('$$' + name + '$$', libpath) - p = FDroidPopen(['bash', '-e', '-u', '-o', 'pipefail', '-x', '-c', '--', cmd], cwd=root_dir) + p = FDroidPopen(['bash', '-x', '-c', '--', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running prebuild command for %s:%s" % (app.id, build.versionName), p.output) @@ -2724,27 +1873,21 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= def getpaths_map(build_dir, globpaths): - """Extend via globbing the paths from a field and return them as a map from original path to resulting paths.""" + """Extend via globbing the paths from a field and return them as a map from original path to resulting paths""" paths = dict() - not_found_paths = [] for p in globpaths: p = p.strip() full_path = os.path.join(build_dir, p) full_path = os.path.normpath(full_path) - paths[p] = [r[len(str(build_dir)) + 1:] for r in glob.glob(full_path)] + paths[p] = [r[len(build_dir) + 1:] for r in glob.glob(full_path)] if not paths[p]: - not_found_paths.append(p) - return paths, not_found_paths + raise FDroidException("glob path '%s' did not match any files/dirs" % p) + return paths def getpaths(build_dir, 
globpaths): - """Extend via globbing the paths from a field and return them as a set.""" - paths_map, not_found_paths = getpaths_map(build_dir, globpaths) - if not_found_paths: - raise FDroidException( - "Some glob paths did not match any files/dirs:\n" - + "\n".join(not_found_paths) - ) + """Extend via globbing the paths from a field and return them as a set""" + paths_map = getpaths_map(build_dir, globpaths) paths = set() for k, v in paths_map.items(): for p in v: @@ -2757,7 +1900,7 @@ def natural_key(s): def check_system_clock(dt_obj, path): - """Check if system clock is updated based on provided date. + """Check if system clock is updated based on provided date If an APK has files newer than the system time, suggest updating the system clock. This is useful for offline systems, used for @@ -2774,105 +1917,136 @@ def check_system_clock(dt_obj, path): class KnownApks: - """Permanent store of existing APKs with the date they were added. + """permanent store of existing APKs with the date they were added This is currently the only way to permanently store the "updated" date of APKs. """ def __init__(self): - """Load filename/date info about previously seen APKs. + '''Load filename/date info about previously seen APKs Since the appid and date strings both will never have spaces, this is parsed as a list from the end to allow the filename to have any combo of spaces. - """ + ''' + + self.path = os.path.join('stats', 'known_apks.txt') self.apks = {} - for part in ('repo', 'archive'): - path = os.path.join(part, 'index-v2.json') - if os.path.isfile(path): - with open(path, 'r', encoding='utf-8') as f: - index = json.load(f) - for appid, data in index["packages"].items(): - for version in data["versions"].values(): - filename = version["file"]["name"][1:] - date = datetime.fromtimestamp(version["added"] // 1000, tz=timezone.utc) - self.apks[filename] = date + if os.path.isfile(self.path): + with open(self.path, 'r') as f: + for line in f: + t = line.rstrip().split(' ') + if len(t) == 2: + self.apks[t[0]] = (t[1], None) + else: + appid = t[-2] + date = datetime.strptime(t[-1], '%Y-%m-%d') + filename = line[0:line.rfind(appid) - 1] + self.apks[filename] = (appid, date) + check_system_clock(date, self.path) + self.changed = False - def recordapk(self, apkName, default_date=None): - """ - Record an APK (if it's new, otherwise does nothing). + def writeifchanged(self): + if not self.changed: + return - Returns - ------- - datetime - the date it was added as a datetime instance. - """ + if not os.path.exists('stats'): + os.mkdir('stats') + + lst = [] + for apk, app in self.apks.items(): + appid, added = app + line = apk + ' ' + appid + if added: + line += ' ' + added.strftime('%Y-%m-%d') + lst.append(line) + + with open(self.path, 'w') as f: + for line in sorted(lst, key=natural_key): + f.write(line + '\n') + + def recordapk(self, apkName, app, default_date=None): + ''' + Record an apk (if it's new, otherwise does nothing) + Returns the date it was added as a datetime instance + ''' if apkName not in self.apks: if default_date is None: - default_date = datetime.now(timezone.utc) - self.apks[apkName] = default_date - return self.apks[apkName] + default_date = datetime.utcnow() + self.apks[apkName] = (app, default_date) + self.changed = True + _ignored, added = self.apks[apkName] + return added + + def getapp(self, apkname): + """Look up information - given the 'apkname', returns (app id, date added/None). + + Or returns None for an unknown apk. 
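The parsing strategy described in the KnownApks docstring above (split from the right, because only the filename can contain spaces) is worth seeing on a concrete line. A small illustration of that idea, assuming the 'filename appid YYYY-MM-DD' line format used by known_apks.txt:

from datetime import datetime

def parse_known_apk_line(line):
    # Split from the right: the last two fields (appid, date) never contain
    # spaces, so everything before them is the filename, spaces and all.
    tokens = line.rstrip().split(' ')
    appid, datestr = tokens[-2], tokens[-1]
    filename = line[0:line.rfind(appid) - 1]
    return filename, appid, datetime.strptime(datestr, '%Y-%m-%d')

line = 'My App_10.apk org.example.app 2017-04-01'
assert parse_known_apk_line(line) == (
    'My App_10.apk', 'org.example.app', datetime(2017, 4, 1))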
+ """ + if apkname in self.apks: + return self.apks[apkname] + return None + + def getlatest(self, num): + """Get the most recent 'num' apps added to the repo, as a list of package ids with the most recent first""" + apps = {} + for apk, app in self.apks.items(): + appid, added = app + if added: + if appid in apps: + if apps[appid] > added: + apps[appid] = added + else: + apps[appid] = added + sortedapps = sorted(apps.items(), key=operator.itemgetter(1))[-num:] + lst = [app for app, _ignored in sortedapps] + lst.reverse() + return lst def get_file_extension(filename): - """Get the normalized file extension, can be blank string but never None.""" + """get the normalized file extension, can be blank string but never None""" if isinstance(filename, bytes): filename = filename.decode('utf-8') return os.path.splitext(filename)[1].lower()[1:] -def _androguard_logging_level(level=logging.ERROR): - """Tames androguard's default debug output. +def use_androguard(): + """Report if androguard is available, and config its debug logging""" - There should be no debug output when the functions are being used - via the API. Otherwise, the output is controlled by the --verbose - flag. - - To get coverage across the full range of androguard >= 3.3.5, this - includes all known logger names that are relevant. So some of - these names might not be present in the version of androguard - currently in use. - - """ - if options and options.verbose: - level = logging.WARNING - - for name in ( - 'androguard.apk', - 'androguard.axml', - 'androguard.core.api_specific_resources', - 'androguard.core.apk', - 'androguard.core.axml', - ): - logging.getLogger(name).setLevel(level) - - # some parts of androguard 4.x use loguru instead of logging try: - from loguru import logger - logger.remove() + import androguard + if use_androguard.show_path: + logging.debug(_('Using androguard from "{path}"').format(path=androguard.__file__)) + use_androguard.show_path = False + if options and options.verbose: + logging.getLogger("androguard.axml").setLevel(logging.INFO) + return True except ImportError: - pass + return False -def get_androguard_APK(apkfile, skip_analysis=False): +use_androguard.show_path = True + + +def _get_androguard_APK(apkfile): try: - # these were moved in androguard 4.0 - from androguard.core.apk import APK - except ImportError: from androguard.core.bytecodes.apk import APK - _androguard_logging_level() + except ImportError: + raise FDroidException("androguard library is not installed and aapt not present") - return APK(apkfile, skip_analysis=skip_analysis) + return APK(apkfile) def ensure_final_value(packageName, arsc, value): - """Ensure incoming value is always the value, not the resid. + """Ensure incoming value is always the value, not the resid androguard will sometimes return the Android "resId" aka Resource ID instead of the actual value. This checks whether the value is actually a resId, then performs the Android Resource lookup as needed. + """ if value: returnValue = value @@ -2887,29 +2061,20 @@ def ensure_final_value(packageName, arsc, value): return '' -def is_debuggable_or_testOnly(apkfile): - """Return True if the given file is an APK and is debuggable or testOnly. 
+def is_apk_and_debuggable_aapt(apkfile): + p = SdkToolsPopen(['aapt', 'dump', 'xmltree', apkfile, 'AndroidManifest.xml'], + output=False) + if p.returncode != 0: + raise FDroidException(_("Failed to get APK manifest information")) + for line in p.output.splitlines(): + if 'android:debuggable' in line and not line.endswith('0x0'): + return True + return False - These two settings should never be enabled in release builds. This - parses - from the APK and nothing else to run fast, since it is run on - every APK as part of update. - - Parameters - ---------- - apkfile - full path to the APK to check - - """ - if get_file_extension(apkfile) != 'apk': - return False - try: - # these were moved in androguard 4.0 - from androguard.core.axml import START_TAG, AXMLParser, format_value - except ImportError: - from androguard.core.bytecodes.axml import START_TAG, AXMLParser, format_value - _androguard_logging_level() +def is_apk_and_debuggable_androguard(apkfile): + """Parse only from the APK""" + from androguard.core.bytecodes.axml import AXMLParser, format_value, START_TAG with ZipFile(apkfile) as apk: with apk.open('AndroidManifest.xml') as manifest: axml = AXMLParser(manifest.read()) @@ -2918,7 +2083,7 @@ def is_debuggable_or_testOnly(apkfile): if _type == START_TAG and axml.getName() == 'application': for i in range(0, axml.getAttributeCount()): name = axml.getAttributeName(i) - if name in ('debuggable', 'testOnly'): + if name == 'debuggable': _type = axml.getAttributeValueType(i) _data = axml.getAttributeValueData(i) value = format_value(_type, _data, lambda _: axml.getAttributeValue(i)) @@ -2930,6 +2095,20 @@ def is_debuggable_or_testOnly(apkfile): return False +def is_apk_and_debuggable(apkfile): + """Returns True if the given file is an APK and is debuggable + + :param apkfile: full path to the apk to check""" + + if get_file_extension(apkfile) != 'apk': + return False + + if use_androguard(): + return is_apk_and_debuggable_androguard(apkfile) + else: + return is_apk_and_debuggable_aapt(apkfile) + + def get_apk_id(apkfile): """Extract identification information from APK. @@ -2938,30 +2117,23 @@ def get_apk_id(apkfile): APK, aapt still can. So aapt is also used as the final fallback method. - Parameters - ---------- - apkfile - path to an APK file. - - Returns - ------- - appid - versionCode - versionName + :param apkfile: path to an APK file. + :returns: triplet (appid, version code, version name) """ - try: - return get_apk_id_androguard(apkfile) - except zipfile.BadZipFile as e: - if config and 'aapt' in config: + if use_androguard(): + try: + return get_apk_id_androguard(apkfile) + except zipfile.BadZipFile as e: logging.error(apkfile + ': ' + str(e)) - return get_apk_id_aapt(apkfile) - else: - raise e + if 'aapt' in config: + return get_apk_id_aapt(apkfile) + else: + return get_apk_id_aapt(apkfile) def get_apk_id_androguard(apkfile): - """Read (appid, versionCode, versionName) from an APK. + """Read (appid, versionCode, versionName) from an APK This first tries to do quick binary XML parsing to just get the values that are needed. It will fallback to full androguard @@ -2969,34 +2141,12 @@ def get_apk_id_androguard(apkfile): versionName is set to a Android String Resource (e.g. an integer hex value that starts with @). - This function is part of androguard as get_apkid(), so this - vendored and modified to return versionCode as an integer. 
- """ if not os.path.exists(apkfile): raise FDroidException(_("Reading packageName/versionCode/versionName failed, APK invalid: '{apkfilename}'") .format(apkfilename=apkfile)) - try: - # these were moved in androguard 4.0 - from androguard.core.axml import ( - END_DOCUMENT, - END_TAG, - START_TAG, - TEXT, - AXMLParser, - format_value, - ) - except ImportError: - from androguard.core.bytecodes.axml import ( - END_DOCUMENT, - END_TAG, - START_TAG, - TEXT, - AXMLParser, - format_value, - ) - _androguard_logging_level() + from androguard.core.bytecodes.axml import AXMLParser, format_value, START_TAG, END_TAG, TEXT, END_DOCUMENT appid = None versionCode = None @@ -3018,20 +2168,20 @@ def get_apk_id_androguard(apkfile): appid = value elif versionCode is None and name == 'versionCode': if value.startswith('0x'): - versionCode = int(value, 16) + versionCode = str(int(value, 16)) else: - versionCode = int(value) + versionCode = value elif versionName is None and name == 'versionName': versionName = value if axml.getName() == 'manifest': break - elif _type in (END_TAG, TEXT, END_DOCUMENT): + elif _type == END_TAG or _type == TEXT or _type == END_DOCUMENT: raise RuntimeError('{path}: must be the first element in AndroidManifest.xml' .format(path=apkfile)) if not versionName or versionName[0] == '@': - a = get_androguard_APK(apkfile) + a = _get_androguard_APK(apkfile) versionName = ensure_final_value(a.package, a.get_android_resources(), a.get_androidversion_name()) if not versionName: versionName = '' # versionName is expected to always be a str @@ -3040,22 +2190,17 @@ def get_apk_id_androguard(apkfile): def get_apk_id_aapt(apkfile): - """Read (appid, versionCode, versionName) from an APK.""" p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False) m = APK_ID_TRIPLET_REGEX.match(p.output[0:p.output.index('\n')]) if m: - return m.group(1), int(m.group(2)), m.group(3) - raise FDroidException(_( - "Reading packageName/versionCode/versionName failed," - "APK invalid: '{apkfilename}'" - ).format(apkfilename=apkfile)) + return m.group(1), m.group(2), m.group(3) + raise FDroidException(_("Reading packageName/versionCode/versionName failed, APK invalid: '{apkfilename}'") + .format(apkfilename=apkfile)) def get_native_code(apkfile): - """Aapt checks if there are architecture folders under the lib/ folder. - - We are simulating the same behaviour. - """ + """aapt checks if there are architecture folders under the lib/ folder + so we are simulating the same behaviour""" arch_re = re.compile("^lib/(.*)/.*$") archset = set() with ZipFile(apkfile) as apk: @@ -3066,10 +2211,26 @@ def get_native_code(apkfile): return sorted(list(archset)) +def get_minSdkVersion_aapt(apkfile): + """Extract the minimum supported Android SDK from an APK using aapt + + :param apkfile: path to an APK file. 
+ :returns: the integer representing the SDK version + """ + r = re.compile(r"^sdkVersion:'([0-9]+)'") + p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False) + for line in p.output.splitlines(): + m = r.match(line) + if m: + return int(m.group(1)) + raise FDroidException(_('Reading minSdkVersion failed: "{apkfilename}"') + .format(apkfilename=apkfile)) + + class PopenResult: - def __init__(self, returncode=None, output=None): - self.returncode = returncode - self.output = output + def __init__(self): + self.returncode = None + self.output = None def SdkToolsPopen(commands, cwd=None, output=True): @@ -3089,19 +2250,12 @@ def FDroidPopenBytes(commands, cwd=None, envs=None, output=True, stderr_to_stdou """ Run a command and capture the possibly huge output as bytes. - Parameters - ---------- - commands - command and argument list like in subprocess.Popen - cwd - optionally specifies a working directory - envs - a optional dictionary of environment variables and their values - - Returns - ------- - A PopenResult. + :param commands: command and argument list like in subprocess.Popen + :param cwd: optionally specifies a working directory + :param envs: a optional dictionary of environment variables and their values + :returns: A PopenResult. """ + global env if env is None: set_FDroidPopen_env() @@ -3124,7 +2278,7 @@ def FDroidPopenBytes(commands, cwd=None, envs=None, output=True, stderr_to_stdou stderr=stderr_param) except OSError as e: raise BuildException("OSError while trying to execute " - + ' '.join(commands) + ': ' + str(e)) from e + + ' '.join(commands) + ': ' + str(e)) # TODO are these AsynchronousFileReader threads always exiting? if not stderr_to_stdout and options.verbose: @@ -3147,7 +2301,7 @@ def FDroidPopenBytes(commands, cwd=None, envs=None, output=True, stderr_to_stdou while not stdout_reader.eof(): while not stdout_queue.empty(): line = stdout_queue.get() - if output and options and options.verbose: + if output and options.verbose: # Output directly to console sys.stderr.buffer.write(line) sys.stderr.flush() @@ -3171,18 +2325,10 @@ def FDroidPopen(commands, cwd=None, envs=None, output=True, stderr_to_stdout=Tru """ Run a command and capture the possibly huge output as a str. - Parameters - ---------- - commands - command and argument list like in subprocess.Popen - cwd - optionally specifies a working directory - envs - a optional dictionary of environment variables and their values - - Returns - ------- - A PopenResult. + :param commands: command and argument list like in subprocess.Popen + :param cwd: optionally specifies a working directory + :param envs: a optional dictionary of environment variables and their values + :returns: A PopenResult. 
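# Illustrative sketch, not part of the patch above: the `aapt dump badging`
# output lines that get_apk_id_aapt() and get_minSdkVersion_aapt() parse.
# The first regex is only an approximation of APK_ID_TRIPLET_REGEX, and the
# package values are made-up examples.
import re

badging_first_line = "package: name='com.example.app' versionCode='123' versionName='1.2.3'"
m = re.match(r"^package: name='([^']*)' versionCode='([^']*)' versionName='([^']*)'",
             badging_first_line)
appid, vercode, vername = m.group(1), m.group(2), m.group(3)

sdk_line = "sdkVersion:'21'"
min_sdk = int(re.match(r"^sdkVersion:'([0-9]+)'", sdk_line).group(1))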
""" result = FDroidPopenBytes(commands, cwd, envs, output, stderr_to_stdout) result.output = result.output.decode('utf-8', 'ignore') @@ -3192,21 +2338,17 @@ def FDroidPopen(commands, cwd=None, envs=None, output=True, stderr_to_stdout=Tru gradle_comment = re.compile(r'[ ]*//') gradle_signing_configs = re.compile(r'^[\t ]*signingConfigs[ \t]*{[ \t]*$') gradle_line_matches = [ - re.compile(r'^[\t ]*signingConfig\s*[= ]\s*[^ ]*$'), + re.compile(r'^[\t ]*signingConfig [^ ]*$'), re.compile(r'.*android\.signingConfigs\.[^{]*$'), - re.compile(r'.*release\.signingConfig *= *'), + re.compile(r'.*\.readLine\(.*'), ] def remove_signing_keys(build_dir): for root, dirs, files in os.walk(build_dir): - gradlefile = None if 'build.gradle' in files: - gradlefile = "build.gradle" - elif 'build.gradle.kts' in files: - gradlefile = "build.gradle.kts" - if gradlefile: - path = os.path.join(root, gradlefile) + path = os.path.join(root, 'build.gradle') + with open(path, "r") as o: lines = o.readlines() @@ -3244,7 +2386,7 @@ def remove_signing_keys(build_dir): o.write(line) if changed: - logging.info("Cleaned %s of keysigning configs at %s" % (gradlefile, path)) + logging.info("Cleaned build.gradle of keysigning configs at %s" % path) for propfile in [ 'project.properties', @@ -3271,28 +2413,23 @@ def remove_signing_keys(build_dir): logging.info("Cleaned %s of keysigning configs at %s" % (propfile, path)) -def set_FDroidPopen_env(app=None, build=None): - """Set up the environment variables for the build environment. +def set_FDroidPopen_env(build=None): + ''' + set up the environment variables for the build environment There is only a weak standard, the variables used by gradle, so also set up the most commonly used environment variables for SDK and NDK. Also, if there is no locale set, this will set the locale (e.g. LANG) to en_US.UTF-8. - - If an App instance is provided, then the SOURCE_DATE_EPOCH - environment variable will be set based on that app's source repo. 
- - """ + ''' global env, orig_path if env is None: env = os.environ orig_path = env['PATH'] - if config: - if config.get('sdk_path'): - for n in ['ANDROID_HOME', 'ANDROID_SDK', 'ANDROID_SDK_ROOT']: - env[n] = config['sdk_path'] - for k, v in config.get('java_paths', {}).items(): - env['JAVA%s_HOME' % k] = v + for n in ['ANDROID_HOME', 'ANDROID_SDK']: + env[n] = config['sdk_path'] + for k, v in config['java_paths'].items(): + env['JAVA%s_HOME' % k] = v missinglocale = True for k, v in env.items(): @@ -3303,12 +2440,10 @@ def set_FDroidPopen_env(app=None, build=None): if missinglocale: env['LANG'] = 'en_US.UTF-8' - if app: - env['SOURCE_DATE_EPOCH'] = get_source_date_epoch(get_build_dir(app)) if build is not None: path = build.ndk_path() paths = orig_path.split(os.pathsep) - if path and path not in paths: + if path not in paths: paths = [path] + paths env['PATH'] = os.pathsep.join(paths) for n in ['ANDROID_NDK', 'NDK', 'ANDROID_NDK_HOME']: @@ -3318,13 +2453,14 @@ def set_FDroidPopen_env(app=None, build=None): def replace_build_vars(cmd, build): cmd = cmd.replace('$$COMMIT$$', build.commit) cmd = cmd.replace('$$VERSION$$', build.versionName) - cmd = cmd.replace('$$VERCODE$$', str(build.versionCode)) + cmd = cmd.replace('$$VERCODE$$', build.versionCode) return cmd def replace_config_vars(cmd, build): cmd = cmd.replace('$$SDK$$', config['sdk_path']) cmd = cmd.replace('$$NDK$$', build.ndk_path()) + cmd = cmd.replace('$$MVN3$$', config['mvn3']) if build is not None: cmd = replace_build_vars(cmd, build) return cmd @@ -3362,170 +2498,99 @@ def signer_fingerprint_short(cert_encoded): Extracts the first 7 hexadecimal digits of sha256 signing-key fingerprint for a given pkcs7 signature. - Parameters - ---------- - cert_encoded - Contents of an APK signing certificate. - - Returns - ------- - shortened signing-key fingerprint. + :param cert_encoded: Contents of an APK signing certificate. + :returns: shortened signing-key fingerprint. """ return signer_fingerprint(cert_encoded)[:7] def signer_fingerprint(cert_encoded): - """Return SHA-256 signer fingerprint for PKCS#7 DER-encoded signature. + """Obtain sha256 signing-key fingerprint for pkcs7 DER certificate. - Parameters - ---------- - Contents of an APK signature. - - Returns - ------- - Standard SHA-256 signer fingerprint. + Extracts hexadecimal sha256 signing-key fingerprint string + for a given pkcs7 signature. + :param: Contents of an APK signature. + :returns: shortened signature fingerprint. """ return hashlib.sha256(cert_encoded).hexdigest() def get_first_signer_certificate(apkpath): - """Get the first signing certificate from the APK, DER-encoded. - - JAR and APK Signatures allow for multiple signers, though it is - rarely used, and this is poorly documented. So this method only - fetches the first certificate, and errors out if there are more. - - Starting with targetSdkVersion 30, APK v2 Signatures are required. - https://developer.android.com/about/versions/11/behavior-changes-11#minimum-signature-scheme - - When a APK v2+ signature is present, the JAR signature is not - verified. The verifier parses the signers from the v2+ signature - and does not seem to look at the JAR signature. 
- https://source.android.com/docs/security/features/apksigning/v2#apk-signature-scheme-v2-block - https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/ApkVerifier.java#270 - - apksigner checks that the signers from all the APK signatures match: - https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/ApkVerifier.java#383 - - apksigner verifies each signer's signature block file - .(RSA|DSA|EC) against the corresponding signature file .SF - https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#280 - - NoOverwriteDict is a workaround for: - https://github.com/androguard/androguard/issues/1030 - - Lots more discusion here: - https://gitlab.com/fdroid/fdroidserver/-/issues/1128 - - """ - - class NoOverwriteDict(dict): - def __setitem__(self, k, v): - if k not in self: - super().__setitem__(k, v) - + """Get the first signing certificate from the APK, DER-encoded""" + certs = None cert_encoded = None - found_certs = [] - apkobject = get_androguard_APK(apkpath) - apkobject._v2_blocks = NoOverwriteDict() - certs_v3 = apkobject.get_certificates_der_v3() - if certs_v3: - cert_v3 = certs_v3[0] - found_certs.append(cert_v3) - if not cert_encoded: - logging.debug(_('Using APK Signature v3')) - cert_encoded = cert_v3 + with zipfile.ZipFile(apkpath, 'r') as apk: + cert_files = [n for n in apk.namelist() if SIGNATURE_BLOCK_FILE_REGEX.match(n)] + if len(cert_files) > 1: + logging.error(_("Found multiple JAR Signature Block Files in {path}").format(path=apkpath)) + return None + elif len(cert_files) == 1: + cert_encoded = get_certificate(apk.read(cert_files[0])) - certs_v2 = apkobject.get_certificates_der_v2() - if certs_v2: - cert_v2 = certs_v2[0] - found_certs.append(cert_v2) + if not cert_encoded and use_androguard(): + apkobject = _get_androguard_APK(apkpath) + certs = apkobject.get_certificates_der_v2() + if len(certs) > 0: + logging.info(_('Using APK Signature v2')) + cert_encoded = certs[0] if not cert_encoded: - logging.debug(_('Using APK Signature v2')) - cert_encoded = cert_v2 - - if get_min_sdk_version(apkobject) < 24 or ( - not (certs_v3 or certs_v2) and get_effective_target_sdk_version(apkobject) < 30 - ): - with zipfile.ZipFile(apkpath, 'r') as apk: - cert_files = [ - n for n in apk.namelist() if SIGNATURE_BLOCK_FILE_REGEX.match(n) - ] - if len(cert_files) > 1: - logging.error( - _("Found multiple JAR Signature Block Files in {path}").format( - path=apkpath - ) - ) - return - elif len(cert_files) == 1: - signature_block_file = cert_files[0] - signature_file = ( - cert_files[0][: signature_block_file.rindex('.')] + '.SF' - ) - cert_v1 = get_certificate( - apk.read(signature_block_file), - apk.read(signature_file), - ) - found_certs.append(cert_v1) - if not cert_encoded: - logging.debug(_('Using JAR Signature')) - cert_encoded = cert_v1 + certs = apkobject.get_certificates_der_v3() + if len(certs) > 0: + logging.info(_('Using APK Signature v3')) + cert_encoded = certs[0] if not cert_encoded: logging.error(_("No signing certificates found in {path}").format(path=apkpath)) - return - - if not all(cert == found_certs[0] for cert in found_certs): - logging.error( - _("APK signatures have different certificates in {path}:").format( - path=apkpath - ) - ) - return - + return None return cert_encoded def apk_signer_fingerprint(apk_path): - """Get SHA-256 fingerprint string for 
the first signer from given APK. + """Obtain sha256 signing-key fingerprint for APK. - Parameters - ---------- - apk_path - path to APK - - Returns - ------- - Standard SHA-256 signer fingerprint + Extracts hexadecimal sha256 signing-key fingerprint string + for a given APK. + :param apk_path: path to APK + :returns: signature fingerprint """ + cert_encoded = get_first_signer_certificate(apk_path) if not cert_encoded: return None return signer_fingerprint(cert_encoded) +def apk_signer_fingerprint_short(apk_path): + """Obtain shortened sha256 signing-key fingerprint for APK. + + Extracts the first 7 hexadecimal digits of sha256 signing-key fingerprint + for a given pkcs7 APK. + + :param apk_path: path to APK + :returns: shortened signing-key fingerprint + """ + return apk_signer_fingerprint(apk_path)[:7] + + def metadata_get_sigdir(appid, vercode=None): - """Get signature directory for app.""" + """Get signature directory for app""" if vercode: - return os.path.join('metadata', appid, 'signatures', str(vercode)) + return os.path.join('metadata', appid, 'signatures', vercode) else: return os.path.join('metadata', appid, 'signatures') def metadata_find_developer_signature(appid, vercode=None): - """Try to find the developer signature for given appid. + """Tires to find the developer signature for given appid. This picks the first signature file found in metadata an returns its signature. - Returns - ------- - sha256 signing key fingerprint of the developer signing key. - None in case no signature can not be found. - """ + :returns: sha256 signing key fingerprint of the developer signing key. + None in case no signature can not be found.""" + # fetch list of dirs for all versions of signatures appversigdirs = [] if vercode: @@ -3540,75 +2605,45 @@ def metadata_find_developer_signature(appid, vercode=None): appversigdirs.append(appversigdir) for sigdir in appversigdirs: - signature_block_files = ( - glob.glob(os.path.join(sigdir, '*.DSA')) - + glob.glob(os.path.join(sigdir, '*.EC')) - + glob.glob(os.path.join(sigdir, '*.RSA')) - ) - if len(signature_block_files) > 1: + sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \ + glob.glob(os.path.join(sigdir, '*.EC')) + \ + glob.glob(os.path.join(sigdir, '*.RSA')) + if len(sigs) > 1: raise FDroidException('ambiguous signatures, please make sure there is only one signature in \'{}\'. (The signature has to be the App maintainers signature for version of the APK.)'.format(sigdir)) - for signature_block_file in signature_block_files: - with open(signature_block_file, 'rb') as f: + for sig in sigs: + with open(sig, 'rb') as f: return signer_fingerprint(get_certificate(f.read())) return None def metadata_find_signing_files(appid, vercode): - """Get a list of signed manifests and signatures. + """Gets a list of singed manifests and signatures. 
- Parameters - ---------- - appid - app id string - vercode - app versionCode - - Returns - ------- - List - of 4-tuples for each signing key with following paths: - (signature_file, signature_block_file, manifest, v2_files), where v2_files - is either a (apk_signing_block_offset_file, apk_signing_block_file) pair or None - - References - ---------- - * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html - * https://source.android.com/security/apksigning/v2 - * https://source.android.com/security/apksigning/v3 + :param appid: app id string + :param vercode: app version code + :returns: a list of triplets for each signing key with following paths: + (signature_file, singed_file, manifest_file) """ ret = [] sigdir = metadata_get_sigdir(appid, vercode) - signature_block_files = ( - glob.glob(os.path.join(sigdir, '*.DSA')) - + glob.glob(os.path.join(sigdir, '*.EC')) - + glob.glob(os.path.join(sigdir, '*.RSA')) - ) - signature_block_pat = re.compile(r'(\.DSA|\.EC|\.RSA)$') - apk_signing_block = os.path.join(sigdir, "APKSigningBlock") - apk_signing_block_offset = os.path.join(sigdir, "APKSigningBlockOffset") - if os.path.isfile(apk_signing_block) and os.path.isfile(apk_signing_block_offset): - v2_files = apk_signing_block, apk_signing_block_offset - else: - v2_files = None - for signature_block_file in signature_block_files: - signature_file = signature_block_pat.sub('.SF', signature_block_file) - if os.path.isfile(signature_file): - manifest = os.path.join(sigdir, 'MANIFEST.MF') - if os.path.isfile(manifest): - ret.append((signature_block_file, signature_file, manifest, v2_files)) + sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \ + glob.glob(os.path.join(sigdir, '*.EC')) + \ + glob.glob(os.path.join(sigdir, '*.RSA')) + extre = re.compile(r'(\.DSA|\.EC|\.RSA)$') + for sig in sigs: + sf = extre.sub('.SF', sig) + if os.path.isfile(sf): + mf = os.path.join(sigdir, 'MANIFEST.MF') + if os.path.isfile(mf): + ret.append((sig, sf, mf)) return ret def metadata_find_developer_signing_files(appid, vercode): """Get developer signature files for specified app from metadata. - Returns - ------- - List - of 4-tuples for each signing key with following paths: - (signature_file, signature_block_file, manifest, v2_files), where v2_files - is either a (apk_signing_block_offset_file, apk_signing_block_file) pair or None - + :returns: A triplet of paths for signing files from metadata: + (signature_file, singed_file, manifest_file) """ allsigningfiles = metadata_find_signing_files(appid, vercode) if allsigningfiles and len(allsigningfiles) == 1: @@ -3617,47 +2652,12 @@ def metadata_find_developer_signing_files(appid, vercode): return None -class ClonedZipInfo(zipfile.ZipInfo): - """Hack to allow fully cloning ZipInfo instances. +def apk_strip_signatures(signed_apk, strip_manifest=False): + """Removes signatures from APK. - The zipfile library has some bugs that prevent it from fully - cloning ZipInfo entries. 
https://bugs.python.org/issue43547 - - """ - - def __init__(self, zinfo): - super().__init__() - self.original = zinfo - for k in self.__slots__: - try: - setattr(self, k, getattr(zinfo, k)) - except AttributeError: - pass - - def __getattribute__(self, name): - if name in ("date_time", "external_attr", "flag_bits"): - return getattr(self.original, name) - return object.__getattribute__(self, name) - - -def apk_has_v1_signatures(apkfile): - """Test whether an APK has v1 signature files.""" - with ZipFile(apkfile, 'r') as apk: - for info in apk.infolist(): - if APK_SIGNATURE_FILES.match(info.filename): - return True - return False - - -def apk_strip_v1_signatures(signed_apk, strip_manifest=False): - """Remove signatures from APK. - - Parameters - ---------- - signed_apk - path to APK file. - strip_manifest - when set to True also the manifest file will be removed from the APK. + :param signed_apk: path to apk file. + :param strip_manifest: when set to True also the manifest file will + be removed from the APK. """ with tempfile.TemporaryDirectory() as tmpdir: tmp_apk = os.path.join(tmpdir, 'tmp.apk') @@ -3669,256 +2669,215 @@ def apk_strip_v1_signatures(signed_apk, strip_manifest=False): if strip_manifest: if info.filename != 'META-INF/MANIFEST.MF': buf = in_apk.read(info.filename) - out_apk.writestr(ClonedZipInfo(info), buf) + out_apk.writestr(info, buf) else: buf = in_apk.read(info.filename) - out_apk.writestr(ClonedZipInfo(info), buf) + out_apk.writestr(info, buf) -def apk_implant_signatures(apkpath, outpath, manifest): - """Implant a signature from metadata into an APK. +def _zipalign(unsigned_apk, aligned_apk): + """run 'zipalign' using standard flags used by Gradle Android Plugin + + -p was added in build-tools-23.0.0 + + https://developer.android.com/studio/publish/app-signing#sign-manually + """ + p = SdkToolsPopen(['zipalign', '-v', '-p', '4', unsigned_apk, aligned_apk]) + if p.returncode != 0: + raise BuildException("Failed to align application") + + +def apk_implant_signatures(apkpath, signaturefile, signedfile, manifest): + """Implats a signature from metadata into an APK. Note: this changes there supplied APK in place. So copy it if you need the original to be preserved. 
- Parameters - ---------- - apkpath - location of the unsigned apk - outpath - location of the output apk - - References - ---------- - * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html - * https://source.android.com/security/apksigning/v2 - * https://source.android.com/security/apksigning/v3 - + :param apkpath: location of the apk """ - sigdir = os.path.dirname(manifest) # FIXME - apksigcopier.do_patch(sigdir, apkpath, outpath, v1_only=None, - exclude=apksigcopier.exclude_meta) + # get list of available signature files in metadata + with tempfile.TemporaryDirectory() as tmpdir: + apkwithnewsig = os.path.join(tmpdir, 'newsig.apk') + with ZipFile(apkpath, 'r') as in_apk: + with ZipFile(apkwithnewsig, 'w') as out_apk: + for sig_file in [signaturefile, signedfile, manifest]: + with open(sig_file, 'rb') as fp: + buf = fp.read() + info = zipfile.ZipInfo('META-INF/' + os.path.basename(sig_file)) + info.compress_type = zipfile.ZIP_DEFLATED + info.create_system = 0 # "Windows" aka "FAT", what Android SDK uses + out_apk.writestr(info, buf) + for info in in_apk.infolist(): + if not APK_SIGNATURE_FILES.match(info.filename): + if info.filename != 'META-INF/MANIFEST.MF': + buf = in_apk.read(info.filename) + out_apk.writestr(info, buf) + os.remove(apkpath) + _zipalign(apkwithnewsig, apkpath) -def apk_extract_signatures(apkpath, outdir): - """Extract a signature files from APK and puts them into target directory. - - Parameters - ---------- - apkpath - location of the apk - outdir - older where the extracted signature files will be stored - - References - ---------- - * https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html - * https://source.android.com/security/apksigning/v2 - * https://source.android.com/security/apksigning/v3 +def apk_extract_signatures(apkpath, outdir, manifest=True): + """Extracts a signature files from APK and puts them into target directory. + :param apkpath: location of the apk + :param outdir: folder where the extracted signature files will be stored + :param manifest: (optionally) disable extracting manifest file """ - apksigcopier.do_extract(apkpath, outdir, v1_only=None) - - -def get_min_sdk_version(apk): - """Wrap the androguard function to always return an integer. - - Fall back to 1 if we can't get a valid minsdk version. - - Parameters - ---------- - apk - androguard APK object - - Returns - ------- - minSdkVersion: int - """ - try: - return int(apk.get_min_sdk_version()) - except TypeError: - return 1 - - -def get_effective_target_sdk_version(apk): - """Wrap the androguard function to always return an integer. 
- - Parameters - ---------- - apk - androguard APK object - - Returns - ------- - targetSdkVersion: int - """ - try: - return int(apk.get_effective_target_sdk_version()) - except TypeError: - return get_min_sdk_version(apk) - - -def get_apksigner_smartcardoptions(smartcardoptions): - if '-providerName' in smartcardoptions.copy(): - pos = smartcardoptions.index('-providerName') - # remove -providerName and it's argument - del smartcardoptions[pos] - del smartcardoptions[pos] - replacements = {'-storetype': '--ks-type', - '-providerClass': '--provider-class', - '-providerArg': '--provider-arg'} - return [replacements.get(n, n) for n in smartcardoptions] + with ZipFile(apkpath, 'r') as in_apk: + for f in in_apk.infolist(): + if APK_SIGNATURE_FILES.match(f.filename) or \ + (manifest and f.filename == 'META-INF/MANIFEST.MF'): + newpath = os.path.join(outdir, os.path.basename(f.filename)) + with open(newpath, 'wb') as out_file: + out_file.write(in_apk.read(f.filename)) def sign_apk(unsigned_path, signed_path, keyalias): - """Sign an unsigned APK, then save to a new file, deleting the unsigned. + """Sign and zipalign an unsigned APK, then save to a new file, deleting the unsigned - NONE is a Java keyword used to configure smartcards as the - keystore. Otherwise, the keystore is a local file. - https://docs.oracle.com/javase/7/docs/technotes/guides/security/p11guide.html#KeyToolJarSigner + android-18 (4.3) finally added support for reasonable hash + algorithms, like SHA-256, before then, the only options were MD5 + and SHA1 :-/ This aims to use SHA-256 when the APK does not target + older Android versions, and is therefore safe to do so. - When using smartcards, apksigner does not use the same options has - Java/keytool/jarsigner (-providerName, -providerClass, - -providerArg, -storetype). apksigner documents the options as - --ks-provider-class and --ks-provider-arg. Those seem to be - accepted but fail when actually making a signature with weird - internal exceptions. We use the options that actually work. 
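# Illustrative sketch, not part of the patch above: how the keytool/jarsigner
# style smartcard options map to apksigner flags, mirroring
# get_apksigner_smartcardoptions(). The provider and config values are examples.
keytool_style = [
    '-storetype', 'PKCS11',
    '-providerName', 'SunPKCS11-OpenSC',   # dropped, apksigner has no equivalent
    '-providerClass', 'sun.security.pkcs11.SunPKCS11',
    '-providerArg', 'opensc-fdroid.cfg',
]
# get_apksigner_smartcardoptions(keytool_style) returns roughly:
apksigner_style = [
    '--ks-type', 'PKCS11',
    '--provider-class', 'sun.security.pkcs11.SunPKCS11',
    '--provider-arg', 'opensc-fdroid.cfg',
]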
From: - https://geoffreymetais.github.io/code/key-signing/#scripting + https://issuetracker.google.com/issues/36956587 + https://android-review.googlesource.com/c/platform/libcore/+/44491 """ - if config['keystore'] == 'NONE': - signing_args = get_apksigner_smartcardoptions(config['smartcardoptions']) + + if get_minSdkVersion_aapt(unsigned_path) < 18: + signature_algorithm = ['-sigalg', 'SHA1withRSA', '-digestalg', 'SHA1'] else: - signing_args = ['--key-pass', 'env:FDROID_KEY_PASS'] - apksigner = config.get('apksigner', '') - if not shutil.which(apksigner): - raise BuildException(_("apksigner not found, it's required for signing!")) - cmd = [apksigner, 'sign', - '--ks', config['keystore'], - '--ks-pass', 'env:FDROID_KEY_STORE_PASS'] - cmd += signing_args - cmd += ['--ks-key-alias', keyalias, - '--in', unsigned_path, - '--out', signed_path] - p = FDroidPopen(cmd, envs={ - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config.get('keypass', "")}) + signature_algorithm = ['-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256'] + + p = FDroidPopen([config['jarsigner'], '-keystore', config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-keypass:env', 'FDROID_KEY_PASS'] + + signature_algorithm + [unsigned_path, keyalias], + envs={ + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass'], }) if p.returncode != 0: - if os.path.exists(signed_path): - os.remove(signed_path) raise BuildException(_("Failed to sign application"), p.output) + + _zipalign(unsigned_path, signed_path) os.remove(unsigned_path) -def verify_apks( - signed_apk, unsigned_apk, tmp_dir, v1_only=None, clean_up_verified=False -): - """Verify that two apks are the same. +def verify_apks(signed_apk, unsigned_apk, tmp_dir): + """Verify that two apks are the same One of the inputs is signed, the other is unsigned. The signature metadata - is transferred from the signed to the unsigned apk, and then apksigner is - used to verify that the signature from the signed APK is also valid for + is transferred from the signed to the unsigned apk, and then jarsigner is + used to verify that the signature from the signed apk is also varlid for the unsigned one. If the APK given as unsigned actually does have a signature, it will be stripped out and ignored. - Parameters - ---------- - signed_apk - Path to a signed APK file - unsigned_apk - Path to an unsigned APK file expected to match it - tmp_dir - Path to directory for temporary files - v1_only - True for v1-only signatures, False for v1 and v2 signatures, - or None for autodetection - clean_up_verified - Remove any files created here if the verification succeeded. - - Returns - ------- - None if the verification is successful, otherwise a string describing what went wrong. + :param signed_apk: Path to a signed apk file + :param unsigned_apk: Path to an unsigned apk file expected to match it + :param tmp_dir: Path to directory for temporary files + :returns: None if the verification is successful, otherwise a string + describing what went wrong. 
""" - if not verify_apk_signature(signed_apk): - logging.info('...NOT verified - {0}'.format(signed_apk)) - return 'verification of signed APK failed' if not os.path.isfile(signed_apk): return 'can not verify: file does not exists: {}'.format(signed_apk) + if not os.path.isfile(unsigned_apk): return 'can not verify: file does not exists: {}'.format(unsigned_apk) - tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk)) + with ZipFile(signed_apk, 'r') as signed: + meta_inf_files = ['META-INF/MANIFEST.MF'] + for f in signed.namelist(): + if APK_SIGNATURE_FILES.match(f): + meta_inf_files.append(f) + if len(meta_inf_files) < 3: + return "Signature files missing from {0}".format(signed_apk) - try: - apksigcopier.do_copy(signed_apk, unsigned_apk, tmp_apk, v1_only=v1_only, - exclude=apksigcopier.exclude_meta) - except apksigcopier.APKSigCopierError as e: - logging.info('...NOT verified - {0}'.format(tmp_apk)) - error = 'signature copying failed: {}'.format(str(e)) - result = compare_apks(signed_apk, unsigned_apk, tmp_dir, - os.path.dirname(unsigned_apk)) - if result is not None: - error += '\nComparing reference APK to unsigned APK...\n' + result - return error + tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk)) + with ZipFile(unsigned_apk, 'r') as unsigned: + # only read the signature from the signed APK, everything else from unsigned + with ZipFile(tmp_apk, 'w') as tmp: + for filename in meta_inf_files: + tmp.writestr(signed.getinfo(filename), signed.read(filename)) + for info in unsigned.infolist(): + if info.filename in meta_inf_files: + logging.warning('Ignoring %s from %s', info.filename, unsigned_apk) + continue + if info.filename in tmp.namelist(): + return "duplicate filename found: " + info.filename + tmp.writestr(info, unsigned.read(info.filename)) - if not verify_apk_signature(tmp_apk): - logging.info('...NOT verified - {0}'.format(tmp_apk)) - error = 'verification of APK with copied signature failed' - result = compare_apks(signed_apk, tmp_apk, tmp_dir, - os.path.dirname(unsigned_apk)) - if result is not None: - error += '\nComparing reference APK to APK with copied signature...\n' + result - return error - if clean_up_verified and os.path.exists(tmp_apk): - logging.info(f"...cleaned up {tmp_apk} after successful verification") - os.remove(tmp_apk) + verified = verify_apk_signature(tmp_apk) - logging.info('...successfully verified') + if not verified: + logging.info("...NOT verified - {0}".format(tmp_apk)) + return compare_apks(signed_apk, tmp_apk, tmp_dir, + os.path.dirname(unsigned_apk)) + + logging.info("...successfully verified") return None def verify_jar_signature(jar): - """Verify the signature of a given JAR file. + """Verifies the signature of a given JAR file. jarsigner is very shitty: unsigned JARs pass as "verified"! So this has to turn on -strict then check for result 4, since this does not expect the signature to be from a CA-signed certificate. - Raises - ------ - VerificationException - If the JAR's signature could not be verified. 
+ :raises: VerificationException() if the JAR's signature could not be verified """ + error = _('JAR signature failed to verify: {path}').format(path=jar) try: - output = subprocess.check_output( - [config['jarsigner'], '-strict', '-verify', jar], stderr=subprocess.STDOUT - ) + output = subprocess.check_output([config['jarsigner'], '-strict', '-verify', jar], + stderr=subprocess.STDOUT) raise VerificationException(error + '\n' + output.decode('utf-8')) except subprocess.CalledProcessError as e: if e.returncode == 4: logging.debug(_('JAR signature verified: {path}').format(path=jar)) else: - raise VerificationException(error + '\n' + e.output.decode('utf-8')) from e + raise VerificationException(error + '\n' + e.output.decode('utf-8')) -def verify_deprecated_jar_signature(jar): - """Verify the signature of a given JAR file, allowing deprecated algorithms. +def verify_apk_signature(apk, min_sdk_version=None): + """verify the signature on an APK - index.jar (v0) and index-v1.jar are both signed by MD5/SHA1 by - definition, so this method provides a way to verify those. Also, - apksigner has different deprecation rules than jarsigner, so this - is our current hack to try to represent the apksigner rules when - executing jarsigner. + Try to use apksigner whenever possible since jarsigner is very + shitty: unsigned APKs pass as "verified"! Warning, this does + not work on JARs with apksigner >= 0.7 (build-tools 26.0.1) - jarsigner is very shitty: unsigned JARs pass as "verified"! So - this has to turn on -strict then check for result 4, since this - does not expect the signature to be from a CA-signed certificate. + :returns: boolean whether the APK was verified + """ + if set_command_in_config('apksigner'): + args = [config['apksigner'], 'verify'] + if min_sdk_version: + args += ['--min-sdk-version=' + min_sdk_version] + if options.verbose: + args += ['--verbose'] + try: + output = subprocess.check_output(args + [apk]) + if options.verbose: + logging.debug(apk + ': ' + output.decode('utf-8')) + return True + except subprocess.CalledProcessError as e: + logging.error('\n' + apk + ': ' + e.output.decode('utf-8')) + else: + if not config.get('jarsigner_warning_displayed'): + config['jarsigner_warning_displayed'] = True + logging.warning(_("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner")) + try: + verify_jar_signature(apk) + return True + except Exception as e: + logging.error(e) + return False - Also used to verify the signature on an archived APK, supporting deprecated - algorithms. + +def verify_old_apk_signature(apk): + """verify the signature on an archived APK, supporting deprecated algorithms F-Droid aims to keep every single binary that it ever published. Therefore, it needs to be able to verify APK signatures that include deprecated/removed @@ -3931,68 +2890,37 @@ def verify_deprecated_jar_signature(jar): file permissions while in use. That should prevent a bad actor from changing the settings during operation. - Raises - ------ - VerificationException - If the JAR's signature could not be verified. 
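# Illustrative sketch, not part of the patch above: relaxing the JDK's
# jar-signing algorithm restrictions via a java.security override so that
# jarsigner can still verify archived APKs signed with MD5/SHA1, as described
# here. Reuses the exit-code-4 convention from the sketch further up.
import os
import subprocess
import tempfile

def verify_with_relaxed_algorithms(jarsigner, apk):
    with tempfile.TemporaryDirectory() as tmpdir:
        java_security = os.path.join(tmpdir, 'java.security')
        with open(java_security, 'w') as fp:
            fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024')
        os.chmod(java_security, 0o400)
        rc = subprocess.call([jarsigner,
                              '-J-Djava.security.properties=' + java_security,
                              '-strict', '-verify', apk])
        return rc == 4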
+ :returns: boolean whether the APK was verified """ - error = _('JAR signature failed to verify: {path}').format(path=jar) - with tempfile.TemporaryDirectory() as tmpdir: - java_security = os.path.join(tmpdir, 'java.security') - with open(java_security, 'w') as fp: - fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024') - os.chmod(java_security, 0o400) - try: - cmd = [ - config['jarsigner'], - '-J-Djava.security.properties=' + java_security, - '-strict', '-verify', jar - ] - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) - raise VerificationException(error + '\n' + output.decode('utf-8')) - except subprocess.CalledProcessError as e: - if e.returncode == 4: - logging.debug(_('JAR signature verified: {path}').format(path=jar)) - else: - raise VerificationException(error + '\n' + e.output.decode('utf-8')) from e + _java_security = os.path.join(os.getcwd(), '.java.security') + if os.path.exists(_java_security): + os.remove(_java_security) + with open(_java_security, 'w') as fp: + fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024') + os.chmod(_java_security, 0o400) - -def verify_apk_signature(apk, min_sdk_version=None): - """Verify the signature on an APK. - - Try to use apksigner whenever possible since jarsigner is very - shitty: unsigned APKs pass as "verified"! Warning, this does - not work on JARs with apksigner >= 0.7 (build-tools 26.0.1) - - Returns - ------- - Boolean - whether the APK was verified - """ - if set_command_in_config('apksigner'): - args = [config['apksigner'], 'verify'] - if min_sdk_version: - args += ['--min-sdk-version=' + min_sdk_version] - if options and options.verbose: - args += ['--verbose'] - try: - output = subprocess.check_output(args + [apk]) - if options and options.verbose: - logging.debug(apk + ': ' + output.decode('utf-8')) + try: + cmd = [ + config['jarsigner'], + '-J-Djava.security.properties=' + _java_security, + '-strict', '-verify', apk + ] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + if e.returncode != 4: + output = e.output + else: + logging.debug(_('JAR signature verified: {path}').format(path=apk)) return True - except subprocess.CalledProcessError as e: - logging.error('\n' + apk + ': ' + e.output.decode('utf-8')) - else: - if not config.get('jarsigner_warning_displayed'): - config['jarsigner_warning_displayed'] = True - logging.warning(_("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner")) - try: - verify_deprecated_jar_signature(apk) - return True - except Exception as e: - logging.error(e) + finally: + if os.path.exists(_java_security): + os.chmod(_java_security, 0o600) + os.remove(_java_security) + + logging.error(_('Old APK signature failed to verify: {path}').format(path=apk) + + '\n' + output.decode('utf-8')) return False @@ -4000,14 +2928,13 @@ apk_badchars = re.compile('''[/ :;'"]''') def compare_apks(apk1, apk2, tmp_dir, log_dir=None): - """Compare two apks. + """Compare two apks - Returns - ------- - None if the APK content is the same (apart from the signing key), + Returns None if the apk content is the same (apart from the signing key), otherwise a string describing what's different, or what went wrong when trying to do the comparison. 
""" + if not log_dir: log_dir = tmp_dir @@ -4022,7 +2949,7 @@ def compare_apks(apk1, apk2, tmp_dir, log_dir=None): '--max-report-size', '12345678', '--max-diff-block-lines', '128', '--html', htmlfile, '--text', textfile, absapk1, absapk2]) != 0: - return "Failed to run diffoscope " + apk1 + return("Failed to run diffoscope " + apk1) apk1dir = os.path.join(tmp_dir, apk_badchars.sub('_', apk1[0:-4])) # trim .apk apk2dir = os.path.join(tmp_dir, apk_badchars.sub('_', apk2[0:-4])) # trim .apk @@ -4039,21 +2966,19 @@ def compare_apks(apk1, apk2, tmp_dir, log_dir=None): f.extractall(path=os.path.join(apk2dir, 'content')) if set_command_in_config('apktool'): - if subprocess.call( - [config['apktool'], 'd', absapk1, '--output', 'apktool'], cwd=apk1dir - ): - return "Failed to run apktool " + apk1 - if subprocess.call( - [config['apktool'], 'd', absapk2, '--output', 'apktool'], cwd=apk2dir - ): - return "Failed to run apktool " + apk2 + if subprocess.call([config['apktool'], 'd', absapk1, '--output', 'apktool'], + cwd=apk1dir) != 0: + return("Failed to run apktool " + apk1) + if subprocess.call([config['apktool'], 'd', absapk2, '--output', 'apktool'], + cwd=apk2dir) != 0: + return("Failed to run apktool " + apk2) p = FDroidPopen(['diff', '-r', apk1dir, apk2dir], output=False) lines = p.output.splitlines() if len(lines) != 1 or 'META-INF' not in lines[0]: if set_command_in_config('meld'): p = FDroidPopen([config['meld'], apk1dir, apk2dir], output=False) - return "Unexpected diff output:\n" + p.output.replace("\r", "^M") + return("Unexpected diff output:\n" + p.output) # since everything verifies, delete the comparison to keep cruft down shutil.rmtree(apk1dir) @@ -4064,12 +2989,11 @@ def compare_apks(apk1, apk2, tmp_dir, log_dir=None): def set_command_in_config(command): - """Try to find specified command in the path, if it hasn't been manually set in config.yml. - - If found, it is added to the config + '''Try to find specified command in the path, if it hasn't been + manually set in config.py. If found, it is added to the config dict. The return value says whether the command is available. - """ + ''' if command in config: return True else: @@ -4081,7 +3005,8 @@ def set_command_in_config(command): def find_command(command): - """Find the full path of a command, or None if it can't be found in the PATH.""" + '''find the full path of a command, or None if it can't be found in the PATH''' + def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) @@ -4100,7 +3025,7 @@ def find_command(command): def genpassword(): - """Generate a random password for when generating keys.""" + '''generate a random password for when generating keys''' h = hashlib.sha256() h.update(os.urandom(16)) # salt h.update(socket.getfqdn().encode('utf-8')) @@ -4109,15 +3034,9 @@ def genpassword(): def genkeystore(localconfig): - """Generate a new key with password provided in localconfig and add it to new keystore. 
- - Parameters - ---------- - localconfig - - Returns - ------- - hexed public key, public key fingerprint + """ + Generate a new key with password provided in :param localconfig and add it to new keystore + :return: hexed public key, public key fingerprint """ logging.info('Generating a new key in "' + localconfig['keystore'] + '"...') keystoredir = os.path.dirname(localconfig['keystore']) @@ -4128,34 +3047,26 @@ def genkeystore(localconfig): env_vars = {'LC_ALL': 'C.UTF-8', 'FDROID_KEY_STORE_PASS': localconfig['keystorepass'], - 'FDROID_KEY_PASS': localconfig.get('keypass', "")} - - cmd = [config['keytool'], '-genkey', - '-keystore', localconfig['keystore'], - '-alias', localconfig['repo_keyalias'], - '-keyalg', 'RSA', '-keysize', '4096', - '-sigalg', 'SHA256withRSA', - '-validity', '10000', - '-storetype', 'pkcs12', - '-storepass:env', 'FDROID_KEY_STORE_PASS', - '-dname', localconfig['keydname'], - '-J-Duser.language=en'] - if localconfig['keystore'] == "NONE": - cmd += localconfig['smartcardoptions'] - else: - cmd += '-keypass:env', 'FDROID_KEY_PASS' - p = FDroidPopen(cmd, envs=env_vars) + 'FDROID_KEY_PASS': localconfig['keypass']} + p = FDroidPopen([config['keytool'], '-genkey', + '-keystore', localconfig['keystore'], + '-alias', localconfig['repo_keyalias'], + '-keyalg', 'RSA', '-keysize', '4096', + '-sigalg', 'SHA256withRSA', + '-validity', '10000', + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-keypass:env', 'FDROID_KEY_PASS', + '-dname', localconfig['keydname'], + '-J-Duser.language=en'], envs=env_vars) if p.returncode != 0: raise BuildException("Failed to generate key", p.output) - if localconfig['keystore'] != "NONE": - os.chmod(localconfig['keystore'], 0o0600) + os.chmod(localconfig['keystore'], 0o0600) if not options.quiet: # now show the lovely key that was just generated p = FDroidPopen([config['keytool'], '-list', '-v', '-keystore', localconfig['keystore'], '-alias', localconfig['repo_keyalias'], - '-storepass:env', 'FDROID_KEY_STORE_PASS', '-J-Duser.language=en'] - + config['smartcardoptions'], envs=env_vars) + '-storepass:env', 'FDROID_KEY_STORE_PASS', '-J-Duser.language=en'], envs=env_vars) logging.info(p.output.strip() + '\n\n') # get the public key p = FDroidPopenBytes([config['keytool'], '-exportcert', @@ -4172,238 +3083,119 @@ def genkeystore(localconfig): def get_cert_fingerprint(pubkey): - """Generate a certificate fingerprint the same way keytool does it (but with slightly different formatting).""" + """ + Generate a certificate fingerprint the same way keytool does it + (but with slightly different formatting) + """ digest = hashlib.sha256(pubkey).digest() ret = [' '.join("%02X" % b for b in bytearray(digest))] return " ".join(ret) -def get_certificate(signature_block_file, signature_file=None): - """Extract a single DER certificate from JAR Signature's "Signature Block File". +def get_certificate(signature_block_file): + """Extracts a DER certificate from JAR Signature's "Signature Block File". - If there is more than one signer certificate, this exits with an - error, unless the signature_file is provided. If that is set, it - will return the certificate that matches the Signature File, for - example, if there is a certificate chain, like TLS does. In the - fdroidserver use cases, there should always be a single signer. - But rarely, some APKs include certificate chains. 
+ :param signature_block_file: file bytes (as string) representing the + certificate, as read directly out of the APK/ZIP - This could be replaced by androguard's APK.get_certificate_der() - provided the cert chain fix was merged there. Maybe in 4.1.2? - https://github.com/androguard/androguard/pull/1038 - - https://docs.oracle.com/en/java/javase/21/docs/specs/man/jarsigner.html#the-signed-jar-file - - Parameters - ---------- - signature_block_file - Bytes representing the PKCS#7 signer certificate and - signature, as read directly out of the JAR/APK, e.g. CERT.RSA. - - signature_file - Bytes representing the manifest signed by the Signature Block - File, e.g. CERT.SF. If this is not given, the assumption is - there will be only a single certificate in - signature_block_file, otherwise it is an error. - - Returns - ------- - A binary representation of the certificate's public key, + :return: A binary representation of the certificate's public key, or None in case of error """ - pkcs7obj = cms.ContentInfo.load(signature_block_file) - certificates = pkcs7obj['content']['certificates'] - if len(certificates) == 1: - return certificates[0].chosen.dump() - elif not signature_file: - logging.error(_('Found multiple Signer Certificates!')) - return - certificate = get_jar_signer_certificate(pkcs7obj, signature_file) - if certificate: - return certificate.chosen.dump() + content = decoder.decode(signature_block_file, asn1Spec=rfc2315.ContentInfo())[0] + if content.getComponentByName('contentType') != rfc2315.signedData: + return None + content = decoder.decode(content.getComponentByName('content'), + asn1Spec=rfc2315.SignedData())[0] + try: + certificates = content.getComponentByName('certificates') + cert = certificates[0].getComponentByName('certificate') + except PyAsn1Error: + logging.error("Certificates not found.") + return None + return encoder.encode(cert) -def _find_matching_certificate(signer_info, certificate): - """Find the certificates that matches signer_info using issuer and serial number. - - https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#590 - https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/x509/Certificate.java#55 +def load_stats_fdroid_signing_key_fingerprints(): + """Load list of signing-key fingerprints stored by fdroid publish from file. + :returns: list of dictionanryies containing the singing-key fingerprints. """ - certificate_serial = certificate.chosen['tbs_certificate']['serial_number'] - expected_issuer_serial = signer_info['sid'].chosen - return ( - expected_issuer_serial['issuer'] == certificate.chosen.issuer - and expected_issuer_serial['serial_number'] == certificate_serial - ) - - -def get_jar_signer_certificate(pkcs7obj: cms.ContentInfo, signature_file: bytes): - """Return the one certificate in a chain that actually signed the manifest. - - PKCS#7-signed data can include certificate chains for use cases - where an Certificate Authority (CA) is used. Android does not - validate the certificate chain on APK signatures, so neither does - this. 
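# Illustrative sketch, not part of the patch above: pulling the signer
# certificate out of a JAR Signature Block File with asn1crypto, as the
# single-certificate case of get_certificate() does. The entry name
# 'META-INF/CERT.RSA' is just a common example.
from zipfile import ZipFile

from asn1crypto import cms

def first_signer_cert_v1(apkpath, block_name='META-INF/CERT.RSA'):
    with ZipFile(apkpath) as apk:
        pkcs7obj = cms.ContentInfo.load(apk.read(block_name))
    certificates = pkcs7obj['content']['certificates']
    if len(certificates) == 1:
        return certificates[0].chosen.dump()  # DER-encoded X.509 certificate
    return None  # certificate chains need the issuer/serial matching logic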
- https://android.googlesource.com/platform/tools/apksig/+/refs/tags/android-13.0.0_r3/src/main/java/com/android/apksig/internal/apk/v1/V1SchemeVerifier.java#512 - - Some useful fodder for understanding all this: - https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html - https://technotes.shemyak.com/posts/jar-signature-block-file-format/ - https://docs.oracle.com/en/java/javase/21/docs/specs/man/jarsigner.html#the-signed-jar-file - https://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html - - """ - import oscrypto.asymmetric - import oscrypto.errors - - # Android attempts to verify all SignerInfos and then picks the first verified SignerInfo. - first_verified_signer_info = None - first_verified_signer_info_signing_certificate = None - for signer_info in pkcs7obj['content']['signer_infos']: - signature = signer_info['signature'].contents - digest_algorithm = signer_info["digest_algorithm"]["algorithm"].native - public_key = None - for certificate in pkcs7obj['content']['certificates']: - if _find_matching_certificate(signer_info, certificate): - public_key = oscrypto.asymmetric.load_public_key(certificate.chosen.public_key) - break - if public_key is None: - logging.info('No certificate found that matches signer info!') - continue - - signature_algo = signer_info['signature_algorithm'].signature_algo - if signature_algo == 'rsassa_pkcs1v15': - # ASN.1 - 1.2.840.113549.1.1.1 - verify_func = oscrypto.asymmetric.rsa_pkcs1v15_verify - elif signature_algo == 'rsassa_pss': - # ASN.1 - 1.2.840.113549.1.1.10 - verify_func = oscrypto.asymmetric.rsa_pss_verify - elif signature_algo == 'dsa': - # ASN.1 - 1.2.840.10040.4.1 - verify_func = oscrypto.asymmetric.dsa_verify - elif signature_algo == 'ecdsa': - # ASN.1 - 1.2.840.10045.4 - verify_func = oscrypto.asymmetric.ecdsa_verify - else: - logging.error( - 'Unknown signature algorithm %s:\n %s\n %s' - % ( - signature_algo, - hexlify(certificate.chosen.sha256).decode(), - certificate.chosen.subject.human_friendly, - ), - ) - return - - try: - verify_func(public_key, signature, signature_file, digest_algorithm) - if not first_verified_signer_info: - first_verified_signer_info = signer_info - first_verified_signer_info_signing_certificate = certificate - - except oscrypto.errors.SignatureError as e: - logging.error( - '"%s", skipping:\n %s\n %s' % ( - e, - hexlify(certificate.chosen.sha256).decode(), - certificate.chosen.subject.human_friendly), - ) - - if first_verified_signer_info_signing_certificate: - return first_verified_signer_info_signing_certificate - - -def load_publish_signer_fingerprints(): - """Load signing-key fingerprints stored in file generated by fdroid publish. - - Returns - ------- - dict - containing the signing-key fingerprints. - """ - jar_file = os.path.join('repo', 'signer-index.jar') + jar_file = os.path.join('stats', 'publishsigkeys.jar') if not os.path.isfile(jar_file): return {} - try: - verify_deprecated_jar_signature(jar_file) - except VerificationException as e: + cmd = [config['jarsigner'], '-strict', '-verify', jar_file] + p = FDroidPopen(cmd, output=False) + if p.returncode != 4: raise FDroidException("Signature validation of '{}' failed! 
" - "Please run publish again to rebuild this file.".format(jar_file)) from e + "Please run publish again to rebuild this file.".format(jar_file)) jar_sigkey = apk_signer_fingerprint(jar_file) repo_key_sig = config.get('repo_key_sha256') if repo_key_sig: if jar_sigkey != repo_key_sig: - raise FDroidException("Signature key fingerprint of file '{}' does not match repo_key_sha256 in config.yml (found fingerprint: '{}')".format(jar_file, jar_sigkey)) + raise FDroidException("Signature key fingerprint of file '{}' does not match repo_key_sha256 in config.py (found fingerprint: '{}')".format(jar_file, jar_sigkey)) else: - logging.warning("repo_key_sha256 not in config.yml, setting it to the signature key fingerprint of '{}'".format(jar_file)) + logging.warning("repo_key_sha256 not in config.py, setting it to the signature key fingerprint of '{}'".format(jar_file)) config['repo_key_sha256'] = jar_sigkey write_to_config(config, 'repo_key_sha256') with zipfile.ZipFile(jar_file, 'r') as f: - return json.loads(str(f.read('signer-index.json'), 'utf-8')) + return json.loads(str(f.read('publishsigkeys.json'), 'utf-8')) -def write_config_file(config): - """Write the provided string to config.yml with the right path and encoding.""" - Path(CONFIG_FILE).write_text(config, encoding='utf-8') - - -def write_to_config(thisconfig, key, value=None): - """Write a key/value to the local config.yml. - - The config.yml is defined as YAML 1.2 in UTF-8 encoding on all - platforms. +def write_to_config(thisconfig, key, value=None, config_file=None): + '''write a key/value to the local config.py NOTE: only supports writing string variables. - Parameters - ---------- - thisconfig - config dictionary - key - variable name in config to be overwritten/added - value - optional value to be written, instead of fetched + :param thisconfig: config dictionary + :param key: variable name in config.py to be overwritten/added + :param value: optional value to be written, instead of fetched from 'thisconfig' dictionary. - - """ + ''' if value is None: origkey = key + '_orig' value = thisconfig[origkey] if origkey in thisconfig else thisconfig[key] + cfg = config_file if config_file else 'config.py' # load config file, create one if it doesn't exist - if not os.path.exists(CONFIG_FILE): - write_config_file('') - logging.info(_("Creating empty {config_file}").format(config_file=CONFIG_FILE)) - with open(CONFIG_FILE) as fp: - lines = fp.readlines() + if not os.path.exists(cfg): + open(cfg, 'a').close() + logging.info("Creating empty " + cfg) + with open(cfg, 'r') as f: + lines = f.readlines() # make sure the file ends with a carraige return if len(lines) > 0: if not lines[-1].endswith('\n'): lines[-1] += '\n' - pattern = re.compile(r'^[\s#]*' + key + r':.*\n') - repl = config_dump({key: value}) + # regex for finding and replacing python string variable + # definitions/initializations + pattern = re.compile(r'^[\s#]*' + key + r'\s*=\s*"[^"]*"') + repl = key + ' = "' + value + '"' + pattern2 = re.compile(r'^[\s#]*' + key + r"\s*=\s*'[^']*'") + repl2 = key + " = '" + value + "'" # If we replaced this line once, we make sure won't be a # second instance of this line for this key in the document. 
didRepl = False # edit config file - with open(CONFIG_FILE, 'w', encoding='utf-8') as f: + with open(cfg, 'w') as f: for line in lines: - if pattern.match(line): + if pattern.match(line) or pattern2.match(line): if not didRepl: line = pattern.sub(repl, line) + line = pattern2.sub(repl2, line) f.write(line) didRepl = True else: f.write(line) if not didRepl: + f.write('\n') f.write(repl) + f.write('\n') def parse_xml(path): @@ -4412,55 +3204,21 @@ def parse_xml(path): def string_is_integer(string): try: - int(string, 0) + int(string) return True except ValueError: - try: - int(string) - return True - except ValueError: - return False + return False -def version_code_string_to_int(vercode): - """Convert an versionCode string of any base into an int.""" - # TODO: Python 3.6 allows underscores in numeric literals - vercode = vercode.replace('_', '') - try: - return int(vercode, 0) - except ValueError: - return int(vercode) - - -def get_app_display_name(app): - """Get a human readable name for the app for logging and sorting. - - When trying to find a localized name, this first tries en-US since - that his the historical language used for sorting. - - """ - if app.get('Name'): - return app['Name'] - if app.get('localized'): - localized = app['localized'].get(DEFAULT_LOCALE) - if not localized: - for v in app['localized'].values(): - localized = v - break - if localized.get('name'): - return localized['name'] - return app.get('AutoName') or app['id'] - - -def local_rsync(options, from_paths: List[str], todir: str): - """Rsync method for local to local copying of things. +def local_rsync(options, fromdir, todir): + '''Rsync method for local to local copying of things This is an rsync wrapper with all the settings for safe use within the various fdroidserver use cases. This uses stricter rsync checking on all files since people using offline mode are already prioritizing security above ease and speed. - """ + ''' rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms', '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w'] if not options.no_checksum: @@ -4469,82 +3227,78 @@ def local_rsync(options, from_paths: List[str], todir: str): rsyncargs += ['--verbose'] if options.quiet: rsyncargs += ['--quiet'] - logging.debug(' '.join(rsyncargs + from_paths + [todir])) - if subprocess.call(rsyncargs + from_paths + [todir]) != 0: + logging.debug(' '.join(rsyncargs + [fromdir, todir])) + if subprocess.call(rsyncargs + [fromdir, todir]) != 0: raise FDroidException() -def deploy_build_log_with_rsync(appid, vercode, log_content): +def deploy_build_log_with_rsync(appid, vercode, log_content, + timestamp=int(time.time())): """Upload build log of one individual app build to an fdroid repository. - Parameters - ---------- - appid - package name for dientifying to which app this log belongs. - vercode - version of the app to which this build belongs. - log_content - Content of the log which is about to be posted. - Should be either a string or bytes. (bytes will - be decoded as 'utf-8') + :param appid: package name for dientifying to which app this log belongs. + :param vercode: version of the app to which this build belongs. + :param log_content: Content of the log which is about to be posted. + Should be either a string or bytes. (bytes will + be decoded as 'utf-8') + :param timestamp: timestamp for avoiding logfile name collisions. 
""" - if not log_content: - logging.warning(_('skip deploying full build logs: log content is empty')) - return - if not os.path.exists('repo'): - os.mkdir('repo') - - # gzip compress log file - log_gz_path = os.path.join('repo', - '{appid}_{versionCode}.log.gz'.format(appid=appid, - versionCode=vercode)) - - with gzip.open(log_gz_path, 'wb') as f: - if isinstance(log_content, str): - f.write(bytes(log_content, 'utf-8')) - else: - f.write(log_content) - rsync_status_file_to_repo(log_gz_path) - - -def rsync_status_file_to_repo(path, repo_subdir=None): - """Copy a build log or status JSON to the repo using rsync.""" + # check if deploying logs is enabled in config if not config.get('deploy_process_logs', False): logging.debug(_('skip deploying full build logs: not enabled in config')) return - for d in config.get('serverwebroot', []): - webroot = d['url'] - cmd = ['rsync', - '--archive', - '--delete-after', - '--safe-links'] - if options.verbose: - cmd += ['--verbose'] - if options.quiet: - cmd += ['--quiet'] - if 'identity_file' in config: - cmd += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']] + if not log_content: + logging.warning(_('skip deploying full build logs: log content is empty')) + return - dest_path = os.path.join(webroot, "repo") - if repo_subdir is not None: - dest_path = os.path.join(dest_path, repo_subdir) - if not dest_path.endswith('/'): - dest_path += '/' # make sure rsync knows this is a directory - cmd += [path, dest_path] + if not (isinstance(timestamp, int) or isinstance(timestamp, float)): + raise ValueError(_("supplied timestamp value '{timestamp}' is not a unix timestamp" + .format(timestamp=timestamp))) - retcode = subprocess.call(cmd) - if retcode: - logging.error(_('process log deploy {path} to {dest} failed!') - .format(path=path, dest=webroot)) - else: - logging.debug(_('deployed process log {path} to {dest}') - .format(path=path, dest=webroot)) + with tempfile.TemporaryDirectory() as tmpdir: + # gzip compress log file + log_gz_path = os.path.join( + tmpdir, '{pkg}_{ver}_{ts}.log.gz'.format(pkg=appid, + ver=vercode, + ts=int(timestamp))) + with gzip.open(log_gz_path, 'wb') as f: + if isinstance(log_content, str): + f.write(bytes(log_content, 'utf-8')) + else: + f.write(log_content) + + # TODO: sign compressed log file, if a signing key is configured + + for webroot in config.get('serverwebroot', []): + dest_path = os.path.join(webroot, "buildlogs") + if not dest_path.endswith('/'): + dest_path += '/' # make sure rsync knows this is a directory + cmd = ['rsync', + '--archive', + '--delete-after', + '--safe-links'] + if options.verbose: + cmd += ['--verbose'] + if options.quiet: + cmd += ['--quiet'] + if 'identity_file' in config: + cmd += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']] + cmd += [log_gz_path, dest_path] + + # TODO: also deploy signature file if present + + retcode = subprocess.call(cmd) + if retcode: + logging.warning(_("failed deploying build logs to '{path}'").format(path=webroot)) + else: + logging.info(_("deployed build logs to '{path}'").format(path=webroot)) def get_per_app_repos(): - """Per-app repos are dirs named with the packageName of a single app.""" + '''per-app repos are dirs named with the packageName of a single app''' + # Android packageNames are Java packages, they may contain uppercase or # lowercase letters ('A' through 'Z'), numbers, and underscores # ('_'). 
However, individual package name parts may only start with @@ -4565,54 +3319,26 @@ def get_per_app_repos(): return repos -# list of index files that are never gpg-signed -NO_GPG_INDEX_FILES = [ - "entry.jar", - "index-v1.jar", - "index.css", - "index.html", - "index.jar", - "index.png", - "index.xml", - "signer-index.jar", -] - -# list of index files that are signed by gpgsign.py to make a .asc file -GPG_INDEX_FILES = [ - "altstore-index.json", - "entry.json", - "index-v1.json", - "index-v2.json", - "signer-index.json", -] - - -INDEX_FILES = sorted( - NO_GPG_INDEX_FILES + GPG_INDEX_FILES + [i + '.asc' for i in GPG_INDEX_FILES] -) - - -def is_repo_file(filename, for_gpg_signing=False): - """Whether the file in a repo is a build product to be delivered to users.""" +def is_repo_file(filename): + '''Whether the file in a repo is a build product to be delivered to users''' if isinstance(filename, str): filename = filename.encode('utf-8', errors="surrogateescape") - ignore_files = [i.encode() for i in NO_GPG_INDEX_FILES] - ignore_files.append(b'index_unsigned.jar') - if not for_gpg_signing: - ignore_files += [i.encode() for i in GPG_INDEX_FILES] - - return ( - os.path.isfile(filename) - and not filename.endswith(b'.asc') - and not filename.endswith(b'.sig') - and not filename.endswith(b'.idsig') - and not filename.endswith(b'.log.gz') - and os.path.basename(filename) not in ignore_files - ) + return os.path.isfile(filename) \ + and not filename.endswith(b'.asc') \ + and not filename.endswith(b'.sig') \ + and os.path.basename(filename) not in [ + b'index.jar', + b'index_unsigned.jar', + b'index.xml', + b'index.html', + b'index-v1.jar', + b'index-v1.json', + b'categories.txt', + ] def get_examples_dir(): - """Return the dir where the fdroidserver example files are available.""" + '''Return the dir where the fdroidserver example files are available''' examplesdir = None tmp = os.path.dirname(sys.argv[0]) if os.path.basename(tmp) == 'bin': @@ -4634,46 +3360,74 @@ def get_examples_dir(): return examplesdir -def get_android_tools_versions(): - """Get a list of the versions of all installed Android SDK/NDK components.""" +def get_wiki_timestamp(timestamp=None): + """Return current time in the standard format for posting to the wiki""" + + if timestamp is None: + timestamp = time.gmtime() + return time.strftime("%Y-%m-%d %H:%M:%SZ", timestamp) + + +def get_android_tools_versions(ndk_path=None): + '''get a list of the versions of all installed Android SDK/NDK components''' + global config sdk_path = config['sdk_path'] if sdk_path[-1] != '/': sdk_path += '/' - components = set() - for ndk_path in config.get('ndk_paths', {}).values(): - version = get_ndk_version(ndk_path) - components.add((os.path.relpath(ndk_path, sdk_path), str(version))) + components = [] + if ndk_path: + ndk_release_txt = os.path.join(ndk_path, 'RELEASE.TXT') + if os.path.isfile(ndk_release_txt): + with open(ndk_release_txt, 'r') as fp: + components.append((os.path.basename(ndk_path), fp.read()[:-1])) - pattern = re.compile(r'^Pkg.Revision *= *(.+)', re.MULTILINE) + pattern = re.compile('^Pkg.Revision=(.+)', re.MULTILINE) for root, dirs, files in os.walk(sdk_path): if 'source.properties' in files: source_properties = os.path.join(root, 'source.properties') with open(source_properties, 'r') as fp: m = pattern.search(fp.read()) if m: - components.add((os.path.relpath(root, sdk_path), m.group(1))) + components.append((root[len(sdk_path):], m.group(1))) - return sorted(components) + return components -def get_android_tools_version_log(): 
- """Get a list of the versions of all installed Android SDK/NDK components.""" +def get_android_tools_version_log(ndk_path=None): + '''get a list of the versions of all installed Android SDK/NDK components''' log = '== Installed Android Tools ==\n\n' - components = get_android_tools_versions() + components = get_android_tools_versions(ndk_path) for name, version in sorted(components): log += '* ' + name + ' (' + version + ')\n' return log +def get_git_describe_link(): + """Get a link to the current fdroiddata commit, to post to the wiki + + """ + try: + output = subprocess.check_output(['git', 'describe', '--always', '--dirty', '--abbrev=0'], + universal_newlines=True).strip() + except subprocess.CalledProcessError: + pass + if output: + commit = output.replace('-dirty', '') + return ('* fdroiddata: [https://gitlab.com/fdroid/fdroiddata/commit/{commit} {id}]\n' + .format(commit=commit, id=output)) + else: + logging.error(_("'{path}' failed to execute!").format(path='git describe')) + return '' + + def calculate_math_string(expr): ops = { ast.Add: operator.add, ast.Mult: operator.mul, ast.Sub: operator.sub, ast.USub: operator.neg, - ast.Pow: operator.pow, } def execute_ast(node): @@ -4691,14 +3445,14 @@ def calculate_math_string(expr): if '#' in expr: raise SyntaxError('no comments allowed') return execute_ast(ast.parse(expr, mode='eval').body) - except SyntaxError as exc: + except SyntaxError: raise SyntaxError("could not parse expression '{expr}', " "only basic math operations are allowed (+, -, *)" - .format(expr=expr)) from exc + .format(expr=expr)) def force_exit(exitvalue=0): - """Force exit when thread operations could block the exit. + """force exit when thread operations could block the exit The build command has to use some threading stuff to handle the timeout and locks. This seems to prevent the command from @@ -4708,252 +3462,3 @@ def force_exit(exitvalue=0): sys.stdout.flush() sys.stderr.flush() os._exit(exitvalue) - - -YAML_LINT_CONFIG = {'extends': 'default', - 'rules': {'document-start': 'disable', - 'line-length': 'disable', - 'truthy': 'disable'}} - - -def run_yamllint(path, indent=0): - path = Path(path) - try: - import yamllint.config - import yamllint.linter - except ImportError: - return '' - - result = [] - with path.open('r', encoding='utf-8') as f: - problems = yamllint.linter.run(f, yamllint.config.YamlLintConfig(json.dumps(YAML_LINT_CONFIG))) - for problem in problems: - result.append(' ' * indent + str(path) + ':' + str(problem.line) + ': ' + problem.message) - return '\n'.join(result) - - -def calculate_IPFS_cid(filename): - """Calculate the IPFS CID of a file and add it to the index. 
- - uses ipfs_cid package at https://packages.debian.org/sid/ipfs-cid - Returns CIDv1 of a file as per IPFS recommendation - """ - cmd = config and config.get('ipfs_cid') - if not cmd: - return - file_cid = subprocess.run([cmd, filename], capture_output=True) - - if file_cid.returncode == 0: - cid_output = file_cid.stdout.decode() - cid_output_dict = json.loads(cid_output) - return cid_output_dict['CIDv1'] - - -def sha256sum(filename): - """Calculate the sha256 of the given file.""" - sha = hashlib.sha256() - with open(filename, 'rb') as f: - while True: - t = f.read(16384) - if len(t) == 0: - break - sha.update(t) - return sha.hexdigest() - - -def sha256base64(filename): - """Calculate the sha256 of the given file as URL-safe base64.""" - hasher = hashlib.sha256() - with open(filename, 'rb') as f: - while True: - t = f.read(16384) - if len(t) == 0: - break - hasher.update(t) - return urlsafe_b64encode(hasher.digest()).decode() - - -def get_ndk_version(ndk_path): - """Get the version info from the metadata in the NDK package. - - Since r11, the info is nice and easy to find in - sources.properties. Before, there was a kludgey format in - RELEASE.txt. This is only needed for r10e. - - """ - source_properties = os.path.join(ndk_path, 'source.properties') - release_txt = os.path.join(ndk_path, 'RELEASE.TXT') - if os.path.exists(source_properties): - with open(source_properties) as fp: - m = re.search(r'^Pkg.Revision *= *(.+)', fp.read(), flags=re.MULTILINE) - if m: - return m.group(1) - elif os.path.exists(release_txt): - with open(release_txt) as fp: - return fp.read().split('-')[0] - - -def auto_install_ndk(build): - """Auto-install the NDK in the build, this assumes its in a buildserver guest VM. - - Download, verify, and install the NDK version as specified via the - "ndk:" field in the build entry. As it uncompresses the zipball, - this forces the permissions to work for all users, since this - might uncompress as root and then be used from a different user. - - This needs to be able to install multiple versions of the NDK, - since this is also used in CI builds, where multiple `fdroid build - --onserver` calls can run in a single session. The production - buildserver is reset between every build. - - The default ANDROID_SDK_ROOT base dir of /opt/android-sdk is hard-coded in - buildserver/Vagrantfile. The $ANDROID_HOME/ndk subdir is where Android - Studio will install the NDK into versioned subdirs. - https://developer.android.com/studio/projects/configure-agp-ndk#agp_version_41 - - Also, r10e and older cannot be handled via this mechanism because - they are packaged differently. - - """ - import sdkmanager - - global config - if build.get('disable'): - return - ndk = build.get('ndk') - if not ndk: - return - if isinstance(ndk, str): - sdkmanager.build_package_list(use_net=True) - _install_ndk(ndk) - elif isinstance(ndk, list): - sdkmanager.build_package_list(use_net=True) - for n in ndk: - _install_ndk(n) - else: - raise BuildException(_('Invalid ndk: entry in build: "{ndk}"') - .format(ndk=str(ndk))) - - -def _install_ndk(ndk): - """Install specified NDK if it is not already installed. - - Parameters - ---------- - ndk - The NDK version to install, either in "release" form (r21e) or - "revision" form (21.4.7075529). 
- """ - import sdkmanager - - sdk_path = config['sdk_path'] - sdkmanager.install(f'ndk;{ndk}', sdk_path) - for found in glob.glob(f'{sdk_path}/ndk/*'): - version = get_ndk_version(found) - if 'ndk_paths' not in config: - config['ndk_paths'] = dict() - config['ndk_paths'][ndk] = found - config['ndk_paths'][version] = found - logging.info( - _('Set NDK {release} ({version}) up').format(release=ndk, version=version) - ) - - -def calculate_archive_policy(app, default): - """Calculate the archive policy from the metadata and default config.""" - if app.get('ArchivePolicy') is not None: - archive_policy = app['ArchivePolicy'] - else: - archive_policy = default - if app.get('VercodeOperation'): - archive_policy *= len(app['VercodeOperation']) - builds = [build for build in app.Builds if not build.disable] - if app.Builds and archive_policy > len(builds): - archive_policy = len(builds) - return archive_policy - - -def calculate_gradle_flavor_combination(flavors): - """Calculate all combinations of gradle flavors.""" - combination_lists = itertools.product(*[[flavor, ''] for flavor in flavors]) - combinations = [ - re.sub( - r' +\w', - lambda pat: pat.group(0)[-1].upper(), - ' '.join(combination_list).strip(), - ) - for combination_list in combination_lists - ] - return combinations - - -FDROIDORG_MIRRORS = [ - { - 'isPrimary': True, - 'url': 'https://f-droid.org/repo', - 'dnsA': ['65.21.79.229', '136.243.44.143'], - 'dnsAAAA': ['2a01:4f8:212:c98::2', '2a01:4f9:3b:546d::2'], - 'worksWithoutSNI': True, - }, - { - 'url': 'http://fdroidorg6cooksyluodepej4erfctzk7rrjpjbbr6wx24jh3lqyfwyd.onion/fdroid/repo' - }, - { - 'url': 'http://dotsrccccbidkzg7oc7oj4ugxrlfbt64qebyunxbrgqhxiwj3nl6vcad.onion/fdroid/repo' - }, - { - 'url': 'http://ftpfaudev4triw2vxiwzf4334e3mynz7osqgtozhbc77fixncqzbyoyd.onion/fdroid/repo' - }, - { - 'url': 'http://lysator7eknrfl47rlyxvgeamrv7ucefgrrlhk7rouv3sna25asetwid.onion/pub/fdroid/repo' - }, - { - 'url': 'http://mirror.ossplanetnyou5xifr6liw5vhzwc2g2fmmlohza25wwgnnaw65ytfsad.onion/fdroid/repo' - }, - {'url': 'https://fdroid.tetaneutral.net/fdroid/repo', 'countryCode': 'FR'}, - { - 'url': 'https://ftp.agdsn.de/fdroid/repo', - 'countryCode': 'DE', - "dnsA": ["141.30.235.39"], - "dnsAAAA": ["2a13:dd85:b00:12::1"], - "worksWithoutSNI": True, - }, - { - 'url': 'https://ftp.fau.de/fdroid/repo', - 'countryCode': 'DE', - "dnsA": ["131.188.12.211"], - "dnsAAAA": ["2001:638:a000:1021:21::1"], - "worksWithoutSNI": True, - }, - {'url': 'https://ftp.gwdg.de/pub/android/fdroid/repo', 'countryCode': 'DE'}, - { - 'url': 'https://ftp.lysator.liu.se/pub/fdroid/repo', - 'countryCode': 'SE', - "dnsA": ["130.236.254.251", "130.236.254.253"], - "dnsAAAA": ["2001:6b0:17:f0a0::fb", "2001:6b0:17:f0a0::fd"], - "worksWithoutSNI": True, - }, - {'url': 'https://mirror.cyberbits.eu/fdroid/repo', 'countryCode': 'FR'}, - { - 'url': 'https://mirror.fcix.net/fdroid/repo', - 'countryCode': 'US', - "dnsA": ["23.152.160.16"], - "dnsAAAA": ["2620:13b:0:1000::16"], - "worksWithoutSNI": True, - }, - {'url': 'https://mirror.kumi.systems/fdroid/repo', 'countryCode': 'AT'}, - {'url': 'https://mirror.level66.network/fdroid/repo', 'countryCode': 'DE'}, - {'url': 'https://mirror.ossplanet.net/fdroid/repo', 'countryCode': 'TW'}, - {'url': 'https://mirrors.dotsrc.org/fdroid/repo', 'countryCode': 'DK'}, - {'url': 'https://opencolo.mm.fcix.net/fdroid/repo', 'countryCode': 'US'}, - { - 'url': 'https://plug-mirror.rcac.purdue.edu/fdroid/repo', - 'countryCode': 'US', - "dnsA": ["128.211.151.252"], - "dnsAAAA": 
["2001:18e8:804:35::1337"], - "worksWithoutSNI": True, - }, -] -FDROIDORG_FINGERPRINT = ( - '43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB' -) diff --git a/fdroidserver/deploy.py b/fdroidserver/deploy.py deleted file mode 100644 index f1dcce21..00000000 --- a/fdroidserver/deploy.py +++ /dev/null @@ -1,1177 +0,0 @@ -#!/usr/bin/env python3 -# -# deploy.py - part of the FDroid server tools -# Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import configparser -import glob -import json -import logging -import os -import pathlib -import re -import shutil -import subprocess -import sys -import time -import urllib -from argparse import ArgumentParser -from typing import Dict, List - -import git -import yaml -from git import Repo - -import fdroidserver.github - -from . import _, common, index -from .exception import FDroidException - -config = None -start_timestamp = time.gmtime() - -GIT_BRANCH = 'master' - -BINARY_TRANSPARENCY_DIR = 'binary_transparency' - -REMOTE_HOSTNAME_REGEX = re.compile(r'\W*\w+\W+(\w+).*') - -EMBEDDED_RCLONE_CONF = 'rclone.conf' - - -def _get_index_file_paths(base_dir): - """Return the list of files to be synced last, since they finalize the deploy. - - The process of pushing all the new packages to the various - services can take a while. So the index files should be updated - last. That ensures that the package files are available when the - client learns about them from the new index files. - - signer-index.* are only published in the repo/ section. - - """ - return [ - os.path.join(base_dir, filename) - for filename in common.INDEX_FILES - if not (filename.startswith('signer-index.') and base_dir.endswith('archive')) - ] - - -def _get_index_excludes(base_dir): - indexes = _get_index_file_paths(base_dir) - index_excludes = [] - for f in indexes: - index_excludes.append('--exclude') - index_excludes.append(f) - return index_excludes - - -def _get_index_includes(base_dir): - indexes = _get_index_file_paths(base_dir) - index_includes = [] - for f in indexes: - index_includes.append('--include') - index_includes.append(f) - return index_includes - - -def _remove_missing_files(files: List[str]) -> List[str]: - """Remove files that are missing from the file system.""" - existing = [] - for f in files: - if os.path.exists(f): - existing.append(f) - return existing - - -def _generate_rclone_include_pattern(files): - """Generate a pattern for rclone's --include flag (https://rclone.org/filtering/).""" - return "{" + ",".join(sorted(set(files))) + "}" - - -def update_awsbucket(repo_section, is_index_only=False, verbose=False, quiet=False): - """Sync the directory `repo_section` (including subdirectories) to AWS S3 US East. - - This is a shim function for public API compatibility. 
- - Requires AWS credentials set as environment variables: - https://rclone.org/s3/#authentication - - """ - update_remote_storage_with_rclone(repo_section, is_index_only, verbose, quiet) - - -def update_remote_storage_with_rclone( - repo_section, - awsbucket, - is_index_only=False, - verbose=False, - quiet=False, - checksum=False, -): - """Sync the directory `repo_section` (including subdirectories) to configed cloud services. - - Rclone sync can send the files to any supported remote storage - service once without numerous polling. If remote storage is S3 e.g - AWS S3, Wasabi, Filebase, etc, then path will be - bucket_name/fdroid/repo where bucket_name will be an S3 bucket. If - remote storage is storage drive/sftp e.g google drive, rsync.net the - new path will be bucket_name/fdroid/repo where bucket_name will be a - folder - - See https://rclone.org/docs/#config-config-file - - rclone filtering works differently than rsync. For example, - "--include" implies "--exclude **" at the end of an rclone internal - filter list. - - If rclone.conf is in the root of the repo, then it will be preferred - over the rclone default config paths. - - """ - logging.debug(_('Using rclone to sync to "{name}"').format(name=awsbucket)) - - rclone_config = config.get('rclone_config', []) - if rclone_config and isinstance(rclone_config, str): - rclone_config = [rclone_config] - - path = config.get('path_to_custom_rclone_config') - if path: - if not os.path.exists(path): - logging.error( - _('path_to_custom_rclone_config: "{path}" does not exist!').format( - path=path - ) - ) - sys.exit(1) - configfilename = path - elif os.path.exists(EMBEDDED_RCLONE_CONF): - path = EMBEDDED_RCLONE_CONF # in this case, only for display - configfilename = EMBEDDED_RCLONE_CONF - if not rclone_config: - raise FDroidException(_("'rclone_config' must be set in config.yml!")) - else: - configfilename = None - output = subprocess.check_output(['rclone', 'config', 'file'], text=True) - default_config_path = output.split('\n')[-2] - if os.path.exists(default_config_path): - path = default_config_path - if path: - logging.info(_('Using "{path}" for rclone config.').format(path=path)) - - upload_dir = 'fdroid/' + repo_section - - if not rclone_config: - env = os.environ - # Check both canonical and backup names, but only tell user about canonical. 
- if not env.get("AWS_SECRET_ACCESS_KEY") and not env.get("AWS_SECRET_KEY"): - raise FDroidException( - _( - """"AWS_SECRET_ACCESS_KEY" must be set as an environmental variable!""" - ) - ) - if not env.get("AWS_ACCESS_KEY_ID") and not env.get('AWS_ACCESS_KEY'): - raise FDroidException( - _(""""AWS_ACCESS_KEY_ID" must be set as an environmental variable!""") - ) - - default_remote = "AWS-S3-US-East-1" - env_rclone_config = configparser.ConfigParser() - env_rclone_config.add_section(default_remote) - env_rclone_config.set( - default_remote, - '; = This file is auto-generated by fdroid deploy, do not edit!', - '', - ) - env_rclone_config.set(default_remote, "type", "s3") - env_rclone_config.set(default_remote, "provider", "AWS") - env_rclone_config.set(default_remote, "region", "us-east-1") - env_rclone_config.set(default_remote, "env_auth", "true") - - configfilename = ".fdroid-deploy-rclone.conf" - with open(configfilename, "w", encoding="utf-8") as autoconfigfile: - env_rclone_config.write(autoconfigfile) - rclone_config = [default_remote] - - rclone_sync_command = ['rclone', 'sync', '--delete-after'] - if configfilename: - rclone_sync_command += ['--config', configfilename] - - if checksum: - rclone_sync_command.append('--checksum') - - if verbose: - rclone_sync_command += ['--verbose'] - elif quiet: - rclone_sync_command += ['--quiet'] - - # TODO copying update_serverwebroot rsync algo - for remote_config in rclone_config: - complete_remote_path = f'{remote_config}:{awsbucket}/{upload_dir}' - logging.info(f'rclone sync to {complete_remote_path}') - if is_index_only: - index_only_files = common.INDEX_FILES + ['diff/*.*'] - include_pattern = _generate_rclone_include_pattern(index_only_files) - cmd = rclone_sync_command + [ - '--include', - include_pattern, - '--delete-excluded', - repo_section, - complete_remote_path, - ] - logging.info(cmd) - if subprocess.call(cmd) != 0: - raise FDroidException() - else: - cmd = ( - rclone_sync_command - + _get_index_excludes(repo_section) - + [ - repo_section, - complete_remote_path, - ] - ) - if subprocess.call(cmd) != 0: - raise FDroidException() - cmd = rclone_sync_command + [ - repo_section, - complete_remote_path, - ] - if subprocess.call(cmd) != 0: - raise FDroidException() - - -def update_serverwebroot(serverwebroot, repo_section): - """Deploy the index files to the serverwebroot using rsync. - - Upload the first time without the index files and delay the - deletion as much as possible. That keeps the repo functional - while this update is running. Then once it is complete, rerun the - command again to upload the index files. Always using the same - target with rsync allows for very strict settings on the receiving - server, you can literally specify the one rsync command that is - allowed to run in ~/.ssh/authorized_keys. 
(serverwebroot is - guaranteed to have a trailing slash in common.py) - - It is possible to optionally use a checksum comparison for - accurate comparisons on different filesystems, for example, FAT - has a low resolution timestamp - - """ - try: - subprocess.run(['rsync', '--version'], capture_output=True, check=True) - except Exception as e: - raise FDroidException( - _('rsync is missing or broken: {error}').format(error=e) - ) from e - rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links'] - options = common.get_options() - if not options or not options.no_checksum: - rsyncargs.append('--checksum') - if options and options.verbose: - rsyncargs += ['--verbose'] - if options and options.quiet: - rsyncargs += ['--quiet'] - if options and options.identity_file: - rsyncargs += [ - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file, - ] - elif config and config.get('identity_file'): - rsyncargs += [ - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file'], - ] - url = serverwebroot['url'] - is_index_only = serverwebroot.get('index_only', False) - logging.info('rsyncing ' + repo_section + ' to ' + url) - if is_index_only: - files_to_upload = _get_index_file_paths(repo_section) - files_to_upload = _remove_missing_files(files_to_upload) - - rsyncargs += files_to_upload - rsyncargs += [f'{url}/{repo_section}/'] - logging.info(rsyncargs) - if subprocess.call(rsyncargs) != 0: - raise FDroidException() - else: - excludes = _get_index_excludes(repo_section) - if subprocess.call(rsyncargs + excludes + [repo_section, url]) != 0: - raise FDroidException() - if subprocess.call(rsyncargs + [repo_section, url]) != 0: - raise FDroidException() - # upload "current version" symlinks if requested - if ( - config - and config.get('make_current_version_link') - and repo_section == 'repo' - ): - links_to_upload = [] - for f in ( - glob.glob('*.apk') + glob.glob('*.apk.asc') + glob.glob('*.apk.sig') - ): - if os.path.islink(f): - links_to_upload.append(f) - if len(links_to_upload) > 0: - if subprocess.call(rsyncargs + links_to_upload + [url]) != 0: - raise FDroidException() - - -def update_serverwebroots(serverwebroots, repo_section, standardwebroot=True): - for d in serverwebroots: - # this supports both an ssh host:path and just a path - serverwebroot = d['url'] - s = serverwebroot.rstrip('/').split(':') - if len(s) == 1: - fdroiddir = s[0] - elif len(s) == 2: - host, fdroiddir = s - else: - logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot) - sys.exit(1) - repobase = os.path.basename(fdroiddir) - if standardwebroot and repobase != 'fdroid': - logging.error( - _( - 'serverwebroot: path does not end with "fdroid", perhaps you meant one of these:' - ) - + '\n\t' - + serverwebroot.rstrip('/') - + '/fdroid\n\t' - + serverwebroot.rstrip('/').rstrip(repobase) - + 'fdroid' - ) - sys.exit(1) - update_serverwebroot(d, repo_section) - - -def sync_from_localcopy(repo_section, local_copy_dir): - """Sync the repo from "local copy dir" filesystem to this box. - - In setups that use offline signing, this is the last step that - syncs the repo from the "local copy dir" e.g. a thumb drive to the - repo on the local filesystem. That local repo is then used to - push to all the servers that are configured. 
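update_serverwebroots() in the removed code accepts either a plain local path or an ssh-style host:path destination and insists that the last path component is "fdroid". A small hedged sketch of that validation, with an illustrative helper name:

import os


def check_serverwebroot(url, standardwebroot=True):
    """Split an rsync destination into (host, path) and check it ends in fdroid."""
    parts = url.rstrip('/').split(':')
    if len(parts) == 1:
        host, fdroiddir = None, parts[0]   # plain local path
    elif len(parts) == 2:
        host, fdroiddir = parts            # ssh style host:path
    else:
        raise ValueError('Malformed serverwebroot line: ' + url)
    if standardwebroot and os.path.basename(fdroiddir) != 'fdroid':
        raise ValueError('serverwebroot should end with "fdroid", e.g. '
                         + url.rstrip('/') + '/fdroid')
    return host, fdroiddir


print(check_serverwebroot('example.com:/var/www/fdroid'))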
- - """ - logging.info('Syncing from local_copy_dir to this repo.') - - # trailing slashes have a meaning in rsync which is not needed here, so - # make sure both paths have exactly one trailing slash - common.local_rsync( - common.get_options(), - [os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'], - repo_section.rstrip('/') + '/', - ) - - offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR) - if os.path.exists(os.path.join(offline_copy, '.git')): - online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR) - push_binary_transparency(offline_copy, online_copy) - - -def update_localcopy(repo_section, local_copy_dir): - """Copy data from offline to the "local copy dir" filesystem. - - This updates the copy of this repo used to shuttle data from an - offline signing machine to the online machine, e.g. on a thumb - drive. - - """ - # local_copy_dir is guaranteed to have a trailing slash in main() below - common.local_rsync(common.get_options(), [repo_section], local_copy_dir) - - offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR) - if os.path.isdir(os.path.join(offline_copy, '.git')): - online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR) - push_binary_transparency(offline_copy, online_copy) - - -def _get_size(start_path='.'): - """Get size of all files in a dir https://stackoverflow.com/a/1392549.""" - total_size = 0 - for root, dirs, files in os.walk(start_path): - for f in files: - fp = os.path.join(root, f) - total_size += os.path.getsize(fp) - return total_size - - -def update_servergitmirrors(servergitmirrors, repo_section): - """Update repo mirrors stored in git repos. - - This is a hack to use public git repos as F-Droid repos. It - recreates the git repo from scratch each time, so that there is no - history. That keeps the size of the git repo small. Services - like GitHub or GitLab have a size limit of something like 1 gig. - This git repo is only a git repo for the purpose of being hosted. - For history, there is the archive section, and there is the binary - transparency log. - - This will attempt to use the existing remote branch so that it does - not have to push all of the files in the repo each time. Old setups - or runs of `fdroid nightly` might use the "master" branch. For the - "index only" mode, it will recreate the branch from scratch each - time since usually all the files are changed. In any case, the - index files are small compared to the full repo. - - """ - from clint.textui import progress - - if config.get('local_copy_dir') and not config.get('sync_from_local_copy_dir'): - logging.debug( - _('Offline machine, skipping git mirror generation until `fdroid deploy`') - ) - return - - options = common.get_options() - workspace_dir = pathlib.Path(os.getcwd()) - - # right now we support only 'repo' git-mirroring - if repo_section == 'repo': - git_mirror_path = workspace_dir / 'git-mirror' - dotgit = os.path.join(git_mirror_path, '.git') - git_fdroiddir = os.path.join(git_mirror_path, 'fdroid') - git_repodir = os.path.join(git_fdroiddir, repo_section) - if not os.path.isdir(git_repodir): - os.makedirs(git_repodir) - # github/gitlab use bare git repos, so only count the .git folder - # test: generate giant APKs by including AndroidManifest.xml and and large - # file from /dev/urandom, then sign it. Then add those to the git repo. 
- dotgit_size = _get_size(dotgit) - dotgit_over_limit = dotgit_size > config['git_mirror_size_limit'] - if os.path.isdir(dotgit) and dotgit_over_limit: - logging.warning( - _( - 'Deleting git-mirror history, repo is too big ({size} max {limit})' - ).format(size=dotgit_size, limit=config['git_mirror_size_limit']) - ) - shutil.rmtree(dotgit) - if options.no_keep_git_mirror_archive and dotgit_over_limit: - logging.warning( - _('Deleting archive, repo is too big ({size} max {limit})').format( - size=dotgit_size, limit=config['git_mirror_size_limit'] - ) - ) - archive_path = os.path.join(git_mirror_path, 'fdroid', 'archive') - shutil.rmtree(archive_path, ignore_errors=True) - - # use custom SSH command if identity_file specified - ssh_cmd = 'ssh -oBatchMode=yes' - if options.identity_file is not None: - ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file - elif 'identity_file' in config: - ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file'] - - if options.verbose: - progressbar = progress.Bar() - - class MyProgressPrinter(git.RemoteProgress): - def update(self, op_code, current, maximum=None, message=None): - if isinstance(maximum, float): - progressbar.show(current, maximum) - - progress = MyProgressPrinter() - else: - progress = None - - repo = git.Repo.init(git_mirror_path, initial_branch=GIT_BRANCH) - - enabled_remotes = [] - for d in servergitmirrors: - is_index_only = d.get('index_only', False) - - # Use a separate branch for the index only mode as it needs a different set of files to commit - if is_index_only: - local_branch_name = 'index_only' - else: - local_branch_name = GIT_BRANCH - if local_branch_name in repo.heads: - repo.git.switch(local_branch_name) - else: - repo.git.switch('--orphan', local_branch_name) - - # trailing slashes have a meaning in rsync which is not needed here, so - # make sure both paths have exactly one trailing slash - if is_index_only: - files_to_sync = _get_index_file_paths(str(workspace_dir / repo_section)) - files_to_sync = _remove_missing_files(files_to_sync) - else: - files_to_sync = [str(workspace_dir / repo_section).rstrip('/') + '/'] - common.local_rsync( - common.get_options(), files_to_sync, git_repodir.rstrip('/') + '/' - ) - - upload_to_servergitmirror( - mirror_config=d, - local_repo=repo, - enabled_remotes=enabled_remotes, - repo_section=repo_section, - is_index_only=is_index_only, - fdroid_dir=git_fdroiddir, - git_mirror_path=str(git_mirror_path), - ssh_cmd=ssh_cmd, - progress=progress, - ) - if progress: - progressbar.done() - - -def upload_to_servergitmirror( - mirror_config: Dict[str, str], - local_repo: Repo, - enabled_remotes: List[str], - repo_section: str, - is_index_only: bool, - fdroid_dir: str, - git_mirror_path: str, - ssh_cmd: str, - progress: git.RemoteProgress, -) -> None: - remote_branch_name = GIT_BRANCH - local_branch_name = local_repo.active_branch.name - - remote_url = mirror_config['url'] - name = REMOTE_HOSTNAME_REGEX.sub(r'\1', remote_url) - enabled_remotes.append(name) - r = git.remote.Remote(local_repo, name) - if r in local_repo.remotes: - r = local_repo.remote(name) - if 'set_url' in dir(r): # force remote URL if using GitPython 2.x - r.set_url(remote_url) - else: - local_repo.create_remote(name, remote_url) - logging.info('Mirroring to: ' + remote_url) - - if is_index_only: - files_to_upload = _get_index_file_paths( - os.path.join(local_repo.working_tree_dir, 'fdroid', repo_section) - ) - files_to_upload = _remove_missing_files(files_to_upload) - local_repo.index.add(files_to_upload) - 
else: - # sadly index.add don't allow the --all parameter - logging.debug('Adding all files to git mirror') - local_repo.git.add(all=True) - - logging.debug('Committing files into git mirror') - local_repo.index.commit("fdroidserver git-mirror") - - # only deploy to GitLab Artifacts if too big for GitLab Pages - if ( - is_index_only - or common.get_dir_size(fdroid_dir) <= common.GITLAB_COM_PAGES_MAX_SIZE - ): - gitlab_ci_job_name = 'pages' - else: - gitlab_ci_job_name = 'GitLab Artifacts' - logging.warning( - _('Skipping GitLab Pages mirror because the repo is too large (>%.2fGB)!') - % (common.GITLAB_COM_PAGES_MAX_SIZE / 1000000000) - ) - - # push. This will overwrite the git history - remote = local_repo.remote(name) - if remote.name == 'gitlab': - logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages') - with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as fp: - yaml.dump( - { - gitlab_ci_job_name: { - 'script': [ - 'mkdir .public', - 'cp -r * .public/', - 'mv .public public', - ], - 'artifacts': {'paths': ['public']}, - 'variables': {'GIT_DEPTH': 1}, - } - }, - fp, - default_flow_style=False, - ) - - local_repo.index.add(['.gitlab-ci.yml']) - local_repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages") - - logging.debug(_('Pushing to {url}').format(url=remote.url)) - with local_repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd): - pushinfos = remote.push( - f"{local_branch_name}:{remote_branch_name}", - force=True, - set_upstream=True, - progress=progress, - ) - for pushinfo in pushinfos: - if pushinfo.flags & ( - git.remote.PushInfo.ERROR - | git.remote.PushInfo.REJECTED - | git.remote.PushInfo.REMOTE_FAILURE - | git.remote.PushInfo.REMOTE_REJECTED - ): - # Show potentially useful messages from git remote - if progress: - for line in progress.other_lines: - if line.startswith('remote:'): - logging.debug(line) - raise FDroidException( - remote.url - + ' push failed: ' - + str(pushinfo.flags) - + ' ' - + pushinfo.summary - ) - else: - logging.debug(remote.url + ': ' + pushinfo.summary) - - -def upload_to_android_observatory(repo_section): - import requests - - requests # stop unused import warning - - if common.get_options().verbose: - logging.getLogger("requests").setLevel(logging.INFO) - logging.getLogger("urllib3").setLevel(logging.INFO) - else: - logging.getLogger("requests").setLevel(logging.WARNING) - logging.getLogger("urllib3").setLevel(logging.WARNING) - - if repo_section == 'repo': - for f in sorted(glob.glob(os.path.join(repo_section, '*.apk'))): - upload_apk_to_android_observatory(f) - - -def upload_apk_to_android_observatory(path): - # depend on requests and lxml only if users enable AO - import requests - from lxml.html import fromstring - - from . 
import net - - apkfilename = os.path.basename(path) - r = requests.post( - 'https://androidobservatory.org/', - data={'q': common.sha256sum(path), 'searchby': 'hash'}, - headers=net.HEADERS, - timeout=300, - ) - if r.status_code == 200: - # from now on XPath will be used to retrieve the message in the HTML - # androidobservatory doesn't have a nice API to talk with - # so we must scrape the page content - tree = fromstring(r.text) - - href = None - for element in tree.xpath("//html/body/div/div/table/tbody/tr/td/a"): - a = element.attrib.get('href') - if a: - m = re.match(r'^/app/[0-9A-F]{40}$', a) - if m: - href = m.group() - - page = 'https://androidobservatory.org' - if href: - message = _('Found {apkfilename} at {url}').format( - apkfilename=apkfilename, url=(page + href) - ) - logging.debug(message) - return - - # upload the file with a post request - logging.info( - _('Uploading {apkfilename} to androidobservatory.org').format( - apkfilename=apkfilename - ) - ) - r = requests.post( - 'https://androidobservatory.org/upload', - files={'apk': (apkfilename, open(path, 'rb'))}, - headers=net.HEADERS, - allow_redirects=False, - timeout=300, - ) - - -def upload_to_virustotal(repo_section, virustotal_apikey): - import requests - - requests # stop unused import warning - - if repo_section == 'repo': - if not os.path.exists('virustotal'): - os.mkdir('virustotal') - - if os.path.exists(os.path.join(repo_section, 'index-v1.json')): - with open(os.path.join(repo_section, 'index-v1.json')) as fp: - data = json.load(fp) - else: - local_jar = os.path.join(repo_section, 'index-v1.jar') - data, _ignored, _ignored = index.get_index_from_jar(local_jar) - - for packageName, packages in data['packages'].items(): - for package in packages: - upload_apk_to_virustotal(virustotal_apikey, **package) - - -def upload_apk_to_virustotal( - virustotal_apikey, packageName, apkName, hash, versionCode, **kwargs -): - import requests - - logging.getLogger("urllib3").setLevel(logging.WARNING) - logging.getLogger("requests").setLevel(logging.WARNING) - - outputfilename = os.path.join( - 'virustotal', packageName + '_' + str(versionCode) + '_' + hash + '.json' - ) - if os.path.exists(outputfilename): - logging.debug(apkName + ' results are in ' + outputfilename) - return outputfilename - repofilename = os.path.join('repo', apkName) - logging.info('Checking if ' + repofilename + ' is on virustotal') - - headers = {"User-Agent": "F-Droid"} - if 'headers' in kwargs: - for k, v in kwargs['headers'].items(): - headers[k] = v - - apikey = { - 'apikey': virustotal_apikey, - 'resource': hash, - } - needs_file_upload = False - while True: - report_url = ( - 'https://www.virustotal.com/vtapi/v2/file/report?' 
- + urllib.parse.urlencode(apikey) - ) - r = requests.get(report_url, headers=headers, timeout=300) - if r.status_code == 200: - response = r.json() - if response['response_code'] == 0: - needs_file_upload = True - else: - response['filename'] = apkName - response['packageName'] = packageName - response['versionCode'] = versionCode - if kwargs.get('versionName'): - response['versionName'] = kwargs.get('versionName') - with open(outputfilename, 'w') as fp: - json.dump(response, fp, indent=2, sort_keys=True) - - if response.get('positives', 0) > 0: - logging.warning( - _('{path} has been flagged by virustotal {count} times:').format( - path=repofilename, count=response['positives'] - ), - +'\n\t' + response['permalink'], - ) - break - if r.status_code == 204: - logging.warning(_('virustotal.com is rate limiting, waiting to retry...')) - time.sleep(30) # wait for public API rate limiting - - upload_url = None - if needs_file_upload: - manual_url = 'https://www.virustotal.com/' - size = os.path.getsize(repofilename) - if size > 200000000: - # VirusTotal API 200MB hard limit - logging.error( - _('{path} more than 200MB, manually upload: {url}').format( - path=repofilename, url=manual_url - ) - ) - elif size > 32000000: - # VirusTotal API requires fetching a URL to upload bigger files - query_url = ( - 'https://www.virustotal.com/vtapi/v2/file/scan/upload_url?' - + urllib.parse.urlencode(apikey) - ) - r = requests.get(query_url, headers=headers, timeout=300) - if r.status_code == 200: - upload_url = r.json().get('upload_url') - elif r.status_code == 403: - logging.error( - _( - 'VirusTotal API key cannot upload files larger than 32MB, ' - + 'use {url} to upload {path}.' - ).format(path=repofilename, url=manual_url) - ) - else: - r.raise_for_status() - else: - upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan' - - if upload_url: - logging.info( - _('Uploading {apkfilename} to virustotal').format(apkfilename=repofilename) - ) - r = requests.post( - upload_url, - data=apikey, - headers=headers, - files={'file': (apkName, open(repofilename, 'rb'))}, - timeout=300, - ) - logging.debug( - _('If this upload fails, try manually uploading to {url}').format( - url=manual_url - ) - ) - r.raise_for_status() - response = r.json() - logging.info(response['verbose_msg'] + " " + response['permalink']) - - return outputfilename - - -def push_binary_transparency(git_repo_path, git_remote): - """Push the binary transparency git repo to the specifed remote. - - If the remote is a local directory, make sure it exists, and is a - git repo. This is used to move this git repo from an offline - machine onto a flash drive, then onto the online machine. Also, - this pulls because pushing to a non-bare git repo is error prone. - - This is also used in offline signing setups, where it then also - creates a "local copy dir" git repo that serves to shuttle the git - data from the offline machine to the online machine. In that - case, git_remote is a dir on the local file system, e.g. a thumb - drive. 
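upload_apk_to_virustotal() above first asks the v2 report endpoint whether the hash is already known, treating HTTP 204 as rate limiting and response_code 0 as "needs upload". A small hedged sketch of that lookup with a placeholder API key; the endpoint and parameters are the ones used in the removed code:

import requests


def virustotal_report(apikey, sha256):
    """Return the VirusTotal report dict, or None if the file still needs uploading."""
    r = requests.get(
        'https://www.virustotal.com/vtapi/v2/file/report',
        params={'apikey': apikey, 'resource': sha256},
        headers={'User-Agent': 'F-Droid'},
        timeout=300,
    )
    if r.status_code == 204:
        return None  # public API rate limit hit, caller should wait and retry
    r.raise_for_status()
    response = r.json()
    if response.get('response_code') == 0:
        return None  # not known to VirusTotal yet, an upload is needed
    return response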
- - """ - logging.info(_('Pushing binary transparency log to {url}').format(url=git_remote)) - - if os.path.isdir(os.path.dirname(git_remote)): - # from offline machine to thumbdrive - remote_path = os.path.abspath(git_repo_path) - if not os.path.isdir(os.path.join(git_remote, '.git')): - os.makedirs(git_remote, exist_ok=True) - thumbdriverepo = git.Repo.init(git_remote, initial_branch=GIT_BRANCH) - local = thumbdriverepo.create_remote('local', remote_path) - else: - thumbdriverepo = git.Repo(git_remote) - local = git.remote.Remote(thumbdriverepo, 'local') - if local in thumbdriverepo.remotes: - local = thumbdriverepo.remote('local') - if 'set_url' in dir(local): # force remote URL if using GitPython 2.x - local.set_url(remote_path) - else: - local = thumbdriverepo.create_remote('local', remote_path) - local.pull(GIT_BRANCH) - else: - # from online machine to remote on a server on the internet - gitrepo = git.Repo(git_repo_path) - origin = git.remote.Remote(gitrepo, 'origin') - if origin in gitrepo.remotes: - origin = gitrepo.remote('origin') - if 'set_url' in dir(origin): # added in GitPython 2.x - origin.set_url(git_remote) - else: - origin = gitrepo.create_remote('origin', git_remote) - for _i in range(3): - try: - origin.push(GIT_BRANCH) - except git.GitCommandError as e: - logging.error(e) - continue - break - else: - raise FDroidException(_("Pushing to remote server failed!")) - - -def find_release_infos(index_v2_path, repo_dir, package_names): - """Find files, texts, etc. for uploading to a release page in index-v2.json. - - This function parses index-v2.json for file-paths elegible for deployment - to release pages. (e.g. GitHub releases) It also groups these files by - packageName and versionName. e.g. to get a list of files for all specific - release of fdroid client you may call: - - find_binary_release_infos()['org.fdroid.fdroid']['0.19.2'] - - All paths in the returned data-structure are of type pathlib.Path. - """ - release_infos = {} - with open(index_v2_path, 'r') as f: - idx = json.load(f) - for package_name in package_names: - package = idx.get('packages', {}).get(package_name, {}) - for version in package.get('versions', {}).values(): - if package_name not in release_infos: - release_infos[package_name] = {} - version_name = version['manifest']['versionName'] - version_path = repo_dir / version['file']['name'].lstrip("/") - files = [version_path] - asc_path = pathlib.Path(str(version_path) + '.asc') - if asc_path.is_file(): - files.append(asc_path) - sig_path = pathlib.Path(str(version_path) + '.sig') - if sig_path.is_file(): - files.append(sig_path) - release_infos[package_name][version_name] = { - 'files': files, - 'whatsNew': version.get('whatsNew', {}).get("en-US"), - 'hasReleaseChannels': len(version.get('releaseChannels', [])) > 0, - } - return release_infos - - -def upload_to_github_releases(repo_section, gh_config, global_gh_token): - repo_dir = pathlib.Path(repo_section) - index_v2_path = repo_dir / 'index-v2.json' - if not index_v2_path.is_file(): - logging.warning( - _( - "Error deploying 'github_releases', {} not present. 
(You might " - "need to run `fdroid update` first.)" - ).format(index_v2_path) - ) - return - - package_names = [] - for repo_conf in gh_config: - for package_name in repo_conf.get('packageNames', []): - package_names.append(package_name) - - release_infos = fdroidserver.deploy.find_release_infos( - index_v2_path, repo_dir, package_names - ) - - for repo_conf in gh_config: - upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token) - - -def upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token): - projectUrl = repo_conf.get("projectUrl") - if not projectUrl: - logging.warning( - _( - "One of the 'github_releases' config items is missing the " - "'projectUrl' value. skipping ..." - ) - ) - return - token = repo_conf.get("token") or global_gh_token - if not token: - logging.warning( - _( - "One of the 'github_releases' config items is missing the " - "'token' value. skipping ..." - ) - ) - return - conf_package_names = repo_conf.get("packageNames", []) - if type(conf_package_names) == str: - conf_package_names = [conf_package_names] - if not conf_package_names: - logging.warning( - _( - "One of the 'github_releases' config items is missing the " - "'packageNames' value. skipping ..." - ) - ) - return - - # lookup all versionNames (git tags) for all packages available in the - # local fdroid repo - all_local_versions = set() - for package_name in conf_package_names: - for version in release_infos.get(package_name, {}).keys(): - all_local_versions.add(version) - - gh = fdroidserver.github.GithubApi(token, projectUrl) - unreleased_tags = gh.list_unreleased_tags() - - for version in all_local_versions: - if version in unreleased_tags: - # Making sure we're not uploading this version when releaseChannels - # is set. (releaseChannels usually mean it's e.g. 
an alpha or beta - # version) - if ( - not release_infos.get(conf_package_names[0], {}) - .get(version, {}) - .get('hasReleaseChannels') - ): - # collect files associated with this github release - files = [] - for package in conf_package_names: - files.extend( - release_infos.get(package, {}).get(version, {}).get('files', []) - ) - # always use the whatsNew text from the first app listed in - # config.yml github_releases.packageNames - text = ( - release_infos.get(conf_package_names[0], {}) - .get(version, {}) - .get('whatsNew') - or '' - ) - if 'release_notes_prepend' in repo_conf: - text = repo_conf['release_notes_prepend'] + "\n\n" + text - # create new release on github and upload all associated files - gh.create_release(version, files, text) - - -def main(): - global config - - parser = ArgumentParser() - common.setup_global_opts(parser) - parser.add_argument( - "-i", - "--identity-file", - default=None, - help=_("Specify an identity file to provide to SSH for rsyncing"), - ) - parser.add_argument( - "--local-copy-dir", - default=None, - help=_("Specify a local folder to sync the repo to"), - ) - parser.add_argument( - "--no-checksum", - action="store_true", - default=False, - help=_("Don't use rsync checksums"), - ) - parser.add_argument( - "--no-keep-git-mirror-archive", - action="store_true", - default=False, - help=_("If a git mirror gets to big, allow the archive to be deleted"), - ) - options = common.parse_args(parser) - config = common.read_config() - - if config.get('nonstandardwebroot') is True: - standardwebroot = False - else: - standardwebroot = True - - if options.local_copy_dir is not None: - local_copy_dir = options.local_copy_dir - elif config.get('local_copy_dir'): - local_copy_dir = config['local_copy_dir'] - else: - local_copy_dir = None - if local_copy_dir is not None: - fdroiddir = local_copy_dir.rstrip('/') - if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir): - logging.error(_('local_copy_dir must be directory, not a file!')) - sys.exit(1) - if not os.path.exists(os.path.dirname(fdroiddir)): - logging.error( - _('The root dir for local_copy_dir "{path}" does not exist!').format( - path=os.path.dirname(fdroiddir) - ) - ) - sys.exit(1) - if not os.path.isabs(fdroiddir): - logging.error(_('local_copy_dir must be an absolute path!')) - sys.exit(1) - repobase = os.path.basename(fdroiddir) - if standardwebroot and repobase != 'fdroid': - logging.error( - _( - 'local_copy_dir does not end with "fdroid", ' - + 'perhaps you meant: "{path}"' - ).format(path=fdroiddir + '/fdroid') - ) - sys.exit(1) - if local_copy_dir[-1] != '/': - local_copy_dir += '/' - local_copy_dir = local_copy_dir.replace('//', '/') - if not os.path.exists(fdroiddir): - os.mkdir(fdroiddir) - - if ( - not config.get('awsbucket') - and not config.get('serverwebroot') - and not config.get('servergitmirrors') - and not config.get('androidobservatory') - and not config.get('binary_transparency_remote') - and not config.get('virustotal_apikey') - and not config.get('github_releases') - and local_copy_dir is None - ): - logging.warning( - _('No option set! 
Edit your config.yml to set at least one of these:') - + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, ' - + 'virustotal_apikey, androidobservatory, github_releases ' - + 'or binary_transparency_remote' - ) - sys.exit(1) - - repo_sections = ['repo'] - if config['archive_older'] != 0: - repo_sections.append('archive') - if not os.path.exists('archive'): - os.mkdir('archive') - if config['per_app_repos']: - repo_sections += common.get_per_app_repos() - - if os.path.isdir('unsigned') or ( - local_copy_dir is not None - and os.path.isdir(os.path.join(local_copy_dir, 'unsigned')) - ): - repo_sections.append('unsigned') - - for repo_section in repo_sections: - if local_copy_dir is not None: - if config['sync_from_local_copy_dir']: - sync_from_localcopy(repo_section, local_copy_dir) - else: - update_localcopy(repo_section, local_copy_dir) - if config.get('serverwebroot'): - update_serverwebroots( - config['serverwebroot'], repo_section, standardwebroot - ) - if config.get('servergitmirrors'): - # update_servergitmirrors will take care of multiple mirrors so don't need a foreach - update_servergitmirrors(config['servergitmirrors'], repo_section) - if config.get('awsbucket'): - awsbucket = config['awsbucket'] - index_only = config.get('awsbucket_index_only') - update_remote_storage_with_rclone( - repo_section, - awsbucket, - index_only, - options.verbose, - options.quiet, - not options.no_checksum, - ) - if config.get('androidobservatory'): - upload_to_android_observatory(repo_section) - if config.get('virustotal_apikey'): - upload_to_virustotal(repo_section, config.get('virustotal_apikey')) - if config.get('github_releases'): - upload_to_github_releases( - repo_section, config.get('github_releases'), config.get('github_token') - ) - - binary_transparency_remote = config.get('binary_transparency_remote') - if binary_transparency_remote: - push_binary_transparency(BINARY_TRANSPARENCY_DIR, binary_transparency_remote) - - common.write_status_json(common.setup_status_output(start_timestamp)) - sys.exit(0) - - -if __name__ == "__main__": - main() diff --git a/fdroidserver/dscanner.py b/fdroidserver/dscanner.py new file mode 100644 index 00000000..49362e5f --- /dev/null +++ b/fdroidserver/dscanner.py @@ -0,0 +1,484 @@ +#!/usr/bin/env python3 +# +# dscanner.py - part of the FDroid server tools +# Copyright (C) 2016-2017 Shawn Gustaw +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import logging +import os +import json +import sys +from time import sleep +from argparse import ArgumentParser +from subprocess import CalledProcessError, check_output + +from . import _ +from . import common +from . import metadata + +try: + from docker import Client +except ImportError: + logging.error(("Docker client not installed." 
+ "Install it using pip install docker-py")) + +config = None +options = None + + +class DockerConfig: + ALIAS = "dscanner" + CONTAINER = "dscanner/fdroidserver" + EMULATOR = "android-19" + ARCH = "armeabi-v7a" + + +class DockerDriver(object): + """ + Handles all the interactions with the docker container the + Android emulator runs in. + """ + class Commands: + build = ['docker', 'build', '--no-cache=false', '--pull=true', + '--quiet=false', '--rm=true', '-t', + '{0}:latest'.format(DockerConfig.CONTAINER), '.'] + run = [ + 'docker', 'run', + '-e', '"EMULATOR={0}"'.format(DockerConfig.EMULATOR), + '-e', '"ARCH={0}"'.format(DockerConfig.ARCH), + '-d', '-P', '--name', + '{0}'.format(DockerConfig.ALIAS), '--log-driver=json-file', + DockerConfig.CONTAINER] + start = ['docker', 'start', '{0}'.format(DockerConfig.ALIAS)] + inspect = ['docker', 'inspect', '{0}'.format(DockerConfig.ALIAS)] + pm_list = 'adb shell "pm list packages"' + install_drozer = "docker exec {0} python /home/drozer/install_agent.py" + run_drozer = 'python /home/drozer/drozer.py {0}' + copy_to_container = 'docker cp "{0}" {1}:{2}' + copy_from_container = 'docker cp {0}:{1} "{2}"' + + def __init__(self, init_only=False, fresh_start=False, clean_only=False): + self.container_id = None + self.ip_address = None + + self.cli = Client(base_url='unix://var/run/docker.sock') + + if fresh_start or clean_only: + self.clean() + + if clean_only: + logging.info("Cleaned containers and quitting.") + exit(0) + + self.init_docker() + + if init_only: + logging.info("Initialized and quitting.") + exit(0) + + def _copy_to_container(self, src_path, dest_path): + """ + Copies a file (presumed to be an apk) from src_path + to home directory on container. + """ + path = '/home/drozer/{path}.apk'.format(path=dest_path) + command = self.Commands.copy_to_container.format(src_path, + self.container_id, + path) + + try: + check_output(command, shell=True) + except CalledProcessError as e: + logging.error(('Command "{command}" failed with ' + 'error code {code}'.format(command=command, + code=e.returncode))) + raise + + def _copy_from_container(self, src_path, dest_path): + """ + Copies a file from src_path on the container to + dest_path on the host machine. + """ + command = self.Commands.copy_from_container.format(self.container_id, + src_path, + dest_path) + try: + check_output(command, shell=True) + except CalledProcessError as e: + logging.error(('Command "{command}" failed with ' + 'error code {code}'.format(command=command, + code=e.returncode))) + raise + + logging.info("Log stored at {path}".format(path=dest_path)) + + def _adb_install_apk(self, apk_path): + """ + Installs an apk on the device running in the container + using adb. + """ + logging.info("Attempting to install an apk.") + exec_id = self.cli.exec_create( + self.container_id, 'adb install {0}' + .format(apk_path) + )['Id'] + output = self.cli.exec_start(exec_id).decode('utf-8') + + if "INSTALL_PARSE_FAILED_NO_CERTIFICATES" in output: + raise Exception('Install parse failed, no certificates') + elif "INSTALL_FAILED_ALREADY_EXISTS" in output: + logging.info("APK already installed. Skipping.") + elif "Success" not in output: + logging.error("APK didn't install properly") + return False + return True + + def _adb_uninstall_apk(self, app_id): + """ + Uninstalls an application from the device running in the container + via its app_id. + """ + logging.info( + "Uninstalling {app_id} from the emulator." 
+ .format(app_id=app_id) + ) + exec_id = self.cli.exec_create( + self.container_id, + 'adb uninstall {0}'.format(app_id) + )['Id'] + output = self.cli.exec_start(exec_id).decode('utf-8') + + if 'Success' in output: + logging.info("Successfully uninstalled.") + + return True + + def _verify_apk_install(self, app_id): + """ + Checks that the app_id is installed on the device running in the + container. + """ + logging.info( + "Verifying {app} is installed on the device." + .format(app=app_id) + ) + exec_id = self.cli.exec_create( + self.container_id, self.Commands.pm_list + )['Id'] + output = self.cli.exec_start(exec_id).decode('utf-8') + + if ("Could not access the Package Manager" in output + or "device offline" in output): + logging.info("Device or package manager isn't up") + + if app_id.split('_')[0] in output: # TODO: this is a temporary fix + logging.info("{app} is installed.".format(app=app_id)) + return True + + logging.error("APK not found in packages list on emulator.") + + def _delete_file(self, path): + """ + Deletes file off the container to preserve space if scanning many apps + """ + command = "rm {path}".format(path=path) + exec_id = self.cli.exec_create(self.container_id, command)['Id'] + logging.info("Deleting {path} on the container.".format(path=path)) + self.cli.exec_start(exec_id) + + def _install_apk(self, apk_path, app_id): + """ + Installs apk found at apk_path on the emulator. Will then + verify it installed properly by looking up its app_id in + the package manager. + """ + if not all([self.container_id, self.ip_address]): + # TODO: maybe have this fail nicely + raise Exception("Went to install apk and couldn't find container") + + path = "/home/drozer/{app_id}.apk".format(app_id=app_id) + self._copy_to_container(apk_path, app_id) + self._adb_install_apk(path) + self._verify_apk_install(app_id) + self._delete_file(path) + + def _install_drozer(self): + """ + Performs all the initialization of drozer within the emulator. + """ + logging.info("Attempting to install com.mwr.dz on the emulator") + logging.info("This could take a while so be patient...") + logging.info(("We need to wait for the device to boot AND" + " the package manager to come online.")) + command = self.Commands.install_drozer.format(self.container_id) + try: + output = check_output(command, + shell=True).decode('utf-8') + except CalledProcessError as e: + logging.error(('Command "{command}" failed with ' + 'error code {code}'.format(command=command, + code=e.returncode))) + raise + + if 'Installed ok' in output: + return True + + def _run_drozer_scan(self, app): + """ + Runs the drozer agent which connects to the app running + on the emulator. + """ + logging.info("Running the drozer agent") + exec_id = self.cli.exec_create( + self.container_id, + self.Commands.run_drozer.format(app) + )['Id'] + self.cli.exec_start(exec_id) + + def _container_is_running(self): + """ + Checks whether the emulator container is running. + """ + for container in self.cli.containers(): + if DockerConfig.ALIAS in container['Image']: + return True + + def _docker_image_exists(self): + """ + Check whether the docker image exists already. + If this returns false we'll need to build the image + from the DockerFile. + """ + for image in self.cli.images(): + for tag in image['RepoTags']: + if DockerConfig.ALIAS in tag: + return True + + _image_queue = {} + + def _build_docker_image(self): + """ + Builds the docker container so we can run the android emulator + inside it. 
+ """ + logging.info("Pulling the container from docker hub") + logging.info("Image is roughly 5 GB so be patient") + + logging.info("(Progress output is slow and requires a tty.)") + # we pause briefly to narrow race condition windows of opportunity + sleep(1) + + is_a_tty = os.isatty(sys.stdout.fileno()) + + for output in self.cli.pull( + DockerConfig.CONTAINER, + stream=True, + tag="latest"): + if not is_a_tty: + # run silent, run quick + continue + try: + p = json.loads(output.decode('utf-8')) + p_id = p['id'] + self._image_queue[p_id] = p + t, c, j = 1, 1, 0 + for k in sorted(self._image_queue): + j += 1 + v = self._image_queue[k] + vd = v['progressDetail'] + t += vd['total'] + c += vd['current'] + msg = "\rDownloading: {0}/{1} {2}% [{3} jobs]" + msg = msg.format(c, t, int(c / t * 100), j) + sys.stdout.write(msg) + sys.stdout.flush() + except Exception: + pass + print("\nDONE!\n") + + def _verify_apk_exists(self, full_apk_path): + """ + Verifies that the apk path we have is actually a file. + """ + return os.path.isfile(full_apk_path) + + def init_docker(self): + """ + Perform all the initialization required before a drozer scan. + 1. build the image + 2. run the container + 3. install drozer and enable the service within the app + """ + built = self._docker_image_exists() + + if not built: + self._build_docker_image() + + running = self._container_is_running() + + if not running: + logging.info('Trying to run container...') + try: + check_output(self.Commands.run) + except CalledProcessError as e: + logging.error(( + 'Command "{command}" failed with error code {code}' + .format(command=self.Commands.run, code=e.returncode) + )) + running = self._container_is_running() + + if not running: + logging.info('Trying to start container...') + try: + check_output(self.Commands.start) + except CalledProcessError as e: + logging.error(( + 'Command "{command}" failed with error code {code}' + .format(command=self.Commands.run, code=e.returncode) + )) + running = self._container_is_running() + + if not running: + raise Exception("Running container not found, critical error.") + + containers = self.cli.containers() + + for container in containers: + if DockerConfig.ALIAS in container['Image']: + self.container_id = container['Id'] + n = container['NetworkSettings']['Networks'] + self.ip_address = n['bridge']['IPAddress'] + break + + if not self.container_id or not self.ip_address: + logging.error("No ip address or container id found.") + exit(1) + + if self._verify_apk_install('com.mwr.dz'): + return + + self._install_drozer() + + def clean(self): + """ + Clean up all the containers made by this script. + Should be run after the drozer scan completes. + """ + for container in self.cli.containers(): + if DockerConfig.ALIAS in container['Image']: + logging.info("Removing container {0}".format(container['Id'])) + self.cli.remove_container(container['Id'], force=True) + + def perform_drozer_scan(self, apk_path, app_id): + """ + Entrypoint for scanning an android app. Performs the following steps: + 1. installs an apk on the device + 2. runs a drozer scan + 3. copies the report off the container + 4. uninstalls the apk to save space on the device + """ + self._install_apk(apk_path, app_id) + logging.info("Running the drozer scan.") + self._run_drozer_scan(app_id) + logging.info("Scan finished. 
Moving the report off the container") + dest = apk_path + '.drozer' + self._copy_from_container('/tmp/drozer_report.log', dest) + self._adb_uninstall_apk(app_id) + + +def main(): + global config, options + + # Parse command line... + parser = ArgumentParser( + usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]" + ) + common.setup_global_opts(parser) + + parser.add_argument( + "app_id", nargs='*', + help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) + parser.add_argument( + "-l", "--latest", action="store_true", default=False, + help=_("Scan only the latest version of each package")) + parser.add_argument( + "--clean-after", default=False, action='store_true', + help=_("Clean after all scans have finished")) + parser.add_argument( + "--clean-before", default=False, action='store_true', + help=_("Clean before the scans start and rebuild the container")) + parser.add_argument( + "--clean-only", default=False, action='store_true', + help=_("Clean up all containers and then exit")) + parser.add_argument( + "--init-only", default=False, action='store_true', + help=_("Prepare Drozer to run a scan")) + parser.add_argument( + "--repo-path", default="repo", action="store", + help=_("Override path for repo APKs (default: ./repo)")) + + options = parser.parse_args() + config = common.read_config(options) + + if not os.path.isdir(options.repo_path): + sys.stderr.write("repo-path not found: \"" + options.repo_path + "\"") + exit(1) + + # Read all app and srclib metadata + allapps = metadata.read_metadata() + apps = common.read_app_args(options.app_id, allapps, True) + + docker = DockerDriver( + init_only=options.init_only, + fresh_start=options.clean_before, + clean_only=options.clean_only + ) + + if options.clean_before: + docker.clean() + + if options.clean_only: + exit(0) + + for app_id, app in apps.items(): + vercode = 0 + if ':' in app_id: + vercode = app_id.split(':')[1] + for build in reversed(app.builds): + if build.disable: + continue + if options.latest or vercode == 0 or build.versionCode == vercode: + app.builds = [build] + break + continue + continue + + for app_id, app in apps.items(): + for build in app.builds: + apks = [] + for f in os.listdir(options.repo_path): + n = common.get_release_filename(app, build) + if f == n: + apks.append(f) + for apk in sorted(apks): + apk_path = os.path.join(options.repo_path, apk) + docker.perform_drozer_scan(apk_path, app.id) + + if options.clean_after: + docker.clean() + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/exception.py b/fdroidserver/exception.py index 682ccef7..f9f876ce 100644 --- a/fdroidserver/exception.py +++ b/fdroidserver/exception.py @@ -1,6 +1,6 @@ class FDroidException(Exception): + def __init__(self, value=None, detail=None): - super().__init__() self.value = value self.detail = detail @@ -9,22 +9,26 @@ class FDroidException(Exception): return self.detail return '[...]\n' + self.detail[-16000:] + def get_wikitext(self): + ret = repr(self.value) + "\n" + if self.detail: + ret += "=detail=\n" + ret += "
\n" + self.shortened_detail() + "
\n" + return ret + def __str__(self): if self.value is None: ret = __name__ else: ret = str(self.value) if self.detail: - ret += ( - "\n==== detail begin ====\n%s\n==== detail end ====" - % ''.join(self.detail).strip() - ) + ret += "\n==== detail begin ====\n%s\n==== detail end ====" % ''.join(self.detail).strip() return ret class MetaDataException(Exception): + def __init__(self, value): - super().__init__() self.value = value def __str__(self): @@ -35,10 +39,6 @@ class VCSException(FDroidException): pass -class NoVersionCodeException(FDroidException): - pass - - class NoSubmodulesException(VCSException): pass @@ -49,10 +49,3 @@ class BuildException(FDroidException): class VerificationException(FDroidException): pass - - -class ConfigurationException(FDroidException): - def __init__(self, value=None, detail=None): - super().__init__() - self.value = value - self.detail = detail diff --git a/fdroidserver/github.py b/fdroidserver/github.py deleted file mode 100644 index 34a3ee53..00000000 --- a/fdroidserver/github.py +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env python3 -# -# github.py - part of the FDroid server tools -# Copyright (C) 2024, Michael Pöhn, michael@poehn.at -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import json -import pathlib -import urllib.parse -import urllib.request - - -class GithubApi: - """Wrapper for some select calls to GitHub Json/REST API. - - This class wraps some calls to api.github.com. This is not intended to be a - general API wrapper. Instead it's purpose is to return pre-filtered and - transformed data that's playing well with other fdroidserver functions. - - With the GitHub API, the token is optional, but it has pretty - severe rate limiting. 
- - """ - - def __init__(self, api_token, repo_path): - self._api_token = api_token - if repo_path.startswith("https://github.com/"): - self._repo_path = repo_path[19:] - else: - self._repo_path = repo_path - - def _req(self, url, data=None): - h = { - "Accept": "application/vnd.github+json", - "X-GitHub-Api-Version": "2022-11-28", - } - if self._api_token: - h["Authorization"] = f"Bearer {self._api_token}" - return urllib.request.Request( - url, - headers=h, - data=data, - ) - - def list_released_tags(self): - """List of all tags that are associated with a release for this repo on GitHub.""" - names = [] - req = self._req(f"https://api.github.com/repos/{self._repo_path}/releases") - with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning - releases = json.load(resp) - for release in releases: - names.append(release['tag_name']) - return names - - def list_unreleased_tags(self): - all_tags = self.list_all_tags() - released_tags = self.list_released_tags() - return [x for x in all_tags if x not in released_tags] - - def get_latest_apk(self): - req = self._req( - f"https://api.github.com/repos/{self._repo_path}/releases/latest" - ) - with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning - assets = json.load(resp)['assets'] - for asset in assets: - url = asset.get('browser_download_url') - if url and url.endswith('.apk'): - return url - - def tag_exists(self, tag): - """ - Check if git tag is present on github. - - https://docs.github.com/en/rest/git/refs?apiVersion=2022-11-28#list-matching-references--fine-grained-access-tokens - """ - req = self._req( - f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/{tag}" - ) - with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning - rd = json.load(resp) - return len(rd) == 1 and rd[0].get("ref", False) == f"refs/tags/{tag}" - return False - - def list_all_tags(self): - """Get list of all tags for this repo on GitHub.""" - tags = [] - req = self._req( - f"https://api.github.com/repos/{self._repo_path}/git/matching-refs/tags/" - ) - with urllib.request.urlopen(req) as resp: # nosec CWE-22 disable bandit warning - refs = json.load(resp) - for ref in refs: - r = ref.get('ref', '') - if r.startswith('refs/tags/'): - tags.append(r[10:]) - return tags - - def create_release(self, tag, files, body=''): - """ - Create a new release on github. - - also see: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release - - :returns: True if release was created, False if release already exists - :raises: urllib exceptions in case of network or api errors, also - raises an exception when the tag doesn't exists. - """ - # Querying github to create a new release for a non-existent tag, will - # also create that tag on github. So we need an additional check to - # prevent this behavior. 
- if not self.tag_exists(tag): - raise Exception( - f"can't create github release for {self._repo_path} {tag}, tag doesn't exists" - ) - # create the relase on github - req = self._req( - f"https://api.github.com/repos/{self._repo_path}/releases", - data=json.dumps( - { - "tag_name": tag, - "body": body, - } - ).encode("utf-8"), - ) - try: - with urllib.request.urlopen( # nosec CWE-22 disable bandit warning - req - ) as resp: - release_id = json.load(resp)['id'] - except urllib.error.HTTPError as e: - if e.status == 422: - codes = [x['code'] for x in json.load(e).get('errors', [])] - if "already_exists" in codes: - return False - raise e - - # attach / upload all files for the relase - for file in files: - self._create_release_asset(release_id, file) - - return True - - def _create_release_asset(self, release_id, file): - """ - Attach a file to a release on GitHub. - - This uploads a file to github relases, it will be attached to the supplied release - - also see: https://docs.github.com/en/rest/releases/assets?apiVersion=2022-11-28#upload-a-release-asset - """ - file = pathlib.Path(file) - with open(file, 'rb') as f: - req = urllib.request.Request( - f"https://uploads.github.com/repos/{self._repo_path}/releases/{release_id}/assets?name={file.name}", - headers={ - "Accept": "application/vnd.github+json", - "Authorization": f"Bearer {self._api_token}", - "X-GitHub-Api-Version": "2022-11-28", - "Content-Type": "application/octet-stream", - }, - data=f.read(), - ) - with urllib.request.urlopen(req): # nosec CWE-22 disable bandit warning - return True - return False diff --git a/fdroidserver/gpgsign.py b/fdroidserver/gpgsign.py index 4341cb36..b942a21b 100644 --- a/fdroidserver/gpgsign.py +++ b/fdroidserver/gpgsign.py @@ -16,60 +16,53 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import glob -import logging import os -import time +import glob from argparse import ArgumentParser +import logging -from . import _, common +from . import _ +from . import common from .common import FDroidPopen from .exception import FDroidException config = None -start_timestamp = time.gmtime() - - -def status_update_json(signed): - """Output a JSON file with metadata about this run.""" - logging.debug(_('Outputting JSON')) - output = common.setup_status_output(start_timestamp) - if signed: - output['signed'] = signed - common.write_status_json(output) +options = None def main(): - global config + + global config, options # Parse command line... - parser = ArgumentParser() + parser = ArgumentParser(usage="%(prog)s [options]") common.setup_global_opts(parser) - common.parse_args(parser) + options = parser.parse_args() - config = common.read_config() + config = common.read_config(options) repodirs = ['repo'] if config['archive_older'] != 0: repodirs.append('archive') - signed = [] for output_dir in repodirs: if not os.path.isdir(output_dir): - raise FDroidException( - _("Missing output directory") + " '" + output_dir + "'" - ) + raise FDroidException(_("Missing output directory") + " '" + output_dir + "'") # Process any apks that are waiting to be signed... 
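        # A sketch with a hypothetical file name: for f = 'repo/app.apk' the loop
        # below derives sigpath = 'repo/app.apk.asc' and builds
        #   gpgargs = ['gpg', '-a', '--output', 'repo/app.apk.asc', '--detach-sig']
        # extended with '--homedir' when gpghome is set in config.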
for f in sorted(glob.glob(os.path.join(output_dir, '*.*'))): - if not common.is_repo_file(f, for_gpg_signing=True): + if common.get_file_extension(f) == 'asc': + continue + if not common.is_repo_file(f): continue filename = os.path.basename(f) sigfilename = filename + ".asc" sigpath = os.path.join(output_dir, sigfilename) if not os.path.exists(sigpath): - gpgargs = ['gpg', '-a', '--output', sigpath, '--detach-sig'] + gpgargs = ['gpg', '-a', + '--output', sigpath, + '--detach-sig'] if 'gpghome' in config: gpgargs.extend(['--homedir', config['gpghome']]) if 'gpgkey' in config: @@ -79,9 +72,7 @@ def main(): if p.returncode != 0: raise FDroidException("Signing failed.") - signed.append(filename) logging.info('Signed ' + filename) - status_update_json(signed) if __name__ == "__main__": diff --git a/fdroidserver/import.py b/fdroidserver/import.py new file mode 100644 index 00000000..51713cee --- /dev/null +++ b/fdroidserver/import.py @@ -0,0 +1,322 @@ +#!/usr/bin/env python3 +# +# import.py - part of the FDroid server tools +# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com +# Copyright (C) 2013-2014 Daniel Martí +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import binascii +import os +import re +import shutil +import urllib.request +from argparse import ArgumentParser +from configparser import ConfigParser +import logging + +from . import _ +from . import common +from . import metadata +from .exception import FDroidException + + +SETTINGS_GRADLE = re.compile(r'''include\s+['"]:([^'"]*)['"]''') + + +# Get the repo type and address from the given web page. The page is scanned +# in a rather naive manner for 'git clone xxxx', 'hg clone xxxx', etc, and +# when one of these is found it's assumed that's the information we want. 
+# Returns repotype, address, or None, reason +def getrepofrompage(url): + + req = urllib.request.urlopen(url) + if req.getcode() != 200: + return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode())) + page = req.read().decode(req.headers.get_content_charset()) + + # Works for BitBucket + m = re.search('data-fetch-url="(.*)"', page) + if m is not None: + repo = m.group(1) + + if repo.endswith('.git'): + return ('git', repo) + + return ('hg', repo) + + # Works for BitBucket (obsolete) + index = page.find('hg clone') + if index != -1: + repotype = 'hg' + repo = page[index + 9:] + index = repo.find('<') + if index == -1: + return (None, _("Error while getting repo address")) + repo = repo[:index] + repo = repo.split('"')[0] + return (repotype, repo) + + # Works for BitBucket (obsolete) + index = page.find('git clone') + if index != -1: + repotype = 'git' + repo = page[index + 10:] + index = repo.find('<') + if index == -1: + return (None, _("Error while getting repo address")) + repo = repo[:index] + repo = repo.split('"')[0] + return (repotype, repo) + + return (None, _("No information found.") + page) + + +config = None +options = None + + +def get_metadata_from_url(app, url): + + tmp_dir = 'tmp' + if not os.path.isdir(tmp_dir): + logging.info(_("Creating temporary directory")) + os.makedirs(tmp_dir) + + # Figure out what kind of project it is... + projecttype = None + app.WebSite = url # by default, we might override it + if url.startswith('git://'): + projecttype = 'git' + repo = url + repotype = 'git' + app.SourceCode = "" + app.WebSite = "" + elif url.startswith('https://github.com'): + projecttype = 'github' + repo = url + repotype = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + app.WebSite = "" + elif url.startswith('https://gitlab.com/'): + projecttype = 'gitlab' + # git can be fussy with gitlab URLs unless they end in .git + if url.endswith('.git'): + url = url[:-4] + repo = url + '.git' + repotype = 'git' + app.WebSite = url + app.SourceCode = url + '/tree/HEAD' + app.IssueTracker = url + '/issues' + elif url.startswith('https://notabug.org/'): + projecttype = 'notabug' + if url.endswith('.git'): + url = url[:-4] + repo = url + '.git' + repotype = 'git' + app.SourceCode = url + app.IssueTracker = url + '/issues' + app.WebSite = "" + elif url.startswith('https://bitbucket.org/'): + if url.endswith('/'): + url = url[:-1] + projecttype = 'bitbucket' + app.SourceCode = url + '/src' + app.IssueTracker = url + '/issues' + # Figure out the repo type and adddress... + repotype, repo = getrepofrompage(url) + if not repotype: + raise FDroidException("Unable to determine vcs type. " + repo) + elif url.startswith('https://') and url.endswith('.git'): + projecttype = 'git' + repo = url + repotype = 'git' + app.SourceCode = "" + app.WebSite = "" + if not projecttype: + raise FDroidException("Unable to determine the project type. " + + "The URL you supplied was not in one of the supported formats. " + + "Please consult the manual for a list of supported formats, " + + "and supply one of those.") + + # Ensure we have a sensible-looking repo address at this point. If not, we + # might have got a page format we weren't expecting. (Note that we + # specifically don't want git@...) 
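    # Illustrative (hypothetical) addresses: 'https://gitlab.com/example/app.git'
    # and 'git://example.org/app' pass this check, while 'git@github.com:x/y.git'
    # or any address containing a space is rejected below.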
+ if ((repotype != 'bzr' and (not repo.startswith('http://') + and not repo.startswith('https://') + and not repo.startswith('git://'))) + or ' ' in repo): + raise FDroidException("Repo address '{0}' does not seem to be valid".format(repo)) + + # Get a copy of the source so we can extract some info... + logging.info('Getting source from ' + repotype + ' repo at ' + repo) + build_dir = os.path.join(tmp_dir, 'importer') + if os.path.exists(build_dir): + shutil.rmtree(build_dir) + vcs = common.getvcs(repotype, repo, build_dir) + vcs.gotorevision(options.rev) + root_dir = get_subdir(build_dir) + + app.RepoType = repotype + app.Repo = repo + + return root_dir, build_dir + + +config = None +options = None + + +def get_subdir(build_dir): + if options.subdir: + return os.path.join(build_dir, options.subdir) + + settings_gradle = os.path.join(build_dir, 'settings.gradle') + if os.path.exists(settings_gradle): + with open(settings_gradle) as fp: + m = SETTINGS_GRADLE.search(fp.read()) + if m: + return os.path.join(build_dir, m.group(1)) + + return build_dir + + +def main(): + + global config, options + + # Parse command line... + parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument("-u", "--url", default=None, + help=_("Project URL to import from.")) + parser.add_argument("-s", "--subdir", default=None, + help=_("Path to main Android project subdirectory, if not in root.")) + parser.add_argument("-c", "--categories", default=None, + help=_("Comma separated list of categories.")) + parser.add_argument("-l", "--license", default=None, + help=_("Overall license of the project.")) + parser.add_argument("--rev", default=None, + help=_("Allows a different revision (or git branch) to be specified for the initial import")) + metadata.add_metadata_arguments(parser) + options = parser.parse_args() + metadata.warnings_action = options.W + + config = common.read_config(options) + + apps = metadata.read_metadata() + app = metadata.App() + app.UpdateCheckMode = "Tags" + + root_dir = None + build_dir = None + + local_metadata_files = common.get_local_metadata_files() + if local_metadata_files != []: + raise FDroidException(_("This repo already has local metadata: %s") % local_metadata_files[0]) + + build = metadata.Build() + if options.url is None and os.path.isdir('.git'): + app.AutoName = os.path.basename(os.getcwd()) + app.RepoType = 'git' + + root_dir = get_subdir(os.getcwd()) + if os.path.exists('build.gradle'): + build.gradle = ['yes'] + + import git + repo = git.repo.Repo(root_dir) # git repo + for remote in git.Remote.iter_items(repo): + if remote.name == 'origin': + url = repo.remotes.origin.url + if url.startswith('https://git'): # github, gitlab + app.SourceCode = url.rstrip('.git') + app.Repo = url + break + # repo.head.commit.binsha is a bytearray stored in a str + build.commit = binascii.hexlify(bytearray(repo.head.commit.binsha)) + write_local_file = True + elif options.url: + root_dir, build_dir = get_metadata_from_url(app, options.url) + build.commit = '?' + build.disable = 'Generated by import.py - check/set version fields and commit id' + write_local_file = False + else: + raise FDroidException("Specify project url.") + + # Extract some information... 
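    # A sketch of the buildozer.spec fallback handled below (all values
    # hypothetical):
    #   [app]
    #   package.domain = org.example
    #   package.name = myapp
    #   version = 1.0
    # which yields package 'org.example.myapp', versionName '1.0' and no versionCode.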
+ paths = common.manifest_paths(root_dir, []) + if paths: + + versionName, versionCode, package = common.parse_androidmanifests(paths, app) + if not package: + raise FDroidException(_("Couldn't find package ID")) + if not versionName: + logging.warn(_("Couldn't find latest version name")) + if not versionCode: + logging.warn(_("Couldn't find latest version code")) + else: + spec = os.path.join(root_dir, 'buildozer.spec') + if os.path.exists(spec): + defaults = {'orientation': 'landscape', 'icon': '', + 'permissions': '', 'android.api': "18"} + bconfig = ConfigParser(defaults, allow_no_value=True) + bconfig.read(spec) + package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name') + versionName = bconfig.get('app', 'version') + versionCode = None + else: + raise FDroidException(_("No android or kivy project could be found. Specify --subdir?")) + + # Make sure it's actually new... + if package in apps: + raise FDroidException("Package " + package + " already exists") + + # Create a build line... + build.versionName = versionName or 'Unknown' + build.versionCode = versionCode or '0' # TODO heinous but this is still a str + if options.subdir: + build.subdir = options.subdir + if options.license: + app.License = options.license + if options.categories: + app.Categories = options.categories.split(',') + if os.path.exists(os.path.join(root_dir, 'jni')): + build.buildjni = ['yes'] + if os.path.exists(os.path.join(root_dir, 'build.gradle')): + build.gradle = ['yes'] + + metadata.post_metadata_parse(app) + + app.builds.append(build) + + if write_local_file: + metadata.write_metadata('.fdroid.yml', app) + else: + # Keep the repo directory to save bandwidth... + if not os.path.exists('build'): + os.mkdir('build') + if build_dir is not None: + shutil.move(build_dir, os.path.join('build', package)) + with open('build/.fdroidvcs-' + package, 'w') as f: + f.write(app.RepoType + ' ' + app.Repo) + + metadatapath = os.path.join('metadata', package + '.yml') + metadata.write_metadata(metadatapath, app) + logging.info("Wrote " + metadatapath) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/import_subcommand.py b/fdroidserver/import_subcommand.py deleted file mode 100644 index 017ebe54..00000000 --- a/fdroidserver/import_subcommand.py +++ /dev/null @@ -1,470 +0,0 @@ -#!/usr/bin/env python3 -"""Extract application metadata from a source repository.""" -# -# import_subcommand.py - part of the FDroid server tools -# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com -# Copyright (C) 2013-2014 Daniel Martí -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
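# A usage sketch for this subcommand (the URL, subdirectory, and license value
# are hypothetical):
#   fdroid import --url https://gitlab.com/example/app --subdir app --license GPL-3.0-only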
- -import json -import logging -import os -import re -import shutil -import stat -import sys -import urllib -from argparse import ArgumentParser -from pathlib import Path -from typing import Optional - -import git -import yaml - -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader - -from . import _, common, metadata -from .exception import FDroidException - -config = None - -SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?') -GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:?([^'"]+)['"]''') -APPLICATION_ID_REGEX = re.compile(r'''\s*applicationId\s=?\s?['"].*['"]''') - - -def get_all_gradle_and_manifests(build_dir): - paths = [] - for root, dirs, files in os.walk(build_dir): - for f in sorted(files): - if f == 'AndroidManifest.xml' or f.endswith(('.gradle', '.gradle.kts')): - full = Path(root) / f - paths.append(full) - return paths - - -def get_gradle_subdir(build_dir, paths): - """Get the subdir where the gradle build is based.""" - first_gradle_dir = None - for path in paths: - if not first_gradle_dir: - first_gradle_dir = path.parent.relative_to(build_dir) - if path.exists() and SETTINGS_GRADLE_REGEX.match(path.name): - for m in GRADLE_SUBPROJECT_REGEX.finditer(path.read_text(encoding='utf-8')): - for f in (path.parent / m.group(1)).glob('build.gradle*'): - with f.open(encoding='utf-8') as fp: - for line in fp: - if common.ANDROID_PLUGIN_REGEX.match( - line - ) or APPLICATION_ID_REGEX.match(line): - return f.parent.relative_to(build_dir) - if first_gradle_dir and first_gradle_dir != Path('.'): - return first_gradle_dir - - -def handle_retree_error_on_windows(function, path, excinfo): - """Python can't remove a readonly file on Windows so chmod first.""" - if function in (os.unlink, os.rmdir, os.remove) and excinfo[0] == PermissionError: - os.chmod(path, stat.S_IWRITE) - function(path) - - -def clone_to_tmp_dir(app: metadata.App, rev=None) -> Path: - """Clone the source repository of an app to a temporary directory for further processing. - - Parameters - ---------- - app - The App instance to clone the source of. - - Returns - ------- - tmp_dir - The (temporary) directory the apps source has been cloned into. - - """ - tmp_dir = Path('tmp') - tmp_dir.mkdir(exist_ok=True) - - tmp_dir = tmp_dir / 'importer' - - if tmp_dir.exists(): - shutil.rmtree(str(tmp_dir), onerror=handle_retree_error_on_windows) - vcs = common.getvcs(app.RepoType, app.Repo, tmp_dir) - vcs.gotorevision(rev) - - return tmp_dir - - -def getrepofrompage(url: str) -> tuple[Optional[str], str]: - """Get the repo type and address from the given web page. - - The page is scanned in a rather naive manner for 'git clone xxxx', - 'hg clone xxxx', etc, and when one of these is found it's assumed - that's the information we want. Returns repotype, address, or - None, reason - - Parameters - ---------- - url - The url to look for repository information at. - - Returns - ------- - repotype_or_none - The found repository type or None if an error occured. - address_or_reason - The address to the found repository or the reason if an error occured. 
- - """ - if not url.startswith('http'): - return (None, _('{url} does not start with "http"!'.format(url=url))) - req = urllib.request.urlopen(url) # nosec B310 non-http URLs are filtered out - if req.getcode() != 200: - return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode())) - page = req.read().decode(req.headers.get_content_charset()) - - # Works for BitBucket - m = re.search('data-fetch-url="(.*)"', page) - if m is not None: - repo = m.group(1) - - if repo.endswith('.git'): - return ('git', repo) - - return ('hg', repo) - - # Works for BitBucket (obsolete) - index = page.find('hg clone') - if index != -1: - repotype = 'hg' - repo = page[index + 9 :] - index = repo.find('<') - if index == -1: - return (None, _("Error while getting repo address")) - repo = repo[:index] - repo = repo.split('"')[0] - return (repotype, repo) - - # Works for BitBucket (obsolete) - index = page.find('git clone') - if index != -1: - repotype = 'git' - repo = page[index + 10 :] - index = repo.find('<') - if index == -1: - return (None, _("Error while getting repo address")) - repo = repo[:index] - repo = repo.split('"')[0] - return (repotype, repo) - - return (None, _("No information found.") + page) - - -def get_app_from_url(url: str) -> metadata.App: - """Guess basic app metadata from the URL. - - The URL must include a network hostname, unless it is an lp:, - file:, or git/ssh URL. This throws ValueError on bad URLs to - match urlparse(). - - Parameters - ---------- - url - The URL to look to look for app metadata at. - - Returns - ------- - app - App instance with the found metadata. - - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If the VCS type could not be determined. - :exc:`ValueError` - If the URL is invalid. - - """ - parsed = urllib.parse.urlparse(url) - invalid_url = False - if not parsed.scheme or not parsed.path: - invalid_url = True - - app = metadata.App() - app.Repo = url - if url.startswith('git://') or url.startswith('git@'): - app.RepoType = 'git' - elif parsed.netloc == 'github.com': - app.RepoType = 'git' - app.SourceCode = url - app.IssueTracker = url + '/issues' - elif parsed.netloc in ('gitlab.com', 'framagit.org'): - # git can be fussy with gitlab URLs unless they end in .git - if url.endswith('.git'): - url = url[:-4] - app.Repo = url + '.git' - app.RepoType = 'git' - app.SourceCode = url - app.IssueTracker = url + '/issues' - elif parsed.netloc == 'notabug.org': - if url.endswith('.git'): - url = url[:-4] - app.Repo = url + '.git' - app.RepoType = 'git' - app.SourceCode = url - app.IssueTracker = url + '/issues' - elif parsed.netloc == 'bitbucket.org': - if url.endswith('/'): - url = url[:-1] - app.SourceCode = url + '/src' - app.IssueTracker = url + '/issues' - # Figure out the repo type and adddress... - app.RepoType, app.Repo = getrepofrompage(url) - elif parsed.netloc == 'codeberg.org': - app.RepoType = 'git' - app.SourceCode = url - app.IssueTracker = url + '/issues' - elif url.startswith('https://') and url.endswith('.git'): - app.RepoType = 'git' - - if not parsed.netloc and parsed.scheme in ('git', 'http', 'https', 'ssh'): - invalid_url = True - - if invalid_url: - raise ValueError(_('"{url}" is not a valid URL!'.format(url=url))) - - if not app.RepoType: - raise FDroidException("Unable to determine vcs type. " + app.Repo) - - return app - - -def main(): - """Extract app metadata and write it to a file. - - The behaviour of this function is influenced by the configuration file as - well as command line parameters. 
- - Raises - ------ - :exc:`~fdroidserver.exception.FDroidException` - If the repository already has local metadata, no URL is specified and - the current directory is not a Git repository, no application ID could - be found, no Gradle project could be found or there is already metadata - for the found application ID. - - """ - global config - - # Parse command line... - parser = ArgumentParser() - common.setup_global_opts(parser) - parser.add_argument("-u", "--url", help=_("Project URL to import from.")) - parser.add_argument( - "-s", - "--subdir", - help=_("Path to main Android project subdirectory, if not in root."), - ) - parser.add_argument( - "-c", - "--categories", - help=_("Comma separated list of categories."), - ) - parser.add_argument("-l", "--license", help=_("Overall license of the project.")) - parser.add_argument( - "--omit-disable", - action="store_true", - help=_("Do not add 'disable:' to the generated build entries"), - ) - parser.add_argument( - "--rev", - help=_( - "Allows a different revision (or git branch) to be specified for the initial import" - ), - ) - metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) - metadata.warnings_action = options.W - - config = common.read_config() - - apps = metadata.read_metadata() - app = None - - tmp_importer_dir = None - - local_metadata_files = common.get_local_metadata_files() - if local_metadata_files: - raise FDroidException( - _("This repo already has local metadata: %s") % local_metadata_files[0] - ) - - build = metadata.Build() - app = metadata.App() - if options.url is None and Path('.git').is_dir(): - app.RepoType = 'git' - tmp_importer_dir = Path.cwd() - git_repo = git.Repo(tmp_importer_dir) - for remote in git.Remote.iter_items(git_repo): - if remote.name == 'origin': - url = git_repo.remotes.origin.url - app = get_app_from_url(url) - break - write_local_file = True - elif options.url: - app = get_app_from_url(options.url) - tmp_importer_dir = clone_to_tmp_dir(app, options.rev) - git_repo = git.Repo(tmp_importer_dir) - - if not options.omit_disable: - build.disable = ( - 'Generated by `fdroid import` - check version fields and commitid' - ) - write_local_file = False - else: - raise FDroidException("Specify project url.") - - app.AutoUpdateMode = 'Version' - app.UpdateCheckMode = 'Tags' - build.commit = common.get_head_commit_id(tmp_importer_dir) - - # Extract some information... - paths = get_all_gradle_and_manifests(tmp_importer_dir) - gradle_subdir = get_gradle_subdir(tmp_importer_dir, paths) - if paths: - versionName, versionCode, appid = common.parse_androidmanifests(paths, app) - if not appid: - raise FDroidException(_("Couldn't find Application ID")) - if not versionName: - logging.warning(_('Could not find latest versionName')) - if not versionCode: - logging.warning(_('Could not find latest versionCode')) - else: - raise FDroidException(_("No gradle project could be found. Specify --subdir?")) - - # Make sure it's actually new... - if appid in apps: - raise FDroidException(_('Package "{appid}" already exists').format(appid=appid)) - - # Create a build line... 
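    # A sketch of the resulting build entry in the generated .yml metadata
    # (values hypothetical):
    #   Builds:
    #     - versionName: '1.2.3'
    #       versionCode: 10203
    #       commit: 1a2b3c4
    #       subdir: app
    #       gradle:
    #         - yes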
- build.versionName = versionName or 'Unknown' - app.CurrentVersion = build.versionName - build.versionCode = versionCode or 0 - app.CurrentVersionCode = build.versionCode - if options.subdir: - build.subdir = options.subdir - elif gradle_subdir: - build.subdir = gradle_subdir.as_posix() - # subdir might be None - subdir = Path(tmp_importer_dir / build.subdir) if build.subdir else tmp_importer_dir - - if options.license: - app.License = options.license - if options.categories: - app.Categories = options.categories.split(',') - if (subdir / 'jni').exists(): - build.buildjni = ['yes'] - if (subdir / 'build.gradle').exists() or (subdir / 'build.gradle.kts').exists(): - build.gradle = ['yes'] - - app.AutoName = common.fetch_real_name(subdir, build.gradle) - - package_json = tmp_importer_dir / 'package.json' # react-native - pubspec_yaml = tmp_importer_dir / 'pubspec.yaml' # flutter - if package_json.exists(): - build.sudo = [ - 'sysctl fs.inotify.max_user_watches=524288 || true', - 'apt-get update', - 'apt-get install -y npm', - ] - build.init = ['npm install --build-from-source'] - with package_json.open() as fp: - data = json.load(fp) - app.AutoName = app.AutoName or data.get('name') - app.License = data.get('license', app.License) - app.Description = data.get('description', app.Description) - app.WebSite = data.get('homepage', app.WebSite) - app_json = tmp_importer_dir / 'app.json' - build.scanignore = ['android/build.gradle'] - build.scandelete = ['node_modules'] - if app_json.exists(): - with app_json.open() as fp: - data = json.load(fp) - app.AutoName = app.AutoName or data.get('name') - if pubspec_yaml.exists(): - with pubspec_yaml.open() as fp: - data = yaml.load(fp, Loader=SafeLoader) - app.AutoName = app.AutoName or data.get('name') - app.License = data.get('license', app.License) - app.Description = data.get('description', app.Description) - app.UpdateCheckData = 'pubspec.yaml|version:\\s.+\\+(\\d+)|.|version:\\s(.+)\\+' - build.srclibs = ['flutter@stable'] - build.output = 'build/app/outputs/flutter-apk/app-release.apk' - build.subdir = None - build.gradle = None - build.prebuild = [ - 'export PUB_CACHE=$(pwd)/.pub-cache', - '$$flutter$$/bin/flutter config --no-analytics', - '$$flutter$$/bin/flutter packages pub get', - ] - build.scandelete = [ - '.pub-cache', - ] - build.build = [ - 'export PUB_CACHE=$(pwd)/.pub-cache', - '$$flutter$$/bin/flutter build apk', - ] - - git_modules = tmp_importer_dir / '.gitmodules' - if git_modules.exists(): - build.submodules = True - - metadata.post_parse_yaml_metadata(app) - - app['Builds'].append(build) - - if write_local_file: - metadata.write_metadata(Path('.fdroid.yml'), app) - else: - # Keep the repo directory to save bandwidth... 
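    # For a hypothetical appid 'org.example.app', the clone is moved to
    # build/org.example.app/ and build/.fdroidvcs-org.example.app records the
    # VCS type and address, e.g. "git https://gitlab.com/example/app.git".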
- Path('build').mkdir(exist_ok=True) - build_dir = Path('build') / appid - if build_dir.exists(): - logging.warning( - _('{path} already exists, ignoring import results!').format( - path=build_dir - ) - ) - sys.exit(1) - elif tmp_importer_dir: - # For Windows: Close the repo or a git.exe instance holds handles to repo - try: - git_repo.close() - except AttributeError: # Debian/stretch's version does not have close() - pass - shutil.move(tmp_importer_dir, build_dir) - Path('build/.fdroidvcs-' + appid).write_text(app.RepoType + ' ' + app.Repo) - - metadatapath = Path('metadata') / (appid + '.yml') - metadata.write_metadata(metadatapath, app) - logging.info("Wrote " + str(metadatapath)) - - -if __name__ == "__main__": - main() diff --git a/fdroidserver/index.py b/fdroidserver/index.py index b63729e4..eb04d597 100644 --- a/fdroidserver/index.py +++ b/fdroidserver/index.py @@ -20,81 +20,54 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -"""Process the index files. - -This module is loaded by all fdroid subcommands since it is loaded in -fdroidserver/__init__.py. Any narrowly used dependencies should be -imported where they are used to limit dependencies for subcommands -like publish/signindex/gpgsign. This eliminates the need to have -these installed on the signing server. - -""" - -import calendar import collections -import hashlib +import copy import json import logging import os import re import shutil -import sys import tempfile import urllib.parse import zipfile +import calendar from binascii import hexlify, unhexlify from datetime import datetime, timezone -from pathlib import Path from xml.dom.minidom import Document -from fdroidserver._yaml import yaml -from fdroidserver.common import ( - ANTIFEATURES_CONFIG_NAME, - CATEGORIES_CONFIG_NAME, - CONFIG_CONFIG_NAME, - DEFAULT_LOCALE, - MIRRORS_CONFIG_NAME, - RELEASECHANNELS_CONFIG_NAME, - FDroidPopen, - FDroidPopenBytes, - load_publish_signer_fingerprints, -) -from fdroidserver.exception import FDroidException, VerificationException - -from . import _, common, metadata, signindex +from . import _ +from . import common +from . import metadata +from . import net +from . import signindex +from fdroidserver.common import FDroidPopen, FDroidPopenBytes, load_stats_fdroid_signing_key_fingerprints +from fdroidserver.exception import FDroidException, VerificationException, MetaDataException -def make(apps, apks, repodir, archive): +def make(apps, sortedids, apks, repodir, archive): """Generate the repo index files. This requires properly initialized options and config objects. - Parameters - ---------- - apps - OrderedDict of apps to go into the index, each app should have - at least one associated apk - apks - list of apks to go into the index - repodir - the repo directory - archive - True if this is the archive repo, False if it's the - main one. + :param apps: fully populated apps list + :param sortedids: app package IDs, sorted + :param apks: full populated apks list + :param repodir: the repo directory + :param archive: True if this is the archive repo, False if it's the + main one. 
""" from fdroidserver.update import METADATA_VERSION - if not hasattr(common.options, 'nosign') or not common.options.nosign: + def _resolve_description_link(appid): + if appid in apps: + return "fdroid.app:" + appid, apps[appid].Name + raise MetaDataException("Cannot resolve app id " + appid) + + if not common.options.nosign: common.assert_config_keystore(common.config) - # Historically the index has been sorted by App Name, so we enforce this ordering here - sortedids = sorted(apps, key=lambda appid: common.get_app_display_name(apps[appid]).upper()) - sortedapps = collections.OrderedDict() - for appid in sortedids: - sortedapps[appid] = apps[appid] - repodict = collections.OrderedDict() - repodict['timestamp'] = datetime.now(timezone.utc) + repodict['timestamp'] = datetime.utcnow().replace(tzinfo=timezone.utc) repodict['version'] = METADATA_VERSION if common.config['repo_maxage'] != 0: @@ -102,23 +75,51 @@ def make(apps, apks, repodir, archive): if archive: repodict['name'] = common.config['archive_name'] - repodict['icon'] = common.config.get('archive_icon', common.default_config['repo_icon']) + repodict['icon'] = os.path.basename(common.config['archive_icon']) + repodict['address'] = common.config['archive_url'] repodict['description'] = common.config['archive_description'] - archive_url = common.config.get('archive_url', common.config['repo_url'][:-4] + 'archive') - repodict['address'] = archive_url - if 'archive_web_base_url' in common.config: - repodict["webBaseUrl"] = common.config['archive_web_base_url'] - repo_section = os.path.basename(urllib.parse.urlparse(archive_url).path) + urlbasepath = os.path.basename(urllib.parse.urlparse(common.config['archive_url']).path) else: repodict['name'] = common.config['repo_name'] - repodict['icon'] = common.config.get('repo_icon', common.default_config['repo_icon']) + repodict['icon'] = os.path.basename(common.config['repo_icon']) repodict['address'] = common.config['repo_url'] - if 'repo_web_base_url' in common.config: - repodict["webBaseUrl"] = common.config['repo_web_base_url'] repodict['description'] = common.config['repo_description'] - repo_section = os.path.basename(urllib.parse.urlparse(common.config['repo_url']).path) + urlbasepath = os.path.basename(urllib.parse.urlparse(common.config['repo_url']).path) - add_mirrors_to_repodict(repo_section, repodict) + mirrorcheckfailed = False + mirrors = [] + for mirror in sorted(common.config.get('mirrors', [])): + base = os.path.basename(urllib.parse.urlparse(mirror).path.rstrip('/')) + if common.config.get('nonstandardwebroot') is not True and base != 'fdroid': + logging.error(_("mirror '%s' does not end with 'fdroid'!") % mirror) + mirrorcheckfailed = True + # must end with / or urljoin strips a whole path segment + if mirror.endswith('/'): + mirrors.append(urllib.parse.urljoin(mirror, urlbasepath)) + else: + mirrors.append(urllib.parse.urljoin(mirror + '/', urlbasepath)) + for mirror in common.config.get('servergitmirrors', []): + for url in get_mirror_service_urls(mirror): + mirrors.append(url + '/' + repodir) + if mirrorcheckfailed: + raise FDroidException(_("Malformed repository mirrors.")) + if mirrors: + repodict['mirrors'] = mirrors + + appsWithPackages = collections.OrderedDict() + for packageName in sortedids: + app = apps[packageName] + if app['Disabled']: + continue + + # only include apps with packages + for apk in apks: + if apk['packageName'] == packageName: + newapp = copy.copy(app) # update wiki needs unmodified description + newapp['Description'] = 
metadata.description_html(app['Description'], + _resolve_description_link) + appsWithPackages[packageName] = newapp + break requestsdict = collections.OrderedDict() for command in ('install', 'uninstall'): @@ -133,606 +134,22 @@ def make(apps, apks, repodir, archive): raise TypeError(_('only accepts strings, lists, and tuples')) requestsdict[command] = packageNames - signer_fingerprints = load_publish_signer_fingerprints() + fdroid_signing_key_fingerprints = load_stats_fdroid_signing_key_fingerprints() - make_v0(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints) - make_v1(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints) - make_v2( - sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints, archive - ) - make_website(sortedapps, repodir, repodict) - make_altstore( - sortedapps, - apks, - common.config, - repodir, - pretty=common.options.pretty, - ) + make_v0(appsWithPackages, apks, repodir, repodict, requestsdict, + fdroid_signing_key_fingerprints) + make_v1(appsWithPackages, apks, repodir, repodict, requestsdict, + fdroid_signing_key_fingerprints) -def _should_file_be_generated(path, magic_string): - if os.path.exists(path): - with open(path) as f: - # if the magic_string is not in the first line the file should be overwritten - if magic_string not in f.readline(): - return False - return True - - -def make_website(apps, repodir, repodict): - # do not change this string, as it will break updates for files with older versions of this string - autogenerate_comment = "auto-generated - fdroid index updates will overwrite this file" - - if not os.path.exists(repodir): - os.makedirs(repodir) - - html_name = 'index.html' - html_file = os.path.join(repodir, html_name) - - if _should_file_be_generated(html_file, autogenerate_comment): - import qrcode - - _ignored, repo_pubkey_fingerprint = extract_pubkey() - repo_pubkey_fingerprint_stripped = repo_pubkey_fingerprint.replace(" ", "") - link = repodict["address"] - link_fingerprinted = '{link}?fingerprint={fingerprint}'.format( - link=link, fingerprint=repo_pubkey_fingerprint_stripped - ) - qrcode.make(link_fingerprinted).save(os.path.join(repodir, "index.png")) - with open(html_file, 'w') as f: - name = repodict["name"] - description = repodict["description"] - icon = repodict["icon"] - f.write(""" - - - - - - - {name} - - - - - - - - - - - - -

-            [page markup not preserved here; the visible text of the generated
-             index.html body was:]
-            {name}
-            (QR code image, alt text "QR: test")
-            {description}
-            Currently it serves {number_of_apps} apps. To add it to your F-Droid
-            app, scan the QR code (click it to enlarge) or use this link:
-            {link}
-            If you would like to manually verify the fingerprint (SHA-256) of the
-            repository signing key, here it is:
-            {fingerprint}
- - -""".format(autogenerate_comment=autogenerate_comment, - description=description, - fingerprint=repo_pubkey_fingerprint, - icon=icon, - link=link, - link_fingerprinted=link_fingerprinted, - name=name, - number_of_apps=str(len(apps)))) - - css_file = os.path.join(repodir, "index.css") - if _should_file_be_generated(css_file, autogenerate_comment): - with open(css_file, "w") as f: - # this auto generated comment was not included via .format(), as python seems to have problems with css files in combination with .format() - f.write("""/* auto-generated - fdroid index updates will overwrite this file */ -BODY { - font-family : Arial, Helvetica, Sans-Serif; - color : #0000ee; - background-color : #ffffff; -} -p { - text-align : justify; -} -p.center { - text-align : center; -} -TD { - font-family : Arial, Helvetica, Sans-Serif; - color : #0000ee; -} -body,td { - font-size : 14px; -} -TH { - font-family : Arial, Helvetica, Sans-Serif; - color : #0000ee; - background-color : #F5EAD4; -} -a:link { - color : #bb0000; -} -a:visited { - color : #ff0000; -} -.zitat { - margin-left : 1cm; - margin-right : 1cm; - font-style : italic; -} -#intro { - border-spacing : 1em; - border : 1px solid gray; - border-radius : 0.5em; - box-shadow : 10px 10px 5px #888; - margin : 1.5em; - font-size : .9em; - width : 600px; - max-width : 90%; - display : table; - margin-left : auto; - margin-right : auto; - font-size : .8em; - color : #555555; -} -#intro > p { - margin-top : 0; -} -#intro p:last-child { - margin-bottom : 0; -} -.last { - border-bottom : 1px solid black; - padding-bottom : .5em; - text-align : center; -} -table { - border-collapse : collapse; -} -h2 { - text-align : center; -} -.perms { - font-family : monospace; - font-size : .8em; -} -.repoapplist { - display : table; - border-collapse : collapse; - margin-left : auto; - margin-right : auto; - width : 600px; - max-width : 90%; -} -.approw, appdetailrow { - display : table-row; -} -.appdetailrow { - display : flex; - padding : .5em; -} -.appiconbig, .appdetailblock, .appdetailcell { - display : table-cell -} -.appiconbig { - vertical-align : middle; - text-align : center; -} -.appdetailinner { - width : 100%; -} -.applinkcell { - text-align : center; - float : right; - width : 100%; - margin-bottom : .1em; -} -.paddedlink { - margin : 1em; -} -.approw { - border-spacing : 1em; - border : 1px solid gray; - border-radius : 0.5em; - padding : 0.5em; - margin : 1.5em; -} -.appdetailinner .appdetailrow:first-child { - background-color : #d5d5d5; -} -.appdetailinner .appdetailrow:first-child .appdetailcell { - min-width : 33%; - flex : 1 33%; - text-align : center; -} -.appdetailinner .appdetailrow:first-child .appdetailcell:first-child { - text-align : left; -} -.appdetailinner .appdetailrow:first-child .appdetailcell:last-child { - float : none; - text-align : right; -} -.minor-details { - font-size : .8em; - color : #555555; -} -.boldname { - font-weight : bold; -} -#appcount { - text-align : center; - margin-bottom : .5em; -} -kbd { - padding : 0.1em 0.6em; - border : 1px solid #CCC; - background-color : #F7F7F7; - color : #333; - box-shadow : 0px 1px 0px rgba(0, 0, 0, 0.2), 0px 0px 0px 2px #FFF inset; - border-radius : 3px; - display : inline-block; - margin : 0px 0.1em; - text-shadow : 0px 1px 0px #FFF; - white-space : nowrap; -} -div.filterline, div.repoline { - display : table; - margin-left : auto; - margin-right : auto; - margin-bottom : 1em; - vertical-align : middle; - display : table; - font-size : .8em; -} -.filterline form { - display : 
table-row; -} -.filterline .filtercell { - display : table-cell; - vertical-align : middle; -} -fieldset { - float : left; -} -fieldset select, fieldset input, #reposelect select, #reposelect input { - font-size : .9em; -} -.pager { - display : table; - margin-left : auto; - margin-right : auto; - width : 600px; - max-width : 90%; - padding-top : .6em; -} -/* should correspond to .repoapplist */ -.pagerrow { - display : table-row; -} -.pagercell { - display : table-cell; -} -.pagercell.left { - text-align : left; - padding-right : 1em; -} -.pagercell.middle { - text-align : center; - font-size : .9em; - color : #555; -} -.pagercell.right { - text-align : right; - padding-left : 1em; -} -.anti { - color : peru; -} -.antibold { - color : crimson; -} -#footer { - text-align : center; - margin-top : 1em; - font-size : 11px; - color : #555; -} -#footer img { - vertical-align : middle; -} -@media (max-width: 600px) { - .repoapplist { - display : block; - } - .appdetailinner, .appdetailrow { - display : block; - } - .appdetailcell { - display : block; - float : left; - line-height : 1.5em; - } -}""") - - -def dict_diff(source, target): - if not isinstance(target, dict) or not isinstance(source, dict): - return target - - result = {key: None for key in source if key not in target} - - for key, value in target.items(): - if key not in source: - result[key] = value - elif value != source[key]: - result[key] = dict_diff(source[key], value) - - return result - - -def convert_datetime(obj): - if isinstance(obj, datetime): - # Java prefers milliseconds - # we also need to account for time zone/daylight saving time - return int(calendar.timegm(obj.timetuple()) * 1000) - return obj - - -def package_metadata(app, repodir): - meta = {} - for element in ( - "added", - # "binaries", - "Categories", - "Changelog", - "IssueTracker", - "lastUpdated", - "License", - "SourceCode", - "Translation", - "WebSite", - "featureGraphic", - "promoGraphic", - "tvBanner", - "screenshots", - "AuthorEmail", - "AuthorName", - "AuthorPhone", - "AuthorWebSite", - "Bitcoin", - "Liberapay", - "Litecoin", - "OpenCollective", - ): - if element in app and app[element]: - element_new = element[:1].lower() + element[1:] - meta[element_new] = convert_datetime(app[element]) - - for element in ( - "Name", - "Summary", - "Description", - "video", - ): - element_new = element[:1].lower() + element[1:] - if element in app and app[element]: - meta[element_new] = {DEFAULT_LOCALE: convert_datetime(app[element])} - elif "localized" in app: - localized = {k: v[element_new] for k, v in app["localized"].items() if element_new in v} - if localized: - meta[element_new] = localized - - if "name" not in meta and app["AutoName"]: - meta["name"] = {DEFAULT_LOCALE: app["AutoName"]} - - # fdroidserver/metadata.py App default - if meta["license"] == "Unknown": - del meta["license"] - - if app["Donate"]: - meta["donate"] = [app["Donate"]] - - # TODO handle different resolutions - if app.get("icon"): - icon_path = os.path.join(repodir, "icons", app["icon"]) - meta["icon"] = {DEFAULT_LOCALE: common.file_entry(icon_path)} - - if "iconv2" in app: - meta["icon"] = app["iconv2"] - - return meta - - -def convert_version(version, app, repodir): - """Convert the internal representation of Builds: into index-v2 versions. - - The diff algorithm of index-v2 uses null/None to mean a field to - be removed, so this function handles any Nones that are in the - metadata file. 
- - """ - ver = {} - if "added" in version: - ver["added"] = convert_datetime(version["added"]) - else: - ver["added"] = 0 - - ver["file"] = { - "name": "/{}".format(version["apkName"]), - version["hashType"]: version["hash"], - "size": version["size"], - } - - ipfsCIDv1 = version.get("ipfsCIDv1") - if ipfsCIDv1: - ver["file"]["ipfsCIDv1"] = ipfsCIDv1 - - if "srcname" in version: - ver["src"] = common.file_entry( - os.path.join(repodir, version["srcname"]), - version["srcnameSha256"], - ) - - if "obbMainFile" in version: - ver["obbMainFile"] = common.file_entry( - os.path.join(repodir, version["obbMainFile"]), - version["obbMainFileSha256"], - ) - - if "obbPatchFile" in version: - ver["obbPatchFile"] = common.file_entry( - os.path.join(repodir, version["obbPatchFile"]), - version["obbPatchFileSha256"], - ) - - ver["manifest"] = manifest = {} - - for element in ( - "nativecode", - "versionName", - "maxSdkVersion", - ): - if element in version: - manifest[element] = version[element] - - if "versionCode" in version: - manifest["versionCode"] = version["versionCode"] - - if "features" in version and version["features"]: - manifest["features"] = features = [] - for feature in version["features"]: - # TODO get version from manifest, default (0) is omitted - # features.append({"name": feature, "version": 1}) - features.append({"name": feature}) - - if "minSdkVersion" in version: - manifest["usesSdk"] = {} - manifest["usesSdk"]["minSdkVersion"] = version["minSdkVersion"] - if "targetSdkVersion" in version: - manifest["usesSdk"]["targetSdkVersion"] = version["targetSdkVersion"] - else: - # https://developer.android.com/guide/topics/manifest/uses-sdk-element.html#target - manifest["usesSdk"]["targetSdkVersion"] = manifest["usesSdk"]["minSdkVersion"] - - if "signer" in version: - manifest["signer"] = {"sha256": [version["signer"]]} - - for element in ("uses-permission", "uses-permission-sdk-23"): - en = element.replace("uses-permission", "usesPermission").replace("-sdk-23", "Sdk23") - if element in version and version[element]: - manifest[en] = [] - for perm in version[element]: - if perm[1]: - manifest[en].append({"name": perm[0], "maxSdkVersion": perm[1]}) - else: - manifest[en].append({"name": perm[0]}) - - # index-v2 has only per-version antifeatures, not per package. 
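For illustration only, a rough sketch of that merge with invented anti-feature names and descriptions: the per-app entries are copied first, per-version entries then override any entry with the same name, and the result is key-sorted so the index output stays stable.

    app_af = {'NonFreeNet': {'en-US': 'Relies on a proprietary network service'}}
    version_af = {
        'KnownVuln': {'en-US': 'This release has a known vulnerability'},
        'NonFreeNet': {'en-US': 'Per-version text overrides the per-app text'},
    }

    merged = app_af.copy()
    merged.update(version_af)  # same-name entries from the version win
    ver_antiFeatures = {k: dict(sorted(merged[k].items())) for k in sorted(merged)}
    # -> {'KnownVuln': {...}, 'NonFreeNet': {'en-US': 'Per-version text overrides ...'}}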
- antiFeatures = app.get('AntiFeatures', {}).copy() - for name, descdict in version.get('antiFeatures', dict()).items(): - antiFeatures[name] = descdict - if antiFeatures: - ver['antiFeatures'] = { - k: dict(sorted(antiFeatures[k].items())) for k in sorted(antiFeatures) - } - - if "versionCode" in version: - if version["versionCode"] > app["CurrentVersionCode"]: - ver[RELEASECHANNELS_CONFIG_NAME] = ["Beta"] - - builds = app.get("Builds", []) - - if len(builds) > 0 and version["versionCode"] == builds[-1]["versionCode"]: - if "localized" in app: - localized = {k: v["whatsNew"] for k, v in app["localized"].items() if "whatsNew" in v} - if localized: - ver["whatsNew"] = localized - - for build in builds: - if build['versionCode'] == version['versionCode'] and "whatsNew" in build: - ver["whatsNew"] = build["whatsNew"] - break - - return ver - - -def v2_repo(repodict, repodir, archive): - repo = {} - - repo["name"] = {DEFAULT_LOCALE: repodict["name"]} - repo["description"] = {DEFAULT_LOCALE: repodict["description"]} - repo["icon"] = { - DEFAULT_LOCALE: common.file_entry("%s/icons/%s" % (repodir, repodict["icon"])) - } - - config = common.load_localized_config(CONFIG_CONFIG_NAME, repodir) - if config: - localized_config = config["archive" if archive else "repo"] - if "name" in localized_config: - repo["name"] = localized_config["name"] - if "description" in localized_config: - repo["description"] = localized_config["description"] - if "icon" in localized_config: - repo["icon"] = localized_config["icon"] - - repo["address"] = repodict["address"] - if "mirrors" in repodict: - repo["mirrors"] = repodict["mirrors"] - if "webBaseUrl" in repodict: - repo["webBaseUrl"] = repodict["webBaseUrl"] - - repo["timestamp"] = repodict["timestamp"] - - antiFeatures = common.load_localized_config(ANTIFEATURES_CONFIG_NAME, repodir) - if antiFeatures: - repo[ANTIFEATURES_CONFIG_NAME] = antiFeatures - - categories = common.load_localized_config(CATEGORIES_CONFIG_NAME, repodir) - if categories: - repo[CATEGORIES_CONFIG_NAME] = categories - - releaseChannels = common.load_localized_config(RELEASECHANNELS_CONFIG_NAME, repodir) - if releaseChannels: - repo[RELEASECHANNELS_CONFIG_NAME] = releaseChannels - - return repo - - -def make_v2(apps, packages, repodir, repodict, requestsdict, signer_fingerprints, archive): +def make_v1(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints): def _index_encoder_default(obj): if isinstance(obj, set): return sorted(list(obj)) if isinstance(obj, datetime): # Java prefers milliseconds - # we also need to account for time zone/daylight saving time + # we also need to accound for time zone/daylight saving time return int(calendar.timegm(obj.timetuple()) * 1000) if isinstance(obj, dict): d = collections.OrderedDict() @@ -742,176 +159,25 @@ def make_v2(apps, packages, repodir, repodict, requestsdict, signer_fingerprints raise TypeError(repr(obj) + " is not JSON serializable") output = collections.OrderedDict() - output["repo"] = v2_repo(repodict, repodir, archive) - if requestsdict and (requestsdict["install"] or requestsdict["uninstall"]): - output["repo"]["requests"] = requestsdict - - # establish sort order of the index - sort_package_versions(packages, signer_fingerprints) - - output_packages = collections.OrderedDict() - output['packages'] = output_packages - categories_used_by_apps = set() - for package in packages: - packageName = package['packageName'] - if packageName not in apps: - logging.info(_('Ignoring package without metadata: ') + 
package['apkName']) - continue - if not package.get('versionName'): - app = apps[packageName] - for build in app.get('Builds', []): - if build['versionCode'] == package['versionCode']: - versionName = build.get('versionName') - logging.info(_('Overriding blank versionName in {apkfilename} from metadata: {version}') - .format(apkfilename=package['apkName'], version=versionName)) - package['versionName'] = versionName - break - if packageName in output_packages: - packagelist = output_packages[packageName] - else: - packagelist = {} - output_packages[packageName] = packagelist - app = apps[packageName] - categories_used_by_apps.update(app.get('Categories', [])) - packagelist["metadata"] = package_metadata(app, repodir) - if "signer" in package: - packagelist["metadata"]["preferredSigner"] = package["signer"] - - packagelist["versions"] = {} - - packagelist["versions"][package["hash"]] = convert_version(package, apps[packageName], repodir) - - if categories_used_by_apps and not output['repo'].get(CATEGORIES_CONFIG_NAME): - output['repo'][CATEGORIES_CONFIG_NAME] = dict() - # include definitions for "auto-defined" categories, e.g. just used in app metadata - for category in sorted(categories_used_by_apps): - if category not in output['repo'][CATEGORIES_CONFIG_NAME]: - output['repo'][CATEGORIES_CONFIG_NAME][category] = dict() - if 'name' not in output['repo'][CATEGORIES_CONFIG_NAME][category]: - output['repo'][CATEGORIES_CONFIG_NAME][category]['name'] = {DEFAULT_LOCALE: category} - # do not include defined categories if no apps use them - for category in list(output['repo'].get(CATEGORIES_CONFIG_NAME, list())): - if category not in categories_used_by_apps: - del output['repo'][CATEGORIES_CONFIG_NAME][category] - msg = _('Category "{category}" defined but not used for any apps!') - logging.warning(msg.format(category=category)) - - entry = {} - entry["timestamp"] = repodict["timestamp"] - - entry["version"] = repodict["version"] - if "maxage" in repodict: - entry["maxAge"] = repodict["maxage"] - - json_name = 'index-v2.json' - index_file = os.path.join(repodir, json_name) - with open(index_file, "w", encoding="utf-8") as fp: - if common.options.pretty: - json.dump(output, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) - else: - json.dump(output, fp, default=_index_encoder_default, ensure_ascii=False) - - json_name = "tmp/{}_{}.json".format(repodir, convert_datetime(repodict["timestamp"])) - with open(json_name, "w", encoding="utf-8") as fp: - if common.options.pretty: - json.dump(output, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) - else: - json.dump(output, fp, default=_index_encoder_default, ensure_ascii=False) - - entry["index"] = common.file_entry(index_file) - entry["index"]["numPackages"] = len(output.get("packages", [])) - - indexes = sorted(Path().glob("tmp/{}*.json".format(repodir)), key=lambda x: x.name) - indexes.pop() # remove current index - # remove older indexes - while len(indexes) > 10: - indexes.pop(0).unlink() - - indexes = [json.loads(Path(fn).read_text(encoding="utf-8")) for fn in indexes] - - for diff in Path().glob("{}/diff/*.json".format(repodir)): - diff.unlink() - - entry["diffs"] = {} - for old in indexes: - diff_name = str(old["repo"]["timestamp"]) + ".json" - diff_file = os.path.join(repodir, "diff", diff_name) - diff = dict_diff(old, output) - if not os.path.exists(os.path.join(repodir, "diff")): - os.makedirs(os.path.join(repodir, "diff")) - with open(diff_file, "w", encoding="utf-8") as fp: - if common.options.pretty: - 
json.dump(diff, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) - else: - json.dump(diff, fp, default=_index_encoder_default, ensure_ascii=False) - - entry["diffs"][old["repo"]["timestamp"]] = common.file_entry(diff_file) - entry["diffs"][old["repo"]["timestamp"]]["numPackages"] = len(diff.get("packages", [])) - - json_name = "entry.json" - index_file = os.path.join(repodir, json_name) - with open(index_file, "w", encoding="utf-8") as fp: - if common.options.pretty: - json.dump(entry, fp, default=_index_encoder_default, indent=2, ensure_ascii=False) - else: - json.dump(entry, fp, default=_index_encoder_default, ensure_ascii=False) - - if common.options.nosign: - _copy_to_local_copy_dir(repodir, index_file) - logging.debug(_('index-v2 must have a signature, use `fdroid signindex` to create it!')) - else: - signindex.config = common.config - signindex.sign_index(repodir, json_name) - - -def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints): - - def _index_encoder_default(obj): - if isinstance(obj, set): - return sorted(list(obj)) - if isinstance(obj, datetime): - # Java prefers milliseconds - # we also need to account for time zone/daylight saving time - return int(calendar.timegm(obj.timetuple()) * 1000) - if isinstance(obj, dict): - d = collections.OrderedDict() - for key in sorted(obj.keys()): - d[key] = obj[key] - return d - raise TypeError(repr(obj) + " is not JSON serializable") - - output = collections.OrderedDict() - output['repo'] = repodict.copy() + output['repo'] = repodict output['requests'] = requestsdict - # index-v1 only supports a list of URL strings for additional mirrors - mirrors = [] - for mirror in repodict.get('mirrors', []): - url = mirror['url'] - if url != repodict['address']: - mirrors.append(mirror['url']) - if mirrors: - output['repo']['mirrors'] = mirrors - # establish sort order of the index - sort_package_versions(packages, signer_fingerprints) + v1_sort_packages(packages, fdroid_signing_key_fingerprints) appslist = [] output['apps'] = appslist - for packageName, app_dict in apps.items(): + for packageName, appdict in apps.items(): d = collections.OrderedDict() appslist.append(d) - for k, v in sorted(app_dict.items()): + for k, v in sorted(appdict.items()): if not v: continue - if k in ('Builds', 'metadatapath', - 'ArchivePolicy', 'AutoName', 'AutoUpdateMode', 'MaintainerNotes', + if k in ('builds', 'comments', 'metadatapath', + 'ArchivePolicy', 'AutoUpdateMode', 'MaintainerNotes', 'Provides', 'Repo', 'RepoType', 'RequiresRoot', 'UpdateCheckData', 'UpdateCheckIgnore', 'UpdateCheckMode', - 'UpdateCheckName', 'NoSourceSince', 'VercodeOperation', - 'summary', 'description', 'promoGraphic', 'screenshots', 'whatsNew', - 'featureGraphic', 'iconv2', 'tvBanner', - ): + 'UpdateCheckName', 'NoSourceSince', 'VercodeOperation'): continue # name things after the App class fields in fdroidclient @@ -919,27 +185,26 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints k = 'packageName' elif k == 'CurrentVersionCode': # TODO make SuggestedVersionCode the canonical name k = 'suggestedVersionCode' - v = str(v) elif k == 'CurrentVersion': # TODO make SuggestedVersionName the canonical name k = 'suggestedVersionName' + elif k == 'AutoName': + if 'Name' not in apps[packageName]: + d['name'] = v + continue else: k = k[:1].lower() + k[1:] d[k] = v - # establish sort order in lists, sets, and localized dicts - for app_dict in output['apps']: - localized = app_dict.get('localized') + # establish sort order in 
localized dicts + for app in output['apps']: + localized = app.get('localized') if localized: lordered = collections.OrderedDict() for lkey, lvalue in sorted(localized.items()): lordered[lkey] = collections.OrderedDict() for ikey, iname in sorted(lvalue.items()): lordered[lkey][ikey] = iname - app_dict['localized'] = lordered - # v1 uses a list of keys for Anti-Features - antiFeatures = app_dict.get('antiFeatures', dict()).keys() - if antiFeatures: - app_dict['antiFeatures'] = sorted(set(antiFeatures)) + app['localized'] = lordered output_packages = collections.OrderedDict() output['packages'] = output_packages @@ -950,8 +215,9 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints continue if not package.get('versionName'): app = apps[packageName] - for build in app.get('Builds', []): - if build['versionCode'] == package['versionCode']: + versionCodeStr = str(package['versionCode']) # TODO build.versionCode should be int! + for build in app['builds']: + if build['versionCode'] == versionCodeStr: versionName = build.get('versionName') logging.info(_('Overriding blank versionName in {apkfilename} from metadata: {version}') .format(apkfilename=package['apkName'], version=versionName)) @@ -967,10 +233,7 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints for k, v in sorted(package.items()): if not v: continue - if k in ('icon', 'icons', 'icons_src', 'ipfsCIDv1', 'name', 'srcnameSha256'): - continue - if k == 'antiFeatures': - d[k] = sorted(v.keys()) + if k in ('icon', 'icons', 'icons_src', 'name', ): continue d[k] = v @@ -983,37 +246,21 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints json.dump(output, fp, default=_index_encoder_default) if common.options.nosign: - _copy_to_local_copy_dir(repodir, index_file) logging.debug(_('index-v1 must have a signature, use `fdroid signindex` to create it!')) else: signindex.config = common.config - signindex.sign_index(repodir, json_name) + signindex.sign_index_v1(repodir, json_name) -def _copy_to_local_copy_dir(repodir, f): - local_copy_dir = common.config.get('local_copy_dir', '') - if os.path.exists(local_copy_dir): - destdir = os.path.join(local_copy_dir, repodir) - if not os.path.exists(destdir): - os.mkdir(destdir) - shutil.copy2(f, destdir, follow_symlinks=False) - elif local_copy_dir: - raise FDroidException(_('"local_copy_dir" {path} does not exist!') - .format(path=local_copy_dir)) - - -def sort_package_versions(packages, signer_fingerprints): - """Sort to ensure a deterministic order for package versions in the index file. - - This sort-order also expresses +def v1_sort_packages(packages, fdroid_signing_key_fingerprints): + """Sorts the supplied list to ensure a deterministic sort order for + package entries in the index file. This sort-order also expresses installation preference to the clients. (First in this list = first to install) - Parameters - ---------- - packages - list of packages which need to be sorted before but into index file. + :param packages: list of packages which need to be sorted before but into index file. 
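A toy illustration of that ordering (the signer values below are invented stand-ins for certificate fingerprints): entries sort by package name, then by signer group (developer key first, then the F-Droid key, then anything else), then by descending versionCode, so the first entry for a package is the one clients should prefer to install.

    GROUP = {'devkey': 1, 'fdroidkey': 2, 'otherkey': 3}
    pkgs = [
        {'packageName': 'org.example.app', 'signer': 'otherkey', 'versionCode': 7},
        {'packageName': 'org.example.app', 'signer': 'fdroidkey', 'versionCode': 9},
        {'packageName': 'org.example.app', 'signer': 'devkey', 'versionCode': 9},
    ]
    pkgs.sort(key=lambda p: (p['packageName'], GROUP[p['signer']], p['signer'], -p['versionCode']))
    # -> devkey v9, then fdroidkey v9, then otherkey v7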
""" + GROUP_DEV_SIGNED = 1 GROUP_FDROID_SIGNED = 2 GROUP_OTHER_SIGNED = 3 @@ -1021,28 +268,31 @@ def sort_package_versions(packages, signer_fingerprints): def v1_sort_keys(package): packageName = package.get('packageName', None) - signer = package.get('signer', None) + sig = package.get('signer', None) - dev_signer = common.metadata_find_developer_signature(packageName) + dev_sig = common.metadata_find_developer_signature(packageName) group = GROUP_OTHER_SIGNED - if dev_signer and dev_signer == signer: + if dev_sig and dev_sig == sig: group = GROUP_DEV_SIGNED else: - fdroid_signer = signer_fingerprints.get(packageName, {}).get('signer') - if fdroid_signer and fdroid_signer == signer: + fdroidsig = fdroid_signing_key_fingerprints.get(packageName, {}).get('signer') + if fdroidsig and fdroidsig == sig: group = GROUP_FDROID_SIGNED versionCode = None if package.get('versionCode', None): - versionCode = -package['versionCode'] + versionCode = -int(package['versionCode']) - return packageName, group, signer, versionCode + return(packageName, group, sig, versionCode) packages.sort(key=v1_sort_keys) -def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): - """Aka index.jar aka index.xml.""" +def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints): + """ + aka index.jar aka index.xml + """ + doc = Document() def addElement(name, value, doc, parent): @@ -1061,8 +311,13 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): value = str(apk[key]) addElement(name, value, doc, parent) + def addElementCDATA(name, value, doc, parent): + el = doc.createElement(name) + el.appendChild(doc.createCDATASection(value)) + parent.appendChild(el) + def addElementCheckLocalized(name, app, key, doc, parent, default=''): - """Fill in field from metadata or localized block. + """Fill in field from metadata or localized block For name/summary/description, they can come only from the app source, or from a dir in fdroiddata. They can be entirely missing from the @@ -1073,12 +328,13 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): alpha- sort order. 
""" + el = doc.createElement(name) value = app.get(key) lkey = key[:1].lower() + key[1:] localized = app.get('localized') if not value and localized: - for lang in [DEFAULT_LOCALE] + [x for x in localized.keys()]: + for lang in ['en-US'] + [x for x in localized.keys()]: if not lang.startswith('en'): continue if lang in localized: @@ -1090,8 +346,6 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): value = localized[lang].get(lkey) if not value: value = default - if not value and name == 'name' and app.get('AutoName'): - value = app['AutoName'] el.appendChild(doc.createTextNode(value)) parent.appendChild(el) @@ -1099,7 +353,7 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): doc.appendChild(root) repoel = doc.createElement("repo") - repoel.setAttribute("icon", repodict['icon']) + repoel.setAttribute("icon", os.path.basename(repodict['icon'])) if 'maxage' in repodict: repoel.setAttribute("maxage", str(repodict['maxage'])) repoel.setAttribute("name", repodict['name']) @@ -1110,11 +364,8 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): repoel.setAttribute("version", str(repodict['version'])) addElement('description', repodict['description'], doc, repoel) - # index v0 only supports a list of URL strings for additional mirrors for mirror in repodict.get('mirrors', []): - url = mirror['url'] - if url != repodict['address']: - addElement('mirror', url, doc, repoel) + addElement('mirror', mirror, doc, repoel) root.appendChild(repoel) @@ -1124,27 +375,24 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): root.appendChild(element) element.setAttribute('packageName', packageName) - for appid, app_dict in apps.items(): - app = metadata.App(app_dict) + for appid, appdict in apps.items(): + app = metadata.App(appdict) - if app.get('Disabled') is not None: + if app.Disabled is not None: continue # Get a list of the apks for this app... apklist = [] - name_from_apk = None apksbyversion = collections.defaultdict(lambda: []) for apk in apks: if apk.get('versionCode') and apk.get('packageName') == appid: apksbyversion[apk['versionCode']].append(apk) - if name_from_apk is None: - name_from_apk = apk.get('name') for versionCode, apksforver in apksbyversion.items(): - fdroid_signer = signer_fingerprints.get(appid, {}).get('signer') + fdroidsig = fdroid_signing_key_fingerprints.get(appid, {}).get('signer') fdroid_signed_apk = None name_match_apk = None for x in apksforver: - if fdroid_signer and x.get('signer', None) == fdroid_signer: + if fdroidsig and x.get('signer', None) == fdroidsig: fdroid_signed_apk = x if common.apk_release_filename.match(x.get('apkName', '')): name_match_apk = x @@ -1170,14 +418,14 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): if app.lastUpdated: addElement('lastupdated', app.lastUpdated.strftime('%Y-%m-%d'), doc, apel) - addElementCheckLocalized('name', app, 'Name', doc, apel, name_from_apk) + addElementCheckLocalized('name', app, 'Name', doc, apel) addElementCheckLocalized('summary', app, 'Summary', doc, apel) if app.icon: addElement('icon', app.icon, doc, apel) addElementCheckLocalized('desc', app, 'Description', doc, apel, - 'No description available') + '
<p>No description available</p>
') addElement('license', app.License, doc, apel) if app.Categories: @@ -1195,13 +443,15 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): addElementNonEmpty('donate', app.Donate, doc, apel) addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel) addElementNonEmpty('litecoin', app.Litecoin, doc, apel) + addElementNonEmpty('flattr', app.FlattrID, doc, apel) + addElementNonEmpty('liberapay', app.LiberapayID, doc, apel) addElementNonEmpty('openCollective', app.OpenCollective, doc, apel) # These elements actually refer to the current version (i.e. which # one is recommended. They are historically mis-named, and need # changing, but stay like this for now to support existing clients. addElement('marketversion', app.CurrentVersion, doc, apel) - addElement('marketvercode', str(app.CurrentVersionCode), doc, apel) + addElement('marketvercode', app.CurrentVersionCode, doc, apel) if app.Provides: pv = app.Provides.split(',') @@ -1209,16 +459,14 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): if app.RequiresRoot: addElement('requirements', 'root', doc, apel) - # Sort the APK list into version order, just so the web site + # Sort the apk list into version order, just so the web site # doesn't have to do any work by default... apklist = sorted(apklist, key=lambda apk: apk['versionCode'], reverse=True) - antiFeatures = list(app.AntiFeatures) if 'antiFeatures' in apklist[0]: - antiFeatures.extend(apklist[0]['antiFeatures']) - if antiFeatures: - afout = sorted(set(antiFeatures)) - addElementNonEmpty('antifeatures', ','.join(afout), doc, apel) + app.AntiFeatures.extend(apklist[0]['antiFeatures']) + if app.AntiFeatures: + addElementNonEmpty('antifeatures', ','.join(app.AntiFeatures), doc, apel) # Check for duplicates - they will make the client unhappy... for i in range(len(apklist) - 1): @@ -1238,21 +486,19 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): for apk in apklist: file_extension = common.get_file_extension(apk['apkName']) # find the APK for the "Current Version" - if current_version_code < app.CurrentVersionCode: - current_version_file = apk['apkName'] if current_version_code < apk['versionCode']: current_version_code = apk['versionCode'] + if current_version_code < int(app.CurrentVersionCode): + current_version_file = apk['apkName'] apkel = doc.createElement("package") apel.appendChild(apkel) versionName = apk.get('versionName') if not versionName: - for build in app.get('Builds', []): - if ( - build['versionCode'] == apk['versionCode'] - and 'versionName' in build - ): + versionCodeStr = str(apk['versionCode']) # TODO build.versionCode should be int! 
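For illustration (values invented): in the old .txt metadata format a build's versionCode is stored as a string, while the value scanned from the APK is an int, so the lookup in the loop below can only match after the str() conversion above.

    apk_versionCode = 10203        # int, as read from the APK
    build_versionCode = "10203"    # str, as parsed from .txt metadata
    assert apk_versionCode != build_versionCode       # a plain comparison never matches
    assert str(apk_versionCode) == build_versionCode  # hence versionCodeStr above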
+ for build in app.builds: + if build['versionCode'] == versionCodeStr and 'versionName' in build: versionName = build['versionName'] break if versionName: @@ -1317,12 +563,7 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): and common.config['make_current_version_link'] \ and repodir == 'repo': # only create these namefield = common.config['current_version_name_source'] - name = app.get(namefield) - if not name and namefield == 'Name': - name = app.get('localized', {}).get(DEFAULT_LOCALE, {}).get('name') - if not name: - name = app.id - sanitized_name = re.sub(b'''[ '"&%?+=/]''', b'', str(name).encode('utf-8')) + sanitized_name = re.sub(b'''[ '"&%?+=/]''', b'', app.get(namefield).encode('utf-8')) apklinkname = sanitized_name + os.path.splitext(current_version_file)[1].encode('utf-8') current_version_path = os.path.join(repodir, current_version_file).encode('utf-8', 'surrogateescape') if os.path.islink(apklinkname): @@ -1337,29 +578,6 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): os.remove(siglinkname) os.symlink(sigfile_path, siglinkname) - if sys.version_info.minor >= 13: - # Python 3.13 changed minidom so it no longer converts " to an XML entity. - # https://github.com/python/cpython/commit/154477be722ae5c4e18d22d0860e284006b09c4f - # This just puts back the previous implementation, with black code format. - import inspect - import xml.dom.minidom - - def _write_data(writer, text, attr): # pylint: disable=unused-argument - if text: - text = ( - text.replace('&', '&') - .replace('<', '<') - .replace('"', '"') - .replace('>', '>') - ) - writer.write(text) - - argnames = tuple(inspect.signature(xml.dom.minidom._write_data).parameters) - if argnames == ('writer', 'text', 'attr'): - xml.dom.minidom._write_data = _write_data - else: - logging.warning('Failed to monkey patch minidom for index.xml support!') - if common.options.pretty: output = doc.toprettyxml(encoding='utf-8') else: @@ -1368,8 +586,7 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): with open(os.path.join(repodir, 'index.xml'), 'wb') as f: f.write(output) - if 'repo_keyalias' in common.config \ - or (common.options.nosign and 'repo_pubkey' in common.config): + if 'repo_keyalias' in common.config: if common.options.nosign: logging.info(_("Creating unsigned index in preparation for signing")) @@ -1386,58 +603,34 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints): # Sign the index... signed = os.path.join(repodir, 'index.jar') if common.options.nosign: - _copy_to_local_copy_dir(repodir, os.path.join(repodir, jar_output)) # Remove old signed index if not signing if os.path.exists(signed): os.remove(signed) else: signindex.config = common.config - signindex.sign_jar(signed, use_old_algs=True) + signindex.sign_jar(signed) # Copy the repo icon into the repo directory... 
icon_dir = os.path.join(repodir, 'icons') - repo_icon = common.config.get('repo_icon', common.default_config['repo_icon']) - iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon)) - if os.path.exists(repo_icon): - shutil.copyfile(common.config['repo_icon'], iconfilename) - else: - logging.warning(_('repo_icon "repo/icons/%s" does not exist, generating placeholder.') - % repo_icon) - os.makedirs(os.path.dirname(iconfilename), exist_ok=True) - try: - import qrcode - - qrcode.make(common.config['repo_url']).save(iconfilename) - except ModuleNotFoundError as e: - raise ModuleNotFoundError( - _( - 'The "qrcode" Python package is not installed (e.g. apt-get install python3-qrcode)!' - ) - ) from e - except Exception: - exampleicon = os.path.join(common.get_examples_dir(), - common.default_config['repo_icon']) - shutil.copy(exampleicon, iconfilename) + iconfilename = os.path.join(icon_dir, os.path.basename(common.config['repo_icon'])) + shutil.copyfile(common.config['repo_icon'], iconfilename) def extract_pubkey(): - """Extract and return the repository's public key from the keystore. - - Returns - ------- - public key in hex - repository fingerprint + """ + Extracts and returns the repository's public key from the keystore. + :return: public key in hex, repository fingerprint """ if 'repo_pubkey' in common.config: pubkey = unhexlify(common.config['repo_pubkey']) - elif 'keystorepass' in common.config: + else: env_vars = {'LC_ALL': 'C.UTF-8', 'FDROID_KEY_STORE_PASS': common.config['keystorepass']} p = FDroidPopenBytes([common.config['keytool'], '-exportcert', '-alias', common.config['repo_keyalias'], '-keystore', common.config['keystore'], '-storepass:env', 'FDROID_KEY_STORE_PASS'] - + list(common.config['smartcardoptions']), + + common.config['smartcardoptions'], envs=env_vars, output=False, stderr_to_stdout=False) if p.returncode != 0 or len(p.output) < 20: msg = "Failed to get repo pubkey!" @@ -1445,142 +638,23 @@ def extract_pubkey(): msg += ' Is your crypto smartcard plugged in?' raise FDroidException(msg) pubkey = p.output - else: - raise FDroidException(_('Neither "repo_pubkey" nor "keystorepass" set in config.yml')) - repo_pubkey_fingerprint = common.get_cert_fingerprint(pubkey) return hexlify(pubkey), repo_pubkey_fingerprint -def add_mirrors_to_repodict(repo_section, repodict): - """Convert config into final dict of mirror metadata for the repo. - - Internally and in index-v2, mirrors is a list of dicts, but it can - be specified in the config as a string or list of strings. Also, - index v0 and v1 use a list of URL strings as the data structure. - - The first entry is traditionally the primary mirror and canonical - URL. 'mirrors' should not be present in the index if there is - only the canonical URL, and no other mirrors. - - The metadata items for each mirror entry are sorted by key to - ensure minimum diffs in the index files. 
- - """ - mirrors_config = common.config.get('mirrors', []) - if type(mirrors_config) not in (list, tuple): - mirrors_config = [mirrors_config] - - mirrors_yml = Path(f'config/{MIRRORS_CONFIG_NAME}.yml') - if mirrors_yml.exists(): - if mirrors_config: - raise FDroidException( - _('mirrors set twice, in config.yml and {path}!').format( - path=mirrors_yml - ) - ) - with mirrors_yml.open() as fp: - mirrors_config = yaml.load(fp) - if not isinstance(mirrors_config, list): - msg = _('{path} is not list, but a {datatype}!') - raise TypeError( - msg.format(path=mirrors_yml, datatype=type(mirrors_config).__name__) - ) - - if type(mirrors_config) not in (list, tuple, set): - msg = 'In config.yml, mirrors: is not list, but a {datatype}!' - raise TypeError(msg.format(datatype=type(mirrors_config).__name__)) - - mirrorcheckfailed = False - mirrors = [] - urls = set() - for mirror in mirrors_config: - if isinstance(mirror, str): - mirror = {'url': mirror} - elif not isinstance(mirror, dict): - logging.error( - _('Bad entry type "{mirrortype}" in mirrors config: {mirror}').format( - mirrortype=type(mirror), mirror=mirror - ) - ) - mirrorcheckfailed = True - continue - config_url = mirror['url'] - base = os.path.basename(urllib.parse.urlparse(config_url).path.rstrip('/')) - if common.config.get('nonstandardwebroot') is not True and base != 'fdroid': - logging.error(_("mirror '%s' does not end with 'fdroid'!") % config_url) - mirrorcheckfailed = True - # must end with / or urljoin strips a whole path segment - if config_url.endswith('/'): - mirror['url'] = urllib.parse.urljoin(config_url, repo_section) - else: - mirror['url'] = urllib.parse.urljoin(config_url + '/', repo_section) - mirrors.append(mirror) - if mirror['url'] in urls: - mirrorcheckfailed = True - logging.error( - _('Duplicate entry "%s" in mirrors config!') % mirror['url'] - ) - urls.add(mirror['url']) - for mirror in common.config.get('servergitmirrors', []): - for url in get_mirror_service_urls(mirror): - mirrors.append({'url': url + '/' + repo_section}) - if mirrorcheckfailed: - raise FDroidException(_("Malformed repository mirrors.")) - - if not mirrors: - return - - repodict['mirrors'] = [] - canonical_url = repodict['address'] - found_primary = False - errors = 0 - for mirror in mirrors: - if canonical_url == mirror['url']: - found_primary = True - mirror['isPrimary'] = True - sortedmirror = dict() - for k in sorted(mirror.keys()): - sortedmirror[k] = mirror[k] - repodict['mirrors'].insert(0, sortedmirror) - elif mirror.get('isPrimary'): - errors += 1 - logging.error( - _('Mirror config for {url} contains "isPrimary" key!').format( - url=mirror['url'] - ) - ) - else: - repodict['mirrors'].append(mirror) - - if errors: - raise FDroidException(_('"isPrimary" key should not be added to mirrors!')) - - if repodict['mirrors'] and not found_primary: - repodict['mirrors'].insert(0, {'isPrimary': True, 'url': repodict['address']}) - - -def get_mirror_service_urls(mirror): - """Get direct URLs from git service for use by fdroidclient. +def get_mirror_service_urls(url): + '''Get direct URLs from git service for use by fdroidclient Via 'servergitmirrors', fdroidserver can create and push a mirror - to certain well known git services like GitLab or GitHub. This + to certain well known git services like gitlab or github. This will always use the 'master' branch since that is the default branch in git. The files are then accessible via alternate URLs, where they are served in their raw format via a CDN rather than from git. 
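A worked example may help here (the mirror address is made up): a servergitmirrors entry in SSH form is first rewritten to HTTPS, and for gitlab.com the client-facing URLs are then derived from it roughly as follows.

    import re

    url = 'git@gitlab.com:example/fdroid-mirror'   # hypothetical servergitmirrors entry
    url = re.sub(r'^git@(.*):(.*)', r'https://\1/\2', url)
    # -> 'https://gitlab.com/example/fdroid-mirror'
    # with repo folder 'fdroid' and branch 'master', clients are offered roughly:
    #   https://example.gitlab.io/fdroid-mirror/fdroid               (GitLab Pages)
    #   https://gitlab.com/example/fdroid-mirror/raw/master/fdroid   (raw files, CDN-backed)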
+ ''' - Both of the GitLab URLs will work with F-Droid, but only the - GitLab Pages will work in the browser This is because the "raw" - URLs are not served with the correct mime types, so any index.html - which is put in the repo will not be rendered. Putting an - index.html file in the repo root is a common way for to make - information about the repo available to end user. - - """ - url = mirror['url'] if url.startswith('git@'): - url = re.sub(r'^git@([^:]+):(.+)', r'https://\1/\2', url) + url = re.sub(r'^git@(.*):(.*)', r'https://\1/\2', url) segments = url.split("/") @@ -1600,78 +674,36 @@ def get_mirror_service_urls(mirror): segments.extend([branch, folder]) urls.append('/'.join(segments)) elif hostname == "gitlab.com": - git_mirror_path = os.path.join('git-mirror', folder) - if ( - mirror.get('index_only') - or common.get_dir_size(git_mirror_path) <= common.GITLAB_COM_PAGES_MAX_SIZE - ): - # Gitlab-like Pages segments "https://user.gitlab.io/repo/folder" - gitlab_pages = ["https:", "", user + ".gitlab.io", repo, folder] - urls.append('/'.join(gitlab_pages)) - else: - logging.warning( - _( - 'Skipping GitLab Pages mirror because the repo is too large (>%.2fGB)!' - ) - % (common.GITLAB_COM_PAGES_MAX_SIZE / 1000000000) - ) - # GitLab Raw "https://gitlab.com/user/repo/-/raw/branch/folder" - gitlab_raw = segments + ['-', 'raw', branch, folder] + # Both these Gitlab URLs will work with F-Droid, but only the first will work in the browser + # This is because the `raw` URLs are not served with the correct mime types, so any + # index.html which is put in the repo will not be rendered. Putting an index.html file in + # the repo root is a common way for to make information about the repo available to end user. + + # Gitlab-like Pages segments "https://user.gitlab.io/repo/folder" + gitlab_pages = ["https:", "", user + ".gitlab.io", repo, folder] + urls.append('/'.join(gitlab_pages)) + # Gitlab Raw "https://gitlab.com/user/repo/raw/branch/folder" + gitlab_raw = segments + ['raw', branch, folder] urls.append('/'.join(gitlab_raw)) - # GitLab Artifacts "https://user.gitlab.io/-/repo/-/jobs/job_id/artifacts/public/folder" - job_id = os.getenv('CI_JOB_ID') - try: - int(job_id) - gitlab_artifacts = [ - "https:", - "", - user + ".gitlab.io", - '-', - repo, - '-', - 'jobs', - job_id, - 'artifacts', - 'public', - folder, - ] - urls.append('/'.join(gitlab_artifacts)) - except (TypeError, ValueError): - pass # no Job ID to use, ignore + return urls return urls def download_repo_index(url_str, etag=None, verify_fingerprint=True, timeout=600): - """Download and verifies index v1 file, then returns its data. - - Use the versioned functions to be sure you are getting the - expected data format. - - """ - return download_repo_index_v1(url_str, etag, verify_fingerprint, timeout) - - -def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout=600): - """Download and verifies index v1 file, then returns its data. + """Downloads and verifies index file, then returns its data. Downloads the repository index from the given :param url_str and verifies the repository's fingerprint if :param verify_fingerprint is not False. 
- Raises - ------ - VerificationException() if the repository could not be verified + :raises: VerificationException() if the repository could not be verified - Returns - ------- - A tuple consisting of: - - The index in JSON v1 format or None if the index did not change + :return: A tuple consisting of: + - The index in JSON format or None if the index did not change - The new eTag as returned by the HTTP request """ - from . import net - url = urllib.parse.urlsplit(url_str) fingerprint = None @@ -1681,12 +713,7 @@ def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout= raise VerificationException(_("No fingerprint in URL.")) fingerprint = query['fingerprint'][0] - if url.path.endswith('/index-v1.jar'): - path = url.path[:-13].rstrip('/') - else: - path = url.path.rstrip('/') - - url = urllib.parse.SplitResult(url.scheme, url.netloc, path + '/index-v1.jar', '', '') + url = urllib.parse.SplitResult(url.scheme, url.netloc, url.path + '/index-v1.jar', '', '') download, new_etag = net.http_get(url.geturl(), etag, timeout) if download is None: @@ -1695,127 +722,38 @@ def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout= with tempfile.NamedTemporaryFile() as fp: fp.write(download) fp.flush() - index, public_key, public_key_fingerprint = get_index_from_jar( - fp.name, fingerprint, allow_deprecated=True - ) + index, public_key, public_key_fingerprint = get_index_from_jar(fp.name, fingerprint) index["repo"]["pubkey"] = hexlify(public_key).decode() index["repo"]["fingerprint"] = public_key_fingerprint index["apps"] = [metadata.App(app) for app in index["apps"]] return index, new_etag -def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=None): - """Download and verifies index v2 file, then returns its data. - - Downloads the repository index from the given :param url_str and - verifies the repository's fingerprint if :param verify_fingerprint - is not False. In order to verify the data, the fingerprint must - be provided as part of the URL. - - Raises - ------ - VerificationException() if the repository could not be verified - - Returns - ------- - A tuple consisting of: - - The index in JSON v2 format or None if the index did not change - - The new eTag as returned by the HTTP request +def get_index_from_jar(jarfile, fingerprint=None): + """Returns the data, public key, and fingerprint from index-v1.jar + :raises: VerificationException() if the repository could not be verified """ - from . import net - etag # etag is unused but needs to be there to keep the same API as the earlier functions. 
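For illustration, a minimal sketch of how the verification fingerprint travels in the repo URL (address and fingerprint below are made up): it is parsed out of the query string and later compared against the SHA-256 fingerprint of the certificate that signed the downloaded JAR.

    import urllib.parse

    url_str = ('https://example.org/fdroid/repo?fingerprint='
               '0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF')
    url = urllib.parse.urlsplit(url_str)
    query = urllib.parse.parse_qs(url.query)
    fingerprint = query['fingerprint'][0]
    # a mismatch with the signing certificate's fingerprint raises VerificationException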
- - url = urllib.parse.urlsplit(url_str) - - if timeout is not None: - logging.warning('"timeout" argument of download_repo_index_v2() is deprecated!') - - fingerprint = None - if verify_fingerprint: - query = urllib.parse.parse_qs(url.query) - if 'fingerprint' not in query: - raise VerificationException(_("No fingerprint in URL.")) - fingerprint = query['fingerprint'][0] - - if url.path.endswith('/entry.jar') or url.path.endswith('/index-v2.json'): - path = url.path.rsplit('/', 1)[0] - else: - path = url.path.rstrip('/') - url = urllib.parse.SplitResult(url.scheme, url.netloc, path, '', '') - - mirrors = common.get_mirrors(url, 'entry.jar') - f = net.download_using_mirrors(mirrors) - entry, public_key, fingerprint = get_index_from_jar(f, fingerprint) - - sha256 = entry['index']['sha256'] - mirrors = common.get_mirrors(url, entry['index']['name'][1:]) - f = net.download_using_mirrors(mirrors) - with open(f, 'rb') as fp: - index = fp.read() - if sha256 != hashlib.sha256(index).hexdigest(): - raise VerificationException( - _("SHA-256 of {url} does not match entry!").format(url=url) - ) - return json.loads(index), None - - -def get_index_from_jar(jarfile, fingerprint=None, allow_deprecated=False): - """Return the data, public key and fingerprint from an index JAR with one JSON file. - - The F-Droid index files always contain a single data file and a - JAR Signature. Since index-v1, the data file is always JSON. - That single data file is named the same as the JAR file. - - Parameters - ---------- - fingerprint is the SHA-256 fingerprint of signing key. Only - hex digits count, all other chars will can be discarded. - - Raises - ------ - VerificationException() if the repository could not be verified - - """ logging.debug(_('Verifying index signature:')) - - if allow_deprecated: - common.verify_deprecated_jar_signature(jarfile) - else: - common.verify_jar_signature(jarfile) - + common.verify_jar_signature(jarfile) with zipfile.ZipFile(jarfile) as jar: public_key, public_key_fingerprint = get_public_key_from_jar(jar) if fingerprint is not None: - fingerprint = re.sub(r'[^0-9A-F]', r'', fingerprint.upper()) - if fingerprint != public_key_fingerprint: - raise VerificationException( - _("The repository's fingerprint does not match.") - ) - for f in jar.namelist(): - if not f.startswith('META-INF/'): - jsonfile = f - break - data = json.loads(jar.read(jsonfile)) + if fingerprint.upper() != public_key_fingerprint: + raise VerificationException(_("The repository's fingerprint does not match.")) + data = json.loads(jar.read('index-v1.json').decode()) return data, public_key, public_key_fingerprint def get_public_key_from_jar(jar): - """Get the public key and its fingerprint from a JAR file. + """ + Get the public key and its fingerprint from a JAR file. 
- Raises - ------ - VerificationException() if the JAR was not signed exactly once + :raises: VerificationException() if the JAR was not signed exactly once - Parameters - ---------- - jar - a zipfile.ZipFile object - - Returns - ------- - the public key from the jar and its fingerprint + :param jar: a zipfile.ZipFile object + :return: the public key from the jar and its fingerprint """ # extract certificate from jar certs = [n for n in jar.namelist() if common.SIGNATURE_BLOCK_FILE_REGEX.match(n)] @@ -1829,100 +767,3 @@ def get_public_key_from_jar(jar): public_key_fingerprint = common.get_cert_fingerprint(public_key).replace(' ', '') return public_key, public_key_fingerprint - - -def make_altstore(apps, apks, config, repodir, pretty=False): - """Assemble altstore-index.json for iOS (.ipa) apps. - - builds index files based on: - https://faq.altstore.io/distribute-your-apps/make-a-source - https://faq.altstore.io/distribute-your-apps/updating-apps - """ - if not any(Path(repodir).glob('*.ipa')): - # no IPA files present in repo, nothing to do here, exiting early - return - - indent = 2 if pretty else None - # for now alt-store support is english only - for lang in ['en']: - - # prepare minimal altstore index - idx = { - 'name': config['repo_name'], - "apps": [], - "news": [], - } - - # add optional values if available - # idx["subtitle"] F-Droid doesn't have a corresponding value - if config.get("repo_description"): - idx['description'] = config['repo_description'] - if (Path(repodir) / 'icons' / config['repo_icon']).exists(): - idx['iconURL'] = f"{config['repo_url']}/icons/{config['repo_icon']}" - # idx["headerURL"] F-Droid doesn't have a corresponding value - # idx["website"] F-Droid doesn't have a corresponding value - # idx["patreonURL"] F-Droid doesn't have a corresponding value - # idx["tintColor"] F-Droid doesn't have a corresponding value - # idx["featuredApps"] = [] maybe mappable to F-Droids what's new? - - # assemble "apps" - for packageName, app in apps.items(): - app_name = app.get("Name") or app.get("AutoName") - icon_url = "{}{}".format( - config['repo_url'], - app.get('iconv2', {}).get(DEFAULT_LOCALE, {}).get('name', ''), - ) - screenshot_urls = [ - "{}{}".format(config["repo_url"], s["name"]) - for s in app.get("screenshots", {}) - .get("phone", {}) - .get(DEFAULT_LOCALE, {}) - ] - - a = { - "name": app_name, - 'bundleIdentifier': packageName, - 'developerName': app.get("AuthorName") or f"{app_name} team", - 'iconURL': icon_url, - "localizedDescription": "", - 'appPermissions': { - "entitlements": set(), - "privacy": {}, - }, - 'versions': [], - } - - if app.get('summary'): - a['subtitle'] = app['summary'] - # a["tintColor"] F-Droid doesn't have a corresponding value - # a["category"] F-Droid doesn't have a corresponding value - # a['patreon'] F-Droid doesn't have a corresponding value - a["screenshots"] = screenshot_urls - - # populate 'versions' - for apk in apks: - last4 = apk.get('apkName', '').lower()[-4:] - if apk['packageName'] == packageName and last4 == '.ipa': - v = { - "version": apk["versionName"], - "date": apk["added"].isoformat(), - "downloadURL": f"{config['repo_url']}/{apk['apkName']}", - "size": apk['size'], - } - - # v['localizedDescription'] maybe what's new text? 
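For orientation, roughly the shape of one 'versions' entry this loop builds per .ipa file (all values below are invented):

    v = {
        "version": "1.2.3",                 # versionName of the .ipa
        "date": "2024-05-01T00:00:00",      # when it was added to the repo
        "downloadURL": "https://example.org/fdroid/repo/app_1.2.3.ipa",
        "size": 12345678,                   # bytes
        "minOSVersion": "15.0",             # from ipa_MinimumOSVersion
        "maxOSVersion": "17.4",             # from ipa_DTPlatformVersion
    }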
- v["minOSVersion"] = apk["ipa_MinimumOSVersion"] - v["maxOSVersion"] = apk["ipa_DTPlatformVersion"] - - # writing this spot here has the effect that always the - # permissions of the latest processed permissions list used - a['appPermissions']['privacy'] = apk['ipa_permissions'] - a['appPermissions']['entitlements'] = list(apk['ipa_entitlements']) - - a['versions'].append(v) - - if len(a['versions']) > 0: - idx['apps'].append(a) - - with open(Path(repodir) / 'altstore-index.json', "w", encoding="utf-8") as f: - json.dump(idx, f, indent=indent) diff --git a/fdroidserver/init.py b/fdroidserver/init.py index 39b18c1a..7eb8bbe2 100644 --- a/fdroidserver/init.py +++ b/fdroidserver/init.py @@ -19,70 +19,53 @@ # along with this program. If not, see . import glob -import logging import os import re import shutil import socket import sys from argparse import ArgumentParser +import logging -from . import _, common +from . import _ +from . import common from .exception import FDroidException config = {} +options = None def disable_in_config(key, value): - """Write a key/value to the local config.yml, then comment it out.""" - import yaml - - with open(common.CONFIG_FILE) as fp: - data = fp.read() - pattern = r'\n[\s#]*' + key + r':.*' - repl = '\n#' + yaml.dump({key: value}, default_flow_style=False) + '''write a key/value to the local config.py, then comment it out''' + with open('config.py', 'r') as f: + data = f.read() + pattern = r'\n[\s#]*' + key + r'\s*=\s*"[^"]*"' + repl = '\n#' + key + ' = "' + value + '"' data = re.sub(pattern, repl, data) - with open(common.CONFIG_FILE, 'w') as fp: - fp.writelines(data) + with open('config.py', 'w') as f: + f.writelines(data) def main(): - global config + + global options, config # Parse command line... parser = ArgumentParser() common.setup_global_opts(parser) - parser.add_argument( - "-d", - "--distinguished-name", - default=None, - help=_("X.509 'Distinguished Name' used when generating keys"), - ) - parser.add_argument( - "--keystore", - default=None, - help=_("Path to the keystore for the repo signing key"), - ) - parser.add_argument( - "--repo-keyalias", - default=None, - help=_("Alias of the repo signing key in the keystore"), - ) - parser.add_argument( - "--android-home", - default=None, - help=_("Path to the Android SDK (sometimes set in ANDROID_HOME)"), - ) - parser.add_argument( - "--no-prompt", - action="store_true", - default=False, - help=_("Do not prompt for Android SDK path, just fail"), - ) - options = common.parse_args(parser) - - common.set_console_logging(options.verbose, options.color) + parser.add_argument("-d", "--distinguished-name", default=None, + help=_("X.509 'Distinguished Name' used when generating keys")) + parser.add_argument("--keystore", default=None, + help=_("Path to the keystore for the repo signing key")) + parser.add_argument("--repo-keyalias", default=None, + help=_("Alias of the repo signing key in the keystore")) + parser.add_argument("--android-home", default=None, + help=_("Path to the Android SDK (sometimes set in ANDROID_HOME)")) + parser.add_argument("--no-prompt", action="store_true", default=False, + help=_("Do not prompt for Android SDK path, just fail")) + options = parser.parse_args() + aapt = None fdroiddir = os.getcwd() test_config = dict() examplesdir = common.get_examples_dir() @@ -92,35 +75,34 @@ def main(): # in ANDROID_HOME if that exists, otherwise None if options.android_home is not None: test_config['sdk_path'] = options.android_home + elif common.use_androguard(): + pass elif not 
common.test_sdk_exists(test_config): - # if neither --android-home nor the default sdk_path - # exist, prompt the user using platform-specific default - # and if the user leaves it blank, ignore and move on. - default_sdk_path = '' - if sys.platform in ('win32', 'cygwin'): - p = os.path.join( - os.getenv('USERPROFILE'), 'AppData', 'Local', 'Android', 'android-sdk' - ) - elif sys.platform == 'darwin': - # on OSX, Homebrew is common and has an easy path to detect - p = '/usr/local/opt/android-sdk' - elif os.path.isdir('/usr/lib/android-sdk'): - # if the Debian packages are installed, suggest them - p = '/usr/lib/android-sdk' + if os.path.isfile('/usr/bin/aapt'): + # remove sdk_path and build_tools, they are not required + test_config.pop('sdk_path', None) + test_config.pop('build_tools', None) + # make sure at least aapt is found, since this can't do anything without it + test_config['aapt'] = common.find_sdk_tools_cmd('aapt') else: - p = '/opt/android-sdk' - if os.path.exists(p): - default_sdk_path = p - test_config['sdk_path'] = default_sdk_path + # if neither --android-home nor the default sdk_path + # exist, prompt the user using platform-specific default + default_sdk_path = '/opt/android-sdk' + if sys.platform == 'win32' or sys.platform == 'cygwin': + p = os.path.join(os.getenv('USERPROFILE'), + 'AppData', 'Local', 'Android', 'android-sdk') + elif sys.platform == 'darwin': + # on OSX, Homebrew is common and has an easy path to detect + p = '/usr/local/opt/android-sdk' + else: + # if the Debian packages are installed, suggest them + p = '/usr/lib/android-sdk' + if os.path.exists(p): + default_sdk_path = p - if not common.test_sdk_exists(test_config): - del test_config['sdk_path'] while not options.no_prompt: try: - s = input( - _('Enter the path to the Android SDK (%s) here:\n> ') - % default_sdk_path - ) + s = input(_('Enter the path to the Android SDK (%s) here:\n> ') % default_sdk_path) except KeyboardInterrupt: print('') sys.exit(1) @@ -130,31 +112,17 @@ def main(): test_config['sdk_path'] = s if common.test_sdk_exists(test_config): break - default_sdk_path = '' + if (options.android_home is not None or not common.use_androguard()) \ + and not common.test_sdk_exists(test_config): + raise FDroidException("Android SDK not found.") - if test_config.get('sdk_path') and not common.test_sdk_exists(test_config): - raise FDroidException( - _("Android SDK not found at {path}!").format(path=test_config['sdk_path']) - ) - - if not os.path.exists(common.CONFIG_FILE): + if not os.path.exists('config.py'): # 'metadata' and 'tmp' are created in fdroid if not os.path.exists('repo'): os.mkdir('repo') - example_config_yml = os.path.join(examplesdir, common.CONFIG_FILE) - if os.path.exists(example_config_yml): - shutil.copyfile(example_config_yml, common.CONFIG_FILE) - else: - from pkg_resources import get_distribution - - versionstr = get_distribution('fdroidserver').version - if not versionstr: - versionstr = 'master' - with open(common.CONFIG_FILE, 'w') as fp: - fp.write('# see https://gitlab.com/fdroid/fdroidserver/blob/') - fp.write(versionstr) - fp.write(f'/examples/{common.CONFIG_FILE}\n') - os.chmod(common.CONFIG_FILE, 0o0600) + shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir) + shutil.copyfile(os.path.join(examplesdir, 'config.py'), 'config.py') + os.chmod('config.py', 0o0600) # If android_home is None, test_config['sdk_path'] will be used and # "$ANDROID_HOME" may be used if the env var is set up correctly. 
# If android_home is not None, the path given from the command line @@ -162,20 +130,43 @@ def main(): if 'sdk_path' in test_config: common.write_to_config(test_config, 'sdk_path', options.android_home) else: - logging.warning( - 'Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...' - ) + logging.warn('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...') logging.info('Try running `fdroid init` in an empty directory.') raise FDroidException('Repository already exists.') - # now that we have a local config.yml, read configuration... - config = common.read_config() + if common.use_androguard(): + pass + elif 'aapt' not in test_config or not os.path.isfile(test_config['aapt']): + # try to find a working aapt, in all the recent possible paths + build_tools = os.path.join(test_config['sdk_path'], 'build-tools') + aaptdirs = [] + aaptdirs.append(os.path.join(build_tools, test_config['build_tools'])) + aaptdirs.append(build_tools) + for f in os.listdir(build_tools): + if os.path.isdir(os.path.join(build_tools, f)): + aaptdirs.append(os.path.join(build_tools, f)) + for d in sorted(aaptdirs, reverse=True): + if os.path.isfile(os.path.join(d, 'aapt')): + aapt = os.path.join(d, 'aapt') + break + if aapt and os.path.isfile(aapt): + dirname = os.path.basename(os.path.dirname(aapt)) + if dirname == 'build-tools': + # this is the old layout, before versioned build-tools + test_config['build_tools'] = '' + else: + test_config['build_tools'] = dirname + common.write_to_config(test_config, 'build_tools') + common.ensure_build_tools_exists(test_config) + + # now that we have a local config.py, read configuration... + config = common.read_config(options) # the NDK is optional and there may be multiple versions of it, so it's # left for the user to configure # find or generate the keystore for the repo signing key. First try the - # path written in the default config.yml. Then check if the user has + # path written in the default config.py. Then check if the user has # specified a path from the command line, which will trump all others. # Otherwise, create ~/.local/share/fdroidserver and stick it in there. If # keystore is set to NONE, that means that Java will look for keys in a @@ -188,9 +179,8 @@ def main(): else: keystore = os.path.abspath(options.keystore) if not os.path.exists(keystore): - logging.info( - '"' + keystore + '" does not exist, creating a new keystore there.' 
- ) + logging.info('"' + keystore + + '" does not exist, creating a new keystore there.') common.write_to_config(test_config, 'keystore', keystore) repo_keyalias = None keydname = None @@ -201,19 +191,12 @@ def main(): keydname = options.distinguished_name common.write_to_config(test_config, 'keydname', keydname) if keystore == 'NONE': # we're using a smartcard - common.write_to_config( - test_config, 'repo_keyalias', '1' - ) # seems to be the default + common.write_to_config(test_config, 'repo_keyalias', '1') # seems to be the default disable_in_config('keypass', 'never used with smartcard') - common.write_to_config( - test_config, - 'smartcardoptions', - ( - '-storetype PKCS11 ' - + '-providerClass sun.security.pkcs11.SunPKCS11 ' - + '-providerArg opensc-fdroid.cfg' - ), - ) + common.write_to_config(test_config, 'smartcardoptions', + ('-storetype PKCS11 -providerName SunPKCS11-OpenSC ' + + '-providerClass sun.security.pkcs11.SunPKCS11 ' + + '-providerArg opensc-fdroid.cfg')) # find opensc-pkcs11.so if not os.path.exists('opensc-fdroid.cfg'): if os.path.exists('/usr/lib/opensc-pkcs11.so'): @@ -221,49 +204,34 @@ def main(): elif os.path.exists('/usr/lib64/opensc-pkcs11.so'): opensc_so = '/usr/lib64/opensc-pkcs11.so' else: - files = glob.glob( - '/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so' - ) + files = glob.glob('/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so') if len(files) > 0: opensc_so = files[0] else: opensc_so = '/usr/lib/opensc-pkcs11.so' - logging.warning( - 'No OpenSC PKCS#11 module found, ' - + 'install OpenSC then edit "opensc-fdroid.cfg"!' - ) + logging.warn('No OpenSC PKCS#11 module found, ' + + 'install OpenSC then edit "opensc-fdroid.cfg"!') + with open(os.path.join(examplesdir, 'opensc-fdroid.cfg'), 'r') as f: + opensc_fdroid = f.read() + opensc_fdroid = re.sub('^library.*', 'library = ' + opensc_so, opensc_fdroid, + flags=re.MULTILINE) with open('opensc-fdroid.cfg', 'w') as f: - f.write('name = OpenSC\nlibrary = ') - f.write(opensc_so) - f.write('\n') - logging.info( - "Repo setup using a smartcard HSM. Please edit keystorepass and repo_keyalias in config.yml." - ) - logging.info( - "If you want to generate a new repo signing key in the HSM you can do that with 'fdroid update " - "--create-key'." 
- ) + f.write(opensc_fdroid) elif os.path.exists(keystore): to_set = ['keystorepass', 'keypass', 'repo_keyalias', 'keydname'] if repo_keyalias: to_set.remove('repo_keyalias') if keydname: to_set.remove('keydname') - logging.warning( - '\n' - + _('Using existing keystore "{path}"').format(path=keystore) - + '\n' - + _('Now set these in config.yml:') - + ' ' - + ', '.join(to_set) - + '\n' - ) + logging.warning('\n' + _('Using existing keystore "{path}"').format(path=keystore) + + '\n' + _('Now set these in config.py:') + ' ' + + ', '.join(to_set) + '\n') else: password = common.genpassword() c = dict(test_config) c['keystorepass'] = password c['keypass'] = password - c['repo_keyalias'] = repo_keyalias or socket.getfqdn() + c['repo_keyalias'] = socket.getfqdn() c['keydname'] = 'CN=' + c['repo_keyalias'] + ', OU=F-Droid' common.write_to_config(test_config, 'keystorepass', password) common.write_to_config(test_config, 'keypass', password) @@ -274,25 +242,17 @@ def main(): msg = '\n' msg += _('Built repo based in "%s" with this config:') % fdroiddir msg += '\n\n Android SDK:\t\t\t' + config['sdk_path'] + if aapt: + msg += '\n Android SDK Build Tools:\t' + os.path.dirname(aapt) + msg += '\n Android NDK r12b (optional):\t$ANDROID_NDK' msg += '\n ' + _('Keystore for signing key:\t') + keystore if repo_keyalias is not None: msg += '\n Alias for key in store:\t' + repo_keyalias - msg += '\n\n' - msg += ( - _( - """To complete the setup, add your APKs to "%s" + msg += '\n\n' + '''To complete the setup, add your APKs to "%s" then run "fdroid update -c; fdroid update". You might also want to edit -"config.yml" to set the URL, repo name, and more. You should also set up +"config.py" to set the URL, repo name, and more. You should also set up a signing key (a temporary one might have been automatically generated). For more info: https://f-droid.org/docs/Setup_an_F-Droid_App_Repo -and https://f-droid.org/docs/Signing_Process""" - ) - % os.path.join(fdroiddir, 'repo') - ) - if not options.quiet: - # normally, INFO is only shown with --verbose, but show this unless --quiet - logger = logging.getLogger() - logger.setLevel(logging.INFO) - logger.info(msg) - logging.shutdown() +and https://f-droid.org/docs/Signing_Process''' % os.path.join(fdroiddir, 'repo') + logging.info(msg) diff --git a/fdroidserver/install.py b/fdroidserver/install.py index 8c1dc948..968bb28f 100644 --- a/fdroidserver/install.py +++ b/fdroidserver/install.py @@ -17,372 +17,62 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import glob -import locale -import logging -import os import sys -import termios -import tty -from argparse import ArgumentParser, BooleanOptionalAction -from pathlib import Path -from urllib.parse import urlencode, urlparse, urlunparse +import os +import glob +from argparse import ArgumentParser +import logging -import defusedxml.ElementTree as XMLElementTree - -from . import _, common, github, index, net +from . import _ +from . 
import common +from .common import SdkToolsPopen from .exception import FDroidException -DEFAULT_IPFS_GATEWAYS = ("https://gateway.ipfs.io/ipfs/",) -MAVEN_CENTRAL_MIRRORS = [ - { - "url": "https://repo1.maven.org/maven2/", - "dnsA": ["199.232.16.209"], - "worksWithoutSNI": True, - }, - { - "url": "https://repo.maven.apache.org/maven2/", - "dnsA": ["199.232.16.215"], - "worksWithoutSNI": True, - }, - { - "url": "https://maven-central-asia.storage-download.googleapis.com/maven2/", - }, - { - "url": "https://maven-central-eu.storage-download.googleapis.com/maven2/", - }, - { - "url": "https://maven-central.storage-download.googleapis.com/maven2/", - }, -] - - -# pylint: disable=unused-argument -def download_apk(appid='org.fdroid.fdroid', privacy_mode=False): - """Download an APK from F-Droid via the first mirror that works.""" - url = urlunparse( - urlparse(common.FDROIDORG_MIRRORS[0]['url'])._replace( - query=urlencode({'fingerprint': common.FDROIDORG_FINGERPRINT}) - ) - ) - - data, _ignored = index.download_repo_index_v2(url) - app = data.get('packages', dict()).get(appid) - preferred_version = None - for version in app['versions'].values(): - if not preferred_version: - # if all else fails, use the first one - preferred_version = version - if not version.get('releaseChannels'): - # prefer APK in default release channel - preferred_version = version - break - - mirrors = common.append_filename_to_mirrors( - preferred_version['file']['name'][1:], common.FDROIDORG_MIRRORS - ) - ipfsCIDv1 = preferred_version['file'].get('ipfsCIDv1') - if ipfsCIDv1: - for gateway in DEFAULT_IPFS_GATEWAYS: - mirrors.append({'url': os.path.join(gateway, ipfsCIDv1)}) - f = net.download_using_mirrors(mirrors) - if f and os.path.exists(f): - versionCode = preferred_version['manifest']['versionCode'] - f = Path(f) - return str(f.rename(f.with_stem(f'{appid}_{versionCode}')).resolve()) - - -def download_fdroid_apk(privacy_mode=False): # pylint: disable=unused-argument - """Directly download the current F-Droid APK and verify it. - - This downloads the "download button" link, which is the version - that is best tested for new installs. 
- - """ - mirror = common.FDROIDORG_MIRRORS[0] - mirror['url'] = urlunparse(urlparse(mirror['url'])._replace(path='F-Droid.apk')) - return net.download_using_mirrors([mirror]) - - -def download_fdroid_apk_from_github(privacy_mode=False): - """Download F-Droid.apk from F-Droid's GitHub Releases.""" - if common.config and not privacy_mode: - token = common.config.get('github_token') - else: - token = None - gh = github.GithubApi(token, 'https://github.com/f-droid/fdroidclient') - latest_apk = gh.get_latest_apk() - filename = os.path.basename(latest_apk) - return net.download_file(latest_apk, os.path.join(common.get_cachedir(), filename)) - - -def download_fdroid_apk_from_ipns(privacy_mode=False): - """Download the F-Droid APK from an IPNS repo.""" - cid = 'k51qzi5uqu5dl4hbcksbdmplanu9n4hivnqsupqe6vzve1pdbeh418ssptldd3' - mirrors = [ - {"url": f"https://ipfs.io/ipns/{cid}/F-Droid.apk"}, - ] - if not privacy_mode: - mirrors.append({"url": f"https://{cid}.ipns.dweb.link/F-Droid.apk"}) - return net.download_using_mirrors(mirrors) - - -def download_fdroid_apk_from_maven(privacy_mode=False): - """Download F-Droid.apk from Maven Central and official mirrors.""" - path = 'org/fdroid/fdroid/F-Droid' - if privacy_mode: - mirrors = MAVEN_CENTRAL_MIRRORS[:2] # skip the Google servers - else: - mirrors = MAVEN_CENTRAL_MIRRORS - metadata = net.download_using_mirrors( - common.append_filename_to_mirrors( - os.path.join(path, 'maven-metadata.xml'), mirrors - ) - ) - version = XMLElementTree.parse(metadata).getroot().findall('*.//latest')[0].text - mirrors = common.append_filename_to_mirrors( - os.path.join(path, version, f'F-Droid-{version}.apk'), mirrors - ) - return net.download_using_mirrors(mirrors) - - -def install_fdroid_apk(privacy_mode=False): - """Download and install F-Droid.apk using all tricks we can muster. - - By default, this first tries to fetch the official install APK - which is offered when someone clicks the "download" button on - https://f-droid.org/. Then it will try all the mirrors and - methods until it gets something successful, or runs out of - options. - - There is privacy_mode which tries to download from mirrors first, - so that this downloads from a mirror that has many different kinds - of files available, thereby breaking the clear link to F-Droid. - - Returns - ------- - None for success or the error message. 
- - """ - country_code = locale.getlocale()[0].split('_')[-1] - if privacy_mode is None and country_code in ('CN', 'HK', 'IR', 'TM'): - logging.warning( - _('Privacy mode was enabled based on your locale ({country_code}).').format( - country_code=country_code - ) - ) - privacy_mode = True - - if privacy_mode or not (common.config and common.config.get('jarsigner')): - download_methods = [ - download_fdroid_apk_from_maven, - download_fdroid_apk_from_ipns, - download_fdroid_apk_from_github, - ] - else: - download_methods = [ - download_apk, - download_fdroid_apk_from_maven, - download_fdroid_apk_from_github, - download_fdroid_apk_from_ipns, - download_fdroid_apk, - ] - for method in download_methods: - try: - f = method(privacy_mode=privacy_mode) - break - except Exception as e: - logging.info(e) - else: - return _('F-Droid.apk could not be downloaded from any known source!') - - fingerprint = common.apk_signer_fingerprint(f) - if fingerprint.upper() != common.FDROIDORG_FINGERPRINT: - return _('{path} has the wrong fingerprint ({fingerprint})!').format( - path=f, fingerprint=fingerprint - ) - install_apk(f) - - -def install_apk(f): - if common.config and common.config.get('apksigner'): - # TODO this should always verify, but that requires APK sig verification in Python #94 - logging.info(_('Verifying package {path} with apksigner.').format(path=f)) - common.verify_apk_signature(f) - if common.config and common.config.get('adb'): - if devices(): - install_apks_to_devices([f]) - os.remove(f) - else: - os.remove(f) - return _('No devices found for `adb install`! Please plug one in.') +options = None +config = None def devices(): - """Get the list of device serials for use with adb commands.""" - p = common.SdkToolsPopen(['adb', "devices"]) + p = SdkToolsPopen(['adb', "devices"]) if p.returncode != 0: raise FDroidException("An error occured when finding devices: %s" % p.output) - serials = list() - for line in p.output.splitlines(): - columns = line.strip().split("\t", maxsplit=1) - if len(columns) == 2: - serial, status = columns - if status == 'device': - serials.append(serial) - else: - d = {'serial': serial, 'status': status} - logging.warning(_('adb reports {serial} is "{status}"!'.format(**d))) - return serials - - -def install_apks_to_devices(apks): - """Install the list of APKs to all Android devices reported by `adb devices`.""" - for apk in apks: - # Get device list each time to avoid device not found errors - devs = devices() - if not devs: - raise FDroidException(_("No attached devices found")) - logging.info(_("Installing %s...") % apk) - for dev in devs: - logging.info( - _("Installing '{apkfilename}' on {dev}...").format( - apkfilename=apk, dev=dev - ) - ) - p = common.SdkToolsPopen(['adb', "-s", dev, "install", apk]) - fail = "" - for line in p.output.splitlines(): - if line.startswith("Failure"): - fail = line[9:-1] - if not fail: - continue - - if fail == "INSTALL_FAILED_ALREADY_EXISTS": - logging.warning( - _('"{apkfilename}" is already installed on {dev}.').format( - apkfilename=apk, dev=dev - ) - ) - else: - raise FDroidException( - _("Failed to install '{apkfilename}' on {dev}: {error}").format( - apkfilename=apk, dev=dev, error=fail - ) - ) - - -def read_char(): - """Read input from the terminal prompt one char at a time.""" - fd = sys.stdin.fileno() - old_settings = termios.tcgetattr(fd) - try: - tty.setraw(fd) - ch = sys.stdin.read(1) - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - return ch - - -def strtobool(val): - """Convert a localized string 
representation of truth to True or False.""" - return val.lower() in ('', 'y', 'yes', _('yes'), _('true')) # '' is pressing Enter - - -def prompt_user(yes, msg): - """Prompt user for yes/no, supporting Enter and Esc as accepted answers.""" - run_install = yes - if yes is None and sys.stdout.isatty(): - print(msg, end=' ', flush=True) - answer = '' - while True: - in_char = read_char() - if in_char == '\r': # Enter key - break - if not in_char.isprintable(): - sys.exit(1) - print(in_char, end='', flush=True) - answer += in_char - run_install = strtobool(answer) - print() - return run_install + lines = [l for l in p.output.splitlines() if not l.startswith('* ')] + if len(lines) < 3: + return [] + lines = lines[1:-1] + return [l.split()[0] for l in lines] def main(): - parser = ArgumentParser( - usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]" - ) + + global options, config + + # Parse command line... + parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") common.setup_global_opts(parser) - parser.add_argument( - "appid", - nargs='*', - help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), - ) - parser.add_argument( - "-a", - "--all", - action="store_true", - default=False, - help=_("Install all signed applications available"), - ) - parser.add_argument( - "-p", - "--privacy-mode", - action=BooleanOptionalAction, - default=None, - help=_("Download F-Droid.apk using mirrors that leak less to the network"), - ) - parser.add_argument( - "-y", - "--yes", - action="store_true", - default=None, - help=_("Automatic yes to all prompts."), - ) - parser.add_argument( - "-n", - "--no", - action="store_false", - dest='yes', - help=_("Automatic no to all prompts."), - ) - options = common.parse_args(parser) - - common.set_console_logging(options.verbose, options.color) - logging.captureWarnings(True) # for SNIMissingWarning - - common.get_config() + parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) + parser.add_argument("-a", "--all", action="store_true", default=False, + help=_("Install all signed applications available")) + options = parser.parse_args() if not options.appid and not options.all: - run_install = prompt_user( - options.yes, - _('Would you like to download and install F-Droid.apk via adb? (YES/no)'), - ) - if run_install: - sys.exit(install_fdroid_apk(options.privacy_mode)) - sys.exit(1) + parser.error(_("option %s: If you really want to install all the signed apps, use --all") % "all") + + config = common.read_config(options) output_dir = 'repo' - if (options.appid or options.all) and not os.path.isdir(output_dir): - logging.error(_("No signed output directory - nothing to do")) - run_install = prompt_user( - options.yes, - _('Would you like to download the app(s) from f-droid.org? 
(YES/no)'), - ) - if run_install: - for appid in options.appid: - f = download_apk(appid) - install_apk(f) - sys.exit(install_fdroid_apk(options.privacy_mode)) - sys.exit(1) + if not os.path.isdir(output_dir): + logging.info(_("No signed output directory - nothing to do")) + sys.exit(0) if options.appid: + vercodes = common.read_pkg_args(options.appid, True) - common.get_metadata_files(vercodes) # only check appids apks = {appid: None for appid in vercodes} - # Get the signed APK with the highest vercode + # Get the signed apk with the highest vercode for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))): + try: appid, vercode = common.publishednameinfo(apkfile) except FDroidException: @@ -395,15 +85,35 @@ def main(): for appid, apk in apks.items(): if not apk: - raise FDroidException(_("No signed APK available for %s") % appid) - install_apks_to_devices(apks.values()) + raise FDroidException(_("No signed apk available for %s") % appid) - elif options.all: - apks = { - common.publishednameinfo(apkfile)[0]: apkfile - for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))) - } - install_apks_to_devices(apks.values()) + else: + + apks = {common.publishednameinfo(apkfile)[0]: apkfile for apkfile in + sorted(glob.glob(os.path.join(output_dir, '*.apk')))} + + for appid, apk in apks.items(): + # Get device list each time to avoid device not found errors + devs = devices() + if not devs: + raise FDroidException(_("No attached devices found")) + logging.info(_("Installing %s...") % apk) + for dev in devs: + logging.info(_("Installing '{apkfilename}' on {dev}...").format(apkfilename=apk, dev=dev)) + p = SdkToolsPopen(['adb', "-s", dev, "install", apk]) + fail = "" + for line in p.output.splitlines(): + if line.startswith("Failure"): + fail = line[9:-1] + if not fail: + continue + + if fail == "INSTALL_FAILED_ALREADY_EXISTS": + logging.warn(_("'{apkfilename}' is already installed on {dev}.") + .format(apkfilename=apk, dev=dev)) + else: + raise FDroidException(_("Failed to install '{apkfilename}' on {dev}: {error}") + .format(apkfilename=apk, dev=dev, error=fail)) logging.info('\n' + _('Finished')) diff --git a/fdroidserver/lint.py b/fdroidserver/lint.py index 99b1a392..6f8a768e 100644 --- a/fdroidserver/lint.py +++ b/fdroidserver/lint.py @@ -16,28 +16,25 @@ # You should have received a copy of the GNU Affero General Public Licen # along with this program. If not, see . -import difflib -import platform +from argparse import ArgumentParser +import glob +import os import re import sys import urllib.parse -from argparse import ArgumentParser -from pathlib import Path -from fdroidserver._yaml import yaml - -from . import _, common, metadata, rewritemeta +from . import _ +from . import common +from . import metadata +from . import rewritemeta config = None +options = None def enforce_https(domain): - return ( - re.compile( - r'^http://([^/]*\.)?' 
+ re.escape(domain) + r'(/.*)?', re.IGNORECASE - ), - domain + " URLs should always use https://", - ) + return (re.compile(r'^[^h][^t][^t][^p][^s]://[^/]*' + re.escape(domain) + r'(/.*)?', re.IGNORECASE), + domain + " URLs should always use https://") https_enforcings = [ @@ -62,10 +59,8 @@ https_enforcings = [ def forbid_shortener(domain): - return ( - re.compile(r'https?://[^/]*' + re.escape(domain) + r'/.*'), - _("URL shorteners should not be used"), - ) + return (re.compile(r'https?://[^/]*' + re.escape(domain) + r'/.*'), + _("URL shorteners should not be used")) http_url_shorteners = [ @@ -124,200 +119,68 @@ http_url_shorteners = [ forbid_shortener('➡.ws'), ] -http_checks = ( - https_enforcings - + http_url_shorteners - + [ - ( - re.compile(r'^(?!https?://)[^/]+'), - _("URL must start with https:// or http://"), - ), - ( - re.compile(r'^https://(github|gitlab)\.com(/[^/]+){2,3}\.git'), - _("Appending .git is not necessary"), - ), - ( - re.compile( - r'^https://[^/]*(github|gitlab|bitbucket|rawgit|githubusercontent)\.[a-zA-Z]+/([^/]+/){2,3}(master|main)/' - ), - _( - "Use /HEAD instead of /master or /main to point at a file in the default branch" - ), - ), - ] -) +http_checks = https_enforcings + http_url_shorteners + [ + (re.compile(r'.*github\.com/[^/]+/[^/]+\.git'), + _("Appending .git is not necessary")), + (re.compile(r'.*://[^/]*(github|gitlab|bitbucket|rawgit)[^/]*/([^/]+/){1,3}master'), + _("Use /HEAD instead of /master to point at a file in the default branch")), +] regex_checks = { 'WebSite': http_checks, 'SourceCode': http_checks, 'Repo': https_enforcings, 'UpdateCheckMode': https_enforcings, - 'IssueTracker': http_checks - + [ - (re.compile(r'.*github\.com/[^/]+/[^/]+/*$'), _("/issues is missing")), - (re.compile(r'.*gitlab\.com/[^/]+/[^/]+/*$'), _("/issues is missing")), + 'IssueTracker': http_checks + [ + (re.compile(r'.*github\.com/[^/]+/[^/]+/*$'), + _("/issues is missing")), + (re.compile(r'.*gitlab\.com/[^/]+/[^/]+/*$'), + _("/issues is missing")), ], - 'Donate': http_checks - + [ - ( - re.compile(r'.*liberapay\.com'), - _("Liberapay donation methods belong in the Liberapay: field"), - ), - ( - re.compile(r'.*opencollective\.com'), - _("OpenCollective donation methods belong in the OpenCollective: field"), - ), + 'Donate': http_checks + [ + (re.compile(r'.*flattr\.com'), + _("Flattr donation methods belong in the FlattrID: field")), + (re.compile(r'.*liberapay\.com'), + _("Liberapay donation methods belong in the Liberapay: field")), + (re.compile(r'.*opencollective\.com'), + _("OpenCollective donation methods belong in the OpenCollective: field")), ], 'Changelog': http_checks, 'Author Name': [ - (re.compile(r'^\s'), _("Unnecessary leading space")), - (re.compile(r'.*\s$'), _("Unnecessary trailing space")), + (re.compile(r'^\s'), + _("Unnecessary leading space")), + (re.compile(r'.*\s$'), + _("Unnecessary trailing space")), ], 'Summary': [ - ( - re.compile(r'.*\b(free software|open source)\b.*', re.IGNORECASE), - _("No need to specify that the app is Free Software"), - ), - ( - re.compile( - r'.*((your|for).*android|android.*(app|device|client|port|version))', - re.IGNORECASE, - ), - _("No need to specify that the app is for Android"), - ), - (re.compile(r'.*[a-z0-9][.!?]( |$)'), _("Punctuation should be avoided")), - (re.compile(r'^\s'), _("Unnecessary leading space")), - (re.compile(r'.*\s$'), _("Unnecessary trailing space")), + (re.compile(r'.*\b(free software|open source)\b.*', re.IGNORECASE), + _("No need to specify that the app is Free Software")), + 
(re.compile(r'.*((your|for).*android|android.*(app|device|client|port|version))', re.IGNORECASE), + _("No need to specify that the app is for Android")), + (re.compile(r'.*[a-z0-9][.!?]( |$)'), + _("Punctuation should be avoided")), + (re.compile(r'^\s'), + _("Unnecessary leading space")), + (re.compile(r'.*\s$'), + _("Unnecessary trailing space")), ], - 'Description': https_enforcings - + http_url_shorteners - + [ - (re.compile(r'\s*[*#][^ .]'), _("Invalid bulleted list")), - ( - re.compile(r'https://f-droid.org/[a-z][a-z](_[A-Za-z]{2,4})?/'), - _("Locale included in f-droid.org URL"), - ), - (re.compile(r'^\s'), _("Unnecessary leading space")), - (re.compile(r'.*\s$'), _("Unnecessary trailing space")), - ( - re.compile( - r'.*<(applet|base|body|button|embed|form|head|html|iframe|img|input|link|object|picture|script|source|style|svg|video).*', - re.IGNORECASE, - ), - _("Forbidden HTML tags"), - ), - ( - re.compile(r""".*\s+src=["']javascript:.*"""), - _("Javascript in HTML src attributes"), - ), + 'Description': https_enforcings + http_url_shorteners + [ + (re.compile(r'\s*[*#][^ .]'), + _("Invalid bulleted list")), + (re.compile(r'https://f-droid.org/[a-z][a-z](_[A-Za-z]{2,4})?/'), + _("Locale included in f-droid.org URL")), + (re.compile(r'^\s'), + _("Unnecessary leading space")), + (re.compile(r'.*\s$'), + _("Unnecessary trailing space")), + (re.compile(r'.*<(applet|base|body|button|embed|form|head|html|iframe|img|input|link|object|picture|script|source|style|svg|video).*', re.IGNORECASE), + _("Forbidden HTML tags")), + (re.compile(r'''.*\s+src=["']javascript:.*'''), + _("Javascript in HTML src attributes")), ], } -# config keys that are currently ignored by lint, but could be supported. -ignore_config_keys = ( - 'github_releases', - 'java_paths', -) - -bool_keys = ( - 'allow_disabled_algorithms', - 'androidobservatory', - 'build_server_always', - 'deploy_process_logs', - 'keep_when_not_allowed', - 'make_current_version_link', - 'nonstandardwebroot', - 'per_app_repos', - 'refresh_scanner', - 'scan_binary', - 'sync_from_local_copy_dir', -) - -check_config_keys = ( - 'ant', - 'apk_signing_key_block_list', - 'archive', - 'archive_description', - 'archive_icon', - 'archive_name', - 'archive_older', - 'archive_url', - 'archive_web_base_url', - 'awsbucket', - 'awsbucket_index_only', - 'binary_transparency_remote', - 'cachedir', - 'char_limits', - 'current_version_name_source', - 'git_mirror_size_limit', - 'github_token', - 'gpghome', - 'gpgkey', - 'gradle', - 'identity_file', - 'install_list', - 'java_paths', - 'keyaliases', - 'keydname', - 'keypass', - 'keystore', - 'keystorepass', - 'lint_licenses', - 'local_copy_dir', - 'mirrors', - 'mvn3', - 'ndk_paths', - 'path_to_custom_rclone_config', - 'rclone_config', - 'repo', - 'repo_description', - 'repo_icon', - 'repo_key_sha256', - 'repo_keyalias', - 'repo_maxage', - 'repo_name', - 'repo_pubkey', - 'repo_url', - 'repo_web_base_url', - 'scanner_signature_sources', - 'sdk_path', - 'servergitmirrors', - 'serverwebroot', - 'smartcardoptions', - 'sync_from_local_copy_dir', - 'uninstall_list', - 'virustotal_apikey', -) - -locale_pattern = re.compile(r"[a-z]{2,3}(-([A-Z][a-zA-Z]+|\d+|[a-z]+))*") - -versioncode_check_pattern = re.compile(r"(\\d|\[(0-9|\\d)_?(a-fA-F)?])[+]") - -ANTIFEATURES_KEYS = None -ANTIFEATURES_PATTERN = None -CATEGORIES_KEYS = list() - - -def load_antiFeatures_config(): - """Lazy loading, since it might read a lot of files.""" - global ANTIFEATURES_KEYS, ANTIFEATURES_PATTERN - k = common.ANTIFEATURES_CONFIG_NAME - if not 
ANTIFEATURES_KEYS or k not in common.config: - common.config[k] = common.load_localized_config(k, 'repo') - ANTIFEATURES_KEYS = sorted(common.config[k].keys()) - ANTIFEATURES_PATTERN = ','.join(ANTIFEATURES_KEYS) - - -def load_categories_config(): - """Lazy loading, since it might read a lot of files.""" - global CATEGORIES_KEYS - k = common.CATEGORIES_CONFIG_NAME - if not CATEGORIES_KEYS: - if config and k in config: - CATEGORIES_KEYS = config[k] - else: - config[k] = common.load_localized_config(k, 'repo') - CATEGORIES_KEYS = list(config[k].keys()) +locale_pattern = re.compile(r'^[a-z]{2,3}(-[A-Z][A-Z])?$') def check_regexes(app): @@ -326,9 +189,9 @@ def check_regexes(app): v = app.get(f) t = metadata.fieldtype(f) if t == metadata.TYPE_MULTILINE: - for line in v.splitlines(): - if m.match(line): - yield "%s at line '%s': %s" % (f, line, r) + for l in v.splitlines(): + if m.match(l): + yield "%s at line '%s': %s" % (f, l, r) else: if v is None: continue @@ -341,17 +204,18 @@ def get_lastbuild(builds): lastbuild = None for build in builds: if not build.disable: - vercode = build.versionCode + vercode = int(build.versionCode) if lowest_vercode == -1 or vercode < lowest_vercode: lowest_vercode = vercode - if not lastbuild or build.versionCode > lastbuild.versionCode: + if not lastbuild or int(build.versionCode) > int(lastbuild.versionCode): lastbuild = build return lastbuild -def check_update_check_data_url(app): # noqa: D403 - """UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs.""" - if app.UpdateCheckData and app.UpdateCheckMode == 'HTTP': +def check_update_check_data_url(app): + """UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs + """ + if app.UpdateCheckData: urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') for url in (urlcode, urlver): if url != '.': @@ -362,58 +226,34 @@ def check_update_check_data_url(app): # noqa: D403 yield _('UpdateCheckData must use HTTPS URL: {url}').format(url=url) -def check_update_check_data_int(app): # noqa: D403 - """UpdateCheckData regex must match integers.""" - if app.UpdateCheckData: - urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') - # codeex can be empty as well - if codeex and not versioncode_check_pattern.search(codeex): - yield _( - f'UpdateCheckData must match the versionCode as integer (\\d or [0-9]): {codeex}' - ) - - def check_vercode_operation(app): - if not app.VercodeOperation: - return - invalid_ops = [] - for op in app.VercodeOperation: - if not common.VERCODE_OPERATION_RE.match(op): - invalid_ops += op - if invalid_ops: - yield _('Invalid VercodeOperation: {invalid_ops}').format( - invalid_ops=invalid_ops - ) + if app.VercodeOperation and not common.VERCODE_OPERATION_RE.match(app.VercodeOperation): + yield _('Invalid VercodeOperation: {field}').format(field=app.VercodeOperation) def check_ucm_tags(app): - lastbuild = get_lastbuild(app.get('Builds', [])) - if ( - lastbuild is not None - and lastbuild.commit - and app.UpdateCheckMode == 'RepoManifest' - and not lastbuild.commit.startswith('unknown') - and lastbuild.versionCode == app.CurrentVersionCode - and not lastbuild.forcevercode - and any(s in lastbuild.commit for s in '.,_-/') - ): - yield _( - "Last used commit '{commit}' looks like a tag, but UpdateCheckMode is '{ucm}'" - ).format(commit=lastbuild.commit, ucm=app.UpdateCheckMode) + lastbuild = get_lastbuild(app.builds) + if (lastbuild is not None + and lastbuild.commit + and app.UpdateCheckMode == 'RepoManifest' + and not 
lastbuild.commit.startswith('unknown') + and lastbuild.versionCode == app.CurrentVersionCode + and not lastbuild.forcevercode + and any(s in lastbuild.commit for s in '.,_-/')): + yield _("Last used commit '{commit}' looks like a tag, but Update Check Mode is '{ucm}'")\ + .format(commit=lastbuild.commit, ucm=app.UpdateCheckMode) def check_char_limits(app): limits = config['char_limits'] if len(app.Summary) > limits['summary']: - yield _("Summary of length {length} is over the {limit} char limit").format( - length=len(app.Summary), limit=limits['summary'] - ) + yield _("Summary of length {length} is over the {limit} char limit")\ + .format(length=len(app.Summary), limit=limits['summary']) if len(app.Description) > limits['description']: - yield _("Description of length {length} is over the {limit} char limit").format( - length=len(app.Description), limit=limits['description'] - ) + yield _("Description of length {length} is over the {limit} char limit")\ + .format(length=len(app.Description), limit=limits['description']) def check_old_links(app): @@ -430,14 +270,13 @@ def check_old_links(app): for f in ['WebSite', 'SourceCode', 'IssueTracker', 'Changelog']: v = app.get(f) if any(s in v for s in old_sites): - yield _("App is in '{repo}' but has a link to {url}").format( - repo=app.Repo, url=v - ) + yield _("App is in '{repo}' but has a link to {url}")\ + .format(repo=app.Repo, url=v) def check_useless_fields(app): if app.UpdateCheckName == app.id: - yield _("UpdateCheckName is set to the known application ID, it can be removed") + yield _("Update Check Name is set to the known app id - it can be removed") filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)') @@ -445,10 +284,8 @@ filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)') def check_checkupdates_ran(app): if filling_ucms.match(app.UpdateCheckMode): - if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == 0: - yield _( - "UpdateCheckMode is set but it looks like checkupdates hasn't been run yet." 
- ) + if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == '0': + yield _("UCM is set but it looks like checkupdates hasn't been run yet") def check_empty_fields(app): @@ -456,14 +293,37 @@ def check_empty_fields(app): yield _("Categories are not set") +all_categories = set([ + "Connectivity", + "Development", + "Games", + "Graphics", + "Internet", + "Money", + "Multimedia", + "Navigation", + "Phone & SMS", + "Reading", + "Science & Education", + "Security", + "Sports & Health", + "System", + "Theming", + "Time", + "Writing", +]) + + def check_categories(app): - """App uses 'Categories' key and parsed config uses 'categories' key.""" for categ in app.Categories: - if categ not in CATEGORIES_KEYS: + if categ not in all_categories: yield _("Categories '%s' is not valid" % categ) def check_duplicates(app): + if app.Name and app.Name == app.AutoName: + yield _("Name '%s' is just the auto name - remove it") % app.Name + links_seen = set() for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']: v = app.get(f) @@ -475,7 +335,7 @@ def check_duplicates(app): else: links_seen.add(v) - name = common.get_app_display_name(app) + name = app.Name or app.AutoName if app.Summary and name: if app.Summary.lower() == name.lower(): yield _("Summary '%s' is just the app's name") % app.Summary @@ -485,12 +345,12 @@ def check_duplicates(app): yield _("Description '%s' is just the app's summary") % app.Summary seenlines = set() - for line in app.Description.splitlines(): - if len(line) < 1: + for l in app.Description.splitlines(): + if len(l) < 1: continue - if line in seenlines: + if l in seenlines: yield _("Description has a duplicate line") - seenlines.add(line) + seenlines.add(l) desc_url = re.compile(r'(^|[^[])\[([^ ]+)( |\]|$)') @@ -505,281 +365,186 @@ def check_mediawiki_links(app): yield _("URL {url} in Description: {error}").format(url=url, error=r) +def check_bulleted_lists(app): + validchars = ['*', '#'] + lchar = '' + lcount = 0 + for l in app.Description.splitlines(): + if len(l) < 1: + lcount = 0 + continue + + if l[0] == lchar and l[1] == ' ': + lcount += 1 + if lcount > 2 and lchar not in validchars: + yield _("Description has a list (%s) but it isn't bulleted (*) nor numbered (#)") % lchar + break + else: + lchar = l[0] + lcount = 1 + + def check_builds(app): supported_flags = set(metadata.build_flags) # needed for YAML and JSON - for build in app.get('Builds', []): + for build in app.builds: if build.disable: if build.disable.startswith('Generated by import.py'): - yield _( - "Build generated by `fdroid import` - remove disable line once ready" - ) + yield _("Build generated by `fdroid import` - remove disable line once ready") continue - for s in ['master', 'main', 'origin', 'HEAD', 'default', 'trunk']: + for s in ['master', 'origin', 'HEAD', 'default', 'trunk']: if build.commit and build.commit.startswith(s): - yield _( - "Branch '{branch}' used as commit in build '{versionName}'" - ).format(branch=s, versionName=build.versionName) + yield _("Branch '{branch}' used as commit in build '{versionName}'")\ + .format(branch=s, versionName=build.versionName) for srclib in build.srclibs: if '@' in srclib: ref = srclib.split('@')[1].split('/')[0] if ref.startswith(s): - yield _( - "Branch '{branch}' used as commit in srclib '{srclib}'" - ).format(branch=s, srclib=srclib) + yield _("Branch '{branch}' used as commit in srclib '{srclib}'")\ + .format(branch=s, srclib=srclib) else: - yield ( - _('srclibs missing name and/or @') - + ' (srclibs: ' - + srclib - + ')' - ) + 
yield _('srclibs missing name and/or @') + ' (srclibs: ' + srclib + ')' for key in build.keys(): if key not in supported_flags: yield _('%s is not an accepted build field') % key def check_files_dir(app): - dir_path = Path('metadata') / app.id - if not dir_path.is_dir(): + dir_path = os.path.join('metadata', app.id) + if not os.path.isdir(dir_path): return files = set() - for path in dir_path.iterdir(): - name = path.name - if not ( - path.is_file() or name == 'signatures' or locale_pattern.fullmatch(name) - ): + for name in os.listdir(dir_path): + path = os.path.join(dir_path, name) + if not (os.path.isfile(path) or name == 'signatures' or locale_pattern.match(name)): yield _("Found non-file at %s") % path continue files.add(name) - used = { - 'signatures', - } - for build in app.get('Builds', []): + used = {'signatures', } + for build in app.builds: for fname in build.patch: if fname not in files: - yield _("Unknown file '{filename}' in build '{versionName}'").format( - filename=fname, versionName=build.versionName - ) + yield _("Unknown file '{filename}' in build '{versionName}'")\ + .format(filename=fname, versionName=build.versionName) else: used.add(fname) for name in files.difference(used): - if locale_pattern.fullmatch(name): + if locale_pattern.match(name): continue - yield _("Unused file at %s") % (dir_path / name) + yield _("Unused file at %s") % os.path.join(dir_path, name) def check_format(app): - if common.options.format and not rewritemeta.proper_format(app): + if options.format and not rewritemeta.proper_format(app): yield _("Run rewritemeta to fix formatting") def check_license_tag(app): - """Ensure all license tags contain only valid/approved values. - - It is possible to disable license checking by setting a null or empty value, - e.g. `lint_licenses: ` or `lint_licenses: []` - - """ - if 'lint_licenses' in config: - lint_licenses = config['lint_licenses'] - if lint_licenses is None: - return - else: - lint_licenses = APPROVED_LICENSES - if app.License not in lint_licenses: - if lint_licenses == APPROVED_LICENSES: - yield _( - 'Unexpected license tag "{}"! Only use FSF or OSI ' - 'approved tags from https://spdx.org/license-list' - ).format(app.License) - else: - yield _( - 'Unexpected license tag "{}"! Only use license tags ' - 'configured in your config file' - ).format(app.License) + '''Ensure all license tags are in https://spdx.org/license-list''' + if app.License.rstrip('+') not in SPDX: + yield _('Invalid license tag "%s"! 
Use only tags from https://spdx.org/license-list') \ + % (app.License) def check_extlib_dir(apps): - dir_path = Path('build/extlib') - extlib_files = set() - for path in dir_path.glob('**/*'): - if path.is_file(): - extlib_files.add(path.relative_to(dir_path)) + dir_path = os.path.join('build', 'extlib') + unused_extlib_files = set() + for root, dirs, files in os.walk(dir_path): + for name in files: + unused_extlib_files.add(os.path.join(root, name)[len(dir_path) + 1:]) used = set() for app in apps: - if app.Disabled: - continue - archive_policy = common.calculate_archive_policy( - app, common.config['archive_older'] - ) - builds = [build for build in app.Builds if not build.disable] - - for i in range(len(builds)): - build = builds[i] + for build in app.builds: for path in build.extlibs: - path = Path(path) - if path not in extlib_files: - # Don't show error on archived versions - if i >= len(builds) - archive_policy: - yield _( - "{appid}: Unknown extlib {path} in build '{versionName}'" - ).format(appid=app.id, path=path, versionName=build.versionName) + if path not in unused_extlib_files: + yield _("{appid}: Unknown extlib {path} in build '{versionName}'")\ + .format(appid=app.id, path=path, versionName=build.versionName) else: used.add(path) - for path in extlib_files.difference(used): - if path.name not in [ - '.gitignore', - 'source.txt', - 'origin.txt', - 'md5.txt', - 'LICENSE', - 'LICENSE.txt', - 'COPYING', - 'COPYING.txt', - 'NOTICE', - 'NOTICE.txt', - ]: - yield _("Unused extlib at %s") % (dir_path / path) + for path in unused_extlib_files.difference(used): + if any(path.endswith(s) for s in [ + '.gitignore', + 'source.txt', 'origin.txt', 'md5.txt', + 'LICENSE', 'LICENSE.txt', + 'COPYING', 'COPYING.txt', + 'NOTICE', 'NOTICE.txt', + ]): + continue + yield _("Unused extlib at %s") % os.path.join(dir_path, path) def check_app_field_types(app): - """Check the fields have valid data types.""" + """Check the fields have valid data types""" + for field in app.keys(): v = app.get(field) t = metadata.fieldtype(field) if v is None: continue - elif field == 'Builds': + elif field == 'builds': if not isinstance(v, list): - yield ( - _( - "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" - ).format( - appid=app.id, - field=field, - type='list', - fieldtype=v.__class__.__name__, - ) - ) + yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!") + .format(appid=app.id, field=field, + type='list', fieldtype=v.__class__.__name__)) elif t == metadata.TYPE_LIST and not isinstance(v, list): - yield ( - _( - "{appid}: {field} must be a '{type}', but it is a '{fieldtype}!'" - ).format( - appid=app.id, - field=field, - type='list', - fieldtype=v.__class__.__name__, - ) - ) - elif t == metadata.TYPE_STRING and type(v) not in (str, bool, dict): - yield ( - _( - "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" - ).format( - appid=app.id, - field=field, - type='str', - fieldtype=v.__class__.__name__, - ) - ) - elif t == metadata.TYPE_STRINGMAP and not isinstance(v, dict): - yield ( - _( - "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" - ).format( - appid=app.id, - field=field, - type='dict', - fieldtype=v.__class__.__name__, - ) - ) - elif t == metadata.TYPE_INT and not isinstance(v, int): - yield ( - _( - "{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!" 
- ).format( - appid=app.id, - field=field, - type='int', - fieldtype=v.__class__.__name__, - ) - ) - - -def check_antiFeatures(app): - """Check the Anti-Features keys match those declared in the config.""" - pattern = ANTIFEATURES_PATTERN - msg = _("'{value}' is not a valid {field} in {appid}. Regex pattern: {pattern}") - - field = 'AntiFeatures' # App entries use capitalized CamelCase - for value in app.get(field, []): - if value not in ANTIFEATURES_KEYS: - yield msg.format(value=value, field=field, appid=app.id, pattern=pattern) - - field = 'antifeatures' # Build entries use all lowercase - for build in app.get('Builds', []): - build_antiFeatures = build.get(field, []) - for value in build_antiFeatures: - if value not in ANTIFEATURES_KEYS: - yield msg.format( - value=value, field=field, appid=app.id, pattern=pattern - ) + yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}!'") + .format(appid=app.id, field=field, + type='list', fieldtype=v.__class__.__name__)) + elif t == metadata.TYPE_STRING and not type(v) in (str, bool, dict): + yield(_("{appid}: {field} must be a '{type}', but it is a '{fieldtype}'!") + .format(appid=app.id, field=field, + type='str', fieldtype=v.__class__.__name__)) def check_for_unsupported_metadata_files(basedir=""): - """Check whether any non-metadata files are in metadata/.""" - basedir = Path(basedir) + """Checks whether any non-metadata files are in metadata/""" + global config - if not (basedir / 'metadata').exists(): - return False return_value = False - for f in (basedir / 'metadata').iterdir(): - if f.is_dir(): - if not Path(str(f) + '.yml').exists(): + formats = config['accepted_formats'] + for f in glob.glob(basedir + 'metadata/*') + glob.glob(basedir + 'metadata/.*'): + if os.path.isdir(f): + exists = False + for t in formats: + exists = exists or os.path.exists(f + '.' 
+ t) + if not exists: print(_('"%s/" has no matching metadata file!') % f) return_value = True - elif f.suffix == '.yml': - packageName = f.stem + elif os.path.splitext(f)[1][1:] in formats: + packageName = os.path.splitext(os.path.basename(f))[0] if not common.is_valid_package_name(packageName): - print( - '"' - + packageName - + '" is an invalid package name!\n' - + 'https://developer.android.com/studio/build/application-id' - ) + print('"' + packageName + '" is an invalid package name!\n' + + 'https://developer.android.com/studio/build/application-id') return_value = True else: - print( - _( - '"{path}" is not a supported file format (use: metadata/*.yml)' - ).format(path=f.relative_to(basedir)) - ) + print('"' + f.replace(basedir, '') + + '" is not a supported file format: (' + ','.join(formats) + ')') return_value = True return return_value def check_current_version_code(app): - """Check that the CurrentVersionCode is currently available.""" - if app.get('ArchivePolicy') == 0: + """Check that the CurrentVersionCode is currently available""" + + archive_policy = app.get('ArchivePolicy') + if archive_policy and archive_policy.split()[0] == "0": return cv = app.get('CurrentVersionCode') - if cv is not None and cv == 0: + if cv is not None and int(cv) == 0: return - builds = app.get('Builds') + builds = app.get('builds') active_builds = 0 min_versionCode = None if builds: for build in builds: - vc = build['versionCode'] + vc = int(build['versionCode']) if min_versionCode is None or min_versionCode > vc: min_versionCode = vc if not build.get('disable'): @@ -788,232 +553,35 @@ def check_current_version_code(app): break if active_builds == 0: return # all builds are disabled - if cv is not None and cv < min_versionCode: - yield ( - _( - 'CurrentVersionCode {cv} is less than oldest build entry {versionCode}' - ).format(cv=cv, versionCode=min_versionCode) - ) - - -def check_updates_expected(app): - """Check if update checking makes sense.""" - if (app.get('NoSourceSince') or app.get('ArchivePolicy') == 0) and not all( - app.get(key, 'None') == 'None' for key in ('AutoUpdateMode', 'UpdateCheckMode') - ): - yield _( - 'App has NoSourceSince or ArchivePolicy "0 versions" or 0 but AutoUpdateMode or UpdateCheckMode are not None' - ) - - -def check_updates_ucm_http_aum_pattern(app): # noqa: D403 - """AutoUpdateMode with UpdateCheckMode: HTTP must have a pattern.""" - if app.UpdateCheckMode == "HTTP" and app.AutoUpdateMode == "Version": - yield _("AutoUpdateMode with UpdateCheckMode: HTTP must have a pattern.") - - -def check_certificate_pinned_binaries(app): - keys = app.get('AllowedAPKSigningKeys') - known_keys = common.config.get('apk_signing_key_block_list', []) - if keys: - if known_keys: - for key in keys: - if key in known_keys: - yield _('Known debug key is used in AllowedAPKSigningKeys: ') + key - return - if app.get('Binaries') is not None: - yield _( - 'App has Binaries but does not have corresponding AllowedAPKSigningKeys to pin certificate.' - ) - return - builds = app.get('Builds') - if builds is None: - return - for build in builds: - if build.get('binary') is not None: - yield _( - 'App version has binary but does not have corresponding AllowedAPKSigningKeys to pin certificate.' 
- ) - return - - -def lint_config(arg): - path = Path(arg) - passed = True - - mirrors_name = f'{common.MIRRORS_CONFIG_NAME}.yml' - config_name = f'{common.CONFIG_CONFIG_NAME}.yml' - categories_name = f'{common.CATEGORIES_CONFIG_NAME}.yml' - antifeatures_name = f'{common.ANTIFEATURES_CONFIG_NAME}.yml' - - yamllintresult = common.run_yamllint(path) - if yamllintresult: - print(yamllintresult) - passed = False - - with path.open() as fp: - data = yaml.load(fp) - common.config_type_check(arg, data) - - if path.name == mirrors_name: - import pycountry - - valid_country_codes = [c.alpha_2 for c in pycountry.countries] - for mirror in data: - code = mirror.get('countryCode') - if code and code not in valid_country_codes: - passed = False - msg = _( - '{path}: "{code}" is not a valid ISO_3166-1 alpha-2 country code!' - ).format(path=str(path), code=code) - if code.upper() in valid_country_codes: - m = [code.upper()] - else: - m = difflib.get_close_matches( - code.upper(), valid_country_codes, 2, 0.5 - ) - if m: - msg += ' ' - msg += _('Did you mean {code}?').format(code=', '.join(sorted(m))) - print(msg) - elif path.name == config_name and path.parent.name != 'config': - valid_keys = set(tuple(common.default_config) + bool_keys + check_config_keys) - for key in ignore_config_keys: - if key in valid_keys: - valid_keys.remove(key) - for key in data: - if key not in valid_keys: - passed = False - msg = _("ERROR: {key} not a valid key!").format(key=key) - m = difflib.get_close_matches(key.lower(), valid_keys, 2, 0.5) - if m: - msg += ' ' - msg += _('Did you mean {code}?').format(code=', '.join(sorted(m))) - print(msg) - continue - - if key in bool_keys: - t = bool - else: - t = type(common.default_config.get(key, "")) - - show_error = False - if t is str: - if type(data[key]) not in (str, list, dict): - passed = False - show_error = True - elif type(data[key]) != t: - passed = False - show_error = True - if show_error: - print( - _("ERROR: {key}'s value should be of type {t}!").format( - key=key, t=t.__name__ - ) - ) - elif path.name in (config_name, categories_name, antifeatures_name): - for key in data: - if path.name == config_name and key not in ('archive', 'repo'): - passed = False - print( - _('ERROR: {key} in {path} is not "archive" or "repo"!').format( - key=key, path=path - ) - ) - allowed_keys = ['name'] - if path.name in [config_name, antifeatures_name]: - allowed_keys.append('description') - # only for source strings currently - if path.parent.name == 'config': - allowed_keys.append('icon') - for subkey in data[key]: - if subkey not in allowed_keys: - passed = False - print( - _( - 'ERROR: {key}:{subkey} in {path} is not in allowed keys: {allowed_keys}!' - ).format( - key=key, - subkey=subkey, - path=path, - allowed_keys=', '.join(allowed_keys), - ) - ) - - return passed + if cv is not None and int(cv) < min_versionCode: + yield(_('CurrentVersionCode {cv} is less than oldest build entry {versionCode}') + .format(cv=cv, versionCode=min_versionCode)) def main(): - global config + + global config, options # Parse command line... - parser = ArgumentParser() + parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]") common.setup_global_opts(parser) - parser.add_argument( - "-f", - "--format", - action="store_true", - default=False, - help=_("Also warn about formatting issues, like rewritemeta -l"), - ) - parser.add_argument( - '--force-yamllint', - action="store_true", - default=False, - help=_( - "When linting the entire repository yamllint is disabled by default. 
" - "This option forces yamllint regardless." - ), - ) - parser.add_argument( - "appid", nargs='*', help=_("application ID of file to operate on") - ) + parser.add_argument("-f", "--format", action="store_true", default=False, + help=_("Also warn about formatting issues, like rewritemeta -l")) + parser.add_argument("appid", nargs='*', help=_("applicationId in the form APPID")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W - config = common.read_config() - load_antiFeatures_config() - load_categories_config() + config = common.read_config(options) - if options.force_yamllint: - import yamllint # throw error if it is not installed - - yamllint # make pyflakes ignore this - - paths = list() - for arg in options.appid: - if ( - arg == common.CONFIG_FILE - or Path(arg).parent.name == 'config' - or Path(arg).parent.parent.name == 'config' # localized - ): - paths.append(arg) - - failed = 0 - if paths: - for path in paths: - options.appid.remove(path) - if not lint_config(path): - failed += 1 - # an empty list of appids means check all apps, avoid that if files were given - if not options.appid: - sys.exit(failed) - - if not lint_metadata(options): - failed += 1 - - if failed: - sys.exit(failed) - - -def lint_metadata(options): - apps = common.read_app_args(options.appid) + # Get all apps... + allapps = metadata.read_metadata(xref=True) + apps = common.read_app_args(options.appid, allapps, False) anywarns = check_for_unsupported_metadata_files() apps_check_funcs = [] - if not options.appid: + if len(options.appid) == 0: # otherwise it finds tons of unused extlibs apps_check_funcs.append(check_extlib_dir) for check_func in apps_check_funcs: @@ -1025,41 +593,10 @@ def lint_metadata(options): if app.Disabled: continue - # only run yamllint when linting individual apps. - if options.appid or options.force_yamllint: - # run yamllint on app metadata - ymlpath = Path('metadata') / (appid + '.yml') - if ymlpath.is_file(): - yamllintresult = common.run_yamllint(ymlpath) - if yamllintresult: - print(yamllintresult) - - # run yamllint on srclib metadata - srclibs = set() - for build in app.get('Builds', []): - for srclib in build.srclibs: - name, _ref, _number, _subdir = common.parse_srclib_spec(srclib) - srclibs.add(name + '.yml') - for srclib in srclibs: - srclibpath = Path('srclibs') / srclib - if srclibpath.is_file(): - if platform.system() == 'Windows': - # Handle symlink on Windows - symlink = srclibpath.read_text() - if symlink in srclibs: - continue - elif (srclibpath.parent / symlink).is_file(): - srclibpath = srclibpath.parent / symlink - yamllintresult = common.run_yamllint(srclibpath) - if yamllintresult: - print(yamllintresult) - app_check_funcs = [ check_app_field_types, - check_antiFeatures, check_regexes, check_update_check_data_url, - check_update_check_data_int, check_vercode_operation, check_ucm_tags, check_char_limits, @@ -1070,14 +607,12 @@ def lint_metadata(options): check_categories, check_duplicates, check_mediawiki_links, + check_bulleted_lists, check_builds, check_files_dir, check_format, check_license_tag, check_current_version_code, - check_updates_expected, - check_updates_ucm_http_aum_pattern, - check_certificate_pinned_binaries, ] for check_func in app_check_funcs: @@ -1085,186 +620,360 @@ def lint_metadata(options): anywarns = True print("%s: %s" % (appid, warn)) - return not anywarns + if anywarns: + sys.exit(1) -# A compiled, public domain list of official SPDX license tags. 
generated -# using: `python3 -m spdx_license_list print --filter-fsf-or-osi` Only contains -# licenes approved by either FSF to be free/libre software or OSI to be open -# source -APPROVED_LICENSES = [ - '0BSD', - 'AAL', - 'AFL-1.1', - 'AFL-1.2', - 'AFL-2.0', - 'AFL-2.1', - 'AFL-3.0', - 'AGPL-3.0-only', - 'AGPL-3.0-or-later', - 'APL-1.0', - 'APSL-1.0', - 'APSL-1.1', - 'APSL-1.2', - 'APSL-2.0', - 'Apache-1.0', - 'Apache-1.1', - 'Apache-2.0', - 'Artistic-1.0', - 'Artistic-1.0-Perl', - 'Artistic-1.0-cl8', - 'Artistic-2.0', - 'BSD-1-Clause', - 'BSD-2-Clause', - 'BSD-2-Clause-Patent', - 'BSD-3-Clause', - 'BSD-3-Clause-Clear', - 'BSD-3-Clause-LBNL', - 'BSD-4-Clause', - 'BSL-1.0', - 'BitTorrent-1.1', - 'CAL-1.0', - 'CAL-1.0-Combined-Work-Exception', - 'CATOSL-1.1', - 'CC-BY-4.0', - 'CC-BY-SA-4.0', - 'CC0-1.0', - 'CDDL-1.0', - 'CECILL-2.0', - 'CECILL-2.1', - 'CECILL-B', - 'CECILL-C', - 'CNRI-Python', - 'CPAL-1.0', - 'CPL-1.0', - 'CUA-OPL-1.0', - 'ClArtistic', - 'Condor-1.1', - 'ECL-1.0', - 'ECL-2.0', - 'EFL-1.0', - 'EFL-2.0', - 'EPL-1.0', - 'EPL-2.0', - 'EUDatagrid', - 'EUPL-1.1', - 'EUPL-1.2', - 'Entessa', - 'FSFAP', - 'FTL', - 'Fair', - 'Frameworx-1.0', - 'GFDL-1.1-only', - 'GFDL-1.1-or-later', - 'GFDL-1.2-only', - 'GFDL-1.2-or-later', - 'GFDL-1.3-only', - 'GFDL-1.3-or-later', - 'GPL-2.0-only', - 'GPL-2.0-or-later', - 'GPL-3.0-only', - 'GPL-3.0-or-later', - 'HPND', - 'IJG', - 'IPA', - 'IPL-1.0', - 'ISC', - 'Imlib2', - 'Intel', - 'LGPL-2.0-only', - 'LGPL-2.0-or-later', - 'LGPL-2.1-only', - 'LGPL-2.1-or-later', - 'LGPL-3.0-only', - 'LGPL-3.0-or-later', - 'LPL-1.0', - 'LPL-1.02', - 'LPPL-1.2', - 'LPPL-1.3a', - 'LPPL-1.3c', - 'LiLiQ-P-1.1', - 'LiLiQ-R-1.1', - 'LiLiQ-Rplus-1.1', - 'MIT', - 'MIT-0', - 'MPL-1.0', - 'MPL-1.1', - 'MPL-2.0', - 'MPL-2.0-no-copyleft-exception', - 'MS-PL', - 'MS-RL', - 'MirOS', - 'Motosoto', - 'MulanPSL-2.0', - 'Multics', - 'NASA-1.3', - 'NCSA', - 'NGPL', - 'NOSL', - 'NPL-1.0', - 'NPL-1.1', - 'NPOSL-3.0', - 'NTP', - 'Naumen', - 'Nokia', - 'OCLC-2.0', - 'ODbL-1.0', - 'OFL-1.0', - 'OFL-1.1', - 'OFL-1.1-RFN', - 'OFL-1.1-no-RFN', - 'OGTSL', - 'OLDAP-2.3', - 'OLDAP-2.7', - 'OLDAP-2.8', - 'OSET-PL-2.1', - 'OSL-1.0', - 'OSL-1.1', - 'OSL-2.0', - 'OSL-2.1', - 'OSL-3.0', - 'OpenSSL', - 'PHP-3.0', - 'PHP-3.01', - 'PostgreSQL', - 'Python-2.0', - 'QPL-1.0', - 'RPL-1.1', - 'RPL-1.5', - 'RPSL-1.0', - 'RSCPL', - 'Ruby', - 'SGI-B-2.0', - 'SISSL', - 'SMLNJ', - 'SPL-1.0', - 'SimPL-2.0', - 'Sleepycat', - 'UCL-1.0', - 'UPL-1.0', - 'Unicode-DFS-2016', - 'Unlicense', - 'VSL-1.0', - 'Vim', - 'W3C', - 'WTFPL', - 'Watcom-1.0', - 'X11', - 'XFree86-1.1', - 'Xnet', - 'YPL-1.1', - 'ZPL-2.0', - 'ZPL-2.1', - 'Zend-2.0', - 'Zimbra-1.3', - 'Zlib', - 'gnuplot', - 'iMatix', - 'xinetd', +# A compiled, public domain list of official SPDX license tags from: +# https://github.com/sindresorhus/spdx-license-list/blob/v4.0.0/spdx-simple.json +# The deprecated license tags have been removed from the list, they are at the +# bottom, starting after the last license tags that start with Z. 
+# This is at the bottom, since its a long list of data +SPDX = [ + "PublicDomain", # an F-Droid addition, until we can enforce a better option + "0BSD", + "AAL", + "Abstyles", + "Adobe-2006", + "Adobe-Glyph", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "Afmparse", + "AGPL-1.0", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Aladdin", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Artistic-1.0-cl8", + "Artistic-1.0-Perl", + "Artistic-1.0", + "Artistic-2.0", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "Borceux", + "BSD-1-Clause", + "BSD-2-Clause-FreeBSD", + "BSD-2-Clause-NetBSD", + "BSD-2-Clause-Patent", + "BSD-2-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause", + "BSD-4-Clause-UC", + "BSD-4-Clause", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "bzip2-1.0.5", + "bzip2-1.0.6", + "Caldera", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-3.0", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-4.0", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDLA-Permissive-1.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "ClArtistic", + "CNRI-Jython", + "CNRI-Python-GPL-Compatible", + "CNRI-Python", + "Condor-1.1", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "Crossword", + "CrystalStacker", + "CUA-OPL-1.0", + "Cube", + "curl", + "D-FSL-1.0", + "diffmark", + "DOC", + "Dotseqn", + "DSDP", + "dvipdfm", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "eGenix", + "Entessa", + "EPL-1.0", + "EPL-2.0", + "ErlPL-1.1", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Eurosym", + "Fair", + "Frameworx-1.0", + "FreeImage", + "FSFAP", + "FSFUL", + "FSFULLR", + "FTL", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "Giftware", + "GL2PS", + "Glide", + "Glulxe", + "gnuplot", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "gSOAP-1.3b", + "HaskellReport", + "HPND", + "IBM-pibs", + "ICU", + "IJG", + "ImageMagick", + "iMatix", + "Imlib2", + "Info-ZIP", + "Intel-ACPI", + "Intel", + "Interbase-1.0", + "IPA", + "IPL-1.0", + "ISC", + "JasPer-2.0", + "JSON", + "LAL-1.2", + "LAL-1.3", + "Latex2e", + "Leptonica", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "Libpng", + "libtiff", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "MakeIndex", + "MirOS", + "MIT-advertising", + "MIT-CMU", + "MIT-enna", + "MIT-feh", + "MIT", + "MITNFA", + "Motosoto", + "mpich2", + "MPL-1.0", + 
"MPL-1.1", + "MPL-2.0-no-copyleft-exception", + "MPL-2.0", + "MS-PL", + "MS-RL", + "MTLL", + "Multics", + "Mup", + "NASA-1.3", + "Naumen", + "NBPL-1.0", + "NCSA", + "Net-SNMP", + "NetCDF", + "Newsletr", + "NGPL", + "NLOD-1.0", + "NLPL", + "Nokia", + "NOSL", + "Noweb", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "OCCT-PL", + "OCLC-2.0", + "ODbL-1.0", + "OFL-1.0", + "OFL-1.1", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0.1", + "OLDAP-2.0", + "OLDAP-2.1", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OML", + "OpenSSL", + "OPL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "Plexus", + "PostgreSQL", + "psfrag", + "psutils", + "Python-2.0", + "Qhull", + "QPL-1.0", + "Rdisc", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Ruby", + "SAX-PD", + "Saxpath", + "SCEA", + "Sendmail", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SimPL-2.0", + "SISSL-1.2", + "SISSL", + "Sleepycat", + "SMLNJ", + "SMPPL", + "SNIA", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SPL-1.0", + "SugarCRM-1.1.3", + "SWL", + "TCL", + "TCP-wrappers", + "TMate", + "TORQUE-1.1", + "TOSL", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Unlicense", + "UPL-1.0", + "Vim", + "VOSTROM", + "VSL-1.0", + "W3C-19980720", + "W3C-20150513", + "W3C", + "Watcom-1.0", + "Wsuipa", + "WTFPL", + "X11", + "Xerox", + "XFree86-1.1", + "xinetd", + "Xnet", + "xpp", + "XSkat", + "YPL-1.0", + "YPL-1.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "zlib-acknowledgement", + "Zlib", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", ] -# an F-Droid addition, until we can enforce a better option -APPROVED_LICENSES.append("PublicDomain") - if __name__ == "__main__": main() diff --git a/fdroidserver/looseversion.py b/fdroidserver/looseversion.py deleted file mode 100644 index c2a32213..00000000 --- a/fdroidserver/looseversion.py +++ /dev/null @@ -1,300 +0,0 @@ -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; -# All Rights Reserved" are retained in Python alone or in any derivative version -# prepared by Licensee. -# -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -# -# SPDX-License-Identifier: Python-2.0 -# -# downloaded from: -# https://github.com/effigies/looseversion/blob/e1a5a176a92dc6825deda4205c1be6d05e9ed352/src/looseversion/__init__.py - -"""Provides classes to represent module version numbers (one class for -each style of version numbering). There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" -import re -import sys - -__license__ = "Python License 2.0" - -# The rules according to Greg Stein: -# 1) a version number has 1 or more numbers separated by a period or by -# sequences of letters. If only periods, then these are compared -# left-to-right to determine an ordering. -# 2) sequences of letters are part of the tuple for comparison and are -# compared lexicographically -# 3) recognize the numeric components may have leading zeroes -# -# The LooseVersion class below implements these rules: a version number -# string is split up into a tuple of integer and string components, and -# comparison is a simple tuple comparison. This means that version -# numbers behave in a predictable and obvious way, but a way that might -# not necessarily be how people *want* version numbers to behave. There -# wouldn't be a problem if people could stick to purely numeric version -# numbers: just split on period and compare the numbers as tuples. 
-# However, people insist on putting letters into their version numbers; -# the most common purpose seems to be: -# - indicating a "pre-release" version -# ('alpha', 'beta', 'a', 'b', 'pre', 'p') -# - indicating a post-release patch ('p', 'pl', 'patch') -# but of course this can't cover all version number schemes, and there's -# no way to know what a programmer means without asking him. -# -# The problem is what to do with letters (and other non-numeric -# characters) in a version number. The current implementation does the -# obvious and predictable thing: keep them as strings and compare -# lexically within a tuple comparison. This has the desired effect if -# an appended letter sequence implies something "post-release": -# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". -# -# However, if letters in a version number imply a pre-release version, -# the "obvious" thing isn't correct. Eg. you would expect that -# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison -# implemented here, this just isn't so. -# -# Two possible solutions come to mind. The first is to tie the -# comparison algorithm to a particular set of semantic rules, as has -# been done in the StrictVersion class above. This works great as long -# as everyone can go along with bondage and discipline. Hopefully a -# (large) subset of Python module programmers will agree that the -# particular flavor of bondage and discipline provided by StrictVersion -# provides enough benefit to be worth using, and will submit their -# version numbering scheme to its domination. The free-thinking -# anarchists in the lot will never give in, though, and something needs -# to be done to accommodate them. -# -# Perhaps a "moderately strict" version class could be implemented that -# lets almost anything slide (syntactically), and makes some heuristic -# assumptions about non-digits in version number strings. This could -# sink into special-case-hell, though; if I was as talented and -# idiosyncratic as Larry Wall, I'd go ahead and implement a class that -# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is -# just as happy dealing with things like "2g6" and "1.13++". I don't -# think I'm smart enough to do it right though. -# -# In any case, I've coded the test suite for this module (see -# ../test/test_version.py) specifically to fail on things like comparing -# "1.2a2" and "1.2". That's not because the *code* is doing anything -# wrong, it's because the simple, obvious design doesn't match my -# complicated, hairy expectations for real-world version numbers. It -# would be a snap to fix the test suite to say, "Yep, LooseVersion does -# the Right Thing" (ie. the code matches the conception). But I'd rather -# have a conception that matches common notions about version numbers. - - -if sys.version_info >= (3,): - - class _Py2Int(int): - """Integer object that compares < any string""" - - def __gt__(self, other): - if isinstance(other, str): - return False - return super().__gt__(other) - - def __lt__(self, other): - if isinstance(other, str): - return True - return super().__lt__(other) - -else: - _Py2Int = int - - -class LooseVersion(object): - """Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. 
When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). - """ - - component_re = re.compile(r"(\d+ | [a-z]+ | \.)", re.VERBOSE) - - def __init__(self, vstring=None): - if vstring: - self.parse(vstring) - - def __eq__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return NotImplemented - return c == 0 - - def __lt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return NotImplemented - return c < 0 - - def __le__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return NotImplemented - return c <= 0 - - def __gt__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return NotImplemented - return c > 0 - - def __ge__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return NotImplemented - return c >= 0 - - def parse(self, vstring): - # I've given up on thinking I can reconstruct the version string - # from the parsed tuple -- so I just store the string here for - # use by __str__ - self.vstring = vstring - components = [x for x in self.component_re.split(vstring) if x and x != "."] - for i, obj in enumerate(components): - try: - components[i] = int(obj) - except ValueError: - pass - - self.version = components - - def __str__(self): - return self.vstring - - def __repr__(self): - return "LooseVersion ('%s')" % str(self) - - def _cmp(self, other): - other = self._coerce(other) - if other is NotImplemented: - return NotImplemented - - if self.version == other.version: - return 0 - if self.version < other.version: - return -1 - if self.version > other.version: - return 1 - return NotImplemented - - @classmethod - def _coerce(cls, other): - if isinstance(other, cls): - return other - elif isinstance(other, str): - return cls(other) - elif "distutils" in sys.modules: - # Using this check to avoid importing distutils and suppressing the warning - try: - from distutils.version import LooseVersion as deprecated - except ImportError: - return NotImplemented - if isinstance(other, deprecated): - return cls(str(other)) - return NotImplemented - - -class LooseVersion2(LooseVersion): - """LooseVersion variant that restores Python 2 semantics - - In Python 2, comparing LooseVersions where paired components could be string - and int always resulted in the string being "greater". In Python 3, this produced - a TypeError. - """ - - def parse(self, vstring): - # I've given up on thinking I can reconstruct the version string - # from the parsed tuple -- so I just store the string here for - # use by __str__ - self.vstring = vstring - components = [x for x in self.component_re.split(vstring) if x and x != "."] - for i, obj in enumerate(components): - try: - components[i] = _Py2Int(obj) - except ValueError: - pass - - self.version = components diff --git a/fdroidserver/metadata.py b/fdroidserver/metadata.py index 0d9195be..57c675c7 100644 --- a/fdroidserver/metadata.py +++ b/fdroidserver/metadata.py @@ -18,19 +18,20 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import logging -import math +import json import os -import platform import re +import glob +import html +import logging +import textwrap +import io +import yaml from collections import OrderedDict -from pathlib import Path -import ruamel.yaml - -from . import _, common -from ._yaml import yaml -from .exception import MetaDataException +import fdroidserver.common +from fdroidserver import _ +from fdroidserver.exception import MetaDataException, FDroidException srclibs = None warnings_action = None @@ -40,22 +41,68 @@ warnings_action = None VALID_USERNAME_REGEX = re.compile(r'^[a-z\d](?:[a-z\d/._-]){0,38}$', re.IGNORECASE) -def _warn_or_exception(value, cause=None): - """Output warning or Exception depending on -W.""" +def warn_or_exception(value): + '''output warning or Exception depending on -W''' if warnings_action == 'ignore': pass elif warnings_action == 'error': - if cause: - raise MetaDataException(value) from cause - else: - raise MetaDataException(value) + raise MetaDataException(value) else: logging.warning(value) +# To filter which ones should be written to the metadata files if +# present +app_fields = set([ + 'Disabled', + 'AntiFeatures', + 'Provides', + 'Categories', + 'License', + 'Author Name', + 'Author Email', + 'Author Web Site', + 'Web Site', + 'Source Code', + 'Issue Tracker', + 'Translation', + 'Changelog', + 'Donate', + 'FlattrID', + 'Liberapay', + 'LiberapayID', + 'OpenCollective', + 'Bitcoin', + 'Litecoin', + 'Name', + 'Auto Name', + 'Summary', + 'Description', + 'Requires Root', + 'Repo Type', + 'Repo', + 'Binaries', + 'Maintainer Notes', + 'Archive Policy', + 'Auto Update Mode', + 'Update Check Mode', + 'Update Check Ignore', + 'Vercode Operation', + 'Update Check Name', + 'Update Check Data', + 'Current Version', + 'Current Version Code', + 'No Source Since', + 'Build', + + 'comments', # For formats that don't do inline comments + 'builds', # For formats that do builds as a list +]) + yaml_app_field_order = [ 'Disabled', 'AntiFeatures', + 'Provides', 'Categories', 'License', 'AuthorName', @@ -67,7 +114,9 @@ yaml_app_field_order = [ 'Translation', 'Changelog', 'Donate', + 'FlattrID', 'Liberapay', + 'LiberapayID', 'OpenCollective', 'Bitcoin', 'Litecoin', @@ -85,8 +134,6 @@ yaml_app_field_order = [ '\n', 'Builds', '\n', - 'AllowedAPKSigningKeys', - '\n', 'MaintainerNotes', '\n', 'ArchivePolicy', @@ -107,6 +154,7 @@ yaml_app_fields = [x for x in yaml_app_field_order if x != '\n'] class App(dict): + def __init__(self, copydict=None): if copydict: super().__init__(copydict) @@ -114,7 +162,7 @@ class App(dict): super().__init__() self.Disabled = None - self.AntiFeatures = dict() + self.AntiFeatures = [] self.Provides = None self.Categories = [] self.License = 'Unknown' @@ -127,7 +175,9 @@ class App(dict): self.Translation = '' self.Changelog = '' self.Donate = None + self.FlattrID = None self.Liberapay = None + self.LiberapayID = None self.OpenCollective = None self.Bitcoin = None self.Litecoin = None @@ -139,13 +189,12 @@ class App(dict): self.RepoType = '' self.Repo = '' self.Binaries = None - self.AllowedAPKSigningKeys = [] self.MaintainerNotes = '' self.ArchivePolicy = None self.AutoUpdateMode = 'None' self.UpdateCheckMode = 'None' self.UpdateCheckIgnore = None - self.VercodeOperation = [] + self.VercodeOperation = None self.UpdateCheckName = None self.UpdateCheckData = None self.CurrentVersion = '' @@ -154,7 +203,8 @@ class App(dict): self.id = None self.metadatapath = None - self.Builds = [] + self.builds = [] + self.comments = {} self.added = None 
self.lastUpdated = None @@ -173,7 +223,15 @@ class App(dict): else: raise AttributeError("No such attribute: " + name) + def get_last_build(self): + if len(self.builds) > 0: + return self.builds[-1] + else: + return Build() + +TYPE_UNKNOWN = 0 +TYPE_OBSOLETE = 1 TYPE_STRING = 2 TYPE_BOOL = 3 TYPE_LIST = 4 @@ -181,19 +239,15 @@ TYPE_SCRIPT = 5 TYPE_MULTILINE = 6 TYPE_BUILD = 7 TYPE_INT = 8 -TYPE_STRINGMAP = 9 fieldtypes = { 'Description': TYPE_MULTILINE, 'MaintainerNotes': TYPE_MULTILINE, 'Categories': TYPE_LIST, - 'AntiFeatures': TYPE_STRINGMAP, - 'RequiresRoot': TYPE_BOOL, - 'AllowedAPKSigningKeys': TYPE_LIST, - 'Builds': TYPE_BUILD, - 'VercodeOperation': TYPE_LIST, - 'CurrentVersionCode': TYPE_INT, - 'ArchivePolicy': TYPE_INT, + 'AntiFeatures': TYPE_LIST, + 'Build': TYPE_BUILD, + 'BuildVersion': TYPE_OBSOLETE, + 'UseBuilt': TYPE_OBSOLETE, } @@ -205,9 +259,7 @@ def fieldtype(name): # In the order in which they are laid out on files -build_flags = [ - 'versionName', - 'versionCode', +build_flags_order = [ 'disable', 'commit', 'timeout', @@ -218,8 +270,8 @@ build_flags = [ 'patch', 'gradle', 'maven', + 'buildozer', 'output', - 'binary', 'srclibs', 'oldsdkloc', 'encoding', @@ -238,13 +290,17 @@ build_flags = [ 'preassemble', 'gradleprops', 'antcommands', - 'postbuild', 'novcheck', 'antifeatures', ] +# old .txt format has version name/code inline in the 'Build:' line +# but YAML and JSON have a explicit key for them +build_flags = ['versionName', 'versionCode'] + build_flags_order + class Build(dict): + def __init__(self, copydict=None): super().__init__() self.disable = '' @@ -256,9 +312,9 @@ class Build(dict): self.init = '' self.patch = [] self.gradle = [] - self.maven = None + self.maven = False + self.buildozer = False self.output = None - self.binary = None self.srclibs = [] self.oldsdkloc = False self.encoding = None @@ -277,9 +333,8 @@ class Build(dict): self.preassemble = [] self.gradleprops = [] self.antcommands = [] - self.postbuild = '' self.novcheck = False - self.antifeatures = dict() + self.antifeatures = [] if copydict: super().__init__(copydict) return @@ -299,12 +354,8 @@ class Build(dict): else: raise AttributeError("No such attribute: " + name) - @classmethod - def to_yaml(cls, representer, node): - return representer.represent_dict(node) - def build_method(self): - for f in ['maven', 'gradle']: + for f in ['maven', 'gradle', 'buildozer']: if self.get(f): return f if self.output: @@ -315,25 +366,19 @@ class Build(dict): def output_method(self): if self.output: return 'raw' - for f in ['maven', 'gradle']: + for f in ['maven', 'gradle', 'buildozer']: if self.get(f): return f return 'ant' - def ndk_path(self) -> str: - """Return the path string of the first configured NDK or an empty string.""" - ndk = self.ndk - if isinstance(ndk, list): - ndk = self.ndk[0] - path = common.config['ndk_paths'].get(ndk) - if path and not isinstance(path, str): - raise TypeError('NDK path is not string') - if path: - return path - for vsn, path in common.config['ndk_paths'].items(): - if not vsn.endswith("_orig") and path and os.path.basename(path) == ndk: - return path - return '' + def ndk_path(self): + version = self.ndk + if not version: + version = 'r12b' # falls back to latest + paths = fdroidserver.common.config['ndk_paths'] + if version not in paths: + return '' + return paths[version] flagtypes = { @@ -354,13 +399,12 @@ flagtypes = { 'init': TYPE_SCRIPT, 'prebuild': TYPE_SCRIPT, 'build': TYPE_SCRIPT, - 'postbuild': TYPE_SCRIPT, 'submodules': TYPE_BOOL, 'oldsdkloc': TYPE_BOOL, 
'forceversion': TYPE_BOOL, 'forcevercode': TYPE_BOOL, 'novcheck': TYPE_BOOL, - 'antifeatures': TYPE_STRINGMAP, + 'antifeatures': TYPE_LIST, 'timeout': TYPE_INT, } @@ -371,8 +415,9 @@ def flagtype(name): return TYPE_STRING -class FieldValidator: - """Designate App metadata field types and checks that it matches. +class FieldValidator(): + """ + Designates App metadata field types and checks that it matches 'name' - The long name of the field type 'matching' - List of possible values or regex expression @@ -395,23 +440,26 @@ class FieldValidator: values = [v] for v in values: if not self.compiled.match(v): - _warn_or_exception( - _( - "'{value}' is not a valid {field} in {appid}. Regex pattern: {pattern}" - ).format( - value=v, field=self.name, appid=appid, pattern=self.matching - ) - ) + warn_or_exception(_("'{value}' is not a valid {field} in {appid}. Regex pattern: {pattern}") + .format(value=v, field=self.name, appid=appid, pattern=self.matching)) # Generic value types valuetypes = { + FieldValidator("Flattr ID", + r'^[0-9a-z]+$', + ['FlattrID']), + FieldValidator("Liberapay", VALID_USERNAME_REGEX, ['Liberapay']), + FieldValidator("Liberapay ID", + r'^[0-9]+$', + ['LiberapayID']), + FieldValidator("Open Collective", - VALID_USERNAME_REGEX, + r'^[0-9A-Za-z-]+$', ['OpenCollective']), FieldValidator("HTTP link", @@ -427,7 +475,7 @@ valuetypes = { ["Bitcoin"]), FieldValidator("Litecoin address", - r'^([LM3][a-km-zA-HJ-NP-Z1-9]{26,33}|ltc1[a-z0-9]{39})$', + r'^[LM3][a-km-zA-HJ-NP-Z1-9]{26,33}$', ["Litecoin"]), FieldValidator("Repo Type", @@ -438,16 +486,20 @@ valuetypes = { r'^http[s]?://', ["Binaries"]), - FieldValidator("AllowedAPKSigningKeys", - r'^[a-fA-F0-9]{64}$', - ["AllowedAPKSigningKeys"]), + FieldValidator("Archive Policy", + r'^[0-9]+ versions$', + ["ArchivePolicy"]), + + FieldValidator("Anti-Feature", + r'^(Ads|Tracking|NonFreeNet|NonFreeDep|NonFreeAdd|UpstreamNonFree|NonFreeAssets|KnownVuln|ApplicationDebuggable|NoSourceSince)$', + ["AntiFeatures"]), FieldValidator("Auto Update Mode", - r"^(Version.*|None)$", + r"^(Version .+|None)$", ["AutoUpdateMode"]), FieldValidator("Update Check Mode", - r"^(Tags|Tags .+|RepoManifest|RepoManifest/.+|HTTP|Static|None)$", + r"^(Tags|Tags .+|RepoManifest|RepoManifest/.+|RepoTrunk|HTTP|Static|None)$", ["UpdateCheckMode"]) } @@ -459,62 +511,239 @@ def check_metadata(app): v.check(app[k], app.id) -def parse_yaml_srclib(metadatapath): - thisinfo = {'RepoType': '', 'Repo': '', 'Subdir': None, 'Prepare': None} +# Formatter for descriptions. Create an instance, and call parseline() with +# each line of the description source from the metadata. At the end, call +# end() and then text_txt and text_html will contain the result. 
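The comment above describes the calling pattern; here is a minimal usage sketch (assuming the DescriptionFormatter class defined just below, with no link resolver, so [[appid]] cross-references are not exercised):

formatter = DescriptionFormatter(None)
for line in "A demo app.\n\n* feature one\n* feature two".splitlines():
    formatter.parseline(line)
formatter.end()
plain_text = formatter.text_txt   # plain text, paragraphs wrapped to 80 columns
html_text = formatter.text_html   # HTML rendering with paragraph and list markup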
+class DescriptionFormatter: - if not metadatapath.exists(): - _warn_or_exception( - _("Invalid scrlib metadata: '{file}' does not exist").format( - file=metadatapath - ) - ) + stNONE = 0 + stPARA = 1 + stUL = 2 + stOL = 3 + + def __init__(self, linkres): + self.bold = False + self.ital = False + self.state = self.stNONE + self.laststate = self.stNONE + self.text_html = '' + self.text_txt = '' + self.html = io.StringIO() + self.text = io.StringIO() + self.para_lines = [] + self.linkResolver = None + self.linkResolver = linkres + + def endcur(self, notstates=None): + if notstates and self.state in notstates: + return + if self.state == self.stPARA: + self.endpara() + elif self.state == self.stUL: + self.endul() + elif self.state == self.stOL: + self.endol() + + def endpara(self): + self.laststate = self.state + self.state = self.stNONE + whole_para = ' '.join(self.para_lines) + self.addtext(whole_para) + wrapped = textwrap.fill(whole_para, 80, + break_long_words=False, + break_on_hyphens=False) + self.text.write(wrapped) + self.html.write('

</p>')
+        del self.para_lines[:]
+
+    def endul(self):
+        self.html.write('</ul>')
+        self.laststate = self.state
+        self.state = self.stNONE
+
+    def endol(self):
+        self.html.write('</ol>')
+        self.laststate = self.state
+        self.state = self.stNONE
+
+    def formatted(self, txt, htmlbody):
+        res = ''
+        if htmlbody:
+            txt = html.escape(txt, quote=False)
+        while True:
+            index = txt.find("''")
+            if index == -1:
+                return res + txt
+            res += txt[:index]
+            txt = txt[index:]
+            if txt.startswith("'''"):
+                if htmlbody:
+                    if self.bold:
+                        res += '</b>'
+                    else:
+                        res += '<b>'
+                self.bold = not self.bold
+                txt = txt[3:]
+            else:
+                if htmlbody:
+                    if self.ital:
+                        res += '</i>'
+                    else:
+                        res += '<i>'
+                self.ital = not self.ital
+                txt = txt[2:]
+
+    def linkify(self, txt):
+        res_plain = ''
+        res_html = ''
+        while True:
+            index = txt.find("[")
+            if index == -1:
+                return (res_plain + self.formatted(txt, False), res_html + self.formatted(txt, True))
+            res_plain += self.formatted(txt[:index], False)
+            res_html += self.formatted(txt[:index], True)
+            txt = txt[index:]
+            if txt.startswith("[["):
+                index = txt.find("]]")
+                if index == -1:
+                    warn_or_exception(_("Unterminated ]]"))
+                url = txt[2:index]
+                if self.linkResolver:
+                    url, urltext = self.linkResolver(url)
+                else:
+                    urltext = url
+                res_html += '<a href="' + url + '">' + html.escape(urltext, quote=False) + '</a>'
+                res_plain += urltext
+                txt = txt[index + 2:]
+            else:
+                index = txt.find("]")
+                if index == -1:
+                    warn_or_exception(_("Unterminated ]"))
+                url = txt[1:index]
+                index2 = url.find(' ')
+                if index2 == -1:
+                    urltxt = url
+                else:
+                    urltxt = url[index2 + 1:]
+                    url = url[:index2]
+                if url == urltxt:
+                    warn_or_exception(_("URL title is just the URL, use brackets: [URL]"))
+                res_html += '<a href="' + url + '">' + html.escape(urltxt, quote=False) + '</a>'
+                res_plain += urltxt
+                if urltxt != url:
+                    res_plain += ' (' + url + ')'
+                txt = txt[index + 1:]
+
+    def addtext(self, txt):
+        p, h = self.linkify(txt)
+        self.html.write(h)
+
+    def parseline(self, line):
+        if not line:
+            self.endcur()
+        elif line.startswith('* '):
+            self.endcur([self.stUL])
+            if self.state != self.stUL:
+                self.html.write('<ul>')
+                self.state = self.stUL
+                if self.laststate != self.stNONE:
+                    self.text.write('\n\n')
+                else:
+                    self.text.write('\n')
+            self.text.write(line)
+            self.html.write('<li>')
+            self.addtext(line[1:])
+            self.html.write('</li>')
+        elif line.startswith('# '):
+            self.endcur([self.stOL])
+            if self.state != self.stOL:
+                self.html.write('<ol>')
+                self.state = self.stOL
+                if self.laststate != self.stNONE:
+                    self.text.write('\n\n')
+                else:
+                    self.text.write('\n')
+            self.text.write(line)
+            self.html.write('<li>')
+            self.addtext(line[1:])
+            self.html.write('</li>')
+        else:
+            self.para_lines.append(line)
+            self.endcur([self.stPARA])
+            if self.state == self.stNONE:
+                self.state = self.stPARA
+                if self.laststate != self.stNONE:
+                    self.text.write('\n\n')
+                self.html.write('<p>
      ') + + def end(self): + self.endcur() + self.text_txt = self.text.getvalue() + self.text_html = self.html.getvalue() + self.text.close() + self.html.close() + + +# Parse multiple lines of description as written in a metadata file, returning +# a single string in text format and wrapped to 80 columns. +def description_txt(s): + ps = DescriptionFormatter(None) + for line in s.splitlines(): + ps.parseline(line) + ps.end() + return ps.text_txt + + +# Parse multiple lines of description as written in a metadata file, returning +# a single string in wiki format. Used for the Maintainer Notes field as well, +# because it's the same format. +def description_wiki(s): + return s + + +# Parse multiple lines of description as written in a metadata file, returning +# a single string in HTML format. +def description_html(s, linkres): + ps = DescriptionFormatter(linkres) + for line in s.splitlines(): + ps.parseline(line) + ps.end() + return ps.text_html + + +def parse_srclib(metadatapath): + + thisinfo = {} + + # Defaults for fields that come from metadata + thisinfo['Repo Type'] = '' + thisinfo['Repo'] = '' + thisinfo['Subdir'] = None + thisinfo['Prepare'] = None + + if not os.path.exists(metadatapath): return thisinfo - with metadatapath.open("r", encoding="utf-8") as f: - try: - data = yaml.load(f) - if type(data) is not dict: - if platform.system() == 'Windows': - # Handle symlink on Windows - symlink = metadatapath.parent / metadatapath.read_text(encoding='utf-8') - if symlink.is_file(): - with symlink.open("r", encoding="utf-8") as s: - data = yaml.load(s) - if type(data) is not dict: - raise ruamel.yaml.YAMLError( - _('{file} is blank or corrupt!').format(file=metadatapath) - ) - except ruamel.yaml.YAMLError as e: - _warn_or_exception(_("Invalid srclib metadata: could not " - "parse '{file}'") - .format(file=metadatapath) + '\n' - + common.run_yamllint(metadatapath, indent=4), - cause=e) - return thisinfo + metafile = open(metadatapath, "r") - for key in data: - if key not in thisinfo: - _warn_or_exception( - _("Invalid srclib metadata: unknown key '{key}' in '{file}'").format( - key=key, file=metadatapath - ) - ) - return thisinfo + n = 0 + for line in metafile: + n += 1 + line = line.rstrip('\r\n') + if not line or line.startswith("#"): + continue + + try: + f, v = line.split(':', 1) + except ValueError: + warn_or_exception(_("Invalid metadata in %s:%d") % (line, n)) + + if f == "Subdir": + thisinfo[f] = v.split(',') else: - if key == 'Subdir': - if isinstance(data[key], str): - thisinfo[key] = data[key].split(',') - elif isinstance(data[key], list): - thisinfo[key] = data[key] - elif data[key] is None: - thisinfo[key] = [''] - elif key == 'Prepare' or flagtype(key) == TYPE_SCRIPT: - if isinstance(data[key], list): - thisinfo[key] = data[key] - else: - thisinfo[key] = [data[key]] if data[key] else [] - else: - thisinfo[key] = str(data[key] or '') + thisinfo[f] = v + + metafile.close() return thisinfo @@ -524,7 +753,7 @@ def read_srclibs(): The information read will be accessible as metadata.srclibs, which is a dictionary, keyed on srclib name, with the values each being a dictionary - in the same format as that returned by the parse_yaml_srclib function. + in the same format as that returned by the parse_srclib function. A MetaDataException is raised if there are any problems with the srclib metadata. 
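As a concrete illustration of the colon-separated srclib format that parse_srclib above reads, a hedged sketch (the file name, URL, and field values are invented; only the parsing behavior shown in the code is assumed):

# Suppose srclibs/Example.txt contains:
#   Repo Type:git
#   Repo:https://example.com/upstream/library.git
#   Subdir:library,library/jni
info = parse_srclib('srclibs/Example.txt')
# info == {'Repo Type': 'git',
#          'Repo': 'https://example.com/upstream/library.git',
#          'Subdir': ['library', 'library/jni'],   # split on commas
#          'Prepare': None}                        # default, not set in the file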
@@ -537,24 +766,32 @@ def read_srclibs(): srclibs = {} - srclibs_dir = Path('srclibs') - srclibs_dir.mkdir(exist_ok=True) + srcdir = 'srclibs' + if not os.path.exists(srcdir): + os.makedirs(srcdir) - for metadatapath in sorted(srclibs_dir.glob('*.yml')): - srclibs[metadatapath.stem] = parse_yaml_srclib(metadatapath) + for metadatapath in sorted(glob.glob(os.path.join(srcdir, '*.txt'))): + srclibname = os.path.basename(metadatapath[:-4]) + srclibs[srclibname] = parse_srclib(metadatapath) -def read_metadata(appid_to_vercode={}, sort_by_time=False): - """Return a list of App instances sorted newest first. +def read_metadata(xref=True, check_vcs=[], refresh=True, sort_by_time=False): + """Return a list of App instances sorted newest first This reads all of the metadata files in a 'data' repository, then builds a list of App instances from those files. The list is sorted based on creation time, newest first. Most of the time, the newer files are the most interesting. - appid_to_vercode is a dict with appids a keys and versionCodes as values. + If there are multiple metadata files for a single appid, then the first + file that is parsed wins over all the others, and the rest throw an + exception. So the original .txt format is parsed first, at least until + newer formats stabilize. + + check_vcs is the list of appids to check for .fdroid.yml in source """ + # Always read the srclibs before the apps, since they can use a srlib as # their source repository. read_srclibs() @@ -562,17 +799,18 @@ def read_metadata(appid_to_vercode={}, sort_by_time=False): apps = OrderedDict() for basedir in ('metadata', 'tmp'): - Path(basedir).mkdir(exist_ok=True) + if not os.path.exists(basedir): + os.makedirs(basedir) - if appid_to_vercode: - metadatafiles = common.get_metadata_files(appid_to_vercode) - else: - metadatafiles = list(Path('metadata').glob('*.yml')) + list( - Path('.').glob('.fdroid.yml') - ) + metadatafiles = (glob.glob(os.path.join('metadata', '*.txt')) + + glob.glob(os.path.join('metadata', '*.json')) + + glob.glob(os.path.join('metadata', '*.yml')) + + glob.glob('.fdroid.txt') + + glob.glob('.fdroid.json') + + glob.glob('.fdroid.yml')) if sort_by_time: - entries = ((path.stat().st_mtime, path) for path in metadatafiles) + entries = ((os.stat(path).st_mtime, path) for path in metadatafiles) metadatafiles = [] for _ignored, path in sorted(entries, reverse=True): metadatafiles.append(path) @@ -581,704 +819,804 @@ def read_metadata(appid_to_vercode={}, sort_by_time=False): metadatafiles = sorted(metadatafiles) for metadatapath in metadatafiles: - appid = metadatapath.stem - if appid != '.fdroid' and not common.is_valid_package_name(appid): - _warn_or_exception( - _("{appid} from {path} is not a valid Java Package Name!").format( - appid=appid, path=metadatapath - ) - ) + if metadatapath == '.fdroid.txt': + warn_or_exception(_('.fdroid.txt is not supported! 
Convert to .fdroid.yml or .fdroid.json.')) + appid, _ignored = fdroidserver.common.get_extension(os.path.basename(metadatapath)) + if appid != '.fdroid' and not fdroidserver.common.is_valid_package_name(appid): + warn_or_exception(_("{appid} from {path} is not a valid Java Package Name!") + .format(appid=appid, path=metadatapath)) if appid in apps: - _warn_or_exception( - _("Found multiple metadata files for {appid}").format(appid=appid) - ) - app = parse_metadata(metadatapath) + warn_or_exception(_("Found multiple metadata files for {appid}") + .format(appid=appid)) + app = parse_metadata(metadatapath, appid in check_vcs, refresh) check_metadata(app) apps[app.id] = app + if xref: + # Parse all descriptions at load time, just to ensure cross-referencing + # errors are caught early rather than when they hit the build server. + def linkres(appid): + if appid in apps: + return ("fdroid.app:" + appid, "Dummy name - don't know yet") + warn_or_exception(_("Cannot resolve app id {appid}").format(appid=appid)) + + for appid, app in apps.items(): + try: + description_html(app.Description, linkres) + except MetaDataException as e: + warn_or_exception(_("Problem with description of {appid}: {error}") + .format(appid=appid, error=str(e))) + return apps -def parse_metadata(metadatapath): - """Parse metadata file, also checking the source repo for .fdroid.yml. +# Port legacy ';' separators +list_sep = re.compile(r'[,;]') - This function finds the relevant files, gets them parsed, converts - dicts into App and Build instances, and combines the results into - a single App instance. - If this is a metadata file from fdroiddata, it will first load the - source repo type and URL from fdroiddata, then read .fdroid.yml if - it exists, then include the rest of the metadata as specified in - fdroiddata, so that fdroiddata has precedence over the metadata in - the source code. +def split_list_values(s): + res = [] + for v in re.split(list_sep, s): + if not v: + continue + v = v.strip() + if not v: + continue + res.append(v) + return res - .fdroid.yml is embedded in the app's source repo, so it is - "user-generated". That means that it can have weird things in it - that need to be removed so they don't break the overall process, - e.g. if the upstream developer includes some broken field, it can - be overridden in the metadata file. - Parameters - ---------- - metadatapath - The file path to read. The "Application ID" aka "Package Name" - for the application comes from this filename. - - Raises - ------ - FDroidException when there are syntax errors. - - Returns - ------- - Returns a dictionary containing all the details of the - application. There are two major kinds of information in the - dictionary. Keys beginning with capital letters correspond - directory to identically named keys in the metadata file. Keys - beginning with lower case letters are generated in one way or - another, and are not found verbatim in the metadata. 
- - """ - metadatapath = Path(metadatapath) - app = App() - app.metadatapath = metadatapath.as_posix() - if metadatapath.suffix == '.yml': - with metadatapath.open('r', encoding='utf-8') as mf: - app.update(parse_yaml_metadata(mf)) +def get_default_app_info(metadatapath=None): + if metadatapath is None: + appid = None else: - _warn_or_exception( - _('Unknown metadata format: {path} (use: *.yml)').format(path=metadatapath) - ) + appid, _ignored = fdroidserver.common.get_extension(os.path.basename(metadatapath)) - if metadatapath.stem != '.fdroid': - app.id = metadatapath.stem - parse_localized_antifeatures(app) + if appid == '.fdroid': # we have local metadata in the app's source + if os.path.exists('AndroidManifest.xml'): + manifestroot = fdroidserver.common.parse_xml('AndroidManifest.xml') + else: + pattern = re.compile(r""".*manifest\.srcFile\s+'AndroidManifest\.xml'.*""") + for root, dirs, files in os.walk(os.getcwd()): + if 'build.gradle' in files: + p = os.path.join(root, 'build.gradle') + with open(p, 'rb') as f: + data = f.read() + m = pattern.search(data) + if m: + logging.debug('Using: ' + os.path.join(root, 'AndroidManifest.xml')) + manifestroot = fdroidserver.common.parse_xml(os.path.join(root, 'AndroidManifest.xml')) + break + if manifestroot is None: + warn_or_exception(_("Cannot find an appid for {path}!") + .format(path=metadatapath)) + appid = manifestroot.attrib['package'] - if metadatapath.name != '.fdroid.yml' and app.Repo: - build_dir = common.get_build_dir(app) - metadata_in_repo = build_dir / '.fdroid.yml' - if metadata_in_repo.is_file(): - commit_id = common.get_head_commit_id(build_dir) - if commit_id is not None: - logging.debug( - _('Including metadata from %s@%s') % (metadata_in_repo, commit_id) - ) - else: - logging.debug( - _('Including metadata from {path}').format(path=metadata_in_repo) - ) + app = App() + app.metadatapath = metadatapath + if appid is not None: + app.id = appid + + return app + + +def sorted_builds(builds): + return sorted(builds, key=lambda build: int(build.versionCode)) + + +esc_newlines = re.compile(r'\\( |\n)') + + +def post_metadata_parse(app): + # TODO keep native types, convert only for .txt metadata + for k, v in app.items(): + if type(v) in (float, int): + app[k] = str(v) + + if 'Builds' in app: + app['builds'] = app.pop('Builds') + + if 'flavours' in app and app['flavours'] == [True]: + app['flavours'] = 'yes' + + for field, fieldtype in fieldtypes.items(): + if fieldtype != TYPE_LIST: + continue + value = app.get(field) + if isinstance(value, str): + app[field] = [value, ] + elif value is not None: + app[field] = [str(i) for i in value] + + def _yaml_bool_unmapable(v): + return v in (True, False, [True], [False]) + + def _yaml_bool_unmap(v): + if v is True: + return 'yes' + elif v is False: + return 'no' + elif v == [True]: + return ['yes'] + elif v == [False]: + return ['no'] + + _bool_allowed = ('maven', 'buildozer') + + builds = [] + if 'builds' in app: + for build in app['builds']: + if not isinstance(build, Build): + build = Build(build) + for k, v in build.items(): + if not (v is None): + if flagtype(k) == TYPE_LIST: + if _yaml_bool_unmapable(v): + build[k] = _yaml_bool_unmap(v) + + if isinstance(v, str): + build[k] = [v] + elif isinstance(v, bool): + if v: + build[k] = ['yes'] + else: + build[k] = [] + elif flagtype(k) is TYPE_INT: + build[k] = str(v) + elif flagtype(k) is TYPE_STRING: + if isinstance(v, bool) and k in _bool_allowed: + build[k] = v + else: + if _yaml_bool_unmapable(v): + build[k] = _yaml_bool_unmap(v) + 
else: + build[k] = str(v) + builds.append(build) + + app.builds = sorted_builds(builds) + + +# Parse metadata for a single application. +# +# 'metadatapath' - the filename to read. The "Application ID" aka +# "Package Name" for the application comes from this +# filename. Pass None to get a blank entry. +# +# Returns a dictionary containing all the details of the application. There are +# two major kinds of information in the dictionary. Keys beginning with capital +# letters correspond directory to identically named keys in the metadata file. +# Keys beginning with lower case letters are generated in one way or another, +# and are not found verbatim in the metadata. +# +# Known keys not originating from the metadata are: +# +# 'builds' - a list of dictionaries containing build information +# for each defined build +# 'comments' - a list of comments from the metadata file. Each is +# a list of the form [field, comment] where field is +# the name of the field it preceded in the metadata +# file. Where field is None, the comment goes at the +# end of the file. Alternatively, 'build:version' is +# for a comment before a particular build version. +# 'descriptionlines' - original lines of description as formatted in the +# metadata file. +# + + +bool_true = re.compile(r'([Yy]es|[Tt]rue)') +bool_false = re.compile(r'([Nn]o|[Ff]alse)') + + +def _decode_bool(s): + if bool_true.match(s): + return True + if bool_false.match(s): + return False + warn_or_exception(_("Invalid boolean '%s'") % s) + + +def parse_metadata(metadatapath, check_vcs=False, refresh=True): + '''parse metadata file, optionally checking the git repo for metadata first''' + + _ignored, ext = fdroidserver.common.get_extension(metadatapath) + accepted = fdroidserver.common.config['accepted_formats'] + if ext not in accepted: + warn_or_exception(_('"{path}" is not an accepted format, convert to: {formats}') + .format(path=metadatapath, formats=', '.join(accepted))) + + app = App() + app.metadatapath = metadatapath + name, _ignored = fdroidserver.common.get_extension(os.path.basename(metadatapath)) + if name == '.fdroid': + check_vcs = False + else: + app.id = name + + with open(metadatapath, 'r') as mf: + if ext == 'txt': + parse_txt_metadata(mf, app) + elif ext == 'json': + parse_json_metadata(mf, app) + elif ext == 'yml': + parse_yaml_metadata(mf, app) + else: + warn_or_exception(_('Unknown metadata format: {path}') + .format(path=metadatapath)) + + if check_vcs and app.Repo: + build_dir = fdroidserver.common.get_build_dir(app) + metadata_in_repo = os.path.join(build_dir, '.fdroid.yml') + if not os.path.isfile(metadata_in_repo): + vcs, build_dir = fdroidserver.common.setup_vcs(app) + if isinstance(vcs, fdroidserver.common.vcs_git): + vcs.gotorevision('HEAD', refresh) # HEAD since we can't know where else to go + if os.path.isfile(metadata_in_repo): + logging.debug('Including metadata from ' + metadata_in_repo) + # do not include fields already provided by main metadata file app_in_repo = parse_metadata(metadata_in_repo) for k, v in app_in_repo.items(): if k not in app: app[k] = v - builds = [] - for build in app.get('Builds', []): - builds.append(Build(build)) - if builds: - app['Builds'] = builds + post_metadata_parse(app) - # if only .fdroid.yml was found, then this finds the appid if not app.id: - if app.get('Builds'): - build = app['Builds'][-1] + if app.builds: + build = app.builds[-1] if build.subdir: - root_dir = Path(build.subdir) + root_dir = build.subdir else: - root_dir = Path('.') - paths = 
common.manifest_paths(root_dir, build.gradle) - _ignored, _ignored, app.id = common.parse_androidmanifests(paths, app) + root_dir = '.' + paths = fdroidserver.common.manifest_paths(root_dir, build.gradle) + _ignored, _ignored, app.id = fdroidserver.common.parse_androidmanifests(paths, app) return app -def parse_yaml_metadata(mf): - """Parse the .yml file and post-process it. +def parse_json_metadata(mf, app): - This function handles parsing a metadata YAML file and converting - all the various data types into a consistent internal - representation. The results are meant to update an existing App - instance or used as a plain dict. - - Clean metadata .yml files can be used directly, but in order to - make a better user experience for people editing .yml files, there - is post processing. That makes the parsing perform something like - Strict YAML. - - """ - try: - yamldata = common.yaml.load(mf) - except ruamel.yaml.YAMLError as e: - _warn_or_exception( - _("could not parse '{path}'").format(path=mf.name) - + '\n' - + common.run_yamllint(mf.name, indent=4), - cause=e, - ) - - if yamldata is None or yamldata == '': - yamldata = dict() - if not isinstance(yamldata, dict): - _warn_or_exception( - _("'{path}' has invalid format, it should be a dictionary!").format( - path=mf.name - ) - ) - logging.error(_('Using blank dictionary instead of contents of {path}!').format( - path=mf.name) - ) - yamldata = dict() - - deprecated_in_yaml = ['Provides'] - - for field in tuple(yamldata.keys()): - if field not in yaml_app_fields + deprecated_in_yaml: - msg = _("Unrecognised app field '{fieldname}' in '{path}'").format( - fieldname=field, path=mf.name - ) - if Path(mf.name).name == '.fdroid.yml': - logging.error(msg) - del yamldata[field] - else: - _warn_or_exception(msg) - - for deprecated_field in deprecated_in_yaml: - if deprecated_field in yamldata: - del yamldata[deprecated_field] - logging.warning( - _( - "Ignoring '{field}' in '{metapath}' " - "metadata because it is deprecated." - ).format(field=deprecated_field, metapath=mf.name) - ) - - msg = _("Unrecognised build flag '{build_flag}' in '{path}'") - for build in yamldata.get('Builds', []): - for build_flag in build: - if build_flag not in build_flags: - _warn_or_exception(msg.format(build_flag=build_flag, path=mf.name)) - - post_parse_yaml_metadata(yamldata) - return yamldata - - -def parse_localized_antifeatures(app): - """Read in localized Anti-Features files from the filesystem. - - To support easy integration with Weblate and other translation - systems, there is a special type of metadata that can be - maintained in a Fastlane-style directory layout, where each field - is represented by a text file on directories that specified which - app it belongs to, which locale, etc. This function reads those - in and puts them into the internal dict, to be merged with any - related data that came from the metadata.yml file. - - This needs to be run after parse_yaml_metadata() since that - normalizes the data structure. Also, these values are lower - priority than what comes from the metadata file. So this should - not overwrite anything parse_yaml_metadata() puts into the App - instance. 
- - metadata///antifeatures/_.txt - metadata///antifeatures/.txt - - └── metadata/ - └── / - ├── en-US/ - │ └── antifeatures/ - │ ├── 123_Ads.txt -> "includes ad lib" - │ ├── 123_Tracking.txt -> "standard suspects" - │ └── NoSourceSince.txt -> "it vanished" - │ - └── zh-CN/ - └── antifeatures/ - └── 123_Ads.txt -> "包括广告库" - - Gets parsed into the metadata data structure: - - AntiFeatures: - NoSourceSince: - en-US: it vanished - Builds: - - versionCode: 123 - antifeatures: - Ads: - en-US: includes ad lib - zh-CN: 包括广告库 - Tracking: - en-US: standard suspects - - """ - app_dir = Path('metadata', app['id']) - if not app_dir.is_dir(): - return - af_dup_msg = _('Duplicate Anti-Feature declaration at {path} was ignored!') - - if app.get('AntiFeatures'): - app_has_AntiFeatures = True - else: - app_has_AntiFeatures = False - - has_versionCode = re.compile(r'^-?[0-9]+_.*') - has_antifeatures_from_app = set() - for build in app.get('Builds', []): - antifeatures = build.get('antifeatures') - if antifeatures: - has_antifeatures_from_app.add(build['versionCode']) - - for f in sorted(app_dir.glob('*/antifeatures/*.txt')): - path = f.as_posix() - left = path.index('/', 9) # 9 is length of "metadata/" - right = path.index('/', left + 1) - locale = path[left + 1 : right] - description = f.read_text() - if has_versionCode.match(f.stem): - i = f.stem.index('_') - versionCode = int(f.stem[:i]) - antifeature = f.stem[i + 1 :] - if versionCode in has_antifeatures_from_app: - logging.error(af_dup_msg.format(path=f)) - continue - if 'Builds' not in app: - app['Builds'] = [] - found = False - for build in app['Builds']: - # loop though builds again, there might be duplicate versionCodes - if versionCode == build['versionCode']: - found = True - if 'antifeatures' not in build: - build['antifeatures'] = dict() - if antifeature not in build['antifeatures']: - build['antifeatures'][antifeature] = dict() - build['antifeatures'][antifeature][locale] = description - if not found: - app['Builds'].append( - { - 'versionCode': versionCode, - 'antifeatures': { - antifeature: {locale: description}, - }, - } - ) - elif app_has_AntiFeatures: - logging.error(af_dup_msg.format(path=f)) - continue - else: - if 'AntiFeatures' not in app: - app['AntiFeatures'] = dict() - if f.stem not in app['AntiFeatures']: - app['AntiFeatures'][f.stem] = dict() - app['AntiFeatures'][f.stem][locale] = f.read_text() - - -def _normalize_type_int(k, v): - """Normalize anything that can be reliably converted to an integer.""" - if isinstance(v, int) and not isinstance(v, bool): - return v - if v is None: - return None - if isinstance(v, str): - try: - return int(v) - except ValueError: - pass - msg = _('{build_flag} must be an integer, found: {value}') - _warn_or_exception(msg.format(build_flag=k, value=v)) - - -def _normalize_type_string(v): - """Normalize any data to TYPE_STRING. - - YAML 1.2's booleans are all lowercase. - - Things like versionName are strings, but without quotes can be - numbers. Like "versionName: 1.0" would be a YAML float, but - should be a string. - - SHA-256 values are string values, but YAML 1.2 can interpret some - unquoted values as decimal ints. This converts those to a string - if they are over 50 digits. In the wild, the longest 0 padding on - a SHA-256 key fingerprint I found was 8 zeros. - - """ - if isinstance(v, bool): + # fdroid metadata is only strings and booleans, no floats or ints. 
+ # TODO create schema using https://pypi.python.org/pypi/jsonschema + jsoninfo = json.load(mf, parse_int=lambda s: s, + parse_float=lambda s: s) + app.update(jsoninfo) + for f in ['Description', 'Maintainer Notes']: + v = app.get(f) if v: - return 'true' - return 'false' - if isinstance(v, float): - # YAML 1.2 values for NaN, Inf, and -Inf - if math.isnan(v): - return '.nan' - if math.isinf(v): - if v > 0: - return '.inf' - return '-.inf' - if v and isinstance(v, int): - if math.log10(v) > 50: # only if the int has this many digits - return '%064d' % v - return str(v) + app[f] = '\n'.join(v) + return app -def _normalize_type_stringmap(k, v): - """Normalize any data to TYPE_STRINGMAP. +def parse_yaml_metadata(mf, app): + yamldata = yaml.safe_load(mf) - The internal representation of this format is a dict of dicts, - where the outer dict's keys are things like tag names of - Anti-Features, the inner dict's keys are locales, and the ultimate - values are human readable text. - - Metadata entries like AntiFeatures: can be written in many - forms, including a simple one-entry string, a list of strings, - a dict with keys and descriptions as values, or a dict with - localization. - - Returns - ------- - A dictionary with string keys, where each value is either a string - message or a dict with locale keys and string message values. - - """ - if v is None: - return dict() - if isinstance(v, str) or isinstance(v, int) or isinstance(v, float): - return {_normalize_type_string(v): dict()} - if isinstance(v, list) or isinstance(v, tuple) or isinstance(v, set): - retdict = dict() - for i in v: - if isinstance(i, dict): - # transitional format - if len(i) != 1: - _warn_or_exception( - _( - "'{value}' is not a valid {field}, should be {pattern}" - ).format(field=k, value=v, pattern='key: value') - ) - afname = _normalize_type_string(next(iter(i))) - desc = _normalize_type_string(next(iter(i.values()))) - retdict[afname] = {common.DEFAULT_LOCALE: desc} - else: - retdict[_normalize_type_string(i)] = {} - return retdict - - retdict = dict() - for af, afdict in v.items(): - key = _normalize_type_string(af) - if afdict: - if isinstance(afdict, dict): - retdict[key] = afdict - else: - retdict[key] = {common.DEFAULT_LOCALE: _normalize_type_string(afdict)} - else: - retdict[key] = dict() - - return retdict - - -def _normalize_type_list(k, v): - """Normalize any data to TYPE_LIST, which is always a list of strings.""" - if isinstance(v, dict): - msg = _('{build_flag} must be list or string, found: {value}') - _warn_or_exception(msg.format(build_flag=k, value=v)) - elif type(v) not in (list, tuple, set): - v = [v] - return [_normalize_type_string(i) for i in v] + if yamldata: + for field in yamldata: + if field not in yaml_app_fields: + warn_or_exception(_("Unrecognised app field '{fieldname}' " + "in '{path}'").format(fieldname=field, + path=mf.name)) + if yamldata.get('Builds', None): + for build in yamldata.get('Builds', []): + # put all build flag keywords into a set to avoid + # excessive looping action + build_flag_set = set() + for build_flag in build.keys(): + build_flag_set.add(build_flag) + for build_flag in build_flag_set: + if build_flag not in build_flags: + warn_or_exception( + _("Unrecognised build flag '{build_flag}' " + "in '{path}'").format(build_flag=build_flag, + path=mf.name)) + post_parse_yaml_metadata(yamldata) + app.update(yamldata) + return app def post_parse_yaml_metadata(yamldata): - """Convert human-readable metadata data structures into consistent data structures. 
- - "Be conservative in what is written out, be liberal in what is parsed." - https://en.wikipedia.org/wiki/Robustness_principle - - This also handles conversions that make metadata YAML behave - something like StrictYAML. Specifically, a field should have a - fixed value type, regardless of YAML 1.2's type auto-detection. - - TODO: None values should probably be treated as the string 'null', - since YAML 1.2 uses that for nulls - - """ - for k, v in yamldata.items(): - _fieldtype = fieldtype(k) - if _fieldtype == TYPE_LIST: - if v or v == 0: - yamldata[k] = _normalize_type_list(k, v) - elif _fieldtype == TYPE_INT: - # ArchivePolicy used to require " versions" in the value. - if k == 'ArchivePolicy' and isinstance(v, str): - v = v.split(' ', maxsplit=1)[0] - v = _normalize_type_int(k, v) - if v or v == 0: - yamldata[k] = v - elif _fieldtype == TYPE_STRING: - if v or v == 0: - yamldata[k] = _normalize_type_string(v) - elif _fieldtype == TYPE_STRINGMAP: - if v or v == 0: # TODO probably want just `if v:` - yamldata[k] = _normalize_type_stringmap(k, v) - elif _fieldtype == TYPE_BOOL: - yamldata[k] = bool(v) - else: - if type(v) in (float, int): - yamldata[k] = str(v) - - builds = [] + """transform yaml metadata to our internal data format""" for build in yamldata.get('Builds', []): - for k, v in build.items(): - if v is None: - continue + for flag in build.keys(): + _flagtype = flagtype(flag) - _flagtype = flagtype(k) - if _flagtype == TYPE_STRING: - if v or v == 0: - build[k] = _normalize_type_string(v) - elif _flagtype == TYPE_INT: - v = _normalize_type_int(k, v) - if v or v == 0: - build[k] = v - elif _flagtype in (TYPE_LIST, TYPE_SCRIPT): - if v or v == 0: - build[k] = _normalize_type_list(k, v) - elif _flagtype == TYPE_STRINGMAP: - if v or v == 0: - build[k] = _normalize_type_stringmap(k, v) - elif _flagtype == TYPE_BOOL: - build[k] = bool(v) - - builds.append(build) - - if builds: - yamldata['Builds'] = sorted(builds, key=lambda build: build['versionCode']) - - no_source_since = yamldata.get("NoSourceSince") - # do not overwrite the description if it is there - if no_source_since and not yamldata.get('AntiFeatures', {}).get('NoSourceSince'): - if 'AntiFeatures' not in yamldata: - yamldata['AntiFeatures'] = dict() - yamldata['AntiFeatures']['NoSourceSince'] = { - common.DEFAULT_LOCALE: no_source_since - } - - -def _format_multiline(value): - """TYPE_MULTILINE with newlines in them are saved as YAML literal strings.""" - if '\n' in value: - return ruamel.yaml.scalarstring.preserve_literal(str(value)) - return str(value) - - -def _format_list(value): - """TYPE_LIST should not contain null values.""" - return [v for v in value if v] - - -def _format_script(value): - """TYPE_SCRIPT with one value are converted to YAML string values.""" - value = [v for v in value if v] - if len(value) == 1: - return value[0] - return value - - -def _format_stringmap(appid, field, stringmap, versionCode=None): - """Format TYPE_STRINGMAP taking into account localized files in the metadata dir. - - If there are any localized versions on the filesystem already, - then move them all there. Otherwise, keep them in the .yml file. - - The directory for the localized files that is named after the - field is all lower case, following the convention set by Fastlane - metadata, and used by fdroidserver. 
- - """ - app_dir = Path('metadata', appid) - try: - next(app_dir.glob('*/%s/*.txt' % field.lower())) - files = [] - overwrites = [] - for name, descdict in stringmap.items(): - for locale, desc in descdict.items(): - outdir = app_dir / locale / field.lower() - if versionCode: - filename = '%d_%s.txt' % (versionCode, name) - else: - filename = '%s.txt' % name - outfile = outdir / filename - files.append(str(outfile)) - if outfile.exists(): - if desc != outfile.read_text(): - overwrites.append(str(outfile)) - else: - if not outfile.parent.exists(): - outfile.parent.mkdir(parents=True) - outfile.write_text(desc) - if overwrites: - _warn_or_exception( - _( - 'Conflicting "{field}" definitions between .yml and localized files:' - ).format(field=field) - + '\n' - + '\n'.join(sorted(overwrites)) - ) - logging.warning( - _('Moving Anti-Features declarations to localized files:') - + '\n' - + '\n'.join(sorted(files)) - ) - return - except StopIteration: - pass - make_list = True - outlist = [] - for name in sorted(stringmap): - outlist.append(name) - descdict = stringmap.get(name) - if descdict and any(descdict.values()): - make_list = False - break - if make_list: - return sorted(outlist, key=str.lower) - return stringmap - - -def _del_duplicated_NoSourceSince(app): - # noqa: D403 NoSourceSince is the word. - """NoSourceSince gets auto-added to AntiFeatures, but can also be manually added.""" - key = 'NoSourceSince' - if key in app: - no_source_since = app.get(key) - af_no_source_since = app.get('AntiFeatures', dict()).get(key) - if af_no_source_since == {common.DEFAULT_LOCALE: no_source_since}: - del app['AntiFeatures'][key] - - -def _builds_to_yaml(app): - """Reformat Builds: flags for output to YAML 1.2. - - This will strip any flag/value that is not set or is empty. - TYPE_BOOL fields are removed when they are false. 0 is valid - value, it should not be stripped, so there are special cases to - handle that. 
- - """ - builds = ruamel.yaml.comments.CommentedSeq() - for build in app.get('Builds', []): - b = ruamel.yaml.comments.CommentedMap() - for field in build_flags: - v = build.get(field) - if v is None or v is False or v == '' or v == dict() or v == list(): - continue - _flagtype = flagtype(field) - if _flagtype == TYPE_MULTILINE: - v = _format_multiline(v) - elif _flagtype == TYPE_LIST: - v = _format_list(v) - elif _flagtype == TYPE_SCRIPT: - v = _format_script(v) - elif _flagtype == TYPE_STRINGMAP: - v = _format_stringmap(app['id'], field, v, build['versionCode']) - - if v or v == 0: - b[field] = v - - builds.append(b) - - # insert extra empty lines between build entries - for i in range(1, len(builds)): - builds.yaml_set_comment_before_after_key(i, 'bogus') - builds.ca.items[i][1][-1].value = '\n' - - return builds - - -def _app_to_yaml(app): - cm = ruamel.yaml.comments.CommentedMap() - insert_newline = False - for field in yaml_app_field_order: - if field == '\n': - # next iteration will need to insert a newline - insert_newline = True - else: - value = app.get(field) - if value or field in ('Builds', 'ArchivePolicy'): - _fieldtype = fieldtype(field) - if field == 'Builds': - if app.get('Builds'): - cm.update({field: _builds_to_yaml(app)}) - elif field == 'Categories': - cm[field] = sorted(value, key=str.lower) - elif field == 'AntiFeatures': - v = _format_stringmap(app['id'], field, value) - if v: - cm[field] = v - elif field == 'AllowedAPKSigningKeys': - value = [str(i).lower() for i in value] - if len(value) == 1: - cm[field] = value[0] - else: - cm[field] = value - elif field == 'ArchivePolicy': - if value is None: - continue - cm[field] = value - elif _fieldtype == TYPE_MULTILINE: - v = _format_multiline(value) - if v: - cm[field] = v - elif _fieldtype == TYPE_SCRIPT: - v = _format_script(value) - if v: - cm[field] = v - else: - if value: - cm[field] = value - - if insert_newline: - # we need to prepend a newline in front of this field - insert_newline = False - # inserting empty lines is not supported so we add a - # bogus comment and over-write its value - cm.yaml_set_comment_before_after_key(field, 'bogus') - cm.ca.items[field][1][-1].value = '\n' - return cm + # concatenate script flags into a single string if they are stored as list + if _flagtype is TYPE_SCRIPT: + if isinstance(build[flag], list): + build[flag] = ' && '.join(build[flag]) def write_yaml(mf, app): """Write metadata in yaml format. - This requires the 'rt' round trip dumper to maintain order and needs - custom indent settings, so it needs to instantiate its own YAML - instance. Therefore, this function deliberately avoids using any of - the common YAML parser setups. - - Parameters - ---------- - mf - active file discriptor for writing - app - app metadata to written to the YAML file - + :param mf: active file discriptor for writing + :param app: app metadata to written to the yaml file """ - _del_duplicated_NoSourceSince(app) + + # import rumael.yaml and check version + try: + import ruamel.yaml + except ImportError as e: + raise FDroidException('ruamel.yaml not instlled, can not write metadata.') from e + if not ruamel.yaml.__version__: + raise FDroidException('ruamel.yaml.__version__ not accessible. 
Please make sure a ruamel.yaml >= 0.13 is installed.')
+    m = re.match(r'(?P<major>[0-9]+)\.(?P<minor>[0-9]+)\.(?P<patch>[0-9]+)(-.+)?',
+                 ruamel.yaml.__version__)
+    if not m:
+        raise FDroidException('ruamel.yaml version malformed, please install an upstream version of ruamel.yaml')
+    if int(m.group('major')) < 0 or int(m.group('minor')) < 13:
+        raise FDroidException('currently installed version of ruamel.yaml ({}) is too old, >= 0.13 required.'.format(ruamel.yaml.__version__))
+    # suitable version of ruamel.yaml imported successfully
+
+    _yaml_bools_true = ('y', 'Y', 'yes', 'Yes', 'YES',
+                        'true', 'True', 'TRUE',
+                        'on', 'On', 'ON')
+    _yaml_bools_false = ('n', 'N', 'no', 'No', 'NO',
+                         'false', 'False', 'FALSE',
+                         'off', 'Off', 'OFF')
+    _yaml_bools_plus_lists = []
+    _yaml_bools_plus_lists.extend(_yaml_bools_true)
+    _yaml_bools_plus_lists.extend([[x] for x in _yaml_bools_true])
+    _yaml_bools_plus_lists.extend(_yaml_bools_false)
+    _yaml_bools_plus_lists.extend([[x] for x in _yaml_bools_false])
+
+    def _class_as_dict_representer(dumper, data):
+        '''Creates a YAML representation of an App/Build instance'''
+        return dumper.represent_dict(data)
+
+    def _field_to_yaml(typ, value):
+        if typ is TYPE_STRING:
+            if value in _yaml_bools_plus_lists:
+                return ruamel.yaml.scalarstring.SingleQuotedScalarString(str(value))
+            return str(value)
+        elif typ is TYPE_INT:
+            return int(value)
+        elif typ is TYPE_MULTILINE:
+            if '\n' in value:
+                return ruamel.yaml.scalarstring.preserve_literal(str(value))
+            else:
+                return str(value)
+        elif typ is TYPE_SCRIPT:
+            if type(value) == list:
+                if len(value) == 1:
+                    return value[0]
+                else:
+                    return value
+            else:
+                script_lines = value.split(' && ')
+                if len(script_lines) > 1:
+                    return script_lines
+                else:
+                    return value
+        else:
+            return value
+
+    def _app_to_yaml(app):
+        cm = ruamel.yaml.comments.CommentedMap()
+        insert_newline = False
+        for field in yaml_app_field_order:
+            if field == '\n':
+                # next iteration will need to insert a newline
+                insert_newline = True
+            else:
+                if app.get(field) or field == 'Builds':
+                    # .txt calls it 'builds' internally, everywhere else it's 'Builds'
+                    if field == 'Builds':
+                        if app.get('builds'):
+                            cm.update({field: _builds_to_yaml(app)})
+                    elif field == 'CurrentVersionCode':
+                        cm.update({field: _field_to_yaml(TYPE_INT, getattr(app, field))})
+                    else:
+                        cm.update({field: _field_to_yaml(fieldtype(field), getattr(app, field))})
+
+                    if insert_newline:
+                        # we need to prepend a newline in front of this field
+                        insert_newline = False
+                        # inserting empty lines is not supported so we add a
+                        # bogus comment and over-write its value
+                        cm.yaml_set_comment_before_after_key(field, 'bogus')
+                        cm.ca.items[field][1][-1].value = '\n'
+        return cm
+
+    def _builds_to_yaml(app):
+        builds = ruamel.yaml.comments.CommentedSeq()
+        for build in app.builds:
+            b = ruamel.yaml.comments.CommentedMap()
+            for field in build_flags:
+                value = getattr(build, field)
+                if hasattr(build, field) and value:
+                    if field == 'gradle' and value == ['off']:
+                        value = [ruamel.yaml.scalarstring.SingleQuotedScalarString('off')]
+                    if field in ('maven', 'buildozer'):
+                        if value == 'no':
+                            continue
+                        elif value == 'yes':
+                            value = 'yes'
+                    b.update({field: _field_to_yaml(flagtype(field), value)})
+            builds.append(b)
+
+        # insert extra empty lines between build entries
+        for i in range(1, len(builds)):
+            builds.yaml_set_comment_before_after_key(i, 'bogus')
+            builds.ca.items[i][1][-1].value = '\n'
+
+        return builds
+
     yaml_app = _app_to_yaml(app)
-    yamlmf = ruamel.yaml.YAML(typ='rt')
-    yamlmf.indent(mapping=2, sequence=4, 
offset=2) - yamlmf.dump(yaml_app, stream=mf) + ruamel.yaml.round_trip_dump(yaml_app, mf, indent=4, block_seq_indent=2) + + +build_line_sep = re.compile(r'(? -# Copyright (C) 2022 FC Stegerman # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -17,170 +16,48 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import copy -import logging import os -import random -import tempfile -import time -import urllib import requests -import urllib3 -from requests.adapters import HTTPAdapter, Retry - -from . import _, common - -logger = logging.getLogger(__name__) - -HEADERS = {'User-Agent': 'F-Droid'} -def download_file(url, local_filename=None, dldir='tmp', retries=3, backoff_factor=0.1): - """Try hard to download the file, including retrying on failures. - - This has two retry cycles, one inside of the requests session, the - other provided by this function. The requests retry logic applies - to failed DNS lookups, socket connections and connection timeouts, - never to requests where data has made it to the server. This - handles ChunkedEncodingError during transfer in its own retry - loop. This can result in more retries than are specified in the - retries parameter. - - """ - filename = urllib.parse.urlparse(url).path.split('/')[-1] +def download_file(url, local_filename=None, dldir='tmp'): + filename = url.split('/')[-1] if local_filename is None: local_filename = os.path.join(dldir, filename) - for i in range(retries + 1): - if retries: - max_retries = Retry(total=retries - i, backoff_factor=backoff_factor) - adapter = HTTPAdapter(max_retries=max_retries) - session = requests.Session() - session.mount('http://', adapter) - session.mount('https://', adapter) - else: - session = requests - # the stream=True parameter keeps memory usage low - r = session.get( - url, stream=True, allow_redirects=True, headers=HEADERS, timeout=300 - ) - r.raise_for_status() - try: - with open(local_filename, 'wb') as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - f.flush() - return local_filename - except requests.exceptions.ChunkedEncodingError as err: - if i == retries: - raise err - logger.warning('Download interrupted, retrying...') - time.sleep(backoff_factor * 2**i) - raise ValueError("retries must be >= 0") - - -def download_using_mirrors(mirrors, local_filename=None): - """Try to download the file from any working mirror. - - Download the file that all URLs in the mirrors list point to, - trying all the tricks, starting with the most private methods - first. The list of mirrors is converted into a list of mirror - configurations to try, in order that the should be attempted. - - This builds mirror_configs_to_try using all possible combos to - try. If a mirror is marked with worksWithoutSNI: True, then this - logic will try it twice: first without SNI, then again with SNI. 
- - """ - mirrors = common.parse_list_of_dicts(mirrors) - mirror_configs_to_try = [] - for mirror in mirrors: - mirror_configs_to_try.append(mirror) - if mirror.get('worksWithoutSNI'): - m = copy.deepcopy(mirror) - del m['worksWithoutSNI'] - mirror_configs_to_try.append(m) - - if not local_filename: - for mirror in mirrors: - filename = urllib.parse.urlparse(mirror['url']).path.split('/')[-1] - if filename: - break - if filename: - local_filename = os.path.join(common.get_cachedir(), filename) - else: - local_filename = tempfile.mkstemp(prefix='fdroid-') - - timeouts = (2, 10, 100) - last_exception = None - for timeout in timeouts: - for mirror in mirror_configs_to_try: - last_exception = None - urllib3.util.ssl_.HAS_SNI = not mirror.get('worksWithoutSNI') - try: - # the stream=True parameter keeps memory usage low - r = requests.get( - mirror['url'], - stream=True, - allow_redirects=False, - headers=HEADERS, - # add jitter to the timeout to be less predictable - timeout=timeout + random.randint(0, timeout), # nosec B311 - ) - if r.status_code != 200: - raise requests.exceptions.HTTPError(r.status_code, response=r) - with open(local_filename, 'wb') as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - f.flush() - return local_filename - except ( - ConnectionError, - requests.exceptions.ChunkedEncodingError, - requests.exceptions.ConnectionError, - requests.exceptions.ContentDecodingError, - requests.exceptions.HTTPError, - requests.exceptions.SSLError, - requests.exceptions.StreamConsumedError, - requests.exceptions.Timeout, - requests.exceptions.UnrewindableBodyError, - ) as e: - last_exception = e - logger.debug(_('Retrying failed download: %s') % str(e)) - # if it hasn't succeeded by now, then give up and raise last exception - if last_exception: - raise last_exception + # the stream=True parameter keeps memory usage low + r = requests.get(url, stream=True, allow_redirects=True) + r.raise_for_status() + with open(local_filename, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + f.write(chunk) + f.flush() + return local_filename def http_get(url, etag=None, timeout=600): - """Download the content from the given URL by making a GET request. + """ + Downloads the content from the given URL by making a GET request. If an ETag is given, it will do a HEAD request first, to see if the content changed. - Parameters - ---------- - url - The URL to download from. - etag - The last ETag to be used for the request (optional). - - Returns - ------- - A tuple consisting of: - - The raw content that was downloaded or None if it did not change - - The new eTag as returned by the HTTP request + :param url: The URL to download from. + :param etag: The last ETag to be used for the request (optional). 
+ :return: A tuple consisting of: + - The raw content that was downloaded or None if it did not change + - The new eTag as returned by the HTTP request """ + headers = {'User-Agent': 'F-Droid'} # TODO disable TLS Session IDs and TLS Session Tickets # (plain text cookie visible to anyone who can see the network traffic) if etag: - r = requests.head(url, headers=HEADERS, timeout=timeout) + r = requests.head(url, headers=headers, timeout=timeout) r.raise_for_status() if 'ETag' in r.headers and etag == r.headers['ETag']: return None, etag - r = requests.get(url, headers=HEADERS, timeout=timeout) + r = requests.get(url, headers=headers, timeout=timeout) r.raise_for_status() new_etag = None diff --git a/fdroidserver/nightly.py b/fdroidserver/nightly.py index 372390ea..b67a4097 100644 --- a/fdroidserver/nightly.py +++ b/fdroidserver/nightly.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -"""Set up an app build for a nightly build repo.""" # # nightly.py - part of the FDroid server tools # Copyright (C) 2017 Hans-Christoph Steiner @@ -19,26 +18,23 @@ import base64 import datetime +import git import hashlib -import inspect import logging import os +import paramiko import platform import shutil -import ssl import subprocess import sys import tempfile -from argparse import ArgumentParser -from typing import Optional -from urllib.parse import urlparse - -import git -import paramiko import yaml +from urllib.parse import urlparse +from argparse import ArgumentParser + +from . import _ +from . import common -from . import _, common -from .exception import VCSException # hard coded defaults for Android ~/.android/debug.keystore files # https://developers.google.com/android/guides/client-auth @@ -51,253 +47,65 @@ DISTINGUISHED_NAME = 'CN=Android Debug,O=Android,C=US' NIGHTLY = '-nightly' -def _get_keystore_secret_var(keystore: str) -> str: - """Get keystore secret as base64. - - Parameters - ---------- - keystore - The path of the keystore. - - Returns - ------- - base64_secret - The keystore secret as base64 string. - """ - with open(keystore, 'rb') as fp: - return base64.standard_b64encode(fp.read()).decode('ascii') - - -def _ssh_key_from_debug_keystore(keystore: Optional[str] = None) -> str: - """Convert a debug keystore to an SSH private key. - - This leaves the original keystore file in place. - - Parameters - ---------- - keystore - The keystore to convert to a SSH private key. - - Returns - ------- - key_path - The SSH private key file path in the temporary directory. 
- """ - if keystore is None: - # set this here so it can be overridden in the tests - # TODO convert this to a class to get rid of this nonsense - keystore = KEYSTORE_FILE +def _ssh_key_from_debug_keystore(keystore=KEYSTORE_FILE): tmp_dir = tempfile.mkdtemp(prefix='.') privkey = os.path.join(tmp_dir, '.privkey') key_pem = os.path.join(tmp_dir, '.key.pem') p12 = os.path.join(tmp_dir, '.keystore.p12') _config = dict() common.fill_config_defaults(_config) - subprocess.check_call( - [ - _config['keytool'], - '-importkeystore', - '-srckeystore', - keystore, - '-srcalias', - KEY_ALIAS, - '-srcstorepass', - PASSWORD, - '-srckeypass', - PASSWORD, - '-destkeystore', - p12, - '-destalias', - KEY_ALIAS, - '-deststorepass', - PASSWORD, - '-destkeypass', - PASSWORD, - '-deststoretype', - 'PKCS12', - ], - env={'LC_ALL': 'C.UTF-8'}, - ) - subprocess.check_call( - [ - 'openssl', - 'pkcs12', - '-in', - p12, - '-out', - key_pem, - '-passin', - 'pass:' + PASSWORD, - '-passout', - 'pass:' + PASSWORD, - ], - env={'LC_ALL': 'C.UTF-8'}, - ) - - # OpenSSL 3.0 changed the default output format from PKCS#1 to - # PKCS#8, which paramiko does not support. - # https://www.openssl.org/docs/man3.0/man1/openssl-rsa.html#traditional - # https://github.com/paramiko/paramiko/issues/1015 - openssl_rsa_cmd = ['openssl', 'rsa'] - if ssl.OPENSSL_VERSION_INFO[0] >= 3: - openssl_rsa_cmd += ['-traditional'] - subprocess.check_call( - openssl_rsa_cmd - + [ - '-in', - key_pem, - '-out', - privkey, - '-passin', - 'pass:' + PASSWORD, - ], - env={'LC_ALL': 'C.UTF-8'}, - ) + subprocess.check_call([_config['keytool'], '-importkeystore', + '-srckeystore', keystore, '-srcalias', KEY_ALIAS, + '-srcstorepass', PASSWORD, '-srckeypass', PASSWORD, + '-destkeystore', p12, '-destalias', KEY_ALIAS, + '-deststorepass', PASSWORD, '-destkeypass', PASSWORD, + '-deststoretype', 'PKCS12'], + env={'LC_ALL': 'C.UTF-8'}) + subprocess.check_call(['openssl', 'pkcs12', '-in', p12, '-out', key_pem, + '-passin', 'pass:' + PASSWORD, '-passout', 'pass:' + PASSWORD], + env={'LC_ALL': 'C.UTF-8'}) + subprocess.check_call(['openssl', 'rsa', '-in', key_pem, '-out', privkey, + '-passin', 'pass:' + PASSWORD], + env={'LC_ALL': 'C.UTF-8'}) os.remove(key_pem) os.remove(p12) os.chmod(privkey, 0o600) # os.umask() should cover this, but just in case rsakey = paramiko.RSAKey.from_private_key_file(privkey) - fingerprint = ( - base64.b64encode(hashlib.sha256(rsakey.asbytes()).digest()) - .decode('ascii') - .rstrip('=') - ) - ssh_private_key_file = os.path.join( - tmp_dir, 'debug_keystore_' + fingerprint.replace('/', '_') + '_id_rsa' - ) + fingerprint = base64.b64encode(hashlib.sha256(rsakey.asbytes()).digest()).decode('ascii').rstrip('=') + ssh_private_key_file = os.path.join(tmp_dir, 'debug_keystore_' + + fingerprint.replace('/', '_') + '_id_rsa') shutil.move(privkey, ssh_private_key_file) pub = rsakey.get_name() + ' ' + rsakey.get_base64() + ' ' + ssh_private_key_file with open(ssh_private_key_file + '.pub', 'w') as fp: fp.write(pub) - logging.info(_('\nSSH public key to be used as deploy key:') + '\n' + pub) + logging.info(_('\nSSH Public Key to be used as Deploy Key:') + '\n' + pub) return ssh_private_key_file -def get_repo_base_url( - clone_url: str, repo_git_base: str, force_type: Optional[str] = None -) -> str: - """Generate the base URL for the F-Droid repository. - - Parameters - ---------- - clone_url - The URL to clone the Git repository. - repo_git_base - The project path of the Git repository at the Git forge. - force_type - The Git forge of the project. 
- - Returns - ------- - repo_base_url - The base URL of the F-Droid repository. - """ - if force_type is None: - force_type = urlparse(clone_url).netloc - if force_type == 'gitlab.com': - return clone_url + '/-/raw/master/fdroid' - if force_type == 'github.com': - return 'https://raw.githubusercontent.com/%s/master/fdroid' % repo_git_base - print(_('ERROR: unsupported git host "%s", patches welcome!') % force_type) - sys.exit(1) - - -def clone_git_repo(clone_url, git_mirror_path): - """Clone a git repo into the given path, failing if a password is required. - - If GitPython's safe mode is present, this will use that. Otherwise, - this includes a very limited version of the safe mode just to ensure - this won't hang on password prompts. - - https://github.com/gitpython-developers/GitPython/pull/2029 - - """ - logging.debug(_('cloning {url}').format(url=clone_url)) - try: - sig = inspect.signature(git.Repo.clone_from) - if 'safe' in sig.parameters: - git.Repo.clone_from(clone_url, git_mirror_path, safe=True) - else: - git.Repo.clone_from( - clone_url, - git_mirror_path, - env={ - 'GIT_ASKPASS': '/bin/true', - 'SSH_ASKPASS': '/bin/true', - 'GIT_USERNAME': 'u', - 'GIT_PASSWORD': 'p', - 'GIT_HTTP_USERNAME': 'u', - 'GIT_HTTP_PASSWORD': 'p', - 'GIT_SSH': '/bin/false', # for git < 2.3 - 'GIT_TERMINAL_PROMPT': '0', - }, - ) - except git.exc.GitCommandError as e: - logging.warning(_('WARNING: only public git repos are supported!')) - raise VCSException(f'git clone {clone_url} failed:', str(e)) from e - - def main(): - """Deploy to F-Droid repository or generate SSH private key from keystore. - The behaviour of this function is influenced by the configuration file as - well as command line parameters. - - Raises - ------ - :exc:`~fdroidserver.exception.VCSException` - If the nightly Git repository could not be cloned during an attempt to - deploy. 
- """ - parser = ArgumentParser() + parser = ArgumentParser(usage="%(prog)s") common.setup_global_opts(parser) - parser.add_argument( - "--keystore", - default=KEYSTORE_FILE, - help=_("Specify which debug keystore file to use."), - ) - parser.add_argument( - "--show-secret-var", - action="store_true", - default=False, - help=_("Print the secret variable to the terminal for easy copy/paste"), - ) - parser.add_argument( - "--keep-private-keys", - action="store_true", - default=False, - help=_("Do not remove the private keys generated from the keystore"), - ) - parser.add_argument( - "--no-deploy", - action="store_true", - default=False, - help=_("Do not deploy the new files to the repo"), - ) - parser.add_argument( - "--file", - default='app/build/outputs/apk/*.apk', - help=_('The file to be included in the repo (path or glob)'), - ) - parser.add_argument( - "--no-checksum", - action="store_true", - default=False, - help=_("Don't use rsync checksums"), - ) - archive_older_unset = -1 - parser.add_argument( - "--archive-older", - type=int, - default=archive_older_unset, - help=_("Set maximum releases in repo before older ones are archived"), - ) + parser.add_argument("--keystore", default=KEYSTORE_FILE, + help=_("Specify which debug keystore file to use.")) + parser.add_argument("--show-secret-var", action="store_true", default=False, + help=_("Print the secret variable to the terminal for easy copy/paste")) + parser.add_argument("--keep-private-keys", action="store_true", default=False, + help=_("Do not remove the private keys generated from the keystore")) + parser.add_argument("--no-deploy", action="store_true", default=False, + help=_("Do not deploy the new files to the repo")) + parser.add_argument("--file", default='app/build/outputs/apk/*.apk', + help=_('The file to be included in the repo (path or glob)')) + parser.add_argument("--no-checksum", action="store_true", default=False, + help=_("Don't use rsync checksums")) + parser.add_argument("--archive-older", type=int, default=20, + help=_("Set maximum releases in repo before older ones are archived")) # TODO add --with-btlog - options = common.parse_args(parser) + options = parser.parse_args() # force a tighter umask since this writes private key material umask = os.umask(0o077) @@ -321,86 +129,55 @@ def main(): cibase = os.getcwd() os.makedirs(repodir, exist_ok=True) - # the 'master' branch is hardcoded in fdroidserver/deploy.py if 'CI_PROJECT_PATH' in os.environ and 'CI_PROJECT_URL' in os.environ: # we are in GitLab CI repo_git_base = os.getenv('CI_PROJECT_PATH') + NIGHTLY clone_url = os.getenv('CI_PROJECT_URL') + NIGHTLY - repo_base = get_repo_base_url( - clone_url, repo_git_base, force_type='gitlab.com' - ) + repo_base = clone_url + '/raw/master/fdroid' servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base - deploy_key_url = ( - f'{clone_url}/-/settings/repository#js-deploy-keys-settings' - ) + deploy_key_url = clone_url + '/settings/repository' git_user_name = os.getenv('GITLAB_USER_NAME') git_user_email = os.getenv('GITLAB_USER_EMAIL') elif 'TRAVIS_REPO_SLUG' in os.environ: # we are in Travis CI repo_git_base = os.getenv('TRAVIS_REPO_SLUG') + NIGHTLY clone_url = 'https://github.com/' + repo_git_base - repo_base = get_repo_base_url( - clone_url, repo_git_base, force_type='github.com' - ) + _branch = os.getenv('TRAVIS_BRANCH') + repo_base = 'https://raw.githubusercontent.com/' + repo_git_base + '/' + _branch + '/fdroid' servergitmirror = 'git@github.com:' + repo_git_base - deploy_key_url = ( - 
f'https://github.com/{repo_git_base}/settings/keys' - + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' - ) + deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys' + + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys') git_user_name = repo_git_base git_user_email = os.getenv('USER') + '@' + platform.node() - elif ( - 'CIRCLE_REPOSITORY_URL' in os.environ - and 'CIRCLE_PROJECT_USERNAME' in os.environ - and 'CIRCLE_PROJECT_REPONAME' in os.environ - ): + elif 'CIRCLE_REPOSITORY_URL' in os.environ \ + and 'CIRCLE_PROJECT_USERNAME' in os.environ \ + and 'CIRCLE_PROJECT_REPONAME' in os.environ: # we are in Circle CI - repo_git_base = ( - os.getenv('CIRCLE_PROJECT_USERNAME') - + '/' - + os.getenv('CIRCLE_PROJECT_REPONAME') - + NIGHTLY - ) + repo_git_base = (os.getenv('CIRCLE_PROJECT_USERNAME') + + '/' + os.getenv('CIRCLE_PROJECT_REPONAME') + NIGHTLY) clone_url = os.getenv('CIRCLE_REPOSITORY_URL') + NIGHTLY - repo_base = get_repo_base_url( - clone_url, repo_git_base, force_type='github.com' - ) + repo_base = clone_url + '/raw/master/fdroid' servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base - deploy_key_url = ( - f'https://github.com/{repo_git_base}/settings/keys' - + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' - ) + deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys' + + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys') git_user_name = os.getenv('CIRCLE_USERNAME') git_user_email = git_user_name + '@' + platform.node() - elif 'GITHUB_ACTIONS' in os.environ: - # we are in Github actions - repo_git_base = os.getenv('GITHUB_REPOSITORY') + NIGHTLY - clone_url = os.getenv('GITHUB_SERVER_URL') + '/' + repo_git_base - repo_base = get_repo_base_url( - clone_url, repo_git_base, force_type='github.com' - ) - servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base - deploy_key_url = ( - f'https://github.com/{repo_git_base}/settings/keys' - + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys' - ) - git_user_name = os.getenv('GITHUB_ACTOR') - git_user_email = git_user_name + '@' + platform.node() else: print(_('ERROR: unsupported CI type, patches welcome!')) sys.exit(1) repo_url = repo_base + '/repo' git_mirror_path = os.path.join(repo_basedir, 'git-mirror') - git_mirror_fdroiddir = os.path.join(git_mirror_path, 'fdroid') - git_mirror_repodir = os.path.join(git_mirror_fdroiddir, 'repo') - git_mirror_metadatadir = os.path.join(git_mirror_fdroiddir, 'metadata') + git_mirror_repodir = os.path.join(git_mirror_path, 'fdroid', 'repo') + git_mirror_metadatadir = os.path.join(git_mirror_path, 'fdroid', 'metadata') if not os.path.isdir(git_mirror_repodir): - clone_git_repo(clone_url, git_mirror_path) + logging.debug(_('cloning {url}').format(url=clone_url)) + try: + git.Repo.clone_from(clone_url, git_mirror_path) + except Exception: + pass if not os.path.isdir(git_mirror_repodir): os.makedirs(git_mirror_repodir, mode=0o755) - if os.path.exists('LICENSE'): - shutil.copy2('LICENSE', git_mirror_path) mirror_git_repo = git.Repo.init(git_mirror_path) writer = mirror_git_repo.config_writer() @@ -414,31 +191,32 @@ def main(): readme = ''' # {repo_git_base} -This is an app repository for nightly versions. -You can use it with the [F-Droid](https://f-droid.org/) Android app. 
+[![{repo_url}](icon.png)]({repo_url}) -[![{repo_url}]({repo_url}/icons/icon.png)](https://fdroid.link/#{repo_url}) - -Last updated: {date}'''.format( - repo_git_base=repo_git_base, - repo_url=repo_url, - date=datetime.datetime.now(datetime.timezone.utc).strftime( - '%Y-%m-%d %H:%M:%S UTC' - ), - ) +Last updated: {date}'''.format(repo_git_base=repo_git_base, + repo_url=repo_url, + date=datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')) with open(readme_path, 'w') as fp: fp.write(readme) mirror_git_repo.git.add(all=True) mirror_git_repo.index.commit("update README") + icon_path = os.path.join(git_mirror_path, 'icon.png') + try: + import qrcode + qrcode.make(repo_url).save(icon_path) + except Exception: + exampleicon = os.path.join(common.get_examples_dir(), 'fdroid-icon.png') + shutil.copy(exampleicon, icon_path) mirror_git_repo.git.add(all=True) mirror_git_repo.index.commit("update repo/website icon") + shutil.copy(icon_path, repo_basedir) os.chdir(repo_basedir) if os.path.isdir(git_mirror_repodir): - common.local_rsync(options, [git_mirror_repodir + '/'], 'repo/') + common.local_rsync(options, git_mirror_repodir + '/', 'repo/') if os.path.isdir(git_mirror_metadatadir): - common.local_rsync(options, [git_mirror_metadatadir + '/'], 'metadata/') + common.local_rsync(options, git_mirror_metadatadir + '/', 'metadata/') ssh_private_key_file = _ssh_key_from_debug_keystore() # this is needed for GitPython to find the SSH key @@ -449,89 +227,58 @@ Last updated: {date}'''.format( with open(ssh_config, 'a') as fp: fp.write('\n\nHost *\n\tIdentityFile %s\n' % ssh_private_key_file) - if options.archive_older == archive_older_unset: - fdroid_size = common.get_dir_size(git_mirror_fdroiddir) - max_size = common.GITLAB_COM_PAGES_MAX_SIZE - if fdroid_size < max_size: - options.archive_older = 20 - else: - options.archive_older = 3 - print( - 'WARNING: repo is %s over the GitLab Pages limit (%s)' - % (fdroid_size - max_size, max_size) - ) - print('Setting --archive-older to 3') - - config = { - 'identity_file': ssh_private_key_file, - 'repo_name': repo_git_base, - 'repo_url': repo_url, - 'repo_description': 'Nightly builds from %s' % git_user_email, - 'archive_name': repo_git_base + ' archive', - 'archive_url': repo_base + '/archive', - 'archive_description': 'Old nightly builds that have been archived.', - 'archive_older': options.archive_older, - 'servergitmirrors': [{"url": servergitmirror}], - 'keystore': KEYSTORE_FILE, - 'repo_keyalias': KEY_ALIAS, - 'keystorepass': PASSWORD, - 'keypass': PASSWORD, - 'keydname': DISTINGUISHED_NAME, - 'make_current_version_link': False, - } - with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp: - yaml.dump(config, fp, default_flow_style=False) - os.chmod(common.CONFIG_FILE, 0o600) - config = common.read_config() + config = '' + config += "identity_file = '%s'\n" % ssh_private_key_file + config += "repo_name = '%s'\n" % repo_git_base + config += "repo_url = '%s'\n" % repo_url + config += "repo_icon = 'icon.png'\n" + config += "repo_description = 'Nightly builds from %s'\n" % git_user_email + config += "archive_name = '%s'\n" % (repo_git_base + ' archive') + config += "archive_url = '%s'\n" % (repo_base + '/archive') + config += "archive_icon = 'icon.png'\n" + config += "archive_description = 'Old nightly builds that have been archived.'\n" + config += "archive_older = %i\n" % options.archive_older + config += "servergitmirrors = '%s'\n" % servergitmirror + config += "keystore = '%s'\n" % KEYSTORE_FILE + config += "repo_keyalias = '%s'\n" % KEY_ALIAS + 
config += "keystorepass = '%s'\n" % PASSWORD + config += "keypass = '%s'\n" % PASSWORD + config += "keydname = '%s'\n" % DISTINGUISHED_NAME + config += "make_current_version_link = False\n" + config += "accepted_formats = ('txt', 'yml')\n" + # TODO add update_stats = True + with open('config.py', 'w') as fp: + fp.write(config) + os.chmod('config.py', 0o600) + config = common.read_config(options) common.assert_config_keystore(config) - logging.debug( - _( - 'Run over {cibase} to find -debug.apk. and skip repo_basedir {repo_basedir}' - ).format(cibase=cibase, repo_basedir=repo_basedir) - ) - for root, dirs, files in os.walk(cibase): - for d in ('.git', '.gradle'): + for d in ('fdroid', '.git', '.gradle'): if d in dirs: dirs.remove(d) - if root == cibase and 'fdroid' in dirs: - dirs.remove('fdroid') - for f in files: if f.endswith('-debug.apk'): apkfilename = os.path.join(root, f) - logging.debug( - _('Stripping mystery signature from {apkfilename}').format( - apkfilename=apkfilename - ) - ) + logging.debug(_('Striping mystery signature from {apkfilename}') + .format(apkfilename=apkfilename)) destapk = os.path.join(repodir, os.path.basename(f)) os.chmod(apkfilename, 0o644) - logging.debug( - _( - 'Resigning {apkfilename} with provided debug.keystore' - ).format(apkfilename=os.path.basename(apkfilename)) - ) + logging.debug(_('Resigning {apkfilename} with provided debug.keystore') + .format(apkfilename=os.path.basename(apkfilename))) + common.apk_strip_signatures(apkfilename, strip_manifest=True) common.sign_apk(apkfilename, destapk, KEY_ALIAS) if options.verbose: - logging.debug(_('attempting bare SSH connection to test deploy key:')) + logging.debug(_('attempting bare ssh connection to test deploy key:')) try: - subprocess.check_call( - [ - 'ssh', - '-Tvi', - ssh_private_key_file, - '-oIdentitiesOnly=yes', - '-oStrictHostKeyChecking=no', - servergitmirror.split(':')[0], - ] - ) + subprocess.check_call(['ssh', '-Tvi', ssh_private_key_file, + '-oIdentitiesOnly=yes', '-oStrictHostKeyChecking=no', + servergitmirror.split(':')[0]]) except subprocess.CalledProcessError: pass - app_url = clone_url[: -len(NIGHTLY)] + app_url = clone_url[:-len(NIGHTLY)] template = dict() template['AuthorName'] = clone_url.split('/')[4] template['AuthorWebSite'] = '/'.join(clone_url.split('/')[:4]) @@ -543,26 +290,19 @@ Last updated: {date}'''.format( with open('template.yml', 'w') as fp: yaml.dump(template, fp) - subprocess.check_call( - ['fdroid', 'update', '--rename-apks', '--create-metadata', '--verbose'], - cwd=repo_basedir, - ) - common.local_rsync( - options, [repo_basedir + '/metadata/'], git_mirror_metadatadir + '/' - ) + subprocess.check_call(['fdroid', 'update', '--rename-apks', '--create-metadata', '--verbose'], + cwd=repo_basedir) + common.local_rsync(options, repo_basedir + '/metadata/', git_mirror_metadatadir + '/') mirror_git_repo.git.add(all=True) mirror_git_repo.index.commit("update app metadata") if not options.no_deploy: try: - cmd = ['fdroid', 'deploy', '--verbose', '--no-keep-git-mirror-archive'] + cmd = ['fdroid', 'server', 'update', '--verbose', '--no-keep-git-mirror-archive'] subprocess.check_call(cmd, cwd=repo_basedir) except subprocess.CalledProcessError: - logging.error( - _('cannot publish update, did you set the deploy key?') - + '\n' - + deploy_key_url - ) + logging.error(_('cannot publish update, did you set the deploy key?') + + '\n' + deploy_key_url) sys.exit(1) if not options.keep_private_keys: @@ -576,33 +316,25 @@ Last updated: {date}'''.format( if not os.path.exists(androiddir): 
os.mkdir(androiddir) logging.info(_('created {path}').format(path=androiddir)) - logging.error( - _('{path} does not exist! Create it by running:').format( - path=options.keystore - ) - + '\n keytool -genkey -v -keystore ' - + options.keystore - + ' -storepass android \\' - + '\n -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\' - + '\n -dname "CN=Android Debug,O=Android,C=US"' - ) + logging.error(_('{path} does not exist! Create it by running:').format(path=options.keystore) + + '\n keytool -genkey -v -keystore ' + options.keystore + ' -storepass android \\' + + '\n -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\' + + '\n -dname "CN=Android Debug,O=Android,C=US"') sys.exit(1) ssh_dir = os.path.join(os.getenv('HOME'), '.ssh') + os.makedirs(os.path.dirname(ssh_dir), exist_ok=True) privkey = _ssh_key_from_debug_keystore(options.keystore) - if os.path.exists(ssh_dir): - ssh_private_key_file = os.path.join(ssh_dir, os.path.basename(privkey)) - shutil.move(privkey, ssh_private_key_file) - shutil.move(privkey + '.pub', ssh_private_key_file + '.pub') + ssh_private_key_file = os.path.join(ssh_dir, os.path.basename(privkey)) + shutil.move(privkey, ssh_private_key_file) + shutil.move(privkey + '.pub', ssh_private_key_file + '.pub') if shutil.rmtree.avoids_symlink_attacks: shutil.rmtree(os.path.dirname(privkey)) if options.show_secret_var: - debug_keystore = _get_keystore_secret_var(options.keystore) - print( - _('\n{path} encoded for the DEBUG_KEYSTORE secret variable:').format( - path=options.keystore - ) - ) + with open(options.keystore, 'rb') as fp: + debug_keystore = base64.standard_b64encode(fp.read()).decode('ascii') + print(_('\n{path} encoded for the DEBUG_KEYSTORE secret variable:') + .format(path=options.keystore)) print(debug_keystore) os.umask(umask) diff --git a/fdroidserver/publish.py b/fdroidserver/publish.py index 42945166..1369d177 100644 --- a/fdroidserver/publish.py +++ b/fdroidserver/publish.py @@ -3,7 +3,6 @@ # publish.py - part of the FDroid server tools # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com # Copyright (C) 2013-2014 Daniel Martí -# Copyright (C) 2021 Felix C. Stegerman # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,40 +17,32 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -"""Sign APKs using keys or via reproducible builds signature copying. - -This command takes unsigned APKs and signs them. It looks for -unsigned APKs in the unsigned/ directory and puts successfully signed -APKs into the repo/ directory. The default is to run in a kind of -batch mode, where it will only quit on certain kinds of errors. It -mostly reports success by moving an APK from unsigned/ to repo/ - -""" - -import glob -import hashlib -import json -import logging +import sys import os import re import shutil -import sys -import time -import zipfile +import glob +import hashlib from argparse import ArgumentParser from collections import OrderedDict +import logging from gettext import ngettext +import json +import zipfile -from . import _, common, metadata +from . import _ +from . import common +from . 
import metadata from .common import FDroidPopen from .exception import BuildException, FDroidException config = None -start_timestamp = time.gmtime() +options = None def publish_source_tarball(apkfilename, unsigned_dir, output_dir): """Move the source tarball into the output directory...""" + tarfilename = apkfilename[:-4] + '_src.tar.gz' tarfile = os.path.join(unsigned_dir, tarfilename) if os.path.exists(tarfile): @@ -62,9 +53,7 @@ def publish_source_tarball(apkfilename, unsigned_dir, output_dir): def key_alias(appid): - """No summary. - - Get the alias which F-Droid uses to indentify the singing key + """Get the alias which F-Droid uses to indentify the singing key for this App in F-Droids keystore. """ if config and 'keyaliases' in config and appid in config['keyaliases']: @@ -82,27 +71,23 @@ def key_alias(appid): def read_fingerprints_from_keystore(): - """Obtain a dictionary containing all singning-key fingerprints which are managed by F-Droid, grouped by appid.""" - env_vars = {'LC_ALL': 'C.UTF-8', 'FDROID_KEY_STORE_PASS': config['keystorepass']} - cmd = [ - config['keytool'], - '-list', - '-v', - '-keystore', - config['keystore'], - '-storepass:env', - 'FDROID_KEY_STORE_PASS', - ] - if config['keystore'] == 'NONE': - cmd += config['smartcardoptions'] - p = FDroidPopen(cmd, envs=env_vars, output=False) + """Obtain a dictionary containing all singning-key fingerprints which + are managed by F-Droid, grouped by appid. + """ + env_vars = {'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass']} + p = FDroidPopen([config['keytool'], '-list', + '-v', '-keystore', config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS'], + envs=env_vars, output=False) if p.returncode != 0: raise FDroidException('could not read keystore {}'.format(config['keystore'])) - realias = re.compile('Alias name: (?P.+)' + os.linesep) - resha256 = re.compile(r'\s+SHA256: (?P[:0-9A-F]{95})' + os.linesep) + realias = re.compile('Alias name: (?P.+)\n') + resha256 = re.compile(r'\s+SHA256: (?P[:0-9A-F]{95})\n') fps = {} - for block in p.output.split(('*' * 43) + os.linesep + '*' * 43): + for block in p.output.split(('*' * 43) + '\n' + '*' * 43): s_alias = realias.search(block) s_sha256 = resha256.search(block) if s_alias and s_sha256: @@ -112,9 +97,8 @@ def read_fingerprints_from_keystore(): def sign_sig_key_fingerprint_list(jar_file): - """Sign the list of app-signing key fingerprints. - - This is used primaryily by fdroid update to determine which APKs + """sign the list of app-signing key fingerprints which is + used primaryily by fdroid update to determine which APKs where built and signed by F-Droid and which ones were manually added by users. """ @@ -128,22 +112,19 @@ def sign_sig_key_fingerprint_list(jar_file): cmd += config['smartcardoptions'] else: # smardcards never use -keypass cmd += '-keypass:env', 'FDROID_KEY_PASS' - env_vars = { - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config.get('keypass', ""), - } + env_vars = {'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass']} p = common.FDroidPopen(cmd, envs=env_vars) if p.returncode != 0: raise FDroidException("Failed to sign '{}'!".format(jar_file)) -def store_publish_signer_fingerprints(appids, indent=None): +def store_stats_fdroid_signing_key_fingerprints(appids, indent=None): """Store list of all signing-key fingerprints for given appids to HD. - This list will later on be needed by fdroid update. 
""" - if not os.path.exists('repo'): - os.makedirs('repo') + if not os.path.exists('stats'): + os.makedirs('stats') data = OrderedDict() fps = read_fingerprints_from_keystore() for appid in sorted(appids): @@ -151,153 +132,29 @@ def store_publish_signer_fingerprints(appids, indent=None): if alias in fps: data[appid] = {'signer': fps[key_alias(appid)]} - jar_file = os.path.join('repo', 'signer-index.jar') - output = json.dumps(data, indent=indent) + jar_file = os.path.join('stats', 'publishsigkeys.jar') with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar: - jar.writestr('signer-index.json', output) - with open(os.path.join('repo', 'signer-index.json'), 'w') as fp: - fp.write(output) + jar.writestr('publishsigkeys.json', json.dumps(data, indent=indent)) sign_sig_key_fingerprint_list(jar_file) -def status_update_json(generatedKeys, signedApks): - """Output a JSON file with metadata about this run.""" - logging.debug(_('Outputting JSON')) - output = common.setup_status_output(start_timestamp) - output['apksigner'] = shutil.which(config.get('apksigner', '')) - output['jarsigner'] = shutil.which(config.get('jarsigner', '')) - output['keytool'] = shutil.which(config.get('keytool', '')) - if generatedKeys: - output['generatedKeys'] = generatedKeys - if signedApks: - output['signedApks'] = signedApks - common.write_status_json(output) - - -def check_for_key_collisions(allapps): - """Make sure there's no collision in keyaliases from apps. - - It was suggested at - https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit - that a package could be crafted, such that it would use the same signing - key as an existing app. While it may be theoretically possible for such a - colliding package ID to be generated, it seems virtually impossible that - the colliding ID would be something that would be a) a valid package ID, - and b) a sane-looking ID that would make its way into the repo. - Nonetheless, to be sure, before publishing we check that there are no - collisions, and refuse to do any publishing if that's the case. - - Parameters - ---------- - allapps - a dict of all apps to process - - Returns - ------- - a list of all aliases corresponding to allapps - """ - allaliases = [] - for appid in allapps: - m = hashlib.md5() # nosec just used to generate a keyalias - m.update(appid.encode('utf-8')) - keyalias = m.hexdigest()[:8] - if keyalias in allaliases: - logging.error(_("There is a keyalias collision - publishing halted")) - sys.exit(1) - allaliases.append(keyalias) - return allaliases - - -def create_key_if_not_existing(keyalias): - """Ensure a signing key with the given keyalias exists. - - Returns - ------- - boolean - True if a new key was created, False otherwise - """ - # See if we already have a key for this application, and - # if not generate one... 
- env_vars = { - 'LC_ALL': 'C.UTF-8', - 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config.get('keypass', ""), - } - cmd = [ - config['keytool'], - '-list', - '-alias', - keyalias, - '-keystore', - config['keystore'], - '-storepass:env', - 'FDROID_KEY_STORE_PASS', - ] - if config['keystore'] == 'NONE': - cmd += config['smartcardoptions'] - p = FDroidPopen(cmd, envs=env_vars) - if p.returncode != 0: - logging.info("Key does not exist - generating...") - cmd = [ - config['keytool'], - '-genkey', - '-keystore', - config['keystore'], - '-alias', - keyalias, - '-keyalg', - 'RSA', - '-keysize', - '2048', - '-validity', - '10000', - '-storepass:env', - 'FDROID_KEY_STORE_PASS', - '-dname', - config['keydname'], - ] - if config['keystore'] == 'NONE': - cmd += config['smartcardoptions'] - else: - cmd += '-keypass:env', 'FDROID_KEY_PASS' - p = FDroidPopen(cmd, envs=env_vars) - if p.returncode != 0: - raise BuildException("Failed to generate key", p.output) - return True - else: - return False - - def main(): - global config + + global config, options # Parse command line... - parser = ArgumentParser( - usage="%(prog)s [options] " "[APPID[:VERCODE] [APPID[:VERCODE] ...]]" - ) + parser = ArgumentParser(usage="%(prog)s [options] " + "[APPID[:VERCODE] [APPID[:VERCODE] ...]]") common.setup_global_opts(parser) - parser.add_argument( - "-e", - "--error-on-failed", - action="store_true", - default=False, - help=_("When signing or verifying fails, exit with an error code."), - ) - parser.add_argument( - "appid", - nargs='*', - help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), - ) + parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W - config = common.read_config() + config = common.read_config(options) if not ('jarsigner' in config and 'keytool' in config): - logging.critical( - _('Java JDK not found! Install in standard location or set java_paths!') - ) + logging.critical(_('Java JDK not found! Install in standard location or set java_paths!')) sys.exit(1) common.assert_config_keystore(config) @@ -323,28 +180,37 @@ def main(): sys.exit(1) binaries_dir = os.path.join(unsigned_dir, 'binaries') - if not config['keystore'] == "NONE" and not os.path.exists(config['keystore']): + if not os.path.exists(config['keystore']): logging.error("Config error - missing '{0}'".format(config['keystore'])) sys.exit(1) + # It was suggested at + # https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit + # that a package could be crafted, such that it would use the same signing + # key as an existing app. While it may be theoretically possible for such a + # colliding package ID to be generated, it seems virtually impossible that + # the colliding ID would be something that would be a) a valid package ID, + # and b) a sane-looking ID that would make its way into the repo. + # Nonetheless, to be sure, before publishing we check that there are no + # collisions, and refuse to do any publishing if that's the case... 
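# A short sketch of the key alias scheme described in the comment above, with
# a made-up application ID: only the first 8 hex digits of the MD5 of the
# appid are used, hence the collision check before any publishing happens.
import hashlib

appid = 'org.example.app'
keyalias = hashlib.md5(appid.encode('utf-8')).hexdigest()[:8]  # nosec - not used for security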
allapps = metadata.read_metadata() vercodes = common.read_pkg_args(options.appid, True) - common.get_metadata_files(vercodes) # only check appids - signed_apks = dict() - generated_keys = dict() - allaliases = check_for_key_collisions(allapps) - logging.info( - ngettext( - '{0} app, {1} key aliases', '{0} apps, {1} key aliases', len(allapps) - ).format(len(allapps), len(allaliases)) - ) + allaliases = [] + for appid in allapps: + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(appid.encode('utf-8')) + keyalias = m.hexdigest()[:8] + if keyalias in allaliases: + logging.error(_("There is a keyalias collision - publishing halted")) + sys.exit(1) + allaliases.append(keyalias) + logging.info(ngettext('{0} app, {1} key aliases', + '{0} apps, {1} key aliases', len(allapps)).format(len(allapps), len(allaliases))) - failed = 0 # Process any APKs or ZIPs that are waiting to be signed... - for apkfile in sorted( - glob.glob(os.path.join(unsigned_dir, '*.apk')) - + glob.glob(os.path.join(unsigned_dir, '*.zip')) - ): + for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk')) + + glob.glob(os.path.join(unsigned_dir, '*.zip'))): + appid, vercode = common.publishednameinfo(apkfile) apkfilename = os.path.basename(apkfile) if vercodes and appid not in vercodes: @@ -357,17 +223,13 @@ def main(): # There ought to be valid metadata for this app, otherwise why are we # trying to publish it? if appid not in allapps: - logging.error( - "Unexpected {0} found in unsigned directory".format(apkfilename) - ) + logging.error("Unexpected {0} found in unsigned directory" + .format(apkfilename)) sys.exit(1) app = allapps[appid] - build = None - for b in app.get("Builds", ()): - if b.get("versionCode") == vercode: - build = b - if app.Binaries or (build and build.binary): + if app.Binaries: + # It's an app where we build from source, and verify the apk # contents against a developer's binary, and then publish their # version if everything checks out. @@ -378,22 +240,14 @@ def main(): srcapk = srcapk.replace(unsigned_dir, binaries_dir) if not os.path.isfile(srcapk): - logging.error( - "...reference binary missing - publish skipped: '{refpath}'".format( - refpath=srcapk - ) - ) - failed += 1 + logging.error("...reference binary missing - publish skipped: " + "'{refpath}'".format(refpath=srcapk)) else: # Compare our unsigned one with the downloaded one... compare_result = common.verify_apks(srcapk, apkfile, tmp_dir) if compare_result: - logging.error( - "...verification failed - publish skipped : {result}".format( - result=compare_result - ) - ) - failed += 1 + logging.error("...verification failed - publish skipped : " + "{result}".format(result=compare_result)) else: # Success! So move the downloaded file to the repo, and remove # our built version. @@ -404,6 +258,7 @@ def main(): logging.info('Published ' + apkfilename) elif apkfile.endswith('.zip'): + # OTA ZIPs built by fdroid do not need to be signed by jarsigner, # just to be moved into place in the repo shutil.move(apkfile, os.path.join(output_dir, apkfilename)) @@ -411,6 +266,7 @@ def main(): logging.info('Published ' + apkfilename) else: + # It's a 'normal' app, i.e. we sign and publish it... skipsigning = False @@ -421,58 +277,93 @@ def main(): # metadata. This means we're going to prepare both a locally # signed APK and a version signed with the developers key. 
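# A small sketch (made-up values) of the file name used below for the
# developer-signed copy: appid, versionCode and the short fingerprint of the
# developer's signing certificate are joined together.
appid, vercode, devfp = 'org.example.app', 1234, '0f45c4e3'
devsigned = '{}_{}_{}.apk'.format(appid, vercode, devfp)
assert devsigned == 'org.example.app_1234_0f45c4e3.apk'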
- signature_file, _ignored, manifest, v2_files = signingfiles + signaturefile, signedfile, manifest = signingfiles - with open(signature_file, 'rb') as f: - devfp = common.signer_fingerprint_short( - common.get_certificate(f.read()) - ) + with open(signaturefile, 'rb') as f: + devfp = common.signer_fingerprint_short(common.get_certificate(f.read())) devsigned = '{}_{}_{}.apk'.format(appid, vercode, devfp) devsignedtmp = os.path.join(tmp_dir, devsigned) + shutil.copy(apkfile, devsignedtmp) - common.apk_implant_signatures(apkfile, devsignedtmp, manifest=manifest) + common.apk_implant_signatures(devsignedtmp, signaturefile, + signedfile, manifest) if common.verify_apk_signature(devsignedtmp): shutil.move(devsignedtmp, os.path.join(output_dir, devsigned)) else: os.remove(devsignedtmp) logging.error('...verification failed - skipping: %s', devsigned) skipsigning = True - failed += 1 # Now we sign with the F-Droid key. + + # Figure out the key alias name we'll use. Only the first 8 + # characters are significant, so we'll use the first 8 from + # the MD5 of the app's ID and hope there are no collisions. + # If a collision does occur later, we're going to have to + # come up with a new alogrithm, AND rename all existing keys + # in the keystore! if not skipsigning: - keyalias = key_alias(appid) + if appid in config['keyaliases']: + # For this particular app, the key alias is overridden... + keyalias = config['keyaliases'][appid] + if keyalias.startswith('@'): + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(keyalias[1:].encode('utf-8')) + keyalias = m.hexdigest()[:8] + else: + m = hashlib.md5() # nosec just used to generate a keyalias + m.update(appid.encode('utf-8')) + keyalias = m.hexdigest()[:8] logging.info("Key alias: " + keyalias) - if create_key_if_not_existing(keyalias): - generated_keys[appid] = keyalias + # See if we already have a key for this application, and + # if not generate one... + env_vars = {'LC_ALL': 'C.UTF-8', + 'FDROID_KEY_STORE_PASS': config['keystorepass'], + 'FDROID_KEY_PASS': config['keypass']} + p = FDroidPopen([config['keytool'], '-list', + '-alias', keyalias, '-keystore', config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS'], envs=env_vars) + if p.returncode != 0: + logging.info("Key does not exist - generating...") + p = FDroidPopen([config['keytool'], '-genkey', + '-keystore', config['keystore'], + '-alias', keyalias, + '-keyalg', 'RSA', '-keysize', '2048', + '-validity', '10000', + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-keypass:env', 'FDROID_KEY_PASS', + '-dname', config['keydname']], envs=env_vars) + if p.returncode != 0: + raise BuildException("Failed to generate key", p.output) signed_apk_path = os.path.join(output_dir, apkfilename) if os.path.exists(signed_apk_path): - raise BuildException( - _( - "Refusing to sign '{path}', file exists in both {dir1} and {dir2} folder." - ).format(path=apkfilename, dir1=unsigned_dir, dir2=output_dir) - ) + raise BuildException("Refusing to sign '{0}' file exists in both " + "{1} and {2} folder.".format(apkfilename, + unsigned_dir, + output_dir)) + # TODO replace below with common.sign_apk() once it has proven stable # Sign the application... 
- common.sign_apk(apkfile, signed_apk_path, keyalias) - if appid not in signed_apks: - signed_apks[appid] = [] - signed_apks[appid].append({"keyalias": keyalias, "filename": apkfile}) + p = FDroidPopen([config['jarsigner'], '-keystore', config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-keypass:env', 'FDROID_KEY_PASS', '-sigalg', + 'SHA1withRSA', '-digestalg', 'SHA1', + apkfile, keyalias], envs=env_vars) + if p.returncode != 0: + raise BuildException(_("Failed to sign application"), p.output) + + # Zipalign it... + common._zipalign(apkfile, os.path.join(output_dir, apkfilename)) + os.remove(apkfile) publish_source_tarball(apkfilename, unsigned_dir, output_dir) logging.info('Published ' + apkfilename) - store_publish_signer_fingerprints(allapps.keys()) - status_update_json(generated_keys, signed_apks) + store_stats_fdroid_signing_key_fingerprints(allapps.keys()) logging.info('published list signing-key fingerprints') - if failed: - logging.error(_('%d APKs failed to be signed or verified!') % failed) - if options.error_on_failed: - sys.exit(failed) - if __name__ == "__main__": main() diff --git a/fdroidserver/readmeta.py b/fdroidserver/readmeta.py index b3ef7c3b..7ec1cb05 100644 --- a/fdroidserver/readmeta.py +++ b/fdroidserver/readmeta.py @@ -17,19 +17,22 @@ # along with this program. If not, see . from argparse import ArgumentParser +from . import common +from . import metadata -from . import common, metadata +options = None def main(): - parser = ArgumentParser() + + parser = ArgumentParser(usage="%(prog)s") common.setup_global_opts(parser) metadata.add_metadata_arguments(parser) options = parser.parse_args() metadata.warnings_action = options.W - common.read_config() + common.read_config(None) - metadata.read_metadata() + metadata.read_metadata(xref=True) if __name__ == "__main__": diff --git a/fdroidserver/rewritemeta.py b/fdroidserver/rewritemeta.py index 4bbe810d..07fb7492 100644 --- a/fdroidserver/rewritemeta.py +++ b/fdroidserver/rewritemeta.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # # rewritemeta.py - part of the FDroid server tools -# This cleans up the original .yml metadata file format. +# This cleans up the original .txt metadata file format. # Copyright (C) 2010-12, Ciaran Gultnieks, ciaran@ciarang.com # # This program is free software: you can redistribute it and/or modify @@ -17,94 +17,101 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import io -import logging -import shutil -import tempfile from argparse import ArgumentParser -from pathlib import Path +import os +import logging +import io -from . import _, common, metadata +from . import _ +from . import common +from . 
import metadata config = None +options = None def proper_format(app): s = io.StringIO() # TODO: currently reading entire file again, should reuse first # read in metadata.py - cur_content = Path(app.metadatapath).read_text(encoding='utf-8') - if Path(app.metadatapath).suffix == '.yml': + with open(app.metadatapath, 'r') as f: + cur_content = f.read() + _ignored, extension = common.get_extension(app.metadatapath) + if extension == 'yml': metadata.write_yaml(s, app) + elif extension == 'txt': + metadata.write_txt(s, app) content = s.getvalue() s.close() return content == cur_content -def remove_blank_flags_from_builds(builds): - """Remove unset entries from Builds so they are not written out.""" - if not builds: - return list() - newbuilds = list() - for build in builds: - new = dict() - for k in metadata.build_flags: - v = build.get(k) - # 0 is valid value, it should not be stripped - if v is None or v is False or v == '' or v == dict() or v == list(): - continue - new[k] = v - newbuilds.append(new) - return newbuilds - - def main(): - global config - parser = ArgumentParser() + global config, options + + supported = ['txt', 'yml'] + + # Parse command line... + parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]") common.setup_global_opts(parser) - parser.add_argument( - "-l", - "--list", - action="store_true", - default=False, - help=_("List files that would be reformatted (dry run)"), - ) - parser.add_argument( - "appid", nargs='*', help=_("application ID of file to operate on") - ) + parser.add_argument("-l", "--list", action="store_true", default=False, + help=_("List files that would be reformatted")) + parser.add_argument("-t", "--to", default=None, + help=_("Rewrite to a specific format: ") + ', '.join(supported)) + parser.add_argument("appid", nargs='*', help=_("applicationId in the form APPID")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W - config = common.read_config() + config = common.read_config(options) - apps = common.read_app_args(options.appid) + # Get all apps... 
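(A rough illustration of what the --list / proper_format() check above amounts to; the helper below is hypothetical and only assumes a writer callable that behaves like metadata.write_yaml or metadata.write_txt. The parsed app is serialized back to a string and compared with the file on disk; a mismatch means rewritemeta would reformat that file.)

import io

def needs_rewrite(app, write_func):
    # write_func(stream, app) is assumed to serialize like metadata.write_yaml/write_txt.
    with open(app.metadatapath, 'r') as f:
        on_disk = f.read()
    buf = io.StringIO()
    write_func(buf, app)
    return buf.getvalue() != on_disk  # True: the file would be reformatted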
+ allapps = metadata.read_metadata(xref=True) + apps = common.read_app_args(options.appid, allapps, False) + + if options.list and options.to is not None: + parser.error(_("Cannot use --list and --to at the same time")) + + if options.to is not None and options.to not in supported: + parser.error(_("Unsupported metadata format, use: --to [{supported}]") + .format(supported=' '.join(supported))) for appid, app in apps.items(): - path = Path(app.metadatapath) - if path.suffix == '.yml': - logging.info(_("Rewriting '{appid}'").format(appid=appid)) - else: - logging.warning(_('Cannot rewrite "{path}"').format(path=path)) + path = app.metadatapath + base, ext = common.get_extension(path) + if not options.to and ext not in supported: + logging.info(_("Ignoring {ext} file at '{path}'").format(ext=ext, path=path)) continue + elif options.to is not None: + logging.info(_("Rewriting '{appid}' to '{path}'").format(appid=appid, path=options.to)) + else: + logging.info(_("Rewriting '{appid}'").format(appid=appid)) + + to_ext = ext + if options.to is not None: + to_ext = options.to if options.list: if not proper_format(app): print(path) continue - # TODO these should be moved to metadata.write_yaml() - builds = remove_blank_flags_from_builds(app.get('Builds')) - if builds: - app['Builds'] = builds + newbuilds = [] + for build in app.builds: + new = metadata.Build() + for k in metadata.build_flags: + v = build[k] + if v is None or v is False or v == [] or v == '': + continue + new[k] = v + newbuilds.append(new) + app.builds = newbuilds - # rewrite to temporary file before overwriting existing - # file in case there's a bug in write_metadata - with tempfile.TemporaryDirectory() as tmpdir: - tmp_path = Path(tmpdir) / path.name - metadata.write_metadata(tmp_path, app) - shutil.move(tmp_path, path) + metadata.write_metadata(base + '.' + to_ext, app) + + if ext != to_ext: + os.remove(path) logging.debug(_("Finished")) diff --git a/fdroidserver/scanner.py b/fdroidserver/scanner.py index f28e3803..5f315736 100644 --- a/fdroidserver/scanner.py +++ b/fdroidserver/scanner.py @@ -16,743 +16,105 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import itertools -import json -import logging import os import re -import sys import traceback -import urllib.parse -import urllib.request -import zipfile from argparse import ArgumentParser -from dataclasses import dataclass, field, fields -from datetime import datetime, timedelta, timezone -from enum import IntEnum -from pathlib import Path -from tempfile import TemporaryDirectory -from typing import Union +import logging +import itertools -try: - import magic -except ImportError: - import puremagic as magic +from . import _ +from . import common +from . import metadata +from .exception import BuildException, VCSException -if sys.version_info >= (3, 11): - import tomllib -else: - import tomli as tomllib - -from . 
import _, common, metadata, scanner -from .exception import BuildException, ConfigurationException, VCSException - - -@dataclass -class MessageStore: - infos: list = field(default_factory=list) - warnings: list = field(default_factory=list) - errors: list = field(default_factory=list) - - -MAVEN_URL_REGEX = re.compile( - r"""\smaven\s*(?:{.*?(?:setUrl|url)|\(\s*(?:url)?)\s*=?\s*(?:uri|URI|Uri\.create)?\(?\s*["']?([^\s"']+)["']?[^})]*[)}]""", - re.DOTALL, -) - -DEPFILE = { - "Cargo.toml": ["Cargo.lock"], - "pubspec.yaml": ["pubspec.lock"], - "package.json": ["package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lock"], -} - -SCANNER_CACHE_VERSION = 1 - -DEFAULT_CATALOG_PREFIX_REGEX = re.compile( - r'''defaultLibrariesExtensionName\s*=\s*['"](\w+)['"]''' -) -GRADLE_CATALOG_FILE_REGEX = re.compile( - r'''(?:create\()?['"]?(\w+)['"]?\)?\s*\{[^}]*from\(files\(['"]([^"]+)['"]\)\)''' -) -VERSION_CATALOG_REGEX = re.compile(r'versionCatalogs\s*\{') - -APK_SIGNING_BLOCK_IDS = { - # https://source.android.com/docs/security/features/apksigning/v2#apk-signing-block - # 0x7109871a: 'APK signature scheme v2', - # https://source.android.com/docs/security/features/apksigning/v3#apk-signing-block - # 0xf05368c0: 'APK signature scheme v3', - # See "Security metadata in early 2018" - # https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html - 0x2146444E: 'Google Play Signature aka "Frosting"', - # 0x42726577: 'Verity padding', - # 0x6DFF800D: 'Source stamp V2 X509 cert', - # JSON with some metadata, used by Chinese company Meituan - 0x71777777: 'Meituan payload', - # Dependencies metadata generated by Gradle and encrypted by Google Play. - # '...The data is compressed, encrypted by a Google Play signing key...' - # https://developer.android.com/studio/releases/gradle-plugin#dependency-metadata - 0x504B4453: 'Dependency metadata', -} - - -class ExitCode(IntEnum): - NONFREE_CODE = 1 - - -class GradleVersionCatalog: - """Parse catalog from libs.versions.toml. - - https://docs.gradle.org/current/userguide/platforms.html - """ - - def __init__(self, catalog): - self.version = { - alias: self.get_version(version) - for alias, version in catalog.get("versions", {}).items() - } - self.libraries = { - self.alias_to_accessor(alias): self.library_to_coordinate(library) - for alias, library in catalog.get("libraries", {}).items() - } - self.plugins = { - self.alias_to_accessor(alias): self.plugin_to_coordinate(plugin) - for alias, plugin in catalog.get("plugins", {}).items() - } - self.bundles = { - self.alias_to_accessor(alias): self.bundle_to_coordinates(bundle) - for alias, bundle in catalog.get("bundles", {}).items() - } - - @staticmethod - def alias_to_accessor(alias: str) -> str: - """Covert alias to accessor. - - https://docs.gradle.org/current/userguide/platforms.html#sub:mapping-aliases-to-accessors - Alias is used to define a lib in catalog. Accessor is used to access it. 
- """ - return alias.replace("-", ".").replace("_", ".") - - def get_version(self, version: Union[dict, str]) -> str: - if isinstance(version, str): - return version - ref = version.get("ref") - if ref: - return self.version.get(ref, "") - return ( - version.get("prefer", "") - or version.get("require", "") - or version.get("strictly", "") - ) - - def library_to_coordinate(self, library: Union[dict, str]) -> str: - """Generate the Gradle dependency coordinate from catalog.""" - if isinstance(library, str): - return library - module = library.get("module") - if not module: - group = library.get("group") - name = library.get("name") - if group and name: - module = f"{group}:{name}" - else: - return "" - - version = library.get("version") - if version: - return f"{module}:{self.get_version(version)}" - else: - return module - - def plugin_to_coordinate(self, plugin: Union[dict, str]) -> str: - """Generate the Gradle plugin coordinate from catalog.""" - if isinstance(plugin, str): - return plugin - id = plugin.get("id") - if not id: - return "" - - version = plugin.get("version") - if version: - return f"{id}:{self.get_version(version)}" - else: - return id - - def bundle_to_coordinates(self, bundle: list[str]) -> list[str]: - """Generate the Gradle dependency bundle coordinate from catalog.""" - coordinates = [] - for alias in bundle: - library = self.libraries.get(self.alias_to_accessor(alias)) - if library: - coordinates.append(library) - return coordinates - - def get_coordinate(self, accessor: str) -> list[str]: - """Get the Gradle coordinate from the catalog with an accessor.""" - if accessor.startswith("plugins."): - return [ - self.plugins.get(accessor[8:].removesuffix(".asLibraryDependency"), "") - ] - if accessor.startswith("bundles."): - return self.bundles.get(accessor[8:], []) - return [self.libraries.get(accessor, "")] - - -def get_catalogs(root: str) -> dict[str, GradleVersionCatalog]: - """Get all Gradle dependency catalogs from settings.gradle[.kts]. - - Returns a dict with the extension and the corresponding catalog. - The extension is used as the prefix of the accessor to access libs in the catalog. 
- """ - root = Path(root) - catalogs = {} - default_prefix = "libs" - catalog_files_m = [] - - def find_block_end(s, start): - pat = re.compile("[{}]") - depth = 1 - for m in pat.finditer(s, pos=start): - if m.group() == "{": - depth += 1 - else: - depth -= 1 - if depth == 0: - return m.start() - else: - return -1 - - groovy_file = root / "settings.gradle" - kotlin_file = root / "settings.gradle.kts" - if groovy_file.is_file(): - gradle_file = groovy_file - elif kotlin_file.is_file(): - gradle_file = kotlin_file - else: - return {} - - s = gradle_file.read_text(encoding="utf-8") - version_catalogs_m = VERSION_CATALOG_REGEX.search(s) - if version_catalogs_m: - start = version_catalogs_m.end() - end = find_block_end(s, start) - catalog_files_m = GRADLE_CATALOG_FILE_REGEX.finditer(s, start, end) - - m_default = DEFAULT_CATALOG_PREFIX_REGEX.search(s) - if m_default: - default_prefix = m_default.group(1) - default_catalog_file = Path(root) / "gradle/libs.versions.toml" - if default_catalog_file.is_file(): - with default_catalog_file.open("rb") as f: - catalogs[default_prefix] = GradleVersionCatalog(tomllib.load(f)) - for m in catalog_files_m: - catalog_file = Path(root) / m.group(2).replace("$rootDir/", "") - if catalog_file.is_file(): - with catalog_file.open("rb") as f: - catalogs[m.group(1)] = GradleVersionCatalog(tomllib.load(f)) - return catalogs +config = None +options = None def get_gradle_compile_commands(build): - compileCommands = [ - 'alias', - 'api', - 'apk', - 'classpath', - 'compile', - 'compileOnly', - 'id', - 'implementation', - 'provided', - 'runtimeOnly', - ] + compileCommands = ['compile', + 'provided', + 'apk', + 'implementation', + 'api', + 'compileOnly', + 'runtimeOnly'] buildTypes = ['', 'release'] + flavors = [''] if build.gradle and build.gradle != ['yes']: - flavors = common.calculate_gradle_flavor_combination(build.gradle) - else: - flavors = [''] + flavors += build.gradle - return [''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)] + commands = [''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)] + return [re.compile(r'\s*' + c, re.IGNORECASE) for c in commands] -def get_gradle_compile_commands_without_catalog(build): - return [ - re.compile(rf'''\s*{c}.*\s*\(?['"].*['"]''', re.IGNORECASE) - for c in get_gradle_compile_commands(build) - ] - - -def get_gradle_compile_commands_with_catalog(build, prefix): - return [ - re.compile(rf'\s*{c}.*\s*\(?{prefix}\.([a-z0-9.]+)', re.IGNORECASE) - for c in get_gradle_compile_commands(build) - ] - - -def get_embedded_classes(apkfile, depth=0): - """Get the list of Java classes embedded into all DEX files. 
- - :return: set of Java classes names as string +def scan_source(build_dir, build=metadata.Build()): + """Scan the source code in the given directory (and all subdirectories) + and return the number of fatal problems encountered """ - if depth > 10: # zipbomb protection - return {_('Max recursion depth in ZIP file reached: %s') % apkfile} - archive_regex = re.compile(r'.*\.(aab|aar|apk|apks|jar|war|xapk|zip)$') - class_regex = re.compile(r'classes.*\.dex') - classes = set() - - try: - with TemporaryDirectory() as tmp_dir, zipfile.ZipFile(apkfile, 'r') as apk_zip: - for info in apk_zip.infolist(): - # apk files can contain apk files, again - with apk_zip.open(info) as apk_fp: - if zipfile.is_zipfile(apk_fp): - classes = classes.union(get_embedded_classes(apk_fp, depth + 1)) - if not archive_regex.search(info.filename): - classes.add( - 'ZIP file without proper file extension: %s' - % info.filename - ) - continue - - with apk_zip.open(info.filename) as fp: - file_magic = fp.read(3) - if file_magic == b'dex': - if not class_regex.search(info.filename): - classes.add('DEX file with fake name: %s' % info.filename) - apk_zip.extract(info, tmp_dir) - run = common.SdkToolsPopen( - ["dexdump", '{}/{}'.format(tmp_dir, info.filename)], - output=False, - ) - classes = classes.union( - set(re.findall(r'[A-Z]+((?:\w+\/)+\w+)', run.output)) - ) - except zipfile.BadZipFile as ex: - return {_('Problem with ZIP file: %s, error %s') % (apkfile, ex)} - - return classes - - -def _datetime_now(): - """Get datetime.now(), using this funciton allows mocking it for testing.""" - return datetime.now(timezone.utc) - - -def _scanner_cachedir(): - """Get `Path` to fdroidserver cache dir.""" - cfg = common.get_config() - if not cfg: - raise ConfigurationException('config not initialized') - if "cachedir_scanner" not in cfg: - raise ConfigurationException("could not load 'cachedir_scanner' from config") - cachedir = Path(cfg["cachedir_scanner"]) - cachedir.mkdir(exist_ok=True, parents=True) - return cachedir - - -class SignatureDataMalformedException(Exception): - pass - - -class SignatureDataOutdatedException(Exception): - pass - - -class SignatureDataCacheMissException(Exception): - pass - - -class SignatureDataNoDefaultsException(Exception): - pass - - -class SignatureDataVersionMismatchException(Exception): - pass - - -class SignatureDataController: - def __init__(self, name, filename, url): - self.name = name - self.filename = filename - self.url = url - # by default we assume cache is valid indefinitely - self.cache_duration = timedelta(days=999999) - self.data = {} - - def check_data_version(self): - if self.data.get("version") != SCANNER_CACHE_VERSION: - raise SignatureDataVersionMismatchException() - - def check_last_updated(self): - """Check if the last_updated value is ok and raise an exception if expired or inaccessible. 
- - :raises SignatureDataMalformedException: when timestamp value is - inaccessible or not parse-able - :raises SignatureDataOutdatedException: when timestamp is older then - `self.cache_duration` - """ - last_updated = self.data.get("last_updated", None) - if last_updated: - try: - last_updated = datetime.fromtimestamp(last_updated, timezone.utc) - except ValueError as e: - raise SignatureDataMalformedException() from e - except TypeError as e: - raise SignatureDataMalformedException() from e - delta = (last_updated + self.cache_duration) - scanner._datetime_now() - if delta > timedelta(seconds=0): - logging.debug( - _('next {name} cache update due in {time}').format( - name=self.filename, time=delta - ) - ) - else: - raise SignatureDataOutdatedException() - - def fetch(self): - try: - self.fetch_signatures_from_web() - self.write_to_cache() - except Exception as e: - raise Exception( - _("downloading scanner signatures from '{}' failed").format(self.url) - ) from e - - def load(self): - try: - try: - self.load_from_cache() - self.verify_data() - self.check_last_updated() - except SignatureDataCacheMissException: - self.load_from_defaults() - except (SignatureDataOutdatedException, SignatureDataNoDefaultsException): - self.fetch_signatures_from_web() - self.write_to_cache() - except ( - SignatureDataMalformedException, - SignatureDataVersionMismatchException, - ) as e: - logging.critical( - _( - "scanner cache is malformed! You can clear it with: '{clear}'" - ).format( - clear='rm -r {}'.format(common.get_config()['cachedir_scanner']) - ) - ) - raise e - - def load_from_defaults(self): - raise SignatureDataNoDefaultsException() - - def load_from_cache(self): - sig_file = scanner._scanner_cachedir() / self.filename - if not sig_file.exists(): - raise SignatureDataCacheMissException() - with open(sig_file) as f: - self.set_data(json.load(f)) - - def write_to_cache(self): - sig_file = scanner._scanner_cachedir() / self.filename - with open(sig_file, "w", encoding="utf-8") as f: - json.dump(self.data, f, indent=2) - logging.debug("write '{}' to cache".format(self.filename)) - - def verify_data(self): - """Clean and validate `self.data`. - - Right now this function does just a basic key sanitation. 
- """ - self.check_data_version() - valid_keys = [ - 'timestamp', - 'last_updated', - 'version', - 'signatures', - 'cache_duration', - ] - - for k in list(self.data.keys()): - if k not in valid_keys: - del self.data[k] - - def set_data(self, new_data): - self.data = new_data - if 'cache_duration' in new_data: - self.cache_duration = timedelta(seconds=new_data['cache_duration']) - - def fetch_signatures_from_web(self): - if not self.url.startswith("https://"): - raise Exception(_("can't open non-https url: '{};".format(self.url))) - logging.debug(_("downloading '{}'").format(self.url)) - with urllib.request.urlopen(self.url) as f: # nosec B310 scheme filtered above - self.set_data(json.load(f)) - self.data['last_updated'] = scanner._datetime_now().timestamp() - - -class ExodusSignatureDataController(SignatureDataController): - def __init__(self): - super().__init__( - 'Exodus signatures', - 'exodus.json', - 'https://reports.exodus-privacy.eu.org/api/trackers', - ) - self.cache_duration = timedelta(days=1) # refresh exodus cache after one day - self.has_trackers_json_key = True - - def fetch_signatures_from_web(self): - logging.debug(_("downloading '{}'").format(self.url)) - - data = { - "signatures": {}, - "timestamp": scanner._datetime_now().timestamp(), - "last_updated": scanner._datetime_now().timestamp(), - "version": SCANNER_CACHE_VERSION, - } - - if not self.url.startswith("https://"): - raise Exception(_("can't open non-https url: '{};".format(self.url))) - with urllib.request.urlopen(self.url) as f: # nosec B310 scheme filtered above - trackerlist = json.load(f) - if self.has_trackers_json_key: - trackerlist = trackerlist["trackers"].values() - for tracker in trackerlist: - if tracker.get('code_signature'): - data["signatures"][tracker["name"]] = { - "name": tracker["name"], - "warn_code_signatures": [tracker["code_signature"]], - # exodus also provides network signatures, unused atm. - # "network_signatures": [tracker["network_signature"]], - "AntiFeatures": ["Tracking"], # TODO - "license": "NonFree", # We assume all trackers in exodus - # are non-free, although free - # trackers like piwik, acra, - # etc. might be listed by exodus - # too. 
- } - self.set_data(data) - - -class EtipSignatureDataController(ExodusSignatureDataController): - def __init__(self): - super().__init__() - self.name = 'ETIP signatures' - self.filename = 'etip.json' - self.url = 'https://etip.exodus-privacy.eu.org/api/trackers/?format=json' - self.has_trackers_json_key = False - - -class SUSSDataController(SignatureDataController): - def __init__(self): - super().__init__( - 'SUSS', 'suss.json', 'https://fdroid.gitlab.io/fdroid-suss/suss.json' - ) - - def load_from_defaults(self): - self.set_data(json.loads(SUSS_DEFAULT)) - - -class ScannerTool: - refresh_allowed = True - - def __init__(self): - # we could add support for loading additional signature source - # definitions from config.yml here - - self.scanner_data_lookup() - - options = common.get_options() - options_refresh_scanner = ( - hasattr(options, "refresh_scanner") - and options.refresh_scanner - and ScannerTool.refresh_allowed - ) - if options_refresh_scanner or common.get_config().get('refresh_scanner'): - self.refresh() - - self.load() - self.compile_regexes() - - def scanner_data_lookup(self): - sigsources = common.get_config().get('scanner_signature_sources', []) - logging.debug( - "scanner is configured to use signature data from: '{}'".format( - "', '".join(sigsources) - ) - ) - self.sdcs = [] - for i, source_url in enumerate(sigsources): - if source_url.lower() == 'suss': - self.sdcs.append(SUSSDataController()) - elif source_url.lower() == 'exodus': - self.sdcs.append(ExodusSignatureDataController()) - elif source_url.lower() == 'etip': - self.sdcs.append(EtipSignatureDataController()) - else: - u = urllib.parse.urlparse(source_url) - if u.scheme != 'https' or u.path == "": - raise ConfigurationException( - "Invalid 'scanner_signature_sources' configuration: '{}'. " - "Has to be a valid HTTPS-URL or match a predefined " - "constants: 'suss', 'exodus'".format(source_url) - ) - self.sdcs.append( - SignatureDataController( - source_url, - '{}_{}'.format(i, os.path.basename(u.path)), - source_url, - ) - ) - - def load(self): - for sdc in self.sdcs: - sdc.load() - - def compile_regexes(self): - self.regexs = { - 'err_code_signatures': {}, - 'err_gradle_signatures': {}, - 'warn_code_signatures': {}, - 'warn_gradle_signatures': {}, - } - for sdc in self.sdcs: - for signame, sigdef in sdc.data.get('signatures', {}).items(): - for sig in sigdef.get('code_signatures', []): - self.regexs['err_code_signatures'][sig] = re.compile( - '.*' + sig, re.IGNORECASE - ) - for sig in sigdef.get('gradle_signatures', []): - self.regexs['err_gradle_signatures'][sig] = re.compile( - '.*' + sig, re.IGNORECASE - ) - for sig in sigdef.get('warn_code_signatures', []): - self.regexs['warn_code_signatures'][sig] = re.compile( - '.*' + sig, re.IGNORECASE - ) - for sig in sigdef.get('warn_gradle_signatures', []): - self.regexs['warn_gradle_signatures'][sig] = re.compile( - '.*' + sig, re.IGNORECASE - ) - - def refresh(self): - for sdc in self.sdcs: - sdc.fetch_signatures_from_web() - sdc.write_to_cache() - - def add(self, new_controller: SignatureDataController): - self.sdcs.append(new_controller) - self.compile_regexes() - - -# TODO: change this from singleton instance to dependency injection -# use `_get_tool()` instead of accessing this directly -_SCANNER_TOOL = None - - -def _get_tool(): - """Lazy loading function for getting a ScannerTool instance. - - ScannerTool initialization need to access `common.config` values. Those are only available after initialization through `common.read_config()`. 
So this factory assumes config was called at an erlier point in time. - """ - if not scanner._SCANNER_TOOL: - scanner._SCANNER_TOOL = ScannerTool() - return scanner._SCANNER_TOOL - - -def scan_binary(apkfile): - """Scan output of dexdump for known non-free classes.""" - logging.info(_('Scanning APK with dexdump for known non-free classes.')) - result = get_embedded_classes(apkfile) - problems, warnings = 0, 0 - for classname in result: - for suspect, regexp in _get_tool().regexs['warn_code_signatures'].items(): - if regexp.match(classname): - logging.debug("Warning: found class '%s'" % classname) - warnings += 1 - for suspect, regexp in _get_tool().regexs['err_code_signatures'].items(): - if regexp.match(classname): - logging.debug("Problem: found class '%s'" % classname) - problems += 1 - - logging.info(_('Scanning APK for extra signing blocks.')) - a = common.get_androguard_APK(str(apkfile)) - a.parse_v2_v3_signature() - for b in a._v2_blocks: - if b in APK_SIGNING_BLOCK_IDS: - logging.debug( - f"Problem: found extra signing block '{APK_SIGNING_BLOCK_IDS[b]}'" - ) - problems += 1 - - if warnings: - logging.warning( - _("Found {count} warnings in {filename}").format( - count=warnings, filename=apkfile - ) - ) - if problems: - logging.critical( - _("Found {count} problems in {filename}").format( - count=problems, filename=apkfile - ) - ) - return problems - - -def scan_source(build_dir, build=metadata.Build(), json_per_build=None): - """Scan the source code in the given directory (and all subdirectories). - - Returns - ------- - the number of fatal problems encountered. - - """ count = 0 - if not json_per_build: - json_per_build = MessageStore() + # Common known non-free blobs (always lower case): + usual_suspects = { + exp: re.compile(r'.*' + exp, re.IGNORECASE) for exp in [ + r'flurryagent', + r'paypal.*mpl', + r'google.*analytics', + r'admob.*sdk.*android', + r'google.*ad.*view', + r'google.*admob', + r'google.*play.*services', + r'crittercism', + r'heyzap', + r'jpct.*ae', + r'youtube.*android.*player.*api', + r'bugsense', + r'crashlytics', + r'ouya.*sdk', + r'libspen23', + r'firebase', + r'''["']com.facebook.android['":]''', + ] + } + + whitelisted = [ + 'firebase-jobdispatcher', # https://github.com/firebase/firebase-jobdispatcher-android/blob/master/LICENSE + 'com.firebaseui', # https://github.com/firebase/FirebaseUI-Android/blob/master/LICENSE + 'geofire-android' # https://github.com/firebase/geofire-java/blob/master/LICENSE + ] + + def is_whitelisted(s): + return any(wl in s for wl in whitelisted) def suspects_found(s): - for n, r in _get_tool().regexs['err_gradle_signatures'].items(): - if r.match(s): + for n, r in usual_suspects.items(): + if r.match(s) and not is_whitelisted(s): yield n - allowed_repos = [ - re.compile(r'^https://' + re.escape(repo) + r'/*') - for repo in [ - 'repo1.maven.org/maven2', # mavenCentral() - 'jitpack.io', - 'www.jitpack.io', - 'repo.maven.apache.org/maven2', - 'oss.jfrog.org/artifactory/oss-snapshot-local', - 'central.sonatype.com/repository/maven-snapshots', - 'oss.sonatype.org/content/repositories/snapshots', - 'oss.sonatype.org/content/repositories/releases', - 'oss.sonatype.org/content/groups/public', - 'oss.sonatype.org/service/local/staging/deploy/maven2', - 's01.oss.sonatype.org/content/repositories/snapshots', - 's01.oss.sonatype.org/content/repositories/releases', - 's01.oss.sonatype.org/content/groups/public', - 's01.oss.sonatype.org/service/local/staging/deploy/maven2', - 'clojars.org/repo', # Clojure free software libs - 
'repo.clojars.org', # Clojure free software libs - 's3.amazonaws.com/repo.commonsware.com', # CommonsWare - 'plugins.gradle.org/m2', # Gradle plugin repo - 'maven.google.com', # google() - ] - ] + [ - re.compile(r'^file://' + re.escape(repo) + r'/*') - for repo in [ - '/usr/share/maven-repo', # local repo on Debian installs + gradle_mavenrepo = re.compile(r'maven *{ *(url)? *[\'"]?([^ \'"]*)[\'"]?') + + allowed_repos = [re.compile(r'^https?://' + re.escape(repo) + r'/*') for repo in [ + 'repo1.maven.org/maven2', # mavenCentral() + 'jcenter.bintray.com', # jcenter() + 'jitpack.io', + 'www.jitpack.io', + 'repo.maven.apache.org/maven2', + 'oss.jfrog.org/artifactory/oss-snapshot-local', + 'oss.sonatype.org/content/repositories/snapshots', + 'oss.sonatype.org/content/repositories/releases', + 'oss.sonatype.org/content/groups/public', + 'clojars.org/repo', # Clojure free software libs + 's3.amazonaws.com/repo.commonsware.com', # CommonsWare + 'plugins.gradle.org/m2', # Gradle plugin repo + 'maven.google.com', # Google Maven Repo, https://developer.android.com/studio/build/dependencies.html#google-maven + 'file:///usr/share/maven-repo', # local repo on Debian installs ] ] - scanignore, scanignore_not_found_paths = common.getpaths_map( - build_dir, build.scanignore - ) - scandelete, scandelete_not_found_paths = common.getpaths_map( - build_dir, build.scandelete - ) + scanignore = common.getpaths_map(build_dir, build.scanignore) + scandelete = common.getpaths_map(build_dir, build.scandelete) scanignore_worked = set() scandelete_worked = set() @@ -773,117 +135,32 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None): return True return False - def ignoreproblem(what, path_in_build_dir, json_per_build): - """No summary. - - Parameters - ---------- - what: string - describing the problem, will be printed in log messages - path_in_build_dir - path to the file relative to `build`-dir - - Returns - ------- - 0 as we explicitly ignore the file, so don't count an error - - """ - msg = 'Ignoring %s at %s' % (what, path_in_build_dir) - logging.info(msg) - if json_per_build is not None: - json_per_build.infos.append([msg, path_in_build_dir]) + def ignoreproblem(what, path_in_build_dir): + logging.info('Ignoring %s at %s' % (what, path_in_build_dir)) return 0 - def removeproblem(what, path_in_build_dir, filepath, json_per_build): - """No summary. - - Parameters - ---------- - what: string - describing the problem, will be printed in log messages - path_in_build_dir - path to the file relative to `build`-dir - filepath - Path (relative to our current path) to the file - - Returns - ------- - 0 as we deleted the offending file - - """ - msg = 'Removing %s at %s' % (what, path_in_build_dir) - logging.info(msg) - if json_per_build is not None: - json_per_build.infos.append([msg, path_in_build_dir]) - try: - os.remove(filepath) - except FileNotFoundError: - # File is already gone, nothing to do. - # This can happen if we find multiple problems in one file that is setup for scandelete - # I.e. build.gradle files containig multiple unknown maven repos. - pass + def removeproblem(what, path_in_build_dir, filepath): + logging.info('Removing %s at %s' % (what, path_in_build_dir)) + os.remove(filepath) return 0 - def warnproblem(what, path_in_build_dir, json_per_build): - """No summary. 
- - Parameters - ---------- - what: string - describing the problem, will be printed in log messages - path_in_build_dir - path to the file relative to `build`-dir - - Returns - ------- - 0, as warnings don't count as errors - - """ + def warnproblem(what, path_in_build_dir): if toignore(path_in_build_dir): - return 0 - logging.warning('Found %s at %s' % (what, path_in_build_dir)) - if json_per_build is not None: - json_per_build.warnings.append([what, path_in_build_dir]) - return 0 + return + logging.warn('Found %s at %s' % (what, path_in_build_dir)) - def handleproblem(what, path_in_build_dir, filepath, json_per_build): - """Dispatches to problem handlers (ignore, delete, warn). - - Or returns 1 for increasing the error count. - - Parameters - ---------- - what: string - describing the problem, will be printed in log messages - path_in_build_dir - path to the file relative to `build`-dir - filepath - Path (relative to our current path) to the file - - Returns - ------- - 0 if the problem was ignored/deleted/is only a warning, 1 otherwise - - """ - options = common.get_options() + def handleproblem(what, path_in_build_dir, filepath): if toignore(path_in_build_dir): - return ignoreproblem(what, path_in_build_dir, json_per_build) + return ignoreproblem(what, path_in_build_dir) if todelete(path_in_build_dir): - return removeproblem(what, path_in_build_dir, filepath, json_per_build) - if 'src/test' in path_in_build_dir or '/test/' in path_in_build_dir: - return warnproblem(what, path_in_build_dir, json_per_build) - if options and 'json' in vars(options) and options.json: - json_per_build.errors.append([what, path_in_build_dir]) - if options and ( - options.verbose or not ('json' in vars(options) and options.json) - ): - logging.error('Found %s at %s' % (what, path_in_build_dir)) + return removeproblem(what, path_in_build_dir, filepath) + logging.error('Found %s at %s' % (what, path_in_build_dir)) return 1 def is_executable(path): return os.path.exists(path) and os.access(path, os.X_OK) - textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) # fmt: skip + textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) def is_binary(path): d = None @@ -892,54 +169,33 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None): return bool(d.translate(None, textchars)) # False positives patterns for files that are binary and executable. 
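(For context, a standalone sketch of the is_binary() heuristic shown above; the sample size is an assumption for illustration, not taken from this patch. A file is treated as binary if any byte survives after stripping the usual text bytes from a sample of its contents.)

# Text bytes: BEL, BS, TAB, LF, FF, CR, ESC plus the printable range, minus DEL.
TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})

def looks_binary(path):
    with open(path, 'rb') as f:
        sample = f.read(1024)  # sample size assumed for illustration
    # Anything left after deleting the text bytes suggests binary content.
    return bool(sample.translate(None, TEXTCHARS))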
- safe_paths = [ - re.compile(r) - for r in [ - r".*/drawable[^/]*/.*\.png$", # png drawables - r".*/mipmap[^/]*/.*\.png$", # png mipmaps + safe_paths = [re.compile(r) for r in [ + r".*/drawable[^/]*/.*\.png$", # png drawables + r".*/mipmap[^/]*/.*\.png$", # png mipmaps ] ] - def is_image_file(path): - try: - mimetype = magic.from_file(path, mime=True) - if mimetype and mimetype.startswith('image/'): - return True - except Exception as e: - logging.info(e) - - def safe_path(path_in_build_dir): + def safe_path(path): for sp in safe_paths: - if sp.match(path_in_build_dir): + if sp.match(path): return True return False - def is_used_by_gradle_without_catalog(line): - return any( - command.match(line) - for command in get_gradle_compile_commands_without_catalog(build) - ) + gradle_compile_commands = get_gradle_compile_commands(build) - def is_used_by_gradle_with_catalog(line, prefix): - for m in ( - command.match(line) - for command in get_gradle_compile_commands_with_catalog(build, prefix) - ): - if m: - return m + def is_used_by_gradle(line): + return any(command.match(line) for command in gradle_compile_commands) - all_catalogs = {} # Iterate through all files in the source code for root, dirs, files in os.walk(build_dir, topdown=True): + # It's topdown, so checking the basename is enough for ignoredir in ('.hg', '.git', '.svn', '.bzr'): if ignoredir in dirs: dirs.remove(ignoredir) - if "settings.gradle" in files or "settings.gradle.kts" in files: - all_catalogs[str(root)] = get_catalogs(root) - for curfile in files: + if curfile in ['.DS_Store']: continue @@ -950,261 +206,94 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None): continue path_in_build_dir = os.path.relpath(filepath, build_dir) + _ignored, ext = common.get_extension(path_in_build_dir) - if curfile in ('gradle-wrapper.jar', 'gradlew', 'gradlew.bat'): - removeproblem(curfile, path_in_build_dir, filepath, json_per_build) - elif curfile.endswith('.apk'): - removeproblem( - _('Android APK file'), path_in_build_dir, filepath, json_per_build - ) + if ext == 'so': + count += handleproblem('shared library', path_in_build_dir, filepath) + elif ext == 'a': + count += handleproblem('static library', path_in_build_dir, filepath) + elif ext == 'class': + count += handleproblem('Java compiled class', path_in_build_dir, filepath) + elif ext == 'apk': + removeproblem('APK file', path_in_build_dir, filepath) - elif curfile.endswith('.a'): - count += handleproblem( - _('static library'), path_in_build_dir, filepath, json_per_build - ) - elif curfile.endswith('.aar'): - count += handleproblem( - _('Android AAR library'), - path_in_build_dir, - filepath, - json_per_build, - ) - elif curfile.endswith('.class'): - count += handleproblem( - _('Java compiled class'), - path_in_build_dir, - filepath, - json_per_build, - ) - elif curfile.endswith('.dex'): - count += handleproblem( - _('Android DEX code'), path_in_build_dir, filepath, json_per_build - ) - elif curfile.endswith('.gz') or curfile.endswith('.tgz'): - count += handleproblem( - _('gzip file archive'), path_in_build_dir, filepath, json_per_build - ) - # We use a regular expression here to also match versioned shared objects like .so.0.0.0 - elif re.match(r'.*\.so(\..+)*$', curfile): - count += handleproblem( - _('shared library'), path_in_build_dir, filepath, json_per_build - ) - elif curfile.endswith('.zip'): - count += handleproblem( - _('ZIP file archive'), path_in_build_dir, filepath, json_per_build - ) - elif curfile.endswith('.jar'): + elif ext == 'jar': for name 
in suspects_found(curfile): - count += handleproblem( - 'usual suspect \'%s\'' % name, - path_in_build_dir, - filepath, - json_per_build, - ) - count += handleproblem( - _('Java JAR file'), path_in_build_dir, filepath, json_per_build - ) - elif curfile.endswith('.wasm'): - count += handleproblem( - _('WebAssembly binary file'), - path_in_build_dir, - filepath, - json_per_build, - ) + count += handleproblem('usual suspect \'%s\'' % name, path_in_build_dir, filepath) + if curfile == 'gradle-wrapper.jar': + removeproblem('gradle-wrapper.jar', path_in_build_dir, filepath) + else: + warnproblem('JAR file', path_in_build_dir) - elif curfile.endswith('.java'): + elif ext == 'aar': + warnproblem('AAR file', path_in_build_dir) + + elif ext == 'java': if not os.path.isfile(filepath): continue with open(filepath, 'r', errors='replace') as f: for line in f: if 'DexClassLoader' in line: - count += handleproblem( - 'DexClassLoader', - path_in_build_dir, - filepath, - json_per_build, - ) + count += handleproblem('DexClassLoader', path_in_build_dir, filepath) break - elif curfile.endswith('.gradle') or curfile.endswith('.gradle.kts'): - catalog_path = str(build_dir) - # Find the longest path of dir that the curfile is in - for p in all_catalogs: - if os.path.commonpath([root, p]) == p: - catalog_path = p - catalogs = all_catalogs.get(catalog_path, {}) - + elif ext == 'gradle': if not os.path.isfile(filepath): continue with open(filepath, 'r', errors='replace') as f: lines = f.readlines() for i, line in enumerate(lines): - if is_used_by_gradle_without_catalog(line): + if is_used_by_gradle(line): for name in suspects_found(line): - count += handleproblem( - f"usual suspect '{name}'", - path_in_build_dir, - filepath, - json_per_build, - ) - for prefix, catalog in catalogs.items(): - m = is_used_by_gradle_with_catalog(line, prefix) - if not m: - continue - accessor = m[1] - coordinates = catalog.get_coordinate(accessor) - for coordinate in coordinates: - for name in suspects_found(coordinate): - count += handleproblem( - f"usual suspect '{prefix}.{accessor}: {name}'", - path_in_build_dir, - filepath, - json_per_build, - ) - noncomment_lines = [ - line for line in lines if not common.gradle_comment.match(line) - ] - no_comments = re.sub( - r'/\*.*?\*/', '', ''.join(noncomment_lines), flags=re.DOTALL - ) - for url in MAVEN_URL_REGEX.findall(no_comments): + count += handleproblem('usual suspect \'%s\' at line %d' % (name, i + 1), path_in_build_dir, filepath) + noncomment_lines = [l for l in lines if not common.gradle_comment.match(l)] + joined = re.sub(r'[\n\r\s]+', ' ', ' '.join(noncomment_lines)) + for m in gradle_mavenrepo.finditer(joined): + url = m.group(2) if not any(r.match(url) for r in allowed_repos): - count += handleproblem( - 'unknown maven repo \'%s\'' % url, - path_in_build_dir, - filepath, - json_per_build, - ) + count += handleproblem('unknown maven repo \'%s\'' % url, path_in_build_dir, filepath) - elif os.path.splitext(path_in_build_dir)[1] in ['', '.bin', '.out', '.exe']: + elif ext in ['', 'bin', 'out', 'exe']: if is_binary(filepath): - count += handleproblem( - 'binary', path_in_build_dir, filepath, json_per_build - ) - - elif curfile in DEPFILE: - d = root - while d.startswith(str(build_dir)): - for lockfile in DEPFILE[curfile]: - if os.path.isfile(os.path.join(d, lockfile)): - break - else: - d = os.path.dirname(d) - continue - break - else: - count += handleproblem( - _('dependency file without lock'), - path_in_build_dir, - filepath, - json_per_build, - ) + count += 
handleproblem('binary', path_in_build_dir, filepath) elif is_executable(filepath): - if is_binary(filepath) and not ( - safe_path(path_in_build_dir) or is_image_file(filepath) - ): - warnproblem( - _('executable binary, possibly code'), - path_in_build_dir, - json_per_build, - ) - - for p in scanignore_not_found_paths: - logging.error(_("Non-exist scanignore path: %s") % p) - count += 1 + if is_binary(filepath) and not safe_path(path_in_build_dir): + warnproblem('possible binary', path_in_build_dir) for p in scanignore: if p not in scanignore_worked: - logging.error(_('Unused scanignore path: %s') % p) + logging.error('Unused scanignore path: %s' % p) count += 1 - for p in scandelete_not_found_paths: - logging.error(_("Non-exist scandelete path: %s") % p) - count += 1 - for p in scandelete: if p not in scandelete_worked: - logging.error(_('Unused scandelete path: %s') % p) + logging.error('Unused scandelete path: %s' % p) count += 1 return count def main(): - parser = ArgumentParser( - usage="%(prog)s [options] [(APPID[:VERCODE] | path/to.apk) ...]" - ) + + global config, options + + # Parse command line... + parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]") common.setup_global_opts(parser) - parser.add_argument( - "appid", - nargs='*', - help=_("application ID with optional versionCode in the form APPID[:VERCODE]"), - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - help=_("Force scan of disabled apps and builds."), - ) - parser.add_argument( - "--json", action="store_true", default=False, help=_("Output JSON to stdout.") - ) - parser.add_argument( - "-r", - "--refresh", - dest="refresh_scanner", - action="store_true", - default=False, - help=_("fetch the latest version of signatures from the web"), - ) - parser.add_argument( - "-e", - "--exit-code", - action="store_true", - default=False, - help=_("Exit with a non-zero code if problems were found"), - ) + parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]")) metadata.add_metadata_arguments(parser) - options = common.parse_args(parser) + options = parser.parse_args() metadata.warnings_action = options.W - json_output = dict() - if options.json: - if options.verbose: - logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) - else: - logging.getLogger().setLevel(logging.ERROR) + config = common.read_config(options) - # initialize/load configuration values - common.get_config() + # Read all app and srclib metadata + allapps = metadata.read_metadata() + apps = common.read_app_args(options.appid, allapps, True) probcount = 0 - appids = [] - for apk in options.appid: - if os.path.isfile(apk): - count = scanner.scan_binary(apk) - if count > 0: - logging.warning( - _('Scanner found {count} problems in {apk}').format( - count=count, apk=apk - ) - ) - probcount += count - else: - appids.append(apk) - - if not appids: - if options.exit_code and probcount > 0: - sys.exit(ExitCode.NONFREE_CODE) - if options.refresh_scanner: - _get_tool() - return - - apps = common.read_app_args(appids, allow_version_codes=True) - build_dir = 'build' if not os.path.isdir(build_dir): logging.info("Creating build directory") @@ -1213,13 +302,9 @@ def main(): extlib_dir = os.path.join(build_dir, 'extlib') for appid, app in apps.items(): - json_per_appid = dict() - if app.Disabled and not options.force: + if app.Disabled: logging.info(_("Skipping {appid}: disabled").format(appid=appid)) - json_per_appid['disabled'] = 
MessageStore().infos.append( - 'Skipping: disabled' - ) continue try: @@ -1228,1776 +313,55 @@ def main(): else: build_dir = os.path.join('build', appid) - if app.get('Builds'): + if app.builds: logging.info(_("Processing {appid}").format(appid=appid)) - # Set up vcs interface and make sure we have the latest code... - vcs = common.getvcs(app.RepoType, app.Repo, build_dir) else: - logging.info( - _( - "{appid}: no builds specified, running on current source state" - ).format(appid=appid) - ) - json_per_build = MessageStore() - json_per_appid['current-source-state'] = json_per_build - count = scan_source(build_dir, json_per_build=json_per_build) + logging.info(_("{appid}: no builds specified, running on current source state") + .format(appid=appid)) + count = scan_source(build_dir) if count > 0: - logging.warning( - _('Scanner found {count} problems in {appid}:').format( - count=count, appid=appid - ) - ) + logging.warn(_('Scanner found {count} problems in {appid}:') + .format(count=count, appid=appid)) probcount += count - app['Builds'] = [] + continue - for build in app.get('Builds', []): - json_per_build = MessageStore() - json_per_appid[build.versionCode] = json_per_build + # Set up vcs interface and make sure we have the latest code... + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) - if build.disable and not options.force: - logging.info( - "...skipping version %s - %s" - % (build.versionName, build.get('disable', build.commit[1:])) - ) + for build in app.builds: + + if build.disable: + logging.info("...skipping version %s - %s" % ( + build.versionName, build.get('disable', build.commit[1:]))) continue logging.info("...scanning version " + build.versionName) # Prepare the source code... - common.prepare_source( - vcs, app, build, build_dir, srclib_dir, extlib_dir, False - ) + common.prepare_source(vcs, app, build, + build_dir, srclib_dir, + extlib_dir, False) - count = scan_source(build_dir, build, json_per_build=json_per_build) + count = scan_source(build_dir, build) if count > 0: - logging.warning( - _( - 'Scanner found {count} problems in {appid}:{versionCode}:' - ).format( - count=count, appid=appid, versionCode=build.versionCode - ) - ) + logging.warn(_('Scanner found {count} problems in {appid}:{versionCode}:') + .format(count=count, appid=appid, versionCode=build.versionCode)) probcount += count except BuildException as be: - logging.warning( - 'Could not scan app %s due to BuildException: %s' % (appid, be) - ) + logging.warn("Could not scan app %s due to BuildException: %s" % ( + appid, be)) probcount += 1 except VCSException as vcse: - logging.warning('VCS error while scanning app %s: %s' % (appid, vcse)) + logging.warn("VCS error while scanning app %s: %s" % (appid, vcse)) probcount += 1 except Exception: - logging.warning( - 'Could not scan app %s due to unknown error: %s' - % (appid, traceback.format_exc()) - ) + logging.warn("Could not scan app %s due to unknown error: %s" % ( + appid, traceback.format_exc())) probcount += 1 - for k, v in json_per_appid.items(): - if len(v.errors) or len(v.warnings) or len(v.infos): - json_output[appid] = { - k: dict((field.name, getattr(v, field.name)) for field in fields(v)) - for k, v in json_per_appid.items() - } - break - logging.info(_("Finished")) - if options.json: - print(json.dumps(json_output)) - elif probcount or options.verbose: - print(_("%d problems found") % probcount) + print(_("%d problems found") % probcount) if __name__ == "__main__": main() - - -SUSS_DEFAULT = r'''{ - "cache_duration": 86400, - "signatures": 
{ - "com.amazon.device.ads": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/amazon/device/ads" - ], - "description": "an interface for views used to retrieve and display Amazon ads.", - "license": "NonFree" - }, - "com.amazon.device.associates": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/amazon/device/associates" - ], - "description": "library for Amazon\u2019s affiliate marketing program.", - "license": "NonFree" - }, - "com.amazon.device.iap": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/amazon/device/iap" - ], - "description": "allows an app to present, process, and fulfill purchases of digital content and subscriptions within your app.", - "license": "NonFree" - }, - "com.amazonaws": { - "code_signatures": [ - "com/amazonaws/AbortedException", - "com/amazonaws/AmazonClientException", - "com/amazonaws/AmazonServiceException$ErrorType", - "com/amazonaws/AmazonServiceException", - "com/amazonaws/AmazonWebServiceClient", - "com/amazonaws/AmazonWebServiceRequest", - "com/amazonaws/AmazonWebServiceResponse", - "com/amazonaws/async", - "com/amazonaws/auth", - "com/amazonaws/ClientConfiguration", - "com/amazonaws/cognito", - "com/amazonaws/DefaultRequest", - "com/amazonaws/event", - "com/amazonaws/handlers", - "com/amazonaws/http", - "com/amazonaws/HttpMethod", - "com/amazonaws/internal", - "com/amazonaws/logging", - "com/amazonaws/metrics", - "com/amazonaws/mobile", - "com/amazonaws/mobileconnectors", - "com/amazonaws/Protocol", - "com/amazonaws/regions", - "com/amazonaws/RequestClientOptions$Marker", - "com/amazonaws/RequestClientOptions", - "com/amazonaws/Request", - "com/amazonaws/ResponseMetadata", - "com/amazonaws/Response", - "com/amazonaws/retry", - "com/amazonaws/SDKGlobalConfiguration", - "com/amazonaws/ServiceNameFactory", - "com/amazonaws/services", - "com/amazonaws/transform", - "com/amazonaws/util" - ], - "gradle_signatures": [ - "com.amazonaws:amazon-kinesis-aggregator", - "com.amazonaws:amazon-kinesis-connectors", - "com.amazonaws:amazon-kinesis-deaggregator", - "com.amazonaws:aws-android-sdk-apigateway-core", - "com.amazonaws:aws-android-sdk-auth-core", - "com.amazonaws:aws-android-sdk-auth-facebook", - "com.amazonaws:aws-android-sdk-auth-google", - "com.amazonaws:aws-android-sdk-auth-ui", - "com.amazonaws:aws-android-sdk-auth-userpools", - "com.amazonaws:aws-android-sdk-cognito", - "com.amazonaws:aws-android-sdk-cognitoauth", - "com.amazonaws:aws-android-sdk-cognitoidentityprovider-asf", - "com.amazonaws:aws-android-sdk-comprehend", - "com.amazonaws:aws-android-sdk-core", - "com.amazonaws:aws-android-sdk-ddb", - "com.amazonaws:aws-android-sdk-ddb-document", - "com.amazonaws:aws-android-sdk-iot", - "com.amazonaws:aws-android-sdk-kinesis", - "com.amazonaws:aws-android-sdk-kinesisvideo", - "com.amazonaws:aws-android-sdk-kinesisvideo-archivedmedia", - "com.amazonaws:aws-android-sdk-kms", - "com.amazonaws:aws-android-sdk-lambda", - "com.amazonaws:aws-android-sdk-lex", - "com.amazonaws:aws-android-sdk-location", - "com.amazonaws:aws-android-sdk-logs", - "com.amazonaws:aws-android-sdk-mobileanalytics", - "com.amazonaws:aws-android-sdk-mobile-client", - "com.amazonaws:aws-android-sdk-pinpoint", - "com.amazonaws:aws-android-sdk-polly", - "com.amazonaws:aws-android-sdk-rekognition", - "com.amazonaws:aws-android-sdk-s3", - "com.amazonaws:aws-android-sdk-ses", - "com.amazonaws:aws-android-sdk-sns", - "com.amazonaws:aws-android-sdk-sqs", - 
"com.amazonaws:aws-android-sdk-textract", - "com.amazonaws:aws-android-sdk-transcribe", - "com.amazonaws:aws-android-sdk-translate", - "com.amazonaws:dynamodb-key-diagnostics-library", - "com.amazonaws:DynamoDBLocal", - "com.amazonaws:dynamodb-lock-client", - "com.amazonaws:ivs-broadcast", - "com.amazonaws:ivs-player", - "com.amazonaws:kinesis-storm-spout" - ], - "license": "NonFree", - "name": "AmazonAWS" - }, - "com.android.billingclient": { - "code_signatures": [ - "com/android/billingclient" - ], - "documentation": [ - "https://developer.android.com/google/play/billing/integrate" - ], - "gradle_signatures": [ - "com.android.billingclient", - "com.google.androidbrowserhelper:billing", - "com.anjlab.android.iab.v3:library", - "com.github.penn5:donations", - "me.proton.core:payment-iap" - ], - "license": "NonFree", - "name": "BillingClient" - }, - "com.android.installreferrer": { - "anti_features": [ - "NonFreeDep", - "NonFreeNet" - ], - "code_signatures": [ - "com/android/installreferrer" - ], - "documentation": [ - "https://developer.android.com/google/play/installreferrer/library" - ], - "gradle_signatures": [ - "com.android.installreferrer" - ], - "license": "NonFree", - "name": "Play Install Referrer Library" - }, - "com.anychart": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/anychart" - ], - "description": "a data visualization library for easily creating interactive charts in Android apps.", - "license": "NonFree" - }, - "com.appboy": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/appboy" - ], - "description": "Targets customers based on personal interests, location, past purchases, and more; profiles users, segments audiences, and utilizes analytics for targeted advertisements.", - "license": "NonFree" - }, - "com.appbrain": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/appbrain" - ], - "description": "See Exodus Privacy.", - "license": "NonFree" - }, - "com.applause.android": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/applause/android" - ], - "description": "crowd-sourced testing. See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.applovin": { - "anti_features": [ - "Ads" - ], - "code_signatures": [ - "com/applovin" - ], - "description": "a mobile advertising technology company that enables brands to create mobile marketing campaigns that are fueled by data. 
Primary targets games.", - "license": "NonFree" - }, - "com.appsflyer": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/appsflyer" - ], - "description": "a mobile & attribution analytics platform.", - "license": "NonFree" - }, - "com.apptentive": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/apptentive" - ], - "description": "See Exodus Privacy.", - "license": "NonFree" - }, - "com.apptimize": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/apptimize" - ], - "description": "See Exodus Privacy and Crunchbase.", - "license": "NonFree" - }, - "com.askingpoint": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/askingpoint" - ], - "description": "complete mobile user engagement solution (power local, In-application evaluations and audits, input, user support, mobile reviews and informing).", - "license": "NonFree" - }, - "com.baidu.mobstat": { - "code_signatures": [ - "com/baidu/mobstat" - ], - "documentation": [ - "https://mtj.baidu.com/web/sdk/index" - ], - "gradle_signatures": [ - "com.baidu.mobstat" - ], - "license": "NonFree", - "name": "\u767e\u5ea6\u79fb\u52a8\u7edf\u8ba1SDK" - }, - "com.batch": { - "anti_features": [ - "Ads", - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/batch" - ], - "description": "mobile engagement platform to execute CRM tactics over iOS, Android & mobile websites.", - "license": "NonFree" - }, - "com.bosch.mtprotocol": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/bosch/mtprotocol" - ], - "description": "simplify and manage use of Bosch GLM and PLR laser rangefinders with Bluetooth connectivity.", - "license": "NonFree" - }, - "com.bugsee.library.Bugsee": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/bugsee/library/Bugsee" - ], - "description": "see video, network and logs that led to bugs and crashes in live apps. No need to reproduce intermittent bugs. With Bugsee, all the crucial data is always there.", - "license": "NonFree" - }, - "com.bugsense": { - "code_signatures": [ - "com/bugsense" - ], - "documentation": [ - "https://github.com/bugsense/docs/blob/master/android.md" - ], - "gradle_signatures": [ - "com.bugsense" - ], - "license": "NonFree", - "name": "BugSense" - }, - "com.chartboost.sdk": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/chartboost/sdk" - ], - "description": "create customized interstitial and video ads, promote new games, and swap traffic with one another. 
For more details, see Wikipedia.", - "license": "NonFree" - }, - "com.cloudrail": { - "code_signature": [ - "com/cloudrail" - ], - "documentation": [ - "https://cloudrail.com/" - ], - "gradle_signatures": [ - "com.cloudrail" - ], - "license": "NonFree", - "name": "CloudRail" - }, - "com.comscore.analytics": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/comscore" - ], - "description": "See Wikipedia for details.", - "license": "NonFree" - }, - "com.crashlytics.sdk.android": { - "code_signatures": [ - "com/crashlytics" - ], - "documentation": [ - "https://firebase.google.com/docs/crashlytics" - ], - "gradle_signatures": [ - "crashlytics" - ], - "license": "NonFree", - "name": "Firebase Crashlytics" - }, - "com.crittercism": { - "code_signatures": [ - "com/crittercism" - ], - "documentation": [ - "https://github.com/crittercism/crittercism-unity-android" - ], - "gradle_signatures": [ - "com.crittercism" - ], - "license": "NonFree", - "name": "Crittercism Plugin for Unity Crash Reporting" - }, - "com.criware": { - "anti_features": [ - "NonFreeComp", - "NonFreeAssets" - ], - "code_signatures": [ - "com/criware" - ], - "description": "audio and video solutions that can be integrated with popular game engines.", - "license": "NonFree" - }, - "com.deezer.sdk": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/deezer/sdk" - ], - "description": "a closed-source API for the Deezer music streaming service.", - "license": "NonFree" - }, - "com.dynamicyield": { - "anti_features": [ - "Ads", - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/dynamicyield" - ], - "description": "targeted advertising. Tracks user via location (GPS, WiFi, location data). Collects PII, profiling. See Exodus Privacy for more details.", - "license": "NonFree" - }, - "com.dynatrace.android.app": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/dynatrace/android/app" - ], - "description": "See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.ensighten": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/ensighten" - ], - "description": "organizations can leverage first-party customer data and profiles to fuel omni-channel action and insight using their existing technology investments. 
See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.epicgames.mobile.eossdk": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/epicgames/mobile/eossdk" - ], - "description": "integrate games with Epic Account Services and Epic Games Store", - "license": "NonFree" - }, - "com.facebook.android": { - "code_signatures": [ - "com/facebook/AccessToken", - "com/facebook/AccessTokenCache", - "com/facebook/AccessTokenManager", - "com/facebook/AccessTokenSource", - "com/facebook/AccessTokenTracker", - "com/facebook/all/All", - "com/facebook/appevents/aam/MetadataIndexer", - "com/facebook/appevents/aam/MetadataMatcher", - "com/facebook/appevents/aam/MetadataRule", - "com/facebook/appevents/aam/MetadataViewObserver", - "com/facebook/appevents/AccessTokenAppIdPair", - "com/facebook/appevents/AnalyticsUserIDStore", - "com/facebook/appevents/AppEvent", - "com/facebook/appevents/AppEventCollection", - "com/facebook/appevents/AppEventDiskStore", - "com/facebook/appevents/AppEventQueue", - "com/facebook/appevents/AppEventsConstants", - "com/facebook/appevents/AppEventsLogger", - "com/facebook/appevents/AppEventsLoggerImpl", - "com/facebook/appevents/AppEventsManager", - "com/facebook/appevents/AppEventStore", - "com/facebook/appevents/cloudbridge/AppEventsCAPIManager", - "com/facebook/appevents/cloudbridge/AppEventsConversionsAPITransformer", - "com/facebook/appevents/cloudbridge/AppEventsConversionsAPITransformerWebRequests", - "com/facebook/appevents/codeless/CodelessLoggingEventListener", - "com/facebook/appevents/codeless/CodelessManager", - "com/facebook/appevents/codeless/CodelessMatcher", - "com/facebook/appevents/codeless/internal/Constants", - "com/facebook/appevents/codeless/internal/EventBinding", - "com/facebook/appevents/codeless/internal/ParameterComponent", - "com/facebook/appevents/codeless/internal/PathComponent", - "com/facebook/appevents/codeless/internal/SensitiveUserDataUtils", - "com/facebook/appevents/codeless/internal/UnityReflection", - "com/facebook/appevents/codeless/internal/ViewHierarchy", - "com/facebook/appevents/codeless/RCTCodelessLoggingEventListener", - "com/facebook/appevents/codeless/ViewIndexer", - "com/facebook/appevents/codeless/ViewIndexingTrigger", - "com/facebook/appevents/eventdeactivation/EventDeactivationManager", - "com/facebook/appevents/FacebookSDKJSInterface", - "com/facebook/appevents/FlushReason", - "com/facebook/appevents/FlushResult", - "com/facebook/appevents/FlushStatistics", - "com/facebook/appevents/iap/InAppPurchaseActivityLifecycleTracker", - "com/facebook/appevents/iap/InAppPurchaseAutoLogger", - "com/facebook/appevents/iap/InAppPurchaseBillingClientWrapper", - "com/facebook/appevents/iap/InAppPurchaseEventManager", - "com/facebook/appevents/iap/InAppPurchaseLoggerManager", - "com/facebook/appevents/iap/InAppPurchaseManager", - "com/facebook/appevents/iap/InAppPurchaseSkuDetailsWrapper", - "com/facebook/appevents/iap/InAppPurchaseUtils", - "com/facebook/appevents/integrity/BlocklistEventsManager", - "com/facebook/appevents/integrity/IntegrityManager", - "com/facebook/appevents/integrity/MACARuleMatchingManager", - "com/facebook/appevents/integrity/ProtectedModeManager", - "com/facebook/appevents/integrity/RedactedEventsManager", - "com/facebook/appevents/internal/ActivityLifecycleTracker", - "com/facebook/appevents/InternalAppEventsLogger", - "com/facebook/appevents/internal/AppEventsLoggerUtility", - "com/facebook/appevents/internal/AppEventUtility", - 
"com/facebook/appevents/internal/AutomaticAnalyticsLogger", - "com/facebook/appevents/internal/Constants", - "com/facebook/appevents/internal/FileDownloadTask", - "com/facebook/appevents/internal/HashUtils", - "com/facebook/appevents/internal/SessionInfo", - "com/facebook/appevents/internal/SessionLogger", - "com/facebook/appevents/internal/SourceApplicationInfo", - "com/facebook/appevents/internal/ViewHierarchyConstants", - "com/facebook/appevents/ml/Model", - "com/facebook/appevents/ml/ModelManager", - "com/facebook/appevents/ml/MTensor", - "com/facebook/appevents/ml/Operator", - "com/facebook/appevents/ml/Utils", - "com/facebook/appevents/ondeviceprocessing/OnDeviceProcessingManager", - "com/facebook/appevents/ondeviceprocessing/RemoteServiceParametersHelper", - "com/facebook/appevents/ondeviceprocessing/RemoteServiceWrapper", - "com/facebook/appevents/PersistedEvents", - "com/facebook/appevents/restrictivedatafilter/RestrictiveDataManager", - "com/facebook/appevents/SessionEventsState", - "com/facebook/appevents/suggestedevents/FeatureExtractor", - "com/facebook/appevents/suggestedevents/PredictionHistoryManager", - "com/facebook/appevents/suggestedevents/SuggestedEventsManager", - "com/facebook/appevents/suggestedevents/SuggestedEventViewHierarchy", - "com/facebook/appevents/suggestedevents/ViewObserver", - "com/facebook/appevents/suggestedevents/ViewOnClickListener", - "com/facebook/appevents/UserDataStore", - "com/facebook/applinks/AppLinkData", - "com/facebook/applinks/AppLinks", - "com/facebook/applinks/FacebookAppLinkResolver", - "com/facebook/AuthenticationToken", - "com/facebook/AuthenticationTokenCache", - "com/facebook/AuthenticationTokenClaims", - "com/facebook/AuthenticationTokenHeader", - "com/facebook/AuthenticationTokenManager", - "com/facebook/AuthenticationTokenTracker", - "com/facebook/bolts/AggregateException", - "com/facebook/bolts/AndroidExecutors", - "com/facebook/bolts/AppLink", - "com/facebook/bolts/AppLinkResolver", - "com/facebook/bolts/AppLinks", - "com/facebook/bolts/BoltsExecutors", - "com/facebook/bolts/CancellationToken", - "com/facebook/bolts/CancellationTokenRegistration", - "com/facebook/bolts/CancellationTokenSource", - "com/facebook/bolts/Continuation", - "com/facebook/bolts/ExecutorException", - "com/facebook/bolts/Task", - "com/facebook/bolts/TaskCompletionSource", - "com/facebook/bolts/UnobservedErrorNotifier", - "com/facebook/bolts/UnobservedTaskException", - "com/facebook/CallbackManager", - "com/facebook/common/Common", - "com/facebook/core/Core", - "com/facebook/CurrentAccessTokenExpirationBroadcastReceiver", - "com/facebook/CustomTabActivity", - "com/facebook/CustomTabMainActivity", - "com/facebook/devicerequests/internal/DeviceRequestsHelper", - "com/facebook/FacebookActivity", - "com/facebook/FacebookAuthorizationException", - "com/facebook/FacebookBroadcastReceiver", - "com/facebook/FacebookButtonBase", - "com/facebook/FacebookCallback", - "com/facebook/FacebookContentProvider", - "com/facebook/FacebookDialog", - "com/facebook/FacebookDialogException", - "com/facebook/FacebookException", - "com/facebook/FacebookGraphResponseException", - "com/facebook/FacebookOperationCanceledException", - "com/facebook/FacebookRequestError", - "com/facebook/FacebookSdk", - "com/facebook/FacebookSdkNotInitializedException", - "com/facebook/FacebookSdkVersion", - "com/facebook/FacebookServiceException", - "com/facebook/gamingservices/cloudgaming/AppToUserNotificationSender", - "com/facebook/gamingservices/cloudgaming/CloudGameLoginHandler", - 
"com/facebook/gamingservices/cloudgaming/DaemonReceiver", - "com/facebook/gamingservices/cloudgaming/DaemonRequest", - "com/facebook/gamingservices/cloudgaming/GameFeaturesLibrary", - "com/facebook/gamingservices/cloudgaming/InAppAdLibrary", - "com/facebook/gamingservices/cloudgaming/InAppPurchaseLibrary", - "com/facebook/gamingservices/cloudgaming/internal/SDKAnalyticsEvents", - "com/facebook/gamingservices/cloudgaming/internal/SDKConstants", - "com/facebook/gamingservices/cloudgaming/internal/SDKLogger", - "com/facebook/gamingservices/cloudgaming/internal/SDKMessageEnum", - "com/facebook/gamingservices/cloudgaming/internal/SDKShareIntentEnum", - "com/facebook/gamingservices/cloudgaming/PlayableAdsLibrary", - "com/facebook/gamingservices/ContextChooseDialog", - "com/facebook/gamingservices/ContextCreateDialog", - "com/facebook/gamingservices/ContextSwitchDialog", - "com/facebook/gamingservices/CustomUpdate", - "com/facebook/gamingservices/FriendFinderDialog", - "com/facebook/gamingservices/GameRequestDialog", - "com/facebook/gamingservices/GamingContext", - "com/facebook/gamingservices/GamingGroupIntegration", - "com/facebook/gamingservices/GamingImageUploader", - "com/facebook/gamingservices/GamingPayload", - "com/facebook/gamingservices/GamingServices", - "com/facebook/gamingservices/GamingVideoUploader", - "com/facebook/gamingservices/internal/DateFormatter", - "com/facebook/gamingservices/internal/GamingMediaUploader", - "com/facebook/gamingservices/internal/TournamentJoinDialogURIBuilder", - "com/facebook/gamingservices/internal/TournamentScoreType", - "com/facebook/gamingservices/internal/TournamentShareDialogURIBuilder", - "com/facebook/gamingservices/internal/TournamentSortOrder", - "com/facebook/gamingservices/model/ContextChooseContent", - "com/facebook/gamingservices/model/ContextCreateContent", - "com/facebook/gamingservices/model/ContextSwitchContent", - "com/facebook/gamingservices/model/CustomUpdateContent", - "com/facebook/gamingservices/OpenGamingMediaDialog", - "com/facebook/gamingservices/Tournament", - "com/facebook/gamingservices/TournamentConfig", - "com/facebook/gamingservices/TournamentFetcher", - "com/facebook/gamingservices/TournamentJoinDialog", - "com/facebook/gamingservices/TournamentShareDialog", - "com/facebook/gamingservices/TournamentUpdater", - "com/facebook/GraphRequest", - "com/facebook/GraphRequestAsyncTask", - "com/facebook/GraphRequestBatch", - "com/facebook/GraphResponse", - "com/facebook/HttpMethod", - "com/facebook/internal/AnalyticsEvents", - "com/facebook/internal/AppCall", - "com/facebook/internal/AttributionIdentifiers", - "com/facebook/internal/BoltsMeasurementEventListener", - "com/facebook/internal/BundleJSONConverter", - "com/facebook/internal/CallbackManagerImpl", - "com/facebook/internal/CollectionMapper", - "com/facebook/internal/CustomTab", - "com/facebook/internal/CustomTabUtils", - "com/facebook/internal/DialogFeature", - "com/facebook/internal/DialogPresenter", - "com/facebook/internal/FacebookDialogBase", - "com/facebook/internal/FacebookDialogFragment", - "com/facebook/internal/FacebookGamingAction", - "com/facebook/internal/FacebookInitProvider", - "com/facebook/internal/FacebookRequestErrorClassification", - "com/facebook/internal/FacebookSignatureValidator", - "com/facebook/internal/FacebookWebFallbackDialog", - "com/facebook/internal/FeatureManager", - "com/facebook/internal/FetchedAppGateKeepersManager", - "com/facebook/internal/FetchedAppSettings", - "com/facebook/internal/FetchedAppSettingsManager", - 
"com/facebook/internal/FileLruCache", - "com/facebook/internal/FragmentWrapper", - "com/facebook/internal/gatekeeper/GateKeeper", - "com/facebook/internal/gatekeeper/GateKeeperRuntimeCache", - "com/facebook/internal/ImageDownloader", - "com/facebook/internal/ImageRequest", - "com/facebook/internal/ImageResponse", - "com/facebook/internal/ImageResponseCache", - "com/facebook/internal/InstagramCustomTab", - "com/facebook/internal/InstallReferrerUtil", - "com/facebook/internal/instrument/anrreport/ANRDetector", - "com/facebook/internal/instrument/anrreport/ANRHandler", - "com/facebook/internal/instrument/crashreport/CrashHandler", - "com/facebook/internal/instrument/crashshield/AutoHandleExceptions", - "com/facebook/internal/instrument/crashshield/CrashShieldHandler", - "com/facebook/internal/instrument/crashshield/NoAutoExceptionHandling", - "com/facebook/internal/instrument/errorreport/ErrorReportData", - "com/facebook/internal/instrument/errorreport/ErrorReportHandler", - "com/facebook/internal/instrument/ExceptionAnalyzer", - "com/facebook/internal/instrument/InstrumentData", - "com/facebook/internal/instrument/InstrumentManager", - "com/facebook/internal/instrument/InstrumentUtility", - "com/facebook/internal/instrument/threadcheck/ThreadCheckHandler", - "com/facebook/internal/InternalSettings", - "com/facebook/internal/LockOnGetVariable", - "com/facebook/internal/Logger", - "com/facebook/internal/logging/dumpsys/EndToEndDumper", - "com/facebook/internal/Mutable", - "com/facebook/internal/NativeAppCallAttachmentStore", - "com/facebook/internal/NativeProtocol", - "com/facebook/internal/PlatformServiceClient", - "com/facebook/internal/ProfileInformationCache", - "com/facebook/internal/qualityvalidation/Excuse", - "com/facebook/internal/qualityvalidation/ExcusesForDesignViolations", - "com/facebook/internal/security/CertificateUtil", - "com/facebook/internal/security/OidcSecurityUtil", - "com/facebook/internal/ServerProtocol", - "com/facebook/internal/SmartLoginOption", - "com/facebook/internal/UrlRedirectCache", - "com/facebook/internal/Utility", - "com/facebook/internal/Validate", - "com/facebook/internal/WebDialog", - "com/facebook/internal/WorkQueue", - "com/facebook/LegacyTokenHelper", - "com/facebook/LoggingBehavior", - "com/facebook/login/CodeChallengeMethod", - "com/facebook/login/CustomTabLoginMethodHandler", - "com/facebook/login/CustomTabPrefetchHelper", - "com/facebook/login/DefaultAudience", - "com/facebook/login/DeviceAuthDialog", - "com/facebook/login/DeviceAuthMethodHandler", - "com/facebook/login/DeviceLoginManager", - "com/facebook/login/GetTokenClient", - "com/facebook/login/GetTokenLoginMethodHandler", - "com/facebook/login/InstagramAppLoginMethodHandler", - "com/facebook/login/KatanaProxyLoginMethodHandler", - "com/facebook/login/Login", - "com/facebook/login/LoginBehavior", - "com/facebook/login/LoginClient", - "com/facebook/login/LoginConfiguration", - "com/facebook/login/LoginFragment", - "com/facebook/login/LoginLogger", - "com/facebook/login/LoginManager", - "com/facebook/login/LoginMethodHandler", - "com/facebook/login/LoginResult", - "com/facebook/login/LoginStatusClient", - "com/facebook/login/LoginTargetApp", - "com/facebook/login/NativeAppLoginMethodHandler", - "com/facebook/login/NonceUtil", - "com/facebook/login/PKCEUtil", - "com/facebook/login/StartActivityDelegate", - "com/facebook/LoginStatusCallback", - "com/facebook/login/WebLoginMethodHandler", - "com/facebook/login/WebViewLoginMethodHandler", - "com/facebook/login/widget/DeviceLoginButton", - 
"com/facebook/login/widget/LoginButton", - "com/facebook/login/widget/ProfilePictureView", - "com/facebook/login/widget/ToolTipPopup", - "com/facebook/messenger/Messenger", - "com/facebook/messenger/MessengerThreadParams", - "com/facebook/messenger/MessengerUtils", - "com/facebook/messenger/ShareToMessengerParams", - "com/facebook/messenger/ShareToMessengerParamsBuilder", - "com/facebook/Profile", - "com/facebook/ProfileCache", - "com/facebook/ProfileManager", - "com/facebook/ProfileTracker", - "com/facebook/ProgressNoopOutputStream", - "com/facebook/ProgressOutputStream", - "com/facebook/RequestOutputStream", - "com/facebook/RequestProgress", - "com/facebook/share/internal/CameraEffectFeature", - "com/facebook/share/internal/CameraEffectJSONUtility", - "com/facebook/share/internal/GameRequestValidation", - "com/facebook/share/internal/LegacyNativeDialogParameters", - "com/facebook/share/internal/MessageDialogFeature", - "com/facebook/share/internal/NativeDialogParameters", - "com/facebook/share/internal/ResultProcessor", - "com/facebook/share/internal/ShareConstants", - "com/facebook/share/internal/ShareContentValidation", - "com/facebook/share/internal/ShareDialogFeature", - "com/facebook/share/internal/ShareFeedContent", - "com/facebook/share/internal/ShareInternalUtility", - "com/facebook/share/internal/ShareStoryFeature", - "com/facebook/share/internal/VideoUploader", - "com/facebook/share/internal/WebDialogParameters", - "com/facebook/share/model/AppGroupCreationContent", - "com/facebook/share/model/CameraEffectArguments", - "com/facebook/share/model/CameraEffectTextures", - "com/facebook/share/model/GameRequestContent", - "com/facebook/share/model/ShareCameraEffectContent", - "com/facebook/share/model/ShareContent", - "com/facebook/share/model/ShareHashtag", - "com/facebook/share/model/ShareLinkContent", - "com/facebook/share/model/ShareMedia", - "com/facebook/share/model/ShareMediaContent", - "com/facebook/share/model/ShareMessengerActionButton", - "com/facebook/share/model/ShareMessengerURLActionButton", - "com/facebook/share/model/ShareModel", - "com/facebook/share/model/ShareModelBuilder", - "com/facebook/share/model/SharePhoto", - "com/facebook/share/model/SharePhotoContent", - "com/facebook/share/model/ShareStoryContent", - "com/facebook/share/model/ShareVideo", - "com/facebook/share/model/ShareVideoContent", - "com/facebook/share/Share", - "com/facebook/share/ShareApi", - "com/facebook/share/ShareBuilder", - "com/facebook/share/Sharer", - "com/facebook/share/widget/GameRequestDialog", - "com/facebook/share/widget/MessageDialog", - "com/facebook/share/widget/SendButton", - "com/facebook/share/widget/ShareButton", - "com/facebook/share/widget/ShareButtonBase", - "com/facebook/share/widget/ShareDialog", - "com/facebook/UserSettingsManager", - "com/facebook/WebDialog" - ], - "documentation": [ - "https://developers.facebook.com/docs/android" - ], - "gradle_signatures": [ - "com.facebook.android" - ], - "license": "NonFree", - "name": "Facebook Android SDK" - }, - "com.flurry.android": { - "code_signature": [ - "com/flurry" - ], - "documentation": [ - "https://www.flurry.com/" - ], - "gradle_signatures": [ - "com.flurry.android" - ], - "license": "NonFree", - "name": "Flurry Android SDK" - }, - "com.garmin.android.connectiq": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/garmin/android/apps/connectmobile/connectiq" - ], - "description": "SDK to build unique wearable experiences leveraging Garmin device sensors and features.", - "license": "NonFree" - }, 
- "com.garmin.connectiq": { - "code_signatures": [ - "com/garmin/android/connectiq" - ], - "documentation": [ - "https://developer.garmin.com/connect-iq/core-topics/mobile-sdk-for-android/" - ], - "gradle_signatures": [ - "com.garmin.connectiq:ciq-companion-app-sdk" - ], - "license": "NonFree", - "name": "Connect IQ Mobile SDK for Android" - }, - "com.garmin.fit": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/garmin/fit" - ], - "description": "SDK to access the Garmin Fit.", - "license": "NonFree" - }, - "com.geetest": { - "code_signatures": [ - "com/geetest" - ], - "documentation": [ - "https://docs.geetest.com/" - ], - "gradle_signatures": [ - "com.geetest" - ], - "license": "NonFree", - "name": "GeeTest" - }, - "com.github.junrar": { - "code_signatures": [ - "com/github/junrar" - ], - "documentation": [ - "https://github.com/junrar/junrar" - ], - "gradle_signatures": [ - "com.github.junrar:junrar" - ], - "license": "NonFree", - "name": "Junrar" - }, - "com.github.omicronapps.7-Zip-JBinding-4Android": { - "documentation": [ - "https://github.com/omicronapps/7-Zip-JBinding-4Android" - ], - "gradle_signatures": [ - "com.github.omicronapps:7-Zip-JBinding-4Android" - ], - "license": "NonFree", - "name": "7-Zip-JBinding-4Android" - }, - "com.google.ads": { - "code_signatures": [ - "com/google/ads" - ], - "documentation": [ - "https://developers.google.com/interactive-media-ads/docs/sdks/android/client-side" - ], - "gradle_signatures": [ - "com.google.ads", - "com.google.android.exoplayer:extension-ima", - "androidx.media3:media3-exoplayer-ima" - ], - "license": "NonFree", - "name": "IMA SDK for Android" - }, - "com.google.android.apps.auto.sdk": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/google/android/apps/auto/sdk" - ], - "description": "Framework to develop apps for Android Auto", - "license": "NonFree" - }, - "com.google.android.gcm": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/google/android/gcm" - ], - "description": "Google Cloud Messaging is a mobile notification service developed by Google that enables third-party application developers to send notification data or information from developer-run servers to app.", - "license": "NonFree" - }, - "com.google.android.gms": { - "code_signatures": [ - "com/google/android/gms" - ], - "documentation": [ - "https://www.android.com/gms/" - ], - "gradle_signatures": [ - "com.google.android.gms(?!.(oss-licenses-plugin|strict-version-matcher-plugin))", - "com.google.android.ump", - "androidx.core:core-google-shortcuts", - "androidx.credentials:credentials-play-services-auth", - "androidx.media3:media3-cast", - "androidx.media3:media3-datasource-cronet", - "androidx.wear:wear-remote-interactions", - "androidx.work:work-gcm", - "com.google.android.exoplayer:extension-cast", - "com.google.android.exoplayer:extension-cronet", - "com.evernote:android-job", - "com.cloudinary:cloudinary-android.*:2\\.[12]\\.", - "com.pierfrancescosoffritti.androidyoutubeplayer:chromecast-sender", - "com.yayandroid:locationmanager", - "(?Home channels for mobile apps.", - "license": "NonFree" - }, - "com.google.android.play": { - "anti_features": [ - "NonFreeDep", - "NonFreeNet" - ], - "code_signatures": [ - "com/google/android/play/core" - ], - "documentation": [ - "https://developer.android.com/guide/playcore" - ], - "gradle_signatures": [ - "com.google.android.play:app-update", - "com.google.android.play:asset-delivery", - "com.google.android.play:core.*", - 
"com.google.android.play:feature-delivery", - "com.google.android.play:review", - "androidx.navigation:navigation-dynamic-features", - "com.github.SanojPunchihewa:InAppUpdater", - "com.suddenh4x.ratingdialog:awesome-app-rating" - ], - "license": "NonFree", - "name": "Google Play Core" - }, - "com.google.android.play.appupdate": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/google/android/play/appupdate" - ], - "description": "manages operations that allow an app to initiate its own updates.", - "license": "NonFree" - }, - "com.google.android.play.integrity": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/google/android/play/integrity" - ], - "description": "helps you check that interactions and server requests are coming from your genuine app binary running on a genuine Android device.", - "license": "NonFree" - }, - "com.google.android.play.review": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/google/android/play/review" - ], - "description": "lets you prompt users to submit Play Store ratings and reviews without the inconvenience of leaving your app or game.", - "license": "NonFree" - }, - "com.google.android.vending": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/google/android/vending/(?!licensing|expansion)" - ], - "description": "the Google Play Store app and its libaries, parts are FOSS and get vendored in libs as they are", - "documentation": [ - "https://github.com/google/play-licensing/tree/master/lvl_library/src/main", - "https://github.com/googlearchive/play-apk-expansion/tree/master/zip_file/src/com/google/android/vending/expansion/zipfile", - "https://github.com/googlearchive/play-apk-expansion/tree/master/apkx_library/src/com/google/android/vending/expansion/downloader" - ], - "license": "NonFree" - }, - "com.google.android.wearable": { - "code_signatures": [ - "com/google/android/wearable/(?!compat/WearableActivityController)" - ], - "description": "an API for the Android Wear platform, note that androidx.wear:wear has a stub https://android.googlesource.com/platform/frameworks/support/+/refs/heads/androidx-master-release/wear/wear/src/androidTest/java/com/google/android/wearable/compat/WearableActivityController.java#26", - "gradle_signatures": [ - "com.google.android.support:wearable", - "com.google.android.wearable:wearable" - ], - "license": "NonFree" - }, - "com.google.android.youtube.player": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/google/android/youtube/player" - ], - "description": "enables you to easily play YouTube videos and display thumbnails of YouTube videos in your Android application.", - "license": "NonFree" - }, - "com.google.mlkit": { - "code_signatures": [ - "com/google/mlkit" - ], - "documentation": [ - "https://developers.google.com/ml-kit" - ], - "gradle_signatures": [ - "com.google.mlkit", - "io.github.g00fy2.quickie" - ], - "license": "NonFree", - "name": "ML Kit" - }, - "com.google.vr": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/google/vr" - ], - "description": "enables Daydream and Cardboard app development on Android.", - "license": "NonFree" - }, - "com.heapanalytics": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/heapanalytics" - ], - "description": "automatically captures every web, mobile, and cloud interaction: clicks, submits, transactions, emails, and more. 
Retroactively analyze your data without writing code.", - "license": "NonFree" - }, - "com.heyzap": { - "code_signatures": [ - "com/heyzap" - ], - "documentation": [ - "https://www.digitalturbine.com/" - ], - "license": "NonFree", - "name": "Heyzap" - }, - "com.huawei.hms": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/huawei/hms" - ], - "description": "Huawei's pendant to GMS (Google Mobile Services)", - "license": "NonFree" - }, - "com.hypertrack": { - "code_signatures": [ - "com/hypertrack/(?!hyperlog)" - ], - "documentation": [ - "https://github.com/hypertrack/sdk-android" - ], - "gradle_signatures": [ - "com.hypertrack(?!:hyperlog)" - ], - "gradle_signatures_negative_examples": [ - "com.hypertrack:hyperlog" - ], - "license": "NonFree", - "name": "HyperTrack SDK for Android" - }, - "com.instabug": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/instabug" - ], - "description": "In-App Feedback and Bug Reporting for Mobile Apps.", - "license": "NonFree" - }, - "com.kiddoware.kidsplace.sdk": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/kiddoware/kidsplace/sdk" - ], - "description": "parental control", - "license": "NonFree" - }, - "com.kochava.android.tracker": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/kochava/android/tracker" - ], - "description": "provides holistic, unbiased measurement for precise, real-time visualization of app performance through the funnel. See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.mapbox": { - "MaintainerNotes": "It seems that all libs in https://github.com/mapbox/mapbox-java is fully FOSS\nsince 3.0.0.\n", - "documentation": [ - "https://docs.mapbox.com/android/java/overview/", - "https://github.com/mapbox/mapbox-java" - ], - "gradle_signatures": [ - "com\\.mapbox(?!\\.mapboxsdk:mapbox-sdk-(services|geojson|turf):([3-5]))" - ], - "gradle_signatures_negative_examples": [ - "com.mapbox.mapboxsdk:mapbox-sdk-services:5.0.0", - "com.github.johan12345:mapbox-events-android:a21c324501", - "implementation(\"com.github.johan12345.AnyMaps:anymaps-mapbox:$anyMapsVersion\")" - ], - "gradle_signatures_positive_examples": [ - "com.mapbox.mapboxsdk:mapbox-android-plugin-annotation-v7:0.6.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-annotation-v8:0.7.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-localization-v7:0.7.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-locationlayer:0.4.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-markerview-v8:0.3.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-places-v8:0.9.0", - "com.mapbox.mapboxsdk:mapbox-android-plugin-scalebar-v8:0.2.0", - "com.mapbox.mapboxsdk:mapbox-android-sdk:7.3.0" - ], - "license": "NonFree", - "name": "Mapbox Java SDK" - }, - "com.microblink": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet", - "Tracking" - ], - "code_signatures": [ - "com/microblink" - ], - "description": "verify users at scale and automate your document-based workflow with computer vision tech built for a remote world.", - "license": "NonFree" - }, - "com.microsoft.band": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/microsoft/band" - ], - "description": "library to access the Microsoft Band smartwatch.", - "license": "NonFree" - }, - "com.mopub.mobileads": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/mopub/mobileads" - ], - "description": "ad framework run by Twitter until 1/2022, then sold to 
AppLovin.", - "license": "NonFree" - }, - "com.newrelic.agent": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/newrelic/agent" - ], - "description": "delivering full-stack visibility and analytics to enterprises around the world. See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.onesignal": { - "code_signatures": [ - "com/onesignal" - ], - "documentation": [ - "https://github.com/OneSignal/OneSignal-Android-SDK" - ], - "gradle_signatures": [ - "com.onesignal:OneSignal" - ], - "license": "NonFree", - "name": "OneSignal Android Push Notification Plugin" - }, - "com.optimizely": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/optimizely" - ], - "description": "part of the comScore, Inc. market research community, a leading global market research effort that studies and reports on Internet trends and behavior.", - "license": "NonFree" - }, - "com.paypal.sdk": { - "code_signatures": [ - "com/paypal" - ], - "documentation": [ - "https://github.com/paypal/PayPal-Android-SDK", - "https://github.com/paypal/android-checkout-sdk" - ], - "gradle_signatures": [ - "com.paypal" - ], - "license": "NonFree", - "name": "PayPal Android SDK" - }, - "com.pushwoosh": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/pushwoosh" - ], - "description": "mobile analytics under the cover of push messaging.", - "license": "NonFree" - }, - "com.quantcast.measurement.service": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/quantcast/measurement/service" - ], - "description": "processes real-time data at the intersection of commerce and culture, providing useful, actionable insights for brands and publishers. See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.revenuecat.purchases": { - "code_signatures": [ - "com/revenuecat/purchases" - ], - "documentation": [ - "https://www.revenuecat.com/" - ], - "gradle_signatures": [ - "com.revenuecat.purchases" - ], - "license": "NonFree", - "name": "RevenueCat Purchases" - }, - "com.samsung.accessory": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/samsung/accessory" - ], - "description": "provides a stable environment in which you can use a variety features by connecting accessories to your mobile device.", - "license": "NonFree" - }, - "com.samsung.android.sdk.look": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/samsung/android/sdk/look" - ], - "description": "offers specialized widgets and service components for extended functions of the Samsung Android devices.", - "license": "NonFree" - }, - "com.sendbird.android": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet", - "Tracking" - ], - "code_signatures": [ - "com/sendbird/android" - ], - "description": "an easy-to-use Chat API, native Chat SDKs, and a fully-managed chat platform on the backend means faster time-to-market.", - "license": "NonFree" - }, - "com.smaato.soma": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/smaato/soma" - ], - "description": "a mobile ad platform that includes video ads.", - "license": "NonFree" - }, - "com.spotify.sdk": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "com/spotify/sdk" - ], - "description": "allows your application to interact with the Spotify app service. 
(Note that while the SDK repo claims Apache license, the code is not available there)", - "license": "NonFree" - }, - "com.startapp.android": { - "anti_features": [ - "Ads", - "Tracking", - "NonFreeComp" - ], - "code_signatures": [ - "com/startapp" - ], - "description": "partly quite intrusive ad network.", - "license": "NonFree" - }, - "com.telerik.android": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "com/telerik/android" - ], - "description": "offers high quality Xamarin Forms UI components and Visual Studio item templates to enable every developer.", - "license": "NonFree" - }, - "com.tencent.bugly": { - "code_signatures": [ - "com/tencent/bugly" - ], - "documentation": [ - "https://bugly.qq.com/" - ], - "gradle_signatures": [ - "com.tencent.bugly" - ], - "license": "NonFree", - "name": "Bugly Android SDK" - }, - "com.tencent.mapsdk": { - "anti_features": [ - "NonFreeNet" - ], - "code_signatures": [ - "com/tencent/tencentmap" - ], - "description": "giving access to Tencent Maps.", - "license": "NonFree" - }, - "com.tenjin.android.TenjinSDK": { - "anti_features": [ - "Tracking" - ], - "code_signatures": [ - "com/tenjin/android/TenjinSDK" - ], - "description": "a marketing platform designed for mobile that features analytics, automated aggregation, and direct data visualization with direct SQL access.", - "license": "NonFree" - }, - "com.umeng.umsdk": { - "code_signatures": [ - "com/umeng" - ], - "documentation": [ - "https://developer.umeng.com/docs/119267/detail/118584" - ], - "gradle_signatures": [ - "com.umeng" - ], - "license": "NonFree", - "name": "Umeng SDK" - }, - "com.wei.android.lib": { - "code_signatures": [ - "com/wei/android/lib/fingerprintidentify" - ], - "documentation": [ - "https://github.com/uccmawei/FingerprintIdentify" - ], - "gradle_signatures": [ - "com.wei.android.lib:fingerprintidentify", - "com.github.uccmawei:FingerprintIdentify" - ], - "gradle_signatures_positive_examples": [ - "implementation \"com.github.uccmawei:fingerprintidentify:${safeExtGet(\"fingerprintidentify\", \"1.2.6\")}\"" - ], - "license": "NonFree", - "name": "FingerprintIdentify" - }, - "com.yandex.android": { - "code_signatures": [ - "com/yandex/android/(?!:authsdk)" - ], - "gradle_signatures": [ - "com\\.yandex\\.android(?!:authsdk)" - ], - "gradle_signatures_negative_examples": [ - "com.yandex.android:authsdk" - ], - "license": "NonFree", - "name": "Yandex SDK" - }, - "com.yandex.metrica": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "com/yandex/metrica" - ], - "description": "a mobile attribution and analytics platform developed by Yandex. It is free, real-time and has no data limits restriction. See Crunchbase and Exodus Privacy.", - "license": "NonFree" - }, - "com.yandex.mobile.ads": { - "anti_features": [ - "Ads", - "NonFreeComp" - ], - "code_signatures": [ - "com/yandex/mobile/ads" - ], - "description": "See Exodus Privacy.", - "license": "NonFree" - }, - "de.epgpaid": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "de/epgpaid" - ], - "description": "access paid EPG (Electronic Program Guide, for TV) data (after payment, of course). 
Part of TVBrowser.", - "license": "NonFree" - }, - "de.innosystec.unrar": { - "code_signatures": [ - "de/innosystec/unrar" - ], - "description": "java unrar util", - "license": "NonFree" - }, - "firebase": { - "code_signatures": [ - "com/google/firebase" - ], - "documentation": [ - "https://www.firebase.com" - ], - "gradle_signatures": [ - "com(\\.google)?\\.firebase[.:](?!firebase-jobdispatcher|geofire-java)", - "com.microsoft.appcenter:appcenter-push" - ], - "gradle_signatures_negative_examples": [ - " compile 'com.firebase:firebase-jobdispatcher:0.8.4'", - "implementation 'com.firebase:geofire-java:3.0.0'", - " compile 'com.firebaseui:firebase-ui-auth:3.1.3'", - "com.firebaseui:firebase-ui-database", - "com.firebaseui:firebase-ui-storage", - "com.github.axet:android-firebase-fake", - "com.github.b3er.rxfirebase:firebase-database", - "com.github.b3er.rxfirebase:firebase-database-kotlin", - "com.segment.analytics.android.integrations:firebase" - ], - "gradle_signatures_positive_examples": [ - "\tcompile 'com.google.firebase:firebase-crash:11.0.8'", - "\tcompile 'com.google.firebase:firebase-core:11.0.8'", - "com.firebase:firebase-client-android:2.5.2", - "com.google.firebase.crashlytics", - "com.google.firebase.firebase-perf", - "com.google.firebase:firebase-ads", - "com.google.firebase:firebase-analytics", - "com.google.firebase:firebase-appindexing", - "com.google.firebase:firebase-auth", - "com.google.firebase:firebase-config", - "com.google.firebase:firebase-core", - "com.google.firebase:firebase-crash", - "com.google.firebase:firebase-crashlytics", - "com.google.firebase:firebase-database", - "com.google.firebase:firebase-dynamic-links", - "com.google.firebase:firebase-firestore", - "com.google.firebase:firebase-inappmessaging", - "com.google.firebase:firebase-inappmessaging-display", - "com.google.firebase:firebase-messaging", - "com.google.firebase:firebase-ml-natural-language", - "com.google.firebase:firebase-ml-natural-language-smart-reply-model", - "com.google.firebase:firebase-ml-vision", - "com.google.firebase:firebase-perf", - "com.google.firebase:firebase-plugins", - "com.google.firebase:firebase-storage" - ], - "license": "NonFree", - "name": "Firebase" - }, - "google-maps": { - "anti_features": [ - "NonFreeDep", - "NonFreeNet" - ], - "api_key_ids": [ - "com\\.google\\.android\\.geo\\.API_KEY", - "com\\.google\\.android\\.maps\\.v2\\.API_KEY" - ], - "documentation": [ - "https://developers.google.com/maps/documentation/android-sdk/overview" - ], - "license": "NonFree", - "name": "Google Maps" - }, - "io.fabric.sdk.android": { - "anti_features": [ - "NonFreeComp", - "Tracking" - ], - "code_signatures": [ - "io/fabric/sdk/android" - ], - "description": "Framework to integrate services. Provides e.g. crash reports and analytics. 
Aquired by Google in 2017.", - "license": "NonFree" - }, - "io.github.sinaweibosdk": { - "code_signatures": [ - "com/sina" - ], - "documentation": [ - "https://github.com/sinaweibosdk/weibo_android_sdk" - ], - "gradle_signatures": [ - "io.github.sinaweibosdk" - ], - "license": "NonFree", - "name": "SinaWeiboSDK" - }, - "io.intercom": { - "anti_features": [ - "NonFreeComp", - "NonFreeNet" - ], - "code_signatures": [ - "io/intercom" - ], - "description": "engage customers with email, push, and in\u2011app messages and support them with an integrated knowledge base and help desk.", - "license": "NonFree" - }, - "io.objectbox": { - "code_signatures": [ - "io/objectbox" - ], - "documentation": [ - "https://objectbox.io/faq/#license-pricing" - ], - "gradle_signatures": [ - "io.objectbox:objectbox-gradle-plugin" - ], - "license": "NonFree", - "name": "ObjectBox Database" - }, - "me.pushy": { - "code_signatures": [ - "me/pushy" - ], - "documentation": [ - "https://pushy.me/" - ], - "gradle_signatures": [ - "me.pushy" - ], - "license": "NonFree", - "name": "Pushy" - }, - "org.gradle.toolchains.foojay-resolver-convention": { - "documentation": [ - "https://github.com/gradle/foojay-toolchains" - ], - "gradle_signatures": [ - "org.gradle.toolchains.foojay-resolver" - ], - "license": "Apache-2.0", - "name": "Foojay Toolchains Plugin" - }, - "org.mariuszgromada.math": { - "code_signatures": [ - "org/mariuszgromada/math/mxparser/parsertokens/SyntaxStringBuilder", - "org/mariuszgromada/math/mxparser/CalcStepRecord", - "org/mariuszgromada/math/mxparser/CalcStepsRegister", - "org/mariuszgromada/math/mxparser/License", - "org/mariuszgromada/math/mxparser/CloneCache", - "org/mariuszgromada/math/mxparser/ElementAtTheEnd", - "org/mariuszgromada/math/mxparser/CompilationDetails", - "org/mariuszgromada/math/mxparser/CompiledElement" - ], - "documentation": [ - "https://mathparser.org", - "https://mathparser.org/mxparser-license/" - ], - "gradle_signatures": [ - "org.mariuszgromada.math:MathParser.org-mXparser:[5-9]" - ], - "license": "NonFree", - "name": "mXparser" - }, - "tornaco.android.sec": { - "anti_features": [ - "NonFreeComp" - ], - "code_signatures": [ - "tornaco/android/sec" - ], - "description": "proprietary part of the Thanox application", - "license": "NonFree" - } - }, - "timestamp": 1747829076.702502, - "version": 1, - "last_updated": 1750710966.431471 -}''' diff --git a/fdroidserver/server.py b/fdroidserver/server.py new file mode 100644 index 00000000..ce210f84 --- /dev/null +++ b/fdroidserver/server.py @@ -0,0 +1,769 @@ +#!/usr/bin/env python3 +# +# server.py - part of the FDroid server tools +# Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import sys +import glob +import hashlib +import os +import paramiko +import pwd +import re +import subprocess +import time +from argparse import ArgumentParser +import logging +import shutil + +from . import _ +from . 
import common +from . import index +from .exception import FDroidException + +config = None +options = None +start_timestamp = time.gmtime() + +BINARY_TRANSPARENCY_DIR = 'binary_transparency' + +AUTO_S3CFG = '.fdroid-server-update-s3cfg' +USER_S3CFG = 's3cfg' + + +def update_awsbucket(repo_section): + ''' + Upload the contents of the directory `repo_section` (including + subdirectories) to the AWS S3 "bucket". The contents of that subdir of the + bucket will first be deleted. + + Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey + ''' + + logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "' + + config['awsbucket'] + '"') + + if common.set_command_in_config('s3cmd'): + update_awsbucket_s3cmd(repo_section) + else: + update_awsbucket_libcloud(repo_section) + + +def update_awsbucket_s3cmd(repo_section): + '''upload using the CLI tool s3cmd, which provides rsync-like sync + + The upload is done in multiple passes to reduce the chance of + interfering with an existing client-server interaction. In the + first pass, only new files are uploaded. In the second pass, + changed files are uploaded, overwriting what is on the server. On + the third/last pass, the indexes are uploaded, and any removed + files are deleted from the server. The last pass is the only pass + to use a full MD5 checksum of all files to detect changes. + ''' + + logging.debug(_('Using s3cmd to sync with: {url}') + .format(url=config['awsbucket'])) + + if os.path.exists(USER_S3CFG): + logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG)) + configfilename = USER_S3CFG + else: + fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600) + logging.debug(_('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG)) + os.write(fd, '[default]\n'.encode('utf-8')) + os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8')) + os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8')) + os.close(fd) + configfilename = AUTO_S3CFG + + s3bucketurl = 's3://' + config['awsbucket'] + s3cmd = [config['s3cmd'], '--config=' + configfilename] + if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0: + logging.warning(_('Creating new S3 bucket: {url}') + .format(url=s3bucketurl)) + if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0: + logging.error(_('Failed to create S3 bucket: {url}') + .format(url=s3bucketurl)) + raise FDroidException() + + s3cmd_sync = s3cmd + ['sync', '--acl-public'] + if options.verbose: + s3cmd_sync += ['--verbose'] + if options.quiet: + s3cmd_sync += ['--quiet'] + indexxml = os.path.join(repo_section, 'index.xml') + indexjar = os.path.join(repo_section, 'index.jar') + indexv1jar = os.path.join(repo_section, 'index-v1.jar') + + s3url = s3bucketurl + '/fdroid/' + logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url) + logging.debug(_('Running first pass with MD5 checking disabled')) + if subprocess.call(s3cmd_sync + + ['--no-check-md5', '--skip-existing', + '--exclude', indexxml, + '--exclude', indexjar, + '--exclude', indexv1jar, + repo_section, s3url]) != 0: + raise FDroidException() + logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url) + if subprocess.call(s3cmd_sync + + ['--no-check-md5', + '--exclude', indexxml, + '--exclude', indexjar, + '--exclude', indexv1jar, + repo_section, s3url]) != 0: + raise FDroidException() + + logging.debug(_('s3cmd sync indexes {path} to {url} and delete') + .format(path=repo_section, url=s3url)) + 
s3cmd_sync.append('--delete-removed') + s3cmd_sync.append('--delete-after') + if options.no_checksum: + s3cmd_sync.append('--no-check-md5') + else: + s3cmd_sync.append('--check-md5') + if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0: + raise FDroidException() + + +def update_awsbucket_libcloud(repo_section): + ''' + Upload the contents of the directory `repo_section` (including + subdirectories) to the AWS S3 "bucket". The contents of that subdir of the + bucket will first be deleted. + + Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey + ''' + + logging.debug(_('using Apache libcloud to sync with {url}') + .format(url=config['awsbucket'])) + + import libcloud.security + libcloud.security.VERIFY_SSL_CERT = True + from libcloud.storage.types import Provider, ContainerDoesNotExistError + from libcloud.storage.providers import get_driver + + if not config.get('awsaccesskeyid') or not config.get('awssecretkey'): + raise FDroidException( + _('To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.py!')) + awsbucket = config['awsbucket'] + + if os.path.exists(USER_S3CFG): + raise FDroidException(_('"{path}" exists but s3cmd is not installed!') + .format(path=USER_S3CFG)) + + cls = get_driver(Provider.S3) + driver = cls(config['awsaccesskeyid'], config['awssecretkey']) + try: + container = driver.get_container(container_name=awsbucket) + except ContainerDoesNotExistError: + container = driver.create_container(container_name=awsbucket) + logging.info(_('Created new container "{name}"') + .format(name=container.name)) + + upload_dir = 'fdroid/' + repo_section + objs = dict() + for obj in container.list_objects(): + if obj.name.startswith(upload_dir + '/'): + objs[obj.name] = obj + + for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)): + for name in files: + upload = False + file_to_upload = os.path.join(root, name) + object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd()) + if object_name not in objs: + upload = True + else: + obj = objs.pop(object_name) + if obj.size != os.path.getsize(file_to_upload): + upload = True + else: + # if the sizes match, then compare by MD5 + md5 = hashlib.md5() # nosec AWS uses MD5 + with open(file_to_upload, 'rb') as f: + while True: + data = f.read(8192) + if not data: + break + md5.update(data) + if obj.hash != md5.hexdigest(): + s3url = 's3://' + awsbucket + '/' + obj.name + logging.info(' deleting ' + s3url) + if not driver.delete_object(obj): + logging.warn('Could not delete ' + s3url) + upload = True + + if upload: + logging.debug(' uploading "' + file_to_upload + '"...') + extra = {'acl': 'public-read'} + if file_to_upload.endswith('.sig'): + extra['content_type'] = 'application/pgp-signature' + elif file_to_upload.endswith('.asc'): + extra['content_type'] = 'application/pgp-signature' + logging.info(' uploading ' + os.path.relpath(file_to_upload) + + ' to s3://' + awsbucket + '/' + object_name) + with open(file_to_upload, 'rb') as iterator: + obj = driver.upload_object_via_stream(iterator=iterator, + container=container, + object_name=object_name, + extra=extra) + # delete the remnants in the bucket, they do not exist locally + while objs: + object_name, obj = objs.popitem() + s3url = 's3://' + awsbucket + '/' + object_name + if object_name.startswith(upload_dir): + logging.warn(' deleting ' + s3url) + driver.delete_object(obj) + else: + logging.info(' skipping ' + s3url) + + +def update_serverwebroot(serverwebroot, repo_section): + # use a checksum comparison for 
accurate comparisons on different + # filesystems, for example, FAT has a low resolution timestamp + rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links'] + if not options.no_checksum: + rsyncargs.append('--checksum') + if options.verbose: + rsyncargs += ['--verbose'] + if options.quiet: + rsyncargs += ['--quiet'] + if options.identity_file is not None: + rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file] + elif 'identity_file' in config: + rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']] + indexxml = os.path.join(repo_section, 'index.xml') + indexjar = os.path.join(repo_section, 'index.jar') + indexv1jar = os.path.join(repo_section, 'index-v1.jar') + # Upload the first time without the index files and delay the deletion as + # much as possible, that keeps the repo functional while this update is + # running. Then once it is complete, rerun the command again to upload + # the index files. Always using the same target with rsync allows for + # very strict settings on the receiving server, you can literally specify + # the one rsync command that is allowed to run in ~/.ssh/authorized_keys. + # (serverwebroot is guaranteed to have a trailing slash in common.py) + logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot) + if subprocess.call(rsyncargs + + ['--exclude', indexxml, + '--exclude', indexjar, + '--exclude', indexv1jar, + repo_section, serverwebroot]) != 0: + raise FDroidException() + if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0: + raise FDroidException() + # upload "current version" symlinks if requested + if config['make_current_version_link'] and repo_section == 'repo': + links_to_upload = [] + for f in glob.glob('*.apk') \ + + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'): + if os.path.islink(f): + links_to_upload.append(f) + if len(links_to_upload) > 0: + if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0: + raise FDroidException() + + +def sync_from_localcopy(repo_section, local_copy_dir): + '''Syncs the repo from "local copy dir" filesystem to this box + + In setups that use offline signing, this is the last step that + syncs the repo from the "local copy dir" e.g. a thumb drive to the + repo on the local filesystem. That local repo is then used to + push to all the servers that are configured. + + ''' + logging.info('Syncing from local_copy_dir to this repo.') + # trailing slashes have a meaning in rsync which is not needed here, so + # make sure both paths have exactly one trailing slash + common.local_rsync(options, + os.path.join(local_copy_dir, repo_section).rstrip('/') + '/', + repo_section.rstrip('/') + '/') + + offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR) + if os.path.exists(os.path.join(offline_copy, '.git')): + online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR) + push_binary_transparency(offline_copy, online_copy) + + +def update_localcopy(repo_section, local_copy_dir): + '''copy data from offline to the "local copy dir" filesystem + + This updates the copy of this repo used to shuttle data from an + offline signing machine to the online machine, e.g. on a thumb + drive. 
+
+    '''
+    # local_copy_dir is guaranteed to have a trailing slash in main() below
+    common.local_rsync(options, repo_section, local_copy_dir)
+
+    offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
+    if os.path.isdir(os.path.join(offline_copy, '.git')):
+        online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
+        push_binary_transparency(offline_copy, online_copy)
+
+
+def _get_size(start_path='.'):
+    '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
+    total_size = 0
+    for root, dirs, files in os.walk(start_path):
+        for f in files:
+            fp = os.path.join(root, f)
+            total_size += os.path.getsize(fp)
+    return total_size
+
+
+def update_servergitmirrors(servergitmirrors, repo_section):
+    '''update repo mirrors stored in git repos
+
+    This is a hack to use public git repos as F-Droid repos. It
+    recreates the git repo from scratch each time, so that there is no
+    history. That keeps the size of the git repo small. Services
+    like GitHub or GitLab have a size limit of something like 1 gig.
+    This git repo is only a git repo for the purpose of being hosted.
+    For history, there is the archive section, and there is the binary
+    transparency log.
+
+    '''
+    import git
+    from clint.textui import progress
+    if config.get('local_copy_dir') \
+            and not config.get('sync_from_local_copy_dir'):
+        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')
+        return
+
+    # right now we support only 'repo' git-mirroring
+    if repo_section == 'repo':
+        git_mirror_path = 'git-mirror'
+        dotgit = os.path.join(git_mirror_path, '.git')
+        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
+        if not os.path.isdir(git_repodir):
+            os.makedirs(git_repodir)
+        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
+            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
+            shutil.rmtree(dotgit)
+        if options.no_keep_git_mirror_archive and _get_size(git_mirror_path) > 1000000000:
+            logging.warning('Deleting archive, repo is too big (1 gig max)')
+            archive_path = os.path.join(git_mirror_path, 'fdroid', 'archive')
+            shutil.rmtree(archive_path, ignore_errors=True)
+
+        # rsync is very particular about trailing slashes
+        common.local_rsync(options,
+                           repo_section.rstrip('/') + '/',
+                           git_repodir.rstrip('/') + '/')
+
+        # use custom SSH command if identity_file specified
+        ssh_cmd = 'ssh -oBatchMode=yes'
+        if options.identity_file is not None:
+            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
+        elif 'identity_file' in config:
+            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']
+
+        repo = git.Repo.init(git_mirror_path)
+
+        for remote_url in servergitmirrors:
+            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
+            r = git.remote.Remote(repo, hostname)
+            if r in repo.remotes:
+                r = repo.remote(hostname)
+                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
+                    r.set_url(remote_url)
+            else:
+                repo.create_remote(hostname, remote_url)
+            logging.info('Mirroring to: ' + remote_url)
+
+        # sadly index.add doesn't allow the --all parameter
+        logging.debug('Adding all files to git mirror')
+        repo.git.add(all=True)
+        logging.debug('Committing all files into git mirror')
+        repo.index.commit("fdroidserver git-mirror")
+
+        if options.verbose:
+            bar = progress.Bar()
+
+            class MyProgressPrinter(git.RemoteProgress):
+                def update(self, op_code, current, maximum=None, message=None):
+                    if isinstance(maximum, float):
+                        bar.show(current, maximum)
+            progress = MyProgressPrinter()
+        else:
+            progress = None
+
+        # push for every remote. This will overwrite the git history
+        for remote in repo.remotes:
+            if remote.name == 'gitlab':
+                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
+                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
+                    out_file.write("""pages:
+  script:
+   - mkdir .public
+   - cp -r * .public/
+   - mv .public public
+  artifacts:
+    paths:
+    - public
+""")
+
+                repo.git.add(all=True)
+                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")
+
+            logging.debug(_('Pushing to {url}').format(url=remote.url))
+            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
+                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
+                for pushinfo in pushinfos:
+                    if pushinfo.flags & (git.remote.PushInfo.ERROR
+                                         | git.remote.PushInfo.REJECTED
+                                         | git.remote.PushInfo.REMOTE_FAILURE
+                                         | git.remote.PushInfo.REMOTE_REJECTED):
+                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
+                                              + ' ' + pushinfo.summary)
+                    else:
+                        logging.debug(remote.url + ': ' + pushinfo.summary)
+
+        if progress:
+            bar.done()
+
+
+def upload_to_android_observatory(repo_section):
+    # depend on requests and lxml only if users enable AO
+    import requests
+    from lxml.html import fromstring
+
+    if repo_section == 'repo':
+        for f in glob.glob(os.path.join(repo_section, '*.apk')):
+            fpath = f
+            fname = os.path.basename(f)
+            logging.info('Uploading ' + fname + ' to androidobservatory.org')
+
+            # upload the file with a post request
+            r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})
+            response = r.text
+            page = r.url
+
+            # from now on XPath will be used to retrieve the message in the HTML
+            # androidobservatory doesn't have a nice API to talk with
+            # so we must scrape the page content
+            tree = fromstring(response)
+            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]
+
+            message = ""
+            appurl = page
+            for el in alert:
+                # if the application was added successfully we retrieve the url
+                # if the application was already uploaded we use the redirect page url
+                if el.attrib.get("href") is not None:
+                    appurl = page + el.attrib["href"][1:]
+                    message += el.text.replace(" here", "") + el.tail
+                else:
+                    message += el.tail
+            message = message.strip() + " " + appurl
+            logging.info(message)
+
+
+def upload_to_virustotal(repo_section, vt_apikey):
+    import json
+    import requests
+
+    logging.getLogger("urllib3").setLevel(logging.WARNING)
+    logging.getLogger("requests").setLevel(logging.WARNING)
+
+    if repo_section == 'repo':
+        if not os.path.exists('virustotal'):
+            os.mkdir('virustotal')
+
+        if os.path.exists(os.path.join(repo_section, 'index-v1.json')):
+            with open(os.path.join(repo_section, 'index-v1.json')) as fp:
+                data = json.load(fp)
+        else:
+            data, _ignored, _ignored = index.get_index_from_jar(os.path.join(repo_section, 'index-v1.jar'))
+
+        for packageName, packages in data['packages'].items():
+            for package in packages:
+                outputfilename = os.path.join('virustotal',
+                                              packageName + '_' + str(package.get('versionCode'))
+                                              + '_' + package['hash'] + '.json')
+                if os.path.exists(outputfilename):
+                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
+                    continue
+                filename = package['apkName']
+                repofilename = os.path.join(repo_section, filename)
+                logging.info('Checking if ' + repofilename + ' is on virustotal')
+
+                headers = {
+                    "User-Agent": "F-Droid"
+                }
+                params = {
+                    'apikey': vt_apikey,
+                    'resource': package['hash'],
+                }
+                needs_file_upload = False
+                while True:
+                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
+                                      params=params, headers=headers)
+                    if r.status_code == 200:
+                        response = r.json()
+                        if response['response_code'] == 0:
+                            needs_file_upload = True
+                        else:
+                            response['filename'] = filename
+                            response['packageName'] = packageName
+                            response['versionCode'] = package.get('versionCode')
+                            response['versionName'] = package.get('versionName')
+                            with open(outputfilename, 'w') as fp:
+                                json.dump(response, fp, indent=2, sort_keys=True)
+
+                        if response.get('positives', 0) > 0:
+                            logging.warning(repofilename + ' has been flagged by virustotal '
+                                            + str(response['positives']) + ' times:'
+                                            + '\n\t' + response['permalink'])
+                        break
+                    elif r.status_code == 204:
+                        time.sleep(10)  # wait for public API rate limiting
+
+                if needs_file_upload:
+                    logging.info('Uploading ' + repofilename + ' to virustotal')
+                    files = {
+                        'file': (filename, open(repofilename, 'rb'))
+                    }
+                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
+                                      params=params, headers=headers, files=files)
+                    response = r.json()
+
+                    logging.info(response['verbose_msg'] + " " + response['permalink'])
+
+
+def push_binary_transparency(git_repo_path, git_remote):
+    '''push the binary transparency git repo to the specified remote.
+
+    If the remote is a local directory, make sure it exists, and is a
+    git repo. This is used to move this git repo from an offline
+    machine onto a flash drive, then onto the online machine. Also,
+    this pulls because pushing to a non-bare git repo is error prone.
+
+    This is also used in offline signing setups, where it then also
+    creates a "local copy dir" git repo that serves to shuttle the git
+    data from the offline machine to the online machine. In that
+    case, git_remote is a dir on the local file system, e.g. a thumb drive.
+ + ''' + import git + + logging.info(_('Pushing binary transparency log to {url}') + .format(url=git_remote)) + + if os.path.isdir(os.path.dirname(git_remote)): + # from offline machine to thumbdrive + remote_path = os.path.abspath(git_repo_path) + if not os.path.isdir(os.path.join(git_remote, '.git')): + os.makedirs(git_remote, exist_ok=True) + thumbdriverepo = git.Repo.init(git_remote) + local = thumbdriverepo.create_remote('local', remote_path) + else: + thumbdriverepo = git.Repo(git_remote) + local = git.remote.Remote(thumbdriverepo, 'local') + if local in thumbdriverepo.remotes: + local = thumbdriverepo.remote('local') + if 'set_url' in dir(local): # force remote URL if using GitPython 2.x + local.set_url(remote_path) + else: + local = thumbdriverepo.create_remote('local', remote_path) + local.pull('master') + else: + # from online machine to remote on a server on the internet + gitrepo = git.Repo(git_repo_path) + origin = git.remote.Remote(gitrepo, 'origin') + if origin in gitrepo.remotes: + origin = gitrepo.remote('origin') + if 'set_url' in dir(origin): # added in GitPython 2.x + origin.set_url(git_remote) + else: + origin = gitrepo.create_remote('origin', git_remote) + origin.push('master') + + +def update_wiki(): + try: + import mwclient + site = mwclient.Site((config['wiki_protocol'], config['wiki_server']), + path=config['wiki_path']) + site.login(config['wiki_user'], config['wiki_password']) + + # Write a page with the last build log for this version code + wiki_page_path = 'deploy_' + time.strftime('%s', start_timestamp) + newpage = site.Pages[wiki_page_path] + txt = '' + txt += "* command line: " + ' '.join(sys.argv) + "\n" + txt += "* started at " + common.get_wiki_timestamp(start_timestamp) + '\n' + txt += "* completed at " + common.get_wiki_timestamp() + '\n' + txt += "\n\n" + newpage.save(txt, summary='Run log') + newpage = site.Pages['deploy'] + newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect') + except Exception as e: + logging.error(_('Error while attempting to publish log: %s') % e) + + +def main(): + global config, options + + # Parse command line... 
+ parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument("command", help=_("command to execute, either 'init' or 'update'")) + parser.add_argument("-i", "--identity-file", default=None, + help=_("Specify an identity file to provide to SSH for rsyncing")) + parser.add_argument("--local-copy-dir", default=None, + help=_("Specify a local folder to sync the repo to")) + parser.add_argument("--no-checksum", action="store_true", default=False, + help=_("Don't use rsync checksums")) + parser.add_argument("--no-keep-git-mirror-archive", action="store_true", default=False, + help=_("If a git mirror gets to big, allow the archive to be deleted")) + options = parser.parse_args() + + config = common.read_config(options) + + if options.command != 'init' and options.command != 'update': + logging.critical(_("The only commands currently supported are 'init' and 'update'")) + sys.exit(1) + + if config.get('nonstandardwebroot') is True: + standardwebroot = False + else: + standardwebroot = True + + for serverwebroot in config.get('serverwebroot', []): + # this supports both an ssh host:path and just a path + s = serverwebroot.rstrip('/').split(':') + if len(s) == 1: + fdroiddir = s[0] + elif len(s) == 2: + host, fdroiddir = s + else: + logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot) + sys.exit(1) + repobase = os.path.basename(fdroiddir) + if standardwebroot and repobase != 'fdroid': + logging.error('serverwebroot path does not end with "fdroid", ' + + 'perhaps you meant one of these:\n\t' + + serverwebroot.rstrip('/') + '/fdroid\n\t' + + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid') + sys.exit(1) + + if options.local_copy_dir is not None: + local_copy_dir = options.local_copy_dir + elif config.get('local_copy_dir'): + local_copy_dir = config['local_copy_dir'] + else: + local_copy_dir = None + if local_copy_dir is not None: + fdroiddir = local_copy_dir.rstrip('/') + if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir): + logging.error(_('local_copy_dir must be directory, not a file!')) + sys.exit(1) + if not os.path.exists(os.path.dirname(fdroiddir)): + logging.error(_('The root dir for local_copy_dir "{path}" does not exist!') + .format(path=os.path.dirname(fdroiddir))) + sys.exit(1) + if not os.path.isabs(fdroiddir): + logging.error(_('local_copy_dir must be an absolute path!')) + sys.exit(1) + repobase = os.path.basename(fdroiddir) + if standardwebroot and repobase != 'fdroid': + logging.error(_('local_copy_dir does not end with "fdroid", ' + + 'perhaps you meant: "{path}"') + .format(path=fdroiddir + '/fdroid')) + sys.exit(1) + if local_copy_dir[-1] != '/': + local_copy_dir += '/' + local_copy_dir = local_copy_dir.replace('//', '/') + if not os.path.exists(fdroiddir): + os.mkdir(fdroiddir) + + if not config.get('awsbucket') \ + and not config.get('serverwebroot') \ + and not config.get('servergitmirrors') \ + and not config.get('androidobservatory') \ + and not config.get('binary_transparency_remote') \ + and not config.get('virustotal_apikey') \ + and local_copy_dir is None: + logging.warn(_('No option set! 
Edit your config.py to set at least one of these:') + + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote') + sys.exit(1) + + repo_sections = ['repo'] + if config['archive_older'] != 0: + repo_sections.append('archive') + if not os.path.exists('archive'): + os.mkdir('archive') + if config['per_app_repos']: + repo_sections += common.get_per_app_repos() + + if options.command == 'init': + ssh = paramiko.SSHClient() + ssh.load_system_host_keys() + for serverwebroot in config.get('serverwebroot', []): + sshstr, remotepath = serverwebroot.rstrip('/').split(':') + if sshstr.find('@') >= 0: + username, hostname = sshstr.split('@') + else: + username = pwd.getpwuid(os.getuid())[0] # get effective uid + hostname = sshstr + ssh.connect(hostname, username=username) + sftp = ssh.open_sftp() + if os.path.basename(remotepath) \ + not in sftp.listdir(os.path.dirname(remotepath)): + sftp.mkdir(remotepath, mode=0o755) + for repo_section in repo_sections: + repo_path = os.path.join(remotepath, repo_section) + if os.path.basename(repo_path) \ + not in sftp.listdir(remotepath): + sftp.mkdir(repo_path, mode=0o755) + sftp.close() + ssh.close() + elif options.command == 'update': + for repo_section in repo_sections: + if local_copy_dir is not None: + if config['sync_from_local_copy_dir']: + sync_from_localcopy(repo_section, local_copy_dir) + else: + update_localcopy(repo_section, local_copy_dir) + for serverwebroot in config.get('serverwebroot', []): + update_serverwebroot(serverwebroot, repo_section) + if config.get('servergitmirrors', []): + # update_servergitmirrors will take care of multiple mirrors so don't need a foreach + servergitmirrors = config.get('servergitmirrors', []) + update_servergitmirrors(servergitmirrors, repo_section) + if config.get('awsbucket'): + update_awsbucket(repo_section) + if config.get('androidobservatory'): + upload_to_android_observatory(repo_section) + if config.get('virustotal_apikey'): + upload_to_virustotal(repo_section, config.get('virustotal_apikey')) + + binary_transparency_remote = config.get('binary_transparency_remote') + if binary_transparency_remote: + push_binary_transparency(BINARY_TRANSPARENCY_DIR, + binary_transparency_remote) + + if config.get('wiki_server') and config.get('wiki_path'): + update_wiki() + + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/fdroidserver/signatures.py b/fdroidserver/signatures.py index 00c9d264..03b2d6f2 100644 --- a/fdroidserver/signatures.py +++ b/fdroidserver/signatures.py @@ -15,17 +15,21 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import logging -import os -import re -import sys from argparse import ArgumentParser -from . import _, common +import re +import os +import sys +import logging + +from . import _ +from . import common +from . 
import net from .exception import FDroidException def extract_signature(apkpath): + if not os.path.exists(apkpath): raise FDroidException("file APK does not exists '{}'".format(apkpath)) if not common.verify_apk_signature(apkpath): @@ -42,6 +46,7 @@ def extract_signature(apkpath): def extract(options): + # Create tmp dir if missing… tmp_dir = 'tmp' if not os.path.exists(tmp_dir): @@ -57,53 +62,39 @@ def extract(options): try: if os.path.isfile(apk): sigdir = extract_signature(apk) - logging.info( - _("Fetched signatures for '{apkfilename}' -> '{sigdir}'").format( - apkfilename=apk, sigdir=sigdir - ) - ) + logging.info(_("Fetched signatures for '{apkfilename}' -> '{sigdir}'") + .format(apkfilename=apk, sigdir=sigdir)) elif httpre.match(apk): if apk.startswith('https') or options.no_check_https: try: - from . import net - tmp_apk = os.path.join(tmp_dir, 'signed.apk') net.download_file(apk, tmp_apk) sigdir = extract_signature(tmp_apk) - logging.info( - _( - "Fetched signatures for '{apkfilename}' -> '{sigdir}'" - ).format(apkfilename=apk, sigdir=sigdir) - ) + logging.info(_("Fetched signatures for '{apkfilename}' -> '{sigdir}'") + .format(apkfilename=apk, sigdir=sigdir)) finally: if tmp_apk and os.path.exists(tmp_apk): os.remove(tmp_apk) else: - logging.warning( - _( - 'refuse downloading via insecure HTTP connection ' - '(use HTTPS or specify --no-https-check): {apkfilename}' - ).format(apkfilename=apk) - ) + logging.warn(_('refuse downloading via insecure HTTP connection (use HTTPS or specify --no-https-check): {apkfilename}').format(apkfilename=apk)) except FDroidException as e: - logging.warning( - _("Failed fetching signatures for '{apkfilename}': {error}").format( - apkfilename=apk, error=e - ) - ) + logging.warning(_("Failed fetching signatures for '{apkfilename}': {error}") + .format(apkfilename=apk, error=e)) if e.detail: logging.debug(e.detail) def main(): - parser = ArgumentParser() + + # Parse command line... + parser = ArgumentParser(usage="%(prog)s [options] APK [APK...]") common.setup_global_opts(parser) - parser.add_argument( - "APK", nargs='*', help=_("signed APK, either a file-path or HTTPS URL.") - ) + parser.add_argument("APK", nargs='*', + help=_("signed APK, either a file-path or HTTPS URL.")) parser.add_argument("--no-check-https", action="store_true", default=False) - options = common.parse_args(parser) - common.set_console_logging(options.verbose, options.color) - common.read_config() + options = parser.parse_args() + + # Read config.py... + common.read_config(options) extract(options) diff --git a/fdroidserver/signindex.py b/fdroidserver/signindex.py index 47cd5ec2..cbd55239 100644 --- a/fdroidserver/signindex.py +++ b/fdroidserver/signindex.py @@ -16,182 +16,85 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import json -import logging import os -import time import zipfile from argparse import ArgumentParser +import logging -from . import _, common, metadata +from . import _ +from . import common from .exception import FDroidException config = None -start_timestamp = time.gmtime() +options = None -def sign_jar(jar, use_old_algs=False): - """Sign a JAR file with the best available algorithm. 
- - The current signing method uses apksigner to sign the JAR so that - it will automatically select algorithms that are compatible with - Android SDK 23, which added the most recent algorithms: - https://developer.android.com/reference/java/security/Signature - - This signing method uses then inherits the default signing - algothim settings, since Java and Android both maintain those. - That helps avoid a repeat of being stuck on an old signing - algorithm. That means specifically that this call to apksigner - does not specify any of the algorithms. - - The old indexes must be signed by SHA1withRSA otherwise they will - no longer be compatible with old Androids. +def sign_jar(jar): + """ + Sign a JAR file with Java's jarsigner. This method requires a properly initialized config object. + This does use old hashing algorithms, i.e. SHA1, but that's not + broken yet for file verification. This could be set to SHA256, + but then Android < 4.3 would not be able to verify it. + https://code.google.com/p/android/issues/detail?id=38321 """ - if use_old_algs: - # This does use old hashing algorithms, i.e. SHA1, but that's not - # broken yet for file verification. This could be set to SHA256, - # but then Android < 4.3 would not be able to verify it. - # https://code.google.com/p/android/issues/detail?id=38321 - args = [ - config['jarsigner'], - '-keystore', - config['keystore'], - '-storepass:env', - 'FDROID_KEY_STORE_PASS', - '-digestalg', - 'SHA1', - '-sigalg', - 'SHA1withRSA', - jar, - config['repo_keyalias'], - ] - if config['keystore'] == 'NONE': - args += config['smartcardoptions'] - else: # smardcards never use -keypass - args += ['-keypass:env', 'FDROID_KEY_PASS'] - else: - # https://developer.android.com/studio/command-line/apksigner - args = [ - config['apksigner'], - 'sign', - '--min-sdk-version', - '23', # enable all current algorithms - '--max-sdk-version', - '24', # avoid future incompatible algorithms - # disable all APK signature types, only use JAR sigs aka v1 - '--v1-signing-enabled', - 'true', - '--v2-signing-enabled', - 'false', - '--v3-signing-enabled', - 'false', - '--v4-signing-enabled', - 'false', - '--ks', - config['keystore'], - '--ks-pass', - 'env:FDROID_KEY_STORE_PASS', - '--ks-key-alias', - config['repo_keyalias'], - ] - if config['keystore'] == 'NONE': - args += common.get_apksigner_smartcardoptions(config['smartcardoptions']) - else: # smardcards never use --key-pass - args += ['--key-pass', 'env:FDROID_KEY_PASS'] - args += [jar] + args = [config['jarsigner'], '-keystore', config['keystore'], + '-storepass:env', 'FDROID_KEY_STORE_PASS', + '-digestalg', 'SHA1', '-sigalg', 'SHA1withRSA', + jar, config['repo_keyalias']] + if config['keystore'] == 'NONE': + args += config['smartcardoptions'] + else: # smardcards never use -keypass + args += ['-keypass:env', 'FDROID_KEY_PASS'] env_vars = { 'FDROID_KEY_STORE_PASS': config['keystorepass'], - 'FDROID_KEY_PASS': config.get('keypass', ""), + 'FDROID_KEY_PASS': config['keypass'], } p = common.FDroidPopen(args, envs=env_vars) - if not use_old_algs and p.returncode != 0: - # workaround for apksigner v30 on f-droid.org publish server - v4 = args.index("--v4-signing-enabled") - del args[v4 + 1] - del args[v4] - p = common.FDroidPopen(args, envs=env_vars) - if p.returncode != 0: - raise FDroidException("Failed to sign %s: %s" % (jar, p.output)) + if p.returncode != 0: + raise FDroidException("Failed to sign %s!" % jar) -def sign_index(repodir, json_name): - """Sign data file like entry.json to make a signed JAR like entry.jar. 
- - The data file like index-v1.json means that there is unsigned - data. That file is then stuck into a jar and signed by the - signing process. This is a bit different than sign_jar, which is - used for index.jar: that creates index.xml then puts that in a - index_unsigned.jar, then that file is signed. - - This also checks to make sure that the JSON files are intact - before signing them. Broken JSON files should never be signed, so - taking some extra time and failing hard is the preferred - option. This signing process can happen on an entirely separate - machine and file tree, so this ensures that nothing got broken - during transfer. - +def sign_index_v1(repodir, json_name): """ - json_file = os.path.join(repodir, json_name) - with open(json_file, encoding="utf-8") as fp: - data = json.load(fp) - if json_name == 'entry.json': - index_file = os.path.join(repodir, data['index']['name'].lstrip('/')) - sha256 = common.sha256sum(index_file) - if sha256 != data['index']['sha256']: - raise FDroidException( - _('%s has bad SHA-256: %s') % (index_file, sha256) - ) - with open(index_file) as fp: - index = json.load(fp) - if not isinstance(index, dict): - raise FDroidException(_('%s did not produce a dict!') % index_file) - elif json_name == 'index-v1.json': - [metadata.App(app) for app in data["apps"]] + Sign index-v1.json to make index-v1.jar + This is a bit different than index.jar: instead of their being index.xml + and index_unsigned.jar, the presence of index-v1.json means that there is + unsigned data. That file is then stuck into a jar and signed by the + signing process. index-v1.json is never published to the repo. It is + included in the binary transparency log, if that is enabled. + """ name, ext = common.get_extension(json_name) + index_file = os.path.join(repodir, json_name) jar_file = os.path.join(repodir, name + '.jar') with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar: - jar.write(json_file, json_name) - - if json_name in ('index.xml', 'index-v1.json'): - sign_jar(jar_file, use_old_algs=True) - else: - sign_jar(jar_file) - - -def status_update_json(signed): - """Output a JSON file with metadata about this run.""" - logging.debug(_('Outputting JSON')) - output = common.setup_status_output(start_timestamp) - if signed: - output['signed'] = signed - common.write_status_json(output) + jar.write(index_file, json_name) + sign_jar(jar_file) def main(): - global config - parser = ArgumentParser() + global config, options + + # Parse command line... + parser = ArgumentParser(usage="%(prog)s [options]") common.setup_global_opts(parser) - common.parse_args(parser) + options = parser.parse_args() - config = common.read_config() + config = common.read_config(options) if 'jarsigner' not in config: raise FDroidException( - _( - 'Java jarsigner not found! Install in standard location or set java_paths!' - ) - ) + _('Java jarsigner not found! 
Install in standard location or set java_paths!')) repodirs = ['repo'] if config['archive_older'] != 0: repodirs.append('archive') - signed = [] + signed = 0 for output_dir in repodirs: if not os.path.isdir(output_dir): raise FDroidException("Missing output directory '" + output_dir + "'") @@ -199,28 +102,20 @@ def main(): unsigned = os.path.join(output_dir, 'index_unsigned.jar') if os.path.exists(unsigned): sign_jar(unsigned) - index_jar = os.path.join(output_dir, 'index.jar') - os.rename(unsigned, index_jar) + os.rename(unsigned, os.path.join(output_dir, 'index.jar')) logging.info('Signed index in ' + output_dir) - signed.append(index_jar) + signed += 1 json_name = 'index-v1.json' index_file = os.path.join(output_dir, json_name) if os.path.exists(index_file): - sign_index(output_dir, json_name) + sign_index_v1(output_dir, json_name) + os.remove(index_file) logging.info('Signed ' + index_file) - signed.append(index_file) + signed += 1 - json_name = 'entry.json' - index_file = os.path.join(output_dir, json_name) - if os.path.exists(index_file): - sign_index(output_dir, json_name) - logging.info('Signed ' + index_file) - signed.append(index_file) - - if not signed: + if signed == 0: logging.info(_("Nothing to do")) - status_update_json(signed) if __name__ == "__main__": diff --git a/fdroidserver/stats.py b/fdroidserver/stats.py new file mode 100644 index 00000000..9d6cbfdc --- /dev/null +++ b/fdroidserver/stats.py @@ -0,0 +1,306 @@ +#!/usr/bin/env python3 +# +# stats.py - part of the FDroid server tools +# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import sys +import os +import re +import time +import traceback +import glob +import json +from argparse import ArgumentParser +import paramiko +import socket +import logging +import subprocess +from collections import Counter + +from . import _ +from . import common +from . import metadata + + +def carbon_send(key, value): + s = socket.socket() + s.connect((config['carbon_host'], config['carbon_port'])) + msg = '%s %d %d\n' % (key, value, int(time.time())) + s.sendall(msg) + s.close() + + +options = None +config = None + + +def most_common_stable(counts): + pairs = [] + for s in counts: + pairs.append((s, counts[s])) + return sorted(pairs, key=lambda t: (-t[1], t[0])) + + +def main(): + + global options, config + + # Parse command line... 
+ parser = ArgumentParser() + common.setup_global_opts(parser) + parser.add_argument("-d", "--download", action="store_true", default=False, + help=_("Download logs we don't have")) + parser.add_argument("--recalc", action="store_true", default=False, + help=_("Recalculate aggregate stats - use when changes " + "have been made that would invalidate old cached data.")) + parser.add_argument("--nologs", action="store_true", default=False, + help=_("Don't do anything logs-related")) + metadata.add_metadata_arguments(parser) + options = parser.parse_args() + metadata.warnings_action = options.W + + config = common.read_config(options) + + if not config['update_stats']: + logging.info("Stats are disabled - set \"update_stats = True\" in your config.py") + sys.exit(1) + + # Get all metadata-defined apps... + allmetaapps = [app for app in metadata.read_metadata().values()] + metaapps = [app for app in allmetaapps if not app.Disabled] + + statsdir = 'stats' + logsdir = os.path.join(statsdir, 'logs') + datadir = os.path.join(statsdir, 'data') + if not os.path.exists(statsdir): + os.mkdir(statsdir) + if not os.path.exists(logsdir): + os.mkdir(logsdir) + if not os.path.exists(datadir): + os.mkdir(datadir) + + if options.download: + # Get any access logs we don't have... + ssh = None + ftp = None + try: + logging.info('Retrieving logs') + ssh = paramiko.SSHClient() + ssh.load_system_host_keys() + ssh.connect(config['stats_server'], username=config['stats_user'], + timeout=10, key_filename=config['webserver_keyfile']) + ftp = ssh.open_sftp() + ftp.get_channel().settimeout(60) + logging.info("...connected") + + ftp.chdir('logs') + files = ftp.listdir() + for f in files: + if f.startswith('access-') and f.endswith('.log.gz'): + + destpath = os.path.join(logsdir, f) + destsize = ftp.stat(f).st_size + if not os.path.exists(destpath) \ + or os.path.getsize(destpath) != destsize: + logging.debug("...retrieving " + f) + ftp.get(f, destpath) + except Exception: + traceback.print_exc() + sys.exit(1) + finally: + # Disconnect + if ftp is not None: + ftp.close() + if ssh is not None: + ssh.close() + + knownapks = common.KnownApks() + unknownapks = [] + + if not options.nologs: + # Process logs + logging.info('Processing logs...') + appscount = Counter() + appsvercount = Counter() + logexpr = r'(?P[.:0-9a-fA-F]+) - - \[(?P
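As a rough illustration of the named-group regex technique the stats log processing above relies on, here is a minimal, self-contained sketch. The pattern, field names, and helper function below are assumptions for illustration only, not the project's actual logexpr or processing loop.

# Illustrative sketch only: a named-group regex for combined-format
# web access log lines, in the spirit of the logexpr used above.
import re
from collections import Counter

LOG_PATTERN = re.compile(
    r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>[^\]]+)\] '
    r'"GET (?P<uri>\S+) HTTP/[\d.]+" (?P<statuscode>\d+) \d+'
)

def count_apk_downloads(lines):
    """Count successful APK downloads per path from access log lines."""
    counts = Counter()
    for line in lines:
        match = LOG_PATTERN.match(line)
        if not match:
            continue
        # only count completed requests for APK files
        if match.group('statuscode') == '200' and match.group('uri').endswith('.apk'):
            counts[match.group('uri')] += 1
    return counts

# Example usage with a single fabricated log line:
sample = '203.0.113.7 - - [01/Jan/2018:00:00:00 +0000] "GET /repo/org.example_1.apk HTTP/1.1" 200 12345'
print(count_apk_downloads([sample]))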

      In order to keep away curious eyes, SMS-bypass filters incoming SMS messages before they reach your inbox. Based on bughunter2.smsfilter.

      Features:

      • Discrete fake app \"Battery level\": Long tap on Battery percentage will show SMS.
      • Filter incoming SMS specified address: redirect the SMS to SMS-bypass messages list; remove SMS arrival sound or vibration; show a discreet notification icon (battery level); vibrate if checked in settings
      • Add contact from contact list
      • Export messages to a text file
      ", "donate": "http://rodolphe.souchaud.free.fr/donate", + "flattrID": "cad90e036b975ed129a3ce80a0750466", "issueTracker": "https://gitlab.com/souch/SMSbypass/issues", "license": "GPL-3.0-only", + "name": "Battery level", "sourceCode": "https://gitlab.com/souch/SMSbypass/tree/HEAD", "summary": "Filter SMS and show them in a fake app", "webSite": "https://gitlab.com/souch/SMSbypass", "added": 1524700800000, "icon": "souch.smsbypass.9.png", "packageName": "souch.smsbypass", - "lastUpdated": 1524700800000, - "localized": { - "en-US": { - "name": "Battery level" - } - } + "lastUpdated": 1524700800000 }, { "categories": [ @@ -53,6 +50,7 @@ "name": "Caffeine Tile", "summary": "Test app for extracting icons when an XML one is default", "added": 1539129600000, + "icon": "info.zwanenburg.caffeinetile.4.xml", "packageName": "info.zwanenburg.caffeinetile", "lastUpdated": 1539129600000 }, @@ -75,31 +73,17 @@ ], "suggestedVersionName": "0.2.1", "suggestedVersionCode": "2000", - "description": "F-Droid can make use of system privileges or permissions to\ninstall, update and remove applications on its own. The only way to obtain those\nprivileges is to become a system app.\n\nThis is where the Privileged Extension comes in - being a separate app and much\nsmaller, it can be installed as a system app and communicate with the main app\nvia AIDL IPC.\n\nThis has several advantages:\n\n* Reduced disk usage in the system partition\n* System updates don't remove F-Droid\n* The process of installing into system via root is safer\n\nThis is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed\nusing TWRP or other Android recovery that can flash updates to the system from\nthe /data/data/org.fdroid.fdroid folder on the /data partition. The standalone\nAPK is called F-Droid Privileged Extension.", + "description": "

      F-Droid can make use of system privileges or permissions to install, update and remove applications on its own. The only way to obtain those privileges is to become a system app.

      This is where the Privileged Extension comes in - being a separate app and much smaller, it can be installed as a system app and communicate with the main app via AIDL IPC.

      This has several advantages:

      • Reduced disk usage in the system partition
      • System updates don't remove F-Droid
      • The process of installing into system via root is safer

      This is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed using TWRP or other Android recovery that can flash updates to the system from the /data/data/org.fdroid.fdroid folder on the /data partition. The standalone APK is called F-Droid Privileged Extension.

      ", "donate": "https://f-droid.org/about", "issueTracker": "https://gitlab.com/fdroid/privileged-extension/issues", "license": "Apache-2.0", + "name": "fake.ota.update_1234", "sourceCode": "https://gitlab.com/fdroid/privileged-extension", "summary": "Tests whether OTA ZIP files are being include", "webSite": "https://f-droid.org", "added": 1457568000000, "packageName": "fake.ota.update", - "lastUpdated": 1457568000000, - "localized": { - "en-US": { - "name": "fake.ota.update_1234" - } - } - }, - { - "suggestedVersionCode": "4", - "license": "Unknown", - "name": "maxSdkVersion", - "summary": "Test setting maxSdkVersion in an APK", - "added": 1739805810000, - "icon": "org.maxsdkversion.4.png", - "packageName": "org.maxsdkversion", - "lastUpdated": 1739805810000 + "lastUpdated": 1457568000000 }, { "categories": [ @@ -122,16 +106,12 @@ "suggestedVersionCode": "99999999", "liberapay": "12334", "license": "GPL-3.0-only", + "name": "OBB Main Old Version", "sourceCode": "https://github.com/eighthave/urzip", "added": 1388448000000, "icon": "obb.main.oldversion.1444412523.png", "packageName": "obb.main.oldversion", - "lastUpdated": 1388448000000, - "localized": { - "en-US": { - "name": "OBB Main Old Version" - } - } + "lastUpdated": 1388448000000 }, { "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", @@ -140,16 +120,12 @@ ], "suggestedVersionCode": "99999999", "license": "GPL-3.0-only", + "name": "OBB Main Two Versions", "sourceCode": "https://github.com/eighthave/urzip", "added": 1444608000000, "icon": "obb.main.twoversions.1101617.png", "packageName": "obb.main.twoversions", - "lastUpdated": 1466380800000, - "localized": { - "en-US": { - "name": "OBB Main Two Versions" - } - } + "lastUpdated": 1466380800000 }, { "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", @@ -158,6 +134,7 @@ ], "suggestedVersionCode": "99999999", "license": "GPL-3.0-only", + "name": "OBB Main/Patch Current", "sourceCode": "https://github.com/eighthave/urzip", "added": 1461369600000, "icon": "obb.mainpatch.current.1619.png", @@ -165,9 +142,8 @@ "lastUpdated": 1496275200000, "localized": { "en-US": { - "featureGraphic": "featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png", - "icon": "icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png", - "name": "OBB Main/Patch Current", + "featureGraphic": "featureGraphic.png", + "icon": "icon.png", "phoneScreenshots": [ "screenshot-main.png" ], @@ -179,30 +155,23 @@ }, { "antiFeatures": [ - "NoSourceSince", - "NonFreeNet" + "NoSourceSince" ], "categories": [ - "Multimedia", - "Security", "Time" ], "suggestedVersionName": "1.5", "suggestedVersionCode": "6", - "description": "Activates silent mode during calendar events.", + "description": "

      Activates silent mode during calendar events.

      ", "issueTracker": "https://github.com/miguelvps/PoliteDroid/issues", "license": "GPL-3.0-only", + "name": "Polite Droid", "sourceCode": "https://github.com/miguelvps/PoliteDroid", "summary": "Calendar tool", "added": 1498176000000, "icon": "com.politedroid.6.png", "packageName": "com.politedroid", - "lastUpdated": 1498176000000, - "localized": { - "en-US": { - "name": "Polite Droid" - } - } + "lastUpdated": 1498176000000 }, { "authorWebSite": "https://guardianproject.info", @@ -214,9 +183,11 @@ "2.0" ], "suggestedVersionCode": "2147483647", - "description": "It\u2019s Urzip \u662f\u4e00\u4e2a\u83b7\u5f97\u5df2\u5b89\u88c5 APK \u76f8\u5173\u4fe1\u606f\u7684\u5b9e\u7528\u5de5\u5177\u3002\u5b83\u4ece\u60a8\u7684\u8bbe\u5907\u4e0a\u5df2\u5b89\u88c5\u7684\u6240\u6709\u5e94\u7528\u5f00\u59cb\uff0c\u4e00\u952e\u89e6\u6478\u5373\u53ef\u663e\u793a APK \u7684\u6307\u7eb9\uff0c\u5e76\u4e14\u63d0\u4f9b\u5230\u8fbe virustotal.com \u548c androidobservatory.org \u7684\u5feb\u6377\u94fe\u63a5\uff0c\u8ba9\u60a8\u65b9\u4fbf\u5730\u4e86\u89e3\u7279\u5b9a APK \u7684\u6863\u6848\u3002\u5b83\u8fd8\u53ef\u4ee5\u8ba9\u60a8\u5bfc\u51fa\u7b7e\u540d\u8bc1\u4e66\u548c\u751f\u6210 ApkSignaturePin Pin \u6587\u4ef6\u4f9b TrustedIntents \u5e93\u4f7f\u7528\u3002\n\n\u2605 Urzip \u652f\u6301\u4e0b\u5217\u8bed\u8a00\uff1a Deutsch, English, espa\u00f1ol, suomi, \u65e5\u672c\u8a9e, \ud55c\uad6d\uc5b4, Norsk, portugu\u00eas (Portugal), \u0420\u0443\u0441\u0441\u043a\u0438\u0439, Sloven\u0161\u010dina, T\u00fcrk\u00e7e\n\u6ca1\u770b\u5230\u60a8\u7684\u8bed\u8a00\uff1f\u5e2e\u5fd9\u7ffb\u8bd1\u672c\u5e94\u7528\u5427\uff1a\nhttps://www.transifex.com/projects/p/urzip\n\n\u2605 \u81f4\u7528\u6237\uff1a\u6211\u4eec\u8fd8\u7f3a\u5c11\u4f60\u559c\u6b22\u7684\u529f\u80fd\uff1f\u53d1\u73b0\u4e86\u4e00\u4e2a bug\uff1f\u8bf7\u544a\u8bc9\u6211\u4eec\uff01\u6211\u4eec\u4e50\u4e8e\u542c\u53d6\u60a8\u7684\u610f\u89c1\u3002\u8bf7\u53d1\u9001\u7535\u5b50\u90ae\u4ef6\u81f3: support@guardianproject.info \u6216\u8005\u52a0\u5165\u6211\u4eec\u7684\u804a\u5929\u5ba4 https://guardianproject.info/contact\n", + "description": "

      It\u2019s Urzip \u662f\u4e00\u4e2a\u83b7\u5f97\u5df2\u5b89\u88c5 APK \u76f8\u5173\u4fe1\u606f\u7684\u5b9e\u7528\u5de5\u5177\u3002\u5b83\u4ece\u60a8\u7684\u8bbe\u5907\u4e0a\u5df2\u5b89\u88c5\u7684\u6240\u6709\u5e94\u7528\u5f00\u59cb\uff0c\u4e00\u952e\u89e6\u6478\u5373\u53ef\u663e\u793a APK \u7684\u6307\u7eb9\uff0c\u5e76\u4e14\u63d0\u4f9b\u5230\u8fbe virustotal.com \u548c androidobservatory.org \u7684\u5feb\u6377\u94fe\u63a5\uff0c\u8ba9\u60a8\u65b9\u4fbf\u5730\u4e86\u89e3\u7279\u5b9a APK \u7684\u6863\u6848\u3002\u5b83\u8fd8\u53ef\u4ee5\u8ba9\u60a8\u5bfc\u51fa\u7b7e\u540d\u8bc1\u4e66\u548c\u751f\u6210 ApkSignaturePin Pin \u6587\u4ef6\u4f9b TrustedIntents \u5e93\u4f7f\u7528\u3002

      \u2605 Urzip \u652f\u6301\u4e0b\u5217\u8bed\u8a00\uff1a Deutsch, English, espa\u00f1ol, suomi, \u65e5\u672c\u8a9e, \ud55c\uad6d\uc5b4, Norsk, portugu\u00eas (Portugal), \u0420\u0443\u0441\u0441\u043a\u0438\u0439, Sloven\u0161\u010dina, T\u00fcrk\u00e7e \u6ca1\u770b\u5230\u60a8\u7684\u8bed\u8a00\uff1f\u5e2e\u5fd9\u7ffb\u8bd1\u672c\u5e94\u7528\u5427\uff1a https://www.transifex.com/projects/p/urzip

      \u2605 \u81f4\u7528\u6237\uff1a\u6211\u4eec\u8fd8\u7f3a\u5c11\u4f60\u559c\u6b22\u7684\u529f\u80fd\uff1f\u53d1\u73b0\u4e86\u4e00\u4e2a bug\uff1f\u8bf7\u544a\u8bc9\u6211\u4eec\uff01\u6211\u4eec\u4e50\u4e8e\u542c\u53d6\u60a8\u7684\u610f\u89c1\u3002\u8bf7\u53d1\u9001\u7535\u5b50\u90ae\u4ef6\u81f3: support@guardianproject.info \u6216\u8005\u52a0\u5165\u6211\u4eec\u7684\u804a\u5929\u5ba4 https://guardianproject.info/contact

      ", "issueTracker": "https://dev.guardianproject.info/projects/urzip/issues", + "liberapayID": "9999999", "license": "GPL-3.0-only", + "name": "urzip-\u03c0\u00c7\u00c7\u03c0\u00c7\u00c7\u73b0\u4ee3\u6c49\u8bed\u901a\u7528\u5b57-\u0431\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438-\u0639\u0631\u0628\u064a1234", "openCollective": "f-droid-just-testing", "sourceCode": "https://github.com/guardianproject/urzip", "summary": "\u4e00\u4e2a\u5b9e\u7528\u5de5\u5177\uff0c\u83b7\u53d6\u5df2\u5b89\u88c5\u5728\u60a8\u7684\u8bbe\u5907\u4e0a\u7684\u5e94\u7528\u7684\u6709\u5173\u4fe1\u606f", @@ -228,12 +199,11 @@ "localized": { "en-US": { "description": "full description\n", - "featureGraphic": "featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png", - "icon": "icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png", - "name": "title", - "summary": "short description", - "video": "video", - "whatsNew": "default" + "featureGraphic": "featureGraphic.png", + "icon": "icon.png", + "name": "title\n", + "summary": "short description\n", + "video": "video\n" } } } @@ -243,8 +213,7 @@ { "added": 1498176000000, "antiFeatures": [ - "KnownVuln", - "NonFreeAssets" + "NoSourceSince" ], "apkName": "com.politedroid_6.apk", "hash": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", @@ -270,6 +239,9 @@ }, { "added": 1498176000000, + "antiFeatures": [ + "NoSourceSince" + ], "apkName": "com.politedroid_5.apk", "hash": "5bdbfa071cca4b8d05ced41d6b28763595d6e8096cca5bbf0f9253c9a2622e5d", "hashType": "sha256", @@ -294,6 +266,9 @@ }, { "added": 1498176000000, + "antiFeatures": [ + "NoSourceSince" + ], "apkName": "com.politedroid_4.apk", "hash": "c809bdff83715fbf919f3840ee09869b038e209378b906e135ee40d3f0e1f075", "hashType": "sha256", @@ -331,7 +306,9 @@ "added": 1498176000000, "antiFeatures": [ "KnownVuln", - "NonFreeAssets" + "NoSourceSince", + "NonFreeAssets", + "UpstreamNonFree" ], "apkName": "com.politedroid_3.apk", "hash": "665d03d61ebc642289fda697f71a59305b0202b16cafc5ffdae91cbe91f0b25d", @@ -660,32 +637,6 @@ "versionName": "0.1" } ], - "org.maxsdkversion": [ - { - "added": 1739805810000, - "apkName": "org.maxsdkversion_4.apk", - "features": [ - "android.hardware.camera.front" - ], - "hash": "877d582369d2840fc0d6892e44feaaad21419b0e35af42f22b3e127bcd08274d", - "hashType": "sha256", - "maxSdkVersion": 25, - "minSdkVersion": 14, - "packageName": "org.maxsdkversion", - "sig": "1a5e67bcef6b2d6242f2d36982b54589", - "signer": "401a3a5843a3d5cebc22e6de5cb76d08eaa6797122d7fe1283df1d192e132f5e", - "size": 12768, - "targetSdkVersion": 19, - "uses-permission": [ - [ - "android.permission.CAMERA", - null - ] - ], - "versionCode": 4, - "versionName": "1.0.3" - } - ], "souch.smsbypass": [ { "added": 1524700800000, @@ -729,4 +680,4 @@ } ] } -} \ No newline at end of file +} diff --git a/tests/repo/index-v2.json b/tests/repo/index-v2.json deleted file mode 100644 index 263dffd2..00000000 --- a/tests/repo/index-v2.json +++ /dev/null @@ -1,1472 +0,0 @@ -{ - "repo": { - "name": { - "en-US": "My First F-Droid Repo Demo" - }, - "description": { - "en-US": "This is a repository of apps to be used with F-Droid. Applications in this repository are either official binaries built by the original application developers, or are binaries built from source by the admin of f-droid.org using the tools on https://gitlab.com/fdroid." 
- }, - "icon": { - "en-US": { - "name": "/icons/icon.png", - "sha256": "b1f27fa87f8cabca50cdcd462a0f500d79d883b965a498d0e49eea560b39be1f", - "size": 715 - } - }, - "address": "https://MyFirstFDroidRepo.org/fdroid/repo", - "mirrors": [ - { - "isPrimary": true, - "url": "https://MyFirstFDroidRepo.org/fdroid/repo" - }, - { - "url": "http://foobarfoobarfoobar.onion/fdroid/repo" - }, - { - "url": "https://foo.bar/fdroid/repo" - } - ], - "timestamp": 1676634233000, - "antiFeatures": { - "Ads": { - "description": { - "de": "Diese App enthält Werbung", - "en-US": "This app contains advertising", - "fa": "این کاره دارای تبلیغات است", - "ro": "Aplicația conține reclamă", - "zh-rCN": "此应用包含广告" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_ads.xml", - "sha256": "b333528573134c5de73484862a1b567a0bdfd6878d183f8500287abadc0ba60e", - "size": 1564 - }, - "en-US": { - "name": "/icons/ic_antifeature_ads.xml", - "sha256": "b333528573134c5de73484862a1b567a0bdfd6878d183f8500287abadc0ba60e", - "size": 1564 - }, - "fa": { - "name": "/icons/ic_antifeature_ads.xml", - "sha256": "b333528573134c5de73484862a1b567a0bdfd6878d183f8500287abadc0ba60e", - "size": 1564 - }, - "ro": { - "name": "/icons/ic_antifeature_ads.xml", - "sha256": "b333528573134c5de73484862a1b567a0bdfd6878d183f8500287abadc0ba60e", - "size": 1564 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_ads.xml", - "sha256": "b333528573134c5de73484862a1b567a0bdfd6878d183f8500287abadc0ba60e", - "size": 1564 - } - }, - "name": { - "de": "Werbung", - "en-US": "Ads", - "fa": "تبلیغات", - "ro": "Reclame", - "zh-rCN": "广告" - } - }, - "DisabledAlgorithm": { - "description": { - "de": "Diese App hat eine schwache Sicherheitssignatur", - "en-US": "This app has a weak security signature", - "fa": "این کاره، امضای امنیتی ضعیفی دارد", - "ro": "Aplicația are o semnătură slab securizată", - "zh-rCN": "此应用的安全签名较弱" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_disabledalgorithm.xml", - "sha256": "94dea590c7c0aa37d351ab62a69fc7eefbc2cdbb84b79df3934c2e9332e1dcfb", - "size": 2313 - }, - "en-US": { - "name": "/icons/ic_antifeature_disabledalgorithm.xml", - "sha256": "94dea590c7c0aa37d351ab62a69fc7eefbc2cdbb84b79df3934c2e9332e1dcfb", - "size": 2313 - }, - "fa": { - "name": "/icons/ic_antifeature_disabledalgorithm.xml", - "sha256": "94dea590c7c0aa37d351ab62a69fc7eefbc2cdbb84b79df3934c2e9332e1dcfb", - "size": 2313 - }, - "ro": { - "name": "/icons/ic_antifeature_disabledalgorithm.xml", - "sha256": "94dea590c7c0aa37d351ab62a69fc7eefbc2cdbb84b79df3934c2e9332e1dcfb", - "size": 2313 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_disabledalgorithm.xml", - "sha256": "94dea590c7c0aa37d351ab62a69fc7eefbc2cdbb84b79df3934c2e9332e1dcfb", - "size": 2313 - } - }, - "name": { - "de": "Mit einem unsicheren Algorithmus signiert", - "en-US": "Signed Using An Unsafe Algorithm", - "fa": "امضا شده با الگوریتمی ناامن", - "ro": "Algoritm nesigur semnătură", - "zh-rCN": "使用不安全算法签名" - } - }, - "KnownVuln": { - "description": { - "de": "Diese App enthält eine bekannte Sicherheitslücke", - "en-US": "This app contains a known security vulnerability", - "fa": "این کاره، آسیب‌پذیری امنیتی شناخته‌شده‌ای دارد", - "ro": "Aplicația conține o vulnerabilitate de securitate cunoscută", - "zh-rCN": "此应用包含已知的安全漏洞" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_knownvuln.xml", - "sha256": "743ddcad0120896b03bf62bca9b3b9902878ac9366959a0b77b2c50beeb37f9d", - "size": 1415 - }, - "en-US": { - "name": "/icons/ic_antifeature_knownvuln.xml", - "sha256": 
"743ddcad0120896b03bf62bca9b3b9902878ac9366959a0b77b2c50beeb37f9d", - "size": 1415 - }, - "fa": { - "name": "/icons/ic_antifeature_knownvuln.xml", - "sha256": "743ddcad0120896b03bf62bca9b3b9902878ac9366959a0b77b2c50beeb37f9d", - "size": 1415 - }, - "ro": { - "name": "/icons/ic_antifeature_knownvuln.xml", - "sha256": "743ddcad0120896b03bf62bca9b3b9902878ac9366959a0b77b2c50beeb37f9d", - "size": 1415 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_knownvuln.xml", - "sha256": "743ddcad0120896b03bf62bca9b3b9902878ac9366959a0b77b2c50beeb37f9d", - "size": 1415 - } - }, - "name": { - "de": "Bekannte Sicherheitslücke", - "en-US": "Known Vulnerability", - "fa": "آسیب‌پذیری شناخته", - "ro": "Vulnerabilitate cunoscută", - "zh-rCN": "含有已知漏洞" - } - }, - "NSFW": { - "description": { - "de": "Diese App enthält Inhalte, die nicht überall veröffentlicht oder sichtbar sein sollten", - "en-US": "This app contains content that should not be publicized or visible everywhere", - "fa": "این کاره محتوایی دارد که نباید عمومی شده یا همه‌حا نمایان باشد", - "ro": "Această aplicație conține conținut care nu ar trebui să fie făcut public sau vizibil peste tot", - "zh-rCN": "此应用包含不应宣扬或随处可见的内容" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nsfw.xml", - "sha256": "acab2a7a846700529cd7f2b7a7980f7d04a291f22db8434f3e966f7350ed1465", - "size": 871 - }, - "en-US": { - "name": "/icons/ic_antifeature_nsfw.xml", - "sha256": "acab2a7a846700529cd7f2b7a7980f7d04a291f22db8434f3e966f7350ed1465", - "size": 871 - }, - "fa": { - "name": "/icons/ic_antifeature_nsfw.xml", - "sha256": "acab2a7a846700529cd7f2b7a7980f7d04a291f22db8434f3e966f7350ed1465", - "size": 871 - }, - "ro": { - "name": "/icons/ic_antifeature_nsfw.xml", - "sha256": "acab2a7a846700529cd7f2b7a7980f7d04a291f22db8434f3e966f7350ed1465", - "size": 871 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nsfw.xml", - "sha256": "acab2a7a846700529cd7f2b7a7980f7d04a291f22db8434f3e966f7350ed1465", - "size": 871 - } - }, - "name": { - "de": "NSFW", - "en-US": "NSFW", - "fa": "NSFW", - "ro": "NSFW", - "zh-rCN": "NSFW" - } - }, - "NoSourceSince": { - "description": { - "en-US": "The source code is no longer available, no updates possible." - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nosourcesince.xml", - "sha256": "69c880b075967fe9598c777e18d600e1c1612bf061111911421fe8f6b9d88d4f", - "size": 1102 - }, - "en-US": { - "name": "/icons/ic_antifeature_nosourcesince.xml", - "sha256": "69c880b075967fe9598c777e18d600e1c1612bf061111911421fe8f6b9d88d4f", - "size": 1102 - }, - "fa": { - "name": "/icons/ic_antifeature_nosourcesince.xml", - "sha256": "69c880b075967fe9598c777e18d600e1c1612bf061111911421fe8f6b9d88d4f", - "size": 1102 - }, - "ro": { - "name": "/icons/ic_antifeature_nosourcesince.xml", - "sha256": "69c880b075967fe9598c777e18d600e1c1612bf061111911421fe8f6b9d88d4f", - "size": 1102 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nosourcesince.xml", - "sha256": "69c880b075967fe9598c777e18d600e1c1612bf061111911421fe8f6b9d88d4f", - "size": 1102 - } - }, - "name": { - "de": "Der Quellcode ist nicht mehr erhältlich, keine Aktualisierungen möglich.", - "en-US": "Newer Source Not Available", - "fa": "کد مبدأ دیگر در دسترس نیست. 
به‌روز رسانی ناممکن است.", - "ro": "Codul sursă nu mai este disponibil, nu mai există posibilitatea de a actualiza.", - "zh-rCN": "源代码不再可用,无法更新。" - } - }, - "NonFreeAdd": { - "description": { - "de": "Diese App bewirbt nicht-quelloffene Erweiterungen", - "en-US": "This app promotes non-free add-ons", - "fa": "این کاره، افزونه‌های ناآزاد را تبلیغ می‌کند", - "ro": "Aplicația promovează anexe ce nu sunt software liber", - "zh-rCN": "此应用推广非自由的附加组件" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nonfreeadd.xml", - "sha256": "a1d1f2070bdaabf80ca5a55bccef98c82031ea2f31cc040be5ec009f44ddeef2", - "size": 1846 - }, - "en-US": { - "name": "/icons/ic_antifeature_nonfreeadd.xml", - "sha256": "a1d1f2070bdaabf80ca5a55bccef98c82031ea2f31cc040be5ec009f44ddeef2", - "size": 1846 - }, - "fa": { - "name": "/icons/ic_antifeature_nonfreeadd.xml", - "sha256": "a1d1f2070bdaabf80ca5a55bccef98c82031ea2f31cc040be5ec009f44ddeef2", - "size": 1846 - }, - "ro": { - "name": "/icons/ic_antifeature_nonfreeadd.xml", - "sha256": "a1d1f2070bdaabf80ca5a55bccef98c82031ea2f31cc040be5ec009f44ddeef2", - "size": 1846 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nonfreeadd.xml", - "sha256": "a1d1f2070bdaabf80ca5a55bccef98c82031ea2f31cc040be5ec009f44ddeef2", - "size": 1846 - } - }, - "name": { - "de": "Nicht-quelloffene Erweiterungen", - "en-US": "Non-Free Addons", - "fa": "افزونه‌های ناآزاد", - "ro": "Anexe ne-libere", - "zh-rCN": "非自由附加组件" - } - }, - "NonFreeAssets": { - "description": { - "de": "Diese App enthält nicht-quelloffene Bestandteile", - "en-US": "This app contains non-free assets", - "fa": "این کاره دارای بخش‌های ناآزاد است", - "ro": "Aceasta aplicație conține resurse ce nu sunt la disponibile la liber", - "zh-rCN": "此应用包含非自由资源" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nonfreeassets.xml", - "sha256": "b39fe384386fc67fb30fa2f91402594110e2e42c961d76adc93141b8bd774008", - "size": 1784 - }, - "en-US": { - "name": "/icons/ic_antifeature_nonfreeassets.xml", - "sha256": "b39fe384386fc67fb30fa2f91402594110e2e42c961d76adc93141b8bd774008", - "size": 1784 - }, - "fa": { - "name": "/icons/ic_antifeature_nonfreeassets.xml", - "sha256": "b39fe384386fc67fb30fa2f91402594110e2e42c961d76adc93141b8bd774008", - "size": 1784 - }, - "ro": { - "name": "/icons/ic_antifeature_nonfreeassets.xml", - "sha256": "b39fe384386fc67fb30fa2f91402594110e2e42c961d76adc93141b8bd774008", - "size": 1784 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nonfreeassets.xml", - "sha256": "b39fe384386fc67fb30fa2f91402594110e2e42c961d76adc93141b8bd774008", - "size": 1784 - } - }, - "name": { - "de": "Nicht-quelloffene Bestandteile", - "en-US": "Non-Free Assets", - "fa": "بخش‌های ناآزاد", - "ro": "Resurse ne-libere", - "zh-rCN": "非自由资产" - } - }, - "NonFreeDep": { - "description": { - "de": "Diese App ist abhängig von anderen nicht-quelloffenen Apps", - "en-US": "This app depends on other non-free apps", - "fa": "این کاره به دیگر کاره‌های ناآزاد وابسته است", - "ro": "Aplicația depinde de alte aplicații ce nu sunt software liber", - "zh-rCN": "此应用依赖于其它非自由应用" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nonfreedep.xml", - "sha256": "c1b4052a8f58125b2120d9ca07adb725d47bfa7cfcea80c4d6bbbc432b5cb83a", - "size": 1396 - }, - "en-US": { - "name": "/icons/ic_antifeature_nonfreedep.xml", - "sha256": "c1b4052a8f58125b2120d9ca07adb725d47bfa7cfcea80c4d6bbbc432b5cb83a", - "size": 1396 - }, - "fa": { - "name": "/icons/ic_antifeature_nonfreedep.xml", - "sha256": "c1b4052a8f58125b2120d9ca07adb725d47bfa7cfcea80c4d6bbbc432b5cb83a", 
- "size": 1396 - }, - "ro": { - "name": "/icons/ic_antifeature_nonfreedep.xml", - "sha256": "c1b4052a8f58125b2120d9ca07adb725d47bfa7cfcea80c4d6bbbc432b5cb83a", - "size": 1396 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nonfreedep.xml", - "sha256": "c1b4052a8f58125b2120d9ca07adb725d47bfa7cfcea80c4d6bbbc432b5cb83a", - "size": 1396 - } - }, - "name": { - "de": "Nicht-quelloffene Abhängigkeiten", - "en-US": "Non-Free Dependencies", - "fa": "وابستگی‌های ناآزاد", - "ro": "Dependențe ne-libere", - "zh-rCN": "非自由依赖项" - } - }, - "NonFreeNet": { - "description": { - "de": "Diese App bewirbt nicht-quelloffene Netzwerkdienste", - "en-US": "This app promotes or depends entirely on a non-changeable or non-free network service", - "fa": "این کاره، خدمات شبکه‌های ناآزاد را ترویج می‌کند", - "ro": "Aplicația promovează servicii de rețea ce nu sunt accesibile la liber", - "zh-rCN": "此应用推广非自由的网络服务" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_nonfreenet.xml", - "sha256": "7fff45c847ed2ecc94e85ba2341685c8f113fa5fdf7267a25637dc38ee0275f6", - "size": 3038 - }, - "en-US": { - "name": "/icons/ic_antifeature_nonfreenet.xml", - "sha256": "7fff45c847ed2ecc94e85ba2341685c8f113fa5fdf7267a25637dc38ee0275f6", - "size": 3038 - }, - "fa": { - "name": "/icons/ic_antifeature_nonfreenet.xml", - "sha256": "7fff45c847ed2ecc94e85ba2341685c8f113fa5fdf7267a25637dc38ee0275f6", - "size": 3038 - }, - "ro": { - "name": "/icons/ic_antifeature_nonfreenet.xml", - "sha256": "7fff45c847ed2ecc94e85ba2341685c8f113fa5fdf7267a25637dc38ee0275f6", - "size": 3038 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_nonfreenet.xml", - "sha256": "7fff45c847ed2ecc94e85ba2341685c8f113fa5fdf7267a25637dc38ee0275f6", - "size": 3038 - } - }, - "name": { - "de": "Nicht-quelloffene Netzwerkdienste", - "en-US": "Non-Free Network Services", - "fa": "خدمات شبکه‌ای ناآزاد", - "ro": "Servicii de rețea ne-libere", - "zh-rCN": "非自由网络服务" - } - }, - "Tracking": { - "description": { - "de": "Diese App verfolgt und versendet Ihre Aktivitäten", - "en-US": "This app tracks and reports your activity", - "fa": "این کاره، فعّالیتتان را ردیابی و گزارش می‌کند", - "ro": "Aplicația îți înregistrează și raportează activitatea undeva", - "zh-rCN": "此应用会记录并报告你的活动" - }, - "icon": { - "de": { - "name": "/icons/ic_antifeature_tracking.xml", - "sha256": "4779337b5b0a12c4b4a8a83d0d8a994a2477460db702784df4c8d3e3730be961", - "size": 2493 - }, - "en-US": { - "name": "/icons/ic_antifeature_tracking.xml", - "sha256": "4779337b5b0a12c4b4a8a83d0d8a994a2477460db702784df4c8d3e3730be961", - "size": 2493 - }, - "fa": { - "name": "/icons/ic_antifeature_tracking.xml", - "sha256": "4779337b5b0a12c4b4a8a83d0d8a994a2477460db702784df4c8d3e3730be961", - "size": 2493 - }, - "ro": { - "name": "/icons/ic_antifeature_tracking.xml", - "sha256": "4779337b5b0a12c4b4a8a83d0d8a994a2477460db702784df4c8d3e3730be961", - "size": 2493 - }, - "zh-rCN": { - "name": "/icons/ic_antifeature_tracking.xml", - "sha256": "4779337b5b0a12c4b4a8a83d0d8a994a2477460db702784df4c8d3e3730be961", - "size": 2493 - } - }, - "name": { - "de": "Tracking", - "en-US": "Tracking", - "fa": "ردیابی", - "ro": "Urmărire", - "zh-rCN": "跟踪用户" - } - } - }, - "categories": { - "Time": { - "name": { - "en-US": "Time" - } - }, - "Development": { - "name": { - "en-US": "Development" - } - }, - "GuardianProject": { - "name": { - "en-US": "Guardian Project" - } - }, - "Multimedia": { - "name": { - "en-US": "Multimedia" - } - }, - "Phone & SMS": { - "name": { - "en-US": "Phone & SMS" - } - }, - "Security": { - "name": { - "en-US": 
"Security" - } - }, - "System": { - "name": { - "en-US": "System" - } - }, - "1": { - "name": { - "en-US": "1" - } - }, - "2.0": { - "name": { - "en-US": "2.0" - } - }, - "tests": { - "name": { - "en-US": "tests" - } - } - }, - "requests": { - "install": [ - "org.adaway" - ], - "uninstall": [ - "com.android.vending", - "com.facebook.orca" - ] - } - }, - "packages": { - "com.politedroid": { - "metadata": { - "added": 1498176000000, - "categories": [ - "Multimedia", - "Security", - "Time" - ], - "issueTracker": "https://github.com/miguelvps/PoliteDroid/issues", - "lastUpdated": 1498176000000, - "license": "GPL-3.0-only", - "sourceCode": "https://github.com/miguelvps/PoliteDroid", - "name": { - "en-US": "Polite Droid" - }, - "summary": { - "en-US": "Calendar tool" - }, - "description": { - "en-US": "Activates silent mode during calendar events." - }, - "icon": { - "en-US": { - "name": "/icons/com.politedroid.6.png", - "sha256": "edf8d30b97a06821337e267168b131a6a16d81df9e7007e017778f9781f5c8f3", - "size": 559 - } - }, - "preferredSigner": "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - }, - "versions": { - "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d": { - "added": 1498176000000, - "file": { - "name": "/com.politedroid_6.apk", - "sha256": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", - "size": 16578, - "ipfsCIDv1": "bafybeidvgxrq77qr7yqkcnykdfvszsxjqc5kzt6ya5k7r666wriadrylt4" - }, - "manifest": { - "versionName": "1.5", - "versionCode": 6, - "usesSdk": { - "minSdkVersion": 14, - "targetSdkVersion": 21 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - }, - "usesPermission": [ - { - "name": "android.permission.READ_CALENDAR" - }, - { - "name": "android.permission.RECEIVE_BOOT_COMPLETED" - } - ] - }, - "antiFeatures": { - "KnownVuln": {}, - "NoSourceSince": { - "en-US": "1.5" - }, - "NonFreeAssets": {}, - "NonFreeNet": {} - } - }, - "5bdbfa071cca4b8d05ced41d6b28763595d6e8096cca5bbf0f9253c9a2622e5d": { - "added": 1498176000000, - "file": { - "name": "/com.politedroid_5.apk", - "sha256": "5bdbfa071cca4b8d05ced41d6b28763595d6e8096cca5bbf0f9253c9a2622e5d", - "size": 18817, - "ipfsCIDv1": "bafybeifbrio5rumqvgfd5sihs7yihux2yktfvd5i7jimlgrwchzcvi6ldu" - }, - "manifest": { - "versionName": "1.4", - "versionCode": 5, - "usesSdk": { - "minSdkVersion": 3, - "targetSdkVersion": 10 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - }, - "usesPermission": [ - { - "name": "android.permission.READ_CALENDAR" - }, - { - "name": "android.permission.RECEIVE_BOOT_COMPLETED" - } - ] - }, - "antiFeatures": { - "NoSourceSince": { - "en-US": "1.5" - }, - "NonFreeNet": {} - } - }, - "c809bdff83715fbf919f3840ee09869b038e209378b906e135ee40d3f0e1f075": { - "added": 1498176000000, - "file": { - "name": "/com.politedroid_4.apk", - "sha256": "c809bdff83715fbf919f3840ee09869b038e209378b906e135ee40d3f0e1f075", - "size": 18489, - "ipfsCIDv1": "bafybeicridbev22c2rt3lwbfsrkafcf3yepak7kpvk6zgbayrxls2mmwim" - }, - "manifest": { - "versionName": "1.3", - "versionCode": 4, - "usesSdk": { - "minSdkVersion": 3, - "targetSdkVersion": 3 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - }, - "usesPermission": [ - { - "name": "android.permission.READ_CALENDAR" - }, - { - "name": "android.permission.RECEIVE_BOOT_COMPLETED" - }, - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE" - }, - { - "name": 
"android.permission.READ_PHONE_STATE" - }, - { - "name": "android.permission.READ_EXTERNAL_STORAGE" - } - ] - }, - "antiFeatures": { - "NoSourceSince": { - "en-US": "1.5" - }, - "NonFreeNet": {} - } - }, - "665d03d61ebc642289fda697f71a59305b0202b16cafc5ffdae91cbe91f0b25d": { - "added": 1498176000000, - "file": { - "name": "/com.politedroid_3.apk", - "sha256": "665d03d61ebc642289fda697f71a59305b0202b16cafc5ffdae91cbe91f0b25d", - "size": 17552, - "ipfsCIDv1": "bafybeib7arokhivttalcnq5ieu5fx5pzn7vo5qpmdiozqodzhb4ba53nd4" - }, - "manifest": { - "versionName": "1.2", - "versionCode": 3, - "usesSdk": { - "minSdkVersion": 3, - "targetSdkVersion": 3 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - }, - "usesPermission": [ - { - "name": "android.permission.READ_CALENDAR" - }, - { - "name": "android.permission.RECEIVE_BOOT_COMPLETED" - }, - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE" - }, - { - "name": "android.permission.READ_PHONE_STATE" - }, - { - "name": "android.permission.READ_EXTERNAL_STORAGE" - } - ] - }, - "antiFeatures": { - "KnownVuln": {}, - "NoSourceSince": { - "en-US": "1.5" - }, - "NonFreeAssets": {}, - "NonFreeNet": {} - } - } - } - }, - "duplicate.permisssions": { - "metadata": { - "added": 1513900800000, - "categories": [ - "tests" - ], - "lastUpdated": 1513900800000, - "name": { - "en-US": "Duplicate Permisssions" - }, - "summary": { - "en-US": "Test app for all possible " - }, - "icon": { - "en-US": { - "name": "/icons/duplicate.permisssions.9999999.png", - "sha256": "fb0f3bb45312b53e349a762b67af4f48d010a38a245f75c3a0b152097d7b067f", - "size": 1301 - } - }, - "preferredSigner": "659e1fd284549f70d13fb02c620100e27eeea3420558cce62b0f5d4cf2b77d84" - }, - "versions": { - "8367857fe75f85321ce2c344b34804d0bc193707f6ba03710d025d9030803434": { - "added": 1513900800000, - "file": { - "name": "/duplicate.permisssions_9999999.apk", - "sha256": "8367857fe75f85321ce2c344b34804d0bc193707f6ba03710d025d9030803434", - "size": 27446, - "ipfsCIDv1": "bafybeicucr4lk7fynyde4fpxubudpl6m6wqnuq2j6vjroutjyryw24en3u" - }, - "manifest": { - "versionName": "", - "versionCode": 9999999, - "usesSdk": { - "minSdkVersion": 18, - "targetSdkVersion": 27 - }, - "signer": { - "sha256": [ - "659e1fd284549f70d13fb02c620100e27eeea3420558cce62b0f5d4cf2b77d84" - ] - }, - "usesPermission": [ - { - "name": "android.permission.INTERNET" - }, - { - "name": "android.permission.ACCESS_NETWORK_STATE" - }, - { - "name": "android.permission.ACCESS_WIFI_STATE" - }, - { - "name": "android.permission.CHANGE_WIFI_MULTICAST_STATE" - }, - { - "name": "android.permission.INTERNET" - }, - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE", - "maxSdkVersion": 18 - }, - { - "name": "android.permission.READ_EXTERNAL_STORAGE", - "maxSdkVersion": 18 - } - ], - "usesPermissionSdk23": [ - { - "name": "android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS", - "maxSdkVersion": 27 - }, - { - "name": "android.permission.REQUEST_INSTALL_PACKAGES" - } - ] - } - } - } - }, - "fake.ota.update": { - "metadata": { - "added": 1457568000000, - "categories": [ - "System" - ], - "issueTracker": "https://gitlab.com/fdroid/privileged-extension/issues", - "lastUpdated": 1457568000000, - "license": "Apache-2.0", - "sourceCode": "https://gitlab.com/fdroid/privileged-extension", - "webSite": "https://f-droid.org", - "name": { - "en-US": "fake.ota.update_1234" - }, - "summary": { - "en-US": "Tests whether OTA ZIP files are being include" - }, - "description": { - "en-US": "F-Droid can 
make use of system privileges or permissions to\ninstall, update and remove applications on its own. The only way to obtain those\nprivileges is to become a system app.\n\nThis is where the Privileged Extension comes in - being a separate app and much\nsmaller, it can be installed as a system app and communicate with the main app\nvia AIDL IPC.\n\nThis has several advantages:\n\n* Reduced disk usage in the system partition\n* System updates don't remove F-Droid\n* The process of installing into system via root is safer\n\nThis is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed\nusing TWRP or other Android recovery that can flash updates to the system from\nthe /data/data/org.fdroid.fdroid folder on the /data partition. The standalone\nAPK is called F-Droid Privileged Extension." - }, - "donate": [ - "https://f-droid.org/about" - ] - }, - "versions": { - "897a92a4ccff4f415f6ba275b2af16d4ecaee60a983b215bddcb9f8964e7a24c": { - "added": 1457568000000, - "file": { - "name": "/fake.ota.update_1234.zip", - "sha256": "897a92a4ccff4f415f6ba275b2af16d4ecaee60a983b215bddcb9f8964e7a24c", - "size": 233 - }, - "manifest": { - "versionName": "897a92a", - "versionCode": 1234 - } - } - } - }, - "info.guardianproject.urzip": { - "metadata": { - "added": 1466640000000, - "categories": [ - "Development", - "GuardianProject", - "1", - "2.0" - ], - "issueTracker": "https://dev.guardianproject.info/projects/urzip/issues", - "lastUpdated": 1466640000000, - "license": "GPL-3.0-only", - "sourceCode": "https://github.com/guardianproject/urzip", - "webSite": "https://dev.guardianproject.info/projects/urzip", - "featureGraphic": { - "en-US": { - "name": "/info.guardianproject.urzip/en-US/featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png", - "sha256": "185453e41a2f66c10d1a926ad47a8f3831960513d642cc76e41f793a5e70ff05", - "size": 36027 - } - }, - "authorWebSite": "https://guardianproject.info", - "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", - "openCollective": "f-droid-just-testing", - "name": { - "en-US": "title" - }, - "summary": { - "en-US": "一个实用工具,获取已安装在您的设备上的应用的有关信息" - }, - "description": { - "en-US": "It’s Urzip 是一个获得已安装 APK 相关信息的实用工具。它从您的设备上已安装的所有应用开始,一键触摸即可显示 APK 的指纹,并且提供到达 virustotal.com 和 androidobservatory.org 的快捷链接,让您方便地了解特定 APK 的档案。它还可以让您导出签名证书和生成 ApkSignaturePin Pin 文件供 TrustedIntents 库使用。\n\n★ Urzip 支持下列语言: Deutsch, English, español, suomi, 日本語, 한국어, Norsk, português (Portugal), Русский, Slovenščina, Türkçe\n没看到您的语言?帮忙翻译本应用吧:\nhttps://www.transifex.com/projects/p/urzip\n\n★ 致用户:我们还缺少你喜欢的功能?发现了一个 bug?请告诉我们!我们乐于听取您的意见。请发送电子邮件至: support@guardianproject.info 或者加入我们的聊天室 https://guardianproject.info/contact\n" - }, - "video": { - "en-US": "video" - }, - "icon": { - "en-US": { - "name": "/info.guardianproject.urzip/en-US/icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png", - "sha256": "3495cdccc7327febfd8b96b5125262d23f57d4bbe59626c26b8f315d83e53aa4", - "size": 1413 - } - }, - "preferredSigner": "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - }, - "versions": { - "15c0ec72c74a3791f42cdb43c57df0fb11a4dbb656851bbb8cf05b26a8372789": { - "added": 1466640000000, - "file": { - "name": "/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk", - "sha256": "15c0ec72c74a3791f42cdb43c57df0fb11a4dbb656851bbb8cf05b26a8372789", - "size": 11471, - "ipfsCIDv1": "bafybeig77jwqx243si3gh55iqx4gkcxhltkt6pjimzgigfsk3kshsi6qem" - }, - "manifest": { - "versionName": "0.1", - "versionCode": 100, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - 
"signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - } - } - } - } - }, - "info.zwanenburg.caffeinetile": { - "metadata": { - "added": 1539129600000, - "categories": [ - "Development" - ], - "lastUpdated": 1539129600000, - "name": { - "en-US": "Caffeine Tile" - }, - "summary": { - "en-US": "Test app for extracting icons when an XML one is default" - }, - "preferredSigner": "51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e" - }, - "versions": { - "dbbdd7deadb038862f426b71efe4a64df8c3edf25d669e935f349510e16f65db": { - "added": 1539129600000, - "file": { - "name": "/info.zwanenburg.caffeinetile_4.apk", - "sha256": "dbbdd7deadb038862f426b71efe4a64df8c3edf25d669e935f349510e16f65db", - "size": 11740, - "ipfsCIDv1": "bafybeigormhkorw3mk6pkkfk63kkmxpvwylthgj67geulvskc5acr65sym" - }, - "manifest": { - "versionName": "1.3", - "versionCode": 4, - "usesSdk": { - "minSdkVersion": 24, - "targetSdkVersion": 25 - }, - "signer": { - "sha256": [ - "51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e" - ] - }, - "usesPermission": [ - { - "name": "android.permission.WAKE_LOCK" - } - ] - } - } - } - }, - "no.min.target.sdk": { - "metadata": { - "added": 1539129600000, - "categories": [ - "Development" - ], - "lastUpdated": 1539129600000, - "name": { - "en-US": "No minSdkVersion or targetSdkVersion" - }, - "summary": { - "en-US": "An APK without any block in AndroidManifest.xml" - }, - "icon": { - "en-US": { - "name": "/icons/no.min.target.sdk.987.png", - "sha256": "fb0f3bb45312b53e349a762b67af4f48d010a38a245f75c3a0b152097d7b067f", - "size": 1301 - } - }, - "preferredSigner": "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - }, - "versions": { - "e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c": { - "added": 1539129600000, - "file": { - "name": "/no.min.target.sdk_987.apk", - "sha256": "e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c", - "size": 14102, - "ipfsCIDv1": "bafybeidwxseoagnew3gtlasttqovl7ciuwxaud5a5p4a5pzpbrfcfj2gaa" - }, - "manifest": { - "versionName": "1.2-fake", - "versionCode": 987, - "usesSdk": { - "minSdkVersion": 3, - "targetSdkVersion": 3 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - }, - "usesPermission": [ - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE" - }, - { - "name": "android.permission.READ_PHONE_STATE" - }, - { - "name": "android.permission.READ_EXTERNAL_STORAGE" - } - ] - } - } - } - }, - "obb.main.oldversion": { - "metadata": { - "added": 1388448000000, - "categories": [ - "Development" - ], - "lastUpdated": 1388448000000, - "license": "GPL-3.0-only", - "sourceCode": "https://github.com/eighthave/urzip", - "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", - "liberapay": "12334", - "name": { - "en-US": "OBB Main Old Version" - }, - "icon": { - "en-US": { - "name": "/icons/obb.main.oldversion.1444412523.png", - "sha256": "fb0f3bb45312b53e349a762b67af4f48d010a38a245f75c3a0b152097d7b067f", - "size": 1301 - } - }, - "preferredSigner": "818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1" - }, - "versions": { - "c5f149e526f89c05c62923bdb7bb1e2be5673c46ec85143f41e514340631449c": { - "added": 1388448000000, - "file": { - "name": "/obb.main.oldversion_1444412523.apk", - "sha256": "c5f149e526f89c05c62923bdb7bb1e2be5673c46ec85143f41e514340631449c", - "size": 14323, - "ipfsCIDv1": "bafybeicnwnpiyfke3tbk3nve62meig65vved34i6kesjkksdciff6242ui" - }, - "obbMainFile": { - "name": 
"/main.1434483388.obb.main.oldversion.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1444412523, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1" - ] - }, - "usesPermission": [ - { - "name": "android.permission.INTERNET" - }, - { - "name": "android.permission.ACCESS_NETWORK_STATE", - "maxSdkVersion": 22 - }, - { - "name": "android.permission.ACCESS_WIFI_STATE" - }, - { - "name": "android.permission.CHANGE_WIFI_MULTICAST_STATE" - }, - { - "name": "android.permission.CHANGE_NETWORK_STATE" - }, - { - "name": "android.permission.CHANGE_WIFI_STATE" - }, - { - "name": "android.permission.BLUETOOTH" - }, - { - "name": "android.permission.BLUETOOTH_ADMIN", - "maxSdkVersion": 18 - }, - { - "name": "android.permission.RECEIVE_BOOT_COMPLETED" - }, - { - "name": "android.permission.NFC" - } - ], - "usesPermissionSdk23": [ - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE" - }, - { - "name": "android.permission.WRITE_SETTINGS", - "maxSdkVersion": 25 - } - ] - }, - "releaseChannels": [ - "Beta" - ] - } - } - }, - "obb.main.twoversions": { - "metadata": { - "added": 1444608000000, - "categories": [ - "Development" - ], - "lastUpdated": 1466380800000, - "license": "GPL-3.0-only", - "sourceCode": "https://github.com/eighthave/urzip", - "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", - "name": { - "en-US": "OBB Main Two Versions" - }, - "icon": { - "en-US": { - "name": "/icons/obb.main.twoversions.1101617.png", - "sha256": "d27c9866adeda6dd466628e43c62ccac95a0e9480c4bb095ac7e0b1c2b58a77d", - "size": 1413 - } - }, - "preferredSigner": "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - }, - "versions": { - "9bc74566f089ef030ac33e7fbd99d92f1a38f363fb499fed138d9e7b774e821c": { - "added": 1466380800000, - "file": { - "name": "/obb.main.twoversions_1101617.apk", - "sha256": "9bc74566f089ef030ac33e7fbd99d92f1a38f363fb499fed138d9e7b774e821c", - "size": 11481, - "ipfsCIDv1": "bafybeiblpfmwololxgsrum337rbbbsqg2gk6hytvt6szf4njubosju3bme" - }, - "src": { - "name": "/obb.main.twoversions_1101617_src.tar.gz", - "sha256": "3115241ed53aa047191f64db2c14e609a7dc0a803211f56a9b17e2d027763f9d", - "size": 150 - }, - "obbMainFile": { - "name": "/main.1101615.obb.main.twoversions.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1101617, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - } - } - }, - "7b0b7b9ba248e15751a16e3a0e01e1e24cbb673686c38422030cb75d5c33f0bb": { - "added": 1451606400000, - "file": { - "name": "/obb.main.twoversions_1101615.apk", - "sha256": "7b0b7b9ba248e15751a16e3a0e01e1e24cbb673686c38422030cb75d5c33f0bb", - "size": 11480, - "ipfsCIDv1": "bafybeigglr3iefb3es4lp2sgfacppk3w2qqtuykjgf4actebpalyizef3q" - }, - "obbMainFile": { - "name": "/main.1101615.obb.main.twoversions.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1101615, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - } - } - }, - 
"cce97a52ff18d843185be7f22ecb1a557c36b7a9f8ba07a8be94e328e00b35dc": { - "added": 1444608000000, - "file": { - "name": "/obb.main.twoversions_1101613.apk", - "sha256": "cce97a52ff18d843185be7f22ecb1a557c36b7a9f8ba07a8be94e328e00b35dc", - "size": 11477, - "ipfsCIDv1": "bafybeicocjo4khzp2rkui2ltvrhbksrm373lr3pb43ut7hqgbllfjpv6ti" - }, - "obbMainFile": { - "name": "/main.1101613.obb.main.twoversions.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1101613, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - } - } - } - } - }, - "obb.mainpatch.current": { - "metadata": { - "added": 1461369600000, - "categories": [ - "Development" - ], - "lastUpdated": 1496275200000, - "license": "GPL-3.0-only", - "sourceCode": "https://github.com/eighthave/urzip", - "featureGraphic": { - "en-US": { - "name": "/obb.mainpatch.current/en-US/featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png", - "sha256": "7df84b6a88f16c601fbbd44e7b530980ae6ec7c77439573a6fae679af3816939", - "size": 24336 - } - }, - "screenshots": { - "phone": { - "en-US": [ - { - "name": "/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png", - "sha256": "719f95f0811e6e6e03e6dbd47553eafe9d8d96b14107e26f2dee3ccdabbdc6bf", - "size": 44990 - } - ] - }, - "sevenInch": { - "en-US": [ - { - "name": "/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png", - "sha256": "460c6ce2e5e3987ae6688f398c9093a07802991e7cdee559a578f201189c6630", - "size": 56049 - } - ] - } - }, - "bitcoin": "1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk", - "name": { - "en-US": "OBB Main/Patch Current" - }, - "icon": { - "en-US": { - "name": "/obb.mainpatch.current/en-US/icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png", - "sha256": "588d2990edcbb2496bb1302746bf8e412c64928318e359587b6f9f02f5cb88b8", - "size": 260113 - } - }, - "preferredSigner": "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - }, - "versions": { - "eda5fc3ecfdac3252717e36bdbc9820865baeef162264af9ba5db7364f0e7a0c": { - "added": 1461369600000, - "file": { - "name": "/obb.mainpatch.current_1619.apk", - "sha256": "eda5fc3ecfdac3252717e36bdbc9820865baeef162264af9ba5db7364f0e7a0c", - "size": 11479, - "ipfsCIDv1": "bafybeievo4e234mllujityvtjgeltauyfbriszoqddzygmimcm4mo3zyqu" - }, - "obbMainFile": { - "name": "/main.1619.obb.mainpatch.current.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "obbPatchFile": { - "name": "/patch.1619.obb.mainpatch.current.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1619, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6" - ] - } - } - }, - "42e7d6d2f8254aaf9fe95ba6ecc233ee8c3cd543a3e4f3f9ebe1b638221122fa": { - "added": 1496275200000, - "file": { - "name": "/obb.mainpatch.current_1619_another-release-key.apk", - "sha256": "42e7d6d2f8254aaf9fe95ba6ecc233ee8c3cd543a3e4f3f9ebe1b638221122fa", - "size": 10541, - "ipfsCIDv1": "bafybeiatdbzlxairqzvdowevwuy7nk24rknc55jpip2wb2sq4c3f7mtngm" - }, - "obbMainFile": { - "name": "/main.1619.obb.mainpatch.current.obb", - "sha256": 
"d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "obbPatchFile": { - "name": "/patch.1619.obb.mainpatch.current.obb", - "sha256": "d3eb539a556352f3f47881d71fb0e5777b2f3e9a4251d283c18c67ce996774b7", - "size": 6 - }, - "manifest": { - "versionName": "0.1", - "versionCode": 1619, - "usesSdk": { - "minSdkVersion": 4, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "ce9e200667f02d96d49891a2e08a3c178870e91853d61bdd33ef5f0b54701aa5" - ] - } - } - } - } - }, - "org.maxsdkversion": { - "metadata": { - "added": 1739805810000, - "lastUpdated": 1739805810000, - "name": { - "en-US": "maxSdkVersion" - }, - "summary": { - "en-US": "Test setting maxSdkVersion in an APK" - }, - "icon": { - "en-US": { - "name": "/icons/org.maxsdkversion.4.png", - "sha256": "428442a0b75be04af3191a066041341f8a0b289972f99868687f43e02055a348", - "size": 91 - } - }, - "preferredSigner": "401a3a5843a3d5cebc22e6de5cb76d08eaa6797122d7fe1283df1d192e132f5e" - }, - "versions": { - "877d582369d2840fc0d6892e44feaaad21419b0e35af42f22b3e127bcd08274d": { - "added": 1739805810000, - "file": { - "name": "/org.maxsdkversion_4.apk", - "sha256": "877d582369d2840fc0d6892e44feaaad21419b0e35af42f22b3e127bcd08274d", - "size": 12768, - "ipfsCIDv1": "bafybeibdls2h4mpfw5gks3iirsne2qaez6uefwb5xmqkhahqbakvdszk6y" - }, - "manifest": { - "versionName": "1.0.3", - "maxSdkVersion": 25, - "versionCode": 4, - "features": [ - { - "name": "android.hardware.camera.front" - } - ], - "usesSdk": { - "minSdkVersion": 14, - "targetSdkVersion": 19 - }, - "signer": { - "sha256": [ - "401a3a5843a3d5cebc22e6de5cb76d08eaa6797122d7fe1283df1d192e132f5e" - ] - }, - "usesPermission": [ - { - "name": "android.permission.CAMERA" - } - ] - } - } - } - }, - "souch.smsbypass": { - "metadata": { - "added": 1524700800000, - "categories": [ - "Phone & SMS" - ], - "issueTracker": "https://gitlab.com/souch/SMSbypass/issues", - "lastUpdated": 1524700800000, - "license": "GPL-3.0-only", - "sourceCode": "https://gitlab.com/souch/SMSbypass/tree/HEAD", - "webSite": "https://gitlab.com/souch/SMSbypass", - "name": { - "en-US": "Battery level" - }, - "summary": { - "en-US": "Filter SMS and show them in a fake app" - }, - "description": { - "en-US": "In order to keep away curious eyes, SMS-bypass filters incoming SMS messages\nbefore they reach your inbox. 
Based on bughunter2.smsfilter.\n\nFeatures:\n\n* Discrete fake app \"Battery level\": Long tap on Battery percentage will show SMS.\n* Filter incoming SMS specified address: redirect the SMS to SMS-bypass messages list; remove SMS arrival sound or vibration; show a discreet notification icon (battery level); vibrate if checked in settings\n* Add contact from contact list\n* Export messages to a text file" - }, - "donate": [ - "http://rodolphe.souchaud.free.fr/donate" - ], - "icon": { - "en-US": { - "name": "/icons/souch.smsbypass.9.png", - "sha256": "8fee034537477fcd40fd33887868786b70258fcf7b9acffaff7436bca8748c8a", - "size": 1558 - } - }, - "preferredSigner": "d3aec784b1fd71549fc22c999789122e3639895db6bd585da5835fbe3db6985c" - }, - "versions": { - "80b0ae68a1189baa3ee6717092e3dbf1a4210165f7f7e5f2f9616bd63a2ec01d": { - "added": 1524700800000, - "file": { - "name": "/souch.smsbypass_9.apk", - "sha256": "80b0ae68a1189baa3ee6717092e3dbf1a4210165f7f7e5f2f9616bd63a2ec01d", - "size": 81295, - "ipfsCIDv1": "bafybeihaccfnt32q2iwfulh2m7jvdivuunlw6t72wa7jfi7igxvqxjqszy" - }, - "manifest": { - "versionName": "0.9", - "versionCode": 9, - "usesSdk": { - "minSdkVersion": 8, - "targetSdkVersion": 18 - }, - "signer": { - "sha256": [ - "d3aec784b1fd71549fc22c999789122e3639895db6bd585da5835fbe3db6985c" - ] - }, - "usesPermission": [ - { - "name": "android.permission.RECEIVE_SMS" - }, - { - "name": "android.permission.SEND_SMS" - }, - { - "name": "android.permission.READ_CONTACTS" - }, - { - "name": "android.permission.WRITE_EXTERNAL_STORAGE" - }, - { - "name": "android.permission.VIBRATE" - }, - { - "name": "android.permission.READ_EXTERNAL_STORAGE" - } - ] - } - } - } - } - } -} \ No newline at end of file diff --git a/tests/repo/index.xml b/tests/repo/index.xml index 04e8f87d..20b526dc 100644 --- a/tests/repo/index.xml +++ b/tests/repo/index.xml @@ -1,7 +1,7 @@ - - This is a repository of apps to be used with F-Droid. Applications in this repository are either official binaries built by the original application developers, or are binaries built from source by the admin of f-droid.org using the tools on https://gitlab.com/fdroid. + + This is a repository of apps to be used with F-Droid. Applications in this repository are either official binaries built by the original application developers, or are binaries built from source by the admin of f-droid.org using the tools on https://gitlab.com/u/fdroid. http://foobarfoobarfoobar.onion/fdroid/repo https://foo.bar/fdroid/repo @@ -15,15 +15,7 @@ Battery level Filter SMS and show them in a fake app souch.smsbypass.9.png - In order to keep away curious eyes, SMS-bypass filters incoming SMS messages -before they reach your inbox. Based on bughunter2.smsfilter. - -Features: - -* Discrete fake app "Battery level": Long tap on Battery percentage will show SMS. -* Filter incoming SMS specified address: redirect the SMS to SMS-bypass messages list; remove SMS arrival sound or vibration; show a discreet notification icon (battery level); vibrate if checked in settings -* Add contact from contact list -* Export messages to a text file + <p>In order to keep away curious eyes, SMS-bypass filters incoming SMS messages before they reach your inbox. 
Based on bughunter2.smsfilter.</p><p>Features:</p><ul><li> Discrete fake app "Battery level": Long tap on Battery percentage will show SMS.</li><li> Filter incoming SMS specified address: redirect the SMS to SMS-bypass messages list; remove SMS arrival sound or vibration; show a discreet notification icon (battery level); vibrate if checked in settings</li><li> Add contact from contact list</li><li> Export messages to a text file</li></ul> GPL-3.0-only Phone & SMS Phone & SMS @@ -31,6 +23,7 @@ Features: https://gitlab.com/souch/SMSbypass/tree/HEAD https://gitlab.com/souch/SMSbypass/issues http://rodolphe.souchaud.free.fr/donate + cad90e036b975ed129a3ce80a0750466 0.9 9 @@ -52,7 +45,8 @@ Features: 2018-10-10 Caffeine Tile Test app for extracting icons when an XML one is default - No description available + info.zwanenburg.caffeinetile.4.xml + <p>No description available</p> Unknown Development Development @@ -81,7 +75,7 @@ Features: Duplicate Permisssions Test app for all possible <uses-permissions> duplicate.permisssions.9999999.png - No description available + <p>No description available</p> Unknown tests tests @@ -111,24 +105,7 @@ Features: 2016-03-10 fake.ota.update_1234 Tests whether OTA ZIP files are being include - F-Droid can make use of system privileges or permissions to -install, update and remove applications on its own. The only way to obtain those -privileges is to become a system app. - -This is where the Privileged Extension comes in - being a separate app and much -smaller, it can be installed as a system app and communicate with the main app -via AIDL IPC. - -This has several advantages: - -* Reduced disk usage in the system partition -* System updates don't remove F-Droid -* The process of installing into system via root is safer - -This is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed -using TWRP or other Android recovery that can flash updates to the system from -the /data/data/org.fdroid.fdroid folder on the /data partition. The standalone -APK is called F-Droid Privileged Extension. + <p>F-Droid can make use of system privileges or permissions to install, update and remove applications on its own. The only way to obtain those privileges is to become a system app.</p><p>This is where the Privileged Extension comes in - being a separate app and much smaller, it can be installed as a system app and communicate with the main app via AIDL IPC.</p><p>This has several advantages:</p><ul><li> Reduced disk usage in the system partition</li><li> System updates don't remove F-Droid</li><li> The process of installing into system via root is safer</li></ul><p>This is packaged as an OTA (Over-The-Air) update ZIP file. It must be installed using TWRP or other Android recovery that can flash updates to the system from the /data/data/org.fdroid.fdroid folder on the /data partition. The standalone APK is called F-Droid Privileged Extension.</p> Apache-2.0 System System @@ -147,35 +124,6 @@ APK is called F-Droid Privileged Extension. 2016-03-10 - - org.maxsdkversion - 2025-02-17 - 2025-02-17 - maxSdkVersion - Test setting maxSdkVersion in an APK - org.maxsdkversion.4.png - No description available - Unknown - - - - - 4 - - 1.0.3 - 4 - org.maxsdkversion_4.apk - 877d582369d2840fc0d6892e44feaaad21419b0e35af42f22b3e127bcd08274d - 12768 - 14 - 19 - 25 - 2025-02-17 - 1a5e67bcef6b2d6242f2d36982b54589 - CAMERA - android.hardware.camera.front - - no.min.target.sdk 2018-10-10 @@ -183,7 +131,7 @@ APK is called F-Droid Privileged Extension. 
No minSdkVersion or targetSdkVersion An APK without any <uses-sdk> block in AndroidManifest.xml no.min.target.sdk.987.png - No description available + <p>No description available</p> Unknown Development Development @@ -211,7 +159,7 @@ APK is called F-Droid Privileged Extension. OBB Main Old Version obb.main.oldversion.1444412523.png - No description available + <p>No description available</p> GPL-3.0-only Development Development @@ -246,7 +194,7 @@ APK is called F-Droid Privileged Extension. OBB Main Two Versions obb.main.twoversions.1101617.png - No description available + <p>No description available</p> GPL-3.0-only Development Development @@ -304,7 +252,7 @@ APK is called F-Droid Privileged Extension. OBB Main/Patch Current obb.mainpatch.current.1619.png - No description available + <p>No description available</p> GPL-3.0-only Development Development @@ -337,16 +285,16 @@ APK is called F-Droid Privileged Extension. Polite Droid Calendar tool com.politedroid.6.png - Activates silent mode during calendar events. + <p>Activates silent mode during calendar events.</p> GPL-3.0-only - Multimedia,Security,Time - Multimedia + Time + Time https://github.com/miguelvps/PoliteDroid https://github.com/miguelvps/PoliteDroid/issues 1.5 6 - KnownVuln,NoSourceSince,NonFreeAssets,NonFreeNet + NoSourceSince 1.5 6 @@ -398,17 +346,10 @@ APK is called F-Droid Privileged Extension. info.guardianproject.urzip 2016-06-23 2016-06-23 - title + urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234 一个实用工具,获取已安装在您的设备上的应用的有关信息 info.guardianproject.urzip.100.png - It’s Urzip 是一个获得已安装 APK 相关信息的实用工具。它从您的设备上已安装的所有应用开始,一键触摸即可显示 APK 的指纹,并且提供到达 virustotal.com 和 androidobservatory.org 的快捷链接,让您方便地了解特定 APK 的档案。它还可以让您导出签名证书和生成 ApkSignaturePin Pin 文件供 TrustedIntents 库使用。 - -★ Urzip 支持下列语言: Deutsch, English, español, suomi, 日本語, 한국어, Norsk, português (Portugal), Русский, Slovenščina, Türkçe -没看到您的语言?帮忙翻译本应用吧: -https://www.transifex.com/projects/p/urzip - -★ 致用户:我们还缺少你喜欢的功能?发现了一个 bug?请告诉我们!我们乐于听取您的意见。请发送电子邮件至: support@guardianproject.info 或者加入我们的聊天室 https://guardianproject.info/contact - + <p>It’s Urzip 是一个获得已安装 APK 相关信息的实用工具。它从您的设备上已安装的所有应用开始,一键触摸即可显示 APK 的指纹,并且提供到达 virustotal.com 和 androidobservatory.org 的快捷链接,让您方便地了解特定 APK 的档案。它还可以让您导出签名证书和生成 ApkSignaturePin Pin 文件供 TrustedIntents 库使用。</p><p>★ Urzip 支持下列语言: Deutsch, English, español, suomi, 日本語, 한국어, Norsk, português (Portugal), Русский, Slovenščina, Türkçe 没看到您的语言?帮忙翻译本应用吧: https://www.transifex.com/projects/p/urzip</p><p>★ 致用户:我们还缺少你喜欢的功能?发现了一个 bug?请告诉我们!我们乐于听取您的意见。请发送电子邮件至: support@guardianproject.info 或者加入我们的聊天室 https://guardianproject.info/contact</p> GPL-3.0-only Development,GuardianProject,1,2.0 Development @@ -416,6 +357,7 @@ https://www.transifex.com/projects/p/urzip https://github.com/guardianproject/urzip https://dev.guardianproject.info/projects/urzip/issues 1Fi5xUHiAPRKxHvyUGVFGt9extBe8Srdbk + 9999999 f-droid-just-testing 2147483647 diff --git a/tests/repo/org.maxsdkversion_4.apk b/tests/repo/org.maxsdkversion_4.apk deleted file mode 100644 index 39578861..00000000 Binary files a/tests/repo/org.maxsdkversion_4.apk and /dev/null differ diff --git a/tests/run-tests b/tests/run-tests index a790d4c7..c6248954 100755 --- a/tests/run-tests +++ b/tests/run-tests @@ -6,10 +6,120 @@ echo_header() { { echo -e "==============================================================================\n$1"; } 2>/dev/null } -#------------------------------------------------------------------------------# +get_fdroid_apk_filename() { + if [ -z $aapt ]; then + python3 -c "from androguard.core.bytecodes.apk 
import APK; a=APK('$1'); print(a.package+'_'+a.get_androidversion_code()+'.apk')" + else + $aapt dump badging "$1" | sed -n "s,^package: name='\(.*\)' versionCode='\([0-9][0-9]*\)' .*,\1_\2.apk,p" + fi +} -if [ ! -d tests ]; then - cd .. +copy_apks_into_repo() { + set +x + find $APKDIR -type f -name '*.apk' -print0 | while IFS= read -r -d '' f; do + echo $f | grep -F -v -e unaligned -e unsigned -e badsig -e badcert -e bad-unicode -e janus.apk || continue + apk=`get_fdroid_apk_filename "$f"` + test "$f" -nt repo/$apk && rm -f repo/$apk # delete existing if $f is newer + if [ ! -e repo/$apk ] && [ ! -e archive/$apk ]; then + echo "$f --> repo/$apk" + ln "$f" $1/repo/$apk || \ + rsync -axv "$f" $1/repo/$apk # rsync if hard link is not possible + fi + done + set -x +} + +# keep this as an old version to test the automatic parsing of build-tools +# version numbers in `fdroid init` +create_fake_android_home() { + mkdir $1/tools + mkdir $1/platform-tools + mkdir $1/build-tools + mkdir $1/build-tools/19.0.2 + touch $1/build-tools/19.0.2/aapt +} + +create_test_dir() { + test -e $WORKSPACE/.testfiles || mkdir $WORKSPACE/.testfiles + mktemp -d $WORKSPACE/.testfiles/run-tests.XXXX +} + +create_test_file() { + test -e $WORKSPACE/.testfiles || mkdir $WORKSPACE/.testfiles + TMPDIR=$WORKSPACE/.testfiles mktemp +} + +fdroid_init_with_prebuilt_keystore() { + if [ -z "$1" ]; then + keystore=$WORKSPACE/tests/keystore.jks + else + keystore="$1" + fi + $fdroid init --keystore $keystore --repo-keyalias=sova + echo 'keystorepass = "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI="' >> config.py + echo 'keypass = "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI="' >> config.py +} + +# the < is reversed since 0 means success in exit codes +have_git_2_3() { + python3 -c "import sys; from distutils.version import LooseVersion as V; sys.exit(V(sys.argv[3]) < V('2.3'))" `git --version` +} + +is_MD5_disabled() { + javac $WORKSPACE/tests/IsMD5Disabled.java && java -cp $WORKSPACE/tests IsMD5Disabled + return $? +} + +#------------------------------------------------------------------------------# +# "main" + +if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then + set +x + echo "Usage: $0 '/path/to/folder/with/apks'" + exit 1 +fi + +if [ -z "$ANDROID_HOME" ]; then + if python3 -c "import androguard"; then + echo "ANDROID_HOME is not set, using androguard" + else + echo "ERROR: ANDROID_HOME is not set, androguard is not available!"
+ exit 1 + fi +else + echo "Using ANDROID_HOME=$ANDROID_HOME" +fi + +if [ -d tests ]; then + cd tests +fi + +if [ -z "$1" ]; then + APKDIR=`pwd` +else + APKDIR=$1 +fi + +if [ -z $WORKSPACE ]; then + WORKSPACE=`dirname $(pwd)` + echo "Setting Workspace to $WORKSPACE" +fi + +# allow the location of the script to be overridden +if [ -z $fdroid ]; then + fdroid="$WORKSPACE/fdroid" +fi + +# allow the location of aapt to be overridden +if [ -z $aapt ]; then + aapt=`ls -1 $ANDROID_HOME/build-tools/*/aapt 2> /dev/null | sort | tail -1` +fi + +# try to use GNU sed on OSX/BSD cuz BSD sed sucks +if which gsed; then + sed=gsed +else + sed=sed fi set -x # show each command as it is executed @@ -17,11 +127,1024 @@ set -x # show each command as it is executed #------------------------------------------------------------------------------# echo_header "run commit hooks" +cd $WORKSPACE test -x ./hooks/pre-commit && ./hooks/pre-commit -#------------------------------------------------------------------------------# -echo_header "run unit tests" -python3 -m unittest -v +#------------------------------------------------------------------------------# +echo_header "test python getsig replacement" + +cd $WORKSPACE/tests/getsig +./make.sh + +cd $WORKSPACE/tests +for testcase in $WORKSPACE/tests/*.TestCase; do + if [ $testcase == $WORKSPACE/tests/install.TestCase ]; then + echo "skipping install.TestCase, it's too troublesome in CI builds" + continue + fi + $testcase +done + + +#------------------------------------------------------------------------------# +echo_header "print fdroid version" + +$fdroid --version + + +#------------------------------------------------------------------------------# +echo_header 'run process when building and signing are on separate machines' + +if which zipalign || ls -1 $ANDROID_HOME/build-tools/*/zipalign; then + REPOROOT=`create_test_dir` + cd $REPOROOT + cp $WORKSPACE/tests/keystore.jks $REPOROOT/ + $fdroid init --keystore keystore.jks --repo-keyalias=sova + echo 'keystorepass = "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI="' >> config.py + echo 'keypass = "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI="' >> config.py + echo "accepted_formats = ['txt', 'yml']" >> config.py + echo 'keydname = "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US"' >> config.py + test -d archive || mkdir archive + test -d metadata || mkdir metadata + cp $WORKSPACE/tests/metadata/info.guardianproject.urzip.yml metadata/ + test -d repo || mkdir repo + test -d unsigned || mkdir unsigned + cp $WORKSPACE/tests/urzip-release-unsigned.apk unsigned/info.guardianproject.urzip_100.apk + $fdroid publish --verbose + $fdroid update --verbose --nosign + $fdroid signindex --verbose + test -e repo/index.xml + test -e repo/index.jar + test -e repo/index-v1.jar + test -e tmp/apkcache.json + !
test -z tmp/apkcache.json + test -L urzip.apk + grep -F '> config.py +mkdir metadata +cp $WORKSPACE/tests/urzip.apk $WORKSPACE/tests/bad-unicode*.apk repo/ +cp $WORKSPACE/tests/metadata/info.guardianproject.urzip.yml metadata/ + +$fdroid readmeta +$fdroid update + + +#------------------------------------------------------------------------------# +echo_header 'run "fdroid build" in fresh git checkout from import.TestCase' + +cd $WORKSPACE/tests/tmp/importer +git remote update -p +git clean -fdx +# stick with known working commit, in case future commits break things for this code +git reset --hard fea54e1161d5eb9eb1a54e26253ef84d3ab63705 +if [ -d $ANDROID_HOME/platforms/android-23 && -d $ANDROID_HOME/build-tools/23.0.3 ]; then + echo "build_tools = '`ls -1 $ANDROID_HOME/build-tools/ | sort -n | tail -1`'" > config.py + echo "force_build_tools = True" >> config.py + $fdroid build --verbose org.fdroid.ci.test.app:300 +else + echo 'WARNING: Skipping "fdroid build" test since android-23 is missing!' +fi + +#------------------------------------------------------------------------------# +echo_header 'copy git import and run "fdroid scanner" on it' + +REPOROOT=`create_test_dir` +cd $REPOROOT +cp $WORKSPACE/examples/fdroid-icon.png $REPOROOT/ +mkdir metadata +echo "Auto Name:Just A Test" > metadata/org.fdroid.ci.test.app.txt +echo "Web Site:" >> metadata/org.fdroid.ci.test.app.txt +echo "Build:0.3,300" >> metadata/org.fdroid.ci.test.app.txt +echo " commit=0.3" >> metadata/org.fdroid.ci.test.app.txt +echo " subdir=app" >> metadata/org.fdroid.ci.test.app.txt +echo " gradle=yes" >> metadata/org.fdroid.ci.test.app.txt +echo "" >> metadata/org.fdroid.ci.test.app.txt +echo "Repo:https://gitlab.com/fdroid/ci-test-app.git" >> metadata/org.fdroid.ci.test.app.txt +echo "Repo Type:git" >> metadata/org.fdroid.ci.test.app.txt +mkdir build +cp -a $WORKSPACE/tests/tmp/importer build/org.fdroid.ci.test.app +ls -l build/org.fdroid.ci.test.app +$fdroid scanner org.fdroid.ci.test.app --verbose + + +#------------------------------------------------------------------------------# +echo_header "copy tests/repo, generate java/gpg keys, update, and gpgsign" + +REPOROOT=`create_test_dir` +GNUPGHOME=$REPOROOT/gnupghome +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +cp -a $WORKSPACE/tests/metadata $WORKSPACE/tests/repo $WORKSPACE/tests/stats $REPOROOT/ +cp -a $WORKSPACE/tests/gnupghome $GNUPGHOME +chmod 0700 $GNUPGHOME +echo "accepted_formats = ['json', 'txt', 'yml']" >> config.py +echo "install_list = 'org.adaway'" >> config.py +echo "uninstall_list = ('com.android.vending', 'com.facebook.orca',)" >> config.py +echo "gpghome = '$GNUPGHOME'" >> config.py +echo "gpgkey = 'CE71F7FB'" >> config.py +echo "mirrors = ('http://foobarfoobarfoobar.onion/fdroid','https://foo.bar/fdroid',)" >> config.py +$fdroid update --verbose --pretty +test -e repo/index.xml +test -e repo/index.jar +test -e repo/index-v1.jar +grep -F '> config.py +$sed -i.tmp '/allow_disabled_algorithms/d' config.py +test -d metadata || mkdir metadata +cp $WORKSPACE/tests/metadata/*.txt metadata/ +echo 'Summary:good test version of urzip' > metadata/info.guardianproject.urzip.txt +echo 'Summary:good MD5 sig, which is disabled algorithm' > metadata/org.bitbucket.tickytacky.mirrormirror.txt +$sed -i.tmp '/Archive Policy:/d' metadata/*.txt +test -d repo || mkdir repo +cp $WORKSPACE/tests/urzip.apk \ + $WORKSPACE/tests/org.bitbucket.tickytacky.mirrormirror_[0-9].apk \ + $WORKSPACE/tests/repo/com.politedroid_[0-9].apk \ + 
$WORKSPACE/tests/repo/obb.main.twoversions_110161[357].apk \ + repo/ +$sed -i.tmp 's,archive_older = [0-9],archive_older = 3,' config.py + +$fdroid update --pretty --nosign +if which apksigner; then + test `grep '' archive/index.xml | wc -l` -eq 2 + test `grep '' repo/index.xml | wc -l` -eq 10 +else + echo "This will fail when jarsigner allows MD5 for APK signatures" + test `grep '' archive/index.xml | wc -l` -eq 5 + test `grep '' repo/index.xml | wc -l` -eq 7 +fi + +#------------------------------------------------------------------------------# +if ! which apksigner; then + echo_header 'test per-app "Archive Policy"' + + REPOROOT=`create_test_dir` + cd $REPOROOT + fdroid_init_with_prebuilt_keystore + echo "accepted_formats = ['txt']" >> config.py + test -d metadata || mkdir metadata + cp $WORKSPACE/tests/metadata/com.politedroid.txt metadata/ + test -d repo || mkdir repo + cp $WORKSPACE/tests/repo/com.politedroid_[0-9].apk repo/ + $sed -i.tmp 's,archive_older = [0-9],archive_older = 3,' config.py + + $fdroid update --pretty --nosign + test `grep '' archive/index.xml | wc -l` -eq 0 + test `grep '' repo/index.xml | wc -l` -eq 4 + grep -F com.politedroid_3.apk repo/index.xml + grep -F com.politedroid_4.apk repo/index.xml + grep -F com.politedroid_5.apk repo/index.xml + grep -F com.politedroid_6.apk repo/index.xml + test -e repo/com.politedroid_3.apk + test -e repo/com.politedroid_4.apk + test -e repo/com.politedroid_5.apk + test -e repo/com.politedroid_6.apk + + echo "enable one app in the repo" + $sed -i.tmp 's,^Archive Policy:4,Archive Policy:1,' metadata/com.politedroid.txt + $fdroid update --pretty --nosign + test `grep '' archive/index.xml | wc -l` -eq 3 + test `grep '' repo/index.xml | wc -l` -eq 1 + grep -F com.politedroid_3.apk archive/index.xml + grep -F com.politedroid_4.apk archive/index.xml + grep -F com.politedroid_5.apk archive/index.xml + grep -F com.politedroid_6.apk repo/index.xml + test -e archive/com.politedroid_3.apk + test -e archive/com.politedroid_4.apk + test -e archive/com.politedroid_5.apk + test -e repo/com.politedroid_6.apk + + echo "remove all apps from the repo" + $sed -i.tmp 's,^Archive Policy:1,Archive Policy:0,' metadata/com.politedroid.txt + $fdroid update --pretty --nosign + test `grep '' archive/index.xml | wc -l` -eq 4 + test `grep '' repo/index.xml | wc -l` -eq 0 + grep -F com.politedroid_3.apk archive/index.xml + grep -F com.politedroid_4.apk archive/index.xml + grep -F com.politedroid_5.apk archive/index.xml + grep -F com.politedroid_6.apk archive/index.xml + test -e archive/com.politedroid_3.apk + test -e archive/com.politedroid_4.apk + test -e archive/com.politedroid_5.apk + test -e archive/com.politedroid_6.apk + ! test -e repo/com.politedroid_6.apk + + echo "move back one from archive to the repo" + $sed -i.tmp 's,^Archive Policy:0,Archive Policy:1,' metadata/com.politedroid.txt + $fdroid update --pretty --nosign + test `grep '' archive/index.xml | wc -l` -eq 3 + test `grep '' repo/index.xml | wc -l` -eq 1 + grep -F com.politedroid_3.apk archive/index.xml + grep -F com.politedroid_4.apk archive/index.xml + grep -F com.politedroid_5.apk archive/index.xml + grep -F com.politedroid_6.apk repo/index.xml + test -e archive/com.politedroid_3.apk + test -e archive/com.politedroid_4.apk + test -e archive/com.politedroid_5.apk + ! 
test -e archive/com.politedroid_6.apk + test -e repo/com.politedroid_6.apk +fi + + +#------------------------------------------------------------------------------# +echo_header 'test moving old APKs to and from the archive' + +REPOROOT=`create_test_dir` +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +echo "accepted_formats = ['txt']" >> config.py +test -d metadata || mkdir metadata +cp $WORKSPACE/tests/metadata/com.politedroid.txt metadata/ +$sed -i.tmp '/Archive Policy:/d' metadata/com.politedroid.txt +test -d repo || mkdir repo +cp $WORKSPACE/tests/repo/com.politedroid_[0-9].apk repo/ +$sed -i.tmp 's,archive_older = [0-9],archive_older = 3,' config.py + +$fdroid update --pretty --nosign +test `grep '' archive/index.xml | wc -l` -eq 1 +test `grep '' repo/index.xml | wc -l` -eq 3 +grep -F com.politedroid_3.apk archive/index.xml +grep -F com.politedroid_4.apk repo/index.xml +grep -F com.politedroid_5.apk repo/index.xml +grep -F com.politedroid_6.apk repo/index.xml +test -e archive/com.politedroid_3.apk +test -e repo/com.politedroid_4.apk +test -e repo/com.politedroid_5.apk +test -e repo/com.politedroid_6.apk + +$sed -i.tmp 's,archive_older = 3,archive_older = 1,' config.py +$fdroid update --pretty --nosign +test `grep '' archive/index.xml | wc -l` -eq 3 +test `grep '' repo/index.xml | wc -l` -eq 1 +grep -F com.politedroid_3.apk archive/index.xml +grep -F com.politedroid_4.apk archive/index.xml +grep -F com.politedroid_5.apk archive/index.xml +grep -F com.politedroid_6.apk repo/index.xml +test -e archive/com.politedroid_3.apk +test -e archive/com.politedroid_4.apk +test -e archive/com.politedroid_5.apk +test -e repo/com.politedroid_6.apk + +# disabling deletes from the archive +$sed -i.tmp 's/Build:1.3,4/Build:1.3,4\n disable=testing deletion/' metadata/com.politedroid.txt +$fdroid update --pretty --nosign +test `grep '' archive/index.xml | wc -l` -eq 2 +test `grep '' repo/index.xml | wc -l` -eq 1 +grep -F com.politedroid_3.apk archive/index.xml +! grep -F com.politedroid_4.apk archive/index.xml +grep -F com.politedroid_5.apk archive/index.xml +grep -F com.politedroid_6.apk repo/index.xml +test -e archive/com.politedroid_3.apk +! test -e archive/com.politedroid_4.apk +test -e archive/com.politedroid_5.apk +test -e repo/com.politedroid_6.apk + +# disabling deletes from the repo, and promotes one from the archive +$sed -i.tmp 's/Build:1.5,6/Build:1.5,6\n disable=testing deletion/' metadata/com.politedroid.txt +$fdroid update --pretty --nosign +test `grep '' archive/index.xml | wc -l` -eq 1 +test `grep '' repo/index.xml | wc -l` -eq 1 +grep -F com.politedroid_3.apk archive/index.xml +grep -F com.politedroid_5.apk repo/index.xml +! grep -F com.politedroid_6.apk repo/index.xml +test -e archive/com.politedroid_3.apk +test -e repo/com.politedroid_5.apk +! test -e repo/com.politedroid_6.apk + + +#------------------------------------------------------------------------------# +echo_header 'test that verify can succeed and fail' + +REPOROOT=`create_test_dir` +cd $REPOROOT +test -d tmp || mkdir tmp +test -d unsigned || mkdir unsigned +cp $WORKSPACE/tests/repo/com.politedroid_6.apk tmp/ +cp $WORKSPACE/tests/repo/com.politedroid_6.apk unsigned/ +$fdroid verify --reuse-remote-apk --verbose com.politedroid +# force a fail +cp $WORKSPACE/tests/repo/com.politedroid_5.apk unsigned/com.politedroid_6.apk +! 
$fdroid verify --reuse-remote-apk --verbose com.politedroid + + +#------------------------------------------------------------------------------# +echo_header 'test allowing disabled signatures in repo and archive' + +REPOROOT=`create_test_dir` +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +echo "accepted_formats = ['txt']" >> config.py +echo 'allow_disabled_algorithms = True' >> config.py +$sed -i.tmp 's,archive_older = [0-9],archive_older = 3,' config.py +test -d metadata || mkdir metadata +cp $WORKSPACE/tests/metadata/com.politedroid.txt metadata/ +echo 'Summary:good test version of urzip' > metadata/info.guardianproject.urzip.txt +echo 'Summary:good MD5 sig, disabled algorithm' > metadata/org.bitbucket.tickytacky.mirrormirror.txt +$sed -i.tmp '/Archive Policy:/d' metadata/*.txt +test -d repo || mkdir repo +cp $WORKSPACE/tests/repo/com.politedroid_[0-9].apk \ + $WORKSPACE/tests/org.bitbucket.tickytacky.mirrormirror_[0-9].apk \ + $WORKSPACE/tests/urzip-badsig.apk \ + repo/ + +$fdroid update --pretty --nosign +test `grep '' archive/index.xml | wc -l` -eq 2 +test `grep '' repo/index.xml | wc -l` -eq 6 +grep -F com.politedroid_3.apk archive/index.xml +grep -F com.politedroid_4.apk repo/index.xml +grep -F com.politedroid_5.apk repo/index.xml +grep -F com.politedroid_6.apk repo/index.xml +grep -F org.bitbucket.tickytacky.mirrormirror_1.apk archive/index.xml +grep -F org.bitbucket.tickytacky.mirrormirror_2.apk repo/index.xml +grep -F org.bitbucket.tickytacky.mirrormirror_3.apk repo/index.xml +grep -F org.bitbucket.tickytacky.mirrormirror_4.apk repo/index.xml +! grep -F urzip-badsig.apk repo/index.xml +! grep -F urzip-badsig.apk archive/index.xml +test -e archive/com.politedroid_3.apk +test -e repo/com.politedroid_4.apk +test -e repo/com.politedroid_5.apk +test -e repo/com.politedroid_6.apk +test -e archive/org.bitbucket.tickytacky.mirrormirror_1.apk +test -e repo/org.bitbucket.tickytacky.mirrormirror_2.apk +test -e repo/org.bitbucket.tickytacky.mirrormirror_3.apk +test -e repo/org.bitbucket.tickytacky.mirrormirror_4.apk +test -e archive/urzip-badsig.apk + +if ! which apksigner; then + $sed -i.tmp '/allow_disabled_algorithms/d' config.py + $fdroid update --pretty --nosign + test `grep '' archive/index.xml | wc -l` -eq 5 + test `grep '' repo/index.xml | wc -l` -eq 3 + grep -F org.bitbucket.tickytacky.mirrormirror_1.apk archive/index.xml + grep -F org.bitbucket.tickytacky.mirrormirror_2.apk archive/index.xml + grep -F org.bitbucket.tickytacky.mirrormirror_3.apk archive/index.xml + grep -F org.bitbucket.tickytacky.mirrormirror_4.apk archive/index.xml + grep -F com.politedroid_3.apk archive/index.xml + grep -F com.politedroid_4.apk repo/index.xml + grep -F com.politedroid_5.apk repo/index.xml + grep -F com.politedroid_6.apk repo/index.xml + ! grep -F urzip-badsig.apk repo/index.xml + ! 
grep -F urzip-badsig.apk archive/index.xml + test -e archive/org.bitbucket.tickytacky.mirrormirror_1.apk + test -e archive/org.bitbucket.tickytacky.mirrormirror_2.apk + test -e archive/org.bitbucket.tickytacky.mirrormirror_3.apk + test -e archive/org.bitbucket.tickytacky.mirrormirror_4.apk + test -e archive/com.politedroid_3.apk + test -e archive/urzip-badsig.apk + test -e repo/com.politedroid_4.apk + test -e repo/com.politedroid_5.apk + test -e repo/com.politedroid_6.apk +fi + + +#------------------------------------------------------------------------------# +echo_header 'rename apks with `fdroid update --rename-apks`, --nosign for speed' + +REPOROOT=`create_test_dir` +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +echo "accepted_formats = ['txt', 'yml']" >> config.py +echo 'keydname = "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US"' >> config.py +test -d metadata || mkdir metadata +cp $WORKSPACE/tests/metadata/info.guardianproject.urzip.yml metadata/ +test -d repo || mkdir repo +cp $WORKSPACE/tests/urzip.apk "repo/asdfiuhk urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234 ö.apk" +$fdroid update --rename-apks --pretty --nosign +test -e repo/info.guardianproject.urzip_100.apk +grep -F 'info.guardianproject.urzip_100.apk' repo/index-v1.json repo/index.xml +cp $WORKSPACE/tests/urzip-release.apk repo/ +$fdroid update --rename-apks --pretty --nosign +test -e repo/info.guardianproject.urzip_100.apk +test -e repo/info.guardianproject.urzip_100_b4964fd.apk +grep -F 'info.guardianproject.urzip_100.apk' repo/index-v1.json repo/index.xml +grep -F 'info.guardianproject.urzip_100_b4964fd.apk' repo/index-v1.json +! grep -F 'info.guardianproject.urzip_100_b4964fd.apk' repo/index.xml +cp $WORKSPACE/tests/urzip-release.apk repo/ +$fdroid update --rename-apks --pretty --nosign +test -e repo/info.guardianproject.urzip_100.apk +test -e repo/info.guardianproject.urzip_100_b4964fd.apk +test -e duplicates/repo/info.guardianproject.urzip_100_b4964fd.apk +grep -F 'info.guardianproject.urzip_100.apk' repo/index-v1.json repo/index.xml +grep -F 'info.guardianproject.urzip_100_b4964fd.apk' repo/index-v1.json +! grep -F 'info.guardianproject.urzip_100_b4964fd.apk' repo/index.xml + + +#------------------------------------------------------------------------------# +echo_header "test metadata checks" + +REPOROOT=`create_test_dir` +cd $REPOROOT + +mkdir repo +cp $WORKSPACE/tests/urzip.apk $REPOROOT/repo/ + +set +e +$fdroid build +if [ $? -eq 0 ]; then + echo "This should have failed because there is no metadata!" + exit 1 +else + echo "testing metadata checks passed" +fi +set -e + +mkdir $REPOROOT/metadata/ +cp $WORKSPACE/tests/metadata/org.smssecure.smssecure.txt $REPOROOT/metadata/ +$fdroid readmeta + +# now make a fake duplicate +touch $REPOROOT/metadata/org.smssecure.smssecure.yml + +set +e +$fdroid readmeta +if [ $? -eq 0 ]; then + echo "This should have failed because there is a duplicate metadata file!" + exit 1 +else + echo "testing duplicate metadata checks passed" +fi +set -e + + +#------------------------------------------------------------------------------# +echo_header "ensure commands that don't need the JDK work without a JDK configed" + +REPOROOT=`create_test_dir` +cd $REPOROOT +mkdir repo +mkdir metadata +echo "License:GPL-2.0-only" >> metadata/fake.txt +echo "Summary:Yup still fake" >> metadata/fake.txt +echo "Categories:Internet" >> metadata/fake.txt +echo "Description:" >> metadata/fake.txt +echo "this is fake" >> metadata/fake.txt +echo "." 
>> metadata/fake.txt + +# fake that no JDKs are available +echo 'java_paths = {}' > config.py + +LOCAL_COPY_DIR=`create_test_dir`/fdroid +mkdir -p $LOCAL_COPY_DIR/repo +echo "local_copy_dir = '$LOCAL_COPY_DIR'" >> config.py + +$fdroid checkupdates --allow-dirty +which gpg && $fdroid gpgsign +$fdroid lint +$fdroid readmeta +$fdroid rewritemeta fake +$fdroid deploy +$fdroid server update +$fdroid scanner + +# run these to get their output, but they are not set up, so don't fail +$fdroid build || true +$fdroid import || true +$fdroid install || true + + +#------------------------------------------------------------------------------# +# only run this test if running from a git repo, not all files are in the tarball +if [ -e .git/config ]; then + echo_header "create a source tarball" + + cd $WORKSPACE + ./setup.py compile_catalog sdist + + REPOROOT=`create_test_dir` + cd $REPOROOT + tar xzf `ls -1 $WORKSPACE/dist/fdroidserver-*.tar.gz | sort -n | tail -1` + cd $REPOROOT + ./fdroidserver-*/fdroid init + copy_apks_into_repo $REPOROOT + ./fdroidserver-*/fdroid update --create-metadata --verbose +fi + +#------------------------------------------------------------------------------# +echo_header "test config checks of local_copy_dir" + +REPOROOT=`create_test_dir` +cd $REPOROOT +$fdroid init +$fdroid update --create-metadata --verbose +$fdroid readmeta +$fdroid server update --local-copy-dir=/tmp/fdroid +$fdroid deploy --local-copy-dir=/tmp/fdroid --verbose + +# now test the errors work +set +e +$fdroid server update --local-copy-dir=thisisnotanabsolutepath +if [ $? -eq 0 ]; then + echo "This should have failed because thisisnotanabsolutepath is not an absolute path!" + exit 1 +else + echo "testing absolute path checker passed" +fi +$fdroid server update --local-copy-dir=/tmp/IReallyDoubtThisPathExistsasdfasdf +if [ $? -eq 0 ]; then + echo "This should have failed because the path does not end with 'fdroid'!" + exit 1 +else + echo "testing dirname exists checker passed" +fi +$fdroid server update --local-copy-dir=/tmp/IReallyDoubtThisPathExistsasdfasdf/fdroid +if [ $? -eq 0 ]; then + echo "This should have failed because the dirname path does not exist!" + exit 1 +else + echo "testing dirname exists checker passed" +fi +set -e + + +#------------------------------------------------------------------------------# +echo_header "setup a new repo from scratch using ANDROID_HOME and do a local sync" + +REPOROOT=`create_test_dir` +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +copy_apks_into_repo $REPOROOT +$fdroid update --create-metadata --verbose +$fdroid readmeta +grep -F '> config.py +cp $WORKSPACE/tests/repo/obb.mainpatch.current_1619.apk $REPOROOT/repo/ +cp $WORKSPACE/tests/repo/obb.mainpatch.current_1619_another-release-key.apk $REPOROOT/repo/ +$fdroid update --pretty +grep -F 'obb.mainpatch.current_1619.apk' repo/index.xml repo/index-v1.json +grep -F 'obb.mainpatch.current_1619_another-release-key.apk' repo/index-v1.json +! grep -F 'obb.mainpatch.current_1619_another-release-key.apk' repo/index.xml +# die if there are exact duplicates +cp $WORKSPACE/tests/repo/obb.mainpatch.current_1619.apk $REPOROOT/repo/duplicate.apk +!
$fdroid update + + +#------------------------------------------------------------------------------# +echo_header "setup new repo from scratch using ANDROID_HOME, putting APKs in repo first" + +REPOROOT=`create_test_dir` +cd $REPOROOT +mkdir repo +copy_apks_into_repo $REPOROOT +fdroid_init_with_prebuilt_keystore +$fdroid update --create-metadata --verbose +$fdroid readmeta +grep -F '> config.py +echo 'repo_keyalias = "foo"' >> config.py +echo 'keystorepass = "foo"' >> config.py +echo 'keypass = "foo"' >> config.py +set +e +$fdroid update --create-metadata --verbose +if [ $? -eq 0 ]; then + echo "This should have failed because this repo has a bad/fake keystore!" + exit 1 +else + echo '`fdroid update` prompted to add keystore' +fi +set -e + + +#------------------------------------------------------------------------------# +echo_header "copy tests/repo, update with binary transparency log" + +REPOROOT=`create_test_dir` +GIT_REMOTE=`create_test_dir` +GNUPGHOME=$REPOROOT/gnupghome +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +cp -a $WORKSPACE/tests/metadata $WORKSPACE/tests/repo $WORKSPACE/tests/stats $REPOROOT/ +echo "binary_transparency_remote = '$GIT_REMOTE'" >> config.py +echo "accepted_formats = ['json', 'txt', 'yml']" >> config.py +$fdroid update --verbose +if have_git_2_3; then + $fdroid server update --verbose + test -e repo/index.xml + test -e repo/index.jar + test -e repo/index-v1.jar + grep -F '> config.py + echo "servergitmirrors = '$SERVER_GIT_MIRROR'" >> config.py + echo "local_copy_dir = '$LOCAL_COPY_DIR'" >> config.py + echo "accepted_formats = ['json', 'txt', 'yml']" >> config.py + $fdroid update --pretty + grep -F '' repo/index.xml + grep -F '/fdroid/archive' archive/index.xml + test `grep '' repo/index.xml | wc -l` -eq 2 + test `grep '' archive/index.xml | wc -l` -eq 2 + cd binary_transparency + [ `git rev-list --count HEAD` == "1" ] + cd .. + $fdroid server update --verbose + grep -F '> config.py + echo "sync_from_local_copy_dir = True" >> config.py + echo "serverwebroots = '$SERVERWEBROOT'" >> config.py + echo "servergitmirrors = '$SERVER_GIT_MIRROR'" >> config.py + echo "local_copy_dir = '$LOCAL_COPY_DIR'" >> config.py + echo "binary_transparency_remote = '$BINARY_TRANSPARENCY_REMOTE'" >> config.py + $fdroid server update --verbose + cd $BINARY_TRANSPARENCY_REMOTE + [ `git rev-list --count HEAD` == "1" ] + cd $SERVER_GIT_MIRROR + [ `git rev-list --count HEAD` == "1" ] +fi + + +#------------------------------------------------------------------------------# +echo_header 'test extracting and publishing with developer signature' + +REPOROOT=`create_test_dir` +cd $REPOROOT +fdroid_init_with_prebuilt_keystore +echo "accepted_formats = ['txt']" >> config.py +echo 'keydname = "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US"' >> config.py +test -d metadata || mkdir metadata +cp $WORKSPACE/tests/metadata/com.politedroid.txt metadata/ +test -d repo || mkdir repo +test -d unsigned || mkdir unsigned +cp $WORKSPACE/tests/repo/com.politedroid_6.apk unsigned/ +$fdroid signatures unsigned/com.politedroid_6.apk +test -d metadata/com.politedroid/signatures/6 +test -f metadata/com.politedroid/signatures/6/MANIFEST.MF +test -f metadata/com.politedroid/signatures/6/RELEASE.RSA +test -f metadata/com.politedroid/signatures/6/RELEASE.SF +! 
+$fdroid publish
+test -f repo/com.politedroid_6.apk
+if which jarsigner; then
+    jarsigner -verify repo/com.politedroid_6.apk
+fi
+if which apksigner; then
+    apksigner verify repo/com.politedroid_6.apk
+fi
+
+
+#------------------------------------------------------------------------------#
+
+# remove this to prevent git conflicts and complaining
+rm -rf $WORKSPACE/fdroidserver.egg-info/
 
 echo SUCCESS
diff --git a/tests/scanner.TestCase b/tests/scanner.TestCase
new file mode 100755
index 00000000..6e41b905
--- /dev/null
+++ b/tests/scanner.TestCase
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+
+import glob
+import inspect
+import logging
+import optparse
+import os
+import sys
+import unittest
+
+localmodule = os.path.realpath(
+    os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..'))
+print('localmodule: ' + localmodule)
+if localmodule not in sys.path:
+    sys.path.insert(0, localmodule)
+
+import fdroidserver.common
+import fdroidserver.metadata
+import fdroidserver.scanner
+
+
+class ScannerTest(unittest.TestCase):
+
+    def setUp(self):
+        logging.basicConfig(level=logging.INFO)
+        self.basedir = os.path.join(localmodule, 'tests')
+
+    def test_scan_source_files(self):
+        source_files = os.path.join(self.basedir, 'source-files')
+        projects = {
+            'Zillode': 1,
+            'firebase-suspect': 1
+        }
+        for d in glob.glob(os.path.join(source_files, '*')):
+            build = fdroidserver.metadata.Build()
+            fatal_problems = fdroidserver.scanner.scan_source(d, build)
+            self.assertEqual(projects.get(os.path.basename(d), 0),
+                             fatal_problems)
+
+
+if __name__ == "__main__":
+    os.chdir(os.path.dirname(__file__))
+
+    parser = optparse.OptionParser()
+    parser.add_option("-v", "--verbose", action="store_true", default=False,
+                      help="Spew out even more information than normal")
+    (fdroidserver.common.options, args) = parser.parse_args(['--verbose'])
+
+    newSuite = unittest.TestSuite()
+    newSuite.addTest(unittest.makeSuite(ScannerTest))
+    unittest.main(failfast=False)
diff --git a/tests/server.TestCase b/tests/server.TestCase
new file mode 100755
index 00000000..5d058153
--- /dev/null
+++ b/tests/server.TestCase
@@ -0,0 +1,156 @@
+#!/usr/bin/env python3
+
+import inspect
+import logging
+import optparse
+import os
+import sys
+import tempfile
+import unittest
+from unittest import mock
+
+localmodule = os.path.realpath(
+    os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..'))
+if localmodule not in sys.path:
+    sys.path.insert(0, localmodule)
+
+import fdroidserver.common
+import fdroidserver.server
+from testcommon import TmpCwd
+
+
+class ServerTest(unittest.TestCase):
+    '''fdroidserver/server.py'''
+
+    def setUp(self):
+        logging.basicConfig(level=logging.DEBUG)
+        self.basedir = os.path.join(localmodule, 'tests')
+
+        fdroidserver.server.options = mock.Mock()
+        fdroidserver.server.config = {}
+
+    def test_update_serverwebroot_make_cur_version_link(self):
+
+        # setup parameters for this test run
+        fdroidserver.server.options.no_chcksum = True
+        fdroidserver.server.options.identity_file = None
+        fdroidserver.server.options.verbose = False
+        fdroidserver.server.options.quiet = True
+        fdroidserver.server.options.identity_file = None
+        fdroidserver.server.config['make_current_version_link'] = True
+        serverwebroot = "example.com:/var/www/fdroid"
+        repo_section = 'repo'
+
+        # setup function for asserting subprocess.call invocations
+        call_iteration = 0
+
+        def update_server_webroot_call(cmd):
+            nonlocal call_iteration
+            if call_iteration == 0:
+                self.assertListEqual(cmd, ['rsync',
+                                           '--archive',
+                                           '--delete-after',
+                                           '--safe-links',
+                                           '--quiet',
+                                           '--exclude', 'repo/index.xml',
+                                           '--exclude', 'repo/index.jar',
+                                           '--exclude', 'repo/index-v1.jar',
+                                           'repo',
+                                           'example.com:/var/www/fdroid'])
+            elif call_iteration == 1:
+                self.assertListEqual(cmd, ['rsync',
+                                           '--archive',
+                                           '--delete-after',
+                                           '--safe-links',
+                                           '--quiet',
+                                           'repo',
+                                           serverwebroot])
+            elif call_iteration == 2:
+                self.assertListEqual(cmd, ['rsync',
+                                           '--archive',
+                                           '--delete-after',
+                                           '--safe-links',
+                                           '--quiet',
+                                           'Sym.apk',
+                                           'Sym.apk.asc',
+                                           'Sym.apk.sig',
+                                           'example.com:/var/www/fdroid'])
+            else:
+                self.fail('unexpected subprocess.call invocation')
+            call_iteration += 1
+            return 0
+
+        with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
+            os.mkdir('repo')
+            os.symlink('repo/com.example.sym.apk', 'Sym.apk')
+            os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc')
+            os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig')
+            with mock.patch('subprocess.call', side_effect=update_server_webroot_call):
+                fdroidserver.server.update_serverwebroot(serverwebroot,
+                                                         repo_section)
+        self.assertEqual(call_iteration, 3, 'expected 3 invocations of subprocess.call')
+
+    def test_update_serverwebroot_with_id_file(self):
+
+        # setup parameters for this test run
+        fdroidserver.server.options.no_chcksum = False
+        fdroidserver.server.options.verbose = True
+        fdroidserver.server.options.quiet = False
+        fdroidserver.server.options.identity_file = None
+        fdroidserver.server.config['identity_file'] = './id_rsa'
+        fdroidserver.server.config['make_current_version_link'] = False
+        serverwebroot = "example.com:/var/www/fdroid"
+        repo_section = 'archive'
+
+        # setup function for asserting subprocess.call invocations
+        call_iteration = 0
+
+        def update_server_webroot_call(cmd):
+            nonlocal call_iteration
+            if call_iteration == 0:
+                self.assertListEqual(cmd, ['rsync',
+                                           '--archive',
+                                           '--delete-after',
+                                           '--safe-links',
+                                           '--verbose',
+                                           '-e',
+                                           'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i '
+                                           + fdroidserver.server.config['identity_file'],
+                                           '--exclude', 'archive/index.xml',
+                                           '--exclude', 'archive/index.jar',
+                                           '--exclude', 'archive/index-v1.jar',
+                                           'archive',
+                                           serverwebroot])
+            elif call_iteration == 1:
+                self.assertListEqual(cmd, ['rsync',
+                                           '--archive',
+                                           '--delete-after',
+                                           '--safe-links',
+                                           '--verbose',
+                                           '-e',
+                                           'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i '
+                                           + fdroidserver.server.config['identity_file'],
+                                           'archive',
+                                           serverwebroot])
+            else:
+                self.fail('unexpected subprocess.call invocation')
+            call_iteration += 1
+            return 0
+
+        with mock.patch('subprocess.call', side_effect=update_server_webroot_call):
+            fdroidserver.server.update_serverwebroot(serverwebroot,
+                                                     repo_section)
+        self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call')
+
+
+if __name__ == "__main__":
+    os.chdir(os.path.dirname(__file__))
+
+    parser = optparse.OptionParser()
+    parser.add_option("-v", "--verbose", action="store_true", default=False,
+                      help="Spew out even more information than normal")
+    (fdroidserver.common.options, args) = parser.parse_args(['--verbose'])
+
+    newSuite = unittest.TestSuite()
+    newSuite.addTest(unittest.makeSuite(ServerTest))
+    unittest.main(failfast=False)
diff --git a/tests/shared_test_code.py b/tests/shared_test_code.py
deleted file mode 100644
index 3e34900b..00000000
--- a/tests/shared_test_code.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2017, Michael Poehn
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see .
-
-import os
-import sys
-import tempfile
-import unittest
-import unittest.mock
-from pathlib import Path
-
-GP_FINGERPRINT = 'B7C2EEFD8DAC7806AF67DFCD92EB18126BC08312A7F2D6F3862E46013C7A6135'
-
-
-class VerboseFalseOptions:
-    verbose = False
-
-
-class TmpCwd:
-    """Context-manager for temporarily changing the current working directory."""
-
-    def __init__(self, new_cwd):
-        self.new_cwd = new_cwd
-
-    def __enter__(self):
-        self.orig_cwd = os.getcwd()
-        os.chdir(self.new_cwd)
-
-    def __exit__(self, a, b, c):
-        os.chdir(self.orig_cwd)
-
-
-class TmpPyPath:
-    """Context-manager for temporarily adding a directory to Python path."""
-
-    def __init__(self, additional_path):
-        self.additional_path = additional_path
-
-    def __enter__(self):
-        sys.path.append(self.additional_path)
-
-    def __exit__(self, a, b, c):
-        sys.path.remove(self.additional_path)
-
-
-def mock_open_to_str(mock):
-    """For accessing all data written into a unittest.mock.mock_open() instance as a string."""
-
-    return "".join(
-        [x.args[0] for x in mock.mock_calls if str(x).startswith("call().write(")]
-    )
-
-
-def mkdtemp():
-    if sys.version_info < (3, 10):  # ignore_cleanup_errors was added in 3.10
-        return tempfile.TemporaryDirectory()
-    else:
-        return tempfile.TemporaryDirectory(ignore_cleanup_errors=True)
-
-
-def mkdir_testfiles(localmodule, test):
-    """Keep the test files in a labeled test dir for easy reference"""
-    testroot = Path(localmodule) / '.testfiles'
-    testroot.mkdir(exist_ok=True)
-    testdir = testroot / unittest.TestCase.id(test)
-    testdir.mkdir(exist_ok=True)
-    return Path(tempfile.mkdtemp(dir=testdir))
-
-
-def mock_urlopen(status=200, body=None):
-    resp = unittest.mock.MagicMock()
-    resp.getcode.return_value = status
-    resp.read.return_value = body
-    resp.__enter__.return_value = resp
-    return unittest.mock.Mock(return_value=resp)
diff --git a/tests/signatures.TestCase b/tests/signatures.TestCase
new file mode 100755
index 00000000..b6af0eb5
--- /dev/null
+++ b/tests/signatures.TestCase
@@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+
+import inspect
+import optparse
+import os
+import sys
+import unittest
+import hashlib
+import logging
+from tempfile import TemporaryDirectory
+
+localmodule = os.path.realpath(
+    os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..'))
+print('localmodule: ' + localmodule)
+if localmodule not in sys.path:
+    sys.path.insert(0, localmodule)
+
+from testcommon import TmpCwd
+from fdroidserver import common, signatures
+
+
+class SignaturesTest(unittest.TestCase):
+
+    def setUp(self):
+        logging.basicConfig(level=logging.DEBUG)
+        common.config = None
+        config = common.read_config(common.options)
+        config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner')
+        config['verbose'] = True
+        common.config = config
+
+    def test_main(self):
+
+        # option fixture class:
+        class OptionsFixture:
+            APK = [os.path.abspath(os.path.join('repo', 'com.politedroid_3.apk'))]
+
+        with TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
+            signatures.extract(OptionsFixture())
+
+            # check if extracted signatures are where they are supposed to be
+            # also verify whether the extracted files contain what they should
+            filesAndHashes = (
+                (os.path.join('metadata', 'com.politedroid', 'signatures', '3', 'MANIFEST.MF'),
+                 '7dcd83f0c41a75457fd2311bf3c4578f80d684362d74ba8dc52838d353f31cf2'),
+                (os.path.join('metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.RSA'),
+                 '883ef3d5a6e0bf69d2a58d9e255a7930f08a49abc38e216ed054943c99c8fdb4'),
+                (os.path.join('metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.SF'),
+                 '99fbb3211ef5d7c1253f3a7ad4836eadc9905103ce6a75916c40de2831958284'),
+            )
+            for path, checksum in filesAndHashes:
+                self.assertTrue(os.path.isfile(path),
+                                msg="check whether '{path}' was extracted "
+                                    "correctly.".format(path=path))
+                with open(path, 'rb') as f:
+                    self.assertEqual(hashlib.sha256(f.read()).hexdigest(), checksum)
+
+
+if __name__ == "__main__":
+    os.chdir(os.path.dirname(__file__))
+
+    parser = optparse.OptionParser()
+    parser.add_option("-v", "--verbose", action="store_true", default=False,
+                      help="Spew out even more information than normal")
+    (common.options, args) = parser.parse_args(['--verbose'])
+
+    newSuite = unittest.TestSuite()
+    newSuite.addTest(unittest.makeSuite(SignaturesTest))
+    unittest.main(failfast=False)
diff --git a/tests/source-files/OtakuWorld/build.gradle b/tests/source-files/OtakuWorld/build.gradle
deleted file mode 100644
index cfbc1450..00000000
--- a/tests/source-files/OtakuWorld/build.gradle
+++ /dev/null
@@ -1,166 +0,0 @@
-// Top-level build file where you can add configuration options common to all sub-projects/modules.
-buildscript {
-    ext.kotlin_version = "1.6.21"
-    ext.latestAboutLibsRelease = "10.3.0"
-
-    ext.coroutinesVersion = "1.6.2"
-
-    ext.coroutinesCore = "org.jetbrains.kotlinx:kotlinx-coroutines-core:$coroutinesVersion"
-    ext.coroutinesAndroid = "org.jetbrains.kotlinx:kotlinx-coroutines-android:$coroutinesVersion"
-    ext.coroutinesRX = "org.jetbrains.kotlinx:kotlinx-coroutines-rx2:$coroutinesVersion"
-
-    ext.gson = 'com.google.code.gson:gson:2.9.0'
-
-    ext.glideVersion = "4.13.1"
-    ext.glide = "com.github.bumptech.glide:glide:$glideVersion"
-    ext.glideCompiler = "com.github.bumptech.glide:compiler:$glideVersion"
-
-    ext.paging_version = "3.1.1"
-
-    ext.rxkotlin = "io.reactivex.rxjava2:rxkotlin:2.4.0"
-    ext.rxandroid = "io.reactivex.rxjava2:rxandroid:2.1.1"
-    ext.rxbinding = 'com.jakewharton.rxbinding2:rxbinding:2.2.0'
-    ext.rxbindingKotlin = 'com.jakewharton.rxbinding2:rxbinding-kotlin:2.2.0'
-
-    ext.androidCore = 'androidx.core:core-ktx:1.8.0'
-    ext.appCompat = 'androidx.appcompat:appcompat:1.4.2'
-    ext.material = 'com.google.android.material:material:1.6.0-beta01'
-
-    ext.preference = "androidx.preference:preference-ktx:1.2.0"
-
-    ext.recyclerview = 'androidx.recyclerview:recyclerview:1.2.1'
-    ext.constraintlayout = 'androidx.constraintlayout:constraintlayout:2.1.4'
-    ext.swiperefresh = 'androidx.swiperefreshlayout:swiperefreshlayout:1.1.0'
-
-    ext.jsoup = 'org.jsoup:jsoup:1.15.1'
-
-    ext.crashlytics = 'com.google.firebase:firebase-crashlytics:18.2.8'
-    ext.analytics = 'com.google.firebase:firebase-analytics:20.1.0'
-    ext.play_services = 'com.google.android.gms:play-services-auth:20.1.0'
-
-    ext.exoplayer_version = "2.16.1"
-    ext.room_version = "2.4.2"
-
-    ext.nav_version = "2.4.2"
-
-    def koin_version = "3.0.2"
-
-    // Koin main features for Android (Scope,ViewModel ...)
- ext.koinAndroid = "io.insert-koin:koin-android:$koin_version" - // Koin Android - experimental builder extensions - ext.koinAndroidExt = "io.insert-koin:koin-android-ext:$koin_version" - - ext.lottieVersion = "4.2.2" - - ext.coil = "2.1.0" - - ext.jetpack = "1.2.0-rc01" - - ext.accompanist = "0.24.11-rc" - - ext.composeUi = "androidx.compose.ui:ui:$jetpack" - // Tooling support (Previews, etc.) - ext.composeUiTooling = "androidx.compose.ui:ui-tooling:$jetpack" - // Foundation (Border, Background, Box, Image, Scroll, shapes, animations, etc.) - ext.composeFoundation = "androidx.compose.foundation:foundation:$jetpack" - // Material Design - ext.composeMaterial = "androidx.compose.material:material:$jetpack" - ext.materialYou = "androidx.compose.material3:material3:1.0.0-alpha13" - // Material design icons - ext.composeMaterialIconsCore = "androidx.compose.material:material-icons-core:$jetpack" - ext.composeMaterialIconsExtended = "androidx.compose.material:material-icons-extended:$jetpack" - // Integration with activities - ext.composeActivity = 'androidx.activity:activity-compose:1.4.0' - // Integration with ViewModels - ext.composeLifecycle = 'androidx.lifecycle:lifecycle-viewmodel-compose:2.4.1' - // Integration with observables - ext.composeRuntimeLivedata = "androidx.compose.runtime:runtime-livedata:$jetpack" - ext.composeRuntimeRxjava2 = "androidx.compose.runtime:runtime-rxjava2:$jetpack" - ext.composeMaterialThemeAdapter = "com.google.android.material:compose-theme-adapter:1.1.11" - ext.composeMaterial3ThemeAdapter = "com.google.android.material:compose-theme-adapter-3:1.0.11" - ext.landscapistGlide = "com.github.skydoves:landscapist-glide:1.5.2" - ext.composeConstraintLayout = "androidx.constraintlayout:constraintlayout-compose:1.0.1" - ext.composeAnimation = "androidx.compose.animation:animation:$jetpack" - ext.materialPlaceholder = "com.google.accompanist:accompanist-placeholder-material:$accompanist" - ext.drawablePainter = "com.google.accompanist:accompanist-drawablepainter:$accompanist" - ext.permissions = "com.google.accompanist:accompanist-permissions:$accompanist" - ext.uiUtil = "androidx.compose.ui:ui-util:$jetpack" - ext.coilCompose = "io.coil-kt:coil-compose:$coil" - ext.navCompose = "androidx.navigation:navigation-compose:$nav_version" - ext.navMaterial = "com.google.accompanist:accompanist-navigation-material:$accompanist" - ext.navAnimation = "com.google.accompanist:accompanist-navigation-animation:$accompanist" - - ext.swipeRefresh = "com.google.accompanist:accompanist-swiperefresh:$accompanist" - ext.systemUiController = "com.google.accompanist:accompanist-systemuicontroller:$accompanist" - - ext.inset = "com.google.accompanist:accompanist-insets:$accompanist" - // If using insets-ui - ext.insetUi = "com.google.accompanist:accompanist-insets-ui:$accompanist" - - ext.datastore = "androidx.datastore:datastore:1.0.0" - ext.datastorePref = "androidx.datastore:datastore-preferences:1.0.0" - - ext { - jakepurple13Tools = [ - helpfultools: [ - Deps.gsonutils, - Deps.helpfulutils, - Deps.loggingutils, - Deps.dragswipe, - Deps.funutils, - Deps.rxutils - ] - ] - room = [ - room: [ - "androidx.room:room-runtime:$room_version", - "androidx.room:room-ktx:$room_version", - "androidx.room:room-rxjava2:$room_version" - ] - ] - - koin = [koin: [koinAndroid, koinAndroidExt]] - - compose = [ - compose: [ - composeUi, composeUiTooling, composeFoundation, composeMaterial, - composeMaterialIconsCore, composeMaterialIconsExtended, - composeActivity, composeLifecycle, - 
composeRuntimeLivedata, composeRuntimeRxjava2, - composeMaterialThemeAdapter, composeMaterial3ThemeAdapter, - landscapistGlide, coilCompose, - composeConstraintLayout, permissions, - materialPlaceholder, drawablePainter, uiUtil, - materialYou, - inset, insetUi, - navCompose, navMaterial, navAnimation, - swipeRefresh, systemUiController - ] - ] - - firebaseCrash = [ crash: [crashlytics, analytics] ] - - datastore = [ datastore: [datastore, datastorePref] ] - } - - repositories { - google() - gradlePluginPortal() - mavenCentral() - } - dependencies { - classpath 'com.android.tools.build:gradle:7.1.2' - classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" - classpath 'com.google.gms:google-services:4.3.10' - classpath 'com.google.firebase:firebase-crashlytics-gradle:2.8.1' - classpath "com.mikepenz.aboutlibraries.plugin:aboutlibraries-plugin:10.3.0" - classpath "org.jetbrains.kotlin:kotlin-serialization:$kotlin_version" - // NOTE: Do not place your application dependencies here; they belong - // in the individual module build.gradle files - classpath "androidx.navigation:navigation-safe-args-gradle-plugin:$nav_version" - } -} - -task clean(type: Delete) { - delete rootProject.buildDir -} diff --git a/tests/source-files/catalog.test/app/build.gradle b/tests/source-files/catalog.test/app/build.gradle deleted file mode 100644 index 72c9d184..00000000 --- a/tests/source-files/catalog.test/app/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -implementation libs.bundles.firebase -implementation libs.play.service.ads diff --git a/tests/source-files/catalog.test/build.gradle.kts b/tests/source-files/catalog.test/build.gradle.kts deleted file mode 100644 index 5572706f..00000000 --- a/tests/source-files/catalog.test/build.gradle.kts +++ /dev/null @@ -1,5 +0,0 @@ -plugins { - alias(libs.plugins.google.services) - alias(libs.plugins.firebase.crashlytics) - alias(projectLibs.plugins.firebase.crashlytics) -} diff --git a/tests/source-files/catalog.test/buildSrc/build.gradle.kts b/tests/source-files/catalog.test/buildSrc/build.gradle.kts deleted file mode 100644 index 40eeaa54..00000000 --- a/tests/source-files/catalog.test/buildSrc/build.gradle.kts +++ /dev/null @@ -1,9 +0,0 @@ -plugins { - alias(libs.plugins.google.services) - alias(libs.plugins.firebase.crashlytics) - alias(projectLibs.plugins.firebase.crashlytics) -} - -dependencies { - implementation(libs.plugins.androidApplication.asLibraryDependency) -} diff --git a/tests/source-files/catalog.test/buildSrc/settings.gradle.kts b/tests/source-files/catalog.test/buildSrc/settings.gradle.kts deleted file mode 100644 index 98644daf..00000000 --- a/tests/source-files/catalog.test/buildSrc/settings.gradle.kts +++ /dev/null @@ -1,22 +0,0 @@ -pluginManagement { - repositories { - gradlePluginPortal() - } -} - -dependencyResolutionManagement { - repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) - repositories { - google() - mavenCentral() - } - - versionCatalogs { - create("libs") { - from(files("../gradle/libs.versions.toml")) - } - } -} - -rootProject.name = "buildSrc" -rootProject.buildFileName = "buildSrc.gradle.kts" diff --git a/tests/source-files/catalog.test/buildSrc2/build.gradle.kts b/tests/source-files/catalog.test/buildSrc2/build.gradle.kts deleted file mode 100644 index 5572706f..00000000 --- a/tests/source-files/catalog.test/buildSrc2/build.gradle.kts +++ /dev/null @@ -1,5 +0,0 @@ -plugins { - alias(libs.plugins.google.services) - alias(libs.plugins.firebase.crashlytics) - alias(projectLibs.plugins.firebase.crashlytics) -} 
diff --git a/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts b/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts deleted file mode 100644 index 98644daf..00000000 --- a/tests/source-files/catalog.test/buildSrc2/settings.gradle.kts +++ /dev/null @@ -1,22 +0,0 @@ -pluginManagement { - repositories { - gradlePluginPortal() - } -} - -dependencyResolutionManagement { - repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) - repositories { - google() - mavenCentral() - } - - versionCatalogs { - create("libs") { - from(files("../gradle/libs.versions.toml")) - } - } -} - -rootProject.name = "buildSrc" -rootProject.buildFileName = "buildSrc.gradle.kts" diff --git a/tests/source-files/catalog.test/core/build.gradle b/tests/source-files/catalog.test/core/build.gradle deleted file mode 100644 index 72c9d184..00000000 --- a/tests/source-files/catalog.test/core/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -implementation libs.bundles.firebase -implementation libs.play.service.ads diff --git a/tests/source-files/catalog.test/gradle/libs.versions.toml b/tests/source-files/catalog.test/gradle/libs.versions.toml deleted file mode 100644 index 9fb3707f..00000000 --- a/tests/source-files/catalog.test/gradle/libs.versions.toml +++ /dev/null @@ -1,18 +0,0 @@ -[versions] -firebase = "1.1.1" -gms = "1.2.1" -androidGradlePlugin = "8.12.0" - -[libraries] -firebase-crash = { module = "com.google.firebase:firebase-crash", version.ref = "firebase" } -firebase_core = { module = "com.google.firebase:firebase-core", version = "2.2.2" } -"play.service.ads" = { module = "com.google.android.gms:play-services-ads", version.ref = "gms" } -jacoco = "org.jacoco:org.jacoco.core:0.8.7" - -[plugins] -google-services = { id = "com.google.gms.google-services", version.ref = "gms" } -firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase" } -androidApplication = { id = "com.android.application", version.ref = "androidGradlePlugin" } - -[bundles] -firebase = ["firebase-crash", "firebase_core"] diff --git a/tests/source-files/catalog.test/libs.versions.toml b/tests/source-files/catalog.test/libs.versions.toml deleted file mode 100644 index 666a0f7f..00000000 --- a/tests/source-files/catalog.test/libs.versions.toml +++ /dev/null @@ -1,15 +0,0 @@ -[versions] -firebase = "1.1.1" -gms = "1.2.1" - -[libraries] -firebase-crash = { module = "com.google.firebase:firebase-crash", version.ref = "firebase" } -firebase_core = { module = "com.google.firebase:firebase-core", version = "2.2.2" } -"play.service.ads" = { module = "com.google.android.gms:play-services-ads", version.ref = "gms"} - -[plugins] -google-services = { id = "com.google.gms.google-services", version.ref = "gms" } -firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase" } - -[bundles] -firebase = ["firebase-crash", "firebase_core"] diff --git a/tests/source-files/catalog.test/settings.gradle.kts b/tests/source-files/catalog.test/settings.gradle.kts deleted file mode 100644 index fd9ba80c..00000000 --- a/tests/source-files/catalog.test/settings.gradle.kts +++ /dev/null @@ -1,14 +0,0 @@ -dependencyResolutionManagement { - repositories { - mavenCentral() - } - defaultLibrariesExtensionName = "projectLibs" - versionCatalogs { - create("libs") { - from(files("./libs.versions.toml")) - } - create("anotherLibs") { - from(files("$rootDir/libs.versions.toml")) - } - } -} diff --git a/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle 
b/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle deleted file mode 100644 index 02955512..00000000 --- a/tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -configurations.maybeCreate("default") -artifacts.add("default", file('avenginekit.aar')) \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/build.gradle b/tests/source-files/cn.wildfirechat.chat/build.gradle deleted file mode 100644 index acc41375..00000000 --- a/tests/source-files/cn.wildfirechat.chat/build.gradle +++ /dev/null @@ -1,41 +0,0 @@ -// Top-level build file where you can add configuration options common to all sub-projects/modules. - -buildscript { - - repositories { - google() - jcenter() - mavenCentral() - } - dependencies { - classpath 'com.android.tools.build:gradle:3.4.2' - classpath 'com.github.dcendents:android-maven-gradle-plugin:2.1' - - // NOTE: Do not place your application dependencies here; they belong - // in the individual module build.gradle files - } -} - -allprojects { - repositories { - google() - jcenter() - maven { - url "http://developer.huawei.com/repo/" - } - - maven { url 'https://jitpack.io' } - maven { url 'https://dl.bintray.com/jenly/maven' } - } - configurations { - all { - resolutionStrategy { - //force "android.arch.lifecycle:runtime:1.1.1" - } - } - } -} - -task clean(type: Delete) { - delete rootProject.buildDir -} diff --git a/tests/source-files/cn.wildfirechat.chat/chat/build.gradle b/tests/source-files/cn.wildfirechat.chat/chat/build.gradle deleted file mode 100644 index f2503356..00000000 --- a/tests/source-files/cn.wildfirechat.chat/chat/build.gradle +++ /dev/null @@ -1,115 +0,0 @@ -apply plugin: 'com.android.application' - -android { - signingConfigs { - wfc { - keyAlias 'wfc' - keyPassword 'wildfirechat' - storeFile file('../wfc.keystore') - storePassword 'wildfirechat' - } - } - compileSdkVersion 28 - aaptOptions.cruncherEnabled = false - aaptOptions.useNewCruncher = false - defaultConfig { - applicationId "cn.wildfirechat.chat" - minSdkVersion 16 - targetSdkVersion 28 //当targetversion大于23时,需要使用fileprovider - versionCode 23 - versionName "0.6.9" - multiDexEnabled true - javaCompileOptions { - annotationProcessorOptions { - includeCompileClasspath true - } - } - signingConfig signingConfigs.wfc - -// buildConfigField("String", "BuglyId", '"34490ba79f"') - - ndk { - abiFilters "armeabi-v7a", 'x86', 'x86_64' // ,'armeabi', 'arm64-v8a', 'x86', 'x86_64' - } - } - buildTypes { - release { - minifyEnabled true - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - signingConfig signingConfigs.wfc - } - debug { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - signingConfig signingConfigs.wfc - } - } - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - lintOptions { - abortOnError false - } - sourceSets { - main { - // wfc kit start - jniLibs.srcDirs += ['libs', 'kit/libs'] - res.srcDirs += ['kit/src/main/res', 'kit/src/main/res-av'] - assets.srcDirs += ['kit/src/main/assets'] - java.srcDirs += ['kit/src/main/java'] - // wfc kit end - } - } - productFlavors { - } - - compileOptions { - sourceCompatibility 1.8 - targetCompatibility 1.8 - } -} - -dependencies { - implementation fileTree(include: ['*.jar'], dir: 'libs') - implementation files('libs/TencentLocationSDK_v4.9.7.12_r247861_161205_1104.jar') - implementation 
files('libs/TencentMapSDK_Raster_v_1.2.7_51ae0e7.jar') - implementation files('libs/TencentSearch1.1.3.jar') - - implementation 'com.tencent.bugly:crashreport:2.8.6.0' - implementation 'com.tencent.bugly:nativecrashreport:3.6.0.1' - implementation 'com.lqr.adapter:library:1.0.2' - implementation 'com.jaeger.statusbaruitl:library:1.3.5' - implementation project(':push') - - // wfc kit start - implementation fileTree(include: ['*.jar'], dir: 'kit/libs') - implementation 'androidx.appcompat:appcompat:1.1.0-beta01' - implementation 'com.google.android.material:material:1.1.0-alpha10' - implementation 'cjt.library.wheel:camera:1.1.9' - implementation 'com.kyleduo.switchbutton:library:1.4.4' - implementation 'com.squareup.okhttp3:okhttp:3.11.0' - implementation 'com.squareup.okio:okio:1.14.0' - implementation 'com.jakewharton:butterknife:10.2.0' - annotationProcessor 'com.jakewharton:butterknife-compiler:10.2.0' - implementation 'com.github.bumptech.glide:glide:4.8.0' - annotationProcessor 'com.github.bumptech.glide:compiler:4.8.0' - implementation 'com.github.chrisbanes:PhotoView:2.3.0' - implementation 'org.webrtc:google-webrtc:1.0.21929' - implementation 'com.afollestad.material-dialogs:core:0.9.6.0' - implementation 'q.rorbin:badgeview:1.1.3' - implementation 'com.google.code.gson:gson:2.8.5' - - // ViewModel and LiveData - def lifecycle_version = '2.2.0-alpha05' - implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" - - implementation project(':client') - implementation project(':avenginekit') - implementation project(':emojilibrary') - implementation project(':imagepicker') - - implementation 'com.king.zxing:zxing-lite:1.1.1' - implementation 'androidx.swiperefreshlayout:swiperefreshlayout:1.0.0' - // kit wfc end -} diff --git a/tests/source-files/cn.wildfirechat.chat/client/build.gradle b/tests/source-files/cn.wildfirechat.chat/client/build.gradle deleted file mode 100644 index ce41d062..00000000 --- a/tests/source-files/cn.wildfirechat.chat/client/build.gradle +++ /dev/null @@ -1,57 +0,0 @@ -apply plugin: 'com.android.library' -apply plugin: 'com.github.dcendents.android-maven' - -group = 'com.github.wildfirechat' - -android { - compileSdkVersion 28 - - - defaultConfig { - minSdkVersion 16 - targetSdkVersion 28 - versionCode 1 - versionName "1.0" - - // testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" - ndk { - // TODO: changes this for your application if needed - moduleName = "mmnet" - //abiFilter "armeabi" //去掉armeabi架构,armeabi-v7a可以兼容armeabi架构。 - abiFilter "armeabi-v7a" - abiFilter "arm64-v8a" - abiFilter "x86" - abiFilter "x86_64" - } - } - - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - - - sourceSets { - main { - jniLibs.srcDirs = ['libs'] - } - } - - lintOptions { - abortOnError false - } - compileOptions { - targetCompatibility 1.8 - sourceCompatibility 1.8 - } -} - - -dependencies { - api project(':mars-core-release') - def lifecycle_version = '2.0.0-beta01' - implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" -} diff --git a/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml b/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml deleted file mode 100644 index 0c056938..00000000 --- a/tests/source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git 
a/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle b/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle deleted file mode 100755 index 50ea5f5a..00000000 --- a/tests/source-files/cn.wildfirechat.chat/emojilibrary/build.gradle +++ /dev/null @@ -1,34 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 28 - - - defaultConfig { - minSdkVersion 16 - targetSdkVersion 28 - versionCode 1 - versionName "1.0" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - - } - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - - android { - lintOptions { - abortOnError false - } - } - -} - -dependencies { - implementation fileTree(include: ['*.jar'], dir: 'libs') - implementation 'androidx.appcompat:appcompat:1.0.0-beta01' -} \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle b/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle deleted file mode 100644 index 42020666..00000000 --- a/tests/source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle +++ /dev/null @@ -1,42 +0,0 @@ -def checkExecResult(execResult) { - if (execResult) { - if (execResult.getExitValue() != 0) { - throw new GradleException('Non-zero exit value: ' + execResult.getExitValue()) - } - - } else { - throw new GradleException('Returned a null execResult object') - } -} - -task buildLibrariesForAndroid(type: Exec) { - workingDir '../' - - def sdkDir = System.env.ANDROID_HOME - def ndkDir = System.env.ANDROID_NDK_HOME - - if (rootProject.file("local.properties").exists()) { - Properties properties = new Properties() - properties.load(project.rootProject.file('local.properties').newDataInputStream()) - sdkDir = properties.getProperty('sdk.dir') - ndkDir = properties.getProperty('ndk.dir') - } - - def path = System.env.PATH - - def envMap = [ - 'ANDROID_HOME' : sdkDir, - 'ANDROID_NDK_HOME': ndkDir, - '_ARCH_' : 'armeabi', - 'PATH' : ndkDir, - ] - environment envMap - - print envMap - - commandLine 'python', 'build_android.py', '2', 'armeabi' - - doLast { - checkExecResult(execResult) - } -} diff --git a/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle b/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle deleted file mode 100755 index 66b153a1..00000000 --- a/tests/source-files/cn.wildfirechat.chat/imagepicker/build.gradle +++ /dev/null @@ -1,30 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 28 - - defaultConfig { - minSdkVersion 16 - targetSdkVersion 28 - versionCode 1 - versionName "1.0" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - - } - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - lintOptions { - abortOnError false - } -} - -dependencies { - implementation 'androidx.appcompat:appcompat:1.0.0-beta01' - implementation 'com.github.chrisbanes.photoview:library:1.2.4' - implementation 'com.github.bumptech.glide:glide:4.8.0' -} diff --git a/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle b/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle deleted file mode 100644 index 65e8c7fe..00000000 --- a/tests/source-files/cn.wildfirechat.chat/mars-core-release/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -configurations.maybeCreate("default") -artifacts.add("default", 
file('mars-core-release.aar')) \ No newline at end of file diff --git a/tests/source-files/cn.wildfirechat.chat/push/build.gradle b/tests/source-files/cn.wildfirechat.chat/push/build.gradle deleted file mode 100644 index 26f5cbe2..00000000 --- a/tests/source-files/cn.wildfirechat.chat/push/build.gradle +++ /dev/null @@ -1,55 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 28 - - - defaultConfig { - minSdkVersion 16 - targetSdkVersion 28 - versionCode 1 - versionName "1.0" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - - manifestPlaceholders = [ - - MI_APP_ID : "2882303761517722456", - MI_APP_KEY : "5731772292456", - - HMS_APP_ID : "100221325", - - MEIZU_APP_ID : "113616", - MEIZU_APP_KEY: "fcd886f51c144b45b87a67a28e2934d1", - - VIVO_APP_ID : "12918", - VIVO_APP_KEY : "c42feb05-de6c-427d-af55-4f902d9e0a75" - ] - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - compileOptions { - sourceCompatibility 1.8 - targetCompatibility 1.8 - } - -} - -dependencies { - implementation fileTree(dir: 'libs', include: ['*.jar']) - - implementation 'com.huawei.android.hms:push:2.5.3.305' - implementation 'com.huawei.android.hms:base:2.5.3.305' - - implementation 'androidx.appcompat:appcompat:1.0.0-beta01' - implementation project(':client') - implementation 'com.meizu.flyme.internet:push-internal:3.4.2@aar' - - def lifecycle_version = '2.2.0-alpha05' - implementation "androidx.lifecycle:lifecycle-extensions:$lifecycle_version" -} diff --git a/tests/source-files/cn.wildfirechat.chat/settings.gradle b/tests/source-files/cn.wildfirechat.chat/settings.gradle deleted file mode 100644 index e98e916c..00000000 --- a/tests/source-files/cn.wildfirechat.chat/settings.gradle +++ /dev/null @@ -1,7 +0,0 @@ -include ':client', - ':push', - ':chat', - ':mars-core-release', - ':emojilibrary', - ':imagepicker', - ':avenginekit' diff --git a/tests/source-files/com.anpmech.launcher/app/build.gradle b/tests/source-files/com.anpmech.launcher/app/build.gradle deleted file mode 100644 index 97d2e4fc..00000000 --- a/tests/source-files/com.anpmech.launcher/app/build.gradle +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2015-2017 Hayai Software - * Copyright 2018 The KeikaiLauncher Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND - * either express or implied. See the License for the specific language governing permissions and - * limitations under the License. - */ -plugins { - id 'com.android.application' - id 'pl.allegro.tech.build.axion-release' version '1.8.1' -} - -scmVersion { - tag { - prefix = '' - } -} - -/** - * Takes version {@code major.minor.patch[-suffix]} and returns numeric versionCode based on it - * Example: {@code 1.2.3-SNAPSHOT} will return {@code 1002003} - */ -static int versionCode(String versionName) { - def matcher = (versionName =~ /(\d+)\.(\d+)\.(\d+).*/) - return matcher.matches() ? 
- matcher.collect { version, major, minor, patch -> - major.toInteger() * 10000 + minor.toInteger() * 100 + patch.toInteger() - }.head() : - -1 -} - -def androidVersion = [ - name: scmVersion.version, - code: versionCode(scmVersion.version), -] - -android { - compileSdkVersion 28 - defaultConfig { - applicationId 'com.anpmech.launcher' - minSdkVersion 15 - targetSdkVersion 28 - versionName androidVersion.name - versionCode androidVersion.code - } - lintOptions { - abortOnError false - } - buildTypes { - all { - buildConfigField("String", "GITHUB_USER", "\"KeikaiLauncher\"") - buildConfigField("String", "GITHUB_PROJECT", "\"KeikaiLauncher\"") - } - release { - minifyEnabled true - shrinkResources true - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.txt' - } - debug { - versionNameSuffix "-debug" - } - } - dependencies { - implementation 'com.android.support:support-annotations:28.0.0' - } -} - -dependencies { -} diff --git a/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml b/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml deleted file mode 100644 index 77c4e22f..00000000 --- a/tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml +++ /dev/null @@ -1,66 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/source-files/com.anpmech.launcher/build.gradle b/tests/source-files/com.anpmech.launcher/build.gradle deleted file mode 100644 index a92bf663..00000000 --- a/tests/source-files/com.anpmech.launcher/build.gradle +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2015-2017 Hayai Software - * Copyright 2018 The KeikaiLauncher Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND - * either express or implied. See the License for the specific language governing permissions and - * limitations under the License. - */ - -// Top-level build file where you can add configuration options common to all sub-projects/modules. -buildscript { - repositories { - jcenter() - google() - } - dependencies { - classpath 'com.android.tools.build:gradle:3.2.1' - } -} - -allprojects { - repositories { - jcenter() - google() - } -} - -buildscript { - repositories { - mavenCentral() - } - dependencies { - classpath 'org.owasp:dependency-check-gradle:5.2.4' - } -} -apply plugin: 'org.owasp.dependencycheck' -dependencyCheck { - format='JSON' -} diff --git a/tests/source-files/com.anpmech.launcher/settings.gradle b/tests/source-files/com.anpmech.launcher/settings.gradle deleted file mode 100644 index 4d775aa7..00000000 --- a/tests/source-files/com.anpmech.launcher/settings.gradle +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2015-2017 Hayai Software - * Copyright 2018 The KeikaiLauncher Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND - * either express or implied. See the License for the specific language governing permissions and - * limitations under the License. - */ - -include ':app' diff --git a/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle b/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle deleted file mode 100644 index bb55589e..00000000 --- a/tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle +++ /dev/null @@ -1,232 +0,0 @@ -apply plugin: 'com.android.application' -apply plugin: 'com.google.gms.google-services' -apply plugin: 'kotlin-android' - -apply from: '../jacoco.gradle' -apply from: '../ktlint.gradle' -apply from: '../detekt.gradle' -apply from: '../checkstyle.gradle' -apply from: '../sonarcube.gradle' - -def localPropertiesFile = rootProject.file("local.properties") -def localProperties = new Properties() - -if (!localPropertiesFile.exists()) { - localProperties.setProperty("RELEASE_STORE_PASSWORD", "") - localProperties.setProperty("RELEASE_KEY_PASSWORD", "") - localProperties.setProperty("RELEASE_KEY_ALIAS", "") - localProperties.setProperty("RELEASE_STORE_FILE", "keystore.jks") - Writer writer = new FileWriter(localPropertiesFile, false) - localProperties.store(writer, "empty, as creating the file is done manually via gpg") - writer.close() - - file(project(':app').projectDir.path + "/keystore.jks").text = "" -} - -localProperties.load(new FileInputStream(localPropertiesFile)) - -android { - compileSdkVersion 30 - // compileSdkVersion "android-S" - - signingConfigs { - googleplay { - keyAlias localProperties['RELEASE_KEY_ALIAS'] - keyPassword localProperties['RELEASE_KEY_PASSWORD'] - storeFile file(localProperties['RELEASE_STORE_FILE']) - storePassword localProperties['RELEASE_STORE_PASSWORD'] - } - } - - defaultConfig { - minSdkVersion 24 - targetSdkVersion 30 - // minSdkVersion "S" - // targetSdkVersion "S" - - applicationId "com.github.jameshnsears.quoteunquote" - - versionCode 73 - versionName "2.5.2" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - testInstrumentationRunnerArguments clearPackageData: 'true' - - javaCompileOptions { - annotationProcessorOptions { - arguments += ["room.schemaLocation": - "$projectDir/schemas".toString()] - } - } - } - - packagingOptions { - exclude "**/module-info.class" - exclude 'LICENSE' - exclude 'README.md' - } - - lintOptions { - abortOnError true - warningsAsErrors false - checkAllWarnings = true - xmlReport false - htmlReport true - } - - buildTypes { - def gitHash = { -> - def stdout = new ByteArrayOutputStream() - exec { - commandLine 'git', 'rev-parse', '--short=8', 'HEAD' - standardOutput = stdout - } - return stdout.toString().trim() - } - - release { - minifyEnabled true - shrinkResources true - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' - - buildConfigField("String", "GIT_HASH", "\"$gitHash\"") - buildConfigField("String", "DATABASE_QUOTATIONS", "\"quotations.db.prod\"") - } - debug { - testCoverageEnabled true - buildConfigField("String", "GIT_HASH", "\"$gitHash\"") - buildConfigField("String", "DATABASE_QUOTATIONS", "\"quotations.db.dev\"") - } - } - - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility 
JavaVersion.VERSION_1_8 - } - - kotlinOptions { - jvmTarget = JavaVersion.VERSION_1_8.toString() - } - - flavorDimensions 'Version' - productFlavors { - 'googleplay' { - dimension 'Version' - versionNameSuffix "-googleplay" - signingConfig signingConfigs.googleplay - } - 'googleplayS' { - dimension 'Version' - versionNameSuffix "-googleplay-S" - signingConfig signingConfigs.googleplay - } - 'fdroid' { - dimension 'Version' - versionNameSuffix "-fdroid" - isDefault true - } - 'fdroidS' { - dimension 'Version' - versionNameSuffix "-fdroid-S" - } - } - - sourceSets { - androidTest { - assets.srcDirs += files("$projectDir/schemas".toString()) - } - fdroid { - assets.srcDirs = ['src/main/assets'] - java.srcDirs = ['src/main/java', 'src/fdroid/java'] - } - fdroidS { - assets.srcDirs = ['src/main/assets'] - java.srcDirs = ['src/main/java', 'src/fdroid/java'] - } - googleplay { - assets.srcDirs = ['src/main/assets'] - java.srcDirs = ['src/main/java'] - } - googleplayS { - assets.srcDirs = ['src/main/assets'] - java.srcDirs = ['src/main/java'] - } - } - - testOptions { - // will make tests run very slowly on the emulator/device + affects coverage # - // execution 'ANDROIDX_TEST_ORCHESTRATOR' - - animationsDisabled true - unitTests { - includeAndroidResources = true - returnDefaultValues = true - all { - maxHeapSize = "1024m" - jacoco { - includeNoLocationClasses = true - excludes = ['jdk.internal.*'] - } - } - } - } - - buildFeatures { - viewBinding = true - } -} - -dependencies { - androidTestImplementation "androidx.arch.core:core-testing:2.1.0" - androidTestImplementation 'androidx.room:room-testing:2.3.0' - androidTestImplementation 'androidx.test:core:1.4.0-beta01' - androidTestImplementation 'androidx.test.ext:junit:1.1.2' - androidTestImplementation 'androidx.test:rules:1.3.0' - androidTestImplementation 'androidx.test:runner:1.3.0' - androidTestImplementation 'io.mockk:mockk-android:1.11.0' - - annotationProcessor 'androidx.room:room-compiler:2.3.0' - - debugImplementation 'androidx.fragment:fragment-testing:1.3.4' - debugImplementation 'androidx.test:core:1.4.0-beta01' - debugImplementation 'com.squareup.leakcanary:leakcanary-android:2.7' - - implementation 'androidx.activity:activity:1.2.3' - implementation 'androidx.fragment:fragment:1.3.4' - implementation 'androidx.constraintlayout:constraintlayout:2.0.4' - implementation 'androidx.core:core-ktx:1.5.0' - fdroidSImplementation 'androidx.core:core-ktx:1.6.0-beta02' - googleplaySImplementation 'androidx.core:core-ktx:1.6.0-beta02' - implementation 'androidx.legacy:legacy-support-v4:1.0.0' - implementation 'androidx.lifecycle:lifecycle-common-java8:2.3.1' - implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0' - implementation 'androidx.lifecycle:lifecycle-viewmodel-ktx:2.3.1' - implementation 'androidx.multidex:multidex:2.0.1' - implementation 'androidx.room:room-guava:2.3.0' - implementation 'androidx.room:room-runtime:2.3.0' - implementation 'androidx.room:room-rxjava2:2.3.0' - implementation 'com.google.android.material:material:1.3.0' - implementation 'com.jakewharton.rxbinding2:rxbinding:2.2.0' - implementation 'com.jakewharton.timber:timber:4.7.1' - implementation fileTree(include: ['*.jar'], dir: 'libs') - implementation 'io.reactivex.rxjava2:rxandroid:2.1.1' - implementation 'io.reactivex.rxjava2:rxjava:2.2.21' - implementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.5.10' - - implementation project(path: ':cloudLib') - implementation project(path: ':utilsLib') - - testImplementation 
'androidx.arch.core:core-testing:2.1.0' - testImplementation 'androidx.room:room-testing:2.3.0' - testImplementation 'androidx.test:core-ktx:1.3.0' - testImplementation 'androidx.test.ext:junit:1.1.2' - testImplementation 'androidx.test:rules:1.3.0' - testImplementation 'com.google.guava:guava:30.1.1-jre' - testImplementation 'io.mockk:mockk:1.11.0' - testImplementation 'junit:junit:4.13.2' - testImplementation 'org.robolectric:robolectric:4.5.1' -} - -repositories { - mavenCentral() -} diff --git a/tests/source-files/com.github.shadowsocks/core/build.gradle.kts b/tests/source-files/com.github.shadowsocks/core/build.gradle.kts deleted file mode 100644 index b9708b02..00000000 --- a/tests/source-files/com.github.shadowsocks/core/build.gradle.kts +++ /dev/null @@ -1,94 +0,0 @@ -import com.android.build.gradle.internal.tasks.factory.dependsOn - -plugins { - id("com.android.library") - id("org.mozilla.rust-android-gradle.rust-android") - kotlin("android") - kotlin("kapt") - id("kotlin-parcelize") -} - -setupCore() - -android { - defaultConfig { - consumerProguardFiles("proguard-rules.pro") - - externalNativeBuild.ndkBuild { - abiFilters("armeabi-v7a", "arm64-v8a", "x86", "x86_64") - arguments("-j${Runtime.getRuntime().availableProcessors()}") - } - - kapt.arguments { - arg("room.incremental", true) - arg("room.schemaLocation", "$projectDir/schemas") - } - } - - externalNativeBuild.ndkBuild.path("src/main/jni/Android.mk") - - sourceSets.getByName("androidTest") { - assets.setSrcDirs(assets.srcDirs + files("$projectDir/schemas")) - } -} - -cargo { - module = "src/main/rust/shadowsocks-rust" - libname = "sslocal" - targets = listOf("arm", "arm64", "x86", "x86_64") - profile = findProperty("CARGO_PROFILE")?.toString() ?: currentFlavor - extraCargoBuildArguments = listOf("--bin", libname!!) 
- featureSpec.noDefaultBut(arrayOf( - "stream-cipher", - "aead-cipher-extra", - "logging", - "local-flow-stat", - "local-dns")) - exec = { spec, toolchain -> - spec.environment("RUST_ANDROID_GRADLE_LINKER_WRAPPER_PY", "$projectDir/$module/../linker-wrapper.py") - spec.environment("RUST_ANDROID_GRADLE_TARGET", "target/${toolchain.target}/$profile/lib$libname.so") - } -} - -tasks.whenTaskAdded { - when (name) { - "mergeDebugJniLibFolders", "mergeReleaseJniLibFolders" -> dependsOn("cargoBuild") - } -} - -tasks.register("cargoClean") { - executable("cargo") // cargo.cargoCommand - args("clean") - workingDir("$projectDir/${cargo.module}") -} -tasks.clean.dependsOn("cargoClean") - -dependencies { - val coroutinesVersion = "1.5.2" - val roomVersion = "2.3.0" - val workVersion = "2.7.0-beta01" - - api(project(":plugin")) - api("androidx.core:core-ktx:1.6.0") - // https://android-developers.googleblog.com/2019/07/android-q-beta-5-update.html - api("androidx.drawerlayout:drawerlayout:1.1.1") - api("androidx.fragment:fragment-ktx:1.3.6") - api("com.google.android.material:material:1.4.0") - - api("androidx.lifecycle:lifecycle-livedata-core-ktx:$lifecycleVersion") - api("androidx.preference:preference:1.1.1") - api("androidx.room:room-runtime:$roomVersion") - api("androidx.work:work-multiprocess:$workVersion") - api("androidx.work:work-runtime-ktx:$workVersion") - api("com.google.android.gms:play-services-oss-licenses:17.0.0") - api("com.google.code.gson:gson:2.8.8") - api("com.google.firebase:firebase-analytics-ktx:19.0.1") - api("com.google.firebase:firebase-crashlytics:18.2.1") - api("com.jakewharton.timber:timber:5.0.1") - api("dnsjava:dnsjava:3.4.1") - api("org.jetbrains.kotlinx:kotlinx-coroutines-android:$coroutinesVersion") - api("org.jetbrains.kotlinx:kotlinx-coroutines-play-services:$coroutinesVersion") - kapt("androidx.room:room-compiler:$roomVersion") - androidTestImplementation("androidx.room:room-testing:$roomVersion") - androidTestImplementation("androidx.test.ext:junit-ktx:1.1.3") -} diff --git a/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts b/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts deleted file mode 100644 index 874ea857..00000000 --- a/tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts +++ /dev/null @@ -1,28 +0,0 @@ -plugins { - id("com.android.application") - id("com.google.android.gms.oss-licenses-plugin") - id("com.google.gms.google-services") - id("com.google.firebase.crashlytics") - kotlin("android") - id("kotlin-parcelize") -} - -setupApp() - -android.defaultConfig.applicationId = "com.github.shadowsocks" - -dependencies { - val cameraxVersion = "1.0.1" - - implementation("androidx.browser:browser:1.3.0") - implementation("androidx.camera:camera-camera2:$cameraxVersion") - implementation("androidx.camera:camera-lifecycle:$cameraxVersion") - implementation("androidx.camera:camera-view:1.0.0-alpha28") - implementation("androidx.constraintlayout:constraintlayout:2.1.0") - implementation("androidx.lifecycle:lifecycle-runtime-ktx:$lifecycleVersion") - implementation("com.google.mlkit:barcode-scanning:17.0.0") - implementation("com.google.zxing:core:3.4.1") - implementation("com.takisoft.preferencex:preferencex-simplemenu:1.1.0") - implementation("com.twofortyfouram:android-plugin-api-for-locale:1.0.4") - implementation("me.zhanghai.android.fastscroll:library:1.1.7") -} diff --git a/tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml b/tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml 
deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml b/tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/com.infomaniak.mail/settings.gradle b/tests/source-files/com.infomaniak.mail/settings.gradle deleted file mode 100644 index bb9b1161..00000000 --- a/tests/source-files/com.infomaniak.mail/settings.gradle +++ /dev/null @@ -1,44 +0,0 @@ -pluginManagement { - repositories { - gradlePluginPortal() - google() - mavenCentral() - } -} - -dependencyResolutionManagement { - repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) - repositories { - google() - mavenCentral() - maven { url 'https://jitpack.io' } - } - versionCatalogs { - create("core") { from(files("Core/gradle/core.versions.toml")) } - } -} - -rootProject.name = 'Infomaniak Mail' -include ':app', - ':Core:AppIntegrity', - ':Core:Auth', - ':Core:Avatar', - ':Core:Coil', - ':Core:Compose:Basics', - ':Core:Compose:Margin', - ':Core:Compose:MaterialThemeFromXml', - ':Core:CrossAppLogin', - ':Core:CrossAppLoginUI', - ':Core:FragmentNavigation', - ':Core:Legacy', - ':Core:Legacy:AppLock', - ':Core:Legacy:BugTracker', - ':Core:Legacy:Confetti', - ':Core:Legacy:Stores', - ':Core:Matomo', - ':Core:MyKSuite', - ':Core:Network', - ':Core:Network:Models', - ':Core:Sentry', - ':EmojiComponents', - ':HtmlCleaner' diff --git a/tests/source-files/com.integreight.onesheeld/build.gradle b/tests/source-files/com.integreight.onesheeld/build.gradle deleted file mode 100644 index 4aa9de97..00000000 --- a/tests/source-files/com.integreight.onesheeld/build.gradle +++ /dev/null @@ -1,16 +0,0 @@ -// Top-level build file where you can add configuration options common to all sub-projects/modules. 
-buildscript { - repositories { - jcenter() - } - dependencies { - classpath 'com.android.tools.build:gradle:2.3.2' - classpath 'com.google.gms:google-services:3.0.0' - } -} - -allprojects { - repositories { - jcenter() - } -} diff --git a/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index c88a02a7..00000000 --- a/tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Wed Mar 15 14:07:53 EET 2017 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip diff --git a/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle b/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle deleted file mode 100644 index 83e327f8..00000000 --- a/tests/source-files/com.integreight.onesheeld/localeapi/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 25 - buildToolsVersion "25.0.3" - - defaultConfig { - minSdkVersion 9 - targetSdkVersion 17 - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' - } - } -} - -dependencies { - compile 'com.android.support:support-v4:25.1.0' -} diff --git a/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml deleted file mode 100644 index 3178f4f4..00000000 --- a/tests/source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle b/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle deleted file mode 100644 index 60559586..00000000 --- a/tests/source-files/com.integreight.onesheeld/oneSheeld/build.gradle +++ /dev/null @@ -1,129 +0,0 @@ -buildscript { - repositories { - jcenter() - maven { url 'https://maven.fabric.io/public' } - } - - dependencies { - classpath 'io.fabric.tools:gradle:1.+' - } -} -apply plugin: 'com.android.application' -apply plugin: 'io.fabric' - -repositories { - jcenter() - maven { url 'https://maven.fabric.io/public' } - maven { url "https://jitpack.io" } -} - - -android { - compileSdkVersion 25 - buildToolsVersion "25.0.3" - - defaultConfig { - applicationId "com.integreight.onesheeld" - minSdkVersion 9 - targetSdkVersion 25 - versionCode 170521 - versionName "1.9.0" - archivesBaseName = "1Sheeld.v$versionName.$versionCode" - buildConfigField "long", "TIMESTAMP", System.currentTimeMillis() + "L" - } - - buildTypes.all { - ext.enableCrashlytics = isCrashlyticsPropertiesAvailable() - } - - buildTypes { - debug { - versionNameSuffix getWorkingBranchSuffix() - minifyEnabled true - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - release { - minifyEnabled true - debuggable false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - - packagingOptions { - exclude 'META-INF/LICENSE' - exclude 'META-INF/NOTICE' - exclude 'META-INF/LICENSE.txt' - exclude 'META-INF/NOTICE.txt' - } - - lintOptions { - abortOnError false - } - - useLibrary 
'org.apache.http.legacy' -} - -dependencies { - compile project(':localeapi') - compile project(':pullToRefreshlibrary') - compile project(':quickReturnHeader') - compile project(':pagerIndicator') - compile fileTree(dir: 'libs', include: ['*.jar']) - compile 'com.android.support:support-v4:25.3.1' - compile 'com.facebook.android:facebook-android-sdk:4.5.0' - compile 'com.google.android.gms:play-services-analytics:10.0.1' - compile 'com.google.android.gms:play-services-location:10.0.1' - compile 'com.google.android.gms:play-services-auth:10.0.1' - compile 'com.google.android.gms:play-services-vision:10.0.1' - compile 'com.loopj.android:android-async-http:1.4.9' - compile 'com.snappydb:snappydb-lib:0.5.0' - compile 'com.esotericsoftware.kryo:kryo:2.24.0' - compile 'com.github.hotchemi:android-rate:0.5.0' - compile('com.crashlytics.sdk.android:crashlytics:2.6.8@aar') { - transitive = true; - } - - compile('com.google.android.gms:play-services-identity:10.0.1') { - transitive = true; - } - compile('com.google.api-client:google-api-client-android:1.22.0') { - exclude group: 'org.apache.httpcomponents' - } - compile('com.google.apis:google-api-services-gmail:v1-rev48-1.22.0') { - exclude group: 'org.apache.httpcomponents' - } - compile 'org.twitter4j:twitter4j-core:4.0.4' - compile 'org.twitter4j:twitter4j-async:4.0.4' - compile 'org.twitter4j:twitter4j-stream:4.0.4' - compile 'cz.msebera.android:httpclient:4.4.1.1' - compile 'net.sf.supercsv:super-csv:2.4.0' - compile 'com.github.amlcurran.showcaseview:library:5.4.3' - compile 'com.github.emanzanoaxa:RippleEffect:52ea2a0ab6' - compile 'com.drewnoakes:metadata-extractor:2.8.1' - compile 'com.integreight.onesheeld:sdk:2.2.0' - compile 'com.google.firebase:firebase-core:10.0.1' - compile 'com.google.firebase:firebase-messaging:10.0.1' -} - -def isCrashlyticsPropertiesAvailable() { - return new File("./oneSheeld/fabric.properties").exists() -} - -def getWorkingBranchSuffix() { - def workingBranchSuffix = "" - try { - def workingBranch = "git --git-dir=${rootDir}/.git --work-tree=${rootDir} rev-parse --abbrev-ref HEAD".execute().text.trim() - workingBranchSuffix = (workingBranch != "") ? 
" - branch:" + workingBranch : "" - } - catch (all) { - } - return workingBranchSuffix -} - -def isGoogleServicesFileAvailable() { - return new File("./oneSheeld/google-services.json").exists() -} - -if (isGoogleServicesFileAvailable()) { - apply plugin: 'com.google.gms.google-services' -} \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml deleted file mode 100644 index 05350254..00000000 --- a/tests/source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml +++ /dev/null @@ -1,280 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle b/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle deleted file mode 100644 index 254932bd..00000000 --- a/tests/source-files/com.integreight.onesheeld/pagerIndicator/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 25 - buildToolsVersion "25.0.3" - - defaultConfig { - minSdkVersion 9 - targetSdkVersion 9 - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' - } - } -} - -dependencies { - compile 'com.android.support:support-v4:25.1.0' -} diff --git a/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml deleted file mode 100644 index 4314f1b3..00000000 --- a/tests/source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - diff --git a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle deleted file mode 100644 index 7db12afa..00000000 --- a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle +++ /dev/null @@ -1,20 +0,0 @@ -apply plugin: 'com.android.library' -android { - compileSdkVersion 25 - buildToolsVersion "25.0.3" - - defaultConfig { - minSdkVersion 9 - targetSdkVersion 9 - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' - } - } -} - -dependencies { -} \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml deleted file mode 100644 index c3db5673..00000000 --- a/tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle deleted file mode 100644 index 83e327f8..00000000 --- 
a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -apply plugin: 'com.android.library' - -android { - compileSdkVersion 25 - buildToolsVersion "25.0.3" - - defaultConfig { - minSdkVersion 9 - targetSdkVersion 17 - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' - } - } -} - -dependencies { - compile 'com.android.support:support-v4:25.1.0' -} diff --git a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml b/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml deleted file mode 100644 index 235aef0b..00000000 --- a/tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/tests/source-files/com.integreight.onesheeld/settings.gradle b/tests/source-files/com.integreight.onesheeld/settings.gradle deleted file mode 100644 index fe8d1fea..00000000 --- a/tests/source-files/com.integreight.onesheeld/settings.gradle +++ /dev/null @@ -1,5 +0,0 @@ -include ':pagerIndicator' -include ':pullToRefreshlibrary' -include ':quickReturnHeader' -include ':localeapi' -include ':oneSheeld' diff --git a/tests/source-files/com.jens.automation2/app/build.gradle b/tests/source-files/com.jens.automation2/app/build.gradle deleted file mode 100644 index c2ffeda9..00000000 --- a/tests/source-files/com.jens.automation2/app/build.gradle +++ /dev/null @@ -1,78 +0,0 @@ -plugins { - id 'com.android.application' -} - -android { - compileSdkVersion 29 - - defaultConfig { - applicationId "com.jens.automation2" - minSdkVersion 16 - compileSdkVersion 29 - buildToolsVersion '29.0.2' - useLibrary 'org.apache.http.legacy' - versionCode 96 - versionName "1.6.21" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' - } - } - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - - lintOptions { - checkReleaseBuilds false - abortOnError false - } - - flavorDimensions "version" - - productFlavors - { - googlePlayFlavor - { - dimension "version" -// applicationIdSuffix ".googlePlay" - versionNameSuffix "-googlePlay" - targetSdkVersion 29 - } - - fdroidFlavor - { - dimension "version" -// applicationIdSuffix ".fdroid" - versionNameSuffix "-fdroid" - targetSdkVersion 28 - } - - apkFlavor - { - dimension "version" -// applicationIdSuffix ".apk" - versionNameSuffix "-apk" - targetSdkVersion 28 - } - } -} - -dependencies { - googlePlayFlavorImplementation 'com.google.firebase:firebase-appindexing:19.2.0' - googlePlayFlavorImplementation 'com.google.android.gms:play-services-location:17.1.0' - - apkFlavorImplementation 'com.google.firebase:firebase-appindexing:19.2.0' - apkFlavorImplementation 'com.google.android.gms:play-services-location:17.1.0' - - implementation 'androidx.appcompat:appcompat:1.2.0' - implementation 'com.google.android.material:material:1.3.0' - testImplementation 'junit:junit:4.+' - androidTestImplementation 'androidx.test.ext:junit:1.1.2' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' -} \ No newline at end of file diff --git a/tests/source-files/com.jens.automation2/build.gradle 
b/tests/source-files/com.jens.automation2/build.gradle deleted file mode 100644 index 1ed6b0cf..00000000 --- a/tests/source-files/com.jens.automation2/build.gradle +++ /dev/null @@ -1,77 +0,0 @@ -plugins { - id 'com.android.application' -} - -android { - compileSdkVersion 29 - - defaultConfig { - applicationId "com.jens.automation2" - minSdkVersion 16 - compileSdkVersion 29 - buildToolsVersion '29.0.2' - useLibrary 'org.apache.http.legacy' - versionCode 105 - versionName "1.6.34" - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' - } - } - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - - lintOptions { - checkReleaseBuilds false - abortOnError false - } - - flavorDimensions "version" - - productFlavors - { - googlePlayFlavor - { - dimension "version" -// applicationIdSuffix ".googlePlay" - versionNameSuffix "-googlePlay" - targetSdkVersion 29 - } - - fdroidFlavor - { - dimension "version" -// applicationIdSuffix ".fdroid" - versionNameSuffix "-fdroid" - targetSdkVersion 28 - } - - apkFlavor - { - dimension "version" -// applicationIdSuffix ".apk" - versionNameSuffix "-apk" - targetSdkVersion 28 - } - } -} - -dependencies { - - - - implementation 'com.linkedin.dexmaker:dexmaker:2.25.0' - - implementation 'androidx.appcompat:appcompat:1.2.0' - implementation 'com.google.android.material:material:1.3.0' - testImplementation 'junit:junit:4.+' - androidTestImplementation 'androidx.test.ext:junit:1.1.2' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' -} diff --git a/tests/source-files/com.kunzisoft.testcase/build.gradle b/tests/source-files/com.kunzisoft.testcase/build.gradle index 13cc4bc2..4319e20d 100644 --- a/tests/source-files/com.kunzisoft.testcase/build.gradle +++ b/tests/source-files/com.kunzisoft.testcase/build.gradle @@ -49,25 +49,6 @@ android { // Version code : 1 // Version name : 1.0-free } - underscore { - applicationIdSuffix = ".underscore" - versionCode 2018_04_30 - versionName "20180430-underscore" - buildConfigField "boolean", "FULL_VERSION", "true" - buildConfigField "boolean", "CLOSED_STORE", "true" - // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix.underscore - // Version code : 2018_04_30 - // Version name : 20180430-underscore - } - underscore_first { - applicationIdSuffix = ".underscore_first" - versionCode _04_30 - buildConfigField "boolean", "FULL_VERSION", "true" - buildConfigField "boolean", "CLOSED_STORE", "true" - // ApplicationId : com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first - // Version code : 1 - // Version name : 1.0 - } } } diff --git a/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts b/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts deleted file mode 100644 index 261cfe2f..00000000 --- a/tests/source-files/com.lolo.io.onelist/app/build.gradle.kts +++ /dev/null @@ -1,118 +0,0 @@ -import java.io.FileInputStream -import java.util.Properties - -plugins { - alias(libs.plugins.android.application) - alias(libs.plugins.kotlin.android) - alias(libs.plugins.google.services) - alias(libs.plugins.firebase.crashlytics) - alias(libs.plugins.ksp) -} -android { - namespace = "com.lolo.io.onelist" - - val versionPropsFile = file("../version.properties") - var versionCodeCI: Int? 
= null - if (versionPropsFile.canRead()) { - val versionProps = Properties() - versionProps.load(FileInputStream(versionPropsFile)) - val v = versionProps["VERSION_CODE"] - versionCodeCI = (versionProps["VERSION_CODE"] as String).toInt() - } - - - defaultConfig { - multiDexEnabled = true - applicationId = "com.lolo.io.onelist" - compileSdk = 34 - minSdk = 23 - targetSdk = 34 - versionCode = versionCodeCI ?: 19 - versionName = "1.4.2" - vectorDrawables.useSupportLibrary = true - } - - androidResources { - generateLocaleConfig = true - } - - buildFeatures { - viewBinding = true - buildConfig = true - } - - - ksp { - arg("room.schemaLocation", "$projectDir/schemas") - } - - buildTypes { - getByName("debug") { - applicationIdSuffix = ".debug" - versionNameSuffix = "-DEBUG" - resValue("string", "app_name", "1ListDev") - } - getByName("release") { - isMinifyEnabled = true - isShrinkResources = true - - proguardFiles( - getDefaultProguardFile("proguard-android-optimize.txt"), - "proguard-rules.pro" - ) - resValue("string", "app_name", "1List") - } - } - - compileOptions { - sourceCompatibility = JavaVersion.VERSION_17 - targetCompatibility = JavaVersion.VERSION_17 - } - -} -repositories { - google() - mavenCentral() - maven { url = uri("https://jitpack.io") } -} - -dependencies { - - // android - implementation(libs.androidx.core.splashscreen) - implementation(libs.androidx.preference.ktx) - implementation(libs.androidx.lifecycle.extensions) - implementation(libs.androidx.legacy.support.v4) - implementation(libs.androidx.appcompat) - - // android - design - implementation(libs.constraint.layout) - implementation(libs.androidx.recyclerview) - implementation(libs.flexbox) - implementation(libs.material) - implementation(libs.androidx.swiperefreshlayout) - - // kotlin - implementation(libs.kotlinx.coroutines.core) - implementation(libs.kotlin.stdlib.jdk7) - - // firebase - implementation(libs.firebase.crashlytics) - - // koin di - implementation(libs.koin.android) - implementation(libs.koin.androidx.navigation) - - // room - implementation(libs.androidx.room.runtime) - implementation(libs.androidx.room.ktx) - ksp(libs.androidx.room.compiler) - - // json - implementation(libs.gson) - - // other libs - implementation(libs.whatsnew) - implementation(libs.storage) - implementation(libs.advrecyclerview) -} diff --git a/tests/source-files/com.lolo.io.onelist/build.gradle.kts b/tests/source-files/com.lolo.io.onelist/build.gradle.kts deleted file mode 100644 index baad9726..00000000 --- a/tests/source-files/com.lolo.io.onelist/build.gradle.kts +++ /dev/null @@ -1,11 +0,0 @@ -plugins { - alias(libs.plugins.android.application) apply false - alias(libs.plugins.kotlin.android) apply false - alias(libs.plugins.google.services) apply false - alias(libs.plugins.firebase.crashlytics) apply false - alias(libs.plugins.ksp) apply false -} - -tasks.register("clean", Delete::class) { - delete(rootProject.layout.buildDirectory) -} \ No newline at end of file diff --git a/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml b/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml deleted file mode 100644 index d6cf9869..00000000 --- a/tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml +++ /dev/null @@ -1,58 +0,0 @@ -[versions] -advrecyclerview = "1.0.0" -appcompat = "1.6.1" -constraint-layout = "2.0.4" -crashlytics = "18.6.2" -firebase-crashlytics-gradle-plugin = "2.9.9" -flexbox = "3.0.0" -gson = "2.5.6" -kotlin = "1.9.20" -kotlin-coroutines = "1.6.4" -legacy-support-v4 = "1.0.0" 
-lifecycle-extensions = "2.2.0" -material = "1.11.0" -preference-ktx = "1.2.1" -recyclerview = "1.3.2" -splashscreen ="1.0.1" -koin ="3.5.0" -room="2.6.1" -storage = "1.5.5" -swiperefreshlayout = "1.1.0" -whatsnew = "0.1.7" -ksp-plugin="1.9.20-1.0.14" - -# plugins versions -android-application-plugin="8.3.0" -kotlin-android-plugin="1.9.22" -google-services-plugin = "4.4.1" - -[libraries] -advrecyclerview = { module = "com.h6ah4i.android.widget.advrecyclerview:advrecyclerview", version.ref = "advrecyclerview" } -androidx-appcompat = { module = "androidx.appcompat:appcompat", version.ref = "appcompat" } -androidx-core-splashscreen = { module = "androidx.core:core-splashscreen", version.ref = "splashscreen" } -androidx-legacy-support-v4 = { module = "androidx.legacy:legacy-support-v4", version.ref = "legacy-support-v4" } -androidx-lifecycle-extensions = { module = "androidx.lifecycle:lifecycle-extensions", version.ref = "lifecycle-extensions" } -androidx-preference-ktx = { module = "androidx.preference:preference-ktx", version.ref = "preference-ktx" } -androidx-recyclerview = { module = "androidx.recyclerview:recyclerview", version.ref = "recyclerview" } -androidx-room-runtime = { group = "androidx.room", name = "room-runtime", version.ref = "room" } -androidx-room-ktx = { group = "androidx.room", name = "room-ktx", version.ref = "room" } -androidx-room-compiler = { group = "androidx.room", name = "room-compiler", version.ref = "room" } -androidx-swiperefreshlayout = { module = "androidx.swiperefreshlayout:swiperefreshlayout", version.ref = "swiperefreshlayout" } -constraint-layout = { module = "com.android.support.constraint:constraint-layout", version.ref = "constraint-layout" } -firebase-crashlytics = { module = "com.google.firebase:firebase-crashlytics", version.ref = "crashlytics" } -flexbox = { module = "com.google.android.flexbox:flexbox", version.ref = "flexbox" } -gson = { module = "org.immutables:gson", version.ref = "gson" } -koin-android = { module = "io.insert-koin:koin-android", version.ref = "koin" } -koin-androidx-navigation = { module = "io.insert-koin:koin-androidx-navigation", version.ref = "koin" } -kotlin-stdlib-jdk7 = { module = "org.jetbrains.kotlin:kotlin-stdlib-jdk7", version.ref = "kotlin" } -kotlinx-coroutines-core = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref = "kotlin-coroutines" } -material = { module = "com.google.android.material:material", version.ref = "material" } -storage = { module = "com.anggrayudi:storage", version.ref = "storage" } -whatsnew = { module = "io.github.tonnyl:whatsnew", version.ref = "whatsnew" } - -[plugins] -ksp = { id = "com.google.devtools.ksp", version.ref = "ksp-plugin" } -android-application = { id = "com.android.application", version.ref = "android-application-plugin" } -kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin-android-plugin" } -google-services = { id = "com.google.gms.google-services", version.ref = "google-services-plugin" } -firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "firebase-crashlytics-gradle-plugin" } \ No newline at end of file diff --git a/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index e411586a..00000000 --- a/tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,5 +0,0 @@ -distributionBase=GRADLE_USER_HOME 
-distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists diff --git a/tests/source-files/com.lolo.io.onelist/settings.gradle b/tests/source-files/com.lolo.io.onelist/settings.gradle deleted file mode 100644 index 533aeeeb..00000000 --- a/tests/source-files/com.lolo.io.onelist/settings.gradle +++ /dev/null @@ -1,9 +0,0 @@ -pluginManagement { - repositories { - google() - mavenCentral() - gradlePluginPortal() - } -} - -include 'app' diff --git a/tests/source-files/com.seafile.seadroid2/app/build.gradle b/tests/source-files/com.seafile.seadroid2/app/build.gradle deleted file mode 100644 index 55813267..00000000 --- a/tests/source-files/com.seafile.seadroid2/app/build.gradle +++ /dev/null @@ -1,122 +0,0 @@ -apply plugin: 'com.android.application' - -android { - compileSdkVersion rootProject.ext.compileSdkVersion - - defaultConfig { - applicationId 'com.seafile.seadroid2' - minSdkVersion rootProject.ext.minSdkVersion - targetSdkVersion rootProject.ext.targetSdkVersion - versionCode 93 - versionName "2.2.18" - multiDexEnabled true - resValue "string", "authorities", applicationId + '.cameraupload.provider' - resValue "string", "account_type", "com.seafile.seadroid2.account.api2" - buildConfigField "String", "ACCOUNT_TYPE", '"com.seafile.seadroid2.account.api2"' - ndk { - abiFilters 'armeabi', 'armeabi-v7a', 'x86' - } - } - - lintOptions { - abortOnError false - disable 'MissingTranslation' - } - - - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - - signingConfigs { - debug { -// def props = new Properties() -// props.load(new FileInputStream(project.file("debugkey.properties"))) -// storeFile project.file(props.keyStore) -// storePassword props.keyStorePassword -// keyAlias props.keyAlias -// keyPassword props.keyAliasPassword - } - release { -// Signing code for manual signing -// storeFile file(System.console().readLine("\n\$ Enter keystore path: ")) -// storePassword System.console().readPassword("\n\$ Enter keystore password: ").toString() -// keyAlias System.console().readLine("\n\$ Enter key alias: ") -// keyPassword System.console().readPassword("\n\$ Enter key password: ").toString() - - def props = new Properties() - props.load(new FileInputStream(project.file("key.properties"))) - storeFile project.file(props.keyStore) - storePassword props.keyStorePassword - keyAlias props.keyAlias - keyPassword props.keyAliasPassword - } - } - - buildTypes { - debug { - debuggable true - applicationIdSuffix ".debug" - resValue "string", "authorities", defaultConfig.applicationId + '.debug.cameraupload.provider' - resValue "string", "account_type", "com.seafile.seadroid2.debug.account.api2" - buildConfigField "String", "ACCOUNT_TYPE", '"com.seafile.seadroid2.debug.account.api2"' - signingConfig signingConfigs.debug - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' - applicationVariants.all { variant -> - variant.outputs.all { output -> - if (output.outputFile != null && output.outputFile.name.endsWith('.apk')) { - if (variant.name == "debug") - outputFileName = "seafile-debug-" + defaultConfig.versionName + ".apk" - } - } - } - } - release { - signingConfig signingConfigs.release - minifyEnabled true - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt' - applicationVariants.all { variant -> - variant.outputs.all { output -> - if 
(output.outputFile != null && output.outputFile.name.endsWith('.apk')) { - if (variant.name == "release") { - outputFileName = "seafile-" + defaultConfig.versionName + ".apk" - } - } - } - } - } - } - - allprojects { - repositories { - maven { url 'https://jitpack.io' } - } - } - dependencies { - implementation fileTree(include: ['*.jar'], dir: 'libs') - implementation "com.android.support:appcompat-v7:${rootProject.ext.supportLibVersion}" - implementation "com.android.support:design:${rootProject.ext.supportLibVersion}" - implementation 'com.github.JakeWharton:ViewPagerIndicator:2.4.1' - implementation 'com.github.kevinsawicki:http-request:6.0' - implementation 'commons-io:commons-io:2.4' - implementation 'com.google.guava:guava:18.0' - implementation 'com.nostra13.universalimageloader:universal-image-loader:1.9.3' - implementation 'com.cocosw:bottomsheet:1.3.1' - implementation 'com.commit451:PhotoView:1.2.4' - implementation 'com.joanzapata.iconify:android-iconify-material-community:2.2.1' - testImplementation 'junit:junit:4.12' - testImplementation 'org.robolectric:robolectric:3.0' - implementation 'com.madgag.spongycastle:core:1.54.0.0' - implementation 'com.madgag.spongycastle:prov:1.54.0.0' - implementation 'com.shuyu:gsyVideoPlayer-java:2.1.0' - implementation 'com.shuyu:gsyVideoPlayer-ex_so:2.1.0' - implementation 'com.squareup.okhttp3:okhttp:3.9.1' - - implementation 'com.yydcdut:markdown-processor:0.1.3' - implementation 'ren.qinc.edit:lib:0.0.5'//editor undo redo - implementation 'com.github.tiagohm.MarkdownView:library:0.19.0' - } -} - diff --git a/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts b/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts deleted file mode 100644 index 88c03a08..00000000 --- a/tests/source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts +++ /dev/null @@ -1,306 +0,0 @@ -import com.android.build.api.transform.* -import com.android.build.api.variant.VariantInfo -import com.android.utils.FileUtils -import org.gradle.internal.os.OperatingSystem -import org.aspectj.bridge.IMessage -import org.aspectj.bridge.MessageHandler -import org.aspectj.tools.ajc.Main - -plugins { - id("com.android.application") - kotlin("android") - kotlin("kapt") -} - -dependencies { - implementation(project(":cats")) - implementation(project(":relay")) - - implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.7.0") - - // these two are required for logging within the relay module. todo remove? 
- implementation("org.slf4j:slf4j-api:1.7.36") - implementation("com.noveogroup.android:android-logger:1.3.6") - - implementation("androidx.core:core-ktx:1.8.0") - implementation("androidx.legacy:legacy-support-v4:1.0.0") - implementation("androidx.annotation:annotation:1.3.0") // For @Nullable/@NonNull - implementation("androidx.appcompat:appcompat:1.4.2") - implementation("androidx.emoji2:emoji2:1.1.0") - implementation("androidx.preference:preference-ktx:1.2.0") // preference fragment & al - implementation("androidx.legacy:legacy-preference-v14:1.0.0") // styling for the fragment - implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.4.1") - implementation("androidx.lifecycle:lifecycle-common-java8:2.4.1") - implementation("androidx.sharetarget:sharetarget:1.2.0-rc01") - - implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.6.2") - - implementation("com.github.bumptech.glide:glide:4.13.2") - kapt("com.github.bumptech.glide:compiler:4.13.2") - implementation("com.squareup.okhttp3:okhttp:4.10.0") - - val roomVersion = "2.4.2" - implementation("androidx.room:room-runtime:$roomVersion") - annotationProcessor("androidx.room:room-compiler:$roomVersion") - kapt("androidx.room:room-compiler:$roomVersion") - - implementation("org.yaml:snakeyaml:1.30") - - implementation("org.bouncycastle:bcpkix-jdk15on:1.70") - - // needed for thread-safe date formatting as SimpleDateFormat isn"t thread-safe - // the alternatives, including apache commons and threetenabp, seem to be much slower - // todo perhaps replace with core library desugaring, if it"s fast - implementation("net.danlew:android.joda:2.10.14") - - implementation("org.greenrobot:eventbus:3.3.1") - - debugImplementation("org.aspectj:aspectjrt:1.9.9.1") - debugImplementation("com.squareup.leakcanary:leakcanary-android:2.9.1") - - testImplementation("org.junit.jupiter:junit-jupiter:5.8.2") - testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.2") -} - -tasks.withType { - options.encoding = "UTF-8" -} - -android { - compileSdk = 31 - - defaultConfig { - versionCode = 1_08_01 - versionName = "1.8.1" - - minSdk = 21 - targetSdk = 31 - buildConfigField("String", "VERSION_BANNER", "\"" + versionBanner() + "\"") - - vectorDrawables.useSupportLibrary = true - - javaCompileOptions { - annotationProcessorOptions { - arguments["room.schemaLocation"] = "$projectDir/schemas" - arguments["room.incremental"] = "true" - } - } - - kotlinOptions { - freeCompilerArgs = listOf( - "-language-version", "1.7", - "-api-version", "1.7") - jvmTarget = "11" - } - } - - signingConfigs { - create("dev") { - try { - storeFile = file(project.properties["devStorefile"] as String) - storePassword = project.properties["devStorePassword"] as String - keyAlias = project.properties["devKeyAlias"] as String - keyPassword = project.properties["devKeyPassword"] as String - } catch (e: Exception) { - project.logger.warn("WARNING: Set the values devStorefile, devStorePassword, " + - "devKeyAlias, and devKeyPassword " + - "in ~/.gradle/gradle.properties to sign the release.") - } - } - } - - buildTypes { - getByName("debug") { - applicationIdSuffix = ".debug" - versionNameSuffix = "-debug" - } - - getByName("release") { - isMinifyEnabled = true - proguardFiles(getDefaultProguardFile("proguard-android-optimize.txt"), - "proguard-rules.pro", - "../cats/proguard-rules.pro") - // kotlinx-coroutines-core debug-only artifact - // see https://github.com/Kotlin/kotlinx.coroutines#avoiding-including-the-debug-infrastructure-in-the-resulting-apk - packagingOptions { 
- resources.excludes += "DebugProbesKt.bin" - } - } - - create("dev") { - initWith(getByName("release")) - matchingFallbacks += listOf("release") - applicationIdSuffix = ".dev" - versionNameSuffix = "-dev" - signingConfig = signingConfigs.getByName("dev") - } - - compileOptions { - sourceCompatibility = JavaVersion.VERSION_11 - targetCompatibility = JavaVersion.VERSION_11 - } - } - - buildFeatures { - viewBinding = true - } -} - -fun versionBanner(): String { - val os = org.apache.commons.io.output.ByteArrayOutputStream() - project.exec { - commandLine = "git describe --long".split(" ") - standardOutput = os - } - return String(os.toByteArray()).trim() -} - -//////////////////////////////////////////////////////////////////////////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////// cats -//////////////////////////////////////////////////////////////////////////////////////////////////// - -// ajc gets hold of some files such as R.jar, and on Windows it leads to errors such as: -// The process cannot access the file because it is being used by another process -// to avoid these, weave in a process, which `javaexec` will helpfully launch for us. - -fun weave(classPath: Iterable, aspectPath: Iterable, input: Iterable, output: File) { - val runInAProcess = OperatingSystem.current().isWindows - val bootClassPath = android.bootClasspath - - println(if (runInAProcess) ":: weaving in a process..." else ":: weaving...") - println(":: boot class path: $bootClassPath") - println(":: class path: $classPath") - println(":: aspect path: $aspectPath") - println(":: input: $input") - println(":: output: $output") - - val arguments = listOf("-showWeaveInfo", - "-1.8", - "-preserveAllLocals", - "-bootclasspath", bootClassPath.asArgument, - "-classpath", classPath.asArgument, - "-aspectpath", aspectPath.asArgument, - "-inpath", input.asArgument, - "-d", output.absolutePath) - - if (runInAProcess) { - javaexec { - classpath = weaving - main = "org.aspectj.tools.ajc.Main" - args = arguments - } - } else { - val handler = MessageHandler(true) - Main().run(arguments.toTypedArray(), handler) - - val log = project.logger - for (message in handler.getMessages(null, true)) { - when (message.kind) { - IMessage.DEBUG -> log.debug("DEBUG " + message.message, message.thrown) - IMessage.INFO -> log.info("INFO: " + message.message, message.thrown) - IMessage.WARNING -> log.warn("WARN: " + message.message, message.thrown) - IMessage.FAIL, - IMessage.ERROR, - IMessage.ABORT -> log.error("ERROR: " + message.message, message.thrown) - } - } - } -} - -// the only purpose of the following is to get a hold of aspectjtools jar -// this jar is already on build script classpath, but that classpath is impossible to get -// see https://discuss.gradle.org/t/how-do-i-determine-buildscript-classpath/37973/3 - -val weaving: Configuration by configurations.creating - -dependencies { - weaving("org.aspectj:aspectjtools:1.9.9.1") -} - -// historical note: the problem with weaving Kotlin and Java in-place is that: -// * Java is compiled by task compileDebugJavaWithJavac -// * gradle can run either one of these tasks, or both of them -// * compileDebugJavaWithJavac depends on compileDebugKotlin -// * weaving Kotlin requires Java classes -// -// a transformation is a poorly advertised feature that works on merged code, and also has its own -// inputs and outputs, so this fixes all of our problems... 
- - class TransformCats : Transform() { - override fun getName(): String = TransformCats::class.simpleName!! - - override fun getInputTypes() = setOf(QualifiedContent.DefaultContentType.CLASSES) - - // only look for annotations in app classes - // transformation will consume these and put woven classes in the output dir - override fun getScopes() = mutableSetOf(QualifiedContent.Scope.PROJECT) - - // but also have the rest on our class path - // these will not be touched by the transformation - override fun getReferencedScopes() = mutableSetOf(QualifiedContent.Scope.SUB_PROJECTS, - QualifiedContent.Scope.EXTERNAL_LIBRARIES) - - override fun isIncremental() = false - - // only run on debug builds - override fun applyToVariant(variant: VariantInfo) = variant.isDebuggable - - override fun transform(invocation: TransformInvocation) { - if (!invocation.isIncremental) { - invocation.outputProvider.deleteAll() - } - - val output = invocation.outputProvider.getContentLocation(name, outputTypes, - scopes, Format.DIRECTORY) - if (output.isDirectory) FileUtils.deleteDirectoryContents(output) - FileUtils.mkdirs(output) - - val input = mutableListOf() - val classPath = mutableListOf() - val aspectPath = mutableListOf() - - invocation.inputs.forEach { source -> - source.directoryInputs.forEach { dir -> - input.add(dir.file) - classPath.add(dir.file) - } - - source.jarInputs.forEach { jar -> - input.add(jar.file) - classPath.add(jar.file) - } - } - - invocation.referencedInputs.forEach { source -> - source.directoryInputs.forEach { dir -> - classPath.add(dir.file) - } - - source.jarInputs.forEach { jar -> - classPath.add(jar.file) - // this used to read `if (jar.name == ":cats") ...`, - // but with android gradle plugin 4.2.0 jar names contain garbage - // this is a very simple but a bit fragile workaround. todo improve - if (jar.file.directoriesInsideRootProject().contains("cats")) { - aspectPath.add(jar.file) - } - } - - } - - weave(classPath, aspectPath, input, output) - } -} - -android.registerTransform(TransformCats()) - -val Iterable.asArgument get() = joinToString(File.pathSeparator) - -fun File.directoriesInsideRootProject() = sequence { - var file = this@directoriesInsideRootProject - while (true) { - yield(file.name) - file = file.parentFile ?: break - if (file == rootProject.projectDir) break - } -} diff --git a/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml b/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml deleted file mode 100644 index 0e1ff29b..00000000 --- a/tests/source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml +++ /dev/null @@ -1,1023 +0,0 @@ - - -]> - - - - - - - - - Weechat-Android - com.ubergeek42.WeechatAndroid - - - - Relay host is not set - Relay password is not set - SSH host is not set - SSH password is not set - SSH private key is not set - - - - Server unexpectedly closed connection while connecting. - Wrong password or connection type? 
- - Could not resolve address %s - - Error: %s - Buffer lost some hot messages because of new lines - There are no hot buffers for now - Not connected - Buffer list empty - Activity not found for intent %s - - - - Upload file - Tab - Upload - Send - - Cancel search - Search up - Search down - More - - Go to bottom - - Fetch more lines - Fetching lines… - - search - - filter - - Open drawer - Close drawer - - - - - - Search - - Users - - Hotlist - Show hot message - - Close - - Connect - Disconnect - Stop connecting - - Settings - - Filter lines - - Dark theme - - Attach image - Attach media - Attach file - Attach image - Attach media - Take photo - - Debug - Sync hotlist - Die - - - - Prefix - Message - Both - - Regex - Case sensitive - - - - With timestamps - Without timestamps - Messages only - - - - - - Connection Status - Hotlist - - Waiting for network - Will connect in %d seconds… - Connecting now… - Connected to %s - - - - %d message - %d messages - - -  in %d buffer -  in %d buffers - - - - - New message in %2$s - %1$d new messages in %2$s - - - - (user unknown) - (users unknown) - - (message not fetched) - - (message not fetched) - (%d messages not fetched) - - - - Me - - Reply - - - - - - - - %1$s (%2$s) - - %1$d user - %1$d users - - %s (away) - - - Copy - Select text - - Paste - - - Permission required - - To take photos, app needs write access to public storage - OK - - - - - - Issued to:

      - %1$s
      -
      - Issued by:
      - %2$s
      -
      - Validity period:
      - Issued on: %3$s
      - Expires on: %4$s
      -
      - SHA-256 fingerprint:
      - %5$s - ]]> - Unknown - - Reject - Back to safety - - - - - Invalid hostname - - %1$s - but the certificate is only valid for the following hosts: %2$s - ]]> - Note that Android P and beyond does not fall back to Common Name (CN) validation. - Subject Alternative Name (SAN) must be used instead. - Learn more - ]]> -       %s - ]]> -       (none) - ]]> - - - - - Certificate expired - - This certificate is no longer valid. - Please make sure that the device date is correct. - - - - - Certificate not yet valid - - This certificate will be valid in the future. - Please make sure that the device date is correct. - - - - - Untrusted certificate - - This certificate isn’t trusted by Android, but you can still connect. - The app will remember the selected certificate - and trust it and any certificates signed by it. - - - Accept selected - - - - - Certificate not pinned - - This server is trusted by Android, - but a setting requires you to confirm that you trust it as well. - The app will remember the selected certificate - and trust it and any certificates signed by it. - - - Pin selected - - - - - - - Unknown - - - - - Unknown server - %1$s has never been encountered. -
      -
      %2$s key SHA256 fingerprint: -
      %3$s - ]]>
      - - Accept server key - Reject - - - - - Server changed key - ⚠ Warning: it’s possible that someone is trying to hijack this connection! -
      -
      Server at %1$s is known, but its key doesn’t match the key you previously accepted. -
      -
      %2$s key SHA256 fingerprint: -
      %3$s -
      -
      If you want to continue, please clear known hosts in preferences. - ]]>
      - - - - - - - - - - Clear - Paste - Choose file - %1$s (%2$s) - set - not set - Clipboard is empty - - password - - Save - Default - Discard changes? - Cancel - Discard - - None - Unknown - - Invalid number - No spaces allowed in hostnames - - - - - - Connection - - Connection type - - Plain connection - WeeChat SSL - SSH tunnel - WebSocket - WebSocket (SSL) - - - - - WebSocket path - - - - - - SSL settings - - - Require certificate pins - - Prompt to confirm that you trust the server, even if the system trusts it - - Clear certificates - No trusted certificates - One trusted certificate - - One trusted certificate - %s trusted certificates - - Clear certificates? - Clear - Cancel - Certificates cleared - Could not clear certificates - - - Client certificate - - PKCS #12 file containing private key and client certificate - - Certificate was stored inside security hardware - - Certificate was stored inside software key store - - Certificate was stored inside key store - - Certificate forgotten - - - Server is asking for a client certificate but none is set. - Wanted: %1$s certificate issued by: %2$s - - Server is asking for a client certificate but the one we have doesn’t fit. - Wanted: %1$s certificate issued by: %2$s - - - - SSH tunnel settings - - SSH host - - SSH port - - SSH username - - Authentication method - - Password - Key - - - Password - - Private key - Ed25519, ECDSA, RSA or DSA key - - %s key was stored inside security hardware - - %s key was stored inside software key store - - %s key was stored inside key store - - %1$s key was stored inside the app. - \n - \nThe key couldn’t be stored in the key store: %2$s - - Key forgotten - - Clear known hosts - No entries - - %s entry - %s entries - - Clear known hosts? - Cancel - Clear - Known hosts cleared - - - Failed to authenticate with password - - Failed to authenticate with key - - - - Relay - - Relay host - - Relay port - - Relay password - - - - Handshake settings - - Handshake - - Compatibility - Modern & fast - Modern - - - In compatibility mode, the password isn’t hashed. This is the fastest method. - This method is required if using WeeChat < 2.9, but works on the later versions as well. - -

      Modern & fast handshake limits algorithms to the SHA-2 family. - -

      Modern handshake also includes PBKDF2. - These algorithms can be very slow, depending on the number of iterations. - -

      Password hashing offers little to no benefit if the connection is encrypted. - Learn more - ]]> - - - - Synchronization settings - - - Only sync open buffers - - Can significantly reduce traffic and battery usage, - but hotlist updates will only happen once 5 minutes - - - Sync buffer read status - - Mark buffers as read in WeeChat when you read them in this app - - - Number of lines to fetch - - The number of lines requested when opening a buffer - or when you press the “Load more lines” button (%s) - - - Number of lines to fetch for search - - When starting a new search, unless already fetched, - the app will request up to this many lines from WeeChat (%s) - - - These settings take effect after reconnection. - Note that due to filtering the number of lines actually shown - might be less than the number of loaded lines. - Also note that due to WeeChat’s limitations - the app has to re-fetch all lines every times it requests more lines. - - - - Miscellaneous - - Reconnect on connection loss - - Connect on system boot - - - - Ping settings - - - Enable ping - - Periodically check that the relay connection is still alive when idle - - - Idle time - - Number of seconds to wait before sending a ping when the connection is idle (%s) - - - Ping timeout - - Number of seconds to wait before closing an unresponsive connection (%s) - - - - - - Buffer list - - - Sort buffer list - - Sort by number of highlights/private messages/unread messages - - - Hide non-conversation buffers - - E.g. server buffers and plugin buffers - - - Hide hidden buffers - - Hide buffers hidden with /buffer hide - - - Show buffer filter - - Filter matches full buffer names and reveals matching hidden buffers - - - System gesture exclusion zone - - On Android Q, the left side of the screen is reserved for the back gesture. - Enable this to have a small area in the bottom of the screen - where you can open the buffer list normally. - - - - - - Look & feel - - Text size - - - Hide action bar - - Hide action bar when the keyboard is open or when scrolling up - - - Filter messages - - Hide messages filtered by WeeChat (e.g. irc_smart_filter) - - - Prefix alignment - - Left aligned - Right aligned - Timestamp aligned - No alignment - - - - Maximum width of prefix - - In terms of letters; longer nicknames will be cut (%s) - - - Enclose nicknames - - Enclose the nicknames in < and > - - Timestamp format - %s (default: HH:mm:ss) - Invalid timestamp format - - - Buffer font - Default - Import - Imported: %s - - - Non-monospace fonts will not work well with alignment. - Import fonts from the dialog, - or put them into one of the following locations:%1$s - - - - - - Theme - - @string/pref__theme__theme__system - Dark - Light - - - Set by battery saver - - Theme switch - Show theme switch in the menu - - Light color scheme - Dark color scheme - Error loading color scheme %s - Not set - Error - Import - Imported: %s - - Learn more -
      -
      Import color schemes from the dialogs - or put them into the following location:%1$s - ]]>
      - - - Dim down non-human lines - - Display joins/quits in a faint color, as set in the color scheme - - - - - - Buttons - - Show tab button - - Show send button - - Show paperclip button - - Paperclip button short tap - - @string/pref__buttons__paperclip__actions__content_images - @string/pref__buttons__paperclip__actions__content_media - @string/pref__buttons__paperclip__actions__content_anything - @string/pref__buttons__paperclip__actions__mediastore_images - @string/pref__buttons__paperclip__actions__mediastore_media - @string/pref__buttons__paperclip__actions__camera - - - Paperclip button long tap - - @string/pref__buttons__paperclip__actions__none - @string/pref__buttons__paperclip__actions__content_images - @string/pref__buttons__paperclip__actions__content_media - @string/pref__buttons__paperclip__actions__content_anything - @string/pref__buttons__paperclip__actions__mediastore_images - @string/pref__buttons__paperclip__actions__mediastore_media - @string/pref__buttons__paperclip__actions__camera - - - Disabled - - System: attach images - - System: attach images and videos - - System: attach any files - - Gallery: attach images - - Gallery: attach images and videos - - Take photo - - - When the paperclip button gets hidden to provide more space for the input field, - you can still attach files via overflow menu. - - - Volume buttons change text size - - If set, volume buttons will change text size instead of volume - - - - - - Notifications - - - Enable notifications - - Notify about hot messages such as private messages or highlights - - Notification sound - - Vibration - - Notification light - - - - - - Media preview - - Enabled - - Never - On Wi-Fi only - On unmetered networks only - Always - - - Context - Disabled everywhere - Enabled for %s - - Chat - Paste dialog - Notifications - - - - Insecure requests - - Allow - Rewrite as HTTPS - Disallow - - - -
      ⚠ Warning: the app is accessing the web directly. - A malicious person could craft a website to learn your IP address and other data. - To prevent the app from accessing websites you don’t know, - remove the strategy for the wildcard host “*” or set it to “none”. - Learn more - ]]>
      - - - Strategies - - Defines the ways images are fetched from individual websites, and some filters. - \n - \n%1$s; %2$s; - \n - \n%3$s - Error - Message filter set - Message filter not set - line filters not set - - %d line filter set - %d line filters set - - No strategies loaded - Strategies: %s - -"# don’t look for links in the part -# of the message that matches the -# following regex. this prevents -# the app from showing broken links -# in matrix clients’ quotes, e.g. -# <nick "http://broken.co"> message -#message filter: -# ^<[^ ]{1,16} \".{1,33}\">\\s - -line filters: -# don’t display thumbnails for any -# lines that match the following regex -- regex: '^(?:Title: |[↑^] )' - -# don’t display thumbnails -# for any lines from bot -#- nicks: [bot] - -# don’t display thumbnails -# for any lines from bot -# that also math the given regex -#- nicks: [bot] -# regex: ^<\\S+>\\s - -strategies: -- name: skip pastebins - type: none - hosts: - - pastebin.com - - bpa.st - - dpaste.com - - termbin.com - -- name: skip site banners - type: none - hosts: - - github.com - - gist.github.com - - stackoverflow.com - - '*.stackexchange.com' - - twitch.tv - - '*.twitch.tv' - -#- name: skip the rest, including redirects -# type: none -# hosts: ['*'] - -- name: try the rest - type: any - hosts: ['*'] - -- name: youtube - type: image - hosts: [www.youtube.com, m.youtube.com, youtube.com, youtu.be] - regex: (?i)^https?://(?:(?:www\\.|m\\.)?youtube\\.com/watch\\?v=|youtu\\.be/)([A-Za-z0-9_-]+) - small: https://img.youtube.com/vi/$1/mqdefault.jpg - big: https://img.youtube.com/vi/$1/hqdefault.jpg - -- name: i.imgur - type: image - hosts: [i.imgur.com] - regex: (?i)^https?://i\\.imgur\\.com/([A-Za-z0-9]+) - small: https://i.imgur.com/$1m.jpg - big: https://i.imgur.com/$1h.jpg - -- name: imgur/gallery - type: any - hosts: [imgur.com, www.imgur.com] - regex: (?i)^https?://(?:www\\.)?imgur\\.com/gallery/(.*) - sub: https://imgur.com/a/$1 - -- name: 9gag - type: image - hosts: [9gag.com, img-9gag-fun.9cache.com] - regex: (?i)^https?://(?:9gag\\.com/gag|img-9gag-fun\\.9cache\\.com/photo)/([^_]+) - small: https://images-cdn.9gag.com/photo/$1_700b.jpg - big: https://images-cdn.9gag.com/photo/$1_700b.jpg - -- name: mobile.twitter - type: any - hosts: [mobile.twitter.com] - regex: (?i)^https?://mobile\\.twitter\\.com/(.*) - sub: https://twitter.com/$1 - -- name: common - type: any - regex: (?i)^https?://(.+) - sub: https://$1 - hosts: - - '*.wikipedia.org' - - gfycat.com - - imgur.com - -- name: reddit - type: any - hosts: [v.redd.it, reddit.com, www.reddit.com, old.reddit.com] - body size: 196608 -" - - Advanced - - Download size limit - %s MB - - - Disk cache - - %s MB; takes effect on restart - - - Success cooldown - - %s hours. The app will consider successfully fetched image available, - either from cache or the web, - for the specified amount of time. - - Thumbnail width - - Minimum thumbnail height - - Maximum thumbnail height - - - - - - File sharing - - Accept from other apps - - Text only - Text, images and videos - Everything - - - Direct share - - Disabled - Up to one buffer - Up to two buffers - Up to three buffers - Up to four buffers - - - Upload URL - - File field - - Regex - - -
      curl -s --user user:pass \\ -
        --header \'Additional: Header\' \\ -
        --form additional=field \\ -
        --form file=@file.ext \\ -
        https://example.com | perl -nle \\ -
        \'m#^https://\\S+#; print $1//$&\'
      -
      -
      If the regular expression is set, it is used to find the URL in the response body; - either the first capture group is used, or the whole match. - Learn more - ]]>
      - - Advanced - - Additional headers - - Additional fields - - Authentication - - None - Basic - - - User - - Password - - Remember uploads for - %s hours - - - - - - About - - Weechat-Android %s - - created by ubergeek42 - - build ID %s - Unknown version ID - - - \nThis project uses the following libraries: - \n - \n    • Android Logger by Noveo Group - \n    • AspectJ by Eclipse Foundation - \n    • Bouncy Castle - \n    • EventBus by greenrobot - \n    • Glide by Bump Technologies - \n    • Hugo by Jake Wharton - \n    • Java-WebSocket by Nathan Rajlich - \n    • joda-time-android by Daniel Lew - \n    • JSch by JCraft - \n    • LeakCanary by Square, Inc - \n    • Mockito by Szczepan Faber and friends - \n    • nv-websocket-client by Takahiko Kawasaki - \n    • OkHttp by Square, Inc - \n    • SLF4J by QOS.ch - \n    • SnakeYAML by Andrey Somov - \n    • sshlib by ConnectBot - \n - \nPlease create an issue on GitHub - if you find a bug or have a feature request. - \n - \n - diff --git a/tests/source-files/de.varengold.activeTAN/build.gradle b/tests/source-files/de.varengold.activeTAN/build.gradle deleted file mode 100644 index e950be92..00000000 --- a/tests/source-files/de.varengold.activeTAN/build.gradle +++ /dev/null @@ -1,115 +0,0 @@ -apply plugin: 'com.android.application' - -android { - compileSdkVersion versions.compileSdk - defaultConfig { - versionCode 34 - versionName "2021-06-30" - - // Requires API level 23 (Android 6.0) to use Android keystore system for cryptography. - minSdkVersion 23 - targetSdkVersion versions.targetSdk - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - - vectorDrawables.useSupportLibrary = true - - javaCompileOptions { - annotationProcessorOptions { - // Export database schema history as JSON files. - arguments = ["room.schemaLocation": "$projectDir/schemas".toString()] - } - } - sourceSets { - // Include database schema history for migration testing. - androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) - } - } - - buildFeatures { - viewBinding = true - } - - buildTypes { - debug { - // Don't mess with the release versions during debugging, so use a different appId. - applicationIdSuffix ".debug" - debuggable true - } - release { - minifyEnabled true - shrinkResources true - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' - } - } - - flavorDimensions "client", "environment" - productFlavors { - prod { - dimension "environment" - } - - qs { - dimension "environment" - // To be able to install a second app variant, we must change the applicationId. - // Otherwise it would not be possible to use the same device for testing and production. 
- applicationIdSuffix ".QS" - } - - EFD { - // Demo portal efdis-online.de (extern) / dailybuild.efdis-online.de (intern) - dimension "client" - applicationId "de.efdis.activeTAN" - } - - VAR { - dimension "client" - applicationId "de.varengold.activeTAN" - } - - } - compileOptions { - // ZXing uses Java 8 language features from the core library - coreLibraryDesugaringEnabled true - - targetCompatibility JavaVersion.VERSION_1_8 - sourceCompatibility JavaVersion.VERSION_1_8 - } - -} - -tasks.withType(JavaCompile) { - options.deprecation = true -} - -dependencies { - implementation fileTree(dir: 'libs', include: ['*.jar']) - - implementation project(':material-design-icons') - implementation project(":barcodescanner") - - implementation 'com.google.android.material:material:1.3.0' - - implementation 'androidx.appcompat:appcompat:1.3.0' - implementation 'androidx.biometric:biometric:1.1.0' - - implementation 'androidx.constraintlayout:constraintlayout:2.0.4' - implementation 'androidx.recyclerview:recyclerview:1.2.1' - - coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.5' - implementation 'com.google.zxing:core:' + versions.zxing - - def room_version = '2.3.0' - implementation "androidx.room:room-runtime:$room_version" - annotationProcessor "androidx.room:room-compiler:$room_version" - androidTestImplementation "androidx.room:room-testing:$room_version" - - testImplementation 'junit:junit:4.13.1' - - androidTestImplementation 'androidx.test.ext:junit:1.1.2' - androidTestImplementation 'androidx.test:runner:1.3.0' - androidTestImplementation 'androidx.test:rules:1.3.0' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0' - androidTestImplementation 'androidx.test.espresso:espresso-contrib:3.3.0' - androidTestImplementation 'androidx.test.espresso:espresso-intents:3.3.0' -} diff --git a/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts b/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts deleted file mode 100644 index 14767fa8..00000000 --- a/tests/source-files/dev.patrickgold.florisboard/app/build.gradle.kts +++ /dev/null @@ -1,97 +0,0 @@ -plugins { - id("com.android.application") version "4.1.2" - kotlin("android") version "1.4.30" -} - -android { - compileSdkVersion(30) - buildToolsVersion("30.0.3") - - compileOptions { - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 - } - - kotlinOptions { - jvmTarget = JavaVersion.VERSION_1_8.toString() - freeCompilerArgs = listOf("-Xallow-result-return-type") // enables use of kotlin.Result - } - - defaultConfig { - applicationId = "dev.patrickgold.florisboard" - minSdkVersion(23) - targetSdkVersion(30) - versionCode(29) - versionName("0.3.10") - - testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" - } - - buildFeatures { - viewBinding = true - } - - buildTypes { - named("debug").configure { - applicationIdSuffix = ".debug" - versionNameSuffix = "-debug" - - resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_debug") - resValue("mipmap", "floris_app_icon_round", "@mipmap/ic_app_icon_debug_round") - resValue("string", "floris_app_name", "FlorisBoard Debug") - } - - create("beta") // Needed because by default the "beta" BuildType does not exist - named("beta").configure { - applicationIdSuffix = ".beta" - versionNameSuffix = "-beta01" - proguardFiles.add(getDefaultProguardFile("proguard-android-optimize.txt")) - - resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_beta") - resValue("mipmap", 
"floris_app_icon_round", "@mipmap/ic_app_icon_beta_round") - resValue("string", "floris_app_name", "FlorisBoard Beta") - } - - named("release").configure { - proguardFiles.add(getDefaultProguardFile("proguard-android-optimize.txt")) - - resValue("mipmap", "floris_app_icon", "@mipmap/ic_app_icon_release") - resValue("mipmap", "floris_app_icon_round", "@mipmap/ic_app_icon_release_round") - resValue("string", "floris_app_name", "@string/app_name") - } - } - - testOptions { - unitTests { - isIncludeAndroidResources = true - } - } - - lintOptions { - isAbortOnError = false - } -} - -dependencies { - implementation("androidx.activity", "activity-ktx", "1.2.1") - implementation("androidx.appcompat", "appcompat", "1.2.0") - implementation("androidx.core", "core-ktx", "1.3.2") - implementation("androidx.fragment", "fragment-ktx", "1.3.0") - implementation("androidx.preference", "preference-ktx", "1.1.1") - implementation("androidx.constraintlayout", "constraintlayout", "2.0.4") - implementation("androidx.lifecycle", "lifecycle-service", "2.2.0") - implementation("com.google.android", "flexbox", "2.0.1") // requires jcenter as of version 2.0.1 - implementation("com.squareup.moshi", "moshi-kotlin", "1.11.0") - implementation("com.squareup.moshi", "moshi-adapters", "1.11.0") - implementation("com.google.android.material", "material", "1.3.0") - implementation("org.jetbrains.kotlinx", "kotlinx-coroutines-android", "1.4.2") - implementation("com.jaredrummler", "colorpicker", "1.1.0") - implementation("com.jakewharton.timber", "timber", "4.7.1") - implementation("com.nambimobile.widgets", "expandable-fab", "1.0.2") - - testImplementation("junit", "junit", "4.13.1") - testImplementation("org.mockito", "mockito-inline", "3.7.7") - testImplementation("org.robolectric", "robolectric", "4.5.1") - androidTestImplementation("androidx.test.ext", "junit", "1.1.2") - androidTestImplementation("androidx.test.espresso", "espresso-core", "3.3.0") -} diff --git a/tests/source-files/firebase-allowlisted/app/build.gradle b/tests/source-files/firebase-whitelisted/app/build.gradle similarity index 100% rename from tests/source-files/firebase-allowlisted/app/build.gradle rename to tests/source-files/firebase-whitelisted/app/build.gradle diff --git a/tests/source-files/firebase-allowlisted/build.gradle b/tests/source-files/firebase-whitelisted/build.gradle similarity index 100% rename from tests/source-files/firebase-allowlisted/build.gradle rename to tests/source-files/firebase-whitelisted/build.gradle diff --git a/tests/source-files/flavor.test/build.gradle b/tests/source-files/flavor.test/build.gradle deleted file mode 100644 index 2c958bdc..00000000 --- a/tests/source-files/flavor.test/build.gradle +++ /dev/null @@ -1,15 +0,0 @@ -dependenies { - /// dependencies for app building - fossImplementation 'com.android.support:multidex:1.0.2' - implementation 'com.github.nextcloud:android-library:1.0.33' - devImplementation 'com.github.nextcloud:android-library:master-SNAPSHOT' // use always latest master - implementation "com.android.support:support-v4:${supportLibraryVersion}" - prodImplementation "com.android.support:design:${supportLibraryVersion}" - gplayImplementation 'com.jakewharton:disklrucache:2.0.2' - implementation "com.android.support:appcompat-v7:${supportLibraryVersion}" - gplayProdImplementation "com.android.support:cardview-v7:${supportLibraryVersion}" - implementation "com.android.support:exifinterface:${supportLibraryVersion}" - fossDevImplementation 
'com.github.tobiasKaminsky:android-floating-action-button:1.10.2' - gplayDevImplementation 'com.github.albfernandez:juniversalchardet:v2.0.0' - fossProdImplementation 'com.google.code.findbugs:annotations:2.0.1' -} diff --git a/tests/source-files/info.guardianproject.ripple/build.gradle b/tests/source-files/info.guardianproject.ripple/build.gradle deleted file mode 100644 index 5062b208..00000000 --- a/tests/source-files/info.guardianproject.ripple/build.gradle +++ /dev/null @@ -1,18 +0,0 @@ -buildscript { - repositories { - maven { url 'file:///usr/share/maven-repo' } - maven { url 'https://maven.google.com' } - jcenter() - } - dependencies { - classpath 'com.android.tools.build:gradle:2.2.2' - } -} - -allprojects { - repositories { - maven { url 'file:///usr/share/maven-repo' } - maven { url 'https://maven.google.com' } - jcenter() - } -} diff --git a/tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml b/tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/flutter/pubspec.lock b/tests/source-files/lockfile.test/flutter/pubspec.lock deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/flutter/pubspec.yaml b/tests/source-files/lockfile.test/flutter/pubspec.yaml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/javascript/package.json b/tests/source-files/lockfile.test/javascript/package.json deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/javascript/yarn.lock b/tests/source-files/lockfile.test/javascript/yarn.lock deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/rust/subdir/Cargo.lock b/tests/source-files/lockfile.test/rust/subdir/Cargo.lock deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/rust/subdir/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir/Cargo.toml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/lockfile.test/rust/subdir2/Cargo.toml b/tests/source-files/lockfile.test/rust/subdir2/Cargo.toml deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/source-files/org.mozilla.rocket/app/build.gradle b/tests/source-files/org.mozilla.rocket/app/build.gradle deleted file mode 100644 index f05d2899..00000000 --- a/tests/source-files/org.mozilla.rocket/app/build.gradle +++ /dev/null @@ -1,414 +0,0 @@ -apply plugin: 'com.android.application' -apply plugin: 'kotlin-android' -apply plugin: 'kotlin-android-extensions' -apply plugin: 'kotlin-kapt' -apply plugin: 'com.google.android.gms.oss-licenses-plugin' -apply from: '../buildSrc/pmd.gradle' -apply from: '../buildSrc/checkstyle.gradle' -apply from: '../buildSrc/findbugs.gradle' -apply from: 'buildscripts/l10n.gradle' - - -android { - compileSdkVersion Versions.compile_sdk - buildToolsVersion Versions.build_tools - defaultConfig { - applicationId "org.mozilla" - minSdkVersion Versions.min_sdk - targetSdkVersion Versions.target_sdk - versionCode Versions.version_code - versionName Versions.version_name - if (SystemEnv.auto_screenshot == "1") { - testInstrumentationRunner 
"org.mozilla.focus.test.runner.ScreenshotTestRunner" - testInstrumentationRunnerArguments clearPackageData: 'true' - } else { - // general UI test, using notAnnotation to filter out auto screenshot classes - testInstrumentationRunner "org.mozilla.focus.test.runner.CustomTestRunner" - testInstrumentationRunnerArguments clearPackageData: 'true', notAnnotation: 'org.mozilla.focus.annotation.ScreengrabOnly,android.support.test.filters.FlakyTest' - } - testInstrumentationRunnerArgument 'disableAnalytics', 'true' - - multiDexEnabled true - - vectorDrawables { - useSupportLibrary false - generatedDensities = [] - } - - def bitrise_build_number = System.getenv("BITRISE_BUILD_NUMBER") - if (bitrise_build_number?.trim()) { - versionCode bitrise_build_number.toInteger() - versionNameSuffix "(" + bitrise_build_number + ")" - } - - // used by Room, to test migrations - javaCompileOptions { - annotationProcessorOptions { - arguments = ["room.schemaLocation": "$projectDir/schemas".toString()] - } - } - - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - } - - dexOptions { - preDexLibraries true - } - - bundle { - language { - enableSplit = false - } - density { - enableSplit = false - } - abi { - enableSplit = true - } - } - - // We have a three dimensional build configuration: - // BUILD TYPE (debug, release) - - buildTypes { - release { - minifyEnabled true - shrinkResources true - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - matchingFallbacks = ["firebase"] - } - debug { - def userName = System.getenv("USER") - applicationIdSuffix ".debug." + userName - versionNameSuffix applicationIdSuffix - matchingFallbacks = ["firebase_no_op"] - } - // Use a separate buildType for coverage: testCoverageEnabled produces slower code (4-5x slower - // in places that I've benchmarked), and more importantly seems to break debugging with Android Studio - // for some developers (i.e. variables can't be inspected or seen). - coverage { - initWith debug - applicationIdSuffix ".coverage" - testCoverageEnabled true - matchingFallbacks = ["debug", "firebase_no_op"] - } - // special build type to develop Firebase related stuff - firebase { - initWith debug - applicationIdSuffix ".debug.firebase" - - versionNameSuffix applicationIdSuffix - matchingFallbacks = ["debug", "firebase"] - } - } - - testBuildType "firebase" - - testOptions { - animationsDisabled = true - unitTests.returnDefaultValues = true - unitTests.includeAndroidResources = true - execution 'ANDROID_TEST_ORCHESTRATOR' - } - - // used by Room, to test migrations - sourceSets { - androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) - } - - flavorDimensions "product", "engine" - - productFlavors { - focus { - resConfigs Localization.KEPT_LOCALE - dimension "product" - - applicationIdSuffix ".rocket" - } - - preview { - dimension "product" - applicationId "gro.allizom.zelda.beta" - applicationIdSuffix "" - versionNameSuffix ".nightly" - } - - // We can build with two engines: webkit or gecko - webkit { - dimension "engine" - } - - } - - variantFilter { variant -> - def flavors = variant.flavors*.name - // We only need a nightly release for now - if (flavors.contains("preview") && variant.buildType.name != "release") { - setIgnore(true) - } - } - - sourceSets { - test { - resources { - // Make the default asset folder available as test resource folder. Robolectric seems - // to fail to read assets for our setup. 
With this we can just read the files directly - // and do not need to rely on Robolectric. - srcDir "${projectDir}/src/main/assets/" - } - } - - preview { - res.srcDir 'src/preview/res' - } - - // used by Room, to test migrations - androidTest.assets.srcDirs += files("$projectDir/schemas".toString()) - } -} - -repositories { - flatDir { - dirs 'libs' - } - mavenCentral() -} - -dependencies { - implementation project(':telemetry-annotation') - kapt project(':telemetry-compiler') - - implementation project(':third_party:subsampling-scale-image-view') - implementation project(':third_party:glide:annotation') - implementation project(':third_party:glide:library') - kapt "com.github.bumptech.glide:compiler:${Versions.glide}" - - implementation project(':firebase') - implementation project(':feature-tabs') - implementation project(':HttpRequest') - implementation project(':httptask') - implementation project(':urlutils') - implementation project(':fileutils') - implementation project(':icon') - implementation project(':logger') - implementation project(':threadutils') - implementation project(':cachedrequestloader') - implementation project(':permissionhandler') - - implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${Versions.kotlin}" - - // We didn't use CustomTabs so far. This is a build hack to force Android-Components to use - // same version of support library as we are. Android-Components depends on CustomTabs which - // version will be override by this. - // We can get rid of this once Android-Components' issue #404 has been resolve. - implementation "com.android.support:customtabs:${Versions.support}" - implementation "com.android.support:support-v4:${Versions.support}" - implementation "com.android.support:appcompat-v7:${Versions.support}" - implementation "com.android.support:design:${Versions.support}" - implementation "com.android.support:cardview-v7:${Versions.support}" - implementation "com.android.support:recyclerview-v7:${Versions.support}" - implementation "com.android.support.constraint:constraint-layout:${Versions.constraint}" - implementation "android.arch.work:work-runtime:${Versions.arch_work}" - - - // Architecture components - implementation "android.arch.lifecycle:extensions:${Versions.lifecycle}" - implementation "android.arch.lifecycle:common-java8:${Versions.lifecycle}" - implementation "android.arch.persistence.room:runtime:${Versions.room}" - implementation "android.arch.navigation:navigation-fragment:${Versions.navigation}" - kapt "android.arch.persistence.room:compiler:${Versions.room}" - - implementation("com.google.code.findbugs:annotations:${Versions.findbugs}", { - // We really only need the SuppressFBWarnings annotation, everything else can be ignored. - // Without this we get weird failures due to dependencies. 
- transitive = false - }) - - implementation "org.mozilla.components:browser-session:${Versions.android_components}" - implementation "org.mozilla.components:service-telemetry:${Versions.android_components}" - implementation "org.mozilla.components:browser-domains:${Versions.android_components}" - implementation "org.mozilla.components:ui-autocomplete:${Versions.android_components}" - - implementation "com.adjust.sdk:adjust-android:${Versions.adjust}" - implementation "com.google.android.gms:play-services-analytics:${Versions.firebase}" // Required by Adjust - // Required by Adjust - - implementation "com.airbnb.android:lottie:${Versions.lottie}" - - testImplementation "junit:junit:${Versions.junit}" - testImplementation "org.robolectric:robolectric:${Versions.robolectric}" - testImplementation "org.mockito:mockito-core:${Versions.mockito}" - - androidTestImplementation("com.android.support.test.espresso:espresso-core:${Versions.espresso}", { - exclude group: 'com.android.support', module: 'support-annotations' - }) - androidTestImplementation "com.android.support.test:runner:${Versions.test_runner}" - androidTestImplementation "com.android.support.test.espresso:espresso-idling-resource:${Versions.espresso}" - androidTestImplementation "com.android.support:support-annotations:${Versions.support}" - androidTestImplementation "com.android.support.test.uiautomator:uiautomator-v18:${Versions.uiautomator}" - androidTestImplementation "com.squareup.okhttp3:mockwebserver:${Versions.mockwebserver}" - androidTestImplementation "android.arch.persistence.room:testing:${Versions.room}" - androidTestImplementation "android.arch.core:core-testing:${Versions.arch_core}" - androidTestImplementation("com.android.support.test.espresso:espresso-contrib:${Versions.espresso}", { - exclude group: 'com.android.support', module: 'appcompat' - exclude group: 'com.android.support', module: 'support-v4' - exclude module: 'recyclerview-v7' - }) - androidTestImplementation "com.android.support.test.espresso:espresso-web:${Versions.espresso}" - androidTestImplementation "com.android.support.test.espresso:espresso-intents:${Versions.espresso}" - androidTestImplementation "tools.fastlane:screengrab:${Versions.fastlane_screengrab}" - androidTestImplementation "com.jraska:falcon:${Versions.jraska_falcon}" - androidTestUtil "com.android.support.test:orchestrator:${Versions.test_runner}" - - // LeakCanary - debugImplementation "com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}" - coverageImplementation "com.squareup.leakcanary:leakcanary-android-no-op:${Versions.leakcanary}" - releaseImplementation "com.squareup.leakcanary:leakcanary-android-no-op:${Versions.leakcanary}" - firebaseImplementation "com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}" - - implementation project(':bhaskar') - implementation project(':newspoint') - implementation project(':partnerrepository') -} - -// ------------------------------------------------------------------------------------------------- -// LeakCanary - Ensure the no-op dependency is always used in JVM tests. 
-// ------------------------------------------------------------------------------------------------- - -configurations.all { config -> - if (config.name.contains('UnitTest') || config.name.contains('AndroidTest')) { - config.resolutionStrategy.eachDependency { details -> - if (details.requested.group == 'com.squareup.leakcanary' && details.requested.name == 'leakcanary-android') { - details.useTarget(group: details.requested.group, name: 'leakcanary-android-no-op', version: details.requested.version) - } - } - } -} - -// ------------------------------------------------------------------------------------------------- -// Generate blocklists -// ------------------------------------------------------------------------------------------------- - -def blockListOutputDir = 'src/webkit/res/raw' - -task buildBlocklists(type: Copy) { - from('../shavar-prod-lists') { - include '*.json' - } - into blockListOutputDir - - // Android can't handle dashes in the filename, so we need to rename: - rename 'disconnect-blacklist.json', 'blocklist.json' - rename 'disconnect-entitylist.json', 'entitylist.json' - // google_mapping.json already has an expected name -} - -clean.doLast { - file(blockListOutputDir).deleteDir() -} - -tasks.whenTaskAdded { task -> - def name = task.name - if (name.contains("generate") && name.contains("Webkit") && name.contains("Resources")) { - task.dependsOn buildBlocklists - } -} - -// ------------------------------------------------------------------------------------------------- -// Adjust: Read token from environment variable (Only release builds) -// ------------------------------------------------------------------------------------------------- - -android.applicationVariants.all { variant -> - def variantName = variant.getName() - - print(variantName + ": ") - - // release and nightly will have Adjust. just nightly will use sandbox environment. 
- if (variantName.contains("Release")) { - def token = System.getenv("ADJUST_TOKEN_FOCUS") ?: null - - if (token != null) { - buildConfigField 'String', 'ADJUST_TOKEN', '"' + token + '"' - if (variantName.contains("preview")) { - buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'com.adjust.sdk.AdjustConfig.ENVIRONMENT_SANDBOX' - } else if (variantName.contains("focus")) { - buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'com.adjust.sdk.AdjustConfig.ENVIRONMENT_PRODUCTION' - } else { - buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' - } - println "Added adjust token set from environment variable" - - def tracker = System.getenv("ADJUST_SIDELOAD_TRACKER") ?: null - if (tracker != null) { - buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', '"' + tracker + '"' - } else { - buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' - logger.error(variant.getName() + ": Not setting adjust default tracker (environment variable not set)") - } - } else { - buildConfigField 'String', 'ADJUST_TOKEN', 'null' - buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' - buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' - println("Not setting adjust token (environment variable not set)") - } - } else { - buildConfigField 'String', 'ADJUST_TOKEN', 'null' - buildConfigField 'String', 'ADJUST_ENVIRONMENT', 'null' - buildConfigField 'String', 'ADJUST_DEFAULT_TRACKER', 'null' - - println("Not setting adjust token (Not a focus release build)") - } - if (variant.buildType.name == "release" || variant.buildType.name == "firebase") { - variant.assemble.doFirst { - if (SystemEnv.google_app_id == null || SystemEnv.default_web_client_id == null || - SystemEnv.firebase_database_url == null || SystemEnv.gcm_defaultSenderId == null || - SystemEnv.google_api_key == null || SystemEnv.google_crash_reporting_api_key == null || - SystemEnv.project_id == null) { - logger.warn("If you want to enable Firebase, please follow the steps:") - logger.warn("1. Download google-services.json and put it in the folder where you run below command.") - logger.warn("2. 
Run 'python ./tools/firebase/firebase_setup.py' and follow the steps.\n") - } - } - } -} - -tasks.whenTaskAdded { task -> - if (name.contains("compile")) { - task.dependsOn generatePreviewLocaleList - task.dependsOn generateFocusLocaleList - } -} - -tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { - kotlinOptions { - kotlinOptions.allWarningsAsErrors = true - } -} - - - -afterEvaluate { - check.dependsOn 'findbugs', 'pmd', 'checkstyle', 'checkTelemetryDocDirty', 'ktlint' -} - -task checkTelemetryDocDirty() { - - doLast { - Process p = Runtime.getRuntime().exec("git diff ./docs/events.md"); - p.waitFor(); - - BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); - - StringBuilder sb = new StringBuilder(); - String line = ""; - while ((line = reader.readLine()) != null) { - sb.append(line + "\n"); - } - if (sb.length() > 0) { - throw new GradleException("events.md is dirty, please commit the change first.\n" + sb.toString()) - } - } -} diff --git a/tests/source-files/org.noise_planet.noisecapture/app/build.gradle b/tests/source-files/org.noise_planet.noisecapture/app/build.gradle deleted file mode 100644 index 158712bd..00000000 --- a/tests/source-files/org.noise_planet.noisecapture/app/build.gradle +++ /dev/null @@ -1,116 +0,0 @@ -apply plugin: 'com.android.application' - -def getCheckedOutGitCommitHash() { - 'git rev-parse --verify --short HEAD'.execute().text.trim() -} - -def getAvailableLocales() { - new File("app/src/main/res").list(new FilenameFilter() { - @Override - boolean accept(File dir, String name) { - return name.startsWith("values-") && new File(new File(dir,name), "strings.xml").exists(); - } - }).collect() { fold -> fold.substring("values-".length())}.join(",") -} - -android { - compileSdkVersion 30 - buildToolsVersion '30.0.0' - def signingFilePath = System.getProperty("user.home") + "/.idea/signing.gradle" - if (new File(signingFilePath).exists()) { - apply from: signingFilePath - } - defaultConfig { - applicationId "org.noise_planet.noisecapture" - minSdkVersion 15 - targetSdkVersion 30 - versionCode 55 - versionName "1.2.19" - // Store build date in apk - buildConfigField "long", "TIMESTAMP", System.currentTimeMillis() + "L" - buildConfigField "String", "GITHASH", "\"${getCheckedOutGitCommitHash().toString()}\"" - buildConfigField "String", "SUPPORTEDLOCALES", "\"${getAvailableLocales()}\"" - // Enabling multidex support.
- multiDexEnabled false - - vectorDrawables.useSupportLibrary = true - - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - } - - dexOptions { - javaMaxHeapSize "4g" - } - lintOptions { - abortOnError false - disable 'MissingTranslation' - } - buildTypes { - release { - minifyEnabled true - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - if (new File(signingFilePath).exists()) { - signingConfig signingConfigs.release - } - } - debug { - debuggable true - if (new File(signingFilePath).exists()) { - signingConfig signingConfigs.debug - } - } - } - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_7 - targetCompatibility JavaVersion.VERSION_1_7 - } - testOptions { - unitTests { - includeAndroidResources = true - } - } -} - -// For using the MPAndroidChart package -// https://github.com/PhilJay/MPAndroidChart -// Apache License, Version 2.0 -task listrepos { - doLast { - println "Repositories:" - project.repositories.each { println "Name: " + it.name + "; url: " + it.url } - } -} - -repositories { - mavenLocal() -} - -dependencies { - implementation 'com.github.PhilJay:MPAndroidChart:v2.2.5' - implementation 'org.slf4j:slf4j-simple:1.7.12' - implementation name: 'org/noise-planet/jwarble/0.2.3/jwarble-0.2.3' - implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.10' - implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.10' - - // multithreaded FFT for realtime visualisation of spectrum only - implementation 'com.github.wendykierp:JTransforms:3.1' - implementation 'org.apache.commons:commons-math3:3.5' - implementation 'androidx.appcompat:appcompat:1.0.0' - implementation 'com.google.android.material:material:1.0.0' - implementation 'androidx.vectordrawable:vectordrawable:1.0.0' - implementation 'com.nhaarman.supertooltips:library:3.0.0' - //compile 'com.android.support:multidex:1.0.0' - // Testing-only dependencies - // Force usage of support annotations in the test app, since it is internally used by the runner module. 
- implementation 'androidx.constraintlayout:constraintlayout:1.1.3' - androidTestImplementation 'androidx.annotation:annotation:1.0.0' - androidTestImplementation 'androidx.test.ext:junit:1.1.1' - androidTestImplementation 'androidx.test:rules:1.1.1' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.0' - // unit test - testImplementation group: 'org.robolectric', name: 'robolectric', version: '4.3.1' - testImplementation 'junit:junit:4.12' - testImplementation group: 'com.googlecode.soundlibs', name: 'jorbis', version: '0.0.17.4' - implementation project(':sosfilter') -} - diff --git a/tests/source-files/org.noise_planet.noisecapture/settings.gradle b/tests/source-files/org.noise_planet.noisecapture/settings.gradle deleted file mode 100644 index 3af2006a..00000000 --- a/tests/source-files/org.noise_planet.noisecapture/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -include 'sosfilter', 'app' diff --git a/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle b/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle deleted file mode 100644 index ab7b0caa..00000000 --- a/tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle +++ /dev/null @@ -1,31 +0,0 @@ -apply plugin: 'java' -apply plugin: 'idea' -apply plugin: 'java' - -group = 'org.orbisgis' -version = '1.0-SNAPSHOT' - - -description = """Signal processing, A-weighting and third-octave bands filtering""" - -compileJava { - sourceCompatibility = 1.7 - targetCompatibility = 1.7 -} -repositories { - mavenCentral() -} - -dependencies { - compile group: 'org.slf4j', name: 'slf4j-api', version:'1.7.12' - compile group: 'com.github.wendykierp', name: 'JTransforms', version:'3.1' - testCompile group: 'org.slf4j', name: 'slf4j-simple', version:'1.7.12' - testCompile group: 'junit', name: 'junit', version:'4.12' -} - -// Copy resource for unit tests -task copyTestResources(type: Copy) { - from "${projectDir}/src/test/resources" - into "${buildDir}/classes/test" -} -processTestResources.dependsOn copyTestResources diff --git a/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts b/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts deleted file mode 100644 index cb7d1d02..00000000 --- a/tests/source-files/org.piepmeyer.gauguin/build.gradle.kts +++ /dev/null @@ -1,47 +0,0 @@ -import java.net.URI - -buildscript { - dependencies { - classpath("com.android.tools.build:gradle:8.6.0") - } -} - -plugins { - alias(libs.plugins.android.application) apply false - alias(libs.plugins.android.library) apply false - alias(libs.plugins.kotlin.android) apply false - alias(libs.plugins.kotlin.jvm) apply false - alias(libs.plugins.sonarqube) - alias(libs.plugins.ktlint) - alias(libs.plugins.ksp) - alias(libs.plugins.roborazzi) apply false - alias(libs.plugins.gms) apply false -} - -sonarqube { - properties { - property("sonar.projectKey", "org.piepmeyer.gauguin") - property("sonar.organization", "meikpiep") - property("sonar.verbose", "true") - property("sonar.host.url", "https://sonarcloud.io") - } -} - -tasks.sonar { - onlyIf("There is no property 'buildserver'") { - project.hasProperty("buildserver") - } - dependsOn(":gauguin-app:lint") -} - -allprojects { - repositories { - mavenCentral() - google() - maven { url = URI("https://jitpack.io") } - } -} - -subprojects { - apply(plugin = "org.jlleitschuh.gradle.ktlint") -} diff --git a/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml b/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml deleted file mode 100644 index 
7159985c..00000000 --- a/tests/source-files/org.piepmeyer.gauguin/libs.versions.toml +++ /dev/null @@ -1,91 +0,0 @@ -[versions] - -kotlin = "1.9.23" -koin = "3.5.6" -koin-annotations="1.3.1" -kotest = "5.9.1" -kotest-extensions = "1.3.0" -kotlin-coroutines = "1.8.1" -android-gradle-plugin = "8.6.0" -androidUiTestingUtils = "2.3.3" -roborazzi = "1.26.0" - -[libraries] - -kotlin-coroutines-core = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-core", version.ref = "kotlin-coroutines" } -kotlin-coroutines-debug = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-debug", version.ref = "kotlin-coroutines" } -kotlin-serialization = { group = "org.jetbrains.kotlinx", name = "kotlinx-serialization-json", version = "1.6.3" } - -logging-kotlin = { group = "io.github.oshai", name = "kotlin-logging-jvm", version = "6.0.9" } -logging-slf = { group = "org.slf4j", name = "slf4j-api", version = "2.0.13" } -logging-logback-android = { group = "com.github.tony19", name = "logback-android", version = "3.0.0" } -logging-logback-kotlin = { group = "ch.qos.logback", name = "logback-classic", version = "1.5.6" } - -android-material = { group = "com.google.android.material", name = "material", version = "1.12.0" } - -androidx-annotation = { group = "androidx.annotation", name = "annotation", version = "1.8.2" } -androidx-ktx = { group = "androidx.core", name = "core-ktx", version = "1.13.1" } -androidx-constraintlayout = { group = "androidx.constraintlayout", name = "constraintlayout", version = "2.1.4" } -androidx-drawerlayout = { group = "androidx.drawerlayout", name = "drawerlayout", version = "1.2.0" } -androidx-fragment = { group = "androidx.fragment", name = "fragment-ktx", version = "1.8.3" } -androidx-gridlayout = { group = "androidx.gridlayout", name = "gridlayout", version = "1.0.0" } -androidx-lifecycle-runtime = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version = "2.8.5" } -androidx-lifecycle-viewmodel = { group = "androidx.lifecycle", name = "lifecycle-viewmodel-ktx", version = "2.8.5" } -androidx-preference = { group = "androidx.preference", name = "preference-ktx", version = "1.2.1" } -androidx-recyclerview = { group = "androidx.recyclerview", name = "recyclerview", version = "1.3.2" } -androidx-transition = { group = "androidx.transition", name = "transition", version = "1.5.1" } -androidx-window = { group = "androidx.window", name = "window", version = "1.3.0" } -androidx-window-core = { group = "androidx.window", name = "window-core", version = "1.3.0" } - -androidx-test-junit-ktx = { group = "androidx.test.ext", name = "junit-ktx", version = "1.2.1" } -androidx-test-rules = { group = "androidx.test", name = "rules", version = "1.6.1" } -androidx-test-runner = { group = "androidx.test", name = "runner", version = "1.6.2" } - -koin-core = { group = "io.insert-koin", name = "koin-core", version.ref = "koin" } -koin-annotations = { group = "io.insert-koin", name = "koin-annotations", version.ref = "koin-annotations" } -koin-ksp-compiler = { group = "io.insert-koin", name = "koin-ksp-compiler", version.ref = "koin-annotations" } -koin-test = { group = "io.insert-koin", name = "koin-test", version.ref = "koin" } -koin-android = { group = "io.insert-koin", name = "koin-android", version.ref = "koin" } - -kotest-runner = { group = "io.kotest", name = "kotest-runner-junit5", version.ref = "kotest" } -kotest-assertions = { group = "io.kotest", name = "kotest-assertions-core", version.ref = "kotest" } -kotest-parametrizedtests = { group = "io.kotest", name = 
"kotest-framework-datatest", version.ref = "kotest" } -kotest-koin = { group = "io.kotest.extensions", name = "kotest-extensions-koin", version.ref = "kotest-extensions" } - -test-mockk = { group = "io.mockk", name = "mockk", version = "1.13.11" } - -androiduitestingutils-utils = { group = "com.github.sergio-sastre.AndroidUiTestingUtils", name = "utils", version.ref = "androidUiTestingUtils" } -androiduitestingutils-robolectric = { group = "com.github.sergio-sastre.AndroidUiTestingUtils", name = "robolectric", version.ref = "androidUiTestingUtils" } -roboelectric = { group = "org.robolectric", name = "robolectric", version = "4.13" } -roborazzi = { group = "io.github.takahirom.roborazzi", name = "roborazzi", version.ref = "roborazzi" } -roborazzi-junit = { group = "io.github.takahirom.roborazzi", name = "roborazzi-junit-rule", version.ref = "roborazzi" } -junit-vintage-engine = { group = "org.junit.vintage", name = "junit-vintage-engine", version = "5.10.3" } - -thirdparty-konfetti = { group = "nl.dionsegijn", name = "konfetti-xml", version = "2.0.4" } -#thirdparty-ferriswheel = { group = "ru.github.igla", name = "ferriswheel", version = "1.2" } -thirdparty-navigationdrawer = { group = "com.mikepenz", name = "materialdrawer", version = "9.0.2" } -thirdparty-balloon = { group = "com.github.skydoves", name = "balloon", version = "1.6.7" } -thirdparty-vico = { group = "com.patrykandpatrick.vico", name = "views", version = "2.0.0-alpha.25" } -thirdparty-androidplot = { group = "com.androidplot", name = "androidplot-core", version = "1.5.11" } -thirdparty-leakcanary = { group = "com.squareup.leakcanary", name = "leakcanary-android", version = "2.14" } - -[plugins] - -android-application = { id = "com.android.application", version.ref = "android-gradle-plugin" } -android-library = { id = "com.android.library", version.ref = "android-gradle-plugin" } -kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" } -kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" } -sonarqube = { id = "org.sonarqube", version = "5.0.0.4638" } -ktlint = { id = "org.jlleitschuh.gradle.ktlint", version = "12.1.1" } -ksp = { id = "com.google.devtools.ksp", version = "1.9.23-1.0.20" } -roborazzi = { id = "io.github.takahirom.roborazzi", version.ref = "roborazzi" } -gms = { id = "com.google.gms.google-services", version = "1" } - -[bundles] - -logging = ["logging-kotlin", "logging-slf"] -kotest = ["kotest-runner", "kotest-assertions", "kotest-parametrizedtests", "kotest-koin"] -koin = ["koin-core", "koin-annotations", "koin-ksp-compiler"] -androidx-test = ["androidx-test-junit-ktx", "androidx-test-rules", "androidx-test-runner"] -screenshotTests = ["androiduitestingutils-utils", "androiduitestingutils-robolectric", "roboelectric", "roborazzi", "roborazzi-junit", "junit-vintage-engine"] - diff --git a/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts b/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts deleted file mode 100644 index 46f4acda..00000000 --- a/tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts +++ /dev/null @@ -1,24 +0,0 @@ -pluginManagement { - repositories { - google() - mavenCentral() - gradlePluginPortal() - } -} - -dependencyResolutionManagement { - versionCatalogs { - create("libs") { - from(files("libs.versions.toml")) - } - } -} - -plugins { - id("org.gradle.toolchains.foojay-resolver-convention") version ("0.8.0") -} - -rootProject.name = "gauguin" - -include(":gauguin-app") -include(":gauguin-core") diff --git 
a/tests/source-files/org.tasks/app/build.gradle.kts b/tests/source-files/org.tasks/app/build.gradle.kts deleted file mode 100644 index 23b0524d..00000000 --- a/tests/source-files/org.tasks/app/build.gradle.kts +++ /dev/null @@ -1,225 +0,0 @@ -import com.android.build.gradle.api.ApplicationVariant - -plugins { - id("com.android.application") - id("checkstyle") - id("io.fabric") - id("com.cookpad.android.licensetools") - kotlin("android") -} - -repositories { - jcenter() - google() - maven(url = "https://jitpack.io") -} - -android { - bundle { - language { - enableSplit = false - } - } - - dexOptions { - javaMaxHeapSize = "2g" - } - - lintOptions { - setLintConfig(file("lint.xml")) - textOutput("stdout") - textReport = true - } - - compileSdkVersion(Versions.targetSdk) - - defaultConfig { - testApplicationId = "org.tasks.test" - applicationId = "org.tasks" - versionCode = 651 - versionName = "7.6.1" - targetSdkVersion(Versions.targetSdk) - minSdkVersion(Versions.minSdk) - multiDexEnabled = true - testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" - - javaCompileOptions { - annotationProcessorOptions { - arguments["room.schemaLocation"] = "$projectDir/schemas" - } - } - } - - signingConfigs { - create("release") { - val tasksKeyAlias: String? by project - val tasksStoreFile: String? by project - val tasksStorePassword: String? by project - val tasksKeyPassword: String? by project - - keyAlias = tasksKeyAlias - storeFile = file(tasksStoreFile?: "none") - storePassword = tasksStorePassword - keyPassword = tasksKeyPassword - } - } - - compileOptions { - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 - } - - @Suppress("LocalVariableName") - buildTypes { - getByName("debug") { - val tasks_mapbox_key_debug: String? by project - val tasks_google_key_debug: String? by project - - applicationIdSuffix = ".debug" - resValue("string", "mapbox_key", tasks_mapbox_key_debug ?: "") - resValue("string", "google_key", tasks_google_key_debug ?: "") - isTestCoverageEnabled = true - } - getByName("release") { - val tasks_mapbox_key: String? by project - val tasks_google_key: String? 
by project - - resValue("string", "mapbox_key", tasks_mapbox_key ?: "") - resValue("string", "google_key", tasks_google_key ?: "") - isMinifyEnabled = true - proguardFiles(getDefaultProguardFile("proguard-android.txt"), "proguard.pro") - signingConfig = signingConfigs.getByName("release") - } - } - - applicationVariants.all(object : Action { - override fun execute(variant: ApplicationVariant) { - variant.resValue("string", "app_package", variant.applicationId) - } - }) - - flavorDimensions("store") - - productFlavors { - create("generic") { - setDimension("store") - proguardFile("generic.pro") - } - create("googleplay") { - setDimension("store") - } - create("amazon") { - setDimension("store") - } - } - - viewBinding { - isEnabled = true - } - - dataBinding { - isEnabled = true - } - - packagingOptions { - exclude("META-INF/*.kotlin_module") - } -} - -configure { - configFile = project.file("google_checks.xml") - toolVersion = "8.16" -} - -configurations.all { - exclude(group = "com.google.guava", module = "guava-jdk5") - exclude(group = "org.apache.httpcomponents", module = "httpclient") - exclude(group = "com.google.http-client", module = "google-http-client-apache") - resolutionStrategy { - force("com.squareup.okhttp3:okhttp:" + Versions.okhttp) - } -} - -val googleplayImplementation by configurations -val amazonImplementation by configurations - -dependencies { - implementation("com.gitlab.bitfireAT:dav4jvm:1.0") - implementation("com.gitlab.bitfireAT:ical4android:be6d515db8") { - exclude(group = "org.threeten", module = "threetenbp") - } - implementation("com.gitlab.bitfireAT:cert4android:1488e39a66") - - annotationProcessor("com.google.dagger:dagger-compiler:${Versions.dagger}") - implementation("com.google.dagger:dagger:${Versions.dagger}") - - implementation("androidx.room:room-rxjava2:${Versions.room}") - annotationProcessor("androidx.room:room-compiler:${Versions.room}") - implementation("androidx.lifecycle:lifecycle-extensions:2.1.0") - implementation("io.reactivex.rxjava2:rxandroid:2.1.1") - implementation("androidx.paging:paging-runtime:2.1.1") - - annotationProcessor("com.jakewharton:butterknife-compiler:${Versions.butterknife}") - implementation("com.jakewharton:butterknife:${Versions.butterknife}") - - debugImplementation("com.facebook.flipper:flipper:${Versions.flipper}") - debugImplementation("com.facebook.flipper:flipper-network-plugin:${Versions.flipper}") - debugImplementation("com.facebook.soloader:soloader:0.8.0") - - debugImplementation("com.squareup.leakcanary:leakcanary-android:${Versions.leakcanary}") - - implementation("org.jetbrains.kotlin:kotlin-stdlib:${Versions.kotlin}") - implementation("io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:2.0.0") - implementation("androidx.multidex:multidex:2.0.1") - implementation("me.saket:better-link-movement-method:2.2.0") - implementation("com.squareup.okhttp3:okhttp:${Versions.okhttp}") - implementation("com.google.code.gson:gson:2.8.5") - implementation("com.github.rey5137:material:1.2.5") - implementation("com.nononsenseapps:filepicker:4.2.1") - implementation("com.google.android.material:material:1.1.0-rc01") - implementation("androidx.annotation:annotation:1.1.0") - implementation("androidx.constraintlayout:constraintlayout:2.0.0-beta4") - implementation("androidx.swiperefreshlayout:swiperefreshlayout:1.0.0") - implementation("com.jakewharton.timber:timber:4.7.1") - implementation("com.jakewharton.threetenabp:threetenabp:1.2.1") - implementation("com.google.guava:guava:27.1-android") - 
implementation("com.jakewharton:process-phoenix:2.0.0") - implementation("com.google.android.apps.dashclock:dashclock-api:2.0.0") - implementation("com.twofortyfouram:android-plugin-api-for-locale:1.0.2") - implementation("com.rubiconproject.oss:jchronic:0.2.6") { - isTransitive = false - } - implementation("org.scala-saddle:google-rfc-2445:20110304") { - isTransitive = false - } - implementation("com.wdullaer:materialdatetimepicker:4.0.1") - implementation("me.leolin:ShortcutBadger:1.1.22@aar") - implementation("com.google.apis:google-api-services-tasks:v1-rev59-1.25.0") - implementation("com.google.apis:google-api-services-drive:v3-rev188-1.25.0") - implementation("com.google.api-client:google-api-client-android:1.30.7") - implementation("androidx.work:work-runtime:${Versions.work}") - implementation("com.mapbox.mapboxsdk:mapbox-android-sdk:7.3.0") - implementation("com.mapbox.mapboxsdk:mapbox-sdk-services:4.6.0") - - googleplayImplementation("com.crashlytics.sdk.android:crashlytics:${Versions.crashlytics}") - googleplayImplementation("com.google.firebase:firebase-analytics:${Versions.firebase}") - googleplayImplementation("com.google.android.gms:play-services-location:17.0.0") - googleplayImplementation("com.google.android.gms:play-services-maps:17.0.0") - googleplayImplementation("com.google.android.libraries.places:places:2.1.0") - googleplayImplementation("com.android.billingclient:billing:1.2.2") - - amazonImplementation(fileTree(mapOf("dir" to "libs", "include" to listOf("*.jar")))) - amazonImplementation("com.crashlytics.sdk.android:crashlytics:${Versions.crashlytics}") - amazonImplementation("com.google.firebase:firebase-core:${Versions.firebase}") - - androidTestAnnotationProcessor("com.google.dagger:dagger-compiler:${Versions.dagger}") - androidTestAnnotationProcessor("com.jakewharton:butterknife-compiler:${Versions.butterknife}") - androidTestImplementation("com.google.dexmaker:dexmaker-mockito:1.2") - androidTestImplementation("com.natpryce:make-it-easy:4.0.1") - androidTestImplementation("androidx.test:runner:1.2.0") - androidTestImplementation("androidx.test:rules:1.2.0") - androidTestImplementation("androidx.test.ext:junit:1.1.1") - androidTestImplementation("androidx.annotation:annotation:1.1.0") -} - -apply(mapOf("plugin" to "com.google.gms.google-services")) diff --git a/tests/source-files/org.tasks/build.gradle b/tests/source-files/org.tasks/build.gradle deleted file mode 100644 index 2edd2b70..00000000 --- a/tests/source-files/org.tasks/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ - -buildscript { - repositories { - mavenCentral() - } - dependencies { - classpath 'org.owasp:dependency-check-gradle:1.3.2.1' - } -} -apply plugin: 'org.owasp.dependencycheck' -dependencyCheck { - format='JSON' -} diff --git a/tests/source-files/org.tasks/build.gradle.kts b/tests/source-files/org.tasks/build.gradle.kts deleted file mode 100644 index f766cea2..00000000 --- a/tests/source-files/org.tasks/build.gradle.kts +++ /dev/null @@ -1,26 +0,0 @@ -buildscript { - repositories { - jcenter() - google() - maven("https://maven.fabric.io/public") - } - - dependencies { - classpath("com.android.tools.build:gradle:3.6.0-rc01") - classpath("com.google.gms:google-services:4.3.3") - // https://docs.fabric.io/android/changelog.html#fabric-gradle-plugin - classpath("io.fabric.tools:gradle:1.31.2") - classpath("com.github.ben-manes:gradle-versions-plugin:0.27.0") - classpath("com.cookpad.android.licensetools:license-tools-plugin:1.7.0") - 
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:${Versions.kotlin}") - } -} - -plugins { - id("com.github.ben-manes.versions") version "0.21.0" -} - -tasks.getByName("wrapper") { - gradleVersion = "5.6.4" - distributionType = Wrapper.DistributionType.ALL -} diff --git a/tests/source-files/org.tasks/buildSrc/build.gradle.kts b/tests/source-files/org.tasks/buildSrc/build.gradle.kts deleted file mode 100644 index c39a297b..00000000 --- a/tests/source-files/org.tasks/buildSrc/build.gradle.kts +++ /dev/null @@ -1,7 +0,0 @@ -plugins { - `kotlin-dsl` -} - -repositories { - jcenter() -} \ No newline at end of file diff --git a/tests/source-files/org.tasks/settings.gradle.kts b/tests/source-files/org.tasks/settings.gradle.kts deleted file mode 100644 index 15a801b1..00000000 --- a/tests/source-files/org.tasks/settings.gradle.kts +++ /dev/null @@ -1 +0,0 @@ -include(":app") diff --git a/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index a3185c1e..00000000 --- a/tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Mon Sep 01 10:23:06 EEST 2014 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip diff --git a/tests/source-files/realm/react-native/android/build.gradle b/tests/source-files/realm/react-native/android/build.gradle deleted file mode 100644 index d6244d2b..00000000 --- a/tests/source-files/realm/react-native/android/build.gradle +++ /dev/null @@ -1,409 +0,0 @@ -buildscript { - repositories { - google() - jcenter() - } - dependencies { - classpath 'com.android.tools.build:gradle:3.1.4' - classpath 'de.undercouch:gradle-download-task:1.2' - } -} - -allprojects { - repositories { - google() - jcenter() - mavenLocal() - maven { - // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm - url "$projectDir/../../tests/react-test-app/node_modules/react-native/android" - } - } -} - -apply plugin: 'com.android.library' -apply plugin: 'maven' -apply plugin: 'signing' -apply plugin: 'de.undercouch.download' - -import de.undercouch.gradle.tasks.download.Download -import org.apache.tools.ant.taskdefs.condition.Os -import org.apache.tools.ant.filters.ReplaceTokens - -// We download various C++ open-source dependencies into downloads. -// We then copy both the downloaded code and our custom makefiles and headers into third-party-ndk. -// After that we build native code from src/main/jni with module path pointing at third-party-ndk. - - -ext.coreVersion = getDependenciesVersion("REALM_CORE_VERSION").trim() -ext.syncVersion = getDependenciesVersion("REALM_SYNC_VERSION").trim() -def currentVersion = getDependenciesVersion("VERSION").trim() -println "Realm Core Version: $ext.coreVersion" -println "Realm Sync Version: $ext.syncVersion" - -def downloadsDir = new File("$projectDir/downloads") -def jscDownloadDir = new File("$projectDir/src/main/jni/jsc") -def coreDownloadDir = new File("$projectDir/src/main/jni") -def publishDir = new File("$projectDir/../../android/") -// to build with sync run: ./gradlew assembleDebug -PbuildWithSync=true -ext.buildSync = project.hasProperty('buildWithSync') ? 
project.getProperty('buildWithSync').toBoolean() : true - -task generateVersionClass(type: Copy) { - from 'src/main/templates/Version.java' - into 'build/generated-src/main/java/io/realm/react' - filter(ReplaceTokens, tokens: [version: currentVersion]) - outputs.upToDateWhen { false } -} - -task createNativeDepsDirectories { - downloadsDir.mkdirs() -} - -task downloadJSCHeaders(type: Download) { - def jscAPIBaseURL = 'https://svn.webkit.org/repository/webkit/!svn/bc/174650/trunk/Source/JavaScriptCore/API/' - def jscHeaderFiles = ['JSBase.h', 'JSContextRef.h', 'JSObjectRef.h', 'JSRetainPtr.h', 'JSStringRef.h', 'JSValueRef.h', 'WebKitAvailability.h'] - - def output = new File(jscDownloadDir, 'JavaScriptCore') - output.mkdirs() - src(jscHeaderFiles.collect { headerName -> "$jscAPIBaseURL$headerName" }) - onlyIfNewer true - overwrite false - dest output - } - -task downloadRealmCore(type: Download) { - if (project.buildSync) { - src "https://static.realm.io/downloads/sync/realm-sync-android-${project.syncVersion}.tar.gz" - } else { - src "https://static.realm.io/downloads/core/realm-core-android-${project.coreVersion}.tar.gz" - } - onlyIfNewer true - overwrite true - if (project.buildSync) { - dest new File(downloadsDir, "realm-core-android-${project.syncVersion}.tar.gz") - } else { - dest new File(downloadsDir, "realm-core-android-${project.coreVersion}.tar.gz") - } -} - -task prepareRealmCore(dependsOn: downloadRealmCore, type:Copy) { - from tarTree(downloadRealmCore.dest) - into "$coreDownloadDir/core" - rename { String fileName -> - fileName.replace("-arm-", "-armeabi-") - } -} - -task downloadOpenSSL_x86(type: Download) { - src "https://static.realm.io/downloads/openssl/1.0.2k/Android/x86/openssl-release-1.0.2k-Android-x86.tar.gz" - onlyIfNewer true - overwrite true - dest new File(downloadsDir, "openssl-release-1.0.2k-Android-x86.tar.gz") -} - -task prepareOpenSSL_x86(dependsOn: downloadOpenSSL_x86, type:Copy) { - from tarTree(downloadOpenSSL_x86.dest) - into "$coreDownloadDir/core" -} - -task downloadOpenSSL_arm(type: Download) { - src "https://static.realm.io/downloads/openssl/1.0.2k/Android/armeabi-v7a/openssl-release-1.0.2k-Android-armeabi-v7a.tar.gz" - onlyIfNewer true - overwrite true - dest new File(downloadsDir, "openssl-release-1.0.2k-Android-armeabi-v7a.tar.gz") -} - -task prepareOpenSSL_arm(dependsOn: downloadOpenSSL_arm, type:Copy) { - from tarTree(downloadOpenSSL_arm.dest) - into "$coreDownloadDir/core" - rename { String fileName -> - fileName.replace("-arm-", "-armeabi-") - } -} - -def getDependenciesVersion(keyName) { - def inputFile = new File(buildscript.sourceFile.getParent() + "/../../dependencies.list") - def line - inputFile.withReader { reader -> - while ((line = reader.readLine())!=null) { - def (key, value) = line.tokenize('=') - if (keyName == key) { - return value - } - } - - throw new GradleException("${keyName} not found in dependencies.list.") - } -} - -def getNdkBuildName() { - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - return "ndk-build.cmd" - } else { - return "ndk-build" - } -} - -def findNdkBuildFullPath() { - // we allow to provide full path to ndk-build tool - if (hasProperty('ndk.command')) { - return property('ndk.command') - } - // or just a path to the containing directory - if (hasProperty('ndk.path')) { - def ndkDir = property('ndk.path') - return new File(ndkDir, getNdkBuildName()).getAbsolutePath() - } - if (System.getenv('ANDROID_NDK') != null) { - def ndkDir = System.getenv('ANDROID_NDK') - return new File(ndkDir, 
getNdkBuildName()).getAbsolutePath() - } - if (System.getenv('ANDROID_NDK_HOME') != null) { - def ndkDir = System.getenv('ANDROID_NDK_HOME') - return new File(ndkDir, getNdkBuildName()).getAbsolutePath() - } - def ndkDir = android.hasProperty('plugin') ? android.plugin.ndkFolder : - plugins.getPlugin('com.android.library').sdkHandler.getNdkFolder() - if (ndkDir) { - return new File(ndkDir, getNdkBuildName()).getAbsolutePath() - } - return null -} - -def checkNdkVersion(ndkBuildFullPath) { - def ndkPath = new File(ndkBuildFullPath).getParent() - def detectedNdkVersion - def releaseFile = new File(ndkPath, 'RELEASE.TXT') - def propertyFile = new File(ndkPath, 'source.properties') - if (releaseFile.isFile()) { - detectedNdkVersion = releaseFile.text.trim().split()[0].split('-')[0] - } else if (propertyFile.isFile()) { - detectedNdkVersion = getValueFromPropertiesFile(propertyFile, 'Pkg.Revision') - if (detectedNdkVersion == null) { - throw new GradleException("Failed to obtain the NDK version information from ${ndkPath}/source.properties") - } - } else { - throw new GradleException("Neither ${releaseFile.getAbsolutePath()} nor ${propertyFile.getAbsolutePath()} is a file.") - } - if (detectedNdkVersion != project.ndkVersion) { - throw new GradleException("Your NDK version: ${detectedNdkVersion}." - + " Realm JNI must be compiled with the version ${project.ndkVersion} of NDK.") - } -} - -static def getValueFromPropertiesFile(File propFile, String key) { - if (!propFile.isFile() || !propFile.canRead()) { - return null - } - def prop = new Properties() - def reader = propFile.newReader() - try { - prop.load(reader) - } finally { - reader.close() - } - return prop.get(key) -} - -def getNdkBuildFullPath() { - def ndkBuildFullPath = findNdkBuildFullPath() - if (ndkBuildFullPath == null) { - throw new GradleScriptException( - "ndk-build binary cannot be found, check if you've set " + - "\$ANDROID_NDK environment variable correctly or if ndk.dir is " + - "setup in local.properties", - null) - } - if (!new File(ndkBuildFullPath).canExecute()) { - throw new GradleScriptException( - "ndk-build binary " + ndkBuildFullPath + " doesn't exist or isn't executable.\n" + - "Check that the \$ANDROID_NDK environment variable, or ndk.dir in local.properties, is set correctly.\n" + - "(On Windows, make sure you escape backslashes in local.properties or use forward slashes, e.g. C:\\\\ndk or C:/ndk rather than C:\\ndk)", - null) - } - - checkNdkVersion(ndkBuildFullPath); - - return ndkBuildFullPath -} - -task buildReactNdkLib(dependsOn: [downloadJSCHeaders,prepareRealmCore,prepareOpenSSL_x86,prepareOpenSSL_arm], type: Exec) { - inputs.files('src/main/jni') - outputs.dir("$buildDir/realm-react-ndk/all") - commandLine getNdkBuildFullPath(), - '-e', - project.buildSync ? 'BUILD_TYPE_SYNC=1' : 'BUILD_TYPE_SYNC=0', - 'NDK_PROJECT_PATH=null', - "NDK_APPLICATION_MK=$projectDir/src/main/jni/Application.mk", - 'NDK_OUT=' + temporaryDir, - "NDK_LIBS_OUT=$buildDir/realm-react-ndk/all", - '-C', file('src/main/jni').absolutePath, - 'NDK_LOG=1', - 'NDK_DEBUG=' + (DEBUG_BUILD.toBoolean() ?
'1' : '0'), - '--jobs', Runtime.runtime.availableProcessors(), - 'V=1' -} - -task cleanReactNdkLib(type: Exec) { - commandLine getNdkBuildFullPath(), - '-C', file('src/main/jni').absolutePath, - 'clean' -} - -task packageReactNdkLibs(dependsOn: buildReactNdkLib, type: Copy) { - from "$buildDir/realm-react-ndk/all" - exclude '**/libjsc.so' - exclude '**/gdbserver' - exclude '**/gdb.setup' - into "$buildDir/realm-react-ndk/exported" -} - -android { - compileSdkVersion 26 - - defaultConfig { - minSdkVersion 16 - targetSdkVersion 26 - } - - sourceSets.main { - java.srcDir "$buildDir/generated-src/main/java" - jni.srcDirs = [] - jniLibs.srcDir "$buildDir/realm-react-ndk/exported" - res.srcDirs = ['src/main/res/devsupport', 'src/main/res/shell'] - } - - tasks.withType(JavaCompile) { - compileTask -> compileTask.dependsOn generateVersionClass, packageReactNdkLibs - } - - clean.dependsOn cleanReactNdkLib - - lintOptions { - abortOnError false - } -} - -task publishAndroid(dependsOn: [generateVersionClass, packageReactNdkLibs], type: Sync) { - // Copy task can only have one top level - into "$publishDir" - - // copy java source - into ('/src/main') { - from "$projectDir/src/main", "$buildDir/generated-src/main" - exclude '**/jni/**', '**/templates/**' - } - - // add compiled shared object - into ('/src/main/jniLibs') { - from "$buildDir/realm-react-ndk/exported/" - } - - // copy gradle wrapper files - FileTree gradleWrapper = fileTree(projectDir).include('gradlew*').include('gradle/**') - into ('/') { - from gradleWrapper - } - - // copy and rename template build.gradle - into ('/') { - from "$projectDir/publish_android_template" - rename { String fileName -> - 'build.gradle' - } - } - - // copy analytics script - into ('/') { - from "$projectDir/analytics_template" - rename { String fileName -> - 'analytics.gradle' - } - } -} - -// publishing into maven local - -def configureRealmReactNativePom(def pom) { - pom.project { - name POM_NAME - artifactId POM_ARTIFACT_ID - packaging POM_PACKAGING - description POM_DESCRIPTION - url 'https://github.com/realm/realm-js' - - issueManagement { - system 'github' - url 'https://github.com/realm/realm-js/issues' - } - - scm { - url 'scm:https://github.com/realm/realm-js' - connection 'scm:git@github.com:realm/realm-js.git' - developerConnection 'scm:git@github.com:realm/realm-js.git' - } - - licenses { - license { - name 'The Apache Software License, Version 2.0' - url 'http://www.apache.org/licenses/LICENSE-2.0.txt' - distribution 'repo' - } - } - } -} - -afterEvaluate { project -> - task androidSourcesJar(type: Jar) { - classifier = 'sources' - from android.sourceSets.main.java.srcDirs - include '**/*.java' - } - - android.libraryVariants.all { variant -> - def name = variant.name.capitalize() - task "jar${name}"(type: Jar, dependsOn: variant.javaCompile) { - from variant.javaCompile.destinationDir - } - } - - artifacts { - archives androidSourcesJar - } - - version = currentVersion - group = GROUP - - signing { - required { false } - sign configurations.archives - } - - task installArchives(type: Upload) { - configuration = configurations.archives - repositories.mavenDeployer { - beforeDeployment { - MavenDeployment deployment -> signing.signPom(deployment) - } - - repository url: "file://${System.properties['user.home']}/.m2/repository" - configureRealmReactNativePom pom - } - } -} - -def dependencyType = "implementation" -def providedDependencyType = "compileOnly" -try { - project.getConfigurations().getByName("implementation") -} catch 
(UnknownConfigurationException e) { - // Pre 3.0 Android Gradle Plugin - dependencyType = "compile" - providedDependencyType = "provided" -} - -project.dependencies { - add(providedDependencyType, 'com.squareup.okhttp3:okhttp:3.9.0') - add(providedDependencyType, 'com.facebook.react:react-native:+') - add(dependencyType, 'org.nanohttpd:nanohttpd:2.2.0') -} diff --git a/tests/source-files/se.manyver/android/app/build.gradle b/tests/source-files/se.manyver/android/app/build.gradle deleted file mode 100644 index 1c77f965..00000000 --- a/tests/source-files/se.manyver/android/app/build.gradle +++ /dev/null @@ -1,272 +0,0 @@ -apply plugin: "com.android.application" - -import com.android.build.OutputFile - -/** - * The react.gradle file registers a task for each build variant (e.g. bundleDebugJsAndAssets - * and bundleReleaseJsAndAssets). - * These basically call `react-native bundle` with the correct arguments during the Android build - * cycle. By default, bundleDebugJsAndAssets is skipped, as in debug/dev mode we prefer to load the - * bundle directly from the development server. Below you can see all the possible configurations - * and their defaults. If you decide to add a configuration block, make sure to add it before the - * `apply from: "../../node_modules/react-native/react.gradle"` line. - * - * project.ext.react = [ - * // the name of the generated asset file containing your JS bundle - * bundleAssetName: "index.android.bundle", - * - * // the entry file for bundle generation - * entryFile: "index.android.js", - * - * // https://facebook.github.io/react-native/docs/performance#enable-the-ram-format - * bundleCommand: "ram-bundle", - * - * // whether to bundle JS and assets in debug mode - * bundleInDebug: false, - * - * // whether to bundle JS and assets in release mode - * bundleInRelease: true, - * - * // whether to bundle JS and assets in another build variant (if configured). - * // See http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Build-Variants - * // The configuration property can be in the following formats - * // 'bundleIn${productFlavor}${buildType}' - * // 'bundleIn${buildType}' - * // bundleInFreeDebug: true, - * // bundleInPaidRelease: true, - * // bundleInBeta: true, - * - * // whether to disable dev mode in custom build variants (by default only disabled in release) - * // for example: to disable dev mode in the staging build type (if configured) - * devDisabledInStaging: true, - * // The configuration property can be in the following formats - * // 'devDisabledIn${productFlavor}${buildType}' - * // 'devDisabledIn${buildType}' - * - * // the root of your project, i.e. where "package.json" lives - * root: "../../", - * - * // where to put the JS bundle asset in debug mode - * jsBundleDirDebug: "$buildDir/intermediates/assets/debug", - * - * // where to put the JS bundle asset in release mode - * jsBundleDirRelease: "$buildDir/intermediates/assets/release", - * - * // where to put drawable resources / React Native assets, e.g. the ones you use via - * // require('./image.png')), in debug mode - * resourcesDirDebug: "$buildDir/intermediates/res/merged/debug", - * - * // where to put drawable resources / React Native assets, e.g. 
the ones you use via - * // require('./image.png')), in release mode - * resourcesDirRelease: "$buildDir/intermediates/res/merged/release", - * - * // by default the gradle tasks are skipped if none of the JS files or assets change; this means - * // that we don't look at files in android/ or ios/ to determine whether the tasks are up to - * // date; if you have any other folders that you want to ignore for performance reasons (gradle - * // indexes the entire tree), add them here. Alternatively, if you have JS files in android/ - * // for example, you might want to remove it from here. - * inputExcludes: ["android/**", "ios/**"], - * - * // override which node gets called and with what additional arguments - * nodeExecutableAndArgs: ["node"], - * - * // supply additional arguments to the packager - * extraPackagerArgs: [] - * ] - */ - -project.ext.vectoricons = [ - iconFontNames: [ 'MaterialIcons.ttf', 'MaterialCommunityIcons.ttf' ] -] -project.ext.react = [ - entryFile: "index.android.js", - enableHermes: false, // clean and rebuild if changing - hermesCommand: "../../node_modules/hermes-engine/%OS-BIN%/hermes", -] -apply from: "../../node_modules/react-native-vector-icons/fonts.gradle" -apply from: "../../node_modules/react-native/react.gradle" - -/** - * Set this to true to create two separate APKs instead of one: - * - An APK that only works on ARM devices - * - An APK that only works on x86 devices - * The advantage is the size of the APK is reduced by about 4MB. - * Upload all the APKs to the Play Store and people will download - * the correct one based on the CPU architecture of their device. - */ -def enableSeparateBuildPerCPUArchitecture = false - -/** - * Run Proguard to shrink the Java bytecode in release builds. - */ -def enableProguardInReleaseBuilds = false - -/** - * The preferred build flavor of JavaScriptCore. - * - * For example, to use the international variant, you can use: - * `def jscFlavor = 'org.webkit:android-jsc-intl:+'` - * - * The international variant includes ICU i18n library and necessary data - * allowing to use e.g. `Date.toLocaleString` and `String.localeCompare` that - * give correct results when using with locales other than en-US. Note that - * this variant is about 6MiB larger per architecture than default. - */ -def jscFlavor = 'org.webkit:android-jsc:+' - -/** - * Whether to enable the Hermes VM. - * - * This should be set on project.ext.react and mirrored here. If it is not set - * on project.ext.react, JavaScript will not be compiled to Hermes Bytecode - * and the benefits of using Hermes will therefore be sharply reduced. 
- */ -def enableHermes = project.ext.react.get("enableHermes", false); - -android { - compileSdkVersion rootProject.ext.compileSdkVersion - - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - - flavorDimensions "store" - - defaultConfig { - versionCode 78 - versionName "0.1911.27-beta" - applicationId "se.manyver" - minSdkVersion rootProject.ext.minSdkVersion - targetSdkVersion rootProject.ext.targetSdkVersion - missingDimensionStrategy "RNN.reactNativeVersion", "reactNative60" - ndk { - abiFilters "armeabi-v7a", "arm64-v8a" // , "x86", "x86_64" - } - aaptOptions { - ignoreAssetsPattern '!.svn:!.git:!.ds_store:!*.scc:!CVS:!thumbs.db:!picasa.ini:!*~' - } - vectorDrawables.useSupportLibrary = true - } - - // dexOptions { - // javaMaxHeapSize "4g" - // } - - productFlavors { - indie { - dimension "store" - } - - googlePlay { - dimension "store" - versionNameSuffix "-googlePlay" - targetSdkVersion rootProject.ext.targetSdkVersionForGooglePlay - } - } - - signingConfigs { - // debug { - // storeFile file('debug.keystore') - // storePassword 'android' - // keyAlias 'androiddebugkey' - // keyPassword 'android' - // } - release { - if (project.hasProperty('MYAPP_RELEASE_STORE_FILE')) { - storeFile file(MYAPP_RELEASE_STORE_FILE) - storePassword MYAPP_RELEASE_STORE_PASSWORD - keyAlias MYAPP_RELEASE_KEY_ALIAS - keyPassword MYAPP_RELEASE_KEY_PASSWORD - } - } - } - - splits { - abi { - reset() - enable enableSeparateBuildPerCPUArchitecture - universalApk false // If true, also generate a universal APK - include "armeabi-v7a", "arm64-v8a"//, "x86", "x86_64" - } - } - - buildTypes { - debug { - signingConfig signingConfigs.debug - } - release { - // Caution! In production, you need to generate your own keystore file. - // see https://facebook.github.io/react-native/docs/signed-apk-android. - // signingConfig signingConfigs.debug - minifyEnabled enableProguardInReleaseBuilds - proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro" - if (project.hasProperty('MYAPP_RELEASE_STORE_FILE')) { - signingConfig signingConfigs.release - } - } - } - - // applicationVariants are e.g. 
debug, release - applicationVariants.all { variant -> - variant.outputs.each { output -> - // For each separate APK per architecture, set a unique version code as described here: - // https://developer.android.com/studio/build/configure-apk-splits.html - def versionCodes = ["armeabi-v7a": 1, "arm64-v8a": 2] //, "x86":3, "x86_64":4] - def abi = output.getFilter(OutputFile.ABI) - if (abi != null) { // null for the universal-debug, universal-release variants - output.versionCodeOverride = - versionCodes.get(abi) * 1048576 + defaultConfig.versionCode - } - } - } -} - -def acraVersion = '5.3.0' - -dependencies { - implementation "ch.acra:acra-core:$acraVersion" - implementation "ch.acra:acra-mail:$acraVersion" - implementation "ch.acra:acra-dialog:$acraVersion" - implementation project(':nodejs-mobile-react-native') - implementation project(':@react-native-community_async-storage') - implementation project(':react-native-bluetooth-socket-bridge') - implementation project(':react-native-bluetooth-status') - implementation project(':react-native-dialogs') - implementation project(':react-native-vector-icons') - implementation project(':react-native-os-staltz') - implementation project(':react-native-randombytes') - implementation project(':react-native-image-crop-picker') - implementation project(':react-native-navigation') - implementation project(':react-native-android-local-notification') - implementation project(':react-native-android-wifi') - implementation project(':react-native-has-internet') - implementation project(':react-native-flag-secure-android') - implementation project(':react-native-orientation-locker') - implementation project(':react-native-fs') - implementation project(':react-native-splash-screen') - implementation project(':@react-native-community_viewpager') - implementation fileTree(dir: "libs", include: ["*.jar"]) - implementation 'androidx.appcompat:appcompat:1.0.2' - implementation("com.facebook.react:react-native:+") { - force = true - } - implementation 'com.facebook.fresco:animated-gif:1.3.0' - - if (enableHermes) { - def hermesPath = "../../node_modules/hermes-engine/android/"; - debugImplementation files(hermesPath + "hermes-debug.aar") - releaseImplementation files(hermesPath + "hermes-release.aar") - } else { - implementation jscFlavor - } -} - -// Run this once to be able to run the application with BUCK -// puts all compile dependencies into folder libs for BUCK to use -task copyDownloadableDepsToLibs(type: Copy) { - from configurations.compile - into 'libs' -} - -apply from: file("../../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesAppBuildGradle(project) \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/build.gradle b/tests/source-files/se.manyver/android/build.gradle deleted file mode 100644 index de8f5e77..00000000 --- a/tests/source-files/se.manyver/android/build.gradle +++ /dev/null @@ -1,64 +0,0 @@ -// Top-level build file where you can add configuration options common to all sub-projects/modules. 
- -buildscript { - ext { - minSdkVersion = 21 - targetSdkVersion = 26 - targetSdkVersionForGooglePlay = 28 - compileSdkVersion = 28 - } - repositories { - google() - jcenter() - mavenLocal() - mavenCentral() - } - dependencies { - classpath("com.android.tools.build:gradle:3.4.2") - - // NOTE: Do not place your application dependencies here; they belong - // in the individual module build.gradle files - } -} - -allprojects { - repositories { - mavenCentral() - mavenLocal() - maven { - // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm - url("$rootDir/../node_modules/react-native/android") - } - maven { - // Android JSC is installed from npm - url("$rootDir/../node_modules/jsc-android/dist") - } - maven { - url 'https://maven.google.com' - } - maven { - url "https://jitpack.io" - } - google() - jcenter() - } -} - -subprojects { subproject -> - afterEvaluate { - if ((subproject.plugins.hasPlugin('android') || subproject.plugins.hasPlugin('android-library'))) { - android { - variantFilter { variant -> - def names = variant.flavors*.name - if (names.contains("reactNative51")) setIgnore(true) - if (names.contains("reactNative55")) setIgnore(true) - if (names.contains("reactNative56")) setIgnore(true) - if (names.contains("reactNative57")) setIgnore(true) - if (names.contains("reactNative57_5")) setIgnore(true) - if (names.contains("reactNative57WixFork")) setIgnore(true) - if (names.contains("reactNative59")) setIgnore(true) - } - } - } - } -} diff --git a/tests/source-files/se.manyver/android/gradle.properties b/tests/source-files/se.manyver/android/gradle.properties deleted file mode 100644 index 33fd4ac9..00000000 --- a/tests/source-files/se.manyver/android/gradle.properties +++ /dev/null @@ -1,22 +0,0 @@ -# Project-wide Gradle settings. - -# IDE (e.g. Android Studio) users: -# Gradle settings configured through the IDE *will override* -# any settings specified in this file. - -# For more details on how to configure your build environment visit -# http://www.gradle.org/docs/current/userguide/build_environment.html - -# Specifies the JVM arguments used for the daemon process. -# The setting is particularly useful for tweaking memory settings. -# Default value: -Xmx10248m -XX:MaxPermSize=256m -# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 - -# When configured, Gradle will run in incubating parallel mode. -# This option should only be used with decoupled projects. 
More details, visit -# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects -# org.gradle.parallel=true - -android.useAndroidX=true -android.enableJetifier=true -org.gradle.jvmargs=-Xmx4608M \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index a14cb8c9..00000000 --- a/tests/source-files/se.manyver/android/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,5 +0,0 @@ -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.5-all.zip \ No newline at end of file diff --git a/tests/source-files/se.manyver/android/settings.gradle b/tests/source-files/se.manyver/android/settings.gradle deleted file mode 100644 index 6a4c91ed..00000000 --- a/tests/source-files/se.manyver/android/settings.gradle +++ /dev/null @@ -1,40 +0,0 @@ -rootProject.name = 'Manyverse' -apply from: file("../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesSettingsGradle(settings) -include ':@react-native-community_async-storage' -project(':@react-native-community_async-storage').projectDir = new File(rootProject.projectDir, '../node_modules/@react-native-community/async-storage/android') -include ':react-native-bluetooth-status' -project(':react-native-bluetooth-status').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-bluetooth-status/android') -include ':react-native-bluetooth-socket-bridge' -project(':react-native-bluetooth-socket-bridge').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-bluetooth-socket-bridge/android') -include ':react-native-image-crop-picker' -project(':react-native-image-crop-picker').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-image-crop-picker/android') -include ':nodejs-mobile-react-native' -project(':nodejs-mobile-react-native').projectDir = new File(rootProject.projectDir, '../node_modules/nodejs-mobile-react-native/android') -include ':react-native-dialogs' -project(':react-native-dialogs').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-dialogs/android') -include ':react-native-vector-icons' -project(':react-native-vector-icons').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-vector-icons/android') -include ':react-native-os-staltz' -project(':react-native-os-staltz').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-os-staltz/android') -include ':react-native-randombytes' -project(':react-native-randombytes').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-randombytes/android') -include ':react-native-navigation' -project(':react-native-navigation').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-navigation/lib/android/app/') -include ':react-native-android-local-notification' -project(':react-native-android-local-notification').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-android-local-notification/android') -include ':react-native-android-wifi' -project(':react-native-android-wifi').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-android-wifi/android') -include ':react-native-has-internet' 
-project(':react-native-has-internet').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-has-internet/android') -include ':react-native-flag-secure-android' -project(':react-native-flag-secure-android').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-flag-secure-android/android') -include ':react-native-orientation-locker', ':app' -project(':react-native-orientation-locker').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-orientation-locker/android') -include ':react-native-fs' -project(':react-native-fs').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-fs/android') -include ':react-native-splash-screen' -project(':react-native-splash-screen').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-splash-screen/android') -include ':@react-native-community_viewpager' -project(':@react-native-community_viewpager').projectDir = new File(rootProject.projectDir, '../node_modules/@react-native-community/viewpager/android') - -include ':app' diff --git a/tests/source-files/se.manyver/app.json b/tests/source-files/se.manyver/app.json deleted file mode 100644 index 8931f1dd..00000000 --- a/tests/source-files/se.manyver/app.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "Manyverse", - "displayName": "Manyverse" -} diff --git a/tests/source-files/se.manyver/index.android.js b/tests/source-files/se.manyver/index.android.js deleted file mode 100644 index aac66bea..00000000 --- a/tests/source-files/se.manyver/index.android.js +++ /dev/null @@ -1,17 +0,0 @@ -/* Copyright (C) 2018-2019 The Manyverse Authors. - * - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -import 'react-native-ssb-shims'; -import {run} from 'cycle-native-navigation'; -import { - screens, - drivers, - welcomeLayout, - defaultNavOptions, -} from './lib/frontend/index'; -// import './snoopy'; // Log and debug the React Native JS<-->Native Bridge - -run(screens, drivers, welcomeLayout, defaultNavOptions); diff --git a/tests/source-files/se.manyver/package.json b/tests/source-files/se.manyver/package.json deleted file mode 100644 index 08af1994..00000000 --- a/tests/source-files/se.manyver/package.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "name": "manyverse", - "version": "0.1911.27-beta", - "private": true, - "scripts": { - "postinstall": "patch-package", - "lib": "tsc", - "clean-bundler": "watchman watch-del-all && rm -rf $TMPDIR/react-*", - "clean-android": "adb uninstall se.manyver && cd android && ./gradlew clean", - "full-clean": "npm run clean-android && npm run clean-bundler && rm -rf node_modules && rm -rf lib", - "propagate-replacements": "propagate-replacement-fields --field=react-native", - "build-backend": "./tools/build-backend && ./tools/minify-backend", - "build-android-assets": "npm run lib && npm run propagate-replacements && npm run build-backend", - "build-android-debug": "npm run build-android-assets && react-native run-android --variant=indieDebug", - "build-android-release": "npm run build-android-assets && cd android && ./gradlew assembleRelease && cd ..", - "start": "npm run lib && npm run propagate-replacements && react-native start", - "psdr": "./tools/print-service-desk-report.js", - "test-e2e-android": "./tools/test-e2e-android", - "changelog": "npm run update-repo-changelog && npm run update-dat-latest-readme", - "update-repo-changelog": "./tools/update-repo-changelog.js", - "update-dat-latest-readme": "./tools/update-dat-latest-readme.js", - "echo-ssb-post": "./tools/echo-ssb-post.js", - "update-version": "./tools/update-version.js", - "dat-release": "./tools/dat-release", - "commit-release": "./tools/commit-release", - "release": "npm run update-version && npm run clean-bundler && npm run clean-android && npm run build-android-release && npm run test-e2e-android && npm run changelog && npm run commit-release && npm run dat-release && npm run echo-ssb-post" - }, - "dependencies": { - "@cycle/isolate": "5.1.0", - "@cycle/react": "2.6.0", - "@cycle/run": "5.3.0", - "@cycle/state": "1.3.0", - "@react-native-community/viewpager": "3.1.0", - "@types/node": "~12.7.5", - "@types/react": "16.9.x", - "@types/react-native": "0.60.23", - "@types/react-native-vector-icons": "6.4.3", - "buffer": "5.4.3", - "color-hash": "1.0.3", - "cycle-native-alert": "1.1.0", - "cycle-native-android-local-notification": "1.1.0", - "cycle-native-asyncstorage": "2.0.0", - "cycle-native-clipboard": "1.0.0", - "cycle-native-keyboard": "1.2.0", - "cycle-native-linking": "1.1.0", - "cycle-native-navigation": "6.1.0", - "cycle-native-share": "1.1.0", - "cycle-native-toastandroid": "1.1.0", - "mdast-normalize-react-native": "3.2.x", - "nodejs-mobile-react-native": "0.5.0", - "path": "~0.12.7", - "promisify-tuple": "1.0.0", - "pull-flat-list": "2.10.0", - "pull-pushable": "2.2.0", - "pull-stream": "3.6.14", - "pull-thenable": "1.0.0", - "react": "16.9.0", - "react-human-time": "^1.1.0", - "react-markdown": "4.0.2", - "react-native": "0.61.5", - "react-native-android-local-notification": "3.0.0", - "react-native-android-wifi": "0.0.41", - "react-native-bluetooth-socket-bridge": "1.2.0", - "react-native-bluetooth-status": "1.3.0", - "react-native-dialogs": "1.1.0", - 
"react-native-flag-secure-android": "1.0.2", - "react-native-floating-action": "1.19.1", - "react-native-fs": "~2.16.2", - "react-native-has-internet": "4.0.0", - "react-native-image-crop-picker": "~0.26.1", - "react-native-image-view": "~2.1.6", - "react-native-navigation": "4.0.2", - "react-native-orientation-locker": "1.1.7", - "react-native-popup-menu": "0.15.6", - "react-native-splash-screen": "^3.2.0", - "react-native-ssb-client": "7.0.0", - "react-native-ssb-shims": "4.6.0", - "react-native-swiper": "1.5.14", - "react-native-vector-icons": "6.6.0", - "react-propify-methods": "16.3.1", - "react-xstream-hoc": "1.0.0", - "remark": "~9.0.0", - "remark-gemoji-to-emoji": "1.1.0", - "remark-images-to-ssb-serve-blobs": "2.1.0-1", - "remark-linkify-regex": "1.0.0", - "remark-ssb-mentions": "~2.0.0", - "rn-viewpager": "1.2.9", - "ssb-cached-about": "~1.0.0", - "ssb-conn-query": "~0.4.4", - "ssb-ref": "2.13.9", - "ssb-room": "~1.1.1", - "ssb-serve-blobs": "2.1.0", - "ssb-threads": "3.6.0", - "ssb-typescript": "1.4.0", - "xstream": "11.11.0", - "xstream-backoff": "1.0", - "xstream-between": "1.0", - "xstream-from-callback": "1.0", - "xstream-from-pull-stream": "1.1", - "xstream-sample": "1.0" - }, - "devDependencies": { - "@babel/core": "~7.7.2", - "@babel/runtime": "~7.7.2", - "add-stream": "~1.0.0", - "conventional-changelog": "~3.1.15", - "husky": "^3.1.0", - "into-stream": "~5.1.1", - "jase": "1.2.0", - "left-pad": "1.3.0", - "metro-react-native-babel-preset": "^0.56.0", - "patch-package": "6.2.0", - "prettier": "~1.19.1", - "pretty-quick": "~2.0.1", - "propagate-replacement-fields": "1.2.0", - "react-native-version": "3.2.0", - "rn-snoopy": "2.0.2", - "tslint": "~5.20.1", - "typescript": "~3.7.2" - }, - "optionalDependencies": { - "appium": "1.14.0", - "tap-spec": "5.0.0", - "tape": "~4.9.1", - "wd": "1.11.3" - }, - "husky": { - "hooks": { - "pre-commit": "pretty-quick --staged --pattern \"**/*.*(ts|tsx|js|jsx)\"" - } - }, - "react-native": { - "os": "react-native-os-staltz" - } -} diff --git a/tests/source-files/se.manyver/react-native.config.js b/tests/source-files/se.manyver/react-native.config.js deleted file mode 100644 index 26adf2e4..00000000 --- a/tests/source-files/se.manyver/react-native.config.js +++ /dev/null @@ -1,18 +0,0 @@ -module.exports = { - dependencies: { - 'nodejs-mobile-react-native': { - // Ignored because we need to set this up manually in order to - // call some APIs of this library directly in our MainActivity.java - platforms: { - android: null, - }, - }, - 'react-native-bluetooth-socket-bridge': { - // This package needs some config passed as arguments to the constructor - // so we need to "link" it manually in MainApplication.java - platforms: { - android: null, - }, - }, - }, -}; diff --git a/tests/source-files/ut.ewh.audiometrytest/app/build.gradle b/tests/source-files/ut.ewh.audiometrytest/app/build.gradle deleted file mode 100644 index 640b6678..00000000 --- a/tests/source-files/ut.ewh.audiometrytest/app/build.gradle +++ /dev/null @@ -1,41 +0,0 @@ -apply plugin: 'android' - -android { - compileSdkVersion 21 - buildToolsVersion "21.1.1" - - defaultConfig { - minSdkVersion 8 - targetSdkVersion 21 - versionCode 14 - versionName "1.65" - } - signingConfigs{ - releaseSign{ - storeFile file("/Users/reecestevens/keys/keystore.jks") - //storePassword System.console().readLine("\nKeystore password: ") - storePassword System.getenv("KSTOREPWD") - keyAlias "AppKey" - //keyPassword System.console().readLine("\nKey password: ") - keyPassword System.getenv("KEYPWD") - } 
- } - buildTypes { - release { - minifyEnabled true; - debuggable false - signingConfig signingConfigs.releaseSign - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt' - } - } -} - -repositories { - maven { url "https://jitpack.io" } -} - -dependencies { - compile 'com.android.support:appcompat-v7:21.0.2' - compile 'com.github.PhilJay:MPAndroidChart:v2.0.9' - compile fileTree(dir: 'libs', include: ['*.jar']) -} diff --git a/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml b/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml deleted file mode 100644 index 1ef6bb12..00000000 --- a/tests/source-files/ut.ewh.audiometrytest/app/src/main/AndroidManifest.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/source-files/ut.ewh.audiometrytest/build.gradle b/tests/source-files/ut.ewh.audiometrytest/build.gradle deleted file mode 100644 index a90b6488..00000000 --- a/tests/source-files/ut.ewh.audiometrytest/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -// Top-level build file where you can add configuration options common to all sub-projects/modules. - -buildscript { - repositories { - mavenCentral() - } - dependencies { - classpath 'com.android.tools.build:gradle:1.0.0-rc4' - } -} - -allprojects { - repositories { - mavenCentral() - } -} - -buildscript { - repositories { - mavenCentral() - } - dependencies { - classpath 'org.owasp:dependency-check-gradle:1.3.2.1' - } -} -apply plugin: 'org.owasp.dependencycheck' -dependencyCheck { - format='JSON' -} diff --git a/tests/source-files/ut.ewh.audiometrytest/settings.gradle b/tests/source-files/ut.ewh.audiometrytest/settings.gradle deleted file mode 100644 index e7b4def4..00000000 --- a/tests/source-files/ut.ewh.audiometrytest/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -include ':app' diff --git a/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties b/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index a7a1a8ca..00000000 --- a/tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Wed Jan 30 10:59:12 CET 2019 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip diff --git a/tests/stats/known_apks.txt b/tests/stats/known_apks.txt new file mode 100644 index 00000000..d25073a0 --- /dev/null +++ b/tests/stats/known_apks.txt @@ -0,0 +1,16 @@ +com.politedroid_3.apk com.politedroid 2017-06-23 +com.politedroid_4.apk com.politedroid 2017-06-23 +com.politedroid_5.apk com.politedroid 2017-06-23 +com.politedroid_6.apk com.politedroid 2017-06-23 +duplicate.permisssions_9999999.apk duplicate.permisssions 2017-12-22 +fake.ota.update_1234.zip fake.ota.update 2016-03-10 +info.zwanenburg.caffeinetile_4.apk info.zwanenburg.caffeinetile 2018-10-10 +no.min.target.sdk_987.apk no.min.target.sdk 2018-10-10 +obb.main.oldversion_1444412523.apk obb.main.oldversion 2013-12-31 +obb.main.twoversions_1101613.apk obb.main.twoversions 2015-10-12 +obb.main.twoversions_1101615.apk obb.main.twoversions 2016-01-01 +obb.main.twoversions_1101617.apk obb.main.twoversions 2016-06-20 +obb.mainpatch.current_1619.apk obb.mainpatch.current 2016-04-23 +obb.mainpatch.current_1619_another-release-key.apk 
obb.mainpatch.current 2017-06-01 +souch.smsbypass_9.apk souch.smsbypass 2018-04-26 +urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk info.guardianproject.urzip 2016-06-23 diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100755 index ba18caa6..00000000 --- a/tests/test_api.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 - -import os -import shutil -import unittest -from pathlib import Path -from unittest import mock - -import fdroidserver -from fdroidserver import common, signindex - -from .shared_test_code import GP_FINGERPRINT, mkdtemp - -basedir = Path(__file__).parent - - -class ApiTest(unittest.TestCase): - """Test the public API in the base "fdroidserver" module - - This is mostly a smokecheck to make sure the public API as - declared in fdroidserver/__init__.py is working. The functions - are all implemented in other modules, with their own tests. - - """ - - def setUp(self): - os.chdir(basedir) - - self._td = mkdtemp() - self.testdir = self._td.name - - common.config = None - config = common.read_config() - config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') - common.config = config - signindex.config = config - - def tearDown(self): - self._td.cleanup() - - def test_download_repo_index_no_fingerprint(self): - with self.assertRaises(fdroidserver.VerificationException): - fdroidserver.download_repo_index("http://example.org") - - @mock.patch('fdroidserver.net.http_get') - def test_download_repo_index_url_parsing(self, mock_http_get): - """Test whether it is trying to download the right file - - This passes the URL back via the etag return value just as a - hack to check which URL was actually attempted. - - """ - mock_http_get.side_effect = lambda url, etag, timeout: (None, url) - repo_url = 'https://example.org/fdroid/repo' - index_url = 'https://example.org/fdroid/repo/index-v1.jar' - for url in (repo_url, index_url): - _ignored, etag_set_to_url = fdroidserver.download_repo_index( - url, verify_fingerprint=False - ) - self.assertEqual(index_url, etag_set_to_url) - - @mock.patch('fdroidserver.net.http_get') - def test_download_repo_index_v1_url_parsing(self, mock_http_get): - """Test whether it is trying to download the right file - - This passes the URL back via the etag return value just as a - hack to check which URL was actually attempted. 
- - """ - mock_http_get.side_effect = lambda url, etag, timeout: (None, url) - repo_url = 'https://example.org/fdroid/repo' - index_url = 'https://example.org/fdroid/repo/index-v1.jar' - for url in (repo_url, index_url): - _ignored, etag_set_to_url = fdroidserver.download_repo_index_v1( - url, verify_fingerprint=False - ) - self.assertEqual(index_url, etag_set_to_url) - - @mock.patch('fdroidserver.net.download_using_mirrors') - def test_download_repo_index_v2(self, mock_download_using_mirrors): - """Basically a copy of IndexTest.test_download_repo_index_v2""" - mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( - self.testdir, 'repo', os.path.basename(mirrors[0]['url']) - ) - os.chdir(self.testdir) - signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') - os.mkdir('repo') - shutil.copy(basedir / 'repo' / 'entry.json', 'repo') - shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') - signindex.sign_index('repo', 'entry.json') - repo_url = 'https://fake.url/fdroid/repo' - entry_url = 'https://fake.url/fdroid/repo/entry.jar' - index_url = 'https://fake.url/fdroid/repo/index-v2.json' - fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT - slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT - for url in (repo_url, entry_url, index_url, fingerprint_url, slash_url): - data, _ignored = fdroidserver.download_repo_index_v2( - url, verify_fingerprint=False - ) - self.assertEqual(['repo', 'packages'], list(data)) - self.assertEqual( - 'My First F-Droid Repo Demo', data['repo']['name']['en-US'] - ) diff --git a/tests/test_build.py b/tests/test_build.py deleted file mode 100755 index 578837ed..00000000 --- a/tests/test_build.py +++ /dev/null @@ -1,1092 +0,0 @@ -#!/usr/bin/env python3 - -import os -import shutil -import sys -import tempfile -import textwrap -import unittest -from pathlib import Path -from unittest import mock - -import yaml - -import fdroidserver.build -import fdroidserver.common - -from .shared_test_code import TmpCwd, mkdtemp - - -class FakeProcess: - output = 'fake output' - returncode = 0 - - def __init__(self, args, **kwargs): - print('FakeFDroidPopen', args, kwargs) - - -class Options: - keep_when_not_allowed = False - - -class BuildTest(unittest.TestCase): - '''fdroidserver/build.py''' - - def setUp(self): - self.basedir = str(Path(__file__).parent) - os.chdir(self.basedir) - fdroidserver.common.config = None - fdroidserver.build.config = None - fdroidserver.build.options = None - self._td = mkdtemp() - self.testdir = self._td.name - - def tearDown(self): - os.chdir(self.basedir) - self._td.cleanup() - - def create_fake_android_home(self, d): - os.makedirs(os.path.join(d, 'build-tools'), exist_ok=True) - os.makedirs(os.path.join(d, 'platform-tools'), exist_ok=True) - os.makedirs(os.path.join(d, 'tools'), exist_ok=True) - - @unittest.skipIf(sys.byteorder == 'big', "androguard is not ported to big-endian") - def test_get_apk_metadata(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.build.config = config - try: - config['aapt'] = fdroidserver.common.find_sdk_tools_cmd('aapt') - except fdroidserver.exception.FDroidException: - pass # aapt is not required if androguard is present - - testcases = [ - ( - 'repo/obb.main.twoversions_1101613.apk', - 'obb.main.twoversions', - 1101613, - '0.1', - None, - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_1.apk', - 'org.bitbucket.tickytacky.mirrormirror', - 1, - '1.0', - None, - 
), - ( - 'org.bitbucket.tickytacky.mirrormirror_2.apk', - 'org.bitbucket.tickytacky.mirrormirror', - 2, - '1.0.1', - None, - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_3.apk', - 'org.bitbucket.tickytacky.mirrormirror', - 3, - '1.0.2', - None, - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_4.apk', - 'org.bitbucket.tickytacky.mirrormirror', - 4, - '1.0.3', - None, - ), - ( - 'org.dyndns.fules.ck_20.apk', - 'org.dyndns.fules.ck', - 20, - 'v1.6pre2', - [ - 'arm64-v8a', - 'armeabi', - 'armeabi-v7a', - 'mips', - 'mips64', - 'x86', - 'x86_64', - ], - ), - ('urzip.apk', 'info.guardianproject.urzip', 100, '0.1', None), - ('urzip-badcert.apk', 'info.guardianproject.urzip', 100, '0.1', None), - ('urzip-badsig.apk', 'info.guardianproject.urzip', 100, '0.1', None), - ('urzip-release.apk', 'info.guardianproject.urzip', 100, '0.1', None), - ( - 'urzip-release-unsigned.apk', - 'info.guardianproject.urzip', - 100, - '0.1', - None, - ), - ('repo/com.politedroid_3.apk', 'com.politedroid', 3, '1.2', None), - ('repo/com.politedroid_4.apk', 'com.politedroid', 4, '1.3', None), - ('repo/com.politedroid_5.apk', 'com.politedroid', 5, '1.4', None), - ('repo/com.politedroid_6.apk', 'com.politedroid', 6, '1.5', None), - ( - 'repo/duplicate.permisssions_9999999.apk', - 'duplicate.permisssions', - 9999999, - '', - None, - ), - ( - 'repo/info.zwanenburg.caffeinetile_4.apk', - 'info.zwanenburg.caffeinetile', - 4, - '1.3', - None, - ), - ( - 'repo/obb.main.oldversion_1444412523.apk', - 'obb.main.oldversion', - 1444412523, - '0.1', - None, - ), - ( - 'repo/obb.mainpatch.current_1619_another-release-key.apk', - 'obb.mainpatch.current', - 1619, - '0.1', - None, - ), - ( - 'repo/obb.mainpatch.current_1619.apk', - 'obb.mainpatch.current', - 1619, - '0.1', - None, - ), - ( - 'repo/obb.main.twoversions_1101613.apk', - 'obb.main.twoversions', - 1101613, - '0.1', - None, - ), - ( - 'repo/obb.main.twoversions_1101615.apk', - 'obb.main.twoversions', - 1101615, - '0.1', - None, - ), - ( - 'repo/obb.main.twoversions_1101617.apk', - 'obb.main.twoversions', - 1101617, - '0.1', - None, - ), - ( - 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', - 'info.guardianproject.urzip', - 100, - '0.1', - None, - ), - ] - for apkfilename, appid, versionCode, versionName, nativecode in testcases: - app = fdroidserver.metadata.App() - app.id = appid - build = fdroidserver.metadata.Build() - build.buildjni = ['yes'] if nativecode else build.buildjni - build.versionCode = versionCode - build.versionName = versionName - vc, vn = fdroidserver.build.get_metadata_from_apk(app, build, apkfilename) - self.assertEqual(versionCode, vc) - self.assertEqual(versionName, vn) - - @mock.patch('fdroidserver.common.get_apk_id') - @mock.patch('fdroidserver.build.FDroidPopen') - @mock.patch('fdroidserver.common.is_debuggable_or_testOnly', lambda f: False) - @mock.patch('fdroidserver.common.get_native_code', lambda f: 'x86') - @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') - def test_build_local_maven(self, fake_FDroidPopen, fake_get_apk_id): - """Test build_local() with a maven project""" - - # pylint: disable=unused-argument - def _side_effect(cmd, cwd=None): - p = mock.MagicMock() - p.output = '[INFO] fake apkbuilder target/no.apk' - with open(os.path.join(self.testdir, 'target', 'no.apk'), 'w') as fp: - fp.write('placeholder') - p.returncode = 0 - return p - - fake_FDroidPopen.side_effect = _side_effect - os.chdir(self.testdir) - os.mkdir('target') - config = dict() - 
fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.build.config = config - fdroidserver.build.options = mock.Mock() - fdroidserver.build.options.scan_binary = False - fdroidserver.build.options.notarball = True - fdroidserver.build.options.skipscan = False - - app = fdroidserver.metadata.App() - app.id = 'mocked.app.id' - build = fdroidserver.metadata.Build() - build.commit = '1.0' - build.versionCode = 1 - build.versionName = '1.0' - fake_get_apk_id.side_effect = lambda f: ( - app.id, - build.versionCode, - build.versionName, - ) - vcs = mock.Mock() - - build.maven = 'yes@..' - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=self.testdir, - output_dir=self.testdir, - log_dir=os.getcwd(), - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=True, - refresh=False, - ) - - build.maven = 'yes' - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=self.testdir, - output_dir=self.testdir, - log_dir=os.getcwd(), - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=True, - refresh=False, - ) - - @mock.patch('sdkmanager.build_package_list', lambda use_net: None) - def test_build_local_ndk(self): - """Test if `fdroid build` detects installed NDKs and auto-installs when missing""" - with tempfile.TemporaryDirectory() as testdir, TmpCwd( - testdir - ), tempfile.TemporaryDirectory() as sdk_path: - config = {'ndk_paths': {}, 'sdk_path': sdk_path} - fdroidserver.common.config = config - fdroidserver.build.config = config - fdroidserver.build.options = mock.Mock() - fdroidserver.build.options.scan_binary = False - fdroidserver.build.options.notarball = True - fdroidserver.build.options.skipscan = True - - app = fdroidserver.metadata.App() - app.id = 'mocked.app.id' - build = fdroidserver.metadata.Build() - build.commit = '1.0' - build.output = app.id + '.apk' - build.versionCode = 1 - build.versionName = '1.0' - build.ndk = 'r21e' # aka 21.4.7075529 - ndk_version = '21.4.7075529' - ndk_dir = Path(config['sdk_path']) / 'ndk' / ndk_version - vcs = mock.Mock() - - def make_fake_apk(output, build): - with open(build.output, 'w') as fp: - fp.write('APK PLACEHOLDER') - return output - - # pylint: disable=unused-argument - def fake_sdkmanager_install(to_install, android_home=None): - ndk_dir.mkdir(parents=True) - self.assertNotEqual(ndk_version, to_install) # converts r21e to version - with (ndk_dir / 'source.properties').open('w') as fp: - fp.write('Pkg.Revision = %s\n' % ndk_version) - - # use "as _ignored" just to make a pretty layout - with mock.patch( - 'fdroidserver.common.replace_build_vars', wraps=make_fake_apk - ) as _ignored, mock.patch( - 'fdroidserver.common.get_native_code', return_value='x86' - ) as _ignored, mock.patch( - 'fdroidserver.common.get_apk_id', - return_value=(app.id, build.versionCode, build.versionName), - ) as _ignored, mock.patch( - 'fdroidserver.common.sha256sum', - return_value='ad7ce5467e18d40050dc51b8e7affc3e635c85bd8c59be62de32352328ed467e', - ) as _ignored, mock.patch( - 'fdroidserver.common.is_debuggable_or_testOnly', - return_value=False, - ) as _ignored, mock.patch( - 'fdroidserver.build.FDroidPopen', FakeProcess - ) as _ignored, mock.patch( - 'sdkmanager.install', wraps=fake_sdkmanager_install - ) as _ignored, mock.patch( - 'fdroidserver.common.get_source_date_epoch', lambda f: '1234567890' - ) as _ignored: - _ignored # silence the linters - with self.assertRaises( - fdroidserver.exception.FDroidException, - msg="No NDK setup, `fdroid 
build` should fail with error", - ): - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=testdir, - output_dir=testdir, - log_dir=None, - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=False, - refresh=False, - ) - # now run `fdroid build --onserver` - print('now run `fdroid build --onserver`') - self.assertFalse(ndk_dir.exists()) - self.assertFalse('r21e' in config['ndk_paths']) - self.assertFalse(ndk_version in config['ndk_paths']) - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=testdir, - output_dir=testdir, - log_dir=os.getcwd(), - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=True, - refresh=False, - ) - self.assertTrue(ndk_dir.exists()) - self.assertTrue(os.path.exists(config['ndk_paths'][ndk_version])) - # All paths in the config must be strings, never pathlib.Path instances - self.assertIsInstance(config['ndk_paths'][ndk_version], str) - - @mock.patch('sdkmanager.build_package_list', lambda use_net: None) - @mock.patch('fdroidserver.build.FDroidPopen', FakeProcess) - @mock.patch('fdroidserver.common.get_native_code', lambda _ignored: 'x86') - @mock.patch('fdroidserver.common.is_debuggable_or_testOnly', lambda _ignored: False) - @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') - @mock.patch( - 'fdroidserver.common.sha256sum', - lambda f: 'ad7ce5467e18d40050dc51b8e7affc3e635c85bd8c59be62de32352328ed467e', - ) - def test_build_local_ndk_some_installed(self): - """Test if `fdroid build` detects installed NDKs and auto-installs when missing""" - with tempfile.TemporaryDirectory() as testdir, TmpCwd( - testdir - ), tempfile.TemporaryDirectory() as sdk_path: - ndk_r24 = os.path.join(sdk_path, 'ndk', '24.0.8215888') - os.makedirs(ndk_r24) - with open(os.path.join(ndk_r24, 'source.properties'), 'w') as fp: - fp.write('Pkg.Revision = 24.0.8215888\n') - config = {'ndk_paths': {'r24': ndk_r24}, 'sdk_path': sdk_path} - fdroidserver.common.config = config - fdroidserver.build.config = config - fdroidserver.build.options = mock.Mock() - fdroidserver.build.options.scan_binary = False - fdroidserver.build.options.notarball = True - fdroidserver.build.options.skipscan = True - - app = fdroidserver.metadata.App() - app.id = 'mocked.app.id' - build = fdroidserver.metadata.Build() - build.commit = '1.0' - build.output = app.id + '.apk' - build.versionCode = 1 - build.versionName = '1.0' - build.ndk = 'r21e' # aka 21.4.7075529 - ndk_version = '21.4.7075529' - ndk_dir = Path(config['sdk_path']) / 'ndk' / ndk_version - vcs = mock.Mock() - - def make_fake_apk(output, build): - with open(build.output, 'w') as fp: - fp.write('APK PLACEHOLDER') - return output - - # pylint: disable=unused-argument - def fake_sdkmanager_install(to_install, android_home=None): - ndk_dir.mkdir(parents=True) - self.assertNotEqual(ndk_version, to_install) # converts r21e to version - with (ndk_dir / 'source.properties').open('w') as fp: - fp.write('Pkg.Revision = %s\n' % ndk_version) - - # use "as _ignored" just to make a pretty layout - with mock.patch( - 'fdroidserver.common.replace_build_vars', wraps=make_fake_apk - ) as _ignored, mock.patch( - 'fdroidserver.common.get_apk_id', - return_value=(app.id, build.versionCode, build.versionName), - ) as _ignored, mock.patch( - 'sdkmanager.install', wraps=fake_sdkmanager_install - ) as _ignored: - _ignored # silence the linters - self.assertFalse(ndk_dir.exists()) - self.assertFalse('r21e' in config['ndk_paths']) - self.assertFalse(ndk_version in 
config['ndk_paths']) - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=testdir, - output_dir=testdir, - log_dir=os.getcwd(), - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=True, - refresh=False, - ) - self.assertTrue(ndk_dir.exists()) - self.assertTrue(os.path.exists(config['ndk_paths'][ndk_version])) - - @mock.patch('fdroidserver.common.get_source_date_epoch', lambda f: '1234567890') - def test_build_local_clean(self): - """Test if `fdroid build` cleans ant and gradle build products""" - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.build.config = config - fdroidserver.build.options = mock.Mock() - fdroidserver.build.options.scan_binary = False - fdroidserver.build.options.notarball = True - fdroidserver.build.options.skipscan = False - - app = fdroidserver.metadata.App() - app.id = 'mocked.app.id' - build = fdroidserver.metadata.Build() - build.commit = '1.0' - build.output = app.id + '.apk' - build.scandelete = ['baz.so'] - build.scanignore = ['foo.aar'] - build.versionCode = 1 - build.versionName = '1.0' - vcs = mock.Mock() - - os.mkdir('reports') - os.mkdir('target') - - for f in ('baz.so', 'foo.aar', 'gradle-wrapper.jar'): - with open(f, 'w') as fp: - fp.write('placeholder') - self.assertTrue(os.path.exists(f)) - - os.mkdir('build') - os.mkdir('build/reports') - with open('build.gradle', 'w', encoding='utf-8') as fp: - fp.write('// placeholder') - - os.mkdir('bin') - os.mkdir('gen') - with open('build.xml', 'w', encoding='utf-8') as fp: - fp.write( - textwrap.dedent( - """ - - - - """ - ) - ) - - def make_fake_apk(output, build): - with open(build.output, 'w') as fp: - fp.write('APK PLACEHOLDER') - return output - - with mock.patch('fdroidserver.common.replace_build_vars', wraps=make_fake_apk): - with mock.patch('fdroidserver.common.get_native_code', return_value='x86'): - with mock.patch( - 'fdroidserver.common.get_apk_id', - return_value=(app.id, build.versionCode, build.versionName), - ): - with mock.patch( - 'fdroidserver.common.is_debuggable_or_testOnly', - return_value=False, - ): - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=self.testdir, - output_dir=self.testdir, - log_dir=None, - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=False, - refresh=False, - ) - - self.assertTrue(os.path.exists('foo.aar')) - self.assertTrue(os.path.isdir('build')) - self.assertTrue(os.path.isdir('reports')) - self.assertTrue(os.path.isdir('target')) - self.assertFalse(os.path.exists('baz.so')) - self.assertFalse(os.path.exists('bin')) - self.assertFalse(os.path.exists('build/reports')) - self.assertFalse(os.path.exists('gen')) - self.assertFalse(os.path.exists('gradle-wrapper.jar')) - - def test_scan_with_extlib(self): - os.chdir(self.testdir) - os.mkdir("build") - - config = fdroidserver.common.read_config() - config['sdk_path'] = os.getenv('ANDROID_HOME') - config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} - fdroidserver.common.config = config - app = fdroidserver.metadata.App() - app.id = 'com.gpl.rpg.AndorsTrail' - build = fdroidserver.metadata.Build() - build.commit = 'master' - build.androidupdate = ['no'] - os.makedirs("extlib/android") - # write a fake binary jar file the scanner should definitely error on - with open('extlib/android/android-support-v4r11.jar', 'wb') as file: - file.write( - 
b'PK\x03\x04\x14\x00\x08\x00\x08\x00-\x0eiA\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x04\x00META-INF/\xfe\xca\x00\x00\x03\x00PK\x07\x08\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00' - ) - - class FakeVcs: - # no need to change to the correct commit here - def gotorevision(self, rev, refresh=True): - pass - - def getsrclib(self): - return None - - def deinitsubmodules(self): - pass - - # Test we trigger a scanner error without extlibs - build.extlibs = [] - os.makedirs('build/libs') - shutil.copy('extlib/android/android-support-v4r11.jar', 'build/libs') - fdroidserver.common.prepare_source( - FakeVcs(), app, build, "build", "ignore", "extlib" - ) - count = fdroidserver.scanner.scan_source("build", build) - self.assertEqual(1, count, "Should produce a scanner error without extlib") - - # Now try again as an extlib - build.extlibs = ['android/android-support-v4r11.jar'] - fdroidserver.common.prepare_source( - FakeVcs(), app, build, "build", "ignore", "extlib" - ) - count = fdroidserver.scanner.scan_source("build", build) - self.assertEqual(0, count, "Shouldn't error on jar from extlib") - - def test_failed_verifies_are_not_in_unsigned(self): - os.chdir(self.testdir) - sdk_path = os.path.join(self.testdir, 'android-sdk') - self.create_fake_android_home(sdk_path) - with open(fdroidserver.common.CONFIG_FILE, 'w') as fp: - yaml.dump({'sdk_path': sdk_path, 'keep_when_not_allowed': True}, fp) - os.chmod(fdroidserver.common.CONFIG_FILE, 0o600) - fdroidserver.build.config = fdroidserver.common.read_config() - - os.mkdir('metadata') - appid = 'info.guardianproject.checkey' - metadata_file = os.path.join('metadata', appid + '.yml') - shutil.copy(os.path.join(self.basedir, metadata_file), 'metadata') - with open(metadata_file) as fp: - app = fdroidserver.metadata.App(yaml.safe_load(fp)) - app['RepoType'] = 'git' - app[ - 'Binaries' - ] = 'https://example.com/fdroid/repo/info.guardianproject.checkey_%v.apk' - build = fdroidserver.metadata.Build( - { - 'versionCode': 123, - 'versionName': '1.2.3', - 'commit': '1.2.3', - 'disable': False, - } - ) - app['Builds'] = [build] - fdroidserver.metadata.write_metadata(metadata_file, app) - - os.makedirs(os.path.join('unsigned', 'binaries')) - production_result = os.path.join( - 'unsigned', '%s_%d.apk' % (appid, build['versionCode']) - ) - production_compare_file = os.path.join( - 'unsigned', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) - ) - os.makedirs(os.path.join('tmp', 'binaries')) - test_result = os.path.join('tmp', '%s_%d.apk' % (appid, build['versionCode'])) - test_compare_file = os.path.join( - 'tmp', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) - ) - with mock.patch( - 'fdroidserver.common.force_exit', lambda *args: None - ) as a, mock.patch( - 'fdroidserver.common.get_android_tools_version_log', lambda: 'fake' - ) as b, mock.patch( - 'fdroidserver.common.FDroidPopen', FakeProcess - ) as c, mock.patch( - 'fdroidserver.build.FDroidPopen', FakeProcess - ) as d, mock.patch( - 'fdroidserver.build.trybuild', lambda *args: True - ) as e, mock.patch( - 'fdroidserver.net.download_file', lambda *args, **kwargs: None - ) as f: - a, b, c, d, e, f # silence linters' "unused" warnings - - with mock.patch('sys.argv', ['fdroid build', appid]): - # successful comparison - open(production_result, 'w').close() - open(production_compare_file, 'w').close() - with mock.patch('fdroidserver.common.verify_apks', lambda *args: None): - fdroidserver.build.main() - self.assertTrue(os.path.exists(production_result)) - 
self.assertTrue(os.path.exists(production_compare_file)) - # failed comparison - open(production_result, 'w').close() - open(production_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: 'failed' - ): - fdroidserver.build.main() - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - - with mock.patch('sys.argv', ['fdroid build', '--test', appid]): - # successful comparison - open(test_result, 'w').close() - open(test_compare_file, 'w').close() - with mock.patch('fdroidserver.common.verify_apks', lambda *args: None): - fdroidserver.build.main() - self.assertTrue(os.path.exists(test_result)) - self.assertTrue(os.path.exists(test_compare_file)) - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - # failed comparison - open(test_result, 'w').close() - open(test_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: 'failed' - ): - fdroidserver.build.main() - self.assertTrue(os.path.exists(test_result)) - self.assertFalse(os.path.exists(test_compare_file)) - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - - def test_failed_allowedapksigningkeys_are_not_in_unsigned(self): - os.chdir(self.testdir) - os.mkdir('metadata') - appid = 'info.guardianproject.checkey' - metadata_file = os.path.join('metadata', appid + '.yml') - shutil.copy(os.path.join(self.basedir, metadata_file), 'metadata') - with open(metadata_file) as fp: - app = fdroidserver.metadata.App(yaml.safe_load(fp)) - app['RepoType'] = 'git' - app[ - 'Binaries' - ] = 'https://example.com/fdroid/repo/info.guardianproject.checkey_%v.apk' - build = fdroidserver.metadata.Build( - { - 'versionCode': 123, - 'versionName': '1.2.3', - 'commit': '1.2.3', - 'disable': False, - } - ) - app['Builds'] = [build] - expected_key = 'a' * 64 - bogus_key = 'b' * 64 - app['AllowedAPKSigningKeys'] = [expected_key] - fdroidserver.metadata.write_metadata(metadata_file, app) - - os.makedirs(os.path.join('unsigned', 'binaries')) - production_result = os.path.join( - 'unsigned', '%s_%d.apk' % (appid, build['versionCode']) - ) - production_compare_file = os.path.join( - 'unsigned', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) - ) - os.makedirs(os.path.join('tmp', 'binaries')) - test_result = os.path.join('tmp', '%s_%d.apk' % (appid, build['versionCode'])) - test_compare_file = os.path.join( - 'tmp', 'binaries', '%s_%d.binary.apk' % (appid, build['versionCode']) - ) - with mock.patch( - 'fdroidserver.common.force_exit', lambda *args: None - ) as a, mock.patch( - 'fdroidserver.common.get_android_tools_version_log', lambda: 'fake' - ) as b, mock.patch( - 'fdroidserver.common.FDroidPopen', FakeProcess - ) as c, mock.patch( - 'fdroidserver.build.FDroidPopen', FakeProcess - ) as d, mock.patch( - 'fdroidserver.build.trybuild', lambda *args: True - ) as e, mock.patch( - 'fdroidserver.net.download_file', lambda *args, **kwargs: None - ) as f: - a, b, c, d, e, f # silence linters' "unused" warnings - - with mock.patch('sys.argv', ['fdroid build', appid]): - # successful comparison, successful signer - open(production_result, 'w').close() - open(production_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: None - ) as g, mock.patch( - 'fdroidserver.common.apk_signer_fingerprint', - lambda *args: expected_key, - ) as h: - g, h - 
fdroidserver.build.main() - self.assertTrue(os.path.exists(production_result)) - self.assertTrue(os.path.exists(production_compare_file)) - # successful comparison, failed signer - open(production_result, 'w').close() - open(production_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: None - ) as g, mock.patch( - 'fdroidserver.common.apk_signer_fingerprint', - lambda *args: bogus_key, - ) as h: - g, h - fdroidserver.build.main() - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - # failed comparison - open(production_result, 'w').close() - open(production_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: 'failed' - ): - fdroidserver.build.main() - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - - with mock.patch('sys.argv', ['fdroid build', '--test', appid]): - # successful comparison, successful signer - open(test_result, 'w').close() - open(test_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: None - ) as g, mock.patch( - 'fdroidserver.common.apk_signer_fingerprint', - lambda *args: expected_key, - ) as h: - g, h - fdroidserver.build.main() - self.assertTrue(os.path.exists(test_result)) - self.assertTrue(os.path.exists(test_compare_file)) - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - # successful comparison, failed signer - open(test_result, 'w').close() - open(test_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: None - ) as g, mock.patch( - 'fdroidserver.common.apk_signer_fingerprint', - lambda *args: bogus_key, - ) as h: - g, h - fdroidserver.build.main() - self.assertTrue(os.path.exists(test_result)) - self.assertFalse(os.path.exists(test_compare_file)) - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - # failed comparison - open(test_result, 'w').close() - open(test_compare_file, 'w').close() - with mock.patch( - 'fdroidserver.common.verify_apks', lambda *args: 'failed' - ): - fdroidserver.build.main() - self.assertTrue(os.path.exists(test_result)) - self.assertFalse(os.path.exists(test_compare_file)) - self.assertFalse(os.path.exists(production_result)) - self.assertFalse(os.path.exists(production_compare_file)) - - @mock.patch('fdroidserver.vmtools.get_build_vm') - @mock.patch('fdroidserver.vmtools.get_clean_builder') - @mock.patch('paramiko.SSHClient') - @mock.patch('subprocess.check_output') - def test_build_server_cmdline( - self, - subprocess_check_output, - paramiko_SSHClient, - fdroidserver_vmtools_get_clean_builder, - fdroidserver_vmtools_get_build_vm, - ): - """Test command line flags passed to the buildserver""" - global cmdline_args - test_flag = ['', False] - - def _exec_command(args): - flag = test_flag[0] - if test_flag[1]: - self.assertTrue(flag in args, flag + ' should be present') - else: - self.assertFalse(flag in args, flag + ' should not be present') - - os.chdir(self.testdir) - os.mkdir('tmp') - - chan = mock.MagicMock() - chan.exec_command = _exec_command - chan.recv_exit_status = lambda: 0 - transport = mock.MagicMock() - transport.open_session = mock.Mock(return_value=chan) - sshs = mock.MagicMock() - sshs.get_transport = mock.Mock(return_value=transport) - paramiko_SSHClient.return_value = sshs - 
subprocess_check_output.return_value = ( - b'0123456789abcdef0123456789abcdefcafebabe' - ) - fdroidserver_vmtools_get_clean_builder.side_effect = lambda s: { - 'hostname': 'example.com', - 'idfile': '/path/to/id/file', - 'port': 123, - 'user': 'fake', - } - fdroidserver.common.config = {'sdk_path': '/fake/android/sdk/path'} - fdroidserver.build.options = mock.MagicMock() - vcs = mock.Mock() - vcs.getsrclib = mock.Mock(return_value=None) - app = fdroidserver.metadata.App() - app['metadatapath'] = 'metadata/fake.id.yml' - app['id'] = 'fake.id' - app['RepoType'] = 'git' - build = fdroidserver.metadata.Build( - { - 'versionCode': 123, - 'versionName': '1.2.3', - 'commit': '1.2.3', - 'disable': False, - } - ) - app['Builds'] = [build] - - test_flag = ('--on-server', True) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - self.assertTrue(fdroidserver_vmtools_get_build_vm.called) - - for force in (True, False): - test_flag = ('--force', force) - fdroidserver.build.build_server(app, build, vcs, '', '', '', force) - - fdroidserver.build.options.notarball = True - test_flag = ('--no-tarball', True) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - fdroidserver.build.options.notarball = False - test_flag = ('--no-tarball', False) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - - fdroidserver.build.options.skipscan = False - test_flag = ('--scan-binary', True) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - fdroidserver.build.options.skipscan = True - test_flag = ('--scan-binary', False) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - test_flag = ('--skip-scan', True) - fdroidserver.build.build_server(app, build, vcs, '', '', '', False) - - @mock.patch('fdroidserver.vmtools.get_build_vm') - @mock.patch('fdroidserver.vmtools.get_clean_builder') - @mock.patch('paramiko.SSHClient') - @mock.patch('subprocess.check_output') - @mock.patch('fdroidserver.common.getsrclib') - @mock.patch('fdroidserver.common.prepare_source') - @mock.patch('fdroidserver.build.build_local') - @mock.patch('fdroidserver.common.get_android_tools_version_log', lambda: 'versions') - @mock.patch('fdroidserver.common.deploy_build_log_with_rsync', lambda a, b, c: None) - def test_build_server_no_local_prepare( - self, - build_build_local, - common_prepare_source, - common_getsrclib, - subprocess_check_output, - paramiko_SSHClient, - fdroidserver_vmtools_get_clean_builder, - fdroidserver_vmtools_get_build_vm, # pylint: disable=unused-argument - ): - """srclibs Prepare: should only be executed in the buildserver""" - - def _exec_command(args): - print('chan.exec_command', args) - - def _getsrclib( - spec, - srclib_dir, - basepath=False, - raw=False, - prepare=True, - preponly=False, - refresh=True, - build=None, - ): - # pylint: disable=unused-argument - name, ref = spec.split('@') - libdir = os.path.join(srclib_dir, name) - os.mkdir(libdir) - self.assertFalse(prepare, 'Prepare: scripts should never run on host') - return name, None, libdir # TODO - - os.chdir(self.testdir) - - chan = mock.MagicMock() - chan.exec_command = _exec_command - chan.recv_exit_status = lambda: 0 - transport = mock.MagicMock() - transport.open_session = mock.Mock(return_value=chan) - sshs = mock.MagicMock() - sshs.get_transport = mock.Mock(return_value=transport) - paramiko_SSHClient.return_value = sshs - subprocess_check_output.return_value = ( - b'0123456789abcdef0123456789abcdefcafebabe' - ) - fdroidserver_vmtools_get_clean_builder.side_effect = 
lambda s: { - 'hostname': 'example.com', - 'idfile': '/path/to/id/file', - 'port': 123, - 'user': 'fake', - } - - fdroidserver.metadata.srclibs = { - 'flutter': { - 'RepoType': 'git', - 'Repo': 'https://github.com/flutter/flutter', - } - } - os.mkdir('srclibs') - with open('srclibs/flutter.yml', 'w') as fp: - yaml.dump(fdroidserver.metadata.srclibs, fp) - common_getsrclib.side_effect = _getsrclib - - options = mock.MagicMock() - options.force = False - options.notarball = True - options.onserver = False - options.refresh = False - options.scan_binary = False - options.server = True - options.skipscan = True - options.test = False - options.verbose = True - fdroidserver.build.options = options - fdroidserver.build.config = {'sdk_path': '/fake/android/sdk/path'} - - vcs = mock.Mock() - vcs.getsrclib = mock.Mock(return_value=None) - app = fdroidserver.metadata.App() - app['metadatapath'] = 'metadata/fake.id.yml' - app['id'] = 'fake.id' - app['RepoType'] = 'git' - spec = 'flutter@v1.7.8' - build = fdroidserver.metadata.Build( - { - 'versionCode': 123, - 'versionName': '1.2.3', - 'commit': '1.2.3', - 'disable': False, - 'srclibs': [spec], - } - ) - app['Builds'] = [build] - - build_dir = 'build' - srclib_dir = os.path.join(build_dir, 'srclib') - extlib_dir = os.path.join(build_dir, 'extlib') - os.mkdir('tmp') - os.mkdir(build_dir) - os.mkdir(srclib_dir) - - fdroidserver.build.trybuild( - app, - build, - build_dir, - 'unsigned', - 'logs', - None, - srclib_dir, - extlib_dir, - 'tmp', - 'repo', - vcs, - options.test, - options.server, - options.force, - options.onserver, - options.refresh, - ) - - common_getsrclib.assert_called_once_with( - spec, srclib_dir, basepath=True, prepare=False - ) - common_prepare_source.assert_not_called() - build_build_local.assert_not_called() - - def test_keep_when_not_allowed_default(self): - self.assertFalse(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_config_true(self): - fdroidserver.build.config = {'keep_when_not_allowed': True} - self.assertTrue(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_config_false(self): - fdroidserver.build.config = {'keep_when_not_allowed': False} - self.assertFalse(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_options_true(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = True - self.assertTrue(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_options_false(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = False - self.assertFalse(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_options_true_override_config(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = True - fdroidserver.build.config = {'keep_when_not_allowed': False} - self.assertTrue(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_options_default_does_not_override(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = False - fdroidserver.build.config = {'keep_when_not_allowed': True} - self.assertTrue(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_all_true(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = True - fdroidserver.build.config = {'keep_when_not_allowed': True} - 
self.assertTrue(fdroidserver.build.keep_when_not_allowed()) - - def test_keep_when_not_allowed_all_false(self): - fdroidserver.build.options = Options - fdroidserver.build.options.keep_when_not_allowed = False - fdroidserver.build.config = {'keep_when_not_allowed': False} - self.assertFalse(fdroidserver.build.keep_when_not_allowed()) diff --git a/tests/test_checkupdates.py b/tests/test_checkupdates.py deleted file mode 100755 index 107caf29..00000000 --- a/tests/test_checkupdates.py +++ /dev/null @@ -1,707 +0,0 @@ -#!/usr/bin/env python3 - -import os -import platform -import shutil -import tempfile -import time -import unittest -from pathlib import Path -from unittest import mock - -import git - -import fdroidserver -import fdroidserver.checkupdates - -basedir = Path(__file__).parent - - -class CheckupdatesTest(unittest.TestCase): - '''fdroidserver/checkupdates.py''' - - def setUp(self): - os.chdir(basedir) - self.testdir = tempfile.TemporaryDirectory( - str(time.time()), self._testMethodName + '_' - ) - - def tearDown(self): - self.testdir.cleanup() - - def test_autoupdatemode_no_suffix(self): - fdroidserver.checkupdates.config = {} - - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersion = '1.1.8-fdroid' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'HTTP' - app.AutoUpdateMode = 'Version %v' - - build = fdroidserver.metadata.Build() - build.versionCode = app.CurrentVersionCode - build.versionName = app.CurrentVersion - app['Builds'].append(build) - - with mock.patch( - 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) - ): - with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): - with mock.patch('subprocess.call', lambda cmd: 0): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - build = app['Builds'][-1] - self.assertEqual(build.versionName, '1.1.9') - self.assertEqual(build.commit, '1.1.9') - - with mock.patch( - 'fdroidserver.checkupdates.check_http', lambda app: ('1.7.9', 10107) - ): - with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): - with mock.patch('subprocess.call', lambda cmd: 0): - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - build = app['Builds'][-1] - self.assertEqual(build.versionName, '1.1.9') - self.assertEqual(build.commit, '1.1.9') - - def test_autoupdatemode_suffix(self): - fdroidserver.checkupdates.config = {} - - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersion = '1.1.8-fdroid' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'HTTP' - app.AutoUpdateMode = r'Version +.%c-fdroid v%v_%c' - - build = fdroidserver.metadata.Build() - build.versionCode = app.CurrentVersionCode - build.versionName = app.CurrentVersion - app['Builds'].append(build) - - with mock.patch( - 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) - ): - with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): - with mock.patch('subprocess.call', lambda cmd: 0): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - build = app['Builds'][-1] - self.assertEqual(build.versionName, '1.1.9.10109-fdroid') - self.assertEqual(build.commit, 'v1.1.9_10109') - - def test_autoupdate_multi_variants(self): - fdroidserver.checkupdates.config = {} - - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - 
app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersion = '1.1.8' - app.CurrentVersionCode = 101083 - app.UpdateCheckMode = 'Tags' - app.AutoUpdateMode = r'Version' - app.VercodeOperation = [ - "10*%c+1", - "10*%c+3", - ] - - build = fdroidserver.metadata.Build() - build.versionCode = app.CurrentVersionCode - 2 - build.versionName = app.CurrentVersion - build.gradle = ["arm"] - app['Builds'].append(build) - - build = fdroidserver.metadata.Build() - build.versionCode = app.CurrentVersionCode - build.versionName = app.CurrentVersion - build.gradle = ["x86"] - app['Builds'].append(build) - - with mock.patch( - 'fdroidserver.checkupdates.check_tags', - lambda app, pattern: ('1.1.9', 10109, 'v1.1.9'), - ): - with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): - with mock.patch('subprocess.call', lambda cmd: 0): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - build = app['Builds'][-2] - self.assertEqual(build.versionName, '1.1.9') - self.assertEqual(build.versionCode, 101091) - self.assertEqual(build.gradle, ["arm"]) - - build = app['Builds'][-1] - self.assertEqual(build.versionName, '1.1.9') - self.assertEqual(build.versionCode, 101093) - self.assertEqual(build.gradle, ["x86"]) - - self.assertEqual(app.CurrentVersion, '1.1.9') - self.assertEqual(app.CurrentVersionCode, 101093) - - def test_checkupdates_app_http(self): - fdroidserver.checkupdates.config = {} - - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'HTTP' - app.UpdateCheckData = 'mock' - - with mock.patch( - 'fdroidserver.checkupdates.check_http', lambda app: (None, 'bla') - ): - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - with mock.patch( - 'fdroidserver.checkupdates.check_http', lambda app: ('1.1.9', 10109) - ): - with mock.patch( - 'fdroidserver.metadata.write_metadata', mock.Mock() - ) as wrmock: - with mock.patch('subprocess.call', lambda cmd: 0): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - wrmock.assert_called_with(app.metadatapath, app) - - def test_checkupdates_app_tags(self): - fdroidserver.checkupdates.config = {} - - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersion = '1.1.8' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'Tags' - app.AutoUpdateMode = 'Version' - - build = fdroidserver.metadata.Build() - build.versionCode = app.CurrentVersionCode - build.versionName = app.CurrentVersion - app['Builds'].append(build) - - with mock.patch( - 'fdroidserver.checkupdates.check_tags', - lambda app, pattern: (None, 'bla', None), - ): - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - with mock.patch( - 'fdroidserver.checkupdates.check_tags', - lambda app, pattern: ('1.1.9', 10109, 'v1.1.9'), - ): - with mock.patch('fdroidserver.metadata.write_metadata', mock.Mock()): - with mock.patch('subprocess.call', lambda cmd: 0): - fdroidserver.checkupdates.checkupdates_app(app, auto=True) - - build = app['Builds'][-1] - self.assertEqual(build.versionName, '1.1.9') - self.assertEqual(build.commit, 'v1.1.9') - - def test_check_http(self): - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersionCode = 10108 - 
app.UpdateCheckMode = 'HTTP' - app.UpdateCheckData = r'https://a.net/b.txt|c(.*)|https://d.net/e.txt|v(.*)' - app.UpdateCheckIgnore = 'beta' - - respmock = mock.Mock() - respmock.read = lambda: 'v1.1.9\nc10109'.encode('utf-8') - with mock.patch('urllib.request.urlopen', lambda a, b, c: respmock): - vername, vercode = fdroidserver.checkupdates.check_http(app) - self.assertEqual(vername, '1.1.9') - self.assertEqual(vercode, 10109) - - def test_check_http_blocks_unknown_schemes(self): - app = fdroidserver.metadata.App() - for scheme in ('file', 'ssh', 'http', ';pwn'): - app.id = scheme - faked = scheme + '://fake.url/for/testing/scheme' - app.UpdateCheckData = faked + '|ignored|' + faked + '|ignored' - app.metadatapath = 'metadata/' + app.id + '.yml' - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.checkupdates.check_http(app) - - def test_check_http_ignore(self): - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'HTTP' - app.UpdateCheckData = r'https://a.net/b.txt|c(.*)|https://d.net/e.txt|v(.*)' - app.UpdateCheckIgnore = 'beta' - - respmock = mock.Mock() - respmock.read = lambda: 'v1.1.9-beta\nc10109'.encode('utf-8') - with mock.patch('urllib.request.urlopen', lambda a, b, c: respmock): - vername, vercode = fdroidserver.checkupdates.check_http(app) - self.assertEqual(vername, None) - - def test_check_tags_data(self): - app = fdroidserver.metadata.App() - app.id = 'loop.starts.shooting' - app.metadatapath = 'metadata/' + app.id + '.yml' - app.RepoType = 'git' - app.CurrentVersionCode = 10108 - app.UpdateCheckMode = 'Tags' - app.UpdateCheckData = r'b.txt|c(.*)|e.txt|v(.*)' - - vcs = mock.Mock() - vcs.latesttags.return_value = ['1.1.9', '1.1.8'] - with mock.patch( - 'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109' - ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch( - 'fdroidserver.common.getvcs', return_value=vcs - ): - _ignored # silence the linters - mock_path.is_file.return_falue = True - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '1.1.9') - self.assertEqual(vercode, 10109) - - app.UpdateCheckData = r'b.txt|c(.*)|.|v(.*)' - with mock.patch( - 'pathlib.Path.read_text', lambda a: 'v1.1.0\nc10109' - ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch( - 'fdroidserver.common.getvcs', return_value=vcs - ): - _ignored # silence the linters - mock_path.is_file.return_falue = True - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '1.1.0') - self.assertEqual(vercode, 10109) - - app.UpdateCheckData = r'b.txt|c(.*)||' - with mock.patch( - 'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109' - ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch( - 'fdroidserver.common.getvcs', return_value=vcs - ): - _ignored # silence the linters - mock_path.is_file.return_falue = True - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '1.1.9') - self.assertEqual(vercode, 10109) - - vcs.latesttags.return_value = ['Android-1.1.0', '1.1.8'] - app.UpdateCheckData = r'b.txt|c(.*)||Android-([\d.]+)' - with mock.patch( - 'pathlib.Path.read_text', lambda a: 'v1.1.9\nc10109' - ) as _ignored, mock.patch.object(Path, 'is_file') as mock_path, mock.patch( - 'fdroidserver.common.getvcs', return_value=vcs - ): - _ignored # silence the 
linters - mock_path.is_file.return_falue = True - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '1.1.0') - self.assertEqual(vercode, 10109) - - app.UpdateCheckData = r'|\+(\d+)||Android-([\d.]+)' - vcs.latesttags.return_value = ['Android-1.1.0+1'] - with mock.patch('fdroidserver.common.getvcs', return_value=vcs): - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '1.1.0') - self.assertEqual(vercode, 1) - - app.UpdateCheckData = '|||' - vcs.latesttags.return_value = ['2'] - with mock.patch('fdroidserver.common.getvcs', return_value=vcs): - vername, vercode, _tag = fdroidserver.checkupdates.check_tags(app, None) - self.assertEqual(vername, '2') - self.assertEqual(vercode, 2) - - def _get_test_git_repos(self): - testdir = self.testdir.name - os.chdir(testdir) - os.mkdir('metadata') - for f in (basedir / 'metadata').glob('*.yml'): - shutil.copy(f, 'metadata') - git_repo = git.Repo.init(testdir) - with git_repo.config_writer() as cw: - cw.set_value('user', 'name', 'Foo Bar') - cw.set_value('user', 'email', 'foo@bar.com') - git_repo.git.add(all=True) - git_repo.index.commit("all metadata files") - - git_remote_upstream = os.path.join(testdir, 'git_remote_upstream') - upstream_repo = git.Repo.init(git_remote_upstream, bare=True) - with upstream_repo.config_writer() as cw: - cw.set_value('receive', 'advertisePushOptions', True) - git_repo.create_remote('upstream', 'file://' + git_remote_upstream) - - git_remote_origin = os.path.join(testdir, 'git_remote_origin') - origin_repo = git.Repo.init(git_remote_origin, bare=True) - with origin_repo.config_writer() as cw: - cw.set_value('receive', 'advertisePushOptions', True) - git_repo.create_remote('origin', 'file://' + git_remote_origin) - - return git_repo, origin_repo, upstream_repo - - def test_get_changes_versus_ref(self): - def _make_commit_new_app(git_repo, metadata_file): - app = fdroidserver.metadata.App() - fdroidserver.metadata.write_metadata(metadata_file, app) - git_repo.git.add(metadata_file) - git_repo.git.commit(metadata_file, message=f'changed {metadata_file}') - - git_repo, origin_repo, upstream_repo = self._get_test_git_repos() - for remote in git_repo.remotes: - remote.push(git_repo.active_branch) - appid = 'com.testvalue' - metadata_file = f'metadata/{appid}.yml' - - # set up remote branch with change to app - git_repo.git.checkout('-b', appid) - _make_commit_new_app(git_repo, metadata_file) - git_repo.remotes.origin.push(appid) - - # reset local branch and there should be differences - upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) - git_repo.git.reset(upstream_main) - self.assertTrue( - fdroidserver.checkupdates.get_changes_versus_ref( - git_repo, f'origin/{appid}', metadata_file - ) - ) - # make new commit that matches the previous, different commit, no diff - _make_commit_new_app(git_repo, metadata_file) - self.assertFalse( - fdroidserver.checkupdates.get_changes_versus_ref( - git_repo, f'origin/{appid}', metadata_file - ) - ) - - def test_push_commits(self): - git_repo, origin_repo, upstream_repo = self._get_test_git_repos() - for remote in git_repo.remotes: - remote.push(git_repo.active_branch) - self.assertEqual(git_repo.head, upstream_repo.head) - self.assertEqual(origin_repo.head, upstream_repo.head) - # pretend that checkupdates ran but didn't create any new commits - fdroidserver.checkupdates.push_commits() - - appid = 'org.adaway' - self.assertNotIn(appid, git_repo.branches) - 
self.assertNotIn(appid, origin_repo.branches) - self.assertNotIn(appid, upstream_repo.branches) - self.assertNotIn('checkupdates', git_repo.branches) - - # now make commit - app = fdroidserver.metadata.read_metadata({appid: -1})[appid] - build = fdroidserver.metadata.Build() - build.versionName = 'fake' - build.versionCode = 999999999 - app.Builds.append(build) - metadata_file = 'metadata/%s.yml' % appid - fdroidserver.metadata.write_metadata(metadata_file, app) - git_repo.index.add(metadata_file) - git_repo.index.commit('changed ' + appid) - - # and push the new commit to the dynamic branch - fdroidserver.checkupdates.push_commits() - self.assertIn(appid, git_repo.branches) - self.assertIn(appid, git_repo.remotes.origin.refs) - self.assertNotIn('checkupdates', git_repo.branches) - self.assertNotIn(appid, git_repo.remotes.upstream.refs) - - def test_push_commits_verbose(self): - class Options: - verbose = True - - fdroidserver.checkupdates.options = Options - repos = self._get_test_git_repos() - git_repo = repos[0] - git_repo.remotes.origin.push(git_repo.active_branch) - git_repo.remotes.upstream.push(git_repo.active_branch) - - # make commit - appid = 'org.adaway' - app = fdroidserver.metadata.read_metadata({appid: -1})[appid] - build = fdroidserver.metadata.Build() - build.versionName = 'fake' - build.versionCode = 999999999 - app.Builds.append(build) - metadata_file = 'metadata/%s.yml' % appid - fdroidserver.metadata.write_metadata(metadata_file, app) - git_repo.index.add(metadata_file) - git_repo.index.commit('changed ' + appid) - - # and push the new commit to the dynamic branch - fdroidserver.checkupdates.push_commits() - self.assertIn(appid, git_repo.branches) - self.assertIn(appid, git_repo.remotes.origin.refs) - - def test_prune_empty_appid_branches(self): - git_repo, origin_repo, upstream_repo = self._get_test_git_repos() - for remote in git_repo.remotes: - remote.push(git_repo.active_branch) - self.assertEqual(git_repo.head, upstream_repo.head) - self.assertEqual(origin_repo.head, upstream_repo.head) - - appid = 'org.adaway' - git_repo.create_head(appid, force=True) - git_repo.remotes.origin.push(appid, force=True) - self.assertIn(appid, git_repo.branches) - self.assertIn(appid, origin_repo.branches) - self.assertIn(appid, git_repo.remotes.origin.refs) - self.assertNotIn(appid, git_repo.remotes.upstream.refs) - fdroidserver.checkupdates.prune_empty_appid_branches() - self.assertNotIn(appid, origin_repo.branches) - self.assertNotIn(appid, git_repo.remotes.origin.refs) - self.assertNotIn(appid, git_repo.remotes.upstream.refs) - - @mock.patch('sys.exit') - @mock.patch('fdroidserver.metadata.read_metadata') - def test_merge_requests_flag(self, read_metadata, sys_exit): - def _sys_exit(return_code=0): - self.assertNotEqual(return_code, 0) - raise fdroidserver.exception.FDroidException('sys.exit() ran') - - def _read_metadata(a=None, b=None): - raise StopIteration('read_metadata() ran, test is successful') - - appid = 'com.example' - # read_metadata.return_value = dict() # {appid: dict()} - read_metadata.side_effect = _read_metadata - sys_exit.side_effect = _sys_exit - - # set up clean git repo - os.chdir(self.testdir.name) - git_repo = git.Repo.init() - open('foo', 'w').close() - git_repo.git.add(all=True) - git_repo.index.commit("all files") - - with mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request']): - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.checkupdates.main() - sys_exit.assert_called() - - sys_exit.reset_mock() - with 
mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request', appid]): - with self.assertRaises(StopIteration): - fdroidserver.checkupdates.main() - sys_exit.assert_not_called() - - @unittest.skipIf( - platform.system() == 'Darwin', - 'It is difficult to configure the base system for this test.', - ) - def test_get_upstream_main_branch(self): - os.chdir(self.testdir.name) - testvalue = 'foo' - git_repo = git.Repo.init('.', initial_branch=testvalue) - - open('foo', 'w').close() - git_repo.git.add(all=True) - git_repo.index.commit("all files") - git_repo.create_remote('upstream', os.getcwd()).fetch() - - branch = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) - self.assertEqual( - f'upstream/{testvalue}', - branch, - f'The default branch should be called {testvalue}!', - ) - - def test_get_upstream_main_branch_git_config(self): - os.chdir(self.testdir.name) - testvalue = 'foo' - git_repo = git.Repo.init('.', initial_branch=testvalue) - with git_repo.config_writer() as cw: - cw.set_value('init', 'defaultBranch', testvalue) - - open('foo', 'w').close() - git_repo.git.add(all=True) - git_repo.index.commit("all files") - git_repo.git.branch('somethingelse') # make another remote branch - git_repo.create_remote('upstream', os.getcwd()).fetch() - - branch = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) - self.assertEqual( - f'upstream/{testvalue}', - branch, - f'The default branch should be called {testvalue}!', - ) - - def test_checkout_appid_branch_does_not_exist(self): - appid = 'com.example' - os.chdir(self.testdir.name) - git_repo = git.Repo.init('.') - open('foo', 'w').close() - git_repo.git.add(all=True) - git_repo.index.commit("all files") - # --merge-request assumes remotes called 'origin' and 'upstream' - git_repo.create_remote('origin', os.getcwd()).fetch() - git_repo.create_remote('upstream', os.getcwd()).fetch() - self.assertNotIn(appid, git_repo.heads) - fdroidserver.checkupdates.checkout_appid_branch(appid) - self.assertIn(appid, git_repo.heads) - - def test_checkout_appid_branch_exists(self): - appid = 'com.example' - - upstream_dir = os.path.join(self.testdir.name, 'upstream_git') - os.mkdir(upstream_dir) - upstream_repo = git.Repo.init(upstream_dir) - (Path(upstream_dir) / 'README').write_text('README') - upstream_repo.git.add(all=True) - upstream_repo.index.commit("README") - upstream_repo.create_head(appid) - - local_dir = os.path.join(self.testdir.name, 'local_git') - git.Repo.clone_from(upstream_dir, local_dir) - os.chdir(local_dir) - git_repo = git.Repo.init('.') - # --merge-request assumes remotes called 'origin' and 'upstream' - git_repo.create_remote('upstream', upstream_dir).fetch() - - self.assertNotIn(appid, git_repo.heads) - fdroidserver.checkupdates.checkout_appid_branch(appid) - self.assertIn(appid, git_repo.heads) - - def test_checkout_appid_branch_skip_bot_commit(self): - appid = 'com.example' - - upstream_dir = os.path.join(self.testdir.name, 'upstream_git') - os.mkdir(upstream_dir) - upstream_repo = git.Repo.init(upstream_dir) - (Path(upstream_dir) / 'README').write_text('README') - upstream_repo.git.add(all=True) - upstream_repo.index.commit("README") - upstream_repo.create_head(appid) - - local_dir = os.path.join(self.testdir.name, 'local_git') - git.Repo.clone_from(upstream_dir, local_dir) - os.chdir(local_dir) - git_repo = git.Repo.init('.') - # --merge-request assumes remotes called 'origin' and 'upstream' - git_repo.create_remote('upstream', upstream_dir).fetch() - - os.mkdir('metadata') - git_repo.create_head(appid, 
f'origin/{appid}', force=True) - git_repo.git.checkout(appid) - - # fake checkupdates-bot commit - Path(f'metadata/{appid}.yml').write_text('AutoName: Example\n') - with git_repo.config_writer() as cw: - cw.set_value('user', 'email', fdroidserver.checkupdates.BOT_EMAIL) - git_repo.git.add(all=True) - git_repo.index.commit("Example") - - # set up starting from remote branch - git_repo.remotes.origin.push(appid) - upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) - git_repo.git.checkout(upstream_main.split('/')[1]) - git_repo.delete_head(appid, force=True) - - self.assertTrue( - fdroidserver.checkupdates.checkout_appid_branch(appid), - 'This should have been true since there are only bot commits.', - ) - - def test_checkout_appid_branch_skip_human_edits(self): - appid = 'com.example' - - upstream_dir = os.path.join(self.testdir.name, 'upstream_git') - os.mkdir(upstream_dir) - upstream_repo = git.Repo.init(upstream_dir) - (Path(upstream_dir) / 'README').write_text('README') - upstream_repo.git.add(all=True) - upstream_repo.index.commit("README") - upstream_repo.create_head(appid) - - local_dir = os.path.join(self.testdir.name, 'local_git') - git.Repo.clone_from(upstream_dir, local_dir) - os.chdir(local_dir) - git_repo = git.Repo.init('.') - # --merge-request assumes remotes called 'origin' and 'upstream' - git_repo.create_remote('upstream', upstream_dir).fetch() - - os.mkdir('metadata') - git_repo.create_head(appid, f'origin/{appid}', force=True) - git_repo.git.checkout(appid) - - with git_repo.config_writer() as cw: - cw.set_value('user', 'email', fdroidserver.checkupdates.BOT_EMAIL) - - # fake checkupdates-bot commit - Path(f'metadata/{appid}.yml').write_text('AutoName: Example\n') - git_repo.git.add(all=True) - git_repo.index.commit("Example") - - # fake commit added on top by a human - Path(f'metadata/{appid}.yml').write_text('AutoName: Example\nName: Foo\n') - with git_repo.config_writer() as cw: - cw.set_value('user', 'email', 'human@bar.com') - git_repo.git.add(all=True) - git_repo.index.commit("Example") - - # set up starting from remote branch - git_repo.remotes.origin.push(appid) - upstream_main = fdroidserver.checkupdates.get_upstream_main_branch(git_repo) - git_repo.git.reset(upstream_main.split('/')[1]) - - self.assertFalse( - fdroidserver.checkupdates.checkout_appid_branch(appid), - 'This should have been false since there are human edits.', - ) - - @mock.patch('git.remote.Remote.push') - @mock.patch('sys.exit') - @mock.patch('fdroidserver.common.read_app_args') - @mock.patch('fdroidserver.checkupdates.checkupdates_app') - def test_merge_requests_branch( - self, checkupdates_app, read_app_args, sys_exit, push - ): - def _sys_exit(return_code=0): - self.assertEqual(return_code, 0) - - def _checkupdates_app(app, auto, commit): # pylint: disable=unused-argument - os.mkdir('metadata') - Path(f'metadata/{app["packageName"]}.yml').write_text('AutoName: Example') - git_repo.git.add(all=True) - git_repo.index.commit("Example") - - def _read_app_args(apps=[]): - appid = apps[0] - return {appid: {'packageName': appid}} - - appid = 'com.example' - read_app_args.side_effect = _read_app_args - checkupdates_app.side_effect = _checkupdates_app - sys_exit.side_effect = _sys_exit - - # set up clean git repo - os.chdir(self.testdir.name) - git_repo = git.Repo.init() - open('foo', 'w').close() - git_repo.git.add(all=True) - git_repo.index.commit("all files") - # --merge-request assumes remotes called 'origin' and 'upstream' - git_repo.create_remote('origin', 
os.getcwd()).fetch() - git_repo.create_remote('upstream', os.getcwd()).fetch() - - self.assertNotIn(appid, git_repo.heads) - with mock.patch('sys.argv', ['fdroid checkupdates', '--merge-request', appid]): - fdroidserver.checkupdates.main() - push.assert_called_once() - sys_exit.assert_called_once() - self.assertIn(appid, git_repo.heads) - - def test_push_commits_invalid_branch_name(self): - git_repo, origin_repo, upstream_repo = self._get_test_git_repos() - for remote in git_repo.remotes: - remote.push(git_repo.active_branch) - self.assertEqual(git_repo.head, upstream_repo.head) - self.assertEqual(origin_repo.head, upstream_repo.head) - # pretend that checkupdates ran but didn't create any new commits - fdroidserver.checkupdates.push_commits('') diff --git a/tests/test_common.py b/tests/test_common.py deleted file mode 100755 index 3110b446..00000000 --- a/tests/test_common.py +++ /dev/null @@ -1,3653 +0,0 @@ -#!/usr/bin/env python3 - -import difflib -import glob -import gzip -import importlib -import json -import logging -import os -import re -import shutil -import subprocess -import sys -import tempfile -import textwrap -import time -import unittest -from argparse import ArgumentParser -from datetime import datetime, timezone -from pathlib import Path -from unittest import mock -from zipfile import BadZipFile, ZipFile - -import git -import ruamel.yaml - -import fdroidserver -import fdroidserver.common -import fdroidserver.metadata -import fdroidserver.signindex -from fdroidserver._yaml import config_dump, yaml, yaml_dumper -from fdroidserver.common import ANTIFEATURES_CONFIG_NAME, CATEGORIES_CONFIG_NAME -from fdroidserver.exception import ( - FDroidException, - MetaDataException, - VCSException, - VerificationException, -) -from fdroidserver.looseversion import LooseVersion - -from .shared_test_code import TmpCwd, mkdir_testfiles, mkdtemp - -basedir = Path(__file__).parent - - -def _mock_common_module_options_instance(): - """Helper method to deal with difficult visibility of the module-level options.""" - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.verbose = False - - -class SetUpTearDownMixin: - """A mixin with no tests in it for shared setUp and tearDown.""" - - def setUp(self): - logging.basicConfig(level=logging.DEBUG) - logger = logging.getLogger('androguard.axml') - logger.setLevel(logging.INFO) # tame the axml debug messages - os.chdir(basedir) - - self.verbose = '-v' in sys.argv or '--verbose' in sys.argv - fdroidserver.common.set_console_logging(self.verbose) - - # these are declared as None at the top of the module file - fdroidserver.common.config = None - fdroidserver.common.options = None - fdroidserver.metadata.srclibs = None - - self.testdir = mkdir_testfiles(basedir, self) - - def tearDown(self): - fdroidserver.common.config = None - fdroidserver.common.options = None - os.chdir(basedir) - if os.path.exists(self.testdir): - shutil.rmtree(self.testdir) - - -class CommonTest(SetUpTearDownMixin, unittest.TestCase): - '''fdroidserver/common.py''' - - def test_yaml_1_2(self): - """Return a ruamel.yaml instance that supports YAML 1.2 - - There should be no "Norway Problem", and other things like this: - https://yaml.org/spec/1.2.2/ext/changes/ - - YAML 1.2 says "underlines _ cannot be used within numerical - values", but ruamel.yaml seems to ignore that. 1_0 should be a - string, but it is read as a 10. 
- - """ - os.chdir(self.testdir) - yaml12file = Path('YAML 1.2.yml') - yaml12file.write_text('[true, no, 0b010, 010, 0o10, "\\/"]', encoding='utf-8') - with yaml12file.open() as fp: - self.assertEqual( - [True, 'no', 2, 10, 8, '/'], - yaml.load(fp), - ) - - def test_parse_human_readable_size(self): - for k, v in ( - (9827, 9827), - (123.456, 123), - ('123b', 123), - ('1.2', 1), - ('10.43 KiB', 10680), - ('11GB', 11000000000), - ('59kb', 59000), - ('343.1 mb', 343100000), - ('99.9GiB', 107266808217), - ('1MB', 1000000), - ): - self.assertEqual(fdroidserver.common.parse_human_readable_size(k), v) - for v in ((12, 123), '0xfff', [], None, '12,123', '123GG', '982374bb', self): - with self.assertRaises(ValueError): - fdroidserver.common.parse_human_readable_size(v) - - def test_assert_config_keystore(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with self.assertRaises(FDroidException): - fdroidserver.common.assert_config_keystore({}) - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - c = { - 'repo_keyalias': 'localhost', - 'keystore': 'keystore.jks', - 'keystorepass': '12345', - 'keypass': '12345', - } - with open('keystore.jks', 'w'): - pass - fdroidserver.common.assert_config_keystore(c) - - def _set_build_tools(self): - build_tools = os.path.join( - fdroidserver.common.config['sdk_path'], 'build-tools' - ) - if os.path.exists(build_tools): - for f in sorted(os.listdir(build_tools), reverse=True): - versioned = os.path.join(build_tools, f) - if os.path.isdir(versioned) and os.path.isfile( - os.path.join(versioned, 'apksigner') - ): - break - return True - else: - print('no build-tools found: ' + build_tools) - return False - - def _find_all(self): - tools = ['aapt', 'adb', 'jarsigner'] - if os.path.exists(os.path.join(os.getenv('ANDROID_HOME'), 'tools', 'android')): - tools.append('android') - for cmd in tools: - try: - path = fdroidserver.common.find_sdk_tools_cmd(cmd) - self.assertTrue(os.path.exists(path)) - self.assertTrue(os.path.isfile(path)) - except fdroidserver.exception.FDroidException: - pass - - @unittest.skipUnless(os.getenv('ANDROID_HOME'), "Needs ANDROID_HOME env var") - def test_find_sdk_tools_cmd(self): - fdroidserver.common.config = dict() - # TODO add this once everything works without sdk_path set in config - # self._find_all() - sdk_path = os.getenv('ANDROID_HOME') - if os.path.exists(sdk_path): - fdroidserver.common.config['sdk_path'] = sdk_path - build_tools = os.path.join(sdk_path, 'build-tools') - if self._set_build_tools() or os.path.exists('/usr/bin/aapt'): - self._find_all() - else: - print('no build-tools found: ' + build_tools) - - def test_find_java_root_path(self): - os.chdir(self.testdir) - - all_pathlists = [ - ( - [ # Debian - '/usr/lib/jvm/java-1.5.0-gcj-5-amd64', - '/usr/lib/jvm/java-8-openjdk-amd64', - '/usr/lib/jvm/java-1.8.0-openjdk-amd64', - ], - '/usr/lib/jvm/java-8-openjdk-amd64', - ), - ( - [ # OSX - '/Library/Java/JavaVirtualMachines/jdk1.8.0_202.jdk', - '/Library/Java/JavaVirtualMachines/jdk1.8.0_45.jdk', - '/System/Library/Java/JavaVirtualMachines/jdk1.7.0_80.jdk', - ], - '/Library/Java/JavaVirtualMachines/jdk1.8.0_202.jdk', - ), - ] - - for pathlist, choice in all_pathlists: - # strip leading / to make relative paths to test without root - pathlist = [p[1:] for p in pathlist] - - # create test file used in common._add_java_paths_to_config() - for p in pathlist: - if p.startswith('/System') or p.startswith('/Library'): - _dir = os.path.join(p, 'Contents', 'Home', 'bin') - else: - _dir = os.path.join(p, 
'bin') - os.makedirs(_dir) - open(os.path.join(_dir, 'javac'), 'w').close() - - config = dict() - config['java_paths'] = dict() - fdroidserver.common._add_java_paths_to_config(pathlist, config) - self.assertEqual(config['java_paths']['8'], choice[1:]) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_is_debuggable_or_testOnly(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - - # these are set debuggable - for apkfile in ('urzip.apk', 'urzip-badsig.apk', 'urzip-badcert.apk'): - self.assertTrue( - fdroidserver.common.is_debuggable_or_testOnly(str(basedir / apkfile)), - "debuggable APK state was not properly parsed!", - ) - - # these are set NOT debuggable - testfiles = 'urzip-release.apk', 'urzip-release-unsigned.apk', 'v2.only.sig_2.apk' - for apkfile in testfiles: - self.assertFalse( - fdroidserver.common.is_debuggable_or_testOnly(apkfile), - "debuggable APK state was not properly parsed!", - ) - - VALID_STRICT_PACKAGE_NAMES = [ - "An.stop", - "SpeedoMeterApp.main", - "a2dp.Vol", - "au.com.darkside.XServer", - "click.dummer.UartSmartwatch", - "com.Bisha.TI89EmuDonation", - "com.MarcosDiez.shareviahttp", - "com.Pau.ImapNotes2", - "com.app.Zensuren", - "com.darshancomputing.BatteryIndicator", - "com.geecko.QuickLyric", - "com.genonbeta.TrebleShot", - "com.gpl.rpg.AndorsTrail", - "com.hobbyone.HashDroid", - "com.moez.QKSMS", - "com.platypus.SAnd", - "com.prhlt.aemus.Read4SpeechExperiments", - "de.syss.MifareClassicTool", - "org.fdroid.fdroid", - "org.f_droid.fdr0ID", - ] - - def test_is_valid_package_name(self): - for name in self.VALID_STRICT_PACKAGE_NAMES + [ - "_SpeedoMeterApp.main", - "05041684efd9b16c2888b1eddbadd0359f655f311b89bdd1737f560a10d20fb8", - ]: - self.assertTrue( - fdroidserver.common.is_valid_package_name(name), - "{0} should be a valid package name".format(name), - ) - for name in [ - "0rg.fdroid.fdroid", - ".f_droid.fdr0ID", - "trailingdot.", - "org.fdroid/fdroid", - "/org.fdroid.fdroid", - ]: - self.assertFalse( - fdroidserver.common.is_valid_package_name(name), - "{0} should not be a valid package name".format(name), - ) - - def test_is_strict_application_id(self): - """see also tests/valid-package-names/""" - for name in self.VALID_STRICT_PACKAGE_NAMES: - self.assertTrue( - fdroidserver.common.is_strict_application_id(name), - "{0} should be a strict application id".format(name), - ) - for name in [ - "0rg.fdroid.fdroid", - ".f_droid.fdr0ID", - "oneword", - "trailingdot.", - "cafebabe", - "org.fdroid/fdroid", - "/org.fdroid.fdroid", - "_SpeedoMeterApp.main", - "05041684efd9b16c2888b1eddbadd0359f655f311b89bdd1737f560a10d20fb8", - ]: - self.assertFalse( - fdroidserver.common.is_strict_application_id(name), - "{0} should not be a strict application id".format(name), - ) - - def test_prepare_sources(self): - testint = 99999999 - teststr = 'FAKE_STR_FOR_TESTING' - - shutil.copytree( - os.path.join(basedir, 'source-files'), - os.path.join(self.testdir, 'source-files'), - ) - - fdroidclient_testdir = os.path.join( - self.testdir, 'source-files', 'fdroid', 'fdroidclient' - ) - - config = dict() - config['sdk_path'] = os.getenv('ANDROID_HOME') - config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} - fdroidserver.common.config = config - app = fdroidserver.metadata.App() - app.id = 'org.fdroid.froid' - build = fdroidserver.metadata.Build() - build.commit = 'master' - build.forceversion = True - build.forcevercode = True - build.gradle = ['yes'] - build.target 
= 'android-' + str(testint) - build.versionName = teststr - build.versionCode = testint - - class FakeVcs: - # no need to change to the correct commit here - def gotorevision(self, rev, refresh=True): - pass - - # no srclib info needed, but it could be added... - def getsrclib(self): - return None - - def deinitsubmodules(self): - pass - - fdroidserver.common.prepare_source(FakeVcs(), app, build, - fdroidclient_testdir, fdroidclient_testdir, fdroidclient_testdir) - - fdroidclient_testdir = Path(fdroidclient_testdir) - build_gradle = fdroidclient_testdir / 'build.gradle' - filedata = build_gradle.read_text(encoding='utf-8') - self.assertIsNotNone( - re.search(r"\s+compileSdkVersion %s\s+" % testint, filedata) - ) - - androidmanifest_xml = fdroidclient_testdir / 'AndroidManifest.xml' - filedata = androidmanifest_xml.read_text(encoding='utf-8') - self.assertIsNone(re.search('android:debuggable', filedata)) - self.assertIsNotNone( - re.search('android:versionName="%s"' % build.versionName, filedata) - ) - self.assertIsNotNone( - re.search('android:versionCode="%s"' % build.versionCode, filedata) - ) - - @unittest.skipIf(os.name == 'nt', "`fdroid build` assumes POSIX scripting") - def test_prepare_sources_with_prebuild_subdir(self): - app_build_dir = os.path.join(self.testdir, 'build', 'com.example') - shutil.copytree( - basedir / 'source-files' / 'fdroid' / 'fdroidclient', - app_build_dir, - ) - - subdir = 'baz/bar' - subdir_path = Path(app_build_dir) / subdir - subdir_path.mkdir(parents=True, exist_ok=True) - build_gradle = subdir_path / 'build.gradle' - build_gradle.write_text('// just a test placeholder', encoding='utf-8') - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - _mock_common_module_options_instance() - - srclibname = 'FakeSrcLib' - srclib_testdir = os.path.join(self.testdir, 'build', 'srclib') - os.makedirs(os.path.join(srclib_testdir, srclibname, 'testdirshouldexist')) - fdroidserver.metadata.srclibs = { - srclibname: { - 'RepoType': 'git', - 'Repo': 'https://example.com/foo/fakesrclib', - 'Subdir': None, - 'Prepare': None, - } - } - - app = fdroidserver.metadata.App() - app.id = 'app.has.srclibs' - build = fdroidserver.metadata.Build() - build.commit = 'master' - build.gradle = ['yes'] - build.prebuild = ['test -d $$FakeSrcLib$$/testdirshouldexist'] # actual test condition - build.srclibs = [srclibname + '@1.2.3'] - build.subdir = subdir - build.versionCode = 0xCAFE - build.versionName = 'vCAFE' - - class FakeVcs: - # no need to change to the correct commit here - def gotorevision(self, rev, refresh=True): - pass - - # no srclib info needed, but it could be added... 
- def getsrclib(self): - return None - - def deinitsubmodules(self): - pass - - fdroidserver.common.prepare_source(FakeVcs(), app, build, - app_build_dir, srclib_testdir, app_build_dir, - onserver=True, refresh=False) # do not clone in this test - - def test_prepare_sources_refresh(self): - _mock_common_module_options_instance() - packageName = 'org.fdroid.ci.test.app' - os.chdir(self.testdir) - os.mkdir('build') - os.mkdir('metadata') - - # use a local copy if available to avoid hitting the network - tmprepo = os.path.join(basedir, 'tmp', 'importer') - if os.path.exists(tmprepo): - git_url = tmprepo - else: - git_url = 'https://gitlab.com/fdroid/ci-test-app.git' - - metadata = dict() - metadata['Description'] = 'This is just a test app' - metadata['RepoType'] = 'git' - metadata['Repo'] = git_url - with open(os.path.join('metadata', packageName + '.yml'), 'w') as fp: - yaml_dumper.dump(metadata, fp) - - gitrepo = os.path.join(self.testdir, 'build', packageName) - vcs0 = fdroidserver.common.getvcs('git', git_url, gitrepo) - vcs0.gotorevision('0.3', refresh=True) - vcs1 = fdroidserver.common.getvcs('git', git_url, gitrepo) - vcs1.gotorevision('0.3', refresh=False) - - def test_setup_vcs_srclib(self): - app = fdroidserver.metadata.App( - { - 'RepoType': 'srclib', - 'Repo': 'TransportsRennes', - } - ) - srclib = { - 'RepoType': 'git', - 'Repo': 'https://github.com/ybonnel/TransportsRennes', - } - fdroidserver.metadata.srclibs = {'TransportsRennes': srclib} - vcs, build_dir = fdroidserver.common.setup_vcs(app) - self.assertIsNotNone(vcs) - self.assertEqual(build_dir, Path('build/srclib/TransportsRennes')) - - def test_getvcs_srclib(self): - vcstype = 'srclib' - remote = 'TransportsRennes' - local = 'build/srclib/' + remote - fdroidserver.metadata.srclibs = { - remote: { - 'RepoType': 'git', - 'Repo': 'https://github.com/ybonnel/TransportsRennes', - } - } - self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, remote, local)) - self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, Path(remote), local)) - self.assertIsNotNone(fdroidserver.common.getvcs(vcstype, remote, Path(local))) - self.assertIsNotNone(fdroidserver.common.getvcs( - vcstype, Path(remote), Path(local) - )) - with self.assertRaises(VCSException): - fdroidserver.common.getvcs(vcstype, remote, 'bad') - with self.assertRaises(VCSException): - fdroidserver.common.getvcs(vcstype, remote, Path('bad')) - with self.assertRaises(VCSException): - fdroidserver.common.getvcs(vcstype, Path(remote), 'bad') - with self.assertRaises(VCSException): - fdroidserver.common.getvcs(vcstype, Path(remote), Path('bad')) - - def test_fdroid_popen_stderr_redirect(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - _mock_common_module_options_instance() - - commands = ['sh', '-c', 'echo stdout message && echo stderr message 1>&2'] - - p = fdroidserver.common.FDroidPopen(commands) - self.assertEqual(p.output, 'stdout message\nstderr message\n') - - p = fdroidserver.common.FDroidPopen(commands, stderr_to_stdout=False) - self.assertEqual(p.output, 'stdout message\n') - - def test_signjar(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') - fdroidserver.common.config = config - fdroidserver.signindex.config = config - - sourcedir = os.path.join(basedir, 'signindex') - for f in ('testy.jar', 'guardianproject.jar'): - sourcefile = os.path.join(sourcedir, f) - testfile = 
os.path.join(self.testdir, f) - shutil.copy(sourcefile, self.testdir) - fdroidserver.signindex.sign_jar(testfile, use_old_algs=True) - # these should be resigned, and therefore different - self.assertNotEqual( - open(sourcefile, 'rb').read(), open(testfile, 'rb').read() - ) - - def test_verify_apk_signature(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - fdroidserver.common.config = config - - self.assertTrue(fdroidserver.common.verify_apk_signature('bad-unicode-πÇÇ现代通用字-български-عربي1.apk')) - if 'apksigner' in fdroidserver.common.config: # apksigner considers MD5 signatures valid - self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_1.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_2.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_3.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_4.apk')) - else: - self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_1.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_2.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_3.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('org.bitbucket.tickytacky.mirrormirror_4.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('org.dyndns.fules.ck_20.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('urzip.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-badcert.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-badsig.apk')) - self.assertTrue(fdroidserver.common.verify_apk_signature('urzip-release.apk')) - self.assertFalse(fdroidserver.common.verify_apk_signature('urzip-release-unsigned.apk')) - - def test_verify_old_apk_signature(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') - fdroidserver.common.config = config - - try: - fdroidserver.common.verify_deprecated_jar_signature('bad-unicode-πÇÇ现代通用字-български-عربي1.apk') - fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_1.apk') - fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_2.apk') - fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_3.apk') - fdroidserver.common.verify_deprecated_jar_signature('org.bitbucket.tickytacky.mirrormirror_4.apk') - fdroidserver.common.verify_deprecated_jar_signature('org.dyndns.fules.ck_20.apk') - fdroidserver.common.verify_deprecated_jar_signature('urzip.apk') - fdroidserver.common.verify_deprecated_jar_signature('urzip-release.apk') - except VerificationException: - self.fail("failed to jarsigner failed to verify an old apk") - self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-badcert.apk') - self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-badsig.apk') - self.assertRaises(VerificationException, fdroidserver.common.verify_deprecated_jar_signature, 'urzip-release-unsigned.apk') - - def test_verify_jar_signature(self): - """Sign entry.jar and make sure it validates""" - config = 
fdroidserver.common.read_config() - config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') - config['keystore'] = os.path.join(basedir, 'keystore.jks') - config['repo_keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - fdroidserver.common.config = config - fdroidserver.signindex.config = config - repo_dir = Path(self.testdir) / 'repo' - repo_dir.mkdir() - shutil.copy('repo/entry.json', repo_dir) - shutil.copy('repo/index-v2.json', repo_dir) - os.chdir(self.testdir) - fdroidserver.signindex.sign_index('repo', 'entry.json') - fdroidserver.common.verify_jar_signature('repo/entry.jar') - - def test_verify_jar_signature_fails(self): - """Test verify_jar_signature fails on unsigned and deprecated algorithms""" - config = fdroidserver.common.read_config() - config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') - fdroidserver.common.config = config - source_dir = os.path.join(basedir, 'signindex') - for f in ('unsigned.jar', 'testy.jar', 'guardianproject.jar', 'guardianproject-v1.jar'): - testfile = os.path.join(source_dir, f) - with self.assertRaises(fdroidserver.index.VerificationException): - fdroidserver.common.verify_jar_signature(testfile) - - def test_verify_deprecated_jar_signature(self): - config = fdroidserver.common.read_config() - config['jarsigner'] = fdroidserver.common.find_sdk_tools_cmd('jarsigner') - fdroidserver.common.config = config - source_dir = os.path.join(basedir, 'signindex') - for f in ('testy.jar', 'guardianproject.jar'): - testfile = os.path.join(source_dir, f) - fdroidserver.common.verify_deprecated_jar_signature(testfile) - - testfile = os.path.join(source_dir, 'unsigned.jar') - with self.assertRaises(fdroidserver.index.VerificationException): - fdroidserver.common.verify_deprecated_jar_signature(testfile) - - def test_verify_apks(self): - config = fdroidserver.common.read_config() - fdroidserver.common.config = config - _mock_common_module_options_instance() - - sourceapk = os.path.join(basedir, 'urzip.apk') - - copyapk = os.path.join(self.testdir, 'urzip-copy.apk') - shutil.copy(sourceapk, copyapk) - self.assertTrue(fdroidserver.common.verify_apk_signature(copyapk)) - self.assertIsNone( - fdroidserver.common.verify_apks(sourceapk, copyapk, self.testdir) - ) - - unsignedapk = os.path.join(self.testdir, 'urzip-unsigned.apk') - with ZipFile(sourceapk, 'r') as apk: - with ZipFile(unsignedapk, 'w') as testapk: - for info in apk.infolist(): - if not info.filename.startswith('META-INF/'): - testapk.writestr(info, apk.read(info.filename)) - self.assertIsNone( - fdroidserver.common.verify_apks(sourceapk, unsignedapk, self.testdir) - ) - - twosigapk = os.path.join(self.testdir, 'urzip-twosig.apk') - otherapk = ZipFile(os.path.join(basedir, 'urzip-release.apk'), 'r') - with ZipFile(sourceapk, 'r') as apk: - with ZipFile(twosigapk, 'w') as testapk: - for info in apk.infolist(): - testapk.writestr(info, apk.read(info.filename)) - if info.filename.startswith('META-INF/'): - testapk.writestr(info.filename, otherapk.read(info.filename)) - otherapk.close() - self.assertFalse(fdroidserver.common.verify_apk_signature(twosigapk)) - self.assertIsNone(fdroidserver.common.verify_apks(sourceapk, twosigapk, self.testdir)) - - def test_get_certificate_with_chain_sandisk(self): - """Test that APK signatures with a cert chain are parsed like apksigner. 
- - SanDisk signs their APKs with a X.509 certificate chain of - trust, so there are actually three certificates - included. apksigner only cares about the certificate in the - chain that actually signs the manifest. - - The correct value comes from: - apksigner verify --print-certs 883cbdae7aeb2e4b122e8ee8d89966c7062d0d49107a130235fa220a5b994a79.apk - - """ - cert = fdroidserver.common.get_certificate( - signature_block_file=Path('SANAPPSI.RSA').read_bytes(), - signature_file=Path('SANAPPSI.SF').read_bytes(), - ) - self.assertEqual( - 'ea0abbf2a142e4b167405d516b2cc408c4af4b29cd50ba281aa4470d4aab3e53', - fdroidserver.common.signer_fingerprint(cert), - ) - - def test_write_to_config(self): - """Test that config items can be added without messing up config.yml. - - The '_orig' key are where the original string values of paths - are stored. Paths have tilde expansion and env vars replaced - in fill_config_defaults(). - - """ - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """\ - # abc - # test: 'example value' - a_path: ~/android-sdk - - # comment - do_not_touch: good value - a_path: "!!!" - - key: "123" # inline""" - ) - ) - - config = {'key': 111, 'a_path_orig': '~/android-sdk'} - fdroidserver.common.write_to_config(config, 'key') - fdroidserver.common.write_to_config(config, 'a_path') - fdroidserver.common.write_to_config(config, 'test', value='test value') - fdroidserver.common.write_to_config(config, 'new_key', value='new') - - with open(fdroidserver.common.CONFIG_FILE) as fp: - self.assertEqual( - fp.read(), - textwrap.dedent( - """\ - # abc - test: test value - a_path: ~/android-sdk - - # comment - do_not_touch: good value - - key: 111 - new_key: new - """ - ), - ) - - def test_write_to_config_when_empty(self): - os.chdir(self.testdir) - config_yml = Path(fdroidserver.common.CONFIG_FILE) - config_yml.write_text('', encoding='utf-8') - fdroidserver.common.write_to_config({}, 'key', 'val') - self.assertEqual(config_yml.read_text(), 'key: val\n') - - def test_apk_name_regex(self): - good = [ - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_-123456.apk', - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456_abcdef0.apk', - 'urzip_-123456.apk', - 'a0_0.apk', - 'Z0_0.apk', - 'a0_0_abcdef0.apk', - 'a_a_a_a_0_abcdef0.apk', - 'a_____0.apk', - 'a_____123456_abcdef0.apk', - 'org.fdroid.fdroid_123456.apk', - # valid, but "_99999" is part of packageName rather than versionCode - 'org.fdroid.fdroid_99999_123456.apk', - # should be valid, but I can't figure out the regex since \w includes digits - # 'πÇÇπÇÇ现代汉语通用字българскиعربي1234ö_0_123bafd.apk', - ] - for name in good: - m = fdroidserver.common.APK_NAME_REGEX.match(name) - self.assertIsNotNone(m) - self.assertIn(m.group(2), ('-123456', '0', '123456')) - self.assertIn(m.group(3), ('abcdef0', None)) - - bad = [ - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456_abcdefg.apk', - 'urzip-_-198274.apk', - 'urzip-_0_123bafd.apk', - 'no spaces allowed_123.apk', - '0_0.apk', - '0_0_abcdef0.apk', - ] - for name in bad: - self.assertIsNone(fdroidserver.common.APK_NAME_REGEX.match(name)) - - def test_standard_file_name_regex(self): - good = [ - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_-123456.mp3', - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_123456.mov', - 'Document_-123456.pdf', - 'WTF_0.MOV', - 'Z0_0.ebk', - 'a_a_a_a_0.txt', - 'org.fdroid.fdroid.privileged.ota_123456.zip', - 'πÇÇπÇÇ现代汉语通用字българскиعربي1234ö_0.jpeg', - 'a_____0.PNG', - # valid, but "_99999" is part of packageName rather than versionCode - 'a_____99999_123456.zip', - 
'org.fdroid.fdroid_99999_123456.zip', - ] - for name in good: - m = fdroidserver.common.STANDARD_FILE_NAME_REGEX.match(name) - self.assertIsNotNone(m) - self.assertIn(m.group(2), ('-123456', '0', '123456')) - - bad = [ - 'urzipπÇÇπÇÇ现代汉语通用字българскиعربي1234ö_abcdefg.JPEG', - 'urzip-_-198274.zip', - 'urzip-_123bafd.pdf', - 'no spaces allowed_123.foobar', - 'a_____0.', - ] - for name in bad: - self.assertIsNone(fdroidserver.common.STANDARD_FILE_NAME_REGEX.match(name)) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_apk_signer_fingerprint(self): - - # fingerprints fetched with: keytool -printcert -file ____.RSA - testapks = (('repo/obb.main.oldversion_1444412523.apk', - '818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1'), - ('repo/obb.main.twoversions_1101613.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6'), - ('repo/obb.main.twoversions_1101617.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6')) - - for apkfile, keytoolcertfingerprint in testapks: - self.assertEqual(keytoolcertfingerprint, - fdroidserver.common.apk_signer_fingerprint(apkfile)) - - def test_find_apksigner_system_package_default_path(self): - """apksigner should be automatically used from the PATH""" - usr_bin_apksigner = '/usr/bin/apksigner' - if not os.path.isfile(usr_bin_apksigner): - self.skipTest('SKIPPING since %s is not installed!' % usr_bin_apksigner) - with mock.patch.dict(os.environ, clear=True): - os.environ['PATH'] = '/usr/local/bin:/usr/bin:/bin' - config = {} - fdroidserver.common.find_apksigner(config) - self.assertEqual(usr_bin_apksigner, config.get('apksigner')) - - def test_find_apksigner_config_overrides(self): - """apksigner should come from config before any auto-detection""" - os.chdir(self.testdir) - android_home = os.path.join(self.testdir, 'ANDROID_HOME') - do_not_use = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') - os.makedirs(os.path.dirname(do_not_use)) - with open(do_not_use, 'w') as fp: - fp.write('#!/bin/sh\ndate\n') - os.chmod(do_not_use, 0o0755) # nosec B103 - apksigner = os.path.join(self.testdir, 'apksigner') - config = {'apksigner': apksigner} - with mock.patch.dict(os.environ, clear=True): - os.environ['ANDROID_HOME'] = android_home - os.environ['PATH'] = '%s:/usr/local/bin:/usr/bin:/bin' % android_home - fdroidserver.common.find_apksigner(config) - self.assertEqual(apksigner, config.get('apksigner')) - - def test_find_apksigner_prefer_path(self): - """apksigner should come from PATH before ANDROID_HOME""" - os.chdir(self.testdir) - apksigner = os.path.join(self.testdir, 'apksigner') - with open(apksigner, 'w') as fp: - fp.write('#!/bin/sh\ndate\n') - os.chmod(apksigner, 0o0755) # nosec B103 - - android_home = os.path.join(self.testdir, 'ANDROID_HOME') - do_not_use = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') - os.makedirs(os.path.dirname(do_not_use)) - with open(do_not_use, 'w') as fp: - fp.write('#!/bin/sh\ndate\n') - os.chmod(do_not_use, 0o0755) # nosec B103 - - config = {'sdk_path': android_home} - with mock.patch.dict(os.environ, clear=True): - os.environ['ANDROID_HOME'] = android_home - os.environ['PATH'] = '%s:/usr/local/bin:/usr/bin:/bin' % os.path.dirname(apksigner) - fdroidserver.common.find_apksigner(config) - self.assertEqual(apksigner, config.get('apksigner')) - - def test_find_apksigner_prefer_newest(self): - """apksigner should be the newest available in ANDROID_HOME""" - os.chdir(self.testdir) - android_home = 
os.path.join(self.testdir, 'ANDROID_HOME') - - apksigner = os.path.join(android_home, 'build-tools', '30.0.3', 'apksigner') - os.makedirs(os.path.dirname(apksigner)) - with open(apksigner, 'w') as fp: - fp.write('#!/bin/sh\necho 30.0.3\n') - os.chmod(apksigner, 0o0755) # nosec B103 - - do_not_use = os.path.join(android_home, 'build-tools', '29.0.3', 'apksigner') - os.makedirs(os.path.dirname(do_not_use)) - with open(do_not_use, 'w') as fp: - fp.write('#!/bin/sh\necho 29.0.3\n') - os.chmod(do_not_use, 0o0755) # nosec B103 - - config = {'sdk_path': android_home} - with mock.patch.dict(os.environ, clear=True): - os.environ['PATH'] = '/fake/path/to/avoid/conflicts' - fdroidserver.common.find_apksigner(config) - self.assertEqual(apksigner, config.get('apksigner')) - - def test_find_apksigner_system_package_android_home(self): - """Test that apksigner v30 or newer is found""" - os.chdir(self.testdir) - android_home = os.getenv('ANDROID_HOME') - if not android_home or not os.path.isdir(android_home): - self.skipTest('SKIPPING since ANDROID_HOME (%s) is not a dir!' % android_home) - build_tools = glob.glob(os.path.join(android_home, 'build-tools', '*', 'apksigner')) - if not build_tools: - self.skipTest('SKIPPING since ANDROID_HOME (%s) build-tools has no apksigner!' % android_home) - min_version = fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION - version = '0' - for bt in sorted(build_tools): - v = bt.split('/')[-2] - if v == 'debian': - continue - if LooseVersion(version) < LooseVersion(v): - version = v - if LooseVersion(version) < LooseVersion(min_version): - self.skipTest('SKIPPING since build-tools %s or higher is required!' % min_version) - fdroidserver.common.config = {'sdk_path': android_home} - with mock.patch.dict(os.environ, clear=True): - os.environ['PATH'] = '/fake/path/to/avoid/conflicts' - config = fdroidserver.common.read_config() - fdroidserver.common.find_apksigner(config) - self.assertEqual( - os.path.join(android_home, 'build-tools'), - os.path.dirname(os.path.dirname(config.get('apksigner'))), - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_sign_apk(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - if 'apksigner' not in config: - self.skipTest('SKIPPING test_sign_apk, apksigner not installed!') - - config['keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keystore'] = os.path.join(basedir, 'keystore.jks') - fdroidserver.common.config = config - fdroidserver.signindex.config = config - - unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') - signed = os.path.join(self.testdir, 'urzip-release.apk') - shutil.copy(os.path.join(basedir, 'urzip-release-unsigned.apk'), self.testdir) - - self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) - - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - self.assertTrue(os.path.isfile(signed)) - self.assertFalse(os.path.isfile(unsigned)) - self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) - - # now sign an APK with minSdkVersion >= 18 - unsigned = os.path.join(self.testdir, 'duplicate.permisssions_9999999-unsigned.apk') - signed = os.path.join(self.testdir, 'duplicate.permisssions_9999999.apk') - shutil.copy( - os.path.join(basedir, 'repo', 'duplicate.permisssions_9999999.apk'), - os.path.join(unsigned), - ) - 
fdroidserver.common.apk_strip_v1_signatures(unsigned, strip_manifest=True) - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - self.assertTrue(os.path.isfile(signed)) - self.assertFalse(os.path.isfile(unsigned)) - self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) - self.assertEqual('18', fdroidserver.common.get_androguard_APK(signed).get_min_sdk_version()) - - shutil.copy(os.path.join(basedir, 'minimal_targetsdk_30_unsigned.apk'), self.testdir) - unsigned = os.path.join(self.testdir, 'minimal_targetsdk_30_unsigned.apk') - signed = os.path.join(self.testdir, 'minimal_targetsdk_30.apk') - - self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - - self.assertTrue(os.path.isfile(signed)) - self.assertFalse(os.path.isfile(unsigned)) - self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) - # verify it has a v2 signature - self.assertTrue(fdroidserver.common.get_androguard_APK(signed).is_signed_v2()) - - shutil.copy(os.path.join(basedir, 'no_targetsdk_minsdk30_unsigned.apk'), self.testdir) - unsigned = os.path.join(self.testdir, 'no_targetsdk_minsdk30_unsigned.apk') - signed = os.path.join(self.testdir, 'no_targetsdk_minsdk30_signed.apk') - - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) - self.assertTrue(fdroidserver.common.get_androguard_APK(signed).is_signed_v2()) - - shutil.copy(os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk'), self.testdir) - unsigned = os.path.join(self.testdir, 'no_targetsdk_minsdk1_unsigned.apk') - signed = os.path.join(self.testdir, 'no_targetsdk_minsdk1_signed.apk') - - self.assertFalse(fdroidserver.common.verify_apk_signature(unsigned)) - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - - self.assertTrue(os.path.isfile(signed)) - self.assertFalse(os.path.isfile(unsigned)) - self.assertTrue(fdroidserver.common.verify_apk_signature(signed)) - - @unittest.skipIf(os.getuid() == 0, 'This is meaningless when run as root') - def test_sign_apk_fail(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - if 'apksigner' not in config: - self.skipTest('SKIPPING test_sign_apk_fail, apksigner not installed!') - - config['keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keystore'] = os.path.join(basedir, 'keystore.jks') - fdroidserver.common.config = config - fdroidserver.signindex.config = config - - unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') - signed = os.path.join(self.testdir, 'urzip-release.apk') - shutil.copy(os.path.join(basedir, 'urzip-release-unsigned.apk'), self.testdir) - - os.chmod(unsigned, 0o000) - with self.assertRaises(fdroidserver.exception.BuildException): - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - os.chmod(unsigned, 0o777) # nosec B103 - self.assertTrue(os.path.isfile(unsigned)) - self.assertFalse(os.path.isfile(signed)) - - def test_sign_apk_corrupt(self): - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - if 'apksigner' not in config: - self.skipTest('SKIPPING test_sign_apk_corrupt, apksigner not installed!') - - config['keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 
'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keystore'] = os.path.join(basedir, 'keystore.jks') - fdroidserver.common.config = config - fdroidserver.signindex.config = config - - unsigned = os.path.join(self.testdir, 'urzip-release-unsigned.apk') - signed = os.path.join(self.testdir, 'urzip-release.apk') - with open(unsigned, 'w') as fp: - fp.write('this is a corrupt APK') - - with self.assertRaises(fdroidserver.exception.BuildException): - fdroidserver.common.sign_apk(unsigned, signed, config['keyalias']) - self.assertTrue(os.path.isfile(unsigned)) - self.assertFalse(os.path.isfile(signed)) - - @unittest.skipUnless( - os.path.exists('tests/SystemWebView-repack.apk'), "file too big for sdist" - ) - def test_resign_apk(self): - """When using apksigner, it should resign signed APKs""" - _mock_common_module_options_instance() - config = fdroidserver.common.read_config() - if 'apksigner' not in config: - self.skipTest('SKIPPING test_resign_apk, apksigner not installed!') - if sys.byteorder == 'big': - self.skipTest('SKIPPING androguard is not ported to big-endian') - - config['keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keystore'] = os.path.join(basedir, 'keystore.jks') - fdroidserver.common.config = config - fdroidserver.signindex.config = config - - os.chdir(self.testdir) - os.mkdir('unsigned') - os.mkdir('repo') - - for apk in ( - 'org.bitbucket.tickytacky.mirrormirror_4.apk', - 'v2.only.sig_2.apk', - 'SystemWebView-repack.apk', - ): - original = os.path.join(basedir, apk) - unsigned = os.path.join('unsigned', apk) - resign = os.path.join('repo', apk) - shutil.copy(original, unsigned) - fdroidserver.common.sign_apk(unsigned, resign, config['keyalias']) - self.assertTrue( - fdroidserver.common.verify_apk_signature(resign), apk + " verifies" - ) - self.assertTrue(os.path.isfile(resign)) - self.assertFalse(os.path.isfile(unsigned)) - self.assertNotEqual( - fdroidserver.common.get_first_signer_certificate(original), - fdroidserver.common.get_first_signer_certificate(resign) - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_apk_id(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - self._set_build_tools() - try: - config['aapt'] = fdroidserver.common.find_sdk_tools_cmd('aapt') - except fdroidserver.exception.FDroidException: - pass # aapt is not required if androguard is present - - testcases = [ - ('repo/obb.main.twoversions_1101613.apk', 'obb.main.twoversions', 1101613, '0.1'), - ('org.bitbucket.tickytacky.mirrormirror_1.apk', 'org.bitbucket.tickytacky.mirrormirror', 1, '1.0'), - ('org.bitbucket.tickytacky.mirrormirror_2.apk', 'org.bitbucket.tickytacky.mirrormirror', 2, '1.0.1'), - ('org.bitbucket.tickytacky.mirrormirror_3.apk', 'org.bitbucket.tickytacky.mirrormirror', 3, '1.0.2'), - ('org.bitbucket.tickytacky.mirrormirror_4.apk', 'org.bitbucket.tickytacky.mirrormirror', 4, '1.0.3'), - ('org.dyndns.fules.ck_20.apk', 'org.dyndns.fules.ck', 20, 'v1.6pre2'), - ('issue-1128-min-sdk-30-poc.apk', 'org.fdroid.ci', 1, '1.0'), - ('issue-1128-poc1.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), - ('issue-1128-poc2.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), - ('issue-1128-poc3a.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), - ('issue-1128-poc3b.apk', 'android.appsecurity.cts.tinyapp', 10, '1.0'), - ('urzip.apk', 
'info.guardianproject.urzip', 100, '0.1'), - ('urzip-badcert.apk', 'info.guardianproject.urzip', 100, '0.1'), - ('urzip-badsig.apk', 'info.guardianproject.urzip', 100, '0.1'), - ('urzip-release.apk', 'info.guardianproject.urzip', 100, '0.1'), - ('urzip-release-unsigned.apk', 'info.guardianproject.urzip', 100, '0.1'), - ('repo/com.politedroid_3.apk', 'com.politedroid', 3, '1.2'), - ('repo/com.politedroid_4.apk', 'com.politedroid', 4, '1.3'), - ('repo/com.politedroid_5.apk', 'com.politedroid', 5, '1.4'), - ('repo/com.politedroid_6.apk', 'com.politedroid', 6, '1.5'), - ('repo/duplicate.permisssions_9999999.apk', 'duplicate.permisssions', 9999999, ''), - ('repo/info.zwanenburg.caffeinetile_4.apk', 'info.zwanenburg.caffeinetile', 4, '1.3'), - ('repo/obb.main.oldversion_1444412523.apk', 'obb.main.oldversion', 1444412523, '0.1'), - ('repo/obb.mainpatch.current_1619_another-release-key.apk', 'obb.mainpatch.current', 1619, '0.1'), - ('repo/obb.mainpatch.current_1619.apk', 'obb.mainpatch.current', 1619, '0.1'), - ('repo/obb.main.twoversions_1101613.apk', 'obb.main.twoversions', 1101613, '0.1'), - ('repo/obb.main.twoversions_1101615.apk', 'obb.main.twoversions', 1101615, '0.1'), - ('repo/obb.main.twoversions_1101617.apk', 'obb.main.twoversions', 1101617, '0.1'), - ('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', 'info.guardianproject.urzip', 100, '0.1'), - ] - for apkfilename, appid, versionCode, versionName in testcases: - a, vc, vn = fdroidserver.common.get_apk_id(apkfilename) - self.assertEqual(appid, a, 'androguard appid parsing failed for ' + apkfilename) - self.assertEqual(versionName, vn, 'androguard versionName parsing failed for ' + apkfilename) - self.assertEqual(versionCode, vc, 'androguard versionCode parsing failed for ' + apkfilename) - if 'aapt' in config: - a, vc, vn = fdroidserver.common.get_apk_id_aapt(apkfilename) - self.assertEqual(appid, a, 'aapt appid parsing failed for ' + apkfilename) - self.assertEqual(versionCode, vc, 'aapt versionCode parsing failed for ' + apkfilename) - self.assertEqual(versionName, vn, 'aapt versionName parsing failed for ' + apkfilename) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_apk_id_bad_apk(self): - """get_apk_id should never return None on error, only raise exceptions""" - with self.assertRaises(KeyError): - fdroidserver.common.get_apk_id('Norway_bouvet_europe_2.obf.zip') - shutil.copy('Norway_bouvet_europe_2.obf.zip', self.testdir) - os.chdir(self.testdir) - with ZipFile('Norway_bouvet_europe_2.obf.zip', 'a') as zipfp: - zipfp.writestr('AndroidManifest.xml', 'not a manifest') - with self.assertRaises(KeyError): - fdroidserver.common.get_apk_id('Norway_bouvet_europe_2.obf.zip') - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_apk_id_bad_path(self): - with self.assertRaises(FDroidException): - fdroidserver.common.get_apk_id('nope') - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_apk_id_api_call(self): - self.assertEqual( - ('info.guardianproject.urzip', 100, '0.1'), - fdroidserver.common.get_apk_id('urzip.apk'), - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_apk_id_bad_zip(self): - os.chdir(self.testdir) - badzip = 'badzip.apk' - with open(badzip, 'w') as fp: - fp.write('not a ZIP') - with self.assertRaises(BadZipFile): - fdroidserver.common.get_apk_id(badzip) - - def test_get_apk_id_aapt_regex(self): - files = 
glob.glob(os.path.join(basedir, 'build-tools', '[1-9]*.*', '*.txt')) - self.assertNotEqual(0, len(files)) - for f in files: - appid, versionCode = os.path.splitext(os.path.basename(f))[0][12:].split('_') - with open(f, encoding='utf-8') as fp: - m = fdroidserver.common.APK_ID_TRIPLET_REGEX.match(fp.read()) - if m: - self.assertEqual(appid, m.group(1)) - self.assertEqual(versionCode, m.group(2)) - else: - self.fail('could not parse aapt output: {}'.format(f)) - - def test_get_native_code(self): - testcases = [ - ('repo/obb.main.twoversions_1101613.apk', []), - ('org.bitbucket.tickytacky.mirrormirror_1.apk', []), - ('org.bitbucket.tickytacky.mirrormirror_2.apk', []), - ('org.bitbucket.tickytacky.mirrormirror_3.apk', []), - ('org.bitbucket.tickytacky.mirrormirror_4.apk', []), - ('org.dyndns.fules.ck_20.apk', ['arm64-v8a', 'armeabi', 'armeabi-v7a', 'mips', 'mips64', 'x86', 'x86_64']), - ('urzip.apk', []), - ('urzip-badcert.apk', []), - ('urzip-badsig.apk', []), - ('urzip-release.apk', []), - ('urzip-release-unsigned.apk', []), - ('repo/com.politedroid_3.apk', []), - ('repo/com.politedroid_4.apk', []), - ('repo/com.politedroid_5.apk', []), - ('repo/com.politedroid_6.apk', []), - ('repo/duplicate.permisssions_9999999.apk', []), - ('repo/info.zwanenburg.caffeinetile_4.apk', []), - ('repo/obb.main.oldversion_1444412523.apk', []), - ('repo/obb.mainpatch.current_1619_another-release-key.apk', []), - ('repo/obb.mainpatch.current_1619.apk', []), - ('repo/obb.main.twoversions_1101613.apk', []), - ('repo/obb.main.twoversions_1101615.apk', []), - ('repo/obb.main.twoversions_1101617.apk', []), - ('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', []), - ] - for apkfilename, native_code in testcases: - nc = fdroidserver.common.get_native_code(apkfilename) - self.assertEqual(native_code, nc) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_sdkversions_androguard(self): - """This is a sanity test that androguard isn't broken""" - - def get_minSdkVersion(apkfile): - apk = fdroidserver.common.get_androguard_APK(apkfile) - return fdroidserver.common.get_min_sdk_version(apk) - - def get_targetSdkVersion(apkfile): - apk = fdroidserver.common.get_androguard_APK(apkfile) - return apk.get_effective_target_sdk_version() - - self.assertEqual(4, get_minSdkVersion('bad-unicode-πÇÇ现代通用字-български-عربي1.apk')) - self.assertEqual(30, get_minSdkVersion('issue-1128-min-sdk-30-poc.apk')) - self.assertEqual(29, get_minSdkVersion('issue-1128-poc1.apk')) - self.assertEqual(29, get_minSdkVersion('issue-1128-poc2.apk')) - self.assertEqual(23, get_minSdkVersion('issue-1128-poc3a.apk')) - self.assertEqual(23, get_minSdkVersion('issue-1128-poc3b.apk')) - self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_1.apk')) - self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_2.apk')) - self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_3.apk')) - self.assertEqual(14, get_minSdkVersion('org.bitbucket.tickytacky.mirrormirror_4.apk')) - self.assertEqual(7, get_minSdkVersion('org.dyndns.fules.ck_20.apk')) - self.assertEqual(4, get_minSdkVersion('urzip.apk')) - self.assertEqual(4, get_minSdkVersion('urzip-badcert.apk')) - self.assertEqual(4, get_minSdkVersion('urzip-badsig.apk')) - self.assertEqual(4, get_minSdkVersion('urzip-release.apk')) - self.assertEqual(4, get_minSdkVersion('urzip-release-unsigned.apk')) - self.assertEqual(27, get_minSdkVersion('v2.only.sig_2.apk')) - self.assertEqual(3, 
get_minSdkVersion('repo/com.politedroid_3.apk')) - self.assertEqual(3, get_minSdkVersion('repo/com.politedroid_4.apk')) - self.assertEqual(3, get_minSdkVersion('repo/com.politedroid_5.apk')) - self.assertEqual(14, get_minSdkVersion('repo/com.politedroid_6.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.main.oldversion_1444412523.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.mainpatch.current_1619_another-release-key.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.mainpatch.current_1619.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101613.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101615.apk')) - self.assertEqual(4, get_minSdkVersion('repo/obb.main.twoversions_1101617.apk')) - self.assertEqual(4, get_minSdkVersion('repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk')) - - self.assertEqual(30, get_targetSdkVersion('minimal_targetsdk_30_unsigned.apk')) - self.assertEqual(1, get_targetSdkVersion('no_targetsdk_minsdk1_unsigned.apk')) - self.assertEqual(30, get_targetSdkVersion('no_targetsdk_minsdk30_unsigned.apk')) - - def test_apk_release_name(self): - appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('com.serwylo.lexica_905.apk') - self.assertEqual(appid, 'com.serwylo.lexica') - self.assertEqual(vercode, 905) - self.assertEqual(sigfp, None) - - appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('com.serwylo.lexica_905_c82e0f6.apk') - self.assertEqual(appid, 'com.serwylo.lexica') - self.assertEqual(vercode, 905) - self.assertEqual(sigfp, 'c82e0f6') - - appid, vercode, sigfp = fdroidserver.common.apk_parse_release_filename('beverly_hills-90210.apk') - self.assertEqual(appid, None) - self.assertEqual(vercode, None) - self.assertEqual(sigfp, None) - - def test_metadata_find_developer_signature(self): - sig = fdroidserver.common.metadata_find_developer_signature('org.smssecure.smssecure') - self.assertEqual('b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', sig) - - def test_parse_xml(self): - manifest = Path('source-files/fdroid/fdroidclient/AndroidManifest.xml') - parsed = fdroidserver.common.parse_xml(manifest) - self.assertIsNotNone(parsed) - self.assertEqual(str(type(parsed)), "") - - def test_parse_androidmanifests(self): - app = fdroidserver.metadata.App() - app.id = 'org.fdroid.fdroid' - paths = [ - Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), - Path('source-files/fdroid/fdroidclient/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('0.94-test', 940, 'org.fdroid.fdroid'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - app.AutoName = 'android-chat' - app.RepoType = 'git' - url = 'https://github.com/wildfirechat/android-chat.git' - app.SourceCode = url.rstrip('.git') - app.Repo = url - paths = [ - Path('source-files/cn.wildfirechat.chat/avenginekit/build.gradle'), - Path('source-files/cn.wildfirechat.chat/build.gradle'), - Path('source-files/cn.wildfirechat.chat/client/build.gradle'), - Path('source-files/cn.wildfirechat.chat/client/src/main/AndroidManifest.xml'), - Path('source-files/cn.wildfirechat.chat/emojilibrary/build.gradle'), - Path('source-files/cn.wildfirechat.chat/gradle/build_libraries.gradle'), - Path('source-files/cn.wildfirechat.chat/imagepicker/build.gradle'), - Path('source-files/cn.wildfirechat.chat/mars-core-release/build.gradle'), - Path('source-files/cn.wildfirechat.chat/push/build.gradle'), - 
Path('source-files/cn.wildfirechat.chat/settings.gradle'), - Path('source-files/cn.wildfirechat.chat/chat/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('0.6.9', 23, 'cn.wildfirechat.chat'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - app.Repo = 'https://github.com/Integreight/1Sheeld-Android-App' - paths = [ - Path('source-files/com.integreight.onesheeld/pagerIndicator/src/main/AndroidManifest.xml'), - Path('source-files/com.integreight.onesheeld/pagerIndicator/build.gradle'), - Path('source-files/com.integreight.onesheeld/oneSheeld/src/main/AndroidManifest.xml'), - Path('source-files/com.integreight.onesheeld/oneSheeld/build.gradle'), - Path('source-files/com.integreight.onesheeld/localeapi/src/main/AndroidManifest.xml'), - Path('source-files/com.integreight.onesheeld/localeapi/build.gradle'), - Path('source-files/com.integreight.onesheeld/build.gradle'), - Path('source-files/com.integreight.onesheeld/settings.gradle'), - Path('source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml'), - Path('source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle'), - Path('source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/main/AndroidManifest.xml'), - Path('source-files/com.integreight.onesheeld/pullToRefreshlibrary/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.9.0', 170521, 'com.integreight.onesheeld'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - app.id = 'dev.patrickgold.florisboard' - paths = [ - Path('source-files/dev.patrickgold.florisboard/app/build.gradle.kts'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('0.3.10', 29, 'dev.patrickgold.florisboard'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - app.id = 'com.ubergeek42.WeechatAndroid' - paths = [ - Path('source-files/com.ubergeek42.WeechatAndroid/app/build.gradle.kts'), - Path('source-files/com.ubergeek42.WeechatAndroid/app/src/main/res/values/strings.xml'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.8.1', 10801, None), - fdroidserver.common.parse_androidmanifests(paths, app)) - - def test_parse_androidmanifests_ignore(self): - app = fdroidserver.metadata.App() - app.id = 'org.fdroid.fdroid' - app.UpdateCheckIgnore = '-test' - paths = [ - Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), - Path('source-files/fdroid/fdroidclient/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('Ignore', None, 'org.fdroid.fdroid'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - def test_parse_androidmanifests_with_flavor(self): - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['devVersion'] - app['Builds'] = [build] - app.id = 'org.fdroid.fdroid.dev' - paths = [ - Path('source-files/fdroid/fdroidclient/AndroidManifest.xml'), - Path('source-files/fdroid/fdroidclient/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('0.95-dev', 949, 'org.fdroid.fdroid.dev'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['free'] - app['Builds'] = [build] - app.id = 'eu.siacs.conversations' - paths 
= [ - Path('source-files/eu.siacs.conversations/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.23.1', 245, 'eu.siacs.conversations'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['generic'] - app['Builds'] = [build] - app.id = 'com.nextcloud.client' - paths = [ - Path('source-files/com.nextcloud.client/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('2.0.0', 20000099, 'com.nextcloud.client'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['versionDev'] - app['Builds'] = [build] - app.id = 'com.nextcloud.android.beta' - paths = [ - Path('source-files/com.nextcloud.client/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('20171223', 20171223, 'com.nextcloud.android.beta'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['standard'] - app['Builds'] = [build] - app.id = 'at.bitfire.davdroid' - paths = [ - Path('source-files/at.bitfire.davdroid/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.9.8.1-ose', 197, 'at.bitfire.davdroid'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['libre'] - app['Builds'] = [build] - app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.libre' - paths = [ - Path('source-files/com.kunzisoft.testcase/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.0-libre', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix.libre'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['pro'] - app['Builds'] = [build] - app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.pro' - paths = [ - Path('source-files/com.kunzisoft.testcase/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('20180430-pro', 20180430, 'com.kunzisoft.fdroidtest.applicationidsuffix.pro'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['free'] - app['Builds'] = [build] - app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix' - paths = [ - Path('source-files/com.kunzisoft.testcase/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.0-free', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['underscore'] - app['Builds'] = [build] - app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore' - paths = [ - Path('source-files/com.kunzisoft.testcase/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('20180430-underscore', 20180430, 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = 
fdroidserver.metadata.Build() - build.gradle = ['underscore_first'] - app['Builds'] = [build] - app.id = 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first' - paths = [ - Path('source-files/com.kunzisoft.testcase/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.0', 1, 'com.kunzisoft.fdroidtest.applicationidsuffix.underscore_first'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['fdroid'] - app['Builds'] = [build] - app.id = 'com.github.jameshnsears.quoteunquote' - paths = [ - Path('source-files/com.github.jameshnsears.quoteunquote/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('2.5.2-fdroid', 73, 'com.github.jameshnsears.quoteunquote'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['fdroidFlavor'] - app['Builds'] = [build] - app.id = 'com.jens.automation2' - paths = [ - Path('source-files/com.jens.automation2/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('1.6.34-fdroid', 105, 'com.jens.automation2'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - app = fdroidserver.metadata.App() - build = fdroidserver.metadata.Build() - build.gradle = ['VAR', 'prod'] - app['Builds'] = [build] - app.id = 'de.varengold.activeTAN' - paths = [ - Path('source-files/de.varengold.activeTAN/build.gradle'), - ] - for path in paths: - self.assertTrue(os.path.isfile(path)) - self.assertEqual(('2021-06-30', 34, 'de.varengold.activeTAN'), - fdroidserver.common.parse_androidmanifests(paths, app)) - - def test_parse_srclib_spec_good(self): - self.assertEqual(fdroidserver.common.parse_srclib_spec('osmand-external-skia@android/oreo'), - ('osmand-external-skia', 'android/oreo', None, None)) - self.assertEqual(fdroidserver.common.parse_srclib_spec('1:appcompat@v7'), - ('appcompat', 'v7', '1', None)) - self.assertEqual(fdroidserver.common.parse_srclib_spec('1:Support/v7/appcompat@android-4.4_r1.1'), - ('Support', 'android-4.4_r1.1', '1', 'v7/appcompat')) - - def test_parse_srclib_spec_many_ats(self): - self.assertEqual( - fdroidserver.common.parse_srclib_spec('foo@@v2'), ('foo', '@v2', None, None) - ) - self.assertEqual( - fdroidserver.common.parse_srclib_spec('bar@2@f'), ('bar', '2@f', None, None) - ) - - def test_parse_srclib_spec_none(self): - with self.assertRaises(MetaDataException): - fdroidserver.common.parse_srclib_spec(None) - - def test_parse_srclib_spec_no_ref(self): - with self.assertRaises(MetaDataException): - fdroidserver.common.parse_srclib_spec('no-ref') - with self.assertRaises(MetaDataException): - fdroidserver.common.parse_srclib_spec('noref@') - - def test_parse_srclib_spec_no_name(self): - with self.assertRaises(MetaDataException): - fdroidserver.common.parse_srclib_spec('@ref') - - def test_remove_signing_keys(self): - shutil.copytree( - os.path.join(basedir, 'source-files'), - os.path.join(self.testdir, 'source-files'), - ) - os.chdir(self.testdir) - with_signingConfigs = [ - 'source-files/com.seafile.seadroid2/app/build.gradle', - 'source-files/eu.siacs.conversations/build.gradle', - 'source-files/info.guardianproject.ripple/build.gradle', - 'source-files/open-keychain/open-keychain/build.gradle', - 'source-files/open-keychain/open-keychain/OpenKeychain/build.gradle', - 
'source-files/org.tasks/app/build.gradle.kts', - 'source-files/osmandapp/osmand/build.gradle', - 'source-files/ut.ewh.audiometrytest/app/build.gradle', - ] - for f in with_signingConfigs: - build_dir = os.path.join(*f.split(os.sep)[:2]) - if not os.path.isdir(build_dir): - continue - fdroidserver.common.remove_signing_keys(build_dir) - fromfile = os.path.join(basedir, f) - with open(f) as fp: - content = fp.read() - if 'signingConfig' in content: - with open(f) as fp: - b = fp.readlines() - with open(fromfile) as fp: - a = fp.readlines() - diff = difflib.unified_diff(a, b, fromfile, f) - sys.stdout.writelines(diff) - self.assertFalse(True) - do_not_modify = [ - 'source-files/Zillode/syncthing-silk/build.gradle', - 'source-files/at.bitfire.davdroid/build.gradle', - 'source-files/com.kunzisoft.testcase/build.gradle', - 'source-files/com.nextcloud.client/build.gradle', - 'source-files/fdroid/fdroidclient/build.gradle', - 'source-files/firebase-suspect/app/build.gradle', - 'source-files/firebase-suspect/build.gradle', - 'source-files/firebase-allowlisted/app/build.gradle', - 'source-files/firebase-allowlisted/build.gradle', - 'source-files/org.mozilla.rocket/app/build.gradle', - 'source-files/realm/react-native/android/build.gradle', - 'triple-t-2/build/org.piwigo.android/app/build.gradle', - ] - for f in do_not_modify: - build_dir = os.path.join(*f.split(os.sep)[:2]) - if not os.path.isdir(build_dir): - continue - fdroidserver.common.remove_signing_keys(build_dir) - fromfile = os.path.join(basedir, f) - with open(fromfile) as fp: - a = fp.readlines() - with open(f) as fp: - b = fp.readlines() - diff = list(difflib.unified_diff(a, b, fromfile, f)) - self.assertEqual(0, len(diff), 'This file should not have been modified:\n' + ''.join(diff)) - - def test_calculate_math_string(self): - self.assertEqual(1234, - fdroidserver.common.calculate_math_string('1234')) - self.assertEqual((1 + 1) * 2, - fdroidserver.common.calculate_math_string('(1 + 1) * 2')) - self.assertEqual((1 - 1) * 2 + 3 * 1 - 1, - fdroidserver.common.calculate_math_string('(1 - 1) * 2 + 3 * 1 - 1')) - self.assertEqual(0 - 12345, - fdroidserver.common.calculate_math_string('0 - 12345')) - self.assertEqual(0xffff, - fdroidserver.common.calculate_math_string('0xffff')) - self.assertEqual(0xcafe * 123, - fdroidserver.common.calculate_math_string('0xcafe * 123')) - self.assertEqual(-1, - fdroidserver.common.calculate_math_string('-1')) - with self.assertRaises(SyntaxError): - fdroidserver.common.calculate_math_string('__import__("urllib")') - with self.assertRaises(SyntaxError): - fdroidserver.common.calculate_math_string('self') - with self.assertRaises(SyntaxError): - fdroidserver.common.calculate_math_string('Ox9()') - with self.assertRaises(SyntaxError): - fdroidserver.common.calculate_math_string('1+1; print(1)') - with self.assertRaises(SyntaxError): - fdroidserver.common.calculate_math_string('1-1 # no comment') - - def test_calculate_IPFS_cid_with_no_tool(self): - fdroidserver.common.config = {'ipfs_cid': None} - self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('urzip.apk')) - self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('FileDoesNotExist')) - - @unittest.skipUnless(shutil.which('ipfs_cid'), 'calculate_IPFS_cid needs ipfs_cid') - def test_calculate_IPFS_cid(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - self.assertIsNone(fdroidserver.common.calculate_IPFS_cid('FileDoesNotExist')) - self.assertEqual( - 
fdroidserver.common.calculate_IPFS_cid('urzip.apk'), - "bafybeigmtgrwyvj77jaflje2rf533haeqtpu2wtwsctryjusjnsawacsam", - ) - - def test_deploy_build_log_with_rsync_with_id_file(self): - - mocklogcontent = bytes( - textwrap.dedent( - """\ - build started - building... - build completed - profit!""" - ), - 'utf-8', - ) - - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = False - fdroidserver.common.config = {} - fdroidserver.common.config['serverwebroot'] = [ - {'url': 'example.com:/var/www/fdroid/'}, - {'url': 'example.com:/var/www/fbot/'}, - ] - fdroidserver.common.config['deploy_process_logs'] = True - fdroidserver.common.config['identity_file'] = 'ssh/id_rsa' - - assert_subprocess_call_iteration = 0 - - def assert_subprocess_call(cmd): - nonlocal assert_subprocess_call_iteration - logging.debug(cmd) - if assert_subprocess_call_iteration == 0: - self.assertListEqual(['rsync', - '--archive', - '--delete-after', - '--safe-links', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ssh/id_rsa', - cmd[6], - 'example.com:/var/www/fdroid/repo/'], - cmd) - self.assertTrue(cmd[6].endswith('/com.example.app_4711.log.gz')) - with gzip.open(cmd[6], 'r') as f: - self.assertTrue(f.read(), mocklogcontent) - elif assert_subprocess_call_iteration == 1: - self.assertListEqual(['rsync', - '--archive', - '--delete-after', - '--safe-links', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ssh/id_rsa', - cmd[6], - 'example.com:/var/www/fbot/repo/'], - cmd) - self.assertTrue(cmd[6].endswith('/com.example.app_4711.log.gz')) - with gzip.open(cmd[6], 'r') as f: - self.assertTrue(f.read(), mocklogcontent) - else: - self.fail('unexpected subprocess.call invocation ({})' - .format(assert_subprocess_call_iteration)) - assert_subprocess_call_iteration += 1 - return 0 - - with mock.patch('subprocess.call', - side_effect=assert_subprocess_call): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - fdroidserver.common.deploy_build_log_with_rsync( - 'com.example.app', 4711, mocklogcontent) - - expected_log_path = os.path.join(tmpdir, 'repo', 'com.example.app_4711.log.gz') - self.assertTrue(os.path.isfile(expected_log_path)) - with gzip.open(expected_log_path, 'r') as f: - self.assertEqual(f.read(), mocklogcontent) - - def test_deploy_status_json(self): - os.chdir(self.testdir) - fakesubcommand = 'fakesubcommand' - fake_timestamp = 1234567890 - fakeserver = 'example.com:/var/www/fbot/' - expected_dir = os.path.join(self.testdir, fakeserver.replace(':', ''), 'repo', 'status') - - fdroidserver.common.options = mock.Mock() - fdroidserver.common.config = {} - fdroidserver.common.config['serverwebroot'] = [{'url': fakeserver}] - fdroidserver.common.config['identity_file'] = 'ssh/id_rsa' - - def assert_subprocess_call(cmd): - dest_path = os.path.join(self.testdir, cmd[-1].replace(':', '')) - if not os.path.exists(dest_path): - os.makedirs(dest_path) - return subprocess.run(cmd[:-1] + [dest_path]).returncode - - with mock.patch('subprocess.call', side_effect=assert_subprocess_call): - with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]): - output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp)) - self.assertFalse(os.path.exists(os.path.join(expected_dir, 'running.json'))) - with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]): - fdroidserver.common.write_status_json(output) - self.assertFalse(os.path.exists(os.path.join(expected_dir, fakesubcommand + '.json'))) - - 
fdroidserver.common.config['deploy_process_logs'] = True - - output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp)) - expected_path = os.path.join(expected_dir, 'running.json') - self.assertTrue(os.path.isfile(expected_path)) - with open(expected_path) as fp: - data = json.load(fp) - self.assertEqual(fake_timestamp * 1000, data['startTimestamp']) - self.assertFalse('endTimestamp' in data) - - testvalue = 'asdfasd' - output['testvalue'] = testvalue - - fdroidserver.common.write_status_json(output) - expected_path = os.path.join(expected_dir, fakesubcommand + '.json') - self.assertTrue(os.path.isfile(expected_path)) - with open(expected_path) as fp: - data = json.load(fp) - self.assertEqual(fake_timestamp * 1000, data['startTimestamp']) - self.assertTrue('endTimestamp' in data) - self.assertEqual(testvalue, output.get('testvalue')) - - def test_string_is_integer(self): - self.assertTrue(fdroidserver.common.string_is_integer('0x10')) - self.assertTrue(fdroidserver.common.string_is_integer('010')) - self.assertTrue(fdroidserver.common.string_is_integer('123')) - self.assertFalse(fdroidserver.common.string_is_integer('0xgg')) - self.assertFalse(fdroidserver.common.string_is_integer('01g')) - self.assertFalse(fdroidserver.common.string_is_integer('o123')) - - def test_version_code_string_to_int(self): - self.assertEqual(16, fdroidserver.common.version_code_string_to_int('0x10')) - self.assertEqual(198712389, fdroidserver.common.version_code_string_to_int('198712389')) - self.assertEqual(8, fdroidserver.common.version_code_string_to_int('0o10')) - self.assertEqual(10, fdroidserver.common.version_code_string_to_int('010')) - self.assertEqual(123, fdroidserver.common.version_code_string_to_int('0000123')) - self.assertEqual(-42, fdroidserver.common.version_code_string_to_int('-42')) - - def test_getsrclibvcs(self): - fdroidserver.metadata.srclibs = {'somelib': {'RepoType': 'git'}, - 'yeslib': {'RepoType': 'hg'}, - 'nolib': {'RepoType': 'git-svn'}} - self.assertEqual(fdroidserver.common.getsrclibvcs('somelib'), 'git') - self.assertEqual(fdroidserver.common.getsrclibvcs('yeslib'), 'hg') - self.assertEqual(fdroidserver.common.getsrclibvcs('nolib'), 'git-svn') - with self.assertRaises(VCSException): - fdroidserver.common.getsrclibvcs('nonexistentlib') - - def test_getsrclib_not_found(self): - fdroidserver.common.config = {'sdk_path': '', - 'java_paths': {}} - fdroidserver.metadata.srclibs = {} - - with self.assertRaisesRegex(VCSException, 'srclib SDL not found.'): - fdroidserver.common.getsrclib('SDL@release-2.0.3', 'srclib') - - def test_getsrclib_gotorevision_raw(self): - fdroidserver.common.config = {'sdk_path': '', - 'java_paths': {}} - fdroidserver.metadata.srclibs = {'SDL': {'RepoType': 'git', - 'Repo': ''}} - - vcs = mock.Mock() - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - os.makedirs(os.path.join('srclib', 'SDL')) - with mock.patch('fdroidserver.common.getvcs', return_value=vcs): - ret = fdroidserver.common.getsrclib('SDL', 'srclib', raw=True) - self.assertEqual(vcs.srclib, ('SDL', None, 'srclib/SDL')) - self.assertEqual(ret, vcs) - - def test_getsrclib_gotorevision_ref(self): - fdroidserver.common.config = {'sdk_path': '', - 'java_paths': {}} - fdroidserver.metadata.srclibs = {'ACRA': {'RepoType': 'git', - 'Repo': 'https://github.com/ACRA/acra.git', - 'Subdir': None, - 'Prepare': None}} - - vcs = mock.Mock() - skm = mock.Mock() - dfm = mock.Mock() - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - 
os.makedirs(os.path.join('srclib', 'ACRA')) - with mock.patch('fdroidserver.common.getvcs', return_value=vcs): - with mock.patch('fdroidserver.common.remove_signing_keys', skm): - with mock.patch('fdroidserver.common.remove_debuggable_flags', dfm): - ret = fdroidserver.common.getsrclib('ACRA@acra-4.6.2', 'srclib') - self.assertEqual(vcs.srclib, ('ACRA', None, 'srclib/ACRA')) - vcs.gotorevision.assert_called_once_with('acra-4.6.2', True) - skm.assert_called_once_with('srclib/ACRA') - dfm.assert_called_once_with('srclib/ACRA') - self.assertEqual(ret, ('ACRA', None, 'srclib/ACRA')) - - def test_run_yamllint_wellformed(self): - try: - import yamllint.config - - yamllint.config # make pyflakes ignore this - except ImportError: - self.skipTest('yamllint not installed') - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('wellformed.yml', 'w') as f: - f.write( - textwrap.dedent( - '''\ - yaml: - file: - - for - - test - purposeses: true - ''' - ) - ) - result = fdroidserver.common.run_yamllint('wellformed.yml') - self.assertEqual(result, '') - - def test_run_yamllint_malformed(self): - try: - import yamllint.config - - yamllint.config # make pyflakes ignore this - except ImportError: - self.skipTest('yamllint not installed') - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('malformed.yml', 'w') as f: - f.write( - textwrap.dedent( - '''\ - yaml: - - that - fails - - test - ''' - ) - ) - result = fdroidserver.common.run_yamllint('malformed.yml') - self.assertIsNotNone(result) - self.assertNotEqual(result, '') - - def test_with_no_config(self): - """It should set defaults if no config file is found""" - os.chdir(self.testdir) - self.assertFalse(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertIsNotNone(config.get('char_limits')) - - def test_with_zero_size_config(self): - """It should set defaults if config file has nothing in it""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('') - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertIsNotNone(config.get('char_limits')) - - def test_with_config_yml(self): - """Make sure it is possible to use config.yml alone.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('apksigner: yml') - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertEqual('yml', config.get('apksigner')) - - def test_with_config_yml_utf8(self): - """Make sure it is possible to use config.yml in UTF-8 encoding.""" - os.chdir(self.testdir) - teststr = '/πÇÇ现代通用字-български-عربي1/ö/yml' - fdroidserver.common.write_config_file('apksigner: ' + teststr) - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertEqual(teststr, config.get('apksigner')) - - def test_with_config_yml_utf8_as_ascii(self): - """Make sure it is possible to use config.yml Unicode encoded as ASCII.""" - os.chdir(self.testdir) - teststr = '/πÇÇ现代通用字-български-عربي1/ö/yml' - with open(fdroidserver.common.CONFIG_FILE, 'w', encoding='utf-8') as fp: - config_dump({'apksigner': teststr}, fp) - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertEqual(teststr, config.get('apksigner')) - - def test_with_config_yml_with_env_var(self): - """Make sure it is possible to use config.yml alone.""" - 
os.chdir(self.testdir) - with mock.patch.dict(os.environ): - os.environ['SECRET'] = 'mysecretpassword' # nosec B105 - fdroidserver.common.write_config_file("""keypass: {'env': 'SECRET'}\n""") - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertEqual(os.getenv('SECRET', 'fail'), config.get('keypass')) - - def test_with_config_yml_is_dict(self): - os.chdir(self.testdir) - Path(fdroidserver.common.CONFIG_FILE).write_text('apksigner = /bin/apksigner') - with self.assertRaises(TypeError): - fdroidserver.common.read_config() - - def test_with_config_yml_is_not_mixed_type(self): - os.chdir(self.testdir) - Path(fdroidserver.common.CONFIG_FILE).write_text('k: v\napksigner = /bin/apk') - with self.assertRaises(ruamel.yaml.scanner.ScannerError): - fdroidserver.common.read_config() - - def test_config_repo_url(self): - """repo_url ends in /repo, archive_url ends in /archive.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """\ - repo_url: https://MyFirstFDroidRepo.org/fdroid/repo - archive_url: https://MyFirstFDroidRepo.org/fdroid/archive - """ - ) - ) - config = fdroidserver.common.read_config() - self.assertEqual( - 'https://MyFirstFDroidRepo.org/fdroid/repo', config.get('repo_url') - ) - self.assertEqual( - 'https://MyFirstFDroidRepo.org/fdroid/archive', config.get('archive_url') - ) - - def test_config_repo_url_extra_slash(self): - """repo_url ends in /repo, archive_url ends in /archive.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('repo_url: https://MyFirstFDroidRepo.org/fdroid/repo/') - with self.assertRaises(FDroidException): - fdroidserver.common.read_config() - - def test_config_repo_url_not_repo(self): - """repo_url ends in /repo, archive_url ends in /archive.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('repo_url: https://MyFirstFDroidRepo.org/fdroid/foo') - with self.assertRaises(FDroidException): - fdroidserver.common.read_config() - - def test_config_archive_url_extra_slash(self): - """repo_url ends in /repo, archive_url ends in /archive.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('archive_url: https://MyFirstFDroidRepo.org/fdroid/archive/') - with self.assertRaises(FDroidException): - fdroidserver.common.read_config() - - def test_config_archive_url_not_repo(self): - """repo_url ends in /repo, archive_url ends in /archive.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('archive_url: https://MyFirstFDroidRepo.org/fdroid/foo') - with self.assertRaises(FDroidException): - fdroidserver.common.read_config() - - def test_write_to_config_yml(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file('apksigner: yml') - os.chmod(fdroidserver.common.CONFIG_FILE, 0o0600) - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = fdroidserver.common.read_config() - self.assertFalse('keypass' in config) - self.assertEqual('yml', config.get('apksigner')) - fdroidserver.common.write_to_config(config, 'keypass', 'mysecretpassword') - fdroidserver.common.config = None - config = fdroidserver.common.read_config() - self.assertEqual('mysecretpassword', config['keypass']) - - def test_config_dict_with_int_keys(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """ - java_paths: - 8: /usr/lib/jvm/java-8-openjdk - """ - ) - ) - self.assertTrue(os.path.exists(fdroidserver.common.CONFIG_FILE)) - config = 
fdroidserver.common.read_config() - self.assertEqual('/usr/lib/jvm/java-8-openjdk', config['java_paths']['8']) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_lazy_load_env_vars(self): - """Test that the environment variables in config.yml are lazily loaded. - - It shouldn't throw errors when reading the config if the environment variables are - not set. It should throw errors when the variables are accessed from the config. - """ - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """ - serverwebroot: {env: serverwebroot} - servergitmirrors: - - url: {env: mirror1} - - url: {env: mirror2} - keypass: {env: keypass} - keystorepass: {env: keystorepass} - """ - ) - ) - with self.assertNoLogs(level=logging.ERROR): - config = fdroidserver.common.read_config() - - # KeyError should be raised if a key is not in the config.yml - with self.assertRaises(KeyError): - config['gpghome'] - - self.assertEqual(config.get('gpghome', 'gpg'), 'gpg') - os.environ.update({key: f"{key}supersecret" for key in ["serverwebroot", "mirror1", "mirror2", "keystorepass"]}) - self.assertEqual(config['keystorepass'], 'keystorepasssupersecret') - self.assertEqual(config['serverwebroot'], [{'url': 'serverwebrootsupersecret/'}]) - self.assertEqual(config['servergitmirrors'], [{'url': 'mirror1supersecret'}, {'url': 'mirror2supersecret'}]) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_lazy_load_env_vars_not_set(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file('keypass: {env: keypass}') - fdroidserver.common.read_config() - with self.assertLogs(level=logging.ERROR) as lw: - fdroidserver.common.config['keypass'] - self.assertTrue('is not set' in lw.output[0]) - self.assertEqual(1, len(lw.output)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_test_sdk_exists_fails_on_bad_sdk_path(self): - config = {'sdk_path': 'nothinghere'} - self.assertFalse(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_test_sdk_exists_fails_on_empty(self): - self.assertFalse(fdroidserver.common.test_sdk_exists(dict())) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_test_sdk_exists_fails_on_non_existent(self): - config = {'sdk_path': os.path.join(self.testdir, 'non_existent')} - self.assertFalse(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_test_sdk_exists_fails_on_file(self): - f = os.path.join(self.testdir, 'testfile') - open(f, 'w').close() - config = {'sdk_path': f} - self.assertFalse(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) - def test_test_sdk_exists_valid_apksigner_in_config(self): - apksigner = os.path.join( - self.testdir, - 'build-tools', - fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION, - 'apksigner', - ) - os.makedirs(os.path.dirname(apksigner)) - with open(apksigner, 'w') as fp: - fp.write('#!/bin/sh\ndate\n') - os.chmod(apksigner, 0o0755) # nosec B103 - config = {'apksigner': apksigner} - self.assertTrue(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) - def test_test_sdk_exists_old_apksigner_in_config(self): - apksigner = os.path.join(self.testdir, 'build-tools', '28.0.0', 'apksigner') - os.makedirs(os.path.dirname(apksigner))
- with open(apksigner, 'w') as fp: - fp.write('#!/bin/sh\ndate\n') - os.chmod(apksigner, 0o0755) # nosec B103 - config = {'apksigner': apksigner} - self.assertFalse(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) - def test_test_sdk_exists_with_valid_apksigner(self): - apksigner = ( - Path(self.testdir) - / 'build-tools' - / fdroidserver.common.MINIMUM_APKSIGNER_BUILD_TOOLS_VERSION - / 'apksigner' - ) - apksigner.parent.mkdir(parents=True) - apksigner.write_text('#!/bin/sh\ndate\n') - apksigner.chmod(0o0755) - config = {'sdk_path': self.testdir} - self.assertTrue(fdroidserver.common.test_sdk_exists(config)) - - @mock.patch.dict(os.environ, {'PATH': '/nonexistent'}, clear=True) - def test_test_sdk_exists_with_old_apksigner(self): - apksigner = Path(self.testdir) / 'build-tools' / '17.0.0' / 'apksigner' - apksigner.parent.mkdir(parents=True) - apksigner.write_text('#!/bin/sh\ndate\n') - apksigner.chmod(0o0755) - config = {'sdk_path': self.testdir} - self.assertFalse(fdroidserver.common.test_sdk_exists(config)) - - def test_loading_config_buildserver_yml(self): - """Smoke check to make sure this file is properly parsed""" - os.chdir(self.testdir) - shutil.copy( - os.path.join(basedir, '..', 'buildserver', 'config.buildserver.yml'), - fdroidserver.common.CONFIG_FILE, - ) - fdroidserver.common.read_config() - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_with_env_string(self): - """Test whether env works in keys with string values.""" - os.chdir(self.testdir) - testvalue = 'this is just a test' - Path('config.yml').write_text('keypass: {env: foo}') - os.environ['foo'] = testvalue - self.assertEqual(testvalue, fdroidserver.common.get_config()['keypass']) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_with_env_path(self): - """Test whether env works in keys with path values.""" - os.chdir(self.testdir) - path = 'user@server:/path/to/bar/' - os.environ['foo'] = path - Path('config.yml').write_text('serverwebroot: {env: foo}') - self.assertEqual( - [{'url': path}], - fdroidserver.common.get_config()['serverwebroot'], - ) - - def test_setup_status_output(self): - os.chdir(self.testdir) - start_timestamp = time.gmtime() - subcommand = 'test' - - fakecmd = ['fdroid ' + subcommand, '--option'] - sys.argv = fakecmd - fdroidserver.common.config = dict() - fdroidserver.common.setup_status_output(start_timestamp) - with open(os.path.join('repo', 'status', 'running.json')) as fp: - data = json.load(fp) - self.assertFalse(os.path.exists('.git')) - self.assertFalse('fdroiddata' in data) - self.assertEqual(fakecmd, data['commandLine']) - self.assertEqual(subcommand, data['subcommand']) - - def test_setup_status_output_in_git_repo(self): - os.chdir(self.testdir) - logging.getLogger('git.cmd').setLevel(logging.INFO) - git_repo = git.Repo.init(self.testdir) - file_in_git = 'README.md' - with open(file_in_git, 'w') as fp: - fp.write('this is just a test') - git_repo.git.add(all=True) - git_repo.index.commit("update README") - - start_timestamp = time.gmtime() - fakecmd = ['fdroid test2', '--option'] - sys.argv = fakecmd - fdroidserver.common.config = dict() - fdroidserver.common.setup_status_output(start_timestamp) - with open(os.path.join('repo', 'status', 'running.json')) as fp: - data = json.load(fp) - self.assertTrue(os.path.exists('.git')) - self.assertIsNotNone(re.match(r'[0-9a-f]{40}', data['fdroiddata']['commitId']), - 'Must be a valid git SHA1 commit 
ID!') - self.assertFalse(data['fdroiddata']['isDirty']) - self.assertEqual(fakecmd, data['commandLine']) - - self.assertEqual([], - data['fdroiddata']['untrackedFiles']) - dirtyfile = 'dirtyfile' - with open(dirtyfile, 'w', encoding='utf-8') as fp: - fp.write('this is just a test') - with open(file_in_git, 'a', encoding='utf-8') as fp: - fp.write('\nappend some stuff') - self.assertEqual([], - data['fdroiddata']['modifiedFiles']) - fdroidserver.common.setup_status_output(start_timestamp) - with open(os.path.join('repo', 'status', 'running.json')) as fp: - data = json.load(fp) - self.assertTrue(data['fdroiddata']['isDirty']) - self.assertEqual([file_in_git], - data['fdroiddata']['modifiedFiles']) - self.assertEqual([dirtyfile, 'repo/status/running.json'], - data['fdroiddata']['untrackedFiles']) - - def test_get_app_display_name(self): - testvalue = 'WIN!' - for app in [ - {'Name': testvalue}, - {'AutoName': testvalue}, - {'id': testvalue}, - {'id': 'a', 'localized': {'de-AT': {'name': testvalue}}}, - {'id': 'a', 'localized': { - 'de-AT': {'name': 'nope'}, - 'en-US': {'name': testvalue}, - }}, - {'AutoName': 'ignore me', 'Name': testvalue, 'id': 'nope'}, - {'AutoName': testvalue, 'id': 'nope'}]: - self.assertEqual(testvalue, fdroidserver.common.get_app_display_name(app)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_get_android_tools_versions(self): - sdk_path = os.path.join(basedir, 'get_android_tools_versions/android-sdk') - config = { - 'ndk_paths': {'r10e': os.path.join(sdk_path, '..', 'android-ndk-r10e')}, - 'sdk_path': sdk_path, - } - fdroidserver.common.config = config - fdroidserver.common.fill_config_defaults(config) - components = fdroidserver.common.get_android_tools_versions() - expected = ( - ('../android-ndk-r10e', 'r10e'), - ('ndk-bundle', '21.4.7075529'), - ('ndk/11.2.2725575', '11.2.2725575'), - ('ndk/17.2.4988734', '17.2.4988734'), - ('ndk/21.3.6528147', '21.3.6528147'), - ('patcher/v4', '1'), - ('platforms/android-30', '3'), - ('skiaparser/1', '6'), - ('tools', '26.1.1'), - ) - self.assertSequenceEqual(expected, sorted(components)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_get_android_tools_versions_no_ndk(self): - with tempfile.TemporaryDirectory() as tmpdir: - sdk_path = Path(tmpdir) / 'get_android_tools_versions' - shutil.copytree( - os.path.join(basedir, 'get_android_tools_versions'), sdk_path - ) - shutil.rmtree(sdk_path / 'android-ndk-r10e') - shutil.rmtree(sdk_path / 'android-sdk/ndk') - shutil.rmtree(sdk_path / 'android-sdk/ndk-bundle') - fdroidserver.common.config = {'sdk_path': str(sdk_path)} - components = fdroidserver.common.get_android_tools_versions() - expected = ( - ('android-sdk/patcher/v4', '1'), - ('android-sdk/platforms/android-30', '3'), - ('android-sdk/skiaparser/1', '6'), - ('android-sdk/tools', '26.1.1'), - ) - self.assertSequenceEqual(expected, sorted(components)) - - def test_read_pkg_args(self): - allow_vercodes = False - self.assertEqual( - {'org.fdroid.fdroid': []}, - fdroidserver.common.read_pkg_args(['org.fdroid.fdroid'], allow_vercodes), - ) - self.assertNotEqual( - {'com.example': [123456]}, - fdroidserver.common.read_pkg_args(['com.example:123456'], allow_vercodes), - ) - - allow_vercodes = True - self.assertEqual( - {'org.fdroid.fdroid': []}, - fdroidserver.common.read_pkg_args(['org.fdroid.fdroid'], allow_vercodes), - ) - self.assertEqual( - {'com.example': [123456]}, - fdroidserver.common.read_pkg_args(['com.example:123456'], allow_vercodes), - ) - 
self.assertEqual( - {'org.debian_kit': [6]}, - fdroidserver.common.read_pkg_args(['org.debian_kit_6.apk'], allow_vercodes), - ) - appid_versionCode_pairs = ( - 'org.fdroid.fdroid:1', - 'com.example:12345', - 'com.example:67890', - ) - self.assertEqual( - {'com.example': [12345, 67890], 'org.fdroid.fdroid': [1]}, - fdroidserver.common.read_pkg_args(appid_versionCode_pairs, allow_vercodes), - ) - appid_versionCode_pairs = ( - 'com.example:67890', - 'org.c_base.c_beam_29.apk', - ) - self.assertEqual( - {'com.example': [67890], 'org.c_base.c_beam': [29]}, - fdroidserver.common.read_pkg_args(appid_versionCode_pairs, allow_vercodes), - ) - - def test_read_pkg_args_errors(self): - allow_vercodes = True - with self.assertRaises(FDroidException): - fdroidserver.common.read_pkg_args(['org.fdroid.fdroid:'], allow_vercodes), - with self.assertRaises(FDroidException): - fdroidserver.common.read_pkg_args(['org.fdroid.fdroid:foo'], allow_vercodes), - - def test_apk_strip_v1_signatures(self): - before = os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk') - after = os.path.join(self.testdir, 'after.apk') - shutil.copy(before, after) - fdroidserver.common.apk_strip_v1_signatures(after, strip_manifest=False) - - def test_metadata_find_developer_signing_files(self): - appid = 'org.smssecure.smssecure' - - self.assertIsNone( - fdroidserver.common.metadata_find_developer_signing_files(appid, 133) - ) - - vc = '135' - self.assertEqual( - ( - os.path.join('metadata', appid, 'signatures', vc, '28969C09.RSA'), - os.path.join('metadata', appid, 'signatures', vc, '28969C09.SF'), - os.path.join('metadata', appid, 'signatures', vc, 'MANIFEST.MF'), - None, - ), - fdroidserver.common.metadata_find_developer_signing_files(appid, vc), - ) - - vc = '134' - self.assertEqual( - ( - os.path.join('metadata', appid, 'signatures', vc, '28969C09.RSA'), - os.path.join('metadata', appid, 'signatures', vc, '28969C09.SF'), - os.path.join('metadata', appid, 'signatures', vc, 'MANIFEST.MF'), - None, - ), - fdroidserver.common.metadata_find_developer_signing_files(appid, vc), - ) - - @mock.patch('sdkmanager.build_package_list', lambda use_net: None) - def test_auto_install_ndk(self): - """Test all possible field data types for build.ndk""" - fdroidserver.common.config = {'sdk_path': self.testdir} - sdk_path = self.testdir - build = fdroidserver.metadata.Build() - - none_entry = mock.Mock() - with mock.patch('sdkmanager.install', none_entry): - fdroidserver.common.auto_install_ndk(build) - none_entry.assert_not_called() - - empty_list = mock.Mock() - build.ndk = [] - with mock.patch('sdkmanager.install', empty_list): - fdroidserver.common.auto_install_ndk(build) - empty_list.assert_not_called() - - release_entry = mock.Mock() - build.ndk = 'r21e' - with mock.patch('sdkmanager.install', release_entry): - fdroidserver.common.auto_install_ndk(build) - release_entry.assert_called_once_with('ndk;r21e', sdk_path) - - revision_entry = mock.Mock() - build.ndk = '21.4.7075529' - with mock.patch('sdkmanager.install', revision_entry): - fdroidserver.common.auto_install_ndk(build) - revision_entry.assert_called_once_with('ndk;21.4.7075529', sdk_path) - - list_entry = mock.Mock() - calls = [] - build.ndk = ['r10e', '11.0.2655954', 'r12b', 'r21e'] - for n in build.ndk: - calls.append(mock.call(f'ndk;{n}', sdk_path)) - with mock.patch('sdkmanager.install', list_entry): - fdroidserver.common.auto_install_ndk(build) - list_entry.assert_has_calls(calls) - - @unittest.skipIf(importlib.util.find_spec('sdkmanager') is None, 'needs sdkmanager') - 
@mock.patch('sdkmanager.build_package_list', lambda use_net: None) - @mock.patch('sdkmanager._install_zipball_from_cache', lambda a, b: None) - @mock.patch('sdkmanager._generate_package_xml', lambda a, b, c: None) - def test_auto_install_ndk_mock_dl(self): - """Test NDK installs by actually calling sdkmanager""" - import importlib.metadata - - import sdkmanager - - sdkmanager_version = LooseVersion(importlib.metadata.version('sdkmanager')) - if sdkmanager_version < LooseVersion('0.6.4'): - raise unittest.SkipTest('needs fdroid sdkmanager >= 0.6.4') - - fdroidserver.common.config = {'sdk_path': 'placeholder'} - build = fdroidserver.metadata.Build() - url = 'https://dl.google.com/android/repository/android-ndk-r24-linux.zip' - path = sdkmanager.get_cachedir() / os.path.basename(url) - sdkmanager.packages = { - ('ndk', '24.0.8215888'): url, - ('ndk', 'r24'): url, - } - build.ndk = 'r24' - firstrun = mock.Mock() - with mock.patch('sdkmanager.download_file', firstrun): - fdroidserver.common.auto_install_ndk(build) - firstrun.assert_called_once_with(url, path) - build.ndk = '24.0.8215888' - secondrun = mock.Mock() - with mock.patch('sdkmanager.download_file', secondrun): - fdroidserver.common.auto_install_ndk(build) - secondrun.assert_called_once_with(url, path) - - @unittest.skip("This test downloads and unzips a 1GB file.") - def test_install_ndk(self): - """NDK r10e is a special case since its missing source.properties""" - config = {'sdk_path': self.testdir} - fdroidserver.common.config = config - fdroidserver.common._install_ndk('r10e') - r10e = os.path.join(self.testdir, 'ndk', 'r10e') - self.assertEqual('r10e', fdroidserver.common.get_ndk_version(r10e)) - fdroidserver.common.fill_config_defaults(config) - self.assertEqual({'r10e': r10e}, config['ndk_paths']) - - def test_fill_config_defaults(self): - """Test the auto-detection of NDKs installed in standard paths""" - ndk_bundle = os.path.join(self.testdir, 'ndk-bundle') - os.makedirs(ndk_bundle) - with open(os.path.join(ndk_bundle, 'source.properties'), 'w') as fp: - fp.write('Pkg.Desc = Android NDK\nPkg.Revision = 17.2.4988734\n') - config = {'sdk_path': self.testdir} - fdroidserver.common.fill_config_defaults(config) - self.assertEqual({'17.2.4988734': ndk_bundle}, config['ndk_paths']) - - r21e = os.path.join(self.testdir, 'ndk', '21.4.7075529') - os.makedirs(r21e) - with open(os.path.join(r21e, 'source.properties'), 'w') as fp: - fp.write('Pkg.Desc = Android NDK\nPkg.Revision = 21.4.7075529\n') - config = {'sdk_path': self.testdir} - fdroidserver.common.fill_config_defaults(config) - self.assertEqual( - {'17.2.4988734': ndk_bundle, '21.4.7075529': r21e}, - config['ndk_paths'], - ) - - r10e = os.path.join(self.testdir, 'ndk', 'r10e') - os.makedirs(r10e) - with open(os.path.join(r10e, 'RELEASE.TXT'), 'w') as fp: - fp.write('r10e-rc4 (64-bit)\n') - config = {'sdk_path': self.testdir} - fdroidserver.common.fill_config_defaults(config) - self.assertEqual( - {'r10e': r10e, '17.2.4988734': ndk_bundle, '21.4.7075529': r21e}, - config['ndk_paths'], - ) - - @unittest.skipIf(not os.path.isdir('/usr/lib/jvm/default-java'), 'uses Debian path') - def test_fill_config_defaults_java(self): - """Test the auto-detection of Java installed in standard paths""" - config = {'sdk_path': self.testdir} - fdroidserver.common.fill_config_defaults(config) - java_paths = [] - # use presence of javac to make sure its JDK not just JRE - for f in glob.glob('/usr/lib/jvm/java-*-openjdk-*/bin/javac'): - jdk = os.path.dirname(os.path.dirname(f)) - if not 
os.path.islink(jdk): - java_paths.append(jdk) - self.assertEqual( - len(java_paths), - len(config['java_paths']) - ) - for f in config['java_paths'].values(): - self.assertTrue(f in java_paths) - self.assertTrue(isinstance(f, str)) # paths in config must be str - - @mock.patch.dict(os.environ, clear=True) - def test_sdk_path_in_config_must_be_strings(self): - """All paths in config must be strings, and never pathlib.Path instances""" - os.environ['PATH'] = '/usr/bin:/usr/sbin' - config = {'sdk_path': Path('/opt/android-sdk')} - fdroidserver.common.fill_config_defaults(config) - build = fdroidserver.metadata.Build() - with self.assertRaises(TypeError): - fdroidserver.common.set_FDroidPopen_env(build=build) - - @mock.patch.dict(os.environ, clear=True) - def test_ndk_paths_in_config_must_be_strings(self): - """All paths in config must be strings, and never pathlib.Path instances""" - fdroidserver.common.config = { - 'ndk_paths': {'r21d': Path('/opt/android-sdk/ndk/r21d')} - } - build = fdroidserver.metadata.Build() - build.ndk = 'r21d' - os.environ['PATH'] = '/usr/bin:/usr/sbin' - with self.assertRaises(TypeError): - fdroidserver.common.set_FDroidPopen_env(build=build) - - @mock.patch.dict(os.environ, clear=True) - def test_FDroidPopen_envs_paths_can_be_pathlib(self): - _mock_common_module_options_instance() - os.environ['PATH'] = '/usr/bin:/usr/sbin' - envs = {'PATHLIB': Path('/pathlib/path'), 'STRING': '/string/path'} - p = fdroidserver.common.FDroidPopen(['/bin/sh', '-c', 'export'], envs=envs) - self.assertIn('/string/path', p.output) - self.assertIn('/pathlib/path', p.output) - - def test_vcs_git_latesttags(self): - tags = [ - "1.1.1", - "2.2.2", - "v3.0", - "0.0.4", - "0.5.0-beta", - "666(6)", - "seven", - ] - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - repo = git.Repo.init(Path.cwd()) - f = Path("test") - date = 10 ** 9 - for tag in tags: - date += 1 - f.write_text(tag) - repo.index.add([str(f)]) - repo.index.commit(tag, commit_date=str(date) + " +0000") - repo.create_tag(tag) - - vcs = fdroidserver.common.vcs_git(None, Path.cwd()) - self.assertEqual(vcs.latesttags(), tags[::-1]) - - def test_vcs_git_getref(self): - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - repo = git.Repo.init(Path.cwd()) - tag = "1.1.1" - f = Path("test") - f.write_text(tag) - repo.index.add([str(f)]) - repo.index.commit("foo") - repo.create_tag(tag) - - vcs = fdroidserver.common.vcs_git(None, Path.cwd()) - - self.assertIsNotNone(vcs.getref("1.1.1")) - self.assertIsNone(vcs.getref("invalid")) - - def test_get_release_filename(self): - app = fdroidserver.metadata.App() - app.id = 'test.app' - build = fdroidserver.metadata.Build() - build.versionCode = 123 - - build.output = 'build/apk/*' - self.assertEqual( - fdroidserver.common.get_release_filename(app, build), - "%s_%s.apk" % (app.id, build.versionCode), - ) - - build.output = 'build/apk/*.zip' - self.assertEqual( - fdroidserver.common.get_release_filename(app, build), - "%s_%s.zip" % (app.id, build.versionCode), - ) - - build.output = 'build/apk/*.apk' - self.assertEqual( - fdroidserver.common.get_release_filename(app, build), - "%s_%s.apk" % (app.id, build.versionCode), - ) - - build.output = 'build/apk/*.apk' - self.assertEqual( - fdroidserver.common.get_release_filename(app, build, 'exe'), - "%s_%s.exe" % (app.id, build.versionCode), - ) - - def test_no_zero_length_ndk_path_prefixes(self): - fdroidserver.common.config = {'ndk_paths': {}} - build = fdroidserver.metadata.Build() - - with mock.patch.dict(os.environ, 
clear=True): - os.environ['PATH'] = '/usr/bin:/usr/sbin' - fdroidserver.common.set_FDroidPopen_env(build=build) - self.assertNotIn('', os.getenv('PATH').split(os.pathsep)) - - def test_is_repo_file(self): - is_repo_file = fdroidserver.common.is_repo_file - self.assertFalse(is_repo_file('does-not-exist')) - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - Path('repo').mkdir() - repo_files = [ - 'repo/com.example.test.helloworld_1.apk', - 'repo/com.politedroid_6.apk', - 'repo/duplicate.permisssions_9999999.apk', - 'repo/fake.ota.update_1234.zip', - 'repo/info.guardianproject.index-v1.jar_123.apk', - 'repo/info.zwanenburg.caffeinetile_4.apk', - 'repo/main.1101613.obb.main.twoversions.obb', - ] - index_files = [ - 'repo/entry.jar', - 'repo/entry.json', - 'repo/index-v1.jar', - 'repo/index-v1.json', - 'repo/index-v2.json', - 'repo/index.css', - 'repo/index.html', - 'repo/index.jar', - 'repo/index.png', - 'repo/index.xml', - ] - for f in repo_files + index_files: - open(f, 'w').close() - - repo_dirs = [ - 'repo/com.politedroid', - 'repo/info.guardianproject.index-v1.jar', - 'repo/status', - ] - for d in repo_dirs: - os.mkdir(d) - - for f in repo_files: - self.assertTrue(os.path.exists(f), f + ' was created') - self.assertTrue(is_repo_file(f), f + ' is repo file') - - for f in index_files: - self.assertTrue(os.path.exists(f), f + ' was created') - self.assertFalse(is_repo_file(f), f + ' is repo file') - gpg_signed = [ - 'repo/entry.json', - 'repo/index-v1.json', - 'repo/index-v2.json', - ] - self.assertEqual( - (f in gpg_signed or is_repo_file(f, for_gpg_signing=False)), - is_repo_file(f, for_gpg_signing=True), - f + ' gpg signable?', - ) - - for d in repo_dirs: - self.assertTrue(os.path.exists(d), d + ' was created') - self.assertFalse(is_repo_file(d), d + ' not repo file') - - def test_get_apksigner_smartcardoptions(self): - os.chdir(self.testdir) - with open(fdroidserver.common.CONFIG_FILE, 'w', encoding='utf-8') as fp: - d = { - 'smartcardoptions': '-storetype PKCS11' - ' -providerName SunPKCS11-OpenSC' - ' -providerClass sun.security.pkcs11.SunPKCS11' - ' -providerArg opensc-fdroid.cfg' - } - config_dump(d, fp) - config = fdroidserver.common.read_config() - fdroidserver.common.config = config - self.assertTrue(isinstance(d['smartcardoptions'], str)) - self.assertTrue(isinstance(config['smartcardoptions'], list)) - self.assertEqual( - [ - '--ks-type', - 'PKCS11', - '--provider-class', - 'sun.security.pkcs11.SunPKCS11', - '--provider-arg', - 'opensc-fdroid.cfg', - ], - fdroidserver.common.get_apksigner_smartcardoptions( - config['smartcardoptions'] - ), - ) - - def test_get_smartcardoptions_list(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """ - smartcardoptions: - - -storetype - - PKCS11 - - -providerName - - SunPKCS11-OpenSC - - -providerClass - - sun.security.pkcs11.SunPKCS11 - - -providerArg - - opensc-fdroid.cfg - """ - ) - ) - config = fdroidserver.common.read_config() - fdroidserver.common.config = config - self.assertTrue(isinstance(config['smartcardoptions'], list)) - self.assertEqual( - [ - '-storetype', - 'PKCS11', - '-providerName', - 'SunPKCS11-OpenSC', - '-providerClass', - 'sun.security.pkcs11.SunPKCS11', - '-providerArg', - 'opensc-fdroid.cfg', - ], - config['smartcardoptions'], - ) - - def test_get_smartcardoptions_spaces(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - textwrap.dedent( - """ - smartcardoptions: | - -storetype PKCS11 - -providerClass sun.security.pkcs11.SunPKCS11 - 
-providerArg /etc/pkcs11_java.cfg - """ - ) - ) - config = fdroidserver.common.read_config() - fdroidserver.common.config = config - self.assertTrue(isinstance(config['smartcardoptions'], list)) - self.assertEqual( - [ - '-storetype', - 'PKCS11', - '-providerClass', - 'sun.security.pkcs11.SunPKCS11', - '-providerArg', - '/etc/pkcs11_java.cfg', - ], - config['smartcardoptions'], - ) - - def test_load_localized_config(self): - """It should load""" - antiFeatures = fdroidserver.common.load_localized_config( - ANTIFEATURES_CONFIG_NAME, 'repo' - ) - self.assertEqual( - [ - 'Ads', - 'DisabledAlgorithm', - 'KnownVuln', - 'NSFW', - 'NoSourceSince', - 'NonFreeAdd', - 'NonFreeAssets', - 'NonFreeDep', - 'NonFreeNet', - 'Tracking', - ], - list(antiFeatures.keys()), - ) - self.assertEqual( - ['de', 'en-US', 'fa', 'ro', 'zh-rCN'], - list(antiFeatures['Ads']['description'].keys()), - ) - self.assertEqual( - ['en-US'], - list(antiFeatures['NoSourceSince']['description'].keys()), - ) - # it should have copied the icon files into place - for v in antiFeatures.values(): - p = Path(os.path.dirname(__file__) + '/repo' + v['icon']['en-US']['name']) - self.assertTrue(p.exists()) - - def test_load_localized_config_categories(self): - """It should load""" - categories = fdroidserver.common.load_localized_config( - CATEGORIES_CONFIG_NAME, 'repo' - ) - self.assertEqual( - [ - 'Time', - 'Development', - 'GuardianProject', - 'Multimedia', - 'Phone & SMS', - 'Security', - 'System', - ], - list(categories.keys()), - ) - self.assertEqual(['en-US'], list(categories['GuardianProject']['name'].keys())) - - def test_load_localized_config_copy_icon(self): - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('System:\n icon: system.png') - source_file = 'config/system.png' - Path(source_file).write_text('placeholder') - time.sleep(0.01) # ensure reliable failure if mtime isn't preserved - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - dest_file = f'repo/icons/{os.path.basename(source_file)}' - self.assertEqual(os.path.getsize(source_file), os.path.getsize(dest_file)) - self.assertEqual(os.path.getmtime(source_file), os.path.getmtime(dest_file)) - - def test_load_localized_config_copy_unchanged(self): - """The destination file should only change if the source file did.""" - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('System:\n icon: system.png') - source_file = 'config/system.png' - Path(source_file).write_text('placeholder') - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - delta = 0.01 - time.sleep(delta) # ensure reliable failure if file isn't preserved - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - dest_file = f'repo/icons/{os.path.basename(source_file)}' - self.assertAlmostEqual( - os.path.getctime(source_file), os.path.getctime(dest_file), delta=delta - ) - - def test_load_localized_config_copy_over_dest(self): - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('System:\n icon: system.png') - source_file = Path('config/system.png') - dest_file = Path(f'repo/icons/{os.path.basename(source_file)}') - source_file.write_text('placeholder') - dest_file.parent.mkdir(parents=True) - dest_file.write_text('different contents') - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - self.assertEqual(os.path.getsize(source_file), os.path.getsize(dest_file)) - - def test_load_localized_config_0_file(self): - 
os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('') - with self.assertRaises(TypeError): - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - - def test_load_localized_config_string(self): - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('this is a string') - with self.assertRaises(TypeError): - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - - def test_load_localized_config_list(self): - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('- System') - with self.assertRaises(TypeError): - fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo') - - def test_config_type_check_config_yml_dict(self): - fdroidserver.common.config_type_check('config.yml', dict()) - - def test_config_type_check_config_yml_list(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config.yml', list()) - - def test_config_type_check_config_yml_set(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config.yml', set()) - - def test_config_type_check_config_yml_str(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config.yml', str()) - - def test_config_type_check_mirrors_list(self): - fdroidserver.common.config_type_check('config/mirrors.yml', list()) - - def test_config_type_check_mirrors_dict(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config/mirrors.yml', dict()) - - def test_config_type_check_mirrors_set(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config/mirrors.yml', set()) - - def test_config_type_check_mirrors_str(self): - with self.assertRaises(TypeError): - fdroidserver.common.config_type_check('config/mirrors.yml', str()) - - def test_config_serverwebroot_str(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - """serverwebroot: 'foo@example.com:/var/www'""" - ) - self.assertEqual( - [{'url': 'foo@example.com:/var/www/'}], - fdroidserver.common.read_config()['serverwebroot'], - ) - - def test_config_serverwebroot_list(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - """serverwebroot:\n - foo@example.com:/var/www""" - ) - self.assertEqual( - [{'url': 'foo@example.com:/var/www/'}], - fdroidserver.common.read_config()['serverwebroot'], - ) - - def test_config_serverwebroot_dict(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - """serverwebroot:\n - url: 'foo@example.com:/var/www'""" - ) - self.assertEqual( - [{'url': 'foo@example.com:/var/www/'}], - fdroidserver.common.read_config()['serverwebroot'], - ) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_serverwebroot_list_of_dicts_env(self): - os.chdir(self.testdir) - url = 'foo@example.com:/var/www/' - os.environ['serverwebroot'] = url - fdroidserver.common.write_config_file( - textwrap.dedent( - """\ - serverwebroot: - - url: {env: serverwebroot} - index_only: true - """ - ) - ) - self.assertEqual( - [{'url': url, 'index_only': True}], - fdroidserver.common.read_config()['serverwebroot'], - ) - - def test_expand_env_dict_fake_str(self): - testvalue = '"{env: foo}"' - self.assertEqual(testvalue, fdroidserver.common.expand_env_dict(testvalue)) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_expand_env_dict_good(self): - name = 'foo' - value = 'bar' - 
os.environ[name] = value - self.assertEqual(value, fdroidserver.common.expand_env_dict({'env': name})) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_expand_env_dict_bad_dict(self): - with self.assertRaises(TypeError): - fdroidserver.common.expand_env_dict({'env': 'foo', 'foo': 'bar'}) - - def test_parse_list_of_dicts_str(self): - s = 'foo@example.com:/var/www' - mirrors = yaml.load("""'%s'""" % s) - self.assertEqual( - [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - def test_parse_list_of_dicts_list(self): - s = 'foo@example.com:/var/www' - mirrors = yaml.load("""- '%s'""" % s) - self.assertEqual( - [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - def test_parse_list_of_dicts_dict(self): - s = 'foo@example.com:/var/www' - mirrors = yaml.load("""- url: '%s'""" % s) - self.assertEqual( - [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH'), 'foo': 'bar'}, clear=True) - def test_parse_list_of_dicts_env_str(self): - mirrors = yaml.load('{env: foo}') - self.assertEqual( - [{'url': 'bar'}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - def test_parse_list_of_dicts_env_list(self): - s = 'foo@example.com:/var/www' - mirrors = yaml.load("""- '%s'""" % s) - self.assertEqual( - [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - def test_parse_list_of_dicts_env_dict(self): - s = 'foo@example.com:/var/www' - mirrors = yaml.load("""- url: '%s'""" % s) - self.assertEqual( - [{'url': s}], fdroidserver.common.parse_list_of_dicts(mirrors) - ) - - def test_KnownApks_recordapk(self): - """Test that added dates are being fetched from the index. - - There are more related tests in tests/run-tests. - - """ - now = datetime.now(timezone.utc) - knownapks = fdroidserver.common.KnownApks() - for apkName in knownapks.apks: - knownapks.recordapk(apkName, default_date=now) - for added in knownapks.apks.values(): - self.assertNotEqual(added, now) - - def test_KnownApks_recordapk_new(self): - """Test that new added dates work, and are not replaced later. - - There are more related tests in tests/run-tests. 
- - """ - now = datetime.now(timezone.utc) - knownapks = fdroidserver.common.KnownApks() - fake_apk = 'fake.apk' - knownapks.recordapk(fake_apk, default_date=now) - for apk, added in knownapks.apks.items(): - if apk == fake_apk: - self.assertEqual(added, now) - else: - self.assertNotEqual(added, now) - knownapks.recordapk(fake_apk, default_date=datetime.now(timezone.utc)) - self.assertEqual(knownapks.apks[fake_apk], now) - - def test_get_mirrors_fdroidorg(self): - mirrors = fdroidserver.common.get_mirrors( - 'https://f-droid.org/repo', 'entry.jar' - ) - self.assertEqual( - 'https://f-droid.org/repo/entry.jar', - mirrors[0]['url'], - ) - - def test_get_mirrors_other(self): - self.assertEqual( - [{'url': 'https://example.com/fdroid/repo/index-v2.json'}], - fdroidserver.common.get_mirrors( - 'https://example.com/fdroid/repo', 'index-v2.json' - ), - ) - - def test_append_filename_to_mirrors(self): - filename = 'test.apk' - url = 'https://example.com/fdroid/repo' - mirrors = [{'url': url}] - self.assertEqual( - [{'url': url + '/' + filename}], - fdroidserver.common.append_filename_to_mirrors(filename, mirrors), - ) - - def test_append_filename_to_mirrors_full(self): - filename = 'test.apk' - mirrors = fdroidserver.common.FDROIDORG_MIRRORS - for mirror in fdroidserver.common.append_filename_to_mirrors(filename, mirrors): - self.assertTrue(mirror['url'].endswith('/' + filename)) - - def test_get_source_date_epoch(self): - git_repo = git.Repo.init(self.testdir) - Path('README').write_text('file to commit') - git_repo.git.add(all=True) - git_repo.index.commit("README") - self.assertEqual( - git_repo.git.log(n=1, pretty='%ct'), - fdroidserver.common.get_source_date_epoch(self.testdir), - ) - - def test_get_source_date_epoch_no_scm(self): - self.assertIsNone(fdroidserver.common.get_source_date_epoch(self.testdir)) - - def test_get_source_date_epoch_not_git(self): - """Test when build_dir is not a git repo, e.g. 
hg, svn, etc.""" - appid = 'com.example' - build_dir = Path(self.testdir) / 'build' / appid - fdroiddata = build_dir.parent.parent - (fdroiddata / 'metadata').mkdir() - build_dir.mkdir(parents=True) - os.chdir(build_dir) - git_repo = git.Repo.init(fdroiddata) # fdroiddata is always a git repo - with (fdroiddata / f'metadata/{appid}.yml').open('w') as fp: - fp.write('AutoName: Example App\n') - git_repo.git.add(all=True) - git_repo.index.commit("update README") - self.assertEqual( - git.repo.Repo(fdroiddata).git.log(n=1, pretty='%ct'), - fdroidserver.common.get_source_date_epoch(build_dir), - ) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_set_FDroidPopen_env_with_app(self): - """Test SOURCE_DATE_EPOCH in FDroidPopen when build_dir is a git repo.""" - os.chdir(self.testdir) - app = fdroidserver.metadata.App() - app.id = 'com.example' - build_dir = Path(self.testdir) / 'build' / app.id - git_repo = git.Repo.init(build_dir) - Path('README').write_text('file to commit') - git_repo.git.add(all=True) - now = datetime.now(timezone.utc) - git_repo.index.commit("README", commit_date=now) - fdroidserver.common.set_FDroidPopen_env(app) - p = fdroidserver.common.FDroidPopen(['printenv', 'SOURCE_DATE_EPOCH']) - self.assertEqual(int(p.output), int(now.timestamp())) - - def test_calculate_gradle_flavor_combination(self): - flavors = ['aa', 'BB', 'δδ'] - combinations = ['aaBBΔδ', 'aaBB', 'aaΔδ', 'aa', 'BBΔδ', 'BB', 'δδ', ''] - self.assertEqual(fdroidserver.common.calculate_gradle_flavor_combination(flavors), combinations) - - -APKS_WITH_JAR_SIGNATURES = ( - ( - 'SpeedoMeterApp.main_1.apk', - '2e6b3126fb7e0db6a9d4c2a06df690620655454d6e152cf244cc9efe9787a77d', - ), - ( - 'apk.embedded_1.apk', - '764f0eaac0cdcde35023658eea865c4383ab580f9827c62fdd3daf9e654199ee', - ), - ( - 'bad-unicode-πÇÇ现代通用字-български-عربي1.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'issue-1128-poc3a.apk', - '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', - ), - ( - 'issue-1128-poc3b.apk', - '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', - ), - ( - 'janus.apk', - 'ebb0fedf1942a099b287c3db00ff732162152481abb2b6c7cbcdb2ba5894a768', - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_1.apk', - 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_2.apk', - 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_3.apk', - 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', - ), - ( - 'org.bitbucket.tickytacky.mirrormirror_4.apk', - 'feaa63df35b4635cf091513dfcd6d11209632555efdfc47e33b70d4e4eb5ba28', - ), - ( - 'org.dyndns.fules.ck_20.apk', - '9326a2cc1a2f148202bc7837a0af3b81200bd37fd359c9e13a2296a71d342056', - ), - ( - 'org.sajeg.fallingblocks_3.apk', - '033389681f4288fdb3e72a28058c8506233ca50de75452ab6c9c76ea1ca2d70f', - ), - ( - 'repo/com.example.test.helloworld_1.apk', - 'c3a5ca5465a7585a1bda30218ae4017083605e3576867aa897d724208d99696c', - ), - ( - 'repo/com.politedroid_3.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/com.politedroid_4.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/com.politedroid_5.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/com.politedroid_6.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( 
- 'repo/duplicate.permisssions_9999999.apk', - '659e1fd284549f70d13fb02c620100e27eeea3420558cce62b0f5d4cf2b77d84', - ), - ( - 'repo/info.zwanenburg.caffeinetile_4.apk', - '51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e', - ), - ( - 'repo/no.min.target.sdk_987.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/obb.main.oldversion_1444412523.apk', - '818e469465f96b704e27be2fee4c63ab9f83ddf30e7a34c7371a4728d83b0bc1', - ), - ( - 'repo/obb.main.twoversions_1101613.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/obb.main.twoversions_1101615.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/obb.main.twoversions_1101617.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/obb.mainpatch.current_1619.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/obb.mainpatch.current_1619_another-release-key.apk', - 'ce9e200667f02d96d49891a2e08a3c178870e91853d61bdd33ef5f0b54701aa5', - ), - ( - 'repo/souch.smsbypass_9.apk', - 'd3aec784b1fd71549fc22c999789122e3639895db6bd585da5835fbe3db6985c', - ), - ( - 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'repo/v1.v2.sig_1020.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'urzip-release.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), - ( - 'urzip.apk', - '7eabd8c15de883d1e82b5df2fd4f7f769e498078e9ad6dc901f0e96db77ceac3', - ), -) -APKS_WITHOUT_JAR_SIGNATURES = ( - ( - 'issue-1128-poc1.apk', # APK v3 Signature only - '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', - ), - ( - 'issue-1128-poc2.apk', # APK v3 Signature only - '1dbb8be012293e988a0820f7d455b07abd267d2c0b500fc793fcfd80141cb5ce', - ), - ( - 'issue-1128-min-sdk-30-poc.apk', # APK v3 Signature only - '09350d5f3460a8a0ea5cf6b68ccd296a58754f7e683ba6aa08c19be8353504f3', - ), - ( - 'v2.only.sig_2.apk', - '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - ), -) - - -class SignerExtractionTest(unittest.TestCase): - """Test extraction of the signer certificate from JARs and APKs - - These fingerprints can be confirmed with: - apksigner verify --print-certs foo.apk | grep SHA-256 - keytool -printcert -file ____.RSA - """ - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - - self.apksigner = shutil.which('apksigner') - self.keytool = shutil.which('keytool') - - def tearDown(self): - self._td.cleanup() - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_first_signer_certificate_with_jars(self): - for jar in ( - 'signindex/guardianproject-v1.jar', - 'signindex/guardianproject.jar', - 'signindex/testy.jar', - ): - outdir = os.path.join(self.testdir, jar[:-4].replace('/', '_')) - os.mkdir(outdir) - fdroidserver.common.apk_extract_signatures(jar, outdir) - certs = glob.glob(os.path.join(outdir, '*.RSA')) - with open(certs[0], 'rb') as fp: - self.assertEqual( - fdroidserver.common.get_certificate(fp.read()), - fdroidserver.common.get_first_signer_certificate(jar), - ) - - @unittest.skip("slow and only needed when adding to APKS_WITH_JAR_SIGNATURES") - def test_vs_keytool(self): - if not self.keytool: - self.skipTest('requires keytool to run') - pat = re.compile(r'[0-9A-F:]{95}') - cmd = [self.keytool, 
'-printcert', '-jarfile'] - for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: - o = subprocess.check_output(cmd + [apk], text=True) - try: - self.assertEqual( - fingerprint, - pat.search(o).group().replace(':', '').lower(), - ) - except AttributeError as e: - print(e, o) - - @unittest.skip("slow and only needed when adding to APKS_WITH_JAR_SIGNATURES") - def test_vs_apksigner(self): - if not self.apksigner: - self.skipTest('requires apksigner to run') - pat = re.compile(r'\s[0-9a-f]{64}\s') - cmd = [self.apksigner, 'verify', '--print-certs'] - for apk, fingerprint in APKS_WITH_JAR_SIGNATURES + APKS_WITHOUT_JAR_SIGNATURES: - output = subprocess.check_output(cmd + [apk], text=True) - self.assertEqual( - fingerprint, - pat.search(output).group().strip(), - apk + " should have matching signer fingerprints", - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_apk_signer_fingerprint_with_v1_apks(self): - for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: - self.assertEqual( - fingerprint, - fdroidserver.common.apk_signer_fingerprint(apk), - f'apk_signer_fingerprint should match stored fingerprint for {apk}', - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_apk_signer_fingerprint_without_v1_apks(self): - for apk, fingerprint in APKS_WITHOUT_JAR_SIGNATURES: - self.assertEqual( - fingerprint, - fdroidserver.common.apk_signer_fingerprint(apk), - f'apk_signer_fingerprint should match stored fingerprint for {apk}', - ) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_get_first_signer_certificate_with_unsigned_jar(self): - self.assertIsNone( - fdroidserver.common.get_first_signer_certificate('signindex/unsigned.jar') - ) - - def test_apk_extract_fingerprint(self): - """Test extraction of JAR signatures (does not cover APK v2+ extraction).""" - for apk, fingerprint in APKS_WITH_JAR_SIGNATURES: - outdir = os.path.join(self.testdir, apk[:-4].replace('/', '_')) - os.mkdir(outdir) - try: - fdroidserver.common.apk_extract_signatures(apk, outdir) - except fdroidserver.apksigcopier.APKSigCopierError: - # nothing to test here when this error is thrown - continue - v1_certs = [str(cert) for cert in Path(outdir).glob('*.[DR]SA')] - cert = fdroidserver.common.get_certificate( - signature_block_file=Path(v1_certs[0]).read_bytes(), - signature_file=Path(v1_certs[0][:-4] + '.SF').read_bytes(), - ) - self.assertEqual( - fingerprint, - fdroidserver.common.signer_fingerprint(cert), - ) - apkobject = fdroidserver.common.get_androguard_APK(apk, skip_analysis=True) - v2_certs = apkobject.get_certificates_der_v2() - if v2_certs: - if v1_certs: - self.assertEqual(len(v1_certs), len(v2_certs)) - self.assertEqual( - fingerprint, - fdroidserver.common.signer_fingerprint(v2_certs[0]), - ) - v3_certs = apkobject.get_certificates_der_v3() - if v3_certs: - if v2_certs: - self.assertEqual(len(v2_certs), len(v3_certs)) - self.assertEqual( - fingerprint, - fdroidserver.common.signer_fingerprint(v3_certs[0]), - ) - - def test_signature_block_file_regex(self): - for apkpath, fingerprint in APKS_WITH_JAR_SIGNATURES: - with ZipFile(apkpath, 'r') as apk: - cert_files = [ - n - for n in apk.namelist() - if fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(n) - ] - self.assertEqual(1, len(cert_files)) - - def test_signature_block_file_regex_malicious(self): - apkpath = os.path.join(self.testdir, 'malicious.apk') - with ZipFile(apkpath, 'w') as apk: - apk.writestr('META-INF/MANIFEST.MF', 
'this is fake sig data') - apk.writestr('META-INF/CERT.SF\n', 'this is fake sig data') - apk.writestr('META-INF/AFTER.SF', 'this is fake sig data') - apk.writestr('META-INF/CERT.RSA\n', 'this is fake sig data') - apk.writestr('META-INF/AFTER.RSA', 'this is fake sig data') - with ZipFile(apkpath, 'r') as apk: - self.assertEqual( - ['META-INF/AFTER.RSA'], - [ - n - for n in apk.namelist() - if fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(n) - ], - ) - - -class IgnoreApksignerV33Test(SetUpTearDownMixin, unittest.TestCase): - """apksigner v33 should be entirely ignored - - https://gitlab.com/fdroid/fdroidserver/-/issues/1253 - """ - - BAD_VERSIONS = [ - '33.0.0-rc1', - '33.0.0-rc2', - '33.0.0-rc3', - '33.0.0-rc4', - '33.0.0', - '33.0.1', - '33.0.2', - '33.0.3', - ] - - def setUp(self): - super().setUp() - self.config = {'sdk_path': self.testdir} - - def _create_fake_build_tools(self, version): - for v in self.BAD_VERSIONS + [version]: - apksigner = os.path.join(self.testdir, 'build-tools', v, 'apksigner') - os.makedirs(os.path.dirname(apksigner)) - with open(apksigner, 'w') as fp: - fp.write(f'#!/bin/sh\necho {v}[\n') - os.chmod(apksigner, 0o0755) # nosec B103 - - def test_find_apksigner_choose_version_32_over_any_33(self): - good = '32.0.0' - self._create_fake_build_tools(good) - with mock.patch.dict(os.environ, clear=True): - os.environ['PATH'] = '/fake/path/to/avoid/conflicts' - fdroidserver.common.find_apksigner(self.config) - self.assertEqual( - os.path.join(self.testdir, 'build-tools', good, 'apksigner'), - self.config.get('apksigner'), - ) - - def test_find_apksigner_choose_no_version_over_any_33(self): - """apksigner v33 should be entirely ignored""" - self._create_fake_build_tools('29.0.0') # too old a version - with mock.patch.dict(os.environ, clear=True): - os.environ['PATH'] = '/fake/path/to/avoid/conflicts' - fdroidserver.common.find_apksigner(self.config) - self.assertIsNone(self.config.get('apksigner')) - - -class ConfigOptionsScopeTest(unittest.TestCase): - """Test assumptions about variable scope for "config" and "options". - - The ancient architecture of config and options in fdroidserver has - weird issues around unexpected scope, like there are cases where - the global config is not the same as the module-level config, and - more. - - This is about describing what is happening, it is not about - documenting behaviors that are good design. The config and options - handling should really be refactored into a well-known, workable - Pythonic pattern. - - """ - - def setUp(self): - # these are declared as None at the top of the module file - fdroidserver.common.config = None - fdroidserver.common.options = None - - def tearDown(self): - fdroidserver.common.config = None - fdroidserver.common.options = None - if 'config' in globals(): - global config - del config - if 'options' in globals(): - global options - del options - - def test_parse_args(self): - """Test that options is properly set up at the module-level and not global.""" - self.assertFalse('options' in globals()) - self.assertIsNone(fdroidserver.common.options) - parser = ArgumentParser() - fdroidserver.common.setup_global_opts(parser) - with mock.patch('sys.argv', ['$0']): - o = fdroidserver.common.parse_args(parser) - self.assertEqual(o, fdroidserver.common.options) - - # No function should set options as a global, and the global - # keyword does not create the variable. 
- global options - with self.assertRaises(NameError): - options - self.assertFalse('options' in globals()) - - def test_parse_args_without_args(self): - """Test that the parsing function works fine when there are no args.""" - parser = ArgumentParser() - fdroidserver.common.setup_global_opts(parser) - with mock.patch('sys.argv', ['$0']): - o = fdroidserver.common.parse_args(parser) - self.assertFalse(o.verbose) - - def test_parse_args_with_args(self): - parser = ArgumentParser() - fdroidserver.common.setup_global_opts(parser) - with mock.patch('sys.argv', ['$0', '-v']): - o = fdroidserver.common.parse_args(parser) - self.assertTrue(o.verbose) - - def test_get_config(self): - """Show how the module-level variables are initialized.""" - self.assertTrue('config' not in vars() and 'config' not in globals()) - self.assertIsNone(fdroidserver.common.config) - config = fdroidserver.common.read_config() - self.assertIsNotNone(fdroidserver.common.config) - self.assertTrue(isinstance(config, dict)) - self.assertEqual(config, fdroidserver.common.config) - - def test_get_config_global(self): - """Test assumptions about variable scope using global keyword.""" - global config - self.assertTrue('config' not in vars() and 'config' not in globals()) - self.assertIsNone(fdroidserver.common.config) - c = fdroidserver.common.read_config() - self.assertIsNotNone(fdroidserver.common.config) - self.assertTrue(isinstance(c, dict)) - self.assertEqual(c, fdroidserver.common.config) - self.assertTrue( - 'config' not in vars() and 'config' not in globals(), - "The config should not be set in the global context, only module-level.", - ) - - -class UnsafePermissionsTest(SetUpTearDownMixin, unittest.TestCase): - def setUp(self): - config = dict() - fdroidserver.common.find_apksigner(config) - if not config.get('apksigner'): - self.skipTest('SKIPPING, apksigner not installed!') - - super().setUp() - os.chdir(self.testdir) - fdroidserver.common.write_config_file('keypass: {env: keypass}') - os.chmod(fdroidserver.common.CONFIG_FILE, 0o666) # nosec B103 - - def test_config_perm_no_warning(self): - fdroidserver.common.write_config_file('keystore: foo.jks') - with self.assertNoLogs(level=logging.WARNING): - fdroidserver.common.read_config() - - def test_config_perm_keypass_warning(self): - fdroidserver.common.write_config_file('keypass: supersecret') - with self.assertLogs(level=logging.WARNING) as lw: - fdroidserver.common.read_config() - self.assertTrue('unsafe' in lw.output[0]) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - def test_config_perm_env_warning(self): - os.environ['keypass'] = 'supersecret' - fdroidserver.common.write_config_file('keypass: {env: keypass}') - with self.assertLogs(level=logging.WARNING) as lw: - fdroidserver.common.read_config() - self.assertTrue('unsafe' in lw.output[0]) - self.assertEqual(1, len(lw.output)) - - -class GetHeadCommitIdTest(unittest.TestCase): - """Test and compare two methods of getting the commit ID.""" - - def setUp(self): - self._td = mkdtemp() - self.testdir = self._td.name - os.chdir(self.testdir) - logging.getLogger('git.cmd').setLevel(logging.INFO) - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - @unittest.skipUnless((basedir.parent / '.git').exists(), 'Needs a working git repo') - def test_get_head_commit_id_compare(self): - """Run on this git repo to get some real world noise in there.""" - git_dir = basedir.parent - self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_dir)) - - def 
test_get_head_commit_id_error_bare_repo(self): - """Error because it is an empty, bare git repo.""" - git_repo = git.Repo.init(self.testdir) - self.assertIsNone(fdroidserver.common.get_head_commit_id(git_repo)) - - def test_get_head_commit_id_error_no_repo(self): - """Error because there is no .git/ dir.""" - with self.assertLogs('root', level=logging.DEBUG): - self.assertIsNone(fdroidserver.common.get_head_commit_id(self.testdir)) - - def test_get_head_commit_id_detached_and_branch(self): - """Fetching commit ID must work from detached HEADs and branches.""" - git_repo = git.Repo.init(self.testdir) - Path('README').write_text('this is just a test') - git_repo.git.add(all=True) - git_repo.index.commit("add README") - Path('LICENSE').write_text('free!') - git_repo.git.add(all=True) - git_repo.index.commit("add LICENSE") - self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) - # detached HEAD - git_repo.git.checkout('HEAD^') - self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) - # on a branch with a new commits - git_repo.git.checkout('test', b=True) - Path('foo.py').write_text('print("code!")') - git_repo.git.add(all=True) - git_repo.index.commit("add code") - self.assertIsNotNone(fdroidserver.common.get_head_commit_id(git_repo)) diff --git a/tests/test_deploy.py b/tests/test_deploy.py deleted file mode 100755 index d7de7545..00000000 --- a/tests/test_deploy.py +++ /dev/null @@ -1,1226 +0,0 @@ -#!/usr/bin/env python3 - -import configparser -import os -import shutil -import tempfile -import unittest -from pathlib import Path -from unittest import mock - -import git - -import fdroidserver - -from .shared_test_code import TmpCwd, VerboseFalseOptions, mkdtemp - -basedir = Path(__file__).parent -FILES = basedir - - -def _mock_rclone_config_file(cmd, text): # pylint: disable=unused-argument - """Mock output from rclone 1.60.1 but with nonexistent conf file.""" - return "Configuration file doesn't exist, but rclone will use this path:\n/nonexistent/rclone.conf\n" - - -class DeployTest(unittest.TestCase): - '''fdroidserver/deploy.py''' - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - - fdroidserver.common.options = mock.Mock() - fdroidserver.deploy.config = {} - - def tearDown(self): - self._td.cleanup() - - def test_update_serverwebroots_bad_None(self): - with self.assertRaises(TypeError): - fdroidserver.deploy.update_serverwebroots(None, 'repo') - - def test_update_serverwebroots_bad_int(self): - with self.assertRaises(TypeError): - fdroidserver.deploy.update_serverwebroots(9, 'repo') - - def test_update_serverwebroots_bad_float(self): - with self.assertRaises(TypeError): - fdroidserver.deploy.update_serverwebroots(1.0, 'repo') - - def test_update_serverwebroots(self): - """rsync works with file paths, so this test uses paths for the URLs""" - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir() - fake_apk = repo / 'fake.apk' - with fake_apk.open('w') as fp: - fp.write('not an APK, but has the right filename') - url0 = Path('url0/fdroid') - url0.mkdir(parents=True) - url1 = Path('url1/fdroid') - url1.mkdir(parents=True) - - # setup parameters for this test run - fdroidserver.common.options.identity_file = None - fdroidserver.deploy.config['make_current_version_link'] = False - - dest_apk0 = url0 / fake_apk - dest_apk1 = url1 / fake_apk - self.assertFalse(dest_apk0.is_file()) - self.assertFalse(dest_apk1.is_file()) - fdroidserver.deploy.update_serverwebroots( - [ - {'url': str(url0)}, - {'url': str(url1)}, 
- ], - str(repo), - ) - self.assertTrue(dest_apk0.is_file()) - self.assertTrue(dest_apk1.is_file()) - - def test_update_serverwebroots_url_does_not_end_with_fdroid(self): - with self.assertRaises(SystemExit): - fdroidserver.deploy.update_serverwebroots([{'url': 'url'}], 'repo') - - def test_update_serverwebroots_bad_ssh_url(self): - with self.assertRaises(SystemExit): - fdroidserver.deploy.update_serverwebroots( - [{'url': 'f@b.ar::/path/to/fdroid'}], 'repo' - ) - - def test_update_serverwebroots_unsupported_ssh_url(self): - with self.assertRaises(SystemExit): - fdroidserver.deploy.update_serverwebroots([{'url': 'ssh://nope'}], 'repo') - - @unittest.skipUnless(shutil.which('rclone'), 'requires rclone') - def test_update_remote_storage_with_rclone(self): - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir(parents=True, exist_ok=True) - - fake_apk = repo / 'another_fake.apk' - with fake_apk.open('w') as fp: - fp.write('not an APK, but has the right filename') - fake_index = repo / fdroidserver.common.INDEX_FILES[0] - with fake_index.open('w') as fp: - fp.write('not an index, but has the right filename') - - # write out rclone config for test use - rclone_config = configparser.ConfigParser() - rclone_config.add_section("test-local-config") - rclone_config.set("test-local-config", "type", "local") - - rclone_config_path = Path('rclone_config_path') - rclone_config_path.mkdir(parents=True, exist_ok=True) - rclone_file = rclone_config_path / 'rclone.conf' - with open(rclone_file, 'w') as configfile: - rclone_config.write(configfile) - - # setup parameters for this test run - awsbucket = 'test_bucket_folder' - fdroidserver.deploy.config['awsbucket'] = awsbucket - fdroidserver.deploy.config['rclone_config'] = 'test-local-config' - fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file) - fdroidserver.common.options = VerboseFalseOptions - - # write out destination path - destination = Path(f'{awsbucket}/fdroid') - destination.mkdir(parents=True, exist_ok=True) - dest_apk = Path(destination) / fake_apk - dest_index = Path(destination) / fake_index - self.assertFalse(dest_apk.is_file()) - self.assertFalse(dest_index.is_file()) - repo_section = str(repo) - fdroidserver.deploy.update_remote_storage_with_rclone(repo_section, awsbucket) - self.assertTrue(dest_apk.is_file()) - self.assertTrue(dest_index.is_file()) - - @unittest.skipUnless(shutil.which('rclone'), 'requires rclone') - def test_update_remote_storage_with_rclone_in_index_only_mode(self): - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir(parents=True, exist_ok=True) - - fake_apk = repo / 'another_fake.apk' - with fake_apk.open('w') as fp: - fp.write('not an APK, but has the right filename') - fake_index = repo / fdroidserver.common.INDEX_FILES[0] - with fake_index.open('w') as fp: - fp.write('not an index, but has the right filename') - - # write out rclone config for test use - rclone_config = configparser.ConfigParser() - rclone_config.add_section("test-local-config") - rclone_config.set("test-local-config", "type", "local") - - rclone_config_path = Path('rclone_config_path') - rclone_config_path.mkdir(parents=True, exist_ok=True) - rclone_file = rclone_config_path / 'rclone.conf' - with open(rclone_file, 'w') as configfile: - rclone_config.write(configfile) - - # setup parameters for this test run - awsbucket = 'test_bucket_folder' - fdroidserver.deploy.config['awsbucket'] = awsbucket - fdroidserver.deploy.config['rclone_config'] = 'test-local-config' - 
fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file) - fdroidserver.common.options = VerboseFalseOptions - - # write out destination path - destination = Path(f'{awsbucket}/fdroid') - destination.mkdir(parents=True, exist_ok=True) - dest_apk = Path(destination) / fake_apk - dest_index = Path(destination) / fake_index - self.assertFalse(dest_apk.is_file()) - self.assertFalse(dest_index.is_file()) - repo_section = str(repo) - fdroidserver.deploy.update_remote_storage_with_rclone( - repo_section, awsbucket, is_index_only=True - ) - self.assertFalse(dest_apk.is_file()) - self.assertTrue(dest_index.is_file()) - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - def test_update_remote_storage_with_rclone_awsbucket_no_env_vars(self): - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - def test_update_remote_storage_with_rclone_awsbucket_no_AWS_SECRET_ACCESS_KEY(self): - os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - def test_update_remote_storage_with_rclone_awsbucket_no_AWS_ACCESS_KEY_ID(self): - os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket') - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - @mock.patch('subprocess.call') - def test_update_remote_storage_with_rclone_awsbucket_env_vars(self, mock_call): - awsbucket = 'test_bucket_folder' - os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' - os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 - - def _mock_subprocess_call(cmd): - self.assertEqual( - cmd[:5], - [ - 'rclone', - 'sync', - '--delete-after', - '--config', - '.fdroid-deploy-rclone.conf', - ], - ) - return 0 - - mock_call.side_effect = _mock_subprocess_call - fdroidserver.deploy.config = {'awsbucket': awsbucket} - fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) - mock_call.assert_called() - - @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True) - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - @mock.patch('subprocess.call') - def test_update_remote_storage_with_rclone_mock_awsbucket(self, mock_call): - awsbucket = 'test_bucket_folder' - os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey' - os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets' # nosec B105 - self.last_cmd = None - - def _mock_subprocess_call(cmd): - self.last_cmd = cmd - return 0 - - mock_call.side_effect = _mock_subprocess_call - - fdroidserver.deploy.config = {'awsbucket': awsbucket} - fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) - self.maxDiff = None - self.assertEqual( - self.last_cmd, - [ - 'rclone', - 'sync', - '--delete-after', - '--config', - '.fdroid-deploy-rclone.conf', - 'repo', - f'AWS-S3-US-East-1:{awsbucket}/fdroid/repo', - ], - ) - - 
@mock.patch('subprocess.check_output', _mock_rclone_config_file) - @mock.patch('subprocess.call') - def test_update_remote_storage_with_rclone_mock_rclone_config(self, mock_call): - awsbucket = 'test_bucket_folder' - self.last_cmd = None - - def _mock_subprocess_call(cmd): - self.last_cmd = cmd - return 0 - - mock_call.side_effect = _mock_subprocess_call - - fdroidserver.deploy.config = { - 'awsbucket': awsbucket, - 'rclone_config': 'test_local_config', - } - fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) - self.maxDiff = None - self.assertEqual( - self.last_cmd, - [ - 'rclone', - 'sync', - '--delete-after', - 'repo', - 'test_local_config:test_bucket_folder/fdroid/repo', - ], - ) - - @mock.patch('subprocess.check_output', _mock_rclone_config_file) - @mock.patch('subprocess.call') - def test_update_remote_storage_with_rclone_mock_default_user_path(self, mock_call): - self.last_cmd = None - - def _mock_subprocess_call(cmd): - self.last_cmd = cmd - return 0 - - mock_call.side_effect = _mock_subprocess_call - - os.chdir(self.testdir) - config_name = 'test_local_config' - Path('rclone.conf').write_text('placeholder, contents ignored') - - awsbucket = 'test_bucket_folder' - fdroidserver.deploy.config['awsbucket'] = awsbucket - fdroidserver.deploy.config['rclone_config'] = config_name - fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket) - self.maxDiff = None - self.assertEqual( - self.last_cmd, - [ - 'rclone', - 'sync', - '--delete-after', - '--config', - fdroidserver.deploy.EMBEDDED_RCLONE_CONF, - 'repo', - f'{config_name}:{awsbucket}/fdroid/repo', - ], - ) - - def test_update_serverwebroot(self): - """rsync works with file paths, so this test uses paths for the URLs""" - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir(parents=True) - fake_apk = repo / 'fake.apk' - with fake_apk.open('w') as fp: - fp.write('not an APK, but has the right filename') - fake_index = repo / fdroidserver.common.INDEX_FILES[0] - with fake_index.open('w') as fp: - fp.write('not an index, but has the right filename') - url = Path('url') - url.mkdir() - - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.identity_file = None - fdroidserver.deploy.config['make_current_version_link'] = False - - dest_apk = Path(url) / fake_apk - dest_index = Path(url) / fake_index - self.assertFalse(dest_apk.is_file()) - self.assertFalse(dest_index.is_file()) - - fdroidserver.deploy.update_serverwebroot({'url': str(url)}, 'repo') - self.assertTrue(dest_apk.is_file()) - self.assertTrue(dest_index.is_file()) - - def test_update_serverwebroot_in_index_only_mode(self): - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir() - fake_apk = repo / 'fake.apk' - with fake_apk.open('w') as fp: - fp.write('not an APK, but has the right filename') - fake_index = repo / fdroidserver.common.INDEX_FILES[0] - with fake_index.open('w') as fp: - fp.write('not an index, but has the right filename') - url = Path('url') - url.mkdir() - - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.deploy.config['make_current_version_link'] = False - - dest_apk = Path(url) / fake_apk - dest_index = Path(url) / fake_index - self.assertFalse(dest_apk.is_file()) - self.assertFalse(dest_index.is_file()) - - fdroidserver.deploy.update_serverwebroot( - {'url': str(url), 'index_only': True}, 'repo' - ) - 
self.assertFalse(dest_apk.is_file()) - self.assertTrue(dest_index.is_file()) - - @mock.patch.dict(os.environ, clear=True) - def test_update_serverwebroot_no_rsync_error(self): - os.environ['PATH'] = self.testdir - os.chdir(self.testdir) - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.deploy.update_serverwebroot('serverwebroot', 'repo') - - def test_update_serverwebroot_make_cur_version_link(self): - self.maxDiff = None - - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.no_checksum = True - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = True - fdroidserver.common.options.index_only = False - fdroidserver.deploy.config = {'make_current_version_link': True} - url = "example.com:/var/www/fdroid" - repo_section = 'repo' - - # setup function for asserting subprocess.call invocations - call_iteration = 0 - - def update_server_webroot_call(cmd): - nonlocal call_iteration - if call_iteration == 0: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - '--exclude', - 'repo/altstore-index.json', - '--exclude', - 'repo/altstore-index.json.asc', - '--exclude', - 'repo/entry.jar', - '--exclude', - 'repo/entry.json', - '--exclude', - 'repo/entry.json.asc', - '--exclude', - 'repo/index-v1.jar', - '--exclude', - 'repo/index-v1.json', - '--exclude', - 'repo/index-v1.json.asc', - '--exclude', - 'repo/index-v2.json', - '--exclude', - 'repo/index-v2.json.asc', - '--exclude', - 'repo/index.css', - '--exclude', - 'repo/index.html', - '--exclude', - 'repo/index.jar', - '--exclude', - 'repo/index.png', - '--exclude', - 'repo/index.xml', - '--exclude', - 'repo/signer-index.jar', - '--exclude', - 'repo/signer-index.json', - '--exclude', - 'repo/signer-index.json.asc', - 'repo', - 'example.com:/var/www/fdroid', - ], - ) - elif call_iteration == 1: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - 'repo', - url, - ], - ) - elif call_iteration == 2: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - 'Sym.apk', - 'Sym.apk.asc', - 'Sym.apk.sig', - 'example.com:/var/www/fdroid', - ], - ) - else: - self.fail('unexpected subprocess.call invocation') - call_iteration += 1 - return 0 - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - os.mkdir('repo') - os.symlink('repo/com.example.sym.apk', 'Sym.apk') - os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc') - os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig') - with mock.patch('subprocess.call', side_effect=update_server_webroot_call): - fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section) - self.assertEqual(call_iteration, 3, 'expected 3 invocations of subprocess.call') - - def test_update_serverwebroot_make_cur_version_link_in_index_only_mode(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.no_checksum = True - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = True - fdroidserver.common.options.identity_file = None - fdroidserver.deploy.config['make_current_version_link'] = True - url = "example.com:/var/www/fdroid" - repo_section = 'repo' - - # setup function for asserting subprocess.call invocations - call_iteration 
= 0 - - def update_server_webroot_call(cmd): - nonlocal call_iteration - if call_iteration == 0: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - 'repo/altstore-index.json', - 'repo/altstore-index.json.asc', - 'repo/entry.jar', - 'repo/entry.json', - 'repo/entry.json.asc', - 'repo/index-v1.jar', - 'repo/index-v1.json', - 'repo/index-v1.json.asc', - 'repo/index-v2.json', - 'repo/index-v2.json.asc', - 'repo/index.css', - 'repo/index.html', - 'repo/index.jar', - 'repo/index.png', - 'repo/index.xml', - 'repo/signer-index.jar', - 'repo/signer-index.json', - 'repo/signer-index.json.asc', - 'example.com:/var/www/fdroid/repo/', - ], - ) - elif call_iteration == 1: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - 'repo', - url, - ], - ) - elif call_iteration == 2: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--quiet', - 'Sym.apk', - 'Sym.apk.asc', - 'Sym.apk.sig', - 'example.com:/var/www/fdroid', - ], - ) - else: - self.fail('unexpected subprocess.call invocation') - call_iteration += 1 - return 0 - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - os.mkdir(repo_section) - os.symlink('repo/com.example.sym.apk', 'Sym.apk') - os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc') - os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig') - - fake_files = fdroidserver.common.INDEX_FILES - for filename in fake_files: - fake_file = Path(repo_section) / filename - with fake_file.open('w') as fp: - fp.write('not a real one, but has the right filename') - - with mock.patch('subprocess.call', side_effect=update_server_webroot_call): - fdroidserver.deploy.update_serverwebroot( - {'url': url, 'index_only': True}, repo_section - ) - self.assertEqual(call_iteration, 1, 'expected 1 invocations of subprocess.call') - - def test_update_serverwebroot_with_id_file(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.no_checksum = True - fdroidserver.common.options.verbose = True - fdroidserver.common.options.quiet = False - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.index_only = False - fdroidserver.deploy.config = {'identity_file': './id_rsa'} - url = "example.com:/var/www/fdroid" - repo_section = 'archive' - - # setup function for asserting subprocess.call invocations - call_iteration = 0 - - def update_server_webroot_call(cmd): - nonlocal call_iteration - if call_iteration == 0: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--verbose', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' - + fdroidserver.deploy.config['identity_file'], - '--exclude', - 'archive/altstore-index.json', - '--exclude', - 'archive/altstore-index.json.asc', - '--exclude', - 'archive/entry.jar', - '--exclude', - 'archive/entry.json', - '--exclude', - 'archive/entry.json.asc', - '--exclude', - 'archive/index-v1.jar', - '--exclude', - 'archive/index-v1.json', - '--exclude', - 'archive/index-v1.json.asc', - '--exclude', - 'archive/index-v2.json', - '--exclude', - 'archive/index-v2.json.asc', - '--exclude', - 'archive/index.css', - '--exclude', - 'archive/index.html', - '--exclude', - 'archive/index.jar', - '--exclude', - 'archive/index.png', - '--exclude', - 'archive/index.xml', - 'archive', - url, - ], - ) - elif 
call_iteration == 1: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--verbose', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' - + fdroidserver.deploy.config['identity_file'], - 'archive', - url, - ], - ) - else: - self.fail('unexpected subprocess.call invocation') - call_iteration += 1 - return 0 - - with mock.patch('subprocess.call', side_effect=update_server_webroot_call): - fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section) - self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call') - - def test_update_serverwebroot_with_id_file_in_index_only_mode(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.no_chcksum = False - fdroidserver.common.options.verbose = True - fdroidserver.common.options.quiet = False - fdroidserver.common.options.identity_file = None - fdroidserver.deploy.config['identity_file'] = './id_rsa' - fdroidserver.deploy.config['make_current_version_link'] = False - url = "example.com:/var/www/fdroid" - repo_section = 'archive' - - # setup function for asserting subprocess.call invocations - call_iteration = 0 - - def update_server_webroot_call(cmd): - nonlocal call_iteration - if call_iteration == 0: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--verbose', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' - + fdroidserver.deploy.config['identity_file'], - 'archive/altstore-index.json', - 'archive/altstore-index.json.asc', - 'archive/entry.jar', - 'archive/entry.json', - 'archive/entry.json.asc', - 'archive/index-v1.jar', - 'archive/index-v1.json', - 'archive/index-v1.json.asc', - 'archive/index-v2.json', - 'archive/index-v2.json.asc', - 'archive/index.css', - 'archive/index.html', - 'archive/index.jar', - 'archive/index.png', - 'archive/index.xml', - "example.com:/var/www/fdroid/archive/", - ], - ) - elif call_iteration == 1: - self.assertListEqual( - cmd, - [ - 'rsync', - '--archive', - '--delete-after', - '--safe-links', - '--verbose', - '-e', - 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' - + fdroidserver.deploy.config['identity_file'], - "example.com:/var/www/fdroid/archive/", - ], - ) - else: - self.fail('unexpected subprocess.call invocation') - call_iteration += 1 - return 0 - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with mock.patch('subprocess.call', side_effect=update_server_webroot_call): - os.mkdir(repo_section) - fake_files = fdroidserver.common.INDEX_FILES - for filename in fake_files: - fake_file = Path(repo_section) / filename - with fake_file.open('w') as fp: - fp.write('not a real one, but has the right filename') - - fdroidserver.deploy.update_serverwebroot( - {'url': url, 'index_only': True}, repo_section - ) - self.assertEqual(call_iteration, 1, 'expected 1 invocations of subprocess.call') - - @unittest.skipIf( - not os.getenv('VIRUSTOTAL_API_KEY'), 'VIRUSTOTAL_API_KEY is not set' - ) - def test_upload_to_virustotal(self): - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.verbose = True - virustotal_apikey = os.getenv('VIRUSTOTAL_API_KEY') - fdroidserver.deploy.upload_to_virustotal('repo', virustotal_apikey) - - def test_remote_hostname_regex(self): - for remote_url, name in ( - ('git@github.com:guardianproject/fdroid-repo', 'github'), - ('git@gitlab.com:guardianproject/fdroid-repo', 'gitlab'), - ('https://github.com:guardianproject/fdroid-repo', 'github'), - 
('https://gitlab.com/guardianproject/fdroid-repo', 'gitlab'), - ('https://salsa.debian.org/foo/repo', 'salsa'), - ): - self.assertEqual( - name, fdroidserver.deploy.REMOTE_HOSTNAME_REGEX.sub(r'\1', remote_url) - ) - - def test_update_servergitmirrors(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.no_keep_git_mirror_archive = False - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = True - - config = {} - fdroidserver.common.fill_config_defaults(config) - fdroidserver.deploy.config = config - - os.chdir(self.testdir) - - repo_section = 'repo' - initial_branch = fdroidserver.deploy.GIT_BRANCH - - remote_repo = Path(self.testdir) / 'remote' - remote_repo.mkdir(parents=True) - remote_git_repo = git.Repo.init( - remote_repo, initial_branch=initial_branch, bare=True - ) - fdroidserver.deploy.config["servergitmirrors"] = [{"url": str(remote_repo)}] - - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir(parents=True) - fake_apk = 'Sym.apk' - fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] - for filename in fake_files: - fake_file = repo / filename - with fake_file.open('w') as fp: - fp.write('not a real one, but has the right filename') - - fdroidserver.deploy.update_servergitmirrors( - fdroidserver.deploy.config["servergitmirrors"], repo_section - ) - - verify_repo = remote_git_repo.clone( - Path(self.testdir) / 'verify', - ) - - for filename in fake_files: - remote_file = f"fdroid/{repo_section}/{filename}" - - self.assertIsNotNone(verify_repo.working_tree_dir) - if verify_repo.working_tree_dir is not None: - self.assertTrue( - (Path(verify_repo.working_tree_dir) / remote_file).exists() - ) - - def test_update_servergitmirrors_in_index_only_mode(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.no_keep_git_mirror_archive = False - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = True - - config = {} - fdroidserver.common.fill_config_defaults(config) - fdroidserver.deploy.config = config - - os.chdir(self.testdir) - - repo_section = 'repo' - initial_branch = fdroidserver.deploy.GIT_BRANCH - - remote_repo = Path(self.testdir) / 'remote' - remote_repo.mkdir(parents=True) - remote_git_repo = git.Repo.init( - remote_repo, initial_branch=initial_branch, bare=True - ) - fdroidserver.deploy.config["servergitmirrors"] = [ - {"url": str(remote_repo), "index_only": True} - ] - - os.chdir(self.testdir) - repo = Path('repo') - repo.mkdir(parents=True) - fake_apk = 'Sym.apk' - fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] - for filename in fake_files: - fake_file = repo / filename - with fake_file.open('w') as fp: - fp.write('not a real one, but has the right filename') - - fdroidserver.deploy.update_servergitmirrors( - fdroidserver.deploy.config["servergitmirrors"], repo_section - ) - - verify_repo = remote_git_repo.clone( - Path(self.testdir) / 'verify', - ) - - for filename in fdroidserver.common.INDEX_FILES: - remote_file = f"fdroid/{repo_section}/{filename}" - - self.assertIsNotNone(verify_repo.working_tree_dir) - if verify_repo.working_tree_dir is not None: - self.assertTrue( - (Path(verify_repo.working_tree_dir) / remote_file).exists() - ) - - # Should not have the APK file - remote_file = f"fdroid/{repo_section}/{fake_apk}" - if verify_repo.working_tree_dir is not 
None: - self.assertFalse( - (Path(verify_repo.working_tree_dir) / remote_file).exists() - ) - - def test_upload_to_servergitmirror_in_index_only_mode(self): - # setup parameters for this test run - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.identity_file = None - fdroidserver.common.options.no_keep_git_mirror_archive = False - fdroidserver.common.options.verbose = False - fdroidserver.common.options.quiet = True - fdroidserver.common.options.identity_file = None - - config = {} - fdroidserver.common.fill_config_defaults(config) - fdroidserver.deploy.config = config - - repo_section = 'repo' - initial_branch = fdroidserver.deploy.GIT_BRANCH - - os.chdir(self.testdir) - - local_git_repo_path = Path(self.testdir) / 'local' - local_git_repo = git.Repo.init( - local_git_repo_path, initial_branch=initial_branch - ) - - fdroid_dir = local_git_repo_path / 'fdroid' - repo_dir = fdroid_dir / repo_section - repo_dir.mkdir(parents=True) - fake_apk = 'Sym.apk' - fake_files = fdroidserver.common.INDEX_FILES + [fake_apk] - for filename in fake_files: - fake_file = repo_dir / filename - with fake_file.open('w') as fp: - fp.write('not a real one, but has the right filename') - - # The remote repo must be a bare repo to allow being pushed to - remote_git_repo_dir = Path(self.testdir) / 'remote' - remote_git_repo = git.Repo.init( - remote_git_repo_dir, initial_branch=initial_branch, bare=True - ) - - mirror_config = {"url": str(remote_git_repo_dir), "index_only": True} - enabled_remotes = [] - ssh_cmd = 'ssh -oBatchMode=yes' - fdroidserver.deploy.upload_to_servergitmirror( - mirror_config=mirror_config, - local_repo=local_git_repo, - enabled_remotes=enabled_remotes, - repo_section=repo_section, - is_index_only=mirror_config['index_only'], - fdroid_dir=str(fdroid_dir), - git_mirror_path=str(local_git_repo_path), - ssh_cmd=ssh_cmd, - progress=git.RemoteProgress(), - ) - - verify_repo = remote_git_repo.clone( - Path(self.testdir) / 'verify', - ) - - for filename in fdroidserver.common.INDEX_FILES: - remote_file = f"fdroid/{repo_section}/{filename}" - - self.assertIsNotNone(verify_repo.working_tree_dir) - if verify_repo.working_tree_dir is not None: - self.assertTrue( - (Path(verify_repo.working_tree_dir) / remote_file).exists() - ) - - # Should not have the APK file - remote_file = f"fdroid/{repo_section}/{fake_apk}" - if verify_repo.working_tree_dir is not None: - self.assertFalse( - (Path(verify_repo.working_tree_dir) / remote_file).exists() - ) - - -class GitHubReleasesTest(unittest.TestCase): - def test_find_release_infos(self): - self.maxDiff = None - - index_mock = b""" - { - "packages": { - "com.example.app": { - "versions": { - "2e6f263c1927506015bfc98bce0818247836f2e7fe29a04e1af2b33c97848750": { - "file": { - "name": "/com.example.app_123.apk" - }, - "whatsNew": { - "en-US": "fake what's new" - }, - "manifest": { - "versionName": "1.2.3", - "versionCode": "123" - } - }, - "8a6f263c8327506015bfc98bce0815247836f2e7fe29a04e1af2bffa6409998d": { - "file": { - "name": "/com.example.app_100.apk" - }, - "manifest": { - "versionName": "1.0-alpha", - "versionCode": "123" - }, - "releaseChannels": ["alpha"] - } - } - }, - "another.app": { - "versions": { - "30602ffc19a7c0601bbfa93bce00082c78a6f2ddfe29a04e1af253fc9f84eda0": { - "file": { - "name": "/another.app_1.apk" - }, - "manifest": { - "versionName": "1", - "versionCode": "1" - } - } - } - }, - "fildered.app": { - "versions": { - "93ae02fc19a7c0601adfa93bce0443fc78a6f2ddfe3df04e1af093fca9a1ff09": { - "file": { - "name": 
"/another.app_1.apk" - }, - "manifest": { - "versionName": "1", - "versionCode": "1" - } - } - } - } - } - } - """ - with unittest.mock.patch( - "fdroidserver.deploy.open", unittest.mock.mock_open(read_data=index_mock) - ): - release_infos = fdroidserver.deploy.find_release_infos( - "fake_path", - Path('fake_repo'), - ["com.example.app", "another.app"], - ) - - self.assertDictEqual( - release_infos, - { - "another.app": { - "1": { - "files": [Path('fake_repo') / "another.app_1.apk"], - "hasReleaseChannels": False, - "whatsNew": None, - }, - }, - "com.example.app": { - "1.0-alpha": { - "files": [ - Path("fake_repo") / "com.example.app_100.apk", - ], - "hasReleaseChannels": True, - "whatsNew": None, - }, - "1.2.3": { - "files": [ - Path("fake_repo") / "com.example.app_123.apk", - ], - "hasReleaseChannels": False, - "whatsNew": "fake what's new", - }, - }, - }, - ) - - def test_upload_to_github_releases(self): - gh_config = [ - { - "projectUrl": "https://github.com/example/app", - "packageNames": ["com.example.app", "another.app"], - }, - { - "projectUrl": "https://github.com/custom/app", - "packageNames": ["more.custom.app"], - "token": "custom_token", - }, - ] - - fri_mock = unittest.mock.Mock(return_value="fri_result") - urr_mock = unittest.mock.Mock() - with unittest.mock.patch( - "fdroidserver.deploy.find_release_infos", fri_mock - ), unittest.mock.patch( - "fdroidserver.deploy.upload_to_github_releases_repo", urr_mock - ), tempfile.TemporaryDirectory() as tmpdir: - with open(Path(tmpdir) / "index-v2.json", "w") as f: - f.write("") - - fdroidserver.deploy.upload_to_github_releases( - tmpdir, gh_config, "fake_global_token" - ) - - fri_mock.assert_called_once_with( - Path(tmpdir) / "index-v2.json", - Path(tmpdir), - ["com.example.app", "another.app", "more.custom.app"], - ) - - self.maxDiff = None - self.assertListEqual( - urr_mock.call_args_list, - [ - unittest.mock.call( - { - "projectUrl": "https://github.com/example/app", - "packageNames": ["com.example.app", "another.app"], - }, - "fri_result", - "fake_global_token", - ), - unittest.mock.call( - { - "projectUrl": "https://github.com/custom/app", - "packageNames": ["more.custom.app"], - "token": "custom_token", - }, - "fri_result", - "fake_global_token", - ), - ], - ) - - -class Test_UploadToGithubReleasesRepo(unittest.TestCase): - def setUp(self): - self.repo_conf = { - "projectUrl": "https://github.com/example/app", - "packageNames": ["com.example.app", "com.example.altapp", "another.app"], - } - self.release_infos = { - "com.example.app": { - "1.0.0": { - "files": [ - Path("fake_repo") / "com.example.app_100100.apk", - ], - "hasReleaseChannels": False, - "whatsNew": "what's new com.example.app 1.0.0", - }, - "1.0.0-beta1": { - "files": [ - Path("fake_repo") / "com.example.app_100007.apk", - ], - "hasReleaseChannels": True, - "whatsNew": None, - }, - }, - "com.example.altapp": { - "1.0.0": { - "files": [ - Path("fake_repo") / "com.example.altapp_100100.apk", - Path("fake_repo") / "com.example.altapp_100100.apk.asc", - Path("fake_repo") / "com.example.altapp_100100.apk.idsig", - ], - "whatsNew": "what's new com.example.altapp 1.0.0", - }, - }, - } - - self.api = unittest.mock.Mock() - self.api.list_unreleased_tags = lambda: ["1.0.0", "1.0.0-beta1"] - self.api_constructor = unittest.mock.Mock(return_value=self.api) - - def test_global_token(self): - with unittest.mock.patch("fdroidserver.github.GithubApi", self.api_constructor): - fdroidserver.deploy.upload_to_github_releases_repo( - self.repo_conf, - self.release_infos, - 
"global_token", - ) - - self.api_constructor.assert_called_once_with( - "global_token", "https://github.com/example/app" - ) - - self.assertListEqual( - self.api.create_release.call_args_list, - [ - unittest.mock.call( - "1.0.0", - [ - Path("fake_repo/com.example.app_100100.apk"), - Path("fake_repo/com.example.altapp_100100.apk"), - Path("fake_repo/com.example.altapp_100100.apk.asc"), - Path("fake_repo/com.example.altapp_100100.apk.idsig"), - ], - "what's new com.example.app 1.0.0", - ), - ], - ) - - def test_local_token(self): - self.repo_conf["token"] = "local_token" # nosec B105 - with unittest.mock.patch("fdroidserver.github.GithubApi", self.api_constructor): - fdroidserver.deploy.upload_to_github_releases_repo( - self.repo_conf, - self.release_infos, - "global_token", - ) - - self.api_constructor.assert_called_once_with( - "local_token", "https://github.com/example/app" - ) - - self.assertListEqual( - self.api.create_release.call_args_list, - [ - unittest.mock.call( - "1.0.0", - [ - Path("fake_repo/com.example.app_100100.apk"), - Path("fake_repo/com.example.altapp_100100.apk"), - Path("fake_repo/com.example.altapp_100100.apk.asc"), - Path("fake_repo/com.example.altapp_100100.apk.idsig"), - ], - "what's new com.example.app 1.0.0", - ), - ], - ) diff --git a/tests/test_github.py b/tests/test_github.py deleted file mode 100755 index f30ce0bb..00000000 --- a/tests/test_github.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 - -import unittest -import unittest.mock - -import fdroidserver - -from .shared_test_code import mock_urlopen - - -class GithubApiTest(unittest.TestCase): - def test__init(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - self.assertEqual(api._api_token, 'faketoken') - self.assertEqual(api._repo_path, 'fakerepopath') - - def test__req(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - r = api._req('https://fakeurl', data='fakedata') - self.assertEqual(r.full_url, 'https://fakeurl') - self.assertEqual(r.data, "fakedata") - self.assertDictEqual( - r.headers, - { - 'Accept': 'application/vnd.github+json', - 'Authorization': 'Bearer faketoken', - 'X-github-api-version': '2022-11-28', - }, - ) - - def test_list_released_tags(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - uomock = mock_urlopen( - body='[{"tag_name": "fake"}, {"tag_name": "double_fake"}]' - ) - with unittest.mock.patch("urllib.request.urlopen", uomock): - result = api.list_released_tags() - self.assertListEqual(result, ['fake', 'double_fake']) - - def test_list_unreleased_tags(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - - api.list_all_tags = unittest.mock.Mock(return_value=[1, 2, 3, 4]) - api.list_released_tags = unittest.mock.Mock(return_value=[1, 2]) - - result = api.list_unreleased_tags() - - self.assertListEqual(result, [3, 4]) - - def test_tag_exists(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - uomock = mock_urlopen(body='[{"ref": "refs/tags/fake_tag"}]') - with unittest.mock.patch("urllib.request.urlopen", uomock): - result = api.tag_exists('fake_tag') - self.assertTrue(result) - - def test_tag_exists_failure(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - - uomock = mock_urlopen(body='[{"error": "failure"}]') - - with unittest.mock.patch("urllib.request.urlopen", uomock): - success = api.tag_exists('fake_tag') - - self.assertFalse(success) - - def test_list_all_tags(self): - api = fdroidserver.github.GithubApi('faketoken', 
'fakerepopath') - - uomock = mock_urlopen( - body='[{"ref": "refs/tags/fake"}, {"ref": "refs/tags/double_fake"}]' - ) - - with unittest.mock.patch("urllib.request.urlopen", uomock): - result = api.list_all_tags() - - self.assertListEqual(result, ['fake', 'double_fake']) - - def test_create_release(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - - uomock = mock_urlopen(body='{"id": "fakeid"}') - api.tag_exists = lambda x: True - api._create_release_asset = unittest.mock.Mock() - - with unittest.mock.patch("urllib.request.urlopen", uomock): - success = api.create_release('faketag', ['file_a', 'file_b'], body="bdy") - self.assertTrue(success) - - req = uomock.call_args_list[0][0][0] - self.assertEqual(1, len(uomock.call_args_list)) - self.assertEqual(2, len(uomock.call_args_list[0])) - self.assertEqual(1, len(uomock.call_args_list[0][0])) - self.assertEqual( - req.full_url, - 'https://api.github.com/repos/fakerepopath/releases', - ) - self.assertEqual(req.data, b'{"tag_name": "faketag", "body": "bdy"}') - self.assertListEqual( - api._create_release_asset.call_args_list, - [ - unittest.mock.call('fakeid', 'file_a'), - unittest.mock.call('fakeid', 'file_b'), - ], - ) - - def test__create_release_asset(self): - api = fdroidserver.github.GithubApi('faketoken', 'fakerepopath') - uomock = mock_urlopen() - - with unittest.mock.patch( - 'fdroidserver.github.open', - unittest.mock.mock_open(read_data=b"fake_content"), - ), unittest.mock.patch("urllib.request.urlopen", uomock): - success = api._create_release_asset('fake_id', 'fake_file') - - self.assertTrue(success) - - req = uomock.call_args_list[0][0][0] - self.assertEqual(1, len(uomock.call_args_list)) - self.assertEqual(2, len(uomock.call_args_list[0])) - self.assertEqual(1, len(uomock.call_args_list[0][0])) - self.assertEqual( - req.full_url, - 'https://uploads.github.com/repos/fakerepopath/releases/fake_id/assets?name=fake_file', - ) - self.assertDictEqual( - req.headers, - { - "Accept": "application/vnd.github+json", - 'Authorization': 'Bearer faketoken', - 'Content-type': 'application/octet-stream', - 'X-github-api-version': '2022-11-28', - }, - ) - self.assertEqual(req.data, b'fake_content') diff --git a/tests/test_gpgsign.py b/tests/test_gpgsign.py deleted file mode 100755 index 84634874..00000000 --- a/tests/test_gpgsign.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import shutil -import tempfile -import unittest -from pathlib import Path -from unittest.mock import MagicMock, patch - -from fdroidserver import common, gpgsign - -basedir = Path(__file__).parent - - -class GpgsignTest(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.TemporaryDirectory() - os.chdir(self.tempdir.name) - self.repodir = Path('repo') - self.repodir.mkdir() - - gpgsign.config = None - config = common.read_config() - config['verbose'] = True - config['gpghome'] = str((basedir / 'gnupghome').resolve()) - config['gpgkey'] = '1DBA2E89' - gpgsign.config = config - - def tearDown(self): - self.tempdir.cleanup() - - @patch('sys.argv', ['fdroid gpgsign', '--verbose']) - @patch('fdroidserver.gpgsign.FDroidPopen') - def test_sign_index(self, FDroidPopen): - """This skips running gpg because its hard to setup in a test env""" - index_v1_json = 'repo/index-v1.json' - shutil.copy(basedir / index_v1_json, 'repo') - shutil.copy(basedir / 'SpeedoMeterApp.main_1.apk', 'repo') - - def _side_effect(gpg): - f = gpg[-1] - sig = gpg[3] - self.assertTrue(sig.startswith(f)) - open(sig, 'w').close() - p = 
MagicMock() - p.returncode = 0 - return p - - FDroidPopen.side_effect = _side_effect - gpgsign.main() - self.assertTrue(FDroidPopen.called) - self.assertTrue((self.repodir / 'index-v1.json').exists()) - self.assertTrue((self.repodir / 'index-v1.json.asc').exists()) - self.assertTrue((self.repodir / 'SpeedoMeterApp.main_1.apk.asc').exists()) - self.assertFalse((self.repodir / 'index.jar.asc').exists()) - # smoke check status JSON - with (self.repodir / 'status/gpgsign.json').open() as fp: - data = json.load(fp) - self.assertIn('index-v1.json', data['signed']) diff --git a/tests/test_import_subcommand.py b/tests/test_import_subcommand.py deleted file mode 100755 index 530e10fb..00000000 --- a/tests/test_import_subcommand.py +++ /dev/null @@ -1,199 +0,0 @@ -#!/usr/bin/env python3 - -import logging -import os -import shutil -import sys -import tempfile -import unittest -from pathlib import Path -from unittest import mock - -import git -import requests -import yaml - -import fdroidserver -import fdroidserver.import_subcommand - -from .shared_test_code import TmpCwd, VerboseFalseOptions, mkdtemp - -basedir = Path(__file__).parent -logging.basicConfig(level=logging.DEBUG) - - -class ImportTest(unittest.TestCase): - '''fdroid import''' - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - def test_get_all_gradle_and_manifests(self): - """Test whether the function works with relative and absolute paths""" - a = fdroidserver.import_subcommand.get_all_gradle_and_manifests( - Path('source-files/cn.wildfirechat.chat') - ) - paths = [ - 'avenginekit/build.gradle', - 'build.gradle', - 'chat/build.gradle', - 'client/build.gradle', - 'client/src/main/AndroidManifest.xml', - 'emojilibrary/build.gradle', - 'gradle/build_libraries.gradle', - 'imagepicker/build.gradle', - 'mars-core-release/build.gradle', - 'push/build.gradle', - 'settings.gradle', - ] - paths = [Path('source-files/cn.wildfirechat.chat') / path for path in paths] - self.assertEqual(sorted(paths), sorted(a)) - - abspath = basedir / 'source-files/realm' - p = fdroidserver.import_subcommand.get_all_gradle_and_manifests(abspath) - self.assertEqual(1, len(p)) - self.assertTrue(p[0].is_relative_to(abspath)) - - def test_get_gradle_subdir(self): - subdirs = { - 'cn.wildfirechat.chat': 'chat', - 'com.anpmech.launcher': 'app', - 'org.tasks': 'app', - 'ut.ewh.audiometrytest': 'app', - 'org.noise_planet.noisecapture': 'app', - } - for k, v in subdirs.items(): - build_dir = Path('source-files') / k - paths = fdroidserver.import_subcommand.get_all_gradle_and_manifests( - build_dir - ) - logging.info(paths) - subdir = fdroidserver.import_subcommand.get_gradle_subdir(build_dir, paths) - self.assertEqual(v, str(subdir)) - - def test_import_gitlab(self): - with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): - # FDroidPopen needs some config to work - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - - url = 'https://gitlab.com/fdroid/ci-test-app' - r = requests.head(url, timeout=300) - if r.status_code != 200: - print("ERROR", url, 'unreachable (', r.status_code, ')') - print('Skipping ImportTest!') - return - - fdroidserver.common.options = VerboseFalseOptions - app = fdroidserver.import_subcommand.get_app_from_url(url) - fdroidserver.import_subcommand.clone_to_tmp_dir(app) - self.assertEqual(app.RepoType, 'git') - self.assertEqual(app.Repo, 
'https://gitlab.com/fdroid/ci-test-app.git') - - def test_get_app_from_url(self): - with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): - testdir = Path(testdir) - (testdir / 'tmp').mkdir() - tmp_importer = testdir / 'tmp/importer' - data = ( - ( - 'cn.wildfirechat.chat', - 'https://github.com/wildfirechat/android-chat', - '0.6.9', - 23, - ), - ( - 'com.anpmech.launcher', - 'https://github.com/KeikaiLauncher/KeikaiLauncher', - 'Unknown', - None, - ), - ( - 'ut.ewh.audiometrytest', - 'https://github.com/ReeceStevens/ut_ewh_audiometer_2014', - '1.65', - 14, - ), - ) - for appid, url, vn, vc in data: - shutil.rmtree( - tmp_importer, - onerror=fdroidserver.import_subcommand.handle_retree_error_on_windows, - ) - shutil.copytree(basedir / 'source-files' / appid, tmp_importer) - - app = fdroidserver.import_subcommand.get_app_from_url(url) - with mock.patch( - 'fdroidserver.common.getvcs', - lambda a, b, c: fdroidserver.common.vcs(url, testdir), - ), mock.patch( - 'fdroidserver.common.vcs.gotorevision', lambda s, rev: None - ), mock.patch( - 'shutil.rmtree', lambda a, onerror=None: None - ): - build_dir = fdroidserver.import_subcommand.clone_to_tmp_dir(app) - self.assertEqual('git', app.RepoType) - self.assertEqual(url, app.Repo) - self.assertEqual(url, app.SourceCode) - logging.info(build_dir) - paths = fdroidserver.import_subcommand.get_all_gradle_and_manifests( - build_dir - ) - self.assertNotEqual(paths, []) - ( - versionName, - versionCode, - package, - ) = fdroidserver.common.parse_androidmanifests(paths, app) - self.assertEqual(vn, versionName) - self.assertEqual(vc, versionCode) - self.assertEqual(appid, package) - - def test_bad_urls(self): - for url in ( - 'asdf', - 'file://thing.git', - 'https:///github.com/my/project', - 'git:///so/many/slashes', - 'ssh:/notabug.org/missing/a/slash', - 'git:notabug.org/missing/some/slashes', - 'https//github.com/bar/baz', - ): - with self.assertRaises(ValueError): - fdroidserver.import_subcommand.get_app_from_url(url) - - @mock.patch('sys.argv', ['fdroid import', '-u', 'https://example.com/mystery/url']) - @mock.patch('fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a: None) - def test_unrecognized_url(self): - """Test whether error is thrown when the RepoType was not found. - - clone_to_tmp_dir is mocked out to prevent this test from using - the network, if it gets past the code that throws the error. 
- - """ - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.import_subcommand.main() - - @mock.patch('sys.argv', ['fdroid import', '-u', 'https://fake/git/url.git']) - @mock.patch( - 'fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a, r: Path('td') - ) - def test_main_local_git(self): - os.chdir(self.testdir) - git.Repo.init('td') - Path('td/build.gradle').write_text( - 'android { defaultConfig { applicationId "com.example" } }' - ) - fdroidserver.import_subcommand.main() - with open('metadata/com.example.yml') as fp: - data = yaml.safe_load(fp) - self.assertEqual(data['Repo'], sys.argv[2]) - self.assertEqual(data['RepoType'], 'git') - self.assertEqual(1, len(data['Builds'])) diff --git a/tests/test_index.py b/tests/test_index.py deleted file mode 100755 index c8ff5cbe..00000000 --- a/tests/test_index.py +++ /dev/null @@ -1,918 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import datetime -import glob -import json -import os -import shutil -import tempfile -import unittest -import zipfile -from pathlib import Path -from unittest.mock import patch - -import requests -import yaml - -import fdroidserver -from fdroidserver import common, index, publish, signindex, update - -from .shared_test_code import GP_FINGERPRINT, TmpCwd, mkdtemp - -basedir = Path(__file__).parent - - -class Options: - nosign = True - pretty = False - verbose = False - - -class IndexTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - # TODO something should remove cls.index_v1_jar, but it was - # causing the tests to be flaky. There seems to be something - # that is running the background somehow, maybe sign_index() - # exits before jarsigner actually finishes? - cls.index_v1_jar = basedir / 'repo' / 'index-v1.jar' - - def setUp(self): - (basedir / common.CONFIG_FILE).chmod(0o600) - os.chdir(basedir) # so read_config() can find config.yml - - common.config = None - common.options = Options - config = common.read_config() - config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') - common.config = config - signindex.config = config - update.config = config - - self._td = mkdtemp() - self.testdir = self._td.name - - def tearDown(self): - self._td.cleanup() - - def _sign_test_index_v1_jar(self): - if not self.index_v1_jar.exists(): - signindex.sign_index(self.index_v1_jar.parent, 'index-v1.json') - - def test_get_public_key_from_jar_succeeds(self): - source_dir = basedir / 'signindex' - for f in ('testy.jar', 'guardianproject.jar'): - testfile = os.path.join(source_dir, f) - jar = zipfile.ZipFile(testfile) - _, fingerprint = index.get_public_key_from_jar(jar) - # comparing fingerprints should be sufficient - if f == 'testy.jar': - self.assertEqual( - fingerprint, - '818E469465F96B704E27BE2FEE4C63AB' - + '9F83DDF30E7A34C7371A4728D83B0BC1', - ) - if f == 'guardianproject.jar': - self.assertTrue(fingerprint == GP_FINGERPRINT) - - def test_get_public_key_from_jar_fails(self): - source_dir = basedir / 'signindex' - testfile = os.path.join(source_dir, 'unsigned.jar') - jar = zipfile.ZipFile(testfile) - with self.assertRaises(index.VerificationException): - index.get_public_key_from_jar(jar) - - def test_download_repo_index_no_fingerprint(self): - with self.assertRaises(index.VerificationException): - index.download_repo_index("http://example.org") - - def test_download_repo_index_no_jar(self): - with self.assertRaises(requests.exceptions.RequestException): - index.download_repo_index("http://example.org?fingerprint=nope") - - def test_get_repo_key_fingerprint(self): - 
self._sign_test_index_v1_jar() - pubkey, fingerprint = index.extract_pubkey() - ( - data, - public_key, - public_key_fingerprint, - ) = index.get_index_from_jar( - 'repo/index-v1.jar', fingerprint, allow_deprecated=True - ) - self.assertIsNotNone(data) - self.assertIsNotNone(public_key) - self.assertIsNotNone(public_key_fingerprint) - - def test_get_index_from_jar_with_bad_fingerprint(self): - pubkey, fingerprint = index.extract_pubkey() - fingerprint = fingerprint[:-1] + 'G' - with self.assertRaises(fdroidserver.exception.VerificationException): - index.get_index_from_jar( - 'repo/index-v1.jar', fingerprint, allow_deprecated=True - ) - - def test_get_index_from_jar_with_chars_to_be_stripped(self): - self._sign_test_index_v1_jar() - fingerprint = 'NOOOO F4 9A F3 F1 1E FD DF 20 DF FD 70 F5 E3 11 7B 99 76 67 41 67 AD CA 28 0E 6B 19 32 A0 60 1B 26 F6' - index.get_index_from_jar( - 'repo/index-v1.jar', fingerprint, allow_deprecated=True - ) - - @patch('requests.head') - def test_download_repo_index_same_etag(self, head): - url = 'http://example.org?fingerprint=test' - etag = '"4de5-54d840ce95cb9"' - - head.return_value.headers = {'ETag': etag} - data, new_etag = index.download_repo_index(url, etag=etag) - - self.assertIsNone(data) - self.assertEqual(etag, new_etag) - - @patch('requests.get') - @patch('requests.head') - def test_download_repo_index_new_etag(self, head, get): - url = 'http://example.org?fingerprint=' + GP_FINGERPRINT - etag = '"4de5-54d840ce95cb9"' - - # fake HTTP answers - head.return_value.headers = {'ETag': 'new_etag'} - get.return_value.headers = {'ETag': 'new_etag'} - get.return_value.status_code = 200 - testfile = os.path.join('signindex', 'guardianproject-v1.jar') - with open(testfile, 'rb') as file: - get.return_value.content = file.read() - - data, new_etag = index.download_repo_index(url, etag=etag) - - # assert that the index was retrieved properly - self.assertEqual('Guardian Project Official Releases', data['repo']['name']) - self.assertEqual(GP_FINGERPRINT, data['repo']['fingerprint']) - self.assertTrue(len(data['repo']['pubkey']) > 500) - self.assertEqual(10, len(data['apps'])) - self.assertEqual(10, len(data['packages'])) - self.assertEqual('new_etag', new_etag) - - @patch('fdroidserver.net.http_get') - def test_download_repo_index_url_parsing(self, mock_http_get): - """Test whether it is trying to download the right file - - This passes the URL back via the etag return value just as a - hack to check which URL was actually attempted. 
- - """ - mock_http_get.side_effect = lambda url, etag, timeout: (None, url) - repo_url = 'https://fake.url/fdroid/repo' - index_url = 'https://fake.url/fdroid/repo/index-v1.jar' - fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT - slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT - for url in (repo_url, index_url, fingerprint_url, slash_url): - ilist = index.download_repo_index(url, verify_fingerprint=False) - self.assertEqual(index_url, ilist[1]) # etag item used to return URL - - @patch('fdroidserver.net.download_using_mirrors') - def test_download_repo_index_v2(self, mock_download_using_mirrors): - mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( - self.testdir, 'repo', os.path.basename(mirrors[0]['url']) - ) - os.chdir(self.testdir) - signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') - os.mkdir('repo') - shutil.copy(basedir / 'repo' / 'entry.json', 'repo') - shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') - signindex.sign_index('repo', 'entry.json') - repo_url = 'https://fake.url/fdroid/repo' - entry_url = 'https://fake.url/fdroid/repo/entry.jar' - index_url = 'https://fake.url/fdroid/repo/index-v2.json' - fingerprint_url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT - slash_url = 'https://fake.url/fdroid/repo//?fingerprint=' + GP_FINGERPRINT - for url in (repo_url, entry_url, index_url, fingerprint_url, slash_url): - data, _ignored = index.download_repo_index_v2(url, verify_fingerprint=False) - self.assertEqual(['repo', 'packages'], list(data.keys())) - self.assertEqual( - 'My First F-Droid Repo Demo', data['repo']['name']['en-US'] - ) - - @patch('fdroidserver.net.download_using_mirrors') - def test_download_repo_index_v2_bad_fingerprint(self, mock_download_using_mirrors): - mock_download_using_mirrors.side_effect = lambda mirrors: os.path.join( - self.testdir, 'repo', os.path.basename(mirrors[0]['url']) - ) - os.chdir(self.testdir) - signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') - os.mkdir('repo') - shutil.copy(basedir / 'repo' / 'entry.json', 'repo') - shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') - signindex.sign_index('repo', 'entry.json') - bad_fp = '0123456789001234567890012345678900123456789001234567890012345678' - bad_fp_url = 'https://fake.url/fdroid/repo?fingerprint=' + bad_fp - with self.assertRaises(fdroidserver.exception.VerificationException): - data, _ignored = index.download_repo_index_v2(bad_fp_url) - - @patch('fdroidserver.net.download_using_mirrors') - def test_download_repo_index_v2_entry_verify(self, mock_download_using_mirrors): - def download_using_mirrors_def(mirrors): - f = os.path.join(tempfile.mkdtemp(), os.path.basename(mirrors[0]['url'])) - Path(f).write_text('not the entry.jar file contents') - return f - - mock_download_using_mirrors.side_effect = download_using_mirrors_def - url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT - with self.assertRaises(fdroidserver.exception.VerificationException): - data, _ignored = index.download_repo_index_v2(url) - - @patch('fdroidserver.net.download_using_mirrors') - def test_download_repo_index_v2_index_verify(self, mock_download_using_mirrors): - def download_using_mirrors_def(mirrors): - f = os.path.join(tempfile.mkdtemp(), os.path.basename(mirrors[0]['url'])) - Path(f).write_text('not the index-v2.json file contents') - return f - - mock_download_using_mirrors.side_effect = download_using_mirrors_def - os.chdir(self.testdir) - 
signindex.config['keystore'] = os.path.join(basedir, 'keystore.jks') - os.mkdir('repo') - shutil.copy(basedir / 'repo' / 'entry.json', 'repo') - shutil.copy(basedir / 'repo' / 'index-v2.json', 'repo') - signindex.sign_index('repo', 'entry.json') - url = 'https://fake.url/fdroid/repo?fingerprint=' + GP_FINGERPRINT - with self.assertRaises(fdroidserver.exception.VerificationException): - data, _ignored = index.download_repo_index_v2(url) - - def test_sort_package_versions(self): - i = [ - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_134.apk', - 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 134, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_134_b30bb97.apk', - 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', - 'versionCode': 134, - }, - { - 'packageName': 'b075b32b4ef1e8a869e00edb136bd48e34a0382b85ced8628f164d1199584e4e' - }, - { - 'packageName': '43af70d1aca437c2f9974c4634cc5abe45bdc4d5d71529ac4e553488d3bb3ff6' - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_135_b30bb97.apk', - 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', - 'versionCode': 135, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_135.apk', - 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 135, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_133.apk', - 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 133, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-weird-version.apk', - 'signer': '99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff', - 'versionCode': 133, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-custom.apk', - 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', - 'versionCode': 133, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-new-custom.apk', - 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', - 'versionCode': 135, - }, - ] - - o = [ - { - 'packageName': '43af70d1aca437c2f9974c4634cc5abe45bdc4d5d71529ac4e553488d3bb3ff6' - }, - { - 'packageName': 'b075b32b4ef1e8a869e00edb136bd48e34a0382b85ced8628f164d1199584e4e' - }, - # app test data - # # packages with reproducible developer signature - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_135_b30bb97.apk', - 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', - 'versionCode': 135, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_134_b30bb97.apk', - 'signer': 'b30bb971af0d134866e158ec748fcd553df97c150f58b0a963190bbafbeb0868', - 'versionCode': 134, - }, - # # packages build and signed by fdroid - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_135.apk', - 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 135, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_134.apk', - 'signer': 'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 134, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'org.smssecure.smssecure_133.apk', - 'signer': 
'b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6', - 'versionCode': 133, - }, - # # packages signed with unkown keys - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-new-custom.apk', - 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', - 'versionCode': 135, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-custom.apk', - 'signer': '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef', - 'versionCode': 133, - }, - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-weird-version.apk', - 'signer': '99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff99ff', - 'versionCode': 133, - }, - ] - - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystorepass'] = '123456' - publish.config['keypass'] = '123456' - publish.config['keystore'] = os.path.join(os.getcwd(), 'dummy-keystore.jks') - publish.config['repo_keyalias'] = 'repokey' - - testsmetadir = os.path.join(os.getcwd(), 'metadata') - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - shutil.copytree(testsmetadir, 'metadata') - sigkeyfps = { - "org.smssecure.smssecure": { - "signer": "b33a601a9da97c82e6eb121eb6b90adab561f396602ec4dc8b0019fb587e2af6" - } - } - os.mkdir('repo') - jarfile = 'repo/signer-index.jar' - with zipfile.ZipFile(jarfile, 'w', zipfile.ZIP_DEFLATED) as jar: - jar.writestr('signer-index.json', json.dumps(sigkeyfps)) - publish.sign_sig_key_fingerprint_list(jarfile) - common.write_config_file('') - - index.sort_package_versions(i, common.load_publish_signer_fingerprints()) - self.maxDiff = None - self.assertEqual(json.dumps(i, indent=2), json.dumps(o, indent=2)) - - # and test it still works with get_first_signer_certificate - outdir = os.path.join(self.testdir, 'index-signer-fingerprints') - os.mkdir(outdir) - common.apk_extract_signatures(jarfile, outdir) - certs = glob.glob(os.path.join(outdir, '*.RSA')) - with open(certs[0], 'rb') as fp: - self.assertEqual( - common.get_certificate(fp.read()), - common.get_first_signer_certificate(jarfile), - ) - - def test_make_v0_repo_only(self): - os.chdir(self.testdir) - os.mkdir('repo') - repo_icons_dir = os.path.join('repo', 'icons') - self.assertFalse(os.path.isdir(repo_icons_dir)) - repodict = { - 'address': 'https://example.com/fdroid/repo', - 'description': 'This is just a test', - 'icon': 'blahblah', - 'name': 'test', - 'timestamp': datetime.datetime.now(), - 'version': 12, - } - requestsdict = {'install': [], 'uninstall': []} - common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' - index.make_v0({}, [], 'repo', repodict, requestsdict, {}) - self.assertTrue(os.path.isdir(repo_icons_dir)) - self.assertTrue( - os.path.exists( - os.path.join(repo_icons_dir, common.default_config['repo_icon']) - ) - ) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) - - def test_make_v0(self): - os.chdir(self.testdir) - os.mkdir('metadata') - os.mkdir('repo') - metadatafile = 'metadata/info.zwanenburg.caffeinetile.yml' - shutil.copy(os.path.join(basedir, metadatafile), metadatafile) - repo_icons_dir = os.path.join('repo', 'icons') - self.assertFalse(os.path.isdir(repo_icons_dir)) - repodict = { - 'address': 'https://example.com/fdroid/repo', - 'description': 'This is just a test', - 'icon': 'blahblah', - 'mirrors': [ - {'isPrimary': True, 'url': 'https://example.com/fdroid/repo'}, - {'extra': 'data', 'url': 'http://one/fdroid/repo'}, - {'url': 
'http://two/fdroid/repo'}, - ], - 'name': 'test', - 'timestamp': datetime.datetime.now(), - 'version': 12, - } - app = fdroidserver.metadata.parse_metadata(metadatafile) - app['icon'] = 'info.zwanenburg.caffeinetile.4.xml' - app['CurrentVersionCode'] = 4 - apps = {app.id: app} - orig_apps = copy.deepcopy(apps) - apk = { - 'hash': 'dbbdd7deadb038862f426b71efe4a64df8c3edf25d669e935f349510e16f65db', - 'hashType': 'sha256', - 'uses-permission': [['android.permission.WAKE_LOCK', None]], - 'uses-permission-sdk-23': [], - 'features': [], - 'icons_src': { - '160': 'res/drawable/ic_coffee_on.xml', - '-1': 'res/drawable/ic_coffee_on.xml', - }, - 'icons': {'160': 'info.zwanenburg.caffeinetile.4.xml'}, - 'antiFeatures': ['KnownVuln'], - 'packageName': 'info.zwanenburg.caffeinetile', - 'versionCode': 4, - 'name': 'Caffeine Tile', - 'versionName': '1.3', - 'minSdkVersion': 24, - 'targetSdkVersion': 25, - 'sig': '03f9b2f848d22fd1d8d1331e8b1b486d', - 'signer': '51cfa5c8a743833ad89acf81cb755936876a5c8b8eca54d1ffdcec0cdca25d0e', - 'size': 11740, - 'apkName': 'info.zwanenburg.caffeinetile_4.apk', - 'icon': 'info.zwanenburg.caffeinetile.4.xml', - 'added': datetime.datetime.fromtimestamp(1539122400), - } - requestsdict = {'install': [], 'uninstall': []} - common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' - common.config['make_current_version_link'] = True - index.make_v0(apps, [apk], 'repo', repodict, requestsdict, {}) - self.assertTrue(os.path.isdir(repo_icons_dir)) - self.assertTrue( - os.path.exists( - os.path.join(repo_icons_dir, common.default_config['repo_icon']) - ) - ) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) - self.assertEqual(orig_apps, apps, "apps was modified when building the index") - - def test_v0_invalid_config_exception(self): - """Index v0 needs additional config values when using --nosign - - index.xml aka Index v0 includes the full repo public key in - the XML itself. So when running `fdroid update --nosign`, - there needs to be either repo_pubkey or a full keystore config - present. 
- - """ - os.chdir(self.testdir) - os.mkdir('repo') - repo_icons_dir = os.path.join('repo', 'icons') - self.assertFalse(os.path.isdir(repo_icons_dir)) - repodict = { - 'address': 'https://example.com/fdroid/repo', - 'description': 'This is just a test', - 'icon': 'blahblah', - 'name': 'test', - 'timestamp': datetime.datetime.now(), - 'version': 12, - } - requestsdict = {'install': [], 'uninstall': []} - - common.options.nosign = False - with self.assertRaises(fdroidserver.exception.FDroidException): - index.make_v0({}, [], 'repo', repodict, requestsdict, {}) - - common.options.nosign = True - with self.assertRaises(fdroidserver.exception.FDroidException): - index.make_v0({}, [], 'repo', repodict, requestsdict, {}) - - common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' - self.assertFalse(os.path.exists(os.path.join('repo', 'index.xml'))) - self.assertFalse(os.path.exists(os.path.join('repo', 'index_unsigned.jar'))) - self.assertFalse(os.path.exists(os.path.join('repo', 'index.jar'))) - index.make_v0({}, [], 'repo', repodict, requestsdict, {}) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.xml'))) - self.assertTrue(os.path.exists(os.path.join('repo', 'index_unsigned.jar'))) - self.assertFalse(os.path.exists(os.path.join('repo', 'index.jar'))) - - def test_make_v1_with_mirrors(self): - os.chdir(self.testdir) - os.mkdir('repo') - repodict = { - 'address': 'https://example.com/fdroid/repo', - 'mirrors': [ - {'isPrimary': True, 'url': 'https://example.com/fdroid/repo'}, - {'extra': 'data', 'url': 'http://one/fdroid/repo'}, - {'url': 'http://two/fdroid/repo'}, - ], - } - index.make_v1({}, [], 'repo', repodict, {}, {}) - index_v1 = Path('repo/index-v1.json') - self.assertTrue(index_v1.exists()) - with index_v1.open() as fp: - self.assertEqual( - json.load(fp)['repo']['mirrors'], - ['http://one/fdroid/repo', 'http://two/fdroid/repo'], - ) - - def test_github_get_mirror_service_urls(self): - for url in [ - 'git@github.com:foo/bar', - 'git@github.com:foo/bar.git', - 'https://github.com/foo/bar', - 'https://github.com/foo/bar.git', - ]: - self.assertEqual( - ['https://raw.githubusercontent.com/foo/bar/master/fdroid'], - index.get_mirror_service_urls({"url": url}), - ) - - @patch.dict(os.environ, clear=True) - def test_gitlab_get_mirror_service_urls(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - git_mirror_path = Path('git-mirror/fdroid') - git_mirror_path.mkdir(parents=True) - ci_job_id = '12345678' - artifacts_url = ( - 'https://group.gitlab.io/-/project/-/jobs/%s/artifacts/public/fdroid' - % ci_job_id - ) - with (git_mirror_path / 'placeholder').open('w') as fp: - fp.write(' ') - for url in [ - 'git@gitlab.com:group/project', - 'git@gitlab.com:group/project.git', - 'https://gitlab.com/group/project', - 'https://gitlab.com/group/project.git', - ]: - with patch('fdroidserver.common.GITLAB_COM_PAGES_MAX_SIZE', 1000): - expected = [ - 'https://group.gitlab.io/project/fdroid', - 'https://gitlab.com/group/project/-/raw/master/fdroid', - ] - self.assertEqual( - expected, - index.get_mirror_service_urls({"url": url}), - ) - with patch.dict(os.environ, clear=True): - os.environ['CI_JOB_ID'] = ci_job_id - self.assertEqual( - expected + [artifacts_url], - index.get_mirror_service_urls({"url": url}), - ) - with patch('fdroidserver.common.GITLAB_COM_PAGES_MAX_SIZE', 10): - expected = [ - 'https://gitlab.com/group/project/-/raw/master/fdroid', - ] - self.assertEqual( - expected, - index.get_mirror_service_urls({"url": url}), - ) - with 
patch.dict(os.environ, clear=True): - os.environ['CI_JOB_ID'] = ci_job_id - self.assertEqual( - expected + [artifacts_url], - index.get_mirror_service_urls({"url": url}), - ) - - def test_make_website(self): - os.chdir(self.testdir) - os.mkdir('metadata') - os.mkdir('repo') - - repodict = { - 'address': 'https://example.com/fdroid/repo', - 'description': 'This is just a test', - 'icon': 'blahblah', - 'name': 'test', - 'timestamp': datetime.datetime.now(), - 'version': 12, - } - - common.config['repo_pubkey'] = 'ffffffffffffffffffffffffffffffffff' - - index.make_website([], "repo", repodict) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.html'))) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.css'))) - self.assertTrue(os.path.exists(os.path.join('repo', 'index.png'))) - - try: - from html5print import CSSBeautifier, HTMLBeautifier - except ImportError: - print('WARNING: skipping rest of test since html5print is missing!') - return - - with open(os.path.join("repo", "index.html")) as f: - html = f.read() - pretty_html = HTMLBeautifier.beautify(html) - self.maxDiff = None - self.assertEqual(html, pretty_html) - - with open(os.path.join("repo", "index.css")) as f: - css = f.read() - pretty_css = CSSBeautifier.beautify(css) - self.maxDiff = None - self.assertEqual(css, pretty_css) - - def test_sort_package_versions_with_invalid(self): - i = [ - { - 'packageName': 'org.smssecure.smssecure', - 'apkName': 'smssecure-custom.fake', - 'signer': None, - 'versionCode': 11111, - } - ] - - index.sort_package_versions(i, common.load_publish_signer_fingerprints()) - - def test_package_metadata(self): - """A smoke check and format check of index.package_metadata()""" - - def _kn(key): - return key[0].lower() + key[1:] - - apps = fdroidserver.metadata.read_metadata() - update.insert_localized_app_metadata(apps) - - # smoke check all metadata files - for appid, app in apps.items(): - metadata = index.package_metadata(app, 'repo') - for k in ('Description', 'Name', 'Summary', 'video'): - if app.get(k): - self.assertTrue(isinstance(metadata[_kn(k)], dict)) - for k in ('AuthorWebSite', 'IssueTracker', 'Translation', 'WebSite'): - if app.get(k): - self.assertTrue(isinstance(metadata[_kn(k)], str)) - - # make sure these known values were properly parsed and included - appid = 'info.guardianproject.urzip' - app = apps[appid] - metadata = index.package_metadata(app, 'repo') - # files - self.assertEqual( - os.path.getsize(f'repo/{appid}/en-US/featureGraphic.png'), - metadata['featureGraphic']['en-US']['size'], - ) - self.assertEqual( - os.path.getsize(f'repo/{appid}/en-US/icon.png'), - metadata['icon']['en-US']['size'], - ) - # localized strings - self.assertEqual({'en-US': 'title'}, metadata['name']) - self.assertEqual({'en-US': 'video'}, metadata['video']) - # strings - self.assertEqual( - 'https://dev.guardianproject.info/projects/urzip', - metadata['webSite'], - ) - - def test_add_mirrors_to_repodict(self): - """Test based on the contents of tests/config.yml""" - repodict = {'address': common.config['repo_url']} - index.add_mirrors_to_repodict('repo', repodict) - self.assertEqual( - repodict['mirrors'], - [ - {'isPrimary': True, 'url': 'https://MyFirstFDroidRepo.org/fdroid/repo'}, - {'url': 'http://foobarfoobarfoobar.onion/fdroid/repo'}, - {'url': 'https://foo.bar/fdroid/repo'}, - ], - ) - - def test_custom_config_yml_with_mirrors(self): - """Test based on custom contents of config.yml""" - os.chdir(self.testdir) - repo_url = 'https://example.com/fdroid/repo' - c = {'repo_url': 
repo_url, 'mirrors': ['http://one/fdroid']} - with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp: - yaml.dump(c, fp) - common.config = None - common.read_config() - repodict = {'address': common.config['repo_url']} - index.add_mirrors_to_repodict('repo', repodict) - self.assertEqual( - repodict['mirrors'], - [ - {'url': 'https://example.com/fdroid/repo', 'isPrimary': True}, - {'url': 'http://one/fdroid/repo'}, - ], - ) - - def test_no_mirrors_config(self): - common.config = dict() - repodict = {'address': 'https://example.com/fdroid/repo'} - index.add_mirrors_to_repodict('repo', repodict) - self.assertFalse('mirrors' in repodict) - - def test_add_metadata_to_canonical_in_mirrors_config(self): - """It is possible to add extra metadata to the canonical URL""" - common.config = { - 'repo_url': 'http://one/fdroid/repo', - 'mirrors': [ - {'url': 'http://one/fdroid', 'extra': 'data'}, - {'url': 'http://two/fdroid'}, - ], - } - repodict = {'address': common.config['repo_url']} - index.add_mirrors_to_repodict('repo', repodict) - self.assertEqual( - repodict['mirrors'], - [ - {'extra': 'data', 'isPrimary': True, 'url': 'http://one/fdroid/repo'}, - {'url': 'http://two/fdroid/repo'}, - ], - ) - - def test_duplicate_primary_in_mirrors_config(self): - """There can be only one primary mirror aka canonical URL""" - common.config = { - 'repo_url': 'http://one/fdroid', - 'mirrors': [ - {'url': 'http://one/fdroid', 'countryCode': 'SA'}, - {'url': 'http://two/fdroid'}, - {'url': 'http://one/fdroid'}, - ], - } - repodict = {'address': common.config['repo_url']} - with self.assertRaises(fdroidserver.exception.FDroidException): - index.add_mirrors_to_repodict('repo', repodict) - - def test_bad_type_in_mirrors_config(self): - for i in (1, 2.3, b'asdf'): - common.config = {'mirrors': i} - repodict = dict() - with self.assertRaises(fdroidserver.exception.FDroidException): - index.add_mirrors_to_repodict('repo', repodict) - - def test_load_mirrors_config_from_file(self): - # empty the dict for *.config, see setUp() - for k in sorted(common.config.keys()): - del common.config[k] - - os.chdir(self.testdir) - os.mkdir('config') - primary = 'https://primary.com/fdroid/repo' - mirror = 'https://mirror.com/fdroid' - with open('config/mirrors.yml', 'w') as fp: - yaml.dump([{'url': mirror}], fp) - repodict = {'address': primary} - index.add_mirrors_to_repodict('repo', repodict) - self.assertEqual( - repodict['mirrors'], - [ - {'isPrimary': True, 'url': primary}, - {'url': mirror + '/repo'}, - ], - ) - - def test_error_when_load_mirrors_from_config_and_file(self): - # empty the dict for *.config, see setUp() - for k in sorted(common.config.keys()): - del common.config[k] - - os.chdir(self.testdir) - os.mkdir('config') - with open('config/mirrors.yml', 'w') as fp: - yaml.dump([{'url': 'https://foo.com'}], fp) - repodict = { - 'address': 'https://foo.com', - 'mirrors': {'url': 'http://two/fdroid/repo'}, - } - with self.assertRaises(fdroidserver.exception.FDroidException): - index.add_mirrors_to_repodict('repo', repodict) - - def test_erroneous_isPrimary_in_mirrors_config(self): - """There can be only one primary mirror aka canonical URL""" - common.config = { - 'repo_url': 'http://one/fdroid', - 'mirrors': [ - {'url': 'http://one/fdroid', 'countryCode': 'SA'}, - {'url': 'http://two/fdroid', 'isPrimary': True}, - ], - } - repodict = {'address': common.config['repo_url']} - with self.assertRaises(fdroidserver.exception.FDroidException): - index.add_mirrors_to_repodict('repo', repodict) - - -class 
AltstoreIndexTest(unittest.TestCase): - def test_make_altstore(self): - self.maxDiff = None - - apps = { - "app.fake": { - "AutoName": "Fake App", - "AuthorName": "Fake Author", - "iconv2": {"en_US": "fake_icon.png"}, - } - } - apks = [ - { - "packageName": "app.fake", - "apkName": "app.fake_123.ipa", - "versionName": "v123", - "added": datetime.datetime(2000, 2, 2, 2, 2, 2), - "size": 123, - "ipa_MinimumOSVersion": "10.0", - "ipa_DTPlatformVersion": "12.0", - "ipa_permissions": [ - "NSCameraUsageDescription", - "NSDocumentsFolderUsageDescription", - ], - "ipa_entitlements": [ - "com.apple.developer.team-identifier", - "com.apple.developer.web-browser", - "keychain-access-groups", - ], - }, - ] - config = { - "repo_icon": "fake_repo_icon.png", - "repo_name": "fake_repo", - "repo_url": "gopher://fake-repo.com/fdroid/repo", - } - - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - repodir = Path(tmpdir) / 'repo' - repodir.mkdir() - (repodir / "fake.ipa").touch() - - fdroidserver.index.make_altstore( - apps, - apks, - config, - repodir, - True, - ) - - with open(repodir / "altstore-index.json", 'r') as f: - self.assertDictEqual( - { - "apps": [ - { - "appPermissions": { - "entitlements": [ - 'com.apple.developer.team-identifier', - 'com.apple.developer.web-browser', - 'keychain-access-groups', - ], - 'privacy': [ - 'NSCameraUsageDescription', - 'NSDocumentsFolderUsageDescription', - ], - }, - 'bundleIdentifier': 'app.fake', - 'developerName': 'Fake Author', - 'iconURL': 'gopher://fake-repo.com/fdroid/repo', - 'localizedDescription': '', - 'name': 'Fake App', - 'screenshots': [], - 'versions': [ - { - 'date': '2000-02-02T02:02:02', - 'downloadURL': 'gopher://fake-repo.com/fdroid/repo/app.fake_123.ipa', - 'maxOSVersion': '12.0', - 'minOSVersion': '10.0', - 'size': 123, - 'version': 'v123', - } - ], - }, - ], - 'name': 'fake_repo', - 'news': [], - }, - json.load(f), - ) diff --git a/tests/test_init.py b/tests/test_init.py deleted file mode 100755 index a038493b..00000000 --- a/tests/test_init.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python3 - -import os -import pathlib -import shutil -import sys -import unittest - -import fdroidserver.common -import fdroidserver.init - -from .shared_test_code import mkdtemp - -basedir = pathlib.Path(__file__).parent - - -class InitTest(unittest.TestCase): - '''fdroidserver/init.py''' - - def setUp(self): - fdroidserver.common.config = None - fdroidserver.init.config = None - self._td = mkdtemp() - self.testdir = self._td.name - os.chdir(self.testdir) - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - def test_disable_in_config(self): - test = 'mysupersecrets' - configfile = pathlib.Path(fdroidserver.common.CONFIG_FILE) - configfile.write_text(f'keystore: NONE\nkeypass: {test}\n', encoding='utf-8') - configfile.chmod(0o600) - config = fdroidserver.common.read_config() - self.assertEqual('NONE', config['keystore']) - self.assertEqual(test, config['keypass']) - fdroidserver.init.disable_in_config('keypass', 'comment') - self.assertIn('#keypass:', configfile.read_text()) - fdroidserver.common.config = None - config = fdroidserver.common.read_config() - self.assertIsNone(config.get('keypass')) - - @unittest.skipIf(os.name == 'nt', "calling main() like this hangs on Windows") - def test_main_in_empty_dir(self): - """Test that `fdroid init` will find apksigner and add it to the config""" - - shutil.copy(basedir / 'keystore.jks', self.testdir) - - bindir = os.path.join(os.getcwd(), 'bin') - os.mkdir(bindir) - apksigner = 
os.path.join(bindir, 'apksigner') - open(apksigner, 'w').close() - os.chmod(apksigner, 0o755) # nosec B103 - - sys.argv = ['fdroid init', '--keystore', 'keystore.jks', '--repo-keyalias=sova'] - with unittest.mock.patch.dict(os.environ, {'PATH': bindir}): - fdroidserver.init.main() - self.assertEqual(apksigner, fdroidserver.init.config.get('apksigner')) diff --git a/tests/test_install.py b/tests/test_install.py deleted file mode 100755 index aa239d4d..00000000 --- a/tests/test_install.py +++ /dev/null @@ -1,256 +0,0 @@ -#!/usr/bin/env python3 - -import os -import textwrap -import unittest -from pathlib import Path -from unittest.mock import Mock, patch - -import fdroidserver -from fdroidserver import common, install -from fdroidserver.exception import BuildException, FDroidException - - -@unittest.skipIf(os.uname().machine == 's390x', 'adb is not ported to s390x') -class InstallTest(unittest.TestCase): - '''fdroidserver/install.py''' - - def tearDown(self): - common.config = None - - def test_devices(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - try: - config['adb'] = fdroidserver.common.find_sdk_tools_cmd('adb') - except FDroidException as e: - self.skipTest(f'Skipping test because: {e}') - self.assertTrue(os.path.exists(config['adb'])) - self.assertTrue(os.path.isfile(config['adb'])) - devices = fdroidserver.install.devices() - self.assertIsInstance(devices, list, 'install.devices() did not return a list!') - for device in devices: - self.assertIsInstance(device, str) - - def test_devices_fail(self): - common.config = dict() - common.fill_config_defaults(common.config) - common.config['adb'] = '/bin/false' - with self.assertRaises(FDroidException): - fdroidserver.install.devices() - - def test_devices_fail_nonexistent(self): - """This is mostly just to document this strange difference in behavior""" - common.config = dict() - common.fill_config_defaults(common.config) - common.config['adb'] = '/nonexistent' - with self.assertRaises(BuildException): - fdroidserver.install.devices() - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_none(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = 'List of devices attached\n\n' - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual([], fdroidserver.install.devices()) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_one(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = 'List of devices attached\n05995813\tdevice\n\n' - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual(['05995813'], fdroidserver.install.devices()) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_many(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = textwrap.dedent( - """* daemon not running; starting now at tcp:5037 - * daemon started successfully - List of devices attached - RZCT809FTQM device - 05995813 device - emulator-5556 device - emulator-5554 unauthorized - 0a388e93 no permissions (missing udev rules? 
user is in the plugdev group); see [http://developer.android.com/tools/device.html] - 986AY133QL device - 09301JEC215064 device - 015d165c3010200e device - 4DCESKVGUC85VOTO device - - """ - ) - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual( - [ - 'RZCT809FTQM', - '05995813', - 'emulator-5556', - '986AY133QL', - '09301JEC215064', - '015d165c3010200e', - '4DCESKVGUC85VOTO', - ], - fdroidserver.install.devices(), - ) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_error(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = textwrap.dedent( - """* daemon not running. starting it now on port 5037 * - * daemon started successfully * - ** daemon still not running - error: cannot connect to daemon - """ - ) - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual([], fdroidserver.install.devices()) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_no_permissions(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = textwrap.dedent( - """List of devices attached - ???????????????? no permissions - """ - ) - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual([], fdroidserver.install.devices()) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_unauthorized(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = textwrap.dedent( - """List of devices attached - aeef5e4e unauthorized - """ - ) - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual([], fdroidserver.install.devices()) - - @patch('fdroidserver.common.SdkToolsPopen') - def test_devices_with_mock_no_permissions_with_serial(self, mock_SdkToolsPopen): - p = Mock() - mock_SdkToolsPopen.return_value = p - p.output = textwrap.dedent( - """List of devices attached - 4DCESKVGUC85VOTO no permissions (missing udev rules? 
user is in the plugdev group); see [http://developer.android.com/tools/device.html] - - """ - ) - p.returncode = 0 - common.config = dict() - common.fill_config_defaults(common.config) - self.assertEqual([], fdroidserver.install.devices()) - - @staticmethod - def _download_raise(privacy_mode): - raise Exception('fake failed download') - - @patch('fdroidserver.install.download_apk') - @patch('fdroidserver.install.download_fdroid_apk') - @patch('fdroidserver.install.download_fdroid_apk_from_github') - @patch('fdroidserver.install.download_fdroid_apk_from_ipns') - @patch('fdroidserver.install.download_fdroid_apk_from_maven') - def test_install_fdroid_apk_privacy_mode_true( - self, maven, ipns, github, download_fdroid_apk, download_apk - ): - download_apk.side_effect = self._download_raise - download_fdroid_apk.side_effect = self._download_raise - github.side_effect = self._download_raise - ipns.side_effect = self._download_raise - maven.side_effect = self._download_raise - fdroidserver.common.config = {'jarsigner': 'fakepath'} - install.install_fdroid_apk(privacy_mode=True) - download_apk.assert_not_called() - download_fdroid_apk.assert_not_called() - github.assert_called_once() - ipns.assert_called_once() - maven.assert_called_once() - - @patch('fdroidserver.install.download_apk') - @patch('fdroidserver.install.download_fdroid_apk') - @patch('fdroidserver.install.download_fdroid_apk_from_github') - @patch('fdroidserver.install.download_fdroid_apk_from_ipns') - @patch('fdroidserver.install.download_fdroid_apk_from_maven') - def test_install_fdroid_apk_privacy_mode_false( - self, maven, ipns, github, download_fdroid_apk, download_apk - ): - download_apk.side_effect = self._download_raise - download_fdroid_apk.side_effect = self._download_raise - github.side_effect = self._download_raise - ipns.side_effect = self._download_raise - maven.side_effect = self._download_raise - fdroidserver.common.config = {'jarsigner': 'fakepath'} - install.install_fdroid_apk(privacy_mode=False) - download_apk.assert_called_once() - download_fdroid_apk.assert_called_once() - github.assert_called_once() - ipns.assert_called_once() - maven.assert_called_once() - - @patch('fdroidserver.install.download_apk') - @patch('fdroidserver.install.download_fdroid_apk') - @patch('fdroidserver.install.download_fdroid_apk_from_github') - @patch('fdroidserver.install.download_fdroid_apk_from_ipns') - @patch('fdroidserver.install.download_fdroid_apk_from_maven') - @patch('locale.getlocale', lambda: ('zh_CN', 'UTF-8')) - def test_install_fdroid_apk_privacy_mode_locale_auto( - self, maven, ipns, github, download_fdroid_apk, download_apk - ): - download_apk.side_effect = self._download_raise - download_fdroid_apk.side_effect = self._download_raise - github.side_effect = self._download_raise - ipns.side_effect = self._download_raise - maven.side_effect = self._download_raise - fdroidserver.common.config = {'jarsigner': 'fakepath'} - install.install_fdroid_apk(privacy_mode=None) - download_apk.assert_not_called() - download_fdroid_apk.assert_not_called() - github.assert_called_once() - ipns.assert_called_once() - maven.assert_called_once() - - @patch('fdroidserver.net.download_using_mirrors', lambda m: 'testvalue') - def test_download_fdroid_apk_smokecheck(self): - self.assertEqual('testvalue', install.download_fdroid_apk()) - - @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') - def test_download_fdroid_apk(self): - f = install.download_fdroid_apk() - self.assertTrue(Path(f).exists()) - - 
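[Editor's note] The three mocked privacy-mode tests above encode how install_fdroid_apk is expected to choose its download sources: with privacy_mode on (explicitly, or inferred from a locale such as zh_CN), the direct download_apk/download_fdroid_apk paths are skipped and only the GitHub/IPNS/Maven sources are tried, with failures falling through to the next source. The sketch below is an illustration of that asserted behaviour only, not fdroidserver's implementation; the helper names _choose_downloaders and install_from_first_working_source are made up, and the relative ordering of the sources is an assumption the tests do not pin down.

import locale


def _choose_downloaders(privacy_mode, sources):
    """Pick download callables in the pattern the mocked tests assert.

    `sources` maps the names patched in the tests (download_apk,
    download_fdroid_apk, download_fdroid_apk_from_github/_ipns/_maven)
    to callables.
    """
    if privacy_mode is None:
        # the locale_auto test treats a zh_* locale as "privacy mode on"
        lang = locale.getlocale()[0] or ''
        privacy_mode = lang.startswith('zh')
    ordered = []
    if not privacy_mode:
        # direct downloads are only attempted when privacy mode is off
        ordered += [sources['download_apk'], sources['download_fdroid_apk']]
    # the privacy-preserving sources are always attempted
    ordered += [
        sources['download_fdroid_apk_from_github'],
        sources['download_fdroid_apk_from_ipns'],
        sources['download_fdroid_apk_from_maven'],
    ]
    return ordered


def install_from_first_working_source(privacy_mode, sources):
    """Try each source in turn, tolerating failures as the mocks do."""
    for download in _choose_downloaders(privacy_mode, sources):
        try:
            return download()
        except Exception:
            continue  # fall through to the next source

Called with all five callables raising, as the tests' _download_raise side effect does, this reproduces the call pattern the assertions check: every selected source is tried exactly once and the function still returns without raising.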
@unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') - def test_download_fdroid_apk_from_maven(self): - f = install.download_fdroid_apk_from_maven() - self.assertTrue(Path(f).exists()) - - @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') - def test_download_fdroid_apk_from_ipns(self): - f = install.download_fdroid_apk_from_ipns() - self.assertTrue(Path(f).exists()) - - @unittest.skipUnless(os.getenv('test_download_fdroid_apk'), 'requires net access') - def test_download_fdroid_apk_from_github(self): - f = install.download_fdroid_apk_from_github() - self.assertTrue(Path(f).exists()) diff --git a/tests/test_integration.py b/tests/test_integration.py deleted file mode 100755 index 2cdf19d9..00000000 --- a/tests/test_integration.py +++ /dev/null @@ -1,1691 +0,0 @@ -import configparser -import itertools -import os -import platform -import re -import shlex -import shutil -import subprocess -import sys -import threading -import unittest -from datetime import datetime, timezone -from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer -from pathlib import Path - -try: - from androguard.core.bytecodes.apk import get_apkid # androguard <4 -except ModuleNotFoundError: - from androguard.core.apk import get_apkid - -from fdroidserver._yaml import yaml, yaml_dumper - -from .shared_test_code import mkdir_testfiles, VerboseFalseOptions - -# TODO: port generic tests that use index.xml to index-v2 (test that -# explicitly test index-v0 should still use index.xml) - - -basedir = Path(__file__).parent -FILES = basedir - -try: - WORKSPACE = Path(os.environ["WORKSPACE"]) -except KeyError: - WORKSPACE = basedir.parent - -from fdroidserver import common -from fdroidserver import deploy - -conf = {"sdk_path": os.getenv("ANDROID_HOME", "")} -common.find_apksigner(conf) -USE_APKSIGNER = "apksigner" in conf - - -def docker_exists(): - try: - subprocess.check_output(["docker", "info"]) - except Exception: - return False - else: - return True - - -@unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') -class IntegrationTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - try: - cls.fdroid_cmd = shlex.split(os.environ["fdroid"]) - except KeyError: - cls.fdroid_cmd = [WORKSPACE / "fdroid"] - - os.environ.update( - { - "GIT_AUTHOR_NAME": "Test", - "GIT_AUTHOR_EMAIL": "no@mail", - "GIT_COMMITTER_NAME": "Test", - "GIT_COMMITTER_EMAIL": "no@mail", - "GIT_ALLOW_PROTOCOL": "file:https", - } - ) - - def setUp(self): - self.prev_cwd = Path() - self.testdir = mkdir_testfiles(WORKSPACE, self) - self.tmp_repo_root = self.testdir / "fdroid" - self.tmp_repo_root.mkdir(parents=True) - deploy.config = {} - os.chdir(self.tmp_repo_root) - - def tearDown(self): - os.chdir(self.prev_cwd) - shutil.rmtree(self.testdir) - - def assert_run(self, *args, **kwargs): - proc = subprocess.run(*args, **kwargs) - self.assertEqual(proc.returncode, 0) - return proc - - def assert_run_fail(self, *args, **kwargs): - proc = subprocess.run(*args, **kwargs) - self.assertNotEqual(proc.returncode, 0) - return proc - - @staticmethod - def update_yaml(path, items, replace=False): - """Update a .yml file, e.g. 
config.yml, with the given items.""" - doc = {} - if not replace: - try: - with open(path) as f: - doc = yaml.load(f) - except FileNotFoundError: - pass - doc.update(items) - with open(path, "w") as f: - yaml_dumper.dump(doc, f) - - @staticmethod - def remove_lines(path, unwanted_strings): - """Remove the lines in the path that contain the unwanted strings.""" - - def contains_unwanted(line, unwanted_strings): - for str in unwanted_strings: - if str in line: - return True - return False - - with open(path) as f: - filtered = [ - line for line in f if not contains_unwanted(line, unwanted_strings) - ] - - with open(path, "w") as f: - for line in filtered: - f.write(line) - - @staticmethod - def copy_apks_into_repo(): - def to_skip(name): - for str in [ - "unaligned", - "unsigned", - "badsig", - "badcert", - "bad-unicode", - "janus.apk", - ]: - if str in name: - return True - return False - - for f in FILES.glob("*.apk"): - if not to_skip(f.name): - appid, versionCode, _ignored = get_apkid(f) - shutil.copy( - f, - Path("repo") / common.get_release_apk_filename(appid, versionCode), - ) - - @staticmethod - def create_fake_android_home(path): - (path / "tools").mkdir() - (path / "platform-tools").mkdir() - (path / "build-tools/34.0.0").mkdir(parents=True) - (path / "build-tools/34.0.0/aapt").touch() - - def fdroid_init_with_prebuilt_keystore(self, keystore_path=FILES / "keystore.jks"): - self.assert_run( - self.fdroid_cmd - + ["init", "--keystore", keystore_path, "--repo-keyalias", "sova"] - ) - self.update_yaml( - common.CONFIG_FILE, - { - "keystorepass": "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=", - "keypass": "r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=", - }, - ) - - @unittest.skipUnless(USE_APKSIGNER, "requires apksigner") - def test_run_process_when_building_and_signing_are_on_separate_machines(self): - shutil.copy(FILES / "keystore.jks", "keystore.jks") - self.fdroid_init_with_prebuilt_keystore("keystore.jks") - self.update_yaml( - common.CONFIG_FILE, - { - "make_current_version_link": True, - "keydname": "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US", - }, - ) - - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") - Path("unsigned").mkdir() - shutil.copy( - FILES / "urzip-release-unsigned.apk", - "unsigned/info.guardianproject.urzip_100.apk", - ) - - self.assert_run(self.fdroid_cmd + ["publish", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["update", "--verbose", "--nosign"]) - self.assert_run(self.fdroid_cmd + ["signindex", "--verbose"]) - - self.assertIn( - '', - Path("repo/index.xml").read_text(), - ) - self.assertTrue(Path("repo/index.jar").is_file()) - self.assertTrue(Path("repo/index-v1.jar").is_file()) - apkcache = Path("tmp/apkcache.json") - self.assertTrue(apkcache.is_file()) - self.assertTrue(apkcache.stat().st_size > 0) - self.assertTrue(Path("urzip.apk").is_symlink()) - - def test_utf8_metadata(self): - self.fdroid_init_with_prebuilt_keystore() - self.update_yaml( - common.CONFIG_FILE, - { - "repo_description": "获取已安装在您的设备上的应用的", - "mirrors": ["https://foo.bar/fdroid", "http://secret.onion/fdroid"], - }, - ) - shutil.copy(FILES / "urzip.apk", "repo") - shutil.copy(FILES / "bad-unicode-πÇÇ现代通用字-български-عربي1.apk", "repo") - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") - - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assert_run(self.fdroid_cmd + ["update"]) - - def test_copy_git_import_and_run_fdroid_scanner_on_it(self): - url = 
"https://gitlab.com/fdroid/ci-test-app.git" - Path("metadata").mkdir() - self.update_yaml( - "metadata/org.fdroid.ci.test.app.yml", - { - "AutoName": "Just A Test", - "WebSite": None, - "Builds": [ - { - "versionName": "0.3", - "versionCode": 300, - "commit": "0.3", - "subdir": "app", - "gradle": ["yes"], - } - ], - "Repo": url, - "RepoType": "git", - }, - ) - - self.assert_run(["git", "clone", url, "build/org.fdroid.ci.test.app"]) - self.assert_run( - self.fdroid_cmd + ["scanner", "org.fdroid.ci.test.app", "--verbose"] - ) - - @unittest.skipUnless(shutil.which("gpg"), "requires command line gpg") - def test_copy_repo_generate_java_gpg_keys_update_and_gpgsign(self): - """Needs tricks to make gpg-agent run in a test harness.""" - self.fdroid_init_with_prebuilt_keystore() - shutil.copytree(FILES / "repo", "repo", dirs_exist_ok=True) - for dir in ["config", "metadata"]: - shutil.copytree(FILES / dir, dir) - # gpg requires a short path to the socket to talk to gpg-agent - gnupghome = (WORKSPACE / '.testfiles/gnupghome').resolve() - shutil.rmtree(gnupghome, ignore_errors=True) - shutil.copytree(FILES / "gnupghome", gnupghome) - os.chmod(gnupghome, 0o700) - self.update_yaml( - common.CONFIG_FILE, - { - "install_list": "org.adaway", - "uninstall_list": ["com.android.vending", "com.facebook.orca"], - "gpghome": str(gnupghome), - "gpgkey": "CE71F7FB", - "mirrors": [ - "http://foobarfoobarfoobar.onion/fdroid", - "https://foo.bar/fdroid", - ], - }, - ) - self.assert_run( - self.fdroid_cmd + ["update", "--verbose", "--pretty"], - env=os.environ | {"LC_MESSAGES": "C.UTF-8"}, - ) - index_xml = Path("repo/index.xml").read_text() - self.assertIn("" in line) - with open("repo/index.xml") as f: - repo_cnt = sum(1 for line in f if "" in line) - if USE_APKSIGNER: - self.assertEqual(archive_cnt, 2) - self.assertEqual(repo_cnt, 10) - else: - # This will fail when jarsigner allows MD5 for APK signatures - self.assertEqual(archive_cnt, 5) - self.assertEqual(repo_cnt, 7) - - @unittest.skipIf(USE_APKSIGNER, "runs only without apksigner") - def test_per_app_archive_policy(self): - self.fdroid_init_with_prebuilt_keystore() - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") - for f in FILES.glob("repo/com.politedroid_[0-9].apk"): - shutil.copy(f, "repo") - self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) - - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 4) - self.assertEqual(archive_cnt, 0) - self.assertIn("com.politedroid_3.apk", repo) - self.assertIn("com.politedroid_4.apk", repo) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_6.apk", repo) - self.assertTrue(Path("repo/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - - # enable one app in the repo - self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in 
archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertEqual(archive_cnt, 3) - self.assertIn("com.politedroid_6.apk", repo) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_5.apk", archive) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) - - # remove all apps from the repo - self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 0}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 0) - self.assertEqual(archive_cnt, 4) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_5.apk", archive) - self.assertIn("com.politedroid_6.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_6.apk").is_file()) - self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) - - # move back one from archive to the repo - self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertEqual(archive_cnt, 3) - self.assertIn("com.politedroid_6.apk", repo) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_5.apk", archive) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) - self.assertFalse(Path("archive/com.politedroid_6.apk").exists()) - - # set an earlier version as CVC and test that it's the only one not archived - self.update_yaml("metadata/com.politedroid.yml", {"CurrentVersionCode": 5}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertEqual(archive_cnt, 3) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_6.apk", archive) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - 
self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_6.apk").is_file()) - - def test_moving_old_apks_to_and_from_the_archive(self): - self.fdroid_init_with_prebuilt_keystore() - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") - self.remove_lines("metadata/com.politedroid.yml", ["ArchivePolicy:"]) - for f in FILES.glob("repo/com.politedroid_[0-9].apk"): - shutil.copy(f, "repo") - self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) - - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - self.assertEqual(repo_cnt, 3) - self.assertIn("com.politedroid_4.apk", repo) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_6.apk", repo) - self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(archive_cnt, 1) - self.assertIn("com.politedroid_3.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - - self.update_yaml(common.CONFIG_FILE, {"archive_older": 1}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertIn("com.politedroid_6.apk", repo) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(archive_cnt, 3) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_5.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) - - # disabling deletes from the archive - metadata_path = Path("metadata/com.politedroid.yml") - metadata = metadata_path.read_text() - metadata = re.sub( - "versionCode: 4", "versionCode: 4\n disable: testing deletion", metadata - ) - metadata_path.write_text(metadata) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertIn("com.politedroid_6.apk", repo) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(archive_cnt, 2) - self.assertIn("com.politedroid_3.apk", archive) - self.assertNotIn("com.politedroid_4.apk", archive) - self.assertIn("com.politedroid_5.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertFalse(Path("archive/com.politedroid_4.apk").exists()) - self.assertTrue(Path("archive/com.politedroid_5.apk").is_file()) - - # disabling deletes from the repo, and promotes one from the archive - metadata = re.sub( - "versionCode: 6", "versionCode: 6\n disable: testing deletion", metadata - ) - 
metadata_path.write_text(metadata) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - self.assertEqual(repo_cnt, 1) - self.assertIn("com.politedroid_5.apk", repo) - self.assertNotIn("com.politedroid_6.apk", repo) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertFalse(Path("repo/com.politedroid_6.apk").exists()) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(archive_cnt, 1) - self.assertIn("com.politedroid_3.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertFalse(Path("archive/com.politedroid_6.apk").exists()) - - def test_that_verify_can_succeed_and_fail(self): - Path("tmp").mkdir() - Path("unsigned").mkdir() - shutil.copy(FILES / "repo/com.politedroid_6.apk", "tmp") - shutil.copy(FILES / "repo/com.politedroid_6.apk", "unsigned") - self.assert_run( - self.fdroid_cmd - + ["verify", "--reuse-remote-apk", "--verbose", "com.politedroid"] - ) - # force a fail - shutil.copy( - FILES / "repo/com.politedroid_5.apk", "unsigned/com.politedroid_6.apk" - ) - self.assert_run_fail( - self.fdroid_cmd - + ["verify", "--reuse-remote-apk", "--verbose", "com.politedroid"] - ) - - def test_allowing_disabled_signatures_in_repo_and_archive(self): - self.fdroid_init_with_prebuilt_keystore() - self.update_yaml( - common.CONFIG_FILE, {"allow_disabled_algorithms": True, "archive_older": 3} - ) - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") - self.update_yaml( - "metadata/info.guardianproject.urzip.yml", - {"Summary": "good test version of urzip"}, - replace=True, - ) - self.update_yaml( - "metadata/org.bitbucket.tickytacky.mirrormirror.yml", - {"Summary": "good MD5 sig, disabled algorithm"}, - replace=True, - ) - for f in Path("metadata").glob("*.yml"): - self.remove_lines(f, ["ArchivePolicy:"]) - for f in itertools.chain( - FILES.glob("urzip-badsig.apk"), - FILES.glob("org.bitbucket.tickytacky.mirrormirror_[0-9].apk"), - FILES.glob("repo/com.politedroid_[0-9].apk"), - ): - shutil.copy(f, "repo") - - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 6) - self.assertEqual(archive_cnt, 2) - self.assertIn("com.politedroid_4.apk", repo) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_6.apk", repo) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) - self.assertNotIn("urzip-badsig.apk", repo) - self.assertNotIn("urzip-badsig.apk", archive) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - self.assertTrue( - Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() - ) - self.assertTrue( - 
Path("repo/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() - ) - self.assertTrue( - Path("repo/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() - ) - self.assertTrue( - Path("repo/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() - ) - self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) - - if not USE_APKSIGNER: - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - repo = Path("repo/index.xml").read_text() - repo_cnt = sum(1 for line in repo.splitlines() if "" in line) - archive = Path("archive/index.xml").read_text() - archive_cnt = sum(1 for line in archive.splitlines() if "" in line) - self.assertEqual(repo_cnt, 3) - self.assertEqual(archive_cnt, 5) - self.assertIn("com.politedroid_4.apk", repo) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_6.apk", repo) - self.assertNotIn("urzip-badsig.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", archive) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", archive) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", archive) - self.assertIn("com.politedroid_3.apk", archive) - self.assertNotIn("urzip-badsig.apk", archive) - self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - self.assertTrue( - Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() - ) - self.assertTrue( - Path("archive/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() - ) - self.assertTrue( - Path("archive/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() - ) - self.assertTrue( - Path("archive/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() - ) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) - - # test unarchiving when disabled_algorithms are allowed again - self.update_yaml(common.CONFIG_FILE, {"allow_disabled_algorithms": True}) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - with open("archive/index.xml") as f: - archive_cnt = sum(1 for line in f if "" in line) - with open("repo/index.xml") as f: - repo_cnt = sum(1 for line in f if "" in line) - self.assertEqual(repo_cnt, 6) - self.assertEqual(archive_cnt, 2) - self.assertIn("com.politedroid_4.apk", repo) - self.assertIn("com.politedroid_5.apk", repo) - self.assertIn("com.politedroid_6.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_2.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_3.apk", repo) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_4.apk", repo) - self.assertNotIn("urzip-badsig.apk", repo) - self.assertIn("com.politedroid_3.apk", archive) - self.assertIn("org.bitbucket.tickytacky.mirrormirror_1.apk", archive) - self.assertNotIn("urzip-badsig.apk", archive) - self.assertTrue(Path("repo/com.politedroid_4.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_5.apk").is_file()) - self.assertTrue(Path("repo/com.politedroid_6.apk").is_file()) - self.assertTrue( - Path("repo/org.bitbucket.tickytacky.mirrormirror_2.apk").is_file() - ) - self.assertTrue( - Path("repo/org.bitbucket.tickytacky.mirrormirror_3.apk").is_file() - ) - self.assertTrue( - Path("repo/org.bitbucket.tickytacky.mirrormirror_4.apk").is_file() - ) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertTrue( 
- Path("archive/org.bitbucket.tickytacky.mirrormirror_1.apk").is_file() - ) - self.assertTrue(Path("archive/urzip-badsig.apk").is_file()) - - def test_rename_apks_with_fdroid_update_rename_apks_opt_nosign_opt_for_speed(self): - self.fdroid_init_with_prebuilt_keystore() - self.update_yaml( - common.CONFIG_FILE, - { - "keydname": "CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=California, C=US" - }, - ) - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/info.guardianproject.urzip.yml", "metadata") - shutil.copy( - FILES / "urzip.apk", - "repo/asdfiuhk urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234 ö.apk", - ) - self.assert_run( - self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] - ) - self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) - index_xml = Path("repo/index.xml").read_text() - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) - self.assertIn("info.guardianproject.urzip_100.apk", index_xml) - - shutil.copy(FILES / "urzip-release.apk", "repo") - self.assert_run( - self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] - ) - self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) - self.assertTrue( - Path("repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() - ) - index_xml = Path("repo/index.xml").read_text() - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) - self.assertIn("info.guardianproject.urzip_100.apk", index_xml) - self.assertIn("info.guardianproject.urzip_100_b4964fd.apk", index_v1_json) - self.assertNotIn("info.guardianproject.urzip_100_b4964fd.apk", index_xml) - - shutil.copy(FILES / "urzip-release.apk", "repo") - self.assert_run( - self.fdroid_cmd + ["update", "--rename-apks", "--pretty", "--nosign"] - ) - self.assertTrue(Path("repo/info.guardianproject.urzip_100.apk").is_file()) - self.assertTrue( - Path("repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() - ) - self.assertTrue( - Path("duplicates/repo/info.guardianproject.urzip_100_b4964fd.apk").is_file() - ) - index_xml = Path("repo/index.xml").read_text() - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertIn("info.guardianproject.urzip_100.apk", index_v1_json) - self.assertIn("info.guardianproject.urzip_100.apk", index_xml) - self.assertIn("info.guardianproject.urzip_100_b4964fd.apk", index_v1_json) - self.assertNotIn("info.guardianproject.urzip_100_b4964fd.apk", index_xml) - - def test_for_added_date_being_set_correctly_for_repo_and_archive(self): - self.fdroid_init_with_prebuilt_keystore() - self.update_yaml(common.CONFIG_FILE, {"archive_older": 3}) - Path("metadata").mkdir() - Path("archive").mkdir() - shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") - shutil.copy(FILES / "repo/index-v2.json", "repo") - shutil.copy(FILES / "repo/com.politedroid_5.apk", "archive") - shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") - - # TODO: the timestamp of the oldest apk in the file should be used, even - # if that doesn't exist anymore - self.update_yaml("metadata/com.politedroid.yml", {"ArchivePolicy": 1}) - - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - timestamp = int(datetime(2017, 6, 23, tzinfo=timezone.utc).timestamp()) * 1000 - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertIn(f'"added": {timestamp}', index_v1_json) - # the archive will have the added timestamp for the app and for the apk, - # 
both need to be there - with open("archive/index-v1.json") as f: - count = sum(1 for line in f if f'"added": {timestamp}' in line) - self.assertEqual(count, 2) - - def test_whatsnew_from_fastlane_without_cvc_set(self): - self.fdroid_init_with_prebuilt_keystore() - Path("metadata/com.politedroid/en-US/changelogs").mkdir(parents=True) - shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") - shutil.copy(FILES / "metadata/com.politedroid.yml", "metadata") - self.remove_lines("metadata/com.politedroid.yml", ["CurrentVersion:"]) - Path("metadata/com.politedroid/en-US/changelogs/6.txt").write_text( - "whatsnew test" - ) - self.assert_run(self.fdroid_cmd + ["update", "--pretty", "--nosign"]) - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertIn("whatsnew test", index_v1_json) - - def test_metadata_checks(self): - Path("repo").mkdir() - shutil.copy(FILES / "urzip.apk", "repo") - # this should fail because there is no metadata - self.assert_run_fail(self.fdroid_cmd + ["build"]) - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/org.smssecure.smssecure.yml", "metadata") - self.assert_run(self.fdroid_cmd + ["readmeta"]) - - def test_ensure_commands_that_dont_need_the_jdk_work_without_a_jdk_configured(self): - Path("repo").mkdir() - Path("metadata").mkdir() - self.update_yaml( - "metadata/fake.yml", - { - "License": "GPL-2.0-only", - "Summary": "Yup still fake", - "Categories": ["Internet"], - "Description": "this is fake", - }, - ) - # fake that no JDKs are available - self.update_yaml( - common.CONFIG_FILE, - {"categories": ["Internet"], "java_paths": {}}, - replace=True, - ) - local_copy_dir = self.testdir / "local_copy_dir/fdroid" - (local_copy_dir / "repo").mkdir(parents=True) - self.update_yaml( - common.CONFIG_FILE, {"local_copy_dir": str(local_copy_dir.resolve())} - ) - - subprocess.run(self.fdroid_cmd + ["checkupdates", "--allow-dirty"]) - if shutil.which("gpg"): - self.assert_run(self.fdroid_cmd + ["gpgsign"]) - self.assert_run(self.fdroid_cmd + ["lint"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assert_run(self.fdroid_cmd + ["rewritemeta", "fake"]) - self.assert_run(self.fdroid_cmd + ["deploy"]) - self.assert_run(self.fdroid_cmd + ["scanner"]) - - # run these to get their output, but the are not setup, so don't fail - subprocess.run(self.fdroid_cmd + ["build"]) - subprocess.run(self.fdroid_cmd + ["import"]) - subprocess.run(self.fdroid_cmd + ["install", "-n"]) - - def test_config_checks_of_local_copy_dir(self): - self.assert_run(self.fdroid_cmd + ["init"]) - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - local_copy_dir = (self.testdir / "local_copy_dir/fdroid").resolve() - local_copy_dir.mkdir(parents=True) - self.assert_run( - self.fdroid_cmd + ["deploy", "--local-copy-dir", local_copy_dir] - ) - self.assert_run( - self.fdroid_cmd - + ["deploy", "--local-copy-dir", local_copy_dir, "--verbose"] - ) - - # this should fail because thisisnotanabsolutepath is not an absolute path - self.assert_run_fail( - self.fdroid_cmd + ["deploy", "--local-copy-dir", "thisisnotanabsolutepath"] - ) - # this should fail because the path doesn't end with "fdroid" - self.assert_run_fail( - self.fdroid_cmd - + [ - "deploy", - "--local-copy-dir", - "/tmp/IReallyDoubtThisPathExistsasdfasdf", # nosec B108 - ] - ) - # this should fail because the dirname path does not exist - self.assert_run_fail( - self.fdroid_cmd - + [ - "deploy", - "--local-copy-dir", - 
"/tmp/IReallyDoubtThisPathExistsasdfasdf/fdroid", # nosec B108 - ] - ) - - def test_setup_a_new_repo_from_scratch_using_android_home_and_do_a_local_sync(self): - self.fdroid_init_with_prebuilt_keystore() - self.copy_apks_into_repo() - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assertIn(" 0) - - def test_check_duplicate_files_are_properly_handled_by_fdroid_update(self): - self.fdroid_init_with_prebuilt_keystore() - Path("metadata").mkdir() - shutil.copy(FILES / "metadata/obb.mainpatch.current.yml", "metadata") - shutil.copy(FILES / "repo/obb.mainpatch.current_1619.apk", "repo") - shutil.copy( - FILES / "repo/obb.mainpatch.current_1619_another-release-key.apk", "repo" - ) - self.assert_run(self.fdroid_cmd + ["update", "--pretty"]) - index_xml = Path("repo/index.xml").read_text() - index_v1_json = Path("repo/index-v1.json").read_text() - self.assertNotIn( - "obb.mainpatch.current_1619_another-release-key.apk", index_xml - ) - self.assertIn("obb.mainpatch.current_1619.apk", index_xml) - self.assertIn("obb.mainpatch.current_1619.apk", index_v1_json) - self.assertIn( - "obb.mainpatch.current_1619_another-release-key.apk", index_v1_json - ) - # die if there are exact duplicates - shutil.copy(FILES / "repo/obb.mainpatch.current_1619.apk", "repo/duplicate.apk") - self.assert_run_fail(self.fdroid_cmd + ["update"]) - - def test_setup_new_repo_from_scratch_using_android_home_env_var_putting_apks_in_repo_first( - self, - ): - Path("repo").mkdir() - self.copy_apks_into_repo() - self.fdroid_init_with_prebuilt_keystore() - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assertIn(" 0) - - def test_setup_a_new_repo_manually_and_generate_a_keystore(self): - self.assertFalse(Path("keystore.p12").exists()) - # this should fail because this repo has no keystore - self.assert_run_fail(self.fdroid_cmd + ["update"]) - self.assert_run(self.fdroid_cmd + ["update", "--create-key"]) - self.assertTrue(Path("keystore.p12").is_file()) - self.copy_apks_into_repo() - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assertIn(" 0) - - def test_setup_a_new_repo_from_scratch_generate_a_keystore_then_add_apk_and_update( - self, - ): - self.assert_run(self.fdroid_cmd + ["init", "--keystore", "keystore.p12"]) - self.assertTrue(Path("keystore.p12").is_file()) - self.copy_apks_into_repo() - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata", "--verbose"]) - self.assert_run(self.fdroid_cmd + ["readmeta"]) - self.assertIn(" 0) - self.assertIn(" 0) - - # now set fake repo_keyalias - self.update_yaml(common.CONFIG_FILE, {"repo_keyalias": "fake"}) - # this should fail because this repo has a bad repo_keyalias - self.assert_run_fail(self.fdroid_cmd + ["update"]) - - # this should fail because a keystore is already there - self.assert_run_fail(self.fdroid_cmd + ["update", "--create-key"]) - - # now actually create the key with the existing settings - Path("keystore.jks").unlink() - self.assert_run(self.fdroid_cmd + ["update", "--create-key"]) - self.assertTrue(Path("keystore.jks").is_file()) - - def test_setup_a_new_repo_from_scratch_using_android_home_env_var_with_git_mirror( - self, - ): - server_git_mirror = self.testdir / "server_git_mirror" - server_git_mirror.mkdir() - self.assert_run( - ["git", "-C", server_git_mirror, "init", "--initial-branch", 
"master"] - ) - self.assert_run( - [ - "git", - "-C", - server_git_mirror, - "config", - "receive.denyCurrentBranch", - "updateInstead", - ] - ) - - self.fdroid_init_with_prebuilt_keystore() - self.update_yaml( - common.CONFIG_FILE, - {"archive_older": 3, "servergitmirrors": str(server_git_mirror)}, - ) - for f in FILES.glob("repo/com.politedroid_[345].apk"): - shutil.copy(f, "repo") - self.assert_run(self.fdroid_cmd + ["update", "--create-metadata"]) - self.assert_run(self.fdroid_cmd + ["deploy"]) - git_mirror = Path("git-mirror") - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_3.apk").is_file()) - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file()) - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file()) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_3.apk").is_file() - ) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file() - ) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file() - ) - (git_mirror / ".git/test-stamp").write_text(str(datetime.now())) - - # add one more APK to trigger archiving - shutil.copy(FILES / "repo/com.politedroid_6.apk", "repo") - self.assert_run(self.fdroid_cmd + ["update"]) - self.assert_run(self.fdroid_cmd + ["deploy"]) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertFalse((git_mirror / "fdroid/archive/com.politedroid_3.apk").exists()) - self.assertFalse( - (server_git_mirror / "fdroid/archive/com.politedroid_3.apk").exists() - ) - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file()) - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file()) - self.assertTrue((git_mirror / "fdroid/repo/com.politedroid_6.apk").is_file()) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_4.apk").is_file() - ) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_5.apk").is_file() - ) - self.assertTrue( - (server_git_mirror / "fdroid/repo/com.politedroid_6.apk").is_file() - ) - before = sum( - f.stat().st_size for f in (git_mirror / ".git").glob("**/*") if f.is_file() - ) - - self.update_yaml(common.CONFIG_FILE, {"git_mirror_size_limit": "60kb"}) - self.assert_run(self.fdroid_cmd + ["update"]) - self.assert_run(self.fdroid_cmd + ["deploy"]) - self.assertTrue(Path("archive/com.politedroid_3.apk").is_file()) - self.assertFalse( - (server_git_mirror / "fdroid/archive/com.politedroid_3.apk").exists() - ) - after = sum( - f.stat().st_size for f in (git_mirror / ".git").glob("**/*") if f.is_file() - ) - self.assertFalse((git_mirror / ".git/test-stamp").exists()) - self.assert_run(["git", "-C", git_mirror, "gc"]) - self.assert_run(["git", "-C", server_git_mirror, "gc"]) - self.assertGreater(before, after) - - def test_sign_binary_repo_in_offline_box_then_publishing_from_online_box(self): - offline_root = self.testdir / "offline_root" - offline_root.mkdir() - local_copy_dir = self.testdir / "local_copy_dir/fdroid" - local_copy_dir.mkdir(parents=True) - online_root = self.testdir / "online_root" - online_root.mkdir() - server_web_root = self.testdir / "server_web_root/fdroid" - server_web_root.mkdir(parents=True) - - # create offline binary transparency log - (offline_root / "binary_transparency").mkdir() - os.chdir(offline_root / "binary_transparency") - self.assert_run(["git", "init", "--initial-branch", "master"]) - - # fake git remote server for binary transparency log - binary_transparency_remote = self.testdir / 
"binary_transparency_remote" - binary_transparency_remote.mkdir() - - # fake git remote server for repo mirror - server_git_mirror = self.testdir / "server_git_mirror" - server_git_mirror.mkdir() - os.chdir(server_git_mirror) - self.assert_run(["git", "init", "--initial-branch", "master"]) - self.assert_run(["git", "config", "receive.denyCurrentBranch", "updateInstead"]) - - os.chdir(offline_root) - self.fdroid_init_with_prebuilt_keystore() - shutil.copytree(FILES / "repo", "repo", dirs_exist_ok=True) - shutil.copytree(FILES / "metadata", "metadata") - Path("unsigned").mkdir() - shutil.copy(FILES / "urzip-release-unsigned.apk", "unsigned") - self.update_yaml( - common.CONFIG_FILE, - { - "archive_older": 3, - "mirrors": [ - "http://foo.bar/fdroid", - "http://asdflkdsfjafdsdfhkjh.onion/fdroid", - ], - "servergitmirrors": str(server_git_mirror), - "local_copy_dir": str(local_copy_dir), - }, - ) - self.assert_run(self.fdroid_cmd + ["update", "--pretty"]) - index_xml = Path("repo/index.xml").read_text() - self.assertIn("", index_xml) - mirror_cnt = sum(1 for line in index_xml.splitlines() if "" in line) - self.assertEqual(mirror_cnt, 2) - - archive_xml = Path("archive/index.xml").read_text() - self.assertIn("/fdroid/archive", archive_xml) - mirror_cnt = sum(1 for line in archive_xml.splitlines() if "" in line) - self.assertEqual(mirror_cnt, 2) - - os.chdir("binary_transparency") - proc = self.assert_run( - ["git", "rev-list", "--count", "HEAD"], capture_output=True - ) - self.assertEqual(int(proc.stdout), 1) - os.chdir(offline_root) - self.assert_run(self.fdroid_cmd + ["deploy", "--verbose"]) - self.assertTrue( - Path(local_copy_dir / "unsigned/urzip-release-unsigned.apk").is_file() - ) - self.assertIn( - "', - } - - anywarns = False - for warn in fdroidserver.lint.check_regexes(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - def test_source_urls(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - - app = { - 'Name': 'My App', - 'Summary': 'just a placeholder', - 'Description': 'This app does all sorts of useful stuff', - } - good_urls = [ - 'https://github.com/Matteljay/mastermindy-android', - 'https://gitlab.com/origin/master', - 'https://gitlab.com/group/subgroup/masterthing', - 'https://raw.githubusercontent.com/Seva-coder/Finder/HEAD/ChangeLog.txt', - 'https://github.com/scoutant/blokish/blob/HEAD/README.md#changelog', - 'https://git.ieval.ro/?p=fonbot.git;a=blob;f=Changes;hb=HEAD', - 'https://htmlpreview.github.io/?https://github.com/YasuakiHonda/Maxima-on-Android-AS/blob/HEAD/app/src/main/assets/About_MoA/index.html', - '', - ] - - anywarns = False - for url in good_urls: - app['SourceCode'] = url - for warn in fdroidserver.lint.check_regexes(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - bad_urls = [ - 'github.com/my/proj', - 'http://github.com/not/secure', - 'https://github.com/foo/bar.git', - 'https://gitlab.com/group/subgroup/project.git', - 'https://raw.githubusercontent.com/Seva-coder/Finder/master/ChangeLog.txt', - 'https://github.com/scoutant/blokish/blob/master/README.md#changelog', - 'http://htmlpreview.github.io/?https://github.com/my/project/blob/HEAD/index.html', - 'http://fdroid.gitlab.io/fdroid-website', - ] - logging.debug('bad urls:') - for url in bad_urls: - anywarns = False - app['SourceCode'] = url - for warn in fdroidserver.lint.check_regexes(app): - anywarns = True - logging.debug(warn) - 
self.assertTrue(anywarns, url + " does not fail lint!") - - def test_check_app_field_types(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - - app = fdroidserver.metadata.App() - app.id = 'fake.app' - app.Name = 'Bad App' - app.Summary = 'We pwn you' - app.Description = 'These are some back' - - fields = { - 'Categories': { - 'good': [ - ['Sports & Health'], - ['Multimedia', 'Graphics'], - ], - 'bad': [ - 'Science & Education', - 'Multimedia,Graphics', - ], - }, - 'WebSite': { - 'good': [ - 'https://homepage.com', - ], - 'bad': [ - [], - [ - 'nope', - ], - 29, - ], - }, - } - - for field, values in fields.items(): - for bad in values['bad']: - anywarns = False - app[field] = bad - for warn in fdroidserver.lint.check_app_field_types(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - for good in values['good']: - anywarns = False - app[field] = good - for warn in fdroidserver.lint.check_app_field_types(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - def test_check_vercode_operation(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - - app = fdroidserver.metadata.App() - app.Name = 'Bad App' - app.Summary = 'We pwn you' - app.Description = 'These are some back' - - good_fields = [ - '6%c', - '%c - 1', - '%c + 10', - '%c*10', - '%c*10 + 3', - '%c*10 + 8', - '%c + 2 ', - '%c + 3', - '%c + 7', - ] - bad_fields = [ - 'open("/etc/passwd")', - '%C + 1', - '%%c * 123', - '123 + %%', - '%c % 7', - ] - - anywarns = False - for good in good_fields: - app.VercodeOperation = [good] - for warn in fdroidserver.lint.check_vercode_operation(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - for bad in bad_fields: - anywarns = False - app.VercodeOperation = [bad] - for warn in fdroidserver.lint.check_vercode_operation(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - def test_check_license_tag_no_custom_pass(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - - app = fdroidserver.metadata.App() - app.License = "GPL-3.0-or-later" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - def test_check_license_tag_no_custom_fail(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - - app = fdroidserver.metadata.App() - app.License = "Adobe-2006" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - def test_check_license_tag_with_custom_pass(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - config['lint_licenses'] = ['fancy-license', 'GPL-3.0-or-later'] - - app = fdroidserver.metadata.App() - app.License = "fancy-license" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - def test_check_license_tag_with_custom_fail(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config 
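# A similar sketch for the license checks being set up here: once a custom
# lint_licenses list is present in the config, any other license tag is
# expected to produce a warning (printing the warning is only illustrative).
import fdroidserver.common
import fdroidserver.lint
import fdroidserver.metadata

config = dict()
fdroidserver.common.fill_config_defaults(config)
fdroidserver.common.config = config
fdroidserver.lint.config = config
config['lint_licenses'] = ['fancy-license', 'GPL-3.0-or-later']

app = fdroidserver.metadata.App()
app.License = 'Apache-2.0'  # not in lint_licenses, so check_license_tag warns
for warn in fdroidserver.lint.check_license_tag(app):
    print(warn)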
- fdroidserver.lint.config = config - config['lint_licenses'] = ['fancy-license', 'GPL-3.0-or-later'] - - app = fdroidserver.metadata.App() - app.License = "Apache-2.0" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - def test_check_license_tag_with_custom_empty(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - config['lint_licenses'] = [] - - app = fdroidserver.metadata.App() - app.License = "Apache-2.0" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - def test_check_license_tag_disabled(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.lint.config = config - config['lint_licenses'] = None - - app = fdroidserver.metadata.App() - app.License = "Apache-2.0" - - anywarns = False - for warn in fdroidserver.lint.check_license_tag(app): - anywarns = True - logging.debug(warn) - self.assertFalse(anywarns) - - def test_check_categories_in_config(self): - fdroidserver.lint.config = { - fdroidserver.common.CATEGORIES_CONFIG_NAME: ['InConfig'] - } - fdroidserver.lint.load_categories_config() - app = fdroidserver.metadata.App({'Categories': ['InConfig']}) - self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) - - def test_check_categories_not_in_config(self): - fdroidserver.lint.config = dict() - fdroidserver.lint.load_categories_config() - app = fdroidserver.metadata.App({'Categories': ['NotInConfig']}) - self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) - - def test_check_categories_empty_is_error(self): - fdroidserver.lint.config = {fdroidserver.common.CATEGORIES_CONFIG_NAME: []} - fdroidserver.lint.load_categories_config() - app = fdroidserver.metadata.App({'Categories': ['something']}) - self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) - - def test_check_categories_old_hardcoded_not_defined(self): - fdroidserver.lint.config = { - fdroidserver.common.CATEGORIES_CONFIG_NAME: ['foo', 'bar'] - } - fdroidserver.lint.load_categories_config() - app = fdroidserver.metadata.App({'Categories': ['Writing']}) - self.assertEqual(1, len(list(fdroidserver.lint.check_categories(app)))) - - def test_check_categories_from_config_yml(self): - """In config.yml, categories is a list.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file('categories: [foo, bar]\n') - fdroidserver.lint.config = fdroidserver.common.read_config() - fdroidserver.lint.load_categories_config() - self.assertEqual(fdroidserver.lint.CATEGORIES_KEYS, ['foo', 'bar']) - app = fdroidserver.metadata.App({'Categories': ['bar']}) - self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) - - def test_check_categories_from_config_categories_yml(self): - """In config/categories.yml, categories is a localized STRINGMAP dict.""" - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('{foo: {name: foo}, bar: {name: bar}}') - fdroidserver.lint.config = fdroidserver.common.read_config() - fdroidserver.lint.load_categories_config() - self.assertEqual(fdroidserver.lint.CATEGORIES_KEYS, ['foo', 'bar']) - app = fdroidserver.metadata.App({'Categories': ['bar']}) - self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app)))) - - def 
test_lint_config_basic_mirrors_yml(self): - os.chdir(self.testdir) - with Path('mirrors.yml').open('w') as fp: - config_dump([{'url': 'https://example.com/fdroid/repo'}], fp) - self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml')) - - def test_lint_config_mirrors_yml_kenya_countryCode(self): - os.chdir(self.testdir) - with Path('mirrors.yml').open('w') as fp: - config_dump( - [{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'KE'}], fp - ) - self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml')) - - def test_lint_config_mirrors_yml_invalid_countryCode(self): - """WV is "indeterminately reserved" so it should never be used.""" - os.chdir(self.testdir) - with Path('mirrors.yml').open('w') as fp: - config_dump( - [{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'WV'}], fp - ) - self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) - - def test_lint_config_mirrors_yml_alpha3_countryCode(self): - """Only ISO 3166-1 alpha 2 are supported""" - os.chdir(self.testdir) - with Path('mirrors.yml').open('w') as fp: - config_dump( - [{'url': 'https://de.com/fdroid/repo', 'countryCode': 'DEU'}], fp - ) - self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) - - def test_lint_config_mirrors_yml_one_invalid_countryCode(self): - """WV is "indeterminately reserved" so it should never be used.""" - os.chdir(self.testdir) - with Path('mirrors.yml').open('w') as fp: - config_dump( - [ - {'url': 'https://bar.com/fdroid/repo', 'countryCode': 'BA'}, - {'url': 'https://foo.com/fdroid/repo', 'countryCode': 'FO'}, - {'url': 'https://wv.com/fdroid/repo', 'countryCode': 'WV'}, - ], - fp, - ) - self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml')) - - def test_lint_config_bad_mirrors_yml_dict(self): - os.chdir(self.testdir) - Path('mirrors.yml').write_text('baz: [foo, bar]\n') - with self.assertRaises(TypeError): - fdroidserver.lint.lint_config('mirrors.yml') - - def test_lint_config_bad_mirrors_yml_float(self): - os.chdir(self.testdir) - Path('mirrors.yml').write_text('1.0\n') - with self.assertRaises(TypeError): - fdroidserver.lint.lint_config('mirrors.yml') - - def test_lint_config_bad_mirrors_yml_int(self): - os.chdir(self.testdir) - Path('mirrors.yml').write_text('1\n') - with self.assertRaises(TypeError): - fdroidserver.lint.lint_config('mirrors.yml') - - def test_lint_config_bad_mirrors_yml_str(self): - os.chdir(self.testdir) - Path('mirrors.yml').write_text('foo\n') - with self.assertRaises(TypeError): - fdroidserver.lint.lint_config('mirrors.yml') - - def test_lint_invalid_config_keys(self): - os.chdir(self.testdir) - os.mkdir('config') - config_yml = fdroidserver.common.CONFIG_FILE - with open(f'config/{config_yml}', 'w', encoding='utf-8') as fp: - fp.write('repo:\n invalid_key: test\n') - self.assertFalse(fdroidserver.lint.lint_config(f'config/{config_yml}')) - - def test_lint_invalid_localized_config_keys(self): - os.chdir(self.testdir) - Path('config/en').mkdir(parents=True) - Path('config/en/antiFeatures.yml').write_text('NonFreeNet:\n icon: test.png\n') - self.assertFalse(fdroidserver.lint.lint_config('config/en/antiFeatures.yml')) - - def test_check_certificate_pinned_binaries_empty(self): - fdroidserver.common.config = {} - app = fdroidserver.metadata.App() - app.AllowedAPKSigningKeys = [ - 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' - ] - self.assertEqual( - [], - list(fdroidserver.lint.check_certificate_pinned_binaries(app)), - "when the config is empty, any signing key should be allowed", - ) - - def 
test_lint_known_debug_keys_no_match(self): - fdroidserver.common.config = { - "apk_signing_key_block_list": "a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc" - } - app = fdroidserver.metadata.App() - app.AllowedAPKSigningKeys = [ - '2fd4fd5f54babba4bcb21237809bb653361d0d2583c80964ec89b28a26e9539e' - ] - self.assertEqual( - [], - list(fdroidserver.lint.check_certificate_pinned_binaries(app)), - "A signing key that does not match one in the config should be allowed", - ) - - def test_lint_known_debug_keys(self): - fdroidserver.common.config = { - 'apk_signing_key_block_list': 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' - } - app = fdroidserver.metadata.App() - app.AllowedAPKSigningKeys = [ - 'a40da80a59d170caa950cf15c18c454d47a39b26989d8b640ecd745ba71bf5dc' - ] - for warn in fdroidserver.lint.check_certificate_pinned_binaries(app): - anywarns = True - logging.debug(warn) - self.assertTrue(anywarns) - - -class LintAntiFeaturesTest(unittest.TestCase): - def setUp(self): - os.chdir(basedir) - fdroidserver.common.config = dict() - fdroidserver.lint.ANTIFEATURES_KEYS = None - fdroidserver.lint.load_antiFeatures_config() - - def test_check_antiFeatures_empty(self): - app = fdroidserver.metadata.App() - self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) - - def test_check_antiFeatures_empty_AntiFeatures(self): - app = fdroidserver.metadata.App() - app['AntiFeatures'] = [] - self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) - - def test_check_antiFeatures(self): - app = fdroidserver.metadata.App() - app['AntiFeatures'] = ['Ads', 'Tracking'] - self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) - - def test_check_antiFeatures_fails_one(self): - app = fdroidserver.metadata.App() - app['AntiFeatures'] = ['Ad'] - self.assertEqual(1, len(list(fdroidserver.lint.check_antiFeatures(app)))) - - def test_check_antiFeatures_fails_many(self): - app = fdroidserver.metadata.App() - app['AntiFeatures'] = ['Adss', 'Tracker', 'NoSourceSince', 'FAKE', 'NonFree'] - self.assertEqual(4, len(list(fdroidserver.lint.check_antiFeatures(app)))) - - def test_check_antiFeatures_build_empty(self): - app = fdroidserver.metadata.App() - app['Builds'] = [{'antifeatures': []}] - self.assertEqual([], list(fdroidserver.lint.check_antiFeatures(app))) - - def test_check_antiFeatures_build(self): - app = fdroidserver.metadata.App() - app['Builds'] = [{'antifeatures': ['Tracking']}] - self.assertEqual(0, len(list(fdroidserver.lint.check_antiFeatures(app)))) - - def test_check_antiFeatures_build_fail(self): - app = fdroidserver.metadata.App() - app['Builds'] = [{'antifeatures': ['Ads', 'Tracker']}] - self.assertEqual(1, len(list(fdroidserver.lint.check_antiFeatures(app)))) - - -class ConfigYmlTest(LintTest): - """Test data formats used in config.yml. - - lint.py uses print() and not logging so hacks are used to control - the output when running in the test runner. 
- - """ - - def setUp(self): - super().setUp() - self.config_yml = Path(self.testdir) / fdroidserver.common.CONFIG_FILE - - def test_config_yml_int(self): - self.config_yml.write_text('repo_maxage: 1\n') - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_int_bad(self): - self.config_yml.write_text('repo_maxage: "1"\n') - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_str(self): - self.config_yml.write_text('sdk_path: /opt/android-sdk\n') - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_str_list(self): - self.config_yml.write_text('serverwebroot: [server1, server2]\n') - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_str_list_of_dicts(self): - self.config_yml.write_text( - textwrap.dedent( - """\ - serverwebroot: - - url: 'me@b.az:/srv/fdroid' - index_only: true - """ - ) - ) - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_str_list_of_dicts_env(self): - """serverwebroot can be str, list of str, or list of dicts.""" - self.config_yml.write_text('serverwebroot: {env: ANDROID_HOME}\n') - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_str_env(self): - self.config_yml.write_text('sdk_path: {env: ANDROID_HOME}\n') - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_str_bad(self): - self.config_yml.write_text('sdk_path: 1.0\n') - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_bool(self): - self.config_yml.write_text("deploy_process_logs: true\n") - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_bool_bad(self): - self.config_yml.write_text('deploy_process_logs: 2342fe23\n') - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - def test_config_yml_dict(self): - self.config_yml.write_text("keyaliases: {com.example: '@com.foo'}\n") - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_dict_bad(self): - self.config_yml.write_text('keyaliases: 2342fe23\n') - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_bad_key_name(self): - self.config_yml.write_text('keyalias: 2342fe23\n') - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_bad_value_for_all_keys(self): - """Check all config keys with a bad value.""" - for key in fdroidserver.lint.check_config_keys: - if key in fdroidserver.lint.bool_keys: - value = 'foobar' - else: - value = 'false' - self.config_yml.write_text(f'{key}: {value}\n') - self.assertFalse( - fdroidserver.lint.lint_config(self.config_yml), - f'{key} should fail on value of "{value}"', - ) - - def test_config_yml_keyaliases(self): - self.config_yml.write_text( - textwrap.dedent( - """\ - keyaliases: - com.example: myalias - com.foo: '@com.example' - """ - ) - ) - self.assertTrue(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def 
test_config_yml_keyaliases_bad_str(self): - """The keyaliases: value is a dict not a str.""" - self.config_yml.write_text("keyaliases: '@com.example'\n") - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) - - @mock.patch('builtins.print', mock.Mock()) # hide error message - def test_config_yml_keyaliases_bad_list(self): - """The keyaliases: value is a dict not a list.""" - self.config_yml.write_text( - textwrap.dedent( - """\ - keyaliases: - - com.example: myalias - """ - ) - ) - self.assertFalse(fdroidserver.lint.lint_config(self.config_yml)) diff --git a/tests/test_main.py b/tests/test_main.py deleted file mode 100755 index 68984088..00000000 --- a/tests/test_main.py +++ /dev/null @@ -1,247 +0,0 @@ -#!/usr/bin/env python3 - -import os -import pkgutil -import tempfile -import textwrap -import unittest -from unittest import mock - -import fdroidserver.__main__ - -from .shared_test_code import TmpCwd, TmpPyPath - - -class MainTest(unittest.TestCase): - '''this tests fdroid.py''' - - def test_COMMANDS_check(self): - """make sure the built in sub-command defs didn't change unintentionally""" - self.assertListEqual( - [x for x in fdroidserver.__main__.COMMANDS], - [ - 'build', - 'init', - 'publish', - 'gpgsign', - 'update', - 'deploy', - 'verify', - 'checkupdates', - 'import', - 'install', - 'readmeta', - 'rewritemeta', - 'lint', - 'scanner', - 'signindex', - 'btlog', - 'signatures', - 'nightly', - 'mirror', - ], - ) - - def test_call_init(self): - co = mock.Mock() - with mock.patch('sys.argv', ['', 'init', '-h']): - with mock.patch('fdroidserver.init.main', co): - with mock.patch('sys.exit') as exit_mock: - fdroidserver.__main__.main() - # note: this is sloppy, if `init` changes - # this might need changing too - exit_mock.assert_called_once_with(0) - co.assert_called_once_with() - - def test_call_deploy(self): - co = mock.Mock() - with mock.patch('sys.argv', ['', 'deploy', '-h']): - with mock.patch('fdroidserver.deploy.main', co): - with mock.patch('sys.exit') as exit_mock: - fdroidserver.__main__.main() - # note: this is sloppy, if `deploy` changes - # this might need changing too - exit_mock.assert_called_once_with(0) - co.assert_called_once_with() - - def test_find_plugins(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy1.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - main = lambda: 'all good'""" - ) - ) - with TmpPyPath(tmpdir): - plugins = fdroidserver.__main__.find_plugins() - self.assertIn('testy1', plugins.keys()) - self.assertEqual(plugins['testy1']['summary'], 'ttt') - self.assertEqual( - __import__( - plugins['testy1']['name'], None, None, ['testy1'] - ).main(), - 'all good', - ) - - def test_main_plugin_lambda(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy2.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - main = lambda: print('all good')""" - ) - ) - with TmpPyPath(tmpdir): - with mock.patch('sys.argv', ['', 'testy2']): - with mock.patch('sys.exit') as exit_mock: - fdroidserver.__main__.main() - exit_mock.assert_called_once_with(0) - - def test_main_plugin_def(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy3.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - def main(): - print('all good')""" - ) - ) - with TmpPyPath(tmpdir): - with mock.patch('sys.argv', ['', 'testy3']): - with mock.patch('sys.exit') as exit_mock: - 
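# A minimal sketch of the plugin layout these tests create on the fly: any
# importable module named fdroid_<command>.py that defines both fdroid_summary
# and main() is picked up by find_plugins() and dispatched as `fdroid <command>`.
# The command name "hello" is made up here for illustration; the tests use
# temporary fdroid_testy*.py files instead.
#
# contents of a hypothetical fdroid_hello.py:
fdroid_summary = "say hello"


def main():
    print("hello from an fdroid plugin")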
fdroidserver.__main__.main() - exit_mock.assert_called_once_with(0) - - def test_main_broken_plugin(self): - """making sure broken plugins get their exceptions through""" - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy4.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - def main(): - raise Exception("this plugin is broken")""" - ) - ) - with TmpPyPath(tmpdir): - with mock.patch('sys.argv', ['', 'testy4']): - with self.assertRaisesRegex(Exception, "this plugin is broken"): - fdroidserver.__main__.main() - - def test_main_malicious_plugin(self): - """The purpose of this test is to make sure code in plugins - doesn't get executed unintentionally. - """ - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy5.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - raise Exception("this plugin is malicious") - def main(): - print("evil things")""" - ) - ) - with TmpPyPath(tmpdir): - with mock.patch('sys.argv', ['', 'lint']): - with mock.patch('sys.exit') as exit_mock: - fdroidserver.__main__.main() - # note: this is sloppy, if `lint` changes - # this might need changing too - exit_mock.assert_called_once_with(0) - - def test_main_prevent_plugin_override(self): - """making sure build-in subcommands cannot be overridden by plugins""" - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_signatures.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - def main(): - raise("plugin overrides don't get prevent!")""" - ) - ) - with TmpPyPath(tmpdir): - with mock.patch('sys.argv', ['', 'signatures']): - with mock.patch('sys.exit') as exit_mock: - fdroidserver.__main__.main() - # note: this is sloppy, if `signatures` changes - # this might need changing too - self.assertEqual(exit_mock.call_count, 2) - - def test_preparse_plugin_lookup_bad_name(self): - self.assertRaises( - ValueError, - fdroidserver.__main__.preparse_plugin, - "some.package", - "/non/existent/module/path", - ) - - def test_preparse_plugin_lookup_bad_path(self): - self.assertRaises( - ValueError, - fdroidserver.__main__.preparse_plugin, - "fake_module_name", - "/non/existent/module/path", - ) - - def test_preparse_plugin_lookup_summary_missing(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy6.py', 'w') as f: - f.write("main = lambda: print('all good')") - with TmpPyPath(tmpdir): - p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] - module_dir = p[0][0].path - module_name = p[0][1] - self.assertRaises( - NameError, - fdroidserver.__main__.preparse_plugin, - module_name, - module_dir, - ) - - def test_preparse_plugin_lookup_module_file(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with open('fdroid_testy7.py', 'w') as f: - f.write( - textwrap.dedent( - """\ - fdroid_summary = "ttt" - main = lambda: pritn('all good')""" - ) - ) - with TmpPyPath(tmpdir): - p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] - module_path = p[0][0].path - module_name = p[0][1] - d = fdroidserver.__main__.preparse_plugin(module_name, module_path) - self.assertDictEqual(d, {'name': 'fdroid_testy7', 'summary': 'ttt'}) - - def test_preparse_plugin_lookup_module_dir(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - os.mkdir(os.path.join(tmpdir, 'fdroid_testy8')) - with open('fdroid_testy8/__main__.py', 'w') as f: - f.write( - textwrap.dedent( - """\ 
- fdroid_summary = "ttt" - main = lambda: print('all good')""" - ) - ) - with open('fdroid_testy8/__init__.py', 'w') as f: - pass - with TmpPyPath(tmpdir): - p = [x for x in pkgutil.iter_modules() if x[1].startswith('fdroid_')] - module_path = p[0][0].path - module_name = p[0][1] - d = fdroidserver.__main__.preparse_plugin(module_name, module_path) - self.assertDictEqual(d, {'name': 'fdroid_testy8', 'summary': 'ttt'}) diff --git a/tests/test_metadata.py b/tests/test_metadata.py deleted file mode 100755 index 84040024..00000000 --- a/tests/test_metadata.py +++ /dev/null @@ -1,2427 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import io -import os -import random -import shutil -import tempfile -import textwrap -import unittest -from collections import OrderedDict -from pathlib import Path -from unittest import mock - -import ruamel.yaml - -import fdroidserver -from fdroidserver import metadata -from fdroidserver._yaml import yaml -from fdroidserver.common import DEFAULT_LOCALE -from fdroidserver.exception import MetaDataException - -from .shared_test_code import TmpCwd, mkdtemp - -basedir = Path(__file__).parent - - -def _get_mock_mf(s): - mf = io.StringIO(s) - mf.name = 'mock_filename.yaml' - return mf - - -class MetadataTest(unittest.TestCase): - '''fdroidserver/metadata.py''' - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - fdroidserver.metadata.warnings_action = 'error' - - def tearDown(self): - # auto-generated dirs by functions, not tests, so they are not always cleaned up - self._td.cleanup() - try: - os.rmdir("srclibs") - except OSError: - pass - try: - os.rmdir("tmp") - except OSError: - pass - - def test_fieldtypes_key_exist(self): - for k in fdroidserver.metadata.fieldtypes: - self.assertIn(k, fdroidserver.metadata.yaml_app_fields) - - def test_build_flagtypes_key_exist(self): - for k in fdroidserver.metadata.flagtypes: - self.assertIn(k, fdroidserver.metadata.build_flags) - - def test_FieldValidator_BitcoinAddress(self): - validator = None - for vali in fdroidserver.metadata.valuetypes: - if vali.name == 'Bitcoin address': - validator = vali - break - self.assertIsNotNone(validator, "could not find 'Bitcoin address' validator") - - # some valid addresses (P2PKH, P2SH, Bech32) - self.assertIsNone( - validator.check('1BrrrrErsrWetrTrnrrrrm4GFg7xJaNVN2', 'fake.app.id') - ) - self.assertIsNone( - validator.check('3JrrrrWrEZr3rNrrvrecrnyirrnqRhWNLy', 'fake.app.id') - ) - self.assertIsNone( - validator.check('bc1qar0srrr7xrkvr5lr43lrdnwrre5rgtrzrf5rrq', 'fake.app.id') - ) - - # some invalid addresses - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '21BvMrSYsrWrtrrlL5A10mlGFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '5Hrgr3ur5rGLrfKrrrrrrHSrqJrroGrrzrQrrrrrrLNrsrDrrrA', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '92rr46rUrgTrrromrVrirW6r1rrrdrerrdbJrrrhrCsYrrrrrrc', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'K1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'L1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'tb1qw5r8drrejxrrg4y5rrrrrraryrrrrwrkxrjrsx', - 'fake.app.id', - ) - - def 
test_FieldValidator_LitecoinAddress(self): - validator = None - for vali in fdroidserver.metadata.valuetypes: - if vali.name == 'Litecoin address': - validator = vali - break - self.assertIsNotNone(validator, "could not find 'Litecoin address' validator") - - # some valid addresses (L, M, 3, segwit) - self.assertIsNone( - validator.check('LgeGrrrrJAxyXprrPrrBrrX5Qrrrrrrrrd', 'fake.app.id') - ) - self.assertIsNone( - validator.check('MrrrrrrrJAxyXpanPtrrRAX5QHxvUJo8id', 'fake.app.id') - ) - self.assertIsNone(validator.check('3rereVr9rAryrranrrrrrAXrrHx', 'fake.app.id')) - self.assertIsNone( - validator.check( - 'ltc1q7euacwhn6ef99vcfa57mute92q572aqsc4c2j5', 'fake.app.id' - ) - ) - - # some invalid addresses (various special use/testnet addresses, invalid chars) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '21BvMrSYsrWrtrrrn5Au4l4GFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '5Hrgr3ur5rGLrfKrrrrrr1SrqJrroGrrzrQrrrrrrLNrsrDrrrA', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - '92rr46rUrgTrrromrVrirW6r1rrrdrerrdbJrrrhrCsYrrrrrrc', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'K1BvMrSYsrWrtrrrn5Au4m4GFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'L0000rSYsrWrtrrrn5Au4m4GFr7rrarrN2', - 'fake.app.id', - ) - self.assertRaises( - fdroidserver.exception.MetaDataException, - validator.check, - 'tb1qw5r8drrejxrrg4y5rrrrrraryrrrrwrkxrjrsx', - 'fake.app.id', - ) - - def test_valid_funding_yml_regex(self): - """Check the regex can find all the cases""" - with (basedir / 'funding-usernames.yaml').open() as fp: - data = yaml.load(fp) - - for k, entries in data.items(): - for entry in entries: - m = fdroidserver.metadata.VALID_USERNAME_REGEX.match(entry) - if k == 'custom': - pass - elif k == 'bad': - self.assertIsNone( - m, 'this is an invalid %s username: {%s}' % (k, entry) - ) - else: - self.assertIsNotNone( - m, 'this is a valid %s username: {%s}' % (k, entry) - ) - - @mock.patch('git.Repo', mock.Mock()) - @mock.patch('logging.error') - def test_read_metadata(self, logging_error): - """Read specified metadata files included in tests/, compare to stored output""" - - self.maxDiff = None - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.metadata.warnings_action = None - - apps = fdroidserver.metadata.read_metadata() - for appid in ( - 'app.with.special.build.params', - 'org.smssecure.smssecure', - 'org.adaway', - 'org.videolan.vlc', - 'com.politedroid', - ): - savepath = Path('metadata/dump') / (appid + '.yaml') - frommeta = dict(apps[appid]) - self.assertTrue(appid in apps) - with savepath.open('r') as f: - from_yaml = yaml.load(f) - self.assertEqual(frommeta, from_yaml) - # comment above assert and uncomment below to update test - # files when new metadata fields are added - # with savepath.open('w') as fp: - # yaml.default_flow_style = False - # yaml.register_class(metadata.Build) - # yaml.dump(frommeta, fp) - - # errors are printed when .yml overrides localized - logging_error.assert_called() - self.assertEqual(3, len(logging_error.call_args_list)) - - @mock.patch('git.Repo', mock.Mock()) - def test_metadata_overrides_dot_fdroid_yml(self): - """Fields in metadata files should override anything in .fdroid.yml.""" - app = 
metadata.parse_metadata('metadata/info.guardianproject.urzip.yml') - self.assertEqual(app['Summary'], '一个实用工具,获取已安装在您的设备上的应用的有关信息') - - def test_dot_fdroid_yml_works_without_git(self): - """Parsing should work if .fdroid.yml is present and it is not a git repo.""" - os.chdir(self.testdir) - yml = Path('metadata/test.yml') - yml.parent.mkdir() - with yml.open('w') as fp: - fp.write('Repo: https://example.com/not/git/or/anything') - fdroid_yml = Path('build/test/.fdroid.yml') - fdroid_yml.parent.mkdir(parents=True) - with fdroid_yml.open('w') as fp: - fp.write('OpenCollective: test') - metadata.parse_metadata(yml) # should not throw an exception - - @mock.patch('git.Repo', mock.Mock()) - @mock.patch('logging.error') - def test_rewrite_yaml_fakeotaupdate(self, logging_error): - with tempfile.TemporaryDirectory() as testdir: - testdir = Path(testdir) - fdroidserver.common.config = {'accepted_formats': ['yml']} - fdroidserver.metadata.warnings_action = None - - # rewrite metadata - allapps = fdroidserver.metadata.read_metadata() - for appid, app in allapps.items(): - if appid == 'fake.ota.update': - fdroidserver.metadata.write_metadata( - testdir / (appid + '.yml'), app - ) - - # assert rewrite result - self.maxDiff = None - file_name = 'fake.ota.update.yml' - self.assertEqual( - (testdir / file_name).read_text(encoding='utf-8'), - (Path('metadata-rewrite-yml') / file_name).read_text(encoding='utf-8'), - ) - - # errors are printed when .yml overrides localized - logging_error.assert_called() - self.assertEqual(3, len(logging_error.call_args_list)) - - @mock.patch('git.Repo', mock.Mock()) - def test_rewrite_yaml_fdroidclient(self): - with tempfile.TemporaryDirectory() as testdir: - testdir = Path(testdir) - fdroidserver.common.config = {'accepted_formats': ['yml']} - - # rewrite metadata - allapps = fdroidserver.metadata.read_metadata() - for appid, app in allapps.items(): - if appid == 'org.fdroid.fdroid': - fdroidserver.metadata.write_metadata( - testdir / (appid + '.yml'), app - ) - - # assert rewrite result - self.maxDiff = None - file_name = 'org.fdroid.fdroid.yml' - self.assertEqual( - (testdir / file_name).read_text(encoding='utf-8'), - (Path('metadata-rewrite-yml') / file_name).read_text(encoding='utf-8'), - ) - - @mock.patch('git.Repo', mock.Mock()) - def test_rewrite_yaml_special_build_params(self): - """Test rewriting a plain YAML metadata file without localized files.""" - os.chdir(self.testdir) - os.mkdir('metadata') - appid = 'app.with.special.build.params' - file_name = Path('metadata/%s.yml' % appid) - shutil.copy(basedir / file_name, file_name) - - # rewrite metadata - allapps = fdroidserver.metadata.read_metadata({appid: -1}) - for appid, app in allapps.items(): - metadata.write_metadata(file_name, app) - - # assert rewrite result - self.maxDiff = None - self.assertEqual( - file_name.read_text(), - (basedir / 'metadata-rewrite-yml' / file_name.name).read_text(), - ) - - def test_normalize_type_string(self): - """TYPE_STRING currently has some quirky behavior.""" - self.assertEqual('123456', metadata._normalize_type_string(123456)) - self.assertEqual('1.0', metadata._normalize_type_string(1.0)) - self.assertEqual('0', metadata._normalize_type_string(0)) - self.assertEqual('0.0', metadata._normalize_type_string(0.0)) - self.assertEqual('0.1', metadata._normalize_type_string(0.1)) - self.assertEqual('[]', metadata._normalize_type_string(list())) - self.assertEqual('{}', metadata._normalize_type_string(dict())) - self.assertEqual('false', metadata._normalize_type_string(False)) - 
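# A compact sketch of the TYPE_STRING normalization asserted in this test and
# in the sha256 test below, reusing this file's own imports; the value 7 is an
# arbitrary pick from the range the sha256 test iterates over.
from fdroidserver import metadata
from fdroidserver._yaml import yaml

assert metadata._normalize_type_string(True) == 'true'
assert metadata._normalize_type_string(1.0) == '1.0'
# a zero-padded, digits-only SHA-256-style value can be read back by the YAML
# loader as a decimal int; normalization is expected to return the original
# 64-character string
s = '%064d' % (7 * 10**51)
assert metadata._normalize_type_string(yaml.load(s)) == s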
self.assertEqual('true', metadata._normalize_type_string(True)) - - def test_normalize_type_string_sha256(self): - """SHA-256 values are TYPE_STRING, which YAML can parse as decimal ints.""" - for v in range(1, 1000): - s = '%064d' % (v * (10**51)) - self.assertEqual(s, metadata._normalize_type_string(yaml.load(s))) - - def test_normalize_type_stringmap_none(self): - self.assertEqual(dict(), metadata._normalize_type_stringmap('key', None)) - - def test_normalize_type_stringmap_empty_list(self): - self.assertEqual(dict(), metadata._normalize_type_stringmap('AntiFeatures', [])) - - def test_normalize_type_stringmap_simple_list_format(self): - self.assertEqual( - {'Ads': {}, 'Tracking': {}}, - metadata._normalize_type_stringmap('AntiFeatures', ['Ads', 'Tracking']), - ) - - def test_normalize_type_int(self): - """TYPE_INT should be an int whenever possible.""" - self.assertEqual(0, metadata._normalize_type_int('key', 0)) - self.assertEqual(1, metadata._normalize_type_int('key', 1)) - self.assertEqual(-5, metadata._normalize_type_int('key', -5)) - self.assertEqual(0, metadata._normalize_type_int('key', '0')) - self.assertEqual(1, metadata._normalize_type_int('key', '1')) - self.assertEqual(-5, metadata._normalize_type_int('key', '-5')) - self.assertEqual( - 12345678901234567890, - metadata._normalize_type_int('key', 12345678901234567890), - ) - - def test_normalize_type_int_fails(self): - with self.assertRaises(MetaDataException): - metadata._normalize_type_int('key', '1a') - with self.assertRaises(MetaDataException): - metadata._normalize_type_int('key', 1.1) - with self.assertRaises(MetaDataException): - metadata._normalize_type_int('key', True) - - def test_normalize_type_list(self): - """TYPE_LIST is always a list of strings, no matter what YAML thinks.""" - k = 'placeholder' - self.assertEqual(['1.0'], metadata._normalize_type_list(k, 1.0)) - self.assertEqual(['1234567890'], metadata._normalize_type_list(k, 1234567890)) - self.assertEqual(['false'], metadata._normalize_type_list(k, False)) - self.assertEqual(['true'], metadata._normalize_type_list(k, True)) - self.assertEqual(['foo'], metadata._normalize_type_list(k, 'foo')) - self.assertEqual([], metadata._normalize_type_list(k, list())) - self.assertEqual([], metadata._normalize_type_list(k, tuple())) - self.assertEqual([], metadata._normalize_type_list(k, set())) - self.assertEqual(['0', '1', '2'], metadata._normalize_type_list(k, {0, 1, 2})) - self.assertEqual( - ['a', 'b', 'c', '0', '0.0'], - metadata._normalize_type_list(k, yaml.load('[a, b, c, 0, 0.0]')), - ) - self.assertEqual( - ['1', '1.0', 's', 'true', '{}'], - metadata._normalize_type_list(k, yaml.load('[1, 1.0, s, true, {}]')), - ) - self.assertEqual( - ['1', '1.0', 's', 'true', '{}'], - metadata._normalize_type_list(k, (1, 1.0, 's', True, dict())), - ) - - def test_normalize_type_list_fails(self): - with self.assertRaises(MetaDataException): - metadata._normalize_type_list('placeholder', dict()) - - def test_post_parse_yaml_metadata(self): - yamldata = dict() - metadata.post_parse_yaml_metadata(yamldata) - - yamldata[ - 'AllowedAPKSigningKeys' - ] = 'c03dac71394d6c26766f1b04d3e31cfcac5d03b55d8aa40cc9b9fa6b74354c66' - metadata.post_parse_yaml_metadata(yamldata) - - def test_post_parse_yaml_metadata_ArchivePolicy_int(self): - for i in range(20): - yamldata = {'ArchivePolicy': i} - metadata.post_parse_yaml_metadata(yamldata) - self.assertEqual(i, yamldata['ArchivePolicy']) - - def test_post_parse_yaml_metadata_ArchivePolicy_string(self): - for i in range(20): - yamldata = 
{'ArchivePolicy': '%d' % i} - metadata.post_parse_yaml_metadata(yamldata) - self.assertEqual(i, yamldata['ArchivePolicy']) - - def test_post_parse_yaml_metadata_ArchivePolicy_versions(self): - """Test that the old format still works.""" - for i in range(20): - yamldata = {'ArchivePolicy': '%d versions' % i} - metadata.post_parse_yaml_metadata(yamldata) - self.assertEqual(i, yamldata['ArchivePolicy']) - - def test_post_parse_yaml_metadata_fails(self): - yamldata = {'AllowedAPKSigningKeys': {'bad': 'dict-placement'}} - with self.assertRaises(MetaDataException): - metadata.post_parse_yaml_metadata(yamldata) - - def test_post_parse_yaml_metadata_0padding_sha256(self): - """SHA-256 values are strings, but YAML 1.2 will read some as decimal ints.""" - v = '0027293472934293872934729834729834729834729834792837487293847926' - yamldata = yaml.load('AllowedAPKSigningKeys: ' + v) - metadata.post_parse_yaml_metadata(yamldata) - self.assertEqual(yamldata['AllowedAPKSigningKeys'], [v]) - - def test_post_parse_yaml_metadata_builds(self): - yamldata = OrderedDict() - builds = [] - yamldata['Builds'] = builds - build = OrderedDict() - builds.append(build) - - build['versionCode'] = 1.1 - self.assertRaises( - fdroidserver.exception.MetaDataException, - fdroidserver.metadata.post_parse_yaml_metadata, - yamldata, - ) - - build['versionCode'] = '1a' - self.assertRaises( - fdroidserver.exception.MetaDataException, - fdroidserver.metadata.post_parse_yaml_metadata, - yamldata, - ) - - build['versionCode'] = 1 - build['versionName'] = 1 - fdroidserver.metadata.post_parse_yaml_metadata(yamldata) - self.assertNotEqual(1, yamldata['Builds'][0]['versionName']) - self.assertEqual('1', yamldata['Builds'][0]['versionName']) - self.assertEqual(1, yamldata['Builds'][0]['versionCode']) - - build['versionName'] = 1.0 - fdroidserver.metadata.post_parse_yaml_metadata(yamldata) - self.assertNotEqual(1.0, yamldata['Builds'][0]['versionName']) - self.assertEqual('1.0', yamldata['Builds'][0]['versionName']) - - build['commit'] = 1.0 - fdroidserver.metadata.post_parse_yaml_metadata(yamldata) - self.assertNotEqual(1.0, yamldata['Builds'][0]['commit']) - self.assertEqual('1.0', yamldata['Builds'][0]['commit']) - - teststr = '98234fab134b' - build['commit'] = teststr - fdroidserver.metadata.post_parse_yaml_metadata(yamldata) - self.assertEqual(teststr, yamldata['Builds'][0]['commit']) - - testcommitid = 1234567890 - build['commit'] = testcommitid - fdroidserver.metadata.post_parse_yaml_metadata(yamldata) - self.assertNotEqual(testcommitid, yamldata['Builds'][0]['commit']) - self.assertEqual('1234567890', yamldata['Builds'][0]['commit']) - - def test_read_metadata_sort_by_time(self): - with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir): - testdir = Path(testdir) - metadatadir = testdir / 'metadata' - metadatadir.mkdir() - - randomlist = [] - randomapps = list((basedir / 'metadata').glob('*.yml')) - random.shuffle(randomapps) - i = 1 - for f in randomapps: - shutil.copy(f, metadatadir) - new = metadatadir / f.name - stat = new.stat() - os.utime(new, (stat.st_ctime, stat.st_mtime + i)) - # prepend new item so newest is always first - randomlist = [f.stem] + randomlist - i += 1 - allapps = fdroidserver.metadata.read_metadata(sort_by_time=True) - allappids = [] - for appid, app in allapps.items(): - allappids.append(appid) - self.assertEqual(randomlist, allappids) - - def test_parse_yaml_metadata_0size_file(self): - self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf(''))) - - def 
test_parse_yaml_metadata_empty_dict_file(self): - self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf('{}'))) - - def test_parse_yaml_metadata_empty_string_file(self): - self.assertEqual(dict(), metadata.parse_yaml_metadata(_get_mock_mf('""'))) - - def test_parse_yaml_metadata_fail_on_root_list(self): - with self.assertRaises(MetaDataException): - metadata.parse_yaml_metadata(_get_mock_mf('-')) - with self.assertRaises(MetaDataException): - metadata.parse_yaml_metadata(_get_mock_mf('[]')) - with self.assertRaises(MetaDataException): - metadata.parse_yaml_metadata(_get_mock_mf('- AutoName: fake')) - - def test_parse_yaml_metadata_type_list_str(self): - v = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' - mf = _get_mock_mf('AllowedAPKSigningKeys: "%s"' % v) - self.assertEqual( - v, - metadata.parse_yaml_metadata(mf)['AllowedAPKSigningKeys'][0], - ) - - def test_parse_yaml_metadata_type_list_build_str(self): - mf = _get_mock_mf('Builds: [{versionCode: 1, rm: s}]') - self.assertEqual( - metadata.parse_yaml_metadata(mf), - {'Builds': [{'rm': ['s'], 'versionCode': 1}]}, - ) - - def test_parse_yaml_metadata_app_type_list_fails(self): - mf = _get_mock_mf('AllowedAPKSigningKeys: {t: f}') - with self.assertRaises(MetaDataException): - metadata.parse_yaml_metadata(mf) - - def test_parse_yaml_metadata_build_type_list_fails(self): - mf = _get_mock_mf('Builds: [{versionCode: 1, rm: {bad: dict-placement}}]') - with self.assertRaises(MetaDataException): - metadata.parse_yaml_metadata(mf) - - def test_parse_yaml_metadata_unknown_app_field(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AutoName: F-Droid - RepoType: git - Builds: [] - bad: value""" - ) - ) - mf.name = 'mock_filename.yaml' - with self.assertRaises(MetaDataException): - fdroidserver.metadata.parse_yaml_metadata(mf) - - def test_parse_yaml_metadata_unknown_build_flag(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AutoName: F-Droid - RepoType: git - Builds: - - bad: value""" - ) - ) - mf.name = 'mock_filename.yaml' - with self.assertRaises(MetaDataException): - fdroidserver.metadata.parse_yaml_metadata(mf) - - @mock.patch('logging.warning') - @mock.patch('logging.error') - def test_parse_yaml_metadata_continue_on_warning(self, _error, _warning): - """When errors are disabled, parsing should provide something that can work. - - When errors are disabled, then it should try to give data that - lets something happen. A zero-length file is valid for - operation, it just declares a Application ID as "known" and - nothing else. This example gives a list as the base in the - .yml file, which is unparsable, so it gives a warning message - and carries on with a blank dict. 
- - """ - fdroidserver.metadata.warnings_action = None - mf = _get_mock_mf('[AntiFeatures: Tracking]') - self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict()) - _warning.assert_called_once() - _error.assert_called_once() - - def test_parse_localized_antifeatures(self): - """Unit test based on reading files included in the test repo.""" - app = dict() - app['id'] = 'app.with.special.build.params' - metadata.parse_localized_antifeatures(app) - self.maxDiff = None - self.assertEqual( - app, - { - 'AntiFeatures': { - 'Ads': {'en-US': 'please no'}, - 'NoSourceSince': {'en-US': 'no activity\n'}, - }, - 'Builds': [ - { - 'versionCode': 50, - 'antifeatures': { - 'Ads': { - 'en-US': 'includes ad lib\n', - 'zh-CN': '包括广告图书馆\n', - }, - 'Tracking': {'en-US': 'standard suspects\n'}, - }, - }, - { - 'versionCode': 49, - 'antifeatures': { - 'Tracking': {'zh-CN': 'Text from zh-CN/49_Tracking.txt'}, - }, - }, - ], - 'id': app['id'], - }, - ) - - def test_parse_localized_antifeatures_passthrough(self): - """Test app values are cleanly passed through if no localized files.""" - before = { - 'id': 'placeholder', - 'AntiFeatures': {'NonFreeDep': {}}, - 'Builds': [{'versionCode': 999, 'antifeatures': {'zero': {}, 'one': {}}}], - } - after = copy.deepcopy(before) - with tempfile.TemporaryDirectory() as testdir: - os.chdir(testdir) - os.mkdir('metadata') - os.mkdir(os.path.join('metadata', after['id'])) - metadata.parse_localized_antifeatures(after) - self.assertEqual(before, after) - - def test_parse_metadata_antifeatures_NoSourceSince(self): - """Test that NoSourceSince gets added as an Anti-Feature.""" - os.chdir(self.testdir) - yml = Path('metadata/test.yml') - yml.parent.mkdir() - with yml.open('w') as fp: - fp.write('AntiFeatures: Ads\nNoSourceSince: gone\n') - app = metadata.parse_metadata(yml) - self.assertEqual( - app['AntiFeatures'], {'Ads': {}, 'NoSourceSince': {DEFAULT_LOCALE: 'gone'}} - ) - - @mock.patch('logging.error') - def test_yml_overrides_localized_antifeatures(self, logging_error): - """Definitions in .yml files should override the localized versions.""" - app = metadata.parse_metadata('metadata/app.with.special.build.params.yml') - - self.assertEqual(app['AntiFeatures'], {'Tracking': {}}) - - self.assertEqual(49, app['Builds'][-3]['versionCode']) - self.assertEqual( - app['Builds'][-3]['antifeatures'], - {'Tracking': {DEFAULT_LOCALE: 'Uses the Facebook SDK.'}}, - ) - - self.assertEqual(50, app['Builds'][-2]['versionCode']) - self.assertEqual( - app['Builds'][-2]['antifeatures'], - { - 'Ads': { - 'en-US': 'includes ad lib\n', - 'zh-CN': '包括广告图书馆\n', - }, - 'Tracking': {'en-US': 'standard suspects\n'}, - }, - ) - # errors are printed when .yml overrides localized - logging_error.assert_called() - self.assertEqual(3, len(logging_error.call_args_list)) - - def test_parse_yaml_srclib_corrupt_file(self): - with tempfile.TemporaryDirectory() as testdir: - testdir = Path(testdir) - srclibfile = testdir / 'srclib/mock.yml' - srclibfile.parent.mkdir() - with srclibfile.open('w') as fp: - fp.write( - textwrap.dedent( - """ - - RepoType: git - - Repo: https://github.com/realm/realm-js.git - """ - ) - ) - with self.assertRaises(MetaDataException): - fdroidserver.metadata.parse_yaml_srclib(srclibfile) - - def test_write_yaml_with_placeholder_values(self): - mf = io.StringIO() - - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app.SourceCode = "https://gitlab.com/fdroid/fdroidclient.git" - app.IssueTracker = "https://gitlab.com/fdroid/fdroidclient/issues" - app.RepoType = 
'git' - app.Repo = 'https://gitlab.com/fdroid/fdroidclient.git' - app.AutoUpdateMode = 'None' - app.UpdateCheckMode = 'Tags' - build = fdroidserver.metadata.Build() - build.versionName = 'Unknown' # taken from fdroidserver/import.py - build.versionCode = 0 # taken from fdroidserver/import.py - build.disable = 'Generated by import.py ...' - build.commit = 'Unknown' - build.gradle = ['yes'] - app['Builds'] = [build] - - fdroidserver.metadata.write_yaml(mf, app) - - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - SourceCode: https://gitlab.com/fdroid/fdroidclient.git - IssueTracker: https://gitlab.com/fdroid/fdroidclient/issues - - RepoType: git - Repo: https://gitlab.com/fdroid/fdroidclient.git - - Builds: - - versionName: Unknown - versionCode: 0 - disable: Generated by import.py ... - commit: Unknown - gradle: - - yes - - AutoUpdateMode: None - UpdateCheckMode: Tags - """ - ), - ) - - def test_parse_yaml_metadata_prebuild_list(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AutoName: F-Droid - RepoType: git - Builds: - - versionCode: 1 - versionName: v0.1.0 - sudo: - - apt-get update - - apt-get install -y whatever - - sed -i -e 's/> /a/file - build: - - ./gradlew someSpecialTask - - sed -i 'd/that wrong config/' gradle.properties - - ./gradlew compile - """ - ) - ) - mf.name = 'mock_filename.yaml' - mf.seek(0) - result = fdroidserver.metadata.parse_yaml_metadata(mf) - self.maxDiff = None - self.assertDictEqual( - result, - { - 'AutoName': 'F-Droid', - 'RepoType': 'git', - 'Builds': [ - { - 'versionCode': 1, - 'versionName': 'v0.1.0', - 'sudo': [ - "apt-get update", - "apt-get install -y whatever", - "sed -i -e 's/> /a/file", - ], - 'build': [ - "./gradlew someSpecialTask", - "sed -i 'd/that wrong config/' gradle.properties", - "./gradlew compile", - ], - } - ], - }, - ) - - def test_parse_yaml_metadata_prebuild_strings(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AutoName: F-Droid - RepoType: git - Builds: - - versionCode: 1 - versionName: v0.1.0 - sudo: |- - apt-get update && apt-get install -y whatever && sed -i -e 's/> /a/file - build: |- - ./gradlew someSpecialTask && sed -i 'd/that wrong config/' gradle.properties && ./gradlew compile - """ - ) - ) - mf.name = 'mock_filename.yaml' - mf.seek(0) - result = fdroidserver.metadata.parse_yaml_metadata(mf) - self.maxDiff = None - self.assertDictEqual( - result, - { - 'AutoName': 'F-Droid', - 'RepoType': 'git', - 'Builds': [ - { - 'versionCode': 1, - 'versionName': 'v0.1.0', - 'sudo': [ - "apt-get update && " - "apt-get install -y whatever && " - "sed -i -e 's/> /a/file" - ], - 'build': [ - "./gradlew someSpecialTask && " - "sed -i 'd/that wrong config/' gradle.properties && " - "./gradlew compile" - ], - } - ], - }, - ) - - def test_parse_yaml_metadata_prebuild_string(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AutoName: F-Droid - RepoType: git - Builds: - - versionCode: 1 - versionName: v0.1.0 - prebuild: |- - a && b && sed -i 's,a,b,' - """ - ) - ) - mf.name = 'mock_filename.yaml' - mf.seek(0) - result = fdroidserver.metadata.parse_yaml_metadata(mf) - self.assertDictEqual( - result, - { - 'AutoName': 'F-Droid', - 'RepoType': 'git', - 'Builds': [ - { - 'versionCode': 1, - 'versionName': 'v0.1.0', - 'prebuild': ["a && b && sed -i 's,a,b,'"], - } - ], - }, - ) - - def test_parse_yaml_provides_should_be_ignored(self): - mf = io.StringIO( - textwrap.dedent( - """\ - Provides: this.is.deprecated - AutoName: F-Droid - RepoType: git - Builds: - - 
versionCode: 1 - versionName: v0.1.0 - prebuild: |- - a && b && sed -i 's,a,b,' - """ - ) - ) - mf.name = 'mock_filename.yaml' - mf.seek(0) - result = fdroidserver.metadata.parse_yaml_metadata(mf) - self.assertNotIn('Provides', result) - self.assertNotIn('provides', result) - - def test_parse_yaml_app_antifeatures_dict(self): - nonfreenet = 'free it!' - tracking = 'so many' - mf = io.StringIO( - textwrap.dedent( - f""" - AntiFeatures: - Tracking: {tracking} - NonFreeNet: {nonfreenet} - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - { - 'AntiFeatures': { - 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet}, - 'Tracking': {DEFAULT_LOCALE: tracking}, - } - }, - ) - - def test_parse_yaml_metadata_build_antifeatures_old_style(self): - mf = _get_mock_mf( - textwrap.dedent( - """ - AntiFeatures: - - Ads - Builds: - - versionCode: 123 - antifeatures: - - KnownVuln - - NonFreeAssets - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - { - 'AntiFeatures': {'Ads': {}}, - 'Builds': [ - { - 'antifeatures': {'KnownVuln': {}, 'NonFreeAssets': {}}, - 'versionCode': 123, - } - ], - }, - ) - - def test_parse_yaml_metadata_antifeatures_sort(self): - """All data should end up sorted, to minimize diffs in the index files.""" - self.assertEqual( - metadata.parse_yaml_metadata( - _get_mock_mf( - textwrap.dedent( - """ - Builds: - - versionCode: 123 - antifeatures: - KnownVuln: - es: 2nd - az: zero - en-US: first - Tracking: - NonFreeAssets: - AntiFeatures: - NonFreeDep: - Ads: - sw: 2nd - zh-CN: 3rd - de: 1st - """ - ) - ) - ), - { - 'AntiFeatures': { - 'Ads': {'de': '1st', 'sw': '2nd', 'zh-CN': '3rd'}, - 'NonFreeDep': {}, - }, - 'Builds': [ - { - 'antifeatures': { - 'KnownVuln': {'az': 'zero', 'en-US': 'first', 'es': '2nd'}, - 'NonFreeAssets': {}, - 'Tracking': {}, - }, - 'versionCode': 123, - } - ], - }, - ) - - def test_parse_yaml_app_antifeatures_str(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: Tracking')), - {'AntiFeatures': {'Tracking': {}}}, - ) - - def test_parse_yaml_app_antifeatures_bool(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: true')), - {'AntiFeatures': {'true': {}}}, - ) - - def test_parse_yaml_app_antifeatures_float_nan(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: .nan')), - {'AntiFeatures': {'.nan': {}}}, - ) - - def test_parse_yaml_app_antifeatures_float_inf(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: .inf')), - {'AntiFeatures': {'.inf': {}}}, - ) - - def test_parse_yaml_app_antifeatures_float_negative_inf(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: -.inf')), - {'AntiFeatures': {'-.inf': {}}}, - ) - - def test_parse_yaml_app_antifeatures_int(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: 1')), - {'AntiFeatures': {'1': {}}}, - ) - - def test_parse_yaml_app_antifeatures_float(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures: 1.0')), - {'AntiFeatures': {'1.0': {}}}, - ) - - def test_parse_yaml_app_antifeatures_list_float(self): - self.assertEqual( - metadata.parse_yaml_metadata(io.StringIO('AntiFeatures:\n - 1.0\n')), - {'AntiFeatures': {'1.0': {}}}, - ) - - def test_parse_yaml_app_antifeatures_dict_float(self): - mf = io.StringIO('AntiFeatures:\n 0.0: too early\n') - self.assertEqual( - metadata.parse_yaml_metadata(mf), - {'AntiFeatures': {'0.0': {'en-US': 'too early'}}}, - ) - - def 
test_parse_yaml_app_antifeatures_dict_float_fail_value(self): - mf = io.StringIO('AntiFeatures:\n NoSourceSince: 1.0\n') - self.assertEqual( - metadata.parse_yaml_metadata(mf), - {'AntiFeatures': {'NoSourceSince': {'en-US': '1.0'}}}, - ) - - def test_parse_yaml_metadata_type_stringmap_old_list(self): - mf = _get_mock_mf( - textwrap.dedent( - """ - AntiFeatures: - - Ads - - Tracking - """ - ) - ) - self.assertEqual( - {'AntiFeatures': {'Ads': {}, 'Tracking': {}}}, - metadata.parse_yaml_metadata(mf), - ) - - def test_parse_yaml_app_antifeatures_dict_no_value(self): - mf = io.StringIO( - textwrap.dedent( - """\ - AntiFeatures: - Tracking: - NonFreeNet: - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - {'AntiFeatures': {'NonFreeNet': {}, 'Tracking': {}}}, - ) - - def test_parse_yaml_metadata_type_stringmap_transitional(self): - """Support a transitional format, where users just append a text""" - ads = 'Has ad lib in it.' - tracking = 'opt-out reports with ACRA' - mf = _get_mock_mf( - textwrap.dedent( - f""" - AntiFeatures: - - Ads: {ads} - - Tracking: {tracking} - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - { - 'AntiFeatures': { - 'Ads': {DEFAULT_LOCALE: ads}, - 'Tracking': {DEFAULT_LOCALE: tracking}, - } - }, - ) - - def test_parse_yaml_app_antifeatures_dict_mixed_values(self): - ads = 'true' - tracking = 'many' - nonfreenet = '1' - mf = io.StringIO( - textwrap.dedent( - f""" - AntiFeatures: - Ads: {ads} - Tracking: {tracking} - NonFreeNet: {nonfreenet} - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - { - 'AntiFeatures': { - 'Ads': {DEFAULT_LOCALE: ads}, - 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet}, - 'Tracking': {DEFAULT_LOCALE: tracking}, - } - }, - ) - - def test_parse_yaml_app_antifeatures_stringmap_full(self): - ads = 'watching' - tracking = 'many' - nonfreenet = 'pipes' - nonfreenet_zh = '非免费网络' - self.maxDiff = None - mf = io.StringIO( - textwrap.dedent( - f""" - AntiFeatures: - Ads: - {DEFAULT_LOCALE}: {ads} - Tracking: - {DEFAULT_LOCALE}: {tracking} - NonFreeNet: - {DEFAULT_LOCALE}: {nonfreenet} - zh-CN: {nonfreenet_zh} - """ - ) - ) - self.assertEqual( - metadata.parse_yaml_metadata(mf), - { - 'AntiFeatures': { - 'Ads': {DEFAULT_LOCALE: ads}, - 'NonFreeNet': {DEFAULT_LOCALE: nonfreenet, 'zh-CN': nonfreenet_zh}, - 'Tracking': {DEFAULT_LOCALE: tracking}, - } - }, - ) - - def test_parse_yaml_build_type_int_fail(self): - mf = io.StringIO('Builds: [{versionCode: 1a}]') - with self.assertRaises(MetaDataException): - fdroidserver.metadata.parse_yaml_metadata(mf) - - def test_parse_yaml_int_strict_typing_fails(self): - """Things that cannot be preserved when parsing as YAML.""" - mf = io.StringIO('Builds: [{versionCode: 1, rm: 0xf}]') - self.assertEqual( - {'Builds': [{'rm': ['15'], 'versionCode': 1}]}, # 15 != 0xf - fdroidserver.metadata.parse_yaml_metadata(mf), - ) - mf = io.StringIO('Builds: [{versionCode: 1, rm: 0x010}]') - self.assertEqual( - {'Builds': [{'rm': ['16'], 'versionCode': 1}]}, # 16 != 0x010 - fdroidserver.metadata.parse_yaml_metadata(mf), - ) - mf = io.StringIO('Builds: [{versionCode: 1, rm: 0o015}]') - self.assertEqual( - {'Builds': [{'rm': ['13'], 'versionCode': 1}]}, # 13 != 0o015 - fdroidserver.metadata.parse_yaml_metadata(mf), - ) - mf = io.StringIO('Builds: [{versionCode: 1, rm: 10_000}]') - self.assertEqual( - {'Builds': [{'rm': ['10000'], 'versionCode': 1}]}, # 10000 != 10_000 - fdroidserver.metadata.parse_yaml_metadata(mf), - ) - - def test_write_yaml_1_line_scripts_as_string(self): - mf = 
io.StringIO() - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app['Builds'] = [] - build = fdroidserver.metadata.Build() - build.versionCode = 102030 - build.versionName = 'v1.2.3' - build.sudo = ["chmod +rwx /opt"] - build.init = ["sed -i -e 'g/what/ever/' /some/file"] - build.prebuild = ["sed -i 'd/that wrong config/' gradle.properties"] - build.build = ["./gradlew compile"] - app['Builds'].append(build) - fdroidserver.metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - sudo: chmod +rwx /opt - init: sed -i -e 'g/what/ever/' /some/file - prebuild: sed -i 'd/that wrong config/' gradle.properties - build: ./gradlew compile - - AutoUpdateMode: None - UpdateCheckMode: None - """ - ), - ) - - def test_write_yaml_1_line_scripts_as_list(self): - mf = io.StringIO() - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app['Builds'] = [] - build = fdroidserver.metadata.Build() - build.versionCode = 102030 - build.versionName = 'v1.2.3' - build.sudo = ["chmod +rwx /opt"] - build.init = ["sed -i -e 'g/what/ever/' /some/file"] - build.prebuild = ["sed -i 'd/that wrong config/' gradle.properties"] - build.build = ["./gradlew compile"] - app['Builds'].append(build) - fdroidserver.metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - sudo: chmod +rwx /opt - init: sed -i -e 'g/what/ever/' /some/file - prebuild: sed -i 'd/that wrong config/' gradle.properties - build: ./gradlew compile - - AutoUpdateMode: None - UpdateCheckMode: None - """ - ), - ) - - def test_write_yaml_multiline_scripts_from_list(self): - mf = io.StringIO() - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app['Builds'] = [] - build = fdroidserver.metadata.Build() - build.versionCode = 102030 - build.versionName = 'v1.2.3' - build.sudo = [ - "apt-get update", - "apt-get install -y whatever", - "sed -i -e 's/> /a/file"] - build.build = [ - "./gradlew someSpecialTask", - "sed -i 'd/that wrong config/' gradle.properties", - "./gradlew compile", - ] - app['Builds'].append(build) - fdroidserver.metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - sudo: - - apt-get update - - apt-get install -y whatever - - sed -i -e 's/> /a/file - build: - - ./gradlew someSpecialTask - - sed -i 'd/that wrong config/' gradle.properties - - ./gradlew compile - - AutoUpdateMode: None - UpdateCheckMode: None - """ - ), - ) - - def test_write_yaml_multiline_scripts_from_string(self): - mf = io.StringIO() - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app['Builds'] = [] - build = fdroidserver.metadata.Build() - build.versionCode = 102030 - build.versionName = 'v1.2.3' - build.sudo = [ - "apt-get update", - "apt-get install -y whatever", - "sed -i -e 's/> /a/file"] - build.build = [ - "./gradlew someSpecialTask", - "sed -i 'd/that wrong config/' gradle.properties", - "./gradlew compile", - ] - app['Builds'].append(build) - fdroidserver.metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - sudo: - - apt-get update 
- - apt-get install -y whatever - - sed -i -e 's/> /a/file - build: - - ./gradlew someSpecialTask - - sed -i 'd/that wrong config/' gradle.properties - - ./gradlew compile - - AutoUpdateMode: None - UpdateCheckMode: None - """ - ), - ) - - def test_write_yaml_build_antifeatures(self): - mf = io.StringIO() - app = metadata.App( - { - 'License': 'Apache-2.0', - 'Builds': [ - metadata.Build( - { - 'versionCode': 102030, - 'versionName': 'v1.2.3', - 'gradle': ['yes'], - 'antifeatures': { - 'a': {}, - 'b': {'de': 'Probe', 'en-US': 'test'}, - }, - } - ), - ], - 'id': 'placeholder', - } - ) - metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - License: Apache-2.0 - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - gradle: - - yes - antifeatures: - a: {} - b: - de: Probe - en-US: test - """ - ), - ) - - def test_write_yaml_build_antifeatures_old_style(self): - mf = io.StringIO() - app = metadata.App( - { - 'License': 'Apache-2.0', - 'Builds': [ - metadata.Build( - { - 'versionCode': 102030, - 'versionName': 'v1.2.3', - 'gradle': ['yes'], - 'antifeatures': {'b': {}, 'a': {}}, - } - ), - ], - 'id': 'placeholder', - } - ) - metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - License: Apache-2.0 - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - gradle: - - yes - antifeatures: - - a - - b - """ - ), - ) - - def test_write_yaml_make_sure_provides_does_not_get_written(self): - mf = io.StringIO() - app = fdroidserver.metadata.App() - app.Categories = ['None'] - app.Provides = 'this.is.deprecated' - app['Builds'] = [] - build = fdroidserver.metadata.Build() - build.versionCode = 102030 - build.versionName = 'v1.2.3' - build.gradle = ['yes'] - app['Builds'].append(build) - fdroidserver.metadata.write_yaml(mf, app) - mf.seek(0) - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - Categories: - - None - License: Unknown - - Builds: - - versionName: v1.2.3 - versionCode: 102030 - gradle: - - yes - - AutoUpdateMode: None - UpdateCheckMode: None - """ - ), - ) - - def test_parse_yaml_srclib_unknown_key(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with Path('test.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - RepoType: git - Repo: https://example.com/test.git - Evil: I should not be here. 
- ''' - ) - ) - with self.assertRaisesRegex( - MetaDataException, - "Invalid srclib metadata: unknown key 'Evil' in 'test.yml'", - ): - fdroidserver.metadata.parse_yaml_srclib(Path('test.yml')) - - def test_parse_yaml_srclib_does_not_exists(self): - with self.assertRaisesRegex( - MetaDataException, - "Invalid scrlib metadata: " - r"'non(/|\\)existent-test-srclib.yml' " - "does not exist", - ): - fdroidserver.metadata.parse_yaml_srclib( - Path('non/existent-test-srclib.yml') - ) - - def test_parse_yaml_srclib_simple(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with Path('simple.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - # this should be simple - RepoType: git - Repo: https://git.host/repo.git - ''' - ) - ) - srclib = fdroidserver.metadata.parse_yaml_srclib(Path('simple.yml')) - self.assertDictEqual( - { - 'Repo': 'https://git.host/repo.git', - 'RepoType': 'git', - 'Subdir': None, - 'Prepare': None, - }, - srclib, - ) - - def test_parse_yaml_srclib_simple_with_blanks(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with Path('simple.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - # this should be simple - - RepoType: git - - Repo: https://git.host/repo.git - - Subdir: - - Prepare: - ''' - ) - ) - srclib = fdroidserver.metadata.parse_yaml_srclib(Path('simple.yml')) - self.assertDictEqual( - { - 'Repo': 'https://git.host/repo.git', - 'RepoType': 'git', - 'Subdir': [''], - 'Prepare': [], - }, - srclib, - ) - - def test_parse_yaml_srclib_Changelog_cketti(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - with Path('Changelog-cketti.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - RepoType: git - Repo: https://github.com/cketti/ckChangeLog - - Subdir: library,ckChangeLog/src/main - Prepare: "[ -f project.properties ] || echo 'source.dir=java' > ant.properties && echo -e 'android.library=true\\\\ntarget=android-19' > project.properties" - ''' - ) - ) - srclib = fdroidserver.metadata.parse_yaml_srclib( - Path('Changelog-cketti.yml') - ) - self.assertDictEqual( - srclib, - { - 'Repo': 'https://github.com/cketti/ckChangeLog', - 'RepoType': 'git', - 'Subdir': ['library', 'ckChangeLog/src/main'], - 'Prepare': [ - "[ -f project.properties ] || echo 'source.dir=java' > " - "ant.properties && echo -e " - "'android.library=true\\ntarget=android-19' > project.properties" - ], - }, - ) - - def test_read_srclibs_yml_subdir_list(self): - fdroidserver.metadata.srclibs = None - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - Path('srclibs').mkdir() - with Path('srclibs/with-list.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - # this should be simple - RepoType: git - Repo: https://git.host/repo.git - - Subdir: - - This is your last chance. - - After this, there is no turning back. - - You take the blue pill—the story ends, - - you wake up in your bed - - and believe whatever you want to believe. - - You take the red pill—you stay in Wonderland - - and I show you how deep the rabbit-hole goes. - Prepare: - There is a difference between knowing the path - and walking the path. 
- ''' - ) - ) - fdroidserver.metadata.read_srclibs() - self.maxDiff = None - self.assertDictEqual( - fdroidserver.metadata.srclibs, - { - 'with-list': { - 'RepoType': 'git', - 'Repo': 'https://git.host/repo.git', - 'Subdir': [ - 'This is your last chance.', - 'After this, there is no turning back.', - 'You take the blue pill—the story ends,', - 'you wake up in your bed', - 'and believe whatever you want to believe.', - 'You take the red pill—you stay in Wonderland', - 'and I show you how deep the rabbit-hole goes.', - ], - 'Prepare': [ - 'There is a difference between knowing the path ' - 'and walking the path.' - ], - } - }, - ) - - def test_read_srclibs_yml_prepare_list(self): - fdroidserver.metadata.srclibs = None - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - Path('srclibs').mkdir() - with Path('srclibs/with-list.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - # this should be simple - RepoType: git - Repo: https://git.host/repo.git - - Subdir: - Prepare: - - Many - - invalid - - commands - - here. - ''' - ) - ) - fdroidserver.metadata.read_srclibs() - self.maxDiff = None - self.assertDictEqual( - fdroidserver.metadata.srclibs, - { - 'with-list': { - 'RepoType': 'git', - 'Repo': 'https://git.host/repo.git', - 'Subdir': [''], - 'Prepare': [ - 'Many', - 'invalid', - 'commands', - 'here.', - ], - } - }, - ) - - def test_read_srclibs(self): - fdroidserver.metadata.srclibs = None - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - Path('srclibs').mkdir() - with Path('srclibs/simple.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - RepoType: git - Repo: https://git.host/repo.git - ''' - ) - ) - with Path('srclibs/simple-wb.yml').open('w', encoding='utf-8') as f: - f.write( - textwrap.dedent( - '''\ - # this should be simple - RepoType: git - Repo: https://git.host/repo.git - - Subdir: - Prepare: - ''' - ) - ) - fdroidserver.metadata.read_srclibs() - self.assertDictEqual( - fdroidserver.metadata.srclibs, - { - 'simple-wb': { - 'RepoType': 'git', - 'Repo': 'https://git.host/repo.git', - 'Subdir': [''], - 'Prepare': [], - }, - 'simple': { - 'RepoType': 'git', - 'Repo': 'https://git.host/repo.git', - 'Subdir': None, - 'Prepare': None, - }, - }, - ) - - def test_build_ndk_path(self): - with tempfile.TemporaryDirectory(prefix='android-sdk-') as sdk_path: - config = {'ndk_paths': {}, 'sdk_path': sdk_path} - fdroidserver.common.config = config - - build = fdroidserver.metadata.Build() - build.ndk = 'r10e' - self.assertEqual('', build.ndk_path()) - - correct = '/fake/path/ndk/r21b' - config['ndk_paths'] = {'r21b': correct} - self.assertEqual('', build.ndk_path()) - config['ndk_paths'] = {'r10e': correct} - self.assertEqual(correct, build.ndk_path()) - - r10e = '/fake/path/ndk/r10e' - r22b = '/fake/path/ndk/r22e' - config['ndk_paths'] = {'r10e': r10e, 'r22b': r22b} - self.assertEqual(r10e, build.ndk_path()) - - build.ndk = ['r10e', 'r22b'] - self.assertEqual(r10e, build.ndk_path()) - - build.ndk = ['r22b', 'r10e'] - self.assertEqual(r22b, build.ndk_path()) - - def test_build_ndk_path_only_accepts_str(self): - """Paths in the config must be strings, never pathlib.Path instances""" - config = {'ndk_paths': {'r24': Path('r24')}} - fdroidserver.common.config = config - build = fdroidserver.metadata.Build() - build.ndk = 'r24' - with self.assertRaises(TypeError): - build.ndk_path() - - def test_del_duplicated_NoSourceSince(self): - app = { - 'AntiFeatures': {'Ads': {}, 'NoSourceSince': {DEFAULT_LOCALE: '1.0'}}, - 
'NoSourceSince': '1.0', - } - metadata._del_duplicated_NoSourceSince(app) - self.assertEqual(app, {'AntiFeatures': {'Ads': {}}, 'NoSourceSince': '1.0'}) - - def test_check_manually_extended_NoSourceSince(self): - app = { - 'AntiFeatures': {'NoSourceSince': {DEFAULT_LOCALE: '1.0', 'de': '1,0'}}, - 'NoSourceSince': '1.0', - } - metadata._del_duplicated_NoSourceSince(app) - self.assertEqual( - app, - { - 'AntiFeatures': {'NoSourceSince': {DEFAULT_LOCALE: '1.0', 'de': '1,0'}}, - 'NoSourceSince': '1.0', - }, - ) - - def test_make_sure_nosourcesince_does_not_get_written(self): - appid = 'com.politedroid' - app = metadata.read_metadata({appid: -1})[appid] - builds = app['Builds'] - app['Builds'] = [copy.deepcopy(builds[0])] - mf = io.StringIO() - metadata.write_yaml(mf, app) - mf.seek(0) - self.maxDiff = None - self.assertEqual( - mf.read(), - textwrap.dedent( - """\ - AntiFeatures: - - NonFreeNet - Categories: - - Multimedia - - Security - - Time - License: GPL-3.0-only - SourceCode: https://github.com/miguelvps/PoliteDroid - IssueTracker: https://github.com/miguelvps/PoliteDroid/issues - - AutoName: Polite Droid - Summary: Calendar tool - Description: Activates silent mode during calendar events. - - RepoType: git - Repo: https://github.com/miguelvps/PoliteDroid.git - - Builds: - - versionName: '1.2' - versionCode: 3 - commit: 6a548e4b19 - target: android-10 - antifeatures: - - KnownVuln - - NonFreeAssets - - ArchivePolicy: 4 - AutoUpdateMode: Version v%v - UpdateCheckMode: Tags - CurrentVersion: '1.5' - CurrentVersionCode: 6 - - NoSourceSince: '1.5' - """ - ), - ) - - def test_app_to_yaml_smokecheck(self): - self.assertTrue( - isinstance(metadata._app_to_yaml(dict()), ruamel.yaml.comments.CommentedMap) - ) - - def test_app_to_yaml_build_list_empty(self): - app = metadata.App({'Builds': [metadata.Build({'rm': []})]}) - self.assertEqual(dict(), metadata._app_to_yaml(app)['Builds'][0]) - - def test_app_to_yaml_build_list_one(self): - app = metadata.App({'Builds': [metadata.Build({'rm': ['one']})]}) - self.assertEqual({'rm': ['one']}, metadata._app_to_yaml(app)['Builds'][0]) - - def test_app_to_yaml_build_list_two(self): - app = metadata.App({'Builds': [metadata.Build({'rm': ['1', '2']})]}) - self.assertEqual({'rm': ['1', '2']}, metadata._app_to_yaml(app)['Builds'][0]) - - def test_app_to_yaml_build_list(self): - app = metadata.App({'Builds': [metadata.Build({'rm': ['b2', 'NO1']})]}) - self.assertEqual({'rm': ['b2', 'NO1']}, metadata._app_to_yaml(app)['Builds'][0]) - - def test_app_to_yaml_AllowedAPKSigningKeys_two(self): - cm = metadata._app_to_yaml(metadata.App({'AllowedAPKSigningKeys': ['b', 'A']})) - self.assertEqual(['b', 'a'], cm['AllowedAPKSigningKeys']) - - def test_app_to_yaml_AllowedAPKSigningKeys_one(self): - cm = metadata._app_to_yaml(metadata.App({'AllowedAPKSigningKeys': ['One']})) - self.assertEqual('one', cm['AllowedAPKSigningKeys']) - - def test_app_to_yaml_int_hex(self): - cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 0xFF})) - self.assertEqual(255, cm['CurrentVersionCode']) - - def test_app_to_yaml_int_underscore(self): - cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 1_2_3})) - self.assertEqual(123, cm['CurrentVersionCode']) - - def test_app_to_yaml_int_0(self): - """Document that 0 values fail to make it through.""" - # TODO it should be possible to use `CurrentVersionCode: 0` - cm = metadata._app_to_yaml(metadata.App({'CurrentVersionCode': 0})) - self.assertFalse('CurrentVersionCode' in cm) - - def test_format_multiline(self): - 
self.assertEqual(metadata._format_multiline('description'), 'description') - - def test_format_multiline_empty(self): - self.assertEqual(metadata._format_multiline(''), '') - - def test_format_multiline_newline_char(self): - self.assertEqual(metadata._format_multiline('one\\ntwo'), 'one\\ntwo') - - def test_format_multiline_newlines(self): - self.assertEqual( - metadata._format_multiline( - textwrap.dedent( - """ - one - two - three - """ - ) - ), - '\none\ntwo\nthree\n', - ) - - def test_format_list_empty(self): - self.assertEqual(metadata._format_list(['', None]), list()) - - def test_format_list_one_empty(self): - self.assertEqual(metadata._format_list(['foo', None]), ['foo']) - - def test_format_list_two(self): - self.assertEqual(metadata._format_list(['2', '1']), ['2', '1']) - - def test_format_list_newline(self): - self.assertEqual(metadata._format_list(['one\ntwo']), ['one\ntwo']) - - def test_format_list_newline_char(self): - self.assertEqual(metadata._format_list(['one\\ntwo']), ['one\\ntwo']) - - def test_format_script_empty(self): - self.assertEqual(metadata._format_script(['', None]), list()) - - def test_format_script_newline(self): - self.assertEqual(metadata._format_script(['one\ntwo']), 'one\ntwo') - - def test_format_script_newline_char(self): - self.assertEqual(metadata._format_script(['one\\ntwo']), 'one\\ntwo') - - def test_format_stringmap_empty(self): - self.assertEqual( - metadata._format_stringmap('🔥', 'test', dict()), - list(), - ) - - def test_format_stringmap_one_list(self): - self.assertEqual( - metadata._format_stringmap('🔥', 'test', {'Tracking': {}, 'Ads': {}}), - ['Ads', 'Tracking'], - ) - - def test_format_stringmap_one_list_empty_desc(self): - self.assertEqual( - metadata._format_stringmap('🔥', 'test', {'NonFree': {}, 'Ads': {'en': ''}}), - ['Ads', 'NonFree'], - ) - - def test_format_stringmap_three_list(self): - self.assertEqual( - metadata._format_stringmap('🔥', 'test', {'B': {}, 'A': {}, 'C': {}}), - ['A', 'B', 'C'], - ) - - def test_format_stringmap_two_dict(self): - self.assertEqual( - metadata._format_stringmap('🔥', 'test', {'1': {'uz': 'a'}, '2': {}}), - {'1': {'uz': 'a'}, '2': {}}, - ) - - def test_format_stringmap_three_locales(self): - self.assertEqual( - metadata._format_stringmap( - '🔥', 'test', {'AF': {'uz': 'a', 'ko': 'b', 'zh': 'c'}} - ), - {'AF': {'ko': 'b', 'uz': 'a', 'zh': 'c'}}, - ) - - def test_format_stringmap_move_build_antifeatures_to_filesystem(self): - os.chdir(self.testdir) - appid = 'a' - yml = Path('metadata/a.yml') - yml.parent.mkdir() - self.assertEqual( - metadata._format_stringmap( - appid, 'antifeatures', {'AF': {'uz': 'a', 'ko': 'b', 'zh': 'c'}} - ), - {'AF': {'ko': 'b', 'uz': 'a', 'zh': 'c'}}, - ) - - def test_format_stringmap_app_antifeatures_conflict(self): - """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" - os.chdir(self.testdir) - appid = 'a' - field = 'AntiFeatures' - locale = 'ko' - yml = Path('metadata/a.yml') - antifeatures_ko = yml.parent / appid / locale / field.lower() - antifeatures_ko.mkdir(parents=True) - afname = 'Anti-🔥' - (antifeatures_ko / (afname + '.txt')).write_text('SOMETHING ELSE') - with self.assertRaises(MetaDataException): - metadata._format_stringmap( - appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}} - ) - - def test_format_stringmap_app_antifeatures_conflict_same_contents(self): - """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" - os.chdir(self.testdir) - appid = 'a' - field = 'AntiFeatures' - locale = 'ko' - yml = 
Path('metadata/a.yml') - antifeatures_ko = yml.parent / appid / locale / field.lower() - antifeatures_ko.mkdir(parents=True) - afname = 'Anti-🔥' - (antifeatures_ko / (afname + '.txt')).write_text('b') - metadata._format_stringmap( - appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}} - ) - - def test_format_stringmap_build_antifeatures_conflict(self): - """Raise an error if a YAML Anti-Feature conflicts with a localized file.""" - os.chdir(self.testdir) - appid = 'a' - field = 'antifeatures' - locale = 'ko' - versionCode = 123 - yml = Path('metadata/a.yml') - antifeatures_ko = yml.parent / appid / locale / field.lower() - antifeatures_ko.mkdir(parents=True) - afname = 'Anti-🔥' - with (antifeatures_ko / ('%d_%s.txt' % (versionCode, afname))).open('w') as fp: - fp.write('SOMETHING ELSE') - with self.assertRaises(MetaDataException): - metadata._format_stringmap( - appid, field, {afname: {'uz': 'a', locale: 'b', 'zh': 'c'}}, versionCode - ) - - def test_app_to_yaml_one_category(self): - """Categories does not get simplified to string when outputting YAML.""" - self.assertEqual( - metadata._app_to_yaml({'Categories': ['one']}), - {'Categories': ['one']}, - ) - - def test_app_to_yaml_categories(self): - """Sort case-insensitive before outputting YAML.""" - self.assertEqual( - metadata._app_to_yaml({'Categories': ['c', 'a', 'B']}), - {'Categories': ['a', 'B', 'c']}, - ) - - def test_builds_to_yaml_gradle_yes(self): - app = {'Builds': [{'versionCode': 0, 'gradle': ['yes']}]} - self.assertEqual( - metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['yes']}] - ) - - def test_builds_to_yaml_gradle_off(self): - app = {'Builds': [{'versionCode': 0, 'gradle': ['off']}]} - self.assertEqual( - metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['off']}] - ) - - def test_builds_to_yaml_gradle_true(self): - app = {'Builds': [{'versionCode': 0, 'gradle': ['true']}]} - self.assertEqual( - metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['true']}] - ) - - def test_builds_to_yaml_gradle_false(self): - app = {'Builds': [{'versionCode': 0, 'gradle': ['false']}]} - self.assertEqual( - metadata._builds_to_yaml(app), [{'versionCode': 0, 'gradle': ['false']}] - ) - - def test_builds_to_yaml_stripped(self): - self.assertEqual( - metadata._builds_to_yaml( - { - 'Builds': [ - metadata.Build({'versionCode': 0, 'rm': [None], 'init': ['']}) - ] - } - ), - [{'versionCode': 0}], - ) - - def test_builds_to_yaml(self): - """Include one of each flag type with a valid value.""" - app = { - 'Builds': [ - metadata.Build( - { - 'versionCode': 0, - 'gradle': ['free'], - 'rm': ['0', '2'], - 'submodules': True, - 'timeout': 0, - 'init': ['false', 'two'], - } - ) - ] - } - # check that metadata.Build() inited flag values - self.assertEqual(app['Builds'][0]['scanignore'], list()) - # then unchanged values should be removed by _builds_to_yaml - self.assertEqual( - metadata._builds_to_yaml(app), - [ - { - 'versionCode': 0, - 'gradle': ['free'], - 'rm': ['0', '2'], - 'submodules': True, - 'timeout': 0, - 'init': ['false', 'two'], - } - ], - ) - - -class PostMetadataParseTest(unittest.TestCase): - """Test the functions that post process the YAML input. - - The following series of "post_metadata_parse" tests map out the - current state of automatic type conversion in the YAML post - processing. They are not necessary a statement of how things - should be, but more to surface the details of it functions. 
- - """ - - def setUp(self): - fdroidserver.metadata.warnings_action = 'error' - - def _post_metadata_parse_app_int(self, from_yaml, expected): - app = {'ArchivePolicy': from_yaml} - metadata.post_parse_yaml_metadata(app) - return {'ArchivePolicy': expected}, app - - def _post_metadata_parse_app_list(self, from_yaml, expected): - app = {'AllowedAPKSigningKeys': from_yaml} - metadata.post_parse_yaml_metadata(app) - return {'AllowedAPKSigningKeys': expected}, app - - def _post_metadata_parse_app_string(self, from_yaml, expected): - app = {'Repo': from_yaml} - metadata.post_parse_yaml_metadata(app) - return {'Repo': expected}, app - - def _post_metadata_parse_build_bool(self, from_yaml, expected): - tested_key = 'submodules' - app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} - post = copy.deepcopy(app) - metadata.post_parse_yaml_metadata(post) - del app['Builds'][0]['versionCode'] - del post['Builds'][0]['versionCode'] - for build in post['Builds']: - for k in list(build): - if k != tested_key: - del build[k] - app['Builds'][0][tested_key] = expected - return app, post - - def _post_metadata_parse_build_int(self, from_yaml, expected): - tested_key = 'versionCode' - app = {'Builds': [{'versionCode': from_yaml}]} - post = copy.deepcopy(app) - metadata.post_parse_yaml_metadata(post) - for build in post['Builds']: - for k in list(build): - if k != tested_key: - del build[k] - app['Builds'][0][tested_key] = expected - return app, post - - def _post_metadata_parse_build_list(self, from_yaml, expected): - tested_key = 'rm' - app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} - post = copy.deepcopy(app) - metadata.post_parse_yaml_metadata(post) - del app['Builds'][0]['versionCode'] - del post['Builds'][0]['versionCode'] - for build in post['Builds']: - for k in list(build): - if k != tested_key: - del build[k] - app['Builds'][0][tested_key] = expected - return app, post - - def _post_metadata_parse_build_script(self, from_yaml, expected): - tested_key = 'build' - app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} - post = copy.deepcopy(app) - metadata.post_parse_yaml_metadata(post) - del app['Builds'][0]['versionCode'] - del post['Builds'][0]['versionCode'] - for build in post['Builds']: - for k in list(build): - if k != tested_key: - del build[k] - app['Builds'][0][tested_key] = expected - return app, post - - def _post_metadata_parse_build_string(self, from_yaml, expected): - tested_key = 'commit' - app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]} - post = copy.deepcopy(app) - metadata.post_parse_yaml_metadata(post) - del app['Builds'][0]['versionCode'] - del post['Builds'][0]['versionCode'] - for build in post['Builds']: - for k in list(build): - if k != tested_key: - del build[k] - app['Builds'][0][tested_key] = expected - return app, post - - def test_post_metadata_parse_none(self): - """Run None aka YAML null or blank through the various field and flag types.""" - self.assertEqual(*self._post_metadata_parse_app_int(None, None)) - self.assertEqual(*self._post_metadata_parse_app_list(None, None)) - self.assertEqual(*self._post_metadata_parse_app_string(None, None)) - self.assertEqual(*self._post_metadata_parse_build_bool(None, None)) - self.assertEqual(*self._post_metadata_parse_build_int(None, None)) - self.assertEqual(*self._post_metadata_parse_build_list(None, None)) - self.assertEqual(*self._post_metadata_parse_build_script(None, None)) - self.assertEqual(*self._post_metadata_parse_build_string(None, None)) - - def 
test_post_metadata_parse_int(self): - """Run the int 123456 through the various field and flag types.""" - self.assertEqual(*self._post_metadata_parse_app_int(123456, 123456)) - self.assertEqual(*self._post_metadata_parse_app_list(123456, ['123456'])) - self.assertEqual(*self._post_metadata_parse_app_string(123456, '123456')) - self.assertEqual(*self._post_metadata_parse_build_bool(123456, True)) - self.assertEqual(*self._post_metadata_parse_build_int(123456, 123456)) - self.assertEqual(*self._post_metadata_parse_build_list(123456, ['123456'])) - self.assertEqual(*self._post_metadata_parse_build_script(123456, ['123456'])) - self.assertEqual(*self._post_metadata_parse_build_string(123456, '123456')) - - def test_post_metadata_parse_sha256(self): - """Run a SHA-256 that YAML calls an int through the various types. - - The current f-droid.org signer set has SHA-256 values with a - maximum of two leading zeros, but this will handle more. - - """ - str_sha256 = '0000000000000498456908409534729834729834729834792837487293847926' - sha256 = yaml.load('a: ' + str_sha256)['a'] - self.assertEqual(*self._post_metadata_parse_app_int(sha256, int(str_sha256))) - self.assertEqual(*self._post_metadata_parse_app_list(sha256, [str_sha256])) - self.assertEqual(*self._post_metadata_parse_app_string(sha256, str_sha256)) - self.assertEqual(*self._post_metadata_parse_build_bool(sha256, True)) - self.assertEqual(*self._post_metadata_parse_build_int(sha256, sha256)) - self.assertEqual(*self._post_metadata_parse_build_list(sha256, [str_sha256])) - self.assertEqual(*self._post_metadata_parse_build_script(sha256, [str_sha256])) - self.assertEqual(*self._post_metadata_parse_build_string(sha256, str_sha256)) - - def test_post_metadata_parse_int_0(self): - """Run the int 0 through the various field and flag types.""" - self.assertEqual(*self._post_metadata_parse_app_int(0, 0)) - self.assertEqual(*self._post_metadata_parse_app_list(0, ['0'])) - self.assertEqual(*self._post_metadata_parse_app_string(0, '0')) - self.assertEqual(*self._post_metadata_parse_build_bool(0, False)) - self.assertEqual(*self._post_metadata_parse_build_int(0, 0)) - self.assertEqual(*self._post_metadata_parse_build_list(0, ['0'])) - self.assertEqual(*self._post_metadata_parse_build_script(0, ['0'])) - self.assertEqual(*self._post_metadata_parse_build_string(0, '0')) - - def test_post_metadata_parse_float_0_0(self): - """Run the float 0.0 through the various field and flag types.""" - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(0.0, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(0.0, ['0.0'])) - self.assertEqual(*self._post_metadata_parse_app_string(0.0, '0.0')) - self.assertEqual(*self._post_metadata_parse_build_bool(0.0, False)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(0.0, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(0.0, ['0.0'])) - self.assertEqual(*self._post_metadata_parse_build_script(0.0, ['0.0'])) - self.assertEqual(*self._post_metadata_parse_build_string(0.0, '0.0')) - - def test_post_metadata_parse_float_0_1(self): - """Run the float 0.1 through the various field and flag types.""" - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(0.1, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(0.1, ['0.1'])) - self.assertEqual(*self._post_metadata_parse_app_string(0.1, '0.1')) - self.assertEqual(*self._post_metadata_parse_build_bool(0.1, True)) - with 
self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(0.1, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(0.1, ['0.1'])) - self.assertEqual(*self._post_metadata_parse_build_script(0.1, ['0.1'])) - self.assertEqual(*self._post_metadata_parse_build_string(0.1, '0.1')) - - def test_post_metadata_parse_float_1_0(self): - """Run the float 1.0 through the various field and flag types.""" - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(1.0, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(1.0, ['1.0'])) - self.assertEqual(*self._post_metadata_parse_app_string(1.0, '1.0')) - self.assertEqual(*self._post_metadata_parse_build_bool(1.0, True)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(1.0, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(1.0, ['1.0'])) - self.assertEqual(*self._post_metadata_parse_build_script(1.0, ['1.0'])) - self.assertEqual(*self._post_metadata_parse_build_string(1.0, '1.0')) - - def test_post_metadata_parse_empty_list(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(list(), MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(list(), list())) - self.assertEqual(*self._post_metadata_parse_app_string(list(), list())) - self.assertEqual(*self._post_metadata_parse_build_bool(list(), False)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(list(), MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(list(), list())) - self.assertEqual(*self._post_metadata_parse_build_script(list(), list())) - self.assertEqual(*self._post_metadata_parse_build_string(list(), list())) - - def test_post_metadata_parse_set_of_1(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int({1}, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list({1}, ['1'])) - self.assertEqual(*self._post_metadata_parse_app_string({1}, '{1}')) - self.assertEqual(*self._post_metadata_parse_build_bool({1}, True)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int({1}, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list({1}, ['1'])) - self.assertEqual(*self._post_metadata_parse_build_script({1}, ['1'])) - self.assertEqual(*self._post_metadata_parse_build_string({1}, '{1}')) - - def test_post_metadata_parse_empty_dict(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(dict(), MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(dict(), dict())) - self.assertEqual(*self._post_metadata_parse_app_string(dict(), dict())) - self.assertEqual(*self._post_metadata_parse_build_bool(dict(), False)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(dict(), MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(dict(), dict())) - self.assertEqual(*self._post_metadata_parse_build_script(dict(), dict())) - self.assertEqual(*self._post_metadata_parse_build_string(dict(), dict())) - - def test_post_metadata_parse_list_int_string(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int([1, 'a'], MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list([1, 'a'], ['1', 'a'])) - self.assertEqual(*self._post_metadata_parse_app_string([1, 'a'], "[1, 'a']")) - 
self.assertEqual(*self._post_metadata_parse_build_bool([1, 'a'], True)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int([1, 'a'], MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list([1, 'a'], ['1', 'a'])) - self.assertEqual(*self._post_metadata_parse_build_script([1, 'a'], ['1', 'a'])) - self.assertEqual(*self._post_metadata_parse_build_string([1, 'a'], "[1, 'a']")) - - def test_post_metadata_parse_dict_int_string(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int({'k': 1}, MetaDataException) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_list({'k': 1}, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_string({'k': 1}, "{'k': 1}")) - self.assertEqual(*self._post_metadata_parse_build_bool({'k': 1}, True)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int({'k': 1}, MetaDataException) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_list({'k': 1}, MetaDataException) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_script({'k': 1}, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_string({'k': 1}, "{'k': 1}")) - - def test_post_metadata_parse_false(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(False, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(False, ['false'])) - self.assertEqual(*self._post_metadata_parse_app_string(False, 'false')) - self.assertEqual(*self._post_metadata_parse_build_bool(False, False)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(False, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(False, ['false'])) - self.assertEqual(*self._post_metadata_parse_build_script(False, ['false'])) - self.assertEqual(*self._post_metadata_parse_build_string(False, 'false')) - - def test_post_metadata_parse_true(self): - with self.assertRaises(MetaDataException): - self._post_metadata_parse_app_int(True, MetaDataException) - self.assertEqual(*self._post_metadata_parse_app_list(True, ['true'])) - self.assertEqual(*self._post_metadata_parse_app_string(True, 'true')) - self.assertEqual(*self._post_metadata_parse_build_bool(True, True)) - with self.assertRaises(MetaDataException): - self._post_metadata_parse_build_int(True, MetaDataException) - self.assertEqual(*self._post_metadata_parse_build_list(True, ['true'])) - self.assertEqual(*self._post_metadata_parse_build_script(True, ['true'])) - self.assertEqual(*self._post_metadata_parse_build_string(True, 'true')) diff --git a/tests/test_net.py b/tests/test_net.py deleted file mode 100755 index beacd9af..00000000 --- a/tests/test_net.py +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env python3 - -import os -import random -import socket -import tempfile -import threading -import time -import unittest -from pathlib import Path -from unittest.mock import MagicMock, patch - -import requests - -from fdroidserver import net - - -class RetryServer: - """A stupid simple HTTP server that can fail to connect. - - Proxy settings via environment variables can interfere with this - test. The requests library will automatically pick up proxy - settings from environment variables. Proxy settings can force the - local connection over the proxy, which might not support that, - then this fails with an error like 405 or others. 
- - """ - - def __init__(self, port=None, failures=3): - self.port = port - if self.port is None: - self.port = random.randint(1024, 65535) # nosec B311 - self.failures = failures - self.stop_event = threading.Event() - threading.Thread(target=self.run_fake_server).start() - - def stop(self): - self.stop_event.set() - - def run_fake_server(self): - addr = ('localhost', self.port) - # localhost might not be a valid name for all families, use the first available - family = socket.getaddrinfo(addr[0], addr[1], type=socket.SOCK_STREAM)[0][0] - server_sock = socket.create_server(addr, family=family) - server_sock.listen(5) - server_sock.settimeout(5) - time.sleep(0.001) # wait for it to start - - while not self.stop_event.is_set(): - self.failures -= 1 - conn = None - try: - conn, address = server_sock.accept() - conn.settimeout(5) - except TimeoutError: - break - if self.failures > 0: - conn.close() - continue - conn.recv(8192) # request ignored - self.reply = b"""HTTP/1.1 200 OK - Date: Mon, 26 Feb 2024 09:00:14 GMT - Connection: close - Content-Type: text/html - - Hello World! - """ - self.reply = self.reply.replace(b' ', b'') # dedent - conn.sendall(self.reply) - conn.shutdown(socket.SHUT_RDWR) - conn.close() - - self.stop_event.wait(timeout=1) - server_sock.shutdown(socket.SHUT_RDWR) - server_sock.close() - - -class NetTest(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.TemporaryDirectory() - os.chdir(self.tempdir.name) - Path('tmp').mkdir() - - def tearDown(self): - self.tempdir.cleanup() - - @patch('requests.get') - def test_download_file_url_parsing(self, requests_get): - # pylint: disable=unused-argument - def _get(url, stream, allow_redirects, headers, timeout): - return MagicMock() - - requests_get.side_effect = _get - f = net.download_file('https://f-droid.org/repo/entry.jar', retries=0) - requests_get.assert_called() - self.assertTrue(os.path.exists(f)) - self.assertEqual('tmp/entry.jar', f) - - f = net.download_file( - 'https://d-05.example.com/custom/com.downloader.aegis-3175421.apk?_fn=QVBLUHVyZV92My4xNy41NF9hcGtwdXJlLmNvbS5hcGs&_p=Y29tLmFwa3B1cmUuYWVnb24&am=6avvTpfJ1dMl9-K6JYKzQw&arg=downloader%3A%2F%2Fcampaign%2F%3Futm_medium%3Ddownloader%26utm_source%3Daegis&at=1652080635&k=1f6e58465df3a441665e585719ab0b13627a117f&r=https%3A%2F%2Fdownloader.com%2Fdownloader-app.html%3Ficn%3Daegis%26ici%3Dimage_qr&uu=http%3A%2F%2F172.16.82.1%2Fcustom%2Fcom.downloader.aegis-3175421.apk%3Fk%3D3fb9c4ae0be578206f6a1c330736fac1627a117f', - retries=0, - ) - self.assertTrue(requests_get.called) - self.assertTrue(os.path.exists(f)) - self.assertEqual('tmp/com.downloader.aegis-3175421.apk', f) - - @patch.dict(os.environ, clear=True) - def test_download_file_retries(self): - server = RetryServer() - f = net.download_file('http://localhost:%d/f.txt' % server.port) - # strip the HTTP headers and compare the reply - self.assertEqual(server.reply.split(b'\n\n')[1], Path(f).read_bytes()) - server.stop() - - @patch.dict(os.environ, clear=True) - def test_download_file_retries_not_forever(self): - """The retry logic should eventually exit with an error.""" - server = RetryServer(failures=5) - with self.assertRaises(requests.exceptions.ConnectionError): - net.download_file('http://localhost:%d/f.txt' % server.port) - server.stop() - - @unittest.skipIf(os.getenv('CI'), 'FIXME this fails mysteriously only in GitLab CI') - @patch.dict(os.environ, clear=True) - def test_download_using_mirrors_retries(self): - server = RetryServer() - f = net.download_using_mirrors( - [ - 'https://fake.com/f.txt', 
# 404 or 301 Redirect - 'https://httpbin.org/status/403', - 'https://httpbin.org/status/500', - 'http://localhost:1/f.txt', # ConnectionError - 'http://localhost:%d/should-succeed' % server.port, - ], - ) - # strip the HTTP headers and compare the reply - self.assertEqual(server.reply.split(b'\n\n')[1], Path(f).read_bytes()) - server.stop() - - @patch.dict(os.environ, clear=True) - def test_download_using_mirrors_retries_not_forever(self): - """The retry logic should eventually exit with an error.""" - server = RetryServer(failures=5) - with self.assertRaises(requests.exceptions.ConnectionError): - net.download_using_mirrors(['http://localhost:%d/' % server.port]) - server.stop() diff --git a/tests/test_nightly.py b/tests/test_nightly.py deleted file mode 100755 index fb1614b7..00000000 --- a/tests/test_nightly.py +++ /dev/null @@ -1,372 +0,0 @@ -#!/usr/bin/env python3 - -import os -import platform -import shutil -import subprocess -import tempfile -import time -import unittest -from pathlib import Path -from unittest.mock import patch - -import requests -import yaml - -from fdroidserver import common, exception, index, nightly - -DEBUG_KEYSTORE = '/u3+7QAAAAIAAAABAAAAAQAPYW5kcm9pZGRlYnVna2V5AAABNYhAuskAAAK8MIICuDAOBgorBgEEASoCEQEBBQAEggKkqRnFlhidQmVff83bsAeewXPIsF0jiymzJnvrnUAQtCK0MV9uZonu37Mrj/qKLn56mf6QcvEoKvpCstZxzftgYYpAHWMVLM+hy2Z707QZEHlY7Ukppt8DItj+dXkeqGt7f8KzOb2AQwDbt9lm1fJb+MefLowTaubtvrLMcKIne43CbCu2D8HyN7RPWpEkVetA2Qgr5W4sa3tIUT80afqo9jzwJjKCspuxY9A1M8EIM3/kvyLo2B9r0cuWwRjYZXJ6gmTYI2ARNz0KQnCZUok14NDg+mZTb1B7AzRfb0lfjbA6grbzuAL+WaEpO8/LgGfuOh7QBZBT498TElOaFfQ9toQWA79wAmrQCm4OoFukpPIy2m/l6VjJSmlK5Q+CMOl/Au7OG1sUUCTvPaIr0XKnsiwDJ7a71n9garnPWHkvuWapSRCzCNgaUoGQjB+fTMJFFrwT8P1aLfM6onc3KNrDStoQZuYe5ngCLlNS56bENkVGvJBfdkboxtHZjqDXXON9jWGSOI527J3o2D5sjSVyx3T9XPrsL4TA/nBtdU+c/+M6aoASZR2VymzAKdMrGfj9kE5GXp8vv2vkJj9+OJ4Jm5yeczocc/Idtojjb1yg+sq1yY8kAQxgezpY1rpgi2jF3tSN01c23DNvAaSJLJX2ZuH8sD40ACc80Y1Qp1nUTdpwBZUeaeNruBwx4PHU8GnC71FwtiUpwNs0OoSl0pgDUJ3ODC5bs8B5QmW1wu1eg7I4mMSmCsNGW6VN3sFcu+WEqnmTxPoZombdFZKxsr2oq359Nn4bJ6Uc9PBz/sXsns7Zx1vND/oK/Jv5Y269UVAMeKX/eGpfnxzagW3tqGbOu12C2p9Azo5VxiU2fG/tmk2PjaG5hV/ywReco7I6C1p8OWM2fwAAAAEABVguNTA5AAAB6TCCAeUwggFOoAMCAQICBE89gTUwDQYJKoZIhvcNAQEFBQAwNzELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0FuZHJvaWQxFjAUBgNVBAMTDUFuZHJvaWQgRGVidWcwHhcNMTIwMjE2MjIyMDM3WhcNNDIwMjA4MjIyMDM3WjA3MQswCQYDVQQGEwJVUzEQMA4GA1UEChMHQW5kcm9pZDEWMBQGA1UEAxMNQW5kcm9pZCBEZWJ1ZzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA3AKU7S7JXhUjEwxWP1/LPHXieh61SaA/+xbpqsPA+yjGz1sAcGAyuG6bjNAVm56pq7nkjJzicX7Wi83nUBo58DEC/quxOLdy0C4PEOSAeTnTT1RJIwMDvOgiL1GFCErvQ7gCH6zuAID/JRFbN6nIkhDjs2DYnSBl7aJJf8wCLc0CAwEAATANBgkqhkiG9w0BAQUFAAOBgQAoq/TJffA0l+ZGf89xndmHdxrO6qi+TzSlByvLZ4eFfCovTh1iO+Edrd5V1yXGLxyyvdsadMAFZT8SaxMrP5xxhJ0nra0APWYLpA96M//auMhQBWPgqPntwgvEZuEH7f0kdItjBJ39yijbG8xfgwid6XqNUo0TDDkp/wNWKpJ9tJe+2PrGw1NAvrgSydoH2j8DI1Eq' -DEBUG_KEYSTORE_KEY_FILE_NAME = ( - 'debug_keystore_QW+xRCJDGHXyyFtgCW8QRajj+6uYmsLwGWpCfYqYQ5M_id_rsa' -) - -AOSP_TESTKEY_DEBUG_KEYSTORE = 
'/u3+7QAAAAIAAAABAAAAAQAPYW5kcm9pZGRlYnVna2V5AAABejjuIU0AAAUBMIIE/TAOBgorBgEEASoCEQEBBQAEggTpvqhdBtq9D3jRUZGnhKLbFH1LMtCKqwGg25ETAEhvK1GVRNuWAHAUUedCnarjgeUy/zx9OsHuZq18KjUI115kWq/jxkf00fIg7wrOmXoyJf5Dbc7NGKjU64rRmppQEkJ417Lq4Uola9EBJ/WweEu6UTjTn5HcNl4mVloWKMBKNPkVfhZhAkXUyjiZ9rCVHMjLOVKG5vyTWZLwXpYR00Xz6VyzSunTyDza5oUOT/Fh7Gw74V7iNHANydkBHmH+UJ100p0vNPRFvt/3ABfMjkNbRXKNERnyN7NeBmCAOceuXjme/n0XLUidP9/NYk1yAmRJgUnauKD6UPSZYaUPuNSSdf4dD5fCQ7OVDq95e7vmqRDfrKUoWmtpndN7hbVl+OHVZXk2ngvXbvoS+F7ShsEfbq7+c37dnOcVrIlrY+wlOWX2jN42T+AkGt3AfA8zdIPdNgLGk64Op+aP4vGyLQqbuUEzOTNG9uExjGlamogPKFf93GAF83xv7AChYLR/9H+B1E955FL58bRuYOXVWJfLRsO/jyjXsilhBggo3VD1omRuOp98AkKP+P9JXCTswK7IZgvbMK3GB6QIzD20vlT0eK6JGLeWE7cXVn6oT26zvnqAjJ94PjS+YckMOExhqwCivPp1VaX6JzpQ1wr52OsGDUvconcjYrBEHBiY+UnMUk0Wj4mhZlJd1lpybZcWZ3vhTIlM0uMt4udl7t+zsgZ6BW97/pkGaa+QoxeTvgNlHGYyDYp8hveM3bCLXTHULw8mXUHxOJawq/J3E6vZ5/h2nzfmQmWtZtBOGWCkq+gKusTFUsHghjvHsPcQ2+EVfMcePBb/FKvtzSgH59C3iNOHE29l3ceSqccgxlxfStzbf+QkP7gxGVGZ8rLnCn3s8WzkGHZE4LtS0Zm3Y+hV5igrClk940YZP1hmilt2y7adPE4gCyQjb44JXgc3/NxlkZJcmeZTfAGxMXT8HG6Use/Kti114phsF7GDrqk1kPbB51Hr3xF1NAJUWP3csg3jgTS3E6jgD5XjPPG9BEDE2MwnBlUUMe3TC8TIWkK+AlwjlsDr5B9nqy2Fevv62+k5Adplw+fsQ8VzZREZF+MllWO3vtkD6srdx9h4vPD3dp5urFCFXNRaoD3SMDk27z3EVCQZ4bPL5PsVpB/ZBotLGkUZ0yi+5oC+u7ByP1ihMXMsRgvXbQpyOonEqDy84EZiIPWbyzGd0tEAXLz3mMh1x/IqZ1wxyDT/vkxhNCFqlBNlRW6GbMN2cng4A9Cigj9eNu9ptL1tdgFTxwndjoNRQMJ0NAc6WnsQ1UeIu8nMsa8/kLDtnVFLVmPQv2ZBUM4mxLrwC1mxOiQrWBW2XJ1OIheimSkLHfQOef1mIH3Z0cBuLBKGkRYGaXiZ6RX7po+ch0WFGjBef3e3uczl1mT5WGKdIG4x1+aRAtJHL+9K7Z6wzG0ygoamdiX2Fd0xBrWjTU72DzYbceqc+uHrbcLKDa5w0ENhyYK0+XEzG5fXHjFgmawY1D7xZQOJZO3jxStcv+xzoiTnNSrIxbxog/0Fez/WhMM9H6gV4eeDjMWEg79cJLugCBNwqmp3Yoe5EDU2TxQlLT53tye3Aji3FbocuDWjLI3Jc5VDxd7lrbzeIbFzSNpoFG8DSgjSiq41WJVeuzXxmdl7HM4zQpGRAAAAAQAFWC41MDkAAASsMIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAeFw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waMqOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzURNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97szI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkwHQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZAFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZaJ6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4YLCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe+ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwrsBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE21IJawTAEXnf52TqT7diFUlWRSnQ==' -AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME = ( - 'debug_keystore_k47SVrA85+oMZAexHc62PkgvIgO8TJBYN00U82xSlxc_id_rsa' -) - -basedir = 
Path(__file__).parent -testroot = basedir.with_name('.testfiles') - - -class Options: - allow_disabled_algorithms = False - clean = False - delete_unknown = False - nosign = False - pretty = True - rename_apks = False - verbose = False - - -@unittest.skipUnless( - platform.system() == 'Linux', - 'skipping test_nightly, it currently only works GNU/Linux', -) -class NightlyTest(unittest.TestCase): - path = os.environ['PATH'] - - def setUp(self): - common.config = None - nightly.config = None - testroot.mkdir(exist_ok=True) - os.chdir(basedir) - self.tempdir = tempfile.TemporaryDirectory( - str(time.time()), self._testMethodName + '_', testroot - ) - self.testdir = Path(self.tempdir.name) - self.home = self.testdir / 'home' - self.home.mkdir() - self.dot_android = self.home / '.android' - nightly.KEYSTORE_FILE = str(self.dot_android / 'debug.keystore') - - def tearDown(self): - self.tempdir.cleanup() - try: - os.rmdir(testroot) - except OSError: # other test modules might have left stuff around - pass - - def _copy_test_debug_keystore(self): - self.dot_android.mkdir() - shutil.copy( - basedir / 'aosp_testkey_debug.keystore', - self.dot_android / 'debug.keystore', - ) - - def _copy_debug_apk(self): - outputdir = Path('app/build/output/apk/debug') - outputdir.mkdir(parents=True) - shutil.copy(basedir / 'urzip.apk', outputdir / 'urzip-debug.apk') - - def test_get_repo_base_url(self): - for clone_url, repo_git_base, result in [ - ( - 'https://github.com/onionshare/onionshare-android-nightly', - 'onionshare/onionshare-android-nightly', - 'https://raw.githubusercontent.com/onionshare/onionshare-android-nightly/master/fdroid', - ), - ( - 'https://gitlab.com/fdroid/fdroidclient-nightly', - 'fdroid/fdroidclient-nightly', - 'https://gitlab.com/fdroid/fdroidclient-nightly/-/raw/master/fdroid', - ), - ]: - url = nightly.get_repo_base_url(clone_url, repo_git_base) - self.assertEqual(result, url) - r = requests.head(os.path.join(url, 'repo/index-v1.jar'), timeout=300) - # gitlab.com often returns 403 Forbidden from their cloudflare restrictions - self.assertTrue(r.status_code in (200, 403), 'should not be a redirect') - - def test_get_keystore_secret_var(self): - self.assertEqual( - AOSP_TESTKEY_DEBUG_KEYSTORE, - nightly._get_keystore_secret_var(basedir / 'aosp_testkey_debug.keystore'), - ) - - @patch.dict(os.environ, clear=True) - def test_ssh_key_from_debug_keystore(self): - os.environ['HOME'] = str(self.home) - os.environ['PATH'] = self.path - ssh_private_key_file = nightly._ssh_key_from_debug_keystore( - basedir / 'aosp_testkey_debug.keystore' - ) - with open(ssh_private_key_file) as fp: - self.assertIn('-----BEGIN RSA PRIVATE KEY-----', fp.read()) - with open(ssh_private_key_file + '.pub') as fp: - self.assertEqual(fp.read(8), 'ssh-rsa ') - shutil.rmtree(os.path.dirname(ssh_private_key_file)) - - @patch.dict(os.environ, clear=True) - @patch('sys.argv', ['fdroid nightly', '--verbose']) - def test_main_empty_dot_android(self): - """Test that it exits with an error when ~/.android is empty""" - os.environ['HOME'] = str(self.home) - os.environ['PATH'] = self.path - with self.assertRaises(SystemExit) as cm: - nightly.main() - self.assertEqual(cm.exception.code, 1) - - @patch.dict(os.environ, clear=True) - @patch('sys.argv', ['fdroid nightly', '--verbose']) - def test_main_empty_dot_ssh(self): - """Test that it does not create ~/.ssh if it does not exist - - Careful! If the test env is wrong, it can mess up the local - SSH setup. 
- - """ - dot_ssh = self.home / '.ssh' - self._copy_test_debug_keystore() - os.environ['HOME'] = str(self.home) - os.environ['PATH'] = self.path - self.assertFalse(dot_ssh.exists()) - nightly.main() - self.assertFalse(dot_ssh.exists()) - - @patch.dict(os.environ, clear=True) - @patch('sys.argv', ['fdroid nightly', '--verbose']) - def test_main_on_user_machine(self): - """Test that `fdroid nightly` runs on the user's machine - - Careful! If the test env is wrong, it can mess up the local - SSH setup. - - """ - dot_ssh = self.home / '.ssh' - dot_ssh.mkdir() - self._copy_test_debug_keystore() - os.environ['HOME'] = str(self.home) - os.environ['PATH'] = self.path - nightly.main() - self.assertTrue((dot_ssh / AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME).exists()) - self.assertTrue( - (dot_ssh / (AOSP_TESTKEY_DEBUG_KEYSTORE_KEY_FILE_NAME + '.pub')).exists() - ) - - @patch('fdroidserver.common.vcs_git.git', lambda args, e: common.PopenResult(1)) - @patch('sys.argv', ['fdroid nightly', '--verbose']) - def test_private_or_non_existent_git_mirror(self): - """Test that this exits with an error when the git mirror repo won't work - - Careful! If the test environment is setup wrong, it can mess - up local files in ~/.ssh or ~/.android. - - """ - os.chdir(self.testdir) - with patch.dict( - os.environ, - { - 'CI': 'true', - 'CI_PROJECT_PATH': 'thisshouldneverexist/orthistoo', - 'CI_PROJECT_URL': 'https://gitlab.com/thisshouldneverexist/orthistoo', - 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, - 'GITLAB_USER_NAME': 'username', - 'GITLAB_USER_EMAIL': 'username@example.com', - 'HOME': str(self.testdir), - 'PATH': os.getenv('PATH'), - }, - clear=True, - ): - with self.assertRaises(exception.VCSException): - nightly.main() - - def test_clone_git_repo(self): - os.chdir(self.testdir) - common.options = Options - d = 'fakeappid' - nightly.clone_git_repo('https://gitlab.com/fdroid/ci-test-tiny-repo.git', d) - self.assertTrue(os.path.isdir(Path(d) / '.git')) - - def test_clone_git_repo_fails_on_gitlab_password_prompt(self): - os.chdir(self.testdir) - common.options = Options - d = 'shouldnotbecreated' - with self.assertRaises(exception.VCSException): - nightly.clone_git_repo(f'https://gitlab.com/{d}/{d}.git', d) - self.assertFalse(os.path.isdir(Path(d))) - - def test_clone_git_repo_fails_on_github_password_prompt(self): - os.chdir(self.testdir) - common.options = Options - d = 'shouldnotbecreated' - with self.assertRaises(exception.VCSException): - nightly.clone_git_repo(f'https://github.com/{d}/{d}.git', d) - self.assertFalse(os.path.isdir(Path(d))) - - def _put_fdroid_in_args(self, args): - """Find fdroid command that belongs to this source code tree""" - fdroid = os.path.join(basedir.parent, 'fdroid') - if not os.path.exists(fdroid): - fdroid = os.getenv('fdroid') - return [fdroid] + args[1:] - - @patch('sys.argv', ['fdroid nightly', '--verbose']) - @patch('platform.node', lambda: 'example.com') - def test_github_actions(self): - """Careful! 
If the test env is bad, it'll mess up the local SSH setup - - https://docs.github.com/en/actions/learn-github-actions/environment-variables - - """ - - called = [] - orig_check_call = subprocess.check_call - os.chdir(self.testdir) - os.makedirs('fdroid/git-mirror/fdroid/repo') # fake this to avoid cloning - self._copy_test_debug_keystore() - self._copy_debug_apk() - - def _subprocess_check_call(args, cwd=None, env=None): - if os.path.basename(args[0]) in ('keytool', 'openssl'): - orig_check_call(args, cwd=cwd, env=env) - elif args[:2] == ['fdroid', 'update']: - orig_check_call(self._put_fdroid_in_args(args), cwd=cwd, env=env) - else: - called.append(args[:2]) - return - - with patch.dict( - os.environ, - { - 'CI': 'true', - 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, - 'GITHUB_ACTIONS': 'true', - 'GITHUB_ACTOR': 'username', - 'GITHUB_REPOSITORY': 'f-droid/test', - 'GITHUB_SERVER_URL': 'https://github.com', - 'HOME': str(self.testdir), - 'PATH': os.getenv('PATH'), - 'fdroid': os.getenv('fdroid', ''), - }, - clear=True, - ): - self.assertTrue(testroot == Path.home().parent) - with patch('subprocess.check_call', _subprocess_check_call): - try: - nightly.main() - except exception.BuildException as e: - if "apksigner not found" in e.value: - self.skipTest("skipping, apksigner not found due to fake $HOME") - else: - raise - - self.assertEqual(called, [['ssh', '-Tvi'], ['fdroid', 'deploy']]) - git_url = 'git@github.com:f-droid/test-nightly' - mirror_url = index.get_mirror_service_urls({"url": git_url})[0] - expected = { - 'archive_description': 'Old nightly builds that have been archived.', - 'archive_name': 'f-droid/test-nightly archive', - 'archive_older': 20, - 'archive_url': mirror_url + '/archive', - 'keydname': 'CN=Android Debug,O=Android,C=US', - 'keypass': 'android', - 'keystore': nightly.KEYSTORE_FILE, - 'keystorepass': 'android', - 'make_current_version_link': False, - 'repo_description': 'Nightly builds from username@example.com', - 'repo_keyalias': 'androiddebugkey', - 'repo_name': 'f-droid/test-nightly', - 'repo_url': mirror_url + '/repo', - 'servergitmirrors': [{"url": git_url}], - } - with open(common.CONFIG_FILE) as fp: - config = yaml.safe_load(fp) - # .ssh is random tmpdir set in nightly.py, so test basename only - self.assertEqual( - os.path.basename(config['identity_file']), - DEBUG_KEYSTORE_KEY_FILE_NAME, - ) - del config['identity_file'] - self.assertEqual(expected, config) - - @patch('sys.argv', ['fdroid nightly', '--verbose']) - def test_gitlab_ci(self): - """Careful! 
If the test env is bad, it can mess up the local SSH setup""" - called = [] - orig_check_call = subprocess.check_call - os.chdir(self.testdir) - os.makedirs('fdroid/git-mirror/fdroid/repo') # fake this to avoid cloning - self._copy_test_debug_keystore() - self._copy_debug_apk() - - def _subprocess_check_call(args, cwd=None, env=None): - if os.path.basename(args[0]) in ('keytool', 'openssl'): - orig_check_call(args, cwd=cwd, env=env) - elif args[:2] == ['fdroid', 'update']: - orig_check_call(self._put_fdroid_in_args(args), cwd=cwd, env=env) - else: - called.append(args[:2]) - return - - with patch.dict( - os.environ, - { - 'CI': 'true', - 'CI_PROJECT_PATH': 'fdroid/test', - 'CI_PROJECT_URL': 'https://gitlab.com/fdroid/test', - 'DEBUG_KEYSTORE': DEBUG_KEYSTORE, - 'GITLAB_USER_NAME': 'username', - 'GITLAB_USER_EMAIL': 'username@example.com', - 'HOME': str(self.testdir), - 'PATH': os.getenv('PATH'), - 'fdroid': os.getenv('fdroid', ''), - }, - clear=True, - ): - self.assertTrue(testroot == Path.home().parent) - with patch('subprocess.check_call', _subprocess_check_call): - try: - nightly.main() - except exception.BuildException as e: - if "apksigner not found" in e.value: - self.skipTest("skipping, apksigner not found due to fake $HOME") - else: - raise - - self.assertEqual(called, [['ssh', '-Tvi'], ['fdroid', 'deploy']]) - expected = { - 'archive_description': 'Old nightly builds that have been archived.', - 'archive_name': 'fdroid/test-nightly archive', - 'archive_older': 20, - 'archive_url': 'https://gitlab.com/fdroid/test-nightly/-/raw/master/fdroid/archive', - 'keydname': 'CN=Android Debug,O=Android,C=US', - 'keypass': 'android', - 'keystore': nightly.KEYSTORE_FILE, - 'keystorepass': 'android', - 'make_current_version_link': False, - 'repo_description': 'Nightly builds from username@example.com', - 'repo_keyalias': 'androiddebugkey', - 'repo_name': 'fdroid/test-nightly', - 'repo_url': 'https://gitlab.com/fdroid/test-nightly/-/raw/master/fdroid/repo', - 'servergitmirrors': [{"url": 'git@gitlab.com:fdroid/test-nightly'}], - } - with open(common.CONFIG_FILE) as fp: - config = yaml.safe_load(fp) - # .ssh is random tmpdir set in nightly.py, so test basename only - self.assertEqual( - os.path.basename(config['identity_file']), - DEBUG_KEYSTORE_KEY_FILE_NAME, - ) - del config['identity_file'] - self.assertEqual(expected, config) diff --git a/tests/test_publish.py b/tests/test_publish.py deleted file mode 100755 index 82c670d7..00000000 --- a/tests/test_publish.py +++ /dev/null @@ -1,413 +0,0 @@ -#!/usr/bin/env python3 - -# -# command which created the keystore used in this test case: -# -# $ for ALIAS in repokey a163ec9b d2d51ff2 dc3b169e 78688a0f; \ -# do keytool -genkey -keystore dummy-keystore.jks \ -# -alias $ALIAS -keyalg 'RSA' -keysize '2048' \ -# -validity '10000' -storepass 123456 -storetype jks \ -# -keypass 123456 -dname 'CN=test, OU=F-Droid'; done -# - -import json -import os -import pathlib -import shutil -import sys -import tempfile -import unittest -from unittest import mock - -from fdroidserver import common, metadata, publish, signatures -from fdroidserver._yaml import yaml -from fdroidserver.exception import FDroidException - -from .shared_test_code import VerboseFalseOptions, mkdtemp - -basedir = pathlib.Path(__file__).parent - - -class PublishTest(unittest.TestCase): - '''fdroidserver/publish.py''' - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - - def tearDown(self): - self._td.cleanup() - os.chdir(basedir) - - def 
test_key_alias(self): - publish.config = {} - self.assertEqual('a163ec9b', publish.key_alias('com.example.app')) - self.assertEqual('d2d51ff2', publish.key_alias('com.example.anotherapp')) - self.assertEqual('dc3b169e', publish.key_alias('org.test.testy')) - self.assertEqual('78688a0f', publish.key_alias('org.org.org')) - - self.assertEqual('ee8807d2', publish.key_alias("org.schabi.newpipe")) - self.assertEqual('b53c7e11', publish.key_alias("de.grobox.liberario")) - - publish.config = { - 'keyaliases': {'yep.app': '@org.org.org', 'com.example.app': '1a2b3c4d'} - } - self.assertEqual('78688a0f', publish.key_alias('yep.app')) - self.assertEqual('1a2b3c4d', publish.key_alias('com.example.app')) - - def test_read_fingerprints_from_keystore(self): - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystorepass'] = '123456' - publish.config['keypass'] = '123456' - publish.config['keystore'] = 'dummy-keystore.jks' - - expected = { - '78688a0f': '277655a6235bc6b0ef2d824396c51ba947f5ebc738c293d887e7083ff338af82', - 'd2d51ff2': 'fa3f6a017541ee7fe797be084b1bcfbf92418a7589ef1f7fdeb46741b6d2e9c3', - 'dc3b169e': '6ae5355157a47ddcc3834a71f57f6fb5a8c2621c8e0dc739e9ddf59f865e497c', - 'a163ec9b': 'd34f678afbaa8f2fa6cc0edd6f0c2d1d2e2e9eb08bea521b24c740806016bff4', - 'repokey': 'c58460800c7b250a619c30c13b07b7359a43e5af71a4352d86c58ae18c9f6d41', - } - result = publish.read_fingerprints_from_keystore() - self.maxDiff = None - self.assertEqual(expected, result) - - def test_store_and_load_signer_fingerprints(self): - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystorepass'] = '123456' - publish.config['keypass'] = '123456' - publish.config['keystore'] = os.path.join(basedir, 'dummy-keystore.jks') - publish.config['repo_keyalias'] = 'repokey' - - appids = [ - 'com.example.app', - 'net.unavailable', - 'org.test.testy', - 'com.example.anotherapp', - 'org.org.org', - ] - - os.chdir(self.testdir) - common.write_config_file('') - - publish.store_publish_signer_fingerprints(appids, indent=2) - - self.maxDiff = None - expected = { - "com.example.anotherapp": { - "signer": "fa3f6a017541ee7fe797be084b1bcfbf92418a7589ef1f7fdeb46741b6d2e9c3" - }, - "com.example.app": { - "signer": "d34f678afbaa8f2fa6cc0edd6f0c2d1d2e2e9eb08bea521b24c740806016bff4" - }, - "org.org.org": { - "signer": "277655a6235bc6b0ef2d824396c51ba947f5ebc738c293d887e7083ff338af82" - }, - "org.test.testy": { - "signer": "6ae5355157a47ddcc3834a71f57f6fb5a8c2621c8e0dc739e9ddf59f865e497c" - }, - } - self.assertEqual(expected, common.load_publish_signer_fingerprints()) - - with open(common.CONFIG_FILE) as fp: - config = yaml.load(fp) - self.assertEqual( - 'c58460800c7b250a619c30c13b07b7359a43e5af71a4352d86c58ae18c9f6d41', - config['repo_key_sha256'], - ) - - def test_store_and_load_signer_fingerprints_with_missmatch(self): - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystorepass'] = '123456' - publish.config['keypass'] = '123456' - publish.config['keystore'] = os.path.join(basedir, 'dummy-keystore.jks') - publish.config['repo_keyalias'] = 'repokey' - publish.config['repo_key_sha256'] = 'bad bad bad bad bad bad bad bad bad bad' - - os.chdir(self.testdir) - publish.store_publish_signer_fingerprints({}, indent=2) - with self.assertRaises(FDroidException): - common.load_publish_signer_fingerprints() - - def test_reproducible_binaries_process(self): - 
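# --- Editor's illustrative sketch (not fdroidserver code) --------------------
# The alias values asserted in test_key_alias() above, and the deliberate
# collision between 'tof.cv.mpp' and 'j6mX276h' checked in
# test_check_for_key_collisions() further below, are consistent with deriving
# each per-app signing key alias from the first 8 hex digits of an MD5 digest
# of the application ID, with config['keyaliases'] overrides where a leading
# '@' points at another application ID.  Treat the hashing scheme as an
# assumption; only the override behaviour is pinned down by these tests.
import hashlib


def sketch_key_alias(appid, config=None):
    """Minimal sketch of an alias lookup with 'keyaliases' overrides."""
    keyaliases = (config or {}).get('keyaliases', {})
    if appid in keyaliases:
        alias = keyaliases[appid]
        if alias.startswith('@'):
            appid = alias[1:]  # '@other.app.id': reuse that app's derived alias
        else:
            return alias
    return hashlib.md5(appid.encode()).hexdigest()[:8]  # nosec B324
# -----------------------------------------------------------------------------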
common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystore'] = 'keystore.jks' - publish.config['repo_keyalias'] = 'sova' - publish.config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - publish.config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - shutil.copy('keystore.jks', self.testdir) - os.mkdir(os.path.join(self.testdir, 'repo')) - metadata_dir = os.path.join(self.testdir, 'metadata') - os.mkdir(metadata_dir) - shutil.copy(os.path.join('metadata', 'com.politedroid.yml'), metadata_dir) - with open(os.path.join(metadata_dir, 'com.politedroid.yml'), 'a') as fp: - fp.write('\nBinaries: https://placeholder/foo%v.apk\n') - os.mkdir(os.path.join(self.testdir, 'unsigned')) - shutil.copy( - 'repo/com.politedroid_6.apk', os.path.join(self.testdir, 'unsigned') - ) - os.mkdir(os.path.join(self.testdir, 'unsigned', 'binaries')) - shutil.copy( - 'repo/com.politedroid_6.apk', - os.path.join( - self.testdir, 'unsigned', 'binaries', 'com.politedroid_6.binary.apk' - ), - ) - - os.chdir(self.testdir) - with mock.patch.object(sys, 'argv', ['fdroid fakesubcommand']): - publish.main() - - def test_check_for_key_collisions(self): - from fdroidserver.metadata import App - - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - - randomappids = [ - "org.fdroid.fdroid", - "a.b.c", - "u.v.w.x.y.z", - "lpzpkgqwyevnmzvrlaazhgardbyiyoybyicpmifkyrxkobljoz", - "vuslsm.jlrevavz.qnbsenmizhur.lprwbjiujtu.ekiho", - "w.g.g.w.p.v.f.v.gvhyz", - "nlozuqer.ufiinmrbjqboogsjgmpfks.dywtpcpnyssjmqz", - ] - allapps = {} - for appid in randomappids: - allapps[appid] = App() - allaliases = publish.check_for_key_collisions(allapps) - self.assertEqual(len(randomappids), len(allaliases)) - - allapps = {'tof.cv.mpp': App(), 'j6mX276h': App()} - self.assertEqual(publish.key_alias('tof.cv.mpp'), publish.key_alias('j6mX276h')) - self.assertRaises(SystemExit, publish.check_for_key_collisions, allapps) - - def test_create_key_if_not_existing(self): - try: - import jks - import jks.util - except ImportError: - self.skipTest("pyjks not installed") - common.config = {} - common.fill_config_defaults(common.config) - publish.config = common.config - publish.config['keystorepass'] = '123456' - publish.config['keypass'] = '654321' - publish.config['keystore'] = "keystore.jks" - publish.config[ - 'keydname' - ] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' - os.chdir(self.testdir) - keystore = jks.KeyStore.new("jks", []) - keystore.save(publish.config['keystore'], publish.config['keystorepass']) - - self.assertTrue(publish.create_key_if_not_existing("newalias")) - # The second time we try that, a new key should not be created - self.assertFalse(publish.create_key_if_not_existing("newalias")) - self.assertTrue(publish.create_key_if_not_existing("anotheralias")) - - keystore = jks.KeyStore.load( - publish.config['keystore'], publish.config['keystorepass'] - ) - self.assertCountEqual(keystore.private_keys, ["newalias", "anotheralias"]) - for alias, pk in keystore.private_keys.items(): - self.assertFalse(pk.is_decrypted()) - pk.decrypt(publish.config['keypass']) - self.assertTrue(pk.is_decrypted()) - self.assertEqual(jks.util.RSA_ENCRYPTION_OID, pk.algorithm_oid) - - def test_status_update_json(self): - common.config = {} - publish.config = {} - with tempfile.TemporaryDirectory() as tmpdir: - os.chdir(tmpdir) - with mock.patch('sys.argv', ['fdroid publish', '']): - publish.status_update_json([], 
[]) - with open('repo/status/publish.json') as fp: - data = json.load(fp) - self.assertTrue('apksigner' in data) - - publish.config = { - 'apksigner': 'apksigner', - } - publish.status_update_json([], []) - with open('repo/status/publish.json') as fp: - data = json.load(fp) - self.assertEqual( - shutil.which(publish.config['apksigner']), data['apksigner'] - ) - - publish.config = {} - common.fill_config_defaults(publish.config) - publish.status_update_json([], []) - with open('repo/status/publish.json') as fp: - data = json.load(fp) - self.assertEqual(publish.config.get('apksigner'), data['apksigner']) - self.assertEqual(publish.config['jarsigner'], data['jarsigner']) - self.assertEqual(publish.config['keytool'], data['keytool']) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_sign_then_implant_signature(self): - os.chdir(self.testdir) - - common.options = VerboseFalseOptions - config = common.read_config() - if 'apksigner' not in config: - self.skipTest( - 'SKIPPING test_sign_then_implant_signature, apksigner not installed!' - ) - config['repo_keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - shutil.copy(basedir / 'keystore.jks', self.testdir) - config['keystore'] = 'keystore.jks' - config['keydname'] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' - publish.config = config - common.config = config - - app = metadata.App() - app.id = 'org.fdroid.ci' - versionCode = 1 - build = metadata.Build( - { - 'versionCode': versionCode, - 'versionName': '1.0', - } - ) - app.Builds = [build] - os.mkdir('metadata') - metadata.write_metadata(os.path.join('metadata', '%s.yml' % app.id), app) - - os.mkdir('unsigned') - testapk = basedir / 'no_targetsdk_minsdk1_unsigned.apk' - unsigned = os.path.join('unsigned', common.get_release_filename(app, build)) - signed = os.path.join('repo', common.get_release_filename(app, build)) - shutil.copy(testapk, unsigned) - - # sign the unsigned APK - self.assertTrue(os.path.exists(unsigned)) - self.assertFalse(os.path.exists(signed)) - with mock.patch( - 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] - ): - publish.main() - self.assertFalse(os.path.exists(unsigned)) - self.assertTrue(os.path.exists(signed)) - - with mock.patch('sys.argv', ['fdroid signatures', signed]): - signatures.main() - self.assertTrue( - os.path.exists( - os.path.join( - 'metadata', 'org.fdroid.ci', 'signatures', '1', 'MANIFEST.MF' - ) - ) - ) - os.remove(signed) - - # implant the signature into the unsigned APK - shutil.copy(testapk, unsigned) - self.assertTrue(os.path.exists(unsigned)) - self.assertFalse(os.path.exists(signed)) - with mock.patch( - 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] - ): - publish.main() - self.assertFalse(os.path.exists(unsigned)) - self.assertTrue(os.path.exists(signed)) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - def test_exit_on_error(self): - """Exits properly on errors, with and without --error-on-failed. - - `fdroid publish` runs on the signing server and does large - batches. In that case, it shouldn't exit after a single - failure since it should try to complete the whole batch. For - CI and other use cases, there is --error-on-failed to force it - to exit after a failure. 
- - """ - - class Options: - error_on_failed = True - verbose = False - - os.chdir(self.testdir) - - common.options = Options - config = common.read_config() - if 'apksigner' not in config: - self.skipTest('SKIPPING test_error_on_failed, apksigner not installed!') - config['repo_keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - shutil.copy(basedir / 'keystore.jks', self.testdir) - config['keystore'] = 'keystore.jks' - config['keydname'] = 'CN=Birdman, OU=Cell, O=Alcatraz, L=Alcatraz, S=CA, C=US' - publish.config = config - common.config = config - - app = metadata.App() - app.id = 'org.fdroid.ci' - versionCode = 1 - build = metadata.Build( - { - 'versionCode': versionCode, - 'versionName': '1.0', - } - ) - app.Builds = [build] - os.mkdir('metadata') - metadata.write_metadata(os.path.join('metadata', '%s.yml' % app.id), app) - - os.mkdir('unsigned') - testapk = basedir / 'no_targetsdk_minsdk1_unsigned.apk' - unsigned = os.path.join('unsigned', common.get_release_filename(app, build)) - signed = os.path.join('repo', common.get_release_filename(app, build)) - shutil.copy(testapk, unsigned) - - # sign the unsigned APK - self.assertTrue(os.path.exists(unsigned)) - self.assertFalse(os.path.exists(signed)) - with mock.patch( - 'sys.argv', ['fdroid publish', '%s:%d' % (app.id, versionCode)] - ): - publish.main() - self.assertFalse(os.path.exists(unsigned)) - self.assertTrue(os.path.exists(signed)) - - with mock.patch('sys.argv', ['fdroid signatures', signed]): - signatures.main() - mf = os.path.join('metadata', 'org.fdroid.ci', 'signatures', '1', 'MANIFEST.MF') - self.assertTrue(os.path.exists(mf)) - os.remove(signed) - - with open(mf, 'a') as fp: - fp.write('appended to break signature') - - # implant the signature into the unsigned APK - shutil.copy(testapk, unsigned) - self.assertTrue(os.path.exists(unsigned)) - self.assertFalse(os.path.exists(signed)) - apk_id = '%s:%d' % (app.id, versionCode) - - # by default, it should complete without exiting - with mock.patch('sys.argv', ['fdroid publish', apk_id]): - publish.main() - - # --error-on-failed should make it exit - with mock.patch('sys.argv', ['fdroid publish', '--error-on-failed', apk_id]): - with self.assertRaises(SystemExit) as e: - publish.main() - self.assertEqual(e.exception.code, 1) diff --git a/tests/test_rewritemeta.py b/tests/test_rewritemeta.py deleted file mode 100755 index 4dcdd03f..00000000 --- a/tests/test_rewritemeta.py +++ /dev/null @@ -1,257 +0,0 @@ -#!/usr/bin/env python3 - -import os -import tempfile -import textwrap -import unittest -from pathlib import Path -from unittest import mock - -from fdroidserver import metadata, rewritemeta - -from .shared_test_code import TmpCwd, mkdtemp - -basedir = Path(__file__).parent - - -class RewriteMetaTest(unittest.TestCase): - '''fdroidserver/publish.py''' - - def setUp(self): - os.chdir(basedir) - metadata.warnings_action = 'error' - self._td = mkdtemp() - self.testdir = self._td.name - - def tearDown(self): - self._td.cleanup() - - def test_remove_blank_flags_from_builds_com_politedroid_3(self): - """Unset fields in Builds: entries should be removed.""" - appid = 'com.politedroid' - app = metadata.read_metadata({appid: -1})[appid] - builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) - self.assertEqual( - builds[0], - { - 'versionName': '1.2', - 'versionCode': 3, - 'commit': '6a548e4b19', - 'target': 'android-10', - 'antifeatures': { - 'KnownVuln': {}, 
- 'NonFreeAssets': {}, - }, - }, - ) - - def test_remove_blank_flags_from_builds_com_politedroid_4(self): - """Unset fields in Builds: entries should be removed.""" - appid = 'com.politedroid' - app = metadata.read_metadata({appid: -1})[appid] - builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) - self.assertEqual( - builds[1], - { - 'versionName': '1.3', - 'versionCode': 4, - 'commit': 'ad865b57bf3ac59580f38485608a9b1dda4fa7dc', - 'target': 'android-15', - }, - ) - - def test_remove_blank_flags_from_builds_org_adaway_52(self): - """Unset fields in Builds: entries should be removed.""" - appid = 'org.adaway' - app = metadata.read_metadata({appid: -1})[appid] - builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) - self.assertEqual( - builds[-1], - { - 'buildjni': ['yes'], - 'commit': 'v3.0', - 'gradle': ['yes'], - 'preassemble': ['renameExecutables'], - 'subdir': 'AdAway', - 'versionCode': 52, - 'versionName': '3.0', - }, - ) - - def test_remove_blank_flags_from_builds_no_builds(self): - """Unset fields in Builds: entries should be removed.""" - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds(None), - list(), - ) - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds(dict()), - list(), - ) - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds(list()), - list(), - ) - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds(set()), - list(), - ) - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds(tuple()), - list(), - ) - - def test_remove_blank_flags_from_builds_0_is_a_value(self): - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds([{'versionCode': 0}]), - [{'versionCode': 0}], - ) - - def test_remove_blank_flags_from_builds_values_to_purge(self): - self.assertEqual( - rewritemeta.remove_blank_flags_from_builds( - [ - { - 'antifeatures': dict(), - 'forceversion': False, - 'init': None, - 'rm': '', - 'scandelete': list(), - 'versionCode': 0, - }, - {'antifeatures': list(), 'versionCode': 1}, - {'antifeatures': '', 'versionCode': 2}, - ] - ), - [{'versionCode': 0}, {'versionCode': 1}, {'versionCode': 2}], - ) - - @mock.patch('sys.argv', ['fdroid rewritemeta', 'a']) - def test_rewrite_no_builds(self): - os.chdir(self.testdir) - Path('metadata').mkdir() - with Path('metadata/a.yml').open('w') as f: - f.write('AutoName: a') - rewritemeta.main() - self.assertEqual( - Path('metadata/a.yml').read_text(encoding='utf-8'), - textwrap.dedent( - '''\ - License: Unknown - - AutoName: a - - AutoUpdateMode: None - UpdateCheckMode: None - ''' - ), - ) - - @mock.patch('sys.argv', ['fdroid rewritemeta', 'a']) - def test_rewrite_empty_build_field(self): - os.chdir(self.testdir) - Path('metadata').mkdir() - with Path('metadata/a.yml').open('w') as fp: - fp.write( - textwrap.dedent( - """ - License: Apache-2.0 - Builds: - - versionCode: 4 - versionName: a - rm: - """ - ) - ) - rewritemeta.main() - self.assertEqual( - Path('metadata/a.yml').read_text(encoding='utf-8'), - textwrap.dedent( - '''\ - License: Apache-2.0 - - Builds: - - versionName: a - versionCode: 4 - - AutoUpdateMode: None - UpdateCheckMode: None - ''' - ), - ) - - def test_remove_blank_flags_from_builds_app_with_special_build_params(self): - appid = 'app.with.special.build.params' - app = metadata.read_metadata({appid: -1})[appid] - builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) - self.assertEqual( - builds[-1], - { - 'versionName': '2.1.2', - 'versionCode': 51, - 'disable': 'Labelled as pre-release, so skipped', - }, 
- ) - - def test_remove_blank_flags_from_builds_app_with_special_build_params_af(self): - """Unset fields in Builds: entries should be removed.""" - appid = 'app.with.special.build.params' - app = metadata.read_metadata({appid: -1})[appid] - builds = rewritemeta.remove_blank_flags_from_builds(app.get('Builds')) - self.assertEqual( - builds[-2], - { - 'antifeatures': { - 'Ads': {'en-US': 'includes ad lib\n', 'zh-CN': '包括广告图书馆\n'}, - 'Tracking': {'en-US': 'standard suspects\n'}, - }, - 'commit': '2.1.1', - 'maven': '2', - 'patch': [ - 'manifest-ads.patch', - 'mobilecore.patch', - ], - 'srclibs': ['FacebookSDK@sdk-version-3.0.2'], - 'versionCode': 50, - 'versionName': '2.1.1-c', - }, - ) - - @mock.patch('sys.argv', ['fdroid rewritemeta', 'a', 'b']) - def test_rewrite_scenario_trivial(self): - with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - Path('metadata').mkdir() - with Path('metadata/a.yml').open('w') as f: - f.write('AutoName: a') - with Path('metadata/b.yml').open('w') as f: - f.write('AutoName: b') - - rewritemeta.main() - - self.assertEqual( - Path('metadata/a.yml').read_text(encoding='utf-8'), - textwrap.dedent( - '''\ - License: Unknown - - AutoName: a - - AutoUpdateMode: None - UpdateCheckMode: None - ''' - ), - ) - - self.assertEqual( - Path('metadata/b.yml').read_text(encoding='utf-8'), - textwrap.dedent( - '''\ - License: Unknown - - AutoName: b - - AutoUpdateMode: None - UpdateCheckMode: None - ''' - ), - ) diff --git a/tests/test_scanner.py b/tests/test_scanner.py deleted file mode 100755 index 8da5d5cb..00000000 --- a/tests/test_scanner.py +++ /dev/null @@ -1,928 +0,0 @@ -#!/usr/bin/env python3 - -import logging -import os -import pathlib -import re -import shutil -import sys -import tempfile -import textwrap -import unittest -import uuid -import zipfile -from dataclasses import asdict -from datetime import datetime, timedelta, timezone -from unittest import mock - -if sys.version_info >= (3, 11): - import tomllib -else: - import tomli as tomllib -import yaml - -import fdroidserver.build -import fdroidserver.common -import fdroidserver.exception -import fdroidserver.metadata -import fdroidserver.scanner - -from .shared_test_code import TmpCwd, mkdtemp, mock_open_to_str - -basedir = pathlib.Path(__file__).parent - - -def _dexdump_found(): - """Find if dexdump is available in the PATH or in an Android SDK install. - - This must be run after common.config is setup. - - """ - try: - dexdump = fdroidserver.common.find_sdk_tools_cmd("dexdump") - logging.debug('Found dexdump: %s', dexdump) - return dexdump is not None - except fdroidserver.exception.FDroidException: - pass - return False - - -# Always use built-in default rules so changes in downloaded rules don't break tests. 
-@mock.patch( - 'fdroidserver.scanner.SUSSDataController.load', - fdroidserver.scanner.SUSSDataController.load_from_defaults, -) -class ScannerTest(unittest.TestCase): - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - fdroidserver.scanner.ScannerTool.refresh_allowed = False - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - def test_scan_source_files(self): - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.json = False - projects = { - 'OtakuWorld': 2, - 'Zillode': 1, - 'cn.wildfirechat.chat': 4, - 'com.github.shadowsocks': 9, - 'com.integreight.onesheeld': 17, - 'com.jens.automation2': 3, - 'firebase-suspect': 1, - 'org.mozilla.rocket': 2, - 'org.piepmeyer.gauguin': 1, - 'org.tasks': 3, - 'realm': 1, - 'se.manyver': 3, - 'lockfile.test': 1, - 'com.lolo.io.onelist': 6, - 'catalog.test': 22, - } - for d in (basedir / 'source-files').iterdir(): - build = fdroidserver.metadata.Build() - should = projects.get(d.name, 0) - if should > 0: - with self.assertLogs(level=logging.ERROR): - fatal_problems = fdroidserver.scanner.scan_source(d, build) - else: - with self.assertNoLogs(): - fatal_problems = fdroidserver.scanner.scan_source(d, build) - self.assertEqual( - should, fatal_problems, f'{d} should have {should} errors!' - ) - - def test_get_gradle_compile_commands_without_catalog(self): - test_files = [ - ('source-files/fdroid/fdroidclient/build.gradle', 'yes', 15), - ('source-files/com.nextcloud.client/build.gradle', 'generic', 24), - ('source-files/com.kunzisoft.testcase/build.gradle', 'libre', 3), - ('source-files/cn.wildfirechat.chat/chat/build.gradle', 'yes', 30), - ('source-files/org.tasks/app/build.gradle.kts', 'generic', 41), - ('source-files/at.bitfire.davdroid/build.gradle', 'standard', 15), - ('source-files/se.manyver/android/app/build.gradle', 'indie', 26), - ('source-files/osmandapp/osmand/build.gradle', 'free', 2), - ('source-files/eu.siacs.conversations/build.gradle', 'free', 21), - ('source-files/org.mozilla.rocket/app/build.gradle', 'focus', 40), - ('source-files/com.jens.automation2/app/build.gradle', 'fdroidFlavor', 5), - ('source-files/flavor.test/build.gradle', ['foss', 'prod'], 7), - ] - - for f, flavor, count in test_files: - i = 0 - build = fdroidserver.metadata.Build() - if isinstance(flavor, list): - build.gradle = flavor - else: - build.gradle = [flavor] - regexs = fdroidserver.scanner.get_gradle_compile_commands_without_catalog( - build - ) - with open(f, encoding='utf-8') as fp: - for line in fp.readlines(): - for regex in regexs: - m = regex.match(line) - if m: - i += 1 - self.assertEqual(count, i) - - def test_get_gradle_compile_commands_with_catalog(self): - test_files = [ - ('source-files/com.lolo.io.onelist/build.gradle.kts', 'yes', 5), - ('source-files/com.lolo.io.onelist/app/build.gradle.kts', 'yes', 26), - ('source-files/catalog.test/build.gradle.kts', 'yes', 3), - ('source-files/catalog.test/app/build.gradle', 'yes', 2), - ] - - for f, flavor, count in test_files: - i = 0 - build = fdroidserver.metadata.Build() - build.gradle = [flavor] - regexs = fdroidserver.scanner.get_gradle_compile_commands_with_catalog( - build, "libs" - ) - with open(f, encoding='utf-8') as fp: - for line in fp.readlines(): - for regex in regexs: - m = regex.match(line) - if m: - i += 1 - self.assertEqual(count, i) - - def test_catalog(self): - accessor_coordinate_pairs = { - 'firebase.crash': ['com.google.firebase:firebase-crash:1.1.1'], - 'firebase.core': 
['com.google.firebase:firebase-core:2.2.2'], - 'play.service.ads': ['com.google.android.gms:play-services-ads:1.2.1'], - 'jacoco': ['org.jacoco:org.jacoco.core:0.8.7'], - 'plugins.google.services': ['com.google.gms.google-services:1.2.1'], - 'plugins.firebase.crashlytics': ['com.google.firebase.crashlytics:1.1.1'], - 'bundles.firebase': [ - 'com.google.firebase:firebase-crash:1.1.1', - 'com.google.firebase:firebase-core:2.2.2', - ], - 'plugins.androidApplication.asLibraryDependency': [ - 'com.android.application:8.12.0' - ], - } - with open('source-files/catalog.test/gradle/libs.versions.toml', 'rb') as f: - catalog = fdroidserver.scanner.GradleVersionCatalog(tomllib.load(f)) - for accessor, coordinate in accessor_coordinate_pairs.items(): - self.assertEqual(catalog.get_coordinate(accessor), coordinate) - - def test_get_catalogs(self): - test_files = [ - ('source-files/com.lolo.io.onelist/', 1), - ('source-files/catalog.test/', 3), - ('source-files/org.piepmeyer.gauguin/', 1), - ('source-files/com.infomaniak.mail/', 2), - ] - - for root, count in test_files: - self.assertEqual(count, len(fdroidserver.scanner.get_catalogs(root))) - - def test_scan_source_files_sneaky_maven(self): - """Check for sneaking in banned maven repos""" - os.chdir(self.testdir) - fdroidserver.scanner.config = None - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.json = True - with open('build.gradle', 'w', encoding='utf-8') as fp: - fp.write( - textwrap.dedent( - """ - maven { - "https://jitpack.io" - url 'https://maven.fabric.io/public' - } - maven { - "https://maven.google.com" - setUrl('https://evilcorp.com/maven') - } - """ - ) - ) - with self.assertLogs(level=logging.ERROR): - count = fdroidserver.scanner.scan_source(self.testdir) - self.assertEqual(2, count, 'there should be this many errors') - - def test_scan_source_file_types(self): - """Build product files are not allowed, test they are detected - - This test runs as if `fdroid build` running to test the - difference between absolute and relative paths. 
- - """ - build_dir = os.path.join('build', 'fake.app') - abs_build_dir = os.path.join(self.testdir, build_dir) - os.makedirs(abs_build_dir, exist_ok=True) - os.chdir(abs_build_dir) - - fdroidserver.scanner.config = None - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.json = True - - keep = [ - 'arg.jar', - 'ascii.out', - 'baz.so', - 'classes.dex', - 'sqlcipher.aar', - 'static.a', - 'src/test/resources/classes.dex', - ] - remove = ['gradle-wrapper.jar', 'gradlew', 'gradlew.bat'] - os.makedirs('src/test/resources', exist_ok=True) - for f in keep + remove: - with open(f, 'w') as fp: - fp.write('placeholder') - self.assertTrue(os.path.exists(f)) - binaries = ['binary.out', 'fake.png', 'snippet.png'] - with open('binary.out', 'wb') as fp: - fp.write(b'\x00\x00') - fp.write(uuid.uuid4().bytes) - shutil.copyfile('binary.out', 'fake.png') - os.chmod('fake.png', 0o755) # nosec B103 - with open('snippet.png', 'wb') as fp: - fp.write( - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x000\x00\x00' - b'\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x04sB' - b'IT\x08\x08\x08\x08|\x08d\x88\x00\x00\x00\tpHYs\x00\x00\n' - b'a\x00\x00\na\x01\xfc\xccJ%\x00\x00\x00\x19tEXtSoftware' - ) - os.chmod('snippet.png', 0o755) # nosec B103 - - # run scanner as if from `fdroid build` - os.chdir(self.testdir) - json_per_build = fdroidserver.scanner.MessageStore() - with self.assertLogs(level=logging.ERROR): - count = fdroidserver.scanner.scan_source( - build_dir, json_per_build=json_per_build - ) - self.assertEqual(6, count, 'there should be this many errors') - os.chdir(build_dir) - - for f in keep + binaries: - self.assertTrue(os.path.exists(f), f + ' should still be there') - for f in remove: - self.assertFalse(os.path.exists(f), f + ' should have been removed') - - json_per_build_asdict = asdict(json_per_build) - files = dict() - for section in ('errors', 'infos', 'warnings'): - files[section] = [] - for msg, f in json_per_build_asdict[section]: - files[section].append(f) - - self.assertFalse( - 'ascii.out' in files['errors'], 'ASCII .out file is not an error' - ) - self.assertFalse( - 'snippet.png' in files['errors'], 'executable valid image is not an error' - ) - - self.assertTrue('arg.jar' in files['errors'], 'all JAR files are errors') - self.assertTrue('baz.so' in files['errors'], 'all .so files are errors') - self.assertTrue( - 'binary.out' in files['errors'], 'a binary .out file is an error' - ) - self.assertTrue( - 'classes.dex' in files['errors'], 'all classes.dex files are errors' - ) - self.assertTrue('sqlcipher.aar' in files['errors'], 'all AAR files are errors') - self.assertTrue('static.a' in files['errors'], 'all .a files are errors') - - self.assertTrue( - 'fake.png' in files['warnings'], - 'a random binary that is executable that is not an image is a warning', - ) - self.assertTrue( - 'src/test/resources/classes.dex' in files['warnings'], - 'suspicious file but in a test dir is a warning', - ) - - for f in remove: - self.assertTrue( - f in files['infos'], '%s should be removed with an info message' % f - ) - - def test_build_local_scanner(self): - """`fdroid build` calls scanner functions, test them here""" - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.build.config = config - fdroidserver.build.options = mock.Mock() - fdroidserver.build.options.json = False - fdroidserver.build.options.scan_binary = False - fdroidserver.build.options.notarball = True - 
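# --- Editor's illustrative sketch (not fdroidserver code) --------------------
# test_scan_source_files_sneaky_maven() above and test_gradle_maven_url_regex()
# further below exercise fdroidserver.scanner.MAVEN_URL_REGEX, which pulls
# maven repository URLs out of gradle files.  The pattern here is only a
# sketch covering the two forms used in those tests, url '...' and
# setUrl('...'); the real regex covers the many variants collected in
# gradle-maven-blocks.yaml.
import re

SKETCH_MAVEN_URL_REGEX = re.compile(r"""\b(?:url|setUrl)\s*\(?\s*['"]([^'"]+)['"]""")

_gradle_snippet = """
maven {
    url 'https://maven.fabric.io/public'
}
maven {
    setUrl('https://evilcorp.com/maven')
}
"""
assert SKETCH_MAVEN_URL_REGEX.findall(_gradle_snippet) == [
    'https://maven.fabric.io/public',
    'https://evilcorp.com/maven',
]
# -----------------------------------------------------------------------------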
fdroidserver.build.options.skipscan = False - fdroidserver.common.options = fdroidserver.build.options - - app = fdroidserver.metadata.App() - app.id = 'mocked.app.id' - build = fdroidserver.metadata.Build() - build.commit = '1.0' - build.output = app.id + '.apk' - build.scanignore = ['baz.so', 'foo.aar'] - build.versionCode = 1 - build.versionName = '1.0' - vcs = mock.Mock() - - for f in ('baz.so', 'foo.aar', 'gradle-wrapper.jar'): - with open(f, 'w') as fp: - fp.write('placeholder') - self.assertTrue(os.path.exists(f)) - - with open('build.xml', 'w', encoding='utf-8') as fp: - fp.write( - textwrap.dedent( - """ - - - - """ - ) - ) - - def make_fake_apk(output, build): - with open(build.output, 'w') as fp: - fp.write('APK PLACEHOLDER') - return output - - with mock.patch('fdroidserver.common.replace_build_vars', wraps=make_fake_apk): - with mock.patch('fdroidserver.common.get_native_code', return_value='x86'): - with ( - mock.patch( - 'fdroidserver.common.get_apk_id', - return_value=(app.id, build.versionCode, build.versionName), - ), - mock.patch( - 'fdroidserver.common.get_source_date_epoch', - lambda f: '1234567890', - ), - ): - with mock.patch( - 'fdroidserver.common.is_debuggable_or_testOnly', - return_value=False, - ): - fdroidserver.build.build_local( - app, - build, - vcs, - build_dir=self.testdir, - output_dir=self.testdir, - log_dir=None, - srclib_dir=None, - extlib_dir=None, - tmp_dir=None, - force=False, - onserver=False, - refresh=False, - ) - self.assertTrue(os.path.exists('baz.so')) - self.assertTrue(os.path.exists('foo.aar')) - self.assertFalse(os.path.exists('gradle-wrapper.jar')) - - def test_gradle_maven_url_regex(self): - """Check the regex can find all the cases""" - with open(basedir / 'gradle-maven-blocks.yaml') as fp: - data = yaml.safe_load(fp) - - urls = [] - for entry in data: - found = False - for m in fdroidserver.scanner.MAVEN_URL_REGEX.findall(entry): - urls.append(m) - found = True - self.assertTrue(found, 'this block should produce a URL:\n' + entry) - self.assertEqual(len(data), len(urls), 'each data example should produce a URL') - - def test_scan_gradle_file_with_multiple_problems(self): - """Check that the scanner can handle scandelete with gradle files with multiple problems""" - os.chdir(self.testdir) - fdroidserver.scanner.config = None - fdroidserver.common.options = mock.Mock() - build = fdroidserver.metadata.Build() - build.scandelete = ['build.gradle'] - with open('build.gradle', 'w', encoding='utf-8') as fp: - fp.write( - textwrap.dedent( - """ - maven { - url 'https://maven.fabric.io/public' - } - maven { - url 'https://evilcorp.com/maven' - } - """ - ) - ) - count = fdroidserver.scanner.scan_source(self.testdir, build) - self.assertFalse(os.path.exists("build.gradle")) - self.assertEqual(0, count, 'there should be this many errors') - - def test_get_embedded_classes(self): - config = dict() - fdroidserver.common.config = config - fdroidserver.common.fill_config_defaults(config) - if not _dexdump_found(): - self.skipTest('Some Debian arches lack dexdump') - for f in ( - 'apk.embedded_1.apk', - 'bad-unicode-πÇÇ现代通用字-български-عربي1.apk', - 'janus.apk', - 'minimal_targetsdk_30_unsigned.apk', - 'no_targetsdk_minsdk1_unsigned.apk', - 'org.bitbucket.tickytacky.mirrormirror_1.apk', - 'org.bitbucket.tickytacky.mirrormirror_2.apk', - 'org.bitbucket.tickytacky.mirrormirror_3.apk', - 'org.bitbucket.tickytacky.mirrormirror_4.apk', - 'org.dyndns.fules.ck_20.apk', - 'SpeedoMeterApp.main_1.apk', - 'urzip.apk', - 'urzip-badcert.apk', - 'urzip-badsig.apk', - 
'urzip-release.apk', - 'urzip-release-unsigned.apk', - 'repo/com.example.test.helloworld_1.apk', - 'repo/com.politedroid_3.apk', - 'repo/com.politedroid_4.apk', - 'repo/com.politedroid_5.apk', - 'repo/com.politedroid_6.apk', - 'repo/duplicate.permisssions_9999999.apk', - 'repo/info.zwanenburg.caffeinetile_4.apk', - 'repo/no.min.target.sdk_987.apk', - 'repo/obb.main.oldversion_1444412523.apk', - 'repo/obb.mainpatch.current_1619_another-release-key.apk', - 'repo/obb.mainpatch.current_1619.apk', - 'repo/obb.main.twoversions_1101613.apk', - 'repo/obb.main.twoversions_1101615.apk', - 'repo/obb.main.twoversions_1101617.apk', - 'repo/souch.smsbypass_9.apk', - 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', - 'repo/v1.v2.sig_1020.apk', - ): - self.assertNotEqual( - set(), - fdroidserver.scanner.get_embedded_classes(f), - 'should return results for ' + f, - ) - - def test_get_embedded_classes_empty_archives(self): - config = dict() - fdroidserver.common.config = config - fdroidserver.common.fill_config_defaults(config) - for f in ( - 'Norway_bouvet_europe_2.obf.zip', - 'repo/fake.ota.update_1234.zip', - ): - self.assertEqual( - set(), - fdroidserver.scanner.get_embedded_classes(f), - 'should return not results for ' + f, - ) - - def test_get_embedded_classes_secret_apk(self): - """Try to hide an APK+DEX in an APK and see if we can find it""" - config = dict() - fdroidserver.common.config = config - fdroidserver.common.fill_config_defaults(config) - if not _dexdump_found(): - self.skipTest('Some Debian arches lack dexdump') - apk = 'urzip.apk' - mapzip = 'Norway_bouvet_europe_2.obf.zip' - secretfile = os.path.join( - basedir, 'org.bitbucket.tickytacky.mirrormirror_1.apk' - ) - with tempfile.TemporaryDirectory() as tmpdir: - shutil.copy(apk, tmpdir) - shutil.copy(mapzip, tmpdir) - os.chdir(tmpdir) - with zipfile.ZipFile(mapzip, 'a') as zipfp: - zipfp.write(secretfile, 'secretapk') - with zipfile.ZipFile(apk) as readfp: - with readfp.open('classes.dex') as cfp: - zipfp.writestr('secretdex', cfp.read()) - with zipfile.ZipFile(apk, 'a') as zipfp: - zipfp.write(mapzip) - - cls = fdroidserver.scanner.get_embedded_classes(apk) - self.assertTrue( - 'org/bitbucket/tickytacky/mirrormirror/MainActivity' in cls, - 'this should find the classes in the hidden, embedded APK', - ) - self.assertTrue( - 'DEX file with fake name: secretdex' in cls, - 'badly named embedded DEX fils should throw an error', - ) - self.assertTrue( - 'ZIP file without proper file extension: secretapk' in cls, - 'badly named embedded ZIPs should throw an error', - ) - - -class Test_scan_binary(unittest.TestCase): - def setUp(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.common.options = mock.Mock() - - if not _dexdump_found(): - self.skipTest('Some Debian arches lack dexdump') - - fdroidserver.scanner._SCANNER_TOOL = mock.Mock() - fdroidserver.scanner._SCANNER_TOOL.regexs = {} - fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { - "java/lang/Object": re.compile( - r'.*java/lang/Object', re.IGNORECASE | re.UNICODE - ) - } - fdroidserver.scanner._SCANNER_TOOL.regexs['warn_code_signatures'] = {} - - def test_code_signature_match(self): - apkfile = os.path.join(basedir, 'no_targetsdk_minsdk1_unsigned.apk') - with self.assertLogs(level=logging.CRITICAL): - problems = fdroidserver.scanner.scan_binary(apkfile) - self.assertEqual( - 1, - problems, - "Did not find expected code signature '{}' in binary '{}'".format( - 
fdroidserver.scanner._SCANNER_TOOL.regexs[ - 'err_code_signatures' - ].values(), - apkfile, - ), - ) - - def test_bottom_level_embedded_apk_code_signature(self): - apkfile = os.path.join(basedir, 'apk.embedded_1.apk') - fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { - "org/bitbucket/tickytacky/mirrormirror/MainActivity": re.compile( - r'.*org/bitbucket/tickytacky/mirrormirror/MainActivity', - re.IGNORECASE | re.UNICODE, - ) - } - - with self.assertLogs(level=logging.CRITICAL): - problems = fdroidserver.scanner.scan_binary(apkfile) - self.assertEqual( - 1, - problems, - "Did not find expected code signature '{}' in binary '{}'".format( - fdroidserver.scanner._SCANNER_TOOL.regexs[ - 'err_code_signatures' - ].values(), - apkfile, - ), - ) - - def test_top_level_signature_embedded_apk_present(self): - apkfile = os.path.join(basedir, 'apk.embedded_1.apk') - fdroidserver.scanner._SCANNER_TOOL.regexs['err_code_signatures'] = { - "org/fdroid/ci/BuildConfig": re.compile( - r'.*org/fdroid/ci/BuildConfig', re.IGNORECASE | re.UNICODE - ) - } - with self.assertLogs(level=logging.CRITICAL): - problems = fdroidserver.scanner.scan_binary(apkfile) - self.assertEqual( - 1, - problems, - "Did not find expected code signature '{}' in binary '{}'".format( - fdroidserver.scanner._SCANNER_TOOL.regexs[ - 'err_code_signatures' - ].values(), - apkfile, - ), - ) - - -class Test_SignatureDataController(unittest.TestCase): - def test_init(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - self.assertEqual(sdc.name, 'nnn') - self.assertEqual(sdc.filename, 'fff.yml') - self.assertEqual(sdc.cache_duration, timedelta(999999)) - self.assertDictEqual(sdc.data, {}) - - # check_last_updated - def test_check_last_updated_ok(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.data['last_updated'] = datetime.now(timezone.utc).timestamp() - sdc.check_last_updated() - - def test_check_last_updated_exception_cache_outdated(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.cache_duration = timedelta(days=7) - sdc.data['last_updated'] = ( - datetime.now(timezone.utc) - timedelta(days=30) - ).timestamp() - with self.assertRaises(fdroidserver.scanner.SignatureDataOutdatedException): - sdc.check_last_updated() - - def test_check_last_updated_exception_not_string(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.data['last_updated'] = 'sepp' - with self.assertRaises(fdroidserver.scanner.SignatureDataMalformedException): - sdc.check_last_updated() - - def test_check_last_updated_exception_not_iso_formatted_string(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.data['last_updated'] = '01/09/2002 10:11' - with self.assertRaises(fdroidserver.scanner.SignatureDataMalformedException): - sdc.check_last_updated() - - def test_check_last_updated_no_exception_missing_when_last_updated_not_set(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.check_last_updated() - - # check_data_version - def test_check_data_version_ok(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.data['version'] = fdroidserver.scanner.SCANNER_CACHE_VERSION - 
sdc.check_data_version() - - def test_check_data_version_exception(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - with self.assertRaises( - fdroidserver.scanner.SignatureDataVersionMismatchException - ): - sdc.check_data_version() - - def test_load_ok(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - func_lfc = mock.Mock() - func_vd = mock.Mock() - func_clu = mock.Mock() - with ( - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_cache', - func_lfc, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.verify_data', - func_vd, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.check_last_updated', - func_clu, - ), - ): - sdc.load() - func_lfc.assert_called_once_with() - func_vd.assert_called_once_with() - func_clu.assert_called_once_with() - - def test_load_initial_cache_miss(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - func_lfc = mock.Mock( - side_effect=fdroidserver.scanner.SignatureDataCacheMissException - ) - func_lfd = mock.Mock() - with ( - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_cache', - func_lfc, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_defaults', - func_lfd, - ), - ): - sdc.load() - func_lfc.assert_called_once_with() - func_lfd.assert_called_once_with() - - def test_load_cache_auto_refresh(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - func_lfc = mock.Mock() - func_vd = mock.Mock() - func_clu = mock.Mock( - side_effect=fdroidserver.scanner.SignatureDataOutdatedException() - ) - func_fsfw = mock.Mock() - func_wtc = mock.Mock() - with ( - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_cache', - func_lfc, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.verify_data', - func_vd, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.check_last_updated', - func_clu, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.fetch_signatures_from_web', - func_fsfw, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.write_to_cache', - func_wtc, - ), - ): - sdc.load() - func_lfc.assert_called_once_with() - func_vd.assert_called_once_with() - func_clu.assert_called_once_with() - func_fsfw.assert_called_once_with() - func_wtc.assert_called_once_with() - - def test_load_try_web_when_no_defaults(self): - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - func_lfc = mock.Mock( - side_effect=fdroidserver.scanner.SignatureDataCacheMissException() - ) - func_lfd = mock.Mock( - side_effect=fdroidserver.scanner.SignatureDataNoDefaultsException() - ) - func_fsfw = mock.Mock() - func_wtc = mock.Mock() - with ( - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_cache', - func_lfc, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.load_from_defaults', - func_lfd, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.fetch_signatures_from_web', - func_fsfw, - ), - mock.patch( - 'fdroidserver.scanner.SignatureDataController.write_to_cache', - func_wtc, - ), - ): - sdc.load() - func_lfc.assert_called_once_with() - func_lfd.assert_called_once_with() - func_fsfw.assert_called_once_with() - func_wtc.assert_called_once_with() - - 
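# --- Editor's illustrative sketch (not fdroidserver code) --------------------
# The mock-based tests above pin down how SignatureDataController behaves:
# check_last_updated() distinguishes fresh, stale and malformed 'last_updated'
# values, and load() falls back from the cache to the bundled defaults to the
# web, refreshing a stale cache on the way.  The two functions below are only
# a control-flow sketch consistent with those tests; the real methods live in
# fdroidserver/scanner.py, and the exception classes are the ones this module
# already references via fdroidserver.scanner.
from datetime import datetime, timezone

import fdroidserver.scanner as scanner


def sketch_check_last_updated(sdc):
    """Raise the Outdated/Malformed exceptions the tests above expect."""
    last_updated = sdc.data.get('last_updated')
    if last_updated is None:
        return  # no cached timestamp is not an error
    if not isinstance(last_updated, (int, float)):
        try:
            last_updated = datetime.fromisoformat(str(last_updated)).timestamp()
        except ValueError as e:
            raise scanner.SignatureDataMalformedException() from e
    age = datetime.now(timezone.utc).timestamp() - last_updated
    if age > sdc.cache_duration.total_seconds():
        raise scanner.SignatureDataOutdatedException()


def sketch_load(sdc):
    """Cache first, then defaults, then the web, refreshing stale caches."""
    try:
        sdc.load_from_cache()
        sdc.verify_data()
    except scanner.SignatureDataCacheMissException:
        try:
            sdc.load_from_defaults()
        except scanner.SignatureDataNoDefaultsException:
            sdc.fetch_signatures_from_web()
            sdc.write_to_cache()
        return
    try:
        sdc.check_last_updated()
    except scanner.SignatureDataOutdatedException:
        sdc.fetch_signatures_from_web()
        sdc.write_to_cache()
# -----------------------------------------------------------------------------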
@unittest.skipIf( - sys.version_info < (3, 9, 0), - "mock_open doesn't allow easy access to written data in older python versions", - ) - def test_write_to_cache(self): - open_func = mock.mock_open() - sdc = fdroidserver.scanner.SignatureDataController( - 'nnn', 'fff.yml', 'https://example.com/test.json' - ) - sdc.data = {"mocked": "data"} - - with ( - mock.patch("builtins.open", open_func), - mock.patch( - "fdroidserver.scanner._scanner_cachedir", - return_value=pathlib.Path('.'), - ), - ): - sdc.write_to_cache() - - open_func.assert_called_with(pathlib.Path('fff.yml'), 'w', encoding="utf-8") - self.assertEqual(mock_open_to_str(open_func), """{\n "mocked": "data"\n}""") - - -class Test_ScannerTool(unittest.TestCase): - def setUp(self): - fdroidserver.common.options = None - fdroidserver.common.config = None - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - fdroidserver.scanner.ScannerTool.refresh_allowed = True - - def tearDown(self): - fdroidserver.common.options = None - fdroidserver.common.config = None - os.chdir(basedir) - self._td.cleanup() - - def test_load(self): - st = mock.Mock() - st.sdcs = [mock.Mock(), mock.Mock()] - fdroidserver.scanner.ScannerTool.load(st) - st.sdcs[0].load.assert_called_once_with() - st.sdcs[1].load.assert_called_once_with() - - def test_refresh_no_options_or_config(self): - """This simulates what happens when running something like scan_source()""" - with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: - fdroidserver.scanner.ScannerTool() - refresh.assert_not_called() - - def test_refresh_true(self): - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.refresh_scanner = True - with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: - fdroidserver.scanner.ScannerTool() - refresh.assert_called_once() - - def test_refresh_false(self): - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.refresh_scanner = False - with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: - fdroidserver.scanner.ScannerTool() - refresh.assert_not_called() - - def test_refresh_from_config(self): - os.chdir(self.testdir) - fdroidserver.common.write_config_file('refresh_scanner: true\n') - with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: - fdroidserver.scanner.ScannerTool() - refresh.assert_called_once() - - def test_refresh_options_overrides_config(self): - fdroidserver.common.options = mock.Mock() - fdroidserver.common.options.refresh_scanner = True - os.chdir(self.testdir) - fdroidserver.common.write_config_file('refresh_scanner: false\n') - with mock.patch('fdroidserver.scanner.ScannerTool.refresh') as refresh: - fdroidserver.scanner.ScannerTool() - refresh.assert_called_once() - - -class Test_main(unittest.TestCase): - def setUp(self): - self.args = ["com.example.app", "local/additional.apk", "another.apk"] - self.exit_func = mock.Mock() - self.read_app_args_func = mock.Mock(return_value={}) - self.scan_binary_func = mock.Mock(return_value=0) - - def test_parsing_appid(self): - """This test verifies that app id get parsed correctly - (doesn't test how they get processed) - """ - self.args = ["com.example.app"] - with ( - tempfile.TemporaryDirectory() as tmpdir, - TmpCwd(tmpdir), - mock.patch("sys.exit", self.exit_func), - mock.patch("sys.argv", ["fdroid scanner", *self.args]), - mock.patch("fdroidserver.common.read_app_args", self.read_app_args_func), - mock.patch("fdroidserver.scanner.scan_binary", self.scan_binary_func), - ): - 
fdroidserver.scanner.main() - - self.exit_func.assert_not_called() - self.read_app_args_func.assert_called_once_with( - ['com.example.app'], allow_version_codes=True - ) - self.scan_binary_func.assert_not_called() - - def test_parsing_apkpath(self): - """This test verifies that apk paths get parsed correctly - (doesn't test how they get processed) - """ - self.args = ["local.application.apk"] - with ( - tempfile.TemporaryDirectory() as tmpdir, - TmpCwd(tmpdir), - mock.patch("sys.exit", self.exit_func), - mock.patch("sys.argv", ["fdroid scanner", *self.args]), - mock.patch("fdroidserver.common.read_app_args", self.read_app_args_func), - mock.patch("fdroidserver.scanner.scan_binary", self.scan_binary_func), - ): - pathlib.Path(self.args[0]).touch() - fdroidserver.scanner.main() - - self.exit_func.assert_not_called() - self.read_app_args_func.assert_not_called() - self.scan_binary_func.assert_called_once_with('local.application.apk') diff --git a/tests/test_signatures.py b/tests/test_signatures.py deleted file mode 100755 index 4f7bd105..00000000 --- a/tests/test_signatures.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python3 - -import hashlib -import os -import sys -import unittest -from tempfile import TemporaryDirectory - -from fdroidserver import common, signatures - -from .shared_test_code import TmpCwd - -basedir = os.path.dirname(__file__) - - -class SignaturesTest(unittest.TestCase): - def setUp(self): - common.config = None - config = common.read_config() - config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') - common.config = config - - @unittest.skipIf(sys.byteorder == 'big', "androguard is not ported to big-endian") - def test_main(self): - class OptionsFixture: - APK = [os.path.join(basedir, 'repo', 'com.politedroid_3.apk')] - - with TemporaryDirectory() as tmpdir, TmpCwd(tmpdir): - signatures.extract(OptionsFixture) - - # check if extracted signatures are where they are supposed to be - # also verify weather if extracted file contain what they should - filesAndHashes = ( - ( - os.path.join( - 'metadata', 'com.politedroid', 'signatures', '3', 'MANIFEST.MF' - ), - '7dcd83f0c41a75457fd2311bf3c4578f80d684362d74ba8dc52838d353f31cf2', - ), - ( - os.path.join( - 'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.RSA' - ), - '883ef3d5a6e0bf69d2a58d9e255a7930f08a49abc38e216ed054943c99c8fdb4', - ), - ( - os.path.join( - 'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.SF' - ), - '99fbb3211ef5d7c1253f3a7ad4836eadc9905103ce6a75916c40de2831958284', - ), - ) - for path, checksum in filesAndHashes: - self.assertTrue( - os.path.isfile(path), - f'check whether {path!r} was extracted correctly.', - ) - with open(path, 'rb') as f: - self.assertEqual(hashlib.sha256(f.read()).hexdigest(), checksum) diff --git a/tests/test_signindex.py b/tests/test_signindex.py deleted file mode 100755 index 21d54585..00000000 --- a/tests/test_signindex.py +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import shutil -import subprocess -import tempfile -import unittest -from pathlib import Path -from unittest.mock import patch - -from fdroidserver import apksigcopier, common, exception, signindex, update - - -class Options: - allow_disabled_algorithms = False - clean = False - delete_unknown = False - nosign = False - pretty = True - rename_apks = False - verbose = False - - -class SignindexTest(unittest.TestCase): - basedir = Path(__file__).resolve().parent - - def setUp(self): - signindex.config = None - config = common.read_config() - 
config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner') - config['verbose'] = True - config['keystore'] = str(self.basedir / 'keystore.jks') - config['repo_keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - signindex.config = config - - self.tempdir = tempfile.TemporaryDirectory() - os.chdir(self.tempdir.name) - self.repodir = Path('repo') - self.repodir.mkdir() - - def tearDown(self): - self.tempdir.cleanup() - - def test_sign_index(self): - shutil.copy(str(self.basedir / 'repo/index-v1.json'), 'repo') - signindex.sign_index(str(self.repodir), 'index-v1.json') - self.assertTrue((self.repodir / 'index-v1.jar').exists()) - self.assertTrue((self.repodir / 'index-v1.json').exists()) - - def test_sign_index_corrupt(self): - with open('repo/index-v1.json', 'w') as fp: - fp.write('corrupt JSON!') - with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): - signindex.sign_index(str(self.repodir), 'index-v1.json') - - def test_sign_entry(self): - entry = 'repo/entry.json' - v2 = 'repo/index-v2.json' - shutil.copy(self.basedir / entry, entry) - shutil.copy(self.basedir / v2, v2) - signindex.sign_index(self.repodir, 'entry.json') - self.assertTrue((self.repodir / 'entry.jar').exists()) - - def test_sign_entry_corrupt(self): - """sign_index should exit with error if entry.json is bad JSON""" - entry = 'repo/entry.json' - with open(entry, 'w') as fp: - fp.write('{') - with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): - signindex.sign_index(self.repodir, 'entry.json') - self.assertFalse((self.repodir / 'entry.jar').exists()) - - def test_sign_entry_corrupt_leave_entry_jar(self): - """sign_index should not touch existing entry.jar if entry.json is corrupt""" - existing = 'repo/entry.jar' - testvalue = "Don't touch!" 
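# --- Editor's illustrative sketch (not fdroidserver code) --------------------
# test_sign_corrupt_index_v2_json() and test_sign_index_v2_corrupt_sha256()
# just below rely on entry.json recording the SHA-256 of index-v2.json, so
# signing must refuse to produce entry.jar when the digest no longer matches.
# This standalone helper only mirrors that consistency check; the key names
# and the '/index-v2.json' entry name follow the good_entry literal used in
# those tests, and it is not how fdroidserver.signindex actually does it.
import hashlib
import json
import os


def sketch_entry_matches_index(repodir='repo'):
    """Return True when entry.json's recorded SHA-256 matches index-v2.json."""
    with open(os.path.join(repodir, 'entry.json')) as fp:
        entry = json.load(fp)
    # entry['index']['name'] is '/index-v2.json' in the good_entry literal
    index_path = os.path.join(repodir, entry['index']['name'].lstrip('/'))
    with open(index_path, 'rb') as fp:
        digest = hashlib.sha256(fp.read()).hexdigest()
    return digest == entry['index']['sha256']
# -----------------------------------------------------------------------------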
- with open(existing, 'w') as fp: - fp.write(testvalue) - with open('repo/entry.json', 'w') as fp: - fp.write('{') - with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): - signindex.sign_index(self.repodir, 'entry.json') - with open(existing) as fp: - self.assertEqual(testvalue, fp.read()) - - def test_sign_corrupt_index_v2_json(self): - """sign_index should exit with error if index-v2.json JSON is corrupt""" - with open('repo/index-v2.json', 'w') as fp: - fp.write('{"key": "not really an index"') - good_entry = { - "timestamp": 1676583021000, - "version": 20002, - "index": { - "name": "/index-v2.json", - "sha256": common.sha256sum('repo/index-v2.json'), - "size": os.path.getsize('repo/index-v2.json'), - "numPackages": 0, - }, - } - with open('repo/entry.json', 'w') as fp: - json.dump(good_entry, fp) - with self.assertRaises(json.decoder.JSONDecodeError, msg='error on bad JSON'): - signindex.sign_index(self.repodir, 'entry.json') - self.assertFalse((self.repodir / 'entry.jar').exists()) - - def test_sign_index_v2_corrupt_sha256(self): - """sign_index should exit with error if SHA-256 of file in entry is wrong""" - entry = 'repo/entry.json' - v2 = 'repo/index-v2.json' - shutil.copy(self.basedir / entry, entry) - shutil.copy(self.basedir / v2, v2) - with open(v2, 'a') as fp: - fp.write(' ') - with self.assertRaises(exception.FDroidException, msg='error on bad SHA-256'): - signindex.sign_index(self.repodir, 'entry.json') - self.assertFalse((self.repodir / 'entry.jar').exists()) - - def test_signindex(self): - if common.find_apksigner({}) is None: # TODO remove me for buildserver-bullseye - self.skipTest('SKIPPING test_signindex, apksigner not installed!') - os.mkdir('archive') - metadata = Path('metadata') - metadata.mkdir() - with (metadata / 'info.guardianproject.urzip.yml').open('w') as fp: - fp.write('# placeholder') - shutil.copy(str(self.basedir / 'urzip.apk'), 'repo') - index_files = [] - for f in ( - 'entry.jar', - 'entry.json', - 'index-v1.jar', - 'index-v1.json', - 'index-v2.json', - 'index.jar', - 'index.xml', - ): - for section in (Path('repo'), Path('archive')): - path = section / f - self.assertFalse(path.exists(), '%s should not exist yet!' % path) - index_files.append(path) - common.options = Options - with patch('sys.argv', ['fdroid update']): - update.main() - with patch('sys.argv', ['fdroid signindex', '--verbose']): - signindex.main() - for f in index_files: - self.assertTrue(f.exists(), '%s should exist!' 
% f) - self.assertFalse(os.path.exists('index-v2.jar')) # no JAR version of this file - - # index.jar aka v0 must be signed by SHA1withRSA - f = 'repo/index.jar' - common.verify_deprecated_jar_signature(f) - self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) - cp = subprocess.run( - ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE - ) - self.assertTrue(b'SHA1withRSA' in cp.stdout) - - # index-v1.jar must be signed by SHA1withRSA - f = 'repo/index-v1.jar' - common.verify_deprecated_jar_signature(f) - self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) - cp = subprocess.run( - ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE - ) - self.assertTrue(b'SHA1withRSA' in cp.stdout) - - # entry.jar aka index v2 must be signed by a modern algorithm - f = 'repo/entry.jar' - common.verify_deprecated_jar_signature(f) - self.assertIsNone(apksigcopier.extract_v2_sig(f, expected=False)) - cp = subprocess.run( - ['jarsigner', '-verify', '-verbose', f], stdout=subprocess.PIPE - ) - self.assertFalse(b'SHA1withRSA' in cp.stdout) diff --git a/tests/test_update.py b/tests/test_update.py deleted file mode 100755 index 623f48cc..00000000 --- a/tests/test_update.py +++ /dev/null @@ -1,2467 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import glob -import hashlib -import json -import logging -import os -import random -import shutil -import string -import subprocess -import sys -import textwrap -import time -import unittest -import zipfile -from binascii import hexlify -from datetime import datetime -from pathlib import Path -from unittest import mock - -import git -import yaml - -try: - # these were moved in androguard 4.0 - from androguard.core.apk import APK -except ImportError: - from androguard.core.bytecodes.apk import APK - -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader - -try: - from yaml import CFullLoader as FullLoader -except ImportError: - try: - # FullLoader is available from PyYaml 5.1+, as we don't load user - # controlled data here, it's okay to fall back to the unsafe older - # Loader - from yaml import FullLoader - except ImportError: - from yaml import Loader as FullLoader - -from PIL import PngImagePlugin - -import fdroidserver.common -import fdroidserver.exception -import fdroidserver.metadata -import fdroidserver.update -from fdroidserver.common import CATEGORIES_CONFIG_NAME -from fdroidserver.looseversion import LooseVersion - -from .shared_test_code import TmpCwd, mkdtemp - -DONATION_FIELDS = ('Donate', 'Liberapay', 'OpenCollective') - -logging.getLogger(PngImagePlugin.__name__).setLevel(logging.INFO) -basedir = Path(__file__).parent - - -class Options: - allow_disabled_algorithms = False - clean = False - delete_unknown = False - nosign = False - pretty = True - rename_apks = False - verbose = False - - -@unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') -class UpdateTest(unittest.TestCase): - '''fdroid update''' - - def setUp(self): - os.chdir(basedir) - self._td = mkdtemp() - self.testdir = self._td.name - - fdroidserver.common.config = None - fdroidserver.common.options = None - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - def test_insert_store_metadata(self): - os.chdir(self.testdir) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - repo_dir = basedir / 'repo' - os.mkdir('metadata') - for packageName in ( - 'obb.mainpatch.current', - 'org.videolan.vlc', - ): -
shutil.copytree( - repo_dir / packageName, os.path.join('repo', packageName) - ) - for packageName in ( - 'info.guardianproject.checkey', - 'info.guardianproject.urzip', - 'org.smssecure.smssecure', - ): - shutil.copytree( - basedir / 'metadata' / packageName, - os.path.join('metadata', packageName), - ) - for packageName in ( - 'com.nextcloud.client', - 'com.nextcloud.client.dev', - 'eu.siacs.conversations', - ): - shutil.copytree( - basedir / 'source-files' / packageName, - os.path.join(self.testdir, 'build', packageName), - ) - - testfilename = 'icon_yAfSvPRJukZzMMfUzvbYqwaD1XmHXNtiPBtuPVHW-6s=.png' - testfile = repo_dir / 'org.videolan.vlc/en-US/icon.png' - cpdir = os.path.join('metadata', 'org.videolan.vlc', 'en-US') - cpfile = os.path.join(cpdir, testfilename) - os.makedirs(cpdir, exist_ok=True) - shutil.copy(testfile, cpfile) - shutil.copystat(testfile, cpfile) - - apps = dict() - for packageName in ( - 'info.guardianproject.urzip', - 'org.videolan.vlc', - 'obb.mainpatch.current', - 'com.nextcloud.client', - 'com.nextcloud.client.dev', - 'eu.siacs.conversations', - ): - apps[packageName] = fdroidserver.metadata.App() - apps[packageName]['id'] = packageName - apps[packageName]['CurrentVersionCode'] = 0xCAFEBEEF - - apps['info.guardianproject.urzip']['CurrentVersionCode'] = 100 - - buildnextcloudclient = fdroidserver.metadata.Build() - buildnextcloudclient.gradle = ['generic'] - apps['com.nextcloud.client']['Builds'] = [buildnextcloudclient] - - buildnextclouddevclient = fdroidserver.metadata.Build() - buildnextclouddevclient.gradle = ['versionDev'] - apps['com.nextcloud.client.dev']['Builds'] = [buildnextclouddevclient] - - build_conversations = fdroidserver.metadata.Build() - build_conversations.gradle = ['free'] - apps['eu.siacs.conversations']['Builds'] = [build_conversations] - - fdroidserver.update.insert_localized_app_metadata(apps) - fdroidserver.update.ingest_screenshots_from_repo_dir(apps) - - appdir = Path('repo/info.guardianproject.urzip/en-US') - self.assertTrue( - os.path.isfile( - os.path.join( - appdir, 'icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png' - ) - ) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - appdir, - 'featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png', - ) - ) - ) - - self.assertEqual(6, len(apps)) - for packageName, app in apps.items(): - self.assertIn('localized', app, packageName) - self.assertIn('en-US', app['localized']) - self.assertEqual(1, len(app['localized'])) - if packageName == 'info.guardianproject.urzip': - self.assertEqual(7, len(app['localized']['en-US'])) - self.assertEqual('full description\n', app['localized']['en-US']['description']) - self.assertEqual('title', app['localized']['en-US']['name']) - self.assertEqual('short description', app['localized']['en-US']['summary']) - self.assertEqual('video', app['localized']['en-US']['video']) - self.assertEqual('icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png', - app['localized']['en-US']['icon']) - self.assertEqual('featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png', - app['localized']['en-US']['featureGraphic']) - self.assertEqual('100\n', app['localized']['en-US']['whatsNew']) - elif packageName == 'org.videolan.vlc': - self.assertEqual(testfilename, app['localized']['en-US']['icon']) - self.assertEqual(9, len(app['localized']['en-US']['phoneScreenshots'])) - self.assertEqual(15, len(app['localized']['en-US']['sevenInchScreenshots'])) - elif packageName == 'obb.mainpatch.current': - 
self.assertEqual('icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png', - app['localized']['en-US']['icon']) - self.assertEqual('featureGraphic_ffhLaojxbGAfu9ROe1MJgK5ux8d0OVc6b65nmvOBaTk=.png', - app['localized']['en-US']['featureGraphic']) - self.assertEqual(1, len(app['localized']['en-US']['phoneScreenshots'])) - self.assertEqual(1, len(app['localized']['en-US']['sevenInchScreenshots'])) - elif packageName == 'com.nextcloud.client': - self.assertEqual('Nextcloud', app['localized']['en-US']['name']) - self.assertEqual(1073, len(app['localized']['en-US']['description'])) - self.assertEqual(78, len(app['localized']['en-US']['summary'])) - elif packageName == 'com.nextcloud.client.dev': - self.assertEqual('Nextcloud Dev', app['localized']['en-US']['name']) - self.assertEqual(586, len(app['localized']['en-US']['description'])) - self.assertEqual(78, len(app['localized']['en-US']['summary'])) - elif packageName == 'eu.siacs.conversations': - self.assertEqual('Conversations', app['localized']['en-US']['name']) - - def test_insert_fastlane_default_txt_changelog(self): - """Test that Fastlane's default.txt is handled properly - - https://docs.fastlane.tools/actions/supply/#changelogs-whats-new - """ - os.chdir(self.testdir) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - app = fdroidserver.metadata.App() - app.id = 'com.example.app' - changelogs_dir = 'build/%s/metadata/en-US/changelogs' % app.id - os.makedirs(changelogs_dir) - with open(os.path.join(changelogs_dir, 'default.txt'), 'w') as fp: - fp.write('default') - with open(os.path.join(changelogs_dir, '42.txt'), 'w') as fp: - fp.write('42') - apps = {app.id: app} - build = fdroidserver.metadata.Build() - build.versionCode = 42 - app['Builds'] = [build] - - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) - - app.CurrentVersionCode = 1 - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) - - app.CurrentVersionCode = 10000 - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual('default', apps[app.id]['localized']['en-US']['whatsNew']) - - app.CurrentVersionCode = 42 - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual('42', apps[app.id]['localized']['en-US']['whatsNew']) - - def test_fastlane_with_subdir(self): - """Test if fastlane in simple one-level subdir is found.""" - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - app = fdroidserver.metadata.App() - app.id = 'com.example.app' - build_dir = f'build/{app.id}' - flavor = 'flavor' - subdir = 'subproject' - apps = {app.id: app} - build = fdroidserver.metadata.Build() - build.versionCode = 42 - build.gradle = [flavor] - build.subdir = subdir - app['Builds'] = [build] - - first_value = 'first' - first_dir = Path(f'{build_dir}/src/{flavor}/fastlane/metadata/android/en-US') - first_dir.mkdir(parents=True) - (first_dir / 'title.txt').write_text(first_value) - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual(first_value, apps[app.id]['localized']['en-US']['name']) - - second_value = 'second' - second_dir = Path(f'{build_dir}/{subdir}/fastlane/metadata/android/en-US') - second_dir.mkdir(parents=True) - (second_dir / 'title.txt').write_text(second_value) - fdroidserver.update.insert_localized_app_metadata(apps) - 
self.assertEqual(second_value, apps[app.id]['localized']['en-US']['name']) - - def test_fastlane_with_schildichat(self): - """Test if fastlane is found in this tangle of dirs and symlinks. - - https://github.com/SchildiChat/schildichat-android-next/tree/sc_v0.10.3-ex_25_6_2 - """ - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - app = fdroidserver.metadata.App() - app.id = 'chat.schildi.android' - build_dir = f'build/{app.id}' - flavors = ['fdroid', 'sc', 'default'] - subdir = 'app' - apps = {app.id: app} - build = fdroidserver.metadata.Build() - build.versionCode = 42 - build.gradle = flavors - build.subdir = subdir - app['Builds'] = [build] - - wrong_value = 'wrong' - wrong_dir = Path(f'{build_dir}/upstream_infra/fastlane/metadata/android/en-US') - wrong_dir.mkdir(parents=True) - (wrong_dir / 'title.txt').write_text(wrong_value) - - right_value = 'right' - right_dir = Path(f'{build_dir}/metadata/en-US') - right_dir.mkdir(parents=True) - (right_dir / 'title.txt').write_text(right_value) - _fastlane = Path('.fastlane/metadata') - _fastlane.mkdir(parents=True) - os.symlink('../../metadata', _fastlane / 'android') - os.symlink('.fastlane', 'fastlane') - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual(right_value, apps[app.id]['localized']['en-US']['name']) - - def test_fastlane_with_multi_level_subdir(self): - """Test if fastlane in multi-level subdir is found.""" - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - app = fdroidserver.metadata.App() - app.id = 'org.videolan.vlc' - build_dir = f'build/{app.id}' - subdir = 'application/app' - apps = {app.id: app} - build = fdroidserver.metadata.Build() - build.versionCode = 42 - build.gradle = ['yes'] - build.subdir = subdir - app['Builds'] = [build] - - first_value = 'first' - first_dir = Path(f'{build_dir}/{subdir}/fastlane/metadata/android/en-US') - first_dir.mkdir(parents=True) - (first_dir / 'title.txt').write_text(first_value) - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual(first_value, apps[app.id]['localized']['en-US']['name']) - - # I'm not sure that it is correct behavior for this path to - # override the above path, but it is how it is working now. It - # seems to me it should be the other way around, but that is - # really hard to implement using the current algorithm. 
- second_value = 'second' - second_dir = Path(f'{build_dir}/fastlane/metadata/android/en-US') - second_dir.mkdir(parents=True) - (second_dir / 'title.txt').write_text(second_value) - fdroidserver.update.insert_localized_app_metadata(apps) - self.assertEqual(second_value, apps[app.id]['localized']['en-US']['name']) - - def test_name_title_scraping(self): - """metadata file --> fdroiddata localized files --> fastlane/triple-t in app source --> APK""" - shutil.copytree(basedir, self.testdir, dirs_exist_ok=True) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir(self.testdir) - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - apps = fdroidserver.metadata.read_metadata() - apps['info.guardianproject.urzip']['CurrentVersionCode'] = 100 - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) - fdroidserver.update.insert_localized_app_metadata(apps) - fdroidserver.update.ingest_screenshots_from_repo_dir(apps) - fdroidserver.update.apply_info_from_latest_apk(apps, apks) - app = apps['info.guardianproject.urzip'] - self.assertIsNone(app.Name) - self.assertTrue('localized' in app) - self.assertEqual('title', app['localized']['en-US']['name']) - self.assertEqual('100\n', app['localized']['en-US']['whatsNew']) - app = apps['org.videolan.vlc'] - self.assertIsNone(app.Name) - self.assertTrue('localized' in app) - self.assertFalse('name' in app['localized']['en-US']) - app = apps['info.guardianproject.checkey'] - self.assertEqual('Checkey the app!', app.Name) - self.assertTrue('localized' in app) - self.assertEqual('Checkey: info on local apps', app['localized']['en-US']['name']) - self.assertEqual('Checkey: ローカルアプリの情報', app['localized']['ja-JP']['name']) - app = apps['org.adaway'] - self.assertIsNone(app.Name) - self.assertFalse('localized' in app) - app = apps['obb.main.twoversions'] - self.assertIsNone(app.Name) - self.assertFalse('localized' in app) - - def test_insert_missing_app_names_from_apks(self): - """en-US serves as the final, default, fallback value with index-v1""" - testvalue = 'TESTVALUE!' 
- apps = { - 'none': {}, - 'name': {'Name': testvalue}, - 'onlyapk': {'Name': None}, - 'autoname': {'AutoName': 'autoname', 'Name': None}, - 'onlylocalized': {'localized': {'en-US': {'name': testvalue}}}, - 'non_en_us_localized': {'localized': {'de-AT': {'name': 'leiwand'}}}, - 'apks': {}, - } - apks = [ - {'packageName': 'none', 'name': '', 'versionCode': 1}, - {'packageName': 'name', 'name': 'fromapk', 'versionCode': 1}, - {'packageName': 'onlyapk', 'name': testvalue, 'versionCode': 1}, - {'packageName': 'autoname', 'name': testvalue, 'versionCode': 1}, - {'packageName': 'onlylocalized', 'name': 'fromapk', 'versionCode': 1}, - {'packageName': 'non_en_us_localized', 'name': testvalue, 'versionCode': 0xcafe}, - {'packageName': 'apks', 'name': 'fromapk1', 'versionCode': 1}, - {'packageName': 'apks', 'name': 'fromapk2', 'versionCode': 2}, - {'packageName': 'apks', 'name': testvalue, 'versionCode': 3}, - ] - fdroidserver.common.options = Options - fdroidserver.update.insert_missing_app_names_from_apks(apps, apks) - for appid, app in apps.items(): - if appid == 'none': - self.assertIsNone(app.get('Name')) - self.assertIsNone(app.get('localized')) - elif appid == 'onlyapk': - self.assertIsNone(app.get('Name')) - self.assertEqual(testvalue, app['localized']['en-US']['name']) - elif appid == 'autoname': - self.assertIsNone(app.get('Name')) - self.assertEqual(testvalue, app['localized']['en-US']['name']) - elif appid == 'onlylocalized': - self.assertIsNone(app.get('Name')) - self.assertEqual(testvalue, app['localized']['en-US']['name']) - elif appid == 'non_en_us_localized': - self.assertIsNone(app.get('Name')) - self.assertEqual(testvalue, app['localized']['en-US']['name']) - elif appid == 'name': - self.assertEqual(testvalue, app['Name']) - self.assertIsNone(app.get('localized')) - elif appid == 'apks': - self.assertIsNone(app.get('Name')) - self.assertEqual(testvalue, app['localized']['en-US']['name']) - - def test_insert_missing_app_names_from_apks_from_repo(self): - os.chdir(self.testdir) - shutil.copytree(basedir, self.testdir, dirs_exist_ok=True) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - apps = fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) - - appid = 'info.guardianproject.checkey' - testapps = {appid: copy.copy(apps[appid])} - self.assertEqual('Checkey the app!', testapps[appid]['Name']) - del testapps[appid]['Name'] - fdroidserver.update.insert_missing_app_names_from_apks(testapps, apks) - self.assertIsNone(testapps[appid].get('Name')) - - repoapps = fdroidserver.update.prepare_apps(apps, apks, 'repo') - fdroidserver.update.insert_missing_app_names_from_apks(repoapps, apks) - self.assertIsNone(repoapps['com.politedroid']['Name']) - self.assertEqual('Polite Droid', - repoapps['com.politedroid']['localized']['en-US']['name']) - self.assertEqual('Duplicate Permisssions', repoapps['duplicate.permisssions']['Name']) - self.assertEqual('Caffeine Tile', repoapps['info.zwanenburg.caffeinetile']['Name']) - self.assertEqual('No minSdkVersion or targetSdkVersion', repoapps['no.min.target.sdk']['Name']) - self.assertIsNone(repoapps['obb.main.oldversion'].get('Name')) - 
self.assertEqual('OBB Main Old Version', - repoapps['obb.main.oldversion']['localized']['en-US']['name']) - self.assertIsNone(repoapps['obb.main.twoversions'].get('Name')) - self.assertEqual('OBB Main Two Versions', - repoapps['obb.main.twoversions']['localized']['en-US']['name']) - self.assertIsNone(repoapps['souch.smsbypass'].get('Name')) - self.assertEqual('Battery level', - repoapps['souch.smsbypass']['localized']['en-US']['name']) - self.assertIsNone(repoapps['info.guardianproject.urzip'].get('Name')) - self.assertEqual('title', - repoapps['info.guardianproject.urzip']['localized']['en-US']['name']) - self.assertIsNone(repoapps['obb.mainpatch.current'].get('Name')) - - del repoapps['info.guardianproject.urzip']['localized'] - fdroidserver.update.insert_missing_app_names_from_apks(repoapps, apks) - self.assertEqual('urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234', - repoapps['info.guardianproject.urzip']['localized']['en-US']['name']) - - def test_insert_triple_t_metadata(self): - importer = basedir / 'tmp/importer' - packageName = 'org.fdroid.ci.test.app' - if not os.path.isdir(importer): - logging.warning('skipping test_insert_triple_t_metadata, test_import.py must run first!') - return - packageDir = os.path.join(self.testdir, 'build', packageName) - shutil.copytree(importer, packageDir) - - # always use the same commit so these tests work when ci-test-app.git is updated - repo = git.Repo(packageDir) - for remote in repo.remotes: - remote.fetch() - repo.git.reset('--hard', 'b9e5d1a0d8d6fc31d4674b2f0514fef10762ed4f') - repo.git.clean('-fdx') - - os.mkdir(os.path.join(self.testdir, 'metadata')) - metadata = dict() - metadata['Description'] = 'This is just a test app' - with open(os.path.join(self.testdir, 'metadata', packageName + '.yml'), 'w') as fp: - yaml.dump(metadata, fp) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir(self.testdir) - - apps = fdroidserver.metadata.read_metadata() - fdroidserver.update.copy_triple_t_store_metadata(apps) - - # TODO ideally, this would compare the whole dict like in test_metadata.test_read_metadata() - correctlocales = [ - 'ar', 'ast_ES', 'az', 'ca', 'ca_ES', 'cs-CZ', 'cs_CZ', 'da', - 'da-DK', 'de', 'de-DE', 'el', 'en-US', 'es', 'es-ES', 'es_ES', 'et', - 'fi', 'fr', 'fr-FR', 'he_IL', 'hi-IN', 'hi_IN', 'hu', 'id', 'it', - 'it-IT', 'it_IT', 'iw-IL', 'ja', 'ja-JP', 'kn_IN', 'ko', 'ko-KR', - 'ko_KR', 'lt', 'nb', 'nb_NO', 'nl', 'nl-NL', 'no', 'pl', 'pl-PL', - 'pl_PL', 'pt', 'pt-BR', 'pt-PT', 'pt_BR', 'ro', 'ro_RO', 'ru-RU', - 'ru_RU', 'sv-SE', 'sv_SE', 'te', 'tr', 'tr-TR', 'uk', 'uk_UA', 'vi', - 'vi_VN', 'zh-CN', 'zh_CN', 'zh_TW', - ] - locales = sorted(apps['org.fdroid.ci.test.app']['localized']) - self.assertEqual(correctlocales, locales) - - def test_insert_triple_t_1_graphics(self): - packageName = 'de.wivewa.dialer' - shutil.copytree(basedir / 'triple-t-1-graphics', self.testdir, dirs_exist_ok=True) - os.chdir(self.testdir) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - - apps = fdroidserver.metadata.read_metadata() - fdroidserver.update.copy_triple_t_store_metadata(apps) - - os.chdir(os.path.join('repo', packageName)) - self.assertTrue(os.path.exists(os.path.join('en-US', 'icon.png'))) - self.assertTrue(os.path.exists(os.path.join('en-US', 'featureGraphic.png'))) - self.assertTrue(os.path.exists(os.path.join('en-US', 'phoneScreenshots', '1.png'))) 
- - def test_insert_triple_t_2_metadata(self): - packageName = 'org.piwigo.android' - shutil.copytree(basedir / 'triple-t-2', self.testdir, dirs_exist_ok=True) - os.chdir(self.testdir) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - - apps = fdroidserver.metadata.read_metadata() - self.assertTrue(packageName in apps) - fdroidserver.update.copy_triple_t_store_metadata(apps) - correctlocales = ['de-DE', 'en-US', 'fr-FR', 'kn-IN'] - app = apps[packageName] - self.assertEqual('android@piwigo.org', app['authorEmail']) - self.assertEqual('https://www.piwigo.org', app['authorWebSite']) - locales = sorted(list(app['localized'].keys())) - self.assertEqual(correctlocales, locales) - kn_IN = app['localized']['kn-IN'] - self.assertTrue('description' in kn_IN) - self.assertTrue('name' in kn_IN) - self.assertTrue('summary' in kn_IN) - en_US = app['localized']['en-US'] - self.assertTrue('whatsNew' in en_US) - - os.chdir(os.path.join('repo', packageName)) - self.assertTrue(os.path.exists(os.path.join('en-US', 'icon.png'))) - self.assertTrue(os.path.exists(os.path.join('en-US', 'featureGraphic.png'))) - self.assertTrue(os.path.exists(os.path.join('en-US', 'phoneScreenshots', '01_Login.jpg'))) - self.assertTrue(os.path.exists(os.path.join('en-US', 'sevenInchScreenshots', '01_Login.png'))) - self.assertFalse(os.path.exists(os.path.join('de-DE', 'icon.png'))) - self.assertFalse(os.path.exists(os.path.join('de-DE', 'featureGraphic.png'))) - self.assertFalse(os.path.exists(os.path.join('de-DE', 'phoneScreenshots', '01_Login.jpg'))) - self.assertFalse(os.path.exists(os.path.join('de-DE', 'sevenInchScreenshots', '01_Login.png'))) - - def test_insert_triple_t_anysoftkeyboard(self): - packages = ('com.anysoftkeyboard.languagepack.dutch', 'com.menny.android.anysoftkeyboard') - names = ('Dutch for AnySoftKeyboard', 'AnySoftKeyboard') - - shutil.copytree(basedir / 'triple-t-anysoftkeyboard', self.testdir, dirs_exist_ok=True) - os.chdir(self.testdir) - - for packageName, name in zip(packages, names): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.update.options = fdroidserver.common.options - - apps = fdroidserver.metadata.read_metadata() - self.assertTrue(packageName in apps) - fdroidserver.update.copy_triple_t_store_metadata(apps) - app = apps[packageName] - self.assertEqual(app['localized']['en-US']['name'], name) - - def test_insert_triple_t_multiple_metadata(self): - namespace = 'ch.admin.bag.covidcertificate.' 
- packages = ('verifier', 'wallet') - names = dict(verifier='COVID Certificate Check', wallet='COVID Certificate') - - shutil.copytree(basedir / 'triple-t-multiple', self.testdir, dirs_exist_ok=True) - os.chdir(self.testdir) - - for p in packages: - packageName = namespace + p - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.update.options = fdroidserver.common.options - - apps = fdroidserver.metadata.read_metadata() - self.assertTrue(packageName in apps) - fdroidserver.update.copy_triple_t_store_metadata(apps) - app = apps[packageName] - self.assertEqual(app['localized']['en-US']['name'], names[p]) - - def test_insert_triple_t_flutter(self): - packageName = 'fr.emersion.goguma' - - shutil.copytree(basedir / 'triple-t-flutter', self.testdir, dirs_exist_ok=True) - os.chdir(self.testdir) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.update.options = fdroidserver.common.options - - apps = fdroidserver.metadata.read_metadata() - self.assertTrue(packageName in apps) - fdroidserver.update.copy_triple_t_store_metadata(apps) - app = apps[packageName] - self.assertEqual(app['authorWebSite'], 'https://emersion.fr') - self.assertEqual(app['localized']['en-US']['name'], 'Goguma') - self.assertEqual(app['localized']['en-US']['summary'], 'An IRC client for mobile devices') - - def testBadGetsig(self): - """getsig() should still be able to fetch the fingerprint of bad signatures""" - # config needed to use jarsigner and keytool - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - apkfile = 'urzip-badsig.apk' - sig = fdroidserver.update.getsig(apkfile) - self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', - "python sig should be: " + str(sig)) - - apkfile = 'urzip-badcert.apk' - sig = fdroidserver.update.getsig(apkfile) - self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', - "python sig should be: " + str(sig)) - - def test_getsig(self): - # config needed to use jarsigner and keytool - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - sig = fdroidserver.update.getsig('urzip-release-unsigned.apk') - self.assertIsNone(sig) - - good_fingerprint = 'b4964fd759edaa54e65bb476d0276880' - - apkpath = 'urzip-release.apk' # v1 only - sig = fdroidserver.update.getsig(apkpath) - self.assertEqual(good_fingerprint, sig, - 'python sig was: ' + str(sig)) - - apkpath = 'repo/v1.v2.sig_1020.apk' - sig = fdroidserver.update.getsig(apkpath) - self.assertEqual(good_fingerprint, sig, - 'python sig was: ' + str(sig)) - # check that v1 and v2 have the same certificate - apkobject = APK(apkpath) - cert_encoded = apkobject.get_certificates_der_v2()[0] - self.assertEqual(good_fingerprint, sig, - hashlib.md5(hexlify(cert_encoded)).hexdigest()) # nosec just used as ID for signing key - - filename = 'v2.only.sig_2.apk' - with zipfile.ZipFile(filename) as z: - self.assertTrue('META-INF/MANIFEST.MF' in z.namelist(), 'META-INF/MANIFEST.MF required') - for f in z.namelist(): - # ensure there are no v1 signature files - self.assertIsNone(fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(f)) - sig = fdroidserver.update.getsig(filename) - self.assertEqual(good_fingerprint, sig, - "python sig was: " + str(sig)) - - def testScanApksAndObbs(self): - os.chdir(self.testdir) - 
shutil.copytree(basedir / 'repo', 'repo') - shutil.copytree(basedir / 'metadata', 'metadata') - config = dict() - fdroidserver.common.fill_config_defaults(config) - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - apps = fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) - self.assertEqual(len(apks), 18) - apk = apks[1] - self.assertEqual(apk['packageName'], 'com.politedroid') - self.assertEqual(apk['versionCode'], 3) - self.assertEqual(apk['minSdkVersion'], 3) - self.assertIsNone(apk.get('targetSdkVersion')) - self.assertFalse('maxSdkVersion' in apk) - apk = apks[8] - self.assertEqual(apk['packageName'], 'obb.main.oldversion') - self.assertEqual(apk['versionCode'], 1444412523) - self.assertEqual(apk['minSdkVersion'], 4) - self.assertEqual(apk['targetSdkVersion'], 18) - self.assertFalse('maxSdkVersion' in apk) - - fdroidserver.update.insert_obbs('repo', apps, apks) - for apk in apks: - if apk['packageName'] == 'obb.mainpatch.current': - self.assertEqual(apk.get('obbMainFile'), 'main.1619.obb.mainpatch.current.obb') - self.assertEqual(apk.get('obbPatchFile'), 'patch.1619.obb.mainpatch.current.obb') - elif apk['packageName'] == 'obb.main.oldversion': - self.assertEqual(apk.get('obbMainFile'), 'main.1434483388.obb.main.oldversion.obb') - self.assertIsNone(apk.get('obbPatchFile')) - elif apk['packageName'] == 'obb.main.twoversions': - self.assertIsNone(apk.get('obbPatchFile')) - if apk['versionCode'] == 1101613: - self.assertEqual(apk.get('obbMainFile'), 'main.1101613.obb.main.twoversions.obb') - elif apk['versionCode'] == 1101615: - self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') - elif apk['versionCode'] == 1101617: - self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') - else: - self.assertTrue(False) - elif apk['packageName'] == 'info.guardianproject.urzip': - self.assertIsNone(apk.get('obbMainFile')) - self.assertIsNone(apk.get('obbPatchFile')) - - def test_apkcache_json(self): - """test the migration from pickle to json""" - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apkcache = fdroidserver.update.get_cache() - self.assertEqual(2, len(apkcache)) - self.assertEqual(fdroidserver.update.METADATA_VERSION, apkcache["METADATA_VERSION"]) - self.assertEqual(fdroidserver.update.options.allow_disabled_algorithms, - apkcache['allow_disabled_algorithms']) - apks, cachechanged = fdroidserver.update.process_apks(apkcache, 'repo', knownapks, False) - fdroidserver.update.write_cache(apkcache) - - fdroidserver.update.options.clean = False - read_from_json = fdroidserver.update.get_cache() - self.assertEqual(20, len(read_from_json)) - for f in glob.glob('repo/*.apk'): - 
self.assertTrue(os.path.basename(f) in read_from_json) - - fdroidserver.update.options.clean = True - reset = fdroidserver.update.get_cache() - self.assertEqual(2, len(reset)) - - def test_scan_repo_files(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - - os.chdir(self.testdir) - os.mkdir('repo') - filename = 'Norway_bouvet_europe_2.obf.zip' - shutil.copy(basedir / filename, 'repo') - knownapks = fdroidserver.common.KnownApks() - files, fcachechanged = fdroidserver.update.scan_repo_files(dict(), 'repo', knownapks, False) - self.assertTrue(fcachechanged) - - info = files[0] - self.assertEqual(filename, info['apkName']) - self.assertEqual(datetime, type(info['added'])) - self.assertEqual(os.path.getsize(os.path.join('repo', filename)), info['size']) - self.assertEqual( - '531190bdbc07e77d5577249949106f32dac7f62d38d66d66c3ae058be53a729d', - info['hash'], - ) - - def test_read_added_date_from_all_apks(self): - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.common.options = Options - apps = fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) - fdroidserver.update.read_added_date_from_all_apks(apps, apks) - - def test_apply_info_from_latest_apk(self): - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - apps = fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) - fdroidserver.update.apply_info_from_latest_apk(apps, apks) - - def test_scan_apk(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir(basedir) - - if 'apksigner' in config: - apk_info = fdroidserver.update.scan_apk('v2.only.sig_2.apk') - self.assertIsNone(apk_info.get('maxSdkVersion')) - self.assertEqual(apk_info.get('versionName'), 'v2-only') - self.assertEqual(apk_info.get('versionCode'), 2) - else: - print('WARNING: skipping v2-only test since apksigner cannot be found') - apk_info = fdroidserver.update.scan_apk('repo/v1.v2.sig_1020.apk') - self.assertIsNone(apk_info.get('maxSdkVersion')) - self.assertEqual(apk_info.get('versionName'), 'v1+2') - self.assertEqual(apk_info.get('versionCode'), 1020) - - apk_info = fdroidserver.update.scan_apk('repo/souch.smsbypass_9.apk') - self.assertIsNone(apk_info.get('maxSdkVersion')) - self.assertEqual(apk_info.get('versionName'), '0.9') - - apk_info = fdroidserver.update.scan_apk('repo/duplicate.permisssions_9999999.apk') - self.assertEqual(apk_info.get('versionName'), '') - self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable/ic_launcher.png', - '-1': 'res/drawable/ic_launcher.png'}) - - apk_info = fdroidserver.update.scan_apk('org.dyndns.fules.ck_20.apk') - self.assertEqual(apk_info['icons_src'], {'240': 'res/drawable-hdpi-v4/icon_launcher.png', - '120': 'res/drawable-ldpi-v4/icon_launcher.png', - '160': 
'res/drawable-mdpi-v4/icon_launcher.png', - '-1': 'res/drawable-mdpi-v4/icon_launcher.png'}) - self.assertEqual(apk_info['icons'], {}) - self.assertEqual(apk_info['features'], []) - self.assertEqual(apk_info['antiFeatures'], dict()) - self.assertEqual(apk_info['versionName'], 'v1.6pre2') - self.assertEqual(apk_info['hash'], - '897486e1f857c6c0ee32ccbad0e1b8cd82f6d0e65a44a23f13f852d2b63a18c8') - self.assertEqual(apk_info['packageName'], 'org.dyndns.fules.ck') - self.assertEqual(apk_info['versionCode'], 20) - self.assertEqual(apk_info['size'], 132453) - self.assertEqual(apk_info['nativecode'], - ['arm64-v8a', 'armeabi', 'armeabi-v7a', 'mips', 'mips64', 'x86', 'x86_64']) - self.assertEqual(apk_info['minSdkVersion'], 7) - self.assertEqual(apk_info['sig'], '9bf7a6a67f95688daec75eab4b1436ac') - self.assertEqual(apk_info['hashType'], 'sha256') - self.assertEqual(apk_info['targetSdkVersion'], 8) - - apk_info = fdroidserver.update.scan_apk('org.bitbucket.tickytacky.mirrormirror_4.apk') - self.assertEqual(apk_info.get('versionName'), '1.0.3') - self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable-mdpi/mirror.png', - '-1': 'res/drawable-mdpi/mirror.png'}) - - apk_info = fdroidserver.update.scan_apk('repo/info.zwanenburg.caffeinetile_4.apk') - self.assertEqual(apk_info.get('versionName'), '1.3') - self.assertEqual(apk_info['icons_src'], {}) - - apk_info = fdroidserver.update.scan_apk('repo/com.politedroid_6.apk') - self.assertEqual(apk_info.get('versionName'), '1.5') - self.assertEqual(apk_info['icons_src'], {'120': 'res/drawable-ldpi-v4/icon.png', - '160': 'res/drawable-mdpi-v4/icon.png', - '240': 'res/drawable-hdpi-v4/icon.png', - '320': 'res/drawable-xhdpi-v4/icon.png', - '-1': 'res/drawable-mdpi-v4/icon.png'}) - - apk_info = fdroidserver.update.scan_apk('SpeedoMeterApp.main_1.apk') - self.assertEqual(apk_info.get('versionName'), '1.0') - self.assertEqual(apk_info['icons_src'], {}) - - def test_scan_apk_no_min_target(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - apk_info = fdroidserver.update.scan_apk('repo/no.min.target.sdk_987.apk') - self.maxDiff = None - expected = { - 'icons': {}, - 'icons_src': {'-1': 'res/drawable/ic_launcher.png', - '160': 'res/drawable/ic_launcher.png'}, - 'name': 'No minSdkVersion or targetSdkVersion', - 'signer': '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', - 'hashType': 'sha256', - 'packageName': 'no.min.target.sdk', - 'features': [], - 'antiFeatures': dict(), - 'size': 14102, - 'sig': 'b4964fd759edaa54e65bb476d0276880', - 'versionName': '1.2-fake', - 'uses-permission-sdk-23': [], - 'hash': 'e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c', - 'versionCode': 987, - 'minSdkVersion': 3, - 'uses-permission': [ - fdroidserver.update.UsesPermission(name='android.permission.WRITE_EXTERNAL_STORAGE', - maxSdkVersion=None), - fdroidserver.update.UsesPermission(name='android.permission.READ_PHONE_STATE', - maxSdkVersion=None), - fdroidserver.update.UsesPermission(name='android.permission.READ_EXTERNAL_STORAGE', - maxSdkVersion=None), - ], - } - if config.get('ipfs_cid'): - expected['ipfsCIDv1'] = 'bafybeidwxseoagnew3gtlasttqovl7ciuwxaud5a5p4a5pzpbrfcfj2gaa' - - self.assertDictEqual(apk_info, expected) - - def test_scan_apk_no_sig(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir(basedir) - if 
os.path.basename(os.getcwd()) != 'tests': - raise Exception('This test must be run in the "tests/" subdir') - - with self.assertRaises(fdroidserver.exception.BuildException): - fdroidserver.update.scan_apk('urzip-release-unsigned.apk') - - def test_scan_apk_bad_zip(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir(self.testdir) - os.mkdir('repo') - apkfile = 'repo/badzip_1.apk' - with open(apkfile, 'w') as fp: - fp.write('this is not a zip file') - with self.assertRaises(fdroidserver.exception.BuildException): - fdroidserver.update.scan_apk(apkfile) - - @unittest.skipUnless( - os.path.exists('tests/SystemWebView-repack.apk'), "file too big for sdist" - ) - def test_scan_apk_bad_icon_id(self): - """Some APKs can produce an exception when extracting the icon - - This kind of parsing exception should be reported then ignored - so that working APKs can be included in the index. There are - so many weird things that make it into APKs, that does not - automatically disqualify them from inclusion. For example: - - ValueError: invalid literal for int() with base 16: '<0x801FF, type 0x07>' - - The test APK was made from: - https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1018#note_690565333 - It was then stripped down by doing: - - * mkdir SystemWebView - * cd SystemWebView/ - * unzip ../SystemWebView.apk - * rm -rf META-INF/ lib assets/icudtl.dat assets/stored-locales/ - * jar cf ../SystemWebView-repack.apk * - """ - # reset the state, perhaps this should be in setUp() - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - with mkdtemp() as tmpdir, TmpCwd(tmpdir): - os.mkdir('repo') - apkfile = 'repo/SystemWebView-repack.apk' - shutil.copy(basedir / os.path.basename(apkfile), apkfile) - fdroidserver.update.scan_apk(apkfile) - - def test_scan_apk_bad_namespace_in_manifest(self): - """Some APKs can produce an exception when parsing the AndroidManifest.xml - - This kind of parsing exception should be reported then ignored - so that working APKs can be included in the index. There are - so many weird things that make it into APKs, that does not - automatically disqualify them from inclusion. 
- - This APK has elements with messed up namespaces: - - - """ - # reset the state, perhaps this should be in setUp() - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - with mkdtemp() as tmpdir, TmpCwd(tmpdir): - os.mkdir('repo') - apkfile = 'repo/org.sajeg.fallingblocks_3.apk' - shutil.copy(basedir / os.path.basename(apkfile), apkfile) - fdroidserver.update.scan_apk(apkfile) - - def test_process_apk(self): - def _build_yaml_representer(dumper, data): - '''Creates a YAML representation of a Build instance''' - return dumper.represent_dict(data) - - os.chdir(self.testdir) - shutil.copytree(basedir, 'tests') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - os.chdir("tests") - - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - for icon_dir in fdroidserver.update.get_all_icon_dirs('repo'): - if not os.path.exists(icon_dir): - os.makedirs(icon_dir) - - knownapks = fdroidserver.common.KnownApks() - apkList = ['../urzip.apk', '../org.dyndns.fules.ck_20.apk'] - - for apkName in apkList: - _, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', knownapks, - False) - # Don't care about the date added to the repo and relative apkName - self.assertEqual(datetime, type(apk['added'])) - del apk['added'] - del apk['apkName'] - - # ensure that icons have been extracted properly - if apkName == '../urzip.apk': - self.assertEqual(apk['icon'], 'info.guardianproject.urzip.100.png') - if apkName == '../org.dyndns.fules.ck_20.apk': - self.assertEqual(apk['icon'], 'org.dyndns.fules.ck.20.png') - for density in fdroidserver.update.screen_densities: - icon_path = os.path.join( - fdroidserver.update.get_icon_dir('repo', density), apk['icon'] - ) - self.assertTrue(os.path.isfile(icon_path)) - self.assertTrue(os.path.getsize(icon_path) > 1) - - savepath = os.path.join('metadata', 'apk', apk['packageName'] + '.yaml') - # Uncomment to save APK metadata - # with open(savepath, 'w') as f: - # yaml.add_representer(fdroidserver.metadata.Build, _build_yaml_representer) - # yaml.dump(apk, f, default_flow_style=False) - - # CFullLoader doesn't always work - # https://github.com/yaml/pyyaml/issues/266#issuecomment-559116876 - TestLoader = FullLoader - try: - testyaml = '- !!python/object/new:fdroidserver.update.UsesPermission\n - test\n - null' - from_yaml = yaml.load(testyaml, Loader=TestLoader) # nosec B506 - except yaml.constructor.ConstructorError: - from yaml import UnsafeLoader as TestLoader - - with open(savepath, 'r') as f: - from_yaml = yaml.load(f, Loader=TestLoader) # nosec B506 - self.maxDiff = None - if not config.get('ipfs_cid'): - del from_yaml['ipfsCIDv1'] # handle when ipfs_cid is not installed - self.assertEqual(apk, from_yaml) - - def test_process_apk_signed_by_disabled_algorithms(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - 
fdroidserver.update.options.verbose = True - fdroidserver.update.options.delete_unknown = True - - knownapks = fdroidserver.common.KnownApks() - - with mkdtemp() as tmptestsdir, TmpCwd(tmptestsdir): - os.mkdir('repo') - os.mkdir('archive') - # setup the repo, create icons dirs, etc. - fdroidserver.update.process_apks({}, 'repo', knownapks) - fdroidserver.update.process_apks({}, 'archive', knownapks) - - disabledsigs = ['org.bitbucket.tickytacky.mirrormirror_2.apk'] - for apkName in disabledsigs: - shutil.copy(basedir / apkName, - os.path.join(tmptestsdir, 'repo')) - - skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', - knownapks, - allow_disabled_algorithms=True, - archive_bad_sig=False) - self.assertFalse(skip) - self.assertIsNotNone(apk) - self.assertTrue(cachechanged) - self.assertFalse(os.path.exists(os.path.join('archive', apkName))) - self.assertTrue(os.path.exists(os.path.join('repo', apkName))) - - if os.path.exists('/usr/bin/apksigner') or 'apksigner' in config: - print('SKIPPING: apksigner installed and it allows MD5 signatures') - return - - javac = config['jarsigner'].replace('jarsigner', 'javac') - v = subprocess.check_output([javac, '-version'], stderr=subprocess.STDOUT)[6:-1].decode('utf-8') - if LooseVersion(v) < LooseVersion('1.8.0_132'): - print('SKIPPING: running tests with old Java (' + v + ')') - return - - # this test only works on systems with fully updated Java/jarsigner - # that has MD5 listed in jdk.jar.disabledAlgorithms in java.security - # https://blogs.oracle.com/java-platform-group/oracle-jre-will-no-longer-trust-md5-signed-code-by-default - skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', - knownapks, - allow_disabled_algorithms=False, - archive_bad_sig=True) - self.assertTrue(skip) - self.assertIsNone(apk) - self.assertFalse(cachechanged) - self.assertTrue(os.path.exists(os.path.join('archive', apkName))) - self.assertFalse(os.path.exists(os.path.join('repo', apkName))) - - skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'archive', - knownapks, - allow_disabled_algorithms=False, - archive_bad_sig=False) - self.assertFalse(skip) - self.assertIsNotNone(apk) - self.assertTrue(cachechanged) - self.assertTrue(os.path.exists(os.path.join('archive', apkName))) - self.assertFalse(os.path.exists(os.path.join('repo', apkName))) - - # ensure that icons have been moved to the archive as well - for density in fdroidserver.update.screen_densities: - icon_path = os.path.join(fdroidserver.update.get_icon_dir('archive', density), - apk['icon']) - self.assertTrue(os.path.isfile(icon_path)) - self.assertTrue(os.path.getsize(icon_path) > 1) - - badsigs = ['urzip-badcert.apk', 'urzip-badsig.apk', 'urzip-release-unsigned.apk', ] - for apkName in badsigs: - shutil.copy(basedir / apkName, - os.path.join(self.testdir, 'repo')) - - skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', - knownapks, - allow_disabled_algorithms=False, - archive_bad_sig=False) - self.assertTrue(skip) - self.assertIsNone(apk) - self.assertFalse(cachechanged) - - def test_process_invalid_apk(self): - os.chdir(basedir) - if os.path.basename(os.getcwd()) != 'tests': - raise Exception('This test must be run in the "tests/" subdir') - - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - 
fdroidserver.update.options.delete_unknown = False - - knownapks = fdroidserver.common.KnownApks() - apk = 'fake.ota.update_1234.zip' # this is not an APK, scanning should fail - (skip, apk, cachechanged) = fdroidserver.update.process_apk({}, apk, 'repo', knownapks, - False) - - self.assertTrue(skip) - self.assertIsNone(apk) - self.assertFalse(cachechanged) - - def test_get_apks_without_allowed_signatures(self): - """Test when no AllowedAPKSigningKeys is specified""" - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - - app = fdroidserver.metadata.App() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) - apkfile = 'v1.v2.sig_1020.apk' - self.assertIn( - apkfile, - os.listdir('repo'), - f'{apkfile} was archived or otherwise removed from "repo"', - ) - (skip, apk, cachechanged) = fdroidserver.update.process_apk( - {}, apkfile, 'repo', knownapks, False - ) - - r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) - self.assertIsNone(r) - - def test_get_apks_without_allowed_signatures_allowed(self): - """Test when the APK matches the specified AllowedAPKSigningKeys""" - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - - app = fdroidserver.metadata.App( - { - 'AllowedAPKSigningKeys': '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6' - } - ) - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) - apkfile = 'v1.v2.sig_1020.apk' - (skip, apk, cachechanged) = fdroidserver.update.process_apk( - {}, apkfile, 'repo', knownapks, False - ) - - r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) - self.assertIsNone(r) - - def test_get_apks_without_allowed_signatures_blocked(self): - """Test when the APK does not match any specified AllowedAPKSigningKeys""" - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - - app = fdroidserver.metadata.App( - { - 'AllowedAPKSigningKeys': 'fa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edead' - } - ) - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks) - apkfile = 'v1.v2.sig_1020.apk' - (skip, apk, cachechanged) = fdroidserver.update.process_apk( - {}, apkfile, 'repo', knownapks, False - ) - - r = fdroidserver.update.get_apks_without_allowed_signatures(app, apk) - self.assertEqual(apkfile, r) - - def test_update_with_AllowedAPKSigningKeys(self): - """Test that APKs without allowed signatures get deleted.""" - os.chdir(self.testdir) - os.mkdir('repo') - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - os.mkdir('metadata') - metadatafile = os.path.join('metadata', 'com.politedroid.yml') - - # Copy and manipulate metadata file - shutil.copy(basedir / metadatafile, metadatafile) - 
with open(metadatafile, 'a') as fp: - fp.write( - '\n\nAllowedAPKSigningKeys: 32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6\n' - ) - - # Set up options - fdroidserver.common.options = Options - config = fdroidserver.common.read_config() - if 'apksigner' not in config: # TODO remove me for buildserver-bullseye - self.skipTest('SKIPPING test_update_with_AllowedAPKSigningKeys, apksigner not installed!') - config['repo_keyalias'] = 'sova' - config['keystorepass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keypass'] = 'r9aquRHYoI8+dYz6jKrLntQ5/NJNASFBacJh7Jv2BlI=' - config['keystore'] = os.path.join(basedir, 'keystore.jks') - - self.assertTrue(os.path.exists(testapk)) - - # Test for non-deletion - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown']): - fdroidserver.update.main() - self.assertTrue(os.path.exists(testapk)) - - # Copy and manipulate metadata file again - shutil.copy(basedir / metadatafile, metadatafile) - with open(metadatafile, 'a') as fp: - fp.write( - '\n\nAllowedAPKSigningKeys: fa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edeadfa4edead\n' - ) - - # Test for deletion - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown']): - fdroidserver.update.main() - self.assertFalse(os.path.exists(testapk)) - - def test_translate_per_build_anti_features(self): - os.chdir(self.testdir) - shutil.copytree(basedir / 'repo', 'repo') - shutil.copytree(basedir / 'metadata', 'metadata') - config = dict() - fdroidserver.common.fill_config_defaults(config) - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - fdroidserver.update.options.delete_unknown = True - - apps = fdroidserver.metadata.read_metadata() - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) - fdroidserver.update.translate_per_build_anti_features(apps, apks) - self.assertEqual(len(apks), 18) - foundtest = False - for apk in apks: - if apk['packageName'] == 'com.politedroid' and apk['versionCode'] == 3: - antiFeatures = apk.get('antiFeatures') - self.assertTrue('KnownVuln' in antiFeatures) - self.assertEqual(2, len(antiFeatures)) - foundtest = True - self.assertTrue(foundtest) - - def test_create_metadata_from_template(self): - os.chdir(self.testdir) - os.mkdir('repo') - os.mkdir('metadata') - shutil.copy(basedir / 'urzip.apk', 'repo') - - config = dict() - fdroidserver.common.fill_config_defaults(config) - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) - self.assertEqual(1, len(apks)) - apk = apks[0] - - testfile = 'metadata/info.guardianproject.urzip.yml' - # create empty 0 byte .yml file, run read_metadata, it should work - open(testfile, 'a').close() - apps = fdroidserver.metadata.read_metadata() - self.assertEqual(1, len(apps)) - os.remove(testfile) - - # test using internal template - apps = fdroidserver.metadata.read_metadata() - self.assertEqual(0, len(apps)) - fdroidserver.update.create_metadata_from_template(apk) - 
self.assertTrue(os.path.exists(testfile)) - apps = fdroidserver.metadata.read_metadata() - self.assertEqual(1, len(apps)) - for app in apps.values(): - self.assertEqual('urzip', app['Name']) - self.assertEqual(1, len(app['Categories'])) - break - - # test using external template.yml - os.remove(testfile) - self.assertFalse(os.path.exists(testfile)) - shutil.copy(basedir.with_name('examples') / 'template.yml', self.testdir) - fdroidserver.update.create_metadata_from_template(apk) - self.assertTrue(os.path.exists(testfile)) - apps = fdroidserver.metadata.read_metadata() - self.assertEqual(1, len(apps)) - for app in apps.values(): - self.assertEqual('urzip', app['Name']) - self.assertEqual(1, len(app['Categories'])) - self.assertEqual('Internet', app['Categories'][0]) - break - with open(testfile) as fp: - data = yaml.load(fp, Loader=SafeLoader) - self.assertEqual('urzip', data['Name']) - self.assertEqual('urzip', data['Summary']) - - def test_has_known_vulnerability(self): - good = [ - 'org.bitbucket.tickytacky.mirrormirror_1.apk', - 'org.bitbucket.tickytacky.mirrormirror_2.apk', - 'org.bitbucket.tickytacky.mirrormirror_3.apk', - 'org.bitbucket.tickytacky.mirrormirror_4.apk', - 'org.dyndns.fules.ck_20.apk', - 'urzip.apk', - 'urzip-badcert.apk', - 'urzip-badsig.apk', - 'urzip-release.apk', - 'urzip-release-unsigned.apk', - 'repo/com.politedroid_3.apk', - 'repo/com.politedroid_4.apk', - 'repo/com.politedroid_5.apk', - 'repo/com.politedroid_6.apk', - 'repo/obb.main.oldversion_1444412523.apk', - 'repo/obb.mainpatch.current_1619_another-release-key.apk', - 'repo/obb.mainpatch.current_1619.apk', - 'repo/obb.main.twoversions_1101613.apk', - 'repo/obb.main.twoversions_1101615.apk', - 'repo/obb.main.twoversions_1101617.apk', - 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', - ] - for f in good: - self.assertFalse(fdroidserver.update.has_known_vulnerability(f)) - with self.assertRaises(fdroidserver.exception.FDroidException): - fdroidserver.update.has_known_vulnerability('janus.apk') - - def test_get_apk_icon_when_src_is_none(self): - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.common.config = config - fdroidserver.update.config = config - - # pylint: disable=protected-access - icons_src = fdroidserver.update._get_apk_icons_src('urzip-release.apk', None) - self.assertFalse(icons_src) - - def test_strip_and_copy_image(self): - in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' - out_file = os.path.join(self.testdir, 'icon.png') - with self.assertLogs(level=logging.DEBUG): - fdroidserver.update._strip_and_copy_image(in_file, out_file) - self.assertTrue(os.path.exists(out_file)) - - def test_strip_and_copy_image_bad_filename(self): - in_file = basedir / 'corrupt-featureGraphic.png' - out_file = os.path.join(self.testdir, 'corrupt-featureGraphic.png') - with self.assertLogs(level=logging.DEBUG): - fdroidserver.update._strip_and_copy_image(in_file, out_file) - self.assertFalse(os.path.exists(out_file)) - - def test_strip_and_copy_image_unchanged(self): - in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' - out_file = os.path.join(self.testdir, 'icon.png') - shutil.copy2(in_file, out_file) - ctime = os.path.getctime(out_file) - delta = 0.01 - time.sleep(delta) # ensure reliable failure if file isn't preserved - with self.assertLogs(level=logging.DEBUG): # suppress log output - fdroidserver.update._strip_and_copy_image(in_file, out_file) - self.assertAlmostEqual(ctime, os.path.getctime(out_file), 
delta=delta) - - def test_strip_and_copy_image_in_file_ctime_changed(self): - out_file = os.path.join(self.testdir, 'icon.png') - with open(out_file, 'w') as fp: - fp.write('to be replaced') - size = os.path.getsize(out_file) - delta = 0.01 - time.sleep(delta) # ensure reliable failure when testing ctime - src_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' - in_file = os.path.join(self.testdir, 'in-icon.png') - shutil.copy(src_file, in_file) - time.sleep(delta) # ensure reliable failure when testing ctime - with self.assertLogs(level=logging.DEBUG): # suppress log output - fdroidserver.update._strip_and_copy_image(in_file, out_file) - self.assertNotEqual(size, os.path.getsize(out_file)) - self.assertTrue(os.path.getctime(in_file) <= os.path.getctime(out_file)) - # _strip_and_copy_image syncs mtime from in_file to out_file - self.assertAlmostEqual( - os.path.getmtime(in_file), os.path.getmtime(out_file), delta=delta - ) - - def test_strip_and_copy_image_in_file_mtime_changed(self): - in_file = basedir / 'metadata/info.guardianproject.urzip/en-US/images/icon.png' - out_file = os.path.join(self.testdir, 'icon.png') - shutil.copy(in_file, out_file) - os.utime(out_file, (12345, 12345)) # set atime/mtime to something old - with self.assertLogs(level=logging.DEBUG): # suppress log output - fdroidserver.update._strip_and_copy_image(in_file, out_file) - delta = 0.01 - self.assertNotAlmostEqual( - os.path.getctime(in_file), os.path.getctime(out_file), delta=delta - ) - # _strip_and_copy_image syncs mtime from in_file to out_file - self.assertAlmostEqual( - os.path.getmtime(in_file), os.path.getmtime(out_file), delta=delta - ) - - def test_create_metadata_from_template_empty_keys(self): - apk = {'packageName': 'rocks.janicerand'} - with mkdtemp() as tmpdir, TmpCwd(tmpdir): - os.mkdir('metadata') - with open('template.yml', 'w') as f: - f.write( - textwrap.dedent( - '''\ - Disabled: - License: - AuthorName: - AuthorEmail: - AuthorWebSite: - WebSite: - SourceCode: - IssueTracker: - Translation: - Changelog: - Donate: - Bitcoin: - Litecoin: - Name: - AutoName: - Summary: - RequiresRoot: - RepoType: - Repo: - Binaries: - Builds: - ArchivePolicy: - AutoUpdateMode: - UpdateCheckMode: - UpdateCheckIgnore: - VercodeOperation: - UpdateCheckName: - UpdateCheckData: - CurrentVersion: - CurrentVersionCode: - NoSourceSince: - ''' - ) - ) - fdroidserver.update.create_metadata_from_template(apk) - with open(os.path.join('metadata', 'rocks.janicerand.yml')) as f: - metadata_content = yaml.load(f, Loader=SafeLoader) - self.maxDiff = None - self.assertDictEqual( - metadata_content, - { - 'ArchivePolicy': None, - 'AuthorEmail': '', - 'AuthorName': '', - 'AuthorWebSite': '', - 'AutoName': 'rocks.janicerand', - 'AutoUpdateMode': '', - 'Binaries': '', - 'Bitcoin': '', - 'Builds': None, - 'Changelog': '', - 'CurrentVersion': '', - 'CurrentVersionCode': None, - 'Disabled': '', - 'Donate': '', - 'IssueTracker': '', - 'License': '', - 'Litecoin': '', - 'Name': 'rocks.janicerand', - 'NoSourceSince': '', - 'Repo': '', - 'RepoType': '', - 'RequiresRoot': None, - 'SourceCode': '', - 'Summary': 'rocks.janicerand', - 'Translation': '', - 'UpdateCheckData': '', - 'UpdateCheckIgnore': '', - 'UpdateCheckMode': '', - 'UpdateCheckName': '', - 'VercodeOperation': None, - 'WebSite': '', - }, - ) - - def test_insert_funding_yml_donation_links(self): - os.chdir(self.testdir) - os.mkdir('build') - content = textwrap.dedent( - """ - community_bridge: '' - custom: [LINK1, LINK2] - github: USERNAME - issuehunt: 
USERNAME - ko_fi: USERNAME - liberapay: USERNAME - open_collective: USERNAME - otechie: USERNAME - patreon: USERNAME - """ - ) - app = fdroidserver.metadata.App() - app.id = 'fake.app.id' - apps = {app.id: app} - os.mkdir(os.path.join('build', app.id)) - fdroidserver.update.insert_funding_yml_donation_links(apps) - for field in DONATION_FIELDS: - self.assertFalse(app.get(field)) - with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: - fp.write(content) - - fdroidserver.update.insert_funding_yml_donation_links(apps) - for field in DONATION_FIELDS: - self.assertIsNotNone(app.get(field), field) - self.assertEqual('LINK1', app.get('Donate')) - self.assertEqual('USERNAME', app.get('Liberapay')) - self.assertEqual('USERNAME', app.get('OpenCollective')) - - app['Donate'] = 'keepme' - app['Liberapay'] = 'keepme' - app['OpenCollective'] = 'keepme' - fdroidserver.update.insert_funding_yml_donation_links(apps) - for field in DONATION_FIELDS: - self.assertEqual('keepme', app.get(field)) - - def test_insert_funding_yml_donation_links_one_at_a_time(self): - """Exercise the FUNDING.yml code one entry at a time""" - os.chdir(self.testdir) - os.mkdir('build') - - app = fdroidserver.metadata.App() - app.id = 'fake.app.id' - apps = {app.id: app} - os.mkdir(os.path.join('build', app.id)) - fdroidserver.update.insert_funding_yml_donation_links(apps) - for field in DONATION_FIELDS: - self.assertIsNone(app.get(field)) - - content = textwrap.dedent( - """ - community_bridge: 'blah-de-blah' - github: USERNAME - issuehunt: USERNAME - ko_fi: USERNAME - liberapay: USERNAME - open_collective: USERNAME - patreon: USERNAME - """ - ) - for line in content.split('\n'): - if not line: - continue - app = fdroidserver.metadata.App() - app.id = 'fake.app.id' - apps = {app.id: app} - with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: - fp.write(line) - data = yaml.load(line, Loader=SafeLoader) - fdroidserver.update.insert_funding_yml_donation_links(apps) - if 'liberapay' in data: - self.assertEqual(data['liberapay'], app.get('Liberapay')) - elif 'open_collective' in data: - self.assertEqual(data['open_collective'], app.get('OpenCollective')) - else: - for v in data.values(): - self.assertEqual(app.get('Donate', '').split('/')[-1], v) - - def test_insert_funding_yml_donation_links_with_corrupt_file(self): - os.chdir(self.testdir) - os.mkdir('build') - app = fdroidserver.metadata.App() - app.id = 'fake.app.id' - apps = {app.id: app} - os.mkdir(os.path.join('build', app.id)) - with open(os.path.join('build', app.id, 'FUNDING.yml'), 'w') as fp: - fp.write( - textwrap.dedent( - """ - opencollective: foo - custom: [] - liberapay: : - """ - ) - ) - fdroidserver.update.insert_funding_yml_donation_links(apps) - for field in DONATION_FIELDS: - self.assertIsNone(app.get(field)) - - def test_sanitize_funding_yml(self): - with open(basedir / 'funding-usernames.yaml') as fp: - data = yaml.load(fp, Loader=SafeLoader) - for k, entries in data.items(): - for entry in entries: - if k in 'custom': - m = fdroidserver.update.sanitize_funding_yml_entry(entry) - else: - m = fdroidserver.update.sanitize_funding_yml_name(entry) - if k == 'bad': - self.assertIsNone(m) - else: - self.assertIsNotNone(m) - self.assertIsNone(fdroidserver.update.sanitize_funding_yml_entry('foo\nbar')) - self.assertIsNone(fdroidserver.update.sanitize_funding_yml_entry( - ''.join(chr(random.randint(65, 90)) for _ in range(2049)))) # nosec B311 - - # not recommended but valid entries - 
self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(12345)) - self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(5.0)) - self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(' WhyIncludeWhitespace ')) - self.assertIsNotNone(fdroidserver.update.sanitize_funding_yml_entry(['first', 'second'])) - - def test_set_localized_text_entry(self): - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - fdroidserver.update.options = fdroidserver.common.options - - files = { - 'full-description.txt': 'description', - 'short-description.txt': 'summary', - 'title.txt': 'name', - 'video-url.txt': 'video', - } - - for f, key in files.items(): - limit = config['char_limits'][key] - with open(f, 'w') as fp: - fp.write(''.join(random.choice(string.ascii_letters) for i in range(limit + 100))) # nosec B311 - locale = 'ru_US' - app = dict() - fdroidserver.update._set_localized_text_entry(app, locale, key, f) - self.assertEqual(limit, len(app['localized'][locale][key])) - - f = 'badlink-' + f - os.symlink('/path/to/nowhere', f) - app = dict() - fdroidserver.update._set_localized_text_entry(app, locale, key, f) - self.assertIsNone(app['localized'].get(locale, {}).get(key)) - - def test_set_author_entry(self): - os.chdir(self.testdir) - config = dict() - fdroidserver.common.fill_config_defaults(config) - fdroidserver.update.config = config - fdroidserver.update.options = fdroidserver.common.options - - f = 'contact-website.txt' - key = 'author' - url = 'https://f-droid.org/' - limit = config['char_limits']['author'] - with open(f, 'w') as fp: - fp.write(url) - fp.write('\n') - app = dict() - fdroidserver.update._set_author_entry(app, key, f) - self.assertEqual(url, app[key]) - - f = 'limits.txt' - key = 'author' - limit = config['char_limits']['author'] - for key in ('authorEmail', 'authorPhone', 'authorWebSite'): - with open(f, 'w') as fp: - fp.write(''.join(random.choice(string.ascii_letters) for i in range(limit + 100))) # nosec B311 - app = dict() - fdroidserver.update._set_author_entry(app, key, f) - self.assertEqual(limit, len(app[key])) - - f = 'badlink.txt' - os.symlink('/path/to/nowhere', f) - app = dict() - fdroidserver.update._set_author_entry(app, key, f) - self.assertIsNone(app.get(key)) - - def test_status_update_json(self): - fdroidserver.common.config = {} - fdroidserver.update.config = {} - fdroidserver.update.options = Options - with mkdtemp() as tmpdir: - os.chdir(tmpdir) - with mock.patch('sys.argv', ['fdroid update', '']): - fdroidserver.update.status_update_json({}, [], []) - with open('repo/status/update.json') as fp: - data = json.load(fp) - self.assertTrue('apksigner' in data) - - fdroidserver.update.config = { - 'apksigner': 'apksigner', - } - fdroidserver.update.status_update_json({}, [], []) - with open('repo/status/update.json') as fp: - data = json.load(fp) - self.assertEqual(shutil.which(fdroidserver.update.config['apksigner']), data['apksigner']) - - fdroidserver.update.config = {} - fdroidserver.common.fill_config_defaults(fdroidserver.update.config) - fdroidserver.update.status_update_json({}, [], []) - with open('repo/status/update.json') as fp: - data = json.load(fp) - self.assertEqual(fdroidserver.update.config.get('apksigner'), data['apksigner']) - self.assertEqual(fdroidserver.update.config['jarsigner'], data['jarsigner']) - self.assertEqual(fdroidserver.update.config['keytool'], data['keytool']) - - def test_scan_metadata_androguard(self): - - def 
_create_apkmetadata_object(apkName): - """Create an empty apk metadata object.""" - apk = {} - apk['apkName'] = apkName - apk['uses-permission'] = [] - apk['uses-permission-sdk-23'] = [] - apk['features'] = [] - apk['icons_src'] = {} - return apk - - apkList = [ - ( - 'org.dyndns.fules.ck_20.apk', - { - 'apkName': 'org.dyndns.fules.ck_20.apk', - 'uses-permission': [ - fdroidserver.update.UsesPermission( - name='android.permission.BIND_INPUT_METHOD', - maxSdkVersion=None, - ), - fdroidserver.update.UsesPermission( - name='android.permission.READ_EXTERNAL_STORAGE', - maxSdkVersion=None, - ), - fdroidserver.update.UsesPermission( - name='android.permission.VIBRATE', maxSdkVersion=None - ), - ], - 'uses-permission-sdk-23': [], - 'features': [], - 'icons_src': { - '240': 'res/drawable-hdpi-v4/icon_launcher.png', - '120': 'res/drawable-ldpi-v4/icon_launcher.png', - '160': 'res/drawable-mdpi-v4/icon_launcher.png', - '-1': 'res/drawable-mdpi-v4/icon_launcher.png', - }, - 'packageName': 'org.dyndns.fules.ck', - 'versionCode': 20, - 'versionName': 'v1.6pre2', - 'minSdkVersion': 7, - 'name': 'Compass Keyboard', - 'targetSdkVersion': 8, - 'nativecode': [ - 'arm64-v8a', - 'armeabi', - 'armeabi-v7a', - 'mips', - 'mips64', - 'x86', - 'x86_64', - ], - }, - ) - ] - - for apkfile, apkaapt in apkList: - apkandroguard = _create_apkmetadata_object(apkfile) - fdroidserver.update.scan_apk_androguard(apkandroguard, apkfile) - - self.maxDiff = None - self.assertEqual(apkaapt, apkandroguard) - - def test_exclude_disabled_apks(self): - os.chdir(self.testdir) - os.mkdir('repo') - testapk = os.path.join('repo', 'com.politedroid_6.apk') - testapk_new = os.path.join('repo', 'Politedroid-1.5.apk') - shutil.copy(basedir / testapk, testapk_new) - - config = dict() - fdroidserver.common.fill_config_defaults(config) - config['ndk_paths'] = dict() - fdroidserver.common.config = config - fdroidserver.update.config = config - - fdroidserver.common.options = Options - fdroidserver.update.options = fdroidserver.common.options - fdroidserver.update.options.clean = True - - app = fdroidserver.metadata.App() - app.id = 'com.politedroid' - apps = {app.id: app} - build = fdroidserver.metadata.Build() - build.versionCode = 6 - build.disable = "disabled" - app['Builds'] = [build] - - knownapks = fdroidserver.common.KnownApks() - apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False, apps) - self.assertEqual([], apks) - - def test_archive_old_apks_ArchivePolicy_0(self): - app = fdroidserver.metadata.App() - app.id = 'test' - app.ArchivePolicy = 0 - apps = {app.id: app} - with self.assertLogs(level='DEBUG') as cm: - fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) - self.assertEqual(cm.output, [ - "DEBUG:root:Checking archiving for test - apks:0, keepversions:0, archapks:0" - ]) - - def test_archive_old_apks(self): - app = fdroidserver.metadata.App() - app.id = 'test' - app.VercodeOperation = ['%c+1', '%c+2', '%c+3', '%c+4'] - apps = {app.id: app} - with self.assertLogs(level='DEBUG') as cm: - fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) - self.assertEqual(cm.output, [ - "DEBUG:root:Checking archiving for test - apks:0, keepversions:12, archapks:0" - ]) - - app = fdroidserver.metadata.App() - app.id = 'org.smssecure.smssecure' - app.CurrentVersionCode = 135 - apps = {app.id: app} - with self.assertLogs(level='DEBUG') as cm: - fdroidserver.update.archive_old_apks(apps, [], [], '', '', 3) - self.assertEqual(cm.output, [ - "DEBUG:root:Checking archiving for org.smssecure.smssecure - 
apks:0, keepversions:6, archapks:0" - ]) - - def test_categories_txt_is_removed_by_delete_unknown(self): - """categories.txt used to be a part of this system, now its nothing.""" - os.chdir(self.testdir) - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - categories_txt = Path('repo/categories.txt') - categories_txt.parent.mkdir() - categories_txt.write_text('placeholder') - - self.assertTrue(categories_txt.exists()) - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - self.assertFalse(categories_txt.exists()) - - def test_no_blank_auto_defined_categories(self): - """When no app has Categories, there should be no definitions in the repo.""" - os.chdir(self.testdir) - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text('Name: Polite') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertNotIn(CATEGORIES_CONFIG_NAME, index['repo']) - - def test_auto_defined_categories(self): - """Repos that don't define categories in config/ should use auto-generated.""" - os.chdir(self.testdir) - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text('Categories: [Time]') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertEqual( - {'Time': {'name': {'en-US': 'Time'}}}, - index['repo'][CATEGORIES_CONFIG_NAME], - ) - - def test_categories_with_only_icon_defined(self): - """If cateogories.yml only includes the icon, the name should be added.""" - os.chdir(self.testdir) - os.mkdir('config') - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - testvalue = 'Time' - Path('config/time.png').write_text('placeholder') - Path('config/categories.yml').write_text(testvalue + ': {icon: time.png}') - - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text(f'Categories: [{testvalue}]') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertEqual( - { - 'icon': { - 'en-US': { - 'name': '/icons/time.png', - 'sha256': '4097889236a2af26c293033feb964c4cf118c0224e0d063fec0a89e9d0569ef2', - 'size': 11, - } - }, - 'name': {'en-US': testvalue}, - }, - index['repo'][CATEGORIES_CONFIG_NAME][testvalue], - ) - - def test_auto_defined_categories_two_apps(self): - """Repos that don't define categories in config/ should use auto-generated.""" - os.chdir(self.testdir) - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - testapk = os.path.join('repo', 
'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text('Categories: [bar]') - testapk = os.path.join('repo', 'souch.smsbypass_9.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/souch.smsbypass.yml').write_text('Categories: [foo, bar]') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertEqual( - {'bar': {'name': {'en-US': 'bar'}}, 'foo': {'name': {'en-US': 'foo'}}}, - index['repo'][CATEGORIES_CONFIG_NAME], - ) - - def test_auto_defined_categories_mix_into_config_categories(self): - """Repos that don't define all categories in config/ also use auto-generated.""" - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('System: {name: System Apps}') - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text('Categories: [Time]') - testapk = os.path.join('repo', 'souch.smsbypass_9.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/souch.smsbypass.yml').write_text('Categories: [System, Time]') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertEqual( - { - 'System': {'name': {'en-US': 'System Apps'}}, - 'Time': {'name': {'en-US': 'Time'}}, - }, - index['repo'][CATEGORIES_CONFIG_NAME], - ) - - def test_empty_categories_not_in_index(self): - """A category with no apps should be ignored, even if defined in config.""" - os.chdir(self.testdir) - os.mkdir('config') - Path('config/categories.yml').write_text('System: {name: S}\nTime: {name: T}\n') - os.mkdir('metadata') - os.mkdir('repo') - fdroidserver.common.write_config_file( - 'repo_pubkey: ffffffffffffffffffffffffffffffffffffffff\n' - ) - - testapk = os.path.join('repo', 'com.politedroid_6.apk') - shutil.copy(basedir / testapk, testapk) - Path('metadata/com.politedroid.yml').write_text('Categories: [Time]') - - with mock.patch('sys.argv', ['fdroid update', '--delete-unknown', '--nosign']): - fdroidserver.update.main() - with open('repo/index-v2.json') as fp: - index = json.load(fp) - self.assertEqual( - {'Time': {'name': {'en-US': 'T'}}}, - index['repo'][CATEGORIES_CONFIG_NAME], - ) - - -class TestParseIpa(unittest.TestCase): - def test_parse_ipa(self): - self.maxDiff = None - try: - import biplist # Fedora does not have a biplist package - - biplist # silence the linters - except ImportError as e: - self.skipTest(str(e)) - ipa_path = os.path.join(basedir, 'com.fake.IpaApp_1000000000001.ipa') - result = fdroidserver.update.parse_ipa(ipa_path, 'fake_size', 'fake_sha') - self.assertDictEqual( - result, - { - 'apkName': 'com.fake.IpaApp_1000000000001.ipa', - 'hash': 'fake_sha', - 'hashType': 'sha256', - 'packageName': 'org.onionshare.OnionShare', - 'size': 'fake_size', - 'versionCode': 1000000000001, - 'versionName': '1.0.1', - 'ipa_DTPlatformVersion': '16.4', - 'ipa_MinimumOSVersion': '15.0', - 'ipa_entitlements': set(), - 'ipa_permissions': { - 'NSCameraUsageDescription': - 'Please allow access to your ' - 'camera, if you want to ' - 'create photos or videos for ' - 'direct sharing.', - 
'NSMicrophoneUsageDescription': - 'Please allow access to ' - 'your microphone, if you ' - 'want to create videos ' - 'for direct sharing.', - 'NSPhotoLibraryUsageDescription': - 'Please allow access to ' - 'your photo library, if ' - 'you want to share ' - 'photos.', - }, - 'name': 'OnionShare', - }, - ) - - -class TestUpdateVersionStringToInt(unittest.TestCase): - def test_version_string_to_int(self): - self.assertEqual( - fdroidserver.update.version_string_to_int("1.2.3"), 1000002000003 - ) - self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0003"), 3) - self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0"), 0) - self.assertEqual( - fdroidserver.update.version_string_to_int("4321.321.21"), 4321000321000021 - ) - self.assertEqual( - fdroidserver.update.version_string_to_int("18446744.073709.551615"), - 18446744073709551615, - ) - - def test_version_string_to_int_value_errors(self): - with self.assertRaises(ValueError): - fdroidserver.update.version_string_to_int("1.2.3a") - with self.assertRaises(ValueError): - fdroidserver.update.version_string_to_int("asdfasdf") - with self.assertRaises(ValueError): - fdroidserver.update.version_string_to_int("1.2.-3") - with self.assertRaises(ValueError): - fdroidserver.update.version_string_to_int("-1.2.-3") - with self.assertRaises(ValueError): - fdroidserver.update.version_string_to_int("0.0.0x3") - - -class TestScanRepoForIpas(unittest.TestCase): - def test_scan_repo_for_ipas_no_cache(self): - self.maxDiff = None - with mkdtemp() as tmpdir: - os.chdir(tmpdir) - os.mkdir("repo") - with open('repo/abc.Def_123.ipa', 'w') as f: - f.write('abc') - with open('repo/xyz.XXX_123.ipa', 'w') as f: - f.write('xyz') - - apkcache = mock.MagicMock() - repodir = "repo" - knownapks = mock.MagicMock() - - def mocked_parse(p, s, c): - # pylint: disable=unused-argument - return {'packageName': 'abc' if 'abc' in p else 'xyz'} - - with mock.patch('fdroidserver.update.parse_ipa', mocked_parse): - ipas, checkchanged = fdroidserver.update.scan_repo_for_ipas( - apkcache, repodir, knownapks - ) - - self.assertEqual(checkchanged, True) - self.assertEqual(len(ipas), 2) - package_names_in_ipas = [x['packageName'] for x in ipas] - self.assertTrue('abc' in package_names_in_ipas) - self.assertTrue('xyz' in package_names_in_ipas) - - apkcache_setter_package_name = [ - x.args[1]['packageName'] for x in apkcache.__setitem__.mock_calls - ] - self.assertTrue('abc' in apkcache_setter_package_name) - self.assertTrue('xyz' in apkcache_setter_package_name) - self.assertEqual(apkcache.__setitem__.call_count, 2) - - knownapks.recordapk.call_count = 2 - self.assertTrue( - unittest.mock.call('abc.Def_123.ipa') in knownapks.recordapk.mock_calls - ) - self.assertTrue( - unittest.mock.call('xyz.XXX_123.ipa') in knownapks.recordapk.mock_calls - ) - - -class TestParseIosScreenShotName(unittest.TestCase): - def setUp(self): - self.maxDiff = None - - def test_parse_ios_screenshot_name_atforamt_iphone8(self): - self.assertEqual( - fdroidserver.update.parse_ios_screenshot_name(Path("iPhone 8+ @ iOS 16-1.png")), - ("phoneScreenshots", "iPhone 8+", "iOS 16",), - ) - - def test_parse_ios_screenshot_name_atforamt_ipad13(self): - self.assertEqual( - fdroidserver.update.parse_ios_screenshot_name(Path("iPad Pro 12.9\" 2gen @ iOS 16-1.png")), - ("tenInchScreenshots", "iPad Pro 12.9\" 2gen", "iOS 16",), - ) - - def test_parse_ios_screenshot_name_underscoreforamt_ipad(self): - self.assertEqual( - fdroidserver.update.parse_ios_screenshot_name(Path("1_ipadPro129_1.1.png")), - 
("tenInchScreenshots", "ipadpro129", "unknown",), - ) - - def test_parse_ios_screenshot_name_underscoreforamt_iphone(self): - self.assertEqual( - fdroidserver.update.parse_ios_screenshot_name(Path("1_iphone6Plus_1.1.png")), - ("phoneScreenshots", "iphone6plus", "unknown",), - ) - - -class TestInsertLocalizedIosAppMetadata(unittest.TestCase): - - def test_insert_localized_ios_app_metadata(self): - self.maxDiff = None - - self.apps_with_packages = { - "org.fake": {} - } - - def _mock_discover(fastlane_dir): - self.assertEqual( - fastlane_dir, - Path('build/org.fake/fastlane'), - ) - return {"fake screenshots": "fake"} - - def _mock_copy(screenshots, package_name): - self.assertEqual(screenshots, {"fake screenshots": "fake"}) - self.assertEqual(package_name, "org.fake") - - with mock.patch('fdroidserver.update.discover_ios_screenshots', _mock_discover): - self.set_localized_mock = mock.Mock() - with mock.patch('fdroidserver.update.copy_ios_screenshots_to_repo', _mock_copy): - with mock.patch("fdroidserver.update._set_localized_text_entry", self.set_localized_mock): - return fdroidserver.update.insert_localized_ios_app_metadata( - self.apps_with_packages - ) - - self.assertListEqual( - self.set_localized_mock.call_args_list, - [ - mock.call({}, 'en-US', 'name', Path('build/org.fake/fastlane/metadata/en-US/name.txt')), - mock.call({}, 'en-US', 'summary', Path('build/org.fake/fastlane/metadata/en-US/subtitle.txt')), - mock.call({}, 'en-US', 'description', Path('build/org.fake/fastlane/metadata/en-US/description.txt')), - mock.call({}, 'de-DE', 'name', Path('build/org.fake/fastlane/metadata/de-DE/name.txt')), - mock.call({}, 'de-DE', 'summary', Path('build/org.fake/fastlane/metadata/de-DE/subtitle.txt')), - mock.call({}, 'de-DE', 'description', Path('build/org.fake/fastlane/metadata/de-DE/description.txt')), - ], - ) - - -class TestDiscoverIosScreenshots(unittest.TestCase): - def test_discover_ios_screenshots(self): - self.maxDiff = None - - with mkdtemp() as fastlane_dir: - fastlane_dir = Path(fastlane_dir) - (fastlane_dir / "screenshots/en-US").mkdir(parents=True) - with open(fastlane_dir / "screenshots/en-US/iPhone 8+ @ iOS 16-1.png", 'w') as f: - f.write("1") - with open(fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-1.png", "w") as f: - f.write("2") - with open(fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-2.png", "w") as f: - f.write("3") - (fastlane_dir / "screenshots/de-DE").mkdir(parents=True) - with open(fastlane_dir / "screenshots/de-DE/1_ipadPro129_1.1.png", "w") as f: - f.write("4") - - screenshots = fdroidserver.update.discover_ios_screenshots(fastlane_dir) - - self.assertDictEqual( - screenshots, - { - "en-US": { - "phoneScreenshots": [ - fastlane_dir / "screenshots/en-US/iPhone 8+ @ iOS 16-1.png", - ], - "tenInchScreenshots": [ - fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-1.png", - fastlane_dir / "screenshots/en-US/iPad Pro 12.9\" 2gen @ iOS 16-2.png", - ], - }, - "de-DE": { - "tenInchScreenshots": [ - fastlane_dir / "screenshots/de-DE/1_ipadPro129_1.1.png", - ], - }, - }, - ) - - -class TestCopyIosScreenshotsToRepo(unittest.TestCase): - def setUp(self): - self._td = mkdtemp() - os.chdir(self._td.name) - - def tearDown(self): - os.chdir(basedir) - self._td.cleanup() - - def test_copy_ios_screenshots_to_repo(self): - self.maxDiff = None - - screenshot_dir_en = Path("build/org.fake/fastlane/screenshots/en-US") - s1 = screenshot_dir_en / "iPhone 8+ @ iOS 16-1.png" - s2 = screenshot_dir_en / "iPad Pro 12.9\" 2gen @ iOS 16-1.png" 
- s3 = screenshot_dir_en / "iPad Pro 12.9\" 2gen @ iOS 16-2.png" - screenshot_dir_de = Path("build/org.fake/fastlane/screenshots/de-DE") - s4 = screenshot_dir_de / "1_ipadPro129_1.1.png" - - cmock = mock.Mock() - with mock.patch("fdroidserver.update._strip_and_copy_image", cmock): - fdroidserver.update.copy_ios_screenshots_to_repo( - { - "en-US": { - "phoneScreenshots": [s1], - "tenInchScreenshots": [s2, s3], - }, - "de-DE": { - "tenInchScreenshots": [s4], - }, - }, - "org.fake", - ) - - self.assertListEqual( - cmock.call_args_list, - [ - mock.call( - 'build/org.fake/fastlane/screenshots/en-US/iPhone 8+ @ iOS 16-1.png', - 'repo/org.fake/en-US/phoneScreenshots/iPhone_8+_@_iOS_16-1.png', - ), - mock.call( - 'build/org.fake/fastlane/screenshots/en-US/iPad Pro 12.9" 2gen @ iOS 16-1.png', - 'repo/org.fake/en-US/tenInchScreenshots/iPad_Pro_12.9"_2gen_@_iOS_16-1.png', - ), - mock.call( - 'build/org.fake/fastlane/screenshots/en-US/iPad Pro 12.9" 2gen @ iOS 16-2.png', - 'repo/org.fake/en-US/tenInchScreenshots/iPad_Pro_12.9"_2gen_@_iOS_16-2.png', - ), - mock.call( - 'build/org.fake/fastlane/screenshots/de-DE/1_ipadPro129_1.1.png', - 'repo/org.fake/de-DE/tenInchScreenshots/1_ipadPro129_1.1.png', - ), - ], - ) - - -class TestGetIpaIcon(unittest.TestCase): - def test_get_ipa_icon(self): - self.maxDiff = None - - with mkdtemp() as tmpdir: - tmpdir = Path(tmpdir) - (tmpdir / 'OnionBrowser.xcodeproj').mkdir() - with open(tmpdir / 'OnionBrowser.xcodeproj/project.pbxproj', "w") as f: - f.write("") - icondir = tmpdir / "fake_icon.appiconset" - icondir.mkdir() - with open(icondir / "Contents.json", "w", encoding="utf-8") as f: - f.write(""" - {"images": [ - {"scale": "2x", "size": "128x128", "filename": "nope"}, - {"scale": "1x", "size": "512x512", "filename": "nope"}, - {"scale": "1x", "size": "16x16", "filename": "nope"}, - {"scale": "1x", "size": "32x32", "filename": "yep"} - ]} - """) - - pfp = mock.Mock(return_value="fake_icon") - with mock.patch("fdroidserver.update._parse_from_pbxproj", pfp): - p = fdroidserver.update._get_ipa_icon(tmpdir) - self.assertEqual(str(icondir / "yep"), p) - - -class TestParseFromPbxproj(unittest.TestCase): - def test_parse_from_pbxproj(self): - self.maxDiff = None - - with mkdtemp() as tmpdir: - with open(Path(tmpdir) / "asdf.pbxproj", 'w', encoding="utf-8") as f: - f.write(""" - 230jfaod=flc' - ASSETCATALOG_COMPILER_APPICON_NAME = MyIcon; - cm opa1c p[m - """) - v = fdroidserver.update._parse_from_pbxproj( - Path(tmpdir) / "asdf.pbxproj", - "ASSETCATALOG_COMPILER_APPICON_NAME" - ) - self.assertEqual(v, "MyIcon") diff --git a/tests/test_vcs.py b/tests/test_vcs.py deleted file mode 100755 index a007feae..00000000 --- a/tests/test_vcs.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python3 - -import os -import unittest - -from git import Repo - -import fdroidserver.common -import fdroidserver.metadata - -from .shared_test_code import VerboseFalseOptions, mkdtemp - - -class VCSTest(unittest.TestCase): - """For some reason the VCS classes are in fdroidserver/common.py""" - - def setUp(self): - self._td = mkdtemp() - os.chdir(self._td.name) - - def tearDown(self): - self._td.cleanup() - - def test_remote_set_head_can_fail(self): - # First create an upstream repo with one commit - upstream_repo = Repo.init("upstream_repo") - with open(upstream_repo.working_dir + "/file", 'w') as f: - f.write("Hello World!") - upstream_repo.index.add([upstream_repo.working_dir + "/file"]) - upstream_repo.index.commit("initial commit") - commitid = upstream_repo.head.commit.hexsha - - # Now clone 
it once manually, like gitlab runner gitlab-runner sets up a repo during CI - clone1 = Repo.init("clone1") - clone1.create_remote("upstream", "file://" + upstream_repo.working_dir) - clone1.remote("upstream").fetch() - clone1.head.reference = clone1.commit(commitid) - clone1.head.reset(index=True, working_tree=True) - self.assertTrue(clone1.head.is_detached) - - # and now we want to use this clone as a source repo for fdroid build - config = {} - os.mkdir("build") - config['sdk_path'] = 'MOCKPATH' - config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')} - config['java_paths'] = {'fake': 'fake'} - fdroidserver.common.config = config - app = fdroidserver.metadata.App() - app.RepoType = 'git' - app.Repo = clone1.working_dir - app.id = 'com.gpl.rpg.AndorsTrail' - build = fdroidserver.metadata.Build() - build.commit = commitid - build.androidupdate = ['no'] - vcs, build_dir = fdroidserver.common.setup_vcs(app) - # force an init of the repo, the remote head error only occurs on the second gotorevision call - - fdroidserver.common.options = VerboseFalseOptions - vcs.gotorevision(build.commit) - fdroidserver.common.prepare_source( - vcs, - app, - build, - build_dir=build_dir, - srclib_dir="ignore", - extlib_dir="ignore", - ) - self.assertTrue(os.path.isfile("build/com.gpl.rpg.AndorsTrail/file")) diff --git a/tests/test_verify.py b/tests/test_verify.py deleted file mode 100755 index e5a2f7c4..00000000 --- a/tests/test_verify.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import shutil -import sys -import tempfile -import unittest -from pathlib import Path -from unittest.mock import patch - -from fdroidserver import verify - -TEST_APP_ENTRY = { - "1539780240.3885746": { - "local": { - "file": "unsigned/com.politedroid_6.apk", - "packageName": "com.politedroid", - "sha256": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", - "timestamp": 1234567.8900000, - "versionCode": "6", - "versionName": "1.5", - }, - "remote": { - "file": "tmp/com.politedroid_6.apk", - "packageName": "com.politedroid", - "sha256": "70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d", - "timestamp": 1234567.8900000, - "versionCode": "6", - "versionName": "1.5", - }, - "url": "https://f-droid.org/repo/com.politedroid_6.apk", - "verified": True, - } -} - -basedir = Path(__file__).parent - - -class VerifyTest(unittest.TestCase): - def setUp(self): - self.tempdir = tempfile.TemporaryDirectory() - os.chdir(self.tempdir.name) - self.repodir = Path('repo') - self.repodir.mkdir() - self.apk_reports_json = basedir / 'org.fdroid.fdroid_1019051.apk.json' - - def tearDown(self): - self.tempdir.cleanup() - - def test_get_verified_json_creation(self): - self.assertEqual({'packages': {}}, verify.get_verified_json('does-not-exist')) - - def test_get_verified_json_existing(self): - f = 'verified.json' - reports = {'packages': {'placeholder': {}}} - with open(f, 'w') as fp: - json.dump(reports, fp) - self.assertEqual(reports, verify.get_verified_json(f)) - - def test_get_verified_json_pull_in_one_report(self): - shutil.copy(self.apk_reports_json, self.tempdir.name) - with open(self.apk_reports_json) as fp: - reports = json.load(fp) - self.assertEqual( - {'packages': {'org.fdroid.fdroid': [reports['1708238023.6572325']]}}, - verify.get_verified_json('does-not-exist'), - ) - - def test_get_verified_json_ignore_corrupt(self): - f = 'verified.json' - with open(f, 'w') as fp: - fp.write("""{"packages": {"placeholder": {""") - shutil.copy(self.apk_reports_json, 
self.tempdir.name) - with open(self.apk_reports_json) as fp: - reports = json.load(fp) - self.assertEqual( - {'packages': {'org.fdroid.fdroid': [reports['1708238023.6572325']]}}, - verify.get_verified_json(f), - ) - - def test_get_verified_json_ignore_apk_reports(self): - """When an intact verified.json exists, it should ignore the .apk.json reports.""" - f = 'verified.json' - placeholder = {'packages': {'placeholder': {}}} - with open(f, 'w') as fp: - json.dump(placeholder, fp) - shutil.copy(self.apk_reports_json, self.tempdir.name) - with open(self.apk_reports_json) as fp: - json.load(fp) - self.assertEqual(placeholder, verify.get_verified_json(f)) - - @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian') - @patch('fdroidserver.common.sha256sum') - def test_write_json_report(self, sha256sum): - sha256sum.return_value = ( - '70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d' - ) - os.mkdir('tmp') - os.mkdir('unsigned') - verified_json = Path('unsigned/verified.json') - packageName = 'com.politedroid' - apk_name = packageName + '_6.apk' - remote_apk = 'tmp/' + apk_name - unsigned_apk = 'unsigned/' + apk_name - # TODO common.use apk_strip_v1_signatures() on unsigned_apk - shutil.copy(basedir / 'repo' / apk_name, remote_apk) - shutil.copy(basedir / 'repo' / apk_name, unsigned_apk) - url = TEST_APP_ENTRY['1539780240.3885746']['url'] - - self.assertFalse(verified_json.exists()) - verify.write_json_report(url, remote_apk, unsigned_apk, {}) - self.assertTrue(verified_json.exists()) - # smoke check status JSON - with verified_json.open() as fp: - firstpass = json.load(fp) - - verify.write_json_report(url, remote_apk, unsigned_apk, {}) - with verified_json.open() as fp: - secondpass = json.load(fp) - - self.assertEqual(firstpass, secondpass) - - @patch('fdroidserver.common.sha256sum') - @patch('fdroidserver.verify.write_verified_json', lambda s: s) - def test_write_json_report_appid_json(self, sha256sum): - sha256sum.return_value = ( - '70c2f776a2bac38a58a7d521f96ee0414c6f0fb1de973c3ca8b10862a009247d' - ) - os.mkdir('tmp') - os.mkdir('unsigned') - appid = 'com.politedroid' - apk_name = f'{appid}_6.apk' - remote_apk = 'tmp/' + apk_name - unsigned_apk = 'unsigned/' + apk_name - shutil.copy(basedir / 'repo' / apk_name, remote_apk) - shutil.copy(basedir / 'repo' / apk_name, unsigned_apk) - url = TEST_APP_ENTRY['1539780240.3885746']['url'] - with open(f'unsigned/{apk_name}.json', 'w') as fp: - json.dump(TEST_APP_ENTRY, fp) - - # make a fake existing report where the newer one broke verifiability - with open(f'unsigned/{appid}_16.apk.json', 'w') as fp: - json.dump( - { - "1444444444.4444444": { - 'local': {'versionCode': 16}, - 'verified': False, - }, - "1333333333.3333333": { - 'local': {'versionCode': 16}, - 'verified': True, - }, - }, - fp, - ) - - verify.write_json_report(url, remote_apk, unsigned_apk, {'fake': 'fail'}) - with open(f'unsigned/{appid}.json') as fp: - self.assertEqual( - { - 'apkReports': [ - 'unsigned/com.politedroid_6.apk.json', - 'unsigned/com.politedroid_16.apk.json', - ], - 'lastRunVerified': False, - }, - json.load(fp), - ) diff --git a/tests/testcommon.py b/tests/testcommon.py new file mode 100644 index 00000000..a637012e --- /dev/null +++ b/tests/testcommon.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2017, Michael Poehn +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either 
version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import os + + +class TmpCwd(): + """Context-manager for temporarily changing the current working + directory. + """ + + def __init__(self, new_cwd): + self.new_cwd = new_cwd + + def __enter__(self): + self.orig_cwd = os.getcwd() + os.chdir(self.new_cwd) + + def __exit__(self, a, b, c): + os.chdir(self.orig_cwd) diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png deleted file mode 100644 index 0d5e3591..00000000 Binary files a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png and /dev/null differ diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png deleted file mode 100644 index 17a31d54..00000000 Binary files a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png and /dev/null differ diff --git a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png b/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png deleted file mode 100644 index 717be319..00000000 Binary files a/tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png and /dev/null differ diff --git a/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml b/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml deleted file mode 100644 index a86e2c53..00000000 --- a/tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml +++ /dev/null @@ -1,25 +0,0 @@ -Categories: - - Phone & SMS - - System -License: GPL-3.0-only -AuthorEmail: welefon@jolo.software -SourceCode: https://codeberg.org/wivewa/wivewa-dialer-android -IssueTracker: https://codeberg.org/wivewa/wivewa-dialer-android/issues - -AutoName: Welefon - -RepoType: git -Repo: https://codeberg.org/wivewa/wivewa-dialer-android.git - -Builds: - - versionName: 1.7.0 - versionCode: 13 - commit: 3550193fa6b6f7836876f2ca9bf5819a34eef404 - subdir: app - gradle: - - yes - -AutoUpdateMode: Version -UpdateCheckMode: Tags -CurrentVersion: 1.7.0 -CurrentVersionCode: 13 diff --git a/tests/triple-t-2/build/org.piwigo.android/app/.gitignore b/tests/triple-t-2/build/org.piwigo.android/app/.gitignore deleted file mode 100644 index 796b96d1..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/build diff --git a/tests/triple-t-2/build/org.piwigo.android/app/build.gradle b/tests/triple-t-2/build/org.piwigo.android/app/build.gradle deleted file mode 100644 index 6b8dd5de..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/build.gradle +++ /dev/null @@ -1,151 +0,0 @@ -plugins { - id("com.android.application") - id("com.github.triplet.play") version "2.0.0" -} - -def isCi = "true" == 
System.getenv("CI") -def preDexEnabled = "true" == System.getProperty("pre-dex", "true") - - -if (project.file('../PiwigoSigning.properties').exists()) { - Properties props = new Properties() - props.load(new FileInputStream(file('../PiwigoSigning.properties'))) - - android { - signingConfigs { - release { - storeFile file("../piwigo_android_keystore.jks") - storePassword props['keystore.password'] - keyAlias 'publishing' - keyPassword props['key.password'] - } - localRelease { - storeFile file("${System.properties['user.home']}${File.separator}/.android_keystore_default") - storePassword props['keystore_default.password'] - keyAlias 'defaultRelease' - keyPassword props['key_default.password'] - } - - } - - buildTypes { - release { - signingConfig signingConfigs.release - } - localRelease { - signingConfig signingConfigs.localRelease - } - } - } - play { - defaultToAppBundles = true - track = 'beta' - } -} - -android { - compileSdkVersion 29 - defaultConfig { - applicationId "org.piwigo.android" - minSdkVersion 16 - targetSdkVersion 29 - versionCode 95 - versionName "0.9.5-beta" - multiDexEnabled true - } - buildTypes { - debug { - applicationIdSuffix ".debug" - versionNameSuffix "-debug" - } - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' - } - } - flavorDimensions "default" - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - lintOptions { - disable 'InvalidPackage' - abortOnError false - } - dexOptions { - preDexLibraries = preDexEnabled && !isCi - - } - dataBinding { - enabled = true - } - testOptions { - unitTests { - returnDefaultValues = true - includeAndroidResources = true - } - } - /* Triple-T GPP is applied as plugin in all cases, so we need to configure it always */ - play { - serviceAccountCredentials = file("../upload_key.json") - } -} - -def daggerVersion = '2.23.2' -def okhttpVersion = '3.11.0' -def retrofitVersion = '2.6.1' -def assertjVersion = '1.2.0' -def acraVersion = '5.4.0' - -dependencies { - implementation fileTree(include: ['*.jar'], dir: 'libs') - implementation 'androidx.appcompat:appcompat:1.1.0' - implementation 'androidx.annotation:annotation:1.1.0' - implementation 'com.google.android.material:material:1.0.0' - implementation 'androidx.cardview:cardview:1.0.0' - implementation 'com.android.support:multidex:1.0.3' - implementation "com.google.dagger:dagger:${daggerVersion}" - implementation 'androidx.lifecycle:lifecycle-extensions:2.1.0' - annotationProcessor 'androidx.lifecycle:lifecycle-compiler:2.1.0' - - annotationProcessor "com.google.dagger:dagger-compiler:${daggerVersion}" - implementation "com.google.dagger:dagger-android:${daggerVersion}" - implementation "com.google.dagger:dagger-android-support:${daggerVersion}" - annotationProcessor "com.google.dagger:dagger-android-processor:${daggerVersion}" - implementation "com.squareup.okhttp3:okhttp:${okhttpVersion}" - implementation "com.squareup.okhttp3:logging-interceptor:${okhttpVersion}" - implementation "com.squareup.retrofit2:retrofit:${retrofitVersion}" - implementation "com.squareup.retrofit2:converter-gson:${retrofitVersion}" - implementation "com.squareup.retrofit2:adapter-rxjava:${retrofitVersion}" - implementation 'com.squareup.picasso:picasso:2.5.2' - implementation 'com.jakewharton.picasso:picasso2-okhttp3-downloader:1.1.0' - implementation 'io.reactivex:rxjava:1.3.2' - implementation 'io.reactivex:rxandroid:1.2.1' - implementation 
'com.google.guava:guava:24.1-jre' - annotationProcessor 'com.google.guava:guava:24.1-jre' - implementation 'org.apache.commons:commons-lang3:3.8.1' - - implementation "ch.acra:acra-mail:$acraVersion" - implementation "ch.acra:acra-dialog:$acraVersion" - - implementation 'com.github.jorgecastilloprz:fabprogresscircle:1.01@aar' - implementation "com.leinardi.android:speed-dial:3.0.0" - implementation 'com.github.tingyik90:snackprogressbar:6.1.1' - implementation 'org.greenrobot:eventbus:3.1.1' - /* Don't forget to add to string libraries if you add a library here. */ - - debugImplementation 'com.squareup.leakcanary:leakcanary-android:2.0-beta-3' - - testImplementation 'junit:junit:4.12' - testImplementation 'org.robolectric:robolectric:4.3' - testImplementation("com.squareup.assertj:assertj-android:${assertjVersion}") { - exclude group: 'com.android.support' - } - testAnnotationProcessor "com.google.dagger:dagger-compiler:${daggerVersion}" - testImplementation 'androidx.arch.core:core-testing:2.1.0' - testImplementation 'org.mockito:mockito-core:2.19.0' - testImplementation 'com.google.guava:guava:24.1-jre' - testImplementation 'androidx.appcompat:appcompat:1.1.0' - testAnnotationProcessor 'com.google.guava:guava:24.1-jre' - testImplementation 'com.google.code.findbugs:jsr305:3.0.2' -} diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml deleted file mode 100644 index fe7ac7bd..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - org.piwigo.account_debug - - diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml b/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml deleted file mode 100644 index 093dda0e..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml +++ /dev/null @@ -1,3 +0,0 @@ - - Piwigo Debug - diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java b/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java deleted file mode 100644 index 6037e1fb..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Piwigo for Android - * Copyright (C) 2016-2017 Piwigo Team http://piwigo.org - * Copyright (C) 2018 Raphael Mack http://www.raphael-mack.de - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package org.piwigo; - -import android.app.Activity; -import android.app.Application; -import android.app.Service; -import android.content.Context; -import androidx.databinding.DataBindingUtil; -import androidx.multidex.MultiDex; - -import org.acra.ACRA; -import org.acra.ReportField; -import org.acra.annotation.AcraCore; -import org.acra.annotation.AcraDialog; -import org.acra.annotation.AcraMailSender; -import org.acra.data.StringFormat; -import org.piwigo.helper.DialogHelper; -import org.piwigo.helper.NetworkHelper; -import org.piwigo.helper.NotificationHelper; -import org.piwigo.internal.di.component.ApplicationComponent; -import org.piwigo.internal.di.component.BindingComponent; -import org.piwigo.internal.di.component.DaggerApplicationComponent; -import org.piwigo.internal.di.component.DaggerBindingComponent; -import org.piwigo.internal.di.module.ApplicationModule; - -import javax.inject.Inject; - -import dagger.android.AndroidInjector; -import dagger.android.DispatchingAndroidInjector; -import dagger.android.HasActivityInjector; -import dagger.android.HasServiceInjector; - -@AcraCore(reportContent = { ReportField.APP_VERSION_CODE, - ReportField.APP_VERSION_NAME, - ReportField.USER_COMMENT, - ReportField.SHARED_PREFERENCES, - ReportField.ANDROID_VERSION, - ReportField.CUSTOM_DATA, - ReportField.STACK_TRACE, - ReportField.BUILD, - ReportField.BUILD_CONFIG, - ReportField.CRASH_CONFIGURATION, - ReportField.DISPLAY - }, - alsoReportToAndroidFramework = true, - reportFormat = StringFormat.KEY_VALUE_LIST -) -@AcraMailSender(mailTo = "android@piwigo.org") -@AcraDialog(resCommentPrompt = R.string.crash_dialog_comment_prompt, - resText = R.string.crash_dialog_text) -public class PiwigoApplication extends Application implements HasActivityInjector, HasServiceInjector { - - @Inject DispatchingAndroidInjector dispatchingAndroidInjector; - @Inject DispatchingAndroidInjector dispatchingAndroidServiceInjector; - - private ApplicationComponent applicationComponent; - - @Override public void onCreate() { - super.onCreate(); - - new NetworkHelper(); - new NotificationHelper(getApplicationContext()); - new DialogHelper() -; initializeDependencyInjection(); - } - - @Override - protected void attachBaseContext(Context base) { - super.attachBaseContext(base); - MultiDex.install(base); - ACRA.init(this); - } - - @Override public AndroidInjector activityInjector() { - return dispatchingAndroidInjector; - } - - private void initializeDependencyInjection() { - applicationComponent = DaggerApplicationComponent.builder() - .applicationModule(new ApplicationModule(this)) - .build(); - applicationComponent.inject(this); - - BindingComponent bindingComponent = DaggerBindingComponent.builder() - .applicationComponent(applicationComponent) - .build(); - DataBindingUtil.setDefaultComponent(bindingComponent); - } - - /** - * Returns an {@link AndroidInjector} of {@link Service}s. 
- */ - @Override - public AndroidInjector serviceInjector() { - return dispatchingAndroidServiceInjector; - } -} \ No newline at end of file diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt deleted file mode 100644 index f0291ce8..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-email.txt +++ /dev/null @@ -1 +0,0 @@ -android@piwigo.org diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt deleted file mode 100644 index 40046686..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/contact-website.txt +++ /dev/null @@ -1 +0,0 @@ -https://www.piwigo.org diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt deleted file mode 100644 index beb9970b..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/default-language.txt +++ /dev/null @@ -1 +0,0 @@ -en-US diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt deleted file mode 100644 index 69efe2c6..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/full-description.txt +++ /dev/null @@ -1,5 +0,0 @@ -Piwigo ist eine Android-APP für die freie Open Source-Foto-Hosting-Plattform Piwigo. Mit dieser App können Sie sich selbst gehostete Galerie ansehen und Fotos von Ihrem Smart-Gerät hochladen. - -Piwigo wird von einer aktiven Community von Benutzern und Entwicklern bereitgestellt. - -Piwigo ermöglicht eine eigene Fotogalerie im Internet zu erstellen und bietet viele leistungsstarke Funktionen wie Alben, Tags, Geolokalisierung, viele Anpassungsstufen, Upload von Besuchern, Privatsphäre, Kalender oder Statistiken. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt deleted file mode 100644 index 4ea23371..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/short-description.txt +++ /dev/null @@ -1 +0,0 @@ -Greifen Sie auf die Bilder Ihrer Piwigo-Foto-Gallerie zu. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt deleted file mode 100644 index e0394ea1..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/de-DE/title.txt +++ /dev/null @@ -1 +0,0 @@ -Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt deleted file mode 100644 index 2d3e92ec..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/full-description.txt +++ /dev/null @@ -1,5 +0,0 @@ -Piwigo is a native Android Application for the free and open source photo hosting platform Piwigo. With this app you can browse you self-hosted gallery and upload photos from your smart device. 
- -Piwigo is built by an active community of users and developers. - -Piwigo empowers you to create your own photo gallery on the web and includes many powerful features such as albums, tags, geolocation, many levels of customization, upload by visitors, privacy, calendar or statistics. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png deleted file mode 100644 index e2d5035a..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/feature-graphic/piwigo-full.png and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png deleted file mode 100644 index 02f745b9..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/icon/piwigo-icon.png and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg deleted file mode 100644 index 66ef322e..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/01_Login.jpg and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg deleted file mode 100644 index c21b517b..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/02_Albums.jpg and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg deleted file mode 100644 index 5cacc9b1..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/03_Photos.jpg and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg deleted file mode 100644 index 27a60e30..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/04_Albums_horizontal.jpg and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg deleted file mode 100644 index 6dc4cdf1..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/phone-screenshots/05_Menu.jpg and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png 
b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png deleted file mode 100644 index c86cd9fd..00000000 Binary files a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/graphics/tablet-screenshots/01_Login.png and /dev/null differ diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt deleted file mode 100644 index bd96f636..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/short-description.txt +++ /dev/null @@ -1 +0,0 @@ -Access photos in your Piwigo photo gallery. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt deleted file mode 100644 index e0394ea1..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt deleted file mode 100644 index 65654165..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/full-description.txt +++ /dev/null @@ -1,5 +0,0 @@ -Piwigo est une application Android native pour la plate-forme d'hébergement photo gratuite et open source Piwigo. Avec cette application, vous pouvez parcourir votre galerie auto-hébergée et télécharger des photos depuis votre smartphone. - -Piwigo est développé par une communauté active d'utilisateurs et de développeurs. - -Piwigo vous permet de créer votre propre galerie de photos sur le Web et comprend de nombreuses fonctionnalités puissantes telles que des albums, des tags, la géolocalisation, de nombreux niveaux de personnalisation, le téléchargement par les visiteurs, la confidentialité, un calendrier ou de statistiques. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt deleted file mode 100644 index 90d5f1f5..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/short-description.txt +++ /dev/null @@ -1 +0,0 @@ -Accédez aux photos dans votre galerie de photos Piwigo. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt deleted file mode 100644 index e0394ea1..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/fr-FR/title.txt +++ /dev/null @@ -1 +0,0 @@ -Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt deleted file mode 100644 index 2d3e92ec..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/full-description.txt +++ /dev/null @@ -1,5 +0,0 @@ -Piwigo is a native Android Application for the free and open source photo hosting platform Piwigo. With this app you can browse you self-hosted gallery and upload photos from your smart device. 
- -Piwigo is built by an active community of users and developers. - -Piwigo empowers you to create your own photo gallery on the web and includes many powerful features such as albums, tags, geolocation, many levels of customization, upload by visitors, privacy, calendar or statistics. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt deleted file mode 100644 index bd96f636..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/short-description.txt +++ /dev/null @@ -1 +0,0 @@ -Access photos in your Piwigo photo gallery. diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt deleted file mode 100644 index e0394ea1..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/listings/kn-IN/title.txt +++ /dev/null @@ -1 +0,0 @@ -Piwigo diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt deleted file mode 100644 index 84d85227..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/de-DE/default.txt +++ /dev/null @@ -1,7 +0,0 @@ -Dies ist eine Beta-Version, mit großen Verbesserungen, aber noch nicht für den produktiven Einsatz vorgesehen. Seien Sie vorsichtig und stellen Sie sicher, ein vollständiges Backup zu haben. - -In dieser Version haben wir -- die Unterstützung für Android 4.0 und 4.1 entfernt -- neues Design und weitere Sprachen hinzugefügt -- Unterstützung der Erstellung von Alben -- automatische Korrektur der Galerie-Seite hinzugefügt (falls möglich wird automatisch HTTPS verwendet) diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt deleted file mode 100644 index e2f915ca..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/en-US/default.txt +++ /dev/null @@ -1,9 +0,0 @@ -This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. - -In this version we -- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" -- adjusted the UI to new Piwigo style -- added German, French and initial Kannada translation -- support creation of albums -- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) - diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt deleted file mode 100644 index 699a1549..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/fr-FR/default.txt +++ /dev/null @@ -1,9 +0,0 @@ -This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. 
- -Dans cette version nous avons : -- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" -- adjusted the UI to new Piwigo style -- added German, French and initial Kannada translation -- support creation of albums -- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) - diff --git a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt b/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt deleted file mode 100644 index e2f915ca..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/app/src/main/play/release-notes/kn-IN/default.txt +++ /dev/null @@ -1,9 +0,0 @@ -This is a beta version, with major improvements but still not intended for production use. Please be careful and ensure you have proper backups of your gallery data. - -In this version we -- dropped support for Android 4.0 "Ice Cream Sandwich" and 4.1 "Jelly Bean" -- adjusted the UI to new Piwigo style -- added German, French and initial Kannada translation -- support creation of albums -- added auto detection correction of the gallery site (automatically choosing HTTPS if possible) - diff --git a/tests/triple-t-2/build/org.piwigo.android/build.gradle b/tests/triple-t-2/build/org.piwigo.android/build.gradle deleted file mode 100644 index 450ccbe8..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/build.gradle +++ /dev/null @@ -1,21 +0,0 @@ -buildscript { - repositories { - jcenter() - mavenCentral() - maven { url 'https://maven.google.com' } - google() - } - dependencies { - classpath 'com.android.tools.build:gradle:3.5.0' - } -} - -allprojects { - repositories { - google() - jcenter() - mavenCentral() - maven { url 'https://maven.google.com' } - maven { url "https://jitpack.io" } - } -} diff --git a/tests/triple-t-2/build/org.piwigo.android/settings.gradle b/tests/triple-t-2/build/org.piwigo.android/settings.gradle deleted file mode 100644 index 3cc36ec2..00000000 --- a/tests/triple-t-2/build/org.piwigo.android/settings.gradle +++ /dev/null @@ -1,2 +0,0 @@ -rootProject.name = 'Piwigo-Android' -include ':app' diff --git a/tests/triple-t-2/metadata/org.piwigo.android.yml b/tests/triple-t-2/metadata/org.piwigo.android.yml deleted file mode 100644 index 77aaa3eb..00000000 --- a/tests/triple-t-2/metadata/org.piwigo.android.yml +++ /dev/null @@ -1,29 +0,0 @@ -Categories: - - Graphics - - Multimedia -License: GPL-3.0-or-later -AuthorName: Piwigo Mobile Apps Team -AuthorEmail: android@piwigo.org -WebSite: https://piwigo.org/ -SourceCode: https://github.com/Piwigo/Piwigo-Android -IssueTracker: https://github.com/Piwigo/Piwigo-Android/issues -Translation: https://crowdin.com/project/piwigo-android - -AutoName: Piwigo - -RepoType: git -Repo: https://github.com/Piwigo/Piwigo-Android - -Builds: - - versionName: 0.9.5-beta - versionCode: 95 - commit: v0.9.5 - subdir: app - gradle: - - yes - -AutoUpdateMode: Version v%v -UpdateCheckMode: Tags -UpdateCheckIgnore: (alpha|beta|rc|RC|dev) -CurrentVersion: 0.9.5-beta -CurrentVersionCode: 95 diff --git a/tests/triple-t-anysoftkeyboard/.gitignore b/tests/triple-t-anysoftkeyboard/.gitignore deleted file mode 100644 index 0aba28e9..00000000 --- a/tests/triple-t-anysoftkeyboard/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!build/ diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt 
b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 6e75c228..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -Dutch for AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 7c5eeaeb..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/ime/app/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle b/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle deleted file mode 100644 index 523aadb7..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.anysoftkeyboard.languagepack.dutch/settings.gradle +++ /dev/null @@ -1,77 +0,0 @@ -rootProject.name = 'AnySoftKeyboard' -include ':deployment' - -include ':api' - -include ':addons:base' -include ':addons:languages' -include ':addons:languages:english', ':addons:languages:english:pack' - -//list of packs should be alphabetized-ordered -include ':addons:languages:afrikaans_oss:pack', ':addons:languages:afrikaans_oss:apk' -include ':addons:languages:armenian2:pack', ':addons:languages:armenian2:apk' -include ":addons:languages:belarusian:pack", ":addons:languages:belarusian:apk" -include ':addons:languages:brazilian:pack', ':addons:languages:brazilian:apk' -include ':addons:languages:catalan:pack', ':addons:languages:catalan:apk' -include ':addons:languages:coptic:pack', ':addons:languages:coptic:apk' -include ':addons:languages:czech:pack', ':addons:languages:czech:apk' -include ':addons:languages:english:pack', ':addons:languages:english:apk' -include ':addons:languages:esperanto:pack', ':addons:languages:esperanto:apk' -include ':addons:languages:finnish:pack', ':addons:languages:finnish:apk' -include ':addons:languages:french:pack', ':addons:languages:french:apk' -include ':addons:languages:greek:pack', ':addons:languages:greek:apk' -include ':addons:languages:hebrew:pack', ':addons:languages:hebrew:apk' -include ':addons:languages:indonesian:pack', ':addons:languages:indonesian:apk' -include ':addons:languages:italian:pack', ':addons:languages:italian:apk' -include ':addons:languages:lithuanian:pack', ':addons:languages:lithuanian:apk' -include ':addons:languages:nias:pack', ':addons:languages:nias:apk' -include ':addons:languages:ossturkish:pack', ':addons:languages:ossturkish:apk' -include ':addons:languages:persian:pack', ':addons:languages:persian:apk' -include ':addons:languages:piedmontese:pack', ':addons:languages:piedmontese:apk' -include ':addons:languages:romanian:pack', ':addons:languages:romanian:apk' -include ':addons:languages:russian2:pack', ':addons:languages:russian2:apk' -include ':addons:languages:sardinian:pack', ':addons:languages:sardinian:apk' -include ':addons:languages:serbian:pack', ':addons:languages:serbian:apk' -include ':addons:languages:spain:pack', ':addons:languages:spain:apk' -include ':addons:languages:tamazight:pack', ':addons:languages:tamazight:apk' -include ':addons:languages:arabic:pack', 
':addons:languages:arabic:apk' -include ':addons:languages:danish:pack', ':addons:languages:danish:apk' -include ':addons:languages:basque:pack', ':addons:languages:basque:apk' -include ':addons:languages:bulgarian:pack', ':addons:languages:bulgarian:apk' -include ':addons:languages:german:pack', ':addons:languages:german:apk' -include ':addons:languages:croatian:pack', ':addons:languages:croatian:apk' -include ':addons:languages:georgian:pack', ':addons:languages:georgian:apk' -include ':addons:languages:latvian:pack', ':addons:languages:latvian:apk' -include ':addons:languages:kurdish:pack', ':addons:languages:kurdish:apk' -include ':addons:languages:hungarian:pack', ':addons:languages:hungarian:apk' -include ':addons:languages:kachin:pack', ':addons:languages:kachin:apk' -include ':addons:languages:dutch:pack', ':addons:languages:dutch:apk' -include ':addons:languages:luxembourgish:pack', ':addons:languages:luxembourgish:apk' -include ':addons:languages:norwegian:pack', ':addons:languages:norwegian:apk' -include ':addons:languages:polish:pack', ':addons:languages:polish:apk' -include ':addons:languages:rusyn:pack', ':addons:languages:rusyn:apk' -include ':addons:languages:sinhala:pack', ':addons:languages:sinhala:apk' -include ':addons:languages:hindi:pack', ':addons:languages:hindi:apk' -include ':addons:languages:thai:pack', ':addons:languages:thai:apk' -include ':addons:languages:portuguese:pack', ':addons:languages:portuguese:apk' - -//list of themes should be alphabetized-ordered -include ':addons:themes' -include ':addons:themes:classic_pc:pack', ':addons:themes:classic_pc:apk' -include ':addons:themes:ics:pack', ':addons:themes:ics:apk' -include ':addons:themes:israel64:pack', ':addons:themes:israel64:apk' -include ':addons:themes:three_d:pack', ':addons:themes:three_d:apk' - -//list of quick-text should be alphabetized-ordered -include ':addons:quicktexts' -include ':addons:quicktexts:bbcodes:pack', ':addons:quicktexts:bbcodes:apk' - -include ':ime' -include ':ime:base', ':ime:base-rx', ':ime:base-test', ':ime:prefs' -include ':ime:remote', ':ime:fileprovider' -include ':ime:addons' -include ':ime:dictionaries', ':ime:dictionaries:jnidictionaryv1', ':ime:dictionaries:jnidictionaryv2', ':ime:nextword' -include ':ime:pixel', ':ime:overlay' -include 'ime:voiceime' -include ':ime:app' - diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 6e75c228..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/addons/languages/dutch/apk/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -Dutch for AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 7c5eeaeb..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/ime/app/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -AnySoftKeyboard diff --git a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle b/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle deleted file mode 
100644 index 523aadb7..00000000 --- a/tests/triple-t-anysoftkeyboard/build/com.menny.android.anysoftkeyboard/settings.gradle +++ /dev/null @@ -1,77 +0,0 @@ -rootProject.name = 'AnySoftKeyboard' -include ':deployment' - -include ':api' - -include ':addons:base' -include ':addons:languages' -include ':addons:languages:english', ':addons:languages:english:pack' - -//list of packs should be alphabetized-ordered -include ':addons:languages:afrikaans_oss:pack', ':addons:languages:afrikaans_oss:apk' -include ':addons:languages:armenian2:pack', ':addons:languages:armenian2:apk' -include ":addons:languages:belarusian:pack", ":addons:languages:belarusian:apk" -include ':addons:languages:brazilian:pack', ':addons:languages:brazilian:apk' -include ':addons:languages:catalan:pack', ':addons:languages:catalan:apk' -include ':addons:languages:coptic:pack', ':addons:languages:coptic:apk' -include ':addons:languages:czech:pack', ':addons:languages:czech:apk' -include ':addons:languages:english:pack', ':addons:languages:english:apk' -include ':addons:languages:esperanto:pack', ':addons:languages:esperanto:apk' -include ':addons:languages:finnish:pack', ':addons:languages:finnish:apk' -include ':addons:languages:french:pack', ':addons:languages:french:apk' -include ':addons:languages:greek:pack', ':addons:languages:greek:apk' -include ':addons:languages:hebrew:pack', ':addons:languages:hebrew:apk' -include ':addons:languages:indonesian:pack', ':addons:languages:indonesian:apk' -include ':addons:languages:italian:pack', ':addons:languages:italian:apk' -include ':addons:languages:lithuanian:pack', ':addons:languages:lithuanian:apk' -include ':addons:languages:nias:pack', ':addons:languages:nias:apk' -include ':addons:languages:ossturkish:pack', ':addons:languages:ossturkish:apk' -include ':addons:languages:persian:pack', ':addons:languages:persian:apk' -include ':addons:languages:piedmontese:pack', ':addons:languages:piedmontese:apk' -include ':addons:languages:romanian:pack', ':addons:languages:romanian:apk' -include ':addons:languages:russian2:pack', ':addons:languages:russian2:apk' -include ':addons:languages:sardinian:pack', ':addons:languages:sardinian:apk' -include ':addons:languages:serbian:pack', ':addons:languages:serbian:apk' -include ':addons:languages:spain:pack', ':addons:languages:spain:apk' -include ':addons:languages:tamazight:pack', ':addons:languages:tamazight:apk' -include ':addons:languages:arabic:pack', ':addons:languages:arabic:apk' -include ':addons:languages:danish:pack', ':addons:languages:danish:apk' -include ':addons:languages:basque:pack', ':addons:languages:basque:apk' -include ':addons:languages:bulgarian:pack', ':addons:languages:bulgarian:apk' -include ':addons:languages:german:pack', ':addons:languages:german:apk' -include ':addons:languages:croatian:pack', ':addons:languages:croatian:apk' -include ':addons:languages:georgian:pack', ':addons:languages:georgian:apk' -include ':addons:languages:latvian:pack', ':addons:languages:latvian:apk' -include ':addons:languages:kurdish:pack', ':addons:languages:kurdish:apk' -include ':addons:languages:hungarian:pack', ':addons:languages:hungarian:apk' -include ':addons:languages:kachin:pack', ':addons:languages:kachin:apk' -include ':addons:languages:dutch:pack', ':addons:languages:dutch:apk' -include ':addons:languages:luxembourgish:pack', ':addons:languages:luxembourgish:apk' -include ':addons:languages:norwegian:pack', ':addons:languages:norwegian:apk' -include ':addons:languages:polish:pack', ':addons:languages:polish:apk' -include 
':addons:languages:rusyn:pack', ':addons:languages:rusyn:apk' -include ':addons:languages:sinhala:pack', ':addons:languages:sinhala:apk' -include ':addons:languages:hindi:pack', ':addons:languages:hindi:apk' -include ':addons:languages:thai:pack', ':addons:languages:thai:apk' -include ':addons:languages:portuguese:pack', ':addons:languages:portuguese:apk' - -//list of themes should be alphabetized-ordered -include ':addons:themes' -include ':addons:themes:classic_pc:pack', ':addons:themes:classic_pc:apk' -include ':addons:themes:ics:pack', ':addons:themes:ics:apk' -include ':addons:themes:israel64:pack', ':addons:themes:israel64:apk' -include ':addons:themes:three_d:pack', ':addons:themes:three_d:apk' - -//list of quick-text should be alphabetized-ordered -include ':addons:quicktexts' -include ':addons:quicktexts:bbcodes:pack', ':addons:quicktexts:bbcodes:apk' - -include ':ime' -include ':ime:base', ':ime:base-rx', ':ime:base-test', ':ime:prefs' -include ':ime:remote', ':ime:fileprovider' -include ':ime:addons' -include ':ime:dictionaries', ':ime:dictionaries:jnidictionaryv1', ':ime:dictionaries:jnidictionaryv2', ':ime:nextword' -include ':ime:pixel', ':ime:overlay' -include 'ime:voiceime' -include ':ime:app' - diff --git a/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml b/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml deleted file mode 100644 index 707bb096..00000000 --- a/tests/triple-t-anysoftkeyboard/metadata/com.anysoftkeyboard.languagepack.dutch.yml +++ /dev/null @@ -1,76 +0,0 @@ -Categories: - - Writing -License: Apache-2.0 -SourceCode: https://github.com/AnySoftKeyboard/AnySoftKeyboard/ -IssueTracker: https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues -Donate: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=KDYBGNUNMMN94&lc=US&item_name=AnySoftKeyboard - -AutoName: AnySoftKeyboard - Dutch Language Pack -Description: |- - This app has been deprecated in favor of https://f-droid.org/en/packages/com.anysoftkeyboard.languagepack.dutch_oss/ - - This is the Dutch language pack for AnySoftKeyboard. It offers an extensive - Dutch dictionary and optimised keyboard. In order to use it, first install - https://f-droid.org/packages/com.menny.android.anysoftkeyboard – a practical input method for Android - smartphones and tablets. - - The dictionary is based on the word list from Stichting OpenTaal and has the - Quality Mark Spelling (Keurmerk Spelling) of the Dutch Language Union - (Taalunie). This language pack has been created by Stichting OpenTaal and can be - installed by all without any costs. The language-specific files in this language - pack have a dual license. Both the BSD 2-Clause License as the Creative Commons, - Attribution 4.0 (unported) apply. - - Please, help us create free and open Dutch writing tools. 
Donate tax free to our - foundation at https://www.opentaal.org/vrienden-van-opentaal - -RepoType: git -Repo: https://github.com/AnySoftKeyboard/AnySoftKeyboard.git - -Builds: - - versionName: '1.0' - versionCode: 1 - disable: wait for upstream - commit: c5efd5986ce9beec299919f7ae9f174abd33b156 - gradle: - - yes - forceversion: true - preassemble: - - :makeDictionary - - - versionName: '1.4' - versionCode: 5 - commit: dutch-1.4 - gradle: - - yes - forceversion: true - preassemble: - - :makeDictionary - - - versionName: 4.0.1396 - versionCode: 2908 - commit: 1.10-r4 - subdir: addons/languages/dutch - sudo: - - apt-get update || apt-get update - - apt-get install -t bullseye openjdk-11-jdk-headless openjdk-11-jre-headless - - update-alternatives --auto java - gradle: - - yes - output: ../../../outputs/apks/release/addons-languages-*-$$VERCODE$$.apk - prebuild: - - echo "//gradleVersion = '6.8.3'" | tee build.gradle - - sed -i -e "s calculateApplicationId(project) 'com.anysoftkeyboard.languagepack.dutch' - " ../../../gradle/apk_module.gradle - - sed -i -e '/Data\.versionCode/ c versionCode $$VERCODE$$' -e '/Data\.versionName/ - c versionName "$$VERSION$$"' ../../../gradle/versioning_apk.gradle - scanignore: - - addons/languages/*/pack/dictionary/*.gz - ndk: r14b - -MaintainerNotes: Package ID is now com.anysoftkeyboard.languagepack.dutch_oss - -AutoUpdateMode: None -UpdateCheckMode: None -CurrentVersion: 4.0.1396 -CurrentVersionCode: 2908 diff --git a/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml b/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml deleted file mode 100644 index 4dc8d7ac..00000000 --- a/tests/triple-t-anysoftkeyboard/metadata/com.menny.android.anysoftkeyboard.yml +++ /dev/null @@ -1,523 +0,0 @@ -Categories: - - Writing -License: Apache-2.0 -WebSite: https://anysoftkeyboard.github.io -SourceCode: https://github.com/AnySoftKeyboard/AnySoftKeyboard -IssueTracker: https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues -Translation: https://crowdin.com/project/anysoftkeyboard -Changelog: https://github.com/AnySoftKeyboard/AnySoftKeyboard/releases -Donate: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=KDYBGNUNMMN94&lc=US&item_name=AnySoftKeyboard - -AutoName: AnySoftKeyboard - -RepoType: git -Repo: https://github.com/AnySoftKeyboard/AnySoftKeyboard.git - -Builds: - - versionName: '20120528' - versionCode: 74 - commit: d6c2f7448 - srclibs: - - AnySoftKeyboard-API@78768bc479 - prebuild: sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' - project.properties - buildjni: - - yes - ndk: r12b - - - versionName: 20120814-eye-candy - versionCode: 76 - disable: doesn't build - commit: unknown - see disabled - srclibs: - - AnySoftKeyboard-API@78768bc479 - prebuild: sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' - project.properties - buildjni: - - yes - ndk: r12b - - - versionName: 20130106-eye-candy - versionCode: 85 - commit: a7723e1db5 - srclibs: - - AnySoftKeyboard-API@b21d8907 - prebuild: - - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - - wget https://softkeyboard.googlecode.com/svn-history/r544/trunk/project/dict_creation/makedict_Linux - - wget https://github.com/AnySoftKeyboard/AnySoftKeyboardTools/blob/HEAD/makedict/makedict.jar?raw=true - -O makedict.jar - - chmod +x makedict_Linux - - ./makedict_Linux 
dict/words.xml - buildjni: - - yes - ndk: r12b - - - versionName: 20130222-skinny-eye-candy - versionCode: 95 - commit: 50daad0b0a - srclibs: - - AnySoftKeyboard-API@b21d8907 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: 20130501-skinny-eye-candy-post-birthday - versionCode: 98 - commit: 6f51b8a9d - srclibs: - - AnySoftKeyboard-API@2c864957 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(android.library.reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: '20130528' - versionCode: 102 - commit: 72fd519 - srclibs: - - AnySoftKeyboard-API@8d8a958e17 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(reference.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: '20130709' - versionCode: 106 - commit: f1ba2de - srclibs: - - AnySoftKeyboard-API@30851236d7 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: '20130807' - versionCode: 107 - disable: not published - commit: unknown - see disabled - - - versionName: '20130919' - versionCode: 111 - commit: 2a68963f - srclibs: - - AnySoftKeyboard-API@30851236d7 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: '20131101' - versionCode: 114 - disable: no longer uses ant, prebuilts - commit: 82538d456f8 - srclibs: - - AnySoftKeyboard-API@08b452ff4 - - AnySoftKeyboardTools@73e9a09496 - prebuild: - - sed -i 's@\(.1=\).*@\1$$AnySoftKeyboard-API$$@' project.properties - - sed -i 's@..\/AnySoftKeyboardTools@$$AnySoftKeyboardTools$$@g' build.xml - - rm -rf libs/armeabi*/ libs/x86/ libs/mips/ ant/ keyboard_keystore StoreStuff/ - res/raw/words_1.dict - buildjni: - - yes - ndk: r12b - - - versionName: 1.2.20140705 - versionCode: 140 - disable: build words dict from source, no idea what version name/code it is - commit: 1.5-r4.1 - gradle: - - yes - rm: - - res/raw/words_1.dict - - - versionName: 1.8.84 - versionCode: 1409 - commit: 1.8-r1 - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' build.gradle - gradle: - - yes - rm: - - src/main/res/raw/words_1.dict - preassemble: - - makeEnglishDictionary - - - versionName: 1.8.133 - versionCode: 1458 - disable: broken subdir - commit: 1.8-r3 - subdir: app - init: sed -i -e '/fabric/d' -e 
'/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - rm: - - app/src/main/res/raw/words_1.dict - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.146 - versionCode: 1471 - commit: 1.8-r4 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - rm: - - app/src/main/res/raw/words_1.dict - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.190 - versionCode: 1515 - disable: weird version/vc-issue - commit: 1.8-r5.1 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - rm: - - app/src/main/res/raw/words_1.dict - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.195 - versionCode: 1520 - commit: 1.8-r5.2 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - rm: - - app/src/main/res/raw/words_1.dict - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.253 - versionCode: 1578 - commit: 1.8-r6 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_1.dict - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.329 - versionCode: 1654 - commit: 1.8-r7 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_1.dict - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.401 - versionCode: 1726 - commit: 1.8-r7.1 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_1.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.503 - versionCode: 1828 - commit: 1.8-r8 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.585 - versionCode: 1910 - commit: 1.8-r9 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.735 - versionCode: 2060 - commit: 1.8-r10 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.844 - versionCode: 2169 - commit: 1.8-r11 - 
subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.1241 - versionCode: 2566 - commit: 1.8-r12 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - preassemble: - - :makeEnglishDictionary - - - versionName: 1.8.1333 - versionCode: 2658 - commit: 1.8-r12.2 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryCompile/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' build.gradle - - sed -i -e '/crashlytics/,$d' build.gradle - ndk: r11c - preassemble: - - :makeEnglishDictionary - - - versionName: 1.9.1117 - versionCode: 3692 - commit: 1.9-r1 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryImplementation/,+2d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: - - sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' -e '/crashlytics/,$d' - build.gradle - - sed -i -e 's/gradle:3.1.0-alpha06/gradle:3.0.1/' ../build.gradle - scandelete: - - buildSrc - - app/src/debug - ndk: r14b - preassemble: - - :makeEnglishDictionary - - - versionName: 1.9.1944 - versionCode: 4519 - commit: 1.9-r3-fdroid - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryImplementation/d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - -e '/crashlytics/,$d' build.gradle - scandelete: - - buildSrc - - app/src/debug - ndk: r14b - preassemble: - - :makeEnglishDictionary - - - versionName: 1.9.2055 - versionCode: 4630 - commit: 1.9-r4 - subdir: app - init: sed -i -e '/fabric/d' -e '/canaryImplementation/d' ../build.gradle build.gradle - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - -e '/crashlytics/,$d' build.gradle - scandelete: - - buildSrc - - app/src/debug - ndk: r14b - preassemble: - - :makeEnglishDictionary - - - versionName: 1.9.2445 - versionCode: 5020 - commit: 1.9-r5.1 - subdir: app - gradle: - - yes - forceversion: true - rm: - - app/src/main/res/raw/words_*.dict - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - build.gradle - scandelete: - - buildSrc - - app/src/debug - ndk: r14b - preassemble: - - :makeEnglishDictionary - - - versionName: 1.9.2629 - versionCode: 5204 - commit: 1.9-r6 - subdir: app - gradle: - - yes - forceversion: true - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - build.gradle - scandelete: - - buildSrc - ndk: r14b - - - versionName: 1.10.364 - versionCode: 5539 - commit: 1.10-r1 - subdir: app - gradle: - - yes - forceversion: true - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - 
build.gradle - scandelete: - - buildSrc - ndk: r14b - - - versionName: 1.10.606 - versionCode: 5781 - commit: 1.10-r2 - subdir: app - gradle: - - yes - forceversion: true - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - build.gradle - scandelete: - - buildSrc - ndk: r14b - - - versionName: 1.10.1109 - versionCode: 6279 - commit: e57c9cc852aefdc1ff60b024e52d4341337b3df7 - subdir: app - gradle: - - yes - forceversion: true - prebuild: sed -i -e '/versionCode/d' -e '/versionName/aversionCode $$VERCODE$$' - build.gradle - scanignore: - - buildSrc - ndk: r14b - gradleprops: - - forceVersionBuildCount=4709 - - - versionName: 1.10.1786 - versionCode: 6956 - commit: 1.10-r4 - subdir: ime/app - sudo: - - apt-get update || apt-get update - - apt-get install -t bullseye openjdk-11-jdk-headless openjdk-11-jre-headless - - update-alternatives --auto java - gradle: - - yes - prebuild: - - sed -i -e "/EnvKey/ a //gradleVersion = '6.8.3'" ../build.gradle - - sed -i -e '/Data\.versionCode/ c versionCode $$VERCODE$$' -e '/Data\.versionName/ - c versionName "$$VERSION$$"' ../../gradle/versioning_apk.gradle - scanignore: - - addons/languages/*/pack/dictionary/*.gz - ndk: r14b - -MaintainerNotes: |- - * Prior to 1.9.2629, generates english dictionary binary (src/main/res/raw/words_*.dict) from source files (under `english_dictionary` folder. Mostly AOSP word-list) - - Summary and Description have been moved to the new localizable text files: - https://f-droid.org/docs/All_About_Descriptions_Graphics_and_Screenshots - -AutoUpdateMode: None -UpdateCheckMode: None -CurrentVersion: 1.10.1786 -CurrentVersionCode: 6956 diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt deleted file mode 100644 index 5c15e481..00000000 --- a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/contact-website.txt +++ /dev/null @@ -1 +0,0 @@ -https://emersion.fr diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt deleted file mode 100644 index b72afe53..00000000 --- a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/full-description.txt +++ /dev/null @@ -1,11 +0,0 @@ -An IRC client for mobile devices. - -Goals: - -

-• Modern: support for many IRCv3 extensions, plus some special support for IRC bouncers.
-• Easy to use: offer a simple, straightforward interface.
-• Offline-first: users should be able to read past conversations while offline, and network disruptions should be handled transparently.
-• Lightweight: go easy on resource usage to run smoothly on older phones and save battery power.
-• Cross-platform: the main target platforms are Linux and Android.
      diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt deleted file mode 100644 index 98eb88a5..00000000 --- a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/short-description.txt +++ /dev/null @@ -1 +0,0 @@ -An IRC client for mobile devices diff --git a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt b/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt deleted file mode 100644 index d2f3b402..00000000 --- a/tests/triple-t-flutter/build/fr.emersion.goguma/android/app/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -Goguma diff --git a/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml b/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml deleted file mode 100644 index da1897fc..00000000 --- a/tests/triple-t-flutter/metadata/fr.emersion.goguma.yml +++ /dev/null @@ -1,30 +0,0 @@ -Categories: - - Internet -License: AGPL-3.0-only -WebSite: https://sr.ht/~emersion/goguma -SourceCode: https://git.sr.ht/~emersion/goguma -IssueTracker: https://todo.sr.ht/~emersion/goguma - -AutoName: Goguma - -RepoType: git -Repo: https://git.sr.ht/~emersion/goguma - -Builds: - - versionName: 0.1.0 - versionCode: 1 - commit: 944d2d1e000901365392e850a98ee03f5dedba32 - output: build/app/outputs/flutter-apk/app-release.apk - srclibs: - - flutter@2.10.2 - build: - - $$flutter$$/bin/flutter config --no-analytics - - $$flutter$$/bin/flutter create --org fr.emersion --project-name goguma --platforms - android --no-overwrite . - - $$flutter$$/bin/flutter build apk --release --verbose - -AutoUpdateMode: Version -UpdateCheckMode: Tags -UpdateCheckData: pubspec.yaml|version:\s.+\+(\d+)|.|version:\s(.+)\+ -CurrentVersion: 0.1.0 -CurrentVersionCode: 1 diff --git a/tests/triple-t-multiple/.gitignore b/tests/triple-t-multiple/.gitignore deleted file mode 100644 index 0aba28e9..00000000 --- a/tests/triple-t-multiple/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!build/ diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle deleted file mode 100644 index c7934558..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/settings.gradle +++ /dev/null @@ -1,3 +0,0 @@ -include ':common' -include ':verifier' -include ':wallet' diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 03feeb11..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/verifier/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -COVID Certificate Check \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 34827e66..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.verifier/wallet/src/main/play/listings/en-US/title.txt +++ 
/dev/null @@ -1 +0,0 @@ -COVID Certificate \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle deleted file mode 100644 index c7934558..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/settings.gradle +++ /dev/null @@ -1,3 +0,0 @@ -include ':common' -include ':verifier' -include ':wallet' diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 03feeb11..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verifier/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -COVID Certificate Check \ No newline at end of file diff --git a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt b/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt deleted file mode 100644 index 34827e66..00000000 --- a/tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt +++ /dev/null @@ -1 +0,0 @@ -COVID Certificate \ No newline at end of file diff --git a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml deleted file mode 100644 index f8999482..00000000 --- a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml +++ /dev/null @@ -1,50 +0,0 @@ -AntiFeatures: - - Tracking -Categories: - - Sports & Health -License: MPL-2.0 -AuthorName: Swiss Admin -AuthorWebSite: https://www.bit.admin.ch -SourceCode: https://github.com/admin-ch/CovidCertificate-App-Android -IssueTracker: https://github.com/admin-ch/CovidCertificate-App-Android/issues -Changelog: https://github.com/admin-ch/CovidCertificate-App-Android/releases - -AutoName: Covid Cert - -RepoType: git -Repo: https://github.com/admin-ch/CovidCertificate-App-Android -Binaries: https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v%v-%c-verifier/verifier-prod-%v-%c-signed.apk - -Builds: - - versionName: 1.2.0 - versionCode: 1200 - commit: v1.2.0-1200-verifier - subdir: verifier - submodules: true - gradle: - - prod - rm: - - sdk/sdk/testKeystore - - wallet/testKeystore - - verifier/testKeystore - prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-verifier/verifier-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt - | tr -cd '0-9')" >> gradle.properties - - - versionName: 2.0.0 - versionCode: 2000 - commit: 5a871eabf1fce16b84e4c7b97c94fd3f2a37e910 - subdir: verifier - submodules: true - gradle: - - prod - rm: - - sdk/sdk/testKeystore - - wallet/testKeystore - - verifier/testKeystore - prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-verifier/verifier-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt - | tr -cd '0-9')" >> gradle.properties - -AutoUpdateMode: Version -UpdateCheckMode: Tags ^v[\d.]+-\d+-verifier$ -CurrentVersion: 2.0.0 -CurrentVersionCode: 2000 diff --git 
a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml b/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml deleted file mode 100644 index 3ea35008..00000000 --- a/tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml +++ /dev/null @@ -1,50 +0,0 @@ -AntiFeatures: - - Tracking -Categories: - - Sports & Health -License: MPL-2.0 -AuthorName: Swiss Admin -AuthorWebSite: https://www.bit.admin.ch -SourceCode: https://github.com/admin-ch/CovidCertificate-App-Android -IssueTracker: https://github.com/admin-ch/CovidCertificate-App-Android/issues -Changelog: https://github.com/admin-ch/CovidCertificate-App-Android/releases - -AutoName: Covid Cert - -RepoType: git -Repo: https://github.com/admin-ch/CovidCertificate-App-Android -Binaries: https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v%v-%c-wallet/wallet-prod-%v-%c-signed.apk - -Builds: - - versionName: 1.2.0 - versionCode: 1200 - commit: v1.2.0-1200-wallet - subdir: wallet - submodules: true - gradle: - - prod - rm: - - sdk/sdk/testKeystore - - wallet/testKeystore - - verifier/testKeystore - prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-wallet/wallet-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt - | tr -cd '0-9')" >> gradle.properties - - - versionName: 2.0.0 - versionCode: 2000 - commit: 5a871eabf1fce16b84e4c7b97c94fd3f2a37e910 - subdir: wallet - submodules: true - gradle: - - prod - rm: - - sdk/sdk/testKeystore - - wallet/testKeystore - - verifier/testKeystore - prebuild: printf '\nbuildTimestamp=%s\n' "$(curl -sL https://github.com/admin-ch/CovidCertificate-App-Android/releases/download/v$$VERSION$$-$$VERCODE$$-wallet/wallet-prod-$$VERSION$$-$$VERCODE$$-buildtimestamp.txt - | tr -cd '0-9')" >> gradle.properties - -AutoUpdateMode: Version -UpdateCheckMode: Tags ^v[\d.]+-\d+-wallet$ -CurrentVersion: 2.0.0 -CurrentVersionCode: 2000 diff --git a/tests/update.TestCase b/tests/update.TestCase new file mode 100755 index 00000000..f9a46d88 --- /dev/null +++ b/tests/update.TestCase @@ -0,0 +1,847 @@ +#!/usr/bin/env python3 + +# http://www.drdobbs.com/testing/unit-testing-with-python/240165163 + +import git +import glob +import inspect +import logging +import optparse +import os +import shutil +import subprocess +import sys +import tempfile +import unittest +import yaml +import zipfile +from binascii import unhexlify +from distutils.version import LooseVersion + +localmodule = os.path.realpath( + os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..')) +print('localmodule: ' + localmodule) +if localmodule not in sys.path: + sys.path.insert(0, localmodule) + +import fdroidserver.common +import fdroidserver.exception +import fdroidserver.metadata +import fdroidserver.update +from fdroidserver.common import FDroidPopen + + +class UpdateTest(unittest.TestCase): + '''fdroid update''' + + def setUp(self): + logging.basicConfig(level=logging.INFO) + self.basedir = os.path.join(localmodule, 'tests') + self.tmpdir = os.path.abspath(os.path.join(self.basedir, '..', '.testfiles')) + if not os.path.exists(self.tmpdir): + os.makedirs(self.tmpdir) + os.chdir(self.basedir) + + def testInsertStoreMetadata(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['accepted_formats'] = ('txt', 'yml') + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + 
os.chdir(os.path.join(localmodule, 'tests')) + + shutil.rmtree(os.path.join('repo', 'info.guardianproject.urzip'), ignore_errors=True) + + shutil.rmtree(os.path.join('build', 'com.nextcloud.client'), ignore_errors=True) + shutil.copytree(os.path.join('source-files', 'com.nextcloud.client'), + os.path.join('build', 'com.nextcloud.client')) + + shutil.rmtree(os.path.join('build', 'com.nextcloud.client.dev'), ignore_errors=True) + shutil.copytree(os.path.join('source-files', 'com.nextcloud.client.dev'), + os.path.join('build', 'com.nextcloud.client.dev')) + + shutil.rmtree(os.path.join('build', 'eu.siacs.conversations'), ignore_errors=True) + shutil.copytree(os.path.join('source-files', 'eu.siacs.conversations'), + os.path.join('build', 'eu.siacs.conversations')) + + apps = dict() + for packageName in ('info.guardianproject.urzip', 'org.videolan.vlc', 'obb.mainpatch.current', + 'com.nextcloud.client', 'com.nextcloud.client.dev', + 'eu.siacs.conversations'): + apps[packageName] = fdroidserver.metadata.App() + apps[packageName]['id'] = packageName + apps[packageName]['CurrentVersionCode'] = 0xcafebeef + + apps['info.guardianproject.urzip']['CurrentVersionCode'] = 100 + + buildnextcloudclient = fdroidserver.metadata.Build() + buildnextcloudclient.gradle = ['generic'] + apps['com.nextcloud.client']['builds'] = [buildnextcloudclient] + + buildnextclouddevclient = fdroidserver.metadata.Build() + buildnextclouddevclient.gradle = ['versionDev'] + apps['com.nextcloud.client.dev']['builds'] = [buildnextclouddevclient] + + build_conversations = fdroidserver.metadata.Build() + build_conversations.gradle = ['free'] + apps['eu.siacs.conversations']['builds'] = [build_conversations] + + fdroidserver.update.insert_localized_app_metadata(apps) + + appdir = os.path.join('repo', 'info.guardianproject.urzip', 'en-US') + self.assertTrue(os.path.isfile(os.path.join(appdir, 'icon.png'))) + self.assertTrue(os.path.isfile(os.path.join(appdir, 'featureGraphic.png'))) + + self.assertEqual(6, len(apps)) + for packageName, app in apps.items(): + self.assertTrue('localized' in app) + self.assertTrue('en-US' in app['localized']) + self.assertEqual(1, len(app['localized'])) + if packageName == 'info.guardianproject.urzip': + self.assertEqual(7, len(app['localized']['en-US'])) + self.assertEqual('full description\n', app['localized']['en-US']['description']) + self.assertEqual('title\n', app['localized']['en-US']['name']) + self.assertEqual('short description\n', app['localized']['en-US']['summary']) + self.assertEqual('video\n', app['localized']['en-US']['video']) + self.assertEqual('icon.png', app['localized']['en-US']['icon']) + self.assertEqual('featureGraphic.png', app['localized']['en-US']['featureGraphic']) + self.assertEqual('100\n', app['localized']['en-US']['whatsNew']) + elif packageName == 'org.videolan.vlc': + self.assertEqual('icon.png', app['localized']['en-US']['icon']) + self.assertEqual(9, len(app['localized']['en-US']['phoneScreenshots'])) + self.assertEqual(15, len(app['localized']['en-US']['sevenInchScreenshots'])) + elif packageName == 'obb.mainpatch.current': + self.assertEqual('icon.png', app['localized']['en-US']['icon']) + self.assertEqual('featureGraphic.png', app['localized']['en-US']['featureGraphic']) + self.assertEqual(1, len(app['localized']['en-US']['phoneScreenshots'])) + self.assertEqual(1, len(app['localized']['en-US']['sevenInchScreenshots'])) + elif packageName == 'com.nextcloud.client': + self.assertEqual('Nextcloud', app['localized']['en-US']['name']) + self.assertEqual(1073, 
len(app['localized']['en-US']['description'])) + self.assertEqual(78, len(app['localized']['en-US']['summary'])) + elif packageName == 'com.nextcloud.client.dev': + self.assertEqual('Nextcloud Dev', app['localized']['en-US']['name']) + self.assertEqual(586, len(app['localized']['en-US']['description'])) + self.assertEqual(79, len(app['localized']['en-US']['summary'])) + elif packageName == 'eu.siacs.conversations': + self.assertEqual('Conversations', app['localized']['en-US']['name']) + + def test_insert_triple_t_metadata(self): + importer = os.path.join(self.basedir, 'tmp', 'importer') + packageName = 'org.fdroid.ci.test.app' + if not os.path.isdir(importer): + logging.warning('skipping test_insert_triple_t_metadata, import.TestCase must run first!') + return + tmptestsdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, + dir=self.tmpdir) + packageDir = os.path.join(tmptestsdir, 'build', packageName) + shutil.copytree(importer, packageDir) + + # always use the same commit so these tests work when ci-test-app.git is updated + repo = git.Repo(packageDir) + for remote in repo.remotes: + remote.fetch() + repo.git.reset('--hard', 'b9e5d1a0d8d6fc31d4674b2f0514fef10762ed4f') + repo.git.clean('-fdx') + + os.mkdir(os.path.join(tmptestsdir, 'metadata')) + metadata = dict() + metadata['Description'] = 'This is just a test app' + with open(os.path.join(tmptestsdir, 'metadata', packageName + '.yml'), 'w') as fp: + yaml.dump(metadata, fp) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['accepted_formats'] = ('yml') + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.update.options = fdroidserver.common.options + os.chdir(tmptestsdir) + + apps = fdroidserver.metadata.read_metadata(xref=True) + fdroidserver.update.copy_triple_t_store_metadata(apps) + + # TODO ideally, this would compare the whole dict like in metadata.TestCase's test_read_metadata() + correctlocales = [ + 'ar', 'ast_ES', 'az', 'ca', 'ca_ES', 'cs-CZ', 'cs_CZ', 'da', + 'da-DK', 'de', 'de-DE', 'el', 'en-US', 'es', 'es-ES', 'es_ES', 'et', + 'fi', 'fr', 'fr-FR', 'he_IL', 'hi-IN', 'hi_IN', 'hu', 'id', 'it', + 'it-IT', 'it_IT', 'iw-IL', 'ja', 'ja-JP', 'kn_IN', 'ko', 'ko-KR', + 'ko_KR', 'lt', 'nb', 'nb_NO', 'nl', 'nl-NL', 'no', 'pl', 'pl-PL', + 'pl_PL', 'pt', 'pt-BR', 'pt-PT', 'pt_BR', 'ro', 'ro_RO', 'ru-RU', + 'ru_RU', 'sv-SE', 'sv_SE', 'te', 'tr', 'tr-TR', 'uk', 'uk_UA', 'vi', + 'vi_VN', 'zh-CN', 'zh_CN', 'zh_TW', + ] + locales = sorted(list(apps['org.fdroid.ci.test.app']['localized'].keys())) + self.assertEqual(correctlocales, locales) + + def javagetsig(self, apkfile): + getsig_dir = 'getsig' + if not os.path.exists(getsig_dir + "/getsig.class"): + logging.critical("getsig.class not found. 
To fix: cd '%s' && ./make.sh" % getsig_dir) + sys.exit(1) + # FDroidPopen needs some config to work + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + p = FDroidPopen(['java', '-cp', 'getsig', + 'getsig', apkfile]) + sig = None + for line in p.output.splitlines(): + if line.startswith('Result:'): + sig = line[7:].strip() + break + if p.returncode == 0: + return sig + else: + return None + + def testGoodGetsig(self): + # config needed to use jarsigner and keytool + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + apkfile = 'urzip.apk' + sig = self.javagetsig(apkfile) + self.assertIsNotNone(sig, "sig is None") + pysig = fdroidserver.update.getsig(apkfile) + self.assertIsNotNone(pysig, "pysig is None") + self.assertEqual(sig, fdroidserver.update.getsig(apkfile), + "python sig not equal to java sig!") + self.assertEqual(len(sig), len(pysig), + "the length of the two sigs are different!") + try: + self.assertEqual(unhexlify(sig), unhexlify(pysig), + "the length of the two sigs are different!") + except TypeError as e: + print(e) + self.assertTrue(False, 'TypeError!') + + def testBadGetsig(self): + """getsig() should still be able to fetch the fingerprint of bad signatures""" + # config needed to use jarsigner and keytool + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + apkfile = 'urzip-badsig.apk' + sig = fdroidserver.update.getsig(apkfile) + self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', + "python sig should be: " + str(sig)) + + apkfile = 'urzip-badcert.apk' + sig = fdroidserver.update.getsig(apkfile) + self.assertEqual(sig, 'e0ecb5fc2d63088e4a07ae410a127722', + "python sig should be: " + str(sig)) + + def test_getsig(self): + # config needed to use jarsigner and keytool + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + sig = fdroidserver.update.getsig('urzip-release-unsigned.apk') + self.assertIsNone(sig) + + good_fingerprint = 'b4964fd759edaa54e65bb476d0276880' + + apkpath = 'urzip-release.apk' # v1 only + sig = fdroidserver.update.getsig(apkpath) + self.assertEqual(good_fingerprint, sig, + 'python sig was: ' + str(sig)) + + apkpath = 'repo/v1.v2.sig_1020.apk' + sig = fdroidserver.update.getsig(apkpath) + self.assertEqual(good_fingerprint, sig, + 'python sig was: ' + str(sig)) + # check that v1 and v2 have the same certificate + try: + import hashlib + from binascii import hexlify + from androguard.core.bytecodes.apk import APK + except ImportError: + print('WARNING: skipping rest of test since androguard is missing!') + return + apkobject = APK(apkpath) + cert_encoded = apkobject.get_certificates_der_v2()[0] + self.assertEqual(good_fingerprint, sig, + hashlib.md5(hexlify(cert_encoded)).hexdigest()) # nosec just used as ID for signing key + + filename = 'v2.only.sig_2.apk' + with zipfile.ZipFile(filename) as z: + self.assertTrue('META-INF/MANIFEST.MF' in z.namelist(), 'META-INF/MANIFEST.MF required') + for f in z.namelist(): + # ensure there are no v1 signature files + self.assertIsNone(fdroidserver.common.SIGNATURE_BLOCK_FILE_REGEX.match(f)) + sig = fdroidserver.update.getsig(filename) + self.assertEqual(good_fingerprint, sig, + "python sig was: " + str(sig)) + + def testScanApksAndObbs(self): + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" 
subdir') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.allow_disabled_algorithms = False + + apps = fdroidserver.metadata.read_metadata(xref=True) + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + self.assertEqual(len(apks), 17) + apk = apks[1] + self.assertEqual(apk['packageName'], 'com.politedroid') + self.assertEqual(apk['versionCode'], 3) + self.assertEqual(apk['minSdkVersion'], 3) + self.assertIsNone(apk.get('targetSdkVersion')) + self.assertFalse('maxSdkVersion' in apk) + apk = apks[8] + self.assertEqual(apk['packageName'], 'obb.main.oldversion') + self.assertEqual(apk['versionCode'], 1444412523) + self.assertEqual(apk['minSdkVersion'], 4) + self.assertEqual(apk['targetSdkVersion'], 18) + self.assertFalse('maxSdkVersion' in apk) + + fdroidserver.update.insert_obbs('repo', apps, apks) + for apk in apks: + if apk['packageName'] == 'obb.mainpatch.current': + self.assertEqual(apk.get('obbMainFile'), 'main.1619.obb.mainpatch.current.obb') + self.assertEqual(apk.get('obbPatchFile'), 'patch.1619.obb.mainpatch.current.obb') + elif apk['packageName'] == 'obb.main.oldversion': + self.assertEqual(apk.get('obbMainFile'), 'main.1434483388.obb.main.oldversion.obb') + self.assertIsNone(apk.get('obbPatchFile')) + elif apk['packageName'] == 'obb.main.twoversions': + self.assertIsNone(apk.get('obbPatchFile')) + if apk['versionCode'] == 1101613: + self.assertEqual(apk.get('obbMainFile'), 'main.1101613.obb.main.twoversions.obb') + elif apk['versionCode'] == 1101615: + self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') + elif apk['versionCode'] == 1101617: + self.assertEqual(apk.get('obbMainFile'), 'main.1101615.obb.main.twoversions.obb') + else: + self.assertTrue(False) + elif apk['packageName'] == 'info.guardianproject.urzip': + self.assertIsNone(apk.get('obbMainFile')) + self.assertIsNone(apk.get('obbPatchFile')) + + def test_apkcache_json(self): + """test the migration from pickle to json""" + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.allow_disabled_algorithms = False + + fdroidserver.metadata.read_metadata(xref=True) + knownapks = fdroidserver.common.KnownApks() + apkcache = fdroidserver.update.get_cache() + self.assertEqual(2, len(apkcache)) + self.assertEqual(fdroidserver.update.METADATA_VERSION, apkcache["METADATA_VERSION"]) + self.assertEqual(fdroidserver.update.options.allow_disabled_algorithms, + apkcache['allow_disabled_algorithms']) + apks, cachechanged = fdroidserver.update.process_apks(apkcache, 'repo', 
knownapks, False) + fdroidserver.update.write_cache(apkcache) + + fdroidserver.update.options.clean = False + read_from_json = fdroidserver.update.get_cache() + self.assertEqual(19, len(read_from_json)) + for f in glob.glob('repo/*.apk'): + self.assertTrue(os.path.basename(f) in read_from_json) + + fdroidserver.update.options.clean = True + reset = fdroidserver.update.get_cache() + self.assertEqual(2, len(reset)) + + def test_scan_apk(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + try: + config['aapt'] = fdroidserver.common.find_sdk_tools_cmd('aapt') + except fdroidserver.exception.FDroidException: + pass # aapt is not required if androguard is present + + for use_androguard in (True, False): + if use_androguard: + try: + import androguard + androguard + + def func(): + return True + fdroidserver.common.use_androguard = func + except ImportError: + continue + else: + if 'aapt' in config: + def func(): + return False + fdroidserver.common.use_androguard = func + else: + continue + + print('USE_ANDROGUARD', use_androguard) + + try: + apksigner = fdroidserver.common.find_sdk_tools_cmd('apksigner') + if use_androguard and apksigner: # v2 parsing needs both + config['apksigner'] = apksigner + apk_info = fdroidserver.update.scan_apk('v2.only.sig_2.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), 'v2-only') + self.assertEqual(apk_info.get('versionCode'), 2) + except fdroidserver.exception.FDroidException: + print('WARNING: skipping v2-only test since apksigner cannot be found') + + apk_info = fdroidserver.update.scan_apk('repo/v1.v2.sig_1020.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), 'v1+2') + self.assertEqual(apk_info.get('versionCode'), 1020) + + apk_info = fdroidserver.update.scan_apk('repo/souch.smsbypass_9.apk') + self.assertIsNone(apk_info.get('maxSdkVersion')) + self.assertEqual(apk_info.get('versionName'), '0.9') + + apk_info = fdroidserver.update.scan_apk('repo/duplicate.permisssions_9999999.apk') + self.assertEqual(apk_info.get('versionName'), '') + self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable/ic_launcher.png', + '-1': 'res/drawable/ic_launcher.png'}) + + apk_info = fdroidserver.update.scan_apk('org.dyndns.fules.ck_20.apk') + self.assertEqual(apk_info['icons_src'], {'240': 'res/drawable-hdpi-v4/icon_launcher.png', + '120': 'res/drawable-ldpi-v4/icon_launcher.png', + '160': 'res/drawable-mdpi-v4/icon_launcher.png', + '-1': 'res/drawable-mdpi-v4/icon_launcher.png'}) + self.assertEqual(apk_info['icons'], {}) + self.assertEqual(apk_info['features'], []) + self.assertEqual(apk_info['antiFeatures'], set()) + self.assertEqual(apk_info['versionName'], 'v1.6pre2') + self.assertEqual(apk_info['hash'], + '897486e1f857c6c0ee32ccbad0e1b8cd82f6d0e65a44a23f13f852d2b63a18c8') + self.assertEqual(apk_info['packageName'], 'org.dyndns.fules.ck') + self.assertEqual(apk_info['versionCode'], 20) + self.assertEqual(apk_info['size'], 132453) + self.assertEqual(apk_info['nativecode'], + ['arm64-v8a', 'armeabi', 'armeabi-v7a', 'mips', 'mips64', 'x86', 'x86_64']) + self.assertEqual(apk_info['minSdkVersion'], 7) + self.assertEqual(apk_info['sig'], '9bf7a6a67f95688daec75eab4b1436ac') + 
self.assertEqual(apk_info['hashType'], 'sha256') + self.assertEqual(apk_info['targetSdkVersion'], 8) + + apk_info = fdroidserver.update.scan_apk('org.bitbucket.tickytacky.mirrormirror_4.apk') + self.assertEqual(apk_info.get('versionName'), '1.0.3') + self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable-mdpi/mirror.png', + '-1': 'res/drawable-mdpi/mirror.png'}) + + apk_info = fdroidserver.update.scan_apk('repo/info.zwanenburg.caffeinetile_4.apk') + self.assertEqual(apk_info.get('versionName'), '1.3') + self.assertEqual(apk_info['icons_src'], {'160': 'res/drawable/ic_coffee_on.xml', + '-1': 'res/drawable/ic_coffee_on.xml'}) + + apk_info = fdroidserver.update.scan_apk('repo/com.politedroid_6.apk') + self.assertEqual(apk_info.get('versionName'), '1.5') + self.assertEqual(apk_info['icons_src'], {'120': 'res/drawable-ldpi-v4/icon.png', + '160': 'res/drawable-mdpi-v4/icon.png', + '240': 'res/drawable-hdpi-v4/icon.png', + '320': 'res/drawable-xhdpi-v4/icon.png', + '-1': 'res/drawable-mdpi-v4/icon.png'}) + + apk_info = fdroidserver.update.scan_apk('SpeedoMeterApp.main_1.apk') + self.assertEqual(apk_info.get('versionName'), '1.0') + self.assertEqual(apk_info['icons_src'], {}) + + def test_scan_apk_no_min_target(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + apk_info = fdroidserver.update.scan_apk('repo/no.min.target.sdk_987.apk') + self.maxDiff = None + self.assertDictEqual(apk_info, { + 'icons': {}, + 'icons_src': {'-1': 'res/drawable/ic_launcher.png', + '160': 'res/drawable/ic_launcher.png'}, + 'name': 'No minSdkVersion or targetSdkVersion', + 'signer': '32a23624c201b949f085996ba5ed53d40f703aca4989476949cae891022e0ed6', + 'hashType': 'sha256', + 'packageName': 'no.min.target.sdk', + 'features': [], + 'antiFeatures': set(), + 'size': 14102, + 'sig': 'b4964fd759edaa54e65bb476d0276880', + 'versionName': '1.2-fake', + 'uses-permission-sdk-23': [], + 'hash': 'e2e1dc1d550df2b5bc383860139207258645b5540abeccd305ed8b2cb6459d2c', + 'versionCode': 987, + 'minSdkVersion': 3, + 'uses-permission': [ + fdroidserver.update.UsesPermission(name='android.permission.WRITE_EXTERNAL_STORAGE', + maxSdkVersion=None), + fdroidserver.update.UsesPermission(name='android.permission.READ_PHONE_STATE', + maxSdkVersion=None), + fdroidserver.update.UsesPermission(name='android.permission.READ_EXTERNAL_STORAGE', + maxSdkVersion=None)]}) + + def test_scan_apk_no_sig(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + with self.assertRaises(fdroidserver.exception.BuildException): + fdroidserver.update.scan_apk('urzip-release-unsigned.apk') + + def test_process_apk(self): + + def _build_yaml_representer(dumper, data): + '''Creates a YAML representation of a Build instance''' + return dumper.represent_dict(data) + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + 
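        # The bare object created above stands in for the argparse options;
+        # the attributes below set only the flags the update code reads here.
+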
fdroidserver.update.options.clean = True + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.delete_unknown = True + fdroidserver.update.options.allow_disabled_algorithms = False + + for icon_dir in fdroidserver.update.get_all_icon_dirs('repo'): + if not os.path.exists(icon_dir): + os.makedirs(icon_dir) + + knownapks = fdroidserver.common.KnownApks() + apkList = ['../urzip.apk', '../org.dyndns.fules.ck_20.apk'] + + for apkName in apkList: + _, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', knownapks, + False) + # Don't care about the date added to the repo and relative apkName + del apk['added'] + del apk['apkName'] + # avoid AAPT application name bug + del apk['name'] + + # ensure that icons have been extracted properly + if apkName == '../urzip.apk': + self.assertEqual(apk['icon'], 'info.guardianproject.urzip.100.png') + if apkName == '../org.dyndns.fules.ck_20.apk': + self.assertEqual(apk['icon'], 'org.dyndns.fules.ck.20.png') + for density in fdroidserver.update.screen_densities: + icon_path = os.path.join(fdroidserver.update.get_icon_dir('repo', density), + apk['icon']) + self.assertTrue(os.path.isfile(icon_path)) + self.assertTrue(os.path.getsize(icon_path) > 1) + + savepath = os.path.join('metadata', 'apk', apk['packageName'] + '.yaml') + # Uncomment to save APK metadata + # with open(savepath, 'w') as f: + # yaml.add_representer(fdroidserver.metadata.Build, _build_yaml_representer) + # yaml.dump(apk, f, default_flow_style=False) + + with open(savepath, 'r') as f: + from_yaml = yaml.load(f) + self.maxDiff = None + self.assertEqual(apk, from_yaml) + + def test_process_apk_signed_by_disabled_algorithms(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.update.config = config + + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + fdroidserver.update.options.clean = True + fdroidserver.update.options.verbose = True + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.delete_unknown = True + fdroidserver.update.options.allow_disabled_algorithms = False + + knownapks = fdroidserver.common.KnownApks() + + tmptestsdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, + dir=self.tmpdir) + print('tmptestsdir', tmptestsdir) + os.chdir(tmptestsdir) + os.mkdir('repo') + os.mkdir('archive') + # setup the repo, create icons dirs, etc. 
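+        # The rest of this test copies in an MD5-signed APK and checks that it
+        # stays in repo/ only when allow_disabled_algorithms is enabled, and is
+        # otherwise moved to the archive.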
+ fdroidserver.update.process_apks({}, 'repo', knownapks) + fdroidserver.update.process_apks({}, 'archive', knownapks) + + disabledsigs = ['org.bitbucket.tickytacky.mirrormirror_2.apk', ] + for apkName in disabledsigs: + shutil.copy(os.path.join(self.basedir, apkName), + os.path.join(tmptestsdir, 'repo')) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=True, + archive_bad_sig=False) + self.assertFalse(skip) + self.assertIsNotNone(apk) + self.assertTrue(cachechanged) + self.assertFalse(os.path.exists(os.path.join('archive', apkName))) + self.assertTrue(os.path.exists(os.path.join('repo', apkName))) + + if os.path.exists('/usr/bin/apksigner') or 'apksigner' in config: + print('SKIPPING: apksigner installed and it allows MD5 signatures') + return + + javac = config['jarsigner'].replace('jarsigner', 'javac') + v = subprocess.check_output([javac, '-version'], stderr=subprocess.STDOUT)[6:-1].decode('utf-8') + if LooseVersion(v) < LooseVersion('1.8.0_132'): + print('SKIPPING: running tests with old Java (' + v + ')') + return + + # this test only works on systems with fully updated Java/jarsigner + # that has MD5 listed in jdk.jar.disabledAlgorithms in java.security + # https://blogs.oracle.com/java-platform-group/oracle-jre-will-no-longer-trust-md5-signed-code-by-default + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=True) + self.assertTrue(skip) + self.assertIsNone(apk) + self.assertFalse(cachechanged) + self.assertTrue(os.path.exists(os.path.join('archive', apkName))) + self.assertFalse(os.path.exists(os.path.join('repo', apkName))) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'archive', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=False) + self.assertFalse(skip) + self.assertIsNotNone(apk) + self.assertTrue(cachechanged) + self.assertTrue(os.path.exists(os.path.join('archive', apkName))) + self.assertFalse(os.path.exists(os.path.join('repo', apkName))) + + # ensure that icons have been moved to the archive as well + for density in fdroidserver.update.screen_densities: + icon_path = os.path.join(fdroidserver.update.get_icon_dir('archive', density), + apk['icon']) + self.assertTrue(os.path.isfile(icon_path)) + self.assertTrue(os.path.getsize(icon_path) > 1) + + badsigs = ['urzip-badcert.apk', 'urzip-badsig.apk', 'urzip-release-unsigned.apk', ] + for apkName in badsigs: + shutil.copy(os.path.join(self.basedir, apkName), + os.path.join(tmptestsdir, 'repo')) + + skip, apk, cachechanged = fdroidserver.update.process_apk({}, apkName, 'repo', + knownapks, + allow_disabled_algorithms=False, + archive_bad_sig=False) + self.assertTrue(skip) + self.assertIsNone(apk) + self.assertFalse(cachechanged) + + def test_process_invalid_apk(self): + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + fdroidserver.update.options.delete_unknown = False + + knownapks = fdroidserver.common.KnownApks() + apk = 'fake.ota.update_1234.zip' # this is not an APK, scanning should fail + (skip, apk, cachechanged) = fdroidserver.update.process_apk({}, apk, 'repo', knownapks, + False) + + self.assertTrue(skip) + self.assertIsNone(apk) + 
self.assertFalse(cachechanged) + + def test_translate_per_build_anti_features(self): + os.chdir(os.path.join(localmodule, 'tests')) + if os.path.basename(os.getcwd()) != 'tests': + raise Exception('This test must be run in the "tests/" subdir') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = True + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.allow_disabled_algorithms = False + + apps = fdroidserver.metadata.read_metadata(xref=True) + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + fdroidserver.update.translate_per_build_anti_features(apps, apks) + self.assertEqual(len(apks), 17) + foundtest = False + for apk in apks: + if apk['packageName'] == 'com.politedroid' and apk['versionCode'] == 3: + antiFeatures = apk.get('antiFeatures') + self.assertTrue('KnownVuln' in antiFeatures) + self.assertEqual(3, len(antiFeatures)) + foundtest = True + self.assertTrue(foundtest) + + def test_create_metadata_from_template(self): + tmptestsdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, + dir=self.tmpdir) + print('tmptestsdir', tmptestsdir) + os.chdir(tmptestsdir) + os.mkdir('repo') + os.mkdir('metadata') + shutil.copy(os.path.join(localmodule, 'tests', 'urzip.apk'), 'repo') + + config = dict() + fdroidserver.common.fill_config_defaults(config) + config['ndk_paths'] = dict() + config['accepted_formats'] = ['json', 'txt', 'yml'] + fdroidserver.common.config = config + fdroidserver.update.config = config + + fdroidserver.update.options = type('', (), {})() + fdroidserver.update.options.clean = True + fdroidserver.update.options.delete_unknown = False + fdroidserver.update.options.rename_apks = False + fdroidserver.update.options.allow_disabled_algorithms = False + + knownapks = fdroidserver.common.KnownApks() + apks, cachechanged = fdroidserver.update.process_apks({}, 'repo', knownapks, False) + self.assertEqual(1, len(apks)) + apk = apks[0] + + testfile = 'metadata/info.guardianproject.urzip.yml' + # create empty 0 byte .yml file, run read_metadata, it should work + open(testfile, 'a').close() + apps = fdroidserver.metadata.read_metadata(xref=True) + self.assertEqual(1, len(apps)) + os.remove(testfile) + + # test using internal template + apps = fdroidserver.metadata.read_metadata(xref=True) + self.assertEqual(0, len(apps)) + fdroidserver.update.create_metadata_from_template(apk) + self.assertTrue(os.path.exists(testfile)) + apps = fdroidserver.metadata.read_metadata(xref=True) + self.assertEqual(1, len(apps)) + for app in apps.values(): + self.assertEqual('urzip', app['Name']) + self.assertEqual(1, len(app['Categories'])) + break + + # test using external template.yml + os.remove(testfile) + self.assertFalse(os.path.exists(testfile)) + shutil.copy(os.path.join(localmodule, 'examples', 'template.yml'), tmptestsdir) + fdroidserver.update.create_metadata_from_template(apk) + self.assertTrue(os.path.exists(testfile)) + apps = fdroidserver.metadata.read_metadata(xref=True) + self.assertEqual(1, len(apps)) + for app in apps.values(): + self.assertEqual('urzip', app['Name']) + self.assertEqual(1, len(app['Categories'])) + self.assertEqual('Internet', 
app['Categories'][0]) + break + with open(testfile) as fp: + data = yaml.load(fp) + self.assertEqual('urzip', data['Name']) + self.assertEqual('urzip', data['Summary']) + + def test_has_known_vulnerability(self): + good = [ + 'org.bitbucket.tickytacky.mirrormirror_1.apk', + 'org.bitbucket.tickytacky.mirrormirror_2.apk', + 'org.bitbucket.tickytacky.mirrormirror_3.apk', + 'org.bitbucket.tickytacky.mirrormirror_4.apk', + 'org.dyndns.fules.ck_20.apk', + 'urzip.apk', + 'urzip-badcert.apk', + 'urzip-badsig.apk', + 'urzip-release.apk', + 'urzip-release-unsigned.apk', + 'repo/com.politedroid_3.apk', + 'repo/com.politedroid_4.apk', + 'repo/com.politedroid_5.apk', + 'repo/com.politedroid_6.apk', + 'repo/obb.main.oldversion_1444412523.apk', + 'repo/obb.mainpatch.current_1619_another-release-key.apk', + 'repo/obb.mainpatch.current_1619.apk', + 'repo/obb.main.twoversions_1101613.apk', + 'repo/obb.main.twoversions_1101615.apk', + 'repo/obb.main.twoversions_1101617.apk', + 'repo/urzip-; Рахма́, [rɐxˈmanʲɪnəf] سيرجي_رخمانينوف 谢·.apk', + ] + for f in good: + self.assertFalse(fdroidserver.update.has_known_vulnerability(f)) + with self.assertRaises(fdroidserver.exception.FDroidException): + fdroidserver.update.has_known_vulnerability('janus.apk') + + def test_get_apk_icon_when_src_is_none(self): + config = dict() + fdroidserver.common.fill_config_defaults(config) + fdroidserver.common.config = config + fdroidserver.update.config = config + + # pylint: disable=protected-access + icons_src = fdroidserver.update._get_apk_icons_src('urzip-release.apk', None) + assert icons_src == {} + + +if __name__ == "__main__": + os.chdir(os.path.dirname(__file__)) + + parser = optparse.OptionParser() + parser.add_option("-v", "--verbose", action="store_true", default=False, + help="Spew out even more information than normal") + (fdroidserver.common.options, args) = parser.parse_args(['--verbose']) + + newSuite = unittest.TestSuite() + newSuite.addTest(unittest.makeSuite(UpdateTest)) + unittest.main(failfast=False) diff --git a/tests/valid-package-names/RandomPackageNames.java b/tests/valid-package-names/RandomPackageNames.java index 80257fd8..82d77bbe 100644 --- a/tests/valid-package-names/RandomPackageNames.java +++ b/tests/valid-package-names/RandomPackageNames.java @@ -115,7 +115,7 @@ public class RandomPackageNames { for (File f : new File("/home/hans/code/fdroid/fdroiddata/metadata").listFiles()) { String name = f.getName(); - if (name.endsWith(".yml")) { + if (name.endsWith(".yml") || name.endsWith(".txt")) { compare(name.substring(0, name.length() - 4)); } }