mirror of https://github.com/f-droid/fdroidserver.git, synced 2025-11-03 06:00:28 +03:00

Remove libcloud and s3cmd from fdroidserver

This commit is contained in:
parent a9856cfb92
commit dbd769db9f

9 changed files with 417 additions and 819 deletions
.gitlab-ci.yml

@@ -186,6 +186,42 @@ ubuntu_lts_ppa:
     - ./run-tests
 
 
+# Test to see how rclone works with S3
+test_deploy_to_s3_with_rclone:
+  image: debian:bookworm-slim
+  <<: *apt-template
+  services:
+    - name: docker:dind
+      command: ["--tls=false"]
+  variables:
+    DOCKER_HOST: "tcp://docker:2375"
+    DOCKER_DRIVER: overlay2
+    DOCKER_TLS_CERTDIR: ""
+  before_script:
+    # ensure minio is up before executing tests
+    - apt-get update
+    - apt-get install -y
+        androguard
+        apksigner
+        curl
+        docker.io
+        git
+        python3-venv
+        rclone
+    - python3 -m venv --system-site-packages test-venv
+    - . test-venv/bin/activate
+    - pip install testcontainers[minio]
+    - pip install .
+  script:
+    - python3 -m unittest -k test_update_remote_storage_with_rclone --verbose
+  rules:
+    - changes:
+        - .gitlab-ci.yml
+        - fdroidserver/deploy.py
+        - tests/test_deploy.py
+        - tests/test_integration.py
+
+
 # Test using Ubuntu/jammy LTS (supported til April, 2027) with depends
 # from pypi and sdkmanager. The venv is used to isolate the dist
 # tarball generation environment from the clean install environment.
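Editor's note: the CI job selects tests by name rather than by module; `python3 -m unittest -k PATTERN` is shorthand for a `*PATTERN*` fnmatch on test ids. A minimal sketch of the equivalent programmatic selection, assuming a checkout with the usual tests/ layout:

import unittest

# Discover the same tests the CI job's script line runs.
loader = unittest.TestLoader()
loader.testNamePatterns = ['*test_update_remote_storage_with_rclone*']
suite = loader.discover('tests')
unittest.TextTestRunner(verbosity=2).run(suite)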
CHANGELOG.md

@@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 
+## [2.5.0] - NEXT
+
+### Removed
+
+* deploy: `awsaccesskeyid:` and `awssecretkey:` config items removed, use the
+  standard env vars: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.
+
 ## [2.4.2] - 2025-06-24
 
 ### Fixed
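To illustrate the migration this entry describes: credentials that used to live in config.yml now come from the process environment before running `fdroid deploy`. A sketch with placeholder values (the key id reuses the dummy value from the example config; the secret is a hypothetical placeholder):

import os

# Placeholder credentials; in practice these are set in the shell or a
# CI secret store, never committed to config.yml.
os.environ["AWS_ACCESS_KEY_ID"] = "SEE0CHAITHEIMAUR2USA"
os.environ["AWS_SECRET_ACCESS_KEY"] = "0000000000000000000000000000000000000000"  # nosec placeholder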
examples/config.yml

@@ -305,70 +305,33 @@
 #
 # sync_from_local_copy_dir: true
 
+# To deploy to an AWS S3 "bucket" in the US East region, set the
+# bucket name in the config, then set the environment variables
+# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY using the values from
+# the AWS Management Console. See
+# https://rclone.org/s3/#authentication
+#
+# awsbucket: myawsfdroidbucket
 
-# To upload the repo to an Amazon S3 bucket using `fdroid deploy'
-# . rclone, s3cmd and apache libcloud are the available options.
-# If rclone and s3cmd are not installed, apache libcloud is used.
-# To use apache libcloud, add the following options to this file
-# (config.yml)
 
+# For extended options for syncing to cloud drive and object store
+# services, `fdroid deploy' wraps Rclone. Rclone is a full featured
+# sync tool for a huge variety of cloud services. Set up your services
+# using `rclone config`, then specify each config name to deploy the
+# awsbucket: to. Using rclone_config: overrides the default AWS S3 US
+# East setup, and will only sync to the services actually specified.
-#
-# awsbucket: myawsfdroid
-# awsaccesskeyid: SEE0CHAITHEIMAUR2USA
-# awssecretkey: {env: awssecretkey}
-#
-# In case s3cmd is installed and rclone is not installed,
-# s3cmd will be the preferred sync option.
-# It will delete and recreate the whole fdroid directory each time.
-# To customize how s3cmd interacts with the cloud
-# provider, create a 's3cfg' file next to this file (config.yml), and
-# those settings will be used instead of any 'aws' variable below.
-# Secrets can be fetched from environment variables to ensure that
-# they are not leaked as part of this file.
-#
-# awsbucket: myawsfdroid
-# awsaccesskeyid: SEE0CHAITHEIMAUR2USA
-# awssecretkey: {env: awssecretkey}
-#
-# In case rclone is installed and s3cmd is not installed,
-# rclone will be the preferred sync option.
-# It will sync the local folders with remote folders without
-# deleting anything in one go.
-# To ensure success, install rclone as per
-# the instructions at https://rclone.org/install/ and also configure for
-# object storage services as detailed at https://rclone.org/s3/#configuration
-# By default rclone uses the configuration file at ~/.config/rclone/rclone.conf
-# To specify a custom configuration file, please add the full path to the
-# configuration file as below
-# awsbucket: myawsfdroidbucket
-# rclone_config:
-#   - aws-sample-config
-#   - rclone-supported-service-config
 
 
+# By default Rclone uses the user's default configuration file at
+# ~/.config/rclone/rclone.conf To specify a custom configuration file,
+# please add the full path to the configuration file as below.
+#
+# path_to_custom_rclone_config: /home/mycomputer/somedir/example.conf
+#
+# This setting will ignore the default rclone config found at
+# ~/.config/rclone/rclone.conf
+#
+# Please note that rclone_config can be assigned a string or list
+#
+# awsbucket: myawsfdroid
+# rclone_config: aws-sample-config
+#
+# or
+#
+# awsbucket: myawsfdroid
+# rclone_config: [aws-sample-config, rclone-supported-service-config]
+#
-# In case both rclone and s3cmd are installed, the preferred sync
-# tool can be specified in this file (config.yml)
-# if s3cmd is preferred, set it as below
-#
-# s3cmd: true
-#
-# if rclone is preferred, set it as below
-#
-# rclone: true
-#
-# Please note that only one can be set to true at any time
-# Also, in the event that both s3cmd and rclone are installed
-# and both are missing from the config.yml file, the preferred
-# tool will be s3cmd.
 
 
 # If you want to force 'fdroid server' to use a non-standard serverwebroot.
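The string-or-list flexibility documented above maps onto a small normalization step in fdroidserver/deploy.py (shown later in this diff); a standalone sketch with assumed example values:

# rclone_config may be a single remote name or a list of names; deploy.py
# normalizes it to a list before looping over the remotes.
config = {'awsbucket': 'myawsfdroid', 'rclone_config': 'aws-sample-config'}
rclone_config = config.get('rclone_config', [])
if rclone_config and isinstance(rclone_config, str):
    rclone_config = [rclone_config]
assert rclone_config == ['aws-sample-config']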
fdroidserver/common.py

@@ -691,10 +691,7 @@ def read_config():
     for configname in confignames_to_delete:
         del config[configname]
 
-    if any(
-        k in config and config.get(k)
-        for k in ["awssecretkey", "keystorepass", "keypass"]
-    ):
+    if any(k in config and config.get(k) for k in ["keystorepass", "keypass"]):
         st = os.stat(CONFIG_FILE)
         if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
             logging.warning(
fdroidserver/deploy.py

@@ -16,8 +16,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+import configparser
 import glob
-import hashlib
 import json
 import logging
 import os
@@ -47,9 +47,6 @@ GIT_BRANCH = 'master'
 
 BINARY_TRANSPARENCY_DIR = 'binary_transparency'
 
-AUTO_S3CFG = '.fdroid-deploy-s3cfg'
-USER_S3CFG = 's3cfg'
-USER_RCLONE_CONF = None
 REMOTE_HOSTNAME_REGEX = re.compile(r'\W*\w+\W+(\w+).*')
 
 
@ -98,356 +95,145 @@ def _remove_missing_files(files: List[str]) -> List[str]:
|
|||
return existing
|
||||
|
||||
|
||||
def _generate_rclone_include_pattern(files):
|
||||
"""Generate a pattern for rclone's --include flag (https://rclone.org/filtering/)."""
|
||||
return "{" + ",".join(sorted(set(files))) + "}"
|
||||
|
||||
|
||||
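For a concrete sense of what this helper feeds to rclone, here is a small worked example (file names assumed, mirroring the index files used elsewhere in this diff):

# Duplicates collapse and the result is sorted, yielding one brace-expansion
# group for rclone's --include flag.
files = ['index-v2.json', 'entry.json', 'entry.json', 'diff/*.*']
pattern = "{" + ",".join(sorted(set(files))) + "}"
assert pattern == "{diff/*.*,entry.json,index-v2.json}"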
 def update_awsbucket(repo_section, is_index_only=False, verbose=False, quiet=False):
-    """Upload the contents of the directory `repo_section` (including subdirectories) to the AWS S3 "bucket".
+    """Sync the directory `repo_section` (including subdirectories) to AWS S3 US East.
 
-    The contents of that subdir of the
-    bucket will first be deleted.
+    This is a shim function for public API compatibility.
 
+    Requires AWS credentials set as environment variables:
+    https://rclone.org/s3/#authentication
-    Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
     """
     logging.debug(
         f'''Syncing "{repo_section}" to Amazon S3 bucket "{config['awsbucket']}"'''
     )
 
-    if common.set_command_in_config('s3cmd') and common.set_command_in_config('rclone'):
-        logging.info(
-            'Both rclone and s3cmd are installed. Checking config.yml for preference.'
-        )
-        if config['s3cmd'] is not True and config['rclone'] is not True:
-            logging.warning(
-                'No syncing tool set in config.yml!. Defaulting to using s3cmd'
-            )
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['s3cmd'] is True and config['rclone'] is True:
-            logging.warning(
-                'Both syncing tools set in config.yml!. Defaulting to using s3cmd'
-            )
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['s3cmd'] is True and config['rclone'] is not True:
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['rclone'] is True and config['s3cmd'] is not True:
-            update_remote_storage_with_rclone(
-                repo_section, is_index_only, verbose, quiet
-            )
-    elif common.set_command_in_config('s3cmd'):
-        update_awsbucket_s3cmd(repo_section, is_index_only)
-    elif common.set_command_in_config('rclone'):
-        update_remote_storage_with_rclone(repo_section, is_index_only, verbose, quiet)
-    else:
-        update_awsbucket_libcloud(repo_section, is_index_only)
-
-
-def update_awsbucket_s3cmd(repo_section, is_index_only=False):
-    """Upload using the CLI tool s3cmd, which provides rsync-like sync.
-
-    The upload is done in multiple passes to reduce the chance of
-    interfering with an existing client-server interaction. In the
-    first pass, only new files are uploaded. In the second pass,
-    changed files are uploaded, overwriting what is on the server. On
-    the third/last pass, the indexes are uploaded, and any removed
-    files are deleted from the server. The last pass is the only pass
-    to use a full MD5 checksum of all files to detect changes.
-    """
-    logging.debug(_('Using s3cmd to sync with: {url}').format(url=config['awsbucket']))
-
-    if os.path.exists(USER_S3CFG):
-        logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG))
-        configfilename = USER_S3CFG
-    else:
-        fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
-        logging.debug(
-            _('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG)
-        )
-        os.write(fd, '[default]\n'.encode('utf-8'))
-        os.write(
-            fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8')
-        )
-        os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
-        os.close(fd)
-        configfilename = AUTO_S3CFG
-
-    s3bucketurl = 's3://' + config['awsbucket']
-    s3cmd = [config['s3cmd'], '--config=' + configfilename]
-    if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
-        logging.warning(_('Creating new S3 bucket: {url}').format(url=s3bucketurl))
-        if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
-            logging.error(
-                _('Failed to create S3 bucket: {url}').format(url=s3bucketurl)
-            )
-            raise FDroidException()
-
-    s3cmd_sync = s3cmd + ['sync', '--acl-public']
-    options = common.get_options()
-    if options and options.verbose:
-        s3cmd_sync += ['--verbose']
-    if options and options.quiet:
-        s3cmd_sync += ['--quiet']
-
-    s3url = s3bucketurl + '/fdroid/'
-
-    logging.debug(
-        _('s3cmd sync indexes {path} to {url} and delete').format(
-            path=repo_section, url=s3url
-        )
-    )
-
-    if is_index_only:
-        logging.debug(
-            _('s3cmd syncs indexes from {path} to {url} and deletes removed').format(
-                path=repo_section, url=s3url
-            )
-        )
-        sync_indexes_flags = []
-        sync_indexes_flags.extend(_get_index_includes(repo_section))
-        sync_indexes_flags.append('--delete-removed')
-        sync_indexes_flags.append('--delete-after')
-        if options.no_checksum:
-            sync_indexes_flags.append('--no-check-md5')
-        else:
-            sync_indexes_flags.append('--check-md5')
-        returncode = subprocess.call(
-            s3cmd_sync + sync_indexes_flags + [repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-    else:
-        logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
-        logging.debug(_('Running first pass with MD5 checking disabled'))
-        excludes = _get_index_excludes(repo_section)
-        returncode = subprocess.call(
-            s3cmd_sync
-            + excludes
-            + ['--no-check-md5', '--skip-existing', repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-        logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
-        returncode = subprocess.call(
-            s3cmd_sync + excludes + ['--no-check-md5', repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-
-        logging.debug(
-            _('s3cmd sync indexes {path} to {url} and delete').format(
-                path=repo_section, url=s3url
-            )
-        )
-        s3cmd_sync.append('--delete-removed')
-        s3cmd_sync.append('--delete-after')
-        if options.no_checksum:
-            s3cmd_sync.append('--no-check-md5')
-        else:
-            s3cmd_sync.append('--check-md5')
-        if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0:
-            raise FDroidException()
+    update_remote_storage_with_rclone(
+        repo_section, config['awsbucket'], is_index_only, verbose, quiet
+    )
 
 
 def update_remote_storage_with_rclone(
-    repo_section, is_index_only=False, verbose=False, quiet=False
+    repo_section, awsbucket, is_index_only=False, verbose=False, quiet=False
 ):
-    """
-    Upload fdroid repo folder to remote storage using rclone sync.
+    """Sync the directory `repo_section` (including subdirectories) to configured cloud services.
 
     Rclone sync can send the files to any supported remote storage
-    service once without numerous polling.
-    If remote storage is s3 e.g aws s3, wasabi, filebase then path will be
-    bucket_name/fdroid/repo where bucket_name will be an s3 bucket
-    If remote storage is storage drive/sftp e.g google drive, rsync.net
-    the new path will be bucket_name/fdroid/repo where bucket_name
-    will be a folder
+    service once without numerous polling. If remote storage is S3 e.g
+    AWS S3, Wasabi, Filebase, etc, then path will be
+    bucket_name/fdroid/repo where bucket_name will be an S3 bucket. If
+    remote storage is storage drive/sftp e.g google drive, rsync.net the
+    new path will be bucket_name/fdroid/repo where bucket_name will be a
+    folder
 
+    See https://rclone.org/docs/#config-config-file
+
+    rclone filtering works differently than rsync. For example,
+    "--include" implies "--exclude **" at the end of an rclone internal
+    filter list.
 
-    Better than the s3cmd command as it does the syncing in one command
-    Check https://rclone.org/docs/#config-config-file (optional config file)
     """
-    logging.debug(_('Using rclone to sync with: {url}').format(url=config['awsbucket']))
+    logging.debug(_('Using rclone to sync to "{name}"').format(name=awsbucket))
 
-    if config.get('path_to_custom_rclone_config') is not None:
-        USER_RCLONE_CONF = config['path_to_custom_rclone_config']
-        if os.path.exists(USER_RCLONE_CONF):
-            logging.info("'path_to_custom_rclone_config' found in config.yml")
-            logging.info(
-                _('Using "{path}" for syncing with remote storage.').format(
-                    path=USER_RCLONE_CONF
-                )
-            )
-            configfilename = USER_RCLONE_CONF
-        else:
-            logging.info('Custom configuration not found.')
-            logging.info(
-                'Using default configuration at {}'.format(
-                    subprocess.check_output(['rclone', 'config', 'file'], text=True)
-                )
-            )
-            configfilename = None
-    else:
-        logging.warning("'path_to_custom_rclone_config' not found in config.yml")
-        logging.info('Custom configuration not found.')
-        logging.info(
-            'Using default configuration at {}'.format(
-                subprocess.check_output(['rclone', 'config', 'file'], text=True)
-            )
-        )
-        configfilename = None
+    rclone_config = config.get('rclone_config', [])
+    if rclone_config and isinstance(rclone_config, str):
+        rclone_config = [rclone_config]
+
+    path = config.get('path_to_custom_rclone_config')
+    if path:
+        if not os.path.exists(path):
+            logging.error(
+                _('path_to_custom_rclone_config: "{path}" does not exist!').format(
+                    path=path
+                )
+            )
+            sys.exit(1)
+        configfilename = path
+    else:
+        configfilename = None
+        output = subprocess.check_output(['rclone', 'config', 'file'], text=True)
+        default_config_path = output.split('\n')[-2]
+        if os.path.exists(default_config_path):
+            path = default_config_path
+    if path:
+        logging.info(_('Using "{path}" for rclone config.').format(path=path))
 
     upload_dir = 'fdroid/' + repo_section
 
-    if not config.get('rclone_config') or not config.get('awsbucket'):
-        raise FDroidException(
-            _('To use rclone, rclone_config and awsbucket must be set in config.yml!')
-        )
-
-    if is_index_only:
-        sources = _get_index_file_paths(repo_section)
-        sources = _remove_missing_files(sources)
-    else:
-        sources = [repo_section]
-
-    if isinstance(config['rclone_config'], str):
-        rclone_config = [config['rclone_config']]
-    else:
-        rclone_config = config['rclone_config']
-
-    for source in sources:
-        for remote_config in rclone_config:
-            complete_remote_path = f'{remote_config}:{config["awsbucket"]}/{upload_dir}'
-            rclone_sync_command = ['rclone', 'sync', source, complete_remote_path]
-
-            if verbose:
-                rclone_sync_command += ['--verbose']
-            elif quiet:
-                rclone_sync_command += ['--quiet']
-
-            if configfilename:
-                rclone_sync_command += ['--config=' + configfilename]
-
-            logging.debug(
-                "rclone sync all files in " + source + ' to ' + complete_remote_path
-            )
-
-            if subprocess.call(rclone_sync_command) != 0:
-                raise FDroidException()
-
-
-def update_awsbucket_libcloud(repo_section, is_index_only=False):
-    """No summary.
-
-    Upload the contents of the directory `repo_section` (including
-    subdirectories) to the AWS S3 "bucket".
-
-    The contents of that subdir of the
-    bucket will first be deleted.
-
-    Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
-    """
-    logging.debug(
-        _('using Apache libcloud to sync with {url}').format(url=config['awsbucket'])
-    )
-
-    import libcloud.security
-
-    libcloud.security.VERIFY_SSL_CERT = True
-    from libcloud.storage.providers import get_driver
-    from libcloud.storage.types import ContainerDoesNotExistError, Provider
-
-    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
-        raise FDroidException(
-            _(
-                'To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.yml!'
-            )
-        )
-    awsbucket = config['awsbucket']
-
-    if os.path.exists(USER_S3CFG):
-        raise FDroidException(
-            _('"{path}" exists but s3cmd is not installed!').format(path=USER_S3CFG)
-        )
-
-    cls = get_driver(Provider.S3)
-    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
-    try:
-        container = driver.get_container(container_name=awsbucket)
-    except ContainerDoesNotExistError:
-        container = driver.create_container(container_name=awsbucket)
-        logging.info(_('Created new container "{name}"').format(name=container.name))
-
-    upload_dir = 'fdroid/' + repo_section
-    objs = dict()
-    for obj in container.list_objects():
-        if obj.name.startswith(upload_dir + '/'):
-            objs[obj.name] = obj
-
-    if is_index_only:
-        index_files = [
-            f"{os.getcwd()}/{name}" for name in _get_index_file_paths(repo_section)
-        ]
-        files_to_upload = [
-            os.path.join(root, name)
-            for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section))
-            for name in files
-        ]
-        files_to_upload = list(set(files_to_upload) & set(index_files))
-        files_to_upload = _remove_missing_files(files_to_upload)
-    else:
-        files_to_upload = [
-            os.path.join(root, name)
-            for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section))
-            for name in files
-        ]
-
-    for file_to_upload in files_to_upload:
-        upload = False
-        object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
-        if object_name not in objs:
-            upload = True
-        else:
-            obj = objs.pop(object_name)
-            if obj.size != os.path.getsize(file_to_upload):
-                upload = True
-            else:
-                # if the sizes match, then compare by MD5
-                md5 = hashlib.md5()  # nosec AWS uses MD5
-                with open(file_to_upload, 'rb') as f:
-                    while True:
-                        data = f.read(8192)
-                        if not data:
-                            break
-                        md5.update(data)
-                if obj.hash != md5.hexdigest():
-                    s3url = 's3://' + awsbucket + '/' + obj.name
-                    logging.info(' deleting ' + s3url)
-                    if not driver.delete_object(obj):
-                        logging.warning('Could not delete ' + s3url)
-                    upload = True
-
-        if upload:
-            logging.debug(' uploading "' + file_to_upload + '"...')
-            extra = {'acl': 'public-read'}
-            if file_to_upload.endswith('.sig'):
-                extra['content_type'] = 'application/pgp-signature'
-            elif file_to_upload.endswith('.asc'):
-                extra['content_type'] = 'application/pgp-signature'
-            path = os.path.relpath(file_to_upload)
-            logging.info(f' uploading {path} to s3://{awsbucket}/{object_name}')
-            with open(file_to_upload, 'rb') as iterator:
-                obj = driver.upload_object_via_stream(
-                    iterator=iterator,
-                    container=container,
-                    object_name=object_name,
-                    extra=extra,
-                )
-
-    # delete the remnants in the bucket, they do not exist locally
-    while objs:
-        object_name, obj = objs.popitem()
-        s3url = 's3://' + awsbucket + '/' + object_name
-        if object_name.startswith(upload_dir):
-            logging.warning(' deleting ' + s3url)
-            driver.delete_object(obj)
-        else:
-            logging.info(' skipping ' + s3url)
+    if not rclone_config:
+        env = os.environ
+        # Check both canonical and backup names, but only tell user about canonical.
+        if not env.get("AWS_SECRET_ACCESS_KEY") and not env.get("AWS_SECRET_KEY"):
+            raise FDroidException(
+                _(
+                    """"AWS_SECRET_ACCESS_KEY" must be set as an environmental variable!"""
+                )
+            )
+        if not env.get("AWS_ACCESS_KEY_ID") and not env.get('AWS_ACCESS_KEY'):
+            raise FDroidException(
+                _(""""AWS_ACCESS_KEY_ID" must be set as an environmental variable!""")
+            )
+
+        default_remote = "AWS-S3-US-East-1"
+        env_rclone_config = configparser.ConfigParser()
+        env_rclone_config.add_section(default_remote)
+        env_rclone_config.set(
+            default_remote,
+            '; = This file is auto-generated by fdroid deploy, do not edit!',
+            '',
+        )
+        env_rclone_config.set(default_remote, "type", "s3")
+        env_rclone_config.set(default_remote, "provider", "AWS")
+        env_rclone_config.set(default_remote, "region", "us-east-1")
+        env_rclone_config.set(default_remote, "env_auth", "true")
+
+        configfilename = ".fdroid-deploy-rclone.conf"
+        with open(configfilename, "w", encoding="utf-8") as autoconfigfile:
+            env_rclone_config.write(autoconfigfile)
+        rclone_config = [default_remote]
+
+    rclone_sync_command = ['rclone', 'sync', '--delete-after']
+    if configfilename:
+        rclone_sync_command += ['--config', configfilename]
+
+    if verbose:
+        rclone_sync_command += ['--verbose']
+    elif quiet:
+        rclone_sync_command += ['--quiet']
+
+    # TODO copying update_serverwebroot rsync algo
+    for remote_config in rclone_config:
+        complete_remote_path = f'{remote_config}:{awsbucket}/{upload_dir}'
+        logging.info(f'rclone sync to {complete_remote_path}')
+        if is_index_only:
+            index_only_files = common.INDEX_FILES + ['diff/*.*']
+            include_pattern = _generate_rclone_include_pattern(index_only_files)
+            cmd = rclone_sync_command + [
+                '--include',
+                include_pattern,
+                '--delete-excluded',
+                repo_section,
+                complete_remote_path,
+            ]
+            logging.info(cmd)
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
+        else:
+            cmd = (
+                rclone_sync_command
+                + _get_index_excludes(repo_section)
+                + [
+                    repo_section,
+                    complete_remote_path,
+                ]
+            )
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
+            cmd = rclone_sync_command + [
+                repo_section,
+                complete_remote_path,
+            ]
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
 
 
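To make the env-var fallback concrete: with only AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY set, the function writes a throwaway rclone config and syncs to a remote named AWS-S3-US-East-1. A standalone sketch of the resulting INI and destination path (bucket name assumed; the auto-generated comment line from the code above is omitted here for simplicity):

import configparser
import io

# Rebuild the remote definition the same way the fallback branch does.
default_remote = "AWS-S3-US-East-1"
conf = configparser.ConfigParser()
conf.add_section(default_remote)
conf.set(default_remote, "type", "s3")
conf.set(default_remote, "provider", "AWS")
conf.set(default_remote, "region", "us-east-1")
conf.set(default_remote, "env_auth", "true")  # credentials come from AWS_* env vars

buf = io.StringIO()
conf.write(buf)
print(buf.getvalue())
# [AWS-S3-US-East-1]
# type = s3
# provider = AWS
# region = us-east-1
# env_auth = true

# The destination rclone syncs to, per the f-string in the loop above:
awsbucket = 'myawsfdroid'  # assumed example bucket name
print(f'{default_remote}:{awsbucket}/fdroid/repo')
# AWS-S3-US-East-1:myawsfdroid/fdroid/repo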
 def update_serverwebroot(serverwebroot, repo_section):
@@ -1342,8 +1128,11 @@ def main():
             # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
             update_servergitmirrors(config['servergitmirrors'], repo_section)
         if config.get('awsbucket'):
+            awsbucket = config['awsbucket']
             index_only = config.get('awsbucket_index_only')
-            update_awsbucket(repo_section, index_only, options.verbose, options.quiet)
+            update_remote_storage_with_rclone(
+                repo_section, awsbucket, index_only, options.verbose, options.quiet
+            )
         if config.get('androidobservatory'):
             upload_to_android_observatory(repo_section)
         if config.get('virustotal_apikey'):
fdroidserver/lint.py

@@ -228,9 +228,7 @@ bool_keys = (
     'make_current_version_link',
     'nonstandardwebroot',
     'per_app_repos',
-    'rclone',
     'refresh_scanner',
-    's3cmd',
     'scan_binary',
     'sync_from_local_copy_dir',
 )
@@ -245,9 +243,8 @@ check_config_keys = (
     'archive_older',
     'archive_url',
     'archive_web_base_url',
-    'awsaccesskeyid',
     'awsbucket',
-    'awssecretkey',
+    'awsbucket_index_only',
     'binary_transparency_remote',
     'cachedir',
     'char_limits',
setup.py

@@ -101,7 +101,6 @@ setup(
         'oscrypto',
         'paramiko',
         'Pillow',
-        'apache-libcloud >= 0.14.1',
         'puremagic',
         'pycountry ; sys_platform=="darwin"',
         'python-vagrant',

@@ -123,7 +122,7 @@ setup(
         'pycountry',
         'python-magic',
     ],
-    'test': ['pyjks', 'html5print'],
+    'test': ['pyjks', 'html5print', 'testcontainers[minio]'],
     'docs': [
         'sphinx',
         'numpydoc',
tests/test_deploy.py

@@ -15,6 +15,12 @@ import fdroidserver
 from .shared_test_code import TmpCwd, VerboseFalseOptions, mkdtemp
 
 basedir = Path(__file__).parent
 FILES = basedir
 
 
+def _mock_rclone_config_file(cmd, text):  # pylint: disable=unused-argument
+    """Mock output from rclone 1.60.1 but with nonexistent conf file."""
+    return "Configuration file doesn't exist, but rclone will use this path:\n/nonexistent/rclone.conf\n"
+
+
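This mock pins down the parsing contract in deploy.py: the output of `rclone config file` ends with the path on its last non-empty line, which is why the code takes `split('\n')[-2]`. A quick check against the mock itself:

output = _mock_rclone_config_file(['rclone', 'config', 'file'], text=True)
# The trailing newline makes split('\n')[-1] an empty string, so the
# second-to-last element is the config file path.
assert output.split('\n')[-2] == '/nonexistent/rclone.conf'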
 class DeployTest(unittest.TestCase):
@@ -27,7 +33,6 @@ class DeployTest(unittest.TestCase):
 
         fdroidserver.common.options = mock.Mock()
         fdroidserver.deploy.config = {}
-        fdroidserver.deploy.USER_RCLONE_CONF = False
 
     def tearDown(self):
         self._td.cleanup()
@@ -89,7 +94,7 @@ class DeployTest(unittest.TestCase):
         with self.assertRaises(SystemExit):
             fdroidserver.deploy.update_serverwebroots([{'url': 'ssh://nope'}], 'repo')
 
-    @unittest.skipUnless(shutil.which('rclone'), '/usr/bin/rclone')
+    @unittest.skipUnless(shutil.which('rclone'), 'requires rclone')
     def test_update_remote_storage_with_rclone(self):
         os.chdir(self.testdir)
         repo = Path('repo')
@@ -114,26 +119,25 @@ class DeployTest(unittest.TestCase):
             rclone_config.write(configfile)
 
         # setup parameters for this test run
-        fdroidserver.deploy.config['awsbucket'] = 'test_bucket_folder'
-        fdroidserver.deploy.config['rclone'] = True
+        awsbucket = 'test_bucket_folder'
+        fdroidserver.deploy.config['awsbucket'] = awsbucket
         fdroidserver.deploy.config['rclone_config'] = 'test-local-config'
         fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file)
         fdroidserver.common.options = VerboseFalseOptions
 
         # write out destination path
-        destination = Path('test_bucket_folder/fdroid')
+        destination = Path(f'{awsbucket}/fdroid')
         destination.mkdir(parents=True, exist_ok=True)
         dest_apk = Path(destination) / fake_apk
         dest_index = Path(destination) / fake_index
         self.assertFalse(dest_apk.is_file())
         self.assertFalse(dest_index.is_file())
         repo_section = str(repo)
-        # fdroidserver.deploy.USER_RCLONE_CONF = str(rclone_file)
-        fdroidserver.deploy.update_remote_storage_with_rclone(repo_section)
+        fdroidserver.deploy.update_remote_storage_with_rclone(repo_section, awsbucket)
         self.assertTrue(dest_apk.is_file())
         self.assertTrue(dest_index.is_file())
 
-    @unittest.skipUnless(shutil.which('rclone'), '/usr/bin/rclone')
+    @unittest.skipUnless(shutil.which('rclone'), 'requires rclone')
     def test_update_remote_storage_with_rclone_in_index_only_mode(self):
         os.chdir(self.testdir)
         repo = Path('repo')
@@ -158,51 +162,131 @@ class DeployTest(unittest.TestCase):
             rclone_config.write(configfile)
 
         # setup parameters for this test run
-        fdroidserver.deploy.config['awsbucket'] = 'test_bucket_folder'
-        fdroidserver.deploy.config['rclone'] = True
+        awsbucket = 'test_bucket_folder'
+        fdroidserver.deploy.config['awsbucket'] = awsbucket
         fdroidserver.deploy.config['rclone_config'] = 'test-local-config'
         fdroidserver.deploy.config['path_to_custom_rclone_config'] = str(rclone_file)
         fdroidserver.common.options = VerboseFalseOptions
 
         # write out destination path
-        destination = Path('test_bucket_folder/fdroid')
+        destination = Path(f'{awsbucket}/fdroid')
         destination.mkdir(parents=True, exist_ok=True)
         dest_apk = Path(destination) / fake_apk
         dest_index = Path(destination) / fake_index
         self.assertFalse(dest_apk.is_file())
         self.assertFalse(dest_index.is_file())
         repo_section = str(repo)
-        # fdroidserver.deploy.USER_RCLONE_CONF = str(rclone_file)
         fdroidserver.deploy.update_remote_storage_with_rclone(
-            repo_section, is_index_only=True
+            repo_section, awsbucket, is_index_only=True
         )
         self.assertFalse(dest_apk.is_file())
         self.assertTrue(dest_index.is_file())
 
+    @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True)
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
+    def test_update_remote_storage_with_rclone_awsbucket_no_env_vars(self):
+        with self.assertRaises(fdroidserver.exception.FDroidException):
+            fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket')
+
+    @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True)
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
+    def test_update_remote_storage_with_rclone_awsbucket_no_AWS_SECRET_ACCESS_KEY(self):
+        os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey'
+        with self.assertRaises(fdroidserver.exception.FDroidException):
+            fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket')
+
+    @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True)
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
+    def test_update_remote_storage_with_rclone_awsbucket_no_AWS_ACCESS_KEY_ID(self):
+        os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets'  # nosec B105
+        with self.assertRaises(fdroidserver.exception.FDroidException):
+            fdroidserver.deploy.update_remote_storage_with_rclone('repo', 'foobucket')
+
+    @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True)
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
     @mock.patch('subprocess.call')
-    @mock.patch('subprocess.check_output', lambda cmd, text: '/path/to/rclone.conf')
-    def test_update_remote_storage_with_rclone_mock(self, mock_call):
+    def test_update_remote_storage_with_rclone_awsbucket_env_vars(self, mock_call):
+        awsbucket = 'test_bucket_folder'
+        os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey'
+        os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets'  # nosec B105
+
         def _mock_subprocess_call(cmd):
             self.assertEqual(
-                cmd,
+                cmd[:5],
                 [
                     'rclone',
                     'sync',
-                    'repo',
-                    'test_local_config:test_bucket_folder/fdroid/repo',
+                    '--delete-after',
+                    '--config',
+                    '.fdroid-deploy-rclone.conf',
                 ],
             )
             return 0
 
         mock_call.side_effect = _mock_subprocess_call
+        fdroidserver.deploy.config = {'awsbucket': awsbucket}
+        fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket)
+        mock_call.assert_called()
+
+    @mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True)
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
+    @mock.patch('subprocess.call')
+    def test_update_remote_storage_with_rclone_mock_awsbucket(self, mock_call):
+        awsbucket = 'test_bucket_folder'
+        os.environ['AWS_ACCESS_KEY_ID'] = 'accesskey'
+        os.environ['AWS_SECRET_ACCESS_KEY'] = 'secrets'  # nosec B105
+        self.last_cmd = None
+
+        def _mock_subprocess_call(cmd):
+            self.last_cmd = cmd
+            return 0
+
+        mock_call.side_effect = _mock_subprocess_call
+
+        fdroidserver.deploy.config = {'awsbucket': awsbucket}
+        fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket)
+        self.maxDiff = None
+        self.assertEqual(
+            self.last_cmd,
+            [
+                'rclone',
+                'sync',
+                '--delete-after',
+                '--config',
+                '.fdroid-deploy-rclone.conf',
+                'repo',
+                f'AWS-S3-US-East-1:{awsbucket}/fdroid/repo',
+            ],
+        )
+
+    @mock.patch('subprocess.check_output', _mock_rclone_config_file)
+    @mock.patch('subprocess.call')
+    def test_update_remote_storage_with_rclone_mock_rclone_config(self, mock_call):
+        awsbucket = 'test_bucket_folder'
+        self.last_cmd = None
+
+        def _mock_subprocess_call(cmd):
+            self.last_cmd = cmd
+            return 0
+
+        mock_call.side_effect = _mock_subprocess_call
+
         fdroidserver.deploy.config = {
-            'awsbucket': 'test_bucket_folder',
-            'rclone': True,
+            'awsbucket': awsbucket,
             'rclone_config': 'test_local_config',
         }
-        fdroidserver.deploy.update_remote_storage_with_rclone('repo')
-        mock_call.assert_called_once()
+        fdroidserver.deploy.update_remote_storage_with_rclone('repo', awsbucket)
+        self.maxDiff = None
+        self.assertEqual(
+            self.last_cmd,
+            [
+                'rclone',
+                'sync',
+                '--delete-after',
+                'repo',
+                'test_local_config:test_bucket_folder/fdroid/repo',
+            ],
+        )
 
     def test_update_serverwebroot(self):
         """rsync works with file paths, so this test uses paths for the URLs"""
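A note on the decorator pattern used by the new tests above: `mock.patch.dict(..., clear=True)` empties `os.environ` except for PATH for the duration of each test, which is what lets the missing-credential cases fail deterministically. A minimal standalone illustration:

import os
from unittest import mock

with mock.patch.dict(os.environ, {'PATH': os.getenv('PATH')}, clear=True):
    # Inside the context only PATH survives, so any AWS_* lookup fails
    # just as it does in the tests above.
    assert os.environ.get('AWS_ACCESS_KEY_ID') is None
    assert 'PATH' in os.environ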
@@ -668,399 +752,6 @@ class DeployTest(unittest.TestCase):
                 name, fdroidserver.deploy.REMOTE_HOSTNAME_REGEX.sub(r'\1', remote_url)
             )
 
-    def test_update_awsbucket_s3cmd(self):
-        # setup parameters for this test run
-        fdroidserver.common.options = mock.Mock()
-        fdroidserver.common.options.no_checksum = True
-        fdroidserver.common.options.verbose = False
-        fdroidserver.common.options.quiet = True
-
-        config = {}
-        fdroidserver.common.fill_config_defaults(config)
-        fdroidserver.deploy.config = config
-        fdroidserver.deploy.config["awsbucket"] = "bucket"
-        fdroidserver.deploy.config["awsaccesskeyid"] = "accesskeyid"
-        fdroidserver.deploy.config["awssecretkey"] = "secretkey"
-        fdroidserver.deploy.config["s3cmd"] = "s3cmd"
-
-        repo_section = 'repo'
-
-        # setup function for asserting subprocess.call invocations
-        call_iteration = 0
-
-        def update_awsbucket_s3cmd_call(cmd):
-            nonlocal call_iteration
-            if call_iteration == 0:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'info',
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}",
-                    ],
-                )
-            elif call_iteration == 1:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'sync',
-                        '--acl-public',
-                        '--quiet',
-                        '--exclude',
-                        'repo/altstore-index.json',
-                        '--exclude',
-                        'repo/altstore-index.json.asc',
-                        '--exclude',
-                        'repo/entry.jar',
-                        '--exclude',
-                        'repo/entry.json',
-                        '--exclude',
-                        'repo/entry.json.asc',
-                        '--exclude',
-                        'repo/index-v1.jar',
-                        '--exclude',
-                        'repo/index-v1.json',
-                        '--exclude',
-                        'repo/index-v1.json.asc',
-                        '--exclude',
-                        'repo/index-v2.json',
-                        '--exclude',
-                        'repo/index-v2.json.asc',
-                        '--exclude',
-                        'repo/index.css',
-                        '--exclude',
-                        'repo/index.html',
-                        '--exclude',
-                        'repo/index.jar',
-                        '--exclude',
-                        'repo/index.png',
-                        '--exclude',
-                        'repo/index.xml',
-                        '--exclude',
-                        'repo/signer-index.jar',
-                        '--exclude',
-                        'repo/signer-index.json',
-                        '--exclude',
-                        'repo/signer-index.json.asc',
-                        '--no-check-md5',
-                        '--skip-existing',
-                        repo_section,
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}/fdroid/",
-                    ],
-                )
-            elif call_iteration == 2:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'sync',
-                        '--acl-public',
-                        '--quiet',
-                        '--exclude',
-                        'repo/altstore-index.json',
-                        '--exclude',
-                        'repo/altstore-index.json.asc',
-                        '--exclude',
-                        'repo/entry.jar',
-                        '--exclude',
-                        'repo/entry.json',
-                        '--exclude',
-                        'repo/entry.json.asc',
-                        '--exclude',
-                        'repo/index-v1.jar',
-                        '--exclude',
-                        'repo/index-v1.json',
-                        '--exclude',
-                        'repo/index-v1.json.asc',
-                        '--exclude',
-                        'repo/index-v2.json',
-                        '--exclude',
-                        'repo/index-v2.json.asc',
-                        '--exclude',
-                        'repo/index.css',
-                        '--exclude',
-                        'repo/index.html',
-                        '--exclude',
-                        'repo/index.jar',
-                        '--exclude',
-                        'repo/index.png',
-                        '--exclude',
-                        'repo/index.xml',
-                        '--exclude',
-                        'repo/signer-index.jar',
-                        '--exclude',
-                        'repo/signer-index.json',
-                        '--exclude',
-                        'repo/signer-index.json.asc',
-                        '--no-check-md5',
-                        repo_section,
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}/fdroid/",
-                    ],
-                )
-            elif call_iteration == 3:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'sync',
-                        '--acl-public',
-                        '--quiet',
-                        '--delete-removed',
-                        '--delete-after',
-                        '--no-check-md5',
-                        repo_section,
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}/fdroid/",
-                    ],
-                )
-            else:
-                self.fail('unexpected subprocess.call invocation')
-            call_iteration += 1
-            return 0
-
-        with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
-            os.mkdir('repo')
-            os.symlink('repo/com.example.sym.apk', 'Sym.apk')
-            os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc')
-            os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig')
-            with mock.patch('subprocess.call', side_effect=update_awsbucket_s3cmd_call):
-                fdroidserver.deploy.update_awsbucket_s3cmd(repo_section)
-        self.assertEqual(call_iteration, 4, 'expected 4 invocations of subprocess.call')
-
-    def test_update_awsbucket_s3cmd_in_index_only_mode(self):
-        # setup parameters for this test run
-        fdroidserver.common.options = mock.Mock()
-        fdroidserver.common.options.no_checksum = True
-        fdroidserver.common.options.verbose = False
-        fdroidserver.common.options.quiet = True
-
-        config = {}
-        fdroidserver.common.fill_config_defaults(config)
-        fdroidserver.deploy.config = config
-        fdroidserver.deploy.config["awsbucket"] = "bucket"
-        fdroidserver.deploy.config["awsaccesskeyid"] = "accesskeyid"
-        fdroidserver.deploy.config["awssecretkey"] = "secretkey"
-        fdroidserver.deploy.config["s3cmd"] = "s3cmd"
-
-        repo_section = 'repo'
-
-        # setup function for asserting subprocess.call invocations
-        call_iteration = 0
-
-        def update_awsbucket_s3cmd_call(cmd):
-            nonlocal call_iteration
-            if call_iteration == 0:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'info',
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}",
-                    ],
-                )
-            elif call_iteration == 1:
-                self.assertListEqual(
-                    cmd,
-                    [
-                        's3cmd',
-                        f"--config={fdroidserver.deploy.AUTO_S3CFG}",
-                        'sync',
-                        '--acl-public',
-                        '--quiet',
-                        '--include',
-                        'repo/altstore-index.json',
-                        '--include',
-                        'repo/altstore-index.json.asc',
-                        '--include',
-                        'repo/entry.jar',
-                        '--include',
-                        'repo/entry.json',
-                        '--include',
-                        'repo/entry.json.asc',
-                        '--include',
-                        'repo/index-v1.jar',
-                        '--include',
-                        'repo/index-v1.json',
-                        '--include',
-                        'repo/index-v1.json.asc',
-                        '--include',
-                        'repo/index-v2.json',
-                        '--include',
-                        'repo/index-v2.json.asc',
-                        '--include',
-                        'repo/index.css',
-                        '--include',
-                        'repo/index.html',
-                        '--include',
-                        'repo/index.jar',
-                        '--include',
-                        'repo/index.png',
-                        '--include',
-                        'repo/index.xml',
-                        '--include',
-                        'repo/signer-index.jar',
-                        '--include',
-                        'repo/signer-index.json',
-                        '--include',
-                        'repo/signer-index.json.asc',
-                        '--delete-removed',
-                        '--delete-after',
-                        '--no-check-md5',
-                        repo_section,
-                        f"s3://{fdroidserver.deploy.config['awsbucket']}/fdroid/",
-                    ],
-                )
-            else:
-                self.fail('unexpected subprocess.call invocation')
-            call_iteration += 1
-            return 0
-
-        with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
-            os.mkdir('repo')
-            os.symlink('repo/com.example.sym.apk', 'Sym.apk')
-            os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc')
-            os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig')
-            with mock.patch('subprocess.call', side_effect=update_awsbucket_s3cmd_call):
-                fdroidserver.deploy.update_awsbucket_s3cmd(
-                    repo_section, is_index_only=True
-                )
-        self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call')
-
-    def test_update_awsbucket_libcloud(self):
-        from libcloud.storage.base import Container
-
-        # setup parameters for this test run
-        fdroidserver.common.options = mock.Mock()
-        fdroidserver.common.options.no_checksum = True
-        fdroidserver.common.options.verbose = False
-        fdroidserver.common.options.quiet = True
-
-        config = {}
-        fdroidserver.common.fill_config_defaults(config)
-        fdroidserver.deploy.config = config
-        fdroidserver.deploy.config["awsbucket"] = "bucket"
-        fdroidserver.deploy.config["awsaccesskeyid"] = "accesskeyid"
-        fdroidserver.deploy.config["awssecretkey"] = "secretkey"
-        fdroidserver.deploy.config["s3cmd"] = "s3cmd"
-
-        repo_section = 'repo'
-
-        os.chdir(self.testdir)
-        repo = Path('repo')
-        repo.mkdir(parents=True)
-        fake_apk = repo / 'Sym.apk'
-        with fake_apk.open('w') as fp:
-            fp.write('not an APK, but has the right filename')
-        fake_index = repo / fdroidserver.common.INDEX_FILES[0]
-        with fake_index.open('w') as fp:
-            fp.write('not an index, but has the right filename')
-
-        with mock.patch(
-            'libcloud.storage.drivers.s3.S3StorageDriver'
-        ) as mock_driver_class:
-            mock_driver = mock_driver_class.return_value
-            mock_container = mock.MagicMock(spec=Container)
-            mock_container.list_objects.return_value = [
-                mock.MagicMock(name='Sym.apk'),
-                mock.MagicMock(name=fdroidserver.common.INDEX_FILES[0]),
-            ]
-
-            mock_driver.get_container.return_value = mock_container
-            mock_driver.upload_object_via_stream.return_value = None
-
-            fdroidserver.deploy.update_awsbucket_libcloud(repo_section)
-
-            mock_driver.get_container.assert_called_once_with(
-                container_name=fdroidserver.deploy.config["awsbucket"]
-            )
-            mock_container.list_objects.assert_called_once_with()
-            files_to_upload = [
-                'fdroid/repo/Sym.apk',
-                f"fdroid/repo/{fdroidserver.common.INDEX_FILES[0]}",
-            ]
-            calls = [
-                mock.call(
-                    iterator=mock.ANY,
-                    container=mock_container,
-                    object_name=file,
-                    extra={'acl': 'public-read'},
-                )
-                for file in files_to_upload
-            ]
-            mock_driver.upload_object_via_stream.assert_has_calls(calls, any_order=True)
-            self.assertEqual(mock_driver.upload_object_via_stream.call_count, 2)
-
-    def test_update_awsbucket_libcloud_in_index_only_mode(self):
-        from libcloud.storage.base import Container
-
-        # setup parameters for this test run
-        fdroidserver.common.options = mock.Mock()
-        fdroidserver.common.options.no_checksum = True
-        fdroidserver.common.options.verbose = False
-        fdroidserver.common.options.quiet = True
-
-        config = {}
-        fdroidserver.common.fill_config_defaults(config)
-        fdroidserver.deploy.config = config
-        fdroidserver.deploy.config["awsbucket"] = "bucket"
-        fdroidserver.deploy.config["awsaccesskeyid"] = "accesskeyid"
-        fdroidserver.deploy.config["awssecretkey"] = "secretkey"
-        fdroidserver.deploy.config["s3cmd"] = "s3cmd"
-
-        repo_section = 'repo'
-
-        os.chdir(self.testdir)
-        repo = Path('repo')
-        repo.mkdir(parents=True)
-        fake_apk = repo / 'Sym.apk'
-        with fake_apk.open('w') as fp:
-            fp.write('not an APK, but has the right filename')
-        fake_index = repo / fdroidserver.common.INDEX_FILES[0]
-        with fake_index.open('w') as fp:
-            fp.write('not an index, but has the right filename')
-
-        with mock.patch(
-            'libcloud.storage.drivers.s3.S3StorageDriver'
-        ) as mock_driver_class:
-            mock_driver = mock_driver_class.return_value
-            mock_container = mock.MagicMock(spec=Container)
-            mock_container.list_objects.return_value = [
-                mock.MagicMock(name='Sym.apk'),
-                mock.MagicMock(name=fdroidserver.common.INDEX_FILES[0]),
-            ]
-
-            mock_driver.get_container.return_value = mock_container
-            mock_driver.upload_object_via_stream.return_value = None
-
-            fdroidserver.deploy.update_awsbucket_libcloud(
-                repo_section, is_index_only=True
-            )
-
-            mock_driver.get_container.assert_called_once_with(
-                container_name=fdroidserver.deploy.config["awsbucket"]
-            )
-            mock_container.list_objects.assert_called_once_with()
-            files_to_upload = [f"fdroid/repo/{fdroidserver.common.INDEX_FILES[0]}"]
-            calls = [
-                mock.call(
-                    iterator=mock.ANY,
-                    container=mock_container,
-                    object_name=file,
-                    extra={'acl': 'public-read'},
-                )
-                for file in files_to_upload
-            ]
-            mock_driver.upload_object_via_stream.assert_has_calls(
-                calls,
-                any_order=False,
-            )
-            self.assertEqual(mock_driver.upload_object_via_stream.call_count, 1)
 
     def test_update_servergitmirrors(self):
         # setup parameters for this test run
         fdroidserver.common.options = mock.Mock()
tests/test_integration.py

@@ -1,9 +1,11 @@
+import configparser
 import itertools
 import os
 import platform
 import re
 import shlex
 import shutil
+import stat
 import subprocess
 import sys
 import threading
@@ -19,7 +21,7 @@ except ModuleNotFoundError:
 
 from fdroidserver._yaml import yaml, yaml_dumper
 
-from .shared_test_code import mkdir_testfiles
+from .shared_test_code import mkdir_testfiles, VerboseFalseOptions
 
 # TODO: port generic tests that use index.xml to index-v2 (test that
 # explicitly test index-v0 should still use index.xml)
@@ -34,12 +36,17 @@ except KeyError:
     WORKSPACE = basedir.parent
 
 from fdroidserver import common
+from fdroidserver import deploy
 
 conf = {"sdk_path": os.getenv("ANDROID_HOME", "")}
 common.find_apksigner(conf)
 USE_APKSIGNER = "apksigner" in conf
 
 
+def docker_socket_exists(path="/var/run/docker.sock"):
+    return os.path.exists(path) and stat.S_ISSOCK(os.stat(path).st_mode)
+
+
 @unittest.skipIf(sys.byteorder == 'big', 'androguard is not ported to big-endian')
 class IntegrationTest(unittest.TestCase):
     @classmethod
@@ -64,6 +71,7 @@ class IntegrationTest(unittest.TestCase):
         self.testdir = mkdir_testfiles(WORKSPACE, self)
         self.tmp_repo_root = self.testdir / "fdroid"
         self.tmp_repo_root.mkdir(parents=True)
+        deploy.config = {}
         os.chdir(self.tmp_repo_root)
 
     def tearDown(self):
@@ -1556,3 +1564,114 @@ class IntegrationTest(unittest.TestCase):
             self.fdroid_cmd + ["checkupdates", "--allow-dirty", "--auto", "-v"]
         )
         self.assertIn("CurrentVersionCode: 1", Path("metadata/fake.yml").read_text())
+
+    @unittest.skipUnless(docker_socket_exists(), "Docker is not available")
+    def test_update_remote_storage_with_rclone_and_minio(self):
+        try:
+            from testcontainers.minio import MinioContainer
+        except ImportError:
+            self.skipTest('Requires testcontainers.minio to run')
+        with MinioContainer(image="quay.io/minio/minio:latest") as minio:
+            # Set up minio bucket
+            client = minio.get_client()
+            client.make_bucket('test-bucket')
+            host_ip = minio.get_config()['endpoint']
+
+            # Set up Repo dir
+            os.chdir(self.testdir)
+            repo_section = 'repo'
+            repo = Path(repo_section)
+            repo.mkdir(parents=True, exist_ok=True)
+            shutil.copy(basedir / 'SpeedoMeterApp.main_1.apk', repo)
+            shutil.copy(basedir / 'repo/index-v2.json', repo)
+
+            # write out config for test use
+            rclone_config = configparser.ConfigParser()
+            rclone_config.add_section("test-minio-config")
+            rclone_config.set("test-minio-config", "type", "s3")
+            rclone_config.set("test-minio-config", "provider", "Minio")
+            rclone_config.set("test-minio-config", "endpoint", "http://" + host_ip)
+            rclone_config.set("test-minio-config", "acl", "public-read")
+            rclone_config.set("test-minio-config", "env_auth", "true")
+            rclone_config.set("test-minio-config", "region", "us-east-1")
+            rclone_config.set("test-minio-config", "access_key_id", "minioadmin")
+            rclone_config.set("test-minio-config", "secret_access_key", "minioadmin")
+
+            rclone_config_path = Path('rclone_config_path')
+            rclone_config_path.mkdir(parents=True, exist_ok=True)
+            rclone_file = rclone_config_path / 'rclone-minio.conf'
+            with open(rclone_file, "w", encoding="utf-8") as configfile:
+                rclone_config.write(configfile)
+
+            # set up config for run
+            awsbucket = "test-bucket"
+            deploy.config['awsbucket'] = awsbucket
+            deploy.config['rclone_config'] = "test-minio-config"
+            deploy.config['path_to_custom_rclone_config'] = str(rclone_file)
+            common.options = VerboseFalseOptions
+
+            # call function
+            deploy.update_remote_storage_with_rclone(repo_section, awsbucket)
+
+            # check if apk and index file are available
+            bucket_content = client.list_objects('test-bucket', recursive=True)
+            files_in_bucket = {obj.object_name for obj in bucket_content}
+            self.assertEqual(
+                files_in_bucket,
+                {'fdroid/repo/SpeedoMeterApp.main_1.apk', 'fdroid/repo/index-v2.json'},
+            )
+
+    @unittest.skipUnless(docker_socket_exists(), "Docker is not available")
+    def test_update_remote_storage_with_rclone_and_minio_in_index_only_mode(self):
+        try:
+            from testcontainers.minio import MinioContainer
+        except ImportError:
+            self.skipTest('Requires testcontainers.minio to run')
+        with MinioContainer(image="quay.io/minio/minio:latest") as minio:
+            # Set up minio bucket
+            client = minio.get_client()
+            client.make_bucket('test-bucket')
+            host_ip = minio.get_config()['endpoint']
+
+            # Set up Repo dir
+            os.chdir(self.testdir)
+            repo_section = 'repo'
+            repo = Path(repo_section)
+            repo.mkdir(parents=True, exist_ok=True)
+            shutil.copy(basedir / 'SpeedoMeterApp.main_1.apk', repo)
+            shutil.copy(basedir / 'repo/index-v2.json', repo)
+
+            # write out config for test use
+            rclone_config = configparser.ConfigParser()
+            rclone_config.add_section("test-minio-config")
+            rclone_config.set("test-minio-config", "type", "s3")
+            rclone_config.set("test-minio-config", "provider", "Minio")
+            rclone_config.set("test-minio-config", "endpoint", "http://" + host_ip)
+            rclone_config.set("test-minio-config", "acl", "public-read")
+            rclone_config.set("test-minio-config", "env_auth", "true")
+            rclone_config.set("test-minio-config", "region", "us-east-1")
+            rclone_config.set("test-minio-config", "access_key_id", "minioadmin")
+            rclone_config.set("test-minio-config", "secret_access_key", "minioadmin")
+
+            rclone_config_path = Path('rclone_config_path')
+            rclone_config_path.mkdir(parents=True, exist_ok=True)
+            rclone_file = rclone_config_path / 'rclone-minio.conf'
+            with open(rclone_file, "w", encoding="utf-8") as configfile:
+                rclone_config.write(configfile)
+
+            # set up config for run
+            awsbucket = "test-bucket"
+            deploy.config['awsbucket'] = awsbucket
+            deploy.config['rclone_config'] = "test-minio-config"
+            deploy.config['path_to_custom_rclone_config'] = str(rclone_file)
+            common.options = VerboseFalseOptions
+
+            # call function
+            deploy.update_remote_storage_with_rclone(
+                repo_section, awsbucket, is_index_only=True
+            )
+
+            # check if apk and index file are available
+            bucket_content = client.list_objects('test-bucket', recursive=True)
+            files_in_bucket = {obj.object_name for obj in bucket_content}
+            self.assertEqual(files_in_bucket, {'fdroid/repo/index-v2.json'})
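For context, the hard-coded minioadmin credentials above are MinIO's well-known defaults, which testcontainers' MinioContainer uses unless told otherwise; a minimal standalone check (requires Docker and testcontainers[minio], bucket name assumed):

from testcontainers.minio import MinioContainer

with MinioContainer(image="quay.io/minio/minio:latest") as minio:
    client = minio.get_client()             # already-authenticated MinIO SDK client
    client.make_bucket('demo-bucket')
    print(minio.get_config()['endpoint'])   # host:port that the rclone remote points at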