mirror of https://github.com/f-droid/fdroidserver.git (synced 2025-09-13 22:42:29 +03:00)

Merge branch 'net-install-tests' into 'master'

tests and code format from %"mirroring resilience" work

See merge request fdroid/fdroidserver!1451

Commit 2f556a9026

7 changed files with 167 additions and 55 deletions
fdroidserver/common.py

@@ -486,14 +486,7 @@ def read_config(opts=None):
     fill_config_defaults(config)

     if 'serverwebroot' in config:
-        if isinstance(config['serverwebroot'], str):
-            roots = [{'url': config['serverwebroot']}]
-        elif all(isinstance(item, str) for item in config['serverwebroot']):
-            roots = [{'url': i} for i in config['serverwebroot']]
-        elif all(isinstance(item, dict) for item in config['serverwebroot']):
-            roots = config['serverwebroot']
-        else:
-            raise TypeError(_('only accepts strings, lists, and tuples'))
+        roots = parse_mirrors_config(config['serverwebroot'])
         rootlist = []
         for d in roots:
             # since this is used with rsync, where trailing slashes have
@@ -506,15 +499,7 @@ def read_config(opts=None):
         config['serverwebroot'] = rootlist

     if 'servergitmirrors' in config:
-        if isinstance(config['servergitmirrors'], str):
-            roots = [{"url": config['servergitmirrors']}]
-        elif all(isinstance(item, str) for item in config['servergitmirrors']):
-            roots = [{'url': i} for i in config['servergitmirrors']]
-        elif all(isinstance(item, dict) for item in config['servergitmirrors']):
-            roots = config['servergitmirrors']
-        else:
-            raise TypeError(_('only accepts strings, lists, and tuples'))
-        config['servergitmirrors'] = roots
+        config['servergitmirrors'] = parse_mirrors_config(config['servergitmirrors'])

         limit = config['git_mirror_size_limit']
         config['git_mirror_size_limit'] = parse_human_readable_size(limit)
@@ -557,6 +542,18 @@ def read_config(opts=None):
     return config


+def parse_mirrors_config(mirrors):
+    """Mirrors can be specified as a string, list of strings, or dictionary map."""
+    if isinstance(mirrors, str):
+        return [{"url": mirrors}]
+    elif all(isinstance(item, str) for item in mirrors):
+        return [{'url': i} for i in mirrors]
+    elif all(isinstance(item, dict) for item in mirrors):
+        return mirrors
+    else:
+        raise TypeError(_('only accepts strings, lists, and tuples'))
+
+
 def file_entry(filename, hash_value=None):
     meta = {}
     meta["name"] = "/" + filename.split("/", 1)[1]
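As an aside (not part of the diff), here is a rough sketch of what the new helper returns for each accepted form; the mirror values below are invented:

# Illustration only; the mirror values are invented.
from fdroidserver import common

print(common.parse_mirrors_config('foo@example.com:/var/www'))
# [{'url': 'foo@example.com:/var/www'}]

print(common.parse_mirrors_config(['https://a.example.com/fdroid', 'https://b.example.com/fdroid']))
# [{'url': 'https://a.example.com/fdroid'}, {'url': 'https://b.example.com/fdroid'}]

print(common.parse_mirrors_config([{'url': 'https://a.example.com/fdroid'}]))
# already a list of dicts, returned unchanged

Every accepted form is normalized to a list of dicts with a 'url' key, so read_config() no longer needs the duplicated type checks removed above.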
fdroidserver/install.py

@@ -47,17 +47,31 @@ def main():
     global options, config

     # Parse command line...
-    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
+    parser = ArgumentParser(
+        usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
+    )
     common.setup_global_opts(parser)
-    parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))
-    parser.add_argument("-a", "--all", action="store_true", default=False,
-                        help=_("Install all signed applications available"))
+    parser.add_argument(
+        "appid",
+        nargs='*',
+        help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
+    )
+    parser.add_argument(
+        "-a",
+        "--all",
+        action="store_true",
+        default=False,
+        help=_("Install all signed applications available"),
+    )
     options = parser.parse_args()

     common.set_console_logging(options.verbose)

     if not options.appid and not options.all:
-        parser.error(_("option %s: If you really want to install all the signed apps, use --all") % "all")
+        parser.error(
+            _("option %s: If you really want to install all the signed apps, use --all")
+            % "all"
+        )

     config = common.read_config(options)
@@ -67,14 +81,12 @@ def main():
         sys.exit(0)

     if options.appid:
-
         vercodes = common.read_pkg_args(options.appid, True)
         common.get_metadata_files(vercodes)  # only check appids
         apks = {appid: None for appid in vercodes}

         # Get the signed APK with the highest vercode
         for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):
-
             try:
                 appid, vercode = common.publishednameinfo(apkfile)
             except FDroidException:
|
@ -90,9 +102,10 @@ def main():
|
||||||
raise FDroidException(_("No signed APK available for %s") % appid)
|
raise FDroidException(_("No signed APK available for %s") % appid)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
apks = {
|
||||||
apks = {common.publishednameinfo(apkfile)[0]: apkfile for apkfile in
|
common.publishednameinfo(apkfile)[0]: apkfile
|
||||||
sorted(glob.glob(os.path.join(output_dir, '*.apk')))}
|
for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk')))
|
||||||
|
}
|
||||||
|
|
||||||
for appid, apk in apks.items():
|
for appid, apk in apks.items():
|
||||||
# Get device list each time to avoid device not found errors
|
# Get device list each time to avoid device not found errors
|
||||||
|
@ -101,7 +114,11 @@ def main():
|
||||||
raise FDroidException(_("No attached devices found"))
|
raise FDroidException(_("No attached devices found"))
|
||||||
logging.info(_("Installing %s...") % apk)
|
logging.info(_("Installing %s...") % apk)
|
||||||
for dev in devs:
|
for dev in devs:
|
||||||
logging.info(_("Installing '{apkfilename}' on {dev}...").format(apkfilename=apk, dev=dev))
|
logging.info(
|
||||||
|
_("Installing '{apkfilename}' on {dev}...").format(
|
||||||
|
apkfilename=apk, dev=dev
|
||||||
|
)
|
||||||
|
)
|
||||||
p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
|
p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
|
||||||
fail = ""
|
fail = ""
|
||||||
for line in p.output.splitlines():
|
for line in p.output.splitlines():
|
||||||
|
@ -111,11 +128,17 @@ def main():
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if fail == "INSTALL_FAILED_ALREADY_EXISTS":
|
if fail == "INSTALL_FAILED_ALREADY_EXISTS":
|
||||||
logging.warning(_('"{apkfilename}" is already installed on {dev}.')
|
logging.warning(
|
||||||
.format(apkfilename=apk, dev=dev))
|
_('"{apkfilename}" is already installed on {dev}.').format(
|
||||||
|
apkfilename=apk, dev=dev
|
||||||
|
)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
raise FDroidException(_("Failed to install '{apkfilename}' on {dev}: {error}")
|
raise FDroidException(
|
||||||
.format(apkfilename=apk, dev=dev, error=fail))
|
_("Failed to install '{apkfilename}' on {dev}: {error}").format(
|
||||||
|
apkfilename=apk, dev=dev, error=fail
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
logging.info('\n' + _('Finished'))
|
logging.info('\n' + _('Finished'))
|
||||||
|
|
||||||
|
|
|
fdroidserver/net.py

@@ -29,12 +29,20 @@ HEADERS = {'User-Agent': 'F-Droid'}


 def download_file(url, local_filename=None, dldir='tmp', retries=3, backoff_factor=0.1):
+    """Try hard to download the file, including retrying on failures.
+
+    This has two retry cycles, one inside of the requests session, the
+    other provided by this function. The requests retry logic applies
+    to failed DNS lookups, socket connections and connection timeouts,
+    never to requests where data has made it to the server. This
+    handles ChunkedEncodingError during transfer in its own retry
+    loop. This can result in more retries than are specified in the
+    retries parameter.
+
+    """
     filename = urllib.parse.urlparse(url).path.split('/')[-1]
     if local_filename is None:
         local_filename = os.path.join(dldir, filename)
-    # Retry applies to failed DNS lookups, socket connections and connection
-    # timeouts, never to requests where data has made it to the server; so we
-    # handle ChunkedEncodingError during transfer ourselves.
     for i in range(retries + 1):
         if retries:
             max_retries = Retry(total=retries - i, backoff_factor=backoff_factor)
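The docstring above describes two retry cycles. The following is a rough, hedged sketch of that pattern, not fdroidserver's actual code; the function name and parameters are invented. It combines urllib3's Retry mounted on a requests session with an outer loop that retries ChunkedEncodingError:

# Sketch of the two-cycle retry pattern described above; names are illustrative.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def fetch_with_retries(url, path, retries=3, backoff_factor=0.1):
    """Sketch of the double retry cycle; not fdroidserver's implementation."""
    for i in range(retries + 1):
        session = requests.Session()
        if retries:
            # Inner cycle: urllib3 Retry covers DNS, connect and timeout
            # errors, i.e. failures before any data reached the server.
            max_retries = Retry(total=retries - i, backoff_factor=backoff_factor)
            session.mount('http://', HTTPAdapter(max_retries=max_retries))
            session.mount('https://', HTTPAdapter(max_retries=max_retries))
        try:
            with session.get(url, stream=True) as r:
                r.raise_for_status()
                with open(path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
            return path
        except requests.exceptions.ChunkedEncodingError:
            # Outer cycle: the transfer broke mid-stream, which urllib3
            # will not retry on its own, so loop around and try again.
            if i == retries:
                raise

Because the outer loop re-enters the inner Retry machinery each pass, the total number of attempts can exceed the retries parameter, as the docstring notes.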
pyproject.toml

@@ -41,7 +41,6 @@ force-exclude = '''(
   | fdroidserver/deploy\.py
   | fdroidserver/import_subcommand\.py
   | fdroidserver/index\.py
-  | fdroidserver/install\.py
   | fdroidserver/metadata\.py
   | fdroidserver/nightly\.py
   | fdroidserver/publish\.py
@@ -53,7 +52,6 @@ force-exclude = '''(
   | tests/extra/manual-vmtools-test\.py
   | tests/gradle-release-checksums\.py
   | tests/openssl-version-check-test\.py
-  | tests/testcommon\.py
   | tests/valid-package-names/test\.py
   | tests/checkupdates\.TestCase
   | tests/common\.TestCase
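Dropping these two entries from black's force-exclude list means black now formats fdroidserver/install.py and tests/testcommon.py, which is what the reformatting hunks elsewhere in this commit reflect. A quick local check might look like this (illustrative only, not part of the diff):

# Illustrative only: verify the newly included files are black-clean.
import subprocess

subprocess.run(
    ['black', '--check', 'fdroidserver/install.py', 'tests/testcommon.py'],
    check=True,
)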
tests/common.TestCase

@@ -12,6 +12,7 @@ import logging
 import optparse
 import os
 import re
+import ruamel.yaml
 import shutil
 import subprocess
 import sys
@@ -2892,6 +2893,27 @@ class CommonTest(unittest.TestCase):
             fdroidserver.common.read_config()['serverwebroot'],
         )

+    def test_parse_mirrors_config_str(self):
+        s = 'foo@example.com:/var/www'
+        mirrors = ruamel.yaml.YAML(typ='safe').load("""'%s'""" % s)
+        self.assertEqual(
+            [{'url': s}], fdroidserver.common.parse_mirrors_config(mirrors)
+        )
+
+    def test_parse_mirrors_config_list(self):
+        s = 'foo@example.com:/var/www'
+        mirrors = ruamel.yaml.YAML(typ='safe').load("""- '%s'""" % s)
+        self.assertEqual(
+            [{'url': s}], fdroidserver.common.parse_mirrors_config(mirrors)
+        )
+
+    def test_parse_mirrors_config_dict(self):
+        s = 'foo@example.com:/var/www'
+        mirrors = ruamel.yaml.YAML(typ='safe').load("""- url: '%s'""" % s)
+        self.assertEqual(
+            [{'url': s}], fdroidserver.common.parse_mirrors_config(mirrors)
+        )
+

 if __name__ == "__main__":
     os.chdir(os.path.dirname(__file__))
tests/net.TestCase

@@ -4,8 +4,13 @@ import inspect
 import logging
 import optparse
 import os
+import random
+import requests
+import socket
 import sys
 import tempfile
+import threading
+import time
 import unittest
 from unittest.mock import MagicMock, patch
@@ -20,6 +25,57 @@ from fdroidserver import common, net
 from pathlib import Path


+class RetryServer:
+    """A stupid simple HTTP server that can fail to connect"""
+
+    def __init__(self, port=None, failures=3):
+        self.port = port
+        if self.port is None:
+            self.port = random.randint(1024, 65535)
+        self.failures = failures
+        self.stop_event = threading.Event()
+        threading.Thread(target=self.run_fake_server).start()
+
+    def stop(self):
+        self.stop_event.set()
+
+    def run_fake_server(self):
+        server_sock = socket.socket()
+        server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        server_sock.bind(('127.0.0.1', self.port))
+        server_sock.listen(5)
+        server_sock.settimeout(5)
+        time.sleep(0.001)  # wait for it to start
+
+        while not self.stop_event.is_set():
+            self.failures -= 1
+            conn = None
+            try:
+                conn, address = server_sock.accept()
+                conn.settimeout(5)
+            except TimeoutError:
+                break
+            if self.failures > 0:
+                conn.close()
+                continue
+            conn.recv(8192)  # request ignored
+            self.reply = b"""HTTP/1.1 200 OK
+                Date: Mon, 26 Feb 2024 09:00:14 GMT
+                Connection: close
+                Content-Type: text/html
+
+                <HTML><BODY>Hello World!</HEAD></HTML>
+            """
+            self.reply = self.reply.replace(b'    ', b'')  # dedent
+            conn.sendall(self.reply)
+            conn.shutdown(socket.SHUT_RDWR)
+            conn.close()
+
+            self.stop_event.wait(timeout=1)
+        server_sock.shutdown(socket.SHUT_RDWR)
+        server_sock.close()
+
+
 class NetTest(unittest.TestCase):
     basedir = Path(__file__).resolve().parent
@@ -39,10 +95,10 @@ class NetTest(unittest.TestCase):
             return MagicMock()

         requests_get.side_effect = _get
-        f = net.download_file('https://f-droid.org/repo/index-v1.jar', retries=0)
-        self.assertTrue(requests_get.called)
+        f = net.download_file('https://f-droid.org/repo/entry.jar', retries=0)
+        requests_get.assert_called()
         self.assertTrue(os.path.exists(f))
-        self.assertEqual('tmp/index-v1.jar', f)
+        self.assertEqual('tmp/entry.jar', f)

         f = net.download_file(
             'https://d-05.example.com/custom/com.downloader.aegis-3175421.apk?_fn=QVBLUHVyZV92My4xNy41NF9hcGtwdXJlLmNvbS5hcGs&_p=Y29tLmFwa3B1cmUuYWVnb24&am=6avvTpfJ1dMl9-K6JYKzQw&arg=downloader%3A%2F%2Fcampaign%2F%3Futm_medium%3Ddownloader%26utm_source%3Daegis&at=1652080635&k=1f6e58465df3a441665e585719ab0b13627a117f&r=https%3A%2F%2Fdownloader.com%2Fdownloader-app.html%3Ficn%3Daegis%26ici%3Dimage_qr&uu=http%3A%2F%2F172.16.82.1%2Fcustom%2Fcom.downloader.aegis-3175421.apk%3Fk%3D3fb9c4ae0be578206f6a1c330736fac1627a117f',
@@ -52,6 +108,20 @@ class NetTest(unittest.TestCase):
         self.assertTrue(os.path.exists(f))
         self.assertEqual('tmp/com.downloader.aegis-3175421.apk', f)

+    def test_download_file_retries(self):
+        server = RetryServer()
+        f = net.download_file('http://localhost:%d/f.txt' % server.port)
+        # strip the HTTP headers and compare the reply
+        self.assertEqual(server.reply.split(b'\n\n')[1], Path(f).read_bytes())
+        server.stop()
+
+    def test_download_file_retries_not_forever(self):
+        """The retry logic should eventually exit with an error."""
+        server = RetryServer(failures=5)
+        with self.assertRaises(requests.exceptions.ConnectionError):
+            net.download_file('http://localhost:%d/f.txt' % server.port)
+        server.stop()
+

 if __name__ == "__main__":
     os.chdir(os.path.dirname(__file__))
tests/testcommon.py

@@ -20,10 +20,8 @@ import sys
 import tempfile


-class TmpCwd():
-    """Context-manager for temporarily changing the current working
-    directory.
-    """
+class TmpCwd:
+    """Context-manager for temporarily changing the current working directory."""

     def __init__(self, new_cwd):
         self.new_cwd = new_cwd
@@ -36,9 +34,8 @@ class TmpCwd():
         os.chdir(self.orig_cwd)


-class TmpPyPath():
-    """Context-manager for temporarily adding a direcory to python path
-    """
+class TmpPyPath:
+    """Context-manager for temporarily adding a directory to Python path."""

     def __init__(self, additional_path):
         self.additional_path = additional_path
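As an illustration (not from the diff), these two context managers are typically used in tests roughly like this; the paths below are invented and the import assumes the snippet runs from the tests/ directory:

# Hypothetical usage of the helpers above; paths are made up.
import os
import tempfile

from testcommon import TmpCwd, TmpPyPath

with tempfile.TemporaryDirectory() as tmpdir:
    with TmpCwd(tmpdir):
        # the code under test now sees tmpdir as the current directory
        assert os.getcwd() == os.path.realpath(tmpdir)
    with TmpPyPath(tmpdir):
        # modules dropped into tmpdir are importable inside this block
        pass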
@@ -51,14 +48,11 @@ class TmpPyPath():


 def mock_open_to_str(mock):
-    """
-    helper function for accessing all data written into a
-    unittest.mock.mock_open() instance as a string.
-    """
+    """For accessing all data written into a unittest.mock.mock_open() instance as a string."""

-    return "".join([
-        x.args[0] for x in mock.mock_calls if str(x).startswith("call().write(")
-    ])
+    return "".join(
+        [x.args[0] for x in mock.mock_calls if str(x).startswith("call().write(")]
+    )


 def mkdtemp():
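And a hedged usage sketch for mock_open_to_str() (not from the diff; the path and written text are invented): patch open() with mock_open, let the code under test write, then read back everything that was written as one string:

# Hypothetical example of mock_open_to_str(); path and text are made up.
from unittest import mock

from testcommon import mock_open_to_str


def write_greeting(path):
    with open(path, 'w') as fp:
        fp.write('hello ')
        fp.write('world\n')


with mock.patch('builtins.open', mock.mock_open()) as mocked_open:
    write_greeting('/fake/path.txt')
    assert mock_open_to_str(mocked_open) == 'hello world\n'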