index: download_repo_index_v2() uses mirrors

test_download_repo_index_v2_url_parsing is no longer needed, since everything
it tested is now covered by test_download_repo_index_v2.
author Hans-Christoph Steiner 2024-03-04 12:44:20 +01:00
parent 2e3f6d273a
commit 59fcfa5dec
6 changed files with 119 additions and 81 deletions
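
For callers, the practical effect is that the index is now fetched through the mirror machinery and the etag/timeout arguments become inert. A rough usage sketch, not part of this commit; the URL carries the published f-droid.org repo fingerprint, which the default verify_fingerprint=True requires:

from fdroidserver import index

url = (
    'https://f-droid.org/repo'
    '?fingerprint=43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB'
)
data, etag = index.download_repo_index_v2(url)
# The second return value is now always None; etag and timeout stay in the
# signature only for API compatibility with the older index functions.
print(sorted(data))  # index-v2 top-level keys: ['packages', 'repo']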

fdroidserver/common.py

@@ -61,7 +61,7 @@ from base64 import urlsafe_b64encode
 from binascii import hexlify
 from datetime import datetime, timedelta, timezone
 from queue import Queue
-from urllib.parse import urlparse, urlunparse
+from urllib.parse import urlparse, urlsplit, urlunparse
 from zipfile import ZipFile
 
 import fdroidserver.metadata
@@ -619,6 +619,23 @@ def parse_mirrors_config(mirrors):
         raise TypeError(_('only accepts strings, lists, and tuples'))
 
 
+def get_mirrors(url, filename=None):
+    """Get list of dict entries for mirrors, appending filename if provided."""
+    # TODO use cached index if it exists
+    if isinstance(url, str):
+        url = urlsplit(url)
+
+    if url.netloc == 'f-droid.org':
+        mirrors = FDROIDORG_MIRRORS
+    else:
+        mirrors = parse_mirrors_config(url.geturl())
+
+    if filename:
+        return append_filename_to_mirrors(filename, mirrors)
+    else:
+        return mirrors
+
+
 def append_filename_to_mirrors(filename, mirrors):
     """Append the filename to all "url" entries in the mirrors dict."""
     appended = copy.deepcopy(mirrors)
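
get_mirrors() is what turns a repository URL into the mirror list that net.download_using_mirrors() consumes. A minimal sketch of the intended behavior, assuming parse_mirrors_config() wraps a bare URL string into a single {'url': ...} entry (the example.org address is made up):

from urllib.parse import urlsplit

from fdroidserver import common

# f-droid.org resolves to the hard-coded FDROIDORG_MIRRORS list, with the
# requested filename appended to each mirror's "url" entry.
fdroid_mirrors = common.get_mirrors(urlsplit('https://f-droid.org/repo'), 'entry.jar')

# Any other repo URL becomes a single-entry mirror list pointing at itself,
# roughly [{'url': 'https://example.org/fdroid/repo/entry.jar'}].
other_mirrors = common.get_mirrors('https://example.org/fdroid/repo', 'entry.jar')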

fdroidserver/index.py

@@ -1633,7 +1633,7 @@ def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout=
     return index, new_etag
 
 
-def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=600):
+def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=None):
     """Download and verifies index v2 file, then returns its data.
 
     Downloads the repository index from the given :param url_str and
@@ -1652,8 +1652,13 @@ def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=
     - The new eTag as returned by the HTTP request
 
     """
     etag  # etag is unused but needs to be there to keep the same API as the earlier functions.
+
+    if timeout is not None:
+        logging.warning('"timeout" argument of download_repo_index_v2() is deprecated!')
+
     url = urllib.parse.urlsplit(url_str)
+
     fingerprint = None
     if verify_fingerprint:
         query = urllib.parse.parse_qs(url.query)
@@ -1665,29 +1670,22 @@ def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=
         path = url.path.rsplit('/', 1)[0]
     else:
         path = url.path.rstrip('/')
-    url = urllib.parse.SplitResult(url.scheme, url.netloc, path + '/entry.jar', '', '')
-    download, new_etag = net.http_get(url.geturl(), etag, timeout)
-    if download is None:
-        return None, new_etag
-    # jarsigner is used to verify the JAR, it requires a file for input
-    with tempfile.TemporaryDirectory() as dirname:
-        with (Path(dirname) / 'entry.jar').open('wb') as fp:
-            fp.write(download)
-            fp.flush()
-        entry, public_key, fingerprint = get_index_from_jar(fp.name, fingerprint)
+    url = urllib.parse.SplitResult(url.scheme, url.netloc, path, '', '')
+    mirrors = common.get_mirrors(url, 'entry.jar')
+    f = net.download_using_mirrors(mirrors)
+    entry, public_key, fingerprint = get_index_from_jar(f, fingerprint)
     name = entry['index']['name']
     sha256 = entry['index']['sha256']
-    url = urllib.parse.SplitResult(url.scheme, url.netloc, path + name, '', '')
-    index, _ignored = net.http_get(url.geturl(), None, timeout)
+    mirrors = common.get_mirrors(url, entry['index']['name'][1:])
+    f = net.download_using_mirrors(mirrors)
+    with open(f, 'rb') as fp:
+        index = fp.read()
     if sha256 != hashlib.sha256(index).hexdigest():
         raise VerificationException(
             _("SHA-256 of {url} does not match entry!").format(url=url)
         )
-    return json.loads(index), new_etag
+    return json.loads(index), None
 
 
 def get_index_from_jar(jarfile, fingerprint=None, allow_deprecated=False):
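
Pulling the new pieces together, the entry.jar step of the rewritten function looks roughly like this in isolation (the example.org URL is a placeholder; fingerprint checking and error handling are omitted):

from fdroidserver import common, index, net

mirrors = common.get_mirrors('https://example.org/fdroid/repo', 'entry.jar')
jar_path = net.download_using_mirrors(mirrors)  # local path of the downloaded JAR
entry, public_key, fingerprint = index.get_index_from_jar(jar_path)
# entry['index'] names the real index file and its expected SHA-256.
print(entry['index']['name'], entry['index']['sha256'])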