Mirror of https://github.com/f-droid/fdroidserver.git, synced 2025-11-06 07:20:29 +03:00
run black to reformat code that does not have WIP merge requests
parent 2e6cad57aa
commit 307cf8958c
9 changed files with 519 additions and 207 deletions
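The changes below were produced by the black code formatter; the commit does not record the exact invocation or configuration. A plausible way to verify such a reformat (a hypothetical sketch, not part of the commit; the file path is assumed to be fdroidserver/mirror.py, the module the hunks below appear to come from) is to run black in check mode so it reports differences without rewriting anything:

    import subprocess

    # Hypothetical verification, not part of this commit: ask black whether the
    # file already matches its style, printing a diff instead of rewriting it.
    subprocess.call(['black', '--check', '--diff', 'fdroidserver/mirror.py'])

Dropping --check and --diff rewrites the files in place, which is what this commit did for the modules without open WIP merge requests. Note that the reformatted lines keep single-quoted strings, which suggests black was run with string normalization disabled (e.g. --skip-string-normalization); that is an inference from the diff, not something the commit states.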
@@ -33,8 +33,15 @@ def _run_wget(path, urls):
     with open(urls_file, 'w') as fp:
         for url in urls:
             fp.write(url.split('?')[0] + '\n')  # wget puts query string in the filename
-    subprocess.call(['wget', verbose, '--continue', '--user-agent="fdroid mirror"',
-                     '--input-file=' + urls_file])
+    subprocess.call(
+        [
+            'wget',
+            verbose,
+            '--continue',
+            '--user-agent="fdroid mirror"',
+            '--input-file=' + urls_file,
+        ]
+    )
     os.remove(urls_file)
@@ -43,21 +50,47 @@ def main():

     parser = ArgumentParser()
     common.setup_global_opts(parser)
-    parser.add_argument("url", nargs='?',
-                        help=_('Base URL to mirror, can include the index signing key '
-                               + 'using the query string: ?fingerprint='))
-    parser.add_argument("--all", action='store_true', default=False,
-                        help=_("Mirror the full repo and archive, all file types."))
-    parser.add_argument("--archive", action='store_true', default=False,
-                        help=_("Also mirror the full archive section"))
-    parser.add_argument("--build-logs", action='store_true', default=False,
-                        help=_("Include the build logs in the mirror"))
-    parser.add_argument("--pgp-signatures", action='store_true', default=False,
-                        help=_("Include the PGP signature .asc files in the mirror"))
-    parser.add_argument("--src-tarballs", action='store_true', default=False,
-                        help=_("Include the source tarballs in the mirror"))
-    parser.add_argument("--output-dir", default=None,
-                        help=_("The directory to write the mirror to"))
+    parser.add_argument(
+        "url",
+        nargs='?',
+        help=_(
+            'Base URL to mirror, can include the index signing key '
+            + 'using the query string: ?fingerprint='
+        ),
+    )
+    parser.add_argument(
+        "--all",
+        action='store_true',
+        default=False,
+        help=_("Mirror the full repo and archive, all file types."),
+    )
+    parser.add_argument(
+        "--archive",
+        action='store_true',
+        default=False,
+        help=_("Also mirror the full archive section"),
+    )
+    parser.add_argument(
+        "--build-logs",
+        action='store_true',
+        default=False,
+        help=_("Include the build logs in the mirror"),
+    )
+    parser.add_argument(
+        "--pgp-signatures",
+        action='store_true',
+        default=False,
+        help=_("Include the PGP signature .asc files in the mirror"),
+    )
+    parser.add_argument(
+        "--src-tarballs",
+        action='store_true',
+        default=False,
+        help=_("Include the source tarballs in the mirror"),
+    )
+    parser.add_argument(
+        "--output-dir", default=None, help=_("The directory to write the mirror to")
+    )
     options = parser.parse_args()

     if options.all:
@@ -77,24 +110,31 @@ def main():
     def _append_to_url_path(*args):
         """Append the list of path components to URL, keeping the rest the same."""
         newpath = posixpath.join(path, *args)
-        return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))
+        return urllib.parse.urlunparse(
+            (scheme, hostname, newpath, params, query, fragment)
+        )

     if fingerprint:
         config = common.read_config(options)
         if not ('jarsigner' in config or 'apksigner' in config):
-            logging.error(_('Java JDK not found! Install in standard location or set java_paths!'))
+            logging.error(
+                _('Java JDK not found! Install in standard location or set java_paths!')
+            )
             sys.exit(1)

         def _get_index(section, etag=None):
             url = _append_to_url_path(section)
             data, etag = index.download_repo_index(url, etag=etag)
             return data, etag, _append_to_url_path(section, 'index-v1.jar')

     else:

         def _get_index(section, etag=None):
             import io
             import json
             import zipfile
             from . import net

             url = _append_to_url_path(section, 'index-v1.jar')
             content, etag = net.http_get(url)
             with zipfile.ZipFile(io.BytesIO(content)) as zip:
@@ -107,21 +147,30 @@ def main():
         ip = ipaddress.ip_address(hostname)
     except ValueError:
         pass
-    if hostname == 'f-droid.org' \
-       or (ip is not None and hostname in socket.gethostbyname_ex('f-droid.org')[2]):
-        print(_('ERROR: this command should never be used to mirror f-droid.org!\n'
-                'A full mirror of f-droid.org requires more than 200GB.'))
+    if hostname == 'f-droid.org' or (
+        ip is not None and hostname in socket.gethostbyname_ex('f-droid.org')[2]
+    ):
+        print(
+            _(
+                'ERROR: this command should never be used to mirror f-droid.org!\n'
+                'A full mirror of f-droid.org requires more than 200GB.'
+            )
+        )
         sys.exit(1)

     path = path.rstrip('/')
     if path.endswith('repo') or path.endswith('archive'):
-        logging.warning(_('Do not include "{path}" in URL!')
-                        .format(path=path.split('/')[-1]))
+        logging.warning(
+            _('Do not include "{path}" in URL!').format(path=path.split('/')[-1])
+        )
     elif not path.endswith('fdroid'):
-        logging.warning(_('{url} does not end with "fdroid", check the URL path!')
-                        .format(url=options.url))
+        logging.warning(
+            _('{url} does not end with "fdroid", check the URL path!').format(
+                url=options.url
+            )
+        )

-    icondirs = ['icons', ]
+    icondirs = ['icons']
     for density in update.screen_densities:
         icondirs.append('icons-' + density)
@@ -134,7 +183,7 @@ def main():
     if options.archive:
         sections = ('repo', 'archive')
     else:
-        sections = ('repo', )
+        sections = ('repo',)

     for section in sections:
         sectiondir = os.path.join(basedir, section)
@@ -152,23 +201,29 @@ def main():
         for packageName, packageList in data['packages'].items():
             for package in packageList:
                 to_fetch = []
-                keys = ['apkName', ]
+                keys = ['apkName']
                 if options.src_tarballs:
                     keys.append('srcname')
                 for k in keys:
                     if k in package:
                         to_fetch.append(package[k])
                     elif k == 'apkName':
-                        logging.error(_('{appid} is missing {name}')
-                                      .format(appid=package['packageName'], name=k))
+                        logging.error(
+                            _('{appid} is missing {name}').format(
+                                appid=package['packageName'], name=k
+                            )
+                        )
                 for f in to_fetch:
-                    if not os.path.exists(f) \
-                       or (f.endswith('.apk') and os.path.getsize(f) != package['size']):
+                    if not os.path.exists(f) or (
+                        f.endswith('.apk') and os.path.getsize(f) != package['size']
+                    ):
                         urls.append(_append_to_url_path(section, f))
                         if options.pgp_signatures:
                             urls.append(_append_to_url_path(section, f + '.asc'))
                         if options.build_logs and f.endswith('.apk'):
-                            urls.append(_append_to_url_path(section, f[:-4] + '.log.gz'))
+                            urls.append(
+                                _append_to_url_path(section, f[:-4] + '.log.gz')
+                            )

         _run_wget(sectiondir, urls)
@@ -181,7 +236,7 @@ def main():
                 for k in update.GRAPHIC_NAMES:
                     f = d.get(k)
                     if f:
-                        filepath_tuple = components + (f, )
+                        filepath_tuple = components + (f,)
                         urls.append(_append_to_url_path(*filepath_tuple))
                 _run_wget(os.path.join(basedir, *components), urls)
                 for k in update.SCREENSHOT_DIRS:
@@ -190,14 +245,16 @@ def main():
                     if filelist:
                         components = (section, app['packageName'], locale, k)
                         for f in filelist:
-                            filepath_tuple = components + (f, )
+                            filepath_tuple = components + (f,)
                             urls.append(_append_to_url_path(*filepath_tuple))
                         _run_wget(os.path.join(basedir, *components), urls)

         urls = dict()
         for app in data['apps']:
             if 'icon' not in app:
-                logging.error(_('no "icon" in {appid}').format(appid=app['packageName']))
+                logging.error(
+                    _('no "icon" in {appid}').format(appid=app['packageName'])
+                )
                 continue
             icon = app['icon']
             for icondir in icondirs: