Merge branch 'fix_760' into 'master'

update.py (and index.py) refactoring

Closes #524 and #760

See merge request fdroid/fdroidserver!756
Marcus 2020-06-24 22:37:51 +00:00
commit 1902bfb6fc
9 changed files with 1360 additions and 78 deletions

fdroidserver/index.py

@@ -21,7 +21,6 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import collections
-import copy
 import json
 import logging
 import os
@@ -41,31 +40,32 @@ from . import metadata
 from . import net
 from . import signindex
 from fdroidserver.common import FDroidPopen, FDroidPopenBytes, load_stats_fdroid_signing_key_fingerprints
-from fdroidserver.exception import FDroidException, VerificationException, MetaDataException
+from fdroidserver.exception import FDroidException, VerificationException
 
 
-def make(apps, sortedids, apks, repodir, archive):
+def make(apps, apks, repodir, archive):
     """Generate the repo index files.
 
     This requires properly initialized options and config objects.
 
-    :param apps: fully populated apps list
-    :param sortedids: app package IDs, sorted
-    :param apks: full populated apks list
+    :param apps: OrderedDict of apps to go into the index, each app should have
+                 at least one associated apk
+    :param apks: list of apks to go into the index
     :param repodir: the repo directory
     :param archive: True if this is the archive repo, False if it's the
                     main one.
     """
     from fdroidserver.update import METADATA_VERSION
 
-    def _resolve_description_link(appid):
-        if appid in apps:
-            return "fdroid.app:" + appid, apps[appid].Name
-        raise MetaDataException("Cannot resolve app id " + appid)
-
     if not common.options.nosign:
         common.assert_config_keystore(common.config)
 
+    # Historically the index has been sorted by App Name, so we enforce this ordering here
+    sortedids = sorted(apps, key=lambda appid: apps[appid].Name.upper())
+    sortedapps = collections.OrderedDict()
+    for appid in sortedids:
+        sortedapps[appid] = apps[appid]
+
     repodict = collections.OrderedDict()
     repodict['timestamp'] = datetime.utcnow().replace(tzinfo=timezone.utc)
     repodict['version'] = METADATA_VERSION
@@ -106,21 +106,6 @@ def make(apps, sortedids, apks, repodir, archive):
     if mirrors:
         repodict['mirrors'] = mirrors
 
-    appsWithPackages = collections.OrderedDict()
-    for packageName in sortedids:
-        app = apps[packageName]
-        if app['Disabled']:
-            continue
-
-        # only include apps with packages
-        for apk in apks:
-            if apk['packageName'] == packageName:
-                newapp = copy.copy(app)  # update wiki needs unmodified description
-                newapp['Description'] = metadata.description_html(app['Description'],
-                                                                   _resolve_description_link)
-                appsWithPackages[packageName] = newapp
-                break
-
     requestsdict = collections.OrderedDict()
     for command in ('install', 'uninstall'):
         packageNames = []
@@ -136,9 +121,9 @@ def make(apps, sortedids, apks, repodir, archive):
 
     fdroid_signing_key_fingerprints = load_stats_fdroid_signing_key_fingerprints()
 
-    make_v0(appsWithPackages, apks, repodir, repodict, requestsdict,
+    make_v0(sortedapps, apks, repodir, repodict, requestsdict,
             fdroid_signing_key_fingerprints)
 
-    make_v1(appsWithPackages, apks, repodir, repodict, requestsdict,
+    make_v1(sortedapps, apks, repodir, repodict, requestsdict,
             fdroid_signing_key_fingerprints)
@@ -311,11 +296,6 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints):
             value = str(apk[key])
         addElement(name, value, doc, parent)
 
-    def addElementCDATA(name, value, doc, parent):
-        el = doc.createElement(name)
-        el.appendChild(doc.createCDATASection(value))
-        parent.appendChild(el)
-
     def addElementCheckLocalized(name, app, key, doc, parent, default=''):
         """Fill in field from metadata or localized block
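
(Not part of the diff.) A minimal usage sketch of the reworked index.make() entry point: the sortedids argument is gone and the function now sorts by app Name itself, so a caller only passes the app dict, the apk list, the repo directory and the archive flag. The write_index() helper below is hypothetical; it assumes common.config and common.options were already initialized by the calling command, as the docstring requires.

import collections

from fdroidserver import index


def write_index(apps, apks, repodir='repo'):
    """Hypothetical wrapper: apps maps appid -> App, each with at least one apk in apks."""
    # index.make() now sorts by app Name internally, so no sortedids argument is passed.
    # It still expects common.config/common.options to be set up by the caller.
    index.make(collections.OrderedDict(apps), apks, repodir, False)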

fdroidserver/metadata.py

@@ -616,7 +616,7 @@ class DescriptionFormatter:
                     warn_or_exception(_("Unterminated ]]"))
                 url = txt[2:index]
                 if self.linkResolver:
-                    url, urltext = self.linkResolver(url)
+                    url, urltext = self.linkResolver.resolve_description_link(url)
                 else:
                     urltext = url
                 res_html += '<a href="' + url + '">' + html.escape(urltext, quote=False) + '</a>'
@@ -899,14 +899,9 @@ def read_metadata(xref=True, check_vcs=[], refresh=True, sort_by_time=False):
     if xref:
         # Parse all descriptions at load time, just to ensure cross-referencing
         # errors are caught early rather than when they hit the build server.
-        def linkres(appid):
-            if appid in apps:
-                return ("fdroid.app:" + appid, "Dummy name - don't know yet")
-            warn_or_exception(_("Cannot resolve app id {appid}").format(appid=appid))
-
         for appid, app in apps.items():
             try:
-                description_html(app.Description, linkres)
+                description_html(app.Description, DummyDescriptionResolver(apps))
             except MetaDataException as e:
                 warn_or_exception(_("Problem with description of {appid}: {error}")
                                   .format(appid=appid, error=str(e)))
@@ -1679,3 +1674,21 @@ def add_metadata_arguments(parser):
     '''add common command line flags related to metadata processing'''
     parser.add_argument("-W", choices=['error', 'warn', 'ignore'], default='error',
                         help=_("force metadata errors (default) to be warnings, or to be ignored."))
+
+
+class DescriptionResolver:
+    def __init__(self, apps):
+        self.apps = apps
+
+    def resolve_description_link(self, appid):
+        if appid in self.apps:
+            if self.apps[appid].Name:
+                return "fdroid.app:" + appid, self.apps[appid].Name
+        raise MetaDataException("Cannot resolve app id " + appid)
+
+
+class DummyDescriptionResolver(DescriptionResolver):
+    def resolve_description_link(self, appid):
+        if appid in self.apps:
+            return "fdroid.app:" + appid, "Dummy name - don't know yet"
+        warn_or_exception(_("Cannot resolve app id {appid}").format(appid=appid))
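
(Not part of the diff.) A sketch of how the new resolver classes are meant to be used: description_html() now takes an object exposing resolve_description_link(appid) instead of a bare callback, and DescriptionFormatter calls that method for every [[appid]] link it encounters. The render_one_description() helper is invented for illustration.

from fdroidserver import metadata


def render_one_description(apps, appid):
    """Illustrative only: render a single app's Description to HTML."""
    # apps is the dict returned by metadata.read_metadata(); the resolver maps
    # [[other.app.id]] links to ("fdroid.app:" + appid, app Name) pairs.
    resolver = metadata.DescriptionResolver(apps)
    return metadata.description_html(apps[appid].Description, resolver)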

fdroidserver/update.py

@@ -126,7 +126,7 @@ def disabled_algorithms_allowed():
     return options.allow_disabled_algorithms or config['allow_disabled_algorithms']
 
 
-def status_update_json(apps, sortedids, apks):
+def status_update_json(apps, apks):
    """Output a JSON file with metadata about this `fdroid update` run
 
    :param apps: fully populated list of all applications
@@ -141,7 +141,7 @@ def status_update_json(apps, sortedids, apks):
     output['failedBuilds'] = dict()
     output['noPackages'] = []
 
-    for appid in sortedids:
+    for appid in apps:
         app = apps[appid]
         for af in app.get('AntiFeatures', []):
             antiFeatures = output['antiFeatures']  # JSON camelCase
@@ -177,7 +177,7 @@ def status_update_json(apps, sortedids, apks):
     common.write_status_json(output, options.pretty)
 
 
-def update_wiki(apps, sortedids, apks):
+def update_wiki(apps, apks):
    """Update the wiki
 
    :param apps: fully populated list of all applications
@@ -193,7 +193,7 @@ def update_wiki(apps, sortedids, apks):
     generated_pages = {}
     generated_redirects = {}
 
-    for appid in sortedids:
+    for appid in apps:
         app = metadata.App(apps[appid])
 
         wikidata = ''
@@ -1113,7 +1113,7 @@ def insert_localized_app_metadata(apps):
     ...as well as the /metadata/<packageName>/<locale> directory.
 
     If it finds them, they will be added to the dict of all packages, with the
-    versions in the /metadata/ folder taking precendence over the what
+    versions in the /metadata/ folder taking precedence over the what
     is in the app's source repo.
 
     The <locale> is the locale of the files supplied in that directory, using
@@ -2009,19 +2009,19 @@ def make_categories_txt(repodir, categories):
 def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
 
     def filter_apk_list_sorted(apk_list):
-        res = []
+        apkList = []
         currentVersionApk = None
         for apk in apk_list:
             if apk['packageName'] == appid:
-                if apk['versionCode'] == common.version_code_string_to_int(app.CurrentVersionCode):
-                    currentVersionApk = apk
-                    continue
-                res.append(apk)
+                if app.CurrentVersionCode is not None:
+                    if apk['versionCode'] == common.version_code_string_to_int(app.CurrentVersionCode):
+                        currentVersionApk = apk
+                        continue
+                apkList.append(apk)
 
         # Sort the apk list by version code. First is highest/newest.
-        sorted_list = sorted(res, key=lambda apk: apk['versionCode'], reverse=True)
+        sorted_list = sorted(apkList, key=lambda apk: apk['versionCode'], reverse=True)
         if currentVersionApk:
             # Insert apk which corresponds to currentVersion at the front
             sorted_list.insert(0, currentVersionApk)
@@ -2166,6 +2166,75 @@ def create_metadata_from_template(apk):
     logging.info(_("Generated skeleton metadata for {appid}").format(appid=apk['packageName']))
 
 
+def read_names_from_apks(apps, apks):
+    """This is a stripped down copy of apply_info_from_latest_apk that only parses app names"""
+    for appid, app in apps.items():
+        bestver = UNSET_VERSION_CODE
+        for apk in apks:
+            if apk['packageName'] == appid:
+                if apk['versionCode'] > bestver:
+                    bestver = apk['versionCode']
+                    bestapk = apk
+
+        if bestver == UNSET_VERSION_CODE:
+            if app.Name is None:
+                app.Name = app.AutoName or appid
+            app.icon = None
+        else:
+            if app.Name is None:
+                app.Name = bestapk['name']
+
+
+def render_app_descriptions(apps, all_apps):
+    """
+    Renders the app html description.
+
+    For resolving inter-app links it needs the full list of apps, even if they end up in
+    separate repos (i.e. archive or per app repos).
+    """
+    for app in apps.values():
+        app['Description'] = metadata.description_html(app['Description'], metadata.DescriptionResolver(all_apps))
+
+
+def get_apps_with_packages(apps, apks):
+    """Returns a deepcopy of that subset apps that actually has any associated packages. Skips disabled apps."""
+    appsWithPackages = collections.OrderedDict()
+    for packageName in apps:
+        app = apps[packageName]
+        if app['Disabled']:
+            continue
+
+        # only include apps with packages
+        for apk in apks:
+            if apk['packageName'] == packageName:
+                newapp = copy.copy(app)
+                appsWithPackages[packageName] = newapp
+                break
+
+    return appsWithPackages
+
+
+def prepare_apps(apps, apks, repodir):
+    """Encapsulates all necessary preparation steps before we can build an index out of apps and apks.
+
+    :param apps: All apps as read from metadata
+    :param apks: list of apks that belong into repo, this gets modified in place
+    :param repodir: the target repository directory, metadata files will be copied here
+
+    :return: the relevant subset of apps (as a deepcopy)
+    """
+    apps_with_packages = get_apps_with_packages(apps, apks)
+    apply_info_from_latest_apk(apps_with_packages, apks)
+    render_app_descriptions(apps_with_packages, apps)
+    insert_funding_yml_donation_links(apps)
+    # This is only currently done for /repo because doing it for the archive
+    # will take a lot of time and bloat the archive mirrors and index
+    if repodir == 'repo':
+        copy_triple_t_store_metadata(apps_with_packages)
+    insert_obbs(repodir, apps_with_packages, apks)
+    translate_per_build_anti_features(apps_with_packages, apks)
+    if repodir == 'repo':
+        insert_localized_app_metadata(apps_with_packages)
+    return apps_with_packages
+
+
 config = None
 options = None
 start_timestamp = time.gmtime()
@@ -2302,12 +2371,6 @@
             else:
                 logging.warning(msg + '\n\t' + _('Use `fdroid update -c` to create it.'))
 
-    insert_funding_yml_donation_links(apps)
-    copy_triple_t_store_metadata(apps)
-    insert_obbs(repodirs[0], apps, apks)
-    insert_localized_app_metadata(apps)
-    translate_per_build_anti_features(apps, apks)
-
     # Scan the archive repo for apks as well
     if len(repodirs) > 1:
         archapks, cc = process_apks(apkcache, repodirs[1], knownapks, options.use_date_from_apk)
@@ -2316,13 +2379,18 @@
     else:
         archapks = []
 
-    # Apply information from latest apks to the application and update dates
-    apply_info_from_latest_apk(apps, apks + archapks)
+    # We need app.Name populated for all apps regardless of which repo they end up in
+    # for the old-style inter-app links, so let's do it before we do anything else.
+    # This will be done again (as part of apply_info_from_latest_apk) for repo and archive
+    # separately later on, but it's fairly cheap anyway.
+    read_names_from_apks(apps, apks + archapks)
 
-    # Sort the app list by name, then the web site doesn't have to by default.
-    # (we had to wait until we'd scanned the apks to do this, because mostly the
-    # name comes from there!)
-    sortedids = sorted(apps.keys(), key=lambda appid: apps[appid].Name.upper())
+    if len(repodirs) > 1:
+        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
+        archived_apps = prepare_apps(apps, archapks, repodirs[1])
+        index.make(archived_apps, archapks, repodirs[1], True)
+
+    repoapps = prepare_apps(apps, apks, repodirs[0])
 
     # APKs are placed into multiple repos based on the app package, providing
     # per-app subscription feeds for nightly builds and things like it
@@ -2333,25 +2401,15 @@
             appdict = dict()
             appdict[appid] = app
             if os.path.isdir(repodir):
-                index.make(appdict, [appid], apks, repodir, False)
+                index.make(appdict, apks, repodir, False)
             else:
                 logging.info(_('Skipping index generation for {appid}').format(appid=appid))
         return
 
-    if len(repodirs) > 1:
-        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
-
     # Make the index for the main repo...
-    index.make(apps, sortedids, apks, repodirs[0], False)
+    index.make(repoapps, apks, repodirs[0], False)
     make_categories_txt(repodirs[0], categories)
 
-    # If there's an archive repo, make the index for it. We already scanned it
-    # earlier on.
-    if len(repodirs) > 1:
-        archived_apps = copy.deepcopy(apps)
-        apply_info_from_latest_apk(archived_apps, archapks)
-        index.make(archived_apps, sortedids, archapks, repodirs[1], True)
-
     git_remote = config.get('binary_transparency_remote')
     if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
         from . import btlog
@@ -2381,8 +2439,8 @@
 
     # Update the wiki...
     if options.wiki:
-        update_wiki(apps, sortedids, apks + archapks)
+        update_wiki(apps, apks + archapks)
 
-    status_update_json(apps, sortedids, apks + archapks)
+    status_update_json(apps, apks + archapks)
 
     logging.info(_("Finished"))
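
(Not part of the diff.) Condensed sketch of the per-repository sequence that main() now follows after the refactoring: prepare_apps() narrows the app dict to apps that have packages (as a deepcopy), renders descriptions and applies the per-repo metadata steps, then index.make() builds the index without a sortedids argument. The make_all_indexes() helper name is invented; the calls themselves mirror the diff above.

from fdroidserver import index, update


def make_all_indexes(apps, apks, archapks, repodirs):
    """Illustrative only: per-repo flow, assuming archive_old_apks() already ran."""
    if len(repodirs) > 1:
        # archive repo: archapks holds the apks that were moved to the archive
        archived_apps = update.prepare_apps(apps, archapks, repodirs[1])
        index.make(archived_apps, archapks, repodirs[1], True)
    # main repo
    repoapps = update.prepare_apps(apps, apks, repodirs[0])
    index.make(repoapps, apks, repodirs[0], False)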