Mirror of https://github.com/f-droid/fdroidserver.git, synced 2025-11-04 22:40:29 +03:00
Merge branch 'update-stages-in-status-json' into 'master'

update: add execution stages to status JSON

See merge request fdroid/fdroidserver!1627

Commit 8d17b67642

2 changed files with 34 additions and 2 deletions
@@ -1216,6 +1216,19 @@ class Encoder(json.JSONEncoder):
         return super().default(obj)


+def epoch_millis_now():
+    """Get the current time in epoch milliseconds.
+
+    This is the format returned by Java's System.currentTimeMillis().
+
+    Parameters
+    ----------
+    millis
+        Java-style integer time since UNIX epoch in milliseconds
+    """
+    return int(datetime.now(timezone.utc).timestamp() * 1000)
+
+
 def setup_status_output(start_timestamp):
     """Create the common output dictionary for public status updates."""
     output = {
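For orientation, here is a minimal standalone sketch (not part of the diff) of what the new epoch_millis_now() helper produces; it uses only the standard library and mirrors the one-line implementation above.

from datetime import datetime, timezone

def epoch_millis_now():
    # Integer milliseconds since the UNIX epoch, the same scale as
    # Java's System.currentTimeMillis().
    return int(datetime.now(timezone.utc).timestamp() * 1000)

millis = epoch_millis_now()
print(millis)                                                  # e.g. 1730841629000
print(datetime.fromtimestamp(millis / 1000, tz=timezone.utc))  # back to a datetime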
@@ -1258,7 +1271,7 @@ def write_status_json(output, pretty=False, name=None):
     if not os.path.exists(status_dir):
         os.makedirs(status_dir)
     if not name:
-        output['endTimestamp'] = int(datetime.now(timezone.utc).timestamp() * 1000)
+        output['endTimestamp'] = epoch_millis_now()
         names = ['running', sys.argv[0].split()[1]]  # fdroid subcommand
     else:
         names = [name]
@@ -202,6 +202,13 @@ def status_update_json(apps, apks):
     common.write_status_json(output, options.pretty)


+def output_status_stage(output, stage):
+    if 'stages' not in output:
+        output['stages'] = dict()
+    output['stages'][stage] = common.epoch_millis_now()
+    common.write_running_status_json(output)
+
+
 def delete_disabled_builds(apps, apkcache, repodirs):
     """Delete disabled build outputs.
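A hedged, standalone sketch (not part of the diff) of the data shape this helper builds up: each call records the stage's start time in epoch milliseconds under output['stages'] and rewrites the public running-status JSON. The JSON dump below stands in for common.write_running_status_json(), and the startTimestamp key is illustrative.

import json
import time

def epoch_millis_now():
    return int(time.time() * 1000)

def output_status_stage(output, stage):
    # Same pattern as the diff: lazily create the 'stages' dict, then record
    # when this stage started, keyed by the stage name.
    if 'stages' not in output:
        output['stages'] = dict()
    output['stages'][stage] = epoch_millis_now()
    # fdroidserver would call common.write_running_status_json(output) here;
    # printing the JSON is enough to show the resulting structure.
    print(json.dumps(output, indent=2))

status_output = {'startTimestamp': epoch_millis_now()}  # illustrative key
output_status_stage(status_output, 'process_apks')
output_status_stage(status_output, 'scan_repo_files')
# -> {"startTimestamp": ..., "stages": {"process_apks": ..., "scan_repo_files": ...}}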
@@ -2588,7 +2595,7 @@ def main():
     metadata.warnings_action = options.W

     config = common.read_config()
-    common.setup_status_output(start_timestamp)
+    status_output = common.setup_status_output(start_timestamp)

     if not (('jarsigner' in config or 'apksigner' in config)
             and 'keytool' in config):
@@ -2651,12 +2658,15 @@
     cache_timestamp = get_cache_mtime()

     # Delete builds for disabled apps
+    output_status_stage(status_output, 'delete_disabled_builds')
     delete_disabled_builds(apps, apkcache, repodirs)

     # Scan all apks in the main repo
+    output_status_stage(status_output, 'process_apks')
     apks, cachechanged = process_apks(apkcache, repodirs[0], knownapks,
                                       options.use_date_from_apk, apps, cache_timestamp)

+    output_status_stage(status_output, 'scan_repo_files')
     files, fcachechanged = scan_repo_files(apkcache, repodirs[0], knownapks,
                                            options.use_date_from_apk)
     cachechanged = cachechanged or fcachechanged
@@ -2666,6 +2676,7 @@
     cachechanged = cachechanged or icachechanged
     apks += ipas

+    output_status_stage(status_output, 'remove_apks')
     appid_has_apks = set()
     appid_has_repo_files = set()
     sha256_has_files = collections.defaultdict(list)
@@ -2748,18 +2759,25 @@
     if cachechanged:
         write_cache(apkcache)

+    output_status_stage(status_output, 'read_added_date_from_all_apks')
     # The added date currently comes from the oldest apk which might be in the archive.
     # So we need this populated at app level before continuing with only processing /repo
     # or /archive
     read_added_date_from_all_apks(apps, apks + archapks)

     if len(repodirs) > 1:
+        output_status_stage(status_output, 'archive_old_apks archive')
         archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
+        output_status_stage(status_output, 'prepare_apps archive')
         archived_apps = prepare_apps(apps, archapks, repodirs[1])
+        output_status_stage(status_output, 'index.make archive')
         fdroidserver.index.make(archived_apps, archapks, repodirs[1], True)

+    output_status_stage(status_output, 'prepare_apps repo')
     repoapps = prepare_apps(apps, apks, repodirs[0])

+    output_status_stage(status_output, 'index.make repo')
+
     # APKs are placed into multiple repos based on the app package, providing
     # per-app subscription feeds for nightly builds and things like it
     if config['per_app_repos']:
@@ -2780,6 +2798,7 @@
     git_remote = config.get('binary_transparency_remote')
     if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
         from . import btlog
+        output_status_stage(status_output, 'make_binary_transparency_log')
         btlog.make_binary_transparency_log(repodirs)

     status_update_json(apps, apks + archapks)
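Taken together, the stage entries are start timestamps in execution order, so a consumer of the published status JSON can derive per-stage durations from the gaps between them. A hedged sketch, assuming a local copy at repo/status/running.json (the path is an assumption; endTimestamp may be absent while the run is still in progress):

import json

# Assumed location of the status file fdroid publishes while running.
with open('repo/status/running.json') as fp:
    status = json.load(fp)

stages = sorted(status.get('stages', {}).items(), key=lambda kv: kv[1])
end = status.get('endTimestamp')  # may be absent while the run is in progress

# Each stage stores its start time, so its duration is the gap to the next
# stage's start (or to endTimestamp for the final stage).
for (name, start), (_, nxt) in zip(stages, stages[1:] + [('end', end)]):
    if nxt is None:
        print(f"{name}: still running")
    else:
        print(f"{name}: {(nxt - start) / 1000:.1f}s")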