Mirror of https://github.com/f-droid/fdroidserver.git (synced 2025-11-06 07:20:29 +03:00)

commit 28117f31d3

Merge branch 'write-status-json' into 'master'

    write buildserver status as public JSON files

    See merge request fdroid/fdroidserver!716

14 changed files with 316 additions and 61 deletions
@@ -173,10 +173,14 @@ The repository of older versions of applications from the main demo repository.
 # 'bar.info:/var/www/fdroid',
 # }
 
-# Uncomment this option if you want to logs of builds and other processes to
-# your repository server(s). Logs get published to all servers configured in
-# 'serverwebroot'. The name scheme is: .../repo/$APPID_$VERCODE.log.gz
-# Only logs from build-jobs running inside a buildserver VM are supported.
+# When running fdroid processes on a remote server, it is possible to
+# publish extra information about the status. Each fdroid sub-command
+# can create repo/status/running.json when it starts, then a
+# repo/status/<sub-command>.json when it completes. The build logs
+# and other processes will also get published, if they are running in
+# a buildserver VM. The build logs name scheme is:
+# .../repo/$APPID_$VERCODE.log.gz. These files are also pushed to all
+# servers configured in 'serverwebroot'.
 #
 # deploy_process_logs = True
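These comments describe a layout that outside tools can poll over HTTP. As a rough sketch (the repo URL here is hypothetical, and assumes a server that publishes repo/status/ as described above), a monitoring script could do:

    import json
    import urllib.request

    BASE = 'https://example.com/fdroid/repo/status/'  # hypothetical repo URL

    def fetch_status(name):
        """Fetch repo/status/<name>.json, e.g. 'running' or 'build'."""
        with urllib.request.urlopen(BASE + name + '.json') as resp:
            return json.load(resp)

    running = fetch_status('running')  # written when a sub-command starts
    print(running['subcommand'], running['startTimestamp'])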
@@ -83,6 +83,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force):
             buildserverid = subprocess.check_output(['vagrant', 'ssh', '-c',
                                                      'cat /home/vagrant/buildserverid'],
                                                     cwd='builder').strip().decode()
+            status_output['buildserverid'] = buildserverid
             logging.debug(_('Fetched buildserverid from VM: {buildserverid}')
                           .format(buildserverid=buildserverid))
         except Exception as e:
@@ -912,6 +913,7 @@ config = None
 buildserverid = None
 fdroidserverid = None
 start_timestamp = time.gmtime()
+status_output = None
 timeout_event = threading.Event()
@@ -978,6 +980,8 @@ def main():
     else:
         also_check_dir = None
 
+    status_output = common.setup_status_output(start_timestamp)
+
     repo_dir = 'repo'
 
     build_dir = 'build'
@@ -1029,6 +1033,8 @@ def main():
     # Build applications...
     failed_apps = {}
     build_succeeded = []
+    status_output['failedBuilds'] = failed_apps
+    status_output['successfulBuilds'] = build_succeeded
     # Only build for 36 hours, then stop gracefully.
     endtime = time.time() + 36 * 60 * 60
     max_build_time_reached = False
@@ -1201,10 +1207,12 @@ def main():
                 except Exception as e:
                     logging.error("Error while attempting to publish build log: %s" % e)
 
+            common.write_running_status_json(status_output)
             if timer:
                 timer.cancel()  # kill the watchdog timer
 
             if max_build_time_reached:
+                status_output['maxBuildTimeReached'] = True
                 logging.info("Stopping after global build timeout...")
                 break
 
@@ -1263,6 +1271,8 @@ def main():
             newpage = site.Pages['build']
             newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect')
 
+    common.write_status_json(status_output, options.pretty)
+
     # hack to ensure this exits, even if some threads are still running
     common.force_exit()
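Note that `status_output['failedBuilds']` and `status_output['successfulBuilds']` (added earlier in this file) are bound to the very same objects as `failed_apps` and `build_succeeded`, so every later mutation is automatically visible the next time `common.write_running_status_json(status_output)` runs, with no extra bookkeeping. A minimal standalone sketch of that aliasing (package names invented):

    import json

    status_output = {}
    failed_apps = {}
    build_succeeded = []
    status_output['failedBuilds'] = failed_apps          # same dict object
    status_output['successfulBuilds'] = build_succeeded  # same list object

    failed_apps['org.example.app'] = 'compile error'
    build_succeeded.append('org.example.other')

    # both mutations appear when the status is next serialized
    print(json.dumps(status_output, sort_keys=True))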
@@ -543,6 +543,18 @@ def checkupdates_app(app):
         raise FDroidException("Git commit failed")
 
 
+def status_update_json(processed, failed):
+    """Output a JSON file with metadata about this run"""
+
+    logging.debug(_('Outputting JSON'))
+    output = common.setup_status_output(start_timestamp)
+    if processed:
+        output['processed'] = processed
+    if failed:
+        output['failed'] = failed
+    common.write_status_json(output)
+
+
 def update_wiki(gplaylog, locallog):
     if config.get('wiki_server') and config.get('wiki_path'):
         try:
@@ -644,6 +656,8 @@ def main():
             return
 
     locallog = ''
+    processed = []
+    failed = dict()
     for appid, app in apps.items():
 
         if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
@@ -656,13 +670,15 @@ def main():
 
         try:
             checkupdates_app(app)
+            processed.append(appid)
         except Exception as e:
            msg = _("...checkupdate failed for {appid} : {error}").format(appid=appid, error=e)
            logging.error(msg)
            locallog += msg + '\n'
+           failed[appid] = str(e)
 
     update_wiki(None, locallog)
+    status_update_json(processed, failed)
     logging.info(_("Finished"))
@@ -20,6 +20,7 @@
 # common.py is imported by all modules, so do not import third-party
 # libraries here as they will become a requirement for all commands.
 
+import git
 import io
 import os
 import sys
@@ -47,7 +48,7 @@ except ImportError:
     import xml.etree.ElementTree as XMLElementTree  # nosec this is a fallback only
 
 from binascii import hexlify
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from distutils.version import LooseVersion
 from queue import Queue
 from zipfile import ZipFile
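The added timezone import supports the UTC-based endTimestamp that the new write_status_json() (further down in this diff) records. A quick sketch of the idiom:

    # An aware UTC datetime gives an unambiguous epoch value regardless
    # of the server's local timezone configuration.
    from datetime import datetime, timezone

    millis = int(datetime.now(timezone.utc).timestamp() * 1000)
    print(millis)  # milliseconds since the Unix epoch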
@@ -587,17 +588,13 @@ def read_app_args(appid_versionCode_pairs, allapps, allow_vercodes=False):
 
 
 def get_extension(filename):
+    """get name and extension of filename, with extension always lower case"""
     base, ext = os.path.splitext(filename)
     if not ext:
         return base, ''
     return base, ext.lower()[1:]
 
 
-def has_extension(filename, ext):
-    _ignored, f_ext = get_extension(filename)
-    return ext == f_ext
-
-
 publish_name_regex = re.compile(r"^(.+)_([0-9]+)\.(apk|zip)$")
@@ -674,6 +671,66 @@ def get_build_dir(app):
     return os.path.join('build', app.id)
 
 
+class Encoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, set):
+            return sorted(obj)
+        return super().default(obj)
+
+
+def setup_status_output(start_timestamp):
+    """Create the common output dictionary for public status updates"""
+    output = {
+        'commandLine': sys.argv,
+        'startTimestamp': int(time.mktime(start_timestamp) * 1000),
+        'subcommand': sys.argv[0].split()[1],
+    }
+    if os.path.isdir('.git'):
+        git_repo = git.repo.Repo(os.getcwd())
+        output['fdroiddata'] = {
+            'commitId': get_head_commit_id(git_repo),
+            'isDirty': git_repo.is_dirty(),
+        }
+    fdroidserver_dir = os.path.dirname(sys.argv[0])
+    if os.path.isdir(os.path.join(fdroidserver_dir, '.git')):
+        git_repo = git.repo.Repo(fdroidserver_dir)
+        output['fdroidserver'] = {
+            'commitId': get_head_commit_id(git_repo),
+            'isDirty': git_repo.is_dirty(),
+        }
+    write_running_status_json(output)
+    return output
+
+
+def write_running_status_json(output):
+    write_status_json(output, pretty=True, name='running')
+
+
+def write_status_json(output, pretty=False, name=None):
+    """Write status out as JSON, and rsync it to the repo server"""
+    status_dir = os.path.join('repo', 'status')
+    if not os.path.exists(status_dir):
+        os.mkdir(status_dir)
+    if not name:
+        output['endTimestamp'] = int(datetime.now(timezone.utc).timestamp() * 1000)
+        name = sys.argv[0].split()[1]  # fdroid subcommand
+    path = os.path.join(status_dir, name + '.json')
+    with open(path, 'w') as fp:
+        if pretty:
+            json.dump(output, fp, sort_keys=True, cls=Encoder, indent=2)
+        else:
+            json.dump(output, fp, sort_keys=True, cls=Encoder, separators=(',', ':'))
+    rsync_status_file_to_repo(path, repo_subdir='status')
+
+
+def get_head_commit_id(git_repo):
+    """Get git commit ID for HEAD as a str
+
+    repo.head.commit.binsha is a bytearray stored in a str
+    """
+    return hexlify(bytearray(git_repo.head.commit.binsha)).decode()
+
+
 def setup_vcs(app):
     '''checkout code from VCS and return instance of vcs and the build dir'''
     build_dir = get_build_dir(app)
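The Encoder subclass exists because the standard json module cannot serialize Python sets, which several status fields use; rendering them as sorted lists also makes the output deterministic. A self-contained sketch (field values invented):

    import json

    class Encoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, set):
                return sorted(obj)  # sets become sorted JSON lists
            return super().default(obj)

    output = {'subcommand': 'update', 'apps': {'b.app', 'a.app'}}
    print(json.dumps(output, sort_keys=True, cls=Encoder))
    # {"apps": ["a.app", "b.app"], "subcommand": "update"}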
@@ -1316,7 +1373,7 @@ def manifest_paths(app_dir, flavours):
 def fetch_real_name(app_dir, flavours):
     '''Retrieve the package name. Returns the name, or None if not found.'''
     for path in manifest_paths(app_dir, flavours):
-        if not has_extension(path, 'xml') or not os.path.isfile(path):
+        if not path.endswith('.xml') or not os.path.isfile(path):
             continue
         logging.debug("fetch_real_name: Checking manifest at " + path)
         xml = parse_xml(path)
@@ -1808,11 +1865,11 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
         for path in manifest_paths(root_dir, flavours):
             if not os.path.isfile(path):
                 continue
-            if has_extension(path, 'xml'):
+            if path.endswith('.xml'):
                 regsub_file(r'android:versionName="[^"]*"',
                             r'android:versionName="%s"' % build.versionName,
                             path)
-            elif has_extension(path, 'gradle'):
+            elif path.endswith('.gradle'):
                 regsub_file(r"""(\s*)versionName[\s'"=]+.*""",
                             r"""\1versionName '%s'""" % build.versionName,
                             path)
@@ -1822,11 +1879,11 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
         for path in manifest_paths(root_dir, flavours):
             if not os.path.isfile(path):
                 continue
-            if has_extension(path, 'xml'):
+            if path.endswith('.xml'):
                 regsub_file(r'android:versionCode="[^"]*"',
                             r'android:versionCode="%s"' % build.versionCode,
                             path)
-            elif has_extension(path, 'gradle'):
+            elif path.endswith('.gradle'):
                 regsub_file(r'versionCode[ =]+[0-9]+',
                             r'versionCode %s' % build.versionCode,
                             path)
@@ -3300,11 +3357,6 @@ def deploy_build_log_with_rsync(appid, vercode, log_content):
         be decoded as 'utf-8')
 
     """
 
-    # check if deploying logs is enabled in config
-    if not config.get('deploy_process_logs', False):
-        logging.debug(_('skip deploying full build logs: not enabled in config'))
-        return
-
     if not log_content:
         logging.warning(_('skip deploying full build logs: log content is empty'))
         return
@@ -3322,13 +3374,17 @@ def deploy_build_log_with_rsync(appid, vercode, log_content):
             f.write(bytes(log_content, 'utf-8'))
         else:
             f.write(log_content)
+    rsync_status_file_to_repo(log_gz_path)
+
 
-    # TODO: sign compressed log file, if a signing key is configured
+def rsync_status_file_to_repo(path, repo_subdir=None):
+    """Copy a build log or status JSON to the repo using rsync"""
+
+    if not config.get('deploy_process_logs', False):
+        logging.debug(_('skip deploying full build logs: not enabled in config'))
+        return
+
     for webroot in config.get('serverwebroot', []):
-        dest_path = os.path.join(webroot, "repo")
-        if not dest_path.endswith('/'):
-            dest_path += '/'  # make sure rsync knows this is a directory
         cmd = ['rsync',
                '--archive',
                '--delete-after',
@@ -3339,15 +3395,21 @@ def deploy_build_log_with_rsync(appid, vercode, log_content):
             cmd += ['--quiet']
         if 'identity_file' in config:
             cmd += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
-        cmd += [log_gz_path, dest_path]
 
-        # TODO: also deploy signature file if present
+        dest_path = os.path.join(webroot, "repo")
+        if repo_subdir is not None:
+            dest_path = os.path.join(dest_path, repo_subdir)
+        if not dest_path.endswith('/'):
+            dest_path += '/'  # make sure rsync knows this is a directory
+        cmd += [path, dest_path]
+
         retcode = subprocess.call(cmd)
         if retcode:
-            logging.warning(_("failed deploying build logs to '{path}'").format(path=webroot))
+            logging.error(_('process log deploy {path} to {dest} failed!')
+                          .format(path=path, dest=webroot))
         else:
-            logging.info(_("deployed build logs to '{path}'").format(path=webroot))
+            logging.debug(_('deployed process log {path} to {dest}')
+                          .format(path=path, dest=webroot))
 
 
 def get_per_app_repos():
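For a status file deployed with repo_subdir='status', the loop above assembles roughly the following command per webroot (the host, webroot, and key path are made-up examples):

    # Illustrative only; mirrors how rsync_status_file_to_repo() builds cmd.
    cmd = [
        'rsync', '--archive', '--delete-after',
        '-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i /path/to/id_rsa',
        'repo/status/build.json',
        'example.com:/var/www/fdroid/repo/status/',  # trailing / marks a directory
    ]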
@@ -20,6 +20,7 @@ import os
 import glob
 from argparse import ArgumentParser
 import logging
+import time
 
 from . import _
 from . import common
@@ -28,6 +29,17 @@ from .exception import FDroidException
 
 config = None
 options = None
+start_timestamp = time.gmtime()
+
+
+def status_update_json(signed):
+    """Output a JSON file with metadata about this run"""
+
+    logging.debug(_('Outputting JSON'))
+    output = common.setup_status_output(start_timestamp)
+    if signed:
+        output['signed'] = signed
+    common.write_status_json(output)
 
 
 def main():
@@ -45,6 +57,7 @@ def main():
     if config['archive_older'] != 0:
         repodirs.append('archive')
 
+    signed = []
     for output_dir in repodirs:
         if not os.path.isdir(output_dir):
             raise FDroidException(_("Missing output directory") + " '" + output_dir + "'")
@@ -72,7 +85,9 @@ def main():
             if p.returncode != 0:
                 raise FDroidException("Signing failed.")
 
+            signed.append(filename)
             logging.info('Signed ' + filename)
+    status_update_json(signed)
 
 
 if __name__ == "__main__":
@@ -17,12 +17,13 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import binascii
+import git
 import glob
 import json
 import os
 import re
 import shutil
+import sys
 import urllib.parse
 import urllib.request
 import yaml
@@ -230,7 +231,7 @@ def main():
     apps = metadata.read_metadata()
     app = None
 
-    build_dir = None
+    tmp_importer_dir = None
 
     local_metadata_files = common.get_local_metadata_files()
     if local_metadata_files != []:
@@ -241,35 +242,34 @@ def main():
         app = metadata.App()
         app.AutoName = os.path.basename(os.getcwd())
         app.RepoType = 'git'
-        app.UpdateCheckMode = "Tags"
 
         if os.path.exists('build.gradle') or os.path.exists('build.gradle.kts'):
             build.gradle = ['yes']
 
-        import git
-        repo = git.repo.Repo(os.getcwd())  # git repo
-        for remote in git.Remote.iter_items(repo):
+        git_repo = git.repo.Repo(os.getcwd())
+        for remote in git.Remote.iter_items(git_repo):
             if remote.name == 'origin':
-                url = repo.remotes.origin.url
+                url = git_repo.remotes.origin.url
                 if url.startswith('https://git'):  # github, gitlab
                     app.SourceCode = url.rstrip('.git')
                 app.Repo = url
                 break
-        # repo.head.commit.binsha is a bytearray stored in a str
-        build.commit = binascii.hexlify(bytearray(repo.head.commit.binsha))
         write_local_file = True
     elif options.url:
         app = get_app_from_url(options.url)
-        build_dir = clone_to_tmp_dir(app)
-        build.commit = '?'
+        tmp_importer_dir = clone_to_tmp_dir(app)
+        git_repo = git.repo.Repo(tmp_importer_dir)
         build.disable = 'Generated by import.py - check/set version fields and commit id'
         write_local_file = False
     else:
         raise FDroidException("Specify project url.")
 
+    app.UpdateCheckMode = 'Tags'
+    build.commit = common.get_head_commit_id(git_repo)
+
     # Extract some information...
-    paths = get_all_gradle_and_manifests(build_dir)
-    subdir = get_gradle_subdir(build_dir, paths)
+    paths = get_all_gradle_and_manifests(tmp_importer_dir)
+    subdir = get_gradle_subdir(tmp_importer_dir, paths)
     if paths:
         versionName, versionCode, package = common.parse_androidmanifests(paths, app)
         if not package:
@@ -303,8 +303,8 @@ def main():
             or os.path.exists(os.path.join(subdir, 'build.gradle')):
         build.gradle = ['yes']
 
-    package_json = os.path.join(build_dir, 'package.json')  # react-native
-    pubspec_yaml = os.path.join(build_dir, 'pubspec.yaml')  # flutter
+    package_json = os.path.join(tmp_importer_dir, 'package.json')  # react-native
+    pubspec_yaml = os.path.join(tmp_importer_dir, 'pubspec.yaml')  # flutter
     if os.path.exists(package_json):
         build.sudo = ['apt-get install npm', 'npm install -g react-native-cli']
         build.init = ['npm install']
@@ -314,7 +314,7 @@ def main():
             app.License = data.get('license', app.License)
             app.Description = data.get('description', app.Description)
             app.WebSite = data.get('homepage', app.WebSite)
-        app_json = os.path.join(build_dir, 'app.json')
+        app_json = os.path.join(tmp_importer_dir, 'app.json')
         if os.path.exists(app_json):
             with open(app_json) as fp:
                 data = json.load(fp)
@@ -343,8 +343,13 @@ def main():
     # Keep the repo directory to save bandwidth...
     if not os.path.exists('build'):
         os.mkdir('build')
-    if build_dir is not None:
-        shutil.move(build_dir, os.path.join('build', package))
+    build_dir = os.path.join('build', package)
+    if os.path.exists(build_dir):
+        logging.warning(_('{path} already exists, ignoring import results!')
+                        .format(path=build_dir))
+        sys.exit(1)
+    elif tmp_importer_dir is not None:
+        shutil.move(tmp_importer_dir, build_dir)
     with open('build/.fdroidvcs-' + package, 'w') as f:
         f.write(app.RepoType + ' ' + app.Repo)
@@ -28,6 +28,7 @@ from collections import OrderedDict
 import logging
 from gettext import ngettext
 import json
+import time
 import zipfile
 
 from . import _
@@ -38,6 +39,7 @@ from .exception import BuildException, FDroidException
 
 config = None
 options = None
+start_timestamp = time.gmtime()
 
 
 def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
@@ -138,6 +140,20 @@ def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
     sign_sig_key_fingerprint_list(jar_file)
 
 
+def status_update_json(newKeyAliases, generatedKeys, signedApks):
+    """Output a JSON file with metadata about this run"""
+
+    logging.debug(_('Outputting JSON'))
+    output = common.setup_status_output(start_timestamp)
+    if newKeyAliases:
+        output['newKeyAliases'] = newKeyAliases
+    if generatedKeys:
+        output['generatedKeys'] = generatedKeys
+    if signedApks:
+        output['signedApks'] = signedApks
+    common.write_status_json(output)
+
+
 def main():
 
     global config, options
@@ -195,6 +211,9 @@ def main():
     # collisions, and refuse to do any publishing if that's the case...
     allapps = metadata.read_metadata()
     vercodes = common.read_pkg_args(options.appid, True)
+    signed_apks = dict()
+    new_key_aliases = []
+    generated_keys = dict()
     allaliases = []
     for appid in allapps:
         m = hashlib.md5()  # nosec just used to generate a keyalias
@@ -314,6 +333,7 @@ def main():
             m = hashlib.md5()  # nosec just used to generate a keyalias
             m.update(appid.encode('utf-8'))
             keyalias = m.hexdigest()[:8]
+        new_key_aliases.append(keyalias)
         logging.info("Key alias: " + keyalias)
 
         # See if we already have a key for this application, and
@@ -336,6 +356,9 @@ def main():
                       '-dname', config['keydname']], envs=env_vars)
             if p.returncode != 0:
                 raise BuildException("Failed to generate key", p.output)
+            if appid not in generated_keys:
+                generated_keys[appid] = set()
+            generated_keys[appid].add(appid)
 
         signed_apk_path = os.path.join(output_dir, apkfilename)
         if os.path.exists(signed_apk_path):
@@ -353,6 +376,9 @@ def main():
                    apkfile, keyalias], envs=env_vars)
         if p.returncode != 0:
             raise BuildException(_("Failed to sign application"), p.output)
+        if appid not in signed_apks:
+            signed_apks[appid] = []
+        signed_apks[appid].append(apkfile)
 
         # Zipalign it...
         common._zipalign(apkfile, os.path.join(output_dir, apkfilename))
@@ -362,6 +388,7 @@ def main():
         logging.info('Published ' + apkfilename)
 
     store_stats_fdroid_signing_key_fingerprints(allapps.keys())
+    status_update_json(new_key_aliases, generated_keys, signed_apks)
     logging.info('published list signing-key fingerprints')
@@ -809,6 +809,7 @@ def main():
     if config.get('wiki_server') and config.get('wiki_path'):
         update_wiki()
 
+    common.write_status_json(common.setup_status_output(start_timestamp))
     sys.exit(0)
@@ -17,6 +17,7 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import os
+import time
 import zipfile
 from argparse import ArgumentParser
 import logging
@@ -27,6 +28,7 @@ from .exception import FDroidException
 
 config = None
 options = None
+start_timestamp = time.gmtime()
 
 
 def sign_jar(jar):
@@ -75,6 +77,16 @@ def sign_index_v1(repodir, json_name):
     sign_jar(jar_file)
 
 
+def status_update_json(signed):
+    """Output a JSON file with metadata about this run"""
+
+    logging.debug(_('Outputting JSON'))
+    output = common.setup_status_output(start_timestamp)
+    if signed:
+        output['signed'] = signed
+    common.write_status_json(output)
+
+
 def main():
 
     global config, options
@@ -94,7 +106,7 @@ def main():
     if config['archive_older'] != 0:
         repodirs.append('archive')
 
-    signed = 0
+    signed = []
     for output_dir in repodirs:
         if not os.path.isdir(output_dir):
             raise FDroidException("Missing output directory '" + output_dir + "'")
@@ -102,9 +114,10 @@ def main():
         unsigned = os.path.join(output_dir, 'index_unsigned.jar')
         if os.path.exists(unsigned):
             sign_jar(unsigned)
-            os.rename(unsigned, os.path.join(output_dir, 'index.jar'))
+            index_jar = os.path.join(output_dir, 'index.jar')
+            os.rename(unsigned, index_jar)
             logging.info('Signed index in ' + output_dir)
-            signed += 1
+            signed.append(index_jar)
 
         json_name = 'index-v1.json'
         index_file = os.path.join(output_dir, json_name)
@@ -112,10 +125,11 @@ def main():
             sign_index_v1(output_dir, json_name)
             os.remove(index_file)
             logging.info('Signed ' + index_file)
-            signed += 1
+            signed.append(index_file)
 
-    if signed == 0:
+    if not signed:
         logging.info(_("Nothing to do"))
+    status_update_json(signed)
 
 
 if __name__ == "__main__":
@@ -121,6 +121,57 @@ def disabled_algorithms_allowed():
     return options.allow_disabled_algorithms or config['allow_disabled_algorithms']
 
 
+def status_update_json(apps, sortedids, apks):
+    """Output a JSON file with metadata about this `fdroid update` run
+
+    :param apps: fully populated list of all applications
+    :param apks: all to be published apks
+
+    """
+
+    logging.debug(_('Outputting JSON'))
+    output = common.setup_status_output(start_timestamp)
+    output['antiFeatures'] = dict()
+    output['disabled'] = []
+    output['failedBuilds'] = dict()
+    output['noPackages'] = []
+
+    for appid in sortedids:
+        app = apps[appid]
+        for af in app.get('AntiFeatures', []):
+            antiFeatures = output['antiFeatures']  # JSON camelCase
+            if af not in antiFeatures:
+                antiFeatures[af] = dict()
+            if appid not in antiFeatures[af]:
+                antiFeatures[af]['apps'] = set()
+            antiFeatures[af]['apps'].add(appid)
+
+        apklist = []
+        for apk in apks:
+            if apk['packageName'] == appid:
+                apklist.append(apk)
+        builds = app.get('builds', [])
+        validapks = 0
+        for build in builds:
+            if not build.get('disable'):
+                builtit = False
+                for apk in apklist:
+                    if apk['versionCode'] == int(build.versionCode):
+                        builtit = True
+                        validapks += 1
+                        break
+                if not builtit:
+                    failedBuilds = output['failedBuilds']
+                    if appid not in failedBuilds:
+                        failedBuilds[appid] = []
+                    failedBuilds[appid].append(build.versionCode)
+        if validapks == 0:
+            output['noPackages'].append(appid)
+        if app.get('Disabled'):
+            output['disabled'].append(appid)
+    common.write_status_json(output, options.pretty)
+
+
 def update_wiki(apps, sortedids, apks):
     """Update the wiki
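For orientation, the repo/status/update.json that this function produces roughly takes the following shape; the values here are invented:

    example_update_json = {
        'subcommand': 'update',
        'startTimestamp': 1234567890000,   # added by setup_status_output()
        'endTimestamp': 1234567895000,     # added by write_status_json()
        'antiFeatures': {'Tracking': {'apps': ['org.example.app']}},
        'disabled': [],
        'failedBuilds': {'org.example.app': ['42']},
        'noPackages': ['org.example.other'],
    }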
@@ -2200,6 +2251,7 @@ def main():
     # Update the wiki...
     if options.wiki:
         update_wiki(apps, sortedids, apks + archapks)
+    status_update_json(apps, sortedids, apks + archapks)
 
     logging.info(_("Finished"))
@@ -64,13 +64,6 @@ class Decoder(json.JSONDecoder):
         return set(values), end
 
 
-class Encoder(json.JSONEncoder):
-    def default(self, obj):
-        if isinstance(obj, set):
-            return sorted(obj)
-        return super().default(obj)
-
-
 def write_json_report(url, remote_apk, unsigned_apk, compare_result):
     """write out the results of the verify run to JSON
@@ -118,7 +111,7 @@ def write_json_report(url, remote_apk, unsigned_apk, compare_result):
             data['packages'][packageName] = set()
         data['packages'][packageName].add(output)
     with open(jsonfile, 'w') as fp:
-        json.dump(data, fp, cls=Encoder, sort_keys=True)
+        json.dump(data, fp, cls=common.Encoder, sort_keys=True)
 
 
 def main():
@@ -55,6 +55,7 @@ fi
 gpg --import $GNUPGHOME/secring.gpg
 
 echo "build_server_always = True" >> config.py
+echo "deploy_process_logs = True" >> config.py
 echo "make_current_version_link = False" >> config.py
 echo "gpghome = '$GNUPGHOME'" >> config.py
 echo "gpgkey = 'CE71F7FB'" >> config.py
@@ -66,6 +67,7 @@ test -d repo || mkdir repo
 test -d archive || mkdir archive
 # when everything is copied over to run on SIGN machine
 ../fdroid publish
+
 ../fdroid gpgsign
 # when everything is copied over to run on BUILD machine,
 # which does not have a keyring, only a cached pubkey
@@ -5,13 +5,16 @@
 import difflib
 import glob
 import inspect
+import json
 import logging
 import optparse
 import os
 import re
 import shutil
+import subprocess
 import sys
 import tempfile
+import time
 import unittest
 import textwrap
 import yaml
@@ -1131,6 +1134,55 @@ class CommonTest(unittest.TestCase):
         with gzip.open(expected_log_path, 'r') as f:
             self.assertEqual(f.read(), mocklogcontent)
 
+    def test_deploy_status_json(self):
+        testdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, dir=self.tmpdir)
+
+        fakesubcommand = 'fakesubcommand'
+        fake_timestamp = 1234567890
+        fakeserver = 'example.com:/var/www/fbot/'
+        expected_dir = os.path.join(testdir, fakeserver.replace(':', ''), 'repo', 'status')
+
+        fdroidserver.common.options = mock.Mock()
+        fdroidserver.common.config = {}
+        fdroidserver.common.config['serverwebroot'] = [fakeserver]
+        fdroidserver.common.config['identity_file'] = 'ssh/id_rsa'
+
+        def assert_subprocess_call(cmd):
+            dest_path = os.path.join(testdir, cmd[-1].replace(':', ''))
+            if not os.path.exists(dest_path):
+                os.makedirs(dest_path)
+            return subprocess.run(cmd[:-1] + [dest_path]).returncode
+
+        with mock.patch('subprocess.call', side_effect=assert_subprocess_call):
+            with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]):
+                output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp))
+            self.assertFalse(os.path.exists(os.path.join(expected_dir, 'running.json')))
+            with mock.patch.object(sys, 'argv', ['fdroid ' + fakesubcommand]):
+                fdroidserver.common.write_status_json(output)
+            self.assertFalse(os.path.exists(os.path.join(expected_dir, fakesubcommand + '.json')))
+
+            fdroidserver.common.config['deploy_process_logs'] = True
+
+            output = fdroidserver.common.setup_status_output(time.localtime(fake_timestamp))
+            expected_path = os.path.join(expected_dir, 'running.json')
+            self.assertTrue(os.path.isfile(expected_path))
+            with open(expected_path) as fp:
+                data = json.load(fp)
+            self.assertEqual(fake_timestamp * 1000, data['startTimestamp'])
+            self.assertFalse('endTimestamp' in data)
+
+            testvalue = 'asdfasd'
+            output['testvalue'] = testvalue
+
+            fdroidserver.common.write_status_json(output)
+            expected_path = os.path.join(expected_dir, fakesubcommand + '.json')
+            self.assertTrue(os.path.isfile(expected_path))
+            with open(expected_path) as fp:
+                data = json.load(fp)
+            self.assertEqual(fake_timestamp * 1000, data['startTimestamp'])
+            self.assertTrue('endTimestamp' in data)
+            self.assertEqual(testvalue, output.get('testvalue'))
+
     def test_string_is_integer(self):
         self.assertTrue(fdroidserver.common.string_is_integer('0x10'))
         self.assertTrue(fdroidserver.common.string_is_integer('010'))
@@ -19,6 +19,7 @@ import sys
 import unittest
 import tempfile
 import textwrap
+from unittest import mock
 
 localmodule = os.path.realpath(
     os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..'))
@@ -158,6 +159,7 @@ class PublishTest(unittest.TestCase):
             os.path.join(testdir, 'unsigned', 'binaries', 'com.politedroid_6.binary.apk'))
 
         os.chdir(testdir)
-        publish.main()
+        with mock.patch.object(sys, 'argv', ['fdroid fakesubcommand']):
+            publish.main()