Merge branch 'buildbot-subcommands-schedule-verify' into 'master'

new subcommand "schedule_verify" to generate events for apps to verify

See merge request fdroid/fdroidserver!1710

commit 89dcb1aa1f
4 changed files with 310 additions and 29 deletions
fdroidserver/__init__.py
@@ -69,6 +69,7 @@ COMMANDS_INTERNAL = [
     "exec",
     "pull",
     "push",
+    "schedule_verify",
     "up",
 ]
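
The remaining changes add the module itself plus its tests. For orientation, the flow that main() in the new module drives boils down to roughly the following (an editor's sketch, not code from the commit; main() additionally loads the config and checks for a metadata/ directory first):

from fdroidserver import schedule_verify

# Index entries with no matching unsigned/*.json marker yet, newest first.
versions = schedule_verify.get_versions()
# Keep only versions with local, non-disabled build metadata under metadata/.
scheduled = schedule_verify.get_scheduled(versions)
# Each entry has the shape {'applicationId': ..., 'versionCode': ...}.
print(scheduled)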

fdroidserver/schedule_verify.py (new file, 163 lines)
@@ -0,0 +1,163 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# schedule_verify.py - part of the FDroid server tools
# Copyright (C) 2024-2025, Hans-Christoph Steiner <hans@eds.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Schedule packages to build and verify.

This creates a list of Application ID/Version Code pairs that need the
verify process to be run.

Since this is an internal command, the strings are not localized.

"""

import collections
import json
import logging
import os
import sys
import subprocess
from argparse import ArgumentParser
from pathlib import Path

from . import common, index, metadata


def get_versions(
    repo='https://f-droid.org/repo?fingerprint=43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB',
):
    """Get the list of versions that need to be built, newest first.

    Newest are built first because older versions that are still not
    built are most likely to be failing builds.  Repeating failed
    builds mostly results in another failure.

    If there are versions with the same versionCode but different
    signers, there will still only be a single entry returned.  If the
    rebuild matches one signature, that is enough.

    """
    data, _ignored = index.download_repo_index_v2(repo)
    to_schedule = collections.defaultdict(list)
    for appid, package in data['packages'].items():
        for version in package['versions'].values():
            versionCode = version['manifest']['versionCode']
            ext = common.get_file_extension(version['file']['name'])
            jsonf = f'unsigned/{appid}_{versionCode}.{ext}.json'
            if not os.path.exists(jsonf):
                to_schedule[version['added']].append(
                    {'applicationId': appid, 'versionCode': versionCode}
                )

    ret = list()
    for added in sorted(to_schedule, reverse=True):
        for i in to_schedule[added]:
            if i not in ret:
                ret.append(i)
    return ret
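
# Illustrative note, not from the committed file: for an index entry with
# applicationId 'com.politedroid' and versionCode 3 whose file is an APK, the
# marker checked above is 'unsigned/com.politedroid_3.apk.json'; if that file
# does not exist, {'applicationId': 'com.politedroid', 'versionCode': 3} is
# added to the result.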


def get_scheduled(versions):
    """Get versions that need to be built and for which local build metadata exists."""
    apps = metadata.read_metadata()
    schedule = []
    for version in versions:
        app = apps.get(version['applicationId'])
        if app and not app.get("Disabled"):
            for build in app.get("Builds", []):
                versionCode = build['versionCode']
                if versionCode == version['versionCode'] and not build.get("disable"):
                    schedule.append(
                        {
                            "applicationId": app.id,
                            "versionCode": versionCode,
                        }
                    )
    return schedule
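
# Illustrative note, not from the committed file: an app whose metadata sets
# Disabled, or a build entry with 'disable' set, is skipped here even if
# get_versions() flagged it, so only versions with usable local build metadata
# are forwarded to buildbot.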


def sendchange(scheduled, verbose=False):
    """Use `buildbot sendchange` to submit builds to the queue.

    This requires the automatically generated password to authenticate
    to the buildbot instance, which is created at a static path by the
    buildbot master:
    https://gitlab.com/fdroid/buildbot/-/merge_requests/1

    The passwd file's path is hardcoded in the server setup, which is
    defined outside of fdroidserver.  Think of the path as a variable
    name for accessing a value from the filesystem.

    """
    git_revision = common.get_head_commit_id('.')
    passwd = Path('/tmp/fdroid-buildbot-sendchange/passwd').read_text().strip()  # nosec
    for d in scheduled:
        command = [
            'buildbot',
            'sendchange',
            '--master=127.0.0.1:9999',
            f'--auth=fdroid:{passwd}',
            '--branch=master',
            '--repository=https://gitlab.com/fdroid/fdroiddata',
            f'--revision={git_revision}',
            '--category=verify',
            f"--who={d['applicationId']}",
            f"--project={d['applicationId']}",
            f"--property=versionCode:{d['versionCode']}",
            f"--property=packageName:{d['applicationId']}",
            f"metadata/{d['applicationId']}.yml",
        ]
        if verbose:
            logging.info(' '.join(command))
        subprocess.run(command, check=True)
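
# Illustrative example, not from the committed file: for a scheduled entry of
# com.politedroid versionCode 3, the command built above is roughly
#   buildbot sendchange --master=127.0.0.1:9999 --auth=fdroid:<passwd>
#     --branch=master --repository=https://gitlab.com/fdroid/fdroiddata
#     --revision=<HEAD commit> --category=verify --who=com.politedroid
#     --project=com.politedroid --property=versionCode:3
#     --property=packageName:com.politedroid metadata/com.politedroid.yml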


def main():
    parser = ArgumentParser(description="Schedule packages to build and verify.")
    common.setup_global_opts(parser)
    parser.add_argument(
        "url",
        default='https://f-droid.org/repo?fingerprint=43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB',
        nargs='?',
        help='Base URL to mirror, can include the index signing key using the query string: ?fingerprint=',
    )
    parser.add_argument(
        '--sendchange',
        action="store_true",
        help='Call buildbot sendchange with the results instead of printing to stdout.',
    )
    options = common.parse_args(parser)
    common.get_config()
    common.set_console_logging(options.verbose)

    # TODO support priority list, and ignore list (see buildbot-sendchange-build)
    if not os.path.exists('metadata'):
        logging.error("'metadata/' directory does not exist!")
        sys.exit(1)

    versions = get_versions(options.url)
    scheduled = get_scheduled(versions)

    if options.sendchange:
        sendchange(scheduled, options.verbose)
    else:
        print(json.dumps(scheduled))


if __name__ == "__main__":
    main()
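
As a quick usage sketch (an editor's note, assuming fdroidserver is importable and the working directory is an fdroiddata-style checkout containing metadata/): the module can be run directly, and prints the scheduled applicationId/versionCode pairs as JSON unless --sendchange is given, in which case it submits them to the local buildbot master instead. The URL in the second line is a placeholder for whatever repo index should be checked.

python3 -m fdroidserver.schedule_verify
python3 -m fdroidserver.schedule_verify https://example.org/fdroid/repo --sendchange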

tests/test_schedule_verify.py (new executable file, 111 lines)
@@ -0,0 +1,111 @@
#!/usr/bin/env python3

import json
import os
import unittest

from pathlib import Path
from unittest import mock, skipUnless

from fdroidserver import common, schedule_verify
from .shared_test_code import mkdtemp


basedir = Path(__file__).parent

FULL_LIST = [
    {'applicationId': 'org.maxsdkversion', 'versionCode': 4},
    {'applicationId': 'info.zwanenburg.caffeinetile', 'versionCode': 4},
    {'applicationId': 'no.min.target.sdk', 'versionCode': 987},
    {'applicationId': 'souch.smsbypass', 'versionCode': 9},
    {'applicationId': 'duplicate.permisssions', 'versionCode': 9999999},
    {'applicationId': 'com.politedroid', 'versionCode': 6},
    {'applicationId': 'com.politedroid', 'versionCode': 5},
    {'applicationId': 'com.politedroid', 'versionCode': 4},
    {'applicationId': 'com.politedroid', 'versionCode': 3},
    {'applicationId': 'obb.mainpatch.current', 'versionCode': 1619},
    {'applicationId': 'info.guardianproject.urzip', 'versionCode': 100},
    {'applicationId': 'obb.main.twoversions', 'versionCode': 1101617},
    {'applicationId': 'fake.ota.update', 'versionCode': 1234},
    {'applicationId': 'obb.main.twoversions', 'versionCode': 1101615},
    {'applicationId': 'obb.main.twoversions', 'versionCode': 1101613},
    {'applicationId': 'obb.main.oldversion', 'versionCode': 1444412523},
]


def _mock(repo):  # pylint: disable=unused-argument
    indexf = basedir / 'repo' / 'index-v2.json'
    return json.loads(indexf.read_text()), None


class Schedule_verifyTest(unittest.TestCase):
    def setUp(self):
        self._td = mkdtemp()
        self.testdir = self._td.name
        os.chdir(self.testdir)
        os.mkdir('unsigned')

    def tearDown(self):
        self._td.cleanup()
        common.config = None


@skipUnless(False, 'This involves downloading the full index')
class Schedule_verify_main(Schedule_verifyTest):
    def test_main_smokecheck(self):
        schedule_verify.main()


class Schedule_verify_get_versions(Schedule_verifyTest):
    def setUp(self):
        super().setUp()
        common.config = {'sdk_path': os.getenv('ANDROID_HOME')}
        common.config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner')

    @mock.patch('fdroidserver.index.download_repo_index_v2', _mock)
    def test_get_versions_none_exist(self):
        self.assertEqual(FULL_LIST, schedule_verify.get_versions())

    @mock.patch('fdroidserver.index.download_repo_index_v2', _mock)
    def test_get_versions_all_json_exist(self):
        for d in FULL_LIST:
            appid = d['applicationId']
            if appid == 'fake.ota.update':
                ext = 'zip'
            else:
                ext = 'apk'
            Path(f"unsigned/{appid}_{d['versionCode']}.{ext}.json").write_text('{}')
        self.assertEqual([], schedule_verify.get_versions())

    @mock.patch('fdroidserver.index.download_repo_index_v2', _mock)
    def test_get_versions_all_apks_exist(self):
        for d in FULL_LIST:
            appid = d['applicationId']
            if appid != 'fake.ota.update':
                Path(f"unsigned/{appid}_{d['versionCode']}.apk.json").write_text('{}')
        self.assertEqual(
            [{'applicationId': 'fake.ota.update', 'versionCode': 1234}],
            schedule_verify.get_versions(),
        )


class Schedule_verify_get_scheduled(Schedule_verifyTest):
    def setUp(self):
        super().setUp()
        os.chdir(basedir)
        common.config = {'sdk_path': os.getenv('ANDROID_HOME')}
        common.config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner')

    @mock.patch('fdroidserver.index.download_repo_index_v2', _mock)
    def test_get_scheduled_none_exist(self):
        versions = schedule_verify.get_versions(basedir / 'repo')
        self.assertEqual(
            [
                {'applicationId': 'souch.smsbypass', 'versionCode': 9},
                {'applicationId': 'com.politedroid', 'versionCode': 6},
                {'applicationId': 'com.politedroid', 'versionCode': 5},
                {'applicationId': 'com.politedroid', 'versionCode': 4},
                {'applicationId': 'com.politedroid', 'versionCode': 3},
            ],
            schedule_verify.get_scheduled(versions),
        )
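
Assuming the layout of the existing test suite (tests/ as a package providing shared_test_code and an index-v2.json fixture under tests/repo/), the new tests should be runnable on their own with something like python3 -m unittest tests.test_schedule_verify from the source root.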

tests/test_signatures.py
@@ -4,56 +4,62 @@ import hashlib
import os
import sys
import unittest
from tempfile import TemporaryDirectory

from fdroidserver import common, signatures

from .shared_test_code import TmpCwd
from .shared_test_code import mkdtemp

basedir = os.path.dirname(__file__)


class SignaturesTest(unittest.TestCase):
    def setUp(self):
        self._td = mkdtemp()
        self.testdir = self._td.name
        os.chdir(self.testdir)

        common.config = None
        config = common.read_config()
        config['jarsigner'] = common.find_sdk_tools_cmd('jarsigner')
        common.config = config

    def tearDown(self):
        self._td.cleanup()
        common.config = None

    @unittest.skipIf(sys.byteorder == 'big', "androguard is not ported to big-endian")
    def test_main(self):
        class OptionsFixture:
            APK = [os.path.join(basedir, 'repo', 'com.politedroid_3.apk')]

        with TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
            signatures.extract(OptionsFixture)

            # check if extracted signatures are where they are supposed to be,
            # and also verify whether the extracted files contain what they should
            filesAndHashes = (
                (
                    os.path.join(
                        'metadata', 'com.politedroid', 'signatures', '3', 'MANIFEST.MF'
                    ),
                    '7dcd83f0c41a75457fd2311bf3c4578f80d684362d74ba8dc52838d353f31cf2',
                ),
                (
                    os.path.join(
                        'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.RSA'
                    ),
                    '883ef3d5a6e0bf69d2a58d9e255a7930f08a49abc38e216ed054943c99c8fdb4',
                ),
                (
                    os.path.join(
                        'metadata', 'com.politedroid', 'signatures', '3', 'RELEASE.SF'
                    ),
                    '99fbb3211ef5d7c1253f3a7ad4836eadc9905103ce6a75916c40de2831958284',
                ),
            )
            for path, checksum in filesAndHashes:
                self.assertTrue(
                    os.path.isfile(path),
                    f'check whether {path!r} was extracted correctly.',
                )
                with open(path, 'rb') as f:
                    self.assertEqual(hashlib.sha256(f.read()).hexdigest(), checksum)