Mirror of https://github.com/f-droid/fdroidserver.git, synced 2025-11-04 22:40:29 +03:00

Merge branch 'refactor-into-single-yaml-post-process' into 'master'

refactor into a single function to do YAML post processing

See merge request fdroid/fdroidserver!1346

commit 2135a9e7fb

14 changed files with 485 additions and 152 deletions
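A minimal sketch of the new call pattern, pieced together from the hunks below; the file path and the surrounding scaffolding are hypothetical, only the function names come from this diff:

# Before this MR, callers passed an App into parse_yaml_metadata(mf, app) and
# then ran a separate, generic post_metadata_parse(app).  After this MR,
# parse_yaml_metadata(mf) returns a plain dict that has already been through
# post_parse_yaml_metadata(), the single YAML post-processing function.
from fdroidserver import metadata

with open('metadata/com.example.yml', encoding='utf-8') as mf:  # hypothetical path
    yamldata = metadata.parse_yaml_metadata(mf)

app = metadata.App()
app.update(yamldata)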
@@ -58,6 +58,7 @@ metadata_v0:
    - ../tests/dump_internal_metadata_format.py
    - sed -i
        -e "/buildozer/d"
        -e '/^comments\W /d'
        -e 's,maven\(\W\) false,maven\1 null,'
        metadata/dump_*/*.yaml
    - diff -uw metadata/dump_*
@@ -487,7 +488,7 @@ servergitmirrors:
    - diff repo/index-v1.jar index-v1.jar

Build documentation:
  image: debian:bullseye-backports
  image: debian:bookworm
  <<: *apt-template
  script:
    - apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver
@@ -2,4 +2,6 @@

extends: default
rules:
  document-start: disable
  line-length: disable
  truthy: disable
@@ -1,3 +1,4 @@
---

# You may want to alter these before running ./makebuildserver
@@ -323,7 +323,7 @@ def main():
if git_modules.exists():
    build.submodules = True

metadata.post_metadata_parse(app)
metadata.post_parse_yaml_metadata(app)

app['Builds'].append(build)
@@ -878,7 +878,7 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
for k, v in sorted(appdict.items()):
    if not v:
        continue
    if k in ('Builds', 'comments', 'metadatapath',
    if k in ('Builds', 'metadatapath',
             'ArchivePolicy', 'AutoName', 'AutoUpdateMode', 'MaintainerNotes',
             'Provides', 'Repo', 'RepoType', 'RequiresRoot',
             'UpdateCheckData', 'UpdateCheckIgnore', 'UpdateCheckMode',
@@ -159,7 +159,6 @@ class App(dict):
self.id = None
self.metadatapath = None
self.Builds = []
self.comments = {}
self.added = None
self.lastUpdated = None
@@ -607,91 +606,53 @@ def read_metadata(appids={}, sort_by_time=False):
    return apps


def sorted_builds(builds):
    return sorted(builds, key=lambda build: build.versionCode)


def post_metadata_parse(app):
    for k, v in app.items():
        if fieldtype(k) == TYPE_LIST:
            if isinstance(v, str):
                app[k] = [v, ]
            elif v:
                app[k] = [str(i) for i in v]
        elif fieldtype(k) == TYPE_INT:
            if v:
                app[k] = int(v)
        elif fieldtype(k) == TYPE_STRING:
            if v:
                app[k] = str(v)
        else:
            if type(v) in (float, int):
                app[k] = str(v)

    builds = []
    if 'Builds' in app:
        for build in app.get('Builds', []):
            if not isinstance(build, Build):
                build = Build(build)
            for k, v in build.items():
                if not (v is None):
                    if flagtype(k) == TYPE_LIST:
                        if isinstance(v, str):
                            build[k] = [v]
                    elif flagtype(k) is TYPE_INT:
                        build[k] = v
                    elif flagtype(k) is TYPE_STRING:
                        build[k] = str(v)
            builds.append(build)

    app['Builds'] = sorted_builds(builds)


# Parse metadata for a single application.
#
# 'metadatapath' - the file path to read. The "Application ID" aka
#                  "Package Name" for the application comes from this
#                  filename. Pass None to get a blank entry.
#
# Returns a dictionary containing all the details of the application. There are
# two major kinds of information in the dictionary. Keys beginning with capital
# letters correspond directly to identically named keys in the metadata file.
# Keys beginning with lower case letters are generated in one way or another,
# and are not found verbatim in the metadata.
#
# Known keys not originating from the metadata are:
#
# 'comments'         - a list of comments from the metadata file. Each is
#                      a list of the form [field, comment] where field is
#                      the name of the field it preceded in the metadata
#                      file. Where field is None, the comment goes at the
#                      end of the file. Alternatively, 'build:version' is
#                      for a comment before a particular build version.
# 'descriptionlines' - original lines of description as formatted in the
#                      metadata file.
#


def parse_metadata(metadatapath):
    """Parse metadata file, also checking the source repo for .fdroid.yml.

    This function finds the relevant files, gets them parsed, converts
    dicts into App and Build instances, and combines the results into
    a single App instance.

    If this is a metadata file from fdroiddata, it will first load the
    source repo type and URL from fdroiddata, then read .fdroid.yml if
    it exists, then include the rest of the metadata as specified in
    fdroiddata, so that fdroiddata has precedence over the metadata in
    the source code.

    .fdroid.yml is embedded in the app's source repo, so it is
    "user-generated". That means that it can have weird things in it
    that need to be removed so they don't break the overall process,
    e.g. if the upstream developer includes some broken field, it can
    be overridden in the metadata file.

    Parameters
    ----------
    metadatapath
        The file path to read. The "Application ID" aka "Package Name"
        for the application comes from this filename.

    Raises
    ------
    FDroidException when there are syntax errors.

    Returns
    -------
    Returns a dictionary containing all the details of the
    application. There are two major kinds of information in the
    dictionary. Keys beginning with capital letters correspond
    directly to identically named keys in the metadata file. Keys
    beginning with lower case letters are generated in one way or
    another, and are not found verbatim in the metadata.

    """
    metadatapath = Path(metadatapath)
    app = App()
    app.metadatapath = metadatapath.as_posix()
    name = metadatapath.stem
    if name != '.fdroid':
        app.id = name

    if metadatapath.stem != '.fdroid':
        app.id = metadatapath.stem
    if metadatapath.suffix == '.yml':
        with metadatapath.open('r', encoding='utf-8') as mf:
            parse_yaml_metadata(mf, app)
            app.update(parse_yaml_metadata(mf))
    else:
        _warn_or_exception(_('Unknown metadata format: {path} (use: *.yml)')
                           .format(path=metadatapath))
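The precedence rule described in the docstring above (fdroiddata wins, .fdroid.yml only fills in missing fields) comes down to the "if k not in app" check in the next hunk. A minimal sketch with made-up field values:

fdroiddata_fields = {'AutoName': 'Example', 'License': 'GPL-3.0-only'}    # hypothetical values
fdroid_yml_fields = {'AutoName': 'Upstream Name', 'Categories': ['Time']}

app = dict(fdroiddata_fields)
for k, v in fdroid_yml_fields.items():
    if k not in app:         # same precedence check as in parse_metadata()
        app[k] = v

assert app['AutoName'] == 'Example'     # fdroiddata takes precedence
assert app['Categories'] == ['Time']    # .fdroid.yml fills the gap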
@@ -712,8 +673,13 @@ def parse_metadata(metadatapath):
        if k not in app:
            app[k] = v

    post_metadata_parse(app)
    builds = []
    for build in app.get('Builds', []):
        builds.append(Build(build))
    if builds:
        app['Builds'] = builds

    # if only .fdroid.yml was found, then this finds the appid
    if not app.id:
        if app.get('Builds'):
            build = app['Builds'][-1]
@@ -727,15 +693,18 @@ def parse_metadata(metadatapath):
    return app


def parse_yaml_metadata(mf, app):
def parse_yaml_metadata(mf):
    """Parse the .yml file and post-process it.

    This function handles parsing a metadata YAML file and converting
    all the various data types into a consistent internal
    representation. The results are meant to update an existing App
    instance or used as a plain dict.

    Clean metadata .yml files can be used directly, but in order to
    make a better user experience for people editing .yml files, there
    is post processing. .fdroid.yml is embedded in the app's source
    repo, so it is "user-generated". That means that it can have
    weird things in it that need to be removed so they don't break the
    overall process.
    is post processing. That makes the parsing perform something like
    Strict YAML.

    """
    try:
@@ -747,13 +716,26 @@ def parse_yaml_metadata(mf, app):
                            + common.run_yamllint(mf.name, indent=4),
                            cause=e)

    if yamldata is None or yamldata == '':
        yamldata = dict()
    if not isinstance(yamldata, dict):
        _warn_or_exception(
            _("'{path}' has invalid format, it should be a dictionary!").format(
                path=mf.name
            )
        )
        logging.error(_('Using blank dictionary instead of contents of {path}!').format(
            path=mf.name)
        )
        yamldata = dict()

    deprecated_in_yaml = ['Provides']

    if yamldata:
        for field in tuple(yamldata.keys()):
            if field not in yaml_app_fields + deprecated_in_yaml:
                msg = (_("Unrecognised app field '{fieldname}' in '{path}'")
                       .format(fieldname=field, path=mf.name))
                msg = _("Unrecognised app field '{fieldname}' in '{path}'").format(
                    fieldname=field, path=mf.name
                )
                if Path(mf.name).name == '.fdroid.yml':
                    logging.error(msg)
                    del yamldata[field]
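A minimal sketch of the blank-dict fallback coded above, mirroring the test_parse_yaml_metadata_continue_on_warning test added later in this diff; the mock filename comes from that test:

import io

from fdroidserver import metadata

metadata.warnings_action = None                  # log warnings instead of raising
mf = io.StringIO('[AntiFeatures: Tracking]')     # a YAML list, not a dict, so invalid
mf.name = 'mock_filename.yaml'
assert metadata.parse_yaml_metadata(mf) == {}    # parser carries on with a blank dict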
@@ -762,48 +744,101 @@ def parse_yaml_metadata(mf, app):

    for deprecated_field in deprecated_in_yaml:
        if deprecated_field in yamldata:
            logging.warning(_("Ignoring '{field}' in '{metapath}' "
                              "metadata because it is deprecated.")
                            .format(field=deprecated_field,
                                    metapath=mf.name))
            del yamldata[deprecated_field]
            logging.warning(
                _(
                    "Ignoring '{field}' in '{metapath}' "
                    "metadata because it is deprecated."
                ).format(field=deprecated_field, metapath=mf.name)
            )

    if yamldata.get('Builds', None):
        msg = _("Unrecognised build flag '{build_flag}' in '{path}'")
        for build in yamldata.get('Builds', []):
            # put all build flag keywords into a set to avoid
            # excessive looping action
            build_flag_set = set()
            for build_flag in build.keys():
                build_flag_set.add(build_flag)
            for build_flag in build_flag_set:
            for build_flag in build:
                if build_flag not in build_flags:
                    _warn_or_exception(
                        _("Unrecognised build flag '{build_flag}' "
                          "in '{path}'").format(build_flag=build_flag,
                                                path=mf.name))
                    _warn_or_exception(msg.format(build_flag=build_flag, path=mf.name))

    post_parse_yaml_metadata(yamldata)
    app.update(yamldata)
    return app
    return yamldata


def _normalize_type_string(v):
    """Normalize any data to TYPE_STRING.

    YAML 1.2's booleans are all lowercase.

    Things like versionName are strings, but without quotes can be
    numbers. Like "versionName: 1.0" would be a YAML float, but
    should be a string.

    """
    if isinstance(v, bool):
        if v:
            return 'true'
        return 'false'
    return str(v)


def post_parse_yaml_metadata(yamldata):
    """Transform yaml metadata to our internal data format."""
    for build in yamldata.get('Builds', []):
        for flag in build.keys():
            _flagtype = flagtype(flag)
    """Convert human-readable metadata data structures into consistent data structures.

            if _flagtype is TYPE_SCRIPT:
                if isinstance(build[flag], str):
                    build[flag] = [build[flag]]
            elif _flagtype is TYPE_STRING:
                # things like versionNames are strings, but without quotes can be numbers
                if isinstance(build[flag], float) or isinstance(build[flag], int):
                    build[flag] = str(build[flag])
    This also handles conversions that make metadata YAML behave
    something like StrictYAML. Specifically, a field should have a
    fixed value type, regardless of YAML 1.2's type auto-detection.

    """
    for k, v in yamldata.items():
        if fieldtype(k) == TYPE_LIST:
            if isinstance(v, str):
                yamldata[k] = [v, ]
            elif v:
                yamldata[k] = [str(i) for i in v]
        elif fieldtype(k) == TYPE_INT:
            if v:
                yamldata[k] = int(v)
        elif fieldtype(k) == TYPE_STRING:
            if v or v == 0:
                yamldata[k] = _normalize_type_string(v)
        else:
            if type(v) in (float, int):
                yamldata[k] = str(v)

    builds = []
    for build in yamldata.get('Builds', []):
        for k, v in build.items():
            if v is None:
                continue

            _flagtype = flagtype(k)
            if _flagtype is TYPE_STRING:
                if v or v == 0:
                    build[k] = _normalize_type_string(v)
            elif _flagtype is TYPE_INT:
                build[k] = v
                # versionCode must be int
                if not isinstance(build[flag], int):
                    _warn_or_exception(_('{build_flag} must be an integer, found: {value}')
                                       .format(build_flag=flag, value=build[flag]))
                if not isinstance(v, int):
                    _warn_or_exception(
                        _('{build_flag} must be an integer, found: {value}').format(
                            build_flag=k, value=v
                        )
                    )
            elif _flagtype in (TYPE_LIST, TYPE_SCRIPT):
                if isinstance(v, str) or isinstance(v, int):
                    build[k] = [_normalize_type_string(v)]
                else:
                    build[k] = v
                    # float and dict are here only to keep things compatible
                    if type(build[k]) not in (list, tuple, set, float, dict):
                        _warn_or_exception(
                            _('{build_flag} must be list or string, found: {value}').format(
                                build_flag=k, value=v
                            )
                        )

        builds.append(build)

    if builds:
        yamldata['Builds'] = sorted(builds, key=lambda build: build['versionCode'])


def write_yaml(mf, app):
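A minimal sketch of what the two new functions do, matching the expectations in the unit tests added further down in this diff; the versionCode and commit values here are made up:

from fdroidserver import metadata

# YAML 1.2 type auto-detection is undone so each field keeps one fixed type.
assert metadata._normalize_type_string(True) == 'true'   # YAML 1.2 booleans are lowercase
assert metadata._normalize_type_string(1.0) == '1.0'     # bare versionName-style floats
assert metadata._normalize_type_string(0) == '0'

yamldata = {'Builds': [{'versionCode': 2, 'commit': 1.0},
                       {'versionCode': 1, 'commit': 'v1'}]}
metadata.post_parse_yaml_metadata(yamldata)
assert yamldata['Builds'][0]['versionCode'] == 1         # builds get sorted by versionCode
assert yamldata['Builds'][1]['commit'] == '1.0'          # TYPE_STRING flags are normalized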
@@ -832,6 +867,7 @@ def write_yaml(mf, app):
    # suitable version ruamel.yaml imported successfully

    def _field_to_yaml(typ, value):
        """Convert data to YAML 1.2 format that keeps the right TYPE_*."""
        if typ is TYPE_STRING:
            return str(value)
        elif typ is TYPE_INT:
@@ -13,7 +13,7 @@ if [ -z "$files" ]; then
    SH_FILES="hooks/pre-commit"
    BASH_FILES="gradlew-fdroid jenkins-build-all jenkins-setup-build-environment jenkins-test completion/bash-completion buildserver/provision-*"
    RB_FILES="buildserver/Vagrantfile"
    YML_FILES="buildserver/*.yml examples/*.yml"
    YML_FILES=".*.yml .yamllint */*.yml */*.yaml"
else
    # if actually committing right now, then only run on the files
    # that are going to be committed at this moment
@@ -36,7 +36,7 @@ else
            *.rb)
                RB_FILES+=" $f"
                ;;
            *.yml)
            *.yml|.*.yml|.yamllint)
                YML_FILES+=" $f"
                ;;
            *)
@@ -94,6 +94,7 @@ find_command() {
}

DASH=$(find_command dash)
PYDOCSTYLE=$(find_command pydocstyle)
PYFLAKES=$(find_command pyflakes)
PEP8=$(find_command pycodestyle pep8)
RUBY=$(find_command ruby)
@@ -103,6 +104,9 @@ if [ "$PY_FILES $PY_TEST_FILES" != " " ]; then
    if ! $PYFLAKES $PY_FILES $PY_TEST_FILES; then
        err "pyflakes tests failed!"
    fi
    if ! $PYDOCSTYLE $PY_FILES $PY_TEST_FILES; then
        err "pydocstyle tests failed!"
    fi
fi

if [ "$PY_FILES" != "" ]; then
@@ -139,7 +143,7 @@ for f in $RB_FILES; do
done

for f in $YML_FILES; do
    if ! $YAMLLINT $f 1>/dev/null; then
    if ! $YAMLLINT $f; then
        err ".yml tests failed on $f!"
    fi
done
setup.py
@@ -10,15 +10,15 @@ from setuptools.command.install import install


class VersionCheckCommand(Command):
    """Make sure git tag and version match before uploading"""
    """Make sure git tag and version match before uploading."""

    user_options = []

    def initialize_options(self):
        """Abstract method that is required to be overwritten"""
        """Abstract method that is required to be overwritten."""

    def finalize_options(self):
        """Abstract method that is required to be overwritten"""
        """Abstract method that is required to be overwritten."""

    def run(self):
        version = self.distribution.get_version()
@@ -2,17 +2,20 @@

# http://www.drdobbs.com/testing/unit-testing-with-python/240165163

import git
import logging
import optparse
import os
import shutil
import sys
import tempfile
import unittest
import yaml
from unittest import mock
from pathlib import Path

import requests
from testcommon import TmpCwd
from testcommon import TmpCwd, mkdtemp

localmodule = Path(__file__).resolve().parent.parent
print('localmodule: ' + str(localmodule))
@@ -22,6 +25,7 @@ if localmodule not in sys.path:
import fdroidserver.common
import fdroidserver.import_subcommand
import fdroidserver.metadata
from fdroidserver.exception import FDroidException


class ImportTest(unittest.TestCase):
@@ -32,6 +36,13 @@ class ImportTest(unittest.TestCase):
        self.basedir = localmodule / 'tests'
        fdroidserver.import_subcommand.options = mock.Mock()
        fdroidserver.import_subcommand.options.rev = None
        os.chdir(self.basedir)
        self._td = mkdtemp()
        self.testdir = self._td.name

    def tearDown(self):
        os.chdir(self.basedir)
        self._td.cleanup()

    def test_import_gitlab(self):
        with tempfile.TemporaryDirectory() as testdir, TmpCwd(testdir):
@@ -122,6 +133,32 @@ class ImportTest(unittest.TestCase):
        with self.assertRaises(ValueError):
            fdroidserver.import_subcommand.get_app_from_url(url)

    @mock.patch('sys.argv', ['fdroid import', '-u', 'https://example.com/mystery/url'])
    @mock.patch('fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a: None)
    def test_unrecognized_url(self):
        """Test whether error is thrown when the RepoType was not found.

        clone_to_tmp_dir is mocked out to prevent this test from using
        the network, if it gets past the code that throws the error.

        """
        with self.assertRaises(FDroidException):
            fdroidserver.import_subcommand.main()

    @mock.patch('sys.argv', ['fdroid import', '-u', 'https://fake/git/url.git'])
    @mock.patch('fdroidserver.import_subcommand.clone_to_tmp_dir', lambda a: Path('td'))
    def test_main_local_git(self):
        os.chdir(self.testdir)
        git.Repo.init('td')
        with Path('td/build.gradle').open('w') as fp:
            fp.write('android { defaultConfig { applicationId "com.example" } }')
        fdroidserver.import_subcommand.main()
        with open('metadata/com.example.yml') as fp:
            data = yaml.safe_load(fp)
        self.assertEqual(data['Repo'], sys.argv[2])
        self.assertEqual(data['RepoType'], 'git')
        self.assertEqual(1, len(data['Builds']))


if __name__ == "__main__":
    parser = optparse.OptionParser()
@@ -1,5 +1,6 @@
#!/usr/bin/env python3

import copy
import io
import logging
import optparse
@@ -26,9 +27,8 @@ print('localmodule: ' + str(localmodule))
if localmodule not in sys.path:
    sys.path.insert(0, str(localmodule))


import fdroidserver.common
import fdroidserver.metadata
import fdroidserver
from fdroidserver import metadata
from fdroidserver.exception import MetaDataException
@@ -288,6 +288,18 @@ class MetadataTest(unittest.TestCase):
            (Path('metadata-rewrite-yml') / file_name).read_text(encoding='utf-8'),
        )

    def test_normalize_type_string(self):
        """TYPE_STRING currently has some quirky behavior."""
        self.assertEqual('123456', metadata._normalize_type_string(123456))
        self.assertEqual('1.0', metadata._normalize_type_string(1.0))
        self.assertEqual('0', metadata._normalize_type_string(0))
        self.assertEqual('0.0', metadata._normalize_type_string(0.0))
        self.assertEqual('0.1', metadata._normalize_type_string(0.1))
        self.assertEqual('[]', metadata._normalize_type_string(list()))
        self.assertEqual('{}', metadata._normalize_type_string(dict()))
        self.assertEqual('false', metadata._normalize_type_string(False))
        self.assertEqual('true', metadata._normalize_type_string(True))

    def test_post_parse_yaml_metadata(self):
        fdroidserver.metadata.warnings_action = 'error'
        yamldata = OrderedDict()
@@ -362,6 +374,21 @@ class MetadataTest(unittest.TestCase):
            allappids.append(appid)
        self.assertEqual(randomlist, allappids)

    def test_parse_yaml_metadata_0size_file(self):
        mf = io.StringIO('')
        mf.name = 'mock_filename.yaml'
        self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict())

    def test_parse_yaml_metadata_empty_dict_file(self):
        mf = io.StringIO('{}')
        mf.name = 'mock_filename.yaml'
        self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict())

    def test_parse_yaml_metadata_empty_string_file(self):
        mf = io.StringIO('""')
        mf.name = 'mock_filename.yaml'
        self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict())

    def test_parse_yaml_metadata_unknown_app_field(self):
        mf = io.StringIO(
            textwrap.dedent(
@@ -375,7 +402,7 @@ class MetadataTest(unittest.TestCase):
        mf.name = 'mock_filename.yaml'
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            with self.assertRaises(MetaDataException):
                fdroidserver.metadata.parse_yaml_metadata(mf, {})
                fdroidserver.metadata.parse_yaml_metadata(mf)

    def test_parse_yaml_metadata_unknown_build_flag(self):
        mf = io.StringIO(
@@ -390,7 +417,23 @@ class MetadataTest(unittest.TestCase):
        mf.name = 'mock_filename.yaml'
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            with self.assertRaises(MetaDataException):
                fdroidserver.metadata.parse_yaml_metadata(mf, {})
                fdroidserver.metadata.parse_yaml_metadata(mf)

    def test_parse_yaml_metadata_continue_on_warning(self):
        """When errors are disabled, parsing should provide something that can work.

        When errors are disabled, then it should try to give data that
        lets something happen. A zero-length file is valid for
        operation, it just declares an Application ID as "known" and
        nothing else. This example gives a list as the base in the
        .yml file, which is unparsable, so it gives a warning message
        and carries on with a blank dict.

        """
        fdroidserver.metadata.warnings_action = None
        mf = io.StringIO('[AntiFeatures: Tracking]')
        mf.name = 'mock_filename.yaml'
        self.assertEqual(fdroidserver.metadata.parse_yaml_metadata(mf), dict())

    def test_parse_yaml_srclib_corrupt_file(self):
        with tempfile.TemporaryDirectory() as testdir:
@@ -487,9 +530,8 @@ class MetadataTest(unittest.TestCase):
        )
        mf.name = 'mock_filename.yaml'
        mf.seek(0)
        result = {}
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            fdroidserver.metadata.parse_yaml_metadata(mf, result)
            result = fdroidserver.metadata.parse_yaml_metadata(mf)
        self.maxDiff = None
        self.assertDictEqual(
            result,
@@ -543,9 +585,8 @@ class MetadataTest(unittest.TestCase):
        )
        mf.name = 'mock_filename.yaml'
        mf.seek(0)
        result = {}
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            fdroidserver.metadata.parse_yaml_metadata(mf, result)
            result = fdroidserver.metadata.parse_yaml_metadata(mf)
        self.maxDiff = None
        self.assertDictEqual(
            result,
@@ -594,9 +635,8 @@ class MetadataTest(unittest.TestCase):
        )
        mf.name = 'mock_filename.yaml'
        mf.seek(0)
        result = {}
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            fdroidserver.metadata.parse_yaml_metadata(mf, result)
            result = fdroidserver.metadata.parse_yaml_metadata(mf)
        self.assertDictEqual(
            result,
            {
@@ -629,9 +669,8 @@ class MetadataTest(unittest.TestCase):
        )
        mf.name = 'mock_filename.yaml'
        mf.seek(0)
        result = {}
        with mock.patch('fdroidserver.metadata.warnings_action', 'error'):
            fdroidserver.metadata.parse_yaml_metadata(mf, result)
            result = fdroidserver.metadata.parse_yaml_metadata(mf)
        self.assertNotIn('Provides', result)
        self.assertNotIn('provides', result)
@@ -1151,6 +1190,223 @@ class MetadataTest(unittest.TestCase):
            build.ndk_path()


class PostMetadataParseTest(unittest.TestCase):
    """Test the functions that post process the YAML input.

    The following series of "post_metadata_parse" tests map out the
    current state of automatic type conversion in the YAML post
    processing. They are not necessarily a statement of how things
    should be, but more to surface the details of how it functions.

    """

    def setUp(self):
        fdroidserver.metadata.warnings_action = 'error'

    def _post_metadata_parse_app_list(self, from_yaml, expected):
        app = {'AntiFeatures': from_yaml}
        metadata.post_parse_yaml_metadata(app)
        return {'AntiFeatures': expected}, app

    def _post_metadata_parse_app_string(self, from_yaml, expected):
        app = {'Repo': from_yaml}
        metadata.post_parse_yaml_metadata(app)
        return {'Repo': expected}, app

    def _post_metadata_parse_build_bool(self, from_yaml, expected):
        tested_key = 'submodules'
        app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]}
        post = copy.deepcopy(app)
        metadata.post_parse_yaml_metadata(post)
        del app['Builds'][0]['versionCode']
        del post['Builds'][0]['versionCode']
        for build in post['Builds']:
            for k in list(build):
                if k != tested_key:
                    del build[k]
        app['Builds'][0][tested_key] = expected
        return app, post

    def _post_metadata_parse_build_int(self, from_yaml, expected):
        tested_key = 'versionCode'
        app = {'Builds': [{'versionCode': from_yaml}]}
        post = copy.deepcopy(app)
        metadata.post_parse_yaml_metadata(post)
        for build in post['Builds']:
            for k in list(build):
                if k != tested_key:
                    del build[k]
        app['Builds'][0][tested_key] = expected
        return app, post

    def _post_metadata_parse_build_list(self, from_yaml, expected):
        tested_key = 'rm'
        app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]}
        post = copy.deepcopy(app)
        metadata.post_parse_yaml_metadata(post)
        del app['Builds'][0]['versionCode']
        del post['Builds'][0]['versionCode']
        for build in post['Builds']:
            for k in list(build):
                if k != tested_key:
                    del build[k]
        app['Builds'][0][tested_key] = expected
        return app, post

    def _post_metadata_parse_build_script(self, from_yaml, expected):
        tested_key = 'build'
        app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]}
        post = copy.deepcopy(app)
        metadata.post_parse_yaml_metadata(post)
        del app['Builds'][0]['versionCode']
        del post['Builds'][0]['versionCode']
        for build in post['Builds']:
            for k in list(build):
                if k != tested_key:
                    del build[k]
        app['Builds'][0][tested_key] = expected
        return app, post

    def _post_metadata_parse_build_string(self, from_yaml, expected):
        tested_key = 'commit'
        app = {'Builds': [{'versionCode': 1, tested_key: from_yaml}]}
        post = copy.deepcopy(app)
        metadata.post_parse_yaml_metadata(post)
        del app['Builds'][0]['versionCode']
        del post['Builds'][0]['versionCode']
        for build in post['Builds']:
            for k in list(build):
                if k != tested_key:
                    del build[k]
        app['Builds'][0][tested_key] = expected
        return app, post

    def test_post_metadata_parse_int(self):
        """Run the int 123456 through the various field and flag types."""
        with self.assertRaises(TypeError):
            self._post_metadata_parse_app_list(123456, TypeError)
        self.assertEqual(*self._post_metadata_parse_app_string(123456, '123456'))
        self.assertEqual(*self._post_metadata_parse_build_bool(123456, 123456))
        self.assertEqual(*self._post_metadata_parse_build_int(123456, 123456))
        self.assertEqual(*self._post_metadata_parse_build_list(123456, ['123456']))
        self.assertEqual(*self._post_metadata_parse_build_script(123456, ['123456']))
        self.assertEqual(*self._post_metadata_parse_build_string(123456, '123456'))

    def test_post_metadata_parse_int_0(self):
        """Run the int 0 through the various field and flag types."""
        self.assertEqual(*self._post_metadata_parse_app_list(0, 0))
        self.assertEqual(*self._post_metadata_parse_app_string(0, '0'))
        self.assertEqual(*self._post_metadata_parse_build_bool(0, 0))
        self.assertEqual(*self._post_metadata_parse_build_int(0, 0))
        self.assertEqual(*self._post_metadata_parse_build_list(0, ['0']))
        self.assertEqual(*self._post_metadata_parse_build_script(0, ['0']))
        self.assertEqual(*self._post_metadata_parse_build_string(0, '0'))

    def test_post_metadata_parse_float_0_0(self):
        """Run the float 0.0 through the various field and flag types."""
        self.assertEqual(*self._post_metadata_parse_app_list(0.0, 0.0))
        self.assertEqual(*self._post_metadata_parse_app_string(0.0, '0.0'))
        self.assertEqual(*self._post_metadata_parse_build_bool(0.0, 0.0))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int(0.0, MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list(0.0, 0.0))
        self.assertEqual(*self._post_metadata_parse_build_script(0.0, 0.0))
        self.assertEqual(*self._post_metadata_parse_build_string(0.0, '0.0'))

    def test_post_metadata_parse_float_0_1(self):
        """Run the float 0.1 through the various field and flag types."""
        with self.assertRaises(TypeError):
            self._post_metadata_parse_app_list(0.1, TypeError)
        self.assertEqual(*self._post_metadata_parse_app_string(0.1, '0.1'))
        self.assertEqual(*self._post_metadata_parse_build_bool(0.1, 0.1))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int(0.1, MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list(0.1, 0.1))
        self.assertEqual(*self._post_metadata_parse_build_script(0.1, 0.1))
        self.assertEqual(*self._post_metadata_parse_build_string(0.1, '0.1'))

    def test_post_metadata_parse_float_1_0(self):
        """Run the float 1.0 through the various field and flag types."""
        with self.assertRaises(TypeError):
            self._post_metadata_parse_app_list(1.0, TypeError)
        self.assertEqual(*self._post_metadata_parse_app_string(1.0, '1.0'))
        self.assertEqual(*self._post_metadata_parse_build_bool(1.0, 1.0))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int(1.0, MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list(1.0, 1.0))
        self.assertEqual(*self._post_metadata_parse_build_script(1.0, 1.0))
        self.assertEqual(*self._post_metadata_parse_build_string(1.0, '1.0'))

    def test_post_metadata_parse_empty_list(self):
        self.assertEqual(*self._post_metadata_parse_app_list(list(), list()))
        self.assertEqual(*self._post_metadata_parse_app_string(list(), list()))
        self.assertEqual(*self._post_metadata_parse_build_bool(list(), list()))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int(list(), MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list(list(), list()))
        self.assertEqual(*self._post_metadata_parse_build_script(list(), list()))
        self.assertEqual(*self._post_metadata_parse_build_string(list(), list()))

    def test_post_metadata_parse_set_of_1(self):
        self.assertEqual(*self._post_metadata_parse_app_list({1}, ['1']))
        self.assertEqual(*self._post_metadata_parse_app_string({1}, '{1}'))
        self.assertEqual(*self._post_metadata_parse_build_bool({1}, {1}))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int({1}, MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list({1}, {1}))
        self.assertEqual(*self._post_metadata_parse_build_script({1}, {1}))
        self.assertEqual(*self._post_metadata_parse_build_string({1}, '{1}'))

    def test_post_metadata_parse_empty_dict(self):
        self.assertEqual(*self._post_metadata_parse_app_list(dict(), dict()))
        self.assertEqual(*self._post_metadata_parse_app_string(dict(), dict()))
        self.assertEqual(*self._post_metadata_parse_build_bool(dict(), dict()))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int(dict(), MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list(dict(), dict()))
        self.assertEqual(*self._post_metadata_parse_build_script(dict(), dict()))
        self.assertEqual(*self._post_metadata_parse_build_string(dict(), dict()))

    def test_post_metadata_parse_list_int_string(self):
        self.assertEqual(*self._post_metadata_parse_app_list([1, 'a'], ['1', 'a']))
        self.assertEqual(*self._post_metadata_parse_app_string([1, 'a'], "[1, 'a']"))
        self.assertEqual(*self._post_metadata_parse_build_bool([1, 'a'], [1, 'a']))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int([1, 'a'], MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list([1, 'a'], [1, 'a']))
        self.assertEqual(*self._post_metadata_parse_build_script([1, 'a'], [1, 'a']))
        self.assertEqual(*self._post_metadata_parse_build_string([1, 'a'], "[1, 'a']"))

    def test_post_metadata_parse_dict_int_string(self):
        self.assertEqual(*self._post_metadata_parse_app_list({'k': 1}, ['k']))
        self.assertEqual(*self._post_metadata_parse_app_string({'k': 1}, "{'k': 1}"))
        self.assertEqual(*self._post_metadata_parse_build_bool({'k': 1}, {'k': 1}))
        with self.assertRaises(MetaDataException):
            self._post_metadata_parse_build_int({'k': 1}, MetaDataException)
        self.assertEqual(*self._post_metadata_parse_build_list({'k': 1}, {'k': 1}))
        self.assertEqual(*self._post_metadata_parse_build_script({'k': 1}, {'k': 1}))
        self.assertEqual(*self._post_metadata_parse_build_string({'k': 1}, "{'k': 1}"))

    def test_post_metadata_parse_false(self):
        self.assertEqual(*self._post_metadata_parse_app_list(False, False))
        self.assertEqual(*self._post_metadata_parse_app_string(False, 'false'))
        self.assertEqual(*self._post_metadata_parse_build_bool(False, False))
        self.assertEqual(*self._post_metadata_parse_build_int(False, False))
        self.assertEqual(*self._post_metadata_parse_build_list(False, ['false']))
        self.assertEqual(*self._post_metadata_parse_build_script(False, ['false']))
        self.assertEqual(*self._post_metadata_parse_build_string(False, 'false'))

    def test_post_metadata_parse_true(self):
        with self.assertRaises(TypeError):
            self._post_metadata_parse_app_list(True, TypeError)
        self.assertEqual(*self._post_metadata_parse_app_string(True, 'true'))
        self.assertEqual(*self._post_metadata_parse_build_bool(True, True))
        self.assertEqual(*self._post_metadata_parse_build_int(True, True))
        self.assertEqual(*self._post_metadata_parse_build_list(True, ['true']))
        self.assertEqual(*self._post_metadata_parse_build_script(True, ['true']))
        self.assertEqual(*self._post_metadata_parse_build_string(True, 'true'))


if __name__ == "__main__":
    parser = optparse.OptionParser()
    parser.add_option(
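One concrete instance of the behaviour these tests document, using the same kind of input that _post_metadata_parse_build_script() feeds through above; a sketch only, with a made-up build entry:

from fdroidserver import metadata

# A bare YAML int in a TYPE_SCRIPT flag like 'build' ends up as a
# one-element list of strings after post-processing.
post = {'Builds': [{'versionCode': 1, 'build': 123456}]}
metadata.post_parse_yaml_metadata(post)
assert post['Builds'][0]['build'] == ['123456']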
@@ -1164,4 +1420,4 @@ if __name__ == "__main__":

    newSuite = unittest.TestSuite()
    newSuite.addTest(unittest.makeSuite(MetadataTest))
    unittest.main(failfast=True)
    unittest.main(failfast=False)
@@ -185,7 +185,6 @@ UpdateCheckName: null
VercodeOperation: []
WebSite: ''
added: null
comments: {}
id: com.politedroid
lastUpdated: null
metadatapath: metadata/com.politedroid.yml
@@ -1136,7 +1136,6 @@ UpdateCheckName: null
VercodeOperation: []
WebSite: http://sufficientlysecure.org/index.php/adaway
added: null
comments: {}
id: org.adaway
lastUpdated: null
metadatapath: metadata/org.adaway.yml
@@ -406,7 +406,6 @@ UpdateCheckName: null
VercodeOperation: []
WebSite: http://www.smssecure.org
added: null
comments: {}
id: org.smssecure.smssecure
lastUpdated: null
metadatapath: metadata/org.smssecure.smssecure.yml
@@ -2643,7 +2643,6 @@ VercodeOperation:
- '%c + 5'
WebSite: http://www.videolan.org/vlc/download-android.html
added: null
comments: {}
id: org.videolan.vlc
lastUpdated: null
metadatapath: metadata/org.videolan.vlc.yml