Rework app into a class

This simplifies usage, going from

	app['Foo']
to
	app.Foo

It also lets static analyzers detect invalid attributes, since the set of
valid attributes is now limited by the class definition.

As a bonus, default field values are now set in the constructor rather than
separately and manually.
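
To make the change concrete, here is a minimal sketch of the pattern
(simplified; the real App class, with the full field list and its helper
methods, is added later in this diff):

	class App(object):
	    def __init__(self):
	        # Default field values now live here, in one place, instead of
	        # being copied in from a separate app_defaults dict.
	        self.Name = None
	        self.AutoName = ''
	        self.CurrentVersionCode = '0'
	        self.builds = []

	app = App()
	app.AutoName = 'Example'          # was: app['Auto Name'] = 'Example'
	vercode = app.CurrentVersionCode  # was: app['Current Version Code']
	# A misspelling such as app.CurentVersionCode is not defined in the
	# class, so a static analyzer can flag it instead of it silently
	# becoming a new dict key.
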
Daniel Martí 2015-11-28 13:09:47 +01:00
parent de12cfdbe1
commit ab614ab442
18 changed files with 515 additions and 433 deletions


@ -320,11 +320,11 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
ftp.mkdir('metadata') ftp.mkdir('metadata')
ftp.mkdir('srclibs') ftp.mkdir('srclibs')
ftp.chdir('metadata') ftp.chdir('metadata')
ftp.put(os.path.join('metadata', app['id'] + '.txt'), ftp.put(os.path.join('metadata', app.id + '.txt'),
app['id'] + '.txt') app.id + '.txt')
# And patches if there are any... # And patches if there are any...
if os.path.exists(os.path.join('metadata', app['id'])): if os.path.exists(os.path.join('metadata', app.id)):
send_dir(os.path.join('metadata', app['id'])) send_dir(os.path.join('metadata', app.id))
ftp.chdir(homedir) ftp.chdir(homedir)
# Create the build directory... # Create the build directory...
@ -375,7 +375,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
# (no need if it's a srclib) # (no need if it's a srclib)
if (not basesrclib) and os.path.exists(build_dir): if (not basesrclib) and os.path.exists(build_dir):
ftp.chdir(homedir + '/build') ftp.chdir(homedir + '/build')
fv = '.fdroidvcs-' + app['id'] fv = '.fdroidvcs-' + app.id
ftp.put(os.path.join('build', fv), fv) ftp.put(os.path.join('build', fv), fv)
send_dir(build_dir) send_dir(build_dir)
@ -389,7 +389,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
cmdline += ' --force --test' cmdline += ' --force --test'
if options.verbose: if options.verbose:
cmdline += ' --verbose' cmdline += ' --verbose'
cmdline += " %s:%s" % (app['id'], thisbuild['vercode']) cmdline += " %s:%s" % (app.id, thisbuild['vercode'])
chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"') chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"')
output = '' output = ''
while not chan.exit_status_ready(): while not chan.exit_status_ready():
@ -406,7 +406,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
if returncode != 0: if returncode != 0:
raise BuildException( raise BuildException(
"Build.py failed on server for {0}:{1}".format( "Build.py failed on server for {0}:{1}".format(
app['id'], thisbuild['version']), output) app.id, thisbuild['version']), output)
# Retrieve the built files... # Retrieve the built files...
logging.info("Retrieving build output...") logging.info("Retrieving build output...")
@ -423,7 +423,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
except: except:
raise BuildException( raise BuildException(
"Build failed for %s:%s - missing output files".format( "Build failed for %s:%s - missing output files".format(
app['id'], thisbuild['version']), output) app.id, thisbuild['version']), output)
ftp.close() ftp.close()
finally: finally:
@ -543,7 +543,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if p is not None and p.returncode != 0: if p is not None and p.returncode != 0:
raise BuildException("Error cleaning %s:%s" % raise BuildException("Error cleaning %s:%s" %
(app['id'], thisbuild['version']), p.output) (app.id, thisbuild['version']), p.output)
for root, dirs, files in os.walk(build_dir): for root, dirs, files in os.walk(build_dir):
@ -612,7 +612,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running build command for %s:%s" % raise BuildException("Error running build command for %s:%s" %
(app['id'], thisbuild['version']), p.output) (app.id, thisbuild['version']), p.output)
# Build native stuff if required... # Build native stuff if required...
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']: if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']:
@ -640,7 +640,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
del manifest_text del manifest_text
p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d)) p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d))
if p.returncode != 0: if p.returncode != 0:
raise BuildException("NDK build failed for %s:%s" % (app['id'], thisbuild['version']), p.output) raise BuildException("NDK build failed for %s:%s" % (app.id, thisbuild['version']), p.output)
p = None p = None
# Build the release... # Build the release...
@ -702,7 +702,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
raise BuildException("Distribute build failed") raise BuildException("Distribute build failed")
cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name') cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
if cid != app['id']: if cid != app.id:
raise BuildException("Package ID mismatch between metadata and spec") raise BuildException("Package ID mismatch between metadata and spec")
orientation = bconfig.get('app', 'orientation', 'landscape') orientation = bconfig.get('app', 'orientation', 'landscape')
@ -712,7 +712,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
cmd = ['./build.py' cmd = ['./build.py'
'--dir', root_dir, '--dir', root_dir,
'--name', bconfig.get('app', 'title'), '--name', bconfig.get('app', 'title'),
'--package', app['id'], '--package', app.id,
'--version', bconfig.get('app', 'version'), '--version', bconfig.get('app', 'version'),
'--orientation', orientation '--orientation', orientation
] ]
@ -759,8 +759,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
bindir = os.path.join(root_dir, 'bin') bindir = os.path.join(root_dir, 'bin')
if p is not None and p.returncode != 0: if p is not None and p.returncode != 0:
raise BuildException("Build failed for %s:%s" % (app['id'], thisbuild['version']), p.output) raise BuildException("Build failed for %s:%s" % (app.id, thisbuild['version']), p.output)
logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app['id']) logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app.id)
if thisbuild['type'] == 'maven': if thisbuild['type'] == 'maven':
stdout_apk = '\n'.join([ stdout_apk = '\n'.join([
@ -860,8 +860,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
raise BuildException("Could not find version information in build in output") raise BuildException("Could not find version information in build in output")
if not foundid: if not foundid:
raise BuildException("Could not find package ID in output") raise BuildException("Could not find package ID in output")
if foundid != app['id']: if foundid != app.id:
raise BuildException("Wrong package ID - build " + foundid + " but expected " + app['id']) raise BuildException("Wrong package ID - build " + foundid + " but expected " + app.id)
# Some apps (e.g. Timeriffic) have had the bonkers idea of # Some apps (e.g. Timeriffic) have had the bonkers idea of
# including the entire changelog in the version number. Remove # including the entire changelog in the version number. Remove
@ -941,7 +941,7 @@ def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir,
return False return False
logging.info("Building version %s (%s) of %s" % ( logging.info("Building version %s (%s) of %s" % (
thisbuild['version'], thisbuild['vercode'], app['id'])) thisbuild['version'], thisbuild['vercode'], app.id))
if server: if server:
# When using server mode, still keep a local cache of the repo, by # When using server mode, still keep a local cache of the repo, by
@ -1051,7 +1051,7 @@ def main():
apps = common.read_app_args(options.appid, allapps, True) apps = common.read_app_args(options.appid, allapps, True)
for appid, app in apps.items(): for appid, app in apps.items():
if (app['Disabled'] and not options.force) or not app['Repo Type'] or not app['builds']: if (app.Disabled and not options.force) or not app.RepoType or not app.builds:
del apps[appid] del apps[appid]
if not apps: if not apps:
@ -1059,10 +1059,10 @@ def main():
if options.latest: if options.latest:
for app in apps.itervalues(): for app in apps.itervalues():
for build in reversed(app['builds']): for build in reversed(app.builds):
if build['disable'] and not options.force: if build['disable'] and not options.force:
continue continue
app['builds'] = [build] app.builds = [build]
break break
if options.wiki: if options.wiki:
@ -1078,7 +1078,7 @@ def main():
first = True first = True
for thisbuild in app['builds']: for thisbuild in app.builds:
wikilog = None wikilog = None
try: try:
@ -1086,15 +1086,15 @@ def main():
# the source repo. We can reuse it on subsequent builds, if # the source repo. We can reuse it on subsequent builds, if
# there are any. # there are any.
if first: if first:
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
else: else:
build_dir = os.path.join('build', appid) build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
logging.debug("Getting {0} vcs interface for {1}" logging.debug("Getting {0} vcs interface for {1}"
.format(app['Repo Type'], app['Repo'])) .format(app.RepoType, app.Repo))
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
first = False first = False
@ -1105,17 +1105,17 @@ def main():
options.server, options.force, options.server, options.force,
options.onserver, options.refresh): options.onserver, options.refresh):
if app.get('Binaries', None): if app.Binaries is not None:
# This is an app where we build from source, and # This is an app where we build from source, and
# verify the apk contents against a developer's # verify the apk contents against a developer's
# binary. We get that binary now, and save it # binary. We get that binary now, and save it
# alongside our built one in the 'unsigend' # alongside our built one in the 'unsigend'
# directory. # directory.
url = app['Binaries'] url = app.Binaries
url = url.replace('%v', thisbuild['version']) url = url.replace('%v', thisbuild['version'])
url = url.replace('%c', str(thisbuild['vercode'])) url = url.replace('%c', str(thisbuild['vercode']))
logging.info("...retrieving " + url) logging.info("...retrieving " + url)
of = "{0}_{1}.apk.binary".format(app['id'], thisbuild['vercode']) of = "{0}_{1}.apk.binary".format(app.id, thisbuild['vercode'])
of = os.path.join(output_dir, of) of = os.path.join(output_dir, of)
net.download_file(url, local_filename=of) net.download_file(url, local_filename=of)
@ -1159,7 +1159,7 @@ def main():
logging.error("Error while attempting to publish build log") logging.error("Error while attempting to publish build log")
for app in build_succeeded: for app in build_succeeded:
logging.info("success: %s" % (app['id'])) logging.info("success: %s" % (app.id))
if not options.verbose: if not options.verbose:
for fa in failed_apps: for fa in failed_apps:


@ -43,10 +43,10 @@ def check_http(app):
try: try:
if 'Update Check Data' not in app: if not app.UpdateCheckData:
raise FDroidException('Missing Update Check Data') raise FDroidException('Missing Update Check Data')
urlcode, codeex, urlver, verex = app['Update Check Data'].split('|') urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|')
vercode = "99999999" vercode = "99999999"
if len(urlcode) > 0: if len(urlcode) > 0:
@ -76,7 +76,7 @@ def check_http(app):
return (version, vercode) return (version, vercode)
except FDroidException: except FDroidException:
msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg) return (None, msg)
@ -90,28 +90,28 @@ def check_tags(app, pattern):
try: try:
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app['Repo']) repotype = common.getsrclibvcs(app.Repo)
else: else:
build_dir = os.path.join('build', app['id']) build_dir = os.path.join('build', app.id)
repotype = app['Repo Type'] repotype = app.RepoType
if repotype not in ('git', 'git-svn', 'hg', 'bzr'): if repotype not in ('git', 'git-svn', 'hg', 'bzr'):
return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None) return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None)
if repotype == 'git-svn' and ';' not in app['Repo']: if repotype == 'git-svn' and ';' not in app.Repo:
return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None) return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None)
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None) vcs.gotorevision(None)
flavours = [] flavours = []
if len(app['builds']) > 0: if len(app.builds) > 0:
if app['builds'][-1]['gradle']: if app.builds[-1]['gradle']:
flavours = app['builds'][-1]['gradle'] flavours = app.builds[-1]['gradle']
hpak = None hpak = None
htag = None htag = None
@ -161,10 +161,10 @@ def check_tags(app, pattern):
return (None, "Couldn't find any version information", None) return (None, "Couldn't find any version information", None)
except VCSException as vcse: except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg, None) return (None, msg, None)
except Exception: except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg, None) return (None, msg, None)
@ -178,15 +178,15 @@ def check_repomanifest(app, branch=None):
try: try:
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app['Repo']) repotype = common.getsrclibvcs(app.Repo)
else: else:
build_dir = os.path.join('build', app['id']) build_dir = os.path.join('build', app.id)
repotype = app['Repo Type'] repotype = app.RepoType
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
if repotype == 'git': if repotype == 'git':
if branch: if branch:
@ -200,9 +200,9 @@ def check_repomanifest(app, branch=None):
vcs.gotorevision(None) vcs.gotorevision(None)
flavours = [] flavours = []
if len(app['builds']) > 0: if len(app.builds) > 0:
if app['builds'][-1]['gradle']: if app.builds[-1]['gradle']:
flavours = app['builds'][-1]['gradle'] flavours = app.builds[-1]['gradle']
hpak = None hpak = None
hver = None hver = None
@ -229,38 +229,38 @@ def check_repomanifest(app, branch=None):
return (None, "Couldn't find any version information") return (None, "Couldn't find any version information")
except VCSException as vcse: except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg) return (None, msg)
except Exception: except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg) return (None, msg)
def check_repotrunk(app, branch=None): def check_repotrunk(app, branch=None):
try: try:
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
repotype = common.getsrclibvcs(app['Repo']) repotype = common.getsrclibvcs(app.Repo)
else: else:
build_dir = os.path.join('build', app['id']) build_dir = os.path.join('build', app.id)
repotype = app['Repo Type'] repotype = app.RepoType
if repotype not in ('git-svn', ): if repotype not in ('git-svn', ):
return (None, 'RepoTrunk update mode only makes sense in git-svn repositories') return (None, 'RepoTrunk update mode only makes sense in git-svn repositories')
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None) vcs.gotorevision(None)
ref = vcs.getref() ref = vcs.getref()
return (ref, ref) return (ref, ref)
except VCSException as vcse: except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg) return (None, msg)
except Exception: except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg) return (None, msg)
@ -269,7 +269,7 @@ def check_repotrunk(app, branch=None):
# the details of the current version. # the details of the current version.
def check_gplay(app): def check_gplay(app):
time.sleep(15) time.sleep(15)
url = 'https://play.google.com/store/apps/details?id=' + app['id'] url = 'https://play.google.com/store/apps/details?id=' + app.id
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'} headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
req = urllib2.Request(url, None, headers) req = urllib2.Request(url, None, headers)
try: try:
@ -308,14 +308,14 @@ def dirs_with_manifest(startdir):
# subdir relative to the build dir if found, None otherwise. # subdir relative to the build dir if found, None otherwise.
def possible_subdirs(app): def possible_subdirs(app):
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
else: else:
build_dir = os.path.join('build', app['id']) build_dir = os.path.join('build', app.id)
flavours = [] flavours = []
if len(app['builds']) > 0: if len(app.builds) > 0:
build = app['builds'][-1] build = app.builds[-1]
if build['gradle']: if build['gradle']:
flavours = build['gradle'] flavours = build['gradle']
@ -330,24 +330,24 @@ def possible_subdirs(app):
def fetch_autoname(app, tag): def fetch_autoname(app, tag):
if not app["Repo Type"] or app['Update Check Mode'] in ('None', 'Static'): if not app.RepoType or app.UpdateCheckMode in ('None', 'Static'):
return None return None
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
else: else:
build_dir = os.path.join('build', app['id']) build_dir = os.path.join('build', app.id)
try: try:
vcs = common.getvcs(app["Repo Type"], app["Repo"], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(tag) vcs.gotorevision(tag)
except VCSException: except VCSException:
return None return None
flavours = [] flavours = []
if len(app['builds']) > 0: if len(app.builds) > 0:
if app['builds'][-1]['gradle']: if app.builds[-1]['gradle']:
flavours = app['builds'][-1]['gradle'] flavours = app.builds[-1]['gradle']
logging.debug("...fetch auto name from " + build_dir) logging.debug("...fetch auto name from " + build_dir)
new_name = None new_name = None
@ -362,8 +362,8 @@ def fetch_autoname(app, tag):
commitmsg = None commitmsg = None
if new_name: if new_name:
logging.debug("...got autoname '" + new_name + "'") logging.debug("...got autoname '" + new_name + "'")
if new_name != app['Auto Name']: if new_name != app.AutoName:
app['Auto Name'] = new_name app.AutoName = new_name
if not commitmsg: if not commitmsg:
commitmsg = "Set autoname of {0}".format(common.getappname(app)) commitmsg = "Set autoname of {0}".format(common.getappname(app))
else: else:
@ -382,7 +382,7 @@ def checkupdates_app(app, first=True):
msg = None msg = None
vercode = None vercode = None
noverok = False noverok = False
mode = app['Update Check Mode'] mode = app.UpdateCheckMode
if mode.startswith('Tags'): if mode.startswith('Tags'):
pattern = mode[5:] if len(mode) > 4 else None pattern = mode[5:] if len(mode) > 4 else None
(version, vercode, tag) = check_tags(app, pattern) (version, vercode, tag) = check_tags(app, pattern)
@ -408,9 +408,9 @@ def checkupdates_app(app, first=True):
version = None version = None
msg = 'Invalid update check method' msg = 'Invalid update check method'
if version and vercode and app['Vercode Operation']: if version and vercode and app.VercodeOperation:
oldvercode = str(int(vercode)) oldvercode = str(int(vercode))
op = app['Vercode Operation'].replace("%c", oldvercode) op = app.VercodeOperation.replace("%c", oldvercode)
vercode = str(eval(op)) vercode = str(eval(op))
logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode)) logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode))
@ -422,16 +422,16 @@ def checkupdates_app(app, first=True):
updating = False updating = False
if version is None: if version is None:
logmsg = "...{0} : {1}".format(app['id'], msg) logmsg = "...{0} : {1}".format(app.id, msg)
if noverok: if noverok:
logging.info(logmsg) logging.info(logmsg)
else: else:
logging.warn(logmsg) logging.warn(logmsg)
elif vercode == app['Current Version Code']: elif vercode == app.CurrentVersionCode:
logging.info("...up to date") logging.info("...up to date")
else: else:
app['Current Version'] = version app.CurrentVersion = version
app['Current Version Code'] = str(int(vercode)) app.CurrentVersionCode = str(int(vercode))
updating = True updating = True
commitmsg = fetch_autoname(app, tag) commitmsg = fetch_autoname(app, tag)
@ -443,7 +443,7 @@ def checkupdates_app(app, first=True):
commitmsg = 'Update CV of %s to %s' % (name, ver) commitmsg = 'Update CV of %s to %s' % (name, ver)
if options.auto: if options.auto:
mode = app['Auto Update Mode'] mode = app.AutoUpdateMode
if mode in ('None', 'Static'): if mode in ('None', 'Static'):
pass pass
elif mode.startswith('Version '): elif mode.startswith('Version '):
@ -457,13 +457,13 @@ def checkupdates_app(app, first=True):
suffix = '' suffix = ''
gotcur = False gotcur = False
latest = None latest = None
for build in app['builds']: for build in app.builds:
if int(build['vercode']) >= int(app['Current Version Code']): if int(build['vercode']) >= int(app.CurrentVersionCode):
gotcur = True gotcur = True
if not latest or int(build['vercode']) > int(latest['vercode']): if not latest or int(build['vercode']) > int(latest['vercode']):
latest = build latest = build
if int(latest['vercode']) > int(app['Current Version Code']): if int(latest['vercode']) > int(app.CurrentVersionCode):
logging.info("Refusing to auto update, since the latest build is newer") logging.info("Refusing to auto update, since the latest build is newer")
if not gotcur: if not gotcur:
@ -471,21 +471,21 @@ def checkupdates_app(app, first=True):
if 'origlines' in newbuild: if 'origlines' in newbuild:
del newbuild['origlines'] del newbuild['origlines']
newbuild['disable'] = False newbuild['disable'] = False
newbuild['vercode'] = app['Current Version Code'] newbuild['vercode'] = app.CurrentVersionCode
newbuild['version'] = app['Current Version'] + suffix newbuild['version'] = app.CurrentVersion + suffix
logging.info("...auto-generating build for " + newbuild['version']) logging.info("...auto-generating build for " + newbuild['version'])
commit = pattern.replace('%v', newbuild['version']) commit = pattern.replace('%v', newbuild['version'])
commit = commit.replace('%c', newbuild['vercode']) commit = commit.replace('%c', newbuild['vercode'])
newbuild['commit'] = commit newbuild['commit'] = commit
app['builds'].append(newbuild) app.builds.append(newbuild)
name = common.getappname(app) name = common.getappname(app)
ver = common.getcvname(app) ver = common.getcvname(app)
commitmsg = "Update %s to %s" % (name, ver) commitmsg = "Update %s to %s" % (name, ver)
else: else:
logging.warn('Invalid auto update mode "' + mode + '" on ' + app['id']) logging.warn('Invalid auto update mode "' + mode + '" on ' + app.id)
if commitmsg: if commitmsg:
metadatapath = os.path.join('metadata', app['id'] + '.txt') metadatapath = os.path.join('metadata', app.id + '.txt')
with open(metadatapath, 'w') as f: with open(metadatapath, 'w') as f:
metadata.write_metadata('txt', f, app) metadata.write_metadata('txt', f, app)
if options.commit: if options.commit:
@ -537,7 +537,7 @@ def main():
else: else:
logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason)) logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
if version is not None: if version is not None:
stored = app['Current Version'] stored = app.CurrentVersion
if not stored: if not stored:
logging.info("{0} has no Current Version but has version {1} on the Play Store" logging.info("{0} has no Current Version but has version {1} on the Play Store"
.format(common.getappname(app), version)) .format(common.getappname(app), version))
@ -555,7 +555,7 @@ def main():
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'): if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
logging.debug("Nothing to do for {0}...".format(appid)) logging.debug("Nothing to do for {0}...".format(appid))
continue continue


@ -363,10 +363,10 @@ def read_app_args(args, allapps, allow_vercodes=False):
vc = vercodes[appid] vc = vercodes[appid]
if not vc: if not vc:
continue continue
app['builds'] = [b for b in app['builds'] if b['vercode'] in vc] app.builds = [b for b in app.builds if b['vercode'] in vc]
if len(app['builds']) != len(vercodes[appid]): if len(app.builds) != len(vercodes[appid]):
error = True error = True
allvcs = [b['vercode'] for b in app['builds']] allvcs = [b['vercode'] for b in app.builds]
for v in vercodes[appid]: for v in vercodes[appid]:
if v not in allvcs: if v not in allvcs:
logging.critical("No such vercode %s for app %s" % (v, appid)) logging.critical("No such vercode %s for app %s" % (v, appid))
@ -419,23 +419,23 @@ def apknameinfo(filename):
def getapkname(app, build): def getapkname(app, build):
return "%s_%s.apk" % (app['id'], build['vercode']) return "%s_%s.apk" % (app.id, build['vercode'])
def getsrcname(app, build): def getsrcname(app, build):
return "%s_%s_src.tar.gz" % (app['id'], build['vercode']) return "%s_%s_src.tar.gz" % (app.id, build['vercode'])
def getappname(app): def getappname(app):
if app['Name']: if app.Name:
return app['Name'] return app.Name
if app['Auto Name']: if app.AutoName:
return app['Auto Name'] return app.AutoName
return app['id'] return app.id
def getcvname(app): def getcvname(app):
return '%s (%s)' % (app['Current Version'], app['Current Version Code']) return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode)
def getvcs(vcstype, remote, local): def getvcs(vcstype, remote, local):
@ -1026,7 +1026,7 @@ psearch_g = re.compile(r'.*(packageName|applicationId) *=* *["\']([^"]+)["\'].*'
def app_matches_packagename(app, package): def app_matches_packagename(app, package):
if not package: if not package:
return False return False
appid = app['Update Check Name'] or app['id'] appid = app.UpdateCheckName or app.id
if appid is None or appid == "Ignore": if appid is None or appid == "Ignore":
return True return True
return appid == package return appid == package
@ -1037,7 +1037,7 @@ def app_matches_packagename(app, package):
# All values returned are strings. # All values returned are strings.
def parse_androidmanifests(paths, app): def parse_androidmanifests(paths, app):
ignoreversions = app['Update Check Ignore'] ignoreversions = app.UpdateCheckIgnore
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
if not paths: if not paths:
@ -1277,7 +1277,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running init command for %s:%s" % raise BuildException("Error running init command for %s:%s" %
(app['id'], build['version']), p.output) (app.id, build['version']), p.output)
# Apply patches if any # Apply patches if any
if build['patch']: if build['patch']:
@ -1285,7 +1285,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
for patch in build['patch']: for patch in build['patch']:
patch = patch.strip() patch = patch.strip()
logging.info("Applying " + patch) logging.info("Applying " + patch)
patch_path = os.path.join('metadata', app['id'], patch) patch_path = os.path.join('metadata', app.id, patch)
p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir) p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir)
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Failed to apply patch %s" % patch_path) raise BuildException("Failed to apply patch %s" % patch_path)
@ -1460,7 +1460,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running prebuild command for %s:%s" % raise BuildException("Error running prebuild command for %s:%s" %
(app['id'], build['version']), p.output) (app.id, build['version']), p.output)
# Generate (or update) the ant build file, build.xml... # Generate (or update) the ant build file, build.xml...
if build['update'] and build['update'] != ['no'] and build['type'] == 'ant': if build['update'] and build['update'] != ['no'] and build['type'] == 'ant':


@ -79,20 +79,20 @@ def get_metadata_from_url(app, url):
# Figure out what kind of project it is... # Figure out what kind of project it is...
projecttype = None projecttype = None
app['Web Site'] = url # by default, we might override it app.WebSite = url # by default, we might override it
if url.startswith('git://'): if url.startswith('git://'):
projecttype = 'git' projecttype = 'git'
repo = url repo = url
repotype = 'git' repotype = 'git'
app['Source Code'] = "" app.SourceCode = ""
app['Web Site'] = "" app.WebSite = ""
elif url.startswith('https://github.com'): elif url.startswith('https://github.com'):
projecttype = 'github' projecttype = 'github'
repo = url repo = url
repotype = 'git' repotype = 'git'
app['Source Code'] = url app.SourceCode = url
app['Issue Tracker'] = url + '/issues' app.IssueTracker = url + '/issues'
app['Web Site'] = "" app.WebSite = ""
elif url.startswith('https://gitlab.com/'): elif url.startswith('https://gitlab.com/'):
projecttype = 'gitlab' projecttype = 'gitlab'
# git can be fussy with gitlab URLs unless they end in .git # git can be fussy with gitlab URLs unless they end in .git
@ -101,16 +101,16 @@ def get_metadata_from_url(app, url):
else: else:
repo = url + '.git' repo = url + '.git'
repotype = 'git' repotype = 'git'
app['Source Code'] = url + '/tree/HEAD' app.SourceCode = url + '/tree/HEAD'
app['Issue Tracker'] = url + '/issues' app.IssueTracker = url + '/issues'
elif url.startswith('https://bitbucket.org/'): elif url.startswith('https://bitbucket.org/'):
if url.endswith('/'): if url.endswith('/'):
url = url[:-1] url = url[:-1]
projecttype = 'bitbucket' projecttype = 'bitbucket'
app['Source Code'] = url + '/src' app.SourceCode = url + '/src'
app['Issue Tracker'] = url + '/issues' app.IssueTracker = url + '/issues'
# Figure out the repo type and adddress... # Figure out the repo type and adddress...
repotype, repo = getrepofrompage(app['Source Code']) repotype, repo = getrepofrompage(app.SourceCode)
if not repotype: if not repotype:
logging.error("Unable to determine vcs type. " + repo) logging.error("Unable to determine vcs type. " + repo)
sys.exit(1) sys.exit(1)
@ -139,8 +139,8 @@ def get_metadata_from_url(app, url):
vcs.gotorevision(options.rev) vcs.gotorevision(options.rev)
root_dir = get_subdir(build_dir) root_dir = get_subdir(build_dir)
app['Repo Type'] = repotype app.RepoType = repotype
app['Repo'] = repo app.Repo = repo
return root_dir, build_dir return root_dir, build_dir
@ -175,8 +175,8 @@ def main():
apps = metadata.read_metadata() apps = metadata.read_metadata()
package, app = metadata.get_default_app_info() package, app = metadata.get_default_app_info()
app['id'] = None app.id = None
app['Update Check Mode'] = "Tags" app.UpdateCheckMode = "Tags"
root_dir = None root_dir = None
build_dir = None build_dir = None
@ -185,7 +185,7 @@ def main():
root_dir, build_dir = get_metadata_from_url(app, options.url) root_dir, build_dir = get_metadata_from_url(app, options.url)
elif os.path.isdir('.git'): elif os.path.isdir('.git'):
if options.url: if options.url:
app['Web Site'] = options.url app.WebSite = options.url
root_dir = get_subdir(os.getcwd()) root_dir = get_subdir(os.getcwd())
else: else:
logging.error("Specify project url.") logging.error("Specify project url.")
@ -238,7 +238,7 @@ def main():
continue continue
build[flag] = value build[flag] = value
app['builds'].append(build) app.builds.append(build)
# Keep the repo directory to save bandwidth... # Keep the repo directory to save bandwidth...
if not os.path.exists('build'): if not os.path.exists('build'):
@ -246,7 +246,7 @@ def main():
if build_dir is not None: if build_dir is not None:
shutil.move(build_dir, os.path.join('build', package)) shutil.move(build_dir, os.path.join('build', package))
with open('build/.fdroidvcs-' + package, 'w') as f: with open('build/.fdroidvcs-' + package, 'w') as f:
f.write(app['Repo Type'] + ' ' + app['Repo']) f.write(app.RepoType + ' ' + app.Repo)
metadatapath = os.path.join('metadata', package + '.txt') metadatapath = os.path.join('metadata', package + '.txt')
with open(metadatapath, 'w') as f: with open(metadatapath, 'w') as f:


@ -106,7 +106,7 @@ regex_checks = {
def check_regexes(app): def check_regexes(app):
for f, checks in regex_checks.iteritems(): for f, checks in regex_checks.iteritems():
for m, r in checks: for m, r in checks:
v = app[f] v = app.get_field(f)
if type(v) == str: if type(v) == str:
if v is None: if v is None:
continue continue
@ -132,27 +132,27 @@ def get_lastbuild(builds):
def check_ucm_tags(app): def check_ucm_tags(app):
lastbuild = get_lastbuild(app['builds']) lastbuild = get_lastbuild(app.builds)
if (lastbuild is not None if (lastbuild is not None
and lastbuild['commit'] and lastbuild['commit']
and app['Update Check Mode'] == 'RepoManifest' and app.UpdateCheckMode == 'RepoManifest'
and not lastbuild['commit'].startswith('unknown') and not lastbuild['commit'].startswith('unknown')
and lastbuild['vercode'] == app['Current Version Code'] and lastbuild['vercode'] == app.CurrentVersionCode
and not lastbuild['forcevercode'] and not lastbuild['forcevercode']
and any(s in lastbuild['commit'] for s in '.,_-/')): and any(s in lastbuild['commit'] for s in '.,_-/')):
yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % ( yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % (
lastbuild['commit'], app['Update Check Mode']) lastbuild['commit'], app.UpdateCheckMode)
def check_char_limits(app): def check_char_limits(app):
limits = config['char_limits'] limits = config['char_limits']
summ_chars = len(app['Summary']) summ_chars = len(app.Summary)
if summ_chars > limits['Summary']: if summ_chars > limits['Summary']:
yield "Summary of length %s is over the %i char limit" % ( yield "Summary of length %s is over the %i char limit" % (
summ_chars, limits['Summary']) summ_chars, limits['Summary'])
desc_charcount = sum(len(l) for l in app['Description']) desc_charcount = sum(len(l) for l in app.Description)
if desc_charcount > limits['Description']: if desc_charcount > limits['Description']:
yield "Description of length %s is over the %i char limit" % ( yield "Description of length %s is over the %i char limit" % (
desc_charcount, limits['Description']) desc_charcount, limits['Description'])
@ -168,31 +168,28 @@ def check_old_links(app):
'gitorious.org', 'gitorious.org',
'code.google.com', 'code.google.com',
] ]
if any(s in app['Repo'] for s in usual_sites): if any(s in app.Repo for s in usual_sites):
for f in ['Web Site', 'Source Code', 'Issue Tracker', 'Changelog']: for f in ['Web Site', 'Source Code', 'Issue Tracker', 'Changelog']:
if any(s in app[f] for s in old_sites): v = app.get_field(f)
yield "App is in '%s' but has a link to '%s'" % (app['Repo'], app[f]) if any(s in v for s in old_sites):
yield "App is in '%s' but has a link to '%s'" % (app.Repo, v)
def check_useless_fields(app): def check_useless_fields(app):
if app['Update Check Name'] == app['id']: if app.UpdateCheckName == app.id:
yield "Update Check Name is set to the known app id - it can be removed" yield "Update Check Name is set to the known app id - it can be removed"
filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)') filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)')
def check_checkupdates_ran(app): def check_checkupdates_ran(app):
if filling_ucms.match(app['Update Check Mode']): if filling_ucms.match(app.UpdateCheckMode):
if all(app[f] == metadata.app_defaults[f] for f in [ if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == '0':
'Auto Name',
'Current Version',
'Current Version Code',
]):
yield "UCM is set but it looks like checkupdates hasn't been run yet" yield "UCM is set but it looks like checkupdates hasn't been run yet"
def check_empty_fields(app): def check_empty_fields(app):
if not app['Categories']: if not app.Categories:
yield "Categories are not set" yield "Categories are not set"
all_categories = Set([ all_categories = Set([
@ -217,37 +214,37 @@ all_categories = Set([
def check_categories(app): def check_categories(app):
for categ in app['Categories']: for categ in app.Categories:
if categ not in all_categories: if categ not in all_categories:
yield "Category '%s' is not valid" % categ yield "Category '%s' is not valid" % categ
def check_duplicates(app): def check_duplicates(app):
if app['Name'] and app['Name'] == app['Auto Name']: if app.Name and app.Name == app.AutoName:
yield "Name '%s' is just the auto name - remove it" % app['Name'] yield "Name '%s' is just the auto name - remove it" % app.Name
links_seen = set() links_seen = set()
for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']: for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']:
if not app[f]: v = app.get_field(f)
if not v:
continue continue
v = app[f].lower() v = v.lower()
if v in links_seen: if v in links_seen:
yield "Duplicate link in '%s': %s" % (f, v) yield "Duplicate link in '%s': %s" % (f, v)
else: else:
links_seen.add(v) links_seen.add(v)
name = app['Name'] or app['Auto Name'] name = app.Name or app.AutoName
if app['Summary'] and name: if app.Summary and name:
if app['Summary'].lower() == name.lower(): if app.Summary.lower() == name.lower():
yield "Summary '%s' is just the app's name" % app['Summary'] yield "Summary '%s' is just the app's name" % app.Summary
desc = app['Description'] if app.Summary and app.Description and len(app.Description) == 1:
if app['Summary'] and desc and len(desc) == 1: if app.Summary.lower() == app.Description[0].lower():
if app['Summary'].lower() == desc[0].lower(): yield "Description '%s' is just the app's summary" % app.Summary
yield "Description '%s' is just the app's summary" % app['Summary']
seenlines = set() seenlines = set()
for l in app['Description']: for l in app.Description:
if len(l) < 1: if len(l) < 1:
continue continue
if l in seenlines: if l in seenlines:
@ -259,7 +256,7 @@ desc_url = re.compile(r'(^|[^[])\[([^ ]+)( |\]|$)')
def check_mediawiki_links(app): def check_mediawiki_links(app):
wholedesc = ' '.join(app['Description']) wholedesc = ' '.join(app.Description)
for um in desc_url.finditer(wholedesc): for um in desc_url.finditer(wholedesc):
url = um.group(1) url = um.group(1)
for m, r in http_checks: for m, r in http_checks:
@ -271,7 +268,7 @@ def check_bulleted_lists(app):
validchars = ['*', '#'] validchars = ['*', '#']
lchar = '' lchar = ''
lcount = 0 lcount = 0
for l in app['Description']: for l in app.Description:
if len(l) < 1: if len(l) < 1:
lcount = 0 lcount = 0
continue continue
@ -287,7 +284,7 @@ def check_bulleted_lists(app):
def check_builds(app): def check_builds(app):
for build in app['builds']: for build in app.builds:
if build['disable']: if build['disable']:
continue continue
for s in ['master', 'origin', 'HEAD', 'default', 'trunk']: for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
@ -318,7 +315,7 @@ def main():
apps = common.read_app_args(options.appid, allapps, False) apps = common.read_app_args(options.appid, allapps, False)
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
if app['Disabled']: if app.Disabled:
continue continue
warns = [] warns = []


@ -53,43 +53,128 @@ class MetaDataException(Exception):
def __str__(self): def __str__(self):
return self.value return self.value
# In the order in which they are laid out on files app_fields = set([
app_defaults = OrderedDict([ 'Disabled',
('Disabled', None), 'AntiFeatures',
('AntiFeatures', []), 'Provides',
('Provides', None), 'Categories',
('Categories', ['None']), 'License',
('License', 'Unknown'), 'Web Site',
('Web Site', ''), 'Source Code',
('Source Code', ''), 'Issue Tracker',
('Issue Tracker', ''), 'Changelog',
('Changelog', ''), 'Donate',
('Donate', None), 'FlattrID',
('FlattrID', None), 'Bitcoin',
('Bitcoin', None), 'Litecoin',
('Litecoin', None), 'Name',
('Name', None), 'Auto Name',
('Auto Name', ''), 'Summary',
('Summary', ''), 'Description',
('Description', []), 'Requires Root',
('Requires Root', False), 'Repo Type',
('Repo Type', ''), 'Repo',
('Repo', ''), 'Binaries',
('Binaries', None), 'Maintainer Notes',
('Maintainer Notes', []), 'Archive Policy',
('Archive Policy', None), 'Auto Update Mode',
('Auto Update Mode', 'None'), 'Update Check Mode',
('Update Check Mode', 'None'), 'Update Check Ignore',
('Update Check Ignore', None), 'Vercode Operation',
('Vercode Operation', None), 'Update Check Name',
('Update Check Name', None), 'Update Check Data',
('Update Check Data', None), 'Current Version',
('Current Version', ''), 'Current Version Code',
('Current Version Code', '0'), 'No Source Since',
('No Source Since', ''),
'comments', # For formats that don't do inline comments
'builds', # For formats that do builds as a list
]) ])
class App():
def __init__(self):
self.Disabled = None
self.AntiFeatures = []
self.Provides = None
self.Categories = ['None']
self.License = 'Unknown'
self.WebSite = ''
self.SourceCode = ''
self.IssueTracker = ''
self.Changelog = ''
self.Donate = None
self.FlattrID = None
self.Bitcoin = None
self.Litecoin = None
self.Name = None
self.AutoName = ''
self.Summary = ''
self.Description = []
self.RequiresRoot = False
self.RepoType = ''
self.Repo = ''
self.Binaries = None
self.MaintainerNotes = []
self.ArchivePolicy = None
self.AutoUpdateMode = 'None'
self.UpdateCheckMode = 'None'
self.UpdateCheckIgnore = None
self.VercodeOperation = None
self.UpdateCheckName = None
self.UpdateCheckData = None
self.CurrentVersion = ''
self.CurrentVersionCode = '0'
self.NoSourceSince = ''
self.id = None
self.metadatapath = None
self.builds = []
self.comments = {}
self.added = None
self.lastupdated = None
@classmethod
def field_to_attr(cls, f):
return f.replace(' ', '')
@classmethod
def attr_to_field(cls, k):
if k in app_fields:
return k
f = re.sub(r'([a-z])([A-Z])', r'\1 \2', k)
return f
def field_dict(self):
return {App.attr_to_field(k): v for k, v in self.__dict__.iteritems()}
def get_field(self, f):
if f not in app_fields:
raise MetaDataException('Unrecognised app field: ' + f)
k = App.field_to_attr(f)
return getattr(self, k)
def set_field(self, f, v):
if f not in app_fields:
raise MetaDataException('Unrecognised app field: ' + f)
k = App.field_to_attr(f)
self.__dict__[k] = v
def append_field(self, f, v):
if f not in app_fields:
raise MetaDataException('Unrecognised app field: ' + f)
k = App.field_to_attr(f)
if k not in self.__dict__:
self.__dict__[k] = [v]
else:
self.__dict__[k].append(v)
def update_fields(self, d):
for f, v in d.iteritems():
self.set_field(f, v)
# In the order in which they are laid out on files # In the order in which they are laid out on files
# Sorted by their action and their place in the build timeline # Sorted by their action and their place in the build timeline
# These variables can have varying datatypes. For example, anything with # These variables can have varying datatypes. For example, anything with
@ -238,13 +323,13 @@ valuetypes = {
# Check an app's metadata information for integrity errors # Check an app's metadata information for integrity errors
def check_metadata(info): def check_metadata(app):
for v in valuetypes: for v in valuetypes:
for field in v.fields: for field in v.fields:
v.check(info[field], info['id']) v.check(app.get_field(field), app.id)
for build in info['builds']: for build in app.builds:
for attr in v.attrs: for attr in v.attrs:
v.check(build[attr], info['id']) v.check(build[attr], app.id)
# Formatter for descriptions. Create an instance, and call parseline() with # Formatter for descriptions. Create an instance, and call parseline() with
@ -519,11 +604,11 @@ def read_metadata(xref=True):
+ glob.glob(os.path.join('metadata', '*.json')) + glob.glob(os.path.join('metadata', '*.json'))
+ glob.glob(os.path.join('metadata', '*.xml')) + glob.glob(os.path.join('metadata', '*.xml'))
+ glob.glob(os.path.join('metadata', '*.yaml'))): + glob.glob(os.path.join('metadata', '*.yaml'))):
appid, appinfo = parse_metadata(metadatapath) app = parse_metadata(metadatapath)
if appid in apps: if app.id in apps:
raise MetaDataException("Found multiple metadata files for " + appid) raise MetaDataException("Found multiple metadata files for " + app.id)
check_metadata(appinfo) check_metadata(app)
apps[appid] = appinfo apps[app.id] = app
if xref: if xref:
# Parse all descriptions at load time, just to ensure cross-referencing # Parse all descriptions at load time, just to ensure cross-referencing
@ -535,7 +620,7 @@ def read_metadata(xref=True):
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
try: try:
description_html(app['Description'], linkres) description_html(app.Description, linkres)
except MetaDataException, e: except MetaDataException, e:
raise MetaDataException("Problem with description of " + appid + raise MetaDataException("Problem with description of " + appid +
" - " + str(e)) " - " + str(e))
@ -555,7 +640,7 @@ def metafieldtype(name):
return 'buildv2' return 'buildv2'
if name == 'Use Built': if name == 'Use Built':
return 'obsolete' return 'obsolete'
if name not in app_defaults: if name not in app_fields:
return 'unknown' return 'unknown'
return 'string' return 'string'
@ -603,44 +688,38 @@ def get_default_app_info(metadatapath=None):
else: else:
appid, _ = common.get_extension(os.path.basename(metadatapath)) appid, _ = common.get_extension(os.path.basename(metadatapath))
thisinfo = {} app = App()
thisinfo.update(app_defaults) app.metadatapath = metadatapath
thisinfo['metadatapath'] = metadatapath
if appid is not None: if appid is not None:
thisinfo['id'] = appid app.id = appid
# General defaults... return app
thisinfo['builds'] = []
thisinfo['comments'] = dict()
return appid, thisinfo
def sorted_builds(builds): def sorted_builds(builds):
return sorted(builds, key=lambda build: int(build['vercode'])) return sorted(builds, key=lambda build: int(build['vercode']))
def post_metadata_parse(thisinfo): def post_metadata_parse(app):
supported_metadata = app_defaults.keys() + ['comments', 'builds', 'id', 'metadatapath'] for f in app_fields:
for k, v in thisinfo.iteritems(): v = app.get_field(f)
if k not in supported_metadata:
raise MetaDataException("Unrecognised metadata: {0}: {1}"
.format(k, v))
if type(v) in (float, int): if type(v) in (float, int):
thisinfo[k] = str(v) app.set_field(f, str(v))
# convert to the odd internal format # convert to the odd internal format
for k in ('Description', 'Maintainer Notes'): for f in ('Description', 'Maintainer Notes'):
if isinstance(thisinfo[k], basestring): v = app.get_field(f)
text = thisinfo[k].rstrip().lstrip() if isinstance(v, basestring):
thisinfo[k] = text.split('\n') text = v.rstrip().lstrip()
app.set_field(f, text.split('\n'))
supported_flags = (flag_defaults.keys() supported_flags = (flag_defaults.keys()
+ ['vercode', 'version', 'versionCode', 'versionName']) + ['vercode', 'version', 'versionCode', 'versionName',
'type', 'ndk_path'])
esc_newlines = re.compile('\\\\( |\\n)') esc_newlines = re.compile('\\\\( |\\n)')
for build in thisinfo['builds']: for build in app.builds:
for k, v in build.items(): for k, v in build.items():
if k not in supported_flags: if k not in supported_flags:
raise MetaDataException("Unrecognised build flag: {0}={1}" raise MetaDataException("Unrecognised build flag: {0}={1}"
@ -683,13 +762,13 @@ def post_metadata_parse(thisinfo):
if isinstance(v, bool): if isinstance(v, bool):
build[k] = 'yes' if v else 'no' build[k] = 'yes' if v else 'no'
if not thisinfo['Description']: if not app.Description:
thisinfo['Description'].append('No description available') app.Description = ['No description available']
for build in thisinfo['builds']: for build in app.builds:
fill_build_defaults(build) fill_build_defaults(build)
thisinfo['builds'] = sorted_builds(thisinfo['builds']) app.builds = sorted_builds(app.builds)
# Parse metadata for a single application. # Parse metadata for a single application.
@ -772,7 +851,7 @@ def parse_metadata(metadatapath):
def parse_json_metadata(metadatapath): def parse_json_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath) app = get_default_app_info(metadatapath)
# fdroid metadata is only strings and booleans, no floats or ints. And # fdroid metadata is only strings and booleans, no floats or ints. And
# json returns unicode, and fdroidserver still uses plain python strings # json returns unicode, and fdroidserver still uses plain python strings
@ -781,15 +860,15 @@ def parse_json_metadata(metadatapath):
object_hook=_decode_dict, object_hook=_decode_dict,
parse_int=lambda s: s, parse_int=lambda s: s,
parse_float=lambda s: s) parse_float=lambda s: s)
thisinfo.update(jsoninfo) app.update_fields(jsoninfo)
post_metadata_parse(thisinfo) post_metadata_parse(app)
return (appid, thisinfo) return app
def parse_xml_metadata(metadatapath): def parse_xml_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath) app = get_default_app_info(metadatapath)
tree = ElementTree.ElementTree(file=metadatapath) tree = ElementTree.ElementTree(file=metadatapath)
root = tree.getroot() root = tree.getroot()
@ -798,54 +877,46 @@ def parse_xml_metadata(metadatapath):
logging.critical(metadatapath + ' does not have root as <resources></resources>!') logging.critical(metadatapath + ' does not have root as <resources></resources>!')
sys.exit(1) sys.exit(1)
supported_metadata = app_defaults.keys()
for child in root: for child in root:
if child.tag != 'builds': if child.tag != 'builds':
# builds does not have name="" attrib # builds does not have name="" attrib
name = child.attrib['name'] name = child.attrib['name']
if name not in supported_metadata:
raise MetaDataException("Unrecognised metadata: <"
+ child.tag + ' name="' + name + '">'
+ child.text
+ "</" + child.tag + '>')
if child.tag == 'string': if child.tag == 'string':
thisinfo[name] = child.text app.set_field(name, child.text)
elif child.tag == 'string-array': elif child.tag == 'string-array':
items = [] items = []
for item in child: for item in child:
items.append(item.text) items.append(item.text)
thisinfo[name] = items app.set_field(name, items)
elif child.tag == 'builds': elif child.tag == 'builds':
builds = []
for build in child: for build in child:
builddict = dict() builddict = dict()
for key in build: for key in build:
builddict[key.tag] = key.text builddict[key.tag] = key.text
builds.append(builddict) app.builds.append(builddict)
thisinfo['builds'] = builds
# TODO handle this using <xsd:element type="xsd:boolean> in a schema # TODO handle this using <xsd:element type="xsd:boolean> in a schema
if not isinstance(thisinfo['Requires Root'], bool): if not isinstance(app.RequiresRoot, bool):
if thisinfo['Requires Root'] == 'true': if app.RequiresRoot == 'true':
thisinfo['Requires Root'] = True app.RequiresRoot = True
else: else:
thisinfo['Requires Root'] = False app.RequiresRoot = False
post_metadata_parse(thisinfo) post_metadata_parse(app)
return (appid, thisinfo) return app
def parse_yaml_metadata(metadatapath): def parse_yaml_metadata(metadatapath):
appid, thisinfo = get_default_app_info(metadatapath) app = get_default_app_info(metadatapath)
yamlinfo = yaml.load(open(metadatapath, 'r'), Loader=YamlLoader) yamlinfo = yaml.load(open(metadatapath, 'r'), Loader=YamlLoader)
thisinfo.update(yamlinfo) app.update_fields(yamlinfo)
post_metadata_parse(thisinfo) post_metadata_parse(app)
return (appid, thisinfo) return app
def parse_txt_metadata(metadatapath): def parse_txt_metadata(metadatapath):
@ -918,10 +989,10 @@ def parse_txt_metadata(metadatapath):
def add_comments(key): def add_comments(key):
if not curcomments: if not curcomments:
return return
thisinfo['comments'][key] = list(curcomments) app.comments[key] = list(curcomments)
del curcomments[:] del curcomments[:]
appid, thisinfo = get_default_app_info(metadatapath) app = get_default_app_info(metadatapath)
metafile = open(metadatapath, "r") metafile = open(metadatapath, "r")
mode = 0 mode = 0
@ -942,7 +1013,7 @@ def parse_txt_metadata(metadatapath):
raise MetaDataException("No commit specified for {0} in {1}" raise MetaDataException("No commit specified for {0} in {1}"
.format(curbuild['version'], linedesc)) .format(curbuild['version'], linedesc))
thisinfo['builds'].append(curbuild) app.builds.append(curbuild)
add_comments('build:' + curbuild['vercode']) add_comments('build:' + curbuild['vercode'])
mode = 0 mode = 0
else: else:
@ -978,21 +1049,20 @@ def parse_txt_metadata(metadatapath):
add_comments(field) add_comments(field)
if fieldtype == 'multiline': if fieldtype == 'multiline':
mode = 1 mode = 1
thisinfo[field] = []
if value: if value:
raise MetaDataException("Unexpected text on same line as " + field + " in " + linedesc) raise MetaDataException("Unexpected text on same line as " + field + " in " + linedesc)
elif fieldtype == 'string': elif fieldtype == 'string':
thisinfo[field] = value app.set_field(field, value)
elif fieldtype == 'list': elif fieldtype == 'list':
thisinfo[field] = split_list_values(value) app.set_field(field, split_list_values(value))
elif fieldtype == 'build': elif fieldtype == 'build':
if value.endswith("\\"): if value.endswith("\\"):
mode = 2 mode = 2
buildlines = [value[:-1]] buildlines = [value[:-1]]
else: else:
curbuild = parse_buildline([value]) curbuild = parse_buildline([value])
thisinfo['builds'].append(curbuild) app.builds.append(curbuild)
add_comments('build:' + thisinfo['builds'][-1]['vercode']) add_comments('build:' + app.builds[-1]['vercode'])
elif fieldtype == 'buildv2': elif fieldtype == 'buildv2':
curbuild = {} curbuild = {}
vv = value.split(',') vv = value.split(',')
@ -1015,15 +1085,15 @@ def parse_txt_metadata(metadatapath):
if line == '.': if line == '.':
mode = 0 mode = 0
else: else:
thisinfo[field].append(line) app.append_field(field, line)
elif mode == 2: # Line continuation mode in Build Version elif mode == 2: # Line continuation mode in Build Version
if line.endswith("\\"): if line.endswith("\\"):
buildlines.append(line[:-1]) buildlines.append(line[:-1])
else: else:
buildlines.append(line) buildlines.append(line)
curbuild = parse_buildline(buildlines) curbuild = parse_buildline(buildlines)
thisinfo['builds'].append(curbuild) app.builds.append(curbuild)
add_comments('build:' + thisinfo['builds'][-1]['vercode']) add_comments('build:' + app.builds[-1]['vercode'])
mode = 0 mode = 0
add_comments(None) add_comments(None)
@ -1035,34 +1105,34 @@ def parse_txt_metadata(metadatapath):
elif mode == 3: elif mode == 3:
raise MetaDataException("Unterminated build in " + metafile.name) raise MetaDataException("Unterminated build in " + metafile.name)
post_metadata_parse(thisinfo) post_metadata_parse(app)
return (appid, thisinfo) return app
def write_plaintext_metadata(mf, app, w_comment, w_field, w_build): def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
def w_comments(key): def w_comments(key):
if key not in app['comments']: if key not in app.comments:
return return
for line in app['comments'][key]: for line in app.comments[key]:
w_comment(line) w_comment(line)
def w_field_always(field, value=None): def w_field_always(field, value=None):
if value is None: if value is None:
value = app[field] value = app.get_field(field)
w_comments(field) w_comments(field)
w_field(field, value) w_field(field, value)
def w_field_nonempty(field, value=None): def w_field_nonempty(field, value=None):
if value is None: if value is None:
value = app[field] value = app.get_field(field)
w_comments(field) w_comments(field)
if value: if value:
w_field(field, value) w_field(field, value)
w_field_nonempty('Disabled') w_field_nonempty('Disabled')
if app['AntiFeatures']: if app.AntiFeatures:
w_field_always('AntiFeatures') w_field_always('AntiFeatures')
w_field_nonempty('Provides') w_field_nonempty('Provides')
w_field_always('Categories') w_field_always('Categories')
@ -1079,19 +1149,19 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_field_nonempty('Name') w_field_nonempty('Name')
w_field_nonempty('Auto Name') w_field_nonempty('Auto Name')
w_field_always('Summary') w_field_always('Summary')
w_field_always('Description', description_txt(app['Description'])) w_field_always('Description', description_txt(app.Description))
mf.write('\n') mf.write('\n')
if app['Requires Root']: if app.RequiresRoot:
w_field_always('Requires Root', 'yes') w_field_always('Requires Root', 'yes')
mf.write('\n') mf.write('\n')
if app['Repo Type']: if app.RepoType:
w_field_always('Repo Type') w_field_always('Repo Type')
w_field_always('Repo') w_field_always('Repo')
if app['Binaries']: if app.Binaries:
w_field_always('Binaries') w_field_always('Binaries')
mf.write('\n') mf.write('\n')
for build in sorted_builds(app['builds']): for build in sorted_builds(app.builds):
if build['version'] == "Ignore": if build['version'] == "Ignore":
continue continue
@ -1100,8 +1170,8 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_build(build) w_build(build)
mf.write('\n') mf.write('\n')
if app['Maintainer Notes']: if app.MaintainerNotes:
w_field_always('Maintainer Notes', app['Maintainer Notes']) w_field_always('Maintainer Notes', app.MaintainerNotes)
mf.write('\n') mf.write('\n')
w_field_nonempty('Archive Policy') w_field_nonempty('Archive Policy')
@ -1111,10 +1181,10 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
w_field_nonempty('Vercode Operation') w_field_nonempty('Vercode Operation')
w_field_nonempty('Update Check Name') w_field_nonempty('Update Check Name')
w_field_nonempty('Update Check Data') w_field_nonempty('Update Check Data')
if app['Current Version']: if app.CurrentVersion:
w_field_always('Current Version') w_field_always('Current Version')
w_field_always('Current Version Code') w_field_always('Current Version Code')
if app['No Source Since']: if app.NoSourceSince:
mf.write('\n') mf.write('\n')
w_field_always('No Source Since') w_field_always('No Source Since')
w_comments(None) w_comments(None)
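Both the .txt parser and the writer above go through small helpers (set_field, get_field, append_field, plus update_fields in the YAML path) because the plain-text format still spells fields with spaces, e.g. 'Requires Root'. A sketch of the idea, assuming the attribute name is simply the field name with spaces removed, which is what the renames throughout this diff suggest; the real methods presumably also reject unknown field names:

    class FieldAccess(object):
        @staticmethod
        def _attr(field):
            return field.replace(' ', '')      # 'Requires Root' -> 'RequiresRoot'

        def set_field(self, field, value):
            setattr(self, self._attr(field), value)

        def get_field(self, field):
            return getattr(self, self._attr(field))

        def append_field(self, field, value):
            # assumes the attribute was initialised to a list in __init__
            getattr(self, self._attr(field)).append(value)

        def update_fields(self, d):
            for field, value in d.items():
                self.set_field(field, value)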

@ -117,7 +117,7 @@ def main():
sys.exit(1) sys.exit(1)
app = allapps[appid] app = allapps[appid]
if app.get('Binaries', None): if app.Binaries is not None:
# It's an app where we build from source, and verify the apk # It's an app where we build from source, and verify the apk
# contents against a developer's binary, and then publish their # contents against a developer's binary, and then publish their

@ -59,7 +59,7 @@ def main():
parser.error("Must give a valid format to --to") parser.error("Must give a valid format to --to")
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
metadatapath = app['metadatapath'] metadatapath = app.metadatapath
base, ext = common.get_extension(metadatapath) base, ext = common.get_extension(metadatapath)
if not options.to and ext not in supported: if not options.to and ext not in supported:
logging.info("Ignoring %s file at '%s'" % (ext, metadatapath)) logging.info("Ignoring %s file at '%s'" % (ext, metadatapath))

@ -274,10 +274,10 @@ def main():
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
if app['Disabled']: if app.Disabled:
logging.info("Skipping %s: disabled" % appid) logging.info("Skipping %s: disabled" % appid)
continue continue
if not app['builds']: if not app.builds:
logging.info("Skipping %s: no builds specified" % appid) logging.info("Skipping %s: no builds specified" % appid)
continue continue
@ -285,15 +285,15 @@ def main():
try: try:
if app['Repo Type'] == 'srclib': if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo']) build_dir = os.path.join('build', 'srclib', app.Repo)
else: else:
build_dir = os.path.join('build', appid) build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
for thisbuild in app['builds']: for thisbuild in app.builds:
if thisbuild['disable']: if thisbuild['disable']:
logging.info("...skipping version %s - %s" % ( logging.info("...skipping version %s - %s" % (

@ -217,9 +217,9 @@ def main():
logging.info("Processing repo types...") logging.info("Processing repo types...")
repotypes = Counter() repotypes = Counter()
for app in metaapps: for app in metaapps:
rtype = app['Repo Type'] or 'none' rtype = app.RepoType or 'none'
if rtype == 'srclib': if rtype == 'srclib':
rtype = common.getsrclibvcs(app['Repo']) rtype = common.getsrclibvcs(app.Repo)
repotypes[rtype] += 1 repotypes[rtype] += 1
with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f: with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
for rtype, count in repotypes.most_common(): for rtype, count in repotypes.most_common():
@ -229,7 +229,7 @@ def main():
logging.info("Processing update check modes...") logging.info("Processing update check modes...")
ucms = Counter() ucms = Counter()
for app in metaapps: for app in metaapps:
checkmode = app['Update Check Mode'] checkmode = app.UpdateCheckMode
if checkmode.startswith('RepoManifest/'): if checkmode.startswith('RepoManifest/'):
checkmode = checkmode[:12] checkmode = checkmode[:12]
if checkmode.startswith('Tags '): if checkmode.startswith('Tags '):
@ -242,7 +242,7 @@ def main():
logging.info("Processing categories...") logging.info("Processing categories...")
ctgs = Counter() ctgs = Counter()
for app in metaapps: for app in metaapps:
for category in app['Categories']: for category in app.Categories:
ctgs[category] += 1 ctgs[category] += 1
with open(os.path.join(statsdir, 'categories.txt'), 'w') as f: with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
for category, count in ctgs.most_common(): for category, count in ctgs.most_common():
@ -251,9 +251,9 @@ def main():
logging.info("Processing antifeatures...") logging.info("Processing antifeatures...")
afs = Counter() afs = Counter()
for app in metaapps: for app in metaapps:
if app['AntiFeatures'] is None: if app.AntiFeatures is None:
continue continue
for antifeature in app['AntiFeatures']: for antifeature in app.AntiFeatures:
afs[antifeature] += 1 afs[antifeature] += 1
with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f: with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
for antifeature, count in afs.most_common(): for antifeature, count in afs.most_common():
@ -263,7 +263,7 @@ def main():
logging.info("Processing licenses...") logging.info("Processing licenses...")
licenses = Counter() licenses = Counter()
for app in metaapps: for app in metaapps:
license = app['License'] license = app.License
licenses[license] += 1 licenses[license] += 1
with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f: with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
for license, count in licenses.most_common(): for license, count in licenses.most_common():

@ -94,43 +94,43 @@ def update_wiki(apps, sortedids, apks):
app = apps[appid] app = apps[appid]
wikidata = '' wikidata = ''
if app['Disabled']: if app.Disabled:
wikidata += '{{Disabled|' + app['Disabled'] + '}}\n' wikidata += '{{Disabled|' + app.Disabled + '}}\n'
if 'AntiFeatures' in app: if app.AntiFeatures:
for af in app['AntiFeatures']: for af in app.AntiFeatures:
wikidata += '{{AntiFeature|' + af + '}}\n' wikidata += '{{AntiFeature|' + af + '}}\n'
if app['Requires Root']: if app.RequiresRoot:
requiresroot = 'Yes' requiresroot = 'Yes'
else: else:
requiresroot = 'No' requiresroot = 'No'
wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|changelog=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s}}\n' % ( wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|changelog=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s}}\n' % (
appid, appid,
app['Name'], app.Name,
time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '', time.strftime('%Y-%m-%d', app.added) if app.added else '',
time.strftime('%Y-%m-%d', app['lastupdated']) if 'lastupdated' in app else '', time.strftime('%Y-%m-%d', app.lastupdated) if app.lastupdated else '',
app['Source Code'], app.SourceCode,
app['Issue Tracker'], app.IssueTracker,
app['Web Site'], app.WebSite,
app['Changelog'], app.Changelog,
app['Donate'], app.Donate,
app['FlattrID'], app.FlattrID,
app['Bitcoin'], app.Bitcoin,
app['Litecoin'], app.Litecoin,
app['License'], app.License,
requiresroot) requiresroot)
if app['Provides']: if app.Provides:
wikidata += "This app provides: %s" % ', '.join(app['Summary'].split(',')) wikidata += "This app provides: %s" % ', '.join(app.Summary.split(','))
wikidata += app['Summary'] wikidata += app.Summary
wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n" wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n"
wikidata += "=Description=\n" wikidata += "=Description=\n"
wikidata += metadata.description_wiki(app['Description']) + "\n" wikidata += metadata.description_wiki(app.Description) + "\n"
wikidata += "=Maintainer Notes=\n" wikidata += "=Maintainer Notes=\n"
if 'Maintainer Notes' in app: if app.MaintainerNotes:
wikidata += metadata.description_wiki(app['Maintainer Notes']) + "\n" wikidata += metadata.description_wiki(app.MaintainerNotes) + "\n"
wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid) wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid)
# Get a list of all packages for this application... # Get a list of all packages for this application...
@ -140,13 +140,13 @@ def update_wiki(apps, sortedids, apks):
buildfails = False buildfails = False
for apk in apks: for apk in apks:
if apk['id'] == appid: if apk['id'] == appid:
if str(apk['versioncode']) == app['Current Version Code']: if str(apk['versioncode']) == app.CurrentVersionCode:
gotcurrentver = True gotcurrentver = True
apklist.append(apk) apklist.append(apk)
# Include ones we can't build, as a special case... # Include ones we can't build, as a special case...
for thisbuild in app['builds']: for thisbuild in app.builds:
if thisbuild['disable']: if thisbuild['disable']:
if thisbuild['vercode'] == app['Current Version Code']: if thisbuild['vercode'] == app.CurrentVersionCode:
cantupdate = True cantupdate = True
# TODO: Nasty: vercode is a string in the build, and an int elsewhere # TODO: Nasty: vercode is a string in the build, and an int elsewhere
apklist.append({'versioncode': int(thisbuild['vercode']), apklist.append({'versioncode': int(thisbuild['vercode']),
@ -165,7 +165,7 @@ def update_wiki(apps, sortedids, apks):
'version': thisbuild['version'], 'version': thisbuild['version'],
'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode']), 'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode']),
}) })
if app['Current Version Code'] == '0': if app.CurrentVersionCode == '0':
cantupdate = True cantupdate = True
# Sort with most recent first... # Sort with most recent first...
apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True) apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)
@ -177,13 +177,13 @@ def update_wiki(apps, sortedids, apks):
wikidata += "We don't have the current version of this app." wikidata += "We don't have the current version of this app."
else: else:
wikidata += "We have the current version of this app." wikidata += "We have the current version of this app."
wikidata += " (Check mode: " + app['Update Check Mode'] + ") " wikidata += " (Check mode: " + app.UpdateCheckMode + ") "
wikidata += " (Auto-update mode: " + app['Auto Update Mode'] + ")\n\n" wikidata += " (Auto-update mode: " + app.AutoUpdateMode + ")\n\n"
if len(app['No Source Since']) > 0: if len(app.NoSourceSince) > 0:
wikidata += "This application has partially or entirely been missing source code since version " + app['No Source Since'] + ".\n\n" wikidata += "This application has partially or entirely been missing source code since version " + app.NoSourceSince + ".\n\n"
if len(app['Current Version']) > 0: if len(app.CurrentVersion) > 0:
wikidata += "The current (recommended) version is " + app['Current Version'] wikidata += "The current (recommended) version is " + app.CurrentVersion
wikidata += " (version code " + app['Current Version Code'] + ").\n\n" wikidata += " (version code " + app.CurrentVersionCode + ").\n\n"
validapks = 0 validapks = 0
for apk in apklist: for apk in apklist:
wikidata += "==" + apk['version'] + "==\n" wikidata += "==" + apk['version'] + "==\n"
@ -200,21 +200,21 @@ def update_wiki(apps, sortedids, apks):
wikidata += "Version code: " + str(apk['versioncode']) + '\n' wikidata += "Version code: " + str(apk['versioncode']) + '\n'
wikidata += '\n[[Category:' + wikicat + ']]\n' wikidata += '\n[[Category:' + wikicat + ']]\n'
if len(app['No Source Since']) > 0: if len(app.NoSourceSince) > 0:
wikidata += '\n[[Category:Apps missing source code]]\n' wikidata += '\n[[Category:Apps missing source code]]\n'
if validapks == 0 and not app['Disabled']: if validapks == 0 and not app.Disabled:
wikidata += '\n[[Category:Apps with no packages]]\n' wikidata += '\n[[Category:Apps with no packages]]\n'
if cantupdate and not app['Disabled']: if cantupdate and not app.Disabled:
wikidata += "\n[[Category:Apps we can't update]]\n" wikidata += "\n[[Category:Apps we can't update]]\n"
if buildfails and not app['Disabled']: if buildfails and not app.Disabled:
wikidata += "\n[[Category:Apps with failing builds]]\n" wikidata += "\n[[Category:Apps with failing builds]]\n"
elif not gotcurrentver and not cantupdate and not app['Disabled'] and app['Update Check Mode'] != "Static": elif not gotcurrentver and not cantupdate and not app.Disabled and app.UpdateCheckMode != "Static":
wikidata += '\n[[Category:Apps to Update]]\n' wikidata += '\n[[Category:Apps to Update]]\n'
if app['Disabled']: if app.Disabled:
wikidata += '\n[[Category:Apps that are disabled]]\n' wikidata += '\n[[Category:Apps that are disabled]]\n'
if app['Update Check Mode'] == 'None' and not app['Disabled']: if app.UpdateCheckMode == 'None' and not app.Disabled:
wikidata += '\n[[Category:Apps with no update check]]\n' wikidata += '\n[[Category:Apps with no update check]]\n'
for appcat in app['Categories']: for appcat in app.Categories:
wikidata += '\n[[Category:{0}]]\n'.format(appcat) wikidata += '\n[[Category:{0}]]\n'.format(appcat)
# We can't have underscores in the page name, even if they're in # We can't have underscores in the page name, even if they're in
@ -231,7 +231,7 @@ def update_wiki(apps, sortedids, apks):
# Make a redirect from the name to the ID too, unless there's # Make a redirect from the name to the ID too, unless there's
# already an existing page with the name and it isn't a redirect. # already an existing page with the name and it isn't a redirect.
noclobber = False noclobber = False
apppagename = app['Name'].replace('_', ' ') apppagename = app.Name.replace('_', ' ')
apppagename = apppagename.replace('{', '') apppagename = apppagename.replace('{', '')
apppagename = apppagename.replace('}', ' ') apppagename = apppagename.replace('}', ' ')
apppagename = apppagename.replace(':', ' ') apppagename = apppagename.replace(':', ' ')
@ -290,7 +290,7 @@ def delete_disabled_builds(apps, apkcache, repodirs):
:param repodirs: the repo directories to process :param repodirs: the repo directories to process
""" """
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
for build in app['builds']: for build in app.builds:
if not build['disable']: if not build['disable']:
continue continue
apkfilename = appid + '_' + str(build['vercode']) + '.apk' apkfilename = appid + '_' + str(build['vercode']) + '.apk'
@ -805,7 +805,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
for appid in sortedids: for appid in sortedids:
app = apps[appid] app = apps[appid]
if app['Disabled'] is not None: if app.Disabled is not None:
continue continue
# Get a list of the apks for this app... # Get a list of the apks for this app...
@ -818,57 +818,57 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
continue continue
apel = doc.createElement("application") apel = doc.createElement("application")
apel.setAttribute("id", app['id']) apel.setAttribute("id", app.id)
root.appendChild(apel) root.appendChild(apel)
addElement('id', app['id'], doc, apel) addElement('id', app.id, doc, apel)
if 'added' in app: if app.added:
addElement('added', time.strftime('%Y-%m-%d', app['added']), doc, apel) addElement('added', time.strftime('%Y-%m-%d', app.added), doc, apel)
if 'lastupdated' in app: if app.lastupdated:
addElement('lastupdated', time.strftime('%Y-%m-%d', app['lastupdated']), doc, apel) addElement('lastupdated', time.strftime('%Y-%m-%d', app.lastupdated), doc, apel)
addElement('name', app['Name'], doc, apel) addElement('name', app.Name, doc, apel)
addElement('summary', app['Summary'], doc, apel) addElement('summary', app.Summary, doc, apel)
if app['icon']: if app.icon:
addElement('icon', app['icon'], doc, apel) addElement('icon', app.icon, doc, apel)
def linkres(appid): def linkres(appid):
if appid in apps: if appid in apps:
return ("fdroid.app:" + appid, apps[appid]['Name']) return ("fdroid.app:" + appid, apps[appid].Name)
raise MetaDataException("Cannot resolve app id " + appid) raise MetaDataException("Cannot resolve app id " + appid)
addElement('desc', addElement('desc',
metadata.description_html(app['Description'], linkres), metadata.description_html(app.Description, linkres),
doc, apel) doc, apel)
addElement('license', app['License'], doc, apel) addElement('license', app.License, doc, apel)
if 'Categories' in app and app['Categories']: if app.Categories:
addElement('categories', ','.join(app["Categories"]), doc, apel) addElement('categories', ','.join(app.Categories), doc, apel)
# We put the first (primary) category in LAST, which will have # We put the first (primary) category in LAST, which will have
# the desired effect of making clients that only understand one # the desired effect of making clients that only understand one
# category see that one. # category see that one.
addElement('category', app["Categories"][0], doc, apel) addElement('category', app.Categories[0], doc, apel)
addElement('web', app['Web Site'], doc, apel) addElement('web', app.WebSite, doc, apel)
addElement('source', app['Source Code'], doc, apel) addElement('source', app.SourceCode, doc, apel)
addElement('tracker', app['Issue Tracker'], doc, apel) addElement('tracker', app.IssueTracker, doc, apel)
addElementNonEmpty('changelog', app['Changelog'], doc, apel) addElementNonEmpty('changelog', app.Changelog, doc, apel)
addElementNonEmpty('donate', app['Donate'], doc, apel) addElementNonEmpty('donate', app.Donate, doc, apel)
addElementNonEmpty('bitcoin', app['Bitcoin'], doc, apel) addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel)
addElementNonEmpty('litecoin', app['Litecoin'], doc, apel) addElementNonEmpty('litecoin', app.Litecoin, doc, apel)
addElementNonEmpty('flattr', app['FlattrID'], doc, apel) addElementNonEmpty('flattr', app.FlattrID, doc, apel)
# These elements actually refer to the current version (i.e. which # These elements actually refer to the current version (i.e. which
# one is recommended. They are historically mis-named, and need # one is recommended. They are historically mis-named, and need
# changing, but stay like this for now to support existing clients. # changing, but stay like this for now to support existing clients.
addElement('marketversion', app['Current Version'], doc, apel) addElement('marketversion', app.CurrentVersion, doc, apel)
addElement('marketvercode', app['Current Version Code'], doc, apel) addElement('marketvercode', app.CurrentVersionCode, doc, apel)
if app['AntiFeatures']: if app.AntiFeatures:
af = app['AntiFeatures'] af = app.AntiFeatures
if af: if af:
addElementNonEmpty('antifeatures', ','.join(af), doc, apel) addElementNonEmpty('antifeatures', ','.join(af), doc, apel)
if app['Provides']: if app.Provides:
pv = app['Provides'].split(',') pv = app.Provides.split(',')
addElementNonEmpty('provides', ','.join(pv), doc, apel) addElementNonEmpty('provides', ','.join(pv), doc, apel)
if app['Requires Root']: if app.RequiresRoot:
addElement('requirements', 'root', doc, apel) addElement('requirements', 'root', doc, apel)
# Sort the apk list into version order, just so the web site # Sort the apk list into version order, just so the web site
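The index writer also relies on two small DOM helpers, addElement and addElementNonEmpty. Their signatures are taken from the calls above; the bodies below are only a plausible xml.dom.minidom-style guess to make the hunks easier to follow, not the actual fdroidserver code:

    def addElement(name, value, doc, parent):
        el = doc.createElement(name)
        el.appendChild(doc.createTextNode(value))
        parent.appendChild(el)

    def addElementNonEmpty(name, value, doc, parent):
        # only emit the element when there is something to say
        if value:
            addElement(name, value, doc, parent)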
@ -888,7 +888,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
# find the APK for the "Current Version" # find the APK for the "Current Version"
if current_version_code < apk['versioncode']: if current_version_code < apk['versioncode']:
current_version_code = apk['versioncode'] current_version_code = apk['versioncode']
if current_version_code < int(app['Current Version Code']): if current_version_code < int(app.CurrentVersionCode):
current_version_file = apk['apkname'] current_version_file = apk['apkname']
apkel = doc.createElement("package") apkel = doc.createElement("package")
@ -920,8 +920,8 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
if current_version_file is not None \ if current_version_file is not None \
and config['make_current_version_link'] \ and config['make_current_version_link'] \
and repodir == 'repo': # only create these and repodir == 'repo': # only create these
sanitized_name = re.sub('''[ '"&%?+=/]''', '', namefield = config['current_version_name_source']
app[config['current_version_name_source']]) sanitized_name = re.sub('''[ '"&%?+=/]''', '', app.get_field(namefield))
apklinkname = sanitized_name + '.apk' apklinkname = sanitized_name + '.apk'
current_version_path = os.path.join(repodir, current_version_file) current_version_path = os.path.join(repodir, current_version_file)
if os.path.islink(apklinkname): if os.path.islink(apklinkname):
@ -996,8 +996,8 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
if app['Archive Policy']: if app.ArchivePolicy:
keepversions = int(app['Archive Policy'][:-9]) keepversions = int(app.ArchivePolicy[:-9])
else: else:
keepversions = defaultkeepversions keepversions = defaultkeepversions
@ -1163,7 +1163,7 @@ def main():
# Generate a list of categories... # Generate a list of categories...
categories = set() categories = set()
for app in apps.itervalues(): for app in apps.itervalues():
categories.update(app['Categories']) categories.update(app.Categories)
# Read known apks data (will be updated and written back when we've finished) # Read known apks data (will be updated and written back when we've finished)
knownapks = common.KnownApks() knownapks = common.KnownApks()
@ -1234,8 +1234,6 @@ def main():
# same time. # same time.
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
bestver = 0 bestver = 0
added = None
lastupdated = None
for apk in apks + archapks: for apk in apks + archapks:
if apk['id'] == appid: if apk['id'] == appid:
if apk['versioncode'] > bestver: if apk['versioncode'] > bestver:
@ -1243,34 +1241,30 @@ def main():
bestapk = apk bestapk = apk
if 'added' in apk: if 'added' in apk:
if not added or apk['added'] < added: if not app.added or apk['added'] < app.added:
added = apk['added'] app.added = apk['added']
if not lastupdated or apk['added'] > lastupdated: if not app.lastupdated or apk['added'] > app.lastupdated:
lastupdated = apk['added'] app.lastupdated = apk['added']
if added: if not app.added:
app['added'] = added
else:
logging.debug("Don't know when " + appid + " was added") logging.debug("Don't know when " + appid + " was added")
if lastupdated: if not app.lastupdated:
app['lastupdated'] = lastupdated
else:
logging.debug("Don't know when " + appid + " was last updated") logging.debug("Don't know when " + appid + " was last updated")
if bestver == 0: if bestver == 0:
if app['Name'] is None: if app.Name is None:
app['Name'] = app['Auto Name'] or appid app.Name = app.AutoName or appid
app['icon'] = None app.icon = None
logging.debug("Application " + appid + " has no packages") logging.debug("Application " + appid + " has no packages")
else: else:
if app['Name'] is None: if app.Name is None:
app['Name'] = bestapk['name'] app.Name = bestapk['name']
app['icon'] = bestapk['icon'] if 'icon' in bestapk else None app.icon = bestapk['icon'] if 'icon' in bestapk else None
# Sort the app list by name, then the web site doesn't have to by default. # Sort the app list by name, then the web site doesn't have to by default.
# (we had to wait until we'd scanned the apks to do this, because mostly the # (we had to wait until we'd scanned the apks to do this, because mostly the
# name comes from there!) # name comes from there!)
sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid]['Name'].upper()) sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid].Name.upper())
# APKs are placed into multiple repos based on the app package, providing # APKs are placed into multiple repos based on the app package, providing
# per-app subscription feeds for nightly builds and things like it # per-app subscription feeds for nightly builds and things like it
@ -1309,10 +1303,10 @@ def main():
appid = line.rstrip() appid = line.rstrip()
data += appid + "\t" data += appid + "\t"
app = apps[appid] app = apps[appid]
data += app['Name'] + "\t" data += app.Name + "\t"
if app['icon'] is not None: if app.icon is not None:
data += app['icon'] + "\t" data += app.icon + "\t"
data += app['License'] + "\n" data += app.License + "\n"
with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f: with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
f.write(data) f.write(data)

@ -116,8 +116,8 @@ class CommonTest(unittest.TestCase):
config['sdk_path'] = os.getenv('ANDROID_HOME') config['sdk_path'] = os.getenv('ANDROID_HOME')
config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION' config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION'
fdroidserver.common.config = config fdroidserver.common.config = config
app = dict() app = fdroidserver.metadata.App()
app['id'] = 'org.fdroid.froid' app.id = 'org.fdroid.froid'
build = dict(fdroidserver.metadata.flag_defaults) build = dict(fdroidserver.metadata.flag_defaults)
build['commit'] = 'master' build['commit'] = 'master'
build['forceversion'] = True build['forceversion'] = True
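The test now constructs a real metadata.App() rather than a bare dict, which also means a misspelled attribute is no longer just a missing key at runtime. A short, hedged illustration:

    import fdroidserver.metadata

    app = fdroidserver.metadata.App()
    app.id = 'org.fdroid.froid'
    print(app.RequiresRoot)       # defined on the class, so fine
    # print(app.RequiresRot)      # typo: raises AttributeError, and checkers such as pylint can flag it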

@ -30,12 +30,12 @@ class ImportTest(unittest.TestCase):
fdroidserver.common.config['sdk_path'] = '/fake/path/to/android-sdk' fdroidserver.common.config['sdk_path'] = '/fake/path/to/android-sdk'
url = 'https://gitlab.com/fdroid/fdroidclient' url = 'https://gitlab.com/fdroid/fdroidclient'
appid, app = fdroidserver.metadata.get_default_app_info() app = fdroidserver.metadata.get_default_app_info()
app['Update Check Mode'] = "Tags" app.UpdateCheckMode = "Tags"
root_dir, src_dir = import_proxy.get_metadata_from_url(app, url) root_dir, src_dir = import_proxy.get_metadata_from_url(app, url)
self.assertEquals(app['Repo Type'], 'git') self.assertEquals(app.RepoType, 'git')
self.assertEquals(app['Web Site'], 'https://gitlab.com/fdroid/fdroidclient') self.assertEquals(app.WebSite, 'https://gitlab.com/fdroid/fdroidclient')
self.assertEquals(app['Repo'], 'https://gitlab.com/fdroid/fdroidclient.git') self.assertEquals(app.Repo, 'https://gitlab.com/fdroid/fdroidclient.git')
if __name__ == "__main__": if __name__ == "__main__":

@ -39,10 +39,15 @@ class MetadataTest(unittest.TestCase):
apps = fdroidserver.metadata.read_metadata(xref=True) apps = fdroidserver.metadata.read_metadata(xref=True)
for appid in ('org.smssecure.smssecure', 'org.adaway', 'net.osmand.plus', 'org.videolan.vlc'): for appid in ('org.smssecure.smssecure', 'org.adaway', 'net.osmand.plus', 'org.videolan.vlc'):
with open(os.path.join('metadata', appid + '.pickle'), 'r') as f: app = apps[appid]
savepath = os.path.join('metadata', appid + '.pickle')
self.assertTrue(appid in apps)
with open(savepath, 'r') as f:
frompickle = pickle.load(f) frompickle = pickle.load(f)
self.assertTrue(appid in apps.keys()) frommeta = app.field_dict()
self.assertEquals(apps[appid], frompickle) self.assertEquals(frommeta, frompickle)
# with open(savepath, 'wb') as f:
# pickle.dump(app, f)
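field_dict() is what lets these old pickled dict fixtures still be compared against the new objects: it exports the App back into the previous dict shape. A hedged sketch of such an export, with a deliberately abbreviated field list (the real method would cover every field):

    def field_dict(app):
        # spaced metadata field names map to attributes with the spaces removed
        fields = ['Requires Root', 'Repo Type', 'Update Check Mode',
                  'Current Version', 'Current Version Code', 'AntiFeatures']
        d = {f: getattr(app, f.replace(' ', '')) for f in fields}
        # lower-case bookkeeping entries are carried over unchanged
        for extra in ('metadatapath', 'builds', 'comments', 'added', 'lastupdated'):
            d[extra] = getattr(app, extra)
        return d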
if __name__ == "__main__": if __name__ == "__main__":

@ -491,12 +491,16 @@ S'metadata/net.osmand.plus.xml'
p178 p178
sS'Disabled' sS'Disabled'
p179 p179
NsS'Update Check Name' NsS'added'
p180 p180
NsS'Vercode Operation' NsS'lastupdated'
p181 p181
NsS'Current Version' NsS'Update Check Name'
p182 p182
S'1.9.5' NsS'Vercode Operation'
p183 p183
NsS'Current Version'
p184
S'1.9.5'
p185
s. s.

@ -2290,4 +2290,8 @@ NsS'Update Check Name'
p483 p483
NsS'Vercode Operation' NsS'Vercode Operation'
p484 p484
NsS'added'
p485
NsS'lastupdated'
p486
Ns. Ns.

@ -772,4 +772,8 @@ NsS'Update Check Name'
p227 p227
NsS'Vercode Operation' NsS'Vercode Operation'
p228 p228
NsS'added'
p229
NsS'lastupdated'
p230
Ns. Ns.

@ -5610,14 +5610,18 @@ S'metadata/org.videolan.vlc.yaml'
p1519 p1519
sS'Disabled' sS'Disabled'
p1520 p1520
NsS'Update Check Name' NsS'added'
p1521 p1521
NsS'Vercode Operation' NsS'lastupdated'
p1522 p1522
S'%c + 5' NsS'Update Check Name'
p1523 p1523
sS'Current Version' NsS'Vercode Operation'
p1524 p1524
S'1.2.6' S'%c + 5'
p1525 p1525
sS'Current Version'
p1526
S'1.2.6'
p1527
s. s.