Rework build into a class

This simplifies usage, going from

    build['flag']
to
    build.flag

This also enables static analyzers to detect invalid attributes, since the
set of valid attributes is now limited by the class definition.

As a bonus, setting of the default field values is now done in the
constructor, not separately and manually.

While at it, unify "build", "thisbuild", "info", "thisinfo", etc. into
just "build".
This commit is contained in:
Daniel Martí 2015-11-28 17:55:27 +01:00
parent 2c12485aeb
commit bf8518ee8f
15 changed files with 6997 additions and 8636 deletions

View file

@ -245,7 +245,7 @@ def release_vm():
# Note that 'force' here also implies test mode. # Note that 'force' here also implies test mode.
def build_server(app, thisbuild, vcs, build_dir, output_dir, force): def build_server(app, build, vcs, build_dir, output_dir, force):
"""Do a build on the build server.""" """Do a build on the build server."""
try: try:
@ -333,9 +333,9 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
ftp.mkdir('extlib') ftp.mkdir('extlib')
ftp.mkdir('srclib') ftp.mkdir('srclib')
# Copy any extlibs that are required... # Copy any extlibs that are required...
if thisbuild['extlibs']: if build.extlibs:
ftp.chdir(homedir + '/build/extlib') ftp.chdir(homedir + '/build/extlib')
for lib in thisbuild['extlibs']: for lib in build.extlibs:
lib = lib.strip() lib = lib.strip()
libsrc = os.path.join('build/extlib', lib) libsrc = os.path.join('build/extlib', lib)
if not os.path.exists(libsrc): if not os.path.exists(libsrc):
@ -350,8 +350,8 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
ftp.chdir('..') ftp.chdir('..')
# Copy any srclibs that are required... # Copy any srclibs that are required...
srclibpaths = [] srclibpaths = []
if thisbuild['srclibs']: if build.srclibs:
for lib in thisbuild['srclibs']: for lib in build.srclibs:
srclibpaths.append( srclibpaths.append(
common.getsrclib(lib, 'build/srclib', basepath=True, prepare=False)) common.getsrclib(lib, 'build/srclib', basepath=True, prepare=False))
@ -389,7 +389,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
cmdline += ' --force --test' cmdline += ' --force --test'
if options.verbose: if options.verbose:
cmdline += ' --verbose' cmdline += ' --verbose'
cmdline += " %s:%s" % (app.id, thisbuild['vercode']) cmdline += " %s:%s" % (app.id, build.vercode)
chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"') chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"')
output = '' output = ''
while not chan.exit_status_ready(): while not chan.exit_status_ready():
@ -406,7 +406,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
if returncode != 0: if returncode != 0:
raise BuildException( raise BuildException(
"Build.py failed on server for {0}:{1}".format( "Build.py failed on server for {0}:{1}".format(
app.id, thisbuild['version']), output) app.id, build.version), output)
# Retrieve the built files... # Retrieve the built files...
logging.info("Retrieving build output...") logging.info("Retrieving build output...")
@ -414,8 +414,8 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
ftp.chdir(homedir + '/tmp') ftp.chdir(homedir + '/tmp')
else: else:
ftp.chdir(homedir + '/unsigned') ftp.chdir(homedir + '/unsigned')
apkfile = common.getapkname(app, thisbuild) apkfile = common.getapkname(app, build)
tarball = common.getsrcname(app, thisbuild) tarball = common.getsrcname(app, build)
try: try:
ftp.get(apkfile, os.path.join(output_dir, apkfile)) ftp.get(apkfile, os.path.join(output_dir, apkfile))
if not options.notarball: if not options.notarball:
@ -423,7 +423,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
except: except:
raise BuildException( raise BuildException(
"Build failed for %s:%s - missing output files".format( "Build failed for %s:%s - missing output files".format(
app.id, thisbuild['version']), output) app.id, build.version), output)
ftp.close() ftp.close()
finally: finally:
@ -457,32 +457,33 @@ def capitalize_intact(string):
return string[0].upper() + string[1:] return string[0].upper() + string[1:]
def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh): def build_local(app, build, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh):
"""Do a build locally.""" """Do a build locally."""
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']: ndk_path = build.ndk_path()
if not thisbuild['ndk_path']: if build.buildjni and build.buildjni != ['no']:
logging.critical("Android NDK version '%s' could not be found!" % thisbuild['ndk']) if not ndk_path:
logging.critical("Android NDK version '%s' could not be found!" % build.ndk or 'r10e')
logging.critical("Configured versions:") logging.critical("Configured versions:")
for k, v in config['ndk_paths'].iteritems(): for k, v in config['ndk_paths'].iteritems():
if k.endswith("_orig"): if k.endswith("_orig"):
continue continue
logging.critical(" %s: %s" % (k, v)) logging.critical(" %s: %s" % (k, v))
sys.exit(3) sys.exit(3)
elif not os.path.isdir(thisbuild['ndk_path']): elif not os.path.isdir(ndk_path):
logging.critical("Android NDK '%s' is not a directory!" % thisbuild['ndk_path']) logging.critical("Android NDK '%s' is not a directory!" % ndk_path)
sys.exit(3) sys.exit(3)
# Set up environment vars that depend on each build # Set up environment vars that depend on each build
for n in ['ANDROID_NDK', 'NDK', 'ANDROID_NDK_HOME']: for n in ['ANDROID_NDK', 'NDK', 'ANDROID_NDK_HOME']:
common.env[n] = thisbuild['ndk_path'] common.env[n] = ndk_path
common.reset_env_path() common.reset_env_path()
# Set up the current NDK to the PATH # Set up the current NDK to the PATH
common.add_to_env_path(thisbuild['ndk_path']) common.add_to_env_path(ndk_path)
# Prepare the source code... # Prepare the source code...
root_dir, srclibpaths = common.prepare_source(vcs, app, thisbuild, root_dir, srclibpaths = common.prepare_source(vcs, app, build,
build_dir, srclib_dir, build_dir, srclib_dir,
extlib_dir, onserver, refresh) extlib_dir, onserver, refresh)
@ -490,26 +491,27 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
# different from the default ones # different from the default ones
p = None p = None
gradletasks = [] gradletasks = []
if thisbuild['type'] == 'maven': method = build.method()
if method == 'maven':
logging.info("Cleaning Maven project...") logging.info("Cleaning Maven project...")
cmd = [config['mvn3'], 'clean', '-Dandroid.sdk.path=' + config['sdk_path']] cmd = [config['mvn3'], 'clean', '-Dandroid.sdk.path=' + config['sdk_path']]
if '@' in thisbuild['maven']: if '@' in build.maven:
maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@', 1)[1]) maven_dir = os.path.join(root_dir, build.maven.split('@', 1)[1])
maven_dir = os.path.normpath(maven_dir) maven_dir = os.path.normpath(maven_dir)
else: else:
maven_dir = root_dir maven_dir = root_dir
p = FDroidPopen(cmd, cwd=maven_dir) p = FDroidPopen(cmd, cwd=maven_dir)
elif thisbuild['type'] == 'gradle': elif method == 'gradle':
logging.info("Cleaning Gradle project...") logging.info("Cleaning Gradle project...")
if thisbuild['preassemble']: if build.preassemble:
gradletasks += thisbuild['preassemble'] gradletasks += build.preassemble
flavours = thisbuild['gradle'] flavours = build.gradle
if flavours == ['yes']: if flavours == ['yes']:
flavours = [] flavours = []
@ -522,8 +524,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
adapt_gradle(libpath) adapt_gradle(libpath)
cmd = [config['gradle']] cmd = [config['gradle']]
if thisbuild['gradleprops']: if build.gradleprops:
cmd += ['-P'+kv for kv in thisbuild['gradleprops']] cmd += ['-P'+kv for kv in build.gradleprops]
for task in gradletasks: for task in gradletasks:
parts = task.split(':') parts = task.split(':')
@ -534,16 +536,16 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
p = FDroidPopen(cmd, cwd=root_dir) p = FDroidPopen(cmd, cwd=root_dir)
elif thisbuild['type'] == 'kivy': elif method == 'kivy':
pass pass
elif thisbuild['type'] == 'ant': elif method == 'ant':
logging.info("Cleaning Ant project...") logging.info("Cleaning Ant project...")
p = FDroidPopen(['ant', 'clean'], cwd=root_dir) p = FDroidPopen(['ant', 'clean'], cwd=root_dir)
if p is not None and p.returncode != 0: if p is not None and p.returncode != 0:
raise BuildException("Error cleaning %s:%s" % raise BuildException("Error cleaning %s:%s" %
(app.id, thisbuild['version']), p.output) (app.id, build.version), p.output)
for root, dirs, files in os.walk(build_dir): for root, dirs, files in os.walk(build_dir):
@ -575,12 +577,12 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
del_dirs(['obj']) del_dirs(['obj'])
if options.skipscan: if options.skipscan:
if thisbuild['scandelete']: if build.scandelete:
raise BuildException("Refusing to skip source scan since scandelete is present") raise BuildException("Refusing to skip source scan since scandelete is present")
else: else:
# Scan before building... # Scan before building...
logging.info("Scanning source for common problems...") logging.info("Scanning source for common problems...")
count = scanner.scan_source(build_dir, root_dir, thisbuild) count = scanner.scan_source(build_dir, root_dir, build)
if count > 0: if count > 0:
if force: if force:
logging.warn('Scanner found %d problems' % count) logging.warn('Scanner found %d problems' % count)
@ -590,7 +592,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if not options.notarball: if not options.notarball:
# Build the source tarball right before we build the release... # Build the source tarball right before we build the release...
logging.info("Creating source tarball...") logging.info("Creating source tarball...")
tarname = common.getsrcname(app, thisbuild) tarname = common.getsrcname(app, build)
tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz") tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz")
def tarexc(f): def tarexc(f):
@ -599,9 +601,9 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
tarball.close() tarball.close()
# Run a build command if one is required... # Run a build command if one is required...
if thisbuild['build']: if build.build:
logging.info("Running 'build' commands in %s" % root_dir) logging.info("Running 'build' commands in %s" % root_dir)
cmd = common.replace_config_vars(thisbuild['build'], thisbuild) cmd = common.replace_config_vars(build.build, build)
# Substitute source library paths into commands... # Substitute source library paths into commands...
for name, number, libpath in srclibpaths: for name, number, libpath in srclibpaths:
@ -612,16 +614,16 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running build command for %s:%s" % raise BuildException("Error running build command for %s:%s" %
(app.id, thisbuild['version']), p.output) (app.id, build.version), p.output)
# Build native stuff if required... # Build native stuff if required...
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']: if build.buildjni and build.buildjni != ['no']:
logging.info("Building the native code") logging.info("Building the native code")
jni_components = thisbuild['buildjni'] jni_components = build.buildjni
if jni_components == ['yes']: if jni_components == ['yes']:
jni_components = [''] jni_components = ['']
cmd = [os.path.join(thisbuild['ndk_path'], "ndk-build"), "-j1"] cmd = [os.path.join(ndk_path, "ndk-build"), "-j1"]
for d in jni_components: for d in jni_components:
if d: if d:
logging.info("Building native code in '%s'" % d) logging.info("Building native code in '%s'" % d)
@ -640,15 +642,15 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
del manifest_text del manifest_text
p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d)) p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d))
if p.returncode != 0: if p.returncode != 0:
raise BuildException("NDK build failed for %s:%s" % (app.id, thisbuild['version']), p.output) raise BuildException("NDK build failed for %s:%s" % (app.id, build.version), p.output)
p = None p = None
# Build the release... # Build the release...
if thisbuild['type'] == 'maven': if method == 'maven':
logging.info("Building Maven project...") logging.info("Building Maven project...")
if '@' in thisbuild['maven']: if '@' in build.maven:
maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@', 1)[1]) maven_dir = os.path.join(root_dir, build.maven.split('@', 1)[1])
else: else:
maven_dir = root_dir maven_dir = root_dir
@ -656,12 +658,12 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
'-Dmaven.jar.sign.skip=true', '-Dmaven.test.skip=true', '-Dmaven.jar.sign.skip=true', '-Dmaven.test.skip=true',
'-Dandroid.sign.debug=false', '-Dandroid.release=true', '-Dandroid.sign.debug=false', '-Dandroid.release=true',
'package'] 'package']
if thisbuild['target']: if build.target:
target = thisbuild["target"].split('-')[1] target = build.target.split('-')[1]
common.regsub_file(r'<platform>[0-9]*</platform>', common.regsub_file(r'<platform>[0-9]*</platform>',
r'<platform>%s</platform>' % target, r'<platform>%s</platform>' % target,
os.path.join(root_dir, 'pom.xml')) os.path.join(root_dir, 'pom.xml'))
if '@' in thisbuild['maven']: if '@' in build.maven:
common.regsub_file(r'<platform>[0-9]*</platform>', common.regsub_file(r'<platform>[0-9]*</platform>',
r'<platform>%s</platform>' % target, r'<platform>%s</platform>' % target,
os.path.join(maven_dir, 'pom.xml')) os.path.join(maven_dir, 'pom.xml'))
@ -670,7 +672,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
bindir = os.path.join(root_dir, 'target') bindir = os.path.join(root_dir, 'target')
elif thisbuild['type'] == 'kivy': elif method == 'kivy':
logging.info("Building Kivy project...") logging.info("Building Kivy project...")
spec = os.path.join(root_dir, 'buildozer.spec') spec = os.path.join(root_dir, 'buildozer.spec')
@ -690,8 +692,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
modules = bconfig.get('app', 'requirements').split(',') modules = bconfig.get('app', 'requirements').split(',')
cmd = 'ANDROIDSDK=' + config['sdk_path'] cmd = 'ANDROIDSDK=' + config['sdk_path']
cmd += ' ANDROIDNDK=' + thisbuild['ndk_path'] cmd += ' ANDROIDNDK=' + ndk_path
cmd += ' ANDROIDNDKVER=' + thisbuild['ndk'] cmd += ' ANDROIDNDKVER=' + build.ndk
cmd += ' ANDROIDAPI=' + str(bconfig.get('app', 'android.api')) cmd += ' ANDROIDAPI=' + str(bconfig.get('app', 'android.api'))
cmd += ' VIRTUALENV=virtualenv' cmd += ' VIRTUALENV=virtualenv'
cmd += ' ./distribute.sh' cmd += ' ./distribute.sh'
@ -731,27 +733,27 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
cmd.append('release') cmd.append('release')
p = FDroidPopen(cmd, cwd=distdir) p = FDroidPopen(cmd, cwd=distdir)
elif thisbuild['type'] == 'gradle': elif method == 'gradle':
logging.info("Building Gradle project...") logging.info("Building Gradle project...")
# Avoid having to use lintOptions.abortOnError false # Avoid having to use lintOptions.abortOnError false
if thisbuild['gradlepluginver'] >= LooseVersion('0.7'): if build.gradlepluginver >= LooseVersion('0.7'):
with open(os.path.join(root_dir, 'build.gradle'), "a") as f: with open(os.path.join(root_dir, 'build.gradle'), "a") as f:
f.write("\nandroid { lintOptions { checkReleaseBuilds false } }\n") f.write("\nandroid { lintOptions { checkReleaseBuilds false } }\n")
cmd = [config['gradle']] cmd = [config['gradle']]
if thisbuild['gradleprops']: if build.gradleprops:
cmd += ['-P'+kv for kv in thisbuild['gradleprops']] cmd += ['-P'+kv for kv in build.gradleprops]
cmd += gradletasks cmd += gradletasks
p = FDroidPopen(cmd, cwd=root_dir) p = FDroidPopen(cmd, cwd=root_dir)
elif thisbuild['type'] == 'ant': elif method == 'ant':
logging.info("Building Ant project...") logging.info("Building Ant project...")
cmd = ['ant'] cmd = ['ant']
if thisbuild['antcommands']: if build.antcommands:
cmd += thisbuild['antcommands'] cmd += build.antcommands
else: else:
cmd += ['release'] cmd += ['release']
p = FDroidPopen(cmd, cwd=root_dir) p = FDroidPopen(cmd, cwd=root_dir)
@ -759,10 +761,10 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
bindir = os.path.join(root_dir, 'bin') bindir = os.path.join(root_dir, 'bin')
if p is not None and p.returncode != 0: if p is not None and p.returncode != 0:
raise BuildException("Build failed for %s:%s" % (app.id, thisbuild['version']), p.output) raise BuildException("Build failed for %s:%s" % (app.id, build.version), p.output)
logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app.id) logging.info("Successfully built version " + build.version + ' of ' + app.id)
if thisbuild['type'] == 'maven': if method == 'maven':
stdout_apk = '\n'.join([ stdout_apk = '\n'.join([
line for line in p.output.splitlines() if any( line for line in p.output.splitlines() if any(
a in line for a in ('.apk', '.ap_', '.jar'))]) a in line for a in ('.apk', '.ap_', '.jar'))])
@ -782,14 +784,14 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
raise BuildException('Failed to find output') raise BuildException('Failed to find output')
src = m.group(1) src = m.group(1)
src = os.path.join(bindir, src) + '.apk' src = os.path.join(bindir, src) + '.apk'
elif thisbuild['type'] == 'kivy': elif method == 'kivy':
src = os.path.join('python-for-android', 'dist', 'default', 'bin', src = os.path.join('python-for-android', 'dist', 'default', 'bin',
'{0}-{1}-release.apk'.format( '{0}-{1}-release.apk'.format(
bconfig.get('app', 'title'), bconfig.get('app', 'title'),
bconfig.get('app', 'version'))) bconfig.get('app', 'version')))
elif thisbuild['type'] == 'gradle': elif method == 'gradle':
if thisbuild['gradlepluginver'] >= LooseVersion('0.11'): if build.gradlepluginver >= LooseVersion('0.11'):
apks_dir = os.path.join(root_dir, 'build', 'outputs', 'apk') apks_dir = os.path.join(root_dir, 'build', 'outputs', 'apk')
else: else:
apks_dir = os.path.join(root_dir, 'build', 'apk') apks_dir = os.path.join(root_dir, 'build', 'apk')
@ -801,14 +803,14 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if len(apks) < 1: if len(apks) < 1:
raise BuildException('Failed to find gradle output in %s' % apks_dir) raise BuildException('Failed to find gradle output in %s' % apks_dir)
src = apks[0] src = apks[0]
elif thisbuild['type'] == 'ant': elif method == 'ant':
stdout_apk = '\n'.join([ stdout_apk = '\n'.join([
line for line in p.output.splitlines() if '.apk' in line]) line for line in p.output.splitlines() if '.apk' in line])
src = re.match(r".*^.*Creating (.+) for release.*$.*", stdout_apk, src = re.match(r".*^.*Creating (.+) for release.*$.*", stdout_apk,
re.S | re.M).group(1) re.S | re.M).group(1)
src = os.path.join(bindir, src) src = os.path.join(bindir, src)
elif thisbuild['type'] == 'raw': elif method == 'raw':
src = os.path.join(root_dir, thisbuild['output']) src = os.path.join(root_dir, build.output)
src = os.path.normpath(src) src = os.path.normpath(src)
# Make sure it's not debuggable... # Make sure it's not debuggable...
@ -850,12 +852,12 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
nativecode = nativecode.strip() nativecode = nativecode.strip()
nativecode = None if not nativecode else nativecode nativecode = None if not nativecode else nativecode
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']: if build.buildjni and build.buildjni != ['no']:
if nativecode is None: if nativecode is None:
raise BuildException("Native code should have been built but none was packaged") raise BuildException("Native code should have been built but none was packaged")
if thisbuild['novcheck']: if build.novcheck:
vercode = thisbuild['vercode'] vercode = build.vercode
version = thisbuild['version'] version = build.version
if not version or not vercode: if not version or not vercode:
raise BuildException("Could not find version information in build in output") raise BuildException("Could not find version information in build in output")
if not foundid: if not foundid:
@ -871,13 +873,13 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if index != -1: if index != -1:
version = version[:index] version = version[:index]
if (version != thisbuild['version'] or if (version != build.version or
vercode != thisbuild['vercode']): vercode != build.vercode):
raise BuildException(("Unexpected version/version code in output;" raise BuildException(("Unexpected version/version code in output;"
" APK: '%s' / '%s', " " APK: '%s' / '%s', "
" Expected: '%s' / '%s'") " Expected: '%s' / '%s'")
% (version, str(vercode), thisbuild['version'], % (version, str(vercode), build.version,
str(thisbuild['vercode'])) str(build.vercode))
) )
# Add information for 'fdroid verify' to be able to reproduce the build # Add information for 'fdroid verify' to be able to reproduce the build
@ -895,7 +897,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
# Copy the unsigned apk to our destination directory for further # Copy the unsigned apk to our destination directory for further
# processing (by publish.py)... # processing (by publish.py)...
dest = os.path.join(output_dir, common.getapkname(app, thisbuild)) dest = os.path.join(output_dir, common.getapkname(app, build))
shutil.copyfile(src, dest) shutil.copyfile(src, dest)
# Move the source tarball into the output directory... # Move the source tarball into the output directory...
@ -904,7 +906,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
os.path.join(output_dir, tarname)) os.path.join(output_dir, tarname))
def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir, extlib_dir, def trybuild(app, build, build_dir, output_dir, also_check_dir, srclib_dir, extlib_dir,
tmp_dir, repo_dir, vcs, test, server, force, onserver, refresh): tmp_dir, repo_dir, vcs, test, server, force, onserver, refresh):
""" """
Build a particular version of an application, if it needs building. Build a particular version of an application, if it needs building.
@ -923,7 +925,7 @@ def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir,
:returns: True if the build was done, False if it wasn't necessary. :returns: True if the build was done, False if it wasn't necessary.
""" """
dest_apk = common.getapkname(app, thisbuild) dest_apk = common.getapkname(app, build)
dest = os.path.join(output_dir, dest_apk) dest = os.path.join(output_dir, dest_apk)
dest_repo = os.path.join(repo_dir, dest_apk) dest_repo = os.path.join(repo_dir, dest_apk)
@ -937,20 +939,20 @@ def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir,
if os.path.exists(dest_also): if os.path.exists(dest_also):
return False return False
if thisbuild['disable'] and not options.force: if build.disable and not options.force:
return False return False
logging.info("Building version %s (%s) of %s" % ( logging.info("Building version %s (%s) of %s" % (
thisbuild['version'], thisbuild['vercode'], app.id)) build.version, build.vercode, app.id))
if server: if server:
# When using server mode, still keep a local cache of the repo, by # When using server mode, still keep a local cache of the repo, by
# grabbing the source now. # grabbing the source now.
vcs.gotorevision(thisbuild['commit']) vcs.gotorevision(build.commit)
build_server(app, thisbuild, vcs, build_dir, output_dir, force) build_server(app, build, vcs, build_dir, output_dir, force)
else: else:
build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh) build_local(app, build, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver, refresh)
return True return True
@ -1060,7 +1062,7 @@ def main():
if options.latest: if options.latest:
for app in apps.itervalues(): for app in apps.itervalues():
for build in reversed(app.builds): for build in reversed(app.builds):
if build['disable'] and not options.force: if build.disable and not options.force:
continue continue
app.builds = [build] app.builds = [build]
break break
@ -1078,7 +1080,7 @@ def main():
first = True first = True
for thisbuild in app.builds: for build in app.builds:
wikilog = None wikilog = None
try: try:
@ -1098,8 +1100,8 @@ def main():
first = False first = False
logging.debug("Checking " + thisbuild['version']) logging.debug("Checking " + build.version)
if trybuild(app, thisbuild, build_dir, output_dir, if trybuild(app, build, build_dir, output_dir,
also_check_dir, srclib_dir, extlib_dir, also_check_dir, srclib_dir, extlib_dir,
tmp_dir, repo_dir, vcs, options.test, tmp_dir, repo_dir, vcs, options.test,
options.server, options.force, options.server, options.force,
@ -1112,10 +1114,10 @@ def main():
# alongside our built one in the 'unsigend' # alongside our built one in the 'unsigend'
# directory. # directory.
url = app.Binaries url = app.Binaries
url = url.replace('%v', thisbuild['version']) url = url.replace('%v', build.version)
url = url.replace('%c', str(thisbuild['vercode'])) url = url.replace('%c', str(build.vercode))
logging.info("...retrieving " + url) logging.info("...retrieving " + url)
of = "{0}_{1}.apk.binary".format(app.id, thisbuild['vercode']) of = "{0}_{1}.apk.binary".format(app.id, build.vercode)
of = os.path.join(output_dir, of) of = os.path.join(output_dir, of)
net.download_file(url, local_filename=of) net.download_file(url, local_filename=of)
@ -1148,7 +1150,7 @@ def main():
if options.wiki and wikilog: if options.wiki and wikilog:
try: try:
# Write a page with the last build log for this version code # Write a page with the last build log for this version code
lastbuildpage = appid + '/lastbuild_' + thisbuild['vercode'] lastbuildpage = appid + '/lastbuild_' + build.vercode
newpage = site.Pages[lastbuildpage] newpage = site.Pages[lastbuildpage]
txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog
newpage.save(txt, summary='Build log') newpage.save(txt, summary='Build log')

View file

@ -108,10 +108,12 @@ def check_tags(app, pattern):
vcs.gotorevision(None) vcs.gotorevision(None)
flavours = [] last_build = metadata.Build()
if len(app.builds) > 0: if len(app.builds) > 0:
if app.builds[-1]['gradle']: last_build = app.builds[-1]
flavours = app.builds[-1]['gradle']
if last_build.submodules:
vcs.initsubmodules()
hpak = None hpak = None
htag = None htag = None
@ -143,7 +145,7 @@ def check_tags(app, pattern):
root_dir = build_dir root_dir = build_dir
else: else:
root_dir = os.path.join(build_dir, subdir) root_dir = os.path.join(build_dir, subdir)
paths = common.manifest_paths(root_dir, flavours) paths = common.manifest_paths(root_dir, last_build.gradle)
version, vercode, package = common.parse_androidmanifests(paths, app) version, vercode, package = common.parse_androidmanifests(paths, app)
if vercode: if vercode:
logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})" logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})"
@ -199,10 +201,12 @@ def check_repomanifest(app, branch=None):
elif repotype == 'bzr': elif repotype == 'bzr':
vcs.gotorevision(None) vcs.gotorevision(None)
flavours = [] last_build = metadata.Build()
if len(app.builds) > 0: if len(app.builds) > 0:
if app.builds[-1]['gradle']: last_build = app.builds[-1]
flavours = app.builds[-1]['gradle']
if last_build.submodules:
vcs.initsubmodules()
hpak = None hpak = None
hver = None hver = None
@ -212,7 +216,7 @@ def check_repomanifest(app, branch=None):
root_dir = build_dir root_dir = build_dir
else: else:
root_dir = os.path.join(build_dir, subdir) root_dir = os.path.join(build_dir, subdir)
paths = common.manifest_paths(root_dir, flavours) paths = common.manifest_paths(root_dir, last_build.gradle)
version, vercode, package = common.parse_androidmanifests(paths, app) version, vercode, package = common.parse_androidmanifests(paths, app)
if vercode: if vercode:
logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})" logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})"
@ -313,14 +317,12 @@ def possible_subdirs(app):
else: else:
build_dir = os.path.join('build', app.id) build_dir = os.path.join('build', app.id)
flavours = [] last_build = metadata.Build()
if len(app.builds) > 0: if len(app.builds) > 0:
build = app.builds[-1] last_build = app.builds[-1]
if build['gradle']:
flavours = build['gradle']
for d in dirs_with_manifest(build_dir): for d in dirs_with_manifest(build_dir):
m_paths = common.manifest_paths(d, flavours) m_paths = common.manifest_paths(d, last_build.gradle)
package = common.parse_androidmanifests(m_paths, app)[2] package = common.parse_androidmanifests(m_paths, app)[2]
if package is not None: if package is not None:
subdir = os.path.relpath(d, build_dir) subdir = os.path.relpath(d, build_dir)
@ -344,10 +346,9 @@ def fetch_autoname(app, tag):
except VCSException: except VCSException:
return None return None
flavours = [] last_build = metadata.Build()
if len(app.builds) > 0: if len(app.builds) > 0:
if app.builds[-1]['gradle']: last_build = app.builds[-1]
flavours = app.builds[-1]['gradle']
logging.debug("...fetch auto name from " + build_dir) logging.debug("...fetch auto name from " + build_dir)
new_name = None new_name = None
@ -356,7 +357,7 @@ def fetch_autoname(app, tag):
root_dir = build_dir root_dir = build_dir
else: else:
root_dir = os.path.join(build_dir, subdir) root_dir = os.path.join(build_dir, subdir)
new_name = common.fetch_real_name(root_dir, flavours) new_name = common.fetch_real_name(root_dir, last_build.gradle)
if new_name is not None: if new_name is not None:
break break
commitmsg = None commitmsg = None
@ -458,25 +459,25 @@ def checkupdates_app(app, first=True):
gotcur = False gotcur = False
latest = None latest = None
for build in app.builds: for build in app.builds:
if int(build['vercode']) >= int(app.CurrentVersionCode): if int(build.vercode) >= int(app.CurrentVersionCode):
gotcur = True gotcur = True
if not latest or int(build['vercode']) > int(latest['vercode']): if not latest or int(build.vercode) > int(latest.vercode):
latest = build latest = build
if int(latest['vercode']) > int(app.CurrentVersionCode): if int(latest.vercode) > int(app.CurrentVersionCode):
logging.info("Refusing to auto update, since the latest build is newer") logging.info("Refusing to auto update, since the latest build is newer")
if not gotcur: if not gotcur:
newbuild = latest.copy() newbuild = latest.copy()
if 'origlines' in newbuild: if newbuild.origlines:
del newbuild['origlines'] del newbuild.origlines[:]
newbuild['disable'] = False newbuild.disable = False
newbuild['vercode'] = app.CurrentVersionCode newbuild.vercode = app.CurrentVersionCode
newbuild['version'] = app.CurrentVersion + suffix newbuild.version = app.CurrentVersion + suffix
logging.info("...auto-generating build for " + newbuild['version']) logging.info("...auto-generating build for " + newbuild.version)
commit = pattern.replace('%v', newbuild['version']) commit = pattern.replace('%v', newbuild.version)
commit = commit.replace('%c', newbuild['vercode']) commit = commit.replace('%c', newbuild.vercode)
newbuild['commit'] = commit newbuild.commit = commit
app.builds.append(newbuild) app.builds.append(newbuild)
name = common.getappname(app) name = common.getappname(app)
ver = common.getcvname(app) ver = common.getcvname(app)

View file

@ -221,15 +221,6 @@ def read_config(opts, config_file='config.py'):
return config return config
def get_ndk_path(version):
if version is None:
version = 'r10e' # falls back to latest
paths = config['ndk_paths']
if version not in paths:
return ''
return paths[version] or ''
def find_sdk_tools_cmd(cmd): def find_sdk_tools_cmd(cmd):
'''find a working path to a tool from the Android SDK''' '''find a working path to a tool from the Android SDK'''
@ -363,10 +354,10 @@ def read_app_args(args, allapps, allow_vercodes=False):
vc = vercodes[appid] vc = vercodes[appid]
if not vc: if not vc:
continue continue
app.builds = [b for b in app.builds if b['vercode'] in vc] app.builds = [b for b in app.builds if b.vercode in vc]
if len(app.builds) != len(vercodes[appid]): if len(app.builds) != len(vercodes[appid]):
error = True error = True
allvcs = [b['vercode'] for b in app.builds] allvcs = [b.vercode for b in app.builds]
for v in vercodes[appid]: for v in vercodes[appid]:
if v not in allvcs: if v not in allvcs:
logging.critical("No such vercode %s for app %s" % (v, appid)) logging.critical("No such vercode %s for app %s" % (v, appid))
@ -419,11 +410,11 @@ def apknameinfo(filename):
def getapkname(app, build): def getapkname(app, build):
return "%s_%s.apk" % (app.id, build['vercode']) return "%s_%s.apk" % (app.id, build.vercode)
def getsrcname(app, build): def getsrcname(app, build):
return "%s_%s_src.tar.gz" % (app.id, build['vercode']) return "%s_%s_src.tar.gz" % (app.id, build.vercode)
def getappname(app): def getappname(app):
@ -1250,17 +1241,17 @@ gradle_version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True): def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True):
# Optionally, the actual app source can be in a subdirectory # Optionally, the actual app source can be in a subdirectory
if build['subdir']: if build.subdir:
root_dir = os.path.join(build_dir, build['subdir']) root_dir = os.path.join(build_dir, build.subdir)
else: else:
root_dir = build_dir root_dir = build_dir
# Get a working copy of the right revision # Get a working copy of the right revision
logging.info("Getting source for revision " + build['commit']) logging.info("Getting source for revision " + build.commit)
vcs.gotorevision(build['commit'], refresh) vcs.gotorevision(build.commit, refresh)
# Initialise submodules if required # Initialise submodules if required
if build['submodules']: if build.submodules:
logging.info("Initialising submodules") logging.info("Initialising submodules")
vcs.initsubmodules() vcs.initsubmodules()
@ -1270,19 +1261,19 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
raise BuildException('Missing subdir ' + root_dir) raise BuildException('Missing subdir ' + root_dir)
# Run an init command if one is required # Run an init command if one is required
if build['init']: if build.init:
cmd = replace_config_vars(build['init'], build) cmd = replace_config_vars(build.init, build)
logging.info("Running 'init' commands in %s" % root_dir) logging.info("Running 'init' commands in %s" % root_dir)
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running init command for %s:%s" % raise BuildException("Error running init command for %s:%s" %
(app.id, build['version']), p.output) (app.id, build.version), p.output)
# Apply patches if any # Apply patches if any
if build['patch']: if build.patch:
logging.info("Applying patches") logging.info("Applying patches")
for patch in build['patch']: for patch in build.patch:
patch = patch.strip() patch = patch.strip()
logging.info("Applying " + patch) logging.info("Applying " + patch)
patch_path = os.path.join('metadata', app.id, patch) patch_path = os.path.join('metadata', app.id, patch)
@ -1292,9 +1283,9 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
# Get required source libraries # Get required source libraries
srclibpaths = [] srclibpaths = []
if build['srclibs']: if build.srclibs:
logging.info("Collecting source libraries") logging.info("Collecting source libraries")
for lib in build['srclibs']: for lib in build.srclibs:
srclibpaths.append(getsrclib(lib, srclib_dir, build, preponly=onserver, refresh=refresh)) srclibpaths.append(getsrclib(lib, srclib_dir, build, preponly=onserver, refresh=refresh))
for name, number, libpath in srclibpaths: for name, number, libpath in srclibpaths:
@ -1307,8 +1298,8 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
# Update the local.properties file # Update the local.properties file
localprops = [os.path.join(build_dir, 'local.properties')] localprops = [os.path.join(build_dir, 'local.properties')]
if build['subdir']: if build.subdir:
parts = build['subdir'].split(os.sep) parts = build.subdir.split(os.sep)
cur = build_dir cur = build_dir
for d in parts: for d in parts:
cur = os.path.join(cur, d) cur = os.path.join(cur, d)
@ -1324,26 +1315,27 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
logging.info("Creating local.properties file at %s" % path) logging.info("Creating local.properties file at %s" % path)
# Fix old-fashioned 'sdk-location' by copying # Fix old-fashioned 'sdk-location' by copying
# from sdk.dir, if necessary # from sdk.dir, if necessary
if build['oldsdkloc']: if build.oldsdkloc:
sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props, sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props,
re.S | re.M).group(1) re.S | re.M).group(1)
props += "sdk-location=%s\n" % sdkloc props += "sdk-location=%s\n" % sdkloc
else: else:
props += "sdk.dir=%s\n" % config['sdk_path'] props += "sdk.dir=%s\n" % config['sdk_path']
props += "sdk-location=%s\n" % config['sdk_path'] props += "sdk-location=%s\n" % config['sdk_path']
if build['ndk_path']: ndk_path = build.ndk_path()
if ndk_path:
# Add ndk location # Add ndk location
props += "ndk.dir=%s\n" % build['ndk_path'] props += "ndk.dir=%s\n" % ndk_path
props += "ndk-location=%s\n" % build['ndk_path'] props += "ndk-location=%s\n" % ndk_path
# Add java.encoding if necessary # Add java.encoding if necessary
if build['encoding']: if build.encoding:
props += "java.encoding=%s\n" % build['encoding'] props += "java.encoding=%s\n" % build.encoding
with open(path, 'w') as f: with open(path, 'w') as f:
f.write(props) f.write(props)
flavours = [] flavours = []
if build['type'] == 'gradle': if build.method() == 'gradle':
flavours = build['gradle'] flavours = build.gradle
gradlepluginver = None gradlepluginver = None
@ -1372,13 +1364,13 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
break break
if gradlepluginver: if gradlepluginver:
build['gradlepluginver'] = LooseVersion(gradlepluginver) build.gradlepluginver = LooseVersion(gradlepluginver)
else: else:
logging.warn("Could not fetch the gradle plugin version, defaulting to 0.11") logging.warn("Could not fetch the gradle plugin version, defaulting to 0.11")
build['gradlepluginver'] = LooseVersion('0.11') build.gradlepluginver = LooseVersion('0.11')
if build['target']: if build.target:
n = build["target"].split('-')[1] n = build.target.split('-')[1]
regsub_file(r'compileSdkVersion[ =]+[0-9]+', regsub_file(r'compileSdkVersion[ =]+[0-9]+',
r'compileSdkVersion %s' % n, r'compileSdkVersion %s' % n,
os.path.join(root_dir, 'build.gradle')) os.path.join(root_dir, 'build.gradle'))
@ -1387,38 +1379,38 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
remove_debuggable_flags(root_dir) remove_debuggable_flags(root_dir)
# Insert version code and number into the manifest if necessary # Insert version code and number into the manifest if necessary
if build['forceversion']: if build.forceversion:
logging.info("Changing the version name") logging.info("Changing the version name")
for path in manifest_paths(root_dir, flavours): for path in manifest_paths(root_dir, flavours):
if not os.path.isfile(path): if not os.path.isfile(path):
continue continue
if has_extension(path, 'xml'): if has_extension(path, 'xml'):
regsub_file(r'android:versionName="[^"]*"', regsub_file(r'android:versionName="[^"]*"',
r'android:versionName="%s"' % build['version'], r'android:versionName="%s"' % build.version,
path) path)
elif has_extension(path, 'gradle'): elif has_extension(path, 'gradle'):
regsub_file(r"""(\s*)versionName[\s'"=]+.*""", regsub_file(r"""(\s*)versionName[\s'"=]+.*""",
r"""\1versionName '%s'""" % build['version'], r"""\1versionName '%s'""" % build.version,
path) path)
if build['forcevercode']: if build.forcevercode:
logging.info("Changing the version code") logging.info("Changing the version code")
for path in manifest_paths(root_dir, flavours): for path in manifest_paths(root_dir, flavours):
if not os.path.isfile(path): if not os.path.isfile(path):
continue continue
if has_extension(path, 'xml'): if has_extension(path, 'xml'):
regsub_file(r'android:versionCode="[^"]*"', regsub_file(r'android:versionCode="[^"]*"',
r'android:versionCode="%s"' % build['vercode'], r'android:versionCode="%s"' % build.vercode,
path) path)
elif has_extension(path, 'gradle'): elif has_extension(path, 'gradle'):
regsub_file(r'versionCode[ =]+[0-9]+', regsub_file(r'versionCode[ =]+[0-9]+',
r'versionCode %s' % build['vercode'], r'versionCode %s' % build.vercode,
path) path)
# Delete unwanted files # Delete unwanted files
if build['rm']: if build.rm:
logging.info("Removing specified files") logging.info("Removing specified files")
for part in getpaths(build_dir, build['rm']): for part in getpaths(build_dir, build.rm):
dest = os.path.join(build_dir, part) dest = os.path.join(build_dir, part)
logging.info("Removing {0}".format(part)) logging.info("Removing {0}".format(part))
if os.path.lexists(dest): if os.path.lexists(dest):
@ -1432,12 +1424,12 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
remove_signing_keys(build_dir) remove_signing_keys(build_dir)
# Add required external libraries # Add required external libraries
if build['extlibs']: if build.extlibs:
logging.info("Collecting prebuilt libraries") logging.info("Collecting prebuilt libraries")
libsdir = os.path.join(root_dir, 'libs') libsdir = os.path.join(root_dir, 'libs')
if not os.path.exists(libsdir): if not os.path.exists(libsdir):
os.mkdir(libsdir) os.mkdir(libsdir)
for lib in build['extlibs']: for lib in build.extlibs:
lib = lib.strip() lib = lib.strip()
logging.info("...installing extlib {0}".format(lib)) logging.info("...installing extlib {0}".format(lib))
libf = os.path.basename(lib) libf = os.path.basename(lib)
@ -1447,10 +1439,10 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
shutil.copyfile(libsrc, os.path.join(libsdir, libf)) shutil.copyfile(libsrc, os.path.join(libsdir, libf))
# Run a pre-build command if one is required # Run a pre-build command if one is required
if build['prebuild']: if build.prebuild:
logging.info("Running 'prebuild' commands in %s" % root_dir) logging.info("Running 'prebuild' commands in %s" % root_dir)
cmd = replace_config_vars(build['prebuild'], build) cmd = replace_config_vars(build.prebuild, build)
# Substitute source library paths into prebuild commands # Substitute source library paths into prebuild commands
for name, number, libpath in srclibpaths: for name, number, libpath in srclibpaths:
@ -1460,20 +1452,20 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
if p.returncode != 0: if p.returncode != 0:
raise BuildException("Error running prebuild command for %s:%s" % raise BuildException("Error running prebuild command for %s:%s" %
(app.id, build['version']), p.output) (app.id, build.version), p.output)
# Generate (or update) the ant build file, build.xml... # Generate (or update) the ant build file, build.xml...
if build['update'] and build['update'] != ['no'] and build['type'] == 'ant': if build.update and build.update != ['no'] and build.method() == 'ant':
parms = ['android', 'update', 'lib-project'] parms = ['android', 'update', 'lib-project']
lparms = ['android', 'update', 'project'] lparms = ['android', 'update', 'project']
if build['target']: if build.target:
parms += ['-t', build['target']] parms += ['-t', build.target]
lparms += ['-t', build['target']] lparms += ['-t', build.target]
if build['update'] == ['auto']: if build.update:
update_dirs = ant_subprojects(root_dir) + ['.'] update_dirs = build.update
else: else:
update_dirs = build['update'] update_dirs = ant_subprojects(root_dir) + ['.']
for d in update_dirs: for d in update_dirs:
subdir = os.path.join(root_dir, d) subdir = os.path.join(root_dir, d)
@ -1770,9 +1762,9 @@ def replace_config_vars(cmd, build):
cmd = cmd.replace('$$NDK$$', env['ANDROID_NDK']) cmd = cmd.replace('$$NDK$$', env['ANDROID_NDK'])
cmd = cmd.replace('$$MVN3$$', config['mvn3']) cmd = cmd.replace('$$MVN3$$', config['mvn3'])
if build is not None: if build is not None:
cmd = cmd.replace('$$COMMIT$$', build['commit']) cmd = cmd.replace('$$COMMIT$$', build.commit)
cmd = cmd.replace('$$VERSION$$', build['version']) cmd = cmd.replace('$$VERSION$$', build.version)
cmd = cmd.replace('$$VERCODE$$', build['vercode']) cmd = cmd.replace('$$VERCODE$$', build.vercode)
return cmd return cmd

View file

@ -223,20 +223,15 @@ def main():
sys.exit(1) sys.exit(1)
# Create a build line... # Create a build line...
build = {} build = metadata.Build()
build['version'] = version or '?' build.version = version or '?'
build['vercode'] = vercode or '?' build.vercode = vercode or '?'
build['commit'] = '?' build.commit = '?'
build['disable'] = 'Generated by import.py - check/set version fields and commit id' build.disable = 'Generated by import.py - check/set version fields and commit id'
if options.subdir: if options.subdir:
build['subdir'] = options.subdir build.subdir = options.subdir
if os.path.exists(os.path.join(root_dir, 'jni')): if os.path.exists(os.path.join(root_dir, 'jni')):
build['buildjni'] = ['yes'] build.buildjni = ['yes']
for flag, value in metadata.flag_defaults.iteritems():
if flag in build:
continue
build[flag] = value
app.builds.append(build) app.builds.append(build)

View file

@ -122,11 +122,11 @@ def get_lastbuild(builds):
lowest_vercode = -1 lowest_vercode = -1
lastbuild = None lastbuild = None
for build in builds: for build in builds:
if not build['disable']: if not build.disable:
vercode = int(build['vercode']) vercode = int(build.vercode)
if lowest_vercode == -1 or vercode < lowest_vercode: if lowest_vercode == -1 or vercode < lowest_vercode:
lowest_vercode = vercode lowest_vercode = vercode
if not lastbuild or int(build['vercode']) > int(lastbuild['vercode']): if not lastbuild or int(build.vercode) > int(lastbuild.vercode):
lastbuild = build lastbuild = build
return lastbuild return lastbuild
@ -134,14 +134,14 @@ def get_lastbuild(builds):
def check_ucm_tags(app): def check_ucm_tags(app):
lastbuild = get_lastbuild(app.builds) lastbuild = get_lastbuild(app.builds)
if (lastbuild is not None if (lastbuild is not None
and lastbuild['commit'] and lastbuild.commit
and app.UpdateCheckMode == 'RepoManifest' and app.UpdateCheckMode == 'RepoManifest'
and not lastbuild['commit'].startswith('unknown') and not lastbuild.commit.startswith('unknown')
and lastbuild['vercode'] == app.CurrentVersionCode and lastbuild.vercode == app.CurrentVersionCode
and not lastbuild['forcevercode'] and not lastbuild.forcevercode
and any(s in lastbuild['commit'] for s in '.,_-/')): and any(s in lastbuild.commit for s in '.,_-/')):
yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % ( yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % (
lastbuild['commit'], app.UpdateCheckMode) lastbuild.commit, app.UpdateCheckMode)
def check_char_limits(app): def check_char_limits(app):
@ -285,12 +285,12 @@ def check_bulleted_lists(app):
def check_builds(app): def check_builds(app):
for build in app.builds: for build in app.builds:
if build['disable']: if build.disable:
continue continue
for s in ['master', 'origin', 'HEAD', 'default', 'trunk']: for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
if build['commit'] and build['commit'].startswith(s): if build.commit and build.commit.startswith(s):
yield "Branch '%s' used as commit in build '%s'" % (s, build['version']) yield "Branch '%s' used as commit in build '%s'" % (s, build.version)
for srclib in build['srclibs']: for srclib in build.srclibs:
ref = srclib.split('@')[1].split('/')[0] ref = srclib.split('@')[1].split('/')[0]
if ref.startswith(s): if ref.startswith(s):
yield "Branch '%s' used as commit in srclib '%s'" % (s, srclib) yield "Branch '%s' used as commit in srclib '%s'" % (s, srclib)

View file

@ -38,8 +38,6 @@ except ImportError:
# use the C implementation when available # use the C implementation when available
import xml.etree.cElementTree as ElementTree import xml.etree.cElementTree as ElementTree
from collections import OrderedDict
import common import common
srclibs = None srclibs = None
@ -155,7 +153,16 @@ class App():
# Constructs an old-fashioned dict with the human-readable field # Constructs an old-fashioned dict with the human-readable field
# names. Should only be used for tests. # names. Should only be used for tests.
def field_dict(self): def field_dict(self):
return {App.attr_to_field(k): v for k, v in self.__dict__.iteritems()} d = {}
for k, v in self.__dict__.iteritems():
if k == 'builds':
d['builds'] = []
for build in v:
d['builds'].append(build.__dict__)
else:
k = App.attr_to_field(k)
d[k] = v
return d
# Gets the value associated to a field name, e.g. 'Auto Name' # Gets the value associated to a field name, e.g. 'Auto Name'
def get_field(self, f): def get_field(self, f):
@ -184,44 +191,156 @@ class App():
# Like dict.update(), but using human-readable field names # Like dict.update(), but using human-readable field names
def update_fields(self, d): def update_fields(self, d):
for f, v in d.iteritems(): for f, v in d.iteritems():
self.set_field(f, v) if f == 'builds':
for b in v:
build = Build()
build.update_flags(b)
self.builds.append(build)
else:
self.set_field(f, v)
def metafieldtype(name):
    """Return the value type expected for the metadata field *name*.

    One of: 'multiline', 'list', 'build', 'buildv2', 'obsolete',
    'string', or 'unknown' for names not in app_fields.
    """
    # Fields whose values span multiple indented lines in the .txt format.
    if name in ('Description', 'Maintainer Notes'):
        return 'multiline'
    # Comma-separated list fields.
    if name in ('Categories', 'AntiFeatures'):
        return 'list'
    # Fields with their own dedicated parsing modes.
    special = {
        'Build Version': 'build',
        'Build': 'buildv2',
        'Use Built': 'obsolete',
    }
    if name in special:
        return special[name]
    # Anything outside the known field set is flagged as unknown.
    return 'string' if name in app_fields else 'unknown'
# In the order in which they are laid out on files # In the order in which they are laid out on files
# Sorted by their action and their place in the build timeline build_flags_order = [
# These variables can have varying datatypes. For example, anything with 'disable',
# flagtype(v) == 'list' is inited as False, then set as a list of strings. 'commit',
flag_defaults = OrderedDict([ 'subdir',
('disable', False), 'submodules',
('commit', None), 'init',
('subdir', None), 'patch',
('submodules', False), 'gradle',
('init', ''), 'maven',
('patch', []), 'kivy',
('gradle', False), 'output',
('maven', False), 'srclibs',
('kivy', False), 'oldsdkloc',
('output', None), 'encoding',
('srclibs', []), 'forceversion',
('oldsdkloc', False), 'forcevercode',
('encoding', None), 'rm',
('forceversion', False), 'extlibs',
('forcevercode', False), 'prebuild',
('rm', []), 'update',
('extlibs', []), 'target',
('prebuild', ''), 'scanignore',
('update', ['auto']), 'scandelete',
('target', None), 'build',
('scanignore', []), 'buildjni',
('scandelete', []), 'ndk',
('build', ''), 'preassemble',
('buildjni', []), 'gradleprops',
('ndk', 'r10e'), # defaults to latest 'antcommands',
('preassemble', []), 'novcheck',
('gradleprops', []), ]
('antcommands', None),
('novcheck', False),
]) build_flags = set(build_flags_order + ['version', 'vercode'])
class Build():
    """One build of an app, i.e. a single version/vercode entry.

    Attributes mirror the build flags of the txt/xml metadata formats
    and are initialised to their default values in the constructor.
    ``version`` and ``vercode`` are valid flags (see ``build_flags``)
    but have no default and are set by the parsers.
    """

    def __init__(self):
        self.disable = False
        self.commit = None
        self.subdir = None
        self.submodules = False
        self.init = ''
        self.patch = []
        self.gradle = False
        self.maven = False
        self.kivy = False
        self.output = None
        self.srclibs = []
        self.oldsdkloc = False
        self.encoding = None
        self.forceversion = False
        self.forcevercode = False
        self.rm = []
        self.extlibs = []
        self.prebuild = ''
        self.update = None
        self.target = None
        self.scanignore = []
        self.scandelete = []
        self.build = ''
        self.buildjni = []
        self.ndk = None
        self.preassemble = []
        self.gradleprops = []
        self.antcommands = None
        self.novcheck = False

    def get_flag(self, f):
        """Return the value of build flag *f*.

        Raises MetaDataException if *f* is not a recognised flag name.
        """
        if f not in build_flags:
            raise MetaDataException('Unrecognised build flag: ' + f)
        return getattr(self, f)

    def set_flag(self, f, v):
        """Set build flag *f* to *v*, mapping legacy XML names first.

        'versionName'/'versionCode' are aliases kept for the XML format.
        Raises MetaDataException if *f* is not a recognised flag name.
        """
        if f == 'versionName':
            f = 'version'
        if f == 'versionCode':
            f = 'vercode'
        if f not in build_flags:
            raise MetaDataException('Unrecognised build flag: ' + f)
        setattr(self, f, v)

    def append_flag(self, f, v):
        """Append *v* to the list-valued flag *f*, creating it if needed.

        Raises MetaDataException if *f* is not a recognised flag name.
        """
        if f not in build_flags:
            raise MetaDataException('Unrecognised build flag: ' + f)
        if f not in self.__dict__:
            self.__dict__[f] = [v]
        else:
            self.__dict__[f].append(v)

    def method(self):
        """Return the build method: 'maven', 'gradle', 'kivy', 'raw' or 'ant'."""
        for f in ['maven', 'gradle', 'kivy']:
            if self.get_flag(f):
                return f
        # BUGFIX: was 'build.output' — 'build' is undefined inside this
        # method and would raise NameError; the instance is 'self'.
        if self.output:
            return 'raw'
        return 'ant'

    def ndk_path(self):
        """Return the configured path for this build's NDK version.

        Returns '' when the version is not configured; falls back to the
        latest known release when no version is requested.
        """
        version = self.ndk
        if not version:
            version = 'r10e'  # falls back to latest
        paths = common.config['ndk_paths']
        if version not in paths:
            return ''
        # 'or '' ' (as in the old get_ndk_path) so a None entry in the
        # config dict does not leak out of this method.
        return paths[version] or ''

    def update_flags(self, d):
        """Set several flags at once from a {flag: value} dict."""
        for f, v in d.iteritems():
            self.set_flag(f, v)
def flagtype(name):
    """Return the value type of build flag *name*.

    One of 'list', 'script', 'bool' or 'string' (the default).
    """
    list_flags = frozenset([
        'extlibs', 'srclibs', 'patch', 'rm', 'buildjni', 'preassemble',
        'update', 'scanignore', 'scandelete', 'gradle', 'antcommands',
        'gradleprops',
    ])
    script_flags = frozenset(['init', 'prebuild', 'build'])
    bool_flags = frozenset([
        'submodules', 'oldsdkloc', 'forceversion', 'forcevercode',
        'novcheck',
    ])
    if name in list_flags:
        return 'list'
    if name in script_flags:
        return 'script'
    if name in bool_flags:
        return 'bool'
    return 'string'
# Designates a metadata field type and checks that it matches # Designates a metadata field type and checks that it matches
@ -230,18 +349,18 @@ flag_defaults = OrderedDict([
# 'matching' - List of possible values or regex expression # 'matching' - List of possible values or regex expression
# 'sep' - Separator to use if value may be a list # 'sep' - Separator to use if value may be a list
# 'fields' - Metadata fields (Field:Value) of this type # 'fields' - Metadata fields (Field:Value) of this type
# 'attrs' - Build attributes (attr=value) of this type # 'flags' - Build flags (flag=value) of this type
# #
class FieldValidator(): class FieldValidator():
def __init__(self, name, matching, sep, fields, attrs): def __init__(self, name, matching, sep, fields, flags):
self.name = name self.name = name
self.matching = matching self.matching = matching
if type(matching) is str: if type(matching) is str:
self.compiled = re.compile(matching) self.compiled = re.compile(matching)
self.sep = sep self.sep = sep
self.fields = fields self.fields = fields
self.attrs = attrs self.flags = flags
def _assert_regex(self, values, appid): def _assert_regex(self, values, appid):
for v in values: for v in values:
@ -257,13 +376,13 @@ class FieldValidator():
% (v, self.name, appid) + % (v, self.name, appid) +
"Possible values: %s" % (", ".join(self.matching))) "Possible values: %s" % (", ".join(self.matching)))
def check(self, value, appid): def check(self, v, appid):
if type(value) is not str or not value: if type(v) is not str or not v:
return return
if self.sep is not None: if self.sep is not None:
values = value.split(self.sep) values = v.split(self.sep)
else: else:
values = [value] values = [v]
if type(self.matching) is list: if type(self.matching) is list:
self._assert_list(values, appid) self._assert_list(values, appid)
else: else:
@ -337,11 +456,11 @@ valuetypes = {
# Check an app's metadata information for integrity errors # Check an app's metadata information for integrity errors
def check_metadata(app): def check_metadata(app):
for v in valuetypes: for v in valuetypes:
for field in v.fields: for f in v.fields:
v.check(app.get_field(field), app.id) v.check(app.get_field(f), app.id)
for build in app.builds: for build in app.builds:
for attr in v.attrs: for f in v.flags:
v.check(build[attr], app.id) v.check(build.get_flag(f), app.id)
# Formatter for descriptions. Create an instance, and call parseline() with # Formatter for descriptions. Create an instance, and call parseline() with
@ -554,14 +673,14 @@ def parse_srclib(metadatapath):
continue continue
try: try:
field, value = line.split(':', 1) f, v = line.split(':', 1)
except ValueError: except ValueError:
raise MetaDataException("Invalid metadata in %s:%d" % (line, n)) raise MetaDataException("Invalid metadata in %s:%d" % (line, n))
if field == "Subdir": if f == "Subdir":
thisinfo[field] = value.split(',') thisinfo[f] = v.split(',')
else: else:
thisinfo[field] = value thisinfo[f] = v
return thisinfo return thisinfo
@ -640,54 +759,6 @@ def read_metadata(xref=True):
return apps return apps
# Get the type expected for a given metadata field.
def metafieldtype(name):
if name in ['Description', 'Maintainer Notes']:
return 'multiline'
if name in ['Categories', 'AntiFeatures']:
return 'list'
if name == 'Build Version':
return 'build'
if name == 'Build':
return 'buildv2'
if name == 'Use Built':
return 'obsolete'
if name not in app_fields:
return 'unknown'
return 'string'
def flagtype(name):
if name in ['extlibs', 'srclibs', 'patch', 'rm', 'buildjni', 'preassemble',
'update', 'scanignore', 'scandelete', 'gradle', 'antcommands',
'gradleprops']:
return 'list'
if name in ['init', 'prebuild', 'build']:
return 'script'
if name in ['submodules', 'oldsdkloc', 'forceversion', 'forcevercode',
'novcheck']:
return 'bool'
return 'string'
def fill_build_defaults(build):
def get_build_type():
for t in ['maven', 'gradle', 'kivy']:
if build[t]:
return t
if build['output']:
return 'raw'
return 'ant'
for flag, value in flag_defaults.iteritems():
if flag in build:
continue
build[flag] = value
build['type'] = get_build_type()
build['ndk_path'] = common.get_ndk_path(build['ndk'])
def split_list_values(s): def split_list_values(s):
# Port legacy ';' separators # Port legacy ';' separators
l = [v.strip() for v in s.replace(';', ',').split(',')] l = [v.strip() for v in s.replace(';', ',').split(',')]
@ -709,7 +780,7 @@ def get_default_app_info(metadatapath=None):
def sorted_builds(builds): def sorted_builds(builds):
return sorted(builds, key=lambda build: int(build['vercode'])) return sorted(builds, key=lambda build: int(build.vercode))
def post_metadata_parse(app): def post_metadata_parse(app):
@ -726,60 +797,30 @@ def post_metadata_parse(app):
text = v.rstrip().lstrip() text = v.rstrip().lstrip()
app.set_field(f, text.split('\n')) app.set_field(f, text.split('\n'))
supported_flags = (flag_defaults.keys()
+ ['vercode', 'version', 'versionCode', 'versionName',
'type', 'ndk_path'])
esc_newlines = re.compile('\\\\( |\\n)') esc_newlines = re.compile('\\\\( |\\n)')
for build in app.builds: for build in app.builds:
for k, v in build.items(): for k in build_flags:
if k not in supported_flags: v = build.get_flag(k)
raise MetaDataException("Unrecognised build flag: {0}={1}"
.format(k, v))
if k == 'versionCode': if type(v) in (float, int):
build['vercode'] = str(v) build.set_flag(k, v)
del build['versionCode']
elif k == 'versionName':
build['version'] = str(v)
del build['versionName']
elif type(v) in (float, int):
build[k] = str(v)
else: else:
keyflagtype = flagtype(k) keyflagtype = flagtype(k)
if keyflagtype == 'list':
# these can be bools, strings or lists, but ultimately are lists
if isinstance(v, basestring):
build[k] = [v]
elif isinstance(v, bool):
build[k] = ['yes' if v else 'no']
elif isinstance(v, list):
build[k] = []
for e in v:
if isinstance(e, bool):
build[k].append('yes' if v else 'no')
else:
build[k].append(e)
elif keyflagtype == 'script': if keyflagtype == 'script':
build[k] = re.sub(esc_newlines, '', v).lstrip().rstrip() build.set_flag(k, re.sub(esc_newlines, '', v).lstrip().rstrip())
elif keyflagtype == 'bool': elif keyflagtype == 'bool':
# TODO handle this using <xsd:element type="xsd:boolean> in a schema # TODO handle this using <xsd:element type="xsd:boolean> in a schema
if isinstance(v, basestring): if isinstance(v, basestring) and v == 'true':
if v == 'true': build.set_flag(k, 'true')
build[k] = True
else:
build[k] = False
elif keyflagtype == 'string': elif keyflagtype == 'string':
if isinstance(v, bool): if isinstance(v, bool) and v:
build[k] = 'yes' if v else 'no' build.set_flag(k, 'yes')
if not app.Description: if not app.Description:
app.Description = ['No description available'] app.Description = ['No description available']
for build in app.builds:
fill_build_defaults(build)
app.builds = sorted_builds(app.builds) app.builds = sorted_builds(app.builds)
@ -826,16 +867,16 @@ def _decode_list(data):
def _decode_dict(data): def _decode_dict(data):
'''convert items in a dict from unicode to basestring''' '''convert items in a dict from unicode to basestring'''
rv = {} rv = {}
for key, value in data.iteritems(): for k, v in data.iteritems():
if isinstance(key, unicode): if isinstance(k, unicode):
key = key.encode('utf-8') k = k.encode('utf-8')
if isinstance(value, unicode): if isinstance(v, unicode):
value = value.encode('utf-8') v = v.encode('utf-8')
elif isinstance(value, list): elif isinstance(v, list):
value = _decode_list(value) v = _decode_list(v)
elif isinstance(value, dict): elif isinstance(v, dict):
value = _decode_dict(value) v = _decode_dict(v)
rv[key] = value rv[k] = v
return rv return rv
@ -897,16 +938,14 @@ def parse_xml_metadata(metadatapath):
if child.tag == 'string': if child.tag == 'string':
app.set_field(name, child.text) app.set_field(name, child.text)
elif child.tag == 'string-array': elif child.tag == 'string-array':
items = []
for item in child: for item in child:
items.append(item.text) app.append_field(name, item.text)
app.set_field(name, items)
elif child.tag == 'builds': elif child.tag == 'builds':
for build in child: for b in child:
builddict = dict() build = Build()
for key in build: for key in b:
builddict[key.tag] = key.text build.set_flag(key.tag, key.text)
app.builds.append(builddict) app.builds.append(build)
# TODO handle this using <xsd:element type="xsd:boolean> in a schema # TODO handle this using <xsd:element type="xsd:boolean> in a schema
if not isinstance(app.RequiresRoot, bool): if not isinstance(app.RequiresRoot, bool):
@ -935,7 +974,7 @@ def parse_txt_metadata(metadatapath):
linedesc = None linedesc = None
def add_buildflag(p, thisbuild): def add_buildflag(p, build):
if not p.strip(): if not p.strip():
raise MetaDataException("Empty build flag at {1}" raise MetaDataException("Empty build flag at {1}"
.format(buildlines[0], linedesc)) .format(buildlines[0], linedesc))
@ -943,13 +982,10 @@ def parse_txt_metadata(metadatapath):
if len(bv) != 2: if len(bv) != 2:
raise MetaDataException("Invalid build flag at {0} in {1}" raise MetaDataException("Invalid build flag at {0} in {1}"
.format(buildlines[0], linedesc)) .format(buildlines[0], linedesc))
pk, pv = bv
if pk in thisbuild:
raise MetaDataException("Duplicate definition on {0} in version {1} of {2}"
.format(pk, thisbuild['version'], linedesc))
pk, pv = bv
pk = pk.lstrip() pk = pk.lstrip()
if pk not in flag_defaults: if pk not in build_flags:
raise MetaDataException("Unrecognised build flag at {0} in {1}" raise MetaDataException("Unrecognised build flag at {0} in {1}"
.format(p, linedesc)) .format(p, linedesc))
t = flagtype(pk) t = flagtype(pk)
@ -958,45 +994,45 @@ def parse_txt_metadata(metadatapath):
if pk == 'gradle': if pk == 'gradle':
if len(pv) == 1 and pv[0] in ['main', 'yes']: if len(pv) == 1 and pv[0] in ['main', 'yes']:
pv = ['yes'] pv = ['yes']
thisbuild[pk] = pv build.set_flag(pk, pv)
elif t == 'string' or t == 'script': elif t == 'string' or t == 'script':
thisbuild[pk] = pv build.set_flag(pk, pv)
elif t == 'bool': elif t == 'bool':
value = pv == 'yes' v = pv == 'yes'
if value: if v:
thisbuild[pk] = True build.set_flag(pk, True)
else: else:
raise MetaDataException("Unrecognised build flag type '%s' at %s in %s" raise MetaDataException("Unrecognised build flag type '%s' at %s in %s"
% (t, p, linedesc)) % (t, p, linedesc))
def parse_buildline(lines): def parse_buildline(lines):
value = "".join(lines) v = "".join(lines)
parts = [p.replace("\\,", ",") parts = [p.replace("\\,", ",")
for p in re.split(r"(?<!\\),", value)] for p in re.split(r"(?<!\\),", v)]
if len(parts) < 3: if len(parts) < 3:
raise MetaDataException("Invalid build format: " + value + " in " + metafile.name) raise MetaDataException("Invalid build format: " + v + " in " + metafile.name)
thisbuild = {} build = Build()
thisbuild['origlines'] = lines build.origlines = lines
thisbuild['version'] = parts[0] build.version = parts[0]
thisbuild['vercode'] = parts[1] build.vercode = parts[1]
if parts[2].startswith('!'): if parts[2].startswith('!'):
# For backwards compatibility, handle old-style disabling, # For backwards compatibility, handle old-style disabling,
# including attempting to extract the commit from the message # including attempting to extract the commit from the message
thisbuild['disable'] = parts[2][1:] build.disable = parts[2][1:]
commit = 'unknown - see disabled' commit = 'unknown - see disabled'
index = parts[2].rfind('at ') index = parts[2].rfind('at ')
if index != -1: if index != -1:
commit = parts[2][index + 3:] commit = parts[2][index + 3:]
if commit.endswith(')'): if commit.endswith(')'):
commit = commit[:-1] commit = commit[:-1]
thisbuild['commit'] = commit build.commit = commit
else: else:
thisbuild['commit'] = parts[2] build.commit = parts[2]
for p in parts[3:]: for p in parts[3:]:
add_buildflag(p, thisbuild) add_buildflag(p, build)
return thisbuild return build
def add_comments(key): def add_comments(key):
if not curcomments: if not curcomments:
@ -1010,7 +1046,7 @@ def parse_txt_metadata(metadatapath):
mode = 0 mode = 0
buildlines = [] buildlines = []
curcomments = [] curcomments = []
curbuild = None build = None
vc_seen = {} vc_seen = {}
c = 0 c = 0
@ -1020,13 +1056,12 @@ def parse_txt_metadata(metadatapath):
line = line.rstrip('\r\n') line = line.rstrip('\r\n')
if mode == 3: if mode == 3:
if not any(line.startswith(s) for s in (' ', '\t')): if not any(line.startswith(s) for s in (' ', '\t')):
commit = curbuild['commit'] if 'commit' in curbuild else None if not build.commit and not build.disable:
if not commit and 'disable' not in curbuild:
raise MetaDataException("No commit specified for {0} in {1}" raise MetaDataException("No commit specified for {0} in {1}"
.format(curbuild['version'], linedesc)) .format(build.version, linedesc))
app.builds.append(curbuild) app.builds.append(build)
add_comments('build:' + curbuild['vercode']) add_comments('build:' + build.vercode)
mode = 0 mode = 0
else: else:
if line.endswith('\\'): if line.endswith('\\'):
@ -1034,7 +1069,7 @@ def parse_txt_metadata(metadatapath):
else: else:
buildlines.append(line.lstrip()) buildlines.append(line.lstrip())
bl = ''.join(buildlines) bl = ''.join(buildlines)
add_buildflag(bl, curbuild) add_buildflag(bl, build)
buildlines = [] buildlines = []
if mode == 0: if mode == 0:
@ -1044,74 +1079,74 @@ def parse_txt_metadata(metadatapath):
curcomments.append(line[1:].strip()) curcomments.append(line[1:].strip())
continue continue
try: try:
field, value = line.split(':', 1) f, v = line.split(':', 1)
except ValueError: except ValueError:
raise MetaDataException("Invalid metadata in " + linedesc) raise MetaDataException("Invalid metadata in " + linedesc)
if field != field.strip() or value != value.strip(): if f != f.strip() or v != v.strip():
raise MetaDataException("Extra spacing found in " + linedesc) raise MetaDataException("Extra spacing found in " + linedesc)
# Translate obsolete fields... # Translate obsolete fields...
if field == 'Market Version': if f == 'Market Version':
field = 'Current Version' f = 'Current Version'
if field == 'Market Version Code': if f == 'Market Version Code':
field = 'Current Version Code' f = 'Current Version Code'
fieldtype = metafieldtype(field) fieldtype = metafieldtype(f)
if fieldtype not in ['build', 'buildv2']: if fieldtype not in ['build', 'buildv2']:
add_comments(field) add_comments(f)
if fieldtype == 'multiline': if fieldtype == 'multiline':
mode = 1 mode = 1
if value: if v:
raise MetaDataException("Unexpected text on same line as " + field + " in " + linedesc) raise MetaDataException("Unexpected text on same line as " + f + " in " + linedesc)
elif fieldtype == 'string': elif fieldtype == 'string':
app.set_field(field, value) app.set_field(f, v)
elif fieldtype == 'list': elif fieldtype == 'list':
app.set_field(field, split_list_values(value)) app.set_field(f, split_list_values(v))
elif fieldtype == 'build': elif fieldtype == 'build':
if value.endswith("\\"): if v.endswith("\\"):
mode = 2 mode = 2
buildlines = [value[:-1]] buildlines = [v[:-1]]
else: else:
curbuild = parse_buildline([value]) build = parse_buildline([v])
app.builds.append(curbuild) app.builds.append(build)
add_comments('build:' + app.builds[-1]['vercode']) add_comments('build:' + app.builds[-1].vercode)
elif fieldtype == 'buildv2': elif fieldtype == 'buildv2':
curbuild = {} build = Build()
vv = value.split(',') vv = v.split(',')
if len(vv) != 2: if len(vv) != 2:
raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}' raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}'
.format(value, linedesc)) .format(v, linedesc))
curbuild['version'] = vv[0] build.version = vv[0]
curbuild['vercode'] = vv[1] build.vercode = vv[1]
if curbuild['vercode'] in vc_seen: if build.vercode in vc_seen:
raise MetaDataException('Duplicate build recipe found for vercode %s in %s' % ( raise MetaDataException('Duplicate build recipe found for vercode %s in %s' % (
curbuild['vercode'], linedesc)) build.vercode, linedesc))
vc_seen[curbuild['vercode']] = True vc_seen[build.vercode] = True
buildlines = [] buildlines = []
mode = 3 mode = 3
elif fieldtype == 'obsolete': elif fieldtype == 'obsolete':
pass # Just throw it away! pass # Just throw it away!
else: else:
raise MetaDataException("Unrecognised field type for " + field + " in " + linedesc) raise MetaDataException("Unrecognised field type for " + f + " in " + linedesc)
elif mode == 1: # Multiline field elif mode == 1: # Multiline field
if line == '.': if line == '.':
mode = 0 mode = 0
else: else:
app.append_field(field, line) app.append_field(f, line)
elif mode == 2: # Line continuation mode in Build Version elif mode == 2: # Line continuation mode in Build Version
if line.endswith("\\"): if line.endswith("\\"):
buildlines.append(line[:-1]) buildlines.append(line[:-1])
else: else:
buildlines.append(line) buildlines.append(line)
curbuild = parse_buildline(buildlines) build = parse_buildline(buildlines)
app.builds.append(curbuild) app.builds.append(build)
add_comments('build:' + app.builds[-1]['vercode']) add_comments('build:' + app.builds[-1].vercode)
mode = 0 mode = 0
add_comments(None) add_comments(None)
# Mode at end of file should always be 0... # Mode at end of file should always be 0...
if mode == 1: if mode == 1:
raise MetaDataException(field + " not terminated in " + metafile.name) raise MetaDataException(f + " not terminated in " + metafile.name)
elif mode == 2: elif mode == 2:
raise MetaDataException("Unterminated continuation in " + metafile.name) raise MetaDataException("Unterminated continuation in " + metafile.name)
elif mode == 3: elif mode == 3:
@ -1130,18 +1165,18 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
for line in app.comments[key]: for line in app.comments[key]:
w_comment(line) w_comment(line)
def w_field_always(field, value=None): def w_field_always(f, v=None):
if value is None: if v is None:
value = app.get_field(field) v = app.get_field(f)
w_comments(field) w_comments(f)
w_field(field, value) w_field(f, v)
def w_field_nonempty(field, value=None): def w_field_nonempty(f, v=None):
if value is None: if v is None:
value = app.get_field(field) v = app.get_field(f)
w_comments(field) w_comments(f)
if value: if v:
w_field(field, value) w_field(f, v)
w_field_nonempty('Disabled') w_field_nonempty('Disabled')
if app.AntiFeatures: if app.AntiFeatures:
@ -1175,10 +1210,10 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
for build in sorted_builds(app.builds): for build in sorted_builds(app.builds):
if build['version'] == "Ignore": if build.version == "Ignore":
continue continue
w_comments('build:' + build['vercode']) w_comments('build:' + build.vercode)
w_build(build) w_build(build)
mf.write('\n') mf.write('\n')
@ -1211,39 +1246,37 @@ def write_txt_metadata(mf, app):
def w_comment(line): def w_comment(line):
mf.write("# %s\n" % line) mf.write("# %s\n" % line)
def w_field(field, value): def w_field(f, v):
t = metafieldtype(field) t = metafieldtype(f)
if t == 'list': if t == 'list':
value = ','.join(value) v = ','.join(v)
elif t == 'multiline': elif t == 'multiline':
if type(value) == list: if type(v) == list:
value = '\n' + '\n'.join(value) + '\n.' v = '\n' + '\n'.join(v) + '\n.'
else: else:
value = '\n' + value + '\n.' v = '\n' + v + '\n.'
mf.write("%s:%s\n" % (field, value)) mf.write("%s:%s\n" % (f, v))
def w_build(build): def w_build(build):
mf.write("Build:%s,%s\n" % (build['version'], build['vercode'])) mf.write("Build:%s,%s\n" % (build.version, build.vercode))
for key in flag_defaults: for f in build_flags_order:
value = build[key] v = build.get_flag(f)
if not value: if not v:
continue
if value == flag_defaults[key]:
continue continue
t = flagtype(key) t = flagtype(f)
v = ' %s=' % key out = ' %s=' % f
if t == 'string': if t == 'string':
v += value out += v
elif t == 'bool': elif t == 'bool':
v += 'yes' out += 'yes'
elif t == 'script': elif t == 'script':
v += '&& \\\n '.join([s.lstrip() for s in value.split('&& ')]) out += '&& \\\n '.join([s.lstrip() for s in v.split('&& ')])
elif t == 'list': elif t == 'list':
v += ','.join(value) if type(value) == list else value out += ','.join(v) if type(v) == list else v
mf.write(v) mf.write(out)
mf.write('\n') mf.write('\n')
write_plaintext_metadata(mf, app, w_comment, w_field, w_build) write_plaintext_metadata(mf, app, w_comment, w_field, w_build)
@ -1254,26 +1287,26 @@ def write_yaml_metadata(mf, app):
def w_comment(line): def w_comment(line):
mf.write("# %s\n" % line) mf.write("# %s\n" % line)
def escape(value): def escape(v):
if not value: if not v:
return '' return ''
if any(c in value for c in [': ', '%', '@', '*']): if any(c in v for c in [': ', '%', '@', '*']):
return "'" + value.replace("'", "''") + "'" return "'" + v.replace("'", "''") + "'"
return value return v
def w_field(field, value, prefix='', t=None): def w_field(f, v, prefix='', t=None):
if t is None: if t is None:
t = metafieldtype(field) t = metafieldtype(f)
v = '' v = ''
if t == 'list': if t == 'list':
v = '\n' v = '\n'
for e in value: for e in v:
v += prefix + ' - ' + escape(e) + '\n' v += prefix + ' - ' + escape(e) + '\n'
elif t == 'multiline': elif t == 'multiline':
v = ' |\n' v = ' |\n'
lines = value lines = v
if type(value) == str: if type(v) == str:
lines = value.splitlines() lines = v.splitlines()
for l in lines: for l in lines:
if l: if l:
v += prefix + ' ' + l + '\n' v += prefix + ' ' + l + '\n'
@ -1282,16 +1315,16 @@ def write_yaml_metadata(mf, app):
elif t == 'bool': elif t == 'bool':
v = ' yes\n' v = ' yes\n'
elif t == 'script': elif t == 'script':
cmds = [s + '&& \\' for s in value.split('&& ')] cmds = [s + '&& \\' for s in v.split('&& ')]
if len(cmds) > 0: if len(cmds) > 0:
cmds[-1] = cmds[-1][:-len('&& \\')] cmds[-1] = cmds[-1][:-len('&& \\')]
w_field(field, cmds, prefix, 'multiline') w_field(f, cmds, prefix, 'multiline')
return return
else: else:
v = ' ' + escape(value) + '\n' v = ' ' + escape(v) + '\n'
mf.write(prefix) mf.write(prefix)
mf.write(field) mf.write(f)
mf.write(":") mf.write(":")
mf.write(v) mf.write(v)
@ -1304,16 +1337,14 @@ def write_yaml_metadata(mf, app):
mf.write("builds:\n") mf.write("builds:\n")
first_build = False first_build = False
w_field('versionName', build['version'], ' - ', 'string') w_field('versionName', build.version, ' - ', 'string')
w_field('versionCode', build['vercode'], ' ', 'strsng') w_field('versionCode', build.vercode, ' ', 'strsng')
for key in flag_defaults: for f in build_flags_order:
value = build[key] v = build.get_flag(f)
if not value: if not v:
continue
if value == flag_defaults[key]:
continue continue
w_field(key, value, ' ', flagtype(key)) w_field(f, v, ' ', flagtype(f))
write_plaintext_metadata(mf, app, w_comment, w_field, w_build) write_plaintext_metadata(mf, app, w_comment, w_field, w_build)

View file

@ -31,18 +31,18 @@ config = None
options = None options = None
def get_gradle_compile_commands(thisbuild): def get_gradle_compile_commands(build):
compileCommands = ['compile', 'releaseCompile'] compileCommands = ['compile', 'releaseCompile']
if thisbuild['gradle'] and thisbuild['gradle'] != ['yes']: if build.gradle and build.gradle != ['yes']:
compileCommands += [flavor + 'Compile' for flavor in thisbuild['gradle']] compileCommands += [flavor + 'Compile' for flavor in build.gradle]
compileCommands += [flavor + 'ReleaseCompile' for flavor in thisbuild['gradle']] compileCommands += [flavor + 'ReleaseCompile' for flavor in build.gradle]
return [re.compile(r'\s*' + c, re.IGNORECASE) for c in compileCommands] return [re.compile(r'\s*' + c, re.IGNORECASE) for c in compileCommands]
# Scan the source code in the given directory (and all subdirectories) # Scan the source code in the given directory (and all subdirectories)
# and return the number of fatal problems encountered # and return the number of fatal problems encountered
def scan_source(build_dir, root_dir, thisbuild): def scan_source(build_dir, root_dir, build):
count = 0 count = 0
@ -85,8 +85,8 @@ def scan_source(build_dir, root_dir, thisbuild):
] ]
] ]
scanignore = common.getpaths_map(build_dir, thisbuild['scanignore']) scanignore = common.getpaths_map(build_dir, build.scanignore)
scandelete = common.getpaths_map(build_dir, thisbuild['scandelete']) scandelete = common.getpaths_map(build_dir, build.scandelete)
scanignore_worked = set() scanignore_worked = set()
scandelete_worked = set() scandelete_worked = set()
@ -153,7 +153,7 @@ def scan_source(build_dir, root_dir, thisbuild):
return True return True
return False return False
gradle_compile_commands = get_gradle_compile_commands(thisbuild) gradle_compile_commands = get_gradle_compile_commands(build)
def is_used_by_gradle(line): def is_used_by_gradle(line):
return any(command.match(line) for command in gradle_compile_commands) return any(command.match(line) for command in gradle_compile_commands)
@ -240,7 +240,7 @@ def scan_source(build_dir, root_dir, thisbuild):
# indicate a problem (if it's not a problem, explicitly use # indicate a problem (if it's not a problem, explicitly use
# buildjni=no to bypass this check) # buildjni=no to bypass this check)
if (os.path.exists(os.path.join(root_dir, 'jni')) and if (os.path.exists(os.path.join(root_dir, 'jni')) and
not thisbuild['buildjni']): not build.buildjni):
logging.error('Found jni directory, but buildjni is not enabled. Set it to \'no\' to ignore.') logging.error('Found jni directory, but buildjni is not enabled. Set it to \'no\' to ignore.')
count += 1 count += 1
@ -293,24 +293,24 @@ def main():
# Set up vcs interface and make sure we have the latest code... # Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir) vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
for thisbuild in app.builds: for build in app.builds:
if thisbuild['disable']: if build.disable:
logging.info("...skipping version %s - %s" % ( logging.info("...skipping version %s - %s" % (
thisbuild['version'], thisbuild.get('disable', thisbuild['commit'][1:]))) build.version, build.get('disable', build.commit[1:])))
else: else:
logging.info("...scanning version " + thisbuild['version']) logging.info("...scanning version " + build.version)
# Prepare the source code... # Prepare the source code...
root_dir, _ = common.prepare_source(vcs, app, thisbuild, root_dir, _ = common.prepare_source(vcs, app, build,
build_dir, srclib_dir, build_dir, srclib_dir,
extlib_dir, False) extlib_dir, False)
# Do the scan... # Do the scan...
count = scan_source(build_dir, root_dir, thisbuild) count = scan_source(build_dir, root_dir, build)
if count > 0: if count > 0:
logging.warn('Scanner found %d problems in %s (%s)' % ( logging.warn('Scanner found %d problems in %s (%s)' % (
count, appid, thisbuild['vercode'])) count, appid, build.vercode))
probcount += count probcount += count
except BuildException as be: except BuildException as be:

View file

@ -144,26 +144,26 @@ def update_wiki(apps, sortedids, apks):
gotcurrentver = True gotcurrentver = True
apklist.append(apk) apklist.append(apk)
# Include ones we can't build, as a special case... # Include ones we can't build, as a special case...
for thisbuild in app.builds: for build in app.builds:
if thisbuild['disable']: if build.disable:
if thisbuild['vercode'] == app.CurrentVersionCode: if build.vercode == app.CurrentVersionCode:
cantupdate = True cantupdate = True
# TODO: Nasty: vercode is a string in the build, and an int elsewhere # TODO: Nasty: vercode is a string in the build, and an int elsewhere
apklist.append({'versioncode': int(thisbuild['vercode']), apklist.append({'versioncode': int(build.vercode),
'version': thisbuild['version'], 'version': build.version,
'buildproblem': "The build for this version was manually disabled. Reason: {0}".format(thisbuild['disable']), 'buildproblem': "The build for this version was manually disabled. Reason: {0}".format(build.disable),
}) })
else: else:
builtit = False builtit = False
for apk in apklist: for apk in apklist:
if apk['versioncode'] == int(thisbuild['vercode']): if apk['versioncode'] == int(build.vercode):
builtit = True builtit = True
break break
if not builtit: if not builtit:
buildfails = True buildfails = True
apklist.append({'versioncode': int(thisbuild['vercode']), apklist.append({'versioncode': int(build.vercode),
'version': thisbuild['version'], 'version': build.version,
'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode']), 'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, build.vercode),
}) })
if app.CurrentVersionCode == '0': if app.CurrentVersionCode == '0':
cantupdate = True cantupdate = True
@ -291,12 +291,12 @@ def delete_disabled_builds(apps, apkcache, repodirs):
""" """
for appid, app in apps.iteritems(): for appid, app in apps.iteritems():
for build in app.builds: for build in app.builds:
if not build['disable']: if not build.disable:
continue continue
apkfilename = appid + '_' + str(build['vercode']) + '.apk' apkfilename = appid + '_' + str(build.vercode) + '.apk'
iconfilename = "%s.%s.png" % ( iconfilename = "%s.%s.png" % (
appid, appid,
build['vercode']) build.vercode)
for repodir in repodirs: for repodir in repodirs:
files = [ files = [
os.path.join(repodir, apkfilename), os.path.join(repodir, apkfilename),
@ -453,8 +453,8 @@ def scan_apks(apps, apkcache, repodir, knownapks):
usecache = False usecache = False
if apkfilename in apkcache: if apkfilename in apkcache:
thisinfo = apkcache[apkfilename] apk = apkcache[apkfilename]
if thisinfo['sha256'] == shasum: if apk['sha256'] == shasum:
logging.debug("Reading " + apkfilename + " from cache") logging.debug("Reading " + apkfilename + " from cache")
usecache = True usecache = True
else: else:
@ -462,17 +462,17 @@ def scan_apks(apps, apkcache, repodir, knownapks):
if not usecache: if not usecache:
logging.debug("Processing " + apkfilename) logging.debug("Processing " + apkfilename)
thisinfo = {} apk = {}
thisinfo['apkname'] = apkfilename apk['apkname'] = apkfilename
thisinfo['sha256'] = shasum apk['sha256'] = shasum
srcfilename = apkfilename[:-4] + "_src.tar.gz" srcfilename = apkfilename[:-4] + "_src.tar.gz"
if os.path.exists(os.path.join(repodir, srcfilename)): if os.path.exists(os.path.join(repodir, srcfilename)):
thisinfo['srcname'] = srcfilename apk['srcname'] = srcfilename
thisinfo['size'] = os.path.getsize(apkfile) apk['size'] = os.path.getsize(apkfile)
thisinfo['permissions'] = set() apk['permissions'] = set()
thisinfo['features'] = set() apk['features'] = set()
thisinfo['icons_src'] = {} apk['icons_src'] = {}
thisinfo['icons'] = {} apk['icons'] = {}
p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False) p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False)
if p.returncode != 0: if p.returncode != 0:
if options.delete_unknown: if options.delete_unknown:
@ -487,51 +487,51 @@ def scan_apks(apps, apkcache, repodir, knownapks):
for line in p.output.splitlines(): for line in p.output.splitlines():
if line.startswith("package:"): if line.startswith("package:"):
try: try:
thisinfo['id'] = re.match(name_pat, line).group(1) apk['id'] = re.match(name_pat, line).group(1)
thisinfo['versioncode'] = int(re.match(vercode_pat, line).group(1)) apk['versioncode'] = int(re.match(vercode_pat, line).group(1))
thisinfo['version'] = re.match(vername_pat, line).group(1) apk['version'] = re.match(vername_pat, line).group(1)
except Exception, e: except Exception, e:
logging.error("Package matching failed: " + str(e)) logging.error("Package matching failed: " + str(e))
logging.info("Line was: " + line) logging.info("Line was: " + line)
sys.exit(1) sys.exit(1)
elif line.startswith("application:"): elif line.startswith("application:"):
thisinfo['name'] = re.match(label_pat, line).group(1) apk['name'] = re.match(label_pat, line).group(1)
# Keep path to non-dpi icon in case we need it # Keep path to non-dpi icon in case we need it
match = re.match(icon_pat_nodpi, line) match = re.match(icon_pat_nodpi, line)
if match: if match:
thisinfo['icons_src']['-1'] = match.group(1) apk['icons_src']['-1'] = match.group(1)
elif line.startswith("launchable-activity:"): elif line.startswith("launchable-activity:"):
# Only use launchable-activity as fallback to application # Only use launchable-activity as fallback to application
if not thisinfo['name']: if not apk['name']:
thisinfo['name'] = re.match(label_pat, line).group(1) apk['name'] = re.match(label_pat, line).group(1)
if '-1' not in thisinfo['icons_src']: if '-1' not in apk['icons_src']:
match = re.match(icon_pat_nodpi, line) match = re.match(icon_pat_nodpi, line)
if match: if match:
thisinfo['icons_src']['-1'] = match.group(1) apk['icons_src']['-1'] = match.group(1)
elif line.startswith("application-icon-"): elif line.startswith("application-icon-"):
match = re.match(icon_pat, line) match = re.match(icon_pat, line)
if match: if match:
density = match.group(1) density = match.group(1)
path = match.group(2) path = match.group(2)
thisinfo['icons_src'][density] = path apk['icons_src'][density] = path
elif line.startswith("sdkVersion:"): elif line.startswith("sdkVersion:"):
m = re.match(sdkversion_pat, line) m = re.match(sdkversion_pat, line)
if m is None: if m is None:
logging.error(line.replace('sdkVersion:', '') logging.error(line.replace('sdkVersion:', '')
+ ' is not a valid minSdkVersion!') + ' is not a valid minSdkVersion!')
else: else:
thisinfo['sdkversion'] = m.group(1) apk['sdkversion'] = m.group(1)
elif line.startswith("maxSdkVersion:"): elif line.startswith("maxSdkVersion:"):
thisinfo['maxsdkversion'] = re.match(sdkversion_pat, line).group(1) apk['maxsdkversion'] = re.match(sdkversion_pat, line).group(1)
elif line.startswith("native-code:"): elif line.startswith("native-code:"):
thisinfo['nativecode'] = [] apk['nativecode'] = []
for arch in line[13:].split(' '): for arch in line[13:].split(' '):
thisinfo['nativecode'].append(arch[1:-1]) apk['nativecode'].append(arch[1:-1])
elif line.startswith("uses-permission:"): elif line.startswith("uses-permission:"):
perm = re.match(string_pat, line).group(1) perm = re.match(string_pat, line).group(1)
if perm.startswith("android.permission."): if perm.startswith("android.permission."):
perm = perm[19:] perm = perm[19:]
thisinfo['permissions'].add(perm) apk['permissions'].add(perm)
elif line.startswith("uses-feature:"): elif line.startswith("uses-feature:"):
perm = re.match(string_pat, line).group(1) perm = re.match(string_pat, line).group(1)
# Filter out this, it's only added with the latest SDK tools and # Filter out this, it's only added with the latest SDK tools and
@ -540,11 +540,11 @@ def scan_apks(apps, apkcache, repodir, knownapks):
and perm != "android.hardware.screen.landscape": and perm != "android.hardware.screen.landscape":
if perm.startswith("android.feature."): if perm.startswith("android.feature."):
perm = perm[16:] perm = perm[16:]
thisinfo['features'].add(perm) apk['features'].add(perm)
if 'sdkversion' not in thisinfo: if 'sdkversion' not in apk:
logging.warn("No SDK version information found in {0}".format(apkfile)) logging.warn("No SDK version information found in {0}".format(apkfile))
thisinfo['sdkversion'] = 0 apk['sdkversion'] = 0
# Check for debuggable apks... # Check for debuggable apks...
if common.isApkDebuggable(apkfile, config): if common.isApkDebuggable(apkfile, config):
@ -552,20 +552,20 @@ def scan_apks(apps, apkcache, repodir, knownapks):
# Get the signature (or md5 of, to be precise)... # Get the signature (or md5 of, to be precise)...
logging.debug('Getting signature of {0}'.format(apkfile)) logging.debug('Getting signature of {0}'.format(apkfile))
thisinfo['sig'] = getsig(os.path.join(os.getcwd(), apkfile)) apk['sig'] = getsig(os.path.join(os.getcwd(), apkfile))
if not thisinfo['sig']: if not apk['sig']:
logging.critical("Failed to get apk signature") logging.critical("Failed to get apk signature")
sys.exit(1) sys.exit(1)
apk = zipfile.ZipFile(apkfile, 'r') apkzip = zipfile.ZipFile(apkfile, 'r')
# if an APK has files newer than the system time, suggest updating # if an APK has files newer than the system time, suggest updating
# the system clock. This is useful for offline systems, used for # the system clock. This is useful for offline systems, used for
# signing, which do not have another source of clock sync info. It # signing, which do not have another source of clock sync info. It
# has to be more than 24 hours newer because ZIP/APK files do not # has to be more than 24 hours newer because ZIP/APK files do not
# store timezone info # store timezone info
info = apk.getinfo('AndroidManifest.xml') manifest = apkzip.getinfo('AndroidManifest.xml')
dt_obj = datetime(*info.date_time) dt_obj = datetime(*manifest.date_time)
checkdt = dt_obj - timedelta(1) checkdt = dt_obj - timedelta(1)
if datetime.today() < checkdt: if datetime.today() < checkdt:
logging.warn('System clock is older than manifest in: ' logging.warn('System clock is older than manifest in: '
@ -573,44 +573,44 @@ def scan_apks(apps, apkcache, repodir, knownapks):
+ 'sudo date -s "' + str(dt_obj) + '"') + 'sudo date -s "' + str(dt_obj) + '"')
iconfilename = "%s.%s.png" % ( iconfilename = "%s.%s.png" % (
thisinfo['id'], apk['id'],
thisinfo['versioncode']) apk['versioncode'])
# Extract the icon file... # Extract the icon file...
empty_densities = [] empty_densities = []
for density in screen_densities: for density in screen_densities:
if density not in thisinfo['icons_src']: if density not in apk['icons_src']:
empty_densities.append(density) empty_densities.append(density)
continue continue
iconsrc = thisinfo['icons_src'][density] iconsrc = apk['icons_src'][density]
icon_dir = get_icon_dir(repodir, density) icon_dir = get_icon_dir(repodir, density)
icondest = os.path.join(icon_dir, iconfilename) icondest = os.path.join(icon_dir, iconfilename)
try: try:
with open(icondest, 'wb') as f: with open(icondest, 'wb') as f:
f.write(apk.read(iconsrc)) f.write(apkzip.read(iconsrc))
thisinfo['icons'][density] = iconfilename apk['icons'][density] = iconfilename
except: except:
logging.warn("Error retrieving icon file") logging.warn("Error retrieving icon file")
del thisinfo['icons'][density] del apk['icons'][density]
del thisinfo['icons_src'][density] del apk['icons_src'][density]
empty_densities.append(density) empty_densities.append(density)
if '-1' in thisinfo['icons_src']: if '-1' in apk['icons_src']:
iconsrc = thisinfo['icons_src']['-1'] iconsrc = apk['icons_src']['-1']
iconpath = os.path.join( iconpath = os.path.join(
get_icon_dir(repodir, '0'), iconfilename) get_icon_dir(repodir, '0'), iconfilename)
with open(iconpath, 'wb') as f: with open(iconpath, 'wb') as f:
f.write(apk.read(iconsrc)) f.write(apkzip.read(iconsrc))
try: try:
im = Image.open(iconpath) im = Image.open(iconpath)
dpi = px_to_dpi(im.size[0]) dpi = px_to_dpi(im.size[0])
for density in screen_densities: for density in screen_densities:
if density in thisinfo['icons']: if density in apk['icons']:
break break
if density == screen_densities[-1] or dpi >= int(density): if density == screen_densities[-1] or dpi >= int(density):
thisinfo['icons'][density] = iconfilename apk['icons'][density] = iconfilename
shutil.move(iconpath, shutil.move(iconpath,
os.path.join(get_icon_dir(repodir, density), iconfilename)) os.path.join(get_icon_dir(repodir, density), iconfilename))
empty_densities.remove(density) empty_densities.remove(density)
@ -618,10 +618,10 @@ def scan_apks(apps, apkcache, repodir, knownapks):
except Exception, e: except Exception, e:
logging.warn("Failed reading {0} - {1}".format(iconpath, e)) logging.warn("Failed reading {0} - {1}".format(iconpath, e))
if thisinfo['icons']: if apk['icons']:
thisinfo['icon'] = iconfilename apk['icon'] = iconfilename
apk.close() apkzip.close()
# First try resizing down to not lose quality # First try resizing down to not lose quality
last_density = None last_density = None
@ -675,19 +675,19 @@ def scan_apks(apps, apkcache, repodir, knownapks):
# Copy from icons-mdpi to icons since mdpi is the baseline density # Copy from icons-mdpi to icons since mdpi is the baseline density
baseline = os.path.join(get_icon_dir(repodir, '160'), iconfilename) baseline = os.path.join(get_icon_dir(repodir, '160'), iconfilename)
if os.path.isfile(baseline): if os.path.isfile(baseline):
thisinfo['icons']['0'] = iconfilename apk['icons']['0'] = iconfilename
shutil.copyfile(baseline, shutil.copyfile(baseline,
os.path.join(get_icon_dir(repodir, '0'), iconfilename)) os.path.join(get_icon_dir(repodir, '0'), iconfilename))
# Record in known apks, getting the added date at the same time.. # Record in known apks, getting the added date at the same time..
added = knownapks.recordapk(thisinfo['apkname'], thisinfo['id']) added = knownapks.recordapk(apk['apkname'], apk['id'])
if added: if added:
thisinfo['added'] = added apk['added'] = added
apkcache[apkfilename] = thisinfo apkcache[apkfilename] = apk
cachechanged = True cachechanged = True
apks.append(thisinfo) apks.append(apk)
return apks, cachechanged return apks, cachechanged

View file

@ -114,20 +114,19 @@ class CommonTest(unittest.TestCase):
config = dict() config = dict()
config['sdk_path'] = os.getenv('ANDROID_HOME') config['sdk_path'] = os.getenv('ANDROID_HOME')
config['ndk_paths'] = {'r10d': os.getenv('ANDROID_NDK_HOME')}
config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION' config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION'
fdroidserver.common.config = config fdroidserver.common.config = config
app = fdroidserver.metadata.App() app = fdroidserver.metadata.App()
app.id = 'org.fdroid.froid' app.id = 'org.fdroid.froid'
build = dict(fdroidserver.metadata.flag_defaults) build = fdroidserver.metadata.Build()
build['commit'] = 'master' build.commit = 'master'
build['forceversion'] = True build.forceversion = True
build['forcevercode'] = True build.forcevercode = True
build['gradle'] = ['yes'] build.gradle = ['yes']
build['ndk_path'] = os.getenv('ANDROID_NDK_HOME') build.target = 'android-' + str(testint)
build['target'] = 'android-' + str(testint) build.version = teststr
build['type'] = 'gradle' build.vercode = testint
build['version'] = teststr
build['vercode'] = testint
class FakeVcs(): class FakeVcs():
# no need to change to the correct commit here # no need to change to the correct commit here
@ -147,8 +146,8 @@ class CommonTest(unittest.TestCase):
with open(os.path.join(testdir, 'AndroidManifest.xml')) as f: with open(os.path.join(testdir, 'AndroidManifest.xml')) as f:
filedata = f.read() filedata = f.read()
self.assertIsNone(re.search('android:debuggable', filedata)) self.assertIsNone(re.search('android:debuggable', filedata))
self.assertIsNotNone(re.search('android:versionName="%s"' % build['version'], filedata)) self.assertIsNotNone(re.search('android:versionName="%s"' % build.version, filedata))
self.assertIsNotNone(re.search('android:versionCode="%s"' % build['vercode'], filedata)) self.assertIsNotNone(re.search('android:versionCode="%s"' % build.vercode, filedata))
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -46,8 +46,9 @@ class MetadataTest(unittest.TestCase):
frompickle = pickle.load(f) frompickle = pickle.load(f)
frommeta = app.field_dict() frommeta = app.field_dict()
self.assertEquals(frommeta, frompickle) self.assertEquals(frommeta, frompickle)
# Uncomment to overwrite
# with open(savepath, 'wb') as f: # with open(savepath, 'wb') as f:
# pickle.dump(app, f) # pickle.dump(frommeta, f)
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -10,87 +10,87 @@ S'Tracking'
p5 p5
aS'NonFreeNet' aS'NonFreeNet'
p6 p6
asS'Web Site' asS'Litecoin'
p7 p7
S'http://osmand.net' NsS'comments'
p8 p8
sS'Auto Update Mode' (dp9
p9
S'None'
p10
sS'Provides' sS'Provides'
p11 p10
NsS'Issue Tracker' NsS'Issue Tracker'
p12 p11
S'https://github.com/osmandapp/Osmand/issues' S'https://github.com/osmandapp/Osmand/issues'
p13 p12
sS'Donate' sS'Donate'
p14 p13
S'https://code.google.com/p/osmand/#Please_support_the_project' S'https://code.google.com/p/osmand/#Please_support_the_project'
p14
sS'Archive Policy'
p15 p15
sS'id' NsS'Description'
p16 p16
S'net.osmand.plus' (lp17
p17
sS'Description'
p18
(lp19
S"Osmand~'s features can be extended by enabling the plugins via the settings," S"Osmand~'s features can be extended by enabling the plugins via the settings,"
p20 p18
aS'which include online maps from many sources, tracking, OpenStreetMap (OSM) editing and' aS'which include online maps from many sources, tracking, OpenStreetMap (OSM) editing and'
p21 p19
aS'accessibility enhancements.' aS'accessibility enhancements.'
p22 p20
aS'' aS''
p23 p21
aS'Map data of both vector and raster types can be stored on the phone memory' aS'Map data of both vector and raster types can be stored on the phone memory'
p24 p22
aS'card for offline usage, and navigation by default uses offline methods. Map' aS'card for offline usage, and navigation by default uses offline methods. Map'
p25 p23
aS'data packages for many territories can be downloaded from within the app and' aS'data packages for many territories can be downloaded from within the app and'
p26 p24
aS'there is a desktop program available on the website as well for creating your' aS'there is a desktop program available on the website as well for creating your'
p27 p25
aS'own.' aS'own.'
p28 p26
ag23 ag21
aS'Anti-Features: Tracking - It will send your device and application specs to an' aS'Anti-Features: Tracking - It will send your device and application specs to an'
p29 p27
aS'Analytics server upon downloading the list of maps you can download.' aS'Analytics server upon downloading the list of maps you can download.'
p30 p28
ag23 ag21
aS'[https://osmandapp.github.io/changes.html Changelog]' aS'[https://osmandapp.github.io/changes.html Changelog]'
p31 p29
asS'Requires Root' asS'Requires Root'
p32 p30
I00 I00
sS'comments' sS'lastupdated'
p31
NsS'id'
p32
S'net.osmand.plus'
p33 p33
(dp34
sS'Repo Type'
p35
S'git'
p36
sS'Repo' sS'Repo'
p37 p34
S'https://github.com/mvdan/OsmAnd-submodules' S'https://github.com/mvdan/OsmAnd-submodules'
p38 p35
sS'No Source Since' sS'No Source Since'
p39 p36
g23 g21
sS'Repo Type'
p37
S'git'
p38
sS'Auto Name' sS'Auto Name'
p40 p39
g23 g21
sS'Categories' sS'Categories'
p41 p40
(lp42 (lp41
S'Navigation' S'None'
p42
aS'Navigation'
p43 p43
asS'Source Code' asS'Source Code'
p44 p44
S'https://github.com/osmandapp/Osmand' S'https://github.com/osmandapp/Osmand'
p45 p45
sS'Litecoin' sS'added'
p46 p46
NsS'Update Check Ignore' NsS'Update Check Ignore'
p47 p47
@ -104,7 +104,7 @@ S'GPLv3'
p51 p51
sS'Changelog' sS'Changelog'
p52 p52
g23 g21
sS'Update Check Mode' sS'Update Check Mode'
p53 p53
S'None' S'None'
@ -113,394 +113,344 @@ sS'Summary'
p55 p55
S'Offline/online maps and navigation' S'Offline/online maps and navigation'
p56 p56
sS'Maintainer Notes' sS'Current Version'
p57 p57
(lp58 S'1.9.5'
S'No UCMs apply because git never contains actual releases, only pre-releses.' p58
sS'Maintainer Notes'
p59 p59
ag23 (lp60
aS'The build instructions have been moved to a script in the root of the repo,' S'No UCMs apply because git never contains actual releases, only pre-releses.'
p60
aS"'build'. This way it can be updated along with the submodules."
p61 p61
asS'Current Version Code' ag21
aS'The build instructions have been moved to a script in the root of the repo,'
p62 p62
S'197' aS"'build'. This way it can be updated along with the submodules."
p63 p63
sS'Binaries' asS'Current Version Code'
p64 p64
NsS'Archive Policy' S'197'
p65 p65
NsS'builds' sS'Binaries'
p66 p66
(lp67 NsS'builds'
(dp68 p67
(lp68
(dp69
S'submodules' S'submodules'
p69
I01
sS'vercode'
p70 p70
S'182' S'true'
p71 p71
sS'forceversion' sS'kivy'
p72 p72
I00 I00
sS'oldsdkloc' sS'forceversion'
p73 p73
I00 I00
sS'gradleprops' sS'oldsdkloc'
p74 p74
(lp75 I00
sS'scanignore' sS'antcommands'
p75
NsS'scanignore'
p76 p76
(lp77 (lp77
sS'patch' sS'gradle'
p78 p78
(lp79
sS'srclibs'
p80
(lp81
sS'output'
p82
S'bin/OsmAnd-release-unsigned.apk'
p83
sS'encoding'
p84
NsS'extlibs'
p85
(lp86
sS'init'
p87
g23
sS'version'
p88
S'1.8.2'
p89
sS'build'
p90
S'./old-ndk-build.sh && ant -Dsdk.dir="$ANDROID_SDK" -Dndk.dir="$ANDROID_NDK" -DBLACKBERRY_BUILD=false -DBUILD_SUFFIX= -DAPK_NUMBER_VERSION=182 "-DFEATURES=+play_market +gps_status -parking_plugin -blackberry -amazon -route_nav" -DCLEAN_CPP=false -DPACKAGE_TO_BUILT=net.osmand.plus -DAPK_VERSION=1.8.2 -Dnet.osmand.plus= -Dbuild.version=1.8.2 -Dbuild.version.code=182 -Dnativeoff=false "-DversionFeatures=+play_market +gps_status -parking_plugin -blackberry -amazon -route_nav" clean release'
p91
sS'rm'
p92
(lp93
sS'kivy'
p94
I00 I00
sS'srclibs'
p79
(lp80
sS'encoding'
p81
NsS'extlibs'
p82
(lp83
sS'init'
p84
g21
sS'version'
p85
S'1.8.2'
p86
sS'subdir' sS'subdir'
p95 p87
S'android/OsmAnd' S'android/OsmAnd'
p96 p88
sS'rm'
p89
(lp90
sS'build'
p91
S'./old-ndk-build.sh && ant -Dsdk.dir="$ANDROID_SDK" -Dndk.dir="$ANDROID_NDK" -DBLACKBERRY_BUILD=false -DBUILD_SUFFIX= -DAPK_NUMBER_VERSION=182 "-DFEATURES=+play_market +gps_status -parking_plugin -blackberry -amazon -route_nav" -DCLEAN_CPP=false -DPACKAGE_TO_BUILT=net.osmand.plus -DAPK_VERSION=1.8.2 -Dnet.osmand.plus= -Dbuild.version=1.8.2 -Dbuild.version.code=182 -Dnativeoff=false "-DversionFeatures=+play_market +gps_status -parking_plugin -blackberry -amazon -route_nav" clean release'
p92
sS'vercode'
p93
S'182'
p94
sS'forcevercode' sS'forcevercode'
p97 p95
I00 I00
sS'preassemble' sS'preassemble'
p98 p96
(lp99 (lp97
sS'update' sS'update'
p100 p98
(lp101 NsS'maven'
S'auto' p99
p102
asS'maven'
p103
I00 I00
sS'disable' sS'disable'
p104 p100
I00
sS'ndk_path'
p105
g23
sS'scandelete'
p106
(lp107
sS'buildjni'
p108
(lp109
S'no'
p110
asS'ndk'
p111
S'r10e'
p112
sS'target'
p113
NsS'type'
p114
S'raw'
p115
sS'antcommands'
p116
NsS'gradle'
p117
I00
sS'prebuild'
p118
S'sed -i \'s/"OsmAnd+"/"OsmAnd~"/g\' build.xml'
p119
sS'novcheck'
p120
I00 I00
sS'commit' sS'commit'
p121 p101
S'76ada6c8a08afe69acb755503373ac36328ef665' S'76ada6c8a08afe69acb755503373ac36328ef665'
p122 p102
sa(dp123 sS'scandelete'
S'submodules' p103
p124 (lp104
I01 sS'buildjni'
sg70 p105
S'183' S'no'
p125 p106
sS'ndk'
p107
NsS'target'
p108
NsS'gradleprops'
p109
(lp110
sS'patch'
p111
(lp112
sS'prebuild'
p113
S'sed -i \'s/"OsmAnd+"/"OsmAnd~"/g\' build.xml'
p114
sS'novcheck'
p115
I00
sS'output'
p116
S'bin/OsmAnd-release-unsigned.apk'
p117
sa(dp118
g70
g71
sg72 sg72
I00 I00
sg73 sg73
I00 I00
sg74 sg74
(lp126 I00
sg76 sg75
g77 Nsg76
(lp119
sg78 sg78
g79 I00
sg80 sg79
g81 (lp120
sS'output' sg81
p127 Nsg82
S'bin/OsmAnd-release-unsigned.apk' (lp121
p128
sg84 sg84
Nsg85 g21
g86 sg85
S'1.8.3'
p122
sg87 sg87
g23
sg88
S'1.8.3'
p129
sS'subdir'
p130
S'android/OsmAnd' S'android/OsmAnd'
p131 p123
sg92 sg89
g93 (lp124
sg94 sg91
I00
sS'build'
p132
S'../../build' S'../../build'
p133 p125
sg97 sg93
S'183'
p126
sg95
I00 I00
sg96
(lp127
sg98 sg98
g99 Nsg99
I00
sg100 sg100
g101
sg103
I00 I00
sg104 sg101
I00
sg105
g23
sg106
g107
sS'buildjni'
p134
(lp135
S'no'
p136
asg111
g112
sg113
Nsg114
g115
sg116
Nsg117
I00
sS'prebuild'
p137
g23
sg120
I00
sS'commit'
p138
S'1.8.3' S'1.8.3'
p128
sg103
(lp129
sg105
S'no'
p130
sg107
Nsg108
Nsg109
(lp131
sg111
(lp132
sg113
g21
sg115
I00
sg116
S'bin/OsmAnd-release-unsigned.apk'
p133
sa(dp134
g70
g71
sg72
I00
sg73
I00
sg74
I00
sg75
Nsg76
(lp135
sg78
I00
sg79
(lp136
sg81
Nsg82
(lp137
sg84
g21
sg85
S'1.9.4'
p138
sg87
S'android/OsmAnd'
p139 p139
sa(dp140 sg89
S'submodules' (lp140
sg91
S'../../build'
p141 p141
I01 sg93
sg70
S'196' S'196'
p142 p142
sg72 sg95
I00 I00
sg73 sg96
I00
sg74
(lp143 (lp143
sg76
g77
sg78
g79
sg80
g81
sS'output'
p144
S'bin/OsmAnd-release-unsigned.apk'
p145
sg84
Nsg85
g86
sg87
g23
sg88
S'1.9.4'
p146
sS'subdir'
p147
S'android/OsmAnd'
p148
sg92
g93
sg94
I00
sS'build'
p149
S'../../build'
p150
sg97
I00
sg98 sg98
g99 Nsg99
I00
sg100 sg100
g101
sg103
I00 I00
sg104 sg101
I00
sg105
g23
sg106
g107
sS'buildjni'
p151
(lp152
S'no'
p153
asS'ndk'
p154
S'r10d'
p155
sg113
Nsg114
g115
sg116
Nsg117
I00
sg137
g23
sg120
I00
sS'commit'
p156
S'1.9.4' S'1.9.4'
p157 p144
sa(dp158 sg103
S'submodules' (lp145
p159 sg105
I01 S'no'
sg70 p146
S'197' sg107
p160 S'r10d'
p147
sg108
Nsg109
(lp148
sg111
(lp149
sg113
g21
sg115
I00
sg116
S'bin/OsmAnd-release-unsigned.apk'
p150
sa(dp151
g70
g71
sg72 sg72
I00 I00
sg73 sg73
I00 I00
sg74 sg74
(lp161 I00
sg76 sg75
g77 Nsg76
(lp152
sg78 sg78
g79 I00
sg80 sg79
g81 (lp153
sS'output' sg81
p162 Nsg82
S'bin/OsmAnd-release-unsigned.apk' (lp154
p163
sg84 sg84
Nsg85 g21
g86 sg85
S'1.9.5'
p155
sg87 sg87
g23
sg88
S'1.9.5'
p164
sS'subdir'
p165
S'android/OsmAnd' S'android/OsmAnd'
p166 p156
sg92 sg89
g93 (lp157
sg94 sg91
I00
sS'build'
p167
S'../../build' S'../../build'
p168 p158
sg97 sg93
S'197'
p159
sg95
I00 I00
sg96
(lp160
sg98 sg98
g99 Nsg99
I00
sg100 sg100
g101 I00
sg101
S'1.9.5'
p161
sg103 sg103
I00 (lp162
sg104
I00
sg105 sg105
g23
sg106
g107
sS'buildjni'
p169
(lp170
S'no' S'no'
p171 p163
asS'ndk' sg107
p172
S'r10d' S'r10d'
p173 p164
sg108
Nsg109
(lp165
sg111
(lp166
sg113 sg113
Nsg114 g21
g115 sg115
I00
sg116 sg116
Nsg117 S'bin/OsmAnd-release-unsigned.apk'
I00 p167
sg137
g23
sg120
I00
sS'commit'
p174
S'1.9.5'
p175
sasS'FlattrID' sasS'FlattrID'
p176 p168
NsS'metadatapath' NsS'metadatapath'
p177 p169
S'metadata/net.osmand.plus.xml' S'metadata/net.osmand.plus.xml'
p178 p170
sS'Disabled' sS'Disabled'
p179 p171
NsS'added' NsS'Web Site'
p180 p172
NsS'lastupdated' S'http://osmand.net'
p181 p173
NsS'Update Check Name' sS'Update Check Name'
p182 p174
NsS'Vercode Operation' NsS'Vercode Operation'
p183 p175
NsS'Current Version' NsS'Auto Update Mode'
p184 p176
S'1.9.5' S'None'
p185 p177
s. s.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,15 +0,0 @@
#!/usr/bin/env python2
#
# This script is for updating the .pickle test files when there are changes to
# the default metadata, e.g. adding a new key/tag.
import glob
import pickle
# Rewrite every .pickle test fixture in the current directory, adding the
# new 'gradleprops' flag (default: empty list) to each build entry so the
# fixtures match the current default metadata schema.
for picklefile in glob.glob('*.pickle'):
    # Load the pickled app-metadata dict; 'with' ensures the read handle
    # is closed (the original leaked both file handles).
    with open(picklefile) as f:
        p = pickle.load(f)
    # Backfill the new flag with its default value on every build.
    for build in p['builds']:
        build['gradleprops'] = []
    # Write back in place; closing the handle guarantees the pickle is
    # fully flushed to disk (an unclosed write handle risks truncation).
    with open(picklefile, 'w') as f:
        pickle.dump(p, f)