Mirror of https://github.com/f-droid/fdroidserver.git
Merge branch 'master' into 'master'
last PEP8 fixes (everything but E501 line too long and E123 close bracket indent)

These two commits fix all the rest of the PEP8 errors and warnings except for:

* E123 closing bracket does not match indentation of opening bracket's line
* E501 line too long (x > 79 characters)

Almost all of the fixed issues were these errors:

* E124 closing bracket does not match visual indentation
* E125 continuation line does not distinguish itself from next logical line
* E126 continuation line over-indented for hanging indent
* E127 continuation line over-indented for visual indent
* E128 continuation line under-indented for visual indent
* E226 missing whitespace around arithmetic operator

If you would like to make it run as part of the automated builds, it would just be a matter of adding this line to `jenkins-build`:

```
pep8 --ignore=E123,E501,W fdroid fdroidserver/*.py examples/*.py
```
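As a side note (not part of this commit): the same check can also be driven from Python via the `pep8` package's `StyleGuide` API, which is handy if you would rather run it from a test suite than from `jenkins-build`. The snippet below is only a minimal sketch under that assumption; the file list simply mirrors the command line shown above.

```python
# Illustrative sketch only (not from this commit): run the same PEP8 check
# programmatically with the pep8 package, ignoring the same codes as the
# command line above.
import glob

import pep8

style = pep8.StyleGuide(ignore=['E123', 'E501', 'W'])
files = ['fdroid'] + glob.glob('fdroidserver/*.py') + glob.glob('examples/*.py')
report = style.check_files(files)   # prints violations and collects totals
if report.total_errors:
    raise SystemExit("%d PEP8 violations found" % report.total_errors)
```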
Commit 7a699e4a48

15 changed files with 374 additions and 359 deletions
fdroid (2 changes)
```diff
@@ -44,7 +44,7 @@ def print_help():
     print
     print "Valid commands are:"
     for cmd, summary in commands.items():
-        print " " + cmd + ' '*(15-len(cmd)) + summary
+        print " " + cmd + ' ' * (15 - len(cmd)) + summary
     print
```
```diff
@@ -449,7 +449,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d

     # Prepare the source code...
     root_dir, srclibpaths = common.prepare_source(vcs, app, thisbuild,
-            build_dir, srclib_dir, extlib_dir, onserver)
+                                                  build_dir, srclib_dir,
+                                                  extlib_dir, onserver)

     # We need to clean via the build tool in case the binary dirs are
     # different from the default ones
@@ -536,9 +537,11 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
         with open(manifest, 'r') as f:
             manifestcontent = f.read()
         manifestcontent = manifestcontent.replace('</manifest>',
-            '<fdroid buildserverid="' + buildserverid + '"' +
-            ' fdroidserverid="' + fdroidserverid + '"' +
-            '/></manifest>')
+                                                  '<fdroid buildserverid="'
+                                                  + buildserverid + '"'
+                                                  + ' fdroidserverid="'
+                                                  + fdroidserverid + '"'
+                                                  + '/></manifest>')
         with open(manifest, 'w') as f:
             f.write(manifestcontent)
@@ -601,12 +604,16 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
         if 'target' in thisbuild:
             target = thisbuild["target"].split('-')[1]
             FDroidPopen(['sed', '-i',
-                's@<platform>[0-9]*</platform>@<platform>'+target+'</platform>@g',
-                'pom.xml'], cwd=root_dir)
+                         's@<platform>[0-9]*</platform>@<platform>'
+                         + target + '</platform>@g',
+                         'pom.xml'],
+                        cwd=root_dir)
             if '@' in thisbuild['maven']:
                 FDroidPopen(['sed', '-i',
-                    's@<platform>[0-9]*</platform>@<platform>'+target+'</platform>@g',
-                    'pom.xml'], cwd=maven_dir)
+                             's@<platform>[0-9]*</platform>@<platform>'
+                             + target + '</platform>@g',
+                             'pom.xml'],
+                            cwd=maven_dir)

         if 'mvnflags' in thisbuild:
             mvncmd += thisbuild['mvnflags']
@@ -697,7 +704,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
         if flavours_cmd:
             flavours_cmd = flavours_cmd[0].upper() + flavours_cmd[1:]

-        commands += ['assemble'+flavours_cmd+'Release']
+        commands += ['assemble' + flavours_cmd + 'Release']

         p = FDroidPopen(commands, cwd=gradle_dir)
@@ -748,7 +755,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
         else:
             name = '-'.join([basename, '-'.join(flavours), 'release', 'unsigned'])
         dd = os.path.normpath(dd)
-        src = os.path.join(dd, 'build', 'apk', name+'.apk')
+        src = os.path.join(dd, 'build', 'apk', name + '.apk')
     elif thisbuild['type'] == 'ant':
         stdout_apk = '\n'.join([
             line for line in p.stdout.splitlines() if '.apk' in line])
@@ -769,8 +776,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
     if not os.path.exists(src):
         raise BuildException("Unsigned apk is not at expected location of " + src)

-    p = SilentPopen([os.path.join(config['sdk_path'],
-        'build-tools', config['build_tools'], 'aapt'),
+    p = SilentPopen([os.path.join(config['sdk_path'], 'build-tools',
+                                  config['build_tools'], 'aapt'),
                      'dump', 'badging', src])

     vercode = None
@@ -820,7 +827,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
         raise BuildException(("Unexpected version/version code in output;"
                               " APK: '%s' / '%s', "
                               " Expected: '%s' / '%s'")
-                             % (version, str(vercode), thisbuild['version'], str(thisbuild['vercode']))
+                             % (version, str(vercode), thisbuild['version'],
+                                str(thisbuild['vercode']))
                              )

     # Copy the unsigned apk to our destination directory for further
@@ -1020,16 +1028,18 @@ def main():
                 build_dir = os.path.join('build', app['id'])

                 # Set up vcs interface and make sure we have the latest code...
-                logging.debug("Getting {0} vcs interface for {1}".format(
-                    app['Repo Type'], app['Repo']))
+                logging.debug("Getting {0} vcs interface for {1}"
+                              .format(app['Repo Type'], app['Repo']))
                 vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)

                 first = False

             logging.debug("Checking " + thisbuild['version'])
-            if trybuild(app, thisbuild, build_dir, output_dir, also_check_dir,
-                    srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, options.test,
-                    options.server, options.force, options.onserver):
+            if trybuild(app, thisbuild, build_dir, output_dir,
+                        also_check_dir, srclib_dir, extlib_dir,
+                        tmp_dir, repo_dir, vcs, options.test,
+                        options.server, options.force,
+                        options.onserver):
                 build_succeeded.append(app)
                 wikilog = "Build succeeded"
         except BuildException as be:
```
```diff
@@ -138,8 +138,8 @@ def check_tags(app, pattern):
         if not package or package != appid or not version or not vercode:
             continue

-        logging.debug("Manifest exists. Found version {0} ({1})".format(
-            version, vercode))
+        logging.debug("Manifest exists. Found version {0} ({1})"
+                      .format(version, vercode))
         if int(vercode) > int(hcode):
             htag = tag
             hcode = str(int(vercode))
@@ -183,7 +183,7 @@ def check_repomanifest(app, branch=None):

     if repotype == 'git':
         if branch:
-            branch = 'origin/'+branch
+            branch = 'origin/' + branch
         vcs.gotorevision(branch)
     elif repotype == 'git-svn':
         vcs.gotorevision(branch)
@@ -340,18 +340,18 @@ def main():
         if version is not None:
             stored = app['Current Version']
             if not stored:
-                logging.info("{0} has no Current Version but has version {1} on the Play Store".format(
-                    common.getappname(app), version))
+                logging.info("{0} has no Current Version but has version {1} on the Play Store"
+                             .format(common.getappname(app), version))
             elif LooseVersion(stored) < LooseVersion(version):
-                logging.info("{0} has version {1} on the Play Store, which is bigger than {2}".format(
-                    common.getappname(app), version, stored))
+                logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
+                             .format(common.getappname(app), version, stored))
             else:
                 if stored != version:
-                    logging.info("{0} has version {1} on the Play Store, which differs from {2}".format(
-                        common.getappname(app), version, stored))
+                    logging.info("{0} has version {1} on the Play Store, which differs from {2}"
+                                 .format(common.getappname(app), version, stored))
                 else:
-                    logging.info("{0} has the same version {1} on the Play Store".format(
-                        common.getappname(app), version))
+                    logging.info("{0} has the same version {1} on the Play Store"
+                                 .format(common.getappname(app), version))
             return

     for app in apps:
@@ -507,8 +507,7 @@ def main():
             metadata.write_metadata(metafile, app)
             if options.commit:
                 logging.info("Commiting update for " + metafile)
-                gitcmd = ["git", "commit", "-m",
-                    commitmsg]
+                gitcmd = ["git", "commit", "-m", commitmsg]
                 if 'auto_author' in config:
                     gitcmd.extend(['--author', config['auto_author']])
                 gitcmd.extend(["--", metafile])
```
```diff
@@ -460,7 +460,7 @@ class vcs_git(vcs):

     def latesttags(self, alltags, number):
         self.checkrepo()
-        p = SilentPopen(['echo "'+'\n'.join(alltags)+'" | \
+        p = SilentPopen(['echo "' + '\n'.join(alltags) + '" | \
                 xargs -I@ git log --format=format:"%at @%n" -1 @ | \
                 sort -n | awk \'{print $2}\''],
                         cwd=self.local, shell=True)
@@ -653,7 +653,7 @@ class vcs_hg(vcs):
             p = SilentPopen(['hg', 'purge', '--all'], cwd=self.local)
             # Also delete untracked files, we have to enable purge extension for that:
             if "'purge' is provided by the following extension" in p.stdout:
-                with open(self.local+"/.hg/hgrc", "a") as myfile:
+                with open(self.local + "/.hg/hgrc", "a") as myfile:
                     myfile.write("\n[extensions]\nhgext.purge=\n")
                 p = SilentPopen(['hg', 'purge', '--all'], cwd=self.local)
                 if p.returncode != 0:
@@ -713,9 +713,9 @@ def retrieve_string(app_dir, string, xmlfiles=None):

     string_search = None
     if string.startswith('@string/'):
-        string_search = re.compile(r'.*"'+string[8:]+'".*?>([^<]+?)<.*').search
+        string_search = re.compile(r'.*"' + string[8:] + '".*?>([^<]+?)<.*').search
     elif string.startswith('&') and string.endswith(';'):
-        string_search = re.compile(r'.*<!ENTITY.*'+string[1:-1]+'.*?"([^"]+?)".*>').search
+        string_search = re.compile(r'.*<!ENTITY.*' + string[1:-1] + '.*?"([^"]+?)".*>').search

     if string_search is not None:
         for xmlfile in xmlfiles:
@@ -731,7 +731,8 @@ def retrieve_string(app_dir, string, xmlfiles=None):
 # Return list of existing files that will be used to find the highest vercode
 def manifest_paths(app_dir, flavour):

-    possible_manifests = [os.path.join(app_dir, 'AndroidManifest.xml'),
-            os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
-            os.path.join(app_dir, 'src', 'AndroidManifest.xml'),
-            os.path.join(app_dir, 'build.gradle')]
+    possible_manifests = \
+        [os.path.join(app_dir, 'AndroidManifest.xml'),
+         os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
+         os.path.join(app_dir, 'src', 'AndroidManifest.xml'),
+         os.path.join(app_dir, 'build.gradle')]
@@ -1111,14 +1112,16 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
         if 'target' in build:
             n = build["target"].split('-')[1]
             FDroidPopen(['sed', '-i',
-                's@compileSdkVersion *[0-9]*@compileSdkVersion '+n+'@g',
-                'build.gradle'], cwd=root_dir)
+                         's@compileSdkVersion *[0-9]*@compileSdkVersion ' + n + '@g',
+                         'build.gradle'],
+                        cwd=root_dir)
             if '@' in build['gradle']:
                 gradle_dir = os.path.join(root_dir, build['gradle'].split('@', 1)[1])
                 gradle_dir = os.path.normpath(gradle_dir)
                 FDroidPopen(['sed', '-i',
-                    's@compileSdkVersion *[0-9]*@compileSdkVersion '+n+'@g',
-                    'build.gradle'], cwd=gradle_dir)
+                             's@compileSdkVersion *[0-9]*@compileSdkVersion ' + n + '@g',
+                             'build.gradle'],
+                            cwd=gradle_dir)

         # Remove forced debuggable flags
         remove_debuggable_flags(root_dir)
@@ -1131,13 +1134,15 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
                 continue
             if has_extension(path, 'xml'):
                 p = SilentPopen(['sed', '-i',
-                    's/android:versionName="[^"]*"/android:versionName="' + build['version'] + '"/g',
-                    path])
+                                 's/android:versionName="[^"]*"/android:versionName="'
+                                 + build['version'] + '"/g',
+                                 path])
                 if p.returncode != 0:
                     raise BuildException("Failed to amend manifest")
             elif has_extension(path, 'gradle'):
                 p = SilentPopen(['sed', '-i',
-                    's/versionName *=* *"[^"]*"/versionName = "' + build['version'] + '"/g',
-                    path])
+                                 's/versionName *=* *"[^"]*"/versionName = "'
+                                 + build['version'] + '"/g',
+                                 path])
                 if p.returncode != 0:
                     raise BuildException("Failed to amend build.gradle")
@@ -1148,13 +1153,15 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
                 continue
             if has_extension(path, 'xml'):
                 p = SilentPopen(['sed', '-i',
-                    's/android:versionCode="[^"]*"/android:versionCode="' + build['vercode'] + '"/g',
-                    path])
+                                 's/android:versionCode="[^"]*"/android:versionCode="'
+                                 + build['vercode'] + '"/g',
+                                 path])
                 if p.returncode != 0:
                     raise BuildException("Failed to amend manifest")
             elif has_extension(path, 'gradle'):
                 p = SilentPopen(['sed', '-i',
-                    's/versionCode *=* *[0-9]*/versionCode = ' + build['vercode'] + '/g',
-                    path])
+                                 's/versionCode *=* *[0-9]*/versionCode = '
+                                 + build['vercode'] + '/g',
+                                 path])
                 if p.returncode != 0:
                     raise BuildException("Failed to amend build.gradle")
@@ -1251,7 +1258,7 @@ def getpaths(build_dir, build, field):
         p = p.strip()
         full_path = os.path.join(build_dir, p)
         full_path = os.path.normpath(full_path)
-        paths += [r[len(build_dir)+1:] for r in glob.glob(full_path)]
+        paths += [r[len(build_dir) + 1:] for r in glob.glob(full_path)]
     return paths
@@ -1328,7 +1335,7 @@ def scan_source(build_dir, root_dir, thisbuild):

             # Path (relative) to the file
             fp = os.path.join(r, curfile)
-            fd = fp[len(build_dir)+1:]
+            fd = fp[len(build_dir) + 1:]

             # Check if this file has been explicitly excluded from scanning
             if toignore(fd):
@@ -1460,8 +1467,8 @@ def isApkDebuggable(apkfile, config):

     :param apkfile: full path to the apk to check"""

-    p = SilentPopen([os.path.join(config['sdk_path'],
-        'build-tools', config['build_tools'], 'aapt'),
+    p = SilentPopen([os.path.join(config['sdk_path'], 'build-tools',
+                                  config['build_tools'], 'aapt'),
                      'dump', 'xmltree', apkfile, 'AndroidManifest.xml'])
     if p.returncode != 0:
         logging.critical("Failed to get apk manifest information")
```
```diff
@@ -265,8 +265,7 @@ def main():
     if repo_keyalias is not None:
         logging.info(' Alias for key in store:\t' + repo_keyalias)
     logging.info('\nTo complete the setup, add your APKs to "' +
-                 os.path.join(fdroiddir, 'repo') + '"' +
-                 '''
+                 os.path.join(fdroiddir, 'repo') + '"' + '''
 then run "fdroid update -c; fdroid update". You might also want to edit
 "config.py" to set the URL, repo name, and more. You should also set up
 a signing key (a temporary one might have been automatically generated).
```
```diff
@@ -79,7 +79,6 @@ regex_warnings = {
     (re.compile(r'^ '),
         "Unnecessary leading space"),
     ],
-
 }

 regex_pedantic = {
@@ -117,7 +116,7 @@ regex_pedantic = {
     (re.compile(r'.*[a-z0-9][.,!?][ $]'),
-        "Punctuation should be avoided"),
+     "Punctuation should be avoided"),
     ],
 }


 def main():
@@ -194,7 +193,6 @@ def main():
             pwarn("Summary '%s' probably contains redundant info already in app name '%s'" % (
                 summary, name))

-
         # Description size limit
         desc_chars = sum(len(l) for l in app['Description'])
         if desc_chars > config['char_limits']['Description']:
@@ -213,7 +211,6 @@ def main():
                     if m.match(l):
                         warn("%s at line '%s': %s" % (f, l, r))

-
         # Regex pedantic checks in all kinds of fields
         if options.pedantic:
             for f in regex_pedantic:
```
```diff
@@ -61,7 +61,7 @@ app_defaults = {
     'Repo': '',
     'Requires Root': False,
     'No Source Since': ''
-  }
+}


 # This defines the preferred order for the build items - as in the
@@ -73,7 +73,7 @@ ordered_flags = [
     'extlibs', 'srclibs', 'patch', 'prebuild', 'scanignore',
     'scandelete', 'build', 'buildjni', 'preassemble', 'bindir',
     'antcommand', 'novcheck'
-  ]
+]


 # Designates a metadata field type and checks that it matches
@@ -182,7 +182,7 @@ valuetypes = {
                   r"^(Tags|Tags .+|RepoManifest|RepoManifest/.+|RepoTrunk|HTTP|Static|None)$", None,
                   ["Update Check Mode"],
                   [])
-  }
+}


 # Check an app's metadata information for integrity errors
@@ -293,7 +293,7 @@ class DescriptionFormatter:
                     urltext = url
                 linkified_html += '<a href="' + url + '">' + cgi.escape(urltext) + '</a>'
                 linkified_plain += urltext
-                txt = txt[index+2:]
+                txt = txt[index + 2:]
             else:
                 index = txt.find("]")
                 if index == -1:
@@ -309,7 +309,7 @@ class DescriptionFormatter:
                 linkified_plain += urltxt
                 if urltxt != url:
                     linkified_plain += ' (' + url + ')'
-                txt = txt[index+1:]
+                txt = txt[index + 1:]

     def addtext(self, txt):
         p, h = self.linkify(txt)
@@ -510,17 +510,17 @@ def parse_metadata(metafile):
     def add_buildflag(p, thisbuild):
         bv = p.split('=', 1)
         if len(bv) != 2:
-            raise MetaDataException("Invalid build flag at {0} in {1}".
-                format(buildlines[0], linedesc))
+            raise MetaDataException("Invalid build flag at {0} in {1}"
+                                    .format(buildlines[0], linedesc))
         pk, pv = bv
         if pk in thisbuild:
-            raise MetaDataException("Duplicate definition on {0} in version {1} of {2}".
-                format(pk, thisbuild['version'], linedesc))
+            raise MetaDataException("Duplicate definition on {0} in version {1} of {2}"
+                                    .format(pk, thisbuild['version'], linedesc))

         pk = pk.lstrip()
         if pk not in ordered_flags:
-            raise MetaDataException("Unrecognised build flag at {0} in {1}".
-                format(p, linedesc))
+            raise MetaDataException("Unrecognised build flag at {0} in {1}"
+                                    .format(p, linedesc))
         t = flagtype(pk)
         if t == 'list':
             # Port legacy ';' separators
@@ -530,8 +530,8 @@ def parse_metadata(metafile):
         elif t == 'script':
             thisbuild[pk] = pv
         else:
-            raise MetaDataException("Unrecognised build flag type '%s' at %s in %s" % (
-                t, p, linedesc))
+            raise MetaDataException("Unrecognised build flag type '%s' at %s in %s"
+                                    % (t, p, linedesc))

     def parse_buildline(lines):
         value = "".join(lines)
@@ -550,7 +550,7 @@ def parse_metadata(metafile):
             commit = 'unknown - see disabled'
             index = parts[2].rfind('at ')
             if index != -1:
-                commit = parts[2][index+3:]
+                commit = parts[2][index + 3:]
                 if commit.endswith(')'):
                     commit = commit[:-1]
             thisbuild['commit'] = commit
@@ -606,8 +606,8 @@ def parse_metadata(metafile):
         if mode == 3:
             if not any(line.startswith(s) for s in (' ', '\t')):
                 if 'commit' not in curbuild and 'disable' not in curbuild:
-                    raise MetaDataException("No commit specified for {0} in {1}".format(
-                        curbuild['version'], linedesc))
+                    raise MetaDataException("No commit specified for {0} in {1}"
+                                            .format(curbuild['version'], linedesc))
                 thisinfo['builds'].append(curbuild)
                 add_comments('build:' + curbuild['version'])
                 mode = 0
@@ -629,9 +629,9 @@ def parse_metadata(metafile):
             try:
                 field, value = line.split(':', 1)
             except ValueError:
-                raise MetaDataException("Invalid metadata in "+linedesc)
+                raise MetaDataException("Invalid metadata in " + linedesc)
             if field != field.strip() or value != value.strip():
-                raise MetaDataException("Extra spacing found in "+linedesc)
+                raise MetaDataException("Extra spacing found in " + linedesc)

             # Translate obsolete fields...
             if field == 'Market Version':
@@ -662,8 +662,8 @@ def parse_metadata(metafile):
                 curbuild = {}
                 vv = value.split(',')
                 if len(vv) != 2:
-                    raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}'.
-                        format(value, linedesc))
+                    raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}'
+                                            .format(value, linedesc))
                 curbuild['version'] = vv[0]
                 curbuild['vercode'] = vv[1]
                 buildlines = []
```
```diff
@@ -133,7 +133,8 @@ def main():
         if p.returncode != 0:
             logging.info("Key does not exist - generating...")
             p = FDroidPopen(['keytool', '-genkey',
-                '-keystore', config['keystore'], '-alias', keyalias,
-                '-keyalg', 'RSA', '-keysize', '2048',
-                '-validity', '10000',
-                '-storepass:file', config['keystorepassfile'],
+                             '-keystore', config['keystore'],
+                             '-alias', keyalias,
+                             '-keyalg', 'RSA', '-keysize', '2048',
+                             '-validity', '10000',
+                             '-storepass:file', config['keystorepassfile'],
```
```diff
@@ -47,7 +47,7 @@ def main():

     for app in apps:
         logging.info("Writing " + app['id'])
-        metadata.write_metadata(os.path.join('metadata', app['id'])+'.txt', app)
+        metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)

     logging.info("Finished.")
```
```diff
@@ -90,13 +90,14 @@ def main():

                 # Prepare the source code...
                 root_dir, _ = common.prepare_source(vcs, app, thisbuild,
-                        build_dir, srclib_dir, extlib_dir, False)
+                                                    build_dir, srclib_dir,
+                                                    extlib_dir, False)

                 # Do the scan...
                 buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
                 for problem in buildprobs:
-                    problems.append(problem +
-                            ' in ' + app['id'] + ' ' + thisbuild['version'])
+                    problems.append(problem + ' in ' + app['id']
+                                    + ' ' + thisbuild['version'])

             except BuildException as be:
                 msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
```
```diff
@@ -129,9 +129,8 @@ def update_wiki(apps, apks):
                 if 'disable' in thisbuild:
                     if thisbuild['vercode'] == app['Current Version Code']:
                         cantupdate = True
-                        apklist.append({
-                            #TODO: Nasty: vercode is a string in the build, and an int elsewhere
-                            'versioncode': int(thisbuild['vercode']),
+                        #TODO: Nasty: vercode is a string in the build, and an int elsewhere
+                        apklist.append({'versioncode': int(thisbuild['vercode']),
                                         'version': thisbuild['version'],
                                         'buildproblem': thisbuild['disable']
                                         })
@@ -143,8 +142,7 @@ def update_wiki(apps, apks):
                         break
                 if not builtit:
                     buildfails = True
-                    apklist.append({
-                        'versioncode': int(thisbuild['vercode']),
+                    apklist.append({'versioncode': int(thisbuild['vercode']),
                                     'version': thisbuild['version'],
                                     'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild|build log]].".format(app['id'])
                                     })
@@ -221,7 +219,8 @@ def update_wiki(apps, apks):
         # Drop double spaces caused mostly by replacing ':' above
         apppagename = apppagename.replace('  ', ' ')
         for expagename in site.allpages(prefix=apppagename,
-                filterredir='nonredirects', generator=False):
+                                        filterredir='nonredirects',
+                                        generator=False):
             if expagename == apppagename:
                 noclobber = True
         # Another reason not to make the redirect page is if the app name
@@ -379,8 +378,8 @@ def scan_apks(apps, apkcache, repodir, knownapks):
             thisinfo['features'] = []
             thisinfo['icons_src'] = {}
             thisinfo['icons'] = {}
-            p = FDroidPopen([os.path.join(config['sdk_path'],
-                'build-tools', config['build_tools'], 'aapt'),
+            p = FDroidPopen([os.path.join(config['sdk_path'], 'build-tools',
+                                          config['build_tools'], 'aapt'),
                              'dump', 'badging', apkfile])
             if p.returncode != 0:
                 logging.critical("Failed to get apk information")
@@ -430,8 +429,8 @@ def scan_apks(apps, apkcache, repodir, knownapks):
                     perm = re.match(string_pat, line).group(1)
                     #Filter out this, it's only added with the latest SDK tools and
                     #causes problems for lots of apps.
-                    if (perm != "android.hardware.screen.portrait" and
-                            perm != "android.hardware.screen.landscape"):
+                    if perm != "android.hardware.screen.portrait" \
+                            and perm != "android.hardware.screen.landscape":
                         if perm.startswith("android.feature."):
                             perm = perm[16:]
                         thisinfo['features'].append(perm)
@@ -530,8 +529,8 @@ def scan_apks(apps, apkcache, repodir, knownapks):
                 continue
             if last_density is None:
                 continue
-            logging.info("Density %s not available, resizing down from %s" % (
-                density, last_density))
+            logging.info("Density %s not available, resizing down from %s"
+                         % (density, last_density))

             last_iconpath = os.path.join(
                 get_icon_dir(repodir, last_density), iconfilename)
@@ -557,8 +556,8 @@ def scan_apks(apps, apkcache, repodir, knownapks):
                 continue
             if last_density is None:
                 continue
-            logging.info("Density %s not available, copying from lower density %s" % (
-                density, last_density))
+            logging.info("Density %s not available, copying from lower density %s"
+                         % (density, last_density))

             shutil.copyfile(
                 os.path.join(get_icon_dir(repodir, last_density), iconfilename),
@@ -704,7 +703,8 @@ def make_index(apps, apks, repodir, archive, categories):
                 return ("fdroid.app:" + link, app['Name'])
             raise MetaDataException("Cannot resolve app id " + link)
         addElement('desc',
-            metadata.description_html(app['Description'], linkres), doc, apel)
+                   metadata.description_html(app['Description'], linkres),
+                   doc, apel)
         addElement('license', app['License'], doc, apel)
         if 'Categories' in app:
             addElement('categories', ','.join(app["Categories"]), doc, apel)
@@ -754,9 +754,9 @@ def make_index(apps, apks, repodir, archive, categories):

         # Check for duplicates - they will make the client unhappy...
         for i in range(len(apklist) - 1):
-            if apklist[i]['versioncode'] == apklist[i+1]['versioncode']:
+            if apklist[i]['versioncode'] == apklist[i + 1]['versioncode']:
                 logging.critical("duplicate versions: '%s' - '%s'" % (
-                    apklist[i]['apkname'], apklist[i+1]['apkname']))
+                    apklist[i]['apkname'], apklist[i + 1]['apkname']))
                 sys.exit(1)

         for apk in apklist:
```
```diff
@@ -95,7 +95,8 @@ def main():
                                 os.path.join("..", "..", unsigned_dir, apkfilename)],
                                cwd=thisdir) != 0:
             raise Exception("Failed to unpack local build of " + apkfilename)
-        if subprocess.call(['jar', 'xf', os.path.join("..", "..", remoteapk)],
+        if subprocess.call(['jar', 'xf',
+                            os.path.join("..", "..", remoteapk)],
                            cwd=thatdir) != 0:
             raise Exception("Failed to unpack remote build of " + apkfilename)
```
setup.py (2 changes)
```diff
@@ -20,7 +20,7 @@ setup(name='fdroidserver',
       scripts=['fdroid', 'fd-commit'],
       data_files=[
           (sys.prefix + '/share/doc/fdroidserver/examples',
-              [ 'buildserver/config.buildserver.py',
+              ['buildserver/config.buildserver.py',
                'examples/config.py',
                'examples/makebs.config.py',
                'examples/opensc-fdroid.cfg',
```