Mirror of https://github.com/f-droid/fdroidserver.git

Merge branch 'master' into 'master'

fix pyflakes error and a bunch of PEP8 fixes

Lots of little fixes here: the first commit fixes the minor error that `pyflakes` raised, and the rest are all PEP8 fixes, as reported by the `pep8` command-line tool.

Commit ae3d1b036f
17 changed files with 221 additions and 135 deletions
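For reference, reports of the kind this merge addresses are normally produced by running the two tools over the working tree. The invocations below are illustrative only; the exact paths used are not recorded in the commit:

    pyflakes fdroid fdroidserver/*.py
    pep8 fdroid fdroidserver/*.py

`pyflakes` reports likely bugs such as undefined names and unused imports, while `pep8` (later renamed `pycodestyle`) reports purely stylistic issues, e.g. E203 (whitespace before ':'), E231 (missing whitespace after ','), and E711 (comparison to None), which is what most of the hunks below address.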
@@ -141,6 +141,6 @@ build_server_always = False
 # Limit in number of characters that fields can take up
 # Only the fields listed here are supported, defaults shown
 char_limits = {
-'Summary' : 50,
-'Description' : 1500
+'Summary': 50,
+'Description': 1500
 }

fdroid (5 changed lines)
@@ -38,14 +38,16 @@ commands = {
 "server": "Interact with the repo HTTP server",
 }


 def print_help():
 print "usage: fdroid [-h|--help] <command> [<args>]"
 print
 print "Valid commands are:"
-for cmd,summary in commands.items():
+for cmd, summary in commands.items():
 print " " + cmd + ' '*(15-len(cmd)) + summary
 print


 def main():

 if len(sys.argv) <= 1:
@@ -85,4 +87,3 @@ def main():

 if __name__ == "__main__":
 main()

@@ -37,8 +37,9 @@ from common import BuildException, VCSException, FDroidPopen, SilentPopen
 try:
 import paramiko
-except:
-paramiko = None
+except ImportError:
+pass


 def get_builder_vm_id():
 vd = os.path.join('builder', '.vagrant')
@@ -244,7 +245,9 @@ def release_vm():
 def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
 """Do a build on the build server."""

-if not paramiko:
+try:
+paramiko
+except NameError:
 raise BuildException("Paramiko is required to use the buildserver")
 if options.verbose:
 logging.getLogger("paramiko").setLevel(logging.DEBUG)
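Taken together, the two hunks just above replace the old `paramiko = None` sentinel with a check for an unbound name. A minimal, self-contained sketch of that pattern, with the indentation the diff rendering drops restored, and with a stand-in BuildException class and helper name (both illustrative, not from the repository):

    try:
        import paramiko  # optional dependency, only needed when using the buildserver
    except ImportError:
        pass  # leave the name unbound if paramiko is missing


    class BuildException(Exception):
        """Stand-in for the project's own BuildException."""


    def require_paramiko():
        # Referencing the bare name raises NameError when the import above failed.
        try:
            paramiko
        except NameError:
            raise BuildException("Paramiko is required to use the buildserver")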

@@ -30,7 +30,8 @@ import HTMLParser
 from distutils.version import LooseVersion
 import logging

-import common, metadata
+import common
+import metadata
 from common import BuildException
 from common import VCSException
 from metadata import MetaDataException
@@ -79,6 +80,7 @@ def check_http(app):
 msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
 return (None, msg)


 # Check for a new version by looking at the tags in the source repo.
 # Whether this can be used reliably or not depends on
 # the development procedures used by the project's developers. Use it with
@@ -157,6 +159,7 @@ def check_tags(app, pattern):
 msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
 return (None, msg, None)


 # Check for a new version by looking at the AndroidManifest.xml at the HEAD
 # of the source repo. Whether this can be used reliably or not depends on
 # the development procedures used by the project's developers. Use it with
@@ -212,9 +215,9 @@ def check_repomanifest(app, branch=None):
 if package != appid:
 return (None, "Package ID mismatch")
 if not version:
-return (None,"Couldn't find latest version name")
+return (None, "Couldn't find latest version name")
 if not vercode:
-return (None,"Couldn't find latest version code")
+return (None, "Couldn't find latest version code")

 vercode = str(int(vercode))
@@ -232,6 +235,7 @@ def check_repomanifest(app, branch=None):
 msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
 return (None, msg)


 def check_repotrunk(app, branch=None):

 try:
@@ -262,13 +266,14 @@ def check_repotrunk(app, branch=None):
 msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc())
 return (None, msg)


 # Check for a new version by looking at the Google Play Store.
 # Returns (None, "a message") if this didn't work, or (version, None) for
 # the details of the current version.
 def check_gplay(app):
 time.sleep(15)
 url = 'https://play.google.com/store/apps/details?id=' + app['id']
-headers = {'User-Agent' : 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
+headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
 req = urllib2.Request(url, None, headers)
 try:
 resp = urllib2.urlopen(req, None, 20)
@@ -296,6 +301,7 @@ def check_gplay(app):
 config = None
 options = None


 def main():

 global config, options
@@ -348,7 +354,6 @@ def main():
 common.getappname(app), version))
 return


 for app in apps:

 if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
@@ -515,4 +520,3 @@ def main():

 if __name__ == "__main__":
 main()

@@ -17,7 +17,9 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import os, sys, re
+import os
+import sys
+import re
 import shutil
 import glob
 import stat
@@ -34,6 +36,7 @@ import metadata
 config = None
 options = None


 def get_default_config():
 return {
 'sdk_path': os.getenv("ANDROID_HOME"),
@@ -50,12 +53,13 @@ def get_default_config():
 'keystore': '$HOME/.local/share/fdroidserver/keystore.jks',
 'smartcardoptions': [],
 'char_limits': {
-'Summary' : 50,
-'Description' : 1500
+'Summary': 50,
+'Description': 1500
 },
-'keyaliases': { },
+'keyaliases': {},
 }


 def read_config(opts, config_file='config.py'):
 """Read the repository config
@@ -120,8 +124,9 @@ def read_config(opts, config_file='config.py'):
 return config


 def test_sdk_exists(c):
-if c['sdk_path'] == None:
+if c['sdk_path'] is None:
 # c['sdk_path'] is set to the value of ANDROID_HOME by default
 logging.critical('No Android SDK found! ANDROID_HOME is not set and sdk_path is not in config.py!')
 logging.info('You can use ANDROID_HOME to set the path to your SDK, i.e.:')
@@ -138,6 +143,7 @@ def test_sdk_exists(c):
 return False
 return True


 def write_password_file(pwtype, password=None):
 '''
 writes out passwords to a protected file instead of passing passwords as
@@ -145,13 +151,14 @@ def write_password_file(pwtype, password=None):
 '''
 filename = '.fdroid.' + pwtype + '.txt'
 fd = os.open(filename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0600)
-if password == None:
+if password is None:
 os.write(fd, config[pwtype])
 else:
 os.write(fd, password)
 os.close(fd)
 config[pwtype + 'file'] = filename


 # Given the arguments in the form of multiple appid:[vc] strings, this returns
 # a dictionary with the set of vercodes specified for each package.
 def read_pkg_args(args, allow_vercodes=False):
@@ -173,6 +180,7 @@ def read_pkg_args(args, allow_vercodes=False):
 return vercodes


 # On top of what read_pkg_args does, this returns the whole app metadata, but
 # limiting the builds list to the builds matching the vercodes specified.
 def read_app_args(args, allapps, allow_vercodes=False):
@@ -211,6 +219,7 @@ def read_app_args(args, allapps, allow_vercodes=False):
 return apps


 def has_extension(filename, extension):
 name, ext = os.path.splitext(filename)
 ext = ext.lower()[1:]
@@ -218,6 +227,7 @@ def has_extension(filename, extension):
 apk_regex = None


 def apknameinfo(filename):
 global apk_regex
 filename = os.path.basename(filename)
@@ -230,12 +240,15 @@ def apknameinfo(filename):
 raise Exception("Invalid apk name: %s" % filename)
 return result


 def getapkname(app, build):
 return "%s_%s.apk" % (app['id'], build['vercode'])


 def getsrcname(app, build):
 return "%s_%s_src.tar.gz" % (app['id'], build['vercode'])


 def getappname(app):
 if app['Name']:
 return app['Name']
@@ -243,9 +256,11 @@ def getappname(app):
 return app['Auto Name']
 return app['id']


 def getcvname(app):
 return '%s (%s)' % (app['Current Version'], app['Current Version Code'])


 def getvcs(vcstype, remote, local):
 if vcstype == 'git':
 return vcs_git(remote, local)
@@ -263,12 +278,14 @@ def getvcs(vcstype, remote, local):
 return getsrclib(remote, 'build/srclib', raw=True)
 raise VCSException("Invalid vcs type " + vcstype)


 def getsrclibvcs(name):
 srclib_path = os.path.join('srclibs', name + ".txt")
 if not os.path.exists(srclib_path):
 raise VCSException("Missing srclib " + name)
 return metadata.parse_srclib(srclib_path)['Repo Type']


 class vcs:
 def __init__(self, remote, local):
@@ -354,6 +371,7 @@ class vcs:
 def getsrclib(self):
 return self.srclib


 class vcs_git(vcs):

 def repotype(self):
@@ -562,6 +580,7 @@ class vcs_gitsvn(vcs):
 return None
 return p.stdout.strip()


 class vcs_svn(vcs):

 def repotype(self):
@@ -604,6 +623,7 @@ class vcs_svn(vcs):
 return line[18:]
 return None


 class vcs_hg(vcs):

 def repotype(self):
@@ -676,6 +696,7 @@ class vcs_bzr(vcs):
 return [tag.split(' ')[0].strip() for tag in
 p.stdout.splitlines()]


 def retrieve_string(app_dir, string, xmlfiles=None):

 res_dirs = [
@@ -686,9 +707,9 @@ def retrieve_string(app_dir, string, xmlfiles=None):
 if xmlfiles is None:
 xmlfiles = []
 for res_dir in res_dirs:
-for r,d,f in os.walk(res_dir):
+for r, d, f in os.walk(res_dir):
 if r.endswith('/values'):
-xmlfiles += [os.path.join(r,x) for x in f if x.endswith('.xml')]
+xmlfiles += [os.path.join(r, x) for x in f if x.endswith('.xml')]

 string_search = None
 if string.startswith('@string/'):
@@ -704,15 +725,16 @@ def retrieve_string(app_dir, string, xmlfiles=None):
 return retrieve_string(app_dir, matches.group(1), xmlfiles)
 return None

-return string.replace("\\'","'")
+return string.replace("\\'", "'")


 # Return list of existing files that will be used to find the highest vercode
 def manifest_paths(app_dir, flavour):

-possible_manifests = [ os.path.join(app_dir, 'AndroidManifest.xml'),
+possible_manifests = [os.path.join(app_dir, 'AndroidManifest.xml'),
 os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
 os.path.join(app_dir, 'src', 'AndroidManifest.xml'),
-os.path.join(app_dir, 'build.gradle') ]
+os.path.join(app_dir, 'build.gradle')]

 if flavour:
 possible_manifests.append(
@@ -720,6 +742,7 @@ def manifest_paths(app_dir, flavour):
 return [path for path in possible_manifests if os.path.isfile(path)]


 # Retrieve the package name. Returns the name, or None if not found.
 def fetch_real_name(app_dir, flavour):
 app_search = re.compile(r'.*<application.*').search
@@ -744,6 +767,7 @@ def fetch_real_name(app_dir, flavour):
 return result
 return None


 # Retrieve the version name
 def version_name(original, app_dir, flavour):
 for f in manifest_paths(app_dir, flavour):
@@ -754,6 +778,7 @@ def version_name(original, app_dir, flavour):
 return string
 return original


 def get_library_references(root_dir):
 libraries = []
 proppath = os.path.join(root_dir, 'project.properties')
@@ -771,26 +796,29 @@ def get_library_references(root_dir):
 libraries.append(path)
 return libraries


 def ant_subprojects(root_dir):
 subprojects = get_library_references(root_dir)
 for subpath in subprojects:
 subrelpath = os.path.join(root_dir, subpath)
 for p in get_library_references(subrelpath):
-relp = os.path.normpath(os.path.join(subpath,p))
+relp = os.path.normpath(os.path.join(subpath, p))
 if relp not in subprojects:
 subprojects.insert(0, relp)
 return subprojects


 def remove_debuggable_flags(root_dir):
 # Remove forced debuggable flags
 logging.info("Removing debuggable flags")
 for root, dirs, files in os.walk(root_dir):
 if 'AndroidManifest.xml' in files:
 path = os.path.join(root, 'AndroidManifest.xml')
-p = FDroidPopen(['sed','-i', 's/android:debuggable="[^"]*"//g', path])
+p = FDroidPopen(['sed', '-i', 's/android:debuggable="[^"]*"//g', path])
 if p.returncode != 0:
 raise BuildException("Failed to remove debuggable flags of %s" % path)


 # Extract some information from the AndroidManifest.xml at the given path.
 # Returns (version, vercode, package), any or all of which might be None.
 # All values returned are strings.
@@ -856,8 +884,9 @@ def parse_androidmanifests(paths):
 return (max_version, max_vercode, max_package)


 class BuildException(Exception):
-def __init__(self, value, detail = None):
+def __init__(self, value, detail=None):
 self.value = value
 self.detail = detail
@@ -877,6 +906,7 @@ class BuildException(Exception):
 ret += "\n==== detail begin ====\n%s\n==== detail end ====" % self.detail.strip()
 return ret


 class VCSException(Exception):
 def __init__(self, value):
 self.value = value
@@ -884,6 +914,7 @@ class VCSException(Exception):
 def __str__(self):
 return repr(self.value)


 # Get the specified source library.
 # Returns the path to it. Normally this is the path to be used when referencing
 # it, which may be a subdirectory of the actual project. If you want the base
@@ -901,7 +932,7 @@ def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None,
 if ':' in name:
 number, name = name.split(':', 1)
 if '/' in name:
-name, subdir = name.split('/',1)
+name, subdir = name.split('/', 1)

 srclib_path = os.path.join('srclibs', name + ".txt")
@@ -936,7 +967,7 @@ def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None,
 if srclib["Srclibs"]:
 n = 1
-for lib in srclib["Srclibs"].replace(';',',').split(','):
+for lib in srclib["Srclibs"].replace(';', ',').split(','):
 s_tuple = None
 for t in srclibpaths:
 if t[0] == lib:
@@ -1040,9 +1071,9 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
 srclibpaths.append(basesrclib)

 # Update the local.properties file
-localprops = [ os.path.join(build_dir, 'local.properties') ]
+localprops = [os.path.join(build_dir, 'local.properties')]
 if 'subdir' in build:
-localprops += [ os.path.join(root_dir, 'local.properties') ]
+localprops += [os.path.join(root_dir, 'local.properties')]
 for path in localprops:
 if not os.path.isfile(path):
 continue
@@ -1055,7 +1086,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
 # from sdk.dir, if necessary
 if build['oldsdkloc']:
 sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props,
-re.S|re.M).group(1)
+re.S | re.M).group(1)
 props += "sdk-location=%s\n" % sdkloc
 else:
 props += "sdk.dir=%s\n" % config['sdk_path']
@@ -1083,7 +1114,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
 's@compileSdkVersion *[0-9]*@compileSdkVersion '+n+'@g',
 'build.gradle'], cwd=root_dir)
 if '@' in build['gradle']:
-gradle_dir = os.path.join(root_dir, build['gradle'].split('@',1)[1])
+gradle_dir = os.path.join(root_dir, build['gradle'].split('@', 1)[1])
 gradle_dir = os.path.normpath(gradle_dir)
 FDroidPopen(['sed', '-i',
 's@compileSdkVersion *[0-9]*@compileSdkVersion '+n+'@g',
@@ -1210,6 +1241,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=
 return (root_dir, srclibpaths)


 # Split and extend via globbing the paths from a field
 def getpaths(build_dir, build, field):
 paths = []
@@ -1222,6 +1254,7 @@ def getpaths(build_dir, build, field):
 paths += [r[len(build_dir)+1:] for r in glob.glob(full_path)]
 return paths


 # Scan the source code in the given directory (and all subdirectories)
 # and return the number of fatal problems encountered
 def scan_source(build_dir, root_dir, thisbuild):
@@ -1286,7 +1319,7 @@ def scan_source(build_dir, root_dir, thisbuild):
 return path.endswith('/%s' % dirname) or '/%s/' % dirname in path

 # Iterate through all files in the source code
-for r,d,f in os.walk(build_dir):
+for r, d, f in os.walk(build_dir):

 if any(insidedir(r, d) for d in ('.hg', '.git', '.svn', '.bzr')):
 continue
@@ -1364,7 +1397,7 @@ class KnownApks:
 self.path = os.path.join('stats', 'known_apks.txt')
 self.apks = {}
 if os.path.exists(self.path):
-for line in file( self.path):
+for line in file(self.path):
 t = line.rstrip().split(' ')
 if len(t) == 2:
 self.apks[t[0]] = (t[1], None)
@@ -1417,10 +1450,11 @@ class KnownApks:
 else:
 apps[appid] = added
 sortedapps = sorted(apps.iteritems(), key=operator.itemgetter(1))[-num:]
-lst = [app for app,_ in sortedapps]
+lst = [app for app, _ in sortedapps]
 lst.reverse()
 return lst


 def isApkDebuggable(apkfile, config):
 """Returns True if the given apk file is debuggable
@@ -1461,13 +1495,16 @@ class AsynchronousFileReader(threading.Thread):
 '''Check whether there is no more content to expect.'''
 return not self.is_alive() and self._queue.empty()


 class PopenResult:
 returncode = None
 stdout = ''


 def SilentPopen(commands, cwd=None, shell=False):
 return FDroidPopen(commands, cwd=cwd, shell=shell, output=False)


 def FDroidPopen(commands, cwd=None, shell=False, output=True):
 """
 Run a command and capture the possibly huge output.
@@ -1507,6 +1544,7 @@ def FDroidPopen(commands, cwd=None, shell=False, output=True):
 result.returncode = p.returncode
 return result


 def remove_signing_keys(build_dir):
 comment = re.compile(r'[ ]*//')
 signing_configs = re.compile(r'^[\t ]*signingConfigs[ \t]*{[ \t]*$')
@@ -1566,7 +1604,8 @@ def remove_signing_keys(build_dir):
 continue
 o.write(line)

-logging.info("Cleaned %s of keysigning configs at %s" % (propfile,path))
+logging.info("Cleaned %s of keysigning configs at %s" % (propfile, path))


 def replace_config_vars(cmd):
 cmd = cmd.replace('$$SDK$$', config['sdk_path'])
@@ -1574,6 +1613,7 @@ def replace_config_vars(cmd):
 cmd = cmd.replace('$$MVN3$$', config['mvn3'])
 return cmd


 def place_srclib(root_dir, number, libpath):
 if not number:
 return
@@ -1589,10 +1629,9 @@ def place_srclib(root_dir, number, libpath):
 placed = False
 for line in lines:
 if line.startswith('android.library.reference.%d=' % number):
-o.write('android.library.reference.%d=%s\n' % (number,relpath))
+o.write('android.library.reference.%d=%s\n' % (number, relpath))
 placed = True
 else:
 o.write(line)
 if not placed:
-o.write('android.library.reference.%d=%s\n' % (number,relpath))
+o.write('android.library.reference.%d=%s\n' % (number, relpath))

@@ -25,7 +25,9 @@ import urllib
 from optparse import OptionParser
 from ConfigParser import ConfigParser
 import logging
-import common, metadata
+import common
+import metadata


 # Get the repo type and address from the given web page. The page is scanned
 # in a rather naive manner for 'git clone xxxx', 'hg clone xxxx', etc, and
@@ -51,7 +53,7 @@ def getrepofrompage(url):
 return (repotype, repo)

 # Works for Google Code and BitBucket...
-index=page.find('git clone')
+index = page.find('git clone')
 if index != -1:
 repotype = 'git'
 repo = page[index + 10:]
@@ -63,7 +65,7 @@ def getrepofrompage(url):
 return (repotype, repo)

 # Google Code only...
-index=page.find('svn checkout')
+index = page.find('svn checkout')
 if index != -1:
 repotype = 'git-svn'
 repo = page[index + 13:]
@@ -88,6 +90,7 @@ def getrepofrompage(url):
 config = None
 options = None


 def main():

 global config, options
@@ -127,7 +130,7 @@ def main():
 projecttype = None
 issuetracker = None
 license = None
-website = url #by default, we might override it
+website = url # by default, we might override it
 if url.startswith('git://'):
 projecttype = 'git'
 repo = url
@@ -163,7 +166,7 @@ def main():
 sys.exit(1)
 elif url.startswith('http://code.google.com/p/'):
 if not url.endswith('/'):
-url += '/';
+url += '/'
 projecttype = 'googlecode'
 sourcecode = url + 'source/checkout'
 if options.repo:
@@ -305,4 +308,3 @@ def main():

 if __name__ == "__main__":
 main()

@@ -35,6 +35,7 @@ from common import FDroidPopen, BuildException
 config = {}
 options = None


 def write_to_config(key, value):
 '''write a key/value to the local config.py'''
 with open('config.py', 'r') as f:
@@ -45,6 +46,7 @@ def write_to_config(key, value):
 with open('config.py', 'w') as f:
 f.writelines(data)


 def disable_in_config(key, value):
 '''write a key/value to the local config.py, then comment it out'''
 with open('config.py', 'r') as f:
@@ -124,14 +126,14 @@ def main():
 # track down where the Android SDK is, the default is to use the path set
 # in ANDROID_HOME if that exists, otherwise None
-if options.android_home != None:
+if options.android_home is not None:
 test_config['sdk_path'] = options.android_home
 elif not common.test_sdk_exists(test_config):
 # if neither --android-home nor the default sdk_path exist, prompt the user
 default_sdk_path = '/opt/android-sdk'
 while not options.no_prompt:
 s = raw_input('Enter the path to the Android SDK (' + default_sdk_path + ') here:\n> ')
-if re.match('^\s*$', s) != None:
+if re.match('^\s*$', s) is not None:
 test_config['sdk_path'] = default_sdk_path
 else:
 test_config['sdk_path'] = s
@@ -246,7 +248,7 @@ def main():
 password = genpassword()
 write_to_config('keystorepass', password)
 write_to_config('keypass', password)
-if options.repo_keyalias == None:
+if options.repo_keyalias is None:
 repo_keyalias = socket.getfqdn()
 write_to_config('repo_keyalias', repo_keyalias)
 if not options.distinguished_name:
@@ -260,7 +262,7 @@ def main():
 logging.info(' Android SDK Build Tools:\t' + os.path.dirname(aapt))
 logging.info(' Android NDK (optional):\t' + ndk_path)
 logging.info(' Keystore for signing key:\t' + keystore)
-if repo_keyalias != None:
+if repo_keyalias is not None:
 logging.info(' Alias for key in store:\t' + repo_keyalias)
 logging.info('\nTo complete the setup, add your APKs to "' +
 os.path.join(fdroiddir, 'repo') + '"' +
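Several hunks above and below swap equality tests against None for identity tests, which is what pep8's E711 asks for. The difference matters because `==` can be overridden by a class, while `is` always tests for the None singleton itself. A tiny illustrative example, not taken from the repository:

    class AlwaysEqual(object):
        def __eq__(self, other):
            return True  # claims equality with everything, including None

    value = AlwaysEqual()
    print(value == None)  # True, misleading
    print(value is None)  # False: value is not the None singleton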

@@ -30,6 +30,7 @@ from common import FDroidPopen
 options = None
 config = None


 def devices():
 p = FDroidPopen(["adb", "devices"])
 if p.returncode != 0:
@@ -68,7 +69,7 @@ def main():
 if args:

 vercodes = common.read_pkg_args(args, True)
-apks = { appid : None for appid in vercodes }
+apks = {appid: None for appid in vercodes}

 # Get the signed apk with the highest vercode
 for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):
@@ -86,8 +87,8 @@ def main():
 else:

-apks = { common.apknameinfo(apkfile)[0] : apkfile for apkfile in
-sorted(glob.glob(os.path.join(output_dir, '*.apk'))) }
+apks = {common.apknameinfo(apkfile)[0]: apkfile for apkfile in
+sorted(glob.glob(os.path.join(output_dir, '*.apk')))}

 for appid, apk in apks.iteritems():
 # Get device list each time to avoid device not found errors
@@ -97,8 +98,8 @@ def main():
 logging.info("Installing %s..." % apk)
 for dev in devs:
 logging.info("Installing %s on %s..." % (apk, dev))
-p = FDroidPopen(["adb", "-s", dev, "install", apk ])
-fail= ""
+p = FDroidPopen(["adb", "-s", dev, "install", apk])
+fail = ""
 for line in p.stdout.splitlines():
 if line.startswith("Failure"):
 fail = line[9:-1]
@@ -115,4 +116,3 @@ def main():

 if __name__ == "__main__":
 main()

@@ -20,7 +20,8 @@
 from optparse import OptionParser
 import re
 import logging
-import common, metadata
+import common
+import metadata

 config = None
 options = None
@@ -118,6 +119,7 @@ regex_pedantic = {
 ],
 }


 def main():

 global config, options, appid, app_count, warn_count
@@ -171,7 +173,7 @@ def main():
 lastcommit, app['Update Check Mode']))

 # No proper license
-if app['License'] in ('Unknown','None',''):
+if app['License'] in ('Unknown', 'None', ''):
 warn("License was not set")

 # Summary size limit
@@ -240,4 +242,3 @@ def main():

 if __name__ == "__main__":
 main()

@@ -17,10 +17,13 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import os, re, glob
+import os
+import re
+import glob
 import cgi
 import logging


 class MetaDataException(Exception):
 def __init__(self, value):
 self.value = value
@@ -120,67 +123,68 @@ class FieldType():

 # Generic value types
 valuetypes = {
-'int' : FieldType("Integer",
+'int': FieldType("Integer",
 r'^[1-9][0-9]*$', None,
-[ 'FlattrID' ],
-[ 'vercode' ]),
+['FlattrID'],
+['vercode']),

-'http' : FieldType("HTTP link",
+'http': FieldType("HTTP link",
 r'^http[s]?://', None,
-[ "Web Site", "Source Code", "Issue Tracker", "Donate" ], []),
+["Web Site", "Source Code", "Issue Tracker", "Donate"], []),

-'bitcoin' : FieldType("Bitcoin address",
+'bitcoin': FieldType("Bitcoin address",
 r'^[a-zA-Z0-9]{27,34}$', None,
-[ "Bitcoin" ],
-[ ]),
+["Bitcoin"],
+[]),

-'litecoin' : FieldType("Litecoin address",
+'litecoin': FieldType("Litecoin address",
 r'^L[a-zA-Z0-9]{33}$', None,
-[ "Litecoin" ],
-[ ]),
+["Litecoin"],
+[]),

-'dogecoin' : FieldType("Dogecoin address",
+'dogecoin': FieldType("Dogecoin address",
 r'^D[a-zA-Z0-9]{33}$', None,
-[ "Dogecoin" ],
-[ ]),
+["Dogecoin"],
+[]),

-'Bool' : FieldType("Boolean",
+'Bool': FieldType("Boolean",
 ['Yes', 'No'], None,
-[ "Requires Root" ],
-[ ]),
+["Requires Root"],
+[]),

-'bool' : FieldType("Boolean",
+'bool': FieldType("Boolean",
 ['yes', 'no'], None,
-[ ],
-[ 'submodules', 'oldsdkloc', 'forceversion', 'forcevercode',
-'novcheck' ]),
+[],
+['submodules', 'oldsdkloc', 'forceversion', 'forcevercode',
+'novcheck']),

-'Repo Type' : FieldType("Repo Type",
-[ 'git', 'git-svn', 'svn', 'hg', 'bzr', 'srclib' ], None,
-[ "Repo Type" ],
-[ ]),
+'Repo Type': FieldType("Repo Type",
+['git', 'git-svn', 'svn', 'hg', 'bzr', 'srclib'], None,
+["Repo Type"],
+[]),

-'archive' : FieldType("Archive Policy",
+'archive': FieldType("Archive Policy",
 r'^[0-9]+ versions$', None,
-[ "Archive Policy" ],
-[ ]),
+["Archive Policy"],
+[]),

-'antifeatures' : FieldType("Anti-Feature",
-[ "Ads", "Tracking", "NonFreeNet", "NonFreeDep", "NonFreeAdd", "UpstreamNonFree" ], ',',
-[ "AntiFeatures" ],
-[ ]),
+'antifeatures': FieldType("Anti-Feature",
+["Ads", "Tracking", "NonFreeNet", "NonFreeDep", "NonFreeAdd", "UpstreamNonFree"], ',',
+["AntiFeatures"],
+[]),

-'autoupdatemodes' : FieldType("Auto Update Mode",
+'autoupdatemodes': FieldType("Auto Update Mode",
 r"^(Version .+|None)$", None,
-[ "Auto Update Mode" ],
-[ ]),
+["Auto Update Mode"],
+[]),

-'updatecheckmodes' : FieldType("Update Check Mode",
+'updatecheckmodes': FieldType("Update Check Mode",
 r"^(Tags|Tags .+|RepoManifest|RepoManifest/.+|RepoTrunk|HTTP|Static|None)$", None,
-[ "Update Check Mode" ],
-[ ])
+["Update Check Mode"],
+[])
 }


 # Check an app's metadata information for integrity errors
 def check_metadata(info):
 for k, t in valuetypes.iteritems():
@@ -198,6 +202,7 @@ def check_metadata(info):
 elif k == 'bool':
 build[attr] = False


 # Formatter for descriptions. Create an instance, and call parseline() with
 # each line of the description source from the metadata. At the end, call
 # end() and then text_plain, text_wiki and text_html will contain the result.
@@ -213,8 +218,10 @@ class DescriptionFormatter:
 text_wiki = ''
 text_html = ''
 linkResolver = None

 def __init__(self, linkres):
 self.linkResolver = linkres

 def endcur(self, notstates=None):
 if notstates and self.state in notstates:
 return
@@ -224,13 +231,16 @@ class DescriptionFormatter:
 self.endul()
 elif self.state == self.stOL:
 self.endol()

 def endpara(self):
 self.text_plain += '\n'
 self.text_html += '</p>'
 self.state = self.stNONE

 def endul(self):
 self.text_html += '</ul>'
 self.state = self.stNONE

 def endol(self):
 self.text_html += '</ol>'
 self.state = self.stNONE
@@ -262,7 +272,6 @@ class DescriptionFormatter:
 self.ital = not self.ital
 txt = txt[2:]


 def linkify(self, txt):
 linkified_plain = ''
 linkified_html = ''
@@ -326,7 +335,7 @@ class DescriptionFormatter:
 self.text_html += '<ol>'
 self.state = self.stOL
 self.text_html += '<li>'
-self.text_plain += '* ' #TODO: lazy - put the numbers in!
+self.text_plain += '* ' # TODO: lazy - put the numbers in!
 self.addtext(line[1:])
 self.text_html += '</li>'
 else:
@@ -342,6 +351,7 @@ class DescriptionFormatter:
 def end(self):
 self.endcur()


 # Parse multiple lines of description as written in a metadata file, returning
 # a single string in plain text format.
 def description_plain(lines, linkres):
@@ -351,6 +361,7 @@ def description_plain(lines, linkres):
 ps.end()
 return ps.text_plain


 # Parse multiple lines of description as written in a metadata file, returning
 # a single string in wiki format. Used for the Maintainer Notes field as well,
 # because it's the same format.
@@ -361,15 +372,17 @@ def description_wiki(lines):
 ps.end()
 return ps.text_wiki


 # Parse multiple lines of description as written in a metadata file, returning
 # a single string in HTML format.
-def description_html(lines,linkres):
+def description_html(lines, linkres):
 ps = DescriptionFormatter(linkres)
 for line in lines:
 ps.parseline(line)
 ps.end()
 return ps.text_html


 def parse_srclib(metafile, **kw):

 thisinfo = {}
@ -394,7 +407,7 @@ def parse_srclib(metafile, **kw):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
field, value = line.split(':',1)
|
field, value = line.split(':', 1)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise MetaDataException("Invalid metadata in %s:%d" % (line, n))
|
raise MetaDataException("Invalid metadata in %s:%d" % (line, n))
|
||||||
|
|
||||||
|
|
@ -405,6 +418,7 @@ def parse_srclib(metafile, **kw):
|
||||||
|
|
||||||
return thisinfo
|
return thisinfo
|
||||||
|
|
||||||
|
|
||||||
# Read all metadata. Returns a list of 'app' objects (which are dictionaries as
|
# Read all metadata. Returns a list of 'app' objects (which are dictionaries as
|
||||||
# returned by the parse_metadata function.
|
# returned by the parse_metadata function.
|
||||||
def read_metadata(xref=True, package=None, store=True):
|
def read_metadata(xref=True, package=None, store=True):
|
||||||
|
|
@ -437,6 +451,7 @@ def read_metadata(xref=True, package=None, store=True):
|
||||||
|
|
||||||
return apps
|
return apps
|
||||||
|
|
||||||
|
|
||||||
# Get the type expected for a given metadata field.
|
# Get the type expected for a given metadata field.
|
||||||
def metafieldtype(name):
|
def metafieldtype(name):
|
||||||
if name in ['Description', 'Maintainer Notes']:
|
if name in ['Description', 'Maintainer Notes']:
|
||||||
|
|
@ -453,6 +468,7 @@ def metafieldtype(name):
|
||||||
return 'unknown'
|
return 'unknown'
|
||||||
return 'string'
|
return 'string'
|
||||||
|
|
||||||
|
|
||||||
def flagtype(name):
|
def flagtype(name):
|
||||||
if name in ['extlibs', 'srclibs', 'patch', 'rm', 'buildjni',
|
if name in ['extlibs', 'srclibs', 'patch', 'rm', 'buildjni',
|
||||||
'update', 'scanignore', 'scandelete']:
|
'update', 'scanignore', 'scandelete']:
|
||||||
|
|
@ -461,6 +477,7 @@ def flagtype(name):
|
||||||
return 'script'
|
return 'script'
|
||||||
return 'string'
|
return 'string'
|
||||||
|
|
||||||
|
|
||||||
# Parse metadata for a single application.
|
# Parse metadata for a single application.
|
||||||
#
|
#
|
||||||
# 'metafile' - the filename to read. The package id for the application comes
|
# 'metafile' - the filename to read. The package id for the application comes
|
||||||
|
|
@ -507,7 +524,7 @@ def parse_metadata(metafile):
|
||||||
t = flagtype(pk)
|
t = flagtype(pk)
|
||||||
if t == 'list':
|
if t == 'list':
|
||||||
# Port legacy ';' separators
|
# Port legacy ';' separators
|
||||||
thisbuild[pk] = [v.strip() for v in pv.replace(';',',').split(',')]
|
thisbuild[pk] = [v.strip() for v in pv.replace(';', ',').split(',')]
|
||||||
elif t == 'string':
|
elif t == 'string':
|
||||||
thisbuild[pk] = pv
|
thisbuild[pk] = pv
|
||||||
elif t == 'script':
|
elif t == 'script':
|
||||||
|
|
@ -610,7 +627,7 @@ def parse_metadata(metafile):
|
||||||
curcomments.append(line)
|
curcomments.append(line)
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
field, value = line.split(':',1)
|
field, value = line.split(':', 1)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise MetaDataException("Invalid metadata in "+linedesc)
|
raise MetaDataException("Invalid metadata in "+linedesc)
|
||||||
if field != field.strip() or value != value.strip():
|
if field != field.strip() or value != value.strip():
|
||||||
|
|
@ -633,7 +650,7 @@ def parse_metadata(metafile):
        elif fieldtype == 'string':
            thisinfo[field] = value
        elif fieldtype == 'list':
-            thisinfo[field] = [v.strip() for v in value.replace(';',',').split(',')]
+            thisinfo[field] = [v.strip() for v in value.replace(';', ',').split(',')]
        elif fieldtype == 'build':
            if value.endswith("\\"):
                mode = 2
@ -687,6 +704,7 @@ def parse_metadata(metafile):

    return thisinfo

+
# Write a metadata file.
#
# 'dest' - The path to the output file
@ -753,7 +771,7 @@ def write_metadata(dest, app):
        mf.write('\n')
    for build in app['builds']:
        writecomments('build:' + build['version'])
-        mf.write("Build:%s,%s\n" % ( build['version'], build['vercode']))
+        mf.write("Build:%s,%s\n" % (build['version'], build['vercode']))

        def write_builditem(key, value):
            if key in ['version', 'vercode', 'origlines', 'type']:
@ -786,7 +804,6 @@ def write_metadata(dest, app):
        mf.write('.\n')
        mf.write('\n')

-
    if app['Archive Policy']:
        writefield('Archive Policy')
    writefield('Auto Update Mode')
@ -804,5 +821,3 @@ def write_metadata(dest, app):
        mf.write('\n')
    writecomments(None)
    mf.close()
-
-

@ -26,12 +26,14 @@ import glob
from optparse import OptionParser
import logging

-import common, metadata
+import common
+import metadata
from common import FDroidPopen, BuildException

config = None
options = None

+
def main():

    global config, options
@ -128,7 +130,7 @@ def main():
        p = FDroidPopen(['keytool', '-list',
            '-alias', keyalias, '-keystore', config['keystore'],
            '-storepass:file', config['keystorepassfile']])
-        if p.returncode !=0:
+        if p.returncode != 0:
            logging.info("Key does not exist - generating...")
            p = FDroidPopen(['keytool', '-genkey',
                '-keystore', config['keystore'], '-alias', keyalias,
@ -152,7 +154,7 @@ def main():
            raise BuildException("Failed to sign application")

        # Zipalign it...
-        p = FDroidPopen([os.path.join(config['sdk_path'],'tools','zipalign'),
+        p = FDroidPopen([os.path.join(config['sdk_path'], 'tools', 'zipalign'),
            '-v', '4', apkfile,
            os.path.join(output_dir, apkfilename)])
        if p.returncode != 0:
@ -170,4 +172,3 @@ def main():

if __name__ == "__main__":
    main()
-

@ -17,7 +17,9 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

-import os, metadata
+import os
+import metadata

+
def main():

@ -28,4 +30,3 @@ def main():

if __name__ == "__main__":
    main()
-

@ -20,11 +20,13 @@
import os
from optparse import OptionParser
import logging
-import common, metadata
+import common
+import metadata

config = None
options = None

+
def main():

    global config, options
@ -51,4 +53,3 @@ def main():

if __name__ == "__main__":
    main()
-

@ -22,13 +22,15 @@ import traceback
from optparse import OptionParser
import logging

-import common, metadata
+import common
+import metadata
from common import BuildException
from common import VCSException

config = None
options = None

+
def main():

    global config, options
@ -113,4 +115,3 @@ def main():

if __name__ == "__main__":
    main()
-

@ -28,6 +28,7 @@ import common
config = None
options = None

+
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
@ -91,7 +92,7 @@ def update_awsbucket(repo_section):
            if upload:
                if options.verbose:
                    logging.info(' uploading "' + file_to_upload + '"...')
-                extra = { 'acl': 'public-read' }
+                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
@ -113,6 +114,7 @@ def update_awsbucket(repo_section):
            else:
                logging.info(' skipping ' + s3url)

+
def update_serverwebroot(repo_section):
    rsyncargs = ['rsync', '-u', '-r', '--delete']
    if options.verbose:
@ -133,6 +135,7 @@ def update_serverwebroot(repo_section):
                    [indexjar, config['serverwebroot'] + repo_section]) != 0:
            sys.exit(1)

+
def main():
    global config, options

@ -154,7 +157,7 @@ def main():
        logging.critical("The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

-    if config.get('nonstandardwebroot') == True:
+    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

@ -32,29 +32,36 @@ import time
from PIL import Image
import logging

-import common, metadata
+import common
+import metadata
from common import FDroidPopen
from metadata import MetaDataException

+
def get_densities():
    return ['640', '480', '320', '240', '160', '120']

+
def dpi_to_px(density):
    return (int(density) * 48) / 160

+
def px_to_dpi(px):
    return (int(px) * 160) / 48

+
def get_icon_dir(repodir, density):
    if density is None:
        return os.path.join(repodir, "icons")
    return os.path.join(repodir, "icons-%s" % density)

+
def get_icon_dirs(repodir):
    for density in get_densities():
        yield get_icon_dir(repodir, density)
    yield os.path.join(repodir, "icons")

+
def update_wiki(apps, apks):
    """Update the wiki

@ -77,7 +84,7 @@ def update_wiki(apps, apks):
        if app['AntiFeatures']:
            for af in app['AntiFeatures'].split(','):
                wikidata += '{{AntiFeature|' + af + '}}\n'
-        wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n'%(
+        wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n' % (
            app['id'],
            app['Name'],
            time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
@ -256,6 +263,7 @@ def update_wiki(apps, apks):
    # Purge server cache to ensure counts are up to date
    site.pages['Repository Maintenance'].purge()

+
def delete_disabled_builds(apps, apkcache, repodirs):
    """Delete disabled build outputs.

@ -277,6 +285,7 @@ def delete_disabled_builds(apps, apkcache, repodirs):
                if apkfilename in apkcache:
                    del apkcache[apkfilename]

+
def resize_icon(iconpath, density):

    if not os.path.isfile(iconpath):
@ -296,9 +305,10 @@ def resize_icon(iconpath, density):
        else:
            logging.info("%s is small enough: %s" % im.size)

-    except Exception,e:
+    except Exception, e:
        logging.error("Failed resizing {0} - {1}".format(iconpath, e))

+
def resize_all_icons(repodirs):
    """Resize all icons that exceed the max size

@ -311,6 +321,7 @@ def resize_all_icons(repodirs):
        for iconpath in glob.glob(icon_glob):
            resize_icon(iconpath, density)

+
def scan_apks(apps, apkcache, repodir, knownapks):
    """Scan the apks in the given repo directory.

@ -503,7 +514,7 @@ def scan_apks(apps, apkcache, repodir, knownapks):
                            os.path.join(get_icon_dir(repodir, density), iconfilename))
                        empty_densities.remove(density)
                        break
-                    except Exception,e:
+                    except Exception, e:
                        logging.warn("Failed reading {0} - {1}".format(iconpath, e))

            if thisinfo['icons']:
@ -581,6 +592,7 @@ def scan_apks(apps, apkcache, repodir, knownapks):

repo_pubkey_fingerprint = None

+
def make_index(apps, apks, repodir, archive, categories):
    """Make a repo index.

@ -598,6 +610,7 @@ def make_index(apps, apks, repodir, archive, categories):
        el = doc.createElement(name)
        el.appendChild(doc.createTextNode(value))
        parent.appendChild(el)
+
    def addElementCDATA(name, value, doc, parent):
        el = doc.createElement(name)
        el.appendChild(doc.createCDATASection(value))
@ -684,6 +697,7 @@ def make_index(apps, apks, repodir, archive, categories):
        addElement('summary', app['Summary'], doc, apel)
        if app['icon']:
            addElement('icon', app['icon'], doc, apel)
+
        def linkres(link):
            for app in apps:
                if app['id'] == link:
@ -813,7 +827,7 @@ def make_index(apps, apks, repodir, archive, categories):
        sys.exit(1)

    # Copy the repo icon into the repo directory...
-    icon_dir = os.path.join(repodir ,'icons')
+    icon_dir = os.path.join(repodir, 'icons')
    iconfilename = os.path.join(icon_dir, os.path.basename(config['repo_icon']))
    shutil.copyfile(config['repo_icon'], iconfilename)

@ -826,7 +840,6 @@ def make_index(apps, apks, repodir, archive, categories):
    f.close()

-

def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):

    for app in apps:
@ -860,6 +873,7 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
config = None
options = None

+
def main():

    global config, options
@ -879,7 +893,7 @@ def main():
    parser.add_option("-I", "--icons", action="store_true", default=False,
                      help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
-                      help="Specify editor to use in interactive mode. Default "+
+                      help="Specify editor to use in interactive mode. Default " +
                      "is /etc/alternatives/editor")
    parser.add_option("-w", "--wiki", default=False, action="store_true",
                      help="Update the wiki")
@ -1055,4 +1069,3 @@ def main():

if __name__ == "__main__":
    main()
-

@ -31,6 +31,7 @@ from common import FDroidPopen
options = None
config = None

+
def main():

    global options, config
@ -116,5 +117,3 @@ def main():

if __name__ == "__main__":
    main()
-
-