Mirror of https://github.com/f-droid/fdroidserver.git (synced 2025-11-05 15:00:30 +03:00)

Commit 00abc9527d (parent 4e5b4fa77c): All callable scripts now implement main()
7 changed files with 970 additions and 936 deletions
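Every script touched by this commit gets the same treatment: the code that used to run at module level is wrapped in a main() function and a standard entry-point guard is appended, so the modules can be imported without side effects. A minimal sketch of the pattern (illustrative only; the option and the printed text are placeholders, not taken from the repository):

from optparse import OptionParser

def main():
    # Read configuration and parse the command line, as each script does...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False)
    (options, args) = parser.parse_args()
    # ...the script's former module-level work goes here...
    print "verbose mode:", options.verbose

if __name__ == "__main__":
    main()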

@@ -27,9 +27,6 @@ from optparse import OptionParser
 import HTMLParser
 import common
 
-#Read configuration...
-execfile('config.py')
-
 
 # Check for a new version by looking at the Google market.
 # Returns (None, "a message") if this didn't work, or (version, vercode) for

@@ -66,48 +63,55 @@ def check_market(app):

After this commit the hunk reads (the body is the previous module-level code, now indented into main()):

def main():

    #Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Build only the specified package")
    (options, args) = parser.parse_args()

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    html_parser = HTMLParser.HTMLParser()

    for app in apps:

        if options.package and options.package != app['id']:
            # Silent skip...
            pass
        else:
            print "Processing " + app['id'] + '...'

            mode = app['Update Check Mode']
            if mode == 'Market':
                (version, vercode) = check_market(app)
            elif mode == 'None':
                version = None
                vercode = 'Checking disabled'
            else:
                version = None
                vercode = 'Invalid update check method'

            if not version:
                print "..." + vercode
            elif vercode == app['Current Version Code'] and version == app['Current Version']:
                print "...up to date"
            else:
                print '...updating to version:' + version + ' vercode:' + vercode
                app['Current Version'] = version
                app['Current Version Code'] = vercode
                metafile = os.path.join('metadata', app['id'] + '.txt')
                common.write_metadata(metafile, app)

    print "Finished."

if __name__ == "__main__":
    main()

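The comment above documents check_market()'s return contract: on failure the first element is None and the second element carries a human-readable message; otherwise the pair is (version, vercode). A small illustration of how a caller tells the two cases apart (the describe() helper and the sample tuples are invented for the example):

def describe(result):
    (version, vercode) = result
    if not version:
        # failure: the second element is the message, not a version code
        return "..." + vercode
    return "...version " + version + " vercode " + vercode

print describe((None, "Failed to fetch market page"))
print describe(('1.2', '12'))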

import.py (386 changed lines)

@@ -25,219 +25,225 @@ import re

After this commit the hunk reads (the body is the previous module-level code, now indented into main()):

import urllib
from optparse import OptionParser

def main():

    # Read configuration...
    repo_name = None
    repo_description = None
    repo_icon = None
    repo_url = None
    execfile('config.py')

    import common

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-u", "--url", default=None,
                      help="Project URL to import from.")
    parser.add_option("-s", "--subdir", default=None,
                      help="Path to main android project subdirectory, if not in root.")
    (options, args) = parser.parse_args()

    if not options.url:
        print "Specify project url."
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = common.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    if url.startswith('https://github.com'):
        projecttype = 'github'
        repo = url + '.git'
        repotype = 'git'
        sourcecode = url
    elif url.startswith('http://code.google.com/p/'):
        if not url.endswith('/'):
            print "Expected format for googlecode url is http://code.google.com/p/PROJECT/"
            sys.exit(1)
        projecttype = 'googlecode'
        sourcecode = url + 'source/checkout'
        issuetracker = url + 'issues/list'

        # Figure out the repo type and adddress...
        req = urllib.urlopen(sourcecode)
        if req.getcode() != 200:
            print 'Unable to find source at ' + sourcecode + ' - return code ' + str(req.getcode())
            sys.exit(1)
        page = req.read()
        repotype = None
        index = page.find('hg clone')
        if index != -1:
            repotype = 'hg'
            repo = page[index + 9:]
            index = repo.find('<')
            if index == -1:
                print "Error while getting repo address"
                sys.exit(1)
            repo = repo[:index]
        if not repotype:
            index=page.find('git clone')
            if index != -1:
                repotype = 'git'
                repo = page[index + 10:]
                index = repo.find('<')
                if index == -1:
                    print "Error while getting repo address"
                    sys.exit(1)
                repo = repo[:index]
        if not repotype:
            index=page.find('svn checkout')
            if index != -1:
                repotype = 'git-svn'
                repo = page[index + 13:]
                prefix = '<strong><em>http</em></strong>'
                if not repo.startswith(prefix):
                    print "Unexpected checkout instructions format"
                    sys.exit(1)
                repo = 'http' + repo[len(prefix):]
                index = repo.find('<')
                if index == -1:
                    print "Error while getting repo address - no end tag? '" + repo + "'"
                    sys.exit(1)
                repo = repo[:index]
                index = repo.find(' ')
                if index == -1:
                    print "Error while getting repo address - no space? '" + repo + "'"
                    sys.exit(1)
                repo = repo[:index]
        if not repotype:
            print "Unable to determine vcs type"
            sys.exit(1)

        # Figure out the license...
        req = urllib.urlopen(url)
        if req.getcode() != 200:
            print 'Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode())
            sys.exit(1)
        page = req.read()
        index = page.find('Code license')
        if index == -1:
            print "Couldn't find license data"
            sys.exit(1)
        ltext = page[index:]
        lprefix = 'rel="nofollow">'
        index = ltext.find(lprefix)
        if index == -1:
            print "Couldn't find license text"
            sys.exit(1)
        ltext = ltext[index + len(lprefix):]
        index = ltext.find('<')
        if index == -1:
            print "License text not formatted as expected"
            sys.exit(1)
        ltext = ltext[:index]
        if ltext == 'GNU GPL v3':
            license = 'GPLv3'
        elif ltext == 'GNU GPL v2':
            license = 'GPLv2'
        elif ltext == 'Apache License 2.0':
            license = 'Apache2'
        else:
            print "License " + ltext + " is not recognised"
            sys.exit(1)

    if not projecttype:
        print "Unable to determine the project type."
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    print 'Getting source from ' + repotype + ' repo at ' + repo
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(None)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Check AndroidManiifest.xml exists...
    manifest = os.path.join(root_dir, 'AndroidManifest.xml')
    if not os.path.exists(manifest):
        print "AndroidManifest.xml did not exist in the expected location. Specify --subdir?"
        sys.exit(1)

    # Extract some information...
    vcsearch = re.compile(r'.*android:versionCode="([^"]+)".*').search
    vnsearch = re.compile(r'.*android:versionName="([^"]+)".*').search
    psearch = re.compile(r'.*package="([^"]+)".*').search
    version = None
    vercode = None
    package = None
    for line in file(manifest):
        if not package:
            matches = psearch(line)
            if matches:
                package = matches.group(1)
        if not version:
            matches = vnsearch(line)
            if matches:
                version = matches.group(1)
        if not vercode:
            matches = vcsearch(line)
            if matches:
                vercode = matches.group(1)

    if not package:
        print "Couldn't find package ID"
        sys.exit(1)
    if not version:
        print "Couldn't find latest version name"
        sys.exit(1)
    if not vercode:
        print "Couldn't find latest version code"
        sys.exit(1)

    # Make sure it's actually new...
    for app in apps:
        if app['id'] == package:
            print "Package " + package + " already exists"
            sys.exit(1)

    # Construct the metadata...
    app = common.parse_metadata(None)
    app['id'] = package
    app['Web Site'] = url
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo

    # Create a build line...
    build = {}
    build['version'] = version
    build['vercode'] = vercode
    build['commit'] = '?'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = 'yes'
    app['builds'].append(build)
    app['comments'].append(('build:' + version,
        "#Generated by import.py - check this is the right version, and find the right commit!"))

    metafile = os.path.join('metadata', package + '.txt')
    common.write_metadata(metafile, app)
    print "Wrote " + metafile


if __name__ == "__main__":
    main()

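import.py pulls the package id, version name and version code straight out of AndroidManifest.xml with the three regexes above. A small, self-contained illustration of that extraction (the manifest line is a made-up example, not from a real project):

import re

vcsearch = re.compile(r'.*android:versionCode="([^"]+)".*').search
vnsearch = re.compile(r'.*android:versionName="([^"]+)".*').search
psearch = re.compile(r'.*package="([^"]+)".*').search

line = '<manifest package="org.example.app" android:versionCode="42" android:versionName="1.3.0">'
print psearch(line).group(1)    # org.example.app
print vcsearch(line).group(1)   # 42
print vnsearch(line).group(1)   # 1.3.0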

publish.py (174 changed lines)

@@ -31,106 +31,112 @@ from optparse import OptionParser

After this commit the hunk reads (the body is the previous module-level code, now indented into main()):

import common
from common import BuildException

def main():

    #Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Publish only the specified package")
    (options, args) = parser.parse_args()

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        print "Creating log directory"
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        print "Creating output directory"
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        print "No unsigned directory - nothing to do"
        sys.exit(0)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        i = apkfilename.rfind('_')
        if i == -1:
            raise BuildException("Invalid apk name")
        appid = apkfilename[:i]
        print "Processing " + appid

        if not options.package or options.package == appid:

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new alogrithm, AND rename all existing keys
            # in the keystore!
            if keyaliases.has_key(appid):
                # For this particular app, the key alias is overridden...
                keyalias = keyaliases[appid]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            print "Key alias: " + keyalias

            # See if we already have a key for this application, and
            # if not generate one...
            p = subprocess.Popen(['keytool', '-list',
                '-alias', keyalias, '-keystore', keystore,
                '-storepass', keystorepass], stdout=subprocess.PIPE)
            output = p.communicate()[0]
            if p.returncode !=0:
                print "Key does not exist - generating..."
                p = subprocess.Popen(['keytool', '-genkey',
                    '-keystore', keystore, '-alias', keyalias,
                    '-keyalg', 'RSA', '-keysize', '2048',
                    '-validity', '10000',
                    '-storepass', keystorepass, '-keypass', keypass,
                    '-dname', keydname], stdout=subprocess.PIPE)
                output = p.communicate()[0]
                print output
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = subprocess.Popen(['jarsigner', '-keystore', keystore,
                '-storepass', keystorepass, '-keypass', keypass,
                apkfile, keyalias], stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = subprocess.Popen([os.path.join(sdk_path,'tools','zipalign'),
                '-v', '4', apkfile,
                os.path.join(output_dir, apkfilename)],
                stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

            # Move the source tarball into the output directory...
            tarfilename = apkfilename[:-4] + '_src.tar.gz'
            shutil.move(os.path.join(unsigned_dir, tarfilename),
                os.path.join(output_dir, tarfilename))

            print 'Published ' + apkfilename


if __name__ == "__main__":
    main()

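As the comment in the publishing loop explains, the per-app signing key alias is the first 8 hex characters of the MD5 of the application id, since keytool only treats the first 8 characters of an alias as significant. A worked example of that derivation, using a made-up application id (publish.py itself uses the older md5 module; hashlib produces the same digest):

import hashlib

appid = 'org.example.app'
keyalias = hashlib.md5(appid).hexdigest()[:8]
print "Key alias: " + keyalias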

@@ -27,22 +27,26 @@ from optparse import OptionParser

After this commit the hunk reads (the body is the previous module-level code, now indented into main()):

import HTMLParser
import common

def main():

    #Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    (options, args) = parser.parse_args()

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    for app in apps:
        print "Writing " + app['id']
        common.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)

    print "Finished."

if __name__ == "__main__":
    main()


scanner.py (119 changed lines)

@@ -31,80 +31,85 @@ import common

After this commit the hunk reads (the body is the previous module-level code, now indented into main()):

from common import BuildException
from common import VCSException

def main():

    # Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Scan only the specified package")
    (options, args) = parser.parse_args()

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    html_parser = HTMLParser.HTMLParser()

    problems = []

    extlib_dir = os.path.join('build', 'extlib')

    for app in apps:

        skip = False
        if options.package and app['id'] != options.package:
            skip = True
        elif app['Disabled']:
            print "Skipping %s: disabled" % app['id']
            skip = True
        elif not app['builds']:
            print "Skipping %s: no builds specified" % app['id']
            skip = True

        if not skip:

            print "Processing " + app['id']

            try:

                build_dir = 'build/' + app['id']

                # Set up vcs interface and make sure we have the latest code...
                vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)

                for thisbuild in app['builds']:

                    if thisbuild['commit'].startswith('!'):
                        print ("..skipping version " + thisbuild['version'] + " - " +
                               thisbuild['commit'][1:])
                    else:
                        print "..scanning version " + thisbuild['version']

                        # Prepare the source code...
                        root_dir = common.prepare_source(vcs, app, thisbuild,
                                build_dir, extlib_dir, sdk_path, ndk_path, javacc_path)

                        # Do the scan...
                        buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
                        for problem in buildprobs:
                            problems.append(problem +
                                    ' in ' + app['id'] + ' ' + thisbuild['version'])

            except BuildException as be:
                msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
                problems.append(msg)
            except VCSException as vcse:
                msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
                problems.append(msg)
            except Exception:
                msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
                problems.append(msg)

    print "Finished:"
    for problem in problems:
        print problem
    print str(len(problems)) + ' problems.'

if __name__ == "__main__":
    main()

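scanner.py skips any build whose commit field starts with '!': the rest of the field is treated as the reason that version cannot be built. A tiny illustration of that convention (the build dictionary is invented for the example):

thisbuild = {'version': '1.4', 'commit': '!source tarball for this release is missing'}

if thisbuild['commit'].startswith('!'):
    print ("..skipping version " + thisbuild['version'] + " - " +
           thisbuild['commit'][1:])
else:
    print "..scanning version " + thisbuild['version']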

update.py (897 changed lines)

@@ -29,491 +29,496 @@ from xml.dom.minidom import Document

After this commit the hunk reads (the body is the previous module-level code, now indented into main(); the mirror view is cut off partway through the hunk):

from optparse import OptionParser
import time

def main():

    # Read configuration...
    repo_name = None
    repo_description = None
    repo_icon = None
    repo_url = None
    execfile('config.py')

    import common

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c", "--createmeta", action="store_true", default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="No output, except for warnings and errors")
    parser.add_option("-b", "--buildreport", action="store_true", default=False,
                      help="Report on build data status")
    parser.add_option("-i", "--interactive", default=False, action="store_true",
                      help="Interactively ask about things that need updating.")
    parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
                      help="Specify editor to use in interactive mode. Default "+
                      "is /etc/alternatives/editor")
    parser.add_option("", "--pretty", action="store_true", default=False,
                      help="Produce human-readable index.xml")
    (options, args) = parser.parse_args()

    icon_dir=os.path.join('repo','icons')

    # Delete and re-create the icon directory...
    if os.path.exists(icon_dir):
        shutil.rmtree(icon_dir)
    os.mkdir(icon_dir)

    warnings = 0

    # Make sure we have the repository description...
    if (repo_url is None or repo_name is None or
            repo_icon is None or repo_description is None):
        print "Repository description fields are required in config.py"
        print "See config.sample.py for details"
        sys.exit(1)

    # Get all apps...
    apps = common.read_metadata(verbose=options.verbose)

    # Generate a list of categories...
    categories = []
    for app in apps:
        if app['Category'] not in categories:
            categories.append(app['Category'])

    # Gather information about all the apk files in the repo directory...
    apks = []
    for apkfile in glob.glob(os.path.join('repo','*.apk')):

        apkfilename = apkfile[5:]
        if apkfilename.find(' ') != -1:
            print "No spaces in APK filenames!"
            sys.exit(1)
        srcfilename = apkfilename[:-4] + "_src.tar.gz"

        if not options.quiet:
            print "Processing " + apkfilename
        thisinfo = {}
        thisinfo['apkname'] = apkfilename
        if os.path.exists(os.path.join('repo', srcfilename)):
            thisinfo['srcname'] = srcfilename
        thisinfo['size'] = os.path.getsize(apkfile)
        thisinfo['permissions'] = []
        thisinfo['features'] = []
        p = subprocess.Popen([os.path.join(sdk_path, 'platform-tools', 'aapt'),
                              'dump', 'badging', apkfile],
                             stdout=subprocess.PIPE)
        output = p.communicate()[0]
        if options.verbose:
            print output
        if p.returncode != 0:
            print "ERROR: Failed to get apk information"
            sys.exit(1)
        for line in output.splitlines():
            if line.startswith("package:"):
                pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
                thisinfo['id'] = re.match(pat, line).group(1)
                pat = re.compile(".*versionCode='([0-9]*)'.*")
                thisinfo['versioncode'] = int(re.match(pat, line).group(1))
                pat = re.compile(".*versionName='([^']*)'.*")
                thisinfo['version'] = re.match(pat, line).group(1)
            if line.startswith("application:"):
                pat = re.compile(".*label='([^']*)'.*")
                thisinfo['name'] = re.match(pat, line).group(1)
                pat = re.compile(".*icon='([^']*)'.*")
                thisinfo['iconsrc'] = re.match(pat, line).group(1)
            if line.startswith("sdkVersion:"):
                pat = re.compile(".*'([0-9]*)'.*")
                thisinfo['sdkversion'] = re.match(pat, line).group(1)
            if line.startswith("native-code:"):
                pat = re.compile(".*'([^']*)'.*")
                thisinfo['nativecode'] = re.match(pat, line).group(1)
            if line.startswith("uses-permission:"):
                pat = re.compile(".*'([^']*)'.*")
                perm = re.match(pat, line).group(1)
                if perm.startswith("android.permission."):
                    perm = perm[19:]
                thisinfo['permissions'].append(perm)
            if line.startswith("uses-feature:"):
                pat = re.compile(".*'([^']*)'.*")
                perm = re.match(pat, line).group(1)
                #Filter out this, it's only added with the latest SDK tools and
                #causes problems for lots of apps.
                if (perm != "android.hardware.screen.portrait" and
                    perm != "android.hardware.screen.landscape"):
                    if perm.startswith("android.feature."):
                        perm = perm[16:]
                    thisinfo['features'].append(perm)

        if not thisinfo.has_key('sdkversion'):
            print " WARNING: no SDK version information found"
            thisinfo['sdkversion'] = 0

        # Calculate the md5 and sha256...
        m = hashlib.md5()
        sha = hashlib.sha256()
        f = open(apkfile, 'rb')
        while True:
            t = f.read(1024)
            if len(t) == 0:
                break
            m.update(t)
            sha.update(t)
        thisinfo['md5'] = m.hexdigest()
        thisinfo['sha256'] = sha.hexdigest()
        f.close()

        # Get the signature (or md5 of, to be precise)...
        p = subprocess.Popen(['java', 'getsig',
                              os.path.join(os.getcwd(), apkfile)],
                             cwd=os.path.join(sys.path[0], 'getsig'),
                             stdout=subprocess.PIPE)
        output = p.communicate()[0]
        if options.verbose:
            print output
        if p.returncode != 0 or not output.startswith('Result:'):
            print "ERROR: Failed to get apk signature"
            sys.exit(1)
        thisinfo['sig'] = output[7:].strip()

        # Extract the icon file...
        apk = zipfile.ZipFile(apkfile, 'r')
        thisinfo['icon'] = (thisinfo['id'] + '.' +
                            str(thisinfo['versioncode']) + '.png')
        iconfilename = os.path.join(icon_dir, thisinfo['icon'])
        try:
            iconfile = open(iconfilename, 'wb')
            iconfile.write(apk.read(thisinfo['iconsrc']))
            iconfile.close()
        except:
            print "WARNING: Error retrieving icon file"
            warnings += 1
        apk.close()

        apks.append(thisinfo)

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    for app in apps:
        bestver = 0
        for apk in apks:
            if apk['id'] == app['id']:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = app['id']
            app['icon'] = ''
            if app['Disabled'] is None:
                print "WARNING: Application " + app['id'] + " has no packages"
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon']

    # Generate warnings for apk's with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    for apk in apks:
        found = False
        for app in apps:
            if app['id'] == apk['id']:
                found = True
                break
        if not found:
            if options.createmeta:
                f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
                f.write("License:Unknown\n")
                f.write("Web Site:\n")
                f.write("Source Code:\n")
                f.write("Issue Tracker:\n")
                f.write("Summary:" + apk['name'] + "\n")
                f.write("Description:\n")
                f.write(apk['name'] + "\n")
                f.write(".\n")
                f.close()
                print "Generated skeleton metadata for " + apk['id']
            else:
                print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
                print "   " + apk['name'] + " - " + apk['version']

    #Sort the app list by name, then the web site doesn't have to by default:
    apps = sorted(apps, key=lambda app: app['Name'].upper())

    # Create the index
    doc = Document()

    def addElement(name, value, doc, parent):
        el = doc.createElement(name)
        el.appendChild(doc.createTextNode(value))
        parent.appendChild(el)

    root = doc.createElement("fdroid")
    doc.appendChild(root)

    repoel = doc.createElement("repo")
    repoel.setAttribute("name", repo_name)
    repoel.setAttribute("icon", os.path.basename(repo_icon))
    repoel.setAttribute("url", repo_url)

    if repo_keyalias != None:

        # Generate a certificate fingerprint the same way keytool does it
        # (but with slightly different formatting)
        def cert_fingerprint(data):
            digest = hashlib.sha1(data).digest()
            ret = []
            for i in range(4):
                ret.append(":".join("%02X" % ord(b) for b in digest[i*5:i*5+5]))
            return " ".join(ret)

(the remainder of the update.py hunk is truncated in this mirror view)
|
||||||
print output
|
|
||||||
|
|
||||||
# Copy the repo icon into the repo directory...
|
def extract_pubkey():
|
||||||
iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon))
|
p = subprocess.Popen(['keytool', '-exportcert',
|
||||||
shutil.copyfile(repo_icon, iconfilename)
|
'-alias', repo_keyalias,
|
||||||
|
'-keystore', keystore,
|
||||||
|
'-storepass', keystorepass],
|
||||||
|
stdout=subprocess.PIPE)
|
||||||
|
cert = p.communicate()[0]
|
||||||
|
if p.returncode != 0:
|
||||||
|
print "ERROR: Failed to get repo pubkey"
|
||||||
|
sys.exit(1)
|
||||||
|
global repo_pubkey_fingerprint
|
||||||
|
repo_pubkey_fingerprint = cert_fingerprint(cert)
|
||||||
|
return "".join("%02x" % ord(b) for b in cert)
|
||||||
|
|
||||||
# Write a category list in the repo to allow quick access...
|
repoel.setAttribute("pubkey", extract_pubkey())
|
||||||
catdata = ''
|
|
||||||
for cat in categories:
|
|
||||||
catdata += cat + '\n'
|
|
||||||
f = open('repo/categories.txt', 'w')
|
|
||||||
f.write(catdata)
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
# Update known apks info...
|
addElement('description', repo_description, doc, repoel)
|
||||||
knownapks = common.KnownApks()
|
root.appendChild(repoel)
|
||||||
for apk in apks:
|
|
||||||
knownapks.recordapk(apk['apkname'], apk['id'])
|
apps_inrepo = 0
|
||||||
knownapks.writeifchanged()
|
apps_disabled = 0
|
||||||
|
apps_nopkg = 0
|
||||||
|
|
||||||
# Generate latest apps data for widget
|
|
||||||
data = ''
|
|
||||||
for line in file(os.path.join('stats', 'latestapps.txt')):
|
|
||||||
appid = line.rstrip()
|
|
||||||
data += appid + "\t"
|
|
||||||
for app in apps:
|
for app in apps:
|
||||||
if app['id'] == appid:
|
|
||||||
data += app['Name'] + "\t"
|
if app['Disabled'] is None:
|
||||||
data += app['icon'] + "\t"
|
|
||||||
data += app['License'] + "\n"
|
# Get a list of the apks for this app...
|
||||||
break
|
gotcurrentver = False
|
||||||
f = open('repo/latestapps.dat', 'w')
|
apklist = []
|
||||||
f.write(data)
|
for apk in apks:
|
||||||
f.close()
|
if apk['id'] == app['id']:
|
||||||
|
if str(apk['versioncode']) == app['Current Version Code']:
|
||||||
|
gotcurrentver = True
|
||||||
|
apklist.append(apk)
|
||||||
|
|
||||||
|
if len(apklist) == 0:
|
||||||
|
apps_nopkg += 1
|
||||||
|
else:
|
||||||
|
apps_inrepo += 1
|
||||||
|
apel = doc.createElement("application")
|
||||||
|
apel.setAttribute("id", app['id'])
|
||||||
|
root.appendChild(apel)
|
||||||
|
|
||||||
|
addElement('id', app['id'], doc, apel)
|
||||||
|
addElement('name', app['Name'], doc, apel)
|
||||||
|
addElement('summary', app['Summary'], doc, apel)
|
||||||
|
addElement('icon', app['icon'], doc, apel)
|
||||||
|
addElement('description',
|
||||||
|
common.parse_description(app['Description']), doc, apel)
|
||||||
|
addElement('license', app['License'], doc, apel)
|
||||||
|
if 'Category' in app:
|
||||||
|
addElement('category', app['Category'], doc, apel)
|
||||||
|
addElement('web', app['Web Site'], doc, apel)
|
||||||
|
addElement('source', app['Source Code'], doc, apel)
|
||||||
|
addElement('tracker', app['Issue Tracker'], doc, apel)
|
||||||
|
if app['Donate'] != None:
|
||||||
|
addElement('donate', app['Donate'], doc, apel)
|
||||||
|
|
||||||
|
# These elements actually refer to the current version (i.e. which
|
||||||
|
# one is recommended. They are historically mis-named, and need
|
||||||
|
# changing, but stay like this for now to support existing clients.
|
||||||
|
addElement('marketversion', app['Current Version'], doc, apel)
|
||||||
|
addElement('marketvercode', app['Current Version Code'], doc, apel)
|
||||||
|
|
||||||
|
if not (app['AntiFeatures'] is None):
|
||||||
|
addElement('antifeatures', app['AntiFeatures'], doc, apel)
|
||||||
|
if app['Requires Root']:
|
||||||
|
addElement('requirements', 'root', doc, apel)
|
||||||
|
|
||||||
|
# Sort the apk list into version order, just so the web site
|
||||||
|
# doesn't have to do any work by default...
|
||||||
|
apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)
|
||||||
|
|
||||||
|
# Check for duplicates - they will make the client unhappy...
|
||||||
|
for i in range(len(apklist) - 1):
|
||||||
|
if apklist[i]['versioncode'] == apklist[i+1]['versioncode']:
|
||||||
|
print "ERROR - duplicate versions"
|
||||||
|
print apklist[i]['apkname']
|
||||||
|
print apklist[i+1]['apkname']
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
for apk in apklist:
|
||||||
|
apkel = doc.createElement("package")
|
||||||
|
apel.appendChild(apkel)
|
||||||
|
addElement('version', apk['version'], doc, apkel)
|
||||||
|
addElement('versioncode', str(apk['versioncode']), doc, apkel)
|
||||||
|
addElement('apkname', apk['apkname'], doc, apkel)
|
||||||
|
if apk.has_key('srcname'):
|
||||||
|
addElement('srcname', apk['srcname'], doc, apkel)
|
||||||
|
for hash_type in ('sha256', 'md5'):
|
||||||
|
if not hash_type in apk:
|
||||||
|
continue
|
||||||
|
hashel = doc.createElement("hash")
|
||||||
|
hashel.setAttribute("type", hash_type)
|
||||||
|
hashel.appendChild(doc.createTextNode(apk[hash_type]))
|
||||||
|
apkel.appendChild(hashel)
|
||||||
|
addElement('sig', apk['sig'], doc, apkel)
|
||||||
|
addElement('size', str(apk['size']), doc, apkel)
|
||||||
|
addElement('sdkver', str(apk['sdkversion']), doc, apkel)
|
||||||
|
perms = ""
|
||||||
|
for p in apk['permissions']:
|
||||||
|
if len(perms) > 0:
|
||||||
|
perms += ","
|
||||||
|
perms += p
|
||||||
|
if len(perms) > 0:
|
||||||
|
addElement('permissions', perms, doc, apkel)
|
||||||
|
features = ""
|
||||||
|
for f in apk['features']:
|
||||||
|
if len(features) > 0:
|
||||||
|
features += ","
|
||||||
|
features += f
|
||||||
|
if len(features) > 0:
|
||||||
|
addElement('features', features, doc, apkel)
|
||||||
|
|
||||||
|
if options.buildreport:
|
||||||
|
if len(app['builds']) == 0:
|
||||||
|
print ("WARNING: No builds defined for " + app['id'] +
|
||||||
|
" Source: " + app['Source Code'])
|
||||||
|
warnings += 1
|
||||||
|
else:
|
||||||
|
if app['Current Version Code'] != '0':
|
||||||
|
gotbuild = False
|
||||||
|
for build in app['builds']:
|
||||||
|
if build['vercode'] == app['Current Version Code']:
|
||||||
|
gotbuild = True
|
||||||
|
if not gotbuild:
|
||||||
|
print ("WARNING: No build data for current version of "
|
||||||
|
+ app['id'] + " (" + app['Current Version']
|
||||||
|
+ ") " + app['Source Code'])
|
||||||
|
warnings += 1
|
||||||
|
|
||||||
|
# If we don't have the current version, check if there is a build
|
||||||
|
# with a commit ID starting with '!' - this means we can't build it
|
||||||
|
# for some reason, and don't want hassling about it...
|
||||||
|
if not gotcurrentver and app['Current Version Code'] != '0':
|
||||||
|
for build in app['builds']:
|
||||||
|
if build['vercode'] == app['Current Version Code']:
|
||||||
|
gotcurrentver = True
|
||||||
|
|
||||||
|
# Output a message of harassment if we don't have the current version:
|
||||||
|
if not gotcurrentver and app['Current Version Code'] != '0':
|
||||||
|
addr = app['Source Code']
|
||||||
|
print "WARNING: Don't have current version (" + app['Current Version'] + ") of " + app['Name']
|
||||||
|
print " (" + app['id'] + ") " + addr
|
||||||
|
warnings += 1
|
||||||
|
if options.verbose:
|
||||||
|
# A bit of extra debug info, basically for diagnosing
|
||||||
|
# app developer mistakes:
|
||||||
|
print " Current vercode:" + app['Current Version Code']
|
||||||
|
print " Got:"
|
||||||
|
for apk in apks:
|
||||||
|
if apk['id'] == app['id']:
|
||||||
|
print " " + str(apk['versioncode']) + " - " + apk['version']
|
||||||
|
if options.interactive:
|
||||||
|
print "Build data out of date for " + app['id']
|
||||||
|
while True:
|
||||||
|
answer = raw_input("[I]gnore, [E]dit or [Q]uit?").lower()
|
||||||
|
if answer == 'i':
|
||||||
|
break
|
||||||
|
elif answer == 'e':
|
||||||
|
subprocess.call([options.editor,
|
||||||
|
os.path.join('metadata',
|
||||||
|
app['id'] + '.txt')])
|
||||||
|
break
|
||||||
|
elif answer == 'q':
|
||||||
|
sys.exit(0)
|
||||||
|
else:
|
||||||
|
apps_disabled += 1
|
||||||
|
|
||||||
|
of = open(os.path.join('repo','index.xml'), 'wb')
|
||||||
|
if options.pretty:
|
||||||
|
output = doc.toprettyxml()
|
||||||
|
else:
|
||||||
|
output = doc.toxml()
|
||||||
|
of.write(output)
|
||||||
|
of.close()
|
||||||
|
|
||||||
|
if repo_keyalias != None:
|
||||||
|
|
||||||
|
if not options.quiet:
|
||||||
|
print "Creating signed index."
|
||||||
|
print "Key fingerprint:", repo_pubkey_fingerprint
|
||||||
|
|
||||||
|
#Create a jar of the index...
|
||||||
|
p = subprocess.Popen(['jar', 'cf', 'index.jar', 'index.xml'],
|
||||||
|
cwd='repo', stdout=subprocess.PIPE)
|
||||||
|
output = p.communicate()[0]
|
||||||
|
if options.verbose:
|
||||||
|
print output
|
||||||
|
if p.returncode != 0:
|
||||||
|
print "ERROR: Failed to create jar file"
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Sign the index...
|
||||||
|
p = subprocess.Popen(['jarsigner', '-keystore', keystore,
|
||||||
|
'-storepass', keystorepass, '-keypass', keypass,
|
||||||
|
os.path.join('repo', 'index.jar') , repo_keyalias], stdout=subprocess.PIPE)
|
||||||
|
output = p.communicate()[0]
|
||||||
|
if p.returncode != 0:
|
||||||
|
print "Failed to sign index"
|
||||||
|
print output
|
||||||
|
sys.exit(1)
|
||||||
|
if options.verbose:
|
||||||
|
print output
|
||||||
|
|
||||||
|
# Copy the repo icon into the repo directory...
|
||||||
|
iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon))
|
||||||
|
shutil.copyfile(repo_icon, iconfilename)
|
||||||
|
|
||||||
|
# Write a category list in the repo to allow quick access...
|
||||||
|
catdata = ''
|
||||||
|
for cat in categories:
|
||||||
|
catdata += cat + '\n'
|
||||||
|
f = open('repo/categories.txt', 'w')
|
||||||
|
f.write(catdata)
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
# Update known apks info...
|
||||||
|
knownapks = common.KnownApks()
|
||||||
|
for apk in apks:
|
||||||
|
knownapks.recordapk(apk['apkname'], apk['id'])
|
||||||
|
knownapks.writeifchanged()
|
||||||
|
|
||||||
|
# Generate latest apps data for widget
|
||||||
|
data = ''
|
||||||
|
for line in file(os.path.join('stats', 'latestapps.txt')):
|
||||||
|
appid = line.rstrip()
|
||||||
|
data += appid + "\t"
|
||||||
|
for app in apps:
|
||||||
|
if app['id'] == appid:
|
||||||
|
data += app['Name'] + "\t"
|
||||||
|
data += app['icon'] + "\t"
|
||||||
|
data += app['License'] + "\n"
|
||||||
|
break
|
||||||
|
f = open('repo/latestapps.dat', 'w')
|
||||||
|
f.write(data)
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
print "Finished."
|
print "Finished."
|
||||||
print str(apps_inrepo) + " apps in repo"
|
print str(apps_inrepo) + " apps in repo"
|
||||||
print str(apps_disabled) + " disabled"
|
print str(apps_disabled) + " disabled"
|
||||||
print str(apps_nopkg) + " with no packages"
|
print str(apps_nopkg) + " with no packages"
|
||||||
print str(warnings) + " warnings"
|
print str(warnings) + " warnings"
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
|
|
|
||||||
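The main() above builds repo/index.xml with xml.dom.minidom, one application element per app and one package element per apk. Below is a minimal sketch of reading that file back; it is not part of this commit, it assumes update.py has already been run from the repo's parent directory, and the helper name list_recommended_versions is made up for illustration. It uses only the same minidom module the script itself uses.

from xml.dom.minidom import parse

def list_recommended_versions(path='repo/index.xml'):
    # Walk the <application> elements that main() above emits and print the
    # recommended version, which the index still stores as 'marketversion'.
    doc = parse(path)
    for apel in doc.getElementsByTagName('application'):
        appid = apel.getAttribute('id')
        vers = apel.getElementsByTagName('marketversion')
        if vers and vers[0].firstChild:
            print appid + ": " + vers[0].firstChild.data
        else:
            print appid + ": (no recommended version)"

list_recommended_versions()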
218 updatestats.py

@@ -30,121 +30,125 @@ import HTMLParser
import paramiko
import common

def main():

    # Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-d", "--download", action="store_true", default=False,
                      help="Download logs we don't have")
    (options, args) = parser.parse_args()

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    logsarchivedir = os.path.join(logsdir, 'archive')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            print 'Retrieving logs'
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect('f-droid.org', username='fdroid', timeout=10,
                        key_filename=webserver_keyfile)
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(15)
            print "...connected"

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log'):

                    destpath = os.path.join(logsdir, f)
                    archivepath = os.path.join(logsarchivedir, f + '.gz')
                    if os.path.exists(archivepath):
                        if os.path.exists(destpath):
                            # Just in case we have it archived but failed to remove
                            # the original...
                            os.remove(destpath)
                    else:
                        destsize = ftp.stat(f).st_size
                        if (not os.path.exists(destpath) or
                                os.path.getsize(destpath) != destsize):
                            print "...retrieving " + f
                            ftp.get(f, destpath)
        except Exception as e:
            traceback.print_exc()
            sys.exit(1)
        finally:
            #Disconnect
            if ftp != None:
                ftp.close()
            if ssh != None:
                ssh.close()

    # Process logs
    logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
    logsearch = re.compile(logexpr).search
    apps = {}
    unknownapks = []
    knownapks = common.KnownApks()
    for logfile in glob.glob(os.path.join(logsdir,'access-*.log')):
        logdate = logfile[len(logsdir) + 1 + len('access-'):-4]
        matches = (logsearch(line) for line in file(logfile))
        for match in matches:
            if match and match.group('statuscode') == '200':
                uri = match.group('uri')
                if uri.endswith('.apk'):
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        if appid in apps:
                            apps[appid] += 1
                        else:
                            apps[appid] = 1
                    else:
                        if not apkname in unknownapks:
                            unknownapks.append(apkname)

    # Calculate and write stats for total downloads...
    f = open('stats/total_downloads_app.txt', 'w')
    lst = []
    alldownloads = 0
    for app, count in apps.iteritems():
        lst.append(app + " " + str(count))
        alldownloads += count
    lst.append("ALL " + str(alldownloads))
    f.write('# Total downloads by application, since October 2011\n')
    for line in sorted(lst):
        f.write(line + '\n')
    f.close()

    # Write list of latest apps added to the repo...
    latest = knownapks.getlatest(10)
    f = open('stats/latestapps.txt', 'w')
    for app in latest:
        f.write(app + '\n')
    f.close()

    if len(unknownapks) > 0:
        print '\nUnknown apks:'
        for apk in unknownapks:
            print apk

    print "Finished."

if __name__ == "__main__":
    main()
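The logexpr pattern in the main() above does the heavy lifting when counting downloads, so it is worth seeing what it actually captures. A minimal sketch follows; it is not part of this commit, and the access log line is invented for illustration, though it follows the combined-log shape the pattern expects.

import re

logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
sample = '10.0.0.1 - - [01/Jan/2012:00:00:00 +0000] "GET /repo/org.example.app_7.apk HTTP/1.1" 200 12345 "-" "F-Droid"'
match = re.compile(logexpr).search(sample)
if match:
    # These are the two groups main() relies on when tallying apk downloads.
    print match.group('statuscode')   # -> 200
    print match.group('uri')          # -> /repo/org.example.app_7.apk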