Mirror of https://github.com/f-droid/fdroidserver.git (synced 2025-09-16 16:02:33 +03:00)

Merge branch 'py3' into 'master'
Python 3

I tried to keep commits separate, so if anything causes trouble, it can be reverted or changed easily.

* pre-commit hooks pass
* all tests pass
* My use of `build`, `checkupdates`, `lint`, `import`, `publish` and `update` works as usual
* 2to3 does not report anything useful anymore (only useless parentheses and list() encapsulation of iterators)
* rewritemeta works exactly as usual

CC @eighthave

See merge request !88

Commit: f267a1d7c9
38 changed files with 339 additions and 383 deletions
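
The diffs below are the usual Python 2 to Python 3 conversions. As a rough, self-contained illustration of the recurring substitutions (the module, dict and file names here are invented for the example, not taken from the diff):

```python
#!/usr/bin/env python3
# Illustrative sketch only: 'apps', the URL and 'example.txt' are made-up
# example values, not anything from fdroidserver itself.
import os
import urllib.request  # replaces the Python 2 urllib2 module

apps = {'org.example.app': '1.0'}

# dict.iteritems()/itervalues()/iterkeys() are gone; items()/values() are used.
for appid, version in apps.items():
    print(appid, version)

# urllib2.Request becomes urllib.request.Request (urlopen moves the same way).
req = urllib.request.Request('https://example.com/', None)
print(req.full_url)

# os.write() takes bytes in Python 3, so str values are encoded explicitly,
# mirroring the write_password_file() change in common.py below.
fd = os.open('example.txt', os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
os.write(fd, 'secret'.encode('utf-8'))
os.close(fd)
```
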
.gitignore: 2 changes (vendored)

@@ -15,4 +15,4 @@ pylint.parseable
 docs/html/

 # files generated by tests
-tests/getsig/tmp/
+tmp/

@@ -3,6 +3,7 @@ image: mvdan/fdroid-ci:latest
 test:
   script:
     - apt-get update
-    - apt-get install -y python-dev gcc
+    - apt-get install -y python3-dev gcc
+    - pip3 install -e .
     - cd tests
     - ./complete-ci-tests

README.md: 11 changes

@@ -23,8 +23,7 @@ install, and keep track of updates on your device.

 ### Installing

-Note that only Python 2 is supported. We recommend version 2.7.7 or
-later.
+Note that only Python 3 is supported. We recommend version 3.4 or later.

 The easiest way to install the `fdroidserver` tools is on Ubuntu, Mint or other
 Ubuntu based distributions, you can install using:

@@ -56,7 +55,7 @@ or Cygwin, you can use it:

 Python's `pip` also works:

-    sudo pip install fdroidserver
+    sudo pip3 install fdroidserver

 The combination of `virtualenv` and `pip` is great for testing out the
 latest versions of `fdroidserver`. Using `pip`, `fdroidserver` can

@@ -67,7 +66,7 @@ via other mechanisms like Brew/dnf/pacman/emerge/Fink/MacPorts.

 For Debian based distributions:

-    apt-get install python-dev python-pip python-virtualenv
+    apt-get install python3-dev python3-pip virtualenv

 Then here's how to install:

@@ -75,5 +74,5 @@ Then here's how to install:
     cd fdroidserver
     virtualenv env/
     source env/bin/activate
-    pip install -e .
-    python2 setup.py install
+    pip3 install -e .
+    python3 setup.py install

@@ -83,9 +83,7 @@ intended usage. At the very least, you'll need:
 @item
 GNU/Linux
 @item
-Python 2.x
-To be sure of being able to process all apk files without error, you need
-2.7.7 or later. See @code{http://bugs.python.org/issue14315}.
+Python 3.4 or later
 @item
 The Android SDK Tools and Build-tools.
 Note that F-Droid does not assume that you have the Android SDK in your

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # Copy this file to config.py, then amend the settings below according to
 # your system configuration.

@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 #
 # You may want to alter these before running ./makebuildserver

fdroid: 3 changes

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # fdroid.py - part of the FDroid server tools
 # Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # build.py - part of the FDroid server tools
 # Copyright (C) 2010-2014, Ciaran Gultnieks, ciaran@ciarang.com

@@ -28,15 +27,15 @@ import tarfile
 import traceback
 import time
 import json
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 from argparse import ArgumentParser
 import logging

-import common
-import net
-import metadata
-import scanner
-from common import FDroidException, BuildException, VCSException, FDroidPopen, SdkToolsPopen
+from . import common
+from . import net
+from . import metadata
+from . import scanner
+from .common import FDroidException, BuildException, VCSException, FDroidPopen, SdkToolsPopen

 try:
     import paramiko

@@ -463,7 +462,7 @@ def build_local(app, build, vcs, build_dir, output_dir, srclib_dir, extlib_dir,
 if not ndk_path:
     logging.critical("Android NDK version '%s' could not be found!" % build.ndk or 'r10e')
     logging.critical("Configured versions:")
-    for k, v in config['ndk_paths'].iteritems():
+    for k, v in config['ndk_paths'].items():
         if k.endswith("_orig"):
             continue
         logging.critical(" %s: %s" % (k, v))

@@ -1071,7 +1070,7 @@ def main():
 raise FDroidException("No apps to process.")

 if options.latest:
-    for app in apps.itervalues():
+    for app in apps.values():
         for build in reversed(app.builds):
             if build.disable and not options.force:
                 continue

@@ -1087,7 +1086,7 @@ def main():
 # Build applications...
 failed_apps = {}
 build_succeeded = []
-for appid, app in apps.iteritems():
+for appid, app in apps.items():

     first = True

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # checkupdates.py - part of the FDroid server tools
 # Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com

@@ -21,20 +20,21 @@
 import sys
 import os
 import re
-import urllib2
+import urllib.request
+import urllib.error
 import time
 import subprocess
 from argparse import ArgumentParser
 import traceback
-import HTMLParser
+from html.parser import HTMLParser
 from distutils.version import LooseVersion
 import logging
 import copy

-import common
-import metadata
-from common import VCSException, FDroidException
-from metadata import MetaDataException
+from . import common
+from . import metadata
+from .common import VCSException, FDroidException
+from .metadata import MetaDataException


 # Check for a new version by looking at a document retrieved via HTTP.

@@ -52,8 +52,8 @@ def check_http(app):
 vercode = "99999999"
 if len(urlcode) > 0:
     logging.debug("...requesting {0}".format(urlcode))
-    req = urllib2.Request(urlcode, None)
-    resp = urllib2.urlopen(req, None, 20)
+    req = urllib.request.Request(urlcode, None)
+    resp = urllib.request.urlopen(req, None, 20)
     page = resp.read()

 m = re.search(codeex, page)

@@ -65,8 +65,8 @@ def check_http(app):
 if len(urlver) > 0:
     if urlver != '.':
         logging.debug("...requesting {0}".format(urlver))
-        req = urllib2.Request(urlver, None)
-        resp = urllib2.urlopen(req, None, 20)
+        req = urllib.request.Request(urlver, None)
+        resp = urllib.request.urlopen(req, None, 20)
         page = resp.read()

 m = re.search(verex, page)

@@ -280,11 +280,11 @@ def check_gplay(app):
 time.sleep(15)
 url = 'https://play.google.com/store/apps/details?id=' + app.id
 headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
-req = urllib2.Request(url, None, headers)
+req = urllib.request.Request(url, None, headers)
 try:
-    resp = urllib2.urlopen(req, None, 20)
+    resp = urllib.request.urlopen(req, None, 20)
     page = resp.read()
-except urllib2.HTTPError as e:
+except urllib.error.HTTPError as e:
     return (None, str(e.code))
 except Exception as e:
     return (None, 'Failed:' + str(e))

@@ -293,7 +293,7 @@ def check_gplay(app):

 m = re.search('itemprop="softwareVersion">[ ]*([^<]+)[ ]*</div>', page)
 if m:
-    html_parser = HTMLParser.HTMLParser()
+    html_parser = HTMLParser()
     version = html_parser.unescape(m.group(1))

 if version == 'Varies with device':

@@ -559,7 +559,7 @@ def main():
 .format(common.getappname(app), version))
 return

-for appid, app in apps.iteritems():
+for appid, app in apps.items():

     if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
         logging.debug("Nothing to do for {0}...".format(appid))

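The checkupdates.py hunks above swap the Python 2 urllib2 module for urllib.request and urllib.error. A minimal, self-contained sketch of that pattern, using a placeholder URL rather than anything from the diff:

```python
import urllib.error
import urllib.request

url = 'https://example.com/version.txt'  # placeholder URL for the example
try:
    # urllib2.Request/urlopen become urllib.request.Request/urlopen,
    # and the HTTPError class moves to urllib.error.
    req = urllib.request.Request(url, None, {'User-Agent': 'example-agent'})
    resp = urllib.request.urlopen(req, None, 20)
    page = resp.read()  # bytes in Python 3, not str
    print(page.decode('utf-8'))
except urllib.error.HTTPError as e:
    print('HTTP error:', e.code)
except Exception as e:
    print('Failed:', e)
```
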
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # common.py - part of the FDroid server tools
 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com

@@ -20,6 +20,7 @@
 # common.py is imported by all modules, so do not import third-party
 # libraries here as they will become a requirement for all commands.

+import io
 import os
 import sys
 import re

@@ -32,19 +33,15 @@ import operator
 import logging
 import hashlib
 import socket
+import base64
 import xml.etree.ElementTree as XMLElementTree

-try:
-    # Python 2
-    from Queue import Queue
-except ImportError:
-    # Python 3
-    from queue import Queue
+from queue import Queue

 from zipfile import ZipFile

-import metadata
-from fdroidserver.asynchronousfilereader import AsynchronousFileReader
+import fdroidserver.metadata
+from .asynchronousfilereader import AsynchronousFileReader


 XMLElementTree.register_namespace('android', 'http://schemas.android.com/apk/res/android')

@@ -206,7 +203,9 @@ def read_config(opts, config_file='config.py'):
 config = {}

 logging.debug("Reading %s" % config_file)
-execfile(config_file, config)
+with io.open(config_file, "rb") as f:
+    code = compile(f.read(), config_file, 'exec')
+    exec(code, None, config)

 # smartcardoptions must be a list since its command line args for Popen
 if 'smartcardoptions' in config:

@@ -244,9 +243,9 @@
 config[k] = clean_description(config[k])

 if 'serverwebroot' in config:
-    if isinstance(config['serverwebroot'], basestring):
+    if isinstance(config['serverwebroot'], str):
         roots = [config['serverwebroot']]
-    elif all(isinstance(item, basestring) for item in config['serverwebroot']):
+    elif all(isinstance(item, str) for item in config['serverwebroot']):
         roots = config['serverwebroot']
     else:
         raise TypeError('only accepts strings, lists, and tuples')

@@ -339,9 +338,9 @@ def write_password_file(pwtype, password=None):
 filename = '.fdroid.' + pwtype + '.txt'
 fd = os.open(filename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
 if password is None:
-    os.write(fd, config[pwtype])
+    os.write(fd, config[pwtype].encode('utf-8'))
 else:
-    os.write(fd, password)
+    os.write(fd, password.encode('utf-8'))
 os.close(fd)
 config[pwtype + 'file'] = filename

@@ -378,7 +377,7 @@ def read_app_args(args, allapps, allow_vercodes=False):
 return allapps

 apps = {}
-for appid, app in allapps.iteritems():
+for appid, app in allapps.items():
     if appid in vercodes:
         apps[appid] = app

@@ -391,7 +390,7 @@ def read_app_args(args, allapps, allow_vercodes=False):
 raise FDroidException("No packages specified")

 error = False
-for appid, app in apps.iteritems():
+for appid, app in apps.items():
     vc = vercodes[appid]
     if not vc:
         continue

@@ -486,9 +485,9 @@ def getvcs(vcstype, remote, local):


 def getsrclibvcs(name):
-    if name not in metadata.srclibs:
+    if name not in fdroidserver.metadata.srclibs:
         raise VCSException("Missing srclib " + name)
-    return metadata.srclibs[name]['Repo Type']
+    return fdroidserver.metadata.srclibs[name]['Repo Type']


 class vcs:

@@ -532,6 +531,7 @@ class vcs:
 # automatically if either of those things changes.
 fdpath = os.path.join(self.local, '..',
                       '.fdroidvcs-' + os.path.basename(self.local))
+fdpath = os.path.normpath(fdpath)
 cdata = self.repotype() + ' ' + self.remote
 writeback = True
 deleterepo = False

@@ -563,7 +563,8 @@ class vcs:

 # If necessary, write the .fdroidvcs file.
 if writeback and not self.clone_failed:
-    with open(fdpath, 'w') as f:
+    os.makedirs(os.path.dirname(fdpath), exist_ok=True)
+    with open(fdpath, 'w+') as f:
         f.write(cdata)

 if exc is not None:

@@ -947,7 +948,7 @@ def retrieve_string(app_dir, string, xmlfiles=None):
 if element.text is None:
     return ""
 s = XMLElementTree.tostring(element, encoding='utf-8', method='text')
-return s.strip()
+return s.decode('utf-8').strip()

 for path in xmlfiles:
     if not os.path.isfile(path):

@@ -995,7 +996,7 @@ def fetch_real_name(app_dir, flavours):
 continue
 if "{http://schemas.android.com/apk/res/android}label" not in app.attrib:
     continue
-label = app.attrib["{http://schemas.android.com/apk/res/android}label"].encode('utf-8')
+label = app.attrib["{http://schemas.android.com/apk/res/android}label"]
 result = retrieve_string_singleline(app_dir, label)
 if result:
     result = result.strip()

@@ -1008,15 +1009,16 @@ def get_library_references(root_dir):
 proppath = os.path.join(root_dir, 'project.properties')
 if not os.path.isfile(proppath):
     return libraries
-for line in file(proppath):
-    if not line.startswith('android.library.reference.'):
-        continue
-    path = line.split('=')[1].strip()
-    relpath = os.path.join(root_dir, path)
-    if not os.path.isdir(relpath):
-        continue
-    logging.debug("Found subproject at %s" % path)
-    libraries.append(path)
+with open(proppath, 'r') as f:
+    for line in f:
+        if not line.startswith('android.library.reference.'):
+            continue
+        path = line.split('=')[1].strip()
+        relpath = os.path.join(root_dir, path)
+        if not os.path.isdir(relpath):
+            continue
+        logging.debug("Found subproject at %s" % path)
+        libraries.append(path)
 return libraries


@@ -1082,38 +1084,39 @@ def parse_androidmanifests(paths, app):
 package = None

 if gradle:
-    for line in file(path):
-        if gradle_comment.match(line):
-            continue
-        # Grab first occurence of each to avoid running into
-        # alternative flavours and builds.
-        if not package:
-            matches = psearch_g(line)
-            if matches:
-                s = matches.group(2)
-                if app_matches_packagename(app, s):
-                    package = s
-        if not version:
-            matches = vnsearch_g(line)
-            if matches:
-                version = matches.group(2)
-        if not vercode:
-            matches = vcsearch_g(line)
-            if matches:
-                vercode = matches.group(1)
+    with open(path, 'r') as f:
+        for line in f:
+            if gradle_comment.match(line):
+                continue
+            # Grab first occurence of each to avoid running into
+            # alternative flavours and builds.
+            if not package:
+                matches = psearch_g(line)
+                if matches:
+                    s = matches.group(2)
+                    if app_matches_packagename(app, s):
+                        package = s
+            if not version:
+                matches = vnsearch_g(line)
+                if matches:
+                    version = matches.group(2)
+            if not vercode:
+                matches = vcsearch_g(line)
+                if matches:
+                    vercode = matches.group(1)
 else:
     try:
         xml = parse_xml(path)
         if "package" in xml.attrib:
-            s = xml.attrib["package"].encode('utf-8')
+            s = xml.attrib["package"]
             if app_matches_packagename(app, s):
                 package = s
         if "{http://schemas.android.com/apk/res/android}versionName" in xml.attrib:
-            version = xml.attrib["{http://schemas.android.com/apk/res/android}versionName"].encode('utf-8')
+            version = xml.attrib["{http://schemas.android.com/apk/res/android}versionName"]
             base_dir = os.path.dirname(path)
             version = retrieve_string_singleline(base_dir, version)
         if "{http://schemas.android.com/apk/res/android}versionCode" in xml.attrib:
-            a = xml.attrib["{http://schemas.android.com/apk/res/android}versionCode"].encode('utf-8')
+            a = xml.attrib["{http://schemas.android.com/apk/res/android}versionCode"]
             if string_is_integer(a):
                 vercode = a
     except Exception:

@@ -1209,10 +1212,10 @@ def getsrclib(spec, srclib_dir, subdir=None, basepath=False,
 if '/' in name:
     name, subdir = name.split('/', 1)

-if name not in metadata.srclibs:
+if name not in fdroidserver.metadata.srclibs:
     raise VCSException('srclib ' + name + ' not found.')

-srclib = metadata.srclibs[name]
+srclib = fdroidserver.metadata.srclibs[name]

 sdir = os.path.join(srclib_dir, name)

@@ -1510,7 +1513,7 @@ def getpaths_map(build_dir, globpaths):
 def getpaths(build_dir, globpaths):
     paths_map = getpaths_map(build_dir, globpaths)
     paths = set()
-    for k, v in paths_map.iteritems():
+    for k, v in paths_map.items():
         for p in v:
             paths.add(p)
     return paths

@@ -1526,12 +1529,13 @@ class KnownApks:
 self.path = os.path.join('stats', 'known_apks.txt')
 self.apks = {}
 if os.path.isfile(self.path):
-    for line in file(self.path):
-        t = line.rstrip().split(' ')
-        if len(t) == 2:
-            self.apks[t[0]] = (t[1], None)
-        else:
-            self.apks[t[0]] = (t[1], time.strptime(t[2], '%Y-%m-%d'))
+    with open(self.path, 'r') as f:
+        for line in f:
+            t = line.rstrip().split(' ')
+            if len(t) == 2:
+                self.apks[t[0]] = (t[1], None)
+            else:
+                self.apks[t[0]] = (t[1], time.strptime(t[2], '%Y-%m-%d'))
 self.changed = False

 def writeifchanged(self):

@@ -1542,7 +1546,7 @@ class KnownApks:
 os.mkdir('stats')

 lst = []
-for apk, app in self.apks.iteritems():
+for apk, app in self.apks.items():
     appid, added = app
     line = apk + ' ' + appid
     if added:

@@ -1573,7 +1577,7 @@ class KnownApks:
 # with the most recent first.
 def getlatest(self, num):
     apps = {}
-    for apk, app in self.apks.iteritems():
+    for apk, app in self.apks.items():
         appid, added = app
         if added:
             if appid in apps:

@@ -1581,7 +1585,7 @@ class KnownApks:
     apps[appid] = added
 else:
     apps[appid] = added
-sortedapps = sorted(apps.iteritems(), key=operator.itemgetter(1))[-num:]
+sortedapps = sorted(apps.items(), key=operator.itemgetter(1))[-num:]
 lst = [app for app, _ in sortedapps]
 lst.reverse()
 return lst

@@ -1604,8 +1608,9 @@ def isApkDebuggable(apkfile, config):


 class PopenResult:
-    returncode = None
-    output = ''
+    def __init__(self):
+        self.returncode = None
+        self.output = None


 def SdkToolsPopen(commands, cwd=None, output=True):

@@ -1620,9 +1625,9 @@ def SdkToolsPopen(commands, cwd=None, output=True):
 cwd=cwd, output=output)


-def FDroidPopen(commands, cwd=None, output=True, stderr_to_stdout=True):
+def FDroidPopenBytes(commands, cwd=None, output=True, stderr_to_stdout=True):
     """
-    Run a command and capture the possibly huge output.
+    Run a command and capture the possibly huge output as bytes.

     :param commands: command and argument list like in subprocess.Popen
     :param cwd: optionally specifies a working directory

@@ -1653,13 +1658,14 @@ def FDroidPopen(commands, cwd=None, output=True, stderr_to_stdout=True):
 while not stderr_reader.eof():
     while not stderr_queue.empty():
         line = stderr_queue.get()
-        sys.stderr.write(line)
+        sys.stderr.buffer.write(line)
         sys.stderr.flush()

     time.sleep(0.1)

 stdout_queue = Queue()
 stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue)
+buf = io.BytesIO()

 # Check the queue for output (until there is no more to get)
 while not stdout_reader.eof():

@@ -1667,13 +1673,28 @@ def FDroidPopen(commands, cwd=None, output=True, stderr_to_stdout=True):
 line = stdout_queue.get()
 if output and options.verbose:
     # Output directly to console
-    sys.stderr.write(line)
+    sys.stderr.buffer.write(line)
     sys.stderr.flush()
-result.output += line
+buf.write(line)

 time.sleep(0.1)

 result.returncode = p.wait()
+result.output = buf.getvalue()
+buf.close()
+return result
+
+
+def FDroidPopen(commands, cwd=None, output=True, stderr_to_stdout=True):
+    """
+    Run a command and capture the possibly huge output as a str.
+
+    :param commands: command and argument list like in subprocess.Popen
+    :param cwd: optionally specifies a working directory
+    :returns: A PopenResult.
+    """
+    result = FDroidPopenBytes(commands, cwd, output, stderr_to_stdout)
+    result.output = result.output.decode('utf-8')
 return result


@@ -1919,8 +1940,9 @@ def genpassword():
 '''generate a random password for when generating keys'''
 h = hashlib.sha256()
 h.update(os.urandom(16)) # salt
-h.update(bytes(socket.getfqdn()))
-return h.digest().encode('base64').strip()
+h.update(socket.getfqdn().encode('utf-8'))
+passwd = base64.b64encode(h.digest()).strip()
+return passwd.decode('utf-8')


 def genkeystore(localconfig):

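The last common.py hunks split subprocess capture into a bytes layer (FDroidPopenBytes) and a thin str wrapper (FDroidPopen) that decodes once at the boundary. A stand-alone sketch of the same idea, using plain subprocess.Popen instead of the asynchronous reader in common.py and an example command:

```python
import subprocess


class PopenResult:
    """Small result holder mirroring the shape used in the hunks above."""

    def __init__(self):
        self.returncode = None
        self.output = None


def popen_bytes(commands):
    # Capture stdout as raw bytes, the role FDroidPopenBytes() plays above.
    result = PopenResult()
    p = subprocess.Popen(commands, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    result.output, _ = p.communicate()
    result.returncode = p.returncode
    return result


def popen_text(commands):
    # Decode once at the boundary, the role of the new FDroidPopen() wrapper.
    result = popen_bytes(commands)
    result.output = result.output.decode('utf-8')
    return result


if __name__ == '__main__':
    print(popen_text(['echo', 'hello']).output)
```
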
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # gpgsign.py - part of the FDroid server tools
 # Copyright (C) 2014, Ciaran Gultnieks, ciaran@ciarang.com

@@ -23,8 +22,8 @@ import glob
 from argparse import ArgumentParser
 import logging

-import common
-from common import FDroidPopen
+from . import common
+from .common import FDroidPopen

 config = None
 options = None

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # import.py - part of the FDroid server tools
 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com

@@ -21,12 +20,13 @@
 import sys
 import os
 import shutil
-import urllib
+import urllib.request
 from argparse import ArgumentParser
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 import logging
-import common
-import metadata
+
+from . import common
+from . import metadata


 # Get the repo type and address from the given web page. The page is scanned

@@ -35,7 +35,7 @@ import metadata
 # Returns repotype, address, or None, reason
 def getrepofrompage(url):

-    req = urllib.urlopen(url)
+    req = urllib.request.urlopen(url)
     if req.getcode() != 200:
         return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode()))
     page = req.read()

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # update.py - part of the FDroid server tools
 # Copyright (C) 2010-2013, Ciaran Gultnieks, ciaran@ciarang.com

@@ -28,7 +27,7 @@ import sys
 from argparse import ArgumentParser
 import logging
-import common
+
+from . import common

 config = None
 options = None

@@ -103,8 +102,8 @@ def main():
 default_sdk_path = '/opt/android-sdk'
 while not options.no_prompt:
     try:
-        s = raw_input('Enter the path to the Android SDK ('
+        s = input('Enter the path to the Android SDK ('
                       + default_sdk_path + ') here:\n> ')
     except KeyboardInterrupt:
         print('')
         sys.exit(1)

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # install.py - part of the FDroid server tools
 # Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com

@@ -24,8 +23,8 @@ import glob
 from argparse import ArgumentParser
 import logging

-import common
-from common import SdkToolsPopen, FDroidException
+from . import common
+from .common import SdkToolsPopen, FDroidException

 options = None
 config = None

@@ -82,7 +81,7 @@ def main():
 continue
 apks[appid] = apkfile

-for appid, apk in apks.iteritems():
+for appid, apk in apks.items():
     if not apk:
         raise FDroidException("No signed apk available for %s" % appid)

@@ -91,7 +90,7 @@ def main():
 apks = {common.apknameinfo(apkfile)[0]: apkfile for apkfile in
         sorted(glob.glob(os.path.join(output_dir, '*.apk')))}

-for appid, apk in apks.iteritems():
+for appid, apk in apks.items():
     # Get device list each time to avoid device not found errors
     devs = devices()
     if not devs:

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # lint.py - part of the FDroid server tool
 # Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>

@@ -20,11 +19,10 @@
 from argparse import ArgumentParser
 import re
 import sys
-from sets import Set

-import common
-import metadata
-import rewritemeta
+from . import common
+from . import metadata
+from . import rewritemeta

 config = None
 options = None

@@ -118,7 +116,7 @@ regex_checks = {


 def check_regexes(app):
-    for f, checks in regex_checks.iteritems():
+    for f, checks in regex_checks.items():
         for m, r in checks:
             v = app.get_field(f)
             t = metadata.fieldtype(f)

@@ -205,7 +203,7 @@ def check_empty_fields(app):
 if not app.Categories:
     yield "Categories are not set"

-all_categories = Set([
+all_categories = set([
     "Connectivity",
     "Development",
     "Games",

@@ -333,7 +331,7 @@ def main():
 allapps = metadata.read_metadata(xref=True)
 apps = common.read_app_args(options.appid, allapps, False)

-for appid, app in apps.iteritems():
+for appid, app in apps.items():
     if app.Disabled:
         continue

@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # metadata.py - part of the FDroid server tools
 # Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com

@@ -23,11 +23,7 @@ import re
 import glob
 import cgi
 import textwrap
+import io
-try:
-    from cStringIO import StringIO
-except:
-    from StringIO import StringIO

 import yaml
 # use libyaml if it is available

@@ -41,7 +37,7 @@ except ImportError:
 # use the C implementation when available
 import xml.etree.cElementTree as ElementTree

-import common
+import fdroidserver.common

 srclibs = None

@@ -162,11 +158,11 @@ class App():
 # names. Should only be used for tests.
 def field_dict(self):
     d = {}
-    for k, v in self.__dict__.iteritems():
+    for k, v in self.__dict__.items():
         if k == 'builds':
             d['builds'] = []
             for build in v:
-                b = {k: v for k, v in build.__dict__.iteritems() if not k.startswith('_')}
+                b = {k: v for k, v in build.__dict__.items() if not k.startswith('_')}
                 d['builds'].append(b)
         elif not k.startswith('_'):
             f = App.attr_to_field(k)

@@ -200,7 +196,7 @@ class App():

 # Like dict.update(), but using human-readable field names
 def update_fields(self, d):
-    for f, v in d.iteritems():
+    for f, v in d.items():
         if f == 'builds':
             for b in v:
                 build = Build()

@@ -352,13 +348,13 @@ class Build():
 version = self.ndk
 if not version:
     version = 'r10e' # falls back to latest
-paths = common.config['ndk_paths']
+paths = fdroidserver.common.config['ndk_paths']
 if version not in paths:
     return ''
 return paths[version]

 def update_flags(self, d):
-    for f, v in d.iteritems():
+    for f, v in d.items():
         self.set_flag(f, v)

 flagtypes = {

@@ -513,8 +509,8 @@ class DescriptionFormatter:
 self.laststate = self.stNONE
 self.text_html = ''
 self.text_txt = ''
-self.html = StringIO()
-self.text = StringIO()
+self.html = io.StringIO()
+self.text = io.StringIO()
 self.para_lines = []
 self.linkResolver = None
 self.linkResolver = linkres

@@ -534,10 +530,10 @@ class DescriptionFormatter:
 self.state = self.stNONE
 whole_para = ' '.join(self.para_lines)
 self.addtext(whole_para)
-wrapped = textwrap.fill(whole_para.decode('utf-8'), 80,
+wrapped = textwrap.fill(whole_para, 80,
                         break_long_words=False,
                         break_on_hyphens=False)
-self.text.write(wrapped.encode('utf-8'))
+self.text.write(wrapped)
 self.html.write('</p>')
 del self.para_lines[:]

@@ -709,7 +705,7 @@ def parse_srclib(metadatapath):
 if not os.path.exists(metadatapath):
     return thisinfo

-metafile = open(metadatapath, "r")
+metafile = open(metadatapath, "r", encoding='utf-8')

 n = 0
 for line in metafile:

@@ -797,7 +793,7 @@ def read_metadata(xref=True):
 return ("fdroid.app:" + appid, "Dummy name - don't know yet")
 raise MetaDataException("Cannot resolve app id " + appid)

-for appid, app in apps.iteritems():
+for appid, app in apps.items():
     try:
         description_html(app.Description, linkres)
     except MetaDataException as e:

@@ -826,7 +822,7 @@ def get_default_app_info(metadatapath=None):
 if metadatapath is None:
     appid = None
 else:
-    appid, _ = common.get_extension(os.path.basename(metadatapath))
+    appid, _ = fdroidserver.common.get_extension(os.path.basename(metadatapath))

 app = App()
 app.metadatapath = metadatapath

@@ -846,17 +842,14 @@ esc_newlines = re.compile(r'\\( |\n)')
 # This function uses __dict__ to be faster
 def post_metadata_parse(app):

-    for k, v in app.__dict__.iteritems():
-        if k not in app._modified:
-            continue
+    for k in app._modified:
+        v = app.__dict__[k]
         if type(v) in (float, int):
             app.__dict__[k] = str(v)

     for build in app.builds:
-        for k, v in build.__dict__.iteritems():
-            if k not in build._modified:
-                continue
+        for k in build._modified:
+            v = build.__dict__[k]
             if type(v) in (float, int):
                 build.__dict__[k] = str(v)
                 continue

@@ -866,7 +859,7 @@ def post_metadata_parse(app):
 build.__dict__[k] = re.sub(esc_newlines, '', v).lstrip().rstrip()
 elif ftype == TYPE_BOOL:
     # TODO handle this using <xsd:element type="xsd:boolean> in a schema
-    if isinstance(v, basestring):
+    if isinstance(v, str):
         build.__dict__[k] = _decode_bool(v)
 elif ftype == TYPE_STRING:
     if isinstance(v, bool) and v:

@@ -904,36 +897,6 @@ def post_metadata_parse(app):
 #


-def _decode_list(data):
-    '''convert items in a list from unicode to basestring'''
-    rv = []
-    for item in data:
-        if isinstance(item, unicode):
-            item = item.encode('utf-8')
-        elif isinstance(item, list):
-            item = _decode_list(item)
-        elif isinstance(item, dict):
-            item = _decode_dict(item)
-        rv.append(item)
-    return rv
-
-
-def _decode_dict(data):
-    '''convert items in a dict from unicode to basestring'''
-    rv = {}
-    for k, v in data.iteritems():
-        if isinstance(k, unicode):
-            k = k.encode('utf-8')
-        if isinstance(v, unicode):
-            v = v.encode('utf-8')
-        elif isinstance(v, list):
-            v = _decode_list(v)
-        elif isinstance(v, dict):
-            v = _decode_dict(v)
-        rv[k] = v
-    return rv
-
-
 bool_true = re.compile(r'([Yy]es|[Tt]rue)')
 bool_false = re.compile(r'([Nn]o|[Ff]alse)')

@@ -947,17 +910,17 @@ def _decode_bool(s):


 def parse_metadata(metadatapath):
-    _, ext = common.get_extension(metadatapath)
-    accepted = common.config['accepted_formats']
+    _, ext = fdroidserver.common.get_extension(metadatapath)
+    accepted = fdroidserver.common.config['accepted_formats']
     if ext not in accepted:
         raise MetaDataException('"%s" is not an accepted format, convert to: %s' % (
             metadatapath, ', '.join(accepted)))

     app = App()
     app.metadatapath = metadatapath
-    app.id, _ = common.get_extension(os.path.basename(metadatapath))
+    app.id, _ = fdroidserver.common.get_extension(os.path.basename(metadatapath))

-    with open(metadatapath, 'r') as mf:
+    with open(metadatapath, 'r', encoding='utf-8') as mf:
         if ext == 'txt':
             parse_txt_metadata(mf, app)
         elif ext == 'json':

@@ -975,11 +938,9 @@ def parse_metadata(metadatapath):

 def parse_json_metadata(mf, app):

-    # fdroid metadata is only strings and booleans, no floats or ints. And
-    # json returns unicode, and fdroidserver still uses plain python strings
+    # fdroid metadata is only strings and booleans, no floats or ints.
     # TODO create schema using https://pypi.python.org/pypi/jsonschema
-    jsoninfo = json.load(mf, object_hook=_decode_dict,
-                         parse_int=lambda s: s,
+    jsoninfo = json.load(mf, parse_int=lambda s: s,
                          parse_float=lambda s: s)
     app.update_fields(jsoninfo)
     for f in ['Description', 'Maintainer Notes']:

@@ -1253,7 +1214,7 @@ def write_plaintext_metadata(mf, app, w_comment, w_field, w_build):
 w_field_always('Binaries')
 mf.write('\n')

-for build in sorted_builds(app.builds):
+for build in app.builds:

     if build.version == "Ignore":
         continue

@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # net.py - part of the FDroid server tools
 # Copyright (C) 2015 Hans-Christoph Steiner <hans@eds.org>

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # publish.py - part of the FDroid server tools
 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com

@@ -21,14 +20,14 @@
 import sys
 import os
 import shutil
-import md5
 import glob
+import hashlib
 from argparse import ArgumentParser
 import logging

-import common
-import metadata
-from common import FDroidPopen, SdkToolsPopen, BuildException
+from . import common
+from . import metadata
+from .common import FDroidPopen, SdkToolsPopen, BuildException

 config = None
 options = None

@@ -91,8 +90,8 @@ def main():
 vercodes = common.read_pkg_args(options.appid, True)
 allaliases = []
 for appid in allapps:
-    m = md5.new()
-    m.update(appid)
+    m = hashlib.md5()
+    m.update(appid.encode('utf-8'))
     keyalias = m.hexdigest()[:8]
     if keyalias in allaliases:
         logging.error("There is a keyalias collision - publishing halted")

@@ -156,12 +155,12 @@ def main():
 # For this particular app, the key alias is overridden...
 keyalias = config['keyaliases'][appid]
 if keyalias.startswith('@'):
-    m = md5.new()
-    m.update(keyalias[1:])
+    m = hashlib.md5()
+    m.update(keyalias[1:].encode('utf-8'))
     keyalias = m.hexdigest()[:8]
 else:
-    m = md5.new()
-    m.update(appid)
+    m = hashlib.md5()
+    m.update(appid.encode('utf-8'))
     keyalias = m.hexdigest()[:8]
 logging.info("Key alias: " + keyalias)

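The publish.py hunks above replace the removed md5 module with hashlib and encode the app id before hashing, since hashlib operates on bytes. A tiny sketch of that key-alias derivation with an example app id:

```python
import hashlib

appid = 'org.example.app'  # example value, not taken from the diff

# The Python 2 md5 module is gone; hashlib.md5() replaces md5.new(),
# and the str input must be encoded to bytes before hashing.
m = hashlib.md5()
m.update(appid.encode('utf-8'))
keyalias = m.hexdigest()[:8]
print(keyalias)
```
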
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # readmeta.py - part of the FDroid server tools
 # Copyright (C) 2014 Daniel Martí <mvdan@mvdan.cc>

@@ -18,8 +17,8 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 from argparse import ArgumentParser
-import common
-import metadata
+from . import common
+from . import metadata


 def main():

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # rewritemeta.py - part of the FDroid server tools
 # This cleans up the original .txt metadata file format.

@@ -21,20 +20,17 @@
 from argparse import ArgumentParser
 import os
 import logging
-try:
-    from cStringIO import StringIO
-except:
-    from StringIO import StringIO
+import io

-import common
-import metadata
+from . import common
+from . import metadata

 config = None
 options = None


 def proper_format(app):
-    s = StringIO()
+    s = io.StringIO()
     # TODO: currently reading entire file again, should reuse first
     # read in metadata.py
     with open(app.metadatapath, 'r') as f:

@@ -73,7 +69,7 @@ def main():
 if options.to is not None and options.to not in supported:
     parser.error("Must give a valid format to --to")

-for appid, app in apps.iteritems():
+for appid, app in apps.items():
     base, ext = common.get_extension(app.metadatapath)
     if not options.to and ext not in supported:
         logging.info("Ignoring %s file at '%s'" % (ext, app.metadatapath))

@@ -85,7 +81,7 @@ def main():

 if options.list:
     if not proper_format(app):
-        print app.metadatapath
+        print(app.metadatapath)
     continue

 with open(base + '.' + to_ext, 'w') as f:

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # scanner.py - part of the FDroid server tools
 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
@@ -23,9 +22,9 @@ import traceback
 from argparse import ArgumentParser
 import logging

-import common
-import metadata
-from common import BuildException, VCSException
+from . import common
+from . import metadata
+from .common import BuildException, VCSException

 config = None
 options = None
@@ -68,7 +67,7 @@ def scan_source(build_dir, root_dir, build):
     }

     def suspects_found(s):
-        for n, r in usual_suspects.iteritems():
+        for n, r in usual_suspects.items():
             if r.match(s):
                 yield n

@@ -95,7 +94,7 @@ def scan_source(build_dir, root_dir, build):
     scandelete_worked = set()

     def toignore(fd):
-        for k, paths in scanignore.iteritems():
+        for k, paths in scanignore.items():
             for p in paths:
                 if fd.startswith(p):
                     scanignore_worked.add(k)
@@ -103,7 +102,7 @@ def scan_source(build_dir, root_dir, build):
         return False

     def todelete(fd):
-        for k, paths in scandelete.iteritems():
+        for k, paths in scandelete.items():
             for p in paths:
                 if fd.startswith(p):
                     scandelete_worked.add(k)
@@ -200,10 +199,11 @@ def scan_source(build_dir, root_dir, build):
             elif ext == 'java':
                 if not os.path.isfile(fp):
                     continue
-                for line in file(fp):
-                    if 'DexClassLoader' in line:
-                        count += handleproblem('DexClassLoader', fd, fp)
-                        break
+                with open(fp, 'r') as f:
+                    for line in f:
+                        if 'DexClassLoader' in line:
+                            count += handleproblem('DexClassLoader', fd, fp)
+                            break

             elif ext == 'gradle':
                 if not os.path.isfile(fp):
@@ -267,7 +267,7 @@ def main():
     srclib_dir = os.path.join(build_dir, 'srclib')
     extlib_dir = os.path.join(build_dir, 'extlib')

-    for appid, app in apps.iteritems():
+    for appid, app in apps.items():

         if app.Disabled:
             logging.info("Skipping %s: disabled" % appid)

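Note: Python 3 drops the file() builtin, so the DexClassLoader scan above now iterates over a context-managed open(). A rough standalone equivalent of that loop (the function name and path are hypothetical):

    def count_dexclassloader_hits(path):
        # Flag the file once if any line mentions DexClassLoader.
        with open(path, 'r') as f:
            for line in f:
                if 'DexClassLoader' in line:
                    return 1
        return 0

    # e.g. count_dexclassloader_hits('src/org/example/Loader.java')
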
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # server.py - part of the FDroid server tools
 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
@@ -26,7 +25,8 @@ import pwd
 import subprocess
 from argparse import ArgumentParser
 import logging
-import common
+
+from . import common

 config = None
 options = None
@@ -296,12 +296,12 @@ def main():
         sftp = ssh.open_sftp()
         if os.path.basename(remotepath) \
                 not in sftp.listdir(os.path.dirname(remotepath)):
-            sftp.mkdir(remotepath, mode=0755)
+            sftp.mkdir(remotepath, mode=0o755)
         for repo_section in repo_sections:
             repo_path = os.path.join(remotepath, repo_section)
             if os.path.basename(repo_path) \
                     not in sftp.listdir(remotepath):
-                sftp.mkdir(repo_path, mode=0755)
+                sftp.mkdir(repo_path, mode=0o755)
         sftp.close()
         ssh.close()
     elif options.command == 'update':

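Note: Python 3 rejects the bare 0755 octal literal, hence mode=0o755 above. A quick sketch of the new spelling (temporary directory, illustration only):

    import os
    import stat
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'repo')
    os.mkdir(path, 0o755)  # Python 3 spelling of the old 0755 literal
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # usually 0o755, subject to umask
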
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # gpgsign.py - part of the FDroid server tools
 # Copyright (C) 2015, Ciaran Gultnieks, ciaran@ciarang.com
@@ -22,8 +21,8 @@ import os
 from argparse import ArgumentParser
 import logging

-import common
-from common import FDroidPopen
+from . import common
+from .common import FDroidPopen

 config = None
 options = None

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # stats.py - part of the FDroid server tools
 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
@@ -28,11 +27,12 @@ from argparse import ArgumentParser
 import paramiko
 import socket
 import logging
-import common
-import metadata
 import subprocess
 from collections import Counter

+from . import common
+from . import metadata
+

 def carbon_send(key, value):
     s = socket.socket()
@@ -75,7 +75,7 @@ def main():
         sys.exit(1)

     # Get all metadata-defined apps...
-    allmetaapps = [app for app in metadata.read_metadata().itervalues()]
+    allmetaapps = [app for app in metadata.read_metadata().values()]
     metaapps = [app for app in allmetaapps if not app.Disabled]

     statsdir = 'stats'

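Note: itervalues()/iteritems()/iterkeys() are gone in Python 3; the plain methods return views, so the list comprehension above keeps working unchanged. A tiny sketch with a made-up stand-in for the metadata dict:

    # Python 3 dict views replace the iter* methods.
    apps = {'org.example.app': {'Disabled': False}}  # hypothetical stand-in
    allmetaapps = [app for app in apps.values()]
    metaapps = [app for app in allmetaapps if not app['Disabled']]
    print(len(metaapps))
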
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # update.py - part of the FDroid server tools
 # Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com
@@ -27,7 +26,7 @@ import socket
 import zipfile
 import hashlib
 import pickle
-import urlparse
+import urllib.parse
 from datetime import datetime, timedelta
 from xml.dom.minidom import Document
 from argparse import ArgumentParser
@@ -35,16 +34,15 @@ import time
 from pyasn1.error import PyAsn1Error
 from pyasn1.codec.der import decoder, encoder
 from pyasn1_modules import rfc2315
-from hashlib import md5
 from binascii import hexlify, unhexlify

 from PIL import Image
 import logging

-import common
-import metadata
-from common import FDroidPopen, SdkToolsPopen
-from metadata import MetaDataException
+from . import common
+from . import metadata
+from .common import FDroidPopen, FDroidPopenBytes, SdkToolsPopen
+from .metadata import MetaDataException

 screen_densities = ['640', '480', '320', '240', '160', '120']

@@ -292,7 +290,7 @@ def delete_disabled_builds(apps, apkcache, repodirs):
     :param apkcache: current apk cache information
     :param repodirs: the repo directories to process
     """
-    for appid, app in apps.iteritems():
+    for appid, app in apps.items():
         for build in app.builds:
             if not build.disable:
                 continue
@@ -402,7 +400,7 @@ def getsig(apkpath):

     cert_encoded = encoder.encode(certificates)[4:]

-    return md5(cert_encoded.encode('hex')).hexdigest()
+    return hashlib.md5(hexlify(cert_encoded)).hexdigest()


 def scan_apks(apps, apkcache, repodir, knownapks, use_date_from_apk=False):
@@ -713,7 +711,7 @@ repo_pubkey_fingerprint = None
 def cert_fingerprint(data):
     digest = hashlib.sha256(data).digest()
     ret = []
-    ret.append(' '.join("%02X" % ord(b) for b in digest))
+    ret.append(' '.join("%02X" % b for b in bytearray(digest)))
     return " ".join(ret)


@@ -722,12 +720,12 @@ def extract_pubkey():
     if 'repo_pubkey' in config:
         pubkey = unhexlify(config['repo_pubkey'])
     else:
-        p = FDroidPopen([config['keytool'], '-exportcert',
+        p = FDroidPopenBytes([config['keytool'], '-exportcert',
                          '-alias', config['repo_keyalias'],
                          '-keystore', config['keystore'],
                          '-storepass:file', config['keystorepassfile']]
                         + config['smartcardoptions'],
                         output=False, stderr_to_stdout=False)
         if p.returncode != 0 or len(p.output) < 20:
             msg = "Failed to get repo pubkey!"
             if config['keystore'] == 'NONE':
@@ -774,7 +772,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):

     mirrorcheckfailed = False
     for mirror in config.get('mirrors', []):
-        base = os.path.basename(urlparse.urlparse(mirror).path.rstrip('/'))
+        base = os.path.basename(urllib.parse.urlparse(mirror).path.rstrip('/'))
         if config.get('nonstandardwebroot') is not True and base != 'fdroid':
             logging.error("mirror '" + mirror + "' does not end with 'fdroid'!")
             mirrorcheckfailed = True
@@ -788,9 +786,9 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
         repoel.setAttribute("icon", os.path.basename(config['archive_icon']))
         repoel.setAttribute("url", config['archive_url'])
         addElement('description', config['archive_description'], doc, repoel)
-        urlbasepath = os.path.basename(urlparse.urlparse(config['archive_url']).path)
+        urlbasepath = os.path.basename(urllib.parse.urlparse(config['archive_url']).path)
         for mirror in config.get('mirrors', []):
-            addElement('mirror', urlparse.urljoin(mirror, urlbasepath), doc, repoel)
+            addElement('mirror', urllib.parse.urljoin(mirror, urlbasepath), doc, repoel)

     else:
         repoel.setAttribute("name", config['repo_name'])
@@ -799,9 +797,9 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
         repoel.setAttribute("icon", os.path.basename(config['repo_icon']))
         repoel.setAttribute("url", config['repo_url'])
         addElement('description', config['repo_description'], doc, repoel)
-        urlbasepath = os.path.basename(urlparse.urlparse(config['repo_url']).path)
+        urlbasepath = os.path.basename(urllib.parse.urlparse(config['repo_url']).path)
         for mirror in config.get('mirrors', []):
-            addElement('mirror', urlparse.urljoin(mirror, urlbasepath), doc, repoel)
+            addElement('mirror', urllib.parse.urljoin(mirror, urlbasepath), doc, repoel)

     repoel.setAttribute("version", "15")
     repoel.setAttribute("timestamp", str(int(time.time())))
@@ -828,7 +826,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
             logging.warning("\tfdroid update --create-key")
             sys.exit(1)

-        repoel.setAttribute("pubkey", extract_pubkey())
+        repoel.setAttribute("pubkey", extract_pubkey().decode('utf-8'))
     root.appendChild(repoel)

     for appid in sortedids:
@@ -968,9 +966,9 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):
                 os.symlink(sigfile_path, siglinkname)

     if options.pretty:
-        output = doc.toprettyxml()
+        output = doc.toprettyxml(encoding='utf-8')
     else:
-        output = doc.toxml()
+        output = doc.toxml(encoding='utf-8')

     with open(os.path.join(repodir, 'index.xml'), 'wb') as f:
         f.write(output)
@@ -1025,7 +1023,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories):

 def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):

-    for appid, app in apps.iteritems():
+    for appid, app in apps.items():

         if app.ArchivePolicy:
             keepversions = int(app.ArchivePolicy[:-9])
@@ -1199,7 +1197,7 @@ def main():

     # Generate a list of categories...
     categories = set()
-    for app in apps.itervalues():
+    for app in apps.values():
         categories.update(app.Categories)

     # Read known apks data (will be updated and written back when we've finished)
@@ -1269,7 +1267,7 @@ def main():
     # level. When doing this, we use the info from the most recent version's apk.
     # We deal with figuring out when the app was added and last updated at the
     # same time.
-    for appid, app in apps.iteritems():
+    for appid, app in apps.items():
         bestver = 0
         for apk in apks + archapks:
             if apk['id'] == appid:
@@ -1303,13 +1301,13 @@ def main():
     # Sort the app list by name, then the web site doesn't have to by default.
     # (we had to wait until we'd scanned the apks to do this, because mostly the
     # name comes from there!)
-    sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid].Name.upper())
+    sortedids = sorted(apps.keys(), key=lambda appid: apps[appid].Name.upper())

     # APKs are placed into multiple repos based on the app package, providing
     # per-app subscription feeds for nightly builds and things like it
     if config['per_app_repos']:
         add_apks_to_per_app_repos(repodirs[0], apks)
-        for appid, app in apps.iteritems():
+        for appid, app in apps.items():
             repodir = os.path.join(appid, 'fdroid', 'repo')
             appdict = dict()
             appdict[appid] = app
@@ -1338,14 +1336,15 @@ def main():
     # Generate latest apps data for widget
     if os.path.exists(os.path.join('stats', 'latestapps.txt')):
         data = ''
-        for line in file(os.path.join('stats', 'latestapps.txt')):
-            appid = line.rstrip()
-            data += appid + "\t"
-            app = apps[appid]
-            data += app.Name + "\t"
-            if app.icon is not None:
-                data += app.icon + "\t"
-            data += app.License + "\n"
+        with open(os.path.join('stats', 'latestapps.txt'), 'r') as f:
+            for line in f:
+                appid = line.rstrip()
+                data += appid + "\t"
+                app = apps[appid]
+                data += app.Name + "\t"
+                if app.icon is not None:
+                    data += app.icon + "\t"
+                data += app.License + "\n"
         with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
             f.write(data)

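Note: two of the update.py changes above are easy to get wrong when porting by hand: bytes no longer have .encode('hex'), and urlparse moved into urllib.parse. A standalone sketch of both (the DER fragment and URL are made up, not taken from the commit):

    import hashlib
    import os
    import urllib.parse
    from binascii import hexlify

    cert_encoded = b'\x30\x82\x02\x22'  # hypothetical DER fragment
    # hexlify() replaces cert_encoded.encode('hex'); the md5 is taken over the hex form.
    print(hashlib.md5(hexlify(cert_encoded)).hexdigest())

    # urlparse now lives in urllib.parse.
    url = 'https://example.org/fdroid/repo'
    print(os.path.basename(urllib.parse.urlparse(url).path.rstrip('/')))  # 'repo'
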
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 #
 # verify.py - part of the FDroid server tools
 # Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com
@@ -23,9 +22,9 @@ import glob
 from argparse import ArgumentParser
 import logging

-import common
-import net
-from common import FDroidException
+from . import common
+from . import net
+from .common import FDroidException

 options = None
 config = None

@@ -46,17 +46,16 @@ else
     done
 fi

-# In the default configuration, the checks E123, E133, E226, E241 and E242 are
-# ignored because they are not rules unanimously accepted
-# On top of those, we ignore:
+# We ignore the following PEP8 warnings
+# * E123: closing bracket does not match indentation of opening bracket's line
+#   - Broken if multiple indentation levels start on a single line
 # * E501: line too long (82 > 79 characters)
 #   - Recommended for readability but not enforced
 #   - Some lines are awkward to wrap around a char limit
 # * W503: line break before binary operator
-#   - It's quite new
 #   - Quite pedantic

-PEP8_IGNORE="E123,E133,E226,E241,E242,E501,W503"
+PEP8_IGNORE="E123,E501,W503"

 err() {
     echo ERROR: "$@"
@@ -71,22 +70,21 @@ cmd_exists() {
     command -v $1 1>/dev/null
 }

-if cmd_exists pyflakes-python2; then
-    PYFLAKES=pyflakes-python2
-elif cmd_exists pyflakes; then
-    PYFLAKES=pyflakes
-else
-    PYFLAKES=echo
-    warn "pyflakes is not installed, using dummy placeholder!"
-fi
+find_command() {
+    local name=$1
+    for suff in "3" "-python3" ""; do
+        cmd=${1}${suff}
+        if cmd_exists $cmd; then
+            echo -n $cmd
+            return 0
+        fi
+    done
+    warn "$1 is not installed, using dummy placeholder!"
+    echo -n echo
+}

-if cmd_exists pep8-python2; then
-    PEP8=pep8-python2
-elif cmd_exists pep8; then
-    PEP8=pep8
-else
-    err "pep8 is not installed!"
-fi
+PYFLAKES=$(find_command pyflakes)
+PEP8=$(find_command pep8)

 if [ "$PY_FILES $PY_TEST_FILES" != " " ]; then
     if ! $PYFLAKES $PY_FILES $PY_TEST_FILES; then

@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3

 import os
 import sys
@@ -26,7 +26,7 @@ def vagrant(params, cwd=None, printout=False):
             line = p.stdout.readline()
             if len(line) == 0:
                 break
-            print line,
+            print(line)
             out += line
         p.wait()
     else:
@@ -63,13 +63,13 @@ config = {

 # load config file, if present
 if os.path.exists('makebuildserver.config.py'):
-    execfile('makebuildserver.config.py', config)
+    exec(compile(open('makebuildserver.config.py').read(), 'makebuildserver.config.py', 'exec'), config)
 elif os.path.exists('makebs.config.py'):
     # this is the old name for the config file
-    execfile('makebs.config.py', config)
+    exec(compile(open('makebs.config.py').read(), 'makebs.config.py', 'exec'), config)

 if not os.path.exists('makebuildserver') or not os.path.exists(serverdir):
-    print 'This must be run from the correct directory!'
+    print('This must be run from the correct directory!')
     sys.exit(1)

 if os.path.exists(boxfile):
@@ -81,7 +81,7 @@ if options.clean:
 # Update cached files.
 cachedir = config['cachedir']
 if not os.path.exists(cachedir):
-    os.makedirs(cachedir, 0755)
+    os.makedirs(cachedir, 0o755)

 cachefiles = [
     ('android-sdk_r24.4.1-linux.tgz',
@@ -318,17 +318,17 @@ for f, src, shasum in cachefiles:
     if os.path.exists(relpath) and os.stat(relpath).st_size == 0:
         os.remove(relpath)
     if not os.path.exists(relpath):
-        print "Downloading " + f + " to cache"
+        print("Downloading " + f + " to cache")
         if subprocess.call(['wget', src, '-O', f], cwd=cachedir) != 0:
-            print "...download of " + f + " failed."
+            print("...download of " + f + " failed.")
             sys.exit(1)
     if shasum:
         v = sha256_for_file(relpath)
         if v != shasum:
-            print "Invalid shasum of '" + v + "' detected for " + f
+            print("Invalid shasum of '" + v + "' detected for " + f)
             sys.exit(1)
         else:
-            print "...shasum verified for " + f
+            print("...shasum verified for " + f)

     wanted.append(f)

@@ -418,57 +418,57 @@ if os.path.exists(vf):
     with open(vf, 'r') as f:
         oldvf = f.read()
     if oldvf != vagrantfile:
-        print "Server configuration has changed, rebuild from scratch is required"
+        print("Server configuration has changed, rebuild from scratch is required")
         vagrant(['destroy', '-f'], serverdir)
     else:
-        print "Re-provisioning existing server"
+        print("Re-provisioning existing server")
         writevf = False
 else:
-    print "No existing server - building from scratch"
+    print("No existing server - building from scratch")
 if writevf:
     with open(vf, 'w') as f:
         f.write(vagrantfile)


-print "Configuring build server VM"
+print("Configuring build server VM")
 returncode, out = vagrant(['up', '--provision'], serverdir, printout=True)
 with open(os.path.join(serverdir, 'up.log'), 'w') as log:
     log.write(out)
 if returncode != 0:
-    print "Failed to configure server"
+    print("Failed to configure server")
     sys.exit(1)

-print "Writing buildserver ID"
+print("Writing buildserver ID")
 p = subprocess.Popen(['git', 'rev-parse', 'HEAD'], stdout=subprocess.PIPE)
 buildserverid = p.communicate()[0].strip()
-print "...ID is " + buildserverid
+print("...ID is " + buildserverid)
 subprocess.call(
     ['vagrant', 'ssh', '-c', 'sh -c "echo {0} >/home/vagrant/buildserverid"'
     .format(buildserverid)],
     cwd=serverdir)

-print "Stopping build server VM"
+print("Stopping build server VM")
 vagrant(['halt'], serverdir)

-print "Waiting for build server VM to be finished"
+print("Waiting for build server VM to be finished")
 ready = False
 while not ready:
     time.sleep(2)
     returncode, out = vagrant(['status'], serverdir)
     if returncode != 0:
-        print "Error while checking status"
+        print("Error while checking status")
         sys.exit(1)
     for line in out.splitlines():
         if line.startswith("default"):
             if line.find("poweroff") != -1:
                 ready = True
             else:
-                print "Status: " + line
+                print("Status: " + line)

-print "Packaging"
+print("Packaging")
 vagrant(['package', '--output', os.path.join('..', boxfile)], serverdir,
         printout=options.verbose)
-print "Adding box"
+print("Adding box")
 vagrant(['box', 'add', 'buildserver', boxfile, '-f'],
         printout=options.verbose)

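Note: execfile() does not exist in Python 3; the exec(compile(...)) form above is the direct replacement. A sketch of the same pattern on its own (the config file name is hypothetical and the block simply does nothing if the file is absent):

    import os

    config = {}
    cfgfile = 'makebuildserver.config.py'  # hypothetical path
    if os.path.exists(cfgfile):
        # Python 3 equivalent of execfile(cfgfile, config)
        with open(cfgfile) as f:
            code = compile(f.read(), cfgfile, 'exec')
        exec(code, config)
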
2  setup.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3

 from setuptools import setup
 import sys

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -37,7 +36,7 @@ class BuildTest(unittest.TestCase):
                     break
             return True
         else:
-            print 'no build-tools found: ' + build_tools
+            print('no build-tools found: ' + build_tools)
             return False

     def _find_all(self):

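Note: the only functional change in this test is the print statement becoming a function call, which Python 3 requires. Minimal sketch (the path is made up):

    build_tools = '/opt/android-sdk/build-tools'  # hypothetical path
    print('no build-tools found: ' + build_tools)
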
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -37,7 +36,7 @@ class CommonTest(unittest.TestCase):
                     break
             return True
         else:
-            print 'no build-tools found: ' + build_tools
+            print('no build-tools found: ' + build_tools)
             return False

     def _find_all(self):
@@ -61,7 +60,7 @@ class CommonTest(unittest.TestCase):
         if self._set_build_tools():
             self._find_all()
         else:
-            print 'no build-tools found: ' + build_tools
+            print('no build-tools found: ' + build_tools)

     def testIsApkDebuggable(self):
         config = dict()

@@ -51,10 +51,10 @@ cd $WORKSPACE/tests
 #------------------------------------------------------------------------------#
 # test building the source tarball, then installing it
 cd $WORKSPACE
-python2 setup.py sdist
+python3 setup.py sdist

 rm -rf $WORKSPACE/env
-virtualenv --python=python2 $WORKSPACE/env
+virtualenv --python=python3 $WORKSPACE/env
 . $WORKSPACE/env/bin/activate
 pip install dist/fdroidserver-*.tar.gz

@@ -66,10 +66,10 @@ fdroid=$WORKSPACE/env/bin/fdroid $WORKSPACE/tests/run-tests $apksource
 # test install using install direct from git repo
 cd $WORKSPACE
 rm -rf $WORKSPACE/env
-virtualenv --python=python2 --system-site-packages $WORKSPACE/env
+virtualenv --python=python3 --system-site-packages $WORKSPACE/env
 . $WORKSPACE/env/bin/activate
 pip install -e $WORKSPACE
-python2 setup.py install
+python3 setup.py install

 # run tests in new pip+virtualenv install
 fdroid=$WORKSPACE/env/bin/fdroid $WORKSPACE/tests/run-tests $apksource
@@ -86,7 +86,7 @@ sh hooks/pre-commit
 cd $WORKSPACE
 set +e
 # use the virtualenv python so pylint checks against its installed libs
-PYTHONPATH=$WORKSPACE/.pylint-plugins python2 /usr/bin/pylint \
+PYTHONPATH=$WORKSPACE/.pylint-plugins python3 /usr/bin/pylint \
     --output-format=parseable --reports=n \
     --load-plugins astng_hashlib \
     fdroidserver/*.py fdroid makebuildserver setup.py > $WORKSPACE/pylint.parseable

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 import os
 import sys

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -32,7 +31,7 @@ class InstallTest(unittest.TestCase):
         devices = fdroidserver.install.devices()
         self.assertIsInstance(devices, list, 'install.devices() did not return a list!')
         for device in devices:
-            self.assertIsInstance(device, basestring)
+            self.assertIsInstance(device, str)


 if __name__ == "__main__":

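Note: basestring is gone in Python 3, so the device serials returned by install.devices() are checked against str above. A sketch with made-up serials:

    # In Python 3 all text is str; bytes are a separate type.
    for device in ['emulator-5554', '0123456789ABCDEF']:  # hypothetical adb serials
        assert isinstance(device, str)
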
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -43,7 +42,7 @@ class MetadataTest(unittest.TestCase):
             savepath = os.path.join('metadata', appid + '.pickle')
             frommeta = app.field_dict()
             self.assertTrue(appid in apps)
-            with open(savepath, 'r') as f:
+            with open(savepath, 'rb') as f:
                 frompickle = pickle.load(f)
             self.assertEquals(frommeta, frompickle)
             # Uncomment to overwrite

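Note: pickle streams are binary, so Python 3 needs the file opened in 'rb'/'wb' mode as above; text mode would try to decode the bytes and fail. Standalone sketch (temporary file, made-up payload):

    import os
    import pickle
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'example.pickle')
    with open(path, 'wb') as f:
        pickle.dump({'Name': 'Example App'}, f)
    with open(path, 'rb') as f:
        print(pickle.load(f))
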
@@ -83,7 +83,7 @@ fi

 # allow the location of python to be overridden
 if [ -z $python ]; then
-    python=python2
+    python=python3
 fi

 set -x # show each command as it is executed

@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3

 # http://www.drdobbs.com/testing/unit-testing-with-python/240165163

@@ -9,6 +8,7 @@ import optparse
 import os
 import sys
 import unittest
+from binascii import unhexlify

 localmodule = os.path.realpath(
     os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())), '..'))
@@ -56,10 +56,10 @@ class UpdateTest(unittest.TestCase):
         self.assertEquals(len(sig), len(pysig),
                           "the length of the two sigs are different!")
         try:
-            self.assertEquals(sig.decode('hex'), pysig.decode('hex'),
+            self.assertEquals(unhexlify(sig), unhexlify(pysig),
                               "the length of the two sigs are different!")
         except TypeError as e:
-            print e
+            print(e)
             self.assertTrue(False, 'TypeError!')

     def testBadGetsig(self):

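Note: str.decode('hex') no longer exists, which is why the test now compares unhexlify() results. Sketch with a made-up digest:

    from binascii import hexlify, unhexlify

    sig = '9bf7a6a67f95688daec75eae4c1be480'  # hypothetical hex digest
    raw = unhexlify(sig)                      # bytes
    assert hexlify(raw).decode() == sig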