Mirror of https://github.com/f-droid/fdroidserver.git
Synced 2025-11-08 16:30:28 +03:00

Merge branch 'fixes-for-issuebot' into 'master'

fixes for issuebot

See merge request fdroid/fdroidserver!723

Commit 82df54a8a2
9 changed files with 346 additions and 310 deletions
.gitignore (vendored): 1 addition

@@ -57,6 +57,7 @@ makebuildserver.config.py
 /tests/repo/obb.mainpatch.current/en-US/icon_WI0pkO3LsklrsTAnRr-OQSxkkoMY41lYe2-fAvXLiLg=.png
 /tests/repo/org.videolan.vlc/en-US/icon_yAfSvPRJukZzMMfUzvbYqwaD1XmHXNtiPBtuPVHW-6s=.png
 /tests/urzip-πÇÇπÇÇ现代汉语通用字-български-عربي1234.apk
+/tests/virustotal/
 /unsigned/
 
 # generated by gettext
fdroidserver/common.py

@@ -37,6 +37,8 @@ import logging
 import hashlib
 import socket
 import base64
+import urllib.parse
+import urllib.request
 import zipfile
 import tempfile
 import json
@@ -84,6 +86,9 @@ VALID_APPLICATION_ID_REGEX = re.compile(r'''(?:^[a-z_]+(?:\d*[a-zA-Z_]*)*)(?:\.[
                                         re.IGNORECASE)
 ANDROID_PLUGIN_REGEX = re.compile(r'''\s*(:?apply plugin:|id)\(?\s*['"](android|com\.android\.application)['"]\s*\)?''')
 
+SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?')
+GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:([^'"]+)['"]''')
+
 MAX_VERSION_CODE = 0x7fffffff  # Java's Integer.MAX_VALUE (2147483647)
 
 XMLNS_ANDROID = '{http://schemas.android.com/apk/res/android}'
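The two new module-level patterns let common.py find the Gradle settings file and the subprojects it declares, which get_gradle_subdir() below relies on. A minimal sketch of what they match; the example strings are illustrative, not taken from this merge request:

    import re

    SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?')   # copied from the hunk above
    GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:([^'"]+)['"]''')

    # both the Groovy and the Kotlin DSL settings file names are recognized
    assert SETTINGS_GRADLE_REGEX.match('settings.gradle')
    assert SETTINGS_GRADLE_REGEX.match('settings.gradle.kts')

    # subproject names are pulled out of include lines such as:  include ':app', ':libpicker'
    print(GRADLE_SUBPROJECT_REGEX.findall("include ':app', ':libpicker'"))  # ['app', 'libpicker']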
@@ -1653,6 +1658,152 @@ def is_strict_application_id(name):
         and '.' in name
 
 
+def get_all_gradle_and_manifests(build_dir):
+    paths = []
+    for root, dirs, files in os.walk(build_dir):
+        for f in sorted(files):
+            if f == 'AndroidManifest.xml' \
+               or f.endswith('.gradle') or f.endswith('.gradle.kts'):
+                full = os.path.join(root, f)
+                paths.append(full)
+    return paths
+
+
+def get_gradle_subdir(build_dir, paths):
+    """get the subdir where the gradle build is based"""
+    first_gradle_dir = None
+    for path in paths:
+        if not first_gradle_dir:
+            first_gradle_dir = os.path.relpath(os.path.dirname(path), build_dir)
+        if os.path.exists(path) and SETTINGS_GRADLE_REGEX.match(os.path.basename(path)):
+            with open(path) as fp:
+                for m in GRADLE_SUBPROJECT_REGEX.finditer(fp.read()):
+                    for f in glob.glob(os.path.join(os.path.dirname(path), m.group(1), 'build.gradle*')):
+                        with open(f) as fp:
+                            while True:
+                                line = fp.readline()
+                                if not line:
+                                    break
+                                if ANDROID_PLUGIN_REGEX.match(line):
+                                    return os.path.relpath(os.path.dirname(f), build_dir)
+    if first_gradle_dir and first_gradle_dir != '.':
+        return first_gradle_dir
+
+    return ''
+
+
+def getrepofrompage(url):
+    """Get the repo type and address from the given web page.
+
+    The page is scanned in a rather naive manner for 'git clone xxxx',
+    'hg clone xxxx', etc, and when one of these is found it's assumed
+    that's the information we want. Returns repotype, address, or
+    None, reason
+
+    """
+    if not url.startswith('http'):
+        return (None, _('{url} does not start with "http"!'.format(url=url)))
+    req = urllib.request.urlopen(url)  # nosec B310 non-http URLs are filtered out
+    if req.getcode() != 200:
+        return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode()))
+    page = req.read().decode(req.headers.get_content_charset())
+
+    # Works for BitBucket
+    m = re.search('data-fetch-url="(.*)"', page)
+    if m is not None:
+        repo = m.group(1)
+
+        if repo.endswith('.git'):
+            return ('git', repo)
+
+        return ('hg', repo)
+
+    # Works for BitBucket (obsolete)
+    index = page.find('hg clone')
+    if index != -1:
+        repotype = 'hg'
+        repo = page[index + 9:]
+        index = repo.find('<')
+        if index == -1:
+            return (None, _("Error while getting repo address"))
+        repo = repo[:index]
+        repo = repo.split('"')[0]
+        return (repotype, repo)
+
+    # Works for BitBucket (obsolete)
+    index = page.find('git clone')
+    if index != -1:
+        repotype = 'git'
+        repo = page[index + 10:]
+        index = repo.find('<')
+        if index == -1:
+            return (None, _("Error while getting repo address"))
+        repo = repo[:index]
+        repo = repo.split('"')[0]
+        return (repotype, repo)
+
+    return (None, _("No information found.") + page)
+
+
+def get_app_from_url(url):
+    """Guess basic app metadata from the URL.
+
+    The URL must include a network hostname, unless it is an lp:,
+    file:, or git/ssh URL. This throws ValueError on bad URLs to
+    match urlparse().
+
+    """
+
+    parsed = urllib.parse.urlparse(url)
+    invalid_url = False
+    if not parsed.scheme or not parsed.path:
+        invalid_url = True
+
+    app = fdroidserver.metadata.App()
+    app.Repo = url
+    if url.startswith('git://') or url.startswith('git@'):
+        app.RepoType = 'git'
+    elif parsed.netloc == 'github.com':
+        app.RepoType = 'git'
+        app.SourceCode = url
+        app.IssueTracker = url + '/issues'
+    elif parsed.netloc == 'gitlab.com':
+        # git can be fussy with gitlab URLs unless they end in .git
+        if url.endswith('.git'):
+            url = url[:-4]
+        app.Repo = url + '.git'
+        app.RepoType = 'git'
+        app.SourceCode = url
+        app.IssueTracker = url + '/issues'
+    elif parsed.netloc == 'notabug.org':
+        if url.endswith('.git'):
+            url = url[:-4]
+        app.Repo = url + '.git'
+        app.RepoType = 'git'
+        app.SourceCode = url
+        app.IssueTracker = url + '/issues'
+    elif parsed.netloc == 'bitbucket.org':
+        if url.endswith('/'):
+            url = url[:-1]
+        app.SourceCode = url + '/src'
+        app.IssueTracker = url + '/issues'
+        # Figure out the repo type and adddress...
+        app.RepoType, app.Repo = getrepofrompage(url)
+    elif url.startswith('https://') and url.endswith('.git'):
+        app.RepoType = 'git'
+
+    if not parsed.netloc and parsed.scheme in ('git', 'http', 'https', 'ssh'):
+        invalid_url = True
+
+    if invalid_url:
+        raise ValueError(_('"{url}" is not a valid URL!'.format(url=url)))
+
+    if not app.RepoType:
+        raise FDroidException("Unable to determine vcs type. " + app.Repo)
+
+    return app
+
+
 def getsrclib(spec, srclib_dir, subdir=None, basepath=False,
               raw=False, prepare=True, preponly=False, refresh=True,
               build=None):
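Moving these helpers into common.py gives other modules, and external callers such as issuebot, a single import path for guessing app metadata from a URL and locating the Gradle build. A rough usage sketch; the checkout directory is illustrative, and the URL is the same one the test suite uses:

    import fdroidserver.common as common

    # guess repo type plus SourceCode/IssueTracker links from a forge URL
    app = common.get_app_from_url('https://gitlab.com/fdroid/ci-test-app')
    print(app.RepoType, app.Repo)   # git https://gitlab.com/fdroid/ci-test-app.git

    # collect every AndroidManifest.xml and *.gradle/*.gradle.kts file in a checkout
    build_dir = 'build/ci-test-app'  # illustrative path to an existing clone
    paths = common.get_all_gradle_and_manifests(build_dir)

    # pick the subproject whose build.gradle applies the Android application plugin
    print(common.get_gradle_subdir(build_dir, paths) or '(project root)')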
fdroidserver/import.py

@@ -18,14 +18,10 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import git
-import glob
 import json
 import os
-import re
 import shutil
 import sys
-import urllib.parse
-import urllib.request
 import yaml
 from argparse import ArgumentParser
 import logging
@@ -40,121 +36,12 @@ from . import common
 from . import metadata
 from .exception import FDroidException
 
-SETTINGS_GRADLE = re.compile(r'settings\.gradle(?:\.kts)?')
-GRADLE_SUBPROJECT = re.compile(r'''['"]:([^'"]+)['"]''')
-
-
-# Get the repo type and address from the given web page. The page is scanned
-# in a rather naive manner for 'git clone xxxx', 'hg clone xxxx', etc, and
-# when one of these is found it's assumed that's the information we want.
-# Returns repotype, address, or None, reason
-def getrepofrompage(url):
-    if not url.startswith('http'):
-        return (None, _('{url} does not start with "http"!'.format(url=url)))
-    req = urllib.request.urlopen(url)  # nosec B310 non-http URLs are filtered out
-    if req.getcode() != 200:
-        return (None, 'Unable to get ' + url + ' - return code ' + str(req.getcode()))
-    page = req.read().decode(req.headers.get_content_charset())
-
-    # Works for BitBucket
-    m = re.search('data-fetch-url="(.*)"', page)
-    if m is not None:
-        repo = m.group(1)
-
-        if repo.endswith('.git'):
-            return ('git', repo)
-
-        return ('hg', repo)
-
-    # Works for BitBucket (obsolete)
-    index = page.find('hg clone')
-    if index != -1:
-        repotype = 'hg'
-        repo = page[index + 9:]
-        index = repo.find('<')
-        if index == -1:
-            return (None, _("Error while getting repo address"))
-        repo = repo[:index]
-        repo = repo.split('"')[0]
-        return (repotype, repo)
-
-    # Works for BitBucket (obsolete)
-    index = page.find('git clone')
-    if index != -1:
-        repotype = 'git'
-        repo = page[index + 10:]
-        index = repo.find('<')
-        if index == -1:
-            return (None, _("Error while getting repo address"))
-        repo = repo[:index]
-        repo = repo.split('"')[0]
-        return (repotype, repo)
-
-    return (None, _("No information found.") + page)
-
-
 config = None
 options = None
 
 
-def get_app_from_url(url):
-    """Guess basic app metadata from the URL.
-
-    The URL must include a network hostname, unless it is an lp:,
-    file:, or git/ssh URL. This throws ValueError on bad URLs to
-    match urlparse().
-
-    """
-
-    parsed = urllib.parse.urlparse(url)
-    invalid_url = False
-    if not parsed.scheme or not parsed.path:
-        invalid_url = True
-
-    app = metadata.App()
-    app.Repo = url
-    if url.startswith('git://') or url.startswith('git@'):
-        app.RepoType = 'git'
-    elif parsed.netloc == 'github.com':
-        app.RepoType = 'git'
-        app.SourceCode = url
-        app.IssueTracker = url + '/issues'
-    elif parsed.netloc == 'gitlab.com':
-        # git can be fussy with gitlab URLs unless they end in .git
-        if url.endswith('.git'):
-            url = url[:-4]
-        app.Repo = url + '.git'
-        app.RepoType = 'git'
-        app.SourceCode = url
-        app.IssueTracker = url + '/issues'
-    elif parsed.netloc == 'notabug.org':
-        if url.endswith('.git'):
-            url = url[:-4]
-        app.Repo = url + '.git'
-        app.RepoType = 'git'
-        app.SourceCode = url
-        app.IssueTracker = url + '/issues'
-    elif parsed.netloc == 'bitbucket.org':
-        if url.endswith('/'):
-            url = url[:-1]
-        app.SourceCode = url + '/src'
-        app.IssueTracker = url + '/issues'
-        # Figure out the repo type and adddress...
-        app.RepoType, app.Repo = getrepofrompage(url)
-    elif url.startswith('https://') and url.endswith('.git'):
-        app.RepoType = 'git'
-
-    if not parsed.netloc and parsed.scheme in ('git', 'http', 'https', 'ssh'):
-        invalid_url = True
-
-    if invalid_url:
-        raise ValueError(_('"{url}" is not a valid URL!'.format(url=url)))
-
-    if not app.RepoType:
-        raise FDroidException("Unable to determine vcs type. " + app.Repo)
-
-    return app
-
-
+# WARNING! This cannot be imported as a Python module, so reuseable functions need to go into common.py!
+
+
 def clone_to_tmp_dir(app):
     tmp_dir = 'tmp'
@@ -171,40 +58,6 @@ def clone_to_tmp_dir(app):
     return tmp_dir
 
 
-def get_all_gradle_and_manifests(build_dir):
-    paths = []
-    for root, dirs, files in os.walk(build_dir):
-        for f in sorted(files):
-            if f == 'AndroidManifest.xml' \
-               or f.endswith('.gradle') or f.endswith('.gradle.kts'):
-                full = os.path.join(root, f)
-                paths.append(full)
-    return paths
-
-
-def get_gradle_subdir(build_dir, paths):
-    """get the subdir where the gradle build is based"""
-    first_gradle_dir = None
-    for path in paths:
-        if not first_gradle_dir:
-            first_gradle_dir = os.path.relpath(os.path.dirname(path), build_dir)
-        if os.path.exists(path) and SETTINGS_GRADLE.match(os.path.basename(path)):
-            with open(path) as fp:
-                for m in GRADLE_SUBPROJECT.finditer(fp.read()):
-                    for f in glob.glob(os.path.join(os.path.dirname(path), m.group(1), 'build.gradle*')):
-                        with open(f) as fp:
-                            while True:
-                                line = fp.readline()
-                                if not line:
-                                    break
-                                if common.ANDROID_PLUGIN_REGEX.match(line):
-                                    return os.path.relpath(os.path.dirname(f), build_dir)
-    if first_gradle_dir and first_gradle_dir != '.':
-        return first_gradle_dir
-
-    return ''
-
-
 def main():
 
     global config, options
@@ -220,6 +73,8 @@ def main():
                         help=_("Comma separated list of categories."))
     parser.add_argument("-l", "--license", default=None,
                         help=_("Overall license of the project."))
+    parser.add_argument("--omit-disable", default=False,
+                        help=_("Do not add 'disable:' to the generated build entries"))
     parser.add_argument("--rev", default=None,
                         help=_("Allows a different revision (or git branch) to be specified for the initial import"))
     metadata.add_metadata_arguments(parser)
@@ -256,10 +111,11 @@ def main():
             break
         write_local_file = True
     elif options.url:
-        app = get_app_from_url(options.url)
+        app = common.get_app_from_url(options.url)
        tmp_importer_dir = clone_to_tmp_dir(app)
        git_repo = git.repo.Repo(tmp_importer_dir)
-        build.disable = 'Generated by import.py - check/set version fields and commit id'
+        if not options.omit_disable:
+            build.disable = 'Generated by import.py - check/set version fields and commit id'
        write_local_file = False
     else:
        raise FDroidException("Specify project url.")
@@ -268,8 +124,8 @@ def main():
     build.commit = common.get_head_commit_id(git_repo)
 
     # Extract some information...
-    paths = get_all_gradle_and_manifests(tmp_importer_dir)
-    subdir = get_gradle_subdir(tmp_importer_dir, paths)
+    paths = common.get_all_gradle_and_manifests(tmp_importer_dir)
+    subdir = common.get_gradle_subdir(tmp_importer_dir, paths)
     if paths:
         versionName, versionCode, package = common.parse_androidmanifests(paths, app)
         if not package:
@@ -290,8 +146,10 @@ def main():
     build.versionCode = versionCode or '0'  # TODO heinous but this is still a str
     if options.subdir:
         build.subdir = options.subdir
+        build.gradle = ['yes']
     elif subdir:
         build.subdir = subdir
+        build.gradle = ['yes']
 
     if options.license:
         app.License = options.license
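In practice the new flag is passed on the fdroid import command line, so the generated build entry can be used right away instead of shipping with a disable: stanza, e.g. (the repository URL is illustrative): fdroid import --url https://gitlab.com/fdroid/ci-test-app --omit-disable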
fdroidserver/net.py

@@ -17,16 +17,17 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import os
 
 import requests
 
+HEADERS = {'User-Agent': 'F-Droid'}
+
 
 def download_file(url, local_filename=None, dldir='tmp'):
     filename = url.split('/')[-1]
     if local_filename is None:
         local_filename = os.path.join(dldir, filename)
     # the stream=True parameter keeps memory usage low
-    r = requests.get(url, stream=True, allow_redirects=True)
+    r = requests.get(url, stream=True, allow_redirects=True, headers=HEADERS)
     r.raise_for_status()
     with open(local_filename, 'wb') as f:
         for chunk in r.iter_content(chunk_size=1024):
@@ -48,16 +49,15 @@ def http_get(url, etag=None, timeout=600):
     - The raw content that was downloaded or None if it did not change
     - The new eTag as returned by the HTTP request
     """
-    headers = {'User-Agent': 'F-Droid'}
     # TODO disable TLS Session IDs and TLS Session Tickets
     # (plain text cookie visible to anyone who can see the network traffic)
     if etag:
-        r = requests.head(url, headers=headers, timeout=timeout)
+        r = requests.head(url, headers=HEADERS, timeout=timeout)
         r.raise_for_status()
         if 'ETag' in r.headers and etag == r.headers['ETag']:
             return None, etag
 
-    r = requests.get(url, headers=headers, timeout=timeout)
+    r = requests.get(url, headers=HEADERS, timeout=timeout)
     r.raise_for_status()
 
     new_etag = None
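With the shared HEADERS dict, every request made through this module (and through server.py, which now imports net.HEADERS) identifies itself the same way. A small usage sketch, assuming a tmp/ download directory exists; the URL is illustrative:

    from fdroidserver import net

    # plain download; the User-Agent: F-Droid header comes from net.HEADERS
    net.download_file('https://f-droid.org/repo/index-v1.jar', dldir='tmp')

    # conditional fetch: returns (None, etag) when the server still reports the same ETag
    content, etag = net.http_get('https://f-droid.org/repo/index-v1.jar', etag=None)
    if content is not None:
        print(len(content), etag)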
fdroidserver/server.py

@@ -19,6 +19,7 @@
 import sys
 import glob
 import hashlib
+import json
 import os
 import paramiko
 import pwd
@@ -447,9 +448,8 @@ def update_servergitmirrors(servergitmirrors, repo_section):
 
 
 def upload_to_android_observatory(repo_section):
-    # depend on requests and lxml only if users enable AO
     import requests
-    from lxml.html import fromstring
+    requests  # stop unused import warning
 
     if options.verbose:
         logging.getLogger("requests").setLevel(logging.INFO)
@@ -460,44 +460,53 @@ def upload_to_android_observatory(repo_section):
 
     if repo_section == 'repo':
         for f in sorted(glob.glob(os.path.join(repo_section, '*.apk'))):
-            fpath = f
-            fname = os.path.basename(f)
-            r = requests.post('https://androidobservatory.org/',
-                              data={'q': update.sha256sum(f), 'searchby': 'hash'})
-            if r.status_code == 200:
-                # from now on XPath will be used to retrieve the message in the HTML
-                # androidobservatory doesn't have a nice API to talk with
-                # so we must scrape the page content
-                tree = fromstring(r.text)
-
-                href = None
-                for element in tree.xpath("//html/body/div/div/table/tbody/tr/td/a"):
-                    a = element.attrib.get('href')
-                    if a:
-                        m = re.match(r'^/app/[0-9A-F]{40}$', a)
-                        if m:
-                            href = m.group()
-
-                page = 'https://androidobservatory.org'
-                message = ''
-                if href:
-                    message = (_('Found {apkfilename} at {url}')
-                               .format(apkfilename=fname, url=(page + href)))
-                if message:
-                    logging.debug(message)
-                    continue
-
-            # upload the file with a post request
-            logging.info(_('Uploading {apkfilename} to androidobservatory.org')
-                         .format(apkfilename=fname))
-            r = requests.post('https://androidobservatory.org/upload',
-                              files={'apk': (fname, open(fpath, 'rb'))},
-                              allow_redirects=False)
+            upload_apk_to_android_observatory(f)
+
+
+def upload_apk_to_android_observatory(path):
+    # depend on requests and lxml only if users enable AO
+    import requests
+    from . import net
+    from lxml.html import fromstring
+
+    apkfilename = os.path.basename(path)
+    r = requests.post('https://androidobservatory.org/',
+                      data={'q': update.sha256sum(path), 'searchby': 'hash'},
+                      headers=net.HEADERS)
+    if r.status_code == 200:
+        # from now on XPath will be used to retrieve the message in the HTML
+        # androidobservatory doesn't have a nice API to talk with
+        # so we must scrape the page content
+        tree = fromstring(r.text)
+
+        href = None
+        for element in tree.xpath("//html/body/div/div/table/tbody/tr/td/a"):
+            a = element.attrib.get('href')
+            if a:
+                m = re.match(r'^/app/[0-9A-F]{40}$', a)
+                if m:
+                    href = m.group()
+
+        page = 'https://androidobservatory.org'
+        message = ''
+        if href:
+            message = (_('Found {apkfilename} at {url}')
+                       .format(apkfilename=apkfilename, url=(page + href)))
+        if message:
+            logging.debug(message)
+
+    # upload the file with a post request
+    logging.info(_('Uploading {apkfilename} to androidobservatory.org')
+                 .format(apkfilename=apkfilename))
+    r = requests.post('https://androidobservatory.org/upload',
+                      files={'apk': (apkfilename, open(path, 'rb'))},
+                      headers=net.HEADERS,
+                      allow_redirects=False)
 
 
 def upload_to_virustotal(repo_section, virustotal_apikey):
-    import json
     import requests
+    requests  # stop unused import warning
 
     logging.getLogger("urllib3").setLevel(logging.WARNING)
     logging.getLogger("requests").setLevel(logging.WARNING)
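Pulling the per-APK work out of the repo loop means a single file can now be submitted on its own, which is what issuebot-style callers need, while the repo-section entry point keeps its old behaviour by delegating. A hedged sketch; the APK path is illustrative and must exist, since its SHA-256 is computed first:

    import fdroidserver.server as server

    # look the APK up by hash on androidobservatory.org, then upload it via POST
    # if it is not already known there
    server.upload_apk_to_android_observatory('repo/org.fdroid.fdroid_1008050.apk')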
@@ -514,82 +523,96 @@ def upload_to_virustotal(repo_section, virustotal_apikey):
 
     for packageName, packages in data['packages'].items():
         for package in packages:
-            outputfilename = os.path.join('virustotal',
-                                          packageName + '_' + str(package.get('versionCode'))
-                                          + '_' + package['hash'] + '.json')
-            if os.path.exists(outputfilename):
-                logging.debug(package['apkName'] + ' results are in ' + outputfilename)
-                continue
-            filename = package['apkName']
-            repofilename = os.path.join(repo_section, filename)
-            logging.info('Checking if ' + repofilename + ' is on virustotal')
-
-            headers = {
-                "User-Agent": "F-Droid"
-            }
-            data = {
-                'apikey': virustotal_apikey,
-                'resource': package['hash'],
-            }
-            needs_file_upload = False
-            while True:
-                r = requests.get('https://www.virustotal.com/vtapi/v2/file/report?'
-                                 + urllib.parse.urlencode(data), headers=headers)
-                if r.status_code == 200:
-                    response = r.json()
-                    if response['response_code'] == 0:
-                        needs_file_upload = True
-                    else:
-                        response['filename'] = filename
-                        response['packageName'] = packageName
-                        response['versionCode'] = package.get('versionCode')
-                        response['versionName'] = package.get('versionName')
-                        with open(outputfilename, 'w') as fp:
-                            json.dump(response, fp, indent=2, sort_keys=True)
-
-                    if response.get('positives', 0) > 0:
-                        logging.warning(repofilename + ' has been flagged by virustotal '
-                                        + str(response['positives']) + ' times:'
-                                        + '\n\t' + response['permalink'])
-                    break
-                elif r.status_code == 204:
-                    time.sleep(10)  # wait for public API rate limiting
-
-            upload_url = None
-            if needs_file_upload:
-                manual_url = 'https://www.virustotal.com/'
-                size = os.path.getsize(repofilename)
-                if size > 200000000:
-                    # VirusTotal API 200MB hard limit
-                    logging.error(_('{path} more than 200MB, manually upload: {url}')
-                                  .format(path=repofilename, url=manual_url))
-                elif size > 32000000:
-                    # VirusTotal API requires fetching a URL to upload bigger files
-                    r = requests.get('https://www.virustotal.com/vtapi/v2/file/scan/upload_url?'
-                                     + urllib.parse.urlencode(data), headers=headers)
-                    if r.status_code == 200:
-                        upload_url = r.json().get('upload_url')
-                    elif r.status_code == 403:
-                        logging.error(_('VirusTotal API key cannot upload files larger than 32MB, '
-                                        + 'use {url} to upload {path}.')
-                                      .format(path=repofilename, url=manual_url))
-                    else:
-                        r.raise_for_status()
-                else:
-                    upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan'
-
-            if upload_url:
-                logging.info(_('Uploading {apkfilename} to virustotal')
-                             .format(apkfilename=repofilename))
-                files = {
-                    'file': (filename, open(repofilename, 'rb'))
-                }
-                r = requests.post(upload_url, data=data, headers=headers, files=files)
-                logging.debug(_('If this upload fails, try manually uploading to {url}')
-                              .format(url=manual_url))
-                r.raise_for_status()
-                response = r.json()
-                logging.info(response['verbose_msg'] + " " + response['permalink'])
+            upload_apk_to_virustotal(virustotal_apikey, **package)
+
+
+def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
+                             versionCode, **kwargs):
+    import requests
+
+    outputfilename = os.path.join('virustotal',
+                                  packageName + '_' + str(versionCode)
+                                  + '_' + hash + '.json')
+    if os.path.exists(outputfilename):
+        logging.debug(apkName + ' results are in ' + outputfilename)
+        return outputfilename
+    repofilename = os.path.join('repo', apkName)
+    logging.info('Checking if ' + repofilename + ' is on virustotal')
+
+    headers = {
+        "User-Agent": "F-Droid"
+    }
+    if 'headers' in kwargs:
+        for k, v in kwargs['headers'].items():
+            headers[k] = v
+    data = {
+        'apikey': virustotal_apikey,
+        'resource': hash,
+    }
+    needs_file_upload = False
+    while True:
+        r = requests.get('https://www.virustotal.com/vtapi/v2/file/report?'
+                         + urllib.parse.urlencode(data), headers=headers)
+        if r.status_code == 200:
+            response = r.json()
+            if response['response_code'] == 0:
+                needs_file_upload = True
+            else:
+                response['filename'] = apkName
+                response['packageName'] = packageName
+                response['versionCode'] = versionCode
+                if kwargs.get('versionName'):
+                    response['versionName'] = kwargs.get('versionName')
+                with open(outputfilename, 'w') as fp:
+                    json.dump(response, fp, indent=2, sort_keys=True)
+
+            if response.get('positives', 0) > 0:
+                logging.warning(repofilename + ' has been flagged by virustotal '
+                                + str(response['positives']) + ' times:'
+                                + '\n\t' + response['permalink'])
+            break
+        elif r.status_code == 204:
+            logging.warning(_('virustotal.com is rate limiting, waiting to retry...'))
+            time.sleep(30)  # wait for public API rate limiting
+
+    upload_url = None
+    if needs_file_upload:
+        manual_url = 'https://www.virustotal.com/'
+        size = os.path.getsize(repofilename)
+        if size > 200000000:
+            # VirusTotal API 200MB hard limit
+            logging.error(_('{path} more than 200MB, manually upload: {url}')
+                          .format(path=repofilename, url=manual_url))
+        elif size > 32000000:
+            # VirusTotal API requires fetching a URL to upload bigger files
+            r = requests.get('https://www.virustotal.com/vtapi/v2/file/scan/upload_url?'
+                             + urllib.parse.urlencode(data), headers=headers)
+            if r.status_code == 200:
+                upload_url = r.json().get('upload_url')
+            elif r.status_code == 403:
+                logging.error(_('VirusTotal API key cannot upload files larger than 32MB, '
+                                + 'use {url} to upload {path}.')
+                              .format(path=repofilename, url=manual_url))
+            else:
+                r.raise_for_status()
+        else:
+            upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan'
+
+    if upload_url:
+        logging.info(_('Uploading {apkfilename} to virustotal')
+                     .format(apkfilename=repofilename))
+        files = {
+            'file': (apkName, open(repofilename, 'rb'))
+        }
+        r = requests.post(upload_url, data=data, headers=headers, files=files)
+        logging.debug(_('If this upload fails, try manually uploading to {url}')
+                      .format(url=manual_url))
+        r.raise_for_status()
+        response = r.json()
+        logging.info(response['verbose_msg'] + " " + response['permalink'])
+
+    return outputfilename
 
 
 def push_binary_transparency(git_repo_path, git_remote):
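upload_apk_to_virustotal() deliberately takes the same keys that a package entry in index-v1.json carries (packageName, apkName, hash, versionCode, plus optional extras such as versionName or headers via **kwargs), so a caller can splat an entry straight into it, as upload_to_virustotal() now does. A sketch with made-up values; it assumes a virustotal/ output directory exists (hence the new .gitignore entry) and a repo/ directory holding the APK:

    import fdroidserver.server as server

    package = {  # shaped like one entry of index-v1.json's 'packages' section
        'packageName': 'org.fdroid.fdroid',
        'apkName': 'org.fdroid.fdroid_1008050.apk',
        'hash': '0' * 64,        # placeholder for the APK's sha256
        'versionCode': 1008050,
        'versionName': '1.8',
    }
    report_path = server.upload_apk_to_virustotal('my-virustotal-api-key', **package)
    print(report_path)  # path of the cached JSON report under virustotal/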
tests/common.TestCase

@@ -983,6 +983,48 @@ class CommonTest(unittest.TestCase):
         self.assertEqual(('1.0-free', '1', 'com.kunzisoft.fdroidtest.applicationidsuffix'),
                          fdroidserver.common.parse_androidmanifests(paths, app))
 
+    def test_get_all_gradle_and_manifests(self):
+        a = fdroidserver.common.get_all_gradle_and_manifests(os.path.join('source-files', 'cn.wildfirechat.chat'))
+        paths = [
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'avenginekit', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'chat', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'client', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'client', 'src', 'main', 'AndroidManifest.xml'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'emojilibrary', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'gradle', 'build_libraries.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'imagepicker', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'mars-core-release', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'push', 'build.gradle'),
+            os.path.join('source-files', 'cn.wildfirechat.chat', 'settings.gradle'),
+        ]
+        self.assertEqual(sorted(paths), sorted(a))
+
+    def test_get_gradle_subdir(self):
+        subdirs = {
+            'cn.wildfirechat.chat': 'chat',
+            'com.anpmech.launcher': 'app',
+            'org.tasks': 'app',
+            'ut.ewh.audiometrytest': 'app',
+        }
+        for f in ('cn.wildfirechat.chat', 'com.anpmech.launcher', 'org.tasks', 'ut.ewh.audiometrytest'):
+            build_dir = os.path.join('source-files', f)
+            paths = fdroidserver.common.get_all_gradle_and_manifests(build_dir)
+            logging.info(paths)
+            subdir = fdroidserver.common.get_gradle_subdir(build_dir, paths)
+            self.assertEqual(subdirs[f], subdir)
+
+    def test_bad_urls(self):
+        for url in ('asdf',
+                    'file://thing.git',
+                    'https:///github.com/my/project',
+                    'git:///so/many/slashes',
+                    'ssh:/notabug.org/missing/a/slash',
+                    'git:notabug.org/missing/some/slashes',
+                    'https//github.com/bar/baz'):
+            with self.assertRaises(ValueError):
+                fdroidserver.common.get_app_from_url(url)
+
     def test_remove_signing_keys(self):
         testdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, dir=self.tmpdir)
         print(testdir)
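These tests run entirely against the fixture trees under tests/source-files/, so the relocated helpers are exercised without any network access; like the other suites, the file can be run on its own, e.g. cd tests && python3 common.TestCase.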
tests/import.TestCase

@@ -49,53 +49,11 @@ class ImportTest(unittest.TestCase):
             print('Skipping ImportTest!')
             return
 
-        app = import_proxy.get_app_from_url(url)
+        app = fdroidserver.common.get_app_from_url(url)
         import_proxy.clone_to_tmp_dir(app)
         self.assertEqual(app.RepoType, 'git')
         self.assertEqual(app.Repo, 'https://gitlab.com/fdroid/ci-test-app.git')
 
-    def test_get_all_gradle_and_manifests(self):
-        a = import_proxy.get_all_gradle_and_manifests(os.path.join('source-files', 'cn.wildfirechat.chat'))
-        paths = [
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'avenginekit', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'chat', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'client', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'client', 'src', 'main', 'AndroidManifest.xml'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'emojilibrary', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'gradle', 'build_libraries.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'imagepicker', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'mars-core-release', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'push', 'build.gradle'),
-            os.path.join('source-files', 'cn.wildfirechat.chat', 'settings.gradle'),
-        ]
-        self.assertEqual(sorted(paths), sorted(a))
-
-    def test_get_gradle_subdir(self):
-        subdirs = {
-            'cn.wildfirechat.chat': 'chat',
-            'com.anpmech.launcher': 'app',
-            'org.tasks': 'app',
-            'ut.ewh.audiometrytest': 'app',
-        }
-        for f in ('cn.wildfirechat.chat', 'com.anpmech.launcher', 'org.tasks', 'ut.ewh.audiometrytest'):
-            build_dir = os.path.join('source-files', f)
-            paths = import_proxy.get_all_gradle_and_manifests(build_dir)
-            logging.info(paths)
-            subdir = import_proxy.get_gradle_subdir(build_dir, paths)
-            self.assertEqual(subdirs[f], subdir)
-
-    def test_bad_urls(self):
-        for url in ('asdf',
-                    'file://thing.git',
-                    'https:///github.com/my/project',
-                    'git:///so/many/slashes',
-                    'ssh:/notabug.org/missing/a/slash',
-                    'git:notabug.org/missing/some/slashes',
-                    'https//github.com/bar/baz'):
-            with self.assertRaises(ValueError):
-                import_proxy.get_app_from_url(url)
-
     def test_get_app_from_url(self):
         testdir = tempfile.mkdtemp(prefix=inspect.currentframe().f_code.co_name, dir=self.tmpdir)
         os.chdir(testdir)
@@ -111,7 +69,7 @@ class ImportTest(unittest.TestCase):
             shutil.copytree(os.path.join(self.basedir, 'source-files', appid),
                             tmp_importer)
 
-            app = import_proxy.get_app_from_url(url)
+            app = fdroidserver.common.get_app_from_url(url)
             with mock.patch('fdroidserver.common.getvcs',
                             lambda a, b, c: fdroidserver.common.vcs(url, testdir)):
                 with mock.patch('fdroidserver.common.vcs.gotorevision',
@@ -122,7 +80,7 @@ class ImportTest(unittest.TestCase):
                 self.assertEqual(url, app.Repo)
                 self.assertEqual(url, app.SourceCode)
                 logging.info(build_dir)
-                paths = import_proxy.get_all_gradle_and_manifests(build_dir)
+                paths = fdroidserver.common.get_all_gradle_and_manifests(build_dir)
                 self.assertNotEqual(paths, [])
                 versionName, versionCode, package = fdroidserver.common.parse_androidmanifests(paths, app)
                 self.assertEqual(vn, versionName)
tests/import_proxy.py

@@ -19,9 +19,6 @@ module = __import__('fdroidserver.import')
 for name, obj in inspect.getmembers(module):
     if name == 'import':
         clone_to_tmp_dir = obj.clone_to_tmp_dir
-        get_all_gradle_and_manifests = obj.get_all_gradle_and_manifests
-        get_app_from_url = obj.get_app_from_url
-        get_gradle_subdir = obj.get_gradle_subdir
         obj.options = Options()
         options = obj.options
         break
tests/server.TestCase

@@ -142,6 +142,12 @@ class ServerTest(unittest.TestCase):
                                                    repo_section)
         self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call')
 
+    @unittest.skipIf(not os.getenv('VIRUSTOTAL_API_KEY'), 'VIRUSTOTAL_API_KEY is not set')
+    def test_upload_to_virustotal(self):
+        fdroidserver.server.options.verbose = True
+        virustotal_apikey = os.getenv('VIRUSTOTAL_API_KEY')
+        fdroidserver.server.upload_to_virustotal('repo', virustotal_apikey)
+
 
 if __name__ == "__main__":
     os.chdir(os.path.dirname(__file__))
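The new check is skipped unless a key is available in the environment, so regular CI runs are unaffected; a manual run would look like (key value is a placeholder): cd tests && VIRUSTOTAL_API_KEY=your-api-key python3 server.TestCase.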