Mirror of https://github.com/f-droid/fdroidserver.git, synced 2025-09-28 21:41:06 +03:00.

Compare commits (623 commits)
(The commit list gives SHA1 values only, from 40fbbd2e48 at the top of the list to d6f5a1760a at the bottom; the Author and Date columns are empty.)
218 changed files with 32351 additions and 24339 deletions
.bandit (2 lines changed)

@@ -1,3 +1,3 @@
 [bandit]
-skips: B110,B404,B408,B410,B603,B607
+skips: B110,B404,B408,B603,B607,B322
 targets: .
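The `.bandit` file above is Bandit's project configuration: a list of check IDs to skip plus the scan target. As a minimal, illustrative sketch (the job name and image are not part of this changeset), a CI job could invoke Bandit against this config the same way the `bandit -r -ii --ini .bandit` call later in this compare does:

```yaml
# Illustrative GitLab CI job (not part of this changeset): run Bandit with the
# repository's .bandit ini file. `-r` recurses, `-ii` reports only issues of at
# least medium confidence, and `--ini .bandit` picks up the skips/targets above.
bandit:
  image: debian:bookworm-slim
  script:
    - apt-get update
    - apt-get install -y python3-pip
    - pip3 install --break-system-packages bandit
    - bandit -r -ii --ini .bandit
```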
.gitignore (vendored, 1 line changed)

@@ -27,7 +27,6 @@ tmp/
 /tests/repo/status
 
 # files used in manual testing
-/config.py
 /config.yml
 /tmp/
 /logs/
.gitlab-ci.yml (371 lines changed)
@ -1,5 +1,22 @@
|
||||||
---
|
---
|
||||||
|
|
||||||
|
# Use merge request pipelines when a merge request is open for the branch.
|
||||||
|
# Use branch pipelines when a merge request is not open for the branch.
|
||||||
|
# https://docs.gitlab.com/ci/yaml/workflow/#switch-between-branch-pipelines-and-merge-request-pipelines
|
||||||
|
workflow:
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
|
||||||
|
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
|
||||||
|
when: never
|
||||||
|
- if: $CI_COMMIT_BRANCH
|
||||||
|
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- lint
|
||||||
|
- test # default for jobs that do not specify stage:
|
||||||
|
- deploy
|
||||||
|
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
pip: pip3 --timeout 100 --retries 10
|
pip: pip3 --timeout 100 --retries 10
|
||||||
# speed up git checkout phase
|
# speed up git checkout phase
|
||||||
|
@ -50,6 +67,7 @@ metadata_v0:
|
||||||
- git checkout $RELEASE_COMMIT_ID
|
- git checkout $RELEASE_COMMIT_ID
|
||||||
- cd ..
|
- cd ..
|
||||||
- git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git
|
- git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git
|
||||||
|
- rm -f fdroiddata/config.yml # ignore config for this test
|
||||||
- cd fdroiddata
|
- cd fdroiddata
|
||||||
- ../tests/dump_internal_metadata_format.py
|
- ../tests/dump_internal_metadata_format.py
|
||||||
- cd ..
|
- cd ..
|
||||||
|
@ -59,6 +77,7 @@ metadata_v0:
|
||||||
- ../tests/dump_internal_metadata_format.py
|
- ../tests/dump_internal_metadata_format.py
|
||||||
- sed -i
|
- sed -i
|
||||||
-e '/ArchivePolicy:/d'
|
-e '/ArchivePolicy:/d'
|
||||||
|
-e '/FlattrID:/d'
|
||||||
-e '/RequiresRoot:/d'
|
-e '/RequiresRoot:/d'
|
||||||
metadata/dump_*/*.yaml
|
metadata/dump_*/*.yaml
|
||||||
- diff -uw metadata/dump_*
|
- diff -uw metadata/dump_*
|
||||||
|
@ -79,12 +98,31 @@ metadata_v0:
|
||||||
# Ubuntu and other distros often lack https:// support
|
# Ubuntu and other distros often lack https:// support
|
||||||
- grep Debian /etc/issue.net
|
- grep Debian /etc/issue.net
|
||||||
&& { find /etc/apt/sources.list* -type f | xargs sed -i s,http:,https:, ; }
|
&& { find /etc/apt/sources.list* -type f | xargs sed -i s,http:,https:, ; }
|
||||||
|
# The official Debian docker images ship without ca-certificates,
|
||||||
|
# TLS certificates cannot be verified until that is installed. The
|
||||||
|
# following code turns off TLS verification, and enables HTTPS, so
|
||||||
|
# at least unverified TLS is used for apt-get instead of plain
|
||||||
|
# HTTP. Once ca-certificates is installed, the CA verification is
|
||||||
|
# enabled by removing this config. This set up makes the initial
|
||||||
|
# `apt-get update` and `apt-get install` look the same as verified
|
||||||
|
# TLS to the network observer and hides the metadata.
|
||||||
- echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates
|
- echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates
|
||||||
- apt-get update
|
- apt-get update
|
||||||
- apt-get install ca-certificates
|
- apt-get install ca-certificates
|
||||||
- rm /etc/apt/apt.conf.d/99nocacertificates
|
- rm /etc/apt/apt.conf.d/99nocacertificates
|
||||||
- apt-get dist-upgrade
|
- apt-get dist-upgrade
|
||||||
|
|
||||||
|
# For jobs that only need to run when there are changes to Python files.
|
||||||
|
.python-rules-changes: &python-rules-changes
|
||||||
|
rules:
|
||||||
|
- changes:
|
||||||
|
- .gitlab-ci.yml
|
||||||
|
- fdroid
|
||||||
|
- makebuildserver
|
||||||
|
- setup.py
|
||||||
|
- fdroidserver/*.py
|
||||||
|
- tests/*.py
|
||||||
|
|
||||||
|
|
||||||
# Since F-Droid uses Debian as its default platform, from production
|
# Since F-Droid uses Debian as its default platform, from production
|
||||||
# servers to CI to contributor machines, it is important to know when
|
# servers to CI to contributor machines, it is important to know when
|
||||||
|
@ -93,8 +131,8 @@ metadata_v0:
|
||||||
debian_testing:
|
debian_testing:
|
||||||
image: debian:testing
|
image: debian:testing
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
only:
|
rules:
|
||||||
- master@fdroid/fdroidserver
|
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||||
script:
|
script:
|
||||||
- apt-get install
|
- apt-get install
|
||||||
aapt
|
aapt
|
||||||
|
@ -107,6 +145,7 @@ debian_testing:
|
||||||
ipfs-cid
|
ipfs-cid
|
||||||
python3-biplist
|
python3-biplist
|
||||||
python3-defusedxml
|
python3-defusedxml
|
||||||
|
python3-libcloud
|
||||||
python3-pycountry
|
python3-pycountry
|
||||||
python3-setuptools
|
python3-setuptools
|
||||||
sdkmanager
|
sdkmanager
|
||||||
|
@ -121,8 +160,8 @@ debian_testing:
|
||||||
ubuntu_lts_ppa:
|
ubuntu_lts_ppa:
|
||||||
image: ubuntu:latest
|
image: ubuntu:latest
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
only:
|
rules:
|
||||||
- master@fdroid/fdroidserver
|
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||||
script:
|
script:
|
||||||
- export ANDROID_HOME=/usr/lib/android-sdk
|
- export ANDROID_HOME=/usr/lib/android-sdk
|
||||||
- apt-get install gnupg
|
- apt-get install gnupg
|
||||||
|
@ -147,6 +186,46 @@ ubuntu_lts_ppa:
|
||||||
- ./run-tests
|
- ./run-tests
|
||||||
|
|
||||||
|
|
||||||
|
# Test to see how rclone works with S3
|
||||||
|
test_deploy_to_s3_with_rclone:
|
||||||
|
image: debian:bookworm-slim
|
||||||
|
<<: *apt-template
|
||||||
|
tags:
|
||||||
|
- saas-linux-small-amd64 # the shared runners are known to support Docker.
|
||||||
|
services:
|
||||||
|
- name: docker:dind
|
||||||
|
command: ["--tls=false"]
|
||||||
|
variables:
|
||||||
|
DOCKER_HOST: "tcp://docker:2375"
|
||||||
|
DOCKER_DRIVER: overlay2
|
||||||
|
DOCKER_TLS_CERTDIR: ""
|
||||||
|
before_script:
|
||||||
|
# ensure minio is up before executing tests
|
||||||
|
- apt-get update
|
||||||
|
- apt-get install -y
|
||||||
|
androguard
|
||||||
|
apksigner
|
||||||
|
curl
|
||||||
|
docker.io
|
||||||
|
git
|
||||||
|
python3-venv
|
||||||
|
rclone
|
||||||
|
# This job requires working docker but will silently fail if docker is not available
|
||||||
|
- docker info
|
||||||
|
- python3 -m venv --system-site-packages test-venv
|
||||||
|
- . test-venv/bin/activate
|
||||||
|
- pip install testcontainers[minio]
|
||||||
|
- pip install .
|
||||||
|
script:
|
||||||
|
- python3 -m unittest -k test_update_remote_storage_with_rclone --verbose
|
||||||
|
rules:
|
||||||
|
- changes:
|
||||||
|
- .gitlab-ci.yml
|
||||||
|
- fdroidserver/deploy.py
|
||||||
|
- tests/test_deploy.py
|
||||||
|
- tests/test_integration.py
|
||||||
|
|
||||||
|
|
||||||
# Test using Ubuntu/jammy LTS (supported til April, 2027) with depends
|
# Test using Ubuntu/jammy LTS (supported til April, 2027) with depends
|
||||||
# from pypi and sdkmanager. The venv is used to isolate the dist
|
# from pypi and sdkmanager. The venv is used to isolate the dist
|
||||||
# tarball generation environment from the clean install environment.
|
# tarball generation environment from the clean install environment.
|
||||||
|
@ -166,7 +245,7 @@ ubuntu_jammy_pip:
|
||||||
# back to bare machine to act as user's install machine
|
# back to bare machine to act as user's install machine
|
||||||
- export ANDROID_HOME=/opt/android-sdk
|
- export ANDROID_HOME=/opt/android-sdk
|
||||||
- $pip install sdkmanager
|
- $pip install sdkmanager
|
||||||
- sdkmanager 'build-tools;33.0.0'
|
- sdkmanager 'build-tools;35.0.0'
|
||||||
|
|
||||||
# Install extras_require.optional from setup.py
|
# Install extras_require.optional from setup.py
|
||||||
- $pip install biplist pycountry
|
- $pip install biplist pycountry
|
||||||
|
@ -174,46 +253,16 @@ ubuntu_jammy_pip:
|
||||||
- $pip install dist/fdroidserver-*.tar.gz
|
- $pip install dist/fdroidserver-*.tar.gz
|
||||||
- tar xzf dist/fdroidserver-*.tar.gz
|
- tar xzf dist/fdroidserver-*.tar.gz
|
||||||
- cd fdroidserver-*
|
- cd fdroidserver-*
|
||||||
- export PATH=$PATH:$ANDROID_HOME/build-tools/33.0.0
|
- export PATH=$PATH:$ANDROID_HOME/build-tools/35.0.0
|
||||||
- fdroid=`which fdroid` ./tests/run-tests
|
- fdroid=`which fdroid` ./tests/run-tests
|
||||||
|
|
||||||
# check localization was properly installed
|
# check localization was properly installed
|
||||||
- LANGUAGE='de' fdroid --help | grep 'Gültige Befehle sind'
|
- LANGUAGE='de' fdroid --help | grep 'Gültige Befehle sind'
|
||||||
|
|
||||||
|
|
||||||
# test installation process on a bleeding edge distro with pip
|
|
||||||
arch_pip_install:
|
|
||||||
image: archlinux
|
|
||||||
only:
|
|
||||||
- master@fdroid/fdroidserver
|
|
||||||
script:
|
|
||||||
- pacman --sync --sysupgrade --refresh --noconfirm gcc git grep python-pip python-virtualenv python-wheel tar
|
|
||||||
- python -m venv venv
|
|
||||||
- source venv/bin/activate
|
|
||||||
- pip install -e .[test]
|
|
||||||
- fdroid
|
|
||||||
- fdroid readmeta
|
|
||||||
- fdroid update --help
|
|
||||||
|
|
||||||
|
|
||||||
# The gradlew-fdroid tests are isolated from the rest of the test
|
|
||||||
# suite, so they run as their own job.
|
|
||||||
gradlew-fdroid:
|
|
||||||
image: debian:bookworm-slim
|
|
||||||
<<: *apt-template
|
|
||||||
only:
|
|
||||||
changes:
|
|
||||||
- .gitlab-ci.yml
|
|
||||||
- gradlew-fdroid
|
|
||||||
- tests/test-gradlew-fdroid
|
|
||||||
script:
|
|
||||||
- apt-get install ca-certificates curl default-jdk-headless shellcheck unzip
|
|
||||||
- shellcheck --severity=error --color gradlew-fdroid tests/test-gradlew-fdroid
|
|
||||||
- ./tests/test-gradlew-fdroid
|
|
||||||
|
|
||||||
|
|
||||||
# Run all the various linters and static analysis tools.
|
# Run all the various linters and static analysis tools.
|
||||||
lint_format_bandit_checks:
|
hooks/pre-commit:
|
||||||
|
stage: lint
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
variables:
|
variables:
|
||||||
LANG: C.UTF-8
|
LANG: C.UTF-8
|
||||||
|
@ -228,35 +277,60 @@ lint_format_bandit_checks:
|
||||||
make
|
make
|
||||||
pycodestyle
|
pycodestyle
|
||||||
pyflakes3
|
pyflakes3
|
||||||
pylint
|
|
||||||
python3-dev
|
python3-dev
|
||||||
python3-git
|
python3-git
|
||||||
python3-nose
|
python3-nose
|
||||||
python3-pip
|
python3-pip
|
||||||
python3-yaml
|
python3-yaml
|
||||||
shellcheck
|
- ./hooks/pre-commit
|
||||||
|
|
||||||
|
bandit:
|
||||||
|
image: debian:bookworm-slim
|
||||||
|
<<: *python-rules-changes
|
||||||
|
<<: *apt-template
|
||||||
|
script:
|
||||||
|
- apt-get install python3-pip
|
||||||
- $pip install --break-system-packages bandit
|
- $pip install --break-system-packages bandit
|
||||||
- export EXITVALUE=0
|
- bandit -r -ii --ini .bandit
|
||||||
- function set_error() { export EXITVALUE=1; printf "\x1b[31mERROR `history|tail -2|head -1|cut -b 6-500`\x1b[0m\n"; }
|
|
||||||
- ./hooks/pre-commit || set_error
|
pylint:
|
||||||
- bandit
|
stage: lint
|
||||||
-r
|
image: debian:bookworm-slim
|
||||||
-ii
|
<<: *python-rules-changes
|
||||||
--ini .bandit
|
<<: *apt-template
|
||||||
|| set_error
|
script:
|
||||||
- pylint --output-format=colorized --reports=n
|
- apt-get install pylint python3-pip
|
||||||
|
- $pip install --break-system-packages pylint-gitlab
|
||||||
|
- pylint --output-format=colorized,pylint_gitlab.GitlabCodeClimateReporter:pylint-report.json
|
||||||
fdroid
|
fdroid
|
||||||
makebuildserver
|
makebuildserver
|
||||||
setup.py
|
setup.py
|
||||||
fdroidserver/*.py
|
fdroidserver/*.py
|
||||||
tests/*.py
|
tests/*.py
|
||||||
tests/*.TestCase
|
artifacts:
|
||||||
|| set_error
|
reports:
|
||||||
- shellcheck --exclude SC2046,SC2090 --severity=warning --color tests/run-tests
|
codequality: pylint-report.json
|
||||||
|| set_error
|
when: always
|
||||||
- exit $EXITVALUE
|
|
||||||
|
|
||||||
|
|
||||||
|
shellcheck:
|
||||||
|
stage: lint
|
||||||
|
image: debian:bookworm-slim
|
||||||
|
rules:
|
||||||
|
- changes:
|
||||||
|
- .gitlab-ci.yml
|
||||||
|
- hooks/install-hooks.sh
|
||||||
|
- hooks/pre-commit
|
||||||
|
- tests/run-tests
|
||||||
|
<<: *apt-template
|
||||||
|
script:
|
||||||
|
- apt-get install shellcheck
|
||||||
|
# TODO GitLab Code Quality report https://github.com/koalaman/shellcheck/issues/3155
|
||||||
|
- shellcheck --exclude SC2046,SC2090 --severity=warning --color
|
||||||
|
hooks/install-hooks.sh
|
||||||
|
hooks/pre-commit
|
||||||
|
tests/run-tests
|
||||||
|
|
||||||
# Check all the dependencies in Debian to mirror production. CVEs are
|
# Check all the dependencies in Debian to mirror production. CVEs are
|
||||||
# generally fixed in the latest versions in pip/pypi.org, so it isn't
|
# generally fixed in the latest versions in pip/pypi.org, so it isn't
|
||||||
# so important to scan that kind of install in CI.
|
# so important to scan that kind of install in CI.
|
||||||
|
@ -264,10 +338,7 @@ lint_format_bandit_checks:
|
||||||
safety:
|
safety:
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
rules:
|
rules:
|
||||||
# once only:/changes: are ported to rules:, this could be removed:
|
- if: $SAFETY_API_KEY
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
when: never
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "push" && $SAFETY_API_KEY
|
|
||||||
changes:
|
changes:
|
||||||
- .gitlab-ci.yml
|
- .gitlab-ci.yml
|
||||||
- .safety-policy.yml
|
- .safety-policy.yml
|
||||||
|
@ -288,8 +359,34 @@ safety:
|
||||||
- python3 -m safety --key "$SAFETY_API_KEY" --stage cicd scan
|
- python3 -m safety --key "$SAFETY_API_KEY" --stage cicd scan
|
||||||
|
|
||||||
|
|
||||||
# Run all the various linters and static analysis tools.
|
# TODO tests/*/*/*.yaml are not covered
|
||||||
|
yamllint:
|
||||||
|
stage: lint
|
||||||
|
image: debian:bookworm-slim
|
||||||
|
rules:
|
||||||
|
- changes:
|
||||||
|
- .gitlab-ci.yml
|
||||||
|
- .safety-policy.yml
|
||||||
|
- .yamllint
|
||||||
|
- tests/*.yml
|
||||||
|
- tests/*/*.yml
|
||||||
|
- tests/*/*/.*.yml
|
||||||
|
<<: *apt-template
|
||||||
|
variables:
|
||||||
|
LANG: C.UTF-8
|
||||||
|
script:
|
||||||
|
- apt-get install yamllint
|
||||||
|
- yamllint
|
||||||
|
.gitlab-ci.yml
|
||||||
|
.safety-policy.yml
|
||||||
|
.yamllint
|
||||||
|
tests/*.yml
|
||||||
|
tests/*/*.yml
|
||||||
|
tests/*/*/.*.yml
|
||||||
|
|
||||||
|
|
||||||
locales:
|
locales:
|
||||||
|
stage: lint
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
variables:
|
variables:
|
||||||
LANG: C.UTF-8
|
LANG: C.UTF-8
|
||||||
|
@ -308,6 +405,7 @@ locales:
|
||||||
|
|
||||||
|
|
||||||
black:
|
black:
|
||||||
|
stage: lint
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
script:
|
script:
|
||||||
|
@ -354,14 +452,14 @@ fedora_latest:
|
||||||
- chown -R testuser .
|
- chown -R testuser .
|
||||||
- cd tests
|
- cd tests
|
||||||
- su testuser --login --command
|
- su testuser --login --command
|
||||||
"cd `pwd`; export ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests"
|
"cd `pwd`; export CI=$CI ANDROID_HOME=$ANDROID_HOME; fdroid=~testuser/.local/bin/fdroid ./run-tests"
|
||||||
|
|
||||||
|
|
||||||
macOS:
|
macOS:
|
||||||
tags:
|
tags:
|
||||||
- saas-macos-medium-m1
|
- saas-macos-medium-m1
|
||||||
only:
|
rules:
|
||||||
- master@fdroid/fdroidserver
|
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||||
script:
|
script:
|
||||||
- export HOMEBREW_CURL_RETRIES=10
|
- export HOMEBREW_CURL_RETRIES=10
|
||||||
- brew update > /dev/null
|
- brew update > /dev/null
|
||||||
|
@ -372,7 +470,7 @@ macOS:
|
||||||
- brew install --cask android-commandlinetools temurin # temurin is a JDK
|
- brew install --cask android-commandlinetools temurin # temurin is a JDK
|
||||||
|
|
||||||
# test suite dependencies
|
# test suite dependencies
|
||||||
- brew install dash bash coreutils gnu-sed
|
- brew install bash coreutils gnu-sed
|
||||||
# TODO port tests/run-tests to POSIX and gsed, it has a couple GNU-isms like du --bytes
|
# TODO port tests/run-tests to POSIX and gsed, it has a couple GNU-isms like du --bytes
|
||||||
- export PATH="$(brew --prefix fdroidserver)/libexec/bin:$(brew --prefix coreutils)/libexec/gnubin:$PATH"
|
- export PATH="$(brew --prefix fdroidserver)/libexec/bin:$(brew --prefix coreutils)/libexec/gnubin:$PATH"
|
||||||
|
|
||||||
|
@ -389,48 +487,34 @@ macOS:
|
||||||
|
|
||||||
- echo "macOS sticks with bash 3.x because of licenses, so avoid new bash syntax"
|
- echo "macOS sticks with bash 3.x because of licenses, so avoid new bash syntax"
|
||||||
- /bin/bash --version
|
- /bin/bash --version
|
||||||
- /bin/bash -n gradlew-fdroid tests/run-tests
|
- /bin/bash -n tests/run-tests
|
||||||
|
|
||||||
# TODO remove the packages below once they are included in the Homebrew package
|
|
||||||
- $(brew --prefix fdroidserver)/libexec/bin/python3 -m pip install biplist oscrypto pycountry
|
|
||||||
|
|
||||||
# test fdroidserver from git with current package's dependencies
|
# test fdroidserver from git with current package's dependencies
|
||||||
- fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests
|
- fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests
|
||||||
|
|
||||||
|
|
||||||
gradle:
|
gradle:
|
||||||
image: debian:bookworm-slim
|
image: debian:trixie-slim
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
variables:
|
rules:
|
||||||
GIT_DEPTH: 1000
|
- changes:
|
||||||
|
- .gitlab-ci.yml
|
||||||
|
- makebuildserver
|
||||||
script:
|
script:
|
||||||
- apt-get install
|
- apt-get install
|
||||||
ca-certificates
|
ca-certificates
|
||||||
git
|
git
|
||||||
openssh-client
|
|
||||||
python3-bs4
|
|
||||||
python3-colorama
|
python3-colorama
|
||||||
python3-git
|
|
||||||
python3-gitlab
|
|
||||||
python3-packaging
|
python3-packaging
|
||||||
python3-requests
|
python3-requests
|
||||||
# if this is a merge request fork, then only check if relevant files changed
|
|
||||||
- if [ "$CI_PROJECT_NAMESPACE" != "fdroid" ]; then
|
|
||||||
git fetch https://gitlab.com/fdroid/fdroidserver.git;
|
|
||||||
for f in `git diff --name-only --diff-filter=d FETCH_HEAD...HEAD`; do
|
|
||||||
test "$f" == "makebuildserver" && export CHANGED="yes";
|
|
||||||
test "$f" == "gradlew-fdroid" && export CHANGED="yes";
|
|
||||||
done;
|
|
||||||
test -z "$CHANGED" && exit;
|
|
||||||
fi
|
|
||||||
- ./tests/gradle-release-checksums.py
|
- ./tests/gradle-release-checksums.py
|
||||||
|
|
||||||
|
|
||||||
# Run an actual build in a simple, faked version of the buildserver guest VM.
|
# Run an actual build in a simple, faked version of the buildserver guest VM.
|
||||||
fdroid build:
|
fdroid build:
|
||||||
image: registry.gitlab.com/fdroid/fdroidserver:buildserver
|
image: registry.gitlab.com/fdroid/fdroidserver:buildserver
|
||||||
only:
|
rules:
|
||||||
changes:
|
- changes:
|
||||||
- .gitlab-ci.yml
|
- .gitlab-ci.yml
|
||||||
- fdroidserver/build.py
|
- fdroidserver/build.py
|
||||||
- fdroidserver/common.py
|
- fdroidserver/common.py
|
||||||
|
@ -439,6 +523,9 @@ fdroid build:
|
||||||
- fdroidserver/net.py
|
- fdroidserver/net.py
|
||||||
- fdroidserver/scanner.py
|
- fdroidserver/scanner.py
|
||||||
- fdroidserver/vmtools.py
|
- fdroidserver/vmtools.py
|
||||||
|
# for the docker: job which depends on this one
|
||||||
|
- makebuildserver
|
||||||
|
- buildserver/*
|
||||||
cache:
|
cache:
|
||||||
key: "$CI_JOB_NAME"
|
key: "$CI_JOB_NAME"
|
||||||
paths:
|
paths:
|
||||||
|
@ -474,6 +561,8 @@ fdroid build:
|
||||||
env HOME=$home_vagrant
|
env HOME=$home_vagrant
|
||||||
fdroid"
|
fdroid"
|
||||||
|
|
||||||
|
- git -C $home_vagrant/gradlew-fdroid pull
|
||||||
|
|
||||||
- chown -R vagrant $home_vagrant
|
- chown -R vagrant $home_vagrant
|
||||||
- chown -R vagrant $fdroidserver/.git
|
- chown -R vagrant $fdroidserver/.git
|
||||||
- chown vagrant $fdroidserver/
|
- chown vagrant $fdroidserver/
|
||||||
|
@ -499,8 +588,8 @@ fdroid build:
|
||||||
plugin_fetchsrclibs:
|
plugin_fetchsrclibs:
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
only:
|
rules:
|
||||||
changes:
|
- changes:
|
||||||
- .gitlab-ci.yml
|
- .gitlab-ci.yml
|
||||||
- examples/fdroid_fetchsrclibs.py
|
- examples/fdroid_fetchsrclibs.py
|
||||||
- fdroidserver/__main__.py
|
- fdroidserver/__main__.py
|
||||||
|
@ -543,8 +632,8 @@ plugin_fetchsrclibs:
|
||||||
servergitmirrors:
|
servergitmirrors:
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
only:
|
rules:
|
||||||
- master@fdroid/fdroidserver
|
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||||
script:
|
script:
|
||||||
- apt-get install
|
- apt-get install
|
||||||
default-jdk-headless
|
default-jdk-headless
|
||||||
|
@ -572,11 +661,11 @@ servergitmirrors:
|
||||||
- ./tests/key-tricks.py
|
- ./tests/key-tricks.py
|
||||||
- ssh-keyscan gitlab.com >> /root/.ssh/known_hosts
|
- ssh-keyscan gitlab.com >> /root/.ssh/known_hosts
|
||||||
- test -d /tmp/fdroid/repo || mkdir -p /tmp/fdroid/repo
|
- test -d /tmp/fdroid/repo || mkdir -p /tmp/fdroid/repo
|
||||||
- cp tests/config.py tests/keystore.jks /tmp/fdroid/
|
- cp tests/config.yml tests/keystore.jks /tmp/fdroid/
|
||||||
- cp tests/repo/com.politedroid_6.apk /tmp/fdroid/repo/
|
- cp tests/repo/com.politedroid_6.apk /tmp/fdroid/repo/
|
||||||
- cd /tmp/fdroid
|
- cd /tmp/fdroid
|
||||||
- touch fdroid-icon.png
|
- touch fdroid-icon.png
|
||||||
- printf "\nservergitmirrors = 'git@gitlab.com:fdroid/ci-test-servergitmirrors-repo.git'\n" >> config.py
|
- printf "\nservergitmirrors\x3a 'git@gitlab.com:fdroid/ci-test-servergitmirrors-repo.git'\n" >> config.yml
|
||||||
- $PYTHONPATH/fdroid update --verbose --create-metadata
|
- $PYTHONPATH/fdroid update --verbose --create-metadata
|
||||||
- $PYTHONPATH/fdroid deploy --verbose
|
- $PYTHONPATH/fdroid deploy --verbose
|
||||||
- export DLURL=`grep -Eo 'https://gitlab.com/fdroid/ci-test-servergitmirrors-repo[^"]+' repo/index-v1.json`
|
- export DLURL=`grep -Eo 'https://gitlab.com/fdroid/ci-test-servergitmirrors-repo[^"]+' repo/index-v1.json`
|
||||||
|
@ -586,6 +675,7 @@ servergitmirrors:
|
||||||
|
|
||||||
Build documentation:
|
Build documentation:
|
||||||
image: debian:bookworm-slim
|
image: debian:bookworm-slim
|
||||||
|
<<: *python-rules-changes
|
||||||
<<: *apt-template
|
<<: *apt-template
|
||||||
script:
|
script:
|
||||||
- apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver
|
- apt-get install make python3-sphinx python3-numpydoc python3-pydata-sphinx-theme pydocstyle fdroidserver
|
||||||
|
@ -605,8 +695,8 @@ Build documentation:
|
||||||
Windows:
|
Windows:
|
||||||
tags:
|
tags:
|
||||||
- windows
|
- windows
|
||||||
only:
|
rules:
|
||||||
- windows
|
- if: $CI_COMMIT_BRANCH == "windows"
|
||||||
script:
|
script:
|
||||||
- Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1"
|
- Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1"
|
||||||
- choco install --no-progress -y git --force --params "/GitAndUnixToolsOnPath"
|
- choco install --no-progress -y git --force --params "/GitAndUnixToolsOnPath"
|
||||||
|
@ -617,10 +707,10 @@ Windows:
|
||||||
- python -m pip install --upgrade babel pip setuptools
|
- python -m pip install --upgrade babel pip setuptools
|
||||||
- python -m pip install -e .
|
- python -m pip install -e .
|
||||||
|
|
||||||
- $files = @(Get-ChildItem tests\*.TestCase)
|
- $files = @(Get-ChildItem tests\test_*.py)
|
||||||
- foreach ($f in $files) {
|
- foreach ($f in $files) {
|
||||||
write-output $f;
|
write-output $f;
|
||||||
python $f;
|
python -m unittest $f;
|
||||||
if( $LASTEXITCODE -eq 0 ) {
|
if( $LASTEXITCODE -eq 0 ) {
|
||||||
write-output "SUCCESS $f";
|
write-output "SUCCESS $f";
|
||||||
} else {
|
} else {
|
||||||
|
@ -629,15 +719,16 @@ Windows:
|
||||||
}
|
}
|
||||||
|
|
||||||
# these are the tests that must pass
|
# these are the tests that must pass
|
||||||
- python tests\checkupdates.TestCase
|
- python -m unittest -k
|
||||||
- python tests\exception.TestCase
|
checkupdates
|
||||||
- python tests\import_subcommand.TestCase
|
exception
|
||||||
- python tests\init.TestCase
|
import_subcommand
|
||||||
- python tests\lint.TestCase
|
test_lint
|
||||||
- python tests\main.TestCase
|
test_metadata
|
||||||
- python tests\metadata.TestCase
|
test_rewritemeta
|
||||||
- python tests\rewritemeta.TestCase
|
test_vcs
|
||||||
- python tests\vcs.TestCase
|
tests.test_init
|
||||||
|
tests.test_main
|
||||||
after_script:
|
after_script:
|
||||||
- Copy-Item C:\ProgramData\chocolatey\logs\chocolatey.log
|
- Copy-Item C:\ProgramData\chocolatey\logs\chocolatey.log
|
||||||
artifacts:
|
artifacts:
|
||||||
|
@ -656,7 +747,9 @@ pages:
|
||||||
artifacts:
|
artifacts:
|
||||||
paths:
|
paths:
|
||||||
- public
|
- public
|
||||||
needs: ["Build documentation"]
|
needs:
|
||||||
|
- job: "Build documentation"
|
||||||
|
optional: true
|
||||||
rules:
|
rules:
|
||||||
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # only publish pages on default (master) branch
|
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' # only publish pages on default (master) branch
|
||||||
|
|
||||||
|
@ -668,13 +761,12 @@ pages:
|
||||||
docker:
|
docker:
|
||||||
dependencies:
|
dependencies:
|
||||||
- fdroid build
|
- fdroid build
|
||||||
only:
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master" && $CI_PROJECT_PATH == "fdroid/fdroidserver"
|
||||||
changes:
|
changes:
|
||||||
- .gitlab-ci.yml
|
- .gitlab-ci.yml
|
||||||
- makebuildserver
|
- makebuildserver
|
||||||
- buildserver/*
|
- buildserver/*
|
||||||
variables:
|
|
||||||
- $CI_COMMIT_BRANCH == "master" || $CI_PROJECT_NAMESPACE != "fdroid"
|
|
||||||
image: docker:dind
|
image: docker:dind
|
||||||
services:
|
services:
|
||||||
- docker:dind
|
- docker:dind
|
||||||
|
@ -697,3 +789,66 @@ docker:
|
||||||
fi
|
fi
|
||||||
- docker push $RELEASE_IMAGE
|
- docker push $RELEASE_IMAGE
|
||||||
- docker push $RELEASE_IMAGE-bookworm
|
- docker push $RELEASE_IMAGE-bookworm
|
||||||
|
|
||||||
|
|
||||||
|
# PUBLISH is the signing server. It has a very minimal manual setup.
|
||||||
|
PUBLISH:
|
||||||
|
image: debian:bookworm-backports
|
||||||
|
<<: *python-rules-changes
|
||||||
|
script:
|
||||||
|
- apt-get update
|
||||||
|
- apt-get -qy upgrade
|
||||||
|
- apt-get -qy install --no-install-recommends -t bookworm-backports
|
||||||
|
androguard
|
||||||
|
apksigner
|
||||||
|
curl
|
||||||
|
default-jdk-headless
|
||||||
|
git
|
||||||
|
gpg
|
||||||
|
gpg-agent
|
||||||
|
python3-asn1crypto
|
||||||
|
python3-defusedxml
|
||||||
|
python3-git
|
||||||
|
python3-ruamel.yaml
|
||||||
|
python3-yaml
|
||||||
|
rsync
|
||||||
|
|
||||||
|
# Run only relevant parts of the test suite, other parts will fail
|
||||||
|
# because of this minimal base setup.
|
||||||
|
- python3 -m unittest
|
||||||
|
tests/test_gpgsign.py
|
||||||
|
tests/test_metadata.py
|
||||||
|
tests/test_publish.py
|
||||||
|
tests/test_signatures.py
|
||||||
|
tests/test_signindex.py
|
||||||
|
|
||||||
|
- cd tests
|
||||||
|
- mkdir archive
|
||||||
|
- mkdir unsigned
|
||||||
|
- cp urzip-release-unsigned.apk unsigned/info.guardianproject.urzip_100.apk
|
||||||
|
- grep '^key.*pass' config.yml | sed 's,\x3a ,=,' > $CI_PROJECT_DIR/variables
|
||||||
|
- sed -Ei 's,^(key.*pass|keystore)\x3a.*,\1\x3a {env\x3a \1},' config.yml
|
||||||
|
- printf '\ngpghome\x3a {env\x3a gpghome}\n' >> config.yml
|
||||||
|
- |
|
||||||
|
tee --append $CI_PROJECT_DIR/variables <<EOF
|
||||||
|
gpghome=$CI_PROJECT_DIR/tests/gnupghome
|
||||||
|
keystore=$CI_PROJECT_DIR/tests/keystore.jks
|
||||||
|
serverwebroot=/tmp
|
||||||
|
export gpghome keypass keystorepass keystore serverwebroot
|
||||||
|
EOF
|
||||||
|
- source $CI_PROJECT_DIR/variables
|
||||||
|
# silence warnings
|
||||||
|
- chmod 0600 config.yml config/*.yml config/*/*.yml
|
||||||
|
- chmod 0700 $gpghome
|
||||||
|
|
||||||
|
- export PATH=$CI_PROJECT_DIR:$PATH
|
||||||
|
|
||||||
|
# run signpkg.sh
|
||||||
|
- fdroid publish --verbose
|
||||||
|
- fdroid gpgsign --verbose
|
||||||
|
- rsync --progress repo/* $serverwebroot/
|
||||||
|
|
||||||
|
# run signindex.sh
|
||||||
|
- fdroid gpgsign --verbose
|
||||||
|
- fdroid signindex --verbose
|
||||||
|
- rsync --stats repo/* $serverwebroot/
|
||||||
|
|
.well-known/funding-manifest-urls (new file, 1 line)

@@ -0,0 +1 @@
+https://f-droid.org/funding.json
CHANGELOG.md (197 lines changed)

@@ -4,16 +4,203 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 
-## unreleased
+## [2.5.0] - NEXT
 
-### Added
-
-### Fixed
-
 ### Removed
 
+* deploy: `awsaccesskeyid:` and `awssecretkey:` config items removed, use the
+  standard env vars: `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.
+
+## [2.4.2] - 2025-06-24
+
+### Fixed
+
+* nightly: fix bug that clones nightly repo to wrong location
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1672
+* Sync translations for all supported languages: es pl ru
+
+## [2.4.1] - 2025-06-23
+
+### Added
+
+* build: Clearer error messages when working with Git.
+* verify: generate <appid>.json files that list all reports
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1632
+
+### Fixed
+
+* deploy: use master branch when working complete git-mirror repo
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1666
+* update: use ctime/mtime to control _strip_and_copy_image runs
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1665
+* update: If categories.yml only has icon:, then add name:
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1659
+* update: fix handling of Triple-T 1.0.0 graphics
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1652
+* update: never execute any VCS e.g. git
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1630
+* config: lazyload environment variables in config.yml
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1645
+* config: make localized name/description/icon optional
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1649
+* lint: add repo_key_sha256 to list of valid config keys
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1643
+* build: calculate all combinations of gradle flavors
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1638
+* build: set SOURCE_DATE_EPOCH from app's git otherwise fdroiddata metadata file
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1653
+* Sync translations for all supported languages: ca cs de fr ga ja pl pt pt_BR
+  pt_PT ru sq tr uk zh_Hans
+
+### Removed
+
+## [2.4.0] - 2025-03-25
+
+### Added
+
+* lint: support the base _config.yml_.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1606
+
+### Fixed
+
+* Expand {env: foo} config syntax to be allowed any place a string is.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1610
+* Only show "unsafe permissions on config.yml" when secrets are present.
+* Standardized config files on ruamel.yaml with a YAML 1.2 data format.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1611
+* Brought back error when a package has multiple package types (e.g. xapk and
+  apk). https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1602
+* Reworked test suite to be entirely based on Python unittest (thanks @mindston).
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1587
+* publish/signindex/gpgsign no longer load the _qrcode_ and _requests_ modules,
+  and can operate without them installed.
+* scanner: add bun.lock as lock file of package.json
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1615
+* index: fail if user sets mirrors:isPrimary wrong
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1617
+  https://gitlab.com/fdroid/fdroidserver/-/issues/1125
+* Sync translations for all supported languages: bo ca cs de es fr ga hu it ja
+  ko nb_NO pl pt pt_BR pt_PT ro ru sq sr sw tr uk zh_Hans zh_Hant
+
+### Removed
+
+* checkupdates: remove auto_author: config, it is no longer used.
+* Purge support for the long-deprecated _config.py_ config file.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1607
+
+
+## [2.3.5] - 2025-01-20
+
+### Fixed
+
+* Fix issue where APKs with v1-only signatures and targetSdkVersion < 30 could
+  be maliciously crafted to bypass AllowedAPKSigningKeys
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1588
+* Ignore apksigner v33.x, it has bugs verifying APKs with v3/v3.1 sigs.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1593
+* Sync translations for: ca cs de es fr ga ja pt_BR pt_PT ru sq sr uk zh_Hans
+
+## [2.3.4] - 2024-12-12
+
+### Fixed
+
+* Fix localhost network tests on systems with IPv6.
+* lint: only error out on missing extlib on versions not archived.
+
+## [2.3.3] - 2024-12-11
+
+### Added
+
+* verify: `--clean-up-verified` to delete files used when verifying an APK if
+  the verification was successful.
+
+### Fixed
+
+* Support Python 3.13 in the full test suite.
+* Sync translations for: ca de fr ja pl ro ru sr ta
+* update: only generate _index.png_ when making _index.html_, allowing the repo
+  operator to set a different repo icon, e.g. not the QR Code.
+
+## [2.3.2] - 2024-11-26
+
+### Fixed
+
+* install: fix downloading from GitHub Releases and Maven Central.
+* Sync translations for: ca fa fr pt ru sr ta zh_Hant
+
+## [2.3.1] - 2024-11-25
+
+### Fixed
+
+* Sync all translations for: cs de es fr ga pt_BR ru sq zh_Hans.
+* Drop use of deprecated imghdr library to support Python 3.13.
+* Install biplist and pycountry by default on macOS.
+* Fixed running test suite out of dist tarball.
+
+## [2.3.0] - 2024-11-21
+
+### Added
+
+* YAML 1.2 as native format for all _.yml_ files, including metadata and config.
+* install: will now fetch _F-Droid.apk_ and install it via `adb`.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1546
+* scanner: scan APK Signing Block for known block types like Google Play
+  Signature aka "Frosting".
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1555
+* Support Rclone for deploying to many different cloud services.
+* deploy: support deploying to GitHub Releases.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1471
+* scanner: support libs.versions.toml
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1526
+* Consider subdir for triple-t metadata discovery in Flutter apps.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1541
+* deploy: added `index_only:` mode for mirroring the index to small hosting
+  locations. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1420
+* Support publishing repos in AltStore format.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1465
+* Support indexing iOS IPA app files.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1413
+* deploy: _config/mirrors.yml_ file with support for adding per-mirror metadata,
+  like `countryCode:`.
+* Repo's categories are now set in the config files.
+* lint: check syntax of config files.
+* publish: `--error-on-failed` to exit when signing/verifying fails.
+* scanner: `--refresh` and `refresh_config:` to control triggering a refresh of
+  the rule sets.
+* Terminal output colorization and `--color` argument to control it.
+* New languages: Catalan (ca), Irish (ga), Japanese (ja), Serbian (sr), and
+  Swahili (sw).
+* Support donation links from `community_bridge`, `buy_me_a_coffee`.
+
+### Fixed
+
+* Use last modified time and file size for caching data about scanned APKs
+  instead of SHA-256 checksum.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1542
+* `repo_web_base_url:` config for generating per-app URLs for viewing in
+  browsers. https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1178
+* `fdroid scanner` flags WebAssembly binary _.wasm_ files.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1562
+* Test suite as standard Python `unittest` setup (thanks @ghost.adh).
+* scanner: error on dependency files without lock file.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1504
+* nightly: finding APKs in the wrong directory. (thanks @WrenIX)
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1512
+* `AllowedAPKSigningKeys` works with all single-signer APK signatures.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1466
+* Sync all translations for: cs de it ko pl pt pt_BR pt_PT ro ru sq tr uk
+  zh_Hans zh_Hant.
+* Support Androguard 4.x.
+* Support Python 3.12.
+
+### Removed
+
+* Drop all uses of _stats/known_apks.txt_ and the `update_stats:` config key.
+  https://gitlab.com/fdroid/fdroidserver/-/merge_requests/1547
 * The `maven:` field is now always a string, with `yes` as a legacy special
   value. It is no longer treated like a boolean in any case.
+* scanner: jcenter is no longer an allowed Maven repo.
+* build: `--reset-server` removed (thanks @gotmi1k).
 
 ## [2.2.2] - 2024-04-24
 
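The 2.5.0 and 2.4.0 changelog entries above drop the `awsaccesskeyid:`/`awssecretkey:` config items in favor of the standard `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` environment variables, and allow the `{env: foo}` syntax anywhere a string is accepted. A hedged sketch of a _config.yml_ relying on both (the bucket name and paths are illustrative, not taken from this changeset):

```yaml
# Sketch of a config.yml after the 2.4.x/2.5.0 changes described above.
# AWS credentials are no longer set in this file; `fdroid deploy` reads the
# standard AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables.
awsbucket: my-fdroid-repo          # illustrative bucket name
serverwebroot: /var/www/fdroid     # illustrative path

# {env: ...} pulls a value from the environment anywhere a string is allowed,
# matching the {env: keypass}/{env: gpghome} style used in the CI jobs above.
keystorepass: {env: keystorepass}
keypass: {env: keypass}
```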
199
MANIFEST.in
199
MANIFEST.in
|
@@ -20,14 +20,17 @@ include examples/template.yml
 include examples/Vagrantfile.yaml
 include gradlew-fdroid
 include LICENSE
+include locale/ba/LC_MESSAGES/fdroidserver.po
 include locale/bo/LC_MESSAGES/fdroidserver.po
 include locale/ca/LC_MESSAGES/fdroidserver.po
 include locale/cs/LC_MESSAGES/fdroidserver.po
 include locale/de/LC_MESSAGES/fdroidserver.po
 include locale/es/LC_MESSAGES/fdroidserver.po
 include locale/fr/LC_MESSAGES/fdroidserver.po
+include locale/ga/LC_MESSAGES/fdroidserver.po
 include locale/hu/LC_MESSAGES/fdroidserver.po
 include locale/it/LC_MESSAGES/fdroidserver.po
+include locale/ja/LC_MESSAGES/fdroidserver.po
 include locale/ko/LC_MESSAGES/fdroidserver.po
 include locale/nb_NO/LC_MESSAGES/fdroidserver.po
 include locale/pl/LC_MESSAGES/fdroidserver.po
@@ -48,7 +51,6 @@ include README.md
 include tests/aosp_testkey_debug.keystore
 include tests/apk.embedded_1.apk
 include tests/bad-unicode-*.apk
-include tests/build.TestCase
 include tests/build-tools/17.0.0/aapt-output-com.moez.QKSMS_182.txt
 include tests/build-tools/17.0.0/aapt-output-com.politedroid_3.txt
 include tests/build-tools/17.0.0/aapt-output-com.politedroid_4.txt
@@ -58,10 +60,10 @@ include tests/build-tools/17.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/17.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/17.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/17.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/17.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/17.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/17.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/17.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/18.1.1/aapt-output-com.moez.QKSMS_182.txt
@@ -73,10 +75,10 @@ include tests/build-tools/18.1.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/18.1.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/18.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/18.1.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/18.1.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/18.1.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/18.1.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/18.1.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/19.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -88,10 +90,10 @@ include tests/build-tools/19.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/19.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/19.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/19.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/19.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/19.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/19.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/19.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/19.1.0/aapt-output-com.moez.QKSMS_182.txt
@@ -103,10 +105,10 @@ include tests/build-tools/19.1.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/19.1.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/19.1.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/19.1.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/19.1.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/19.1.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/19.1.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/19.1.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/20.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -118,10 +120,10 @@ include tests/build-tools/20.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/20.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/20.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/20.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/20.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/20.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/20.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/20.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/21.1.1/aapt-output-com.moez.QKSMS_182.txt
@@ -133,10 +135,10 @@ include tests/build-tools/21.1.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/21.1.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/21.1.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/21.1.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/21.1.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/21.1.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/21.1.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/21.1.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/21.1.2/aapt-output-com.moez.QKSMS_182.txt
@@ -148,10 +150,10 @@ include tests/build-tools/21.1.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/21.1.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/21.1.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/21.1.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/21.1.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/21.1.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/21.1.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/21.1.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/22.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -163,10 +165,10 @@ include tests/build-tools/22.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/22.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/22.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/22.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/22.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/22.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/22.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/22.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/22.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -178,10 +180,10 @@ include tests/build-tools/22.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/22.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/22.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/22.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/22.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/22.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/22.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/22.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/23.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -193,10 +195,10 @@ include tests/build-tools/23.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/23.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/23.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/23.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/23.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/23.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/23.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/23.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -208,10 +210,10 @@ include tests/build-tools/23.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/23.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/23.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/23.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/23.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/23.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/23.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/23.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -223,10 +225,10 @@ include tests/build-tools/23.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/23.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/23.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/23.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/23.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/23.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/23.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/23.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -238,10 +240,10 @@ include tests/build-tools/23.0.3/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/23.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/23.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/23.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/23.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/23.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/23.0.3/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/23.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/24.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -253,10 +255,10 @@ include tests/build-tools/24.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/24.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/24.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/24.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/24.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/24.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/24.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/24.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -268,10 +270,10 @@ include tests/build-tools/24.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/24.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/24.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/24.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/24.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/24.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/24.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/24.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -283,10 +285,10 @@ include tests/build-tools/24.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/24.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/24.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/24.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/24.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/24.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/24.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/24.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -298,10 +300,10 @@ include tests/build-tools/24.0.3/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/24.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/24.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/24.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/24.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/24.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/24.0.3/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/24.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/25.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -313,10 +315,10 @@ include tests/build-tools/25.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/25.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/25.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/25.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/25.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/25.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/25.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/25.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -328,10 +330,10 @@ include tests/build-tools/25.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/25.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/25.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/25.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/25.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/25.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/25.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/25.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -343,10 +345,10 @@ include tests/build-tools/25.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/25.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/25.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/25.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/25.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/25.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/25.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/25.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -358,10 +360,10 @@ include tests/build-tools/25.0.3/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/25.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/25.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/25.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/25.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/25.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/25.0.3/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/25.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/26.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -373,10 +375,10 @@ include tests/build-tools/26.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/26.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/26.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/26.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/26.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/26.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/26.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/26.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -388,10 +390,10 @@ include tests/build-tools/26.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/26.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/26.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/26.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/26.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/26.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/26.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/26.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -403,10 +405,10 @@ include tests/build-tools/26.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/26.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/26.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/26.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/26.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/26.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/26.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/26.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -418,10 +420,10 @@ include tests/build-tools/26.0.3/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/26.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/26.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/26.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/26.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/26.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/26.0.3/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/26.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/27.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -433,10 +435,10 @@ include tests/build-tools/27.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/27.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/27.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/27.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/27.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/27.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/27.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/27.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -448,10 +450,10 @@ include tests/build-tools/27.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/27.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/27.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/27.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/27.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/27.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/27.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/27.0.2/aapt-output-com.moez.QKSMS_182.txt
@@ -463,10 +465,10 @@ include tests/build-tools/27.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/27.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/27.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/27.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/27.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/27.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/27.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/27.0.3/aapt-output-com.moez.QKSMS_182.txt
@@ -478,10 +480,10 @@ include tests/build-tools/27.0.3/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/27.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/27.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/27.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/27.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/27.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/27.0.3/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/27.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/28.0.0/aapt-output-com.moez.QKSMS_182.txt
@@ -493,10 +495,10 @@ include tests/build-tools/28.0.0/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/28.0.0/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/28.0.0/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/28.0.0/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/28.0.0/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/28.0.0/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.0/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/28.0.0/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/28.0.1/aapt-output-com.moez.QKSMS_182.txt
@@ -508,10 +510,10 @@ include tests/build-tools/28.0.1/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/28.0.1/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/28.0.1/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/28.0.1/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/28.0.1/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/28.0.1/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.1/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/28.0.1/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/28.0.2/aapt-output-com.politedroid_3.txt
@@ -522,10 +524,10 @@ include tests/build-tools/28.0.2/aapt-output-duplicate.permisssions_9999999.txt
 include tests/build-tools/28.0.2/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/28.0.2/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/28.0.2/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/28.0.2/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/28.0.2/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.2/aapt-output-org.droidtr.keyboard_34.txt
 include tests/build-tools/28.0.2/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/28.0.3/aapt-output-com.example.test.helloworld_1.txt
@@ -538,17 +540,15 @@ include tests/build-tools/28.0.3/aapt-output-info.guardianproject.urzip_100.txt
 include tests/build-tools/28.0.3/aapt-output-info.zwanenburg.caffeinetile_4.txt
 include tests/build-tools/28.0.3/aapt-output-no.min.target.sdk_987.txt
 include tests/build-tools/28.0.3/aapt-output-obb.main.oldversion_1444412523.txt
-include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101613.txt
 include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101615.txt
 include tests/build-tools/28.0.3/aapt-output-obb.main.twoversions_1101617.txt
+include tests/build-tools/28.0.3/aapt-output-obb.mainpatch.current_1619.txt
 include tests/build-tools/28.0.3/aapt-output-souch.smsbypass_9.txt
 include tests/build-tools/generate.sh
 include tests/check-fdroid-apk
-include tests/checkupdates.TestCase
 include tests/com.fake.IpaApp_1000000000001.ipa
-include tests/common.TestCase
-include tests/config.py
+include tests/config.yml
 include tests/config/antiFeatures.yml
 include tests/config/categories.yml
 include tests/config/de/antiFeatures.yml
@ -567,18 +567,16 @@ include tests/config/ic_antifeature_upstreamnonfree.xml
|
||||||
include tests/config/ro/antiFeatures.yml
|
include tests/config/ro/antiFeatures.yml
|
||||||
include tests/config/zh-rCN/antiFeatures.yml
|
include tests/config/zh-rCN/antiFeatures.yml
|
||||||
include tests/corrupt-featureGraphic.png
|
include tests/corrupt-featureGraphic.png
|
||||||
include tests/deploy.TestCase
|
|
||||||
include tests/dummy-keystore.jks
|
include tests/dummy-keystore.jks
|
||||||
include tests/dump_internal_metadata_format.py
|
include tests/dump_internal_metadata_format.py
|
||||||
include tests/exception.TestCase
|
|
||||||
include tests/extra/manual-vmtools-test.py
|
include tests/extra/manual-vmtools-test.py
|
||||||
include tests/funding-usernames.yaml
|
include tests/funding-usernames.yaml
|
||||||
include tests/get_android_tools_versions/android-ndk-r10e/RELEASE.TXT
|
include tests/get_android_tools_versions/android-ndk-r10e/RELEASE.TXT
|
||||||
|
include tests/get_android_tools_versions/android-sdk/ndk-bundle/package.xml
|
||||||
|
include tests/get_android_tools_versions/android-sdk/ndk-bundle/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/ndk/11.2.2725575/source.properties
|
include tests/get_android_tools_versions/android-sdk/ndk/11.2.2725575/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/ndk/17.2.4988734/source.properties
|
include tests/get_android_tools_versions/android-sdk/ndk/17.2.4988734/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/ndk/21.3.6528147/source.properties
|
include tests/get_android_tools_versions/android-sdk/ndk/21.3.6528147/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/ndk-bundle/package.xml
|
|
||||||
include tests/get_android_tools_versions/android-sdk/ndk-bundle/source.properties
|
|
||||||
include tests/get_android_tools_versions/android-sdk/patcher/v4/source.properties
|
include tests/get_android_tools_versions/android-sdk/patcher/v4/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/platforms/android-30/source.properties
|
include tests/get_android_tools_versions/android-sdk/platforms/android-30/source.properties
|
||||||
include tests/get_android_tools_versions/android-sdk/skiaparser/1/source.properties
|
include tests/get_android_tools_versions/android-sdk/skiaparser/1/source.properties
|
||||||
|
@ -589,10 +587,6 @@ include tests/gnupghome/secring.gpg
|
||||||
include tests/gnupghome/trustdb.gpg
|
include tests/gnupghome/trustdb.gpg
|
||||||
include tests/gradle-maven-blocks.yaml
|
include tests/gradle-maven-blocks.yaml
|
||||||
include tests/gradle-release-checksums.py
|
include tests/gradle-release-checksums.py
|
||||||
include tests/import_subcommand.TestCase
|
|
||||||
include tests/index.TestCase
|
|
||||||
include tests/init.TestCase
|
|
||||||
include tests/install.TestCase
|
|
||||||
include tests/IsMD5Disabled.java
|
include tests/IsMD5Disabled.java
|
||||||
include tests/issue-1128-min-sdk-30-poc.apk
|
include tests/issue-1128-min-sdk-30-poc.apk
|
||||||
include tests/issue-1128-poc1.apk
|
include tests/issue-1128-poc1.apk
|
||||||
|
@ -600,10 +594,11 @@ include tests/issue-1128-poc2.apk
|
||||||
include tests/issue-1128-poc3a.apk
|
include tests/issue-1128-poc3a.apk
|
||||||
include tests/issue-1128-poc3b.apk
|
include tests/issue-1128-poc3b.apk
|
||||||
include tests/janus.apk
|
include tests/janus.apk
|
||||||
include tests/keystore.jks
|
|
||||||
include tests/key-tricks.py
|
include tests/key-tricks.py
|
||||||
include tests/lint.TestCase
|
include tests/keystore.jks
|
||||||
include tests/main.TestCase
|
include tests/metadata-rewrite-yml/app.with.special.build.params.yml
|
||||||
|
include tests/metadata-rewrite-yml/fake.ota.update.yml
|
||||||
|
include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml
|
||||||
include tests/metadata/apk/info.guardianproject.urzip.yaml
|
include tests/metadata/apk/info.guardianproject.urzip.yaml
|
||||||
include tests/metadata/apk/org.dyndns.fules.ck.yaml
|
include tests/metadata/apk/org.dyndns.fules.ck.yaml
|
||||||
include tests/metadata/app.with.special.build.params.yml
|
include tests/metadata/app.with.special.build.params.yml
|
||||||
|
@ -621,13 +616,14 @@ include tests/metadata/dump/org.smssecure.smssecure.yaml
|
||||||
include tests/metadata/dump/org.videolan.vlc.yaml
|
include tests/metadata/dump/org.videolan.vlc.yaml
|
||||||
include tests/metadata/duplicate.permisssions.yml
|
include tests/metadata/duplicate.permisssions.yml
|
||||||
include tests/metadata/fake.ota.update.yml
|
include tests/metadata/fake.ota.update.yml
|
||||||
|
include tests/metadata/info.guardianproject.checkey.yml
|
||||||
include tests/metadata/info.guardianproject.checkey/en-US/description.txt
|
include tests/metadata/info.guardianproject.checkey/en-US/description.txt
|
||||||
include tests/metadata/info.guardianproject.checkey/en-US/name.txt
|
include tests/metadata/info.guardianproject.checkey/en-US/name.txt
|
||||||
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png
|
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey-phone.png
|
||||||
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png
|
include tests/metadata/info.guardianproject.checkey/en-US/phoneScreenshots/checkey.png
|
||||||
include tests/metadata/info.guardianproject.checkey/en-US/summary.txt
|
include tests/metadata/info.guardianproject.checkey/en-US/summary.txt
|
||||||
include tests/metadata/info.guardianproject.checkey/ja-JP/name.txt
|
include tests/metadata/info.guardianproject.checkey/ja-JP/name.txt
|
||||||
include tests/metadata/info.guardianproject.checkey.yml
|
include tests/metadata/info.guardianproject.urzip.yml
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/100.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/100.txt
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/default.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/changelogs/default.txt
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/full_description.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/full_description.txt
|
||||||
|
@ -636,31 +632,26 @@ include tests/metadata/info.guardianproject.urzip/en-US/images/icon.png
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/short_description.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/short_description.txt
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/title.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/title.txt
|
||||||
include tests/metadata/info.guardianproject.urzip/en-US/video.txt
|
include tests/metadata/info.guardianproject.urzip/en-US/video.txt
|
||||||
include tests/metadata/info.guardianproject.urzip.yml
|
|
||||||
include tests/metadata/info.zwanenburg.caffeinetile.yml
|
include tests/metadata/info.zwanenburg.caffeinetile.yml
|
||||||
include tests/metadata/no.min.target.sdk.yml
|
include tests/metadata/no.min.target.sdk.yml
|
||||||
include tests/metadata/obb.main.oldversion.yml
|
include tests/metadata/obb.main.oldversion.yml
|
||||||
include tests/metadata/obb.mainpatch.current.yml
|
|
||||||
include tests/metadata/obb.main.twoversions.yml
|
include tests/metadata/obb.main.twoversions.yml
|
||||||
|
include tests/metadata/obb.mainpatch.current.yml
|
||||||
include tests/metadata/org.adaway.yml
|
include tests/metadata/org.adaway.yml
|
||||||
include tests/metadata/org.fdroid.ci.test.app.yml
|
include tests/metadata/org.fdroid.ci.test.app.yml
|
||||||
include tests/metadata/org.fdroid.fdroid.yml
|
include tests/metadata/org.fdroid.fdroid.yml
|
||||||
|
include tests/metadata/org.maxsdkversion.yml
|
||||||
|
include tests/metadata/org.smssecure.smssecure.yml
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.RSA
|
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.RSA
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.SF
|
include tests/metadata/org.smssecure.smssecure/signatures/134/28969C09.SF
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/134/MANIFEST.MF
|
include tests/metadata/org.smssecure.smssecure/signatures/134/MANIFEST.MF
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.RSA
|
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.RSA
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.SF
|
include tests/metadata/org.smssecure.smssecure/signatures/135/28969C09.SF
|
||||||
include tests/metadata/org.smssecure.smssecure/signatures/135/MANIFEST.MF
|
include tests/metadata/org.smssecure.smssecure/signatures/135/MANIFEST.MF
|
||||||
include tests/metadata/org.smssecure.smssecure.yml
|
|
||||||
include tests/metadata/org.videolan.vlc.yml
|
include tests/metadata/org.videolan.vlc.yml
|
||||||
include tests/metadata/raw.template.yml
|
include tests/metadata/raw.template.yml
|
||||||
include tests/metadata-rewrite-yml/app.with.special.build.params.yml
|
|
||||||
include tests/metadata-rewrite-yml/fake.ota.update.yml
|
|
||||||
include tests/metadata-rewrite-yml/org.fdroid.fdroid.yml
|
|
||||||
include tests/metadata/souch.smsbypass.yml
|
include tests/metadata/souch.smsbypass.yml
|
||||||
include tests/metadata.TestCase
|
|
||||||
include tests/minimal_targetsdk_30_unsigned.apk
|
include tests/minimal_targetsdk_30_unsigned.apk
|
||||||
include tests/nightly.TestCase
|
|
||||||
include tests/Norway_bouvet_europe_2.obf.zip
|
include tests/Norway_bouvet_europe_2.obf.zip
|
||||||
include tests/no_targetsdk_minsdk1_unsigned.apk
|
include tests/no_targetsdk_minsdk1_unsigned.apk
|
||||||
include tests/no_targetsdk_minsdk30_unsigned.apk
|
include tests/no_targetsdk_minsdk30_unsigned.apk
|
||||||
|
@ -671,7 +662,6 @@ include tests/org.bitbucket.tickytacky.mirrormirror_3.apk
|
||||||
include tests/org.bitbucket.tickytacky.mirrormirror_4.apk
|
include tests/org.bitbucket.tickytacky.mirrormirror_4.apk
|
||||||
include tests/org.dyndns.fules.ck_20.apk
|
include tests/org.dyndns.fules.ck_20.apk
|
||||||
include tests/org.sajeg.fallingblocks_3.apk
|
include tests/org.sajeg.fallingblocks_3.apk
|
||||||
include tests/publish.TestCase
|
|
||||||
include tests/repo/com.example.test.helloworld_1.apk
|
include tests/repo/com.example.test.helloworld_1.apk
|
||||||
include tests/repo/com.politedroid_3.apk
|
include tests/repo/com.politedroid_3.apk
|
||||||
include tests/repo/com.politedroid_4.apk
|
include tests/repo/com.politedroid_4.apk
|
||||||
|
@ -690,16 +680,17 @@ include tests/repo/main.1434483388.obb.main.oldversion.obb
|
||||||
include tests/repo/main.1619.obb.mainpatch.current.obb
|
include tests/repo/main.1619.obb.mainpatch.current.obb
|
||||||
include tests/repo/no.min.target.sdk_987.apk
|
include tests/repo/no.min.target.sdk_987.apk
|
||||||
include tests/repo/obb.main.oldversion_1444412523.apk
|
include tests/repo/obb.main.oldversion_1444412523.apk
|
||||||
include tests/repo/obb.mainpatch.current_1619_another-release-key.apk
|
|
||||||
include tests/repo/obb.mainpatch.current_1619.apk
|
|
||||||
include tests/repo/obb.mainpatch.current/en-US/featureGraphic.png
|
|
||||||
include tests/repo/obb.mainpatch.current/en-US/icon.png
|
|
||||||
include tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png
|
|
||||||
include tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png
|
|
||||||
include tests/repo/obb.main.twoversions_1101613.apk
|
include tests/repo/obb.main.twoversions_1101613.apk
|
||||||
include tests/repo/obb.main.twoversions_1101615.apk
|
include tests/repo/obb.main.twoversions_1101615.apk
|
||||||
include tests/repo/obb.main.twoversions_1101617.apk
|
include tests/repo/obb.main.twoversions_1101617.apk
|
||||||
include tests/repo/obb.main.twoversions_1101617_src.tar.gz
|
include tests/repo/obb.main.twoversions_1101617_src.tar.gz
|
||||||
|
include tests/repo/obb.mainpatch.current/en-US/featureGraphic.png
|
||||||
|
include tests/repo/obb.mainpatch.current/en-US/icon.png
|
||||||
|
include tests/repo/obb.mainpatch.current/en-US/phoneScreenshots/screenshot-main.png
|
||||||
|
include tests/repo/obb.mainpatch.current/en-US/sevenInchScreenshots/screenshot-tablet-main.png
|
||||||
|
include tests/repo/obb.mainpatch.current_1619.apk
|
||||||
|
include tests/repo/obb.mainpatch.current_1619_another-release-key.apk
|
||||||
|
include tests/repo/org.maxsdkversion_4.apk
|
||||||
include tests/repo/org.videolan.vlc/en-US/icon.png
|
include tests/repo/org.videolan.vlc/en-US/icon.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png
|
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot10.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png
|
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot12.png
|
||||||
|
@ -711,16 +702,16 @@ include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot4.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png
|
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot7.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png
|
include tests/repo/org.videolan.vlc/en-US/phoneScreenshots/screenshot9.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot0.png
|
||||||
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot11.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot13.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot14.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot16.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot17.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot19.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot1.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot21.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot23.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot2.png
|
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot3.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot5.png
|
||||||
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png
|
include tests/repo/org.videolan.vlc/en-US/sevenInchScreenshots/screenshot6.png
|
||||||
|
@ -729,18 +720,25 @@ include tests/repo/patch.1619.obb.mainpatch.current.obb
|
||||||
include tests/repo/souch.smsbypass_9.apk
|
include tests/repo/souch.smsbypass_9.apk
|
||||||
include tests/repo/urzip-*.apk
|
include tests/repo/urzip-*.apk
|
||||||
include tests/repo/v1.v2.sig_1020.apk
|
include tests/repo/v1.v2.sig_1020.apk
|
||||||
include tests/rewritemeta.TestCase
|
|
||||||
include tests/run-tests
|
include tests/run-tests
|
||||||
include tests/SANAPPSI.RSA
|
include tests/SANAPPSI.RSA
|
||||||
include tests/SANAPPSI.SF
|
include tests/SANAPPSI.SF
|
||||||
include tests/scanner.TestCase
|
include tests/shared_test_code.py
|
||||||
include tests/signatures.TestCase
|
|
||||||
include tests/signindex.TestCase
|
|
||||||
include tests/signindex/guardianproject.jar
|
|
||||||
include tests/signindex/guardianproject-v1.jar
|
include tests/signindex/guardianproject-v1.jar
|
||||||
|
include tests/signindex/guardianproject.jar
|
||||||
include tests/signindex/testy.jar
|
include tests/signindex/testy.jar
|
||||||
include tests/signindex/unsigned.jar
|
include tests/signindex/unsigned.jar
|
||||||
include tests/source-files/at.bitfire.davdroid/build.gradle
|
include tests/source-files/at.bitfire.davdroid/build.gradle
|
||||||
|
include tests/source-files/catalog.test/app/build.gradle
|
||||||
|
include tests/source-files/catalog.test/build.gradle.kts
|
||||||
|
include tests/source-files/catalog.test/buildSrc/build.gradle.kts
|
||||||
|
include tests/source-files/catalog.test/buildSrc/settings.gradle.kts
|
||||||
|
include tests/source-files/catalog.test/buildSrc2/build.gradle.kts
|
||||||
|
include tests/source-files/catalog.test/buildSrc2/settings.gradle.kts
|
||||||
|
include tests/source-files/catalog.test/core/build.gradle
|
||||||
|
include tests/source-files/catalog.test/gradle/libs.versions.toml
|
||||||
|
include tests/source-files/catalog.test/libs.versions.toml
|
||||||
|
include tests/source-files/catalog.test/settings.gradle.kts
|
||||||
include tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle
|
include tests/source-files/cn.wildfirechat.chat/avenginekit/build.gradle
|
||||||
include tests/source-files/cn.wildfirechat.chat/build.gradle
|
include tests/source-files/cn.wildfirechat.chat/build.gradle
|
||||||
include tests/source-files/cn.wildfirechat.chat/chat/build.gradle
|
include tests/source-files/cn.wildfirechat.chat/chat/build.gradle
|
||||||
|
@ -757,6 +755,11 @@ include tests/source-files/com.anpmech.launcher/app/src/main/AndroidManifest.xml
|
||||||
include tests/source-files/com.anpmech.launcher/build.gradle
|
include tests/source-files/com.anpmech.launcher/build.gradle
|
||||||
include tests/source-files/com.anpmech.launcher/settings.gradle
|
include tests/source-files/com.anpmech.launcher/settings.gradle
|
||||||
include tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle
|
include tests/source-files/com.github.jameshnsears.quoteunquote/build.gradle
|
||||||
|
include tests/source-files/com.github.shadowsocks/core/build.gradle.kts
|
||||||
|
include tests/source-files/com.github.shadowsocks/mobile/build.gradle.kts
|
||||||
|
include tests/source-files/com.infomaniak.mail/Core/gradle/core.versions.toml
|
||||||
|
include tests/source-files/com.infomaniak.mail/gradle/libs.versions.toml
|
||||||
|
include tests/source-files/com.infomaniak.mail/settings.gradle
|
||||||
include tests/source-files/com.integreight.onesheeld/build.gradle
|
include tests/source-files/com.integreight.onesheeld/build.gradle
|
||||||
include tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties
|
include tests/source-files/com.integreight.onesheeld/gradle/wrapper/gradle-wrapper.properties
|
||||||
include tests/source-files/com.integreight.onesheeld/localeapi/build.gradle
|
include tests/source-files/com.integreight.onesheeld/localeapi/build.gradle
|
||||||
|
@ -770,16 +773,21 @@ include tests/source-files/com.integreight.onesheeld/pullToRefreshlibrary/src/ma
|
||||||
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle
|
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/build.gradle
|
||||||
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml
|
include tests/source-files/com.integreight.onesheeld/quickReturnHeader/src/main/AndroidManifest.xml
|
||||||
include tests/source-files/com.integreight.onesheeld/settings.gradle
|
include tests/source-files/com.integreight.onesheeld/settings.gradle
|
||||||
include tests/source-files/com.jens.automation2/build.gradle
|
|
||||||
include tests/source-files/com.jens.automation2/app/build.gradle
|
include tests/source-files/com.jens.automation2/app/build.gradle
|
||||||
|
include tests/source-files/com.jens.automation2/build.gradle
|
||||||
include tests/source-files/com.kunzisoft.testcase/build.gradle
|
include tests/source-files/com.kunzisoft.testcase/build.gradle
|
||||||
include tests/source-files/com.nextcloud.client/build.gradle
|
include tests/source-files/com.lolo.io.onelist/app/build.gradle.kts
|
||||||
|
include tests/source-files/com.lolo.io.onelist/build.gradle.kts
|
||||||
|
include tests/source-files/com.lolo.io.onelist/gradle/libs.versions.toml
|
||||||
|
include tests/source-files/com.lolo.io.onelist/gradle/wrapper/gradle-wrapper.properties
|
||||||
|
include tests/source-files/com.lolo.io.onelist/settings.gradle
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt
|
include tests/source-files/com.nextcloud.client.dev/src/generic/fastlane/metadata/android/en-US/title.txt
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt
|
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/full_description.txt
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt
|
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/short_description.txt
|
||||||
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt
|
include tests/source-files/com.nextcloud.client.dev/src/versionDev/fastlane/metadata/android/en-US/title.txt
|
||||||
|
include tests/source-files/com.nextcloud.client/build.gradle
|
||||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/full_description.txt
|
||||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/short_description.txt
|
||||||
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt
|
include tests/source-files/com.nextcloud.client/src/generic/fastlane/metadata/android/en-US/title.txt
|
||||||
|
@ -799,21 +807,26 @@ include tests/source-files/firebase-allowlisted/app/build.gradle
|
||||||
include tests/source-files/firebase-allowlisted/build.gradle
|
include tests/source-files/firebase-allowlisted/build.gradle
|
||||||
include tests/source-files/firebase-suspect/app/build.gradle
|
include tests/source-files/firebase-suspect/app/build.gradle
|
||||||
include tests/source-files/firebase-suspect/build.gradle
|
include tests/source-files/firebase-suspect/build.gradle
|
||||||
|
include tests/source-files/flavor.test/build.gradle
|
||||||
include tests/source-files/info.guardianproject.ripple/build.gradle
|
include tests/source-files/info.guardianproject.ripple/build.gradle
|
||||||
|
include tests/source-files/lockfile.test/flutter/.dart_tool/flutter_gen/pubspec.yaml
|
||||||
include tests/source-files/lockfile.test/flutter/pubspec.lock
|
include tests/source-files/lockfile.test/flutter/pubspec.lock
|
||||||
include tests/source-files/lockfile.test/flutter/pubspec.yaml
|
include tests/source-files/lockfile.test/flutter/pubspec.yaml
|
||||||
include tests/source-files/lockfile.test/javascript/package.json
|
include tests/source-files/lockfile.test/javascript/package.json
|
||||||
include tests/source-files/lockfile.test/javascript/yarn.lock
|
include tests/source-files/lockfile.test/javascript/yarn.lock
|
||||||
include tests/source-files/lockfile.test/rust/subdir2/Cargo.toml
|
|
||||||
include tests/source-files/lockfile.test/rust/subdir/Cargo.lock
|
include tests/source-files/lockfile.test/rust/subdir/Cargo.lock
|
||||||
include tests/source-files/lockfile.test/rust/subdir/Cargo.toml
|
include tests/source-files/lockfile.test/rust/subdir/Cargo.toml
|
||||||
include tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml
|
include tests/source-files/lockfile.test/rust/subdir/subdir/subdir/Cargo.toml
|
||||||
|
include tests/source-files/lockfile.test/rust/subdir2/Cargo.toml
|
||||||
include tests/source-files/open-keychain/open-keychain/build.gradle
|
include tests/source-files/open-keychain/open-keychain/build.gradle
|
||||||
include tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle
|
include tests/source-files/open-keychain/open-keychain/OpenKeychain/build.gradle
|
||||||
include tests/source-files/org.mozilla.rocket/app/build.gradle
|
include tests/source-files/org.mozilla.rocket/app/build.gradle
|
||||||
include tests/source-files/org.noise_planet.noisecapture/app/build.gradle
|
include tests/source-files/org.noise_planet.noisecapture/app/build.gradle
|
||||||
include tests/source-files/org.noise_planet.noisecapture/settings.gradle
|
include tests/source-files/org.noise_planet.noisecapture/settings.gradle
|
||||||
include tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle
|
include tests/source-files/org.noise_planet.noisecapture/sosfilter/build.gradle
|
||||||
|
include tests/source-files/org.piepmeyer.gauguin/build.gradle.kts
|
||||||
|
include tests/source-files/org.piepmeyer.gauguin/libs.versions.toml
|
||||||
|
include tests/source-files/org.piepmeyer.gauguin/settings.gradle.kts
|
||||||
include tests/source-files/org.tasks/app/build.gradle.kts
|
include tests/source-files/org.tasks/app/build.gradle.kts
|
||||||
include tests/source-files/org.tasks/build.gradle
|
include tests/source-files/org.tasks/build.gradle
|
||||||
include tests/source-files/org.tasks/build.gradle.kts
|
include tests/source-files/org.tasks/build.gradle.kts
|
||||||
|
@ -821,6 +834,7 @@ include tests/source-files/org.tasks/buildSrc/build.gradle.kts
|
||||||
include tests/source-files/org.tasks/settings.gradle.kts
|
include tests/source-files/org.tasks/settings.gradle.kts
|
||||||
include tests/source-files/osmandapp/osmand/build.gradle
|
include tests/source-files/osmandapp/osmand/build.gradle
|
||||||
include tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties
|
include tests/source-files/osmandapp/osmand/gradle/wrapper/gradle-wrapper.properties
|
||||||
|
include tests/source-files/OtakuWorld/build.gradle
|
||||||
include tests/source-files/realm/react-native/android/build.gradle
|
include tests/source-files/realm/react-native/android/build.gradle
|
||||||
include tests/source-files/se.manyver/android/app/build.gradle
|
include tests/source-files/se.manyver/android/app/build.gradle
|
||||||
include tests/source-files/se.manyver/android/build.gradle
|
include tests/source-files/se.manyver/android/build.gradle
|
||||||
|
@ -838,11 +852,33 @@ include tests/source-files/ut.ewh.audiometrytest/settings.gradle
|
||||||
include tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties
|
include tests/source-files/yuriykulikov/AlarmClock/gradle/wrapper/gradle-wrapper.properties
|
||||||
include tests/source-files/Zillode/syncthing-silk/build.gradle
|
include tests/source-files/Zillode/syncthing-silk/build.gradle
|
||||||
include tests/SpeedoMeterApp.main_1.apk
|
include tests/SpeedoMeterApp.main_1.apk
|
||||||
include tests/stats/known_apks.txt
|
include tests/test_build.py
|
||||||
include tests/testcommon.py
|
include tests/test_checkupdates.py
|
||||||
include tests/test-gradlew-fdroid
|
include tests/test_common.py
|
||||||
include tests/triple-t-2/build/org.piwigo.android/app/build.gradle
|
include tests/test_deploy.py
|
||||||
|
include tests/test_exception.py
|
||||||
|
include tests/test_gradlew-fdroid
|
||||||
|
include tests/test_import_subcommand.py
|
||||||
|
include tests/test_index.py
|
||||||
|
include tests/test_init.py
|
||||||
|
include tests/test_install.py
|
||||||
|
include tests/test_lint.py
|
||||||
|
include tests/test_main.py
|
||||||
|
include tests/test_metadata.py
|
||||||
|
include tests/test_nightly.py
|
||||||
|
include tests/test_publish.py
|
||||||
|
include tests/test_rewritemeta.py
|
||||||
|
include tests/test_scanner.py
|
||||||
|
include tests/test_signatures.py
|
||||||
|
include tests/test_signindex.py
|
||||||
|
include tests/test_update.py
|
||||||
|
include tests/test_vcs.py
|
||||||
|
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/featureGraphic/play_store_feature_graphic.png
|
||||||
|
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/icon/icon.png
|
||||||
|
include tests/triple-t-1-graphics/build/de.wivewa.dialer/app/src/main/play/en-US/listing/phoneScreenshots/1.png
|
||||||
|
include tests/triple-t-1-graphics/metadata/de.wivewa.dialer.yml
|
||||||
include tests/triple-t-2/build/org.piwigo.android/app/.gitignore
|
include tests/triple-t-2/build/org.piwigo.android/app/.gitignore
|
||||||
|
include tests/triple-t-2/build/org.piwigo.android/app/build.gradle
|
||||||
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml
|
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/constants.xml
|
||||||
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml
|
include tests/triple-t-2/build/org.piwigo.android/app/src/debug/res/values/strings.xml
|
||||||
include tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java
|
include tests/triple-t-2/build/org.piwigo.android/app/src/main/java/org/piwigo/PiwigoApplication.java
|
||||||
|
@ -897,14 +933,13 @@ include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/verif
|
||||||
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt
|
include tests/triple-t-multiple/build/ch.admin.bag.covidcertificate.wallet/wallet/src/main/play/listings/en-US/title.txt
|
||||||
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml
|
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.verifier.yml
|
||||||
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml
|
include tests/triple-t-multiple/metadata/ch.admin.bag.covidcertificate.wallet.yml
|
||||||
include tests/update.TestCase
|
|
||||||
include tests/urzip.apk
|
|
||||||
include tests/urzip-badcert.apk
|
include tests/urzip-badcert.apk
|
||||||
include tests/urzip-badsig.apk
|
include tests/urzip-badsig.apk
|
||||||
include tests/urzip-release.apk
|
|
||||||
include tests/urzip-release-unsigned.apk
|
include tests/urzip-release-unsigned.apk
|
||||||
|
include tests/urzip-release.apk
|
||||||
|
include tests/urzip.apk
|
||||||
include tests/v2.only.sig_2.apk
|
include tests/v2.only.sig_2.apk
|
||||||
include tests/valid-package-names/random-package-names
|
include tests/valid-package-names/random-package-names
|
||||||
include tests/valid-package-names/RandomPackageNames.java
|
include tests/valid-package-names/RandomPackageNames.java
|
||||||
include tests/valid-package-names/test.py
|
include tests/valid-package-names/test.py
|
||||||
include tests/vcs.TestCase
|
include tests/__init__.py
|
||||||
|
|
|
@@ -53,13 +53,13 @@ To run the full test suite:

 tests/run-tests

-To run the tests for individual Python modules, see the _.TestCase_ files, e.g.:
+To run the tests for individual Python modules, see the `tests/test_*.py` files, e.g.:

-tests/metadata.TestCase
+python -m unittest tests/test_metadata.py

 It is also possible to run individual tests:

-tests/metadata.TestCase MetadataTest.test_rewrite_yaml_special_build_params
+python -m unittest tests.test_metadata.MetadataTest.test_rewrite_yaml_special_build_params

 There is a growing test suite that has good coverage on a number of key parts of
 this code base. It does not yet cover all the code, and there are some parts

@@ -37,11 +37,22 @@ RUN useradd --create-home -s /bin/bash vagrant && echo -n 'vagrant:vagrant' | ch
 #
 # Ensure fdroidserver's dependencies are marked manual before purging
 # unneeded packages, otherwise, all its dependencies get purged.
+#
+# The official Debian docker images ship without ca-certificates, so
+# TLS certificates cannot be verified until that is installed. The
+# following code temporarily turns off TLS verification, and enables
+# HTTPS, so at least unverified TLS is used for apt-get instead of
+# plain HTTP. Once ca-certificates is installed, the CA verification
+# is enabled by removing the newly created config file. This set up
+# makes the initial `apt-get update` and `apt-get install` look the
+# same as verified TLS to the network observer and hides the metadata.
 RUN printf "path-exclude=/usr/share/locale/*\npath-exclude=/usr/share/man/*\npath-exclude=/usr/share/doc/*\npath-include=/usr/share/doc/*/copyright\n" >/etc/dpkg/dpkg.cfg.d/01_nodoc \
 && mkdir -p /usr/share/man/man1 \
+&& echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates \
+&& find /etc/apt/sources.list* -type f -exec sed -i s,http:,https:, {} \; \
 && apt-get update \
 && apt-get install ca-certificates \
-&& sed -i 's,http:,https:,' /etc/apt/sources.list.d/debian.sources \
+&& rm /etc/apt/apt.conf.d/99nocacertificates \
 && apt-get upgrade \
 && apt-get dist-upgrade \
 && apt-get install openssh-client iproute2 python3 openssh-server sudo \

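To make the new comment block easier to follow, here is a minimal sketch of just the TLS bootstrap it describes, pulled out of the single RUN instruction above with the unrelated dpkg and manpage housekeeping omitted:

    # temporarily accept unverified TLS so apt can reach HTTPS mirrors at all
    echo 'Acquire::https::Verify-Peer "false";' > /etc/apt/apt.conf.d/99nocacertificates
    # switch every apt source from http: to https:
    find /etc/apt/sources.list* -type f -exec sed -i 's,http:,https:,' {} \;
    apt-get update
    apt-get install ca-certificates
    # the CA bundle is now present, so re-enable certificate verification
    rm /etc/apt/apt.conf.d/99nocacertificates

This is only an illustration of the ordering; the Dockerfile itself chains these steps with && inside one RUN so the build fails early if any step fails.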
4
buildserver/Vagrantfile
vendored
4
buildserver/Vagrantfile
vendored
|
@ -14,7 +14,7 @@ configfile = {
|
||||||
|
|
||||||
srvpath = Pathname.new(File.dirname(__FILE__)).realpath
|
srvpath = Pathname.new(File.dirname(__FILE__)).realpath
|
||||||
configpath = File.join(srvpath, "/Vagrantfile.yaml")
|
configpath = File.join(srvpath, "/Vagrantfile.yaml")
|
||||||
if File.exists? configpath
|
if File.exist? configpath
|
||||||
c = YAML.load_file(configpath)
|
c = YAML.load_file(configpath)
|
||||||
if c and not c.empty?
|
if c and not c.empty?
|
||||||
c.each do |k,v|
|
c.each do |k,v|
|
||||||
|
@ -86,7 +86,7 @@ Vagrant.configure("2") do |config|
|
||||||
# Make sure dir exists to mount to, since buildserver/ is
|
# Make sure dir exists to mount to, since buildserver/ is
|
||||||
# automatically mounted as /vagrant in the guest VM. This is more
|
# automatically mounted as /vagrant in the guest VM. This is more
|
||||||
# necessary with 9p synced folders
|
# necessary with 9p synced folders
|
||||||
Dir.mkdir('cache') unless File.exists?('cache')
|
Dir.mkdir('cache') unless File.exist?('cache')
|
||||||
|
|
||||||
# Root partition needs to be resized to the new allocated space
|
# Root partition needs to be resized to the new allocated space
|
||||||
config.vm.provision "shell", inline: <<-SHELL
|
config.vm.provision "shell", inline: <<-SHELL
|
||||||
|
|
|
@@ -104,6 +104,7 @@ apt-get upgrade
 apt-get update || apt-get update

 packages="
+androguard/bookworm-backports
 apksigner
 default-jdk-headless
 default-jre-headless
@@ -114,6 +115,7 @@ packages="
 gnupg
 mercurial
 patch
+python3-magic
 python3-packaging
 rsync
 sdkmanager/bookworm-backports

@@ -10,6 +10,6 @@ cat <<EOF > $flutter_conf
 "enabled": false
 }
 EOF
-chown -R vagrant.vagrant $flutter_conf
+chown -R vagrant:vagrant $flutter_conf
 chmod -R 0644 $flutter_conf

@@ -25,9 +25,14 @@ fi
 chmod -R a+rX /opt/gradle

 test -e /opt/gradle/bin || mkdir -p /opt/gradle/bin
-ln -fs /home/vagrant/fdroidserver/gradlew-fdroid /opt/gradle/bin/gradle
-chown -h vagrant.vagrant /opt/gradle/bin/gradle
-chown vagrant.vagrant /opt/gradle/versions
+git clone --depth 1 https://gitlab.com/fdroid/gradlew-fdroid.git /home/vagrant/gradlew-fdroid/
+chmod 0755 /home/vagrant/gradlew-fdroid/gradlew-fdroid
+chmod -R u+rwX,a+rX,go-w /home/vagrant/gradlew-fdroid/
+ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /opt/gradle/bin/gradle
+ln -fs /home/vagrant/gradlew-fdroid/gradlew-fdroid /usr/local/bin/
+
+chown -h vagrant:vagrant /opt/gradle/bin/gradle
+chown vagrant:vagrant /opt/gradle/versions
 chmod 0755 /opt/gradle/versions

 GRADLE_HOME=/home/vagrant/.gradle
@@ -44,5 +49,5 @@ systemProp.org.gradle.internal.http.connectionTimeout=600000
 systemProp.org.gradle.internal.http.socketTimeout=600000
 EOF

-chown -R vagrant.vagrant $GRADLE_HOME/
+chown -R vagrant:vagrant $GRADLE_HOME/
 chmod -R a+rX $GRADLE_HOME/

@@ -109,8 +109,8 @@ __complete_gpgsign() {
 }

 __complete_install() {
-opts="-v -q"
-lopts="--verbose --quiet --all"
+opts="-v -q -a -p -n -y"
+lopts="--verbose --quiet --all --color --no-color --privacy-mode --no-privacy-mode --no --yes"
 case "${cur}" in
 -*)
 __complete_options
@@ -251,7 +251,7 @@ __complete_btlog() {

 __complete_mirror() {
 opts="-v"
-lopts="--all --archive --build-logs --pgp-signatures --src-tarballs --output-dir"
+lopts="--all --archive --build-logs --color --no-color --pgp-signatures --src-tarballs --output-dir"
 __complete_options
 }

@@ -270,7 +270,7 @@ __complete_deploy() {

 __complete_signatures() {
 opts="-v -q"
-lopts="--verbose --no-check-https"
+lopts="--verbose --color --no-color --no-check-https"
 case "${cur}" in
 -*)
 __complete_options
@@ -289,7 +289,7 @@ __complete_signindex() {
 __complete_init() {
 opts="-v -q -d"
 lopts="--verbose --quiet --distinguished-name --keystore
---repo-keyalias --android-home --no-prompt"
+--repo-keyalias --android-home --no-prompt --color --no-color"
 __complete_options
 }

@@ -12,6 +12,7 @@
 #
 import os
 import sys

 sys.path.insert(0, os.path.abspath('../../fdroidserver'))

 # -- Project information -----------------------------------------------------

@@ -51,6 +51,13 @@
 # Canonical URL of the repositoy, needs to end in /repo. Is is used to identity
 # the repo in the client, as well.
 # repo_url: https://MyFirstFDroidRepo.org/fdroid/repo
+#
+# Base URL for per-package pages on the website of this repo,
+# i.e. https://f-droid.org/packages/<appid>/ This should be accessible
+# with a browser. Setting it to null or not setting this disables the
+# feature.
+# repo_web_base_url: https://MyFirstFDroidRepo.org/packages/
+#
 # repo_name: My First F-Droid Repo Demo
 # repo_description: >-
 #   This is a repository of apps to be used with F-Droid. Applications
@@ -62,6 +69,7 @@
 # As above, but for the archive repo.
 #
 # archive_url: https://f-droid.org/archive
+# archive_web_base_url:
 # archive_name: My First F-Droid Archive Demo
 # archive_description: >-
 #   The repository of older versions of packages from the main demo repository.
@@ -297,70 +305,33 @@
 #
 # sync_from_local_copy_dir: true

-# To upload the repo to an Amazon S3 bucket using `fdroid deploy'
-# . rclone, s3cmd and apache libcloud are the available options.
-# If rclone and s3cmd are not installed, apache libcloud is used.
-# To use apache libcloud, add the following options to this file
-# (config.yml)
+# To deploy to an AWS S3 "bucket" in the US East region, set the
+# bucket name in the config, then set the environment variables
+# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY using the values from
+# the AWS Management Console. See
+# https://rclone.org/s3/#authentication
+#
+# awsbucket: myawsfdroidbucket
+
+# For extended options for syncing to cloud drive and object store
+# services, `fdroid deploy' wraps Rclone. Rclone is a full featured
+# sync tool for a huge variety of cloud services. Set up your services
+# using `rclone config`, then specify each config name to deploy the
+# awsbucket: to. Using rclone_config: overrides the default AWS S3 US
+# East setup, and will only sync to the services actually specified.
 #
-# awsbucket: myawsfdroid
-# awsaccesskeyid: SEE0CHAITHEIMAUR2USA
-# awssecretkey: {env: awssecretkey}
-#
-# In case s3cmd is installed and rclone is not installed,
-# s3cmd will be the preferred sync option.
-# It will delete and recreate the whole fdroid directory each time.
-# To customize how s3cmd interacts with the cloud
-# provider, create a 's3cfg' file next to this file (config.yml), and
-# those settings will be used instead of any 'aws' variable below.
-# Secrets can be fetched from environment variables to ensure that
-# they are not leaked as part of this file.
-#
-# awsbucket: myawsfdroid
-# awsaccesskeyid: SEE0CHAITHEIMAUR2USA
-# awssecretkey: {env: awssecretkey}
-#
-# In case rclone is installed and s3cmd is not installed,
-# rclone will be the preferred sync option.
-# It will sync the local folders with remote folders without
-# deleting anything in one go.
-# To ensure success, install rclone as per
-# the instructions at https://rclone.org/install/ and also configure for
-# object storage services as detailed at https://rclone.org/s3/#configuration
-# By default rclone uses the configuration file at ~/.config/rclone/rclone.conf
-# To specify a custom configuration file, please add the full path to the
-# configuration file as below
+# awsbucket: myawsfdroidbucket
+# rclone_config:
+#   - aws-sample-config
+#   - rclone-supported-service-config
+
+# By default Rclone uses the user's default configuration file at
+# ~/.config/rclone/rclone.conf To specify a custom configuration file,
+# please add the full path to the configuration file as below.
 #
 # path_to_custom_rclone_config: /home/mycomputer/somedir/example.conf
-#
-# This setting will ignore the default rclone config found at
-# ~/.config/rclone/rclone.conf
-#
-# Please note that rclone_config can be assigned a string or list
-#
-# awsbucket: myawsfdroid
-# rclone_config: aws-sample-config
-#
-# or
-#
-# awsbucket: myawsfdroid
-# rclone_config: [aws-sample-config, rclone-supported-service-config]
-#
-# In case both rclone and s3cmd are installed, the preferred sync
-# tool can be specified in this file (config.yml)
-# if s3cmd is preferred, set it as below
-#
-# s3cmd: true
-#
-# if rclone is preferred, set it as below
-#
-# rclone: true
-#
-# Please note that only one can be set to true at any time
-# Also, in the event that both s3cmd and rclone are installed
-# and both are missing from the config.yml file, the preferred
-# tool will be s3cmd.

 # If you want to force 'fdroid server' to use a non-standard serverwebroot.
@@ -393,11 +364,6 @@
 # configured to allow push access (e.g. ssh key, username/password, etc)
 # binary_transparency_remote: git@gitlab.com:fdroid/binary-transparency-log.git

-# If you want to keep the "added" and "last updated" dates for each
-# app and APK in your repo, enable this. The name comes from an old
-# system for tracking statistics that is no longer included.
-# update_stats: true
-
 # Set this to true to always use a build server. This saves specifying the
 # --server option on dedicated secure build server hosts.
 # build_server_always: true

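The new rclone-based deploy comments above boil down to a two-step workflow: create the remotes once with rclone's own tooling, then list their names under rclone_config: in config.yml. A rough sketch, using the sample names from the comments (aws-sample-config and myawsfdroidbucket are placeholders, not required values):

    rclone config        # interactively create a remote, e.g. "aws-sample-config"
    rclone listremotes   # confirm the remote name matches what config.yml will reference
    rclone lsd aws-sample-config:myawsfdroidbucket   # sanity-check access to the bucket

After that, `fdroid deploy' syncs only to the remotes actually listed under rclone_config:, and uses the default AWS S3 US East setup when rclone_config: is not set.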
@@ -6,7 +6,6 @@ import argparse
 import logging

 from fdroidserver import _, common, metadata
-
 from fdroidserver.exception import VCSException

 fdroid_summary = 'reset app VCSs to the latest version'
@@ -24,9 +23,9 @@ def main():
 )
 metadata.add_metadata_arguments(parser)
 options = common.parse_args(parser)
-pkgs = common.read_pkg_args(options.appid, True)
-allapps = metadata.read_metadata(pkgs)
-apps = common.read_app_args(options.appid, allapps, True)
+apps = common.read_app_args(
+options.appid, allow_version_codes=True, sort_by_time=True
+)
 common.read_config()

 for appid, app in apps.items():
@@ -4,6 +4,7 @@

 import os
 from argparse import ArgumentParser
+
 from fdroidserver import common
 from fdroidserver.common import FDroidPopen
 from fdroidserver.exception import BuildException
@@ -4,6 +4,7 @@

 import os
 from argparse import ArgumentParser
+
 from fdroidserver import common
 from fdroidserver.common import FDroidPopen
 from fdroidserver.exception import BuildException
@@ -4,6 +4,7 @@
 #

 from argparse import ArgumentParser
+
 from fdroidserver import common, index

 fdroid_summary = 'export the keystore in standard PEM format'
@@ -8,6 +8,7 @@
 import argparse
 import os
 import pprint
+
 from fdroidserver import _, common, metadata

 fdroid_summary = 'prepare the srclibs for `fdroid build --on-server`'
@@ -19,9 +20,7 @@ def main():
 parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"))
 metadata.add_metadata_arguments(parser)
 options = common.parse_args(parser)
-pkgs = common.read_pkg_args(options.appid, True)
-allapps = metadata.read_metadata(pkgs)
-apps = common.read_app_args(options.appid, allapps, True)
+apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True)
 common.read_config()
 srclib_dir = os.path.join('build', 'srclib')
 os.makedirs(srclib_dir, exist_ok=True)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3

 from argparse import ArgumentParser
+
 from fdroidserver import common
 from fdroidserver.common import FDroidPopen
 from fdroidserver.exception import BuildException

@@ -1,29 +0,0 @@
-#!/bin/bash
-#
-# This script syncs the entire repo to the primary mirrors. It is
-# meant to run in a cronjob quite frequently, as often as there are
-# files to send.
-#
-# This script expects the receiving side to have the following
-# preceeding the ssh key entry in ~/.ssh/authorized_keys:
-# command="rsync --server -logDtpre.iLsfx --log-format=X --delete --delay-updates . /path/to/htdocs/fdroid/",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty
-#
-set -e
-(
-flock -n 200
-set -e
-cd /home/fdroid
-for section in repo archive; do
-echo "Started $section at `date`:"
-for host in fdroid@ftp-push.lysator.liu.se fdroid@plug-mirror.rcac.purdue.edu fdroid@ftp.agdsn.de; do
-set -x
-# be super careful with the trailing slashes here! if one is wrong, it'll delete the entire section!
-rsync --archive --delay-updates --progress --delete \
---timeout=3600 \
-/home/fdroid/public_html/${section} \
-${host}:/srv/fdroid-mirror.at.or.at/htdocs/fdroid/ &
-set +x
-done
-wait
-done
-) 200>/var/lock/root_fdroidmirrortomirror

@@ -3,7 +3,6 @@ import glob
 import os
 import sys
-

 # support running straight from git and standard installs
 rootpaths = [
     os.path.realpath(os.path.join(os.path.dirname(__file__), '..')),
@@ -15,7 +14,10 @@ rootpaths = [

 localedir = None
 for rootpath in rootpaths:
-    if len(glob.glob(os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo'))) > 0:
+    found_mo = glob.glob(
+        os.path.join(rootpath, 'locale', '*', 'LC_MESSAGES', 'fdroidserver.mo')
+    )
+    if len(found_mo) > 0:
         localedir = os.path.join(rootpath, 'locale')
         break
@@ -24,39 +26,52 @@ gettext.textdomain('fdroidserver')
 _ = gettext.gettext


-from fdroidserver.exception import (FDroidException,
+from fdroidserver.exception import (
+    FDroidException,
     MetaDataException,
-    VerificationException)  # NOQA: E402
+    VerificationException,  # NOQA: E402
+)

 FDroidException  # NOQA: B101
 MetaDataException  # NOQA: B101
 VerificationException  # NOQA: B101

-from fdroidserver.common import (verify_apk_signature,
-    genkeystore as generate_keystore)  # NOQA: E402
+from fdroidserver.common import genkeystore as generate_keystore  # NOQA: E402
+from fdroidserver.common import verify_apk_signature

 verify_apk_signature  # NOQA: B101
 generate_keystore  # NOQA: B101
-from fdroidserver.index import (download_repo_index,
+from fdroidserver.index import (
+    download_repo_index,
     download_repo_index_v1,
     download_repo_index_v2,
     get_mirror_service_urls,
-    make as make_index)  # NOQA: E402
+)
+from fdroidserver.index import make as make_index  # NOQA: E402

 download_repo_index  # NOQA: B101
 download_repo_index_v1  # NOQA: B101
 download_repo_index_v2  # NOQA: B101
 get_mirror_service_urls  # NOQA: B101
 make_index  # NOQA: B101
-from fdroidserver.update import (process_apk,
+from fdroidserver.update import (
+    process_apk,
     process_apks,
     scan_apk,
-    scan_repo_files)  # NOQA: E402
+    scan_repo_files,  # NOQA: E402
+)

 process_apk  # NOQA: B101
 process_apks  # NOQA: B101
 scan_apk  # NOQA: B101
 scan_repo_files  # NOQA: B101
-from fdroidserver.deploy import (update_awsbucket,
+from fdroidserver.deploy import (
+    update_awsbucket,
     update_servergitmirrors,
+    update_serverwebroot,  # NOQA: E402
     update_serverwebroots,
-    update_serverwebroot)  # NOQA: E402
+)

 update_awsbucket  # NOQA: B101
 update_servergitmirrors  # NOQA: B101
 update_serverwebroots  # NOQA: B101
@@ -18,20 +18,20 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-import re
-import sys
+import importlib.metadata
+import logging
 import os
 import pkgutil
-import logging
-import importlib.metadata
+import re
+import sys

-import git
-import fdroidserver.common
-import fdroidserver.metadata
-from fdroidserver import _
 from argparse import ArgumentError
 from collections import OrderedDict

+import git
+
+import fdroidserver.common
+import fdroidserver.metadata
+from fdroidserver import _
+
 COMMANDS = OrderedDict([
     ("build", _("Build a package from source")),
fdroidserver/_yaml.py (new file, 64 lines)
@@ -0,0 +1,64 @@
+# Copyright (C) 2025, Hans-Christoph Steiner <hans@eds.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Standard YAML parsing and dumping.
+
+YAML 1.2 is the preferred format for all data files. When loading
+F-Droid formats like config.yml and <Application ID>.yml, YAML 1.2 is
+forced, and older YAML constructs should be considered an error.
+
+It is OK to load and dump files in other YAML versions if they are
+externally defined formats, like FUNDING.yml. In those cases, these
+common instances might not be appropriate to use.
+
+There is a separate instance for dumping based on the "round trip" aka
+"rt" mode. The "rt" mode maintains order while the "safe" mode sorts
+the output. Also, yaml.version is not forced in the dumper because that
+makes it write out a "%YAML 1.2" header. F-Droid's formats are
+explicitly defined as YAML 1.2 and meant to be human-editable. So that
+header gets in the way.
+
+"""
+
+import ruamel.yaml
+
+yaml = ruamel.yaml.YAML(typ='safe')
+yaml.version = (1, 2)
+
+yaml_dumper = ruamel.yaml.YAML(typ='rt')
+
+
+def config_dump(config, fp=None):
+    """Dump config data in YAML 1.2 format without headers.
+
+    This outputs YAML in a string that is suitable for use in regexps
+    and string replacements, as well as complete files. It is therefore
+    explicitly set up to avoid writing out headers and footers.
+
+    This is modeled after PyYAML's yaml.dump(), which can dump to a file
+    or return a string.
+
+    https://yaml.dev/doc/ruamel.yaml/example/#Output_of_%60dump()%60_as_a_string
+
+    """
+    dumper = ruamel.yaml.YAML(typ='rt')
+    dumper.default_flow_style = False
+    dumper.explicit_start = False
+    dumper.explicit_end = False
+    if fp is None:
+        with ruamel.yaml.compat.StringIO() as fp:
+            dumper.dump(config, fp)
+            return fp.getvalue()
+    dumper.dump(config, fp)
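A short usage sketch of the new module; only config_dump() and the module-level yaml/yaml_dumper instances come from the file above, and the config key used here is a placeholder:

    from fdroidserver._yaml import config_dump

    # Returns a plain YAML 1.2 string with no "%YAML 1.2" header or "---" marker,
    # so it can be spliced into an existing config.yml via string replacement.
    snippet = config_dump({'keydname': 'CN=example.com, OU=F-Droid'})
    print(snippet)

    # Or dump straight to an open file object, like PyYAML's yaml.dump():
    with open('config.yml', 'w', encoding='utf-8') as fp:
        config_dump({'keydname': 'CN=example.com, OU=F-Droid'}, fp)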
@@ -68,9 +68,18 @@ import struct
 import sys
 import zipfile
 import zlib

 from collections import namedtuple
-from typing import Any, BinaryIO, Callable, Dict, Iterable, Iterator, Optional, Tuple, Union
+from typing import (
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    Optional,
+    Tuple,
+    Union,
+)

 __version__ = "1.1.1"
 NAME = "apksigcopier"
@@ -28,22 +28,21 @@
 # the F-Droid client.

 import collections
-import defusedxml.minidom
-import git
 import glob
-import os
 import json
 import logging
-import requests
+import os
 import shutil
 import tempfile
 import zipfile
 from argparse import ArgumentParser
 from typing import Optional

-from . import _
-from . import common
-from . import deploy
+import defusedxml.minidom
+import git
+import requests
+
+from . import _, common, deploy
 from .exception import FDroidException
@@ -18,31 +18,27 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-import os
-import shutil
+import argparse
 import glob
-import subprocess
+import logging
+import os
 import posixpath
 import re
+import shutil
+import subprocess
 import tarfile
-import threading
-import traceback
-import time
-import requests
 import tempfile
-import argparse
-import logging
+import threading
+import time
+import traceback
 from gettext import ngettext
 from pathlib import Path

-from . import _
-from . import common
-from . import net
-from . import metadata
-from . import scanner
-from . import vmtools
+import requests
+
+from . import _, common, metadata, net, scanner, vmtools
 from .common import FDroidPopen
-from .exception import FDroidException, BuildException, VCSException
+from .exception import BuildException, FDroidException, VCSException

 try:
     import paramiko
@@ -155,9 +151,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force):
         ftp.mkdir('fdroidserver')
         ftp.chdir('fdroidserver')
         ftp.put(os.path.join(serverpath, '..', 'fdroid'), 'fdroid')
-        ftp.put(os.path.join(serverpath, '..', 'gradlew-fdroid'), 'gradlew-fdroid')
         ftp.chmod('fdroid', 0o755)  # nosec B103 permissions are appropriate
-        ftp.chmod('gradlew-fdroid', 0o755)  # nosec B103 permissions are appropriate
         send_dir(os.path.join(serverpath))
         ftp.chdir(homedir)

@@ -306,7 +300,7 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force):
     else:
         ftp.chdir(posixpath.join(homedir, 'unsigned'))
     apkfile = common.get_release_filename(app, build)
-    tarball = common.getsrcname(app, build)
+    tarball = common.get_src_tarball_name(app.id, build.versionCode)
     try:
         ftp.get(apkfile, os.path.join(output_dir, apkfile))
         if not options.notarball:

@@ -479,7 +473,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
         logging.critical("Android NDK '%s' is not a directory!" % ndk_path)
         raise FDroidException()

-    common.set_FDroidPopen_env(build)
+    common.set_FDroidPopen_env(app, build)

     # create ..._toolsversion.log when running in builder vm
     if onserver:

@@ -541,13 +535,13 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
         if build.preassemble:
             gradletasks += build.preassemble

-        flavours = build.gradle
-        if flavours == ['yes']:
-            flavours = []
+        flavors = build.gradle
+        if flavors == ['yes']:
+            flavors = []

-        flavours_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavours])
+        flavors_cmd = ''.join([transform_first_char(flav, str.upper) for flav in flavors])

-        gradletasks += ['assemble' + flavours_cmd + 'Release']
+        gradletasks += ['assemble' + flavors_cmd + 'Release']

         cmd = [config['gradle']]
         if build.gradleprops:

@@ -619,7 +613,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
     if not options.notarball:
         # Build the source tarball right before we build the release...
         logging.info("Creating source tarball...")
-        tarname = common.getsrcname(app, build)
+        tarname = common.get_src_tarball_name(app.id, build.versionCode)
         tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz")

         def tarexc(t):

@@ -721,8 +715,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
     bindir = os.path.join(root_dir, 'bin')

     if os.path.isdir(os.path.join(build_dir, '.git')):
-        import git
-        commit_id = common.get_head_commit_id(git.repo.Repo(build_dir))
+        commit_id = str(common.get_head_commit_id(build_dir))
     else:
         commit_id = build.commit

@@ -764,11 +757,11 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
         # really old path
         os.path.join(root_dir, 'build', 'apk'),
    ]
-    # If we build with gradle flavours with gradle plugin >= 3.0 the APK will be in
-    # a subdirectory corresponding to the flavour command used, but with different
+    # If we build with gradle flavors with gradle plugin >= 3.0 the APK will be in
+    # a subdirectory corresponding to the flavor command used, but with different
     # capitalization.
-    if flavours_cmd:
-        apk_dirs.append(os.path.join(root_dir, 'build', 'outputs', 'apk', transform_first_char(flavours_cmd, str.lower), 'release'))
+    if flavors_cmd:
+        apk_dirs.append(os.path.join(root_dir, 'build', 'outputs', 'apk', transform_first_char(flavors_cmd, str.lower), 'release'))
     for apks_dir in apk_dirs:
         for apkglob in ['*-release-unsigned.apk', '*-unsigned.apk', '*.apk']:
             apks = glob.glob(os.path.join(apks_dir, apkglob))

@@ -1117,10 +1110,7 @@ def main():
     srclib_dir = os.path.join(build_dir, 'srclib')
     extlib_dir = os.path.join(build_dir, 'extlib')

-    # Read all app and srclib metadata
-    pkgs = common.read_pkg_args(options.appid, True)
-    allapps = metadata.read_metadata(pkgs, sort_by_time=True)
-    apps = common.read_app_args(options.appid, allapps, True)
+    apps = common.read_app_args(options.appid, allow_version_codes=True, sort_by_time=True)

     for appid, app in list(apps.items()):
         if (app.get('Disabled') and not options.force) or not app.get('RepoType') or not app.get('Builds', []):
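For clarity on the flavours-to-flavors rename above, a small sketch of how the gradle task and output directory names are derived; transform_first_char() is re-implemented here with the behavior its usage implies, and the flavor names are made up:

    def transform_first_char(string, method):
        """Apply method (str.upper or str.lower) to only the first character."""
        if not string:
            return string
        return method(string[0]) + string[1:]

    flavors = ['fdroid', 'open']  # what build.gradle in the metadata might contain
    flavors_cmd = ''.join(transform_first_char(flav, str.upper) for flav in flavors)

    gradletasks = ['assemble' + flavors_cmd + 'Release']
    print(gradletasks)  # ['assembleFdroidOpenRelease']

    # The built APK lands in a directory named after the same string, but with the
    # first character lowercased, hence the second transform_first_char() call:
    print(transform_first_char(flavors_cmd, str.lower))  # fdroidOpen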
@@ -18,26 +18,34 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

+import configparser
+import copy
+import logging
 import os
 import re
-import urllib.request
-import urllib.error
-import time
 import subprocess
 import sys
-from argparse import ArgumentParser
+import time
 import traceback
-import logging
-import copy
+import urllib.error
 import urllib.parse
+import urllib.request
+from argparse import ArgumentParser
 from pathlib import Path
 from typing import Optional

-from . import _
-from . import common
-from . import metadata
-from . import net
-from .exception import VCSException, NoSubmodulesException, FDroidException, MetaDataException
+import git
+
+from . import _, common, metadata, net
+from .exception import (
+    FDroidException,
+    MetaDataException,
+    NoSubmodulesException,
+    VCSException,
+)
+
+# https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36
+BOT_EMAIL = 'fdroidci@bubu1.eu'


 def check_http(app: metadata.App) -> tuple[Optional[str], Optional[int]]:
@@ -85,7 +93,7 @@ def check_http(app: metadata.App) -> tuple[Optional[str], Optional[int]]:

         m = re.search(codeex, page)
         if not m:
-            raise FDroidException("No RE match for version code")
+            raise FDroidException("No RE match for versionCode")
         vercode = common.version_code_string_to_int(m.group(1).strip())

     if urlver != '.':

@@ -205,7 +213,7 @@ def check_tags(app: metadata.App, pattern: str) -> tuple[str, int, str]:
         if codeex:
             m = re.search(codeex, filecontent)
             if not m:
-                logging.debug(f"UpdateCheckData regex {codeex} for version code"
+                logging.debug(f"UpdateCheckData regex {codeex} for versionCode"
                               f" has no match in tag {tag}")
                 continue

@@ -225,7 +233,7 @@ def check_tags(app: metadata.App, pattern: str) -> tuple[str, int, str]:
         if verex:
             m = re.search(verex, filecontent)
             if not m:
-                logging.debug(f"UpdateCheckData regex {verex} for version name"
+                logging.debug(f"UpdateCheckData regex {verex} for versionName"
                               f" has no match in tag {tag}")
                 continue

@@ -375,7 +383,8 @@ def dirs_with_manifest(startdir: str):
    A directory that contains a manifest file of an Android project, None if
    no directory could be found
    """
-    for root, _dirs, files in os.walk(startdir):
+    for root, dirs, files in os.walk(startdir):
+        dirs.sort()
        if any(m in files for m in [
                'AndroidManifest.xml', 'pom.xml', 'build.gradle', 'build.gradle.kts']):
            yield Path(root)
@@ -668,8 +677,6 @@ def checkupdates_app(app: metadata.App, auto: bool, commit: bool = False) -> Non
     if commit:
         logging.info("Commiting update for " + app.metadatapath)
         gitcmd = ["git", "commit", "-m", commitmsg]
-        if 'auto_author' in config:
-            gitcmd.extend(['--author', config['auto_author']])
         gitcmd.extend(["--", app.metadatapath])
         if subprocess.call(gitcmd) != 0:
             raise FDroidException("Git commit failed")
@@ -683,6 +690,184 @@ def get_last_build_from_app(app: metadata.App) -> metadata.Build:
     return metadata.Build()


+def get_upstream_main_branch(git_repo):
+    refs = list()
+    for ref in git_repo.remotes.upstream.refs:
+        if ref.name != 'upstream/HEAD':
+            refs.append(ref.name)
+    if len(refs) == 1:
+        return refs[0]
+    for name in ('upstream/main', 'upstream/master'):
+        if name in refs:
+            return name
+    try:
+        with git_repo.config_reader() as reader:
+            return 'upstream/%s' % reader.get_value('init', 'defaultBranch')
+    except configparser.NoSectionError:
+        return 'upstream/main'
+
+
+def checkout_appid_branch(appid):
+    """Prepare the working branch named after the appid.
+
+    This sets up everything for checkupdates_app() to run and add
+    commits.  If there is an existing branch named after the appid,
+    and it has commits from users other than the checkupdates-bot,
+    then this will return False.  Otherwise, it returns True.
+
+    The checkupdates-runner must set the committer email address in
+    the git config.  Then any commit with a committer or author that
+    does not match that will be considered to have human edits.  That
+    email address is currently set in:
+    https://gitlab.com/fdroid/checkupdates-runner/-/blob/1861899262a62a4ed08fa24e5449c0368dfb7617/.gitlab-ci.yml#L36
+
+    """
+    logging.debug(f'Creating merge request branch for {appid}')
+    git_repo = git.Repo.init('.')
+    upstream_main = get_upstream_main_branch(git_repo)
+    for remote in git_repo.remotes:
+        remote.fetch()
+    try:
+        git_repo.remotes.origin.fetch(f'{appid}:refs/remotes/origin/{appid}')
+    except Exception as e:
+        logging.debug('"%s" branch not found on origin remote:\n\t%s', appid, e)
+    if appid in git_repo.remotes.origin.refs:
+        start_point = f"origin/{appid}"
+        for commit in git_repo.iter_commits(
+            f'{upstream_main}...{start_point}', right_only=True
+        ):
+            if commit.committer.email != BOT_EMAIL or commit.author.email != BOT_EMAIL:
+                return False
+    else:
+        start_point = upstream_main
+    git_repo.git.checkout('-B', appid, start_point)
+    git_repo.git.rebase(upstream_main, strategy_option='ours', kill_after_timeout=120)
+    return True
+
+
+def get_changes_versus_ref(git_repo, ref, f):
+    changes = []
+    for m in re.findall(
+        r"^[+-].*", git_repo.git.diff(f"{ref}", '--', f), flags=re.MULTILINE
+    ):
+        if not re.match(r"^(\+\+\+|---) ", m):
+            changes.append(m)
+    return changes
+
+
+def push_commits(branch_name='checkupdates'):
+    """Make git branch then push commits as merge request.
+
+    The appid is parsed from the actual file that was changed so that
+    only the right branch is ever updated.
+
+    This uses the appid as the standard branch name so that there is
+    only ever one open merge request per-app.  If multiple apps are
+    included in the branch, then 'checkupdates' is used as branch
+    name.  This is to support the old way operating, e.g. in batches.
+
+    This uses GitLab "Push Options" to create a merge request.  Git
+    Push Options are config data that can be sent via `git push
+    --push-option=... origin foo`.
+
+    References
+    ----------
+    * https://docs.gitlab.com/ee/user/project/push_options.html
+
+    """
+    if branch_name != "checkupdates":
+        if callable(getattr(git.SymbolicReference, "_check_ref_name_valid", None)):
+            git.SymbolicReference._check_ref_name_valid(branch_name)
+
+    git_repo = git.Repo.init('.')
+    upstream_main = get_upstream_main_branch(git_repo)
+    files = set()
+    for commit in git_repo.iter_commits(f'{upstream_main}...HEAD', right_only=True):
+        files.update(commit.stats.files.keys())
+
+    files = list(files)
+    if len(files) == 1:
+        m = re.match(r'metadata/(\S+)\.yml', files[0])
+        if m:
+            branch_name = m.group(1)  # appid
+    if not files:
+        return
+
+    # https://git-scm.com/docs/git-check-ref-format Git refname can't end with .lock
+    if branch_name.endswith(".lock"):
+        branch_name = f"{branch_name}_"
+
+    remote = git_repo.remotes.origin
+    if branch_name in remote.refs:
+        if not get_changes_versus_ref(git_repo, f'origin/{branch_name}', files[0]):
+            return
+
+    git_repo.create_head(branch_name, force=True)
+    push_options = [
+        'merge_request.create',
+        'merge_request.remove_source_branch',
+        'merge_request.title=bot: ' + git_repo.branches[branch_name].commit.summary,
+        'merge_request.description='
+        + '~%s checkupdates-bot run %s' % (branch_name, os.getenv('CI_JOB_URL')),
+    ]
+
+    # mark as draft if there are only changes to CurrentVersion:
+    current_version_only = True
+    for m in get_changes_versus_ref(git_repo, upstream_main, files[0]):
+        if not re.match(r"^[-+]CurrentVersion", m):
+            current_version_only = False
+            break
+    if current_version_only:
+        push_options.append('merge_request.draft')
+
+    progress = git.RemoteProgress()
+
+    pushinfos = remote.push(
+        f"HEAD:refs/heads/{branch_name}",
+        progress=progress,
+        force=True,
+        set_upstream=True,
+        push_option=push_options,
+    )
+
+    for pushinfo in pushinfos:
+        logging.info(pushinfo.summary)
+        # Show potentially useful messages from git remote
+        if progress:
+            for line in progress.other_lines:
+                logging.info(line)
+        if pushinfo.flags & (
+            git.remote.PushInfo.ERROR
+            | git.remote.PushInfo.REJECTED
+            | git.remote.PushInfo.REMOTE_FAILURE
+            | git.remote.PushInfo.REMOTE_REJECTED
+        ):
+            raise FDroidException(
+                f'{remote.url} push failed: {pushinfo.flags} {pushinfo.summary}'
+            )
+        else:
+            logging.info(remote.url + ': ' + pushinfo.summary)
+
+
+def prune_empty_appid_branches(git_repo=None, main_branch='main'):
+    """Remove empty branches from checkupdates-bot git remote."""
+    if git_repo is None:
+        git_repo = git.Repo.init('.')
+    upstream_main = get_upstream_main_branch(git_repo)
+    main_branch = upstream_main.split('/')[1]
+
+    remote = git_repo.remotes.origin
+    remote.update(prune=True)
+    merged_branches = git_repo.git().branch(remotes=True, merged=upstream_main).split()
+    for remote_branch in merged_branches:
+        if not remote_branch or '/' not in remote_branch:
+            continue
+        if remote_branch.split('/')[1] not in (main_branch, 'HEAD'):
+            for ref in git_repo.remotes.origin.refs:
+                if remote_branch == ref.name:
+                    remote.push(':%s' % ref.remote_head, force=True)  # rm remote branch
+
+
 def status_update_json(processed: list, failed: dict) -> None:
     """Output a JSON file with metadata about this run."""
     logging.debug(_('Outputting JSON'))
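The push options assembled in push_commits() map onto a plain `git push -o ...` invocation. A hedged sketch of the equivalent call, using GitPython the same way the new code does; the branch name and job URL are placeholders:

    import git

    branch_name = "org.example.app"  # placeholder appid
    job_url = "https://gitlab.com/fdroid/checkupdates-runner/-/jobs/12345"  # placeholder

    repo = git.Repo.init('.')
    repo.remotes.origin.push(
        f"HEAD:refs/heads/{branch_name}",
        force=True,
        set_upstream=True,
        push_option=[
            'merge_request.create',
            'merge_request.remove_source_branch',
            f'merge_request.description=~{branch_name} checkupdates-bot run {job_url}',
        ],
    )
    # Roughly equivalent to:
    #   git push --force --set-upstream \
    #       -o merge_request.create -o merge_request.remove_source_branch \
    #       -o 'merge_request.description=...' origin HEAD:refs/heads/org.example.app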
@@ -716,6 +901,8 @@ def main():
                         help=_("Only process apps with auto-updates"))
     parser.add_argument("--commit", action="store_true", default=False,
                         help=_("Commit changes"))
+    parser.add_argument("--merge-request", action="store_true", default=False,
+                        help=_("Commit changes, push, then make a merge request"))
     parser.add_argument("--allow-dirty", action="store_true", default=False,
                         help=_("Run on git repo that has uncommitted changes"))
     metadata.add_metadata_arguments(parser)

@@ -730,10 +917,11 @@ def main():
         logging.error(_('Build metadata git repo has uncommited changes!'))
         sys.exit(1)

-    # Get all apps...
-    allapps = metadata.read_metadata()
+    if options.merge_request and not (options.appid and len(options.appid) == 1):
+        logging.error(_('--merge-request only runs on a single appid!'))
+        sys.exit(1)

-    apps = common.read_app_args(options.appid, allapps, False)
+    apps = common.read_app_args(options.appid)

     processed = []
     failed = dict()

@@ -748,7 +936,17 @@ def main():
         logging.info(msg)

         try:
-            checkupdates_app(app, options.auto, options.commit)
+            if options.merge_request:
+                if not checkout_appid_branch(appid):
+                    msg = _("...checkupdate failed for {appid} : {error}").format(
+                        appid=appid,
+                        error='Open merge request with human edits, skipped.',
+                    )
+                    logging.warning(msg)
+                    failed[appid] = msg
+                    continue
+
+            checkupdates_app(app, options.auto, options.commit or options.merge_request)
             processed.append(appid)
         except Exception as e:
             msg = _("...checkupdate failed for {appid} : {error}").format(appid=appid, error=e)

@@ -757,6 +955,10 @@ def main():
             failed[appid] = str(e)
             exit_code = 1

+    if options.appid and options.merge_request:
+        push_commits()
+        prune_empty_appid_branches()
+
     status_update_json(processed, failed)
     sys.exit(exit_code)
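Putting the new pieces together, the --merge-request mode is roughly equivalent to the following sketch; config and remote setup are omitted, and the appid is a placeholder:

    # Equivalent of `fdroid checkupdates --auto --merge-request org.example.app`,
    # run from a metadata checkout that has 'upstream' and 'origin' remotes.
    from fdroidserver import checkupdates, common

    apps = common.read_app_args(['org.example.app'])
    for appid, app in apps.items():
        if checkupdates.checkout_appid_branch(appid):
            checkupdates.checkupdates_app(app, auto=True, commit=True)
    checkupdates.push_commits()
    checkupdates.prune_empty_appid_branches()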
(One file's diff is suppressed in this view because it is too large.)
@@ -16,29 +16,28 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-import sys
+import configparser
 import glob
-import hashlib
 import json
+import logging
 import os
+import pathlib
 import re
+import shutil
 import subprocess
+import sys
 import time
 import urllib
-from typing import Dict, List
-from git import Repo
-import yaml
 from argparse import ArgumentParser
-import logging
-from shlex import split
-import pathlib
-import shutil
+from typing import Dict, List

 import git
+import yaml
+from git import Repo

 import fdroidserver.github

-from . import _
-from . import common
-from . import index
+from . import _, common, index
 from .exception import FDroidException

 config = None
@@ -48,11 +47,10 @@ GIT_BRANCH = 'master'

 BINARY_TRANSPARENCY_DIR = 'binary_transparency'

-AUTO_S3CFG = '.fdroid-deploy-s3cfg'
-USER_S3CFG = 's3cfg'
-USER_RCLONE_CONF = None
 REMOTE_HOSTNAME_REGEX = re.compile(r'\W*\w+\W+(\w+).*')

+EMBEDDED_RCLONE_CONF = 'rclone.conf'
+

 def _get_index_file_paths(base_dir):
     """Return the list of files to be synced last, since they finalize the deploy.
@@ -61,8 +59,15 @@ def _get_index_file_paths(base_dir):
    services can take a while.  So the index files should be updated
    last.  That ensures that the package files are available when the
    client learns about them from the new index files.
+
+    signer-index.* are only published in the repo/ section.
+
    """
-    return [os.path.join(base_dir, filename) for filename in common.INDEX_FILES]
+    return [
+        os.path.join(base_dir, filename)
+        for filename in common.INDEX_FILES
+        if not (filename.startswith('signer-index.') and base_dir.endswith('archive'))
+    ]


 def _get_index_excludes(base_dir):
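A quick sketch of what the new filter above produces; the INDEX_FILES list here is an abbreviated stand-in for common.INDEX_FILES:

    import os

    INDEX_FILES = ['entry.jar', 'index-v1.jar', 'index-v2.json', 'signer-index.json']

    def _get_index_file_paths(base_dir):
        return [
            os.path.join(base_dir, filename)
            for filename in INDEX_FILES
            if not (filename.startswith('signer-index.') and base_dir.endswith('archive'))
        ]

    print(_get_index_file_paths('repo'))     # keeps repo/signer-index.json
    print(_get_index_file_paths('archive'))  # signer-index.* is filtered out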
@@ -92,399 +97,161 @@ def _remove_missing_files(files: List[str]) -> List[str]:
     return existing


+def _generate_rclone_include_pattern(files):
+    """Generate a pattern for rclone's --include flag (https://rclone.org/filtering/)."""
+    return "{" + ",".join(sorted(set(files))) + "}"
+
+
 def update_awsbucket(repo_section, is_index_only=False, verbose=False, quiet=False):
-    """Upload the contents of the directory `repo_section` (including subdirectories) to the AWS S3 "bucket".
+    """Sync the directory `repo_section` (including subdirectories) to AWS S3 US East.

-    The contents of that subdir of the
-    bucket will first be deleted.
+    This is a shim function for public API compatibility.

-    Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
+    Requires AWS credentials set as environment variables:
+    https://rclone.org/s3/#authentication

     """
-    logging.debug(
-        f'''Syncing "{repo_section}" to Amazon S3 bucket "{config['awsbucket']}"'''
-    )
-
-    if common.set_command_in_config('s3cmd') and common.set_command_in_config('rclone'):
-        logging.info(
-            'Both rclone and s3cmd are installed. Checking config.yml for preference.'
-        )
-        if config['s3cmd'] is not True and config['rclone'] is not True:
-            logging.warning(
-                'No syncing tool set in config.yml!. Defaulting to using s3cmd'
-            )
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['s3cmd'] is True and config['rclone'] is True:
-            logging.warning(
-                'Both syncing tools set in config.yml!. Defaulting to using s3cmd'
-            )
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['s3cmd'] is True and config['rclone'] is not True:
-            update_awsbucket_s3cmd(repo_section, is_index_only)
-        if config['rclone'] is True and config['s3cmd'] is not True:
-            update_remote_storage_with_rclone(
-                repo_section, is_index_only, verbose, quiet
-            )
-
-    elif common.set_command_in_config('s3cmd'):
-        update_awsbucket_s3cmd(repo_section, is_index_only)
-    elif common.set_command_in_config('rclone'):
-        update_remote_storage_with_rclone(repo_section, is_index_only, verbose, quiet)
-    else:
-        update_awsbucket_libcloud(repo_section, is_index_only)
+    update_remote_storage_with_rclone(repo_section, is_index_only, verbose, quiet)
-
-
-def update_awsbucket_s3cmd(repo_section, is_index_only=False):
-    """Upload using the CLI tool s3cmd, which provides rsync-like sync.
-
-    The upload is done in multiple passes to reduce the chance of
-    interfering with an existing client-server interaction.  In the
-    first pass, only new files are uploaded.  In the second pass,
-    changed files are uploaded, overwriting what is on the server.  On
-    the third/last pass, the indexes are uploaded, and any removed
-    files are deleted from the server.  The last pass is the only pass
-    to use a full MD5 checksum of all files to detect changes.
-    """
-    logging.debug(_('Using s3cmd to sync with: {url}').format(url=config['awsbucket']))
-
-    if os.path.exists(USER_S3CFG):
-        logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG))
-        configfilename = USER_S3CFG
-    else:
-        fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
-        logging.debug(
-            _('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG)
-        )
-        os.write(fd, '[default]\n'.encode('utf-8'))
-        os.write(
-            fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8')
-        )
-        os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
-        os.close(fd)
-        configfilename = AUTO_S3CFG
-
-    s3bucketurl = 's3://' + config['awsbucket']
-    s3cmd = [config['s3cmd'], '--config=' + configfilename]
-    if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
-        logging.warning(_('Creating new S3 bucket: {url}').format(url=s3bucketurl))
-        if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
-            logging.error(
-                _('Failed to create S3 bucket: {url}').format(url=s3bucketurl)
-            )
-            raise FDroidException()
-
-    s3cmd_sync = s3cmd + ['sync', '--acl-public']
-    options = common.get_options()
-    if options and options.verbose:
-        s3cmd_sync += ['--verbose']
-    if options and options.quiet:
-        s3cmd_sync += ['--quiet']
-
-    s3url = s3bucketurl + '/fdroid/'
-
-    logging.debug(
-        _('s3cmd sync indexes {path} to {url} and delete').format(
-            path=repo_section, url=s3url
-        )
-    )
-
-    if is_index_only:
-        logging.debug(
-            _('s3cmd syncs indexes from {path} to {url} and deletes removed').format(
-                path=repo_section, url=s3url
-            )
-        )
-        sync_indexes_flags = []
-        sync_indexes_flags.extend(_get_index_includes(repo_section))
-        sync_indexes_flags.append('--delete-removed')
-        sync_indexes_flags.append('--delete-after')
-        if options.no_checksum:
-            sync_indexes_flags.append('--no-check-md5')
-        else:
-            sync_indexes_flags.append('--check-md5')
-        returncode = subprocess.call(
-            s3cmd_sync + sync_indexes_flags + [repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-    else:
-        logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
-        logging.debug(_('Running first pass with MD5 checking disabled'))
-        excludes = _get_index_excludes(repo_section)
-        returncode = subprocess.call(
-            s3cmd_sync
-            + excludes
-            + ['--no-check-md5', '--skip-existing', repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-        logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
-        returncode = subprocess.call(
-            s3cmd_sync + excludes + ['--no-check-md5', repo_section, s3url]
-        )
-        if returncode != 0:
-            raise FDroidException()
-
-        logging.debug(
-            _('s3cmd sync indexes {path} to {url} and delete').format(
-                path=repo_section, url=s3url
-            )
-        )
-        s3cmd_sync.append('--delete-removed')
-        s3cmd_sync.append('--delete-after')
-        if options.no_checksum:
-            s3cmd_sync.append('--no-check-md5')
-        else:
-            s3cmd_sync.append('--check-md5')
-        if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0:
-            raise FDroidException()

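Under the rewritten flow above, update_awsbucket() no longer reads awsaccesskeyid/awssecretkey from config.yml; credentials come from the standard AWS environment variables instead. A hedged sketch of a deploy call, with placeholder key values and assuming awsbucket is still set in config.yml:

    import os

    from fdroidserver import common, deploy

    # rclone picks these up because the generated remote sets env_auth = true.
    os.environ['AWS_ACCESS_KEY_ID'] = 'AKIA...'       # placeholder
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'secret'    # placeholder

    deploy.config = common.read_config()  # config.yml still provides awsbucket
    deploy.update_awsbucket('repo', is_index_only=False, verbose=True)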
def update_remote_storage_with_rclone(
|
def update_remote_storage_with_rclone(
|
||||||
repo_section, is_index_only=False, verbose=False, quiet=False
|
repo_section,
|
||||||
|
awsbucket,
|
||||||
|
is_index_only=False,
|
||||||
|
verbose=False,
|
||||||
|
quiet=False,
|
||||||
|
checksum=False,
|
||||||
):
|
):
|
||||||
"""
|
"""Sync the directory `repo_section` (including subdirectories) to configed cloud services.
|
||||||
Upload fdroid repo folder to remote storage using rclone sync.
|
|
||||||
|
|
||||||
Rclone sync can send the files to any supported remote storage
|
Rclone sync can send the files to any supported remote storage
|
||||||
service once without numerous polling.
|
service once without numerous polling. If remote storage is S3 e.g
|
||||||
If remote storage is s3 e.g aws s3, wasabi, filebase then path will be
|
AWS S3, Wasabi, Filebase, etc, then path will be
|
||||||
bucket_name/fdroid/repo where bucket_name will be an s3 bucket
|
bucket_name/fdroid/repo where bucket_name will be an S3 bucket. If
|
||||||
If remote storage is storage drive/sftp e.g google drive, rsync.net
|
remote storage is storage drive/sftp e.g google drive, rsync.net the
|
||||||
the new path will be bucket_name/fdroid/repo where bucket_name
|
new path will be bucket_name/fdroid/repo where bucket_name will be a
|
||||||
will be a folder
|
folder
|
||||||
|
|
||||||
|
See https://rclone.org/docs/#config-config-file
|
||||||
|
|
||||||
|
rclone filtering works differently than rsync. For example,
|
||||||
|
"--include" implies "--exclude **" at the end of an rclone internal
|
||||||
|
filter list.
|
||||||
|
|
||||||
|
If rclone.conf is in the root of the repo, then it will be preferred
|
||||||
|
over the rclone default config paths.
|
||||||
|
|
||||||
Better than the s3cmd command as it does the syncing in one command
|
|
||||||
Check https://rclone.org/docs/#config-config-file (optional config file)
|
|
||||||
"""
|
"""
|
||||||
logging.debug(_('Using rclone to sync with: {url}').format(url=config['awsbucket']))
|
logging.debug(_('Using rclone to sync to "{name}"').format(name=awsbucket))
|
||||||
|
|
||||||
if config.get('path_to_custom_rclone_config') is not None:
|
rclone_config = config.get('rclone_config', [])
|
||||||
USER_RCLONE_CONF = config['path_to_custom_rclone_config']
|
if rclone_config and isinstance(rclone_config, str):
|
||||||
if os.path.exists(USER_RCLONE_CONF):
|
rclone_config = [rclone_config]
|
||||||
logging.info("'path_to_custom_rclone_config' found in config.yml")
|
|
||||||
logging.info(
|
path = config.get('path_to_custom_rclone_config')
|
||||||
_('Using "{path}" for syncing with remote storage.').format(
|
if path:
|
||||||
path=USER_RCLONE_CONF
|
if not os.path.exists(path):
|
||||||
|
logging.error(
|
||||||
|
_('path_to_custom_rclone_config: "{path}" does not exist!').format(
|
||||||
|
path=path
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
configfilename = USER_RCLONE_CONF
|
sys.exit(1)
|
||||||
|
configfilename = path
|
||||||
|
elif os.path.exists(EMBEDDED_RCLONE_CONF):
|
||||||
|
path = EMBEDDED_RCLONE_CONF # in this case, only for display
|
||||||
|
configfilename = EMBEDDED_RCLONE_CONF
|
||||||
|
if not rclone_config:
|
||||||
|
raise FDroidException(_("'rclone_config' must be set in config.yml!"))
|
||||||
else:
|
else:
|
||||||
logging.info('Custom configuration not found.')
|
|
||||||
logging.info(
|
|
||||||
'Using default configuration at {}'.format(
|
|
||||||
subprocess.check_output(split("rclone config file")).decode("utf-8")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
configfilename = None
|
|
||||||
else:
|
|
||||||
logging.warning("'path_to_custom_rclone_config' not found in config.yml")
|
|
||||||
logging.info('Custom configuration not found.')
|
|
||||||
logging.info(
|
|
||||||
'Using default configuration at {}'.format(
|
|
||||||
subprocess.check_output(split("rclone config file")).decode("utf-8")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
configfilename = None
|
configfilename = None
|
||||||
|
output = subprocess.check_output(['rclone', 'config', 'file'], text=True)
|
||||||
|
default_config_path = output.split('\n')[-2]
|
||||||
|
if os.path.exists(default_config_path):
|
||||||
|
path = default_config_path
|
||||||
|
if path:
|
||||||
|
logging.info(_('Using "{path}" for rclone config.').format(path=path))
|
||||||
|
|
||||||
upload_dir = 'fdroid/' + repo_section
|
upload_dir = 'fdroid/' + repo_section
|
||||||
|
|
||||||
if not config.get('rclone_config') or not config.get('awsbucket'):
|
if not rclone_config:
|
||||||
raise FDroidException(
|
env = os.environ
|
||||||
_('To use rclone, rclone_config and awsbucket must be set in config.yml!')
|
# Check both canonical and backup names, but only tell user about canonical.
|
||||||
)
|
if not env.get("AWS_SECRET_ACCESS_KEY") and not env.get("AWS_SECRET_KEY"):
|
||||||
|
|
||||||
if is_index_only:
|
|
||||||
sources = _get_index_file_paths(repo_section)
|
|
||||||
sources = _remove_missing_files(sources)
|
|
||||||
else:
|
|
||||||
sources = [repo_section]
|
|
||||||
|
|
||||||
for source in sources:
|
|
||||||
if isinstance(config['rclone_config'], str):
|
|
||||||
rclone_sync_command = (
|
|
||||||
'rclone sync '
|
|
||||||
+ source
|
|
||||||
+ ' '
|
|
||||||
+ config['rclone_config']
|
|
||||||
+ ':'
|
|
||||||
+ config['awsbucket']
|
|
||||||
+ '/'
|
|
||||||
+ upload_dir
|
|
||||||
)
|
|
||||||
|
|
||||||
rclone_sync_command = split(rclone_sync_command)
|
|
||||||
|
|
||||||
if verbose:
|
|
||||||
rclone_sync_command += ['--verbose']
|
|
||||||
elif quiet:
|
|
||||||
rclone_sync_command += ['--quiet']
|
|
||||||
|
|
||||||
if configfilename:
|
|
||||||
rclone_sync_command += split('--config=' + configfilename)
|
|
||||||
|
|
||||||
complete_remote_path = (
|
|
||||||
config['rclone_config'] + ':' + config['awsbucket'] + '/' + upload_dir
|
|
||||||
)
|
|
||||||
|
|
||||||
logging.debug(
|
|
||||||
"rclone sync all files in " + source + ' to ' + complete_remote_path
|
|
||||||
)
|
|
||||||
|
|
||||||
if subprocess.call(rclone_sync_command) != 0:
|
|
||||||
raise FDroidException()
|
|
||||||
|
|
||||||
if isinstance(config['rclone_config'], list):
|
|
||||||
for remote_config in config['rclone_config']:
|
|
||||||
rclone_sync_command = (
|
|
||||||
'rclone sync '
|
|
||||||
+ source
|
|
||||||
+ ' '
|
|
||||||
+ remote_config
|
|
||||||
+ ':'
|
|
||||||
+ config['awsbucket']
|
|
||||||
+ '/'
|
|
||||||
+ upload_dir
|
|
||||||
)
|
|
||||||
|
|
||||||
rclone_sync_command = split(rclone_sync_command)
|
|
||||||
|
|
||||||
if verbose:
|
|
||||||
rclone_sync_command += ['--verbose']
|
|
||||||
elif quiet:
|
|
||||||
rclone_sync_command += ['--quiet']
|
|
||||||
|
|
||||||
if configfilename:
|
|
||||||
rclone_sync_command += split('--config=' + configfilename)
|
|
||||||
|
|
||||||
complete_remote_path = (
|
|
||||||
remote_config + ':' + config['awsbucket'] + '/' + upload_dir
|
|
||||||
)
|
|
||||||
|
|
||||||
logging.debug(
|
|
||||||
"rclone sync all files in " + source + ' to ' + complete_remote_path
|
|
||||||
)
|
|
||||||
|
|
||||||
if subprocess.call(rclone_sync_command) != 0:
|
|
||||||
raise FDroidException()
|
|
||||||
|
|
||||||
|
|
||||||
def update_awsbucket_libcloud(repo_section, is_index_only=False):
|
|
||||||
"""No summary.
|
|
||||||
|
|
||||||
Upload the contents of the directory `repo_section` (including
|
|
||||||
subdirectories) to the AWS S3 "bucket".
|
|
||||||
|
|
||||||
The contents of that subdir of the
|
|
||||||
bucket will first be deleted.
|
|
||||||
|
|
||||||
Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
|
|
||||||
"""
|
|
||||||
logging.debug(
|
|
||||||
_('using Apache libcloud to sync with {url}').format(url=config['awsbucket'])
|
|
||||||
)
|
|
||||||
|
|
||||||
import libcloud.security
|
|
||||||
|
|
||||||
libcloud.security.VERIFY_SSL_CERT = True
|
|
||||||
from libcloud.storage.types import Provider, ContainerDoesNotExistError
|
|
||||||
from libcloud.storage.providers import get_driver
|
|
||||||
|
|
||||||
if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
|
|
||||||
raise FDroidException(
|
raise FDroidException(
|
||||||
_(
|
_(
|
||||||
'To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.yml!'
|
""""AWS_SECRET_ACCESS_KEY" must be set as an environmental variable!"""
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
awsbucket = config['awsbucket']
|
if not env.get("AWS_ACCESS_KEY_ID") and not env.get('AWS_ACCESS_KEY'):
|
||||||
|
|
||||||
if os.path.exists(USER_S3CFG):
|
|
||||||
raise FDroidException(
|
raise FDroidException(
|
||||||
_('"{path}" exists but s3cmd is not installed!').format(path=USER_S3CFG)
|
_(""""AWS_ACCESS_KEY_ID" must be set as an environmental variable!""")
|
||||||
)
|
)
|
||||||
|
|
||||||
cls = get_driver(Provider.S3)
|
default_remote = "AWS-S3-US-East-1"
|
||||||
driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
|
env_rclone_config = configparser.ConfigParser()
|
||||||
try:
|
env_rclone_config.add_section(default_remote)
|
||||||
container = driver.get_container(container_name=awsbucket)
|
-    except ContainerDoesNotExistError:
-        container = driver.create_container(container_name=awsbucket)
-        logging.info(_('Created new container "{name}"').format(name=container.name))
-
-    upload_dir = 'fdroid/' + repo_section
-    objs = dict()
-    for obj in container.list_objects():
-        if obj.name.startswith(upload_dir + '/'):
-            objs[obj.name] = obj
-
-    if is_index_only:
-        index_files = [
-            f"{os.getcwd()}/{name}" for name in _get_index_file_paths(repo_section)
-        ]
-        files_to_upload = [
-            os.path.join(root, name)
-            for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section))
-            for name in files
-        ]
-        files_to_upload = list(set(files_to_upload) & set(index_files))
-        files_to_upload = _remove_missing_files(files_to_upload)
-    else:
-        files_to_upload = [
-            os.path.join(root, name)
-            for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section))
-            for name in files
-        ]
-
-    for file_to_upload in files_to_upload:
-        upload = False
-        object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
-        if object_name not in objs:
-            upload = True
-        else:
-            obj = objs.pop(object_name)
-            if obj.size != os.path.getsize(file_to_upload):
-                upload = True
-            else:
-                # if the sizes match, then compare by MD5
-                md5 = hashlib.md5()  # nosec AWS uses MD5
-                with open(file_to_upload, 'rb') as f:
-                    while True:
-                        data = f.read(8192)
-                        if not data:
-                            break
-                        md5.update(data)
-                if obj.hash != md5.hexdigest():
-                    s3url = 's3://' + awsbucket + '/' + obj.name
-                    logging.info(' deleting ' + s3url)
-                    if not driver.delete_object(obj):
-                        logging.warning('Could not delete ' + s3url)
-                    upload = True
-
-        if upload:
-            logging.debug(' uploading "' + file_to_upload + '"...')
-            extra = {'acl': 'public-read'}
-            if file_to_upload.endswith('.sig'):
-                extra['content_type'] = 'application/pgp-signature'
-            elif file_to_upload.endswith('.asc'):
-                extra['content_type'] = 'application/pgp-signature'
-            path = os.path.relpath(file_to_upload)
-            logging.info(f' uploading {path} to s3://{awsbucket}/{object_name}')
-            with open(file_to_upload, 'rb') as iterator:
-                obj = driver.upload_object_via_stream(
-                    iterator=iterator,
-                    container=container,
-                    object_name=object_name,
-                    extra=extra,
-                )
-
-    # delete the remnants in the bucket, they do not exist locally
-    while objs:
-        object_name, obj = objs.popitem()
-        s3url = 's3://' + awsbucket + '/' + object_name
-        if object_name.startswith(upload_dir):
-            logging.warning(' deleting ' + s3url)
-            driver.delete_object(obj)
-        else:
-            logging.info(' skipping ' + s3url)
+    env_rclone_config.set(
+        default_remote,
+        '; = This file is auto-generated by fdroid deploy, do not edit!',
+        '',
+    )
+    env_rclone_config.set(default_remote, "type", "s3")
+    env_rclone_config.set(default_remote, "provider", "AWS")
+    env_rclone_config.set(default_remote, "region", "us-east-1")
+    env_rclone_config.set(default_remote, "env_auth", "true")
+
+    configfilename = ".fdroid-deploy-rclone.conf"
+    with open(configfilename, "w", encoding="utf-8") as autoconfigfile:
+        env_rclone_config.write(autoconfigfile)
+    rclone_config = [default_remote]
+
+    rclone_sync_command = ['rclone', 'sync', '--delete-after']
+    if configfilename:
+        rclone_sync_command += ['--config', configfilename]
+
+    if checksum:
+        rclone_sync_command.append('--checksum')
+
+    if verbose:
+        rclone_sync_command += ['--verbose']
+    elif quiet:
+        rclone_sync_command += ['--quiet']
+
+    # TODO copying update_serverwebroot rsync algo
+    for remote_config in rclone_config:
+        complete_remote_path = f'{remote_config}:{awsbucket}/{upload_dir}'
+        logging.info(f'rclone sync to {complete_remote_path}')
+        if is_index_only:
+            index_only_files = common.INDEX_FILES + ['diff/*.*']
+            include_pattern = _generate_rclone_include_pattern(index_only_files)
+            cmd = rclone_sync_command + [
+                '--include',
+                include_pattern,
+                '--delete-excluded',
+                repo_section,
+                complete_remote_path,
+            ]
+            logging.info(cmd)
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
+        else:
+            cmd = (
+                rclone_sync_command
+                + _get_index_excludes(repo_section)
+                + [
+                    repo_section,
+                    complete_remote_path,
+                ]
+            )
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
+            cmd = rclone_sync_command + [
+                repo_section,
+                complete_remote_path,
+            ]
+            if subprocess.call(cmd) != 0:
+                raise FDroidException()
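The added path shells out to the rclone CLI instead of talking to S3 through libcloud. A minimal standalone sketch of how such a sync command can be assembled and run, under the assumption that rclone is installed; the helper name, remote name ("default") and bucket name ("example-bucket") are made up for illustration and are not part of the change above:

import subprocess

def build_rclone_sync(src, dest, config_file=None, checksum=False, verbose=False, quiet=False):
    """Assemble an 'rclone sync' invocation using the same flags as above."""
    cmd = ['rclone', 'sync', '--delete-after']
    if config_file:
        cmd += ['--config', config_file]
    if checksum:
        cmd.append('--checksum')
    if verbose:
        cmd.append('--verbose')
    elif quiet:
        cmd.append('--quiet')
    return cmd + [src, dest]

# Example: sync the local 'repo' section to a hypothetical S3 remote.
cmd = build_rclone_sync('repo', 'default:example-bucket/fdroid/repo', checksum=True)
if subprocess.call(cmd) != 0:
    raise RuntimeError('rclone sync failed')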
 def update_serverwebroot(serverwebroot, repo_section):

@@ -654,6 +421,13 @@ def update_servergitmirrors(servergitmirrors, repo_section):
     For history, there is the archive section, and there is the binary
     transparency log.

+    This will attempt to use the existing remote branch so that it does
+    not have to push all of the files in the repo each time. Old setups
+    or runs of `fdroid nightly` might use the "master" branch. For the
+    "index only" mode, it will recreate the branch from scratch each
+    time since usually all the files are changed. In any case, the
+    index files are small compared to the full repo.
+
     """
     from clint.textui import progress

@@ -724,7 +498,7 @@ def update_servergitmirrors(servergitmirrors, repo_section):
         if is_index_only:
             local_branch_name = 'index_only'
         else:
-            local_branch_name = 'full'
+            local_branch_name = GIT_BRANCH
         if local_branch_name in repo.heads:
             repo.git.switch(local_branch_name)
         else:

@@ -849,6 +623,7 @@ def upload_to_servergitmirror(
                 | git.remote.PushInfo.REMOTE_REJECTED
             ):
                 # Show potentially useful messages from git remote
+                if progress:
                     for line in progress.other_lines:
                         if line.startswith('remote:'):
                             logging.debug(line)

@@ -883,9 +658,10 @@ def upload_to_android_observatory(repo_section):
     def upload_apk_to_android_observatory(path):
         # depend on requests and lxml only if users enable AO
         import requests
-        from . import net
         from lxml.html import fromstring

+        from . import net
+
         apkfilename = os.path.basename(path)
         r = requests.post(
             'https://androidobservatory.org/',

@@ -1195,7 +971,7 @@ def upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token):
     if not token:
         logging.warning(
             _(
-                "One of the 'github_releases' config itmes is missing the "
+                "One of the 'github_releases' config items is missing the "
                 "'token' value. skipping ..."
             )
         )

@@ -1206,7 +982,7 @@ def upload_to_github_releases_repo(repo_conf, release_infos, global_gh_token):
     if not conf_package_names:
         logging.warning(
             _(
-                "One of the 'github_releases' config itmes is missing the "
+                "One of the 'github_releases' config items is missing the "
                 "'packageNames' value. skipping ..."
            )
        )

@@ -1370,8 +1146,16 @@ def main():
             # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
             update_servergitmirrors(config['servergitmirrors'], repo_section)
         if config.get('awsbucket'):
+            awsbucket = config['awsbucket']
             index_only = config.get('awsbucket_index_only')
-            update_awsbucket(repo_section, index_only, options.verbose, options.quiet)
+            update_remote_storage_with_rclone(
+                repo_section,
+                awsbucket,
+                index_only,
+                options.verbose,
+                options.quiet,
+                not options.no_checksum,
+            )
         if config.get('androidobservatory'):
             upload_to_android_observatory(repo_section)
         if config.get('virustotal_apikey'):

@@ -35,6 +35,10 @@ class VCSException(FDroidException):
     pass


+class NoVersionCodeException(FDroidException):
+    pass
+
+
 class NoSubmodulesException(VCSException):
     pass

@@ -18,17 +18,20 @@
 import json
 import pathlib
-import urllib.request
 import urllib.parse
+import urllib.request


 class GithubApi:
-    """
-    Warpper for some select calls to GitHub Json/REST API.
+    """Wrapper for some select calls to GitHub Json/REST API.

     This class wraps some calls to api.github.com. This is not intended to be a
     general API wrapper. Instead it's purpose is to return pre-filtered and
     transformed data that's playing well with other fdroidserver functions.
+
+    With the GitHub API, the token is optional, but it has pretty
+    severe rate limiting.
+
     """

     def __init__(self, api_token, repo_path):

@@ -41,9 +44,10 @@ class GithubApi:
     def _req(self, url, data=None):
         h = {
             "Accept": "application/vnd.github+json",
-            "Authorization": f"Bearer {self._api_token}",
             "X-GitHub-Api-Version": "2022-11-28",
         }
+        if self._api_token:
+            h["Authorization"] = f"Bearer {self._api_token}"
         return urllib.request.Request(
             url,
             headers=h,

@@ -65,6 +69,17 @@ class GithubApi:
         released_tags = self.list_released_tags()
         return [x for x in all_tags if x not in released_tags]

+    def get_latest_apk(self):
+        req = self._req(
+            f"https://api.github.com/repos/{self._repo_path}/releases/latest"
+        )
+        with urllib.request.urlopen(req) as resp:  # nosec CWE-22 disable bandit warning
+            assets = json.load(resp)['assets']
+            for asset in assets:
+                url = asset.get('browser_download_url')
+                if url and url.endswith('.apk'):
+                    return url
+
     def tag_exists(self, tag):
         """
         Check if git tag is present on github.
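A hedged usage sketch of the GitHub wrapper as it reads after this change: the Authorization header is only sent when a token is actually configured, so anonymous calls still work, just with GitHub's much lower unauthenticated rate limit. The constructor is only shown as context here, so the accepted repo_path format is an assumption taken from the later install code in this same compare:

from fdroidserver import github

# No token: requests go out unauthenticated, which GitHub rate-limits heavily.
gh = github.GithubApi(None, 'https://github.com/f-droid/fdroidclient')
apk_url = gh.get_latest_apk()  # browser_download_url of the newest release APK, or None
print(apk_url)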
@@ -16,14 +16,13 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import os
 import glob
-from argparse import ArgumentParser
 import logging
+import os
 import time
+from argparse import ArgumentParser

-from . import _
-from . import common
+from . import _, common
 from .common import FDroidPopen
 from .exception import FDroidException

@@ -18,34 +18,64 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+import json
+import logging
 import os
 import re
-import stat
-import urllib
-
-import git
-import json
 import shutil
+import stat
 import sys
-import yaml
+import urllib
 from argparse import ArgumentParser
-import logging
 from pathlib import Path
 from typing import Optional

+import git
+import yaml
+
 try:
     from yaml import CSafeLoader as SafeLoader
 except ImportError:
     from yaml import SafeLoader

-from . import _
-from . import common
-from . import metadata
+from . import _, common, metadata
 from .exception import FDroidException


 config = None

+SETTINGS_GRADLE_REGEX = re.compile(r'settings\.gradle(?:\.kts)?')
+GRADLE_SUBPROJECT_REGEX = re.compile(r'''['"]:?([^'"]+)['"]''')
+APPLICATION_ID_REGEX = re.compile(r'''\s*applicationId\s=?\s?['"].*['"]''')
+
+
+def get_all_gradle_and_manifests(build_dir):
+    paths = []
+    for root, dirs, files in os.walk(build_dir):
+        for f in sorted(files):
+            if f == 'AndroidManifest.xml' or f.endswith(('.gradle', '.gradle.kts')):
+                full = Path(root) / f
+                paths.append(full)
+    return paths
+
+
+def get_gradle_subdir(build_dir, paths):
+    """Get the subdir where the gradle build is based."""
+    first_gradle_dir = None
+    for path in paths:
+        if not first_gradle_dir:
+            first_gradle_dir = path.parent.relative_to(build_dir)
+        if path.exists() and SETTINGS_GRADLE_REGEX.match(path.name):
+            for m in GRADLE_SUBPROJECT_REGEX.finditer(path.read_text(encoding='utf-8')):
+                for f in (path.parent / m.group(1)).glob('build.gradle*'):
+                    with f.open(encoding='utf-8') as fp:
+                        for line in fp:
+                            if common.ANDROID_PLUGIN_REGEX.match(
+                                line
+                            ) or APPLICATION_ID_REGEX.match(line):
+                                return f.parent.relative_to(build_dir)
+    if first_gradle_dir and first_gradle_dir != Path('.'):
+        return first_gradle_dir
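A small sketch of how the two helpers added above fit together during an import; the module name and the checkout path are assumptions for illustration only. get_gradle_subdir() falls back to the first gradle directory it saw when no settings.gradle subproject matches:

from pathlib import Path

from fdroidserver import import_subcommand  # module name assumed from context

build_dir = Path('build/com.example.app')  # hypothetical checkout
paths = import_subcommand.get_all_gradle_and_manifests(build_dir)
subdir = import_subcommand.get_gradle_subdir(build_dir, paths)
print('gradle project lives in:', subdir or Path('.'))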
 def handle_retree_error_on_windows(function, path, excinfo):
     """Python can't remove a readonly file on Windows so chmod first."""

@@ -100,6 +130,7 @@ def getrepofrompage(url: str) -> tuple[Optional[str], str]:
         The found repository type or None if an error occured.
     address_or_reason
         The address to the found repository or the reason if an error occured.
+
     """
     if not url.startswith('http'):
         return (None, _('{url} does not start with "http"!'.format(url=url)))

@@ -122,7 +153,7 @@ def getrepofrompage(url: str) -> tuple[Optional[str], str]:
         index = page.find('hg clone')
         if index != -1:
             repotype = 'hg'
-            repo = page[index + 9:]
+            repo = page[index + 9 :]
             index = repo.find('<')
             if index == -1:
                 return (None, _("Error while getting repo address"))

@@ -134,7 +165,7 @@ def getrepofrompage(url: str) -> tuple[Optional[str], str]:
         index = page.find('git clone')
         if index != -1:
             repotype = 'git'
-            repo = page[index + 10:]
+            repo = page[index + 10 :]
             index = repo.find('<')
             if index == -1:
                 return (None, _("Error while getting repo address"))

@@ -168,6 +199,7 @@ def get_app_from_url(url: str) -> metadata.App:
         If the VCS type could not be determined.
     :exc:`ValueError`
         If the URL is invalid.
+
     """
     parsed = urllib.parse.urlparse(url)
     invalid_url = False

@@ -243,18 +275,29 @@ def main():
     # Parse command line...
     parser = ArgumentParser()
     common.setup_global_opts(parser)
-    parser.add_argument("-u", "--url", default=None,
-                        help=_("Project URL to import from."))
-    parser.add_argument("-s", "--subdir", default=None,
-                        help=_("Path to main Android project subdirectory, if not in root."))
-    parser.add_argument("-c", "--categories", default=None,
-                        help=_("Comma separated list of categories."))
-    parser.add_argument("-l", "--license", default=None,
-                        help=_("Overall license of the project."))
-    parser.add_argument("--omit-disable", action="store_true", default=False,
-                        help=_("Do not add 'disable:' to the generated build entries"))
-    parser.add_argument("--rev", default=None,
-                        help=_("Allows a different revision (or git branch) to be specified for the initial import"))
+    parser.add_argument("-u", "--url", help=_("Project URL to import from."))
+    parser.add_argument(
+        "-s",
+        "--subdir",
+        help=_("Path to main Android project subdirectory, if not in root."),
+    )
+    parser.add_argument(
+        "-c",
+        "--categories",
+        help=_("Comma separated list of categories."),
+    )
+    parser.add_argument("-l", "--license", help=_("Overall license of the project."))
+    parser.add_argument(
+        "--omit-disable",
+        action="store_true",
+        help=_("Do not add 'disable:' to the generated build entries"),
+    )
+    parser.add_argument(
+        "--rev",
+        help=_(
+            "Allows a different revision (or git branch) to be specified for the initial import"
+        ),
+    )
     metadata.add_metadata_arguments(parser)
     options = common.parse_args(parser)
     metadata.warnings_action = options.W

@@ -268,24 +311,20 @@ def main():

     local_metadata_files = common.get_local_metadata_files()
     if local_metadata_files:
-        raise FDroidException(_("This repo already has local metadata: %s") % local_metadata_files[0])
+        raise FDroidException(
+            _("This repo already has local metadata: %s") % local_metadata_files[0]
+        )

     build = metadata.Build()
-    if options.url is None and Path('.git').is_dir():
     app = metadata.App()
-        app.AutoName = Path.cwd().name
+    if options.url is None and Path('.git').is_dir():
         app.RepoType = 'git'
-
-        if Path('build.gradle').exists() or Path('build.gradle.kts').exists():
-            build.gradle = ['yes']
-
-        git_repo = git.Repo(Path.cwd())
+        tmp_importer_dir = Path.cwd()
+        git_repo = git.Repo(tmp_importer_dir)
+
         for remote in git.Remote.iter_items(git_repo):
             if remote.name == 'origin':
                 url = git_repo.remotes.origin.url
-                if url.startswith('https://git'):  # github, gitlab
-                    app.SourceCode = url.rstrip('.git')
-                app.Repo = url
+                app = get_app_from_url(url)
                 break
         write_local_file = True
     elif options.url:

@@ -294,25 +333,28 @@ def main():
         git_repo = git.Repo(tmp_importer_dir)

         if not options.omit_disable:
-            build.disable = 'Generated by `fdroid import` - check version fields and commitid'
+            build.disable = (
+                'Generated by `fdroid import` - check version fields and commitid'
+            )
         write_local_file = False
     else:
         raise FDroidException("Specify project url.")

+    app.AutoUpdateMode = 'Version'
     app.UpdateCheckMode = 'Tags'
-    build.commit = common.get_head_commit_id(git_repo)
+    build.commit = common.get_head_commit_id(tmp_importer_dir)

     # Extract some information...
-    paths = common.get_all_gradle_and_manifests(tmp_importer_dir)
-    subdir = common.get_gradle_subdir(tmp_importer_dir, paths)
+    paths = get_all_gradle_and_manifests(tmp_importer_dir)
+    gradle_subdir = get_gradle_subdir(tmp_importer_dir, paths)
     if paths:
         versionName, versionCode, appid = common.parse_androidmanifests(paths, app)
         if not appid:
             raise FDroidException(_("Couldn't find Application ID"))
         if not versionName:
-            logging.warning(_('Could not find latest version name'))
+            logging.warning(_('Could not find latest versionName'))
         if not versionCode:
-            logging.warning(_('Could not find latest version code'))
+            logging.warning(_('Could not find latest versionCode'))
     else:
         raise FDroidException(_("No gradle project could be found. Specify --subdir?"))

@@ -322,16 +364,15 @@ def main():

     # Create a build line...
     build.versionName = versionName or 'Unknown'
+    app.CurrentVersion = build.versionName
     build.versionCode = versionCode or 0
+    app.CurrentVersionCode = build.versionCode
     if options.subdir:
         build.subdir = options.subdir
-        build.gradle = ['yes']
-    elif subdir:
-        build.subdir = subdir.as_posix()
-        build.gradle = ['yes']
-    else:
+    elif gradle_subdir:
+        build.subdir = gradle_subdir.as_posix()
     # subdir might be None
-        subdir = Path()
+    subdir = Path(tmp_importer_dir / build.subdir) if build.subdir else tmp_importer_dir

     if options.license:
         app.License = options.license

@@ -339,23 +380,23 @@ def main():
         app.Categories = options.categories.split(',')
     if (subdir / 'jni').exists():
         build.buildjni = ['yes']
-    if (subdir / 'build.gradle').exists() or (subdir / 'build.gradle').exists():
+    if (subdir / 'build.gradle').exists() or (subdir / 'build.gradle.kts').exists():
         build.gradle = ['yes']

+    app.AutoName = common.fetch_real_name(subdir, build.gradle)
+
     package_json = tmp_importer_dir / 'package.json'  # react-native
     pubspec_yaml = tmp_importer_dir / 'pubspec.yaml'  # flutter
     if package_json.exists():
         build.sudo = [
             'sysctl fs.inotify.max_user_watches=524288 || true',
-            'curl -Lo node.tar.gz https://nodejs.org/download/release/v19.3.0/node-v19.3.0-linux-x64.tar.gz',
-            'echo "b525028ae5bb71b5b32cb7fce903ccce261dbfef4c7dd0f3e0ffc27cd6fc0b3f node.tar.gz" | sha256sum -c -',
-            'tar xzf node.tar.gz --strip-components=1 -C /usr/local/',
-            'npm -g install yarn',
+            'apt-get update',
+            'apt-get install -y npm',
         ]
         build.init = ['npm install --build-from-source']
         with package_json.open() as fp:
             data = json.load(fp)
-        app.AutoName = data.get('name', app.AutoName)
+        app.AutoName = app.AutoName or data.get('name')
         app.License = data.get('license', app.License)
         app.Description = data.get('description', app.Description)
         app.WebSite = data.get('homepage', app.WebSite)

@@ -365,11 +406,11 @@ def main():
     if app_json.exists():
         with app_json.open() as fp:
             data = json.load(fp)
-        app.AutoName = data.get('name', app.AutoName)
+        app.AutoName = app.AutoName or data.get('name')
     if pubspec_yaml.exists():
         with pubspec_yaml.open() as fp:
             data = yaml.load(fp, Loader=SafeLoader)
-        app.AutoName = data.get('name', app.AutoName)
+        app.AutoName = app.AutoName or data.get('name')
         app.License = data.get('license', app.License)
         app.Description = data.get('description', app.Description)
         app.UpdateCheckData = 'pubspec.yaml|version:\\s.+\\+(\\d+)|.|version:\\s(.+)\\+'

@@ -405,8 +446,11 @@ def main():
     Path('build').mkdir(exist_ok=True)
     build_dir = Path('build') / appid
     if build_dir.exists():
-        logging.warning(_('{path} already exists, ignoring import results!')
-                        .format(path=build_dir))
+        logging.warning(
+            _('{path} already exists, ignoring import results!').format(
+                path=build_dir
+            )
+        )
         sys.exit(1)
     elif tmp_importer_dir:
         # For Windows: Close the repo or a git.exe instance holds handles to repo

@@ -20,32 +20,49 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+"""Process the index files.
+
+This module is loaded by all fdroid subcommands since it is loaded in
+fdroidserver/__init__.py. Any narrowly used dependencies should be
+imported where they are used to limit dependencies for subcommands
+like publish/signindex/gpgsign. This eliminates the need to have
+these installed on the signing server.
+
+"""
+
+import calendar
 import collections
 import hashlib
 import json
 import logging
 import os
 import re
-import ruamel.yaml
 import shutil
+import sys
 import tempfile
 import urllib.parse
 import zipfile
-import calendar
-import qrcode
 from binascii import hexlify, unhexlify
 from datetime import datetime, timezone
 from pathlib import Path
 from xml.dom.minidom import Document

-from . import _
-from . import common
-from . import metadata
-from . import net
-from . import signindex
-from fdroidserver.common import ANTIFEATURES_CONFIG_NAME, CATEGORIES_CONFIG_NAME, CONFIG_CONFIG_NAME, MIRRORS_CONFIG_NAME, RELEASECHANNELS_CONFIG_NAME, DEFAULT_LOCALE, FDroidPopen, FDroidPopenBytes, load_stats_fdroid_signing_key_fingerprints
+from fdroidserver._yaml import yaml
+from fdroidserver.common import (
+    ANTIFEATURES_CONFIG_NAME,
+    CATEGORIES_CONFIG_NAME,
+    CONFIG_CONFIG_NAME,
+    DEFAULT_LOCALE,
+    MIRRORS_CONFIG_NAME,
+    RELEASECHANNELS_CONFIG_NAME,
+    FDroidPopen,
+    FDroidPopenBytes,
+    load_publish_signer_fingerprints,
+)
 from fdroidserver.exception import FDroidException, VerificationException

+from . import _, common, metadata, signindex
+

 def make(apps, apks, repodir, archive):
     """Generate the repo index files.

@@ -77,7 +94,7 @@ def make(apps, apks, repodir, archive):
         sortedapps[appid] = apps[appid]

     repodict = collections.OrderedDict()
-    repodict['timestamp'] = datetime.utcnow().replace(tzinfo=timezone.utc)
+    repodict['timestamp'] = datetime.now(timezone.utc)
     repodict['version'] = METADATA_VERSION

     if common.config['repo_maxage'] != 0:

@@ -116,14 +133,13 @@ def make(apps, apks, repodir, archive):
             raise TypeError(_('only accepts strings, lists, and tuples'))
         requestsdict[command] = packageNames

-    fdroid_signing_key_fingerprints = load_stats_fdroid_signing_key_fingerprints()
+    signer_fingerprints = load_publish_signer_fingerprints()

-    make_v0(sortedapps, apks, repodir, repodict, requestsdict,
-            fdroid_signing_key_fingerprints)
-    make_v1(sortedapps, apks, repodir, repodict, requestsdict,
-            fdroid_signing_key_fingerprints)
-    make_v2(sortedapps, apks, repodir, repodict, requestsdict,
-            fdroid_signing_key_fingerprints, archive)
+    make_v0(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints)
+    make_v1(sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints)
+    make_v2(
+        sortedapps, apks, repodir, repodict, requestsdict, signer_fingerprints, archive
+    )
     make_website(sortedapps, repodir, repodict)
     make_altstore(
         sortedapps,

@@ -144,23 +160,25 @@ def _should_file_be_generated(path, magic_string):


 def make_website(apps, repodir, repodict):
-    _ignored, repo_pubkey_fingerprint = extract_pubkey()
-    repo_pubkey_fingerprint_stripped = repo_pubkey_fingerprint.replace(" ", "")
-    link = repodict["address"]
-    link_fingerprinted = ('{link}?fingerprint={fingerprint}'
-                          .format(link=link, fingerprint=repo_pubkey_fingerprint_stripped))
     # do not change this string, as it will break updates for files with older versions of this string
     autogenerate_comment = "auto-generated - fdroid index updates will overwrite this file"

     if not os.path.exists(repodir):
         os.makedirs(repodir)

-    qrcode.make(link_fingerprinted).save(os.path.join(repodir, "index.png"))
-
     html_name = 'index.html'
     html_file = os.path.join(repodir, html_name)

     if _should_file_be_generated(html_file, autogenerate_comment):
+        import qrcode
+
+        _ignored, repo_pubkey_fingerprint = extract_pubkey()
+        repo_pubkey_fingerprint_stripped = repo_pubkey_fingerprint.replace(" ", "")
+        link = repodict["address"]
+        link_fingerprinted = '{link}?fingerprint={fingerprint}'.format(
+            link=link, fingerprint=repo_pubkey_fingerprint_stripped
+        )
+        qrcode.make(link_fingerprinted).save(os.path.join(repodir, "index.png"))
         with open(html_file, 'w') as f:
             name = repodict["name"]
             description = repodict["description"]

@@ -509,7 +527,6 @@ def package_metadata(app, repodir):
         "AuthorPhone",
         "AuthorWebSite",
         "Bitcoin",
-        "FlattrID",
         "Liberapay",
         "Litecoin",
         "OpenCollective",

@@ -578,7 +595,10 @@ def convert_version(version, app, repodir):
             ver["file"]["ipfsCIDv1"] = ipfsCIDv1

     if "srcname" in version:
-        ver["src"] = common.file_entry(os.path.join(repodir, version["srcname"]))
+        ver["src"] = common.file_entry(
+            os.path.join(repodir, version["srcname"]),
+            version["srcnameSha256"],
+        )

     if "obbMainFile" in version:
         ver["obbMainFile"] = common.file_entry(

@@ -674,9 +694,13 @@ def v2_repo(repodict, repodir, archive):

     config = common.load_localized_config(CONFIG_CONFIG_NAME, repodir)
     if config:
-        repo["name"] = config["archive" if archive else "repo"]["name"]
-        repo["description"] = config["archive" if archive else "repo"]["description"]
-        repo["icon"] = config["archive" if archive else "repo"]["icon"]
+        localized_config = config["archive" if archive else "repo"]
+        if "name" in localized_config:
+            repo["name"] = localized_config["name"]
+        if "description" in localized_config:
+            repo["description"] = localized_config["description"]
+        if "icon" in localized_config:
+            repo["icon"] = localized_config["icon"]

     repo["address"] = repodict["address"]
     if "mirrors" in repodict:

@@ -701,7 +725,7 @@ def v2_repo(repodict, repodir, archive):
     return repo


-def make_v2(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints, archive):
+def make_v2(apps, packages, repodir, repodict, requestsdict, signer_fingerprints, archive):

     def _index_encoder_default(obj):
         if isinstance(obj, set):

@@ -723,7 +747,7 @@ def make_v2(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
     output["repo"]["requests"] = requestsdict

     # establish sort order of the index
-    v1_sort_packages(packages, fdroid_signing_key_fingerprints)
+    sort_package_versions(packages, signer_fingerprints)

     output_packages = collections.OrderedDict()
     output['packages'] = output_packages

@@ -762,7 +786,9 @@ def make_v2(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
     # include definitions for "auto-defined" categories, e.g. just used in app metadata
     for category in sorted(categories_used_by_apps):
         if category not in output['repo'][CATEGORIES_CONFIG_NAME]:
-            output['repo'][CATEGORIES_CONFIG_NAME][category] = {"name": {DEFAULT_LOCALE: category}}
+            output['repo'][CATEGORIES_CONFIG_NAME][category] = dict()
+        if 'name' not in output['repo'][CATEGORIES_CONFIG_NAME][category]:
+            output['repo'][CATEGORIES_CONFIG_NAME][category]['name'] = {DEFAULT_LOCALE: category}
     # do not include defined categories if no apps use them
     for category in list(output['repo'].get(CATEGORIES_CONFIG_NAME, list())):
         if category not in categories_used_by_apps:

@@ -838,7 +864,7 @@ def make_v2(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
         signindex.sign_index(repodir, json_name)


-def make_v1(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints):
+def make_v1(apps, packages, repodir, repodict, requestsdict, signer_fingerprints):

     def _index_encoder_default(obj):
         if isinstance(obj, set):

@@ -868,7 +894,7 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
     output['repo']['mirrors'] = mirrors

     # establish sort order of the index
-    v1_sort_packages(packages, fdroid_signing_key_fingerprints)
+    sort_package_versions(packages, signer_fingerprints)

     appslist = []
     output['apps'] = appslist

@@ -941,7 +967,7 @@ def make_v1(apps, packages, repodir, repodict, requestsdict, fdroid_signing_key_
         for k, v in sorted(package.items()):
             if not v:
                 continue
-            if k in ('icon', 'icons', 'icons_src', 'ipfsCIDv1', 'name'):
+            if k in ('icon', 'icons', 'icons_src', 'ipfsCIDv1', 'name', 'srcnameSha256'):
                 continue
             if k == 'antiFeatures':
                 d[k] = sorted(v.keys())

@@ -976,8 +1002,8 @@ def _copy_to_local_copy_dir(repodir, f):
                             .format(path=local_copy_dir))


-def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
-    """Sort the supplied list to ensure a deterministic sort order for package entries in the index file.
+def sort_package_versions(packages, signer_fingerprints):
+    """Sort to ensure a deterministic order for package versions in the index file.

     This sort-order also expresses
     installation preference to the clients.

@@ -1002,7 +1028,7 @@ def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
         if dev_signer and dev_signer == signer:
             group = GROUP_DEV_SIGNED
         else:
-            fdroid_signer = fdroid_signing_key_fingerprints.get(packageName, {}).get('signer')
+            fdroid_signer = signer_fingerprints.get(packageName, {}).get('signer')
             if fdroid_signer and fdroid_signer == signer:
                 group = GROUP_FDROID_SIGNED

@@ -1015,7 +1041,7 @@ def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
     packages.sort(key=v1_sort_keys)


-def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints):
+def make_v0(apps, apks, repodir, repodict, requestsdict, signer_fingerprints):
     """Aka index.jar aka index.xml."""
     doc = Document()

@@ -1114,7 +1140,7 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
         if name_from_apk is None:
             name_from_apk = apk.get('name')
         for versionCode, apksforver in apksbyversion.items():
-            fdroid_signer = fdroid_signing_key_fingerprints.get(appid, {}).get('signer')
+            fdroid_signer = signer_fingerprints.get(appid, {}).get('signer')
             fdroid_signed_apk = None
             name_match_apk = None
             for x in apksforver:

@@ -1169,7 +1195,6 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
         addElementNonEmpty('donate', app.Donate, doc, apel)
         addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel)
         addElementNonEmpty('litecoin', app.Litecoin, doc, apel)
-        addElementNonEmpty('flattr', app.FlattrID, doc, apel)
         addElementNonEmpty('openCollective', app.OpenCollective, doc, apel)

         # These elements actually refer to the current version (i.e. which

@@ -1312,6 +1337,29 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
                 os.remove(siglinkname)
             os.symlink(sigfile_path, siglinkname)

+    if sys.version_info.minor >= 13:
+        # Python 3.13 changed minidom so it no longer converts " to an XML entity.
+        # https://github.com/python/cpython/commit/154477be722ae5c4e18d22d0860e284006b09c4f
+        # This just puts back the previous implementation, with black code format.
+        import inspect
+        import xml.dom.minidom
+
+        def _write_data(writer, text, attr):  # pylint: disable=unused-argument
+            if text:
+                text = (
+                    text.replace('&', '&amp;')
+                    .replace('<', '&lt;')
+                    .replace('"', '&quot;')
+                    .replace('>', '&gt;')
+                )
+                writer.write(text)
+
+        argnames = tuple(inspect.signature(xml.dom.minidom._write_data).parameters)
+        if argnames == ('writer', 'text', 'attr'):
+            xml.dom.minidom._write_data = _write_data
+        else:
+            logging.warning('Failed to monkey patch minidom for index.xml support!')
+
     if common.options.pretty:
         output = doc.toprettyxml(encoding='utf-8')
     else:

@@ -1357,7 +1405,15 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
                             % repo_icon)
             os.makedirs(os.path.dirname(iconfilename), exist_ok=True)
             try:
+                import qrcode
+
                 qrcode.make(common.config['repo_url']).save(iconfilename)
+            except ModuleNotFoundError as e:
+                raise ModuleNotFoundError(
+                    _(
+                        'The "qrcode" Python package is not installed (e.g. apt-get install python3-qrcode)!'
+                    )
+                ) from e
             except Exception:
                 exampleicon = os.path.join(common.get_examples_dir(),
                                            common.default_config['repo_icon'])

@@ -1424,7 +1480,7 @@ def add_mirrors_to_repodict(repo_section, repodict):
                 )
             )
         with mirrors_yml.open() as fp:
-            mirrors_config = ruamel.yaml.YAML(typ='safe').load(fp)
+            mirrors_config = yaml.load(fp)
         if not isinstance(mirrors_config, list):
             msg = _('{path} is not list, but a {datatype}!')
             raise TypeError(

@@ -1478,6 +1534,7 @@ def add_mirrors_to_repodict(repo_section, repodict):
         repodict['mirrors'] = []
         canonical_url = repodict['address']
         found_primary = False
+        errors = 0
         for mirror in mirrors:
             if canonical_url == mirror['url']:
                 found_primary = True

@@ -1486,9 +1543,19 @@ def add_mirrors_to_repodict(repo_section, repodict):
                 for k in sorted(mirror.keys()):
                     sortedmirror[k] = mirror[k]
                 repodict['mirrors'].insert(0, sortedmirror)
+            elif mirror.get('isPrimary'):
+                errors += 1
+                logging.error(
+                    _('Mirror config for {url} contains "isPrimary" key!').format(
+                        url=mirror['url']
+                    )
+                )
             else:
                 repodict['mirrors'].append(mirror)

+        if errors:
+            raise FDroidException(_('"isPrimary" key should not be added to mirrors!'))
+
         if repodict['mirrors'] and not found_primary:
             repodict['mirrors'].insert(0, {'isPrimary': True, 'url': repodict['address']})
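For illustration, a hedged sketch of the mirror-list shape this new validation accepts once the config has been parsed: a plain list of mappings where "isPrimary" is never set by hand, since the code above derives it from the canonical address. The URLs and any key other than "url" are placeholders, not part of the change:

# Roughly what add_mirrors_to_repodict() ends up iterating over after yaml.load().
mirrors_config = [
    {"url": "https://ftp.example.org/fdroid/repo"},      # plain mirror
    {"url": "https://mirror.example.net/fdroid/repo"},   # another mirror
    # {"url": "...", "isPrimary": True},  # rejected: raises FDroidException
]

for mirror in mirrors_config:
    assert not mirror.get("isPrimary"), "isPrimary is derived, never configured"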
@@ -1603,6 +1670,8 @@ def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout=
     - The new eTag as returned by the HTTP request

     """
+    from . import net
+
     url = urllib.parse.urlsplit(url_str)

     fingerprint = None

@@ -1635,7 +1704,7 @@ def download_repo_index_v1(url_str, etag=None, verify_fingerprint=True, timeout=
     return index, new_etag


-def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=600):
+def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=None):
     """Download and verifies index v2 file, then returns its data.

     Downloads the repository index from the given :param url_str and

@@ -1654,8 +1723,15 @@ def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=
     - The new eTag as returned by the HTTP request

     """
+    from . import net
+
+    etag  # etag is unused but needs to be there to keep the same API as the earlier functions.
+
     url = urllib.parse.urlsplit(url_str)

+    if timeout is not None:
+        logging.warning('"timeout" argument of download_repo_index_v2() is deprecated!')
+
     fingerprint = None
     if verify_fingerprint:
         query = urllib.parse.parse_qs(url.query)

@@ -1667,29 +1743,22 @@ def download_repo_index_v2(url_str, etag=None, verify_fingerprint=True, timeout=
         path = url.path.rsplit('/', 1)[0]
     else:
         path = url.path.rstrip('/')
+    url = urllib.parse.SplitResult(url.scheme, url.netloc, path, '', '')

-    url = urllib.parse.SplitResult(url.scheme, url.netloc, path + '/entry.jar', '', '')
-    download, new_etag = net.http_get(url.geturl(), etag, timeout)
-
-    if download is None:
-        return None, new_etag
-
-    # jarsigner is used to verify the JAR, it requires a file for input
-    with tempfile.TemporaryDirectory() as dirname:
-        with (Path(dirname) / 'entry.jar').open('wb') as fp:
-            fp.write(download)
-            fp.flush()
-            entry, public_key, fingerprint = get_index_from_jar(fp.name, fingerprint)
-
-    name = entry['index']['name']
+    mirrors = common.get_mirrors(url, 'entry.jar')
+    f = net.download_using_mirrors(mirrors)
+    entry, public_key, fingerprint = get_index_from_jar(f, fingerprint)
+
     sha256 = entry['index']['sha256']
-    url = urllib.parse.SplitResult(url.scheme, url.netloc, path + name, '', '')
-    index, _ignored = net.http_get(url.geturl(), None, timeout)
+    mirrors = common.get_mirrors(url, entry['index']['name'][1:])
+    f = net.download_using_mirrors(mirrors)
+    with open(f, 'rb') as fp:
+        index = fp.read()
     if sha256 != hashlib.sha256(index).hexdigest():
         raise VerificationException(
             _("SHA-256 of {url} does not match entry!").format(url=url)
         )
-    return json.loads(index), new_etag
+    return json.loads(index), None


 def get_index_from_jar(jarfile, fingerprint=None, allow_deprecated=False):
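A hedged usage sketch of the reworked v2 download path: entry.jar is fetched through the mirror helper, its signature is checked, and the SHA-256 of the referenced index file is verified against the entry. The eTag return value is now always None. The fingerprint in the URL below is f-droid.org's published repo fingerprint, used here purely as an example:

from fdroidserver import index

url = ('https://f-droid.org/repo'
       '?fingerprint=43238D512C1E5EB2D6569F4A3AFBF5523418B82E0A3ED1552770ABB9A9C9CCAB')
data, etag = index.download_repo_index_v2(url)  # etag comes back as None now
print(len(data['packages']), 'packages in index-v2')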
@@ -19,16 +19,15 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import glob
+import logging
 import os
 import re
 import shutil
 import socket
 import sys
 from argparse import ArgumentParser
-import logging

-from . import _
-from . import common
+from . import _, common
 from .exception import FDroidException

 config = {}

@@ -38,13 +37,13 @@ def disable_in_config(key, value):
     """Write a key/value to the local config.yml, then comment it out."""
     import yaml

-    with open('config.yml') as f:
-        data = f.read()
+    with open(common.CONFIG_FILE) as fp:
+        data = fp.read()
     pattern = r'\n[\s#]*' + key + r':.*'
     repl = '\n#' + yaml.dump({key: value}, default_flow_style=False)
     data = re.sub(pattern, repl, data)
-    with open('config.yml', 'w') as f:
-        f.writelines(data)
+    with open(common.CONFIG_FILE, 'w') as fp:
+        fp.writelines(data)


 def main():

@@ -82,7 +81,7 @@ def main():
     )
     options = common.parse_args(parser)

-    common.set_console_logging(options.verbose)
+    common.set_console_logging(options.verbose, options.color)

     fdroiddir = os.getcwd()
     test_config = dict()

@@ -138,24 +137,24 @@ def main():
             _("Android SDK not found at {path}!").format(path=test_config['sdk_path'])
         )

-    if not os.path.exists('config.yml') and not os.path.exists('config.py'):
+    if not os.path.exists(common.CONFIG_FILE):
         # 'metadata' and 'tmp' are created in fdroid
         if not os.path.exists('repo'):
             os.mkdir('repo')
-        example_config_yml = os.path.join(examplesdir, 'config.yml')
+        example_config_yml = os.path.join(examplesdir, common.CONFIG_FILE)
         if os.path.exists(example_config_yml):
-            shutil.copyfile(example_config_yml, 'config.yml')
+            shutil.copyfile(example_config_yml, common.CONFIG_FILE)
         else:
             from pkg_resources import get_distribution

             versionstr = get_distribution('fdroidserver').version
             if not versionstr:
                 versionstr = 'master'
-            with open('config.yml', 'w') as fp:
+            with open(common.CONFIG_FILE, 'w') as fp:
                 fp.write('# see https://gitlab.com/fdroid/fdroidserver/blob/')
                 fp.write(versionstr)
-                fp.write('/examples/config.yml\n')
-        os.chmod('config.yml', 0o0600)
+                fp.write(f'/examples/{common.CONFIG_FILE}\n')
+        os.chmod(common.CONFIG_FILE, 0o0600)
     # If android_home is None, test_config['sdk_path'] will be used and
     # "$ANDROID_HOME" may be used if the env var is set up correctly.
     # If android_home is not None, the path given from the command line
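A hedged sketch of the comment-out behaviour used by disable_in_config() above, reduced to plain file handling so it can be run standalone; the file name, key, and value are placeholders, and re.escape() is added here only to keep the standalone version safe for arbitrary keys:

import re

import yaml

def comment_out_key(path, key, value):
    """Replace a 'key: ...' line in a YAML file with a commented-out default."""
    with open(path) as fp:
        data = fp.read()
    pattern = r'\n[\s#]*' + re.escape(key) + r':.*'
    repl = '\n#' + yaml.dump({key: value}, default_flow_style=False)
    with open(path, 'w') as fp:
        fp.write(re.sub(pattern, repl, data))

# e.g. comment_out_key('config.yml', 'repo_keyalias', 'myrepokey')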
@@ -17,35 +17,298 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import sys
-import os
 import glob
-from argparse import ArgumentParser
+import locale
 import logging
+import os
+import sys
+import termios
+import tty
+from argparse import ArgumentParser, BooleanOptionalAction
+from pathlib import Path
+from urllib.parse import urlencode, urlparse, urlunparse

-from . import _
-from . import common
-from .common import SdkToolsPopen
+import defusedxml.ElementTree as XMLElementTree
+
+from . import _, common, github, index, net
 from .exception import FDroidException

-config = None
+DEFAULT_IPFS_GATEWAYS = ("https://gateway.ipfs.io/ipfs/",)
+MAVEN_CENTRAL_MIRRORS = [
+    {
+        "url": "https://repo1.maven.org/maven2/",
+        "dnsA": ["199.232.16.209"],
+        "worksWithoutSNI": True,
+    },
+    {
+        "url": "https://repo.maven.apache.org/maven2/",
+        "dnsA": ["199.232.16.215"],
+        "worksWithoutSNI": True,
+    },
+    {
+        "url": "https://maven-central-asia.storage-download.googleapis.com/maven2/",
+    },
+    {
+        "url": "https://maven-central-eu.storage-download.googleapis.com/maven2/",
+    },
+    {
+        "url": "https://maven-central.storage-download.googleapis.com/maven2/",
+    },
+]
+
+
+# pylint: disable=unused-argument
+def download_apk(appid='org.fdroid.fdroid', privacy_mode=False):
+    """Download an APK from F-Droid via the first mirror that works."""
+    url = urlunparse(
+        urlparse(common.FDROIDORG_MIRRORS[0]['url'])._replace(
+            query=urlencode({'fingerprint': common.FDROIDORG_FINGERPRINT})
+        )
+    )
+
+    data, _ignored = index.download_repo_index_v2(url)
+    app = data.get('packages', dict()).get(appid)
+    preferred_version = None
+    for version in app['versions'].values():
+        if not preferred_version:
+            # if all else fails, use the first one
+            preferred_version = version
+        if not version.get('releaseChannels'):
+            # prefer APK in default release channel
+            preferred_version = version
+            break
+
+    mirrors = common.append_filename_to_mirrors(
+        preferred_version['file']['name'][1:], common.FDROIDORG_MIRRORS
+    )
+    ipfsCIDv1 = preferred_version['file'].get('ipfsCIDv1')
+    if ipfsCIDv1:
+        for gateway in DEFAULT_IPFS_GATEWAYS:
+            mirrors.append({'url': os.path.join(gateway, ipfsCIDv1)})
+    f = net.download_using_mirrors(mirrors)
+    if f and os.path.exists(f):
+        versionCode = preferred_version['manifest']['versionCode']
+        f = Path(f)
+        return str(f.rename(f.with_stem(f'{appid}_{versionCode}')).resolve())
+
+
+def download_fdroid_apk(privacy_mode=False):  # pylint: disable=unused-argument
+    """Directly download the current F-Droid APK and verify it.
+
+    This downloads the "download button" link, which is the version
+    that is best tested for new installs.
+
+    """
+    mirror = common.FDROIDORG_MIRRORS[0]
+    mirror['url'] = urlunparse(urlparse(mirror['url'])._replace(path='F-Droid.apk'))
+    return net.download_using_mirrors([mirror])
+
+
+def download_fdroid_apk_from_github(privacy_mode=False):
+    """Download F-Droid.apk from F-Droid's GitHub Releases."""
+    if common.config and not privacy_mode:
+        token = common.config.get('github_token')
+    else:
+        token = None
+    gh = github.GithubApi(token, 'https://github.com/f-droid/fdroidclient')
+    latest_apk = gh.get_latest_apk()
+    filename = os.path.basename(latest_apk)
+    return net.download_file(latest_apk, os.path.join(common.get_cachedir(), filename))
+
+
+def download_fdroid_apk_from_ipns(privacy_mode=False):
+    """Download the F-Droid APK from an IPNS repo."""
+    cid = 'k51qzi5uqu5dl4hbcksbdmplanu9n4hivnqsupqe6vzve1pdbeh418ssptldd3'
+    mirrors = [
+        {"url": f"https://ipfs.io/ipns/{cid}/F-Droid.apk"},
+    ]
+    if not privacy_mode:
+        mirrors.append({"url": f"https://{cid}.ipns.dweb.link/F-Droid.apk"})
return net.download_using_mirrors(mirrors)
|
||||||
|
|
||||||
|
|
||||||
|
def download_fdroid_apk_from_maven(privacy_mode=False):
|
||||||
|
"""Download F-Droid.apk from Maven Central and official mirrors."""
|
||||||
|
path = 'org/fdroid/fdroid/F-Droid'
|
||||||
|
if privacy_mode:
|
||||||
|
mirrors = MAVEN_CENTRAL_MIRRORS[:2] # skip the Google servers
|
||||||
|
else:
|
||||||
|
mirrors = MAVEN_CENTRAL_MIRRORS
|
||||||
|
metadata = net.download_using_mirrors(
|
||||||
|
common.append_filename_to_mirrors(
|
||||||
|
os.path.join(path, 'maven-metadata.xml'), mirrors
|
||||||
|
)
|
||||||
|
)
|
||||||
|
version = XMLElementTree.parse(metadata).getroot().findall('*.//latest')[0].text
|
||||||
|
mirrors = common.append_filename_to_mirrors(
|
||||||
|
os.path.join(path, version, f'F-Droid-{version}.apk'), mirrors
|
||||||
|
)
|
||||||
|
return net.download_using_mirrors(mirrors)
|
||||||
|
|
||||||
|
|
||||||
|
def install_fdroid_apk(privacy_mode=False):
|
||||||
|
"""Download and install F-Droid.apk using all tricks we can muster.
|
||||||
|
|
||||||
|
By default, this first tries to fetch the official install APK
|
||||||
|
which is offered when someone clicks the "download" button on
|
||||||
|
https://f-droid.org/. Then it will try all the mirrors and
|
||||||
|
methods until it gets something successful, or runs out of
|
||||||
|
options.
|
||||||
|
|
||||||
|
There is privacy_mode which tries to download from mirrors first,
|
||||||
|
so that this downloads from a mirror that has many different kinds
|
||||||
|
of files available, thereby breaking the clear link to F-Droid.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
None for success or the error message.
|
||||||
|
|
||||||
|
"""
|
||||||
|
country_code = locale.getlocale()[0].split('_')[-1]
|
||||||
|
if privacy_mode is None and country_code in ('CN', 'HK', 'IR', 'TM'):
|
||||||
|
logging.warning(
|
||||||
|
_('Privacy mode was enabled based on your locale ({country_code}).').format(
|
||||||
|
country_code=country_code
|
||||||
|
)
|
||||||
|
)
|
||||||
|
privacy_mode = True
|
||||||
|
|
||||||
|
if privacy_mode or not (common.config and common.config.get('jarsigner')):
|
||||||
|
download_methods = [
|
||||||
|
download_fdroid_apk_from_maven,
|
||||||
|
download_fdroid_apk_from_ipns,
|
||||||
|
download_fdroid_apk_from_github,
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
download_methods = [
|
||||||
|
download_apk,
|
||||||
|
download_fdroid_apk_from_maven,
|
||||||
|
download_fdroid_apk_from_github,
|
||||||
|
download_fdroid_apk_from_ipns,
|
||||||
|
download_fdroid_apk,
|
||||||
|
]
|
||||||
|
for method in download_methods:
|
||||||
|
try:
|
||||||
|
f = method(privacy_mode=privacy_mode)
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
logging.info(e)
|
||||||
|
else:
|
||||||
|
return _('F-Droid.apk could not be downloaded from any known source!')
|
||||||
|
|
||||||
|
fingerprint = common.apk_signer_fingerprint(f)
|
||||||
|
if fingerprint.upper() != common.FDROIDORG_FINGERPRINT:
|
||||||
|
return _('{path} has the wrong fingerprint ({fingerprint})!').format(
|
||||||
|
path=f, fingerprint=fingerprint
|
||||||
|
)
|
||||||
|
install_apk(f)
|
||||||
|
|
||||||
|
|
||||||
|
def install_apk(f):
|
||||||
|
if common.config and common.config.get('apksigner'):
|
||||||
|
# TODO this should always verify, but that requires APK sig verification in Python #94
|
||||||
|
logging.info(_('Verifying package {path} with apksigner.').format(path=f))
|
||||||
|
common.verify_apk_signature(f)
|
||||||
|
if common.config and common.config.get('adb'):
|
||||||
|
if devices():
|
||||||
|
install_apks_to_devices([f])
|
||||||
|
os.remove(f)
|
||||||
|
else:
|
||||||
|
os.remove(f)
|
||||||
|
return _('No devices found for `adb install`! Please plug one in.')
|
||||||
|
|
||||||
|
|
||||||
def devices():
|
def devices():
|
||||||
p = SdkToolsPopen(['adb', "devices"])
|
"""Get the list of device serials for use with adb commands."""
|
||||||
|
p = common.SdkToolsPopen(['adb', "devices"])
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise FDroidException("An error occured when finding devices: %s" % p.output)
|
raise FDroidException("An error occured when finding devices: %s" % p.output)
|
||||||
lines = [line for line in p.output.splitlines() if not line.startswith('* ')]
|
serials = list()
|
||||||
if len(lines) < 3:
|
for line in p.output.splitlines():
|
||||||
return []
|
columns = line.strip().split("\t", maxsplit=1)
|
||||||
lines = lines[1:-1]
|
if len(columns) == 2:
|
||||||
return [line.split()[0] for line in lines]
|
serial, status = columns
|
||||||
|
if status == 'device':
|
||||||
|
serials.append(serial)
|
||||||
|
else:
|
||||||
|
d = {'serial': serial, 'status': status}
|
||||||
|
logging.warning(_('adb reports {serial} is "{status}"!'.format(**d)))
|
||||||
|
return serials
|
||||||
|
|
||||||
|
|
||||||
|
def install_apks_to_devices(apks):
|
||||||
|
"""Install the list of APKs to all Android devices reported by `adb devices`."""
|
||||||
|
for apk in apks:
|
||||||
|
# Get device list each time to avoid device not found errors
|
||||||
|
devs = devices()
|
||||||
|
if not devs:
|
||||||
|
raise FDroidException(_("No attached devices found"))
|
||||||
|
logging.info(_("Installing %s...") % apk)
|
||||||
|
for dev in devs:
|
||||||
|
logging.info(
|
||||||
|
_("Installing '{apkfilename}' on {dev}...").format(
|
||||||
|
apkfilename=apk, dev=dev
|
||||||
|
)
|
||||||
|
)
|
||||||
|
p = common.SdkToolsPopen(['adb', "-s", dev, "install", apk])
|
||||||
|
fail = ""
|
||||||
|
for line in p.output.splitlines():
|
||||||
|
if line.startswith("Failure"):
|
||||||
|
fail = line[9:-1]
|
||||||
|
if not fail:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if fail == "INSTALL_FAILED_ALREADY_EXISTS":
|
||||||
|
logging.warning(
|
||||||
|
_('"{apkfilename}" is already installed on {dev}.').format(
|
||||||
|
apkfilename=apk, dev=dev
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise FDroidException(
|
||||||
|
_("Failed to install '{apkfilename}' on {dev}: {error}").format(
|
||||||
|
apkfilename=apk, dev=dev, error=fail
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def read_char():
|
||||||
|
"""Read input from the terminal prompt one char at a time."""
|
||||||
|
fd = sys.stdin.fileno()
|
||||||
|
old_settings = termios.tcgetattr(fd)
|
||||||
|
try:
|
||||||
|
tty.setraw(fd)
|
||||||
|
ch = sys.stdin.read(1)
|
||||||
|
finally:
|
||||||
|
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||||
|
return ch
|
||||||
|
|
||||||
|
|
||||||
|
def strtobool(val):
|
||||||
|
"""Convert a localized string representation of truth to True or False."""
|
||||||
|
return val.lower() in ('', 'y', 'yes', _('yes'), _('true')) # '' is pressing Enter
|
||||||
|
|
||||||
|
|
||||||
|
def prompt_user(yes, msg):
|
||||||
|
"""Prompt user for yes/no, supporting Enter and Esc as accepted answers."""
|
||||||
|
run_install = yes
|
||||||
|
if yes is None and sys.stdout.isatty():
|
||||||
|
print(msg, end=' ', flush=True)
|
||||||
|
answer = ''
|
||||||
|
while True:
|
||||||
|
in_char = read_char()
|
||||||
|
if in_char == '\r': # Enter key
|
||||||
|
break
|
||||||
|
if not in_char.isprintable():
|
||||||
|
sys.exit(1)
|
||||||
|
print(in_char, end='', flush=True)
|
||||||
|
answer += in_char
|
||||||
|
run_install = strtobool(answer)
|
||||||
|
print()
|
||||||
|
return run_install
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
global config
|
|
||||||
|
|
||||||
# Parse command line...
|
|
||||||
parser = ArgumentParser(
|
parser = ArgumentParser(
|
||||||
usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
|
usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]"
|
||||||
)
|
)
|
||||||
|
@@ -62,22 +325,56 @@ def main():
         default=False,
         help=_("Install all signed applications available"),
     )
+    parser.add_argument(
+        "-p",
+        "--privacy-mode",
+        action=BooleanOptionalAction,
+        default=None,
+        help=_("Download F-Droid.apk using mirrors that leak less to the network"),
+    )
+    parser.add_argument(
+        "-y",
+        "--yes",
+        action="store_true",
+        default=None,
+        help=_("Automatic yes to all prompts."),
+    )
+    parser.add_argument(
+        "-n",
+        "--no",
+        action="store_false",
+        dest='yes',
+        help=_("Automatic no to all prompts."),
+    )
     options = common.parse_args(parser)

-    common.set_console_logging(options.verbose)
+    common.set_console_logging(options.verbose, options.color)
+    logging.captureWarnings(True)  # for SNIMissingWarning
+
+    common.get_config()

     if not options.appid and not options.all:
-        parser.error(
-            _("option %s: If you really want to install all the signed apps, use --all")
-            % "all"
+        run_install = prompt_user(
+            options.yes,
+            _('Would you like to download and install F-Droid.apk via adb? (YES/no)'),
         )
-    config = common.read_config()
+        if run_install:
+            sys.exit(install_fdroid_apk(options.privacy_mode))
+        sys.exit(1)

     output_dir = 'repo'
-    if not os.path.isdir(output_dir):
-        logging.info(_("No signed output directory - nothing to do"))
-        sys.exit(0)
+    if (options.appid or options.all) and not os.path.isdir(output_dir):
+        logging.error(_("No signed output directory - nothing to do"))
+        run_install = prompt_user(
+            options.yes,
+            _('Would you like to download the app(s) from f-droid.org? (YES/no)'),
+        )
+        if run_install:
+            for appid in options.appid:
+                f = download_apk(appid)
+                install_apk(f)
+            sys.exit(install_fdroid_apk(options.privacy_mode))
+        sys.exit(1)

     if options.appid:
         vercodes = common.read_pkg_args(options.appid, True)
@@ -99,45 +396,14 @@ def main():
         for appid, apk in apks.items():
             if not apk:
                 raise FDroidException(_("No signed APK available for %s") % appid)
+        install_apks_to_devices(apks.values())

-    else:
+    elif options.all:
         apks = {
             common.publishednameinfo(apkfile)[0]: apkfile
             for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk')))
         }
+        install_apks_to_devices(apks.values())
-        for appid, apk in apks.items():
-            # Get device list each time to avoid device not found errors
-            devs = devices()
-            if not devs:
-                raise FDroidException(_("No attached devices found"))
-            logging.info(_("Installing %s...") % apk)
-            for dev in devs:
-                logging.info(
-                    _("Installing '{apkfilename}' on {dev}...").format(
-                        apkfilename=apk, dev=dev
-                    )
-                )
-                p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
-                fail = ""
-                for line in p.output.splitlines():
-                    if line.startswith("Failure"):
-                        fail = line[9:-1]
-                if not fail:
-                    continue
-
-                if fail == "INSTALL_FAILED_ALREADY_EXISTS":
-                    logging.warning(
-                        _('"{apkfilename}" is already installed on {dev}.').format(
-                            apkfilename=apk, dev=dev
-                        )
-                    )
-                else:
-                    raise FDroidException(
-                        _("Failed to install '{apkfilename}' on {dev}: {error}").format(
-                            apkfilename=apk, dev=dev, error=fail
-                        )
-                    )

     logging.info('\n' + _('Finished'))

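The new `install_fdroid_apk()` above builds an ordered list of download methods and relies on Python's `for`/`else` to detect that every source failed. Here is a standalone sketch of that fallback idiom, with stand-in stub functions instead of the real fdroidserver downloaders:

```python
# Sketch of the fallback chain used by install_fdroid_apk(): try each download
# method in order, stop at the first success, and fall through to an error
# only if every method raised.  The stub functions are placeholders.
import logging


def from_mirror():
    raise OSError('mirror unreachable')  # simulate a failed source


def from_github():
    return '/tmp/F-Droid.apk'  # simulate a successful download


def fetch_first_working(methods):
    for method in methods:
        try:
            path = method()
            break
        except Exception as e:  # a failed source is logged, not fatal
            logging.info(e)
    else:
        # for/else: this only runs when the loop was never broken out of
        return None, 'F-Droid.apk could not be downloaded from any known source!'
    return path, None


path, error = fetch_first_working([from_mirror, from_github])
print(path or error)
```

The `else` branch of a `for` loop only runs when the loop finishes without `break`, which is exactly the "nothing worked" case the function reports back to the caller.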
@@ -24,7 +24,7 @@ import urllib.parse
 from argparse import ArgumentParser
 from pathlib import Path

-import ruamel.yaml
+from fdroidserver._yaml import yaml

 from . import _, common, metadata, rewritemeta

@@ -159,10 +159,6 @@ regex_checks = {
     ],
     'Donate': http_checks
     + [
-        (
-            re.compile(r'.*flattr\.com'),
-            _("Flattr donation methods belong in the FlattrID: field"),
-        ),
         (
             re.compile(r'.*liberapay\.com'),
             _("Liberapay donation methods belong in the Liberapay: field"),
@@ -217,6 +213,82 @@ regex_checks = {
     ],
 }

+# config keys that are currently ignored by lint, but could be supported.
+ignore_config_keys = (
+    'github_releases',
+    'java_paths',
+)
+
+bool_keys = (
+    'allow_disabled_algorithms',
+    'androidobservatory',
+    'build_server_always',
+    'deploy_process_logs',
+    'keep_when_not_allowed',
+    'make_current_version_link',
+    'nonstandardwebroot',
+    'per_app_repos',
+    'refresh_scanner',
+    'scan_binary',
+    'sync_from_local_copy_dir',
+)
+
+check_config_keys = (
+    'ant',
+    'apk_signing_key_block_list',
+    'archive',
+    'archive_description',
+    'archive_icon',
+    'archive_name',
+    'archive_older',
+    'archive_url',
+    'archive_web_base_url',
+    'awsbucket',
+    'awsbucket_index_only',
+    'binary_transparency_remote',
+    'cachedir',
+    'char_limits',
+    'current_version_name_source',
+    'git_mirror_size_limit',
+    'github_token',
+    'gpghome',
+    'gpgkey',
+    'gradle',
+    'identity_file',
+    'install_list',
+    'java_paths',
+    'keyaliases',
+    'keydname',
+    'keypass',
+    'keystore',
+    'keystorepass',
+    'lint_licenses',
+    'local_copy_dir',
+    'mirrors',
+    'mvn3',
+    'ndk_paths',
+    'path_to_custom_rclone_config',
+    'rclone_config',
+    'repo',
+    'repo_description',
+    'repo_icon',
+    'repo_key_sha256',
+    'repo_keyalias',
+    'repo_maxage',
+    'repo_name',
+    'repo_pubkey',
+    'repo_url',
+    'repo_web_base_url',
+    'scanner_signature_sources',
+    'sdk_path',
+    'servergitmirrors',
+    'serverwebroot',
+    'smartcardoptions',
+    'sync_from_local_copy_dir',
+    'uninstall_list',
+    'virustotal_apikey',
+)
+
 locale_pattern = re.compile(r"[a-z]{2,3}(-([A-Z][a-zA-Z]+|\d+|[a-z]+))*")

 versioncode_check_pattern = re.compile(r"(\\d|\[(0-9|\\d)_?(a-fA-F)?])[+]")

@@ -297,7 +369,7 @@ def check_update_check_data_int(app):  # noqa: D403
     # codeex can be empty as well
     if codeex and not versioncode_check_pattern.search(codeex):
         yield _(
-            f'UpdateCheckData must match the version code as integer (\\d or [0-9]): {codeex}'
+            f'UpdateCheckData must match the versionCode as integer (\\d or [0-9]): {codeex}'
         )


@@ -505,11 +577,20 @@ def check_format(app):


 def check_license_tag(app):
-    """Ensure all license tags contain only valid/approved values."""
-    if config['lint_licenses'] is None:
-        return
-    if app.License not in config['lint_licenses']:
-        if config['lint_licenses'] == APPROVED_LICENSES:
+    """Ensure all license tags contain only valid/approved values.
+
+    It is possible to disable license checking by setting a null or empty value,
+    e.g. `lint_licenses: ` or `lint_licenses: []`
+
+    """
+    if 'lint_licenses' in config:
+        lint_licenses = config['lint_licenses']
+        if lint_licenses is None:
+            return
+    else:
+        lint_licenses = APPROVED_LICENSES
+    if app.License not in lint_licenses:
+        if lint_licenses == APPROVED_LICENSES:
             yield _(
                 'Unexpected license tag "{}"! Only use FSF or OSI '
                 'approved tags from https://spdx.org/license-list'

@@ -530,10 +611,20 @@ def check_extlib_dir(apps):

     used = set()
     for app in apps:
-        for build in app.get('Builds', []):
-            for path in build.extlibs:
-                path = Path(path)
-                if path not in extlib_files:
-                    yield _(
-                        "{appid}: Unknown extlib {path} in build '{versionName}'"
-                    ).format(appid=app.id, path=path, versionName=build.versionName)
+        if app.Disabled:
+            continue
+        archive_policy = common.calculate_archive_policy(
+            app, common.config['archive_older']
+        )
+        builds = [build for build in app.Builds if not build.disable]
+
+        for i in range(len(builds)):
+            build = builds[i]
+            for path in build.extlibs:
+                path = Path(path)
+                if path not in extlib_files:
+                    # Don't show error on archived versions
+                    if i >= len(builds) - archive_policy:
+                        yield _(
+                            "{appid}: Unknown extlib {path} in build '{versionName}'"
+                        ).format(appid=app.id, path=path, versionName=build.versionName)

@@ -761,7 +852,7 @@ def lint_config(arg):
         passed = False

     with path.open() as fp:
-        data = ruamel.yaml.YAML(typ='safe').load(fp)
+        data = yaml.load(fp)
     common.config_type_check(arg, data)

     if path.name == mirrors_name:

@@ -785,6 +876,41 @@ def lint_config(arg):
                 msg += ' '
                 msg += _('Did you mean {code}?').format(code=', '.join(sorted(m)))
             print(msg)
+    elif path.name == config_name and path.parent.name != 'config':
+        valid_keys = set(tuple(common.default_config) + bool_keys + check_config_keys)
+        for key in ignore_config_keys:
+            if key in valid_keys:
+                valid_keys.remove(key)
+        for key in data:
+            if key not in valid_keys:
+                passed = False
+                msg = _("ERROR: {key} not a valid key!").format(key=key)
+                m = difflib.get_close_matches(key.lower(), valid_keys, 2, 0.5)
+                if m:
+                    msg += ' '
+                    msg += _('Did you mean {code}?').format(code=', '.join(sorted(m)))
+                print(msg)
+                continue
+
+            if key in bool_keys:
+                t = bool
+            else:
+                t = type(common.default_config.get(key, ""))
+
+            show_error = False
+            if t is str:
+                if type(data[key]) not in (str, list, dict):
+                    passed = False
+                    show_error = True
+            elif type(data[key]) != t:
+                passed = False
+                show_error = True
+            if show_error:
+                print(
+                    _("ERROR: {key}'s value should be of type {t}!").format(
+                        key=key, t=t.__name__
+                    )
+                )
     elif path.name in (config_name, categories_name, antifeatures_name):
         for key in data:
             if path.name == config_name and key not in ('archive', 'repo'):

@@ -858,7 +984,7 @@ def main():
     paths = list()
     for arg in options.appid:
         if (
-            arg == 'config.yml'
+            arg == common.CONFIG_FILE
             or Path(arg).parent.name == 'config'
             or Path(arg).parent.parent.name == 'config'  # localized
         ):

@@ -882,9 +1008,7 @@ def main():


 def lint_metadata(options):
-    # Get all apps...
-    allapps = metadata.read_metadata(options.appid)
-    apps = common.read_app_args(options.appid, allapps, False)
+    apps = common.read_app_args(options.appid)

     anywarns = check_for_unsupported_metadata_files()

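The new branch in `lint_config()` above checks every top-level config key against a known set and type-checks its value, suggesting near-misses via `difflib`. A hedged sketch of that check; the defaults table here is illustrative, not the real `common.default_config`:

```python
# Sketch of the config key/type check added to lint_config(): unknown keys get
# a "did you mean" hint, known keys are type-checked against a defaults table.
import difflib

default_config = {'repo_url': '', 'archive_older': 3, 'sdk_path': ''}  # illustrative
bool_keys = ('scan_binary', 'per_app_repos')


def lint_config_data(data):
    passed = True
    valid_keys = set(default_config) | set(bool_keys)
    for key, value in data.items():
        if key not in valid_keys:
            passed = False
            msg = f"ERROR: {key} not a valid key!"
            m = difflib.get_close_matches(key.lower(), valid_keys, 2, 0.5)
            if m:
                msg += ' Did you mean {}?'.format(', '.join(sorted(m)))
            print(msg)
            continue
        t = bool if key in bool_keys else type(default_config.get(key, ""))
        # str-typed keys may legitimately hold a list or dict of variants
        ok = isinstance(value, (str, list, dict)) if t is str else isinstance(value, t)
        if not ok:
            passed = False
            print(f"ERROR: {key}'s value should be of type {t.__name__}!")
    return passed


print(lint_config_data({'sdk_path': 123, 'scan_binry': True}))
```

Allowing `str`-typed keys to also hold a list or dict mirrors the check in the diff, since several settings accept either a single value or a collection of variants.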
@@ -115,7 +115,7 @@ __license__ = "Python License 2.0"
 # been done in the StrictVersion class above. This works great as long
 # as everyone can go along with bondage and discipline. Hopefully a
 # (large) subset of Python module programmers will agree that the
-# particular flavour of bondage and discipline provided by StrictVersion
+# particular flavor of bondage and discipline provided by StrictVersion
 # provides enough benefit to be worth using, and will submit their
 # version numbering scheme to its domination. The free-thinking
 # anarchists in the lot will never give in, though, and something needs

@@ -18,18 +18,18 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import git
-from pathlib import Path
-import math
-import platform
-import os
-import re
 import logging
-import ruamel.yaml
+import math
+import os
+import platform
+import re
 from collections import OrderedDict
+from pathlib import Path
+
+import ruamel.yaml

-from . import common
-from . import _
+from . import _, common
+from ._yaml import yaml
 from .exception import MetaDataException

 srclibs = None

@@ -67,7 +67,6 @@ yaml_app_field_order = [
     'Translation',
     'Changelog',
     'Donate',
-    'FlattrID',
     'Liberapay',
     'OpenCollective',
     'Bitcoin',

@@ -128,7 +127,6 @@ class App(dict):
         self.Translation = ''
         self.Changelog = ''
         self.Donate = None
-        self.FlattrID = None
         self.Liberapay = None
         self.OpenCollective = None
         self.Bitcoin = None

@@ -408,10 +406,6 @@ class FieldValidator:

 # Generic value types
 valuetypes = {
-    FieldValidator("Flattr ID",
-                   r'^[0-9a-z]+$',
-                   ['FlattrID']),
-
     FieldValidator("Liberapay",
                    VALID_USERNAME_REGEX,
                    ['Liberapay']),

@@ -478,7 +472,6 @@ def parse_yaml_srclib(metadatapath):

     with metadatapath.open("r", encoding="utf-8") as f:
         try:
-            yaml = ruamel.yaml.YAML(typ='safe')
             data = yaml.load(f)
             if type(data) is not dict:
                 if platform.system() == 'Windows':

@@ -544,14 +537,14 @@ def read_srclibs():

     srclibs = {}

-    srcdir = Path('srclibs')
-    srcdir.mkdir(exist_ok=True)
+    srclibs_dir = Path('srclibs')
+    srclibs_dir.mkdir(exist_ok=True)

-    for metadatapath in sorted(srcdir.glob('*.yml')):
+    for metadatapath in sorted(srclibs_dir.glob('*.yml')):
         srclibs[metadatapath.stem] = parse_yaml_srclib(metadatapath)


-def read_metadata(appids={}, sort_by_time=False):
+def read_metadata(appid_to_vercode={}, sort_by_time=False):
     """Return a list of App instances sorted newest first.

     This reads all of the metadata files in a 'data' repository, then

@@ -559,7 +552,7 @@ def read_metadata(appids={}, sort_by_time=False):
     sorted based on creation time, newest first. Most of the time,
     the newer files are the most interesting.

-    appids is a dict with appids a keys and versionCodes as values.
+    appid_to_vercode is a dict with appids a keys and versionCodes as values.

     """
     # Always read the srclibs before the apps, since they can use a srlib as

@@ -571,9 +564,8 @@ def read_metadata(appids={}, sort_by_time=False):
     for basedir in ('metadata', 'tmp'):
         Path(basedir).mkdir(exist_ok=True)

-    if appids:
-        vercodes = common.read_pkg_args(appids)
-        metadatafiles = common.get_metadata_files(vercodes)
+    if appid_to_vercode:
+        metadatafiles = common.get_metadata_files(appid_to_vercode)
     else:
         metadatafiles = list(Path('metadata').glob('*.yml')) + list(
             Path('.').glob('.fdroid.yml')

@@ -665,14 +657,12 @@ def parse_metadata(metadatapath):
         build_dir = common.get_build_dir(app)
         metadata_in_repo = build_dir / '.fdroid.yml'
         if metadata_in_repo.is_file():
-            try:
-                commit_id = common.get_head_commit_id(git.Repo(build_dir))
+            commit_id = common.get_head_commit_id(build_dir)
+            if commit_id is not None:
                 logging.debug(
                     _('Including metadata from %s@%s') % (metadata_in_repo, commit_id)
                 )
-            # See https://github.com/PyCQA/pylint/issues/2856 .
-            # pylint: disable-next=no-member
-            except git.exc.InvalidGitRepositoryError:
+            else:
                 logging.debug(
                     _('Including metadata from {path}').format(path=metadata_in_repo)
                 )

@@ -716,8 +706,7 @@ def parse_yaml_metadata(mf):

     """
     try:
-        yaml = ruamel.yaml.YAML(typ='safe')
-        yamldata = yaml.load(mf)
+        yamldata = common.yaml.load(mf)
     except ruamel.yaml.YAMLError as e:
         _warn_or_exception(
             _("could not parse '{path}'").format(path=mf.name)

@@ -1256,19 +1245,24 @@ def _app_to_yaml(app):
 def write_yaml(mf, app):
     """Write metadata in yaml format.

+    This requires the 'rt' round trip dumper to maintain order and needs
+    custom indent settings, so it needs to instantiate its own YAML
+    instance. Therefore, this function deliberately avoids using any of
+    the common YAML parser setups.
+
     Parameters
     ----------
     mf
         active file discriptor for writing
     app
-        app metadata to written to the yaml file
+        app metadata to written to the YAML file

     """
     _del_duplicated_NoSourceSince(app)
     yaml_app = _app_to_yaml(app)
-    yaml = ruamel.yaml.YAML()
-    yaml.indent(mapping=2, sequence=4, offset=2)
-    yaml.dump(yaml_app, stream=mf)
+    yamlmf = ruamel.yaml.YAML(typ='rt')
+    yamlmf.indent(mapping=2, sequence=4, offset=2)
+    yamlmf.dump(yaml_app, stream=mf)


 def write_metadata(metadatapath, app):

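`write_yaml()` above now builds its own round-trip `ruamel.yaml.YAML` instance instead of the shared parser, because only the 'rt' mode keeps field order and accepts the custom indent settings. A minimal sketch of those exact settings, with made-up sample data:

```python
# Sketch of the round-trip dump settings used by write_yaml(): 'rt' mode keeps
# ordering, and indent(mapping=2, sequence=4, offset=2) yields two-space
# mappings and dash-indented lists.  The sample data is invented.
import sys

import ruamel.yaml

app = {
    'Categories': ['Internet', 'Security'],
    'License': 'GPL-3.0-only',
    'Builds': [{'versionName': '1.0', 'versionCode': 1}],
}

yamlmf = ruamel.yaml.YAML(typ='rt')  # round-trip mode preserves key order
yamlmf.indent(mapping=2, sequence=4, offset=2)
yamlmf.dump(app, stream=sys.stdout)
```

Running it prints the mapping with two-space indents and sequences indented by four with the dash offset by two, which appears to be the layout the metadata files expect.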
@@ -7,13 +7,10 @@ import posixpath
 import socket
 import subprocess
 import sys
-from argparse import ArgumentParser
 import urllib.parse
+from argparse import ArgumentParser

-from . import _
-from . import common
-from . import index
-from . import update
+from . import _, common, index, update


 def _run_wget(path, urls, verbose=False):

@@ -91,7 +88,7 @@ def main():
     )
     options = common.parse_args(parser)

-    common.set_console_logging(options.verbose)
+    common.set_console_logging(options.verbose, options.color)

     if options.all:
         options.archive = True

@@ -133,6 +130,7 @@ def main():
         import io
         import json
         import zipfile
+
         from . import net

         url = _append_to_url_path(section, 'index-v1.jar')

@@ -17,13 +17,21 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+import copy
 import logging
 import os
-import requests
+import random
+import tempfile
 import time
 import urllib
+
+import requests
+import urllib3
 from requests.adapters import HTTPAdapter, Retry
-from requests.exceptions import ChunkedEncodingError
+
+from . import _, common
+
+logger = logging.getLogger(__name__)

 HEADERS = {'User-Agent': 'F-Droid'}

@@ -64,14 +72,88 @@ def download_file(url, local_filename=None, dldir='tmp', retries=3, backoff_fact
                     f.write(chunk)
                     f.flush()
             return local_filename
-        except ChunkedEncodingError as err:
+        except requests.exceptions.ChunkedEncodingError as err:
             if i == retries:
                 raise err
-            logging.warning('Download interrupted, retrying...')
+            logger.warning('Download interrupted, retrying...')
             time.sleep(backoff_factor * 2**i)
     raise ValueError("retries must be >= 0")


+def download_using_mirrors(mirrors, local_filename=None):
+    """Try to download the file from any working mirror.
+
+    Download the file that all URLs in the mirrors list point to,
+    trying all the tricks, starting with the most private methods
+    first. The list of mirrors is converted into a list of mirror
+    configurations to try, in order that the should be attempted.
+
+    This builds mirror_configs_to_try using all possible combos to
+    try. If a mirror is marked with worksWithoutSNI: True, then this
+    logic will try it twice: first without SNI, then again with SNI.
+
+    """
+    mirrors = common.parse_list_of_dicts(mirrors)
+    mirror_configs_to_try = []
+    for mirror in mirrors:
+        mirror_configs_to_try.append(mirror)
+        if mirror.get('worksWithoutSNI'):
+            m = copy.deepcopy(mirror)
+            del m['worksWithoutSNI']
+            mirror_configs_to_try.append(m)
+
+    if not local_filename:
+        for mirror in mirrors:
+            filename = urllib.parse.urlparse(mirror['url']).path.split('/')[-1]
+            if filename:
+                break
+        if filename:
+            local_filename = os.path.join(common.get_cachedir(), filename)
+        else:
+            local_filename = tempfile.mkstemp(prefix='fdroid-')
+
+    timeouts = (2, 10, 100)
+    last_exception = None
+    for timeout in timeouts:
+        for mirror in mirror_configs_to_try:
+            last_exception = None
+            urllib3.util.ssl_.HAS_SNI = not mirror.get('worksWithoutSNI')
+            try:
+                # the stream=True parameter keeps memory usage low
+                r = requests.get(
+                    mirror['url'],
+                    stream=True,
+                    allow_redirects=False,
+                    headers=HEADERS,
+                    # add jitter to the timeout to be less predictable
+                    timeout=timeout + random.randint(0, timeout),  # nosec B311
+                )
+                if r.status_code != 200:
+                    raise requests.exceptions.HTTPError(r.status_code, response=r)
+                with open(local_filename, 'wb') as f:
+                    for chunk in r.iter_content(chunk_size=1024):
+                        if chunk:  # filter out keep-alive new chunks
+                            f.write(chunk)
+                            f.flush()
+                return local_filename
+            except (
+                ConnectionError,
+                requests.exceptions.ChunkedEncodingError,
+                requests.exceptions.ConnectionError,
+                requests.exceptions.ContentDecodingError,
+                requests.exceptions.HTTPError,
+                requests.exceptions.SSLError,
+                requests.exceptions.StreamConsumedError,
+                requests.exceptions.Timeout,
+                requests.exceptions.UnrewindableBodyError,
+            ) as e:
+                last_exception = e
+                logger.debug(_('Retrying failed download: %s') % str(e))
+    # if it hasn't succeeded by now, then give up and raise last exception
+    if last_exception:
+        raise last_exception
+
+
 def http_get(url, etag=None, timeout=600):
     """Download the content from the given URL by making a GET request.

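The new `download_using_mirrors()` above expands its mirror list so that an entry marked `worksWithoutSNI: True` is tried twice, first with SNI disabled and then as a plain mirror, and the whole expanded list is retried with growing timeouts. A small sketch of just that expansion logic, with example URLs:

```python
# Sketch of the mirror-config expansion in download_using_mirrors().
import copy

mirrors = [
    {'url': 'https://repo1.maven.org/maven2/F-Droid.apk', 'worksWithoutSNI': True},
    {'url': 'https://example.org/fdroid/repo/F-Droid.apk'},  # example URL
]

mirror_configs_to_try = []
for mirror in mirrors:
    mirror_configs_to_try.append(mirror)
    if mirror.get('worksWithoutSNI'):
        m = copy.deepcopy(mirror)
        del m['worksWithoutSNI']        # second attempt behaves like a normal mirror
        mirror_configs_to_try.append(m)

for timeout in (2, 10, 100):            # escalate timeouts across full passes
    for config in mirror_configs_to_try:
        print(timeout, config)
```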
@@ -19,24 +19,25 @@

 import base64
 import datetime
-import git
 import hashlib
+import inspect
 import logging
 import os
-import paramiko
 import platform
 import shutil
 import ssl
 import subprocess
 import sys
 import tempfile
-import yaml
-from urllib.parse import urlparse
 from argparse import ArgumentParser
 from typing import Optional
+from urllib.parse import urlparse
+
+import git
+import paramiko
+import yaml

-from . import _
-from . import common
+from . import _, common
 from .exception import VCSException

 # hard coded defaults for Android ~/.android/debug.keystore files

@@ -176,7 +177,9 @@ def _ssh_key_from_debug_keystore(keystore: Optional[str] = None) -> str:
     return ssh_private_key_file


-def get_repo_base_url(clone_url: str, repo_git_base: str, force_type: Optional[str] = None) -> str:
+def get_repo_base_url(
+    clone_url: str, repo_git_base: str, force_type: Optional[str] = None
+) -> str:
     """Generate the base URL for the F-Droid repository.

     Parameters

@@ -203,6 +206,41 @@ def get_repo_base_url(clone_url: str, repo_git_base: str, force_type: Optional[s
         sys.exit(1)


+def clone_git_repo(clone_url, git_mirror_path):
+    """Clone a git repo into the given path, failing if a password is required.
+
+    If GitPython's safe mode is present, this will use that. Otherwise,
+    this includes a very limited version of the safe mode just to ensure
+    this won't hang on password prompts.
+
+    https://github.com/gitpython-developers/GitPython/pull/2029
+
+    """
+    logging.debug(_('cloning {url}').format(url=clone_url))
+    try:
+        sig = inspect.signature(git.Repo.clone_from)
+        if 'safe' in sig.parameters:
+            git.Repo.clone_from(clone_url, git_mirror_path, safe=True)
+        else:
+            git.Repo.clone_from(
+                clone_url,
+                git_mirror_path,
+                env={
+                    'GIT_ASKPASS': '/bin/true',
+                    'SSH_ASKPASS': '/bin/true',
+                    'GIT_USERNAME': 'u',
+                    'GIT_PASSWORD': 'p',
+                    'GIT_HTTP_USERNAME': 'u',
+                    'GIT_HTTP_PASSWORD': 'p',
+                    'GIT_SSH': '/bin/false',  # for git < 2.3
+                    'GIT_TERMINAL_PROMPT': '0',
+                },
+            )
+    except git.exc.GitCommandError as e:
+        logging.warning(_('WARNING: only public git repos are supported!'))
+        raise VCSException(f'git clone {clone_url} failed:', str(e)) from e
+
+
 def main():
     """Deploy to F-Droid repository or generate SSH private key from keystore.

@@ -288,19 +326,27 @@ def main():
         # we are in GitLab CI
         repo_git_base = os.getenv('CI_PROJECT_PATH') + NIGHTLY
         clone_url = os.getenv('CI_PROJECT_URL') + NIGHTLY
-        repo_base = get_repo_base_url(clone_url, repo_git_base, force_type='gitlab.com')
+        repo_base = get_repo_base_url(
+            clone_url, repo_git_base, force_type='gitlab.com'
+        )
         servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
-        deploy_key_url = clone_url + '/-/settings/repository#js-deploy-keys-settings'
+        deploy_key_url = (
+            f'{clone_url}/-/settings/repository#js-deploy-keys-settings'
+        )
         git_user_name = os.getenv('GITLAB_USER_NAME')
         git_user_email = os.getenv('GITLAB_USER_EMAIL')
     elif 'TRAVIS_REPO_SLUG' in os.environ:
         # we are in Travis CI
         repo_git_base = os.getenv('TRAVIS_REPO_SLUG') + NIGHTLY
         clone_url = 'https://github.com/' + repo_git_base
-        repo_base = get_repo_base_url(clone_url, repo_git_base, force_type='github.com')
+        repo_base = get_repo_base_url(
+            clone_url, repo_git_base, force_type='github.com'
+        )
         servergitmirror = 'git@github.com:' + repo_git_base
-        deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys'
-                          + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys')
+        deploy_key_url = (
+            f'https://github.com/{repo_git_base}/settings/keys'
+            + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
+        )
         git_user_name = repo_git_base
         git_user_email = os.getenv('USER') + '@' + platform.node()
     elif (

@@ -309,23 +355,35 @@ def main():
         and 'CIRCLE_PROJECT_REPONAME' in os.environ
     ):
         # we are in Circle CI
-        repo_git_base = (os.getenv('CIRCLE_PROJECT_USERNAME')
-                         + '/' + os.getenv('CIRCLE_PROJECT_REPONAME') + NIGHTLY)
+        repo_git_base = (
+            os.getenv('CIRCLE_PROJECT_USERNAME')
+            + '/'
+            + os.getenv('CIRCLE_PROJECT_REPONAME')
+            + NIGHTLY
+        )
         clone_url = os.getenv('CIRCLE_REPOSITORY_URL') + NIGHTLY
-        repo_base = get_repo_base_url(clone_url, repo_git_base, force_type='github.com')
+        repo_base = get_repo_base_url(
+            clone_url, repo_git_base, force_type='github.com'
+        )
         servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
-        deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys'
-                          + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys')
+        deploy_key_url = (
+            f'https://github.com/{repo_git_base}/settings/keys'
+            + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
+        )
         git_user_name = os.getenv('CIRCLE_USERNAME')
         git_user_email = git_user_name + '@' + platform.node()
     elif 'GITHUB_ACTIONS' in os.environ:
         # we are in Github actions
-        repo_git_base = (os.getenv('GITHUB_REPOSITORY') + NIGHTLY)
-        clone_url = (os.getenv('GITHUB_SERVER_URL') + '/' + repo_git_base)
-        repo_base = get_repo_base_url(clone_url, repo_git_base, force_type='github.com')
+        repo_git_base = os.getenv('GITHUB_REPOSITORY') + NIGHTLY
+        clone_url = os.getenv('GITHUB_SERVER_URL') + '/' + repo_git_base
+        repo_base = get_repo_base_url(
+            clone_url, repo_git_base, force_type='github.com'
+        )
         servergitmirror = 'git@' + urlparse(clone_url).netloc + ':' + repo_git_base
-        deploy_key_url = ('https://github.com/' + repo_git_base + '/settings/keys'
-                          + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys')
+        deploy_key_url = (
+            f'https://github.com/{repo_git_base}/settings/keys'
+            + '\nhttps://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys'
+        )
         git_user_name = os.getenv('GITHUB_ACTOR')
         git_user_email = git_user_name + '@' + platform.node()
     else:

@@ -337,16 +395,12 @@ def main():
         git_mirror_fdroiddir = os.path.join(git_mirror_path, 'fdroid')
         git_mirror_repodir = os.path.join(git_mirror_fdroiddir, 'repo')
         git_mirror_metadatadir = os.path.join(git_mirror_fdroiddir, 'metadata')
-        git_mirror_statsdir = os.path.join(git_mirror_fdroiddir, 'stats')
         if not os.path.isdir(git_mirror_repodir):
-            logging.debug(_('cloning {url}').format(url=clone_url))
-            vcs = common.getvcs('git', clone_url, git_mirror_path)
-            p = vcs.git(['clone', '--', vcs.remote, str(vcs.local)])
-            if p.returncode != 0:
-                print('WARNING: only public git repos are supported!')
-                raise VCSException('git clone %s failed:' % clone_url, p.output)
+            clone_git_repo(clone_url, git_mirror_path)
         if not os.path.isdir(git_mirror_repodir):
             os.makedirs(git_mirror_repodir, mode=0o755)
+        if os.path.exists('LICENSE'):
+            shutil.copy2('LICENSE', git_mirror_path)

         mirror_git_repo = git.Repo.init(git_mirror_path)
         writer = mirror_git_repo.config_writer()

@@ -365,9 +419,13 @@ You can use it with the [F-Droid](https://f-droid.org/) Android app.

 [](https://fdroid.link/#{repo_url})

-Last updated: {date}'''.format(repo_git_base=repo_git_base,
+Last updated: {date}'''.format(
+            repo_git_base=repo_git_base,
             repo_url=repo_url,
-            date=datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
+            date=datetime.datetime.now(datetime.timezone.utc).strftime(
+                '%Y-%m-%d %H:%M:%S UTC'
+            ),
+        )
         with open(readme_path, 'w') as fp:
             fp.write(readme)
         mirror_git_repo.git.add(all=True)

@@ -381,8 +439,6 @@ Last updated: {date}'''.format(repo_git_base=repo_git_base,
             common.local_rsync(options, [git_mirror_repodir + '/'], 'repo/')
         if os.path.isdir(git_mirror_metadatadir):
             common.local_rsync(options, [git_mirror_metadatadir + '/'], 'metadata/')
-        if os.path.isdir(git_mirror_statsdir):
-            common.local_rsync(options, [git_mirror_statsdir + '/'], 'stats/')

         ssh_private_key_file = _ssh_key_from_debug_keystore()
         # this is needed for GitPython to find the SSH key

@@ -422,19 +478,17 @@ Last updated: {date}'''.format(repo_git_base=repo_git_base,
             'keypass': PASSWORD,
             'keydname': DISTINGUISHED_NAME,
             'make_current_version_link': False,
-            'update_stats': True,
         }
-        with open('config.yml', 'w') as fp:
+        with open(common.CONFIG_FILE, 'w', encoding='utf-8') as fp:
             yaml.dump(config, fp, default_flow_style=False)
-        os.chmod('config.yml', 0o600)
+        os.chmod(common.CONFIG_FILE, 0o600)
         config = common.read_config()
         common.assert_config_keystore(config)

         logging.debug(
-            _('Run over {cibase} to find -debug.apk. and skip repo_basedir {repo_basedir}').format(
-                cibase=cibase,
-                repo_basedir=repo_basedir
-            )
+            _(
+                'Run over {cibase} to find -debug.apk. and skip repo_basedir {repo_basedir}'
+            ).format(cibase=cibase, repo_basedir=repo_basedir)
         )

         for root, dirs, files in os.walk(cibase):

@@ -496,7 +550,6 @@ Last updated: {date}'''.format(repo_git_base=repo_git_base,
         common.local_rsync(
             options, [repo_basedir + '/metadata/'], git_mirror_metadatadir + '/'
         )
-        common.local_rsync(options, [repo_basedir + '/stats/'], git_mirror_statsdir + '/')
         mirror_git_repo.git.add(all=True)
         mirror_git_repo.index.commit("update app metadata")

@@ -523,10 +576,16 @@ Last updated: {date}'''.format(repo_git_base=repo_git_base,
         if not os.path.exists(androiddir):
             os.mkdir(androiddir)
             logging.info(_('created {path}').format(path=androiddir))
-        logging.error(_('{path} does not exist! Create it by running:').format(path=options.keystore)
-                      + '\n  keytool -genkey -v -keystore ' + options.keystore + ' -storepass android \\'
-                      + '\n  -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\'
-                      + '\n  -dname "CN=Android Debug,O=Android,C=US"')
+        logging.error(
+            _('{path} does not exist! Create it by running:').format(
+                path=options.keystore
+            )
+            + '\n  keytool -genkey -v -keystore '
+            + options.keystore
+            + ' -storepass android \\'
+            + '\n  -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 \\'
+            + '\n  -dname "CN=Android Debug,O=Android,C=US"'
+        )
         sys.exit(1)
     ssh_dir = os.path.join(os.getenv('HOME'), '.ssh')
     privkey = _ssh_key_from_debug_keystore(options.keystore)

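The new `clone_git_repo()` above probes the installed GitPython for its `safe` parameter and only falls back to prompt-disabling environment variables when that parameter is missing. A hedged sketch of that feature-detection idiom; whether your GitPython accepts `safe=True` depends on its version:

```python
# Sketch of the signature-based feature detection used by clone_git_repo().
import inspect

import git


def clone_without_prompts(clone_url, path):
    """Clone a public repo while refusing to hang on password prompts."""
    sig = inspect.signature(git.Repo.clone_from)
    if 'safe' in sig.parameters:            # newer GitPython with safe mode
        return git.Repo.clone_from(clone_url, path, safe=True)
    return git.Repo.clone_from(             # minimal fallback, no prompts
        clone_url,
        path,
        env={'GIT_ASKPASS': '/bin/true', 'GIT_TERMINAL_PROMPT': '0'},
    )
```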
@@ -28,23 +28,21 @@ mostly reports success by moving an APK from unsigned/ to repo/

 """

-import sys
+import glob
+import hashlib
+import json
+import logging
 import os
 import re
 import shutil
-import glob
-import hashlib
-from argparse import ArgumentParser
-from collections import OrderedDict
-import logging
-from gettext import ngettext
-import json
+import sys
 import time
 import zipfile
+from argparse import ArgumentParser
+from collections import OrderedDict
+from gettext import ngettext

-from . import _
-from . import common
-from . import metadata
+from . import _, common, metadata
 from .common import FDroidPopen
 from .exception import BuildException, FDroidException

|
@@ -139,13 +137,13 @@ def sign_sig_key_fingerprint_list(jar_file):
         raise FDroidException("Failed to sign '{}'!".format(jar_file))


-def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
+def store_publish_signer_fingerprints(appids, indent=None):
     """Store list of all signing-key fingerprints for given appids to HD.

     This list will later on be needed by fdroid update.
     """
-    if not os.path.exists('stats'):
-        os.makedirs('stats')
+    if not os.path.exists('repo'):
+        os.makedirs('repo')
     data = OrderedDict()
     fps = read_fingerprints_from_keystore()
     for appid in sorted(appids):
@@ -153,9 +151,12 @@ def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
         if alias in fps:
             data[appid] = {'signer': fps[key_alias(appid)]}

-    jar_file = os.path.join('stats', 'publishsigkeys.jar')
+    jar_file = os.path.join('repo', 'signer-index.jar')
+    output = json.dumps(data, indent=indent)
     with zipfile.ZipFile(jar_file, 'w', zipfile.ZIP_DEFLATED) as jar:
-        jar.writestr('publishsigkeys.json', json.dumps(data, indent=indent))
+        jar.writestr('signer-index.json', output)
+    with open(os.path.join('repo', 'signer-index.json'), 'w') as fp:
+        fp.write(output)
     sign_sig_key_fingerprint_list(jar_file)

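A hypothetical consumer-side check of the files written above, assuming a repo/ directory already populated by a publish run from this branch; it reads signer-index.json both from the signed JAR and from the plain copy and confirms the two agree:

import json
import zipfile

# Read the per-app signer fingerprints from the signed JAR and from the
# plain JSON copy written next to it, and verify they match.
with zipfile.ZipFile('repo/signer-index.jar') as jar:
    from_jar = json.loads(jar.read('signer-index.json'))

with open('repo/signer-index.json') as fp:
    from_json = json.load(fp)

assert from_jar == from_json
for appid, entry in from_jar.items():
    print(appid, entry['signer'])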
|
@ -344,7 +345,6 @@ def main():
|
||||||
glob.glob(os.path.join(unsigned_dir, '*.apk'))
|
glob.glob(os.path.join(unsigned_dir, '*.apk'))
|
||||||
+ glob.glob(os.path.join(unsigned_dir, '*.zip'))
|
+ glob.glob(os.path.join(unsigned_dir, '*.zip'))
|
||||||
):
|
):
|
||||||
|
|
||||||
appid, vercode = common.publishednameinfo(apkfile)
|
appid, vercode = common.publishednameinfo(apkfile)
|
||||||
apkfilename = os.path.basename(apkfile)
|
apkfilename = os.path.basename(apkfile)
|
||||||
if vercodes and appid not in vercodes:
|
if vercodes and appid not in vercodes:
|
||||||
|
@ -368,7 +368,6 @@ def main():
|
||||||
if b.get("versionCode") == vercode:
|
if b.get("versionCode") == vercode:
|
||||||
build = b
|
build = b
|
||||||
if app.Binaries or (build and build.binary):
|
if app.Binaries or (build and build.binary):
|
||||||
|
|
||||||
# It's an app where we build from source, and verify the apk
|
# It's an app where we build from source, and verify the apk
|
||||||
# contents against a developer's binary, and then publish their
|
# contents against a developer's binary, and then publish their
|
||||||
# version if everything checks out.
|
# version if everything checks out.
|
||||||
|
@ -379,15 +378,21 @@ def main():
|
||||||
srcapk = srcapk.replace(unsigned_dir, binaries_dir)
|
srcapk = srcapk.replace(unsigned_dir, binaries_dir)
|
||||||
|
|
||||||
if not os.path.isfile(srcapk):
|
if not os.path.isfile(srcapk):
|
||||||
logging.error("...reference binary missing - publish skipped: "
|
logging.error(
|
||||||
"'{refpath}'".format(refpath=srcapk))
|
"...reference binary missing - publish skipped: '{refpath}'".format(
|
||||||
|
refpath=srcapk
|
||||||
|
)
|
||||||
|
)
|
||||||
failed += 1
|
failed += 1
|
||||||
else:
|
else:
|
||||||
# Compare our unsigned one with the downloaded one...
|
# Compare our unsigned one with the downloaded one...
|
||||||
compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
|
compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
|
||||||
if compare_result:
|
if compare_result:
|
||||||
logging.error("...verification failed - publish skipped : "
|
logging.error(
|
||||||
"{result}".format(result=compare_result))
|
"...verification failed - publish skipped : {result}".format(
|
||||||
|
result=compare_result
|
||||||
|
)
|
||||||
|
)
|
||||||
failed += 1
|
failed += 1
|
||||||
else:
|
else:
|
||||||
# Success! So move the downloaded file to the repo, and remove
|
# Success! So move the downloaded file to the repo, and remove
|
||||||
|
@ -399,7 +404,6 @@ def main():
|
||||||
logging.info('Published ' + apkfilename)
|
logging.info('Published ' + apkfilename)
|
||||||
|
|
||||||
elif apkfile.endswith('.zip'):
|
elif apkfile.endswith('.zip'):
|
||||||
|
|
||||||
# OTA ZIPs built by fdroid do not need to be signed by jarsigner,
|
# OTA ZIPs built by fdroid do not need to be signed by jarsigner,
|
||||||
# just to be moved into place in the repo
|
# just to be moved into place in the repo
|
||||||
shutil.move(apkfile, os.path.join(output_dir, apkfilename))
|
shutil.move(apkfile, os.path.join(output_dir, apkfilename))
|
||||||
|
@ -407,7 +411,6 @@ def main():
|
||||||
logging.info('Published ' + apkfilename)
|
logging.info('Published ' + apkfilename)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
|
||||||
# It's a 'normal' app, i.e. we sign and publish it...
|
# It's a 'normal' app, i.e. we sign and publish it...
|
||||||
skipsigning = False
|
skipsigning = False
|
||||||
|
|
||||||
|
@ -446,10 +449,11 @@ def main():
|
||||||
|
|
||||||
signed_apk_path = os.path.join(output_dir, apkfilename)
|
signed_apk_path = os.path.join(output_dir, apkfilename)
|
||||||
if os.path.exists(signed_apk_path):
|
if os.path.exists(signed_apk_path):
|
||||||
raise BuildException("Refusing to sign '{0}' file exists in both "
|
raise BuildException(
|
||||||
"{1} and {2} folder.".format(apkfilename,
|
_(
|
||||||
unsigned_dir,
|
"Refusing to sign '{path}', file exists in both {dir1} and {dir2} folder."
|
||||||
output_dir))
|
).format(path=apkfilename, dir1=unsigned_dir, dir2=output_dir)
|
||||||
|
)
|
||||||
|
|
||||||
# Sign the application...
|
# Sign the application...
|
||||||
common.sign_apk(apkfile, signed_apk_path, keyalias)
|
common.sign_apk(apkfile, signed_apk_path, keyalias)
|
||||||
|
@ -460,7 +464,7 @@ def main():
|
||||||
publish_source_tarball(apkfilename, unsigned_dir, output_dir)
|
publish_source_tarball(apkfilename, unsigned_dir, output_dir)
|
||||||
logging.info('Published ' + apkfilename)
|
logging.info('Published ' + apkfilename)
|
||||||
|
|
||||||
store_stats_fdroid_signing_key_fingerprints(allapps.keys())
|
store_publish_signer_fingerprints(allapps.keys())
|
||||||
status_update_json(generated_keys, signed_apks)
|
status_update_json(generated_keys, signed_apks)
|
||||||
logging.info('published list signing-key fingerprints')
|
logging.info('published list signing-key fingerprints')
|
||||||
|
|
||||||
|
|
|
@ -17,8 +17,8 @@
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from . import common
|
|
||||||
from . import metadata
|
from . import common, metadata
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
|
@ -17,16 +17,14 @@
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
import logging
|
|
||||||
import io
|
import io
|
||||||
import tempfile
|
import logging
|
||||||
import shutil
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
from argparse import ArgumentParser
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from . import _
|
from . import _, common, metadata
|
||||||
from . import common
|
|
||||||
from . import metadata
|
|
||||||
|
|
||||||
config = None
|
config = None
|
||||||
|
|
||||||
|
@ -81,9 +79,7 @@ def main():
|
||||||
|
|
||||||
config = common.read_config()
|
config = common.read_config()
|
||||||
|
|
||||||
# Get all apps...
|
apps = common.read_app_args(options.appid)
|
||||||
allapps = metadata.read_metadata(options.appid)
|
|
||||||
apps = common.read_app_args(options.appid, allapps, False)
|
|
||||||
|
|
||||||
for appid, app in apps.items():
|
for appid, app in apps.items():
|
||||||
path = Path(app.metadatapath)
|
path = Path(app.metadatapath)
|
||||||
|
|
|
@@ -16,7 +16,6 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import imghdr
 import itertools
 import json
 import logging
@@ -29,10 +28,21 @@ import urllib.request
 import zipfile
 from argparse import ArgumentParser
 from dataclasses import dataclass, field, fields
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from enum import IntEnum
 from pathlib import Path
 from tempfile import TemporaryDirectory
+from typing import Union
+
+try:
+    import magic
+except ImportError:
+    import puremagic as magic
+
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib

 from . import _, common, metadata, scanner
 from .exception import BuildException, ConfigurationException, VCSException
@@ -53,18 +63,198 @@ MAVEN_URL_REGEX = re.compile(
 DEPFILE = {
     "Cargo.toml": ["Cargo.lock"],
     "pubspec.yaml": ["pubspec.lock"],
-    "package.json": ["package-lock.json", "yarn.lock", "pnpm-lock.yaml"],
+    "package.json": ["package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lock"],
 }

 SCANNER_CACHE_VERSION = 1

+DEFAULT_CATALOG_PREFIX_REGEX = re.compile(
+    r'''defaultLibrariesExtensionName\s*=\s*['"](\w+)['"]'''
+)
+GRADLE_CATALOG_FILE_REGEX = re.compile(
+    r'''(?:create\()?['"]?(\w+)['"]?\)?\s*\{[^}]*from\(files\(['"]([^"]+)['"]\)\)'''
+)
+VERSION_CATALOG_REGEX = re.compile(r'versionCatalogs\s*\{')
+
+APK_SIGNING_BLOCK_IDS = {
+    # https://source.android.com/docs/security/features/apksigning/v2#apk-signing-block
+    # 0x7109871a: 'APK signature scheme v2',
+    # https://source.android.com/docs/security/features/apksigning/v3#apk-signing-block
+    # 0xf05368c0: 'APK signature scheme v3',
+    # See "Security metadata in early 2018"
+    # https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html
+    0x2146444E: 'Google Play Signature aka "Frosting"',
+    # 0x42726577: 'Verity padding',
+    # 0x6DFF800D: 'Source stamp V2 X509 cert',
+    # JSON with some metadata, used by Chinese company Meituan
+    0x71777777: 'Meituan payload',
+    # Dependencies metadata generated by Gradle and encrypted by Google Play.
+    # '...The data is compressed, encrypted by a Google Play signing key...'
+    # https://developer.android.com/studio/releases/gradle-plugin#dependency-metadata
+    0x504B4453: 'Dependency metadata',
+}
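A sketch of how these IDs get used, mirroring the scan_binary() change further down in this diff; it assumes an fdroidserver checkout of this branch (for common.get_androguard_APK and the dict above) and a local test.apk as a placeholder:

from fdroidserver import common
from fdroidserver.scanner import APK_SIGNING_BLOCK_IDS  # added on this branch

def list_extra_signing_blocks(apkfile):
    # Parse the APK Signing Block the same way scan_binary() does below,
    # then report any block whose ID is one of the known extras above.
    apk = common.get_androguard_APK(str(apkfile))
    apk.parse_v2_v3_signature()  # populates apk._v2_blocks
    return [
        APK_SIGNING_BLOCK_IDS[block_id]
        for block_id in apk._v2_blocks
        if block_id in APK_SIGNING_BLOCK_IDS
    ]

print(list_extra_signing_blocks('test.apk'))  # 'test.apk' is a placeholder path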
|
|
||||||
|
|
||||||
class ExitCode(IntEnum):
|
class ExitCode(IntEnum):
|
||||||
NONFREE_CODE = 1
|
NONFREE_CODE = 1
|
||||||
|
|
||||||
|
|
||||||
|
class GradleVersionCatalog:
|
||||||
|
"""Parse catalog from libs.versions.toml.
|
||||||
|
|
||||||
|
https://docs.gradle.org/current/userguide/platforms.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, catalog):
|
||||||
|
self.version = {
|
||||||
|
alias: self.get_version(version)
|
||||||
|
for alias, version in catalog.get("versions", {}).items()
|
||||||
|
}
|
||||||
|
self.libraries = {
|
||||||
|
self.alias_to_accessor(alias): self.library_to_coordinate(library)
|
||||||
|
for alias, library in catalog.get("libraries", {}).items()
|
||||||
|
}
|
||||||
|
self.plugins = {
|
||||||
|
self.alias_to_accessor(alias): self.plugin_to_coordinate(plugin)
|
||||||
|
for alias, plugin in catalog.get("plugins", {}).items()
|
||||||
|
}
|
||||||
|
self.bundles = {
|
||||||
|
self.alias_to_accessor(alias): self.bundle_to_coordinates(bundle)
|
||||||
|
for alias, bundle in catalog.get("bundles", {}).items()
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def alias_to_accessor(alias: str) -> str:
|
||||||
|
"""Covert alias to accessor.
|
||||||
|
|
||||||
|
https://docs.gradle.org/current/userguide/platforms.html#sub:mapping-aliases-to-accessors
|
||||||
|
Alias is used to define a lib in catalog. Accessor is used to access it.
|
||||||
|
"""
|
||||||
|
return alias.replace("-", ".").replace("_", ".")
|
||||||
|
|
||||||
|
def get_version(self, version: Union[dict, str]) -> str:
|
||||||
|
if isinstance(version, str):
|
||||||
|
return version
|
||||||
|
ref = version.get("ref")
|
||||||
|
if ref:
|
||||||
|
return self.version.get(ref, "")
|
||||||
|
return (
|
||||||
|
version.get("prefer", "")
|
||||||
|
or version.get("require", "")
|
||||||
|
or version.get("strictly", "")
|
||||||
|
)
|
||||||
|
|
||||||
|
def library_to_coordinate(self, library: Union[dict, str]) -> str:
|
||||||
|
"""Generate the Gradle dependency coordinate from catalog."""
|
||||||
|
if isinstance(library, str):
|
||||||
|
return library
|
||||||
|
module = library.get("module")
|
||||||
|
if not module:
|
||||||
|
group = library.get("group")
|
||||||
|
name = library.get("name")
|
||||||
|
if group and name:
|
||||||
|
module = f"{group}:{name}"
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
version = library.get("version")
|
||||||
|
if version:
|
||||||
|
return f"{module}:{self.get_version(version)}"
|
||||||
|
else:
|
||||||
|
return module
|
||||||
|
|
||||||
|
def plugin_to_coordinate(self, plugin: Union[dict, str]) -> str:
|
||||||
|
"""Generate the Gradle plugin coordinate from catalog."""
|
||||||
|
if isinstance(plugin, str):
|
||||||
|
return plugin
|
||||||
|
id = plugin.get("id")
|
||||||
|
if not id:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
version = plugin.get("version")
|
||||||
|
if version:
|
||||||
|
return f"{id}:{self.get_version(version)}"
|
||||||
|
else:
|
||||||
|
return id
|
||||||
|
|
||||||
|
def bundle_to_coordinates(self, bundle: list[str]) -> list[str]:
|
||||||
|
"""Generate the Gradle dependency bundle coordinate from catalog."""
|
||||||
|
coordinates = []
|
||||||
|
for alias in bundle:
|
||||||
|
library = self.libraries.get(self.alias_to_accessor(alias))
|
||||||
|
if library:
|
||||||
|
coordinates.append(library)
|
||||||
|
return coordinates
|
||||||
|
|
||||||
|
def get_coordinate(self, accessor: str) -> list[str]:
|
||||||
|
"""Get the Gradle coordinate from the catalog with an accessor."""
|
||||||
|
if accessor.startswith("plugins."):
|
||||||
|
return [
|
||||||
|
self.plugins.get(accessor[8:].removesuffix(".asLibraryDependency"), "")
|
||||||
|
]
|
||||||
|
if accessor.startswith("bundles."):
|
||||||
|
return self.bundles.get(accessor[8:], [])
|
||||||
|
return [self.libraries.get(accessor, "")]
|
||||||
|
|
||||||
|
|
||||||
|
def get_catalogs(root: str) -> dict[str, GradleVersionCatalog]:
|
||||||
|
"""Get all Gradle dependency catalogs from settings.gradle[.kts].
|
||||||
|
|
||||||
|
Returns a dict with the extension and the corresponding catalog.
|
||||||
|
The extension is used as the prefix of the accessor to access libs in the catalog.
|
||||||
|
"""
|
||||||
|
root = Path(root)
|
||||||
|
catalogs = {}
|
||||||
|
default_prefix = "libs"
|
||||||
|
catalog_files_m = []
|
||||||
|
|
||||||
|
def find_block_end(s, start):
|
||||||
|
pat = re.compile("[{}]")
|
||||||
|
depth = 1
|
||||||
|
for m in pat.finditer(s, pos=start):
|
||||||
|
if m.group() == "{":
|
||||||
|
depth += 1
|
||||||
|
else:
|
||||||
|
depth -= 1
|
||||||
|
if depth == 0:
|
||||||
|
return m.start()
|
||||||
|
else:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
groovy_file = root / "settings.gradle"
|
||||||
|
kotlin_file = root / "settings.gradle.kts"
|
||||||
|
if groovy_file.is_file():
|
||||||
|
gradle_file = groovy_file
|
||||||
|
elif kotlin_file.is_file():
|
||||||
|
gradle_file = kotlin_file
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
s = gradle_file.read_text(encoding="utf-8")
|
||||||
|
version_catalogs_m = VERSION_CATALOG_REGEX.search(s)
|
||||||
|
if version_catalogs_m:
|
||||||
|
start = version_catalogs_m.end()
|
||||||
|
end = find_block_end(s, start)
|
||||||
|
catalog_files_m = GRADLE_CATALOG_FILE_REGEX.finditer(s, start, end)
|
||||||
|
|
||||||
|
m_default = DEFAULT_CATALOG_PREFIX_REGEX.search(s)
|
||||||
|
if m_default:
|
||||||
|
default_prefix = m_default.group(1)
|
||||||
|
default_catalog_file = Path(root) / "gradle/libs.versions.toml"
|
||||||
|
if default_catalog_file.is_file():
|
||||||
|
with default_catalog_file.open("rb") as f:
|
||||||
|
catalogs[default_prefix] = GradleVersionCatalog(tomllib.load(f))
|
||||||
|
for m in catalog_files_m:
|
||||||
|
catalog_file = Path(root) / m.group(2).replace("$rootDir/", "")
|
||||||
|
if catalog_file.is_file():
|
||||||
|
with catalog_file.open("rb") as f:
|
||||||
|
catalogs[m.group(1)] = GradleVersionCatalog(tomllib.load(f))
|
||||||
|
return catalogs
|
||||||
|
|
||||||
|
|
||||||
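A minimal, self-contained use of the GradleVersionCatalog class added above, assuming it is importable from fdroidserver.scanner on this branch; the TOML snippet is invented for illustration and stands in for a real gradle/libs.versions.toml:

import tomllib  # Python 3.11+; the diff falls back to tomli on older versions

from fdroidserver.scanner import GradleVersionCatalog  # added on this branch

# A tiny inline catalog instead of a real gradle/libs.versions.toml file.
TOML = '''
[versions]
okhttp = "4.12.0"

[libraries]
okhttp-core = { module = "com.squareup.okhttp3:okhttp", version.ref = "okhttp" }

[bundles]
network = ["okhttp-core"]
'''

catalog = GradleVersionCatalog(tomllib.loads(TOML))
print(catalog.get_coordinate("okhttp.core"))      # ['com.squareup.okhttp3:okhttp:4.12.0']
print(catalog.get_coordinate("bundles.network"))  # same coordinate, resolved via the bundle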
 def get_gradle_compile_commands(build):
     compileCommands = [
+        'alias',
         'api',
         'apk',
         'classpath',
@@ -76,19 +266,30 @@ def get_gradle_compile_commands(build):
         'runtimeOnly',
     ]
     buildTypes = ['', 'release']
-    flavors = ['']
     if build.gradle and build.gradle != ['yes']:
-        flavors += build.gradle
-    commands = [
-        ''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)
-    ]
-    return [re.compile(r'\s*' + c, re.IGNORECASE) for c in commands]
+        flavors = common.calculate_gradle_flavor_combination(build.gradle)
+    else:
+        flavors = ['']
+
+    return [''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)]
+
+
+def get_gradle_compile_commands_without_catalog(build):
+    return [
+        re.compile(rf'''\s*{c}.*\s*\(?['"].*['"]''', re.IGNORECASE)
+        for c in get_gradle_compile_commands(build)
+    ]
+
+
+def get_gradle_compile_commands_with_catalog(build, prefix):
+    return [
+        re.compile(rf'\s*{c}.*\s*\(?{prefix}\.([a-z0-9.]+)', re.IGNORECASE)
+        for c in get_gradle_compile_commands(build)
+    ]
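To make the with/without-catalog split concrete, a standalone check of the two regex shapes against typical Gradle dependency lines; the command list is trimmed to two entries and 'libs' stands in for the catalog prefix:

import re

# The same regex shapes as above, built over a reduced command list.
commands = ['implementation', 'api']
without_catalog = [
    re.compile(rf'''\s*{c}.*\s*\(?['"].*['"]''', re.IGNORECASE) for c in commands
]
with_catalog = [
    re.compile(rf'\s*{c}.*\s*\(?libs\.([a-z0-9.]+)', re.IGNORECASE) for c in commands
]

plain_line = '    implementation "com.squareup.okhttp3:okhttp:4.12.0"'
catalog_line = '    implementation(libs.okhttp.core)'

print(any(r.match(plain_line) for r in without_catalog))   # True: literal coordinate in quotes
match = next(m for m in (r.match(catalog_line) for r in with_catalog) if m)
print(match.group(1))  # 'okhttp.core', the accessor to resolve via the version catalog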
 def get_embedded_classes(apkfile, depth=0):
-    """
-    Get the list of Java classes embedded into all DEX files.
+    """Get the list of Java classes embedded into all DEX files.

     :return: set of Java classes names as string
     """
@@ -134,7 +335,7 @@ def get_embedded_classes(apkfile, depth=0):

 def _datetime_now():
     """Get datetime.now(), using this funciton allows mocking it for testing."""
-    return datetime.utcnow()
+    return datetime.now(timezone.utc)

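The switch away from datetime.utcnow() matters because utcnow() returns a naive datetime (and is deprecated since Python 3.12), while datetime.now(timezone.utc) is timezone-aware; a quick illustration:

from datetime import datetime, timezone

naive = datetime.utcnow()             # deprecated since Python 3.12; tzinfo is None
aware = datetime.now(timezone.utc)    # what this diff switches to; tzinfo is UTC

print(naive.tzinfo)       # None -> comparing it with an aware datetime raises TypeError
print(aware.tzinfo)       # UTC
print(aware.timestamp())  # unambiguous epoch seconds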
def _scanner_cachedir():
|
def _scanner_cachedir():
|
||||||
|
@ -183,8 +384,7 @@ class SignatureDataController:
|
||||||
raise SignatureDataVersionMismatchException()
|
raise SignatureDataVersionMismatchException()
|
||||||
|
|
||||||
def check_last_updated(self):
|
def check_last_updated(self):
|
||||||
"""
|
"""Check if the last_updated value is ok and raise an exception if expired or inaccessible.
|
||||||
Check if the last_updated value is ok and raise an exception if expired or inaccessible.
|
|
||||||
|
|
||||||
:raises SignatureDataMalformedException: when timestamp value is
|
:raises SignatureDataMalformedException: when timestamp value is
|
||||||
inaccessible or not parse-able
|
inaccessible or not parse-able
|
||||||
|
@ -194,7 +394,7 @@ class SignatureDataController:
|
||||||
last_updated = self.data.get("last_updated", None)
|
last_updated = self.data.get("last_updated", None)
|
||||||
if last_updated:
|
if last_updated:
|
||||||
try:
|
try:
|
||||||
last_updated = datetime.fromtimestamp(last_updated)
|
last_updated = datetime.fromtimestamp(last_updated, timezone.utc)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise SignatureDataMalformedException() from e
|
raise SignatureDataMalformedException() from e
|
||||||
except TypeError as e:
|
except TypeError as e:
|
||||||
|
@ -259,8 +459,7 @@ class SignatureDataController:
|
||||||
logging.debug("write '{}' to cache".format(self.filename))
|
logging.debug("write '{}' to cache".format(self.filename))
|
||||||
|
|
||||||
def verify_data(self):
|
def verify_data(self):
|
||||||
"""
|
"""Clean and validate `self.data`.
|
||||||
Clean and validate `self.data`.
|
|
||||||
|
|
||||||
Right now this function does just a basic key sanitation.
|
Right now this function does just a basic key sanitation.
|
||||||
"""
|
"""
|
||||||
|
@ -354,6 +553,8 @@ class SUSSDataController(SignatureDataController):
|
||||||
|
|
||||||
|
|
||||||
class ScannerTool:
|
class ScannerTool:
|
||||||
|
refresh_allowed = True
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
# we could add support for loading additional signature source
|
# we could add support for loading additional signature source
|
||||||
# definitions from config.yml here
|
# definitions from config.yml here
|
||||||
|
@ -362,7 +563,9 @@ class ScannerTool:
|
||||||
|
|
||||||
options = common.get_options()
|
options = common.get_options()
|
||||||
options_refresh_scanner = (
|
options_refresh_scanner = (
|
||||||
hasattr(options, "refresh_scanner") and options.refresh_scanner
|
hasattr(options, "refresh_scanner")
|
||||||
|
and options.refresh_scanner
|
||||||
|
and ScannerTool.refresh_allowed
|
||||||
)
|
)
|
||||||
if options_refresh_scanner or common.get_config().get('refresh_scanner'):
|
if options_refresh_scanner or common.get_config().get('refresh_scanner'):
|
||||||
self.refresh()
|
self.refresh()
|
||||||
|
@ -447,8 +650,7 @@ _SCANNER_TOOL = None
|
||||||
|
|
||||||
|
|
||||||
def _get_tool():
|
def _get_tool():
|
||||||
"""
|
"""Lazy loading function for getting a ScannerTool instance.
|
||||||
Lazy loading function for getting a ScannerTool instance.
|
|
||||||
|
|
||||||
ScannerTool initialization need to access `common.config` values. Those are only available after initialization through `common.read_config()`. So this factory assumes config was called at an erlier point in time.
|
ScannerTool initialization need to access `common.config` values. Those are only available after initialization through `common.read_config()`. So this factory assumes config was called at an erlier point in time.
|
||||||
"""
|
"""
|
||||||
|
@ -471,6 +673,17 @@ def scan_binary(apkfile):
|
||||||
if regexp.match(classname):
|
if regexp.match(classname):
|
||||||
logging.debug("Problem: found class '%s'" % classname)
|
logging.debug("Problem: found class '%s'" % classname)
|
||||||
problems += 1
|
problems += 1
|
||||||
|
|
||||||
|
logging.info(_('Scanning APK for extra signing blocks.'))
|
||||||
|
a = common.get_androguard_APK(str(apkfile))
|
||||||
|
a.parse_v2_v3_signature()
|
||||||
|
for b in a._v2_blocks:
|
||||||
|
if b in APK_SIGNING_BLOCK_IDS:
|
||||||
|
logging.debug(
|
||||||
|
f"Problem: found extra signing block '{APK_SIGNING_BLOCK_IDS[b]}'"
|
||||||
|
)
|
||||||
|
problems += 1
|
||||||
|
|
||||||
if warnings:
|
if warnings:
|
||||||
logging.warning(
|
logging.warning(
|
||||||
_("Found {count} warnings in {filename}").format(
|
_("Found {count} warnings in {filename}").format(
|
||||||
|
@ -492,6 +705,7 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
the number of fatal problems encountered.
|
the number of fatal problems encountered.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
count = 0
|
count = 0
|
||||||
|
|
||||||
|
@ -507,11 +721,11 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
re.compile(r'^https://' + re.escape(repo) + r'/*')
|
re.compile(r'^https://' + re.escape(repo) + r'/*')
|
||||||
for repo in [
|
for repo in [
|
||||||
'repo1.maven.org/maven2', # mavenCentral()
|
'repo1.maven.org/maven2', # mavenCentral()
|
||||||
'jcenter.bintray.com', # jcenter()
|
|
||||||
'jitpack.io',
|
'jitpack.io',
|
||||||
'www.jitpack.io',
|
'www.jitpack.io',
|
||||||
'repo.maven.apache.org/maven2',
|
'repo.maven.apache.org/maven2',
|
||||||
'oss.jfrog.org/artifactory/oss-snapshot-local',
|
'oss.jfrog.org/artifactory/oss-snapshot-local',
|
||||||
|
'central.sonatype.com/repository/maven-snapshots',
|
||||||
'oss.sonatype.org/content/repositories/snapshots',
|
'oss.sonatype.org/content/repositories/snapshots',
|
||||||
'oss.sonatype.org/content/repositories/releases',
|
'oss.sonatype.org/content/repositories/releases',
|
||||||
'oss.sonatype.org/content/groups/public',
|
'oss.sonatype.org/content/groups/public',
|
||||||
|
@ -533,8 +747,12 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
|
|
||||||
scanignore = common.getpaths_map(build_dir, build.scanignore)
|
scanignore, scanignore_not_found_paths = common.getpaths_map(
|
||||||
scandelete = common.getpaths_map(build_dir, build.scandelete)
|
build_dir, build.scanignore
|
||||||
|
)
|
||||||
|
scandelete, scandelete_not_found_paths = common.getpaths_map(
|
||||||
|
build_dir, build.scandelete
|
||||||
|
)
|
||||||
|
|
||||||
scanignore_worked = set()
|
scanignore_worked = set()
|
||||||
scandelete_worked = set()
|
scandelete_worked = set()
|
||||||
|
@ -568,6 +786,7 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
0 as we explicitly ignore the file, so don't count an error
|
0 as we explicitly ignore the file, so don't count an error
|
||||||
|
|
||||||
"""
|
"""
|
||||||
msg = 'Ignoring %s at %s' % (what, path_in_build_dir)
|
msg = 'Ignoring %s at %s' % (what, path_in_build_dir)
|
||||||
logging.info(msg)
|
logging.info(msg)
|
||||||
|
@ -590,6 +809,7 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
0 as we deleted the offending file
|
0 as we deleted the offending file
|
||||||
|
|
||||||
"""
|
"""
|
||||||
msg = 'Removing %s at %s' % (what, path_in_build_dir)
|
msg = 'Removing %s at %s' % (what, path_in_build_dir)
|
||||||
logging.info(msg)
|
logging.info(msg)
|
||||||
|
@ -617,6 +837,7 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
0, as warnings don't count as errors
|
0, as warnings don't count as errors
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if toignore(path_in_build_dir):
|
if toignore(path_in_build_dir):
|
||||||
return 0
|
return 0
|
||||||
|
@ -642,6 +863,7 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
0 if the problem was ignored/deleted/is only a warning, 1 otherwise
|
0 if the problem was ignored/deleted/is only a warning, 1 otherwise
|
||||||
|
|
||||||
"""
|
"""
|
||||||
options = common.get_options()
|
options = common.get_options()
|
||||||
if toignore(path_in_build_dir):
|
if toignore(path_in_build_dir):
|
||||||
|
@@ -679,8 +901,12 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
     ]

     def is_image_file(path):
-        if imghdr.what(path) is not None:
+        try:
+            mimetype = magic.from_file(path, mime=True)
+            if mimetype and mimetype.startswith('image/'):
                 return True
+        except Exception as e:
+            logging.info(e)

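Context for this change: imghdr was deprecated in Python 3.11 and removed in 3.13, and it only knew a fixed set of formats; MIME sniffing via python-magic (or the pure-Python puremagic fallback) covers anything libmagic recognizes. A standalone version of the same check, assuming one of the two packages is installed:

try:
    import magic               # python-magic (libmagic bindings)
except ImportError:
    import puremagic as magic  # pure-Python fallback with the same from_file(..., mime=True) call

def looks_like_image(path):
    # True if libmagic/puremagic reports any image/* MIME type for the file.
    try:
        mimetype = magic.from_file(path, mime=True)
    except Exception:
        return False
    return bool(mimetype) and mimetype.startswith('image/')

print(looks_like_image('icon.png'))  # placeholder path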
def safe_path(path_in_build_dir):
|
def safe_path(path_in_build_dir):
|
||||||
for sp in safe_paths:
|
for sp in safe_paths:
|
||||||
|
@ -688,11 +914,21 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
gradle_compile_commands = get_gradle_compile_commands(build)
|
def is_used_by_gradle_without_catalog(line):
|
||||||
|
return any(
|
||||||
|
command.match(line)
|
||||||
|
for command in get_gradle_compile_commands_without_catalog(build)
|
||||||
|
)
|
||||||
|
|
||||||
def is_used_by_gradle(line):
|
def is_used_by_gradle_with_catalog(line, prefix):
|
||||||
return any(command.match(line) for command in gradle_compile_commands)
|
for m in (
|
||||||
|
command.match(line)
|
||||||
|
for command in get_gradle_compile_commands_with_catalog(build, prefix)
|
||||||
|
):
|
||||||
|
if m:
|
||||||
|
return m
|
||||||
|
|
||||||
|
all_catalogs = {}
|
||||||
# Iterate through all files in the source code
|
# Iterate through all files in the source code
|
||||||
for root, dirs, files in os.walk(build_dir, topdown=True):
|
for root, dirs, files in os.walk(build_dir, topdown=True):
|
||||||
# It's topdown, so checking the basename is enough
|
# It's topdown, so checking the basename is enough
|
||||||
|
@ -700,6 +936,9 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
if ignoredir in dirs:
|
if ignoredir in dirs:
|
||||||
dirs.remove(ignoredir)
|
dirs.remove(ignoredir)
|
||||||
|
|
||||||
|
if "settings.gradle" in files or "settings.gradle.kts" in files:
|
||||||
|
all_catalogs[str(root)] = get_catalogs(root)
|
||||||
|
|
||||||
for curfile in files:
|
for curfile in files:
|
||||||
if curfile in ['.DS_Store']:
|
if curfile in ['.DS_Store']:
|
||||||
continue
|
continue
|
||||||
|
@ -765,6 +1004,13 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
count += handleproblem(
|
count += handleproblem(
|
||||||
_('Java JAR file'), path_in_build_dir, filepath, json_per_build
|
_('Java JAR file'), path_in_build_dir, filepath, json_per_build
|
||||||
)
|
)
|
||||||
|
elif curfile.endswith('.wasm'):
|
||||||
|
count += handleproblem(
|
||||||
|
_('WebAssembly binary file'),
|
||||||
|
path_in_build_dir,
|
||||||
|
filepath,
|
||||||
|
json_per_build,
|
||||||
|
)
|
||||||
|
|
||||||
elif curfile.endswith('.java'):
|
elif curfile.endswith('.java'):
|
||||||
if not os.path.isfile(filepath):
|
if not os.path.isfile(filepath):
|
||||||
|
@ -781,15 +1027,36 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
break
|
break
|
||||||
|
|
||||||
elif curfile.endswith('.gradle') or curfile.endswith('.gradle.kts'):
|
elif curfile.endswith('.gradle') or curfile.endswith('.gradle.kts'):
|
||||||
|
catalog_path = str(build_dir)
|
||||||
|
# Find the longest path of dir that the curfile is in
|
||||||
|
for p in all_catalogs:
|
||||||
|
if os.path.commonpath([root, p]) == p:
|
||||||
|
catalog_path = p
|
||||||
|
catalogs = all_catalogs.get(catalog_path, {})
|
||||||
|
|
||||||
if not os.path.isfile(filepath):
|
if not os.path.isfile(filepath):
|
||||||
continue
|
continue
|
||||||
with open(filepath, 'r', errors='replace') as f:
|
with open(filepath, 'r', errors='replace') as f:
|
||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
for i, line in enumerate(lines):
|
for i, line in enumerate(lines):
|
||||||
if is_used_by_gradle(line):
|
if is_used_by_gradle_without_catalog(line):
|
||||||
for name in suspects_found(line):
|
for name in suspects_found(line):
|
||||||
count += handleproblem(
|
count += handleproblem(
|
||||||
"usual suspect '%s'" % (name),
|
f"usual suspect '{name}'",
|
||||||
|
path_in_build_dir,
|
||||||
|
filepath,
|
||||||
|
json_per_build,
|
||||||
|
)
|
||||||
|
for prefix, catalog in catalogs.items():
|
||||||
|
m = is_used_by_gradle_with_catalog(line, prefix)
|
||||||
|
if not m:
|
||||||
|
continue
|
||||||
|
accessor = m[1]
|
||||||
|
coordinates = catalog.get_coordinate(accessor)
|
||||||
|
for coordinate in coordinates:
|
||||||
|
for name in suspects_found(coordinate):
|
||||||
|
count += handleproblem(
|
||||||
|
f"usual suspect '{prefix}.{accessor}: {name}'",
|
||||||
path_in_build_dir,
|
path_in_build_dir,
|
||||||
filepath,
|
filepath,
|
||||||
json_per_build,
|
json_per_build,
|
||||||
|
@ -843,11 +1110,19 @@ def scan_source(build_dir, build=metadata.Build(), json_per_build=None):
|
||||||
json_per_build,
|
json_per_build,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
for p in scanignore_not_found_paths:
|
||||||
|
logging.error(_("Non-exist scanignore path: %s") % p)
|
||||||
|
count += 1
|
||||||
|
|
||||||
for p in scanignore:
|
for p in scanignore:
|
||||||
if p not in scanignore_worked:
|
if p not in scanignore_worked:
|
||||||
logging.error(_('Unused scanignore path: %s') % p)
|
logging.error(_('Unused scanignore path: %s') % p)
|
||||||
count += 1
|
count += 1
|
||||||
|
|
||||||
|
for p in scandelete_not_found_paths:
|
||||||
|
logging.error(_("Non-exist scandelete path: %s") % p)
|
||||||
|
count += 1
|
||||||
|
|
||||||
for p in scandelete:
|
for p in scandelete:
|
||||||
if p not in scandelete_worked:
|
if p not in scandelete_worked:
|
||||||
logging.error(_('Unused scandelete path: %s') % p)
|
logging.error(_('Unused scandelete path: %s') % p)
|
||||||
|
@ -928,10 +1203,7 @@ def main():
|
||||||
_get_tool()
|
_get_tool()
|
||||||
return
|
return
|
||||||
|
|
||||||
# Read all app and srclib metadata
|
apps = common.read_app_args(appids, allow_version_codes=True)
|
||||||
|
|
||||||
allapps = metadata.read_metadata()
|
|
||||||
apps = common.read_app_args(appids, allapps, True)
|
|
||||||
|
|
||||||
build_dir = 'build'
|
build_dir = 'build'
|
||||||
if not os.path.isdir(build_dir):
|
if not os.path.isdir(build_dir):
|
||||||
|
@ -1032,7 +1304,7 @@ def main():
|
||||||
logging.info(_("Finished"))
|
logging.info(_("Finished"))
|
||||||
if options.json:
|
if options.json:
|
||||||
print(json.dumps(json_output))
|
print(json.dumps(json_output))
|
||||||
else:
|
elif probcount or options.verbose:
|
||||||
print(_("%d problems found") % probcount)
|
print(_("%d problems found") % probcount)
|
||||||
|
|
||||||
|
|
||||||
|
@ -1955,13 +2227,13 @@ SUSS_DEFAULT = r'''{
|
||||||
"https://www.android.com/gms/"
|
"https://www.android.com/gms/"
|
||||||
],
|
],
|
||||||
"gradle_signatures": [
|
"gradle_signatures": [
|
||||||
"com.google.android.gms(?!.oss-licenses-plugin)",
|
"com.google.android.gms(?!.(oss-licenses-plugin|strict-version-matcher-plugin))",
|
||||||
"com.google.android.ump",
|
"com.google.android.ump",
|
||||||
"androidx.core:core-google-shortcuts",
|
"androidx.core:core-google-shortcuts",
|
||||||
"androidx.credentials:credentials",
|
|
||||||
"androidx.credentials:credentials-play-services-auth",
|
"androidx.credentials:credentials-play-services-auth",
|
||||||
"androidx.media3:media3-cast",
|
"androidx.media3:media3-cast",
|
||||||
"androidx.media3:media3-datasource-cronet",
|
"androidx.media3:media3-datasource-cronet",
|
||||||
|
"androidx.wear:wear-remote-interactions",
|
||||||
"androidx.work:work-gcm",
|
"androidx.work:work-gcm",
|
||||||
"com.google.android.exoplayer:extension-cast",
|
"com.google.android.exoplayer:extension-cast",
|
||||||
"com.google.android.exoplayer:extension-cronet",
|
"com.google.android.exoplayer:extension-cronet",
|
||||||
|
@ -1969,9 +2241,21 @@ SUSS_DEFAULT = r'''{
|
||||||
"com.cloudinary:cloudinary-android.*:2\\.[12]\\.",
|
"com.cloudinary:cloudinary-android.*:2\\.[12]\\.",
|
||||||
"com.pierfrancescosoffritti.androidyoutubeplayer:chromecast-sender",
|
"com.pierfrancescosoffritti.androidyoutubeplayer:chromecast-sender",
|
||||||
"com.yayandroid:locationmanager",
|
"com.yayandroid:locationmanager",
|
||||||
"play-services",
|
"(?<!org.microg.gms:)play-services",
|
||||||
"xyz.belvi.mobilevision:barcodescanner",
|
"xyz.belvi.mobilevision:barcodescanner",
|
||||||
"com.google.api-client:google-api-client-android"
|
"com.google.api-client:google-api-client-android",
|
||||||
|
"com.google.maps.android:android-maps-utils",
|
||||||
|
"com.github.budowski:android-maps-utils",
|
||||||
|
"com.microsoft.identity:common",
|
||||||
|
"com.microsoft.identity.client:msal"
|
||||||
|
],
|
||||||
|
"gradle_signatures_negative_examples": [
|
||||||
|
"com.google.android.gms.oss-licenses-plugin",
|
||||||
|
"com.google.android.gms.strict-version-matcher-plugin"
|
||||||
|
],
|
||||||
|
"gradle_signatures_positive_examples": [
|
||||||
|
"com.google.android.gms:play-services-base",
|
||||||
|
"com.google.android.gms:play-services-oss-licenses"
|
||||||
],
|
],
|
||||||
"license": "NonFree",
|
"license": "NonFree",
|
||||||
"name": "Google Mobile Services"
|
"name": "Google Mobile Services"
|
||||||
|
@ -1989,7 +2273,10 @@ SUSS_DEFAULT = r'''{
|
||||||
},
|
},
|
||||||
"com.google.android.libraries": {
|
"com.google.android.libraries": {
|
||||||
"code_signatures": [
|
"code_signatures": [
|
||||||
"com/google/android/libraries"
|
"com/google/android/libraries(?!/accessibility)"
|
||||||
|
],
|
||||||
|
"code_signatures_negative_examples": [
|
||||||
|
"com/google/android/libraries/accessibility"
|
||||||
],
|
],
|
||||||
"gradle_signatures": [
|
"gradle_signatures": [
|
||||||
"com.google.android.libraries(?!.mapsplatform.secrets-gradle-plugin)"
|
"com.google.android.libraries(?!.mapsplatform.secrets-gradle-plugin)"
|
||||||
|
@ -2110,7 +2397,8 @@ SUSS_DEFAULT = r'''{
|
||||||
"https://developers.google.com/ml-kit"
|
"https://developers.google.com/ml-kit"
|
||||||
],
|
],
|
||||||
"gradle_signatures": [
|
"gradle_signatures": [
|
||||||
"com.google.mlkit"
|
"com.google.mlkit",
|
||||||
|
"io.github.g00fy2.quickie"
|
||||||
],
|
],
|
||||||
"license": "NonFree",
|
"license": "NonFree",
|
||||||
"name": "ML Kit"
|
"name": "ML Kit"
|
||||||
|
@ -2335,6 +2623,19 @@ SUSS_DEFAULT = r'''{
|
||||||
"description": "processes real-time data at the intersection of commerce and culture, providing useful, actionable insights for brands and publishers. See <a rel='nofollow' href='https://www.crunchbase.com/organization/quantcast'>Crunchbase</a> and <a href='https://reports.exodus-privacy.eu.org/en/trackers/133/'>Exodus Privacy</a>.",
|
"description": "processes real-time data at the intersection of commerce and culture, providing useful, actionable insights for brands and publishers. See <a rel='nofollow' href='https://www.crunchbase.com/organization/quantcast'>Crunchbase</a> and <a href='https://reports.exodus-privacy.eu.org/en/trackers/133/'>Exodus Privacy</a>.",
|
||||||
"license": "NonFree"
|
"license": "NonFree"
|
||||||
},
|
},
|
||||||
|
"com.revenuecat.purchases": {
|
||||||
|
"code_signatures": [
|
||||||
|
"com/revenuecat/purchases"
|
||||||
|
],
|
||||||
|
"documentation": [
|
||||||
|
"https://www.revenuecat.com/"
|
||||||
|
],
|
||||||
|
"gradle_signatures": [
|
||||||
|
"com.revenuecat.purchases"
|
||||||
|
],
|
||||||
|
"license": "NonFree",
|
||||||
|
"name": "RevenueCat Purchases"
|
||||||
|
},
|
||||||
"com.samsung.accessory": {
|
"com.samsung.accessory": {
|
||||||
"anti_features": [
|
"anti_features": [
|
||||||
"NonFreeComp"
|
"NonFreeComp"
|
||||||
|
@ -2468,6 +2769,9 @@ SUSS_DEFAULT = r'''{
|
||||||
"com.wei.android.lib:fingerprintidentify",
|
"com.wei.android.lib:fingerprintidentify",
|
||||||
"com.github.uccmawei:FingerprintIdentify"
|
"com.github.uccmawei:FingerprintIdentify"
|
||||||
],
|
],
|
||||||
|
"gradle_signatures_positive_examples": [
|
||||||
|
"implementation \"com.github.uccmawei:fingerprintidentify:${safeExtGet(\"fingerprintidentify\", \"1.2.6\")}\""
|
||||||
|
],
|
||||||
"license": "NonFree",
|
"license": "NonFree",
|
||||||
"name": "FingerprintIdentify"
|
"name": "FingerprintIdentify"
|
||||||
},
|
},
|
||||||
|
@ -2651,6 +2955,16 @@ SUSS_DEFAULT = r'''{
|
||||||
"license": "NonFree",
|
"license": "NonFree",
|
||||||
"name": "Pushy"
|
"name": "Pushy"
|
||||||
},
|
},
|
||||||
|
"org.gradle.toolchains.foojay-resolver-convention": {
|
||||||
|
"documentation": [
|
||||||
|
"https://github.com/gradle/foojay-toolchains"
|
||||||
|
],
|
||||||
|
"gradle_signatures": [
|
||||||
|
"org.gradle.toolchains.foojay-resolver"
|
||||||
|
],
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"name": "Foojay Toolchains Plugin"
|
||||||
|
},
|
||||||
"org.mariuszgromada.math": {
|
"org.mariuszgromada.math": {
|
||||||
"code_signatures": [
|
"code_signatures": [
|
||||||
"org/mariuszgromada/math/mxparser/parsertokens/SyntaxStringBuilder",
|
"org/mariuszgromada/math/mxparser/parsertokens/SyntaxStringBuilder",
|
||||||
|
@ -2683,7 +2997,7 @@ SUSS_DEFAULT = r'''{
|
||||||
"license": "NonFree"
|
"license": "NonFree"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"timestamp": 1725205987.66681,
|
"timestamp": 1747829076.702502,
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"last_updated": 1725950235.569432
|
"last_updated": 1750710966.431471
|
||||||
}'''
|
}'''
|
||||||
|
|
|
@ -15,16 +15,13 @@
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
import re
|
from . import _, common
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from . import _
|
|
||||||
from . import common
|
|
||||||
from . import net
|
|
||||||
from .exception import FDroidException
|
from .exception import FDroidException
|
||||||
|
|
||||||
|
|
||||||
|
@ -68,6 +65,8 @@ def extract(options):
|
||||||
elif httpre.match(apk):
|
elif httpre.match(apk):
|
||||||
if apk.startswith('https') or options.no_check_https:
|
if apk.startswith('https') or options.no_check_https:
|
||||||
try:
|
try:
|
||||||
|
from . import net
|
||||||
|
|
||||||
tmp_apk = os.path.join(tmp_dir, 'signed.apk')
|
tmp_apk = os.path.join(tmp_dir, 'signed.apk')
|
||||||
net.download_file(apk, tmp_apk)
|
net.download_file(apk, tmp_apk)
|
||||||
sigdir = extract_signature(tmp_apk)
|
sigdir = extract_signature(tmp_apk)
|
||||||
|
@ -104,7 +103,7 @@ def main():
|
||||||
)
|
)
|
||||||
parser.add_argument("--no-check-https", action="store_true", default=False)
|
parser.add_argument("--no-check-https", action="store_true", default=False)
|
||||||
options = common.parse_args(parser)
|
options = common.parse_args(parser)
|
||||||
common.set_console_logging(options.verbose)
|
common.set_console_logging(options.verbose, options.color)
|
||||||
common.read_config()
|
common.read_config()
|
||||||
|
|
||||||
extract(options)
|
extract(options)
|
||||||
|
|
|
@ -17,15 +17,13 @@
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
import zipfile
|
import zipfile
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
import logging
|
|
||||||
|
|
||||||
from . import _
|
from . import _, common, metadata
|
||||||
from . import common
|
|
||||||
from . import metadata
|
|
||||||
from .exception import FDroidException
|
from .exception import FDroidException
|
||||||
|
|
||||||
config = None
|
config = None
|
||||||
|
|
|
@ -28,8 +28,8 @@ Example
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
|
||||||
import threading
|
import threading
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
class Tail(object):
|
class Tail(object):
|
||||||
|
|
|
@ -20,25 +20,27 @@
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import sys
|
import copy
|
||||||
import os
|
import filecmp
|
||||||
import shutil
|
|
||||||
import glob
|
import glob
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import socket
|
|
||||||
import warnings
|
|
||||||
import zipfile
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
import time
|
import time
|
||||||
import yaml
|
import warnings
|
||||||
import copy
|
import zipfile
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import asn1crypto.cms
|
import asn1crypto.cms
|
||||||
import defusedxml.ElementTree as ElementTree
|
import defusedxml.ElementTree as ElementTree
|
||||||
from datetime import datetime, timezone
|
import yaml
|
||||||
from argparse import ArgumentParser
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from yaml import CSafeLoader as SafeLoader
|
from yaml import CSafeLoader as SafeLoader
|
||||||
|
@ -48,14 +50,13 @@ except ImportError:
|
||||||
import collections
|
import collections
|
||||||
from binascii import hexlify
|
from binascii import hexlify
|
||||||
|
|
||||||
from . import _
|
from PIL import Image, PngImagePlugin
|
||||||
from . import common
|
|
||||||
from . import metadata
|
|
||||||
from .common import DEFAULT_LOCALE
|
|
||||||
from .exception import BuildException, FDroidException, VerificationException
|
|
||||||
import fdroidserver.index
|
import fdroidserver.index
|
||||||
|
|
||||||
from PIL import Image, PngImagePlugin
|
from . import _, common, metadata
|
||||||
|
from .common import DEFAULT_LOCALE
|
||||||
|
from .exception import BuildException, FDroidException, NoVersionCodeException, VerificationException
|
||||||
|
|
||||||
if hasattr(Image, 'DecompressionBombWarning'):
|
if hasattr(Image, 'DecompressionBombWarning'):
|
||||||
warnings.simplefilter('error', Image.DecompressionBombWarning)
|
warnings.simplefilter('error', Image.DecompressionBombWarning)
|
||||||
|
@ -133,7 +134,7 @@ def disabled_algorithms_allowed():
|
||||||
or common.default_config['allow_disabled_algorithms'])
|
or common.default_config['allow_disabled_algorithms'])
|
||||||
|
|
||||||
|
|
||||||
def status_update_json(apps, apks):
|
def status_update_json(output, apps, apks):
|
||||||
"""Output a JSON file with metadata about this `fdroid update` run.
|
"""Output a JSON file with metadata about this `fdroid update` run.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
|
@ -145,7 +146,6 @@ def status_update_json(apps, apks):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
logging.debug(_('Outputting JSON'))
|
logging.debug(_('Outputting JSON'))
|
||||||
output = common.setup_status_output(start_timestamp)
|
|
||||||
output['antiFeatures'] = dict()
|
output['antiFeatures'] = dict()
|
||||||
output['disabled'] = []
|
output['disabled'] = []
|
||||||
output['archivePolicy0'] = []
|
output['archivePolicy0'] = []
|
||||||
|
@ -202,6 +202,13 @@ def status_update_json(apps, apks):
|
||||||
common.write_status_json(output, options.pretty)
|
common.write_status_json(output, options.pretty)
|
||||||
|
|
||||||
|
|
||||||
|
def output_status_stage(output, stage):
|
||||||
|
if 'stages' not in output:
|
||||||
|
output['stages'] = dict()
|
||||||
|
output['stages'][stage] = common.epoch_millis_now()
|
||||||
|
common.write_running_status_json(output)
|
||||||
|
|
||||||
|
|
||||||
def delete_disabled_builds(apps, apkcache, repodirs):
|
def delete_disabled_builds(apps, apkcache, repodirs):
|
||||||
"""Delete disabled build outputs.
|
"""Delete disabled build outputs.
|
||||||
|
|
||||||
|
@ -316,6 +323,13 @@ def get_cache_file():
|
||||||
return os.path.join('tmp', 'apkcache.json')
|
return os.path.join('tmp', 'apkcache.json')
|
||||||
|
|
||||||
|
|
||||||
|
def get_cache_mtime():
|
||||||
|
apkcachefile = get_cache_file()
|
||||||
|
if os.path.exists(apkcachefile):
|
||||||
|
return os.stat(apkcachefile).st_mtime
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def get_cache():
|
def get_cache():
|
||||||
"""Get the cached dict of the APK index.
|
"""Get the cached dict of the APK index.
|
||||||
|
|
||||||
|
@ -355,7 +369,13 @@ def get_cache():
|
||||||
if not isinstance(v['antiFeatures'], dict):
|
if not isinstance(v['antiFeatures'], dict):
|
||||||
v['antiFeatures'] = {k: {} for k in sorted(v['antiFeatures'])}
|
v['antiFeatures'] = {k: {} for k in sorted(v['antiFeatures'])}
|
||||||
if 'added' in v:
|
if 'added' in v:
|
||||||
v['added'] = datetime.fromtimestamp(v['added'])
|
v['added'] = datetime.fromtimestamp(v['added'], tz=timezone.utc)
|
||||||
|
if v.get('srcname') and not v.get('srcnameSha256'):
|
||||||
|
f = f'archive/{v["srcname"]}'
|
||||||
|
if not os.path.exists(f):
|
||||||
|
f = f'repo/{v["srcname"]}'
|
||||||
|
if os.path.exists(f):
|
||||||
|
v['srcnameSha256'] = common.sha256sum(f)
|
||||||
|
|
||||||
return apkcache
|
return apkcache
|
||||||
|
|
||||||
|
@ -491,7 +511,7 @@ def insert_obbs(repodir, apps, apks):
|
||||||
obbWarnDelete(f, _('OBB filename must start with "main." or "patch.":'))
|
obbWarnDelete(f, _('OBB filename must start with "main." or "patch.":'))
|
||||||
continue
|
continue
|
||||||
if not re.match(r'^-?[0-9]+$', chunks[1]):
|
if not re.match(r'^-?[0-9]+$', chunks[1]):
|
||||||
obbWarnDelete(f, _('The OBB version code must come after "{name}.":')
|
obbWarnDelete(f, _('The OBB versionCode must come after "{name}.":')
|
||||||
.format(name=chunks[0]))
|
.format(name=chunks[0]))
|
||||||
continue
|
continue
|
||||||
versionCode = int(chunks[1])
|
versionCode = int(chunks[1])
|
||||||
|
@@ -530,7 +550,7 @@ VERSION_STRING_RE = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')

 def version_string_to_int(version):
     """
-    Convert sermver version designation to version code.
+    Convert semantic version designation to versionCode.

     Approximately convert a [Major].[Minor].[Patch] version string
     consisting of numeric characters (0-9) and periods to a number. The
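Not the packing used by this function (the factors are not visible in this hunk); just one common way a Major.Minor.Patch string can be flattened into a single comparable integer, to make the docstring concrete:

# Illustrative only: pack each component with a fixed factor so that higher
# versions always map to larger integers, as long as no component exceeds it.
def semver_to_int_example(version, factor=1000):
    major, minor, patch = (int(p) for p in version.split('.'))
    return (major * factor + minor) * factor + patch

print(semver_to_int_example('1.2.3'))    # 1002003
print(semver_to_int_example('10.0.1'))   # 10000001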
|
@ -683,7 +703,7 @@ def scan_repo_for_ipas(apkcache, repodir, knownapks):
|
||||||
apkcache[ipa_name] = ipa
|
apkcache[ipa_name] = ipa
|
||||||
cachechanged = True
|
cachechanged = True
|
||||||
|
|
||||||
added = knownapks.recordapk(ipa_name, ipa['packageName'])
|
added = knownapks.recordapk(ipa_name)
|
||||||
if added:
|
if added:
|
||||||
ipa['added'] = added
|
ipa['added'] = added
|
||||||
|
|
||||||
|
@ -787,16 +807,20 @@ def _strip_and_copy_image(in_file, outpath):
|
||||||
It is not used at all in the F-Droid ecosystem, so its much safer
|
It is not used at all in the F-Droid ecosystem, so its much safer
|
||||||
just to remove it entirely.
|
just to remove it entirely.
|
||||||
|
|
||||||
This uses size+mtime to check for a new file since this process
|
This only uses ctime/mtime to check for a new file since this
|
||||||
actually modifies the resulting file to strip out the EXIF.
|
process actually modifies the resulting file to strip out the EXIF.
|
||||||
|
Therefore, whenever the file needs to be stripped, it will have a
|
||||||
|
newer ctime and most likely a different size. The mtime is copied
|
||||||
|
from the source to the destination, so it can be the same.
|
||||||
|
|
||||||
outpath can be path to either a file or dir. The dir that outpath
|
outpath can be path to either a file or dir. The dir that outpath
|
||||||
refers to must exist before calling this.
|
refers to must exist before calling this.
|
||||||
|
|
||||||
Potential source of Python code to strip JPEGs without dependencies:
|
Potential source of Python code to strip JPEGs without dependencies:
|
||||||
http://www.fetidcascade.com/public/minimal_exif_writer.py
|
http://www.fetidcascade.com/public/minimal_exif_writer.py
|
||||||
|
|
||||||
"""
|
"""
|
||||||
-    logging.debug('copying ' + in_file + ' ' + outpath)
+    logging.debug('copying %s %s', in_file, outpath)

     if not os.path.exists(in_file):
         if os.path.islink(in_file):
@@ -810,11 +834,10 @@ def _strip_and_copy_image(in_file, outpath):
     else:
         out_file = outpath

-    if os.path.exists(out_file):
-        in_stat = os.stat(in_file)
-        out_stat = os.stat(out_file)
-        if in_stat.st_size == out_stat.st_size \
-           and in_stat.st_mtime == out_stat.st_mtime:
+    if os.path.exists(out_file) and (
+        os.path.getmtime(in_file) <= os.path.getmtime(out_file)
+        and os.path.getctime(in_file) <= os.path.getctime(out_file)
+    ):
         return

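A toy illustration of the freshness check above: shutil.copy2 preserves the source mtime on the destination, and the destination's ctime records when the stripped copy was written, so the copy is considered current while both comparisons hold. A self-contained sketch using temporary files:

import os
import shutil
import tempfile

def needs_restrip(in_file, out_file):
    # Same condition as the new code: keep the existing copy only while the
    # source is no newer than the destination by both mtime and ctime.
    if not os.path.exists(out_file):
        return True
    return not (
        os.path.getmtime(in_file) <= os.path.getmtime(out_file)
        and os.path.getctime(in_file) <= os.path.getctime(out_file)
    )

with tempfile.TemporaryDirectory() as tmp:
    src = os.path.join(tmp, 'icon.png')
    dst = os.path.join(tmp, 'copy.png')
    open(src, 'wb').close()
    shutil.copy2(src, dst)            # copies data and mtime
    print(needs_restrip(src, dst))    # False: the copy is current
    os.utime(src)                     # touch the source
    print(needs_restrip(src, dst))    # True: the source is newer again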
extension = common.get_extension(in_file)[1]
|
extension = common.get_extension(in_file)[1]
|
||||||
|
@ -1019,16 +1042,20 @@ def copy_triple_t_store_metadata(apps):
|
||||||
for packageName, app in apps.items():
|
for packageName, app in apps.items():
|
||||||
builds = app.get('Builds', [])
|
builds = app.get('Builds', [])
|
||||||
gradle_subdirs = set()
|
gradle_subdirs = set()
|
||||||
if builds and builds[-1].subdir:
|
if builds:
|
||||||
|
subdir = builds[-1].subdir or ''
|
||||||
for flavor in builds[-1].gradle:
|
for flavor in builds[-1].gradle:
|
||||||
if flavor not in ('yes', 'no', True, False):
|
if flavor not in ('yes', 'no', True, False):
|
||||||
p = os.path.join('build', packageName, builds[-1].subdir, 'src', flavor, 'play')
|
p = os.path.join('build', packageName, subdir, 'src', flavor, 'play')
|
||||||
if os.path.exists(p):
|
if os.path.exists(p):
|
||||||
gradle_subdirs.add(p)
|
gradle_subdirs.add(p)
|
||||||
if not gradle_subdirs:
|
if not gradle_subdirs:
|
||||||
gradle_subdirs.update(glob.glob(os.path.join('build', packageName, builds[-1].subdir, 'src', '*', 'play')))
|
gradle_subdirs.update(glob.glob(os.path.join('build', packageName, subdir, 'src', '*', 'play')))
|
||||||
if not gradle_subdirs:
|
if not gradle_subdirs:
|
||||||
gradle_subdirs.update(glob.glob(os.path.join('build', packageName, builds[-1].subdir, '*', 'src', '*', 'play')))
|
gradle_subdirs.update(glob.glob(os.path.join('build', packageName, subdir, '*', 'src', '*', 'play')))
|
||||||
|
if not gradle_subdirs:
|
||||||
|
# Flutter-style android subdir
|
||||||
|
gradle_subdirs.update(glob.glob(os.path.join('build', packageName, subdir, 'android', 'app', 'src', '*', 'play')))
|
||||||
if not gradle_subdirs:
|
if not gradle_subdirs:
|
||||||
sg_list = sorted(glob.glob(os.path.join('build', packageName, 'settings.gradle*')))
|
sg_list = sorted(glob.glob(os.path.join('build', packageName, 'settings.gradle*')))
|
||||||
if sg_list:
|
if sg_list:
|
||||||
|
@ -1097,6 +1124,9 @@ def copy_triple_t_store_metadata(apps):
|
||||||
repofilename = os.path.basename(f)
|
repofilename = os.path.basename(f)
|
||||||
if segments[-2] == 'listing':
|
if segments[-2] == 'listing':
|
||||||
locale = segments[-3]
|
locale = segments[-3]
|
||||||
|
if dirname in GRAPHIC_NAMES:
|
||||||
|
repofilename = dirname + '.' + extension
|
||||||
|
dirname = ''
|
||||||
elif segments[-4] == 'listings': # v2.x
|
elif segments[-4] == 'listings': # v2.x
|
||||||
locale = segments[-3]
|
locale = segments[-3]
|
||||||
if dirname in tt_graphic_names:
|
if dirname in tt_graphic_names:
|
||||||
|
@ -1124,6 +1154,7 @@ def insert_localized_app_metadata(apps):
|
||||||
|
|
||||||
metadata/<locale>/
|
metadata/<locale>/
|
||||||
fastlane/metadata/android/<locale>/
|
fastlane/metadata/android/<locale>/
|
||||||
|
<subdir>/fastlane/metadata/android/<locale>/
|
||||||
src/<buildFlavor>/fastlane/metadata/android/<locale>/
|
src/<buildFlavor>/fastlane/metadata/android/<locale>/
|
||||||
|
|
||||||
...as well as the /metadata/<packageName>/<locale> directory.
|
...as well as the /metadata/<packageName>/<locale> directory.
|
||||||
|
@ -1143,7 +1174,7 @@ def insert_localized_app_metadata(apps):
|
||||||
https://f-droid.org/en/docs/All_About_Descriptions_Graphics_and_Screenshots/#in-the-apps-build-metadata-in-an-fdroiddata-collection
|
https://f-droid.org/en/docs/All_About_Descriptions_Graphics_and_Screenshots/#in-the-apps-build-metadata-in-an-fdroiddata-collection
|
||||||
"""
|
"""
|
||||||
sourcedirs = glob.glob(os.path.join('build', '[A-Za-z]*', 'src', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
|
sourcedirs = glob.glob(os.path.join('build', '[A-Za-z]*', 'src', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
|
||||||
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
|
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', '**', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'), recursive=True)
|
||||||
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'metadata', '[a-z][a-z]*'))
|
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'metadata', '[a-z][a-z]*'))
|
||||||
sourcedirs += glob.glob(os.path.join('metadata', '[A-Za-z]*', '[a-z][a-z]*'))
|
sourcedirs += glob.glob(os.path.join('metadata', '[A-Za-z]*', '[a-z][a-z]*'))
|
||||||
|
|
||||||
|
@ -1159,17 +1190,40 @@ def insert_localized_app_metadata(apps):
|
||||||
locale = segments[-1]
|
locale = segments[-1]
|
||||||
destdir = os.path.join('repo', packageName, locale)
|
destdir = os.path.join('repo', packageName, locale)
|
||||||
|
|
||||||
# flavours specified in build receipt
|
builds = apps.get(packageName, {}).get('Builds', [])
|
||||||
build_flavours = ""
|
found_in_subdir = (
|
||||||
if (
|
builds
|
||||||
apps[packageName]
|
and len(segments) > 6
|
||||||
and len(apps[packageName].get('Builds', [])) > 0
|
and segments[-4] == "fastlane"
|
||||||
and 'gradle' in apps[packageName]['Builds'][-1]
|
and segments[-3] == "metadata"
|
||||||
):
|
and segments[-2] == "android"
|
||||||
build_flavours = apps[packageName]['Builds'][-1]['gradle']
|
and '/'.join(segments[2:-4]) == builds[-1].get('subdir')
|
||||||
|
)
|
||||||
|
|
||||||
if len(segments) >= 5 and segments[4] == "fastlane" and segments[3] not in build_flavours:
|
# flavors specified in build receipt
|
||||||
logging.debug("ignoring due to wrong flavour")
|
build_flavors = []
|
||||||
|
if builds and 'gradle' in builds[-1] and builds[-1]['gradle'] != ['yes']:
|
||||||
|
build_flavors = common.calculate_gradle_flavor_combination(
|
||||||
|
builds[-1]['gradle']
|
||||||
|
)
|
||||||
|
found_in_flavor = (
|
||||||
|
len(segments) > 7
|
||||||
|
and segments[2] == 'src'
|
||||||
|
and segments[4] == "fastlane"
|
||||||
|
and segments[3] in build_flavors
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not found_in_subdir
|
||||||
|
and not found_in_flavor
|
||||||
|
and segments[0] == 'build'
|
||||||
|
and segments[2] not in ('metadata', 'fastlane')
|
||||||
|
):
|
||||||
|
logging.debug(
|
||||||
|
'Not scanning "{dir}" with unknown subdir or gradle flavor "{value}"'.format(
|
||||||
|
dir=os.path.relpath(root), value=segments[3]
|
||||||
|
)
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for f in files:
|
for f in files:
|
||||||
|
@ -1207,9 +1261,7 @@ def insert_localized_app_metadata(apps):
|
||||||
try:
|
try:
|
||||||
versionCode = int(base)
|
versionCode = int(base)
|
||||||
locale = segments[-2]
|
locale = segments[-2]
|
||||||
if versionCode in [
|
if versionCode in [b["versionCode"] for b in builds]:
|
||||||
a["versionCode"] for a in apps[packageName]["Builds"]
|
|
||||||
]:
|
|
||||||
_set_localized_text_entry(
|
_set_localized_text_entry(
|
||||||
apps[packageName],
|
apps[packageName],
|
||||||
locale,
|
locale,
|
||||||
|
@ -1434,19 +1486,18 @@ def insert_localized_ios_app_metadata(apps_with_packages):
|
||||||
fdroidserver.update.copy_ios_screenshots_to_repo(screenshots, package_name)
|
fdroidserver.update.copy_ios_screenshots_to_repo(screenshots, package_name)
|
||||||
|
|
||||||
# lookup icons, copy them and put them into app
|
# lookup icons, copy them and put them into app
|
||||||
icon_path = _get_ipa_icon(Path('build') / package_name)
|
icon_src = _get_ipa_icon(Path('build') / package_name)
|
||||||
icon_dest = Path('repo') / package_name / 'icon.png' # for now just assume png
|
icon_dest = Path('repo') / package_name / 'icon.png' # for now just assume png
|
||||||
icon_stat = os.stat(icon_path)
|
|
||||||
app['iconv2'] = {
|
app['iconv2'] = {
|
||||||
DEFAULT_LOCALE: {
|
DEFAULT_LOCALE: {
|
||||||
'name': str(icon_dest).lstrip('repo'),
|
'name': str(icon_dest).lstrip('repo'),
|
||||||
'sha256': common.sha256sum(icon_dest),
|
'sha256': common.sha256sum(icon_dest),
|
||||||
'size': icon_stat.st_size,
|
'size': os.path.getsize(icon_src),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if not icon_dest.exists():
|
if not icon_dest.exists() or not filecmp.cmp(icon_src, icon_dest):
|
||||||
icon_dest.parent.mkdir(parents=True, exist_ok=True)
|
icon_dest.parent.mkdir(parents=True, exist_ok=True)
|
||||||
shutil.copy(icon_path, icon_dest)
|
shutil.copy2(icon_src, icon_dest)
|
||||||
|
|
||||||
|
|
||||||
def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
|
def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
|
||||||
|
@ -1525,8 +1576,10 @@ def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
|
||||||
repo_file['packageName'] = m.group(1)
|
repo_file['packageName'] = m.group(1)
|
||||||
repo_file['versionCode'] = int(m.group(2))
|
repo_file['versionCode'] = int(m.group(2))
|
||||||
srcfilename = name + b'_src.tar.gz'
|
srcfilename = name + b'_src.tar.gz'
|
||||||
if os.path.exists(os.path.join(repodir, srcfilename)):
|
srcpath = os.path.join(repodir, srcfilename)
|
||||||
|
if os.path.exists(srcpath):
|
||||||
repo_file['srcname'] = srcfilename.decode()
|
repo_file['srcname'] = srcfilename.decode()
|
||||||
|
repo_file['srcnameSha256'] = common.sha256sum(srcpath.decode())
|
||||||
repo_file['size'] = stat.st_size
|
repo_file['size'] = stat.st_size
|
||||||
|
|
||||||
apkcache[name_utf8] = repo_file
|
apkcache[name_utf8] = repo_file
|
||||||
|
@ -1539,8 +1592,9 @@ def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
|
||||||
default_date_param = None
|
default_date_param = None
|
||||||
|
|
||||||
# Record in knownapks, getting the added date at the same time..
|
# Record in knownapks, getting the added date at the same time..
|
||||||
added = knownapks.recordapk(repo_file['apkName'], repo_file['packageName'],
|
added = knownapks.recordapk(
|
||||||
default_date=default_date_param)
|
repo_file['apkName'], default_date=default_date_param
|
||||||
|
)
|
||||||
if added:
|
if added:
|
||||||
repo_file['added'] = added
|
repo_file['added'] = added
|
||||||
|
|
||||||
|
@ -1747,6 +1801,7 @@ def scan_apk_androguard(apk, apkfile):
|
||||||
|
|
||||||
xml = apkobject.get_android_manifest_xml()
|
xml = apkobject.get_android_manifest_xml()
|
||||||
androidmanifest_xml = apkobject.xml['AndroidManifest.xml']
|
androidmanifest_xml = apkobject.xml['AndroidManifest.xml']
|
||||||
|
|
||||||
if len(xml.nsmap) > 0:
|
if len(xml.nsmap) > 0:
|
||||||
# one of them surely will be the Android one, or its corrupt
|
# one of them surely will be the Android one, or its corrupt
|
||||||
xmlns = common.XMLNS_ANDROID
|
xmlns = common.XMLNS_ANDROID
|
||||||
|
@ -1756,8 +1811,12 @@ def scan_apk_androguard(apk, apkfile):
|
||||||
xmlns = '{}'
|
xmlns = '{}'
|
||||||
|
|
||||||
vcstr = androidmanifest_xml.get(xmlns + 'versionCode')
|
vcstr = androidmanifest_xml.get(xmlns + 'versionCode')
|
||||||
|
logging.debug("Version Code: %r (%s)" % (vcstr, apkfile))
|
||||||
|
|
||||||
if vcstr.startswith('0x'):
|
if not vcstr:
|
||||||
|
raise NoVersionCodeException(_("APK file {path} does not have a version code "
|
||||||
|
"in its manifest").format(path=apkfile))
|
||||||
|
elif vcstr.startswith('0x'):
|
||||||
apk['versionCode'] = int(vcstr, 16)
|
apk['versionCode'] = int(vcstr, 16)
|
||||||
else:
|
else:
|
||||||
apk['versionCode'] = int(vcstr)
|
apk['versionCode'] = int(vcstr)
|
||||||
|
@ -1850,7 +1909,7 @@ def scan_apk_androguard(apk, apkfile):
|
||||||
|
|
||||||
|
|
||||||
def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=False,
|
def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=False,
|
||||||
allow_disabled_algorithms=False, archive_bad_sig=False, apps=None):
|
allow_disabled_algorithms=False, archive_bad_sig=False, apps=None, cache_timestamp=0):
|
||||||
"""Process the apk with the given filename in the given repo directory.
|
"""Process the apk with the given filename in the given repo directory.
|
||||||
|
|
||||||
This also extracts the icons.
|
This also extracts the icons.
|
||||||
|
@ -1872,6 +1931,8 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
disabled algorithms in the signature (e.g. MD5)
|
disabled algorithms in the signature (e.g. MD5)
|
||||||
archive_bad_sig
|
archive_bad_sig
|
||||||
move APKs with a bad signature to the archive
|
move APKs with a bad signature to the archive
|
||||||
|
cache_timestamp
|
||||||
|
the timestamp of the cache file
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
|
@ -1885,7 +1946,8 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
usecache = False
|
usecache = False
|
||||||
if apkfilename in apkcache:
|
if apkfilename in apkcache:
|
||||||
apk = apkcache[apkfilename]
|
apk = apkcache[apkfilename]
|
||||||
if apk.get('hash') == common.sha256sum(apkfile):
|
stat = os.stat(apkfile)
|
||||||
|
if apk.get('size') == stat.st_size and stat.st_mtime < cache_timestamp:
|
||||||
logging.debug(_("Reading {apkfilename} from cache")
|
logging.debug(_("Reading {apkfilename} from cache")
|
||||||
.format(apkfilename=apkfilename))
|
.format(apkfilename=apkfilename))
|
||||||
usecache = True
|
usecache = True
|
||||||
|
@ -1902,6 +1964,10 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
logging.warning(_("Skipping '{apkfilename}' with invalid signature!")
|
logging.warning(_("Skipping '{apkfilename}' with invalid signature!")
|
||||||
.format(apkfilename=apkfilename))
|
.format(apkfilename=apkfilename))
|
||||||
return True, None, False
|
return True, None, False
|
||||||
|
except NoVersionCodeException:
|
||||||
|
logging.warning(_("Skipping '{apkfilename}' without versionCode!")
|
||||||
|
.format(apkfilename=apkfilename))
|
||||||
|
return True, None, False
|
||||||
|
|
||||||
if apps:
|
if apps:
|
||||||
if apk['packageName'] in apps:
|
if apk['packageName'] in apps:
|
||||||
|
@ -1940,8 +2006,10 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
|
|
||||||
apk['apkName'] = apkfilename
|
apk['apkName'] = apkfilename
|
||||||
srcfilename = apkfilename[:-4] + "_src.tar.gz"
|
srcfilename = apkfilename[:-4] + "_src.tar.gz"
|
||||||
if os.path.exists(os.path.join(repodir, srcfilename)):
|
srcpath = os.path.join(repodir, srcfilename)
|
||||||
|
if os.path.exists(srcpath):
|
||||||
apk['srcname'] = srcfilename
|
apk['srcname'] = srcfilename
|
||||||
|
apk['srcnameSha256'] = common.sha256sum(srcpath)
|
||||||
|
|
||||||
# verify the jar signature is correct, allow deprecated
|
# verify the jar signature is correct, allow deprecated
|
||||||
# algorithms only if the APK is in the archive.
|
# algorithms only if the APK is in the archive.
|
||||||
|
@ -1988,13 +2056,12 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
fill_missing_icon_densities(empty_densities, iconfilename, apk, repodir)
|
fill_missing_icon_densities(empty_densities, iconfilename, apk, repodir)
|
||||||
|
|
||||||
if use_date_from_apk:
|
if use_date_from_apk:
|
||||||
default_date_param = datetime.fromtimestamp(os.stat(apkfile).st_mtime)
|
default_date_param = datetime.fromtimestamp(os.stat(apkfile).st_mtime, tz=timezone.utc)
|
||||||
else:
|
else:
|
||||||
default_date_param = None
|
default_date_param = None
|
||||||
|
|
||||||
# Record in known apks, getting the added date at the same time..
|
# Record in known apks, getting the added date at the same time..
|
||||||
added = knownapks.recordapk(apk['apkName'], apk['packageName'],
|
added = knownapks.recordapk(apk['apkName'], default_date=default_date_param)
|
||||||
default_date=default_date_param)
|
|
||||||
if added:
|
if added:
|
||||||
apk['added'] = added
|
apk['added'] = added
|
||||||
|
|
||||||
|
@ -2004,7 +2071,7 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
|
||||||
return False, apk, cachechanged
|
return False, apk, cachechanged
|
||||||
|
|
||||||
|
|
||||||
def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False, apps=None):
|
def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False, apps=None, cache_timestamp=0):
|
||||||
"""Process the apks in the given repo directory.
|
"""Process the apks in the given repo directory.
|
||||||
|
|
||||||
This also extracts the icons.
|
This also extracts the icons.
|
||||||
|
@ -2019,6 +2086,8 @@ def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False, apps=Non
|
||||||
b known apks info
|
b known apks info
|
||||||
use_date_from_apk
|
use_date_from_apk
|
||||||
use date from APK (instead of current date) for newly added APKs
|
use date from APK (instead of current date) for newly added APKs
|
||||||
|
cache_timestamp
|
||||||
|
the timestamp of the cache file
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
|
@ -2040,7 +2109,7 @@ def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False, apps=Non
|
||||||
apkfilename = apkfile[len(repodir) + 1:]
|
apkfilename = apkfile[len(repodir) + 1:]
|
||||||
ada = disabled_algorithms_allowed()
|
ada = disabled_algorithms_allowed()
|
||||||
(skip, apk, cachethis) = process_apk(apkcache, apkfilename, repodir, knownapks,
|
(skip, apk, cachethis) = process_apk(apkcache, apkfilename, repodir, knownapks,
|
||||||
use_date_from_apk, ada, True, apps)
|
use_date_from_apk, ada, True, apps, cache_timestamp)
|
||||||
if skip:
|
if skip:
|
||||||
continue
|
continue
|
||||||
apks.append(apk)
|
apks.append(apk)
|
||||||
|
@ -2253,7 +2322,7 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi
|
||||||
continue
|
continue
|
||||||
apkList.append(apk)
|
apkList.append(apk)
|
||||||
|
|
||||||
# Sort the apk list by version code. First is highest/newest.
|
# Sort the apk list by versionCode. First is highest/newest.
|
||||||
sorted_list = sorted(apkList, key=lambda apk: apk['versionCode'], reverse=True)
|
sorted_list = sorted(apkList, key=lambda apk: apk['versionCode'], reverse=True)
|
||||||
if currentVersionApk:
|
if currentVersionApk:
|
||||||
# Insert apk which corresponds to currentVersion at the front
|
# Insert apk which corresponds to currentVersion at the front
|
||||||
|
@ -2261,13 +2330,7 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi
|
||||||
return sorted_list
|
return sorted_list
|
||||||
|
|
||||||
for appid, app in apps.items():
|
for appid, app in apps.items():
|
||||||
|
keepversions = common.calculate_archive_policy(app, defaultkeepversions)
|
||||||
if app.get('ArchivePolicy') is not None:
|
|
||||||
keepversions = app['ArchivePolicy']
|
|
||||||
else:
|
|
||||||
keepversions = defaultkeepversions
|
|
||||||
if app.get('VercodeOperation'):
|
|
||||||
keepversions *= len(app['VercodeOperation'])
|
|
||||||
if common.metadata_find_developer_signing_files(appid, app['CurrentVersionCode']):
|
if common.metadata_find_developer_signing_files(appid, app['CurrentVersionCode']):
|
||||||
keepversions *= 2
|
keepversions *= 2
|
||||||
|
|
||||||
|
@ -2405,12 +2468,12 @@ def create_metadata_from_template(apk):
|
||||||
def read_added_date_from_all_apks(apps, apks):
|
def read_added_date_from_all_apks(apps, apks):
|
||||||
"""No summary.
|
"""No summary.
|
||||||
|
|
||||||
Added dates come from the stats/known_apks.txt file but are
|
Added dates come from the repo/index-v2.json file but are
|
||||||
read when scanning apks and thus need to be applied form apk
|
read when scanning apks and thus need to be applied form apk
|
||||||
level to app level for _all_ apps and not only from non-archived
|
level to app level for _all_ apps and not only from non-archived
|
||||||
ones
|
ones
|
||||||
|
|
||||||
TODO: read the added dates directly from known_apks.txt instead of
|
TODO: read the added dates directly from index-v2.json instead of
|
||||||
going through apks that way it also works for for repos that
|
going through apks that way it also works for for repos that
|
||||||
don't keep an archive of apks.
|
don't keep an archive of apks.
|
||||||
"""
|
"""
|
||||||
|
@ -2578,7 +2641,7 @@ def main():
|
||||||
metadata.warnings_action = options.W
|
metadata.warnings_action = options.W
|
||||||
|
|
||||||
config = common.read_config()
|
config = common.read_config()
|
||||||
common.setup_status_output(start_timestamp)
|
status_output = common.setup_status_output(start_timestamp)
|
||||||
|
|
||||||
if not (('jarsigner' in config or 'apksigner' in config)
|
if not (('jarsigner' in config or 'apksigner' in config)
|
||||||
and 'keytool' in config):
|
and 'keytool' in config):
|
||||||
|
@ -2638,14 +2701,18 @@ def main():
|
||||||
|
|
||||||
# Get APK cache
|
# Get APK cache
|
||||||
apkcache = get_cache()
|
apkcache = get_cache()
|
||||||
|
cache_timestamp = get_cache_mtime()
|
||||||
|
|
||||||
# Delete builds for disabled apps
|
# Delete builds for disabled apps
|
||||||
|
output_status_stage(status_output, 'delete_disabled_builds')
|
||||||
delete_disabled_builds(apps, apkcache, repodirs)
|
delete_disabled_builds(apps, apkcache, repodirs)
|
||||||
|
|
||||||
# Scan all apks in the main repo
|
# Scan all apks in the main repo
|
||||||
|
output_status_stage(status_output, 'process_apks')
|
||||||
apks, cachechanged = process_apks(apkcache, repodirs[0], knownapks,
|
apks, cachechanged = process_apks(apkcache, repodirs[0], knownapks,
|
||||||
options.use_date_from_apk, apps)
|
options.use_date_from_apk, apps, cache_timestamp)
|
||||||
|
|
||||||
|
output_status_stage(status_output, 'scan_repo_files')
|
||||||
files, fcachechanged = scan_repo_files(apkcache, repodirs[0], knownapks,
|
files, fcachechanged = scan_repo_files(apkcache, repodirs[0], knownapks,
|
||||||
options.use_date_from_apk)
|
options.use_date_from_apk)
|
||||||
cachechanged = cachechanged or fcachechanged
|
cachechanged = cachechanged or fcachechanged
|
||||||
|
@ -2655,10 +2722,23 @@ def main():
|
||||||
cachechanged = cachechanged or icachechanged
|
cachechanged = cachechanged or icachechanged
|
||||||
apks += ipas
|
apks += ipas
|
||||||
|
|
||||||
|
output_status_stage(status_output, 'remove_apks')
|
||||||
appid_has_apks = set()
|
appid_has_apks = set()
|
||||||
appid_has_repo_files = set()
|
appid_has_repo_files = set()
|
||||||
|
sha256_has_files = collections.defaultdict(list)
|
||||||
|
errors = 0
|
||||||
remove_apks = []
|
remove_apks = []
|
||||||
for apk in apks:
|
for apk in apks:
|
||||||
|
sha256 = apk['hash']
|
||||||
|
if sha256 in sha256_has_files:
|
||||||
|
errors += 1
|
||||||
|
for path2 in sha256_has_files[sha256]:
|
||||||
|
logging.error(
|
||||||
|
_('{path1} is a duplicate of {path2}, remove one!').format(
|
||||||
|
path1=apk["apkName"], path2=path2
|
||||||
|
)
|
||||||
|
)
|
||||||
|
sha256_has_files[sha256].append(apk['apkName'])
|
||||||
to_remove = get_apks_without_allowed_signatures(apps.get(apk['packageName']), apk)
|
to_remove = get_apks_without_allowed_signatures(apps.get(apk['packageName']), apk)
|
||||||
if to_remove:
|
if to_remove:
|
||||||
remove_apks.append(apk)
|
remove_apks.append(apk)
|
||||||
|
@ -2701,19 +2781,22 @@ def main():
|
||||||
for apk in remove_apks:
|
for apk in remove_apks:
|
||||||
apks.remove(apk)
|
apks.remove(apk)
|
||||||
|
|
||||||
mismatch_errors = ''
|
|
||||||
for appid in appid_has_apks:
|
for appid in appid_has_apks:
|
||||||
if appid in appid_has_repo_files:
|
if appid in appid_has_repo_files:
|
||||||
appid_files = ', '.join(glob.glob(os.path.join('repo', appid + '_[0-9]*.*')))
|
appid_files = ', '.join(glob.glob(os.path.join('repo', appid + '_[0-9]*.*')))
|
||||||
mismatch_errors += (_('{appid} has both APKs and files: {files}')
|
errors += 1
|
||||||
.format(appid=appid, files=appid_files)) + '\n'
|
logging.error(
|
||||||
if mismatch_errors:
|
_('{appid} has both APKs and files: {files}').format(
|
||||||
raise FDroidException(mismatch_errors)
|
appid=appid, files=appid_files
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if errors:
|
||||||
|
sys.exit(errors)
|
||||||
|
|
||||||
# Scan the archive repo for apks as well
|
# Scan the archive repo for apks as well
|
||||||
if len(repodirs) > 1:
|
if len(repodirs) > 1:
|
||||||
archapks, cc = process_apks(apkcache, repodirs[1], knownapks,
|
archapks, cc = process_apks(apkcache, repodirs[1], knownapks,
|
||||||
options.use_date_from_apk, apps)
|
options.use_date_from_apk, apps, cache_timestamp)
|
||||||
if cc:
|
if cc:
|
||||||
cachechanged = True
|
cachechanged = True
|
||||||
else:
|
else:
|
||||||
|
@ -2722,18 +2805,25 @@ def main():
|
||||||
if cachechanged:
|
if cachechanged:
|
||||||
write_cache(apkcache)
|
write_cache(apkcache)
|
||||||
|
|
||||||
|
output_status_stage(status_output, 'read_added_date_from_all_apks')
|
||||||
# The added date currently comes from the oldest apk which might be in the archive.
|
# The added date currently comes from the oldest apk which might be in the archive.
|
||||||
# So we need this populated at app level before continuing with only processing /repo
|
# So we need this populated at app level before continuing with only processing /repo
|
||||||
# or /archive
|
# or /archive
|
||||||
read_added_date_from_all_apks(apps, apks + archapks)
|
read_added_date_from_all_apks(apps, apks + archapks)
|
||||||
|
|
||||||
if len(repodirs) > 1:
|
if len(repodirs) > 1:
|
||||||
|
output_status_stage(status_output, 'archive_old_apks archive')
|
||||||
archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
|
archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
|
||||||
|
output_status_stage(status_output, 'prepare_apps archive')
|
||||||
archived_apps = prepare_apps(apps, archapks, repodirs[1])
|
archived_apps = prepare_apps(apps, archapks, repodirs[1])
|
||||||
|
output_status_stage(status_output, 'index.make archive')
|
||||||
fdroidserver.index.make(archived_apps, archapks, repodirs[1], True)
|
fdroidserver.index.make(archived_apps, archapks, repodirs[1], True)
|
||||||
|
|
||||||
|
output_status_stage(status_output, 'prepare_apps repo')
|
||||||
repoapps = prepare_apps(apps, apks, repodirs[0])
|
repoapps = prepare_apps(apps, apks, repodirs[0])
|
||||||
|
|
||||||
|
output_status_stage(status_output, 'index.make repo')
|
||||||
|
|
||||||
# APKs are placed into multiple repos based on the app package, providing
|
# APKs are placed into multiple repos based on the app package, providing
|
||||||
# per-app subscription feeds for nightly builds and things like it
|
# per-app subscription feeds for nightly builds and things like it
|
||||||
if config['per_app_repos']:
|
if config['per_app_repos']:
|
||||||
|
@ -2754,13 +2844,10 @@ def main():
|
||||||
git_remote = config.get('binary_transparency_remote')
|
git_remote = config.get('binary_transparency_remote')
|
||||||
if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
|
if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
|
||||||
from . import btlog
|
from . import btlog
|
||||||
|
output_status_stage(status_output, 'make_binary_transparency_log')
|
||||||
btlog.make_binary_transparency_log(repodirs)
|
btlog.make_binary_transparency_log(repodirs)
|
||||||
|
|
||||||
if config['update_stats']:
|
status_update_json(status_output, apps, apks + archapks)
|
||||||
# Update known apks info...
|
|
||||||
knownapks.writeifchanged()
|
|
||||||
|
|
||||||
status_update_json(apps, apks + archapks)
|
|
||||||
|
|
||||||
logging.info(_("Finished"))
|
logging.info(_("Finished"))
|
||||||
|
|
||||||
|
|
|
@ -16,18 +16,17 @@
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import glob
|
import glob
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import requests
|
import os
|
||||||
|
import sys
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
|
|
||||||
from . import _
|
import requests
|
||||||
from . import common
|
|
||||||
from . import net
|
from . import _, common, net
|
||||||
from .exception import FDroidException
|
from .exception import FDroidException
|
||||||
|
|
||||||
config = None
|
config = None
|
||||||
|
@ -58,8 +57,8 @@ def _add_diffoscope_info(d):
|
||||||
]
|
]
|
||||||
d['diffoscope']['External-Tools-Required'] = external_tools
|
d['diffoscope']['External-Tools-Required'] = external_tools
|
||||||
|
|
||||||
from diffoscope.tools import OS_NAMES, get_current_os
|
|
||||||
from diffoscope.external_tools import EXTERNAL_TOOLS
|
from diffoscope.external_tools import EXTERNAL_TOOLS
|
||||||
|
from diffoscope.tools import OS_NAMES, get_current_os
|
||||||
|
|
||||||
current_os = get_current_os()
|
current_os = get_current_os()
|
||||||
os_list = [current_os] if (current_os in OS_NAMES) else iter(OS_NAMES)
|
os_list = [current_os] if (current_os in OS_NAMES) else iter(OS_NAMES)
|
||||||
|
@ -80,6 +79,30 @@ def _add_diffoscope_info(d):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_verified_json(path):
|
||||||
|
"""Get the full collection of reports that is written out to verified.json."""
|
||||||
|
if os.path.exists(path):
|
||||||
|
try:
|
||||||
|
with open(path) as fp:
|
||||||
|
return json.load(fp)
|
||||||
|
except Exception as e:
|
||||||
|
logging.info(f'{path}: {e}')
|
||||||
|
|
||||||
|
data = OrderedDict()
|
||||||
|
data['packages'] = OrderedDict()
|
||||||
|
|
||||||
|
for f in glob.glob(os.path.join(os.path.dirname(path), '*.apk.json')):
|
||||||
|
with open(f) as fp:
|
||||||
|
reports = json.load(fp)
|
||||||
|
for report in reports.values():
|
||||||
|
packageName = report['local']['packageName']
|
||||||
|
if packageName not in data['packages']:
|
||||||
|
data['packages'][packageName] = []
|
||||||
|
data['packages'][packageName].append(report)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
def write_json_report(url, remote_apk, unsigned_apk, compare_result):
|
def write_json_report(url, remote_apk, unsigned_apk, compare_result):
|
||||||
"""Write out the results of the verify run to JSON.
|
"""Write out the results of the verify run to JSON.
|
||||||
|
|
||||||
|
@ -120,14 +143,30 @@ def write_json_report(url, remote_apk, unsigned_apk, compare_result):
|
||||||
with open(jsonfile, 'w') as fp:
|
with open(jsonfile, 'w') as fp:
|
||||||
json.dump(data, fp, sort_keys=True)
|
json.dump(data, fp, sort_keys=True)
|
||||||
|
|
||||||
|
appid, version_code = os.path.basename(unsigned_apk[:-4]).rsplit('_', 1)
|
||||||
|
appid_base = unsigned_apk.rsplit('_', 1)[0]
|
||||||
|
apkReports = sorted(
|
||||||
|
glob.glob(f'{appid_base}_[0-9]*.json'), # don't include <appid>.json
|
||||||
|
key=lambda s: int(s[:-9].rsplit('_', 1)[1]), # numeric sort by versionCode
|
||||||
|
)
|
||||||
|
with open(apkReports[-1]) as fp:
|
||||||
|
reports = json.load(fp)
|
||||||
|
appid_output = {'apkReports': apkReports}
|
||||||
|
most_recent = 0
|
||||||
|
for report_time, run in reports.items():
|
||||||
|
if float(report_time) > most_recent:
|
||||||
|
most_recent = float(report_time)
|
||||||
|
appid_output['lastRunVerified'] = run['verified']
|
||||||
|
with open(f'{appid_base}.json', 'w') as fp:
|
||||||
|
json.dump(appid_output, fp, cls=common.Encoder, sort_keys=True)
|
||||||
|
|
||||||
if output['verified']:
|
if output['verified']:
|
||||||
|
write_verified_json(output)
|
||||||
|
|
||||||
|
|
||||||
|
def write_verified_json(output):
|
||||||
jsonfile = 'unsigned/verified.json'
|
jsonfile = 'unsigned/verified.json'
|
||||||
if os.path.exists(jsonfile):
|
data = get_verified_json(jsonfile)
|
||||||
with open(jsonfile) as fp:
|
|
||||||
data = json.load(fp)
|
|
||||||
else:
|
|
||||||
data = OrderedDict()
|
|
||||||
data['packages'] = OrderedDict()
|
|
||||||
packageName = output['local']['packageName']
|
packageName = output['local']['packageName']
|
||||||
|
|
||||||
if packageName not in data['packages']:
|
if packageName not in data['packages']:
|
||||||
|
@ -157,6 +196,12 @@ def main():
|
||||||
nargs='*',
|
nargs='*',
|
||||||
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
|
help=_("application ID with optional versionCode in the form APPID[:VERCODE]"),
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--clean-up-verified",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help=_("Remove source tarball and any APKs if successfully verified."),
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--reuse-remote-apk",
|
"--reuse-remote-apk",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
|
@ -224,12 +269,26 @@ def main():
|
||||||
) from e
|
) from e
|
||||||
|
|
||||||
unsigned_apk = os.path.join(unsigned_dir, apkfilename)
|
unsigned_apk = os.path.join(unsigned_dir, apkfilename)
|
||||||
compare_result = common.verify_apks(remote_apk, unsigned_apk, tmp_dir)
|
compare_result = common.verify_apks(
|
||||||
|
remote_apk,
|
||||||
|
unsigned_apk,
|
||||||
|
tmp_dir,
|
||||||
|
clean_up_verified=options.clean_up_verified,
|
||||||
|
)
|
||||||
if options.output_json:
|
if options.output_json:
|
||||||
write_json_report(url, remote_apk, unsigned_apk, compare_result)
|
write_json_report(url, remote_apk, unsigned_apk, compare_result)
|
||||||
if compare_result:
|
if compare_result:
|
||||||
raise FDroidException(compare_result)
|
raise FDroidException(compare_result)
|
||||||
|
|
||||||
|
if options.clean_up_verified:
|
||||||
|
src_tarball = os.path.join(
|
||||||
|
unsigned_dir, common.get_src_tarball_name(appid, vercode)
|
||||||
|
)
|
||||||
|
for f in (remote_apk, unsigned_apk, src_tarball):
|
||||||
|
if os.path.exists(f):
|
||||||
|
logging.info(f"...cleaned up {f} after successful verification")
|
||||||
|
os.remove(f)
|
||||||
|
|
||||||
logging.info("...successfully verified")
|
logging.info("...successfully verified")
|
||||||
verified += 1
|
verified += 1
|
||||||
|
|
||||||
|
|
|
@ -16,16 +16,16 @@
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from os.path import isdir, isfile, basename, abspath, expanduser
|
|
||||||
import os
|
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import textwrap
|
import textwrap
|
||||||
import logging
|
|
||||||
from .common import FDroidException
|
|
||||||
|
|
||||||
import threading
|
import threading
|
||||||
|
from os.path import abspath, basename, expanduser, isdir, isfile
|
||||||
|
|
||||||
|
from .common import FDroidException
|
||||||
|
|
||||||
lock = threading.Lock()
|
lock = threading.Lock()
|
||||||
|
|
||||||
|
|
302
gradlew-fdroid
302
gradlew-fdroid
|
@ -1,302 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
bindir="$(dirname $0)"
|
|
||||||
basedir="$(dirname $bindir)"
|
|
||||||
# Check if GRADLE_VERSION_DIR/CACHEDIR is set from environment
|
|
||||||
if [ -z "$GRADLE_VERSION_DIR" ]; then
|
|
||||||
gradle_version_dir="${basedir}/versions"
|
|
||||||
else
|
|
||||||
gradle_version_dir="$GRADLE_VERSION_DIR"
|
|
||||||
fi
|
|
||||||
BUILDSERVER_CACHEDIR=/vagrant/cache
|
|
||||||
if [ -n "$CACHEDIR" ]; then
|
|
||||||
cachedir="$CACHEDIR"
|
|
||||||
elif [ -d $BUILDSERVER_CACHEDIR ]; then
|
|
||||||
cachedir=$BUILDSERVER_CACHEDIR
|
|
||||||
fi
|
|
||||||
args=("$@")
|
|
||||||
|
|
||||||
run_gradle() {
|
|
||||||
if [ ! -d "${gradle_version_dir}/${v_found}" ]; then
|
|
||||||
download_gradle ${v_found}
|
|
||||||
fi
|
|
||||||
# shellcheck disable=SC2145
|
|
||||||
echo "Running ${gradle_version_dir}/${v_found}/bin/gradle ${args[@]}"
|
|
||||||
"${gradle_version_dir}/${v_found}/bin/gradle" "${args[@]}"
|
|
||||||
exit $?
|
|
||||||
}
|
|
||||||
|
|
||||||
download_gradle() {
|
|
||||||
URL="https://downloads.gradle.org/distributions/gradle-${1}-bin.zip"
|
|
||||||
shasum=$(get_sha $1)
|
|
||||||
if [ $? != 0 ]; then
|
|
||||||
echo "No hash for gradle version $1! Exiting..."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
if [ -n "${cachedir}" ] && [ -e "${cachedir}/gradle-$1-bin.zip" ]; then
|
|
||||||
echo "Using cached ${cachedir}/gradle-$1-bin.zip ..."
|
|
||||||
gradle_zip="${cachedir}/gradle-$1-bin.zip"
|
|
||||||
else
|
|
||||||
echo "Downloading missing gradle version $1"
|
|
||||||
echo cachedir $cachedir
|
|
||||||
if [[ -n "${cachedir}" && ! -d "${cachedir}" ]]; then
|
|
||||||
mkdir -p "${cachedir}"
|
|
||||||
fi
|
|
||||||
if [[ -n "${cachedir}" && -d "${cachedir}" && -w "${cachedir}" ]]; then
|
|
||||||
tmpdir="${cachedir}"
|
|
||||||
else
|
|
||||||
tmpdir=$(mktemp -d)
|
|
||||||
fi
|
|
||||||
curl -o "${tmpdir}/gradle-$1-bin.zip" --silent --fail --show-error --location --retry 3 --retry-all-errors "${URL}"
|
|
||||||
gradle_zip="${tmpdir}/gradle-$1-bin.zip"
|
|
||||||
fi
|
|
||||||
echo "${shasum} ${gradle_zip}" | sha256sum -c -
|
|
||||||
if [ $? != 0 ]; then
|
|
||||||
echo "gradle download checksum mismatch! Exiting..."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
mkdir -p "${gradle_version_dir}/"
|
|
||||||
unzip -q -d "${gradle_version_dir}" "${gradle_zip}"
|
|
||||||
mv "${gradle_version_dir}/gradle-$1" "${gradle_version_dir}/${v_found}"
|
|
||||||
}
|
|
||||||
|
|
||||||
get_sha() {
|
|
||||||
case $1 in
|
|
||||||
'0.7') echo '4e354fcb0d5c0b0e7789cd6ee900456edaf993f6dd890c4a1c217d90d2a6a6ad' ;;
|
|
||||||
'0.8') echo '940e623ea98e40ea9ad398770a6ebb91a61c0869d394dda81aa86b0f4f0025e7' ;;
|
|
||||||
'0.9') echo '994e46d4b467254a0f25ce92b602618331b9b3ac8b32a094fd84ff0e0ceec135' ;;
|
|
||||||
'0.9.1') echo '5d48cba95db031ec109ae9ab60561e960b6507046036e8191aa78572ec27e2a5' ;;
|
|
||||||
'0.9.2') echo 'f94d7642348c558fc05ab5fd6fb947fb1ed8fed5931ddb73dd04fb0de22d669b' ;;
|
|
||||||
'1.0') echo '894bca0360a7e2040815096788f118a2dd106ff6694221b274efb9c32bce0384' ;;
|
|
||||||
'1.1') echo '552c1fc9f3a1b9668b79cc447370f0263e664ffb6d5c6e1c21e77ce0c8a20d4c' ;;
|
|
||||||
'1.2') echo 'eb53da3704d24cabb7565f34a3bf16bcd863c4b0c139917606fb15d4f27c7bdf' ;;
|
|
||||||
'1.3') echo 'ada68561efbb9f1cae0f9063974cbde15c180351a2f92bc2f1106e39ddcae5ba' ;;
|
|
||||||
'1.4') echo 'cd99e85fbcd0ae8b99e81c9992a2f10cceb7b5f009c3720ef3a0078f4f92e94e' ;;
|
|
||||||
'1.5') echo 'a5511a0659caa47d9d74fd2844c9da43157d2f78e63a0223c6289d88f5aaecbe' ;;
|
|
||||||
'1.6') echo 'de3e89d2113923dcc2e0def62d69be0947ceac910abd38b75ec333230183fac4' ;;
|
|
||||||
'1.7') echo '360c97d51621b5a1ecf66748c718594e5f790ae4fbc1499543e0c006033c9d30' ;;
|
|
||||||
'1.8') echo 'a342bbfa15fd18e2482287da4959588f45a41b60910970a16e6d97959aea5703' ;;
|
|
||||||
'1.9') echo '097ddc2bcbc9da2bb08cbf6bf8079585e35ad088bafd42e8716bc96405db98e9' ;;
|
|
||||||
'1.10') echo '6e6db4fc595f27ceda059d23693b6f6848583950606112b37dfd0e97a0a0a4fe' ;;
|
|
||||||
'1.11') echo '07e235df824964f0e19e73ea2327ce345c44bcd06d44a0123d29ab287fc34091' ;;
|
|
||||||
'1.12') echo '8734b13a401f4311ee418173ed6ca8662d2b0a535be8ff2a43ecb1c13cd406ea' ;;
|
|
||||||
'2.0') echo 'a1eb880c8755333c4d33c4351b269bebe517002532d3142c0b6164c9e8c081c3' ;;
|
|
||||||
'2.1') echo '3eee4f9ea2ab0221b89f8e4747a96d4554d00ae46d8d633f11cfda60988bf878' ;;
|
|
||||||
'2.2') echo '91e5655fe11ef414449f218c4fa2985b3a49b7903c57556da109c84fa26e1dfb' ;;
|
|
||||||
'2.2.1') echo '420aa50738299327b611c10b8304b749e8d3a579407ee9e755b15921d95ff418' ;;
|
|
||||||
'2.3') echo '010dd9f31849abc3d5644e282943b1c1c355f8e2635c5789833979ce590a3774' ;;
|
|
||||||
'2.4') echo 'c4eaecc621a81f567ded1aede4a5ddb281cc02a03a6a87c4f5502add8fc2f16f' ;;
|
|
||||||
'2.5') echo '3f953e0cb14bb3f9ebbe11946e84071547bf5dfd575d90cfe9cc4e788da38555' ;;
|
|
||||||
'2.6') echo '18a98c560af231dfa0d3f8e0802c20103ae986f12428bb0a6f5396e8f14e9c83' ;;
|
|
||||||
'2.7') echo 'cde43b90945b5304c43ee36e58aab4cc6fb3a3d5f9bd9449bb1709a68371cb06' ;;
|
|
||||||
'2.8') echo 'a88db9c2f104defdaa8011c58cf6cda6c114298ae3695ecfb8beb30da3a903cb' ;;
|
|
||||||
'2.9') echo 'c9159ec4362284c0a38d73237e224deae6139cbde0db4f0f44e1c7691dd3de2f' ;;
|
|
||||||
'2.10') echo '66406247f745fc6f05ab382d3f8d3e120c339f34ef54b86f6dc5f6efc18fbb13' ;;
|
|
||||||
'2.11') echo '8d7437082356c9fd6309a4479c8db307673965546daea445c6c72759cd6b1ed6' ;;
|
|
||||||
'2.12') echo 'e77064981906cd0476ff1e0de3e6fef747bd18e140960f1915cca8ff6c33ab5c' ;;
|
|
||||||
'2.13') echo '0f665ec6a5a67865faf7ba0d825afb19c26705ea0597cec80dd191b0f2cbb664' ;;
|
|
||||||
'2.14') echo '993b4f33b652c689e9721917d8e021cab6bbd3eae81b39ab2fd46fdb19a928d5' ;;
|
|
||||||
'2.14.1') echo 'cfc61eda71f2d12a572822644ce13d2919407595c2aec3e3566d2aab6f97ef39' ;;
|
|
||||||
'3.0') echo '39c906941a474444afbddc38144ed44166825acb0a57b0551dddb04bbf157f80' ;;
|
|
||||||
'3.1') echo 'c7de3442432253525902f7e8d7eac8b5fd6ce1623f96d76916af6d0e383010fc' ;;
|
|
||||||
'3.2') echo '5321b36837226dc0377047a328f12010f42c7bf88ee4a3b1cee0c11040082935' ;;
|
|
||||||
'3.2.1') echo '9843a3654d3e57dce54db06d05f18b664b95c22bf90c6becccb61fc63ce60689' ;;
|
|
||||||
'3.3') echo 'c58650c278d8cf0696cab65108ae3c8d95eea9c1938e0eb8b997095d5ca9a292' ;;
|
|
||||||
'3.4') echo '72d0cd4dcdd5e3be165eb7cd7bbd25cf8968baf400323d9ab1bba622c3f72205' ;;
|
|
||||||
'3.4.1') echo 'db1db193d479cc1202be843f17e4526660cfb0b21b57d62f3a87f88c878af9b2' ;;
|
|
||||||
'3.5') echo '0b7450798c190ff76b9f9a3d02e18b33d94553f708ebc08ebe09bdf99111d110' ;;
|
|
||||||
'3.5.1') echo '8dce35f52d4c7b4a4946df73aa2830e76ba7148850753d8b5e94c5dc325ceef8' ;;
|
|
||||||
'4.0') echo '56bd2dde29ba2a93903c557da1745cafd72cdd8b6b0b83c05a40ed7896b79dfe' ;;
|
|
||||||
'4.0.1') echo 'd717e46200d1359893f891dab047fdab98784143ac76861b53c50dbd03b44fd4' ;;
|
|
||||||
'4.0.2') echo '79ac421342bd11f6a4f404e0988baa9c1f5fabf07e3c6fa65b0c15c1c31dda22' ;;
|
|
||||||
'4.1') echo 'd55dfa9cfb5a3da86a1c9e75bb0b9507f9a8c8c100793ccec7beb6e259f9ed43' ;;
|
|
||||||
'4.2') echo '515dd63d32e55a9c05667809c5e40a947529de3054444ad274b3b75af5582eae' ;;
|
|
||||||
'4.2.1') echo 'b551cc04f2ca51c78dd14edb060621f0e5439bdfafa6fd167032a09ac708fbc0' ;;
|
|
||||||
'4.3') echo '8dcbf44eef92575b475dcb1ce12b5f19d38dc79e84c662670248dc8b8247654c' ;;
|
|
||||||
'4.3.1') echo '15ebe098ce0392a2d06d252bff24143cc88c4e963346582c8d88814758d93ac7' ;;
|
|
||||||
'4.4') echo 'fa4873ae2c7f5e8c02ec6948ba95848cedced6134772a0169718eadcb39e0a2f' ;;
|
|
||||||
'4.4.1') echo 'e7cf7d1853dfc30c1c44f571d3919eeeedef002823b66b6a988d27e919686389' ;;
|
|
||||||
'4.5') echo '03f2a43a314ff0fb843a85ef68078e06d181c4549c1e5fb983f289382b59b5e3' ;;
|
|
||||||
'4.5.1') echo '3e2ea0d8b96605b7c528768f646e0975bd9822f06df1f04a64fd279b1a17805e' ;;
|
|
||||||
'4.6') echo '98bd5fd2b30e070517e03c51cbb32beee3e2ee1a84003a5a5d748996d4b1b915' ;;
|
|
||||||
'4.7') echo 'fca5087dc8b50c64655c000989635664a73b11b9bd3703c7d6cabd31b7dcdb04' ;;
|
|
||||||
'4.8') echo 'f3e29692a8faa94eb0b02ebf36fa263a642b3ae8694ef806c45c345b8683f1ba' ;;
|
|
||||||
'4.8.1') echo 'af334d994b5e69e439ab55b5d2b7d086da5ea6763d78054f49f147b06370ed71' ;;
|
|
||||||
'4.9') echo 'e66e69dce8173dd2004b39ba93586a184628bc6c28461bc771d6835f7f9b0d28' ;;
|
|
||||||
'4.10') echo '248cfd92104ce12c5431ddb8309cf713fe58de8e330c63176543320022f59f18' ;;
|
|
||||||
'4.10.1') echo 'e53ce3a01cf016b5d294eef20977ad4e3c13e761ac1e475f1ffad4c6141a92bd' ;;
|
|
||||||
'4.10.2') echo 'b49c6da1b2cb67a0caf6c7480630b51c70a11ca2016ff2f555eaeda863143a29' ;;
|
|
||||||
'4.10.3') echo '8626cbf206b4e201ade7b87779090690447054bc93f052954c78480fa6ed186e' ;;
|
|
||||||
'5.0') echo '6157ac9f3410bc63644625b3b3e9e96c963afd7910ae0697792db57813ee79a6' ;;
|
|
||||||
'5.1') echo '7506638a380092a0406364c79d6c87d03d23017fc25a5770379d1ce23c3fcd4d' ;;
|
|
||||||
'5.1.1') echo '4953323605c5d7b89e97d0dc7779e275bccedefcdac090aec123375eae0cc798' ;;
|
|
||||||
'5.2') echo 'ff322863250159595e93b5a4d17a6f0d21c59a1a0497c1e1cf1d53826485503f' ;;
|
|
||||||
'5.2.1') echo '748c33ff8d216736723be4037085b8dc342c6a0f309081acf682c9803e407357' ;;
|
|
||||||
'5.3') echo 'bed2bdd3955be5a09ca7e0201e9d131f194f7f6c466e1795a733733ccfb09f25' ;;
|
|
||||||
'5.3.1') echo '1c59a17a054e9c82f0dd881871c9646e943ec4c71dd52ebc6137d17f82337436' ;;
|
|
||||||
'5.4') echo 'c8c17574245ecee9ed7fe4f6b593b696d1692d1adbfef425bef9b333e3a0e8de' ;;
|
|
||||||
'5.4.1') echo '7bdbad1e4f54f13c8a78abc00c26d44dd8709d4aedb704d913fb1bb78ac025dc' ;;
|
|
||||||
'5.5') echo '8d78b2ed63e7f07ad169c1186d119761c4773e681f332cfe1901045b1b0141bc' ;;
|
|
||||||
'5.5.1') echo '222a03fcf2fcaf3691767ce9549f78ebd4a77e73f9e23a396899fb70b420cd00' ;;
|
|
||||||
'5.6') echo '15c02ef5dd3631ec02ac52e8725703e0285d9a7eecbf4e5939aa9e924604d01d' ;;
|
|
||||||
'5.6.1') echo '0986244820e4a35d32d91df2ec4b768b5ba5d6c8246753794f85159f9963ec12' ;;
|
|
||||||
'5.6.2') echo '32fce6628848f799b0ad3205ae8db67d0d828c10ffe62b748a7c0d9f4a5d9ee0' ;;
|
|
||||||
'5.6.3') echo '60a6d8f687e3e7a4bc901cc6bc3db190efae0f02f0cc697e323e0f9336f224a3' ;;
|
|
||||||
'5.6.4') echo '1f3067073041bc44554d0efe5d402a33bc3d3c93cc39ab684f308586d732a80d' ;;
|
|
||||||
'6.0') echo '5a3578b9f0bb162f5e08cf119f447dfb8fa950cedebb4d2a977e912a11a74b91' ;;
|
|
||||||
'6.0.1') echo 'd364b7098b9f2e58579a3603dc0a12a1991353ac58ed339316e6762b21efba44' ;;
|
|
||||||
'6.1') echo 'd0c43d14e1c70a48b82442f435d06186351a2d290d72afd5b8866f15e6d7038a' ;;
|
|
||||||
'6.1.1') echo '9d94e6e4a28ad328072ef6e56bce79a810494ae756751fdcedffdeaf27c093b1' ;;
|
|
||||||
'6.2') echo 'b93a5f30d01195ec201e240f029c8b42d59c24086b8d1864112c83558e23cf8a' ;;
|
|
||||||
'6.2.1') echo 'a68ca7ba57f3404c3f6fc1f70a02d3a7d78652e6b46bbfaff83fc9a17168c279' ;;
|
|
||||||
'6.2.2') echo '0f6ba231b986276d8221d7a870b4d98e0df76e6daf1f42e7c0baec5032fb7d17' ;;
|
|
||||||
'6.3') echo '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768' ;;
|
|
||||||
'6.4') echo 'b888659f637887e759749f6226ddfcb1cb04f828c58c41279de73c463fdbacc9' ;;
|
|
||||||
'6.4.1') echo 'e58cdff0cee6d9b422dcd08ebeb3177bc44eaa09bd9a2e838ff74c408fe1cbcd' ;;
|
|
||||||
'6.5') echo '23e7d37e9bb4f8dabb8a3ea7fdee9dd0428b9b1a71d298aefd65b11dccea220f' ;;
|
|
||||||
'6.5.1') echo '50a7d30529fa939721fe9268a0205142f3f2302bcac5fb45b27a3902e58db54a' ;;
|
|
||||||
'6.6') echo 'e6f83508f0970452f56197f610d13c5f593baaf43c0e3c6a571e5967be754025' ;;
|
|
||||||
'6.6.1') echo '7873ed5287f47ca03549ab8dcb6dc877ac7f0e3d7b1eb12685161d10080910ac' ;;
|
|
||||||
'6.7') echo '8ad57759019a9233dc7dc4d1a530cefe109dc122000d57f7e623f8cf4ba9dfc4' ;;
|
|
||||||
'6.7.1') echo '3239b5ed86c3838a37d983ac100573f64c1f3fd8e1eb6c89fa5f9529b5ec091d' ;;
|
|
||||||
'6.8') echo 'e2774e6fb77c43657decde25542dea710aafd78c4022d19b196e7e78d79d8c6c' ;;
|
|
||||||
'6.8.1') echo 'fd591a34af7385730970399f473afabdb8b28d57fd97d6625c388d090039d6fd' ;;
|
|
||||||
'6.8.2') echo '8de6efc274ab52332a9c820366dd5cf5fc9d35ec7078fd70c8ec6913431ee610' ;;
|
|
||||||
'6.8.3') echo '7faa7198769f872826c8ef4f1450f839ec27f0b4d5d1e51bade63667cbccd205' ;;
|
|
||||||
'6.9') echo '765442b8069c6bee2ea70713861c027587591c6b1df2c857a23361512560894e' ;;
|
|
||||||
'6.9.1') echo '8c12154228a502b784f451179846e518733cf856efc7d45b2e6691012977b2fe' ;;
|
|
||||||
'6.9.2') echo '8b356fd8702d5ffa2e066ed0be45a023a779bba4dd1a68fd11bc2a6bdc981e8f' ;;
|
|
||||||
'6.9.3') echo 'dcf350b8ae1aa192fc299aed6efc77b43825d4fedb224c94118ae7faf5fb035d' ;;
|
|
||||||
'6.9.4') echo '3e240228538de9f18772a574e99a0ba959e83d6ef351014381acd9631781389a' ;;
|
|
||||||
'7.0') echo 'eb8b89184261025b0430f5b2233701ff1377f96da1ef5e278af6ae8bac5cc305' ;;
|
|
||||||
'7.0.1') echo 'dccda8aa069563c8ba2f6cdfd0777df0e34a5b4d15138ca8b9757e94f4e8a8cb' ;;
|
|
||||||
'7.0.2') echo '0e46229820205440b48a5501122002842b82886e76af35f0f3a069243dca4b3c' ;;
|
|
||||||
'7.1') echo '2debee19271e1b82c6e41137d78e44e6e841035230a1a169ca47fd3fb09ed87b' ;;
|
|
||||||
'7.1.1') echo 'bf8b869948901d422e9bb7d1fa61da6a6e19411baa7ad6ee929073df85d6365d' ;;
|
|
||||||
'7.2') echo 'f581709a9c35e9cb92e16f585d2c4bc99b2b1a5f85d2badbd3dc6bff59e1e6dd' ;;
|
|
||||||
'7.3') echo 'de8f52ad49bdc759164f72439a3bf56ddb1589c4cde802d3cec7d6ad0e0ee410' ;;
|
|
||||||
'7.3.1') echo '9afb3ca688fc12c761a0e9e4321e4d24e977a4a8916c8a768b1fe05ddb4d6b66' ;;
|
|
||||||
'7.3.2') echo '23b89f8eac363f5f4b8336e0530c7295c55b728a9caa5268fdd4a532610d5392' ;;
|
|
||||||
'7.3.3') echo 'b586e04868a22fd817c8971330fec37e298f3242eb85c374181b12d637f80302' ;;
|
|
||||||
'7.4') echo '8cc27038d5dbd815759851ba53e70cf62e481b87494cc97cfd97982ada5ba634' ;;
|
|
||||||
'7.4.1') echo 'e5444a57cda4a95f90b0c9446a9e1b47d3d7f69057765bfb54bd4f482542d548' ;;
|
|
||||||
'7.4.2') echo '29e49b10984e585d8118b7d0bc452f944e386458df27371b49b4ac1dec4b7fda' ;;
|
|
||||||
'7.5') echo 'cb87f222c5585bd46838ad4db78463a5c5f3d336e5e2b98dc7c0c586527351c2' ;;
|
|
||||||
'7.5.1') echo 'f6b8596b10cce501591e92f229816aa4046424f3b24d771751b06779d58c8ec4' ;;
|
|
||||||
'7.6') echo '7ba68c54029790ab444b39d7e293d3236b2632631fb5f2e012bb28b4ff669e4b' ;;
|
|
||||||
'7.6.1') echo '6147605a23b4eff6c334927a86ff3508cb5d6722cd624c97ded4c2e8640f1f87' ;;
|
|
||||||
'7.6.2') echo 'a01b6587e15fe7ed120a0ee299c25982a1eee045abd6a9dd5e216b2f628ef9ac' ;;
|
|
||||||
'7.6.3') echo '740c2e472ee4326c33bf75a5c9f5cd1e69ecf3f9b580f6e236c86d1f3d98cfac' ;;
|
|
||||||
'7.6.4') echo 'bed1da33cca0f557ab13691c77f38bb67388119e4794d113e051039b80af9bb1' ;;
|
|
||||||
'8.0') echo '4159b938ec734a8388ce03f52aa8f3c7ed0d31f5438622545de4f83a89b79788' ;;
|
|
||||||
'8.0.1') echo '1b6b558be93f29438d3df94b7dfee02e794b94d9aca4611a92cdb79b6b88e909' ;;
|
|
||||||
'8.0.2') echo 'ff7bf6a86f09b9b2c40bb8f48b25fc19cf2b2664fd1d220cd7ab833ec758d0d7' ;;
|
|
||||||
'8.1') echo 'a62c5f99585dd9e1f95dab7b9415a0e698fa9dd1e6c38537faa81ac078f4d23e' ;;
|
|
||||||
'8.1.1') echo 'e111cb9948407e26351227dabce49822fb88c37ee72f1d1582a69c68af2e702f' ;;
|
|
||||||
'8.2') echo '38f66cd6eef217b4c35855bb11ea4e9fbc53594ccccb5fb82dfd317ef8c2c5a3' ;;
|
|
||||||
'8.2.1') echo '03ec176d388f2aa99defcadc3ac6adf8dd2bce5145a129659537c0874dea5ad1' ;;
|
|
||||||
'8.3') echo '591855b517fc635b9e04de1d05d5e76ada3f89f5fc76f87978d1b245b4f69225' ;;
|
|
||||||
'8.4') echo '3e1af3ae886920c3ac87f7a91f816c0c7c436f276a6eefdb3da152100fef72ae' ;;
|
|
||||||
'8.5') echo '9d926787066a081739e8200858338b4a69e837c3a821a33aca9db09dd4a41026' ;;
|
|
||||||
'8.6') echo '9631d53cf3e74bfa726893aee1f8994fee4e060c401335946dba2156f440f24c' ;;
|
|
||||||
'8.7') echo '544c35d6bd849ae8a5ed0bcea39ba677dc40f49df7d1835561582da2009b961d' ;;
|
|
||||||
'8.8') echo 'a4b4158601f8636cdeeab09bd76afb640030bb5b144aafe261a5e8af027dc612' ;;
|
|
||||||
'8.9') echo 'd725d707bfabd4dfdc958c624003b3c80accc03f7037b5122c4b1d0ef15cecab' ;;
|
|
||||||
'8.10') echo '5b9c5eb3f9fc2c94abaea57d90bd78747ca117ddbbf96c859d3741181a12bf2a' ;;
|
|
||||||
'8.10.1') echo '1541fa36599e12857140465f3c91a97409b4512501c26f9631fb113e392c5bd1' ;;
|
|
||||||
*) exit 1
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
contains() {
|
|
||||||
local e
|
|
||||||
for e in $2; do
|
|
||||||
[[ $e == $1 ]] && return 0;
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# key-value pairs of what gradle version (value) each gradle plugin version
|
|
||||||
# (key) should accept. plugin versions are actually prefixes and catch sub-
|
|
||||||
# versions as well. Pairs are taken from:
|
|
||||||
# https://developer.android.com/studio/releases/gradle-plugin#updating-gradle
|
|
||||||
d_gradle_plugin_ver_k=(8.4 8.3 8.2 8.1 8.0 7.4 7.3 7.2.0 7.1 7.0 4.2 4.1 4.0 3.6 3.5 3.4 3.3 3.2 3.1 3.0 2.3 2.2 2.1.3 2.1 2.0)
|
|
||||||
d_plugin_min_gradle_v=(8.6 8.4 8.2 8.0 8.0 7.5 7.4 7.3.3 7.2 7.0.2 6.7.1 6.5 6.1.1 5.6.4 5.4.1 5.1.1 4.10.1 4.6 4.4 4.1 3.3 2.14.1 2.14.1 2.12 2.12 2.4 2.4 2.3 2.2.1 2.2.1 2.1 2.1 1.12 1.12 1.12 1.11 1.10 1.9 1.8 1.6 1.6 1.4 1.4)
|
|
||||||
|
|
||||||
# All gradle versions we know about
|
|
||||||
plugin_v=(8.10.1 8.10 8.9 8.8 8.7 8.6 8.5 8.4 8.3 8.2.1 8.2 8.1.1 8.1 8.0.2 8.0.1 8.0 7.6.4 7.6.3 7.6.2 7.6.1 7.6 7.5.1 7.5 7.4.2 7.4.1 7.4 7.3.3 7.3.2 7.3.1 7.3 7.2 7.1.1 7.1 7.0.2 7.0.1 7.0 6.9.4 6.9.3 6.9.2 6.9.1 6.9 6.8.3 6.8.2 6.8.1 6.8 6.7.1 6.7 6.6.1 6.6 6.5.1 6.5 6.4.1 6.4 6.3 6.2.2 6.2.1 6.2 6.1.1 6.1 6.0.1 6.0 5.6.4 5.6.3 5.6.2 5.6.1 5.6 5.5.1 5.5 5.4.1 5.4 5.3.1 5.3 5.2.1 5.2 5.1.1 5.1 5.0 4.10.3 4.10.2 4.10.1 4.10 4.9 4.8.1 4.8 4.7 4.6 4.5.1 4.5 4.4.1 4.4 4.3.1 4.3 4.2.1 4.2 4.1 4.0.2 4.0.1 4.0 3.5.1 3.5 3.4.1 3.4 3.3 3.2.1 3.2 3.1 3.0 2.14.1 2.14 2.13 2.12 2.11 2.10 2.9 2.8 2.7 2.6 2.5 2.4 2.3 2.2.1 2.2 2.1 2.0 1.12 1.11 1.10 1.9 1.8 1.7 1.6 1.5 1.4 1.3 1.2 1.1 1.0 0.9.2 0.9.1 0.9 0.8 0.7)
|
|
||||||
|
|
||||||
v_all=${plugin_v[@]}
|
|
||||||
|
|
||||||
# Earliest file takes priority
|
|
||||||
# Last key takes priority if there are duplicates (matching java.util.Properties)
|
|
||||||
for f in {.,..}/gradle/wrapper/gradle-wrapper.properties; do
|
|
||||||
[[ -f $f ]] || continue
|
|
||||||
while IFS='' read -r line || [ -n "$line" ]; do
|
|
||||||
line=$(printf "$line" | tr -d '\r') # strip Windows linefeeds
|
|
||||||
if [[ $line == 'distributionUrl='* ]]; then
|
|
||||||
wrapper_ver=${line#*/gradle-}
|
|
||||||
wrapper_ver=${wrapper_ver%-*.zip}
|
|
||||||
fi
|
|
||||||
done < $f
|
|
||||||
[[ -n $wrapper_ver ]] && break
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -n $wrapper_ver ]]; then
|
|
||||||
v_found=$wrapper_ver
|
|
||||||
echo "Found $v_found via distributionUrl"
|
|
||||||
run_gradle
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Earliest takes priority
|
|
||||||
for f in {.,..}/build.gradle{,.kts}; do
|
|
||||||
[[ -f $f ]] || continue
|
|
||||||
while IFS='' read -r line || [ -n "$line" ]; do
|
|
||||||
line=$(printf "$line" | tr -d '\r') # strip Windows linefeeds
|
|
||||||
if [[ -z "$plugin_pver" && $line == *'com.android.tools.build:gradle:'* ]]; then
|
|
||||||
plugin_pver=${line#*[\'\"]com.android.tools.build:gradle:}
|
|
||||||
plugin_pver=${plugin_pver%[\'\"]*}
|
|
||||||
elif [[ -z "$wrapper_ver" && $line == *'gradleVersion = '* ]]; then
|
|
||||||
wrapper_ver=${line#*gradleVersion*=*[\'\"]}
|
|
||||||
wrapper_ver=${wrapper_ver%[\'\"]*}
|
|
||||||
fi
|
|
||||||
done < $f
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -n $wrapper_ver ]]; then
|
|
||||||
v_found=$wrapper_ver
|
|
||||||
echo "Found $v_found via gradleVersion"
|
|
||||||
run_gradle
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n $plugin_pver ]]; then
|
|
||||||
i=0
|
|
||||||
match=false
|
|
||||||
for k in "${d_gradle_plugin_ver_k[@]}"; do
|
|
||||||
if [[ $plugin_pver == ${k}* ]]; then
|
|
||||||
plugin_ver=${d_plugin_min_gradle_v[$i]}
|
|
||||||
match=true
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
let i++
|
|
||||||
done
|
|
||||||
if $match; then
|
|
||||||
v_found=$plugin_ver
|
|
||||||
echo "Found $v_found via gradle plugin version $k"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Find the highest version available
|
|
||||||
for v in ${plugin_v[*]}; do
|
|
||||||
if contains $v "${v_all[*]}"; then
|
|
||||||
v_def=$v
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -z $v_found ]]; then
|
|
||||||
echo "No suitable gradle version found - defaulting to $v_def"
|
|
||||||
v_found=$v_def
|
|
||||||
fi
|
|
||||||
|
|
||||||
run_gradle
|
|
|
@ -2,7 +2,7 @@
|
||||||
#
|
#
|
||||||
# Install all the client hooks
|
# Install all the client hooks
|
||||||
|
|
||||||
BASE_DIR="$(cd $(dirname $0); pwd -P)"
|
BASE_DIR="$(cd $(dirname $0) || exit; pwd -P)"
|
||||||
HOOK_NAMES="applypatch-msg pre-applypatch post-applypatch pre-commit prepare-commit-msg commit-msg post-commit pre-rebase post-checkout post-merge pre-receive update post-receive post-update pre-auto-gc"
|
HOOK_NAMES="applypatch-msg pre-applypatch post-applypatch pre-commit prepare-commit-msg commit-msg post-commit pre-rebase post-checkout post-merge pre-receive update post-receive post-update pre-auto-gc"
|
||||||
HOOK_DIR="$(git rev-parse --show-toplevel)/.git/hooks"
|
HOOK_DIR="$(git rev-parse --show-toplevel)/.git/hooks"
|
||||||
|
|
||||||
|
|
|
@ -9,9 +9,9 @@ exec 1>&2
|
||||||
files=`git diff-index --cached HEAD 2>&1 | sed 's/^:.* //' | uniq | cut -b100-500`
|
files=`git diff-index --cached HEAD 2>&1 | sed 's/^:.* //' | uniq | cut -b100-500`
|
||||||
if [ -z "$files" ]; then
|
if [ -z "$files" ]; then
|
||||||
PY_FILES="fdroid makebuildserver setup.py fdroidserver/*.py examples/*.py tests/*-release-checksums.py"
|
PY_FILES="fdroid makebuildserver setup.py fdroidserver/*.py examples/*.py tests/*-release-checksums.py"
|
||||||
PY_TEST_FILES="tests/*.TestCase"
|
PY_TEST_FILES="tests/test_*.py"
|
||||||
SH_FILES="hooks/pre-commit"
|
SH_FILES="hooks/pre-commit"
|
||||||
BASH_FILES="gradlew-fdroid jenkins-build-all jenkins-setup-build-environment jenkins-test completion/bash-completion buildserver/provision-*"
|
BASH_FILES="jenkins-build-all jenkins-setup-build-environment jenkins-test completion/bash-completion buildserver/provision-*"
|
||||||
RB_FILES="buildserver/Vagrantfile"
|
RB_FILES="buildserver/Vagrantfile"
|
||||||
YML_FILES=".*.yml .yamllint */*.yml */*.yaml"
|
YML_FILES=".*.yml .yamllint */*.yml */*.yaml"
|
||||||
else
|
else
|
||||||
|
@ -27,16 +27,16 @@ else
|
||||||
for f in $files; do
|
for f in $files; do
|
||||||
test -e $f || continue
|
test -e $f || continue
|
||||||
case $f in
|
case $f in
|
||||||
|
test_*.py)
|
||||||
|
PY_TEST_FILES+=" $f"
|
||||||
|
;;
|
||||||
*.py)
|
*.py)
|
||||||
PY_FILES+=" $f"
|
PY_FILES+=" $f"
|
||||||
;;
|
;;
|
||||||
*.TestCase)
|
|
||||||
PY_TEST_FILES+=" $f"
|
|
||||||
;;
|
|
||||||
*.rb)
|
*.rb)
|
||||||
RB_FILES+=" $f"
|
RB_FILES+=" $f"
|
||||||
;;
|
;;
|
||||||
*.yml|.*.yml|.yamllint)
|
*.yml|*.yaml|.yamllint)
|
||||||
YML_FILES+=" $f"
|
YML_FILES+=" $f"
|
||||||
;;
|
;;
|
||||||
*)
|
*)
|
||||||
|
@ -66,7 +66,7 @@ cmd_exists() {
|
||||||
}
|
}
|
||||||
|
|
||||||
find_command() {
|
find_command() {
|
||||||
for name in $@; do
|
for name in "$@"; do
|
||||||
for suff in "3" "-3" "-python3" ""; do
|
for suff in "3" "-3" "-python3" ""; do
|
||||||
cmd=${name}${suff}
|
cmd=${name}${suff}
|
||||||
if cmd_exists $cmd; then
|
if cmd_exists $cmd; then
|
||||||
|
@ -91,7 +91,7 @@ if [ "$PY_FILES $PY_TEST_FILES" != " " ]; then
|
||||||
err "pyflakes tests failed!"
|
err "pyflakes tests failed!"
|
||||||
fi
|
fi
|
||||||
# ignore vendored files
|
# ignore vendored files
|
||||||
if ! $PYDOCSTYLE --match='(?!apksigcopier|looseversion).*\.py' $PY_FILES $PY_TEST_FILES; then
|
if ! $PYDOCSTYLE --match='(?!apksigcopier|looseversion|setup|test_).*\.py' $PY_FILES $PY_TEST_FILES; then
|
||||||
err "pydocstyle tests failed!"
|
err "pydocstyle tests failed!"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
|
@@ -1,8 +1,7 @@
 
-FILES = ../fdroid $(wildcard ../fdroidserver/*.py) \
+FILES = $(wildcard ../fdroidserver/*.py) \
     $(wildcard /usr/lib/python3.*/argparse.py) \
-    $(wildcard /usr/lib/python3.*/optparse.py) \
-    $(wildcard /usr/lib/python3.*/getopt.py)
+    ../fdroid
 
 # these are the supported languages
 ALL_LINGUAS = $(shell sed -En 's,include locale/([^/]+)/.*,\1,p' ../MANIFEST.in)
@@ -1,17 +0,0 @@
-fdroid
-fdroidserver/btlog.py
-fdroidserver/build.py
-fdroidserver/checkupdates.py
-fdroidserver/common.py
-fdroidserver/deploy.py
-fdroidserver/import.py
-fdroidserver/init.py
-fdroidserver/install.py
-fdroidserver/lint.py
-fdroidserver/metadata.py
-fdroidserver/publish.py
-fdroidserver/rewritemeta.py
-fdroidserver/scanner.py
-fdroidserver/stats.py
-fdroidserver/update.py
-fdroidserver/verify.py
Most locale/*/LC_MESSAGES/fdroidserver.po translation diffs are suppressed here because they are too large. New translation files added:
    locale/ba/LC_MESSAGES/fdroidserver.po (2695 lines)
    locale/ga/LC_MESSAGES/fdroidserver.po (2716 lines)
    locale/nn/LC_MESSAGES/fdroidserver.po (2682 lines)
    locale/pa/LC_MESSAGES/fdroidserver.po (2681 lines)
@@ -5,9 +5,10 @@
 import json
 import os
 import re
-import requests
 import subprocess
 
+import git
+import requests
 
 projectbasedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 localedir = os.path.join(projectbasedir, 'locale')
@@ -18,37 +19,79 @@ if os.path.exists(cached_file):
     with open(cached_file) as fp:
         data = json.load(fp)
 else:
-    url = 'https://hosted.weblate.org/exports/stats/f-droid/fdroidserver/?format=json'
+    url = 'https://hosted.weblate.org/api/components/f-droid/fdroidserver/statistics/?format=json'
     r = requests.get(url)
     r.raise_for_status()
-    data = r.json()
+    data = r.json()['results']
 
 active = set()
 print('name locale translated approved error-free')
 for locale in sorted(data, key=lambda locale: locale['code']):
-    print('%26s' % locale['name'],
+    print(
+        '%26s' % locale['name'],
         '%8s' % locale['code'],
         '%0.1f%%' % locale['translated_percent'],
         '%0.1f%%' % locale['approved_percent'],
         '%0.1f%%' % (100 - locale['failing_percent']),
-        sep='\t')
+        sep='\t',
+    )
     if locale['translated_percent'] >= 90 and locale['failing'] < 5:
         active.add(locale['code'])
 
 manifest_file = os.path.join(projectbasedir, 'MANIFEST.in')
 with open(manifest_file) as fp:
-    for line in fp.readlines():
-        m = re.match(r'include locale/([^/]+)/.*', line)
-        if m:
-            active.add(m.group(1))
+    manifest_in = fp.read()
+    for m in re.findall(r'include locale/([^/]+)/LC_MESSAGES/fdroidserver.po', manifest_in):
+        active.add(m)
 
+repo = git.Repo(projectbasedir)
+weblate = repo.remotes.weblate
+weblate.fetch()
+upstream = repo.remotes.upstream
+upstream.fetch()
+
+if 'merge_weblate' in repo.heads:
+    merge_weblate = repo.heads['merge_weblate']
+    repo.create_tag(
+        'previous_merge_weblate',
+        ref=merge_weblate,
+        message=('Automatically created by %s' % __file__),
+    )
+else:
+    merge_weblate = repo.create_head('merge_weblate')
+merge_weblate.set_commit(upstream.refs.master)
+merge_weblate.checkout()
+
+active = sorted(active)
 manifest_lines = set()
 for locale in active:
-    manifest_lines.add('include locale/%s/LC_MESSAGES/fdroidserver.po\n' % locale)
+    po_file = f'locale/{locale}/LC_MESSAGES/fdroidserver.po'
+    manifest_lines.add(f'include {po_file}\n')
+    for commit in repo.iter_commits(
+        str(weblate.refs.master) + '...' + str(upstream.refs.master),
+        paths=[po_file],
+        max_count=10,
+        reverse=True,
+    ):
+        print(f'{locale}: git cherry-pick', commit)
+        repo.git.cherry_pick(commit)
 
 with open(manifest_file, 'a') as fp:
     for line in manifest_lines:
         if line:
             fp.write(line)
 
+# first filter duplicates
 subprocess.run(['sort', '-u', '-o', manifest_file, manifest_file])
+# then use a stable sort order
+subprocess.run(
+    ['sort', '--ignore-case', '--stable', '-o', manifest_file, manifest_file],
+    env={'LC_ALL': 'C'},
+)
+
+print('\tIf all else fails, try:')
+print('\tgit checkout -B merge_weblate weblate/master')
+print('\tgit rebase -i upstream/master')
+print('\t# select all in editor and cut all commit lines')
+print('\twl-paste | grep -Eo ".* \((%s)\) .*" | wl-copy' % '|'.join(active))
+print('\t# paste into editor, and make rebase\n')
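Two things in the rewritten script above are worth spelling out. First, it switches from the old exports/stats URL to the Weblate REST API, whose component statistics response wraps the per-language entries in a paginated object, hence data = r.json()['results']. Second, it now drives git through GitPython: it fetches the weblate and upstream remotes, keeps a merge_weblate branch, and cherry-picks recent Weblate commits that touch each active locale's .po file. A condensed sketch of those two pieces (assumptions: remotes named weblate and upstream exist, 'de' is only an example locale, and the requests and GitPython packages are installed):

    import git
    import requests

    # Per-language statistics from the Weblate REST API; entries live under 'results'.
    url = 'https://hosted.weblate.org/api/components/f-droid/fdroidserver/statistics/?format=json'
    reply = requests.get(url, timeout=60)
    reply.raise_for_status()
    for entry in reply.json()['results']:
        print(entry['code'], entry['translated_percent'])

    # Cherry-pick recent Weblate commits that touch one locale's .po file.
    repo = git.Repo('.')
    repo.remotes.weblate.fetch()
    repo.remotes.upstream.fetch()
    po_file = 'locale/de/LC_MESSAGES/fdroidserver.po'  # example locale
    for commit in repo.iter_commits('upstream/master..weblate/master',
                                    paths=[po_file], reverse=True):
        repo.git.cherry_pick(commit)  # same as running `git cherry-pick <sha>`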
Additional file diffs are suppressed because they are too large, and some files are not shown because too many files changed in this comparison.