#!/usr/bin/python3
import sys
import apt_pkg
import argparse
from deplib.login import LP
from deplib.ppa import PPA
from lazr.restfulclient.errors import BadRequest, ServerError
# Set of package names which signify a hit for a Python 3.6 rebuild.
# A binary package depending on any of these gets marked for rebuilding.
REBUILD_HITS = {
    'python3',
    'python3-all',
}
# Set of packages to ignore for uploading/syncing to the PPA.  These are
# source package names (they are the toolchain itself, not candidates).
BLACKLISTED = {
    'python3-defaults',
    'python3.5',
    'python3.6',
}
# Heuristically determined.  Default number of source packages to sync per
# Launchpad request; sync() ratchets this down when progress stalls.
CHUNK_SIZE = 200
COMMASPACE = ', '
# Module-level apt cache, initialized once at import time and shared by
# check_package()/calculate_rebuilds().  The None argument suppresses the
# progress reporter -- TODO confirm against python-apt docs.
apt_pkg.init()
cache = apt_pkg.Cache(None)
def parseargs():
    """Build the command-line interface and return the parsed namespace."""
    parser = argparse.ArgumentParser(
        description='Find Python 3 dependencies and rebuild in a PPA')
    # Bind the method once; every option below registers through `add`.
    add = parser.add_argument
    add('-n', '--dry-run',
        action='store_true', default=False,
        help='Do everything but actually copy the packages to the PPA')
    add('-p', '--ppa',
        help='Sync to given PPA; if not given no sync is performed')
    add('-m', '--main-only',
        action='store_true', default=False,
        help='Limit search to only main packages')
    add('-r', '--revdeps',
        action='store_true', default=False,
        help='Chase reverse dependencies of first-order rebuilds')
    add('-s', '--strict',
        action='store_true', default=False,
        help="""Be strict about finding dependencies.  Specifically, with
        this option set, packages with the prefix `python3-` will not be
        specially considered (they may still be found if they have the
        proper dependencies).""")
    add('-d', '--display',
        default='none',
        choices=['sources', 'binaries', 'all', 'none'],
        help="""Control which set of packages is displayed before uploading.
        You can display either the set of source packages that will be copied,
        the binary packages that matched the rebuild criteria, all packages,
        or none.  The lists are displayed in alphabetical order, and if you
        display them all, binary packages are shown before source packages.""")
    add('--verify',
        action='store_true', default=False,
        help="""Just verify that all candidate source packages are present in
        the PPA, and display the ones which are missing.""")
    add('-l', '--package-list',
        metavar='FILE',
        help="""Read the package list to sync from the given FILE""")
    add('-c', '--chunk-size',
        type=int, default=CHUNK_SIZE,
        help='Package chunk size')
    return parser.parse_args()
def check_package(pkg, binaries, args):
    """Add pkg's name to *binaries* if it qualifies for a rebuild.

    A binary package is a rebuild hit when any of its versions has a
    first-order Depends on a package named in REBUILD_HITS, or (unless
    args.strict) on any package whose name starts with 'python3-'.
    With args.main_only, the whole check is abandoned as soon as any
    index file for a version lies outside the main component.
    """
    # Add the binary package name to the rebuild binaries if it has a
    # first-order dependency on any package named in REBUILD_HITS, and
    # optionally a 'python3-' prefix.
    for version in pkg.version_list:
        for pfile, index in version.file_list:
            # NOTE(review): bails out of the entire function, not just this
            # version, on the first non-main index file -- confirm intended.
            if pfile.component != 'main' and args.main_only:
                return
        # These are binary packages, so only Depends.
        for depver in version.depends_list.get('Depends', []):
            for dep in depver:
                dep_name_arch = dep.target_pkg.name
                # Split off any architecture specification (e.g. 'foo:any').
                name, colon, arch = dep_name_arch.partition(':')
                prefixed = not args.strict and name.startswith('python3-')
                if name in REBUILD_HITS or prefixed:
                    # This package needs to be rebuilt.
                    binaries.add(pkg.name)
                    # That's good enough.
                    return
def calculate_rebuilds(args):
    """Return the set of binary package names that need rebuilding.

    First-order hits come from check_package() over the whole apt cache;
    with args.revdeps, reverse dependencies of every hit are chased
    transitively and added too.
    """
    hits = set()
    for package in cache.packages:
        check_package(package, hits, args)
    rebuilds = set(hits)
    if not args.revdeps:
        return rebuilds
    # For each package needing a rebuild, recursively chase its reverse
    # dependencies and rebuild them.  `pending` is the work queue.
    pending = set(hits)
    while pending:
        current = pending.pop()
        try:
            for revdep in cache[current].rev_depends_list:
                parent = revdep.parent_pkg.name
                if parent not in rebuilds:
                    # First sighting of this binary package: mark it for
                    # rebuilding and queue its own reverse dependencies.
                    rebuilds.add(parent)
                    pending.add(parent)
        except KeyError:
            # This binary package isn't in the cache for some reason
            # (possibly we've chased into multiverse?).  Just ignore it.
            print('No cache entry for', current)
    return rebuilds
def calculate_sources(binaries):
    """Map binary package names to source package names, minus BLACKLISTED."""
    found = set()
    records = apt_pkg.SourceRecords()
    for name in binaries:
        records.restart()
        # lookup() steps through each matching source record in turn and
        # returns a falsy value once the records are exhausted.
        while True:
            if not records.lookup(name):
                break
            found.add(records.package)
    return found.difference(BLACKLISTED)
def prune(ppa, sources):
    """Return the subset of *sources* with no build records in the PPA.

    A name with zero build records has never been uploaded, so it still
    needs uploading.
    """
    return {
        name for name in sources
        if len(ppa.archive.getBuildRecords(source_name=name)) == 0
    }
def sync(lp, ppa, sources, args):
    """Copy the given source packages into the PPA in retry-friendly chunks.

    Unless args.dry_run, requests syncSources() for chunks of up to
    args.chunk_size names at a time.  On Launchpad timeouts (ServerError)
    or partial uploads (BadRequest), the not-yet-uploaded names are
    requeued.  If several consecutive rounds make no progress the chunk
    size is ratcheted down; at chunk size 1 with no progress, the
    remaining names are printed for manual syncing and we give up.
    """
    print('Copy {}/{} -> ppa:{}/{}'.format(
        lp.archive.name, lp.devel.name,
        ppa.username, ppa.ppa))
    if not args.dry_run:
        # If we try to sync the entire set of packages, we'll get a Launchpad
        # timeout, so chunkify the requests.  Make a copy so reporting can
        # still tell us the full sources list.
        syncables = sources.copy()
        synced = set()
        chunk_size = args.chunk_size
        no_progress_count = 0
        last_progress_highwater = 0
        while len(syncables) > 0:
            # Pull up to chunk_size arbitrary names out of the work set.
            consume = min(chunk_size, len(syncables))
            uploads = set(syncables.pop() for i in range(consume))
            print('Total/todo/synced: {}/{}/{}'.format(
                len(sources), len(syncables), len(synced)))
            # syncSources() requires 'devel' version of Launchpad API.
            # Otherwise, use the deprecated copyPackages().
            need_upload = None
            try:
                ppa.archive.syncSources(
                    from_archive=lp.archive,
                    from_series=lp.devel.name,
                    # The whole point is to get the PPA to build the binaries!
                    include_binaries=False,
                    source_names=list(uploads),
                    to_pocket='Release',
                    to_series=lp.devel.name,
                    )
            except ServerError:
                # Probably a timeout; try this chunk again later.
                need_upload = prune(ppa, uploads)
                if len(need_upload) > 0:
                    print('Time out;', end=' ')
            except BadRequest:
                # Some of the packages in this chunk have already been
                # upload.  Retry any that haven't.
                need_upload = prune(ppa, uploads)
                if len(need_upload) > 0:
                    print('Partial;', end=' ')
            # Requeue any source packages that didn't get uploaded.
            # need_upload may be the empty set or None (None means the
            # syncSources() call succeeded outright).
            if need_upload:
                print('Requeuing:', COMMASPACE.join(need_upload))
                syncables.update(need_upload)
                synced.update(uploads - need_upload)
            else:
                synced.update(uploads)
            # Are we still making progress?
            if len(synced) == last_progress_highwater:
                no_progress_count += 1
                print('No progress count:', no_progress_count)
                if no_progress_count > 3:
                    if chunk_size <= 1:
                        # We're just not making any progress, so bail and
                        # provide enough information to do these manually.
                        print('Progress is stalled; sync these manually:')
                        for name in sorted(syncables):
                            print(name)
                        return
                    # Smaller chunks are likelier to finish before the
                    # Launchpad request times out.
                    chunk_size -= 1
                    print('Ratcheting down chunk_size:', chunk_size)
                    no_progress_count = 0
            else:
                # We made some progress so keep trying.
                last_progress_highwater = len(synced)
def verify(ppa, sources):
    """Print the candidate source packages not yet published in the PPA.

    Queries the PPA's Release-pocket publishing history, keeps only the
    entries whose status is 'Published', and reports any name in *sources*
    absent from that set.  Prints nothing when nothing is missing.
    """
    spph = ppa.archive.getPublishedSources(pocket='Release')
    published = {
        sp.source_package_name
        for sp in spph
        if sp.status == 'Published'
    }
    missing = sources - published
    # Bug fix: reuse the already-computed `missing` instead of recomputing
    # `sources - published` for the report loop.
    if missing:
        print('Missing sources:')
        for name in sorted(missing):
            print(name)
def main():
    """Drive the rebuild search and optional PPA verification/sync.

    Returns a process exit code: 0 on success, 1 on lookup/usage errors.
    """
    args = parseargs()
    if args.ppa:
        lp = LP()
        print('logged in as:', lp.me.display_name)
        ppa = PPA.from_url(lp, args.ppa)
        if ppa is None:
            print('PPA not found:', args.ppa)
            # Bug fix: return a failure status; previously this returned
            # None, so the shell saw a successful exit (0) on this error.
            return 1
    else:
        lp = ppa = None
    if args.package_list is not None:
        with open(args.package_list, 'r', encoding='utf-8') as fp:
            sources = {line.strip() for line in fp}
        # Bug fix: no cache scan happens on this path, so define `binaries`
        # here; otherwise `--display binaries/all` below raises NameError.
        binaries = set()
    else:
        binaries = calculate_rebuilds(args)
        sources = calculate_sources(binaries)
    if args.verify:
        # Bug fix: --verify without --ppa used to crash with AttributeError
        # on ppa.archive; report the usage error explicitly instead.
        if ppa is None:
            print('--verify requires --ppa')
            return 1
        verify(ppa, sources)
        return 0
    if args.display in ('binaries', 'all'):
        for name in sorted(binaries):
            print(name)
    if args.display in ('sources', 'all'):
        for name in sorted(sources):
            print(name)
    if args.ppa:
        sync(lp, ppa, sources, args)
    return 0


if __name__ == '__main__':
    sys.exit(main())