import argparse
import configparser
import filecmp
import functools
import hashlib
import operator
import os
import os.path
import shutil
import subprocess
import tempfile
import types
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import (
    Dict,
    Iterable,
    List,
    NewType,
    Tuple,
)

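# sha-256 digests appear in two renderings: hexadecimal ("base16") and
# nix's base32 encoding.  See to_Digest16 / to_Digest32 below.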
Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)

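# One row of the channel page's file table: where to fetch a resource,
# its expected sha-256 digest, and (once fetched) its path in the nix store.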
class ChannelTableEntry(types.SimpleNamespace):
    absolute_url: str
    digest: Digest16
    file: str
    size: int
    url: str

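# All the state accumulated while pinning a single channel.  Fields are
# populated by make_channel and by the fetch/parse/git steps below.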
class Channel(types.SimpleNamespace):
    channel_html: bytes
    channel_url: str
    forwarded_url: str
    git_cachedir: str
    git_ref: str
    git_repo: str
    git_revision: str
    old_git_revision: str
    release_name: str
    table: Dict[str, ChannelTableEntry]

class VerificationError(Exception):
    pass

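# Progress/status reporting.  Each check prints a label followed by a
# right-aligned OK/FAIL; the first failure raises VerificationError.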
class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', flush=True)
        self.line_length += 1 + len(s)  # Unicode??

    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color))
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)

    def ok(self) -> None:
        self.result(True)

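# Recursively compare two directory trees (ignoring .git/), returning the
# (match, mismatch, errors) triple from filecmp.cmpfiles.  Symlinks to
# directories are compared as files.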
def compare(a: str, b: str) -> Tuple[List[str], List[str], List[str]]:

    def throw(error: OSError) -> None:
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    files = functools.reduce(
        operator.or_, (set(
            exclude_dot_git(
                recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)

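# Download the channel page and record where it redirected to (channel
# URLs forward to a release-specific URL).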
def fetch(v: Verification, channel: Channel) -> None:
    v.status('Fetching channel')
    request = urllib.request.urlopen(channel.channel_url, timeout=10)
    channel.channel_html = request.read()
    channel.forwarded_url = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', channel.channel_url != channel.forwarded_url)

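# Parse the channel HTML: extract the release name, the git commit it was
# built from, and the table of downloadable files with sizes and digests.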
def parse_channel(v: Verification, channel: Channel) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(channel.channel_html)
    v.ok()

    v.status('Extracting release name:')
    title_name = d.getElementsByTagName(
        'title')[0].firstChild.nodeValue.split()[2]
    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
    v.status(title_name)
    v.result(title_name == h1_name)
    channel.release_name = title_name

    v.status('Extracting git commit:')
    git_commit_node = d.getElementsByTagName('tt')[0]
    channel.git_revision = git_commit_node.firstChild.nodeValue
    v.status(channel.git_revision)
    v.ok()
    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    channel.table = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        channel.table[name] = ChannelTableEntry(
            url=url, digest=digest, size=size)
    v.ok()

def digest_string(s: bytes) -> Digest16:
    return Digest16(hashlib.sha256(s).hexdigest())


def digest_file(filename: str) -> Digest16:
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # pylint: disable=cell-var-from-loop
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())

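# Convert between the two digest renderings by shelling out to
# `nix to-base16` / `nix to-base32`.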
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    v.status('Converting digest to base16')
    process = subprocess.run(
        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())


def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    v.status('Converting digest to base32')
    process = subprocess.run(
        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())

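# Fetch a URL into the nix store via nix-prefetch-url, then independently
# re-hash the store path to confirm the expected digest.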
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    assert empty == ''
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path

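# Fetch the git-revision file and nixexprs tarball listed in the channel
# table, and check that git-revision matches the commit shown on the page.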
def fetch_resources(v: Verification, channel: Channel) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = channel.table[resource]
        fields.absolute_url = urllib.parse.urljoin(
            channel.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(
            v, fields.absolute_url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    v.result(
        open(
            channel.table['git-revision'].file).read(999) == channel.git_revision)

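# Make sure channel.git_revision is available in a local bare clone,
# fetching channel.git_ref if needed, and verify the expected ancestry.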
def git_fetch(v: Verification, channel: Channel) -> None:
    # It would be nice if we could share the nix git cache, but as of the time
    # of writing it is transitioning from gitv2 (deprecated) to gitv3 (not ready
    # yet), and trying to straddle them both is too far into nix implementation
    # details for my comfort. So we re-implement here half of nix.fetchGit.
    # :(

    # TODO: Consider using pyxdg to find this path.
    channel.git_cachedir = os.path.expanduser(
        '~/.cache/nix-pin-channel/git/%s' %
        digest_string(
            channel.git_repo.encode()))
    if not os.path.exists(channel.git_cachedir):
        v.status("Initializing git repo")
        process = subprocess.run(
            ['git', 'init', '--bare', channel.git_cachedir])
        v.result(process.returncode == 0)

    have_rev = False
    if hasattr(channel, 'git_revision'):
        v.status('Checking if we already have this rev:')
        process = subprocess.run(
            ['git', '-C', channel.git_cachedir, 'cat-file', '-e', channel.git_revision])
        if process.returncode == 0:
            v.status('yes')
        if process.returncode == 1:
            v.status('no')
        v.result(process.returncode == 0 or process.returncode == 1)
        have_rev = process.returncode == 0

    if not have_rev:
        v.status(
            'Fetching ref "%s" from %s' %
            (channel.git_ref, channel.git_repo))
        # We don't use --force here because we want to abort and freak out if
        # forced updates are happening.
        process = subprocess.run(['git',
                                  '-C',
                                  channel.git_cachedir,
                                  'fetch',
                                  channel.git_repo,
                                  '%s:%s' % (channel.git_ref,
                                             channel.git_ref)])
        v.result(process.returncode == 0)
        if hasattr(channel, 'git_revision'):
            v.status('Verifying that fetch retrieved this rev')
            process = subprocess.run(
                ['git', '-C', channel.git_cachedir, 'cat-file', '-e', channel.git_revision])
            v.result(process.returncode == 0)

    if not hasattr(channel, 'git_revision'):
        channel.git_revision = open(
            os.path.join(
                channel.git_cachedir,
                'refs',
                'heads',
                channel.git_ref)).read(999).strip()

    v.status('Verifying rev is an ancestor of ref')
    process = subprocess.run(['git',
                              '-C',
                              channel.git_cachedir,
                              'merge-base',
                              '--is-ancestor',
                              channel.git_revision,
                              channel.git_ref])
    v.result(process.returncode == 0)

    if hasattr(channel, 'old_git_revision'):
        v.status(
            'Verifying rev is an ancestor of previous rev %s' %
            channel.old_git_revision)
        process = subprocess.run(['git',
                                  '-C',
                                  channel.git_cachedir,
                                  'merge-base',
                                  '--is-ancestor',
                                  channel.old_git_revision,
                                  channel.git_revision])
        v.result(process.returncode == 0)

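# Compare the extracted channel tarball against the git checkout.  A few
# files are generated when the channel is built and are expected to appear
# only in the tarball; anything else that differs is a failure.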
def compare_tarball_and_git(
        v: Verification,
        channel: Channel,
        channel_contents: str,
        git_contents: str) -> None:
    v.status('Comparing channel tarball with git checkout')
    match, mismatch, errors = compare(os.path.join(
        channel_contents, channel.release_name), git_contents)
    v.ok()
    v.check('%d files match' % len(match), len(match) > 0)
    v.check('%d files differ' % len(mismatch), len(mismatch) == 0)
    expected_errors = [
        '.git-revision',
        '.version-suffix',
        'nixpkgs',
        'programs.sqlite',
        'svn-revision']
    benign_errors = []
    for ee in expected_errors:
        if ee in errors:
            errors.remove(ee)
            benign_errors.append(ee)
    v.check(
        '%d unexpected incomparable files' %
        len(errors),
        len(errors) == 0)
    v.check(
        '(%d of %d expected incomparable files)' %
        (len(benign_errors),
         len(expected_errors)),
        len(benign_errors) == len(expected_errors))

def extract_tarball(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Extracting tarball %s' %
             channel.table['nixexprs.tar.xz'].file)
    shutil.unpack_archive(
        channel.table['nixexprs.tar.xz'].file,
        dest)
    v.ok()

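# Export channel.git_revision into dest by piping `git archive` into tar.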
def git_checkout(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Checking out corresponding git revision')
    git = subprocess.Popen(['git',
                            '-C',
                            channel.git_cachedir,
                            'archive',
                            channel.git_revision],
                           stdout=subprocess.PIPE)
    tar = subprocess.Popen(
        ['tar', 'x', '-C', dest, '-f', '-'], stdin=git.stdout)
    git.stdout.close()
    tar.wait()
    git.wait()
    v.result(git.returncode == 0 and tar.returncode == 0)

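# Verify that the .git-revision and .version-suffix files inside the
# tarball agree with the channel page's commit and release name.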
def check_channel_metadata(
        v: Verification,
        channel: Channel,
        channel_contents: str) -> None:
    v.status('Verifying git commit in channel tarball')
    v.result(
        open(
            os.path.join(
                channel_contents,
                channel.release_name,
                '.git-revision')).read(999) == channel.git_revision)

    v.status(
        'Verifying version-suffix is a suffix of release name %s:' %
        channel.release_name)
    version_suffix = open(
        os.path.join(
            channel_contents,
            channel.release_name,
            '.version-suffix')).read(999)
    v.status(version_suffix)
    v.result(channel.release_name.endswith(version_suffix))

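# Extract the tarball and the git revision into temporary directories and
# confirm that their contents match.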
def check_channel_contents(v: Verification, channel: Channel) -> None:
    with tempfile.TemporaryDirectory() as channel_contents, \
            tempfile.TemporaryDirectory() as git_contents:

        extract_tarball(v, channel, channel_contents)
        check_channel_metadata(v, channel, channel_contents)

        git_checkout(v, channel, git_contents)

        compare_tarball_and_git(v, channel, channel_contents, git_contents)

        v.status('Removing temporary directories')
    v.ok()

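# The full verification pipeline for one channel.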
def pin_channel(v: Verification, channel: Channel) -> None:
    fetch(v, channel)
    parse_channel(v, channel)
    fetch_resources(v, channel)
    git_fetch(v, channel)
    check_channel_contents(v, channel)

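# Build a Channel from a config section.  Any git_revision already pinned
# becomes old_git_revision so the new pin can be checked against it.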
def make_channel(conf: configparser.SectionProxy) -> Channel:
    channel = Channel(**dict(conf.items()))
    if hasattr(channel, 'git_revision'):
        channel.old_git_revision = channel.git_revision
        del channel.git_revision
    return channel

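# Pin every section of the channels file.  Sections with a channel_url are
# full channels (verified end-to-end); the rest are plain git repositories
# pinned to whatever their ref currently points at.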
def pin(args: argparse.Namespace) -> None:
    v = Verification()
    config = configparser.ConfigParser()
    config.read_file(open(args.channels_file), args.channels_file)
    for section in config.sections():
        channel = make_channel(config[section])
        if 'channel_url' in config[section]:
            pin_channel(v, channel)
            config[section]['name'] = channel.release_name
            config[section]['tarball_url'] = channel.table['nixexprs.tar.xz'].absolute_url
            config[section]['tarball_sha256'] = channel.table['nixexprs.tar.xz'].digest
        else:
            git_fetch(v, channel)
        config[section]['git_revision'] = channel.git_revision

    with open(args.channels_file, 'w') as configfile:
        config.write(configfile)

def main() -> None:
    parser = argparse.ArgumentParser(prog='pinch')
    subparsers = parser.add_subparsers(dest='mode', required=True)
    parser_pin = subparsers.add_parser('pin')
    parser_pin.add_argument('channels_file', type=str)
    parser_pin.set_defaults(func=pin)
    args = parser.parse_args()
    args.func(args)


if __name__ == '__main__':
    main()