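# pinch: pin Nix channels to exact revisions and install the pinned channels
# with nix-env.  Channels described by a channel URL are verified against the
# corresponding git revision before being pinned.
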
import argparse
import configparser
import filecmp
import functools
import getpass
import hashlib
import operator
import os
import os.path
import shlex
import shutil
import subprocess
import sys
import tempfile
import types
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import (
    Dict,
    Iterable,
    List,
    NewType,
    Tuple,
)

# Use xdg module when it's less painful to have as a dependency


class XDG(types.SimpleNamespace):
    XDG_CACHE_HOME: str


xdg = XDG(
    XDG_CACHE_HOME=os.getenv(
        'XDG_CACHE_HOME',
        os.path.expanduser('~/.cache')))


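# SHA-256 digests in hex (base16) and in Nix's base-32 encoding, kept as
# distinct types so the two representations can't be accidentally mixed up.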
Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)


class ChannelTableEntry(types.SimpleNamespace):
    absolute_url: str
    digest: Digest16
    file: str
    size: int
    url: str


class Channel(types.SimpleNamespace):
    alias_of: str
    channel_html: bytes
    channel_url: str
    forwarded_url: str
    git_ref: str
    git_repo: str
    git_revision: str
    old_git_revision: str
    release_name: str
    table: Dict[str, ChannelTableEntry]


class VerificationError(Exception):
    pass


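# Prints progress messages like "Fetching channel ... OK" to stderr and
# raises VerificationError as soon as any check fails.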
class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', file=sys.stderr, flush=True)
        self.line_length += 1 + len(s)  # Unicode??

    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color), file=sys.stderr)
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)

    def ok(self) -> None:
        self.result(True)


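# Byte-compares the union of files in two directory trees (ignoring anything
# under .git/) and returns filecmp.cmpfiles-style (match, mismatch, errors)
# lists; symlinked directories are treated as files.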
def compare(a: str, b: str) -> Tuple[List[str], List[str], List[str]]:

    def throw(error: OSError) -> None:
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    files = functools.reduce(
        operator.or_, (set(
            exclude_dot_git(
                recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)


def fetch(v: Verification, channel: Channel) -> None:
    v.status('Fetching channel')
    request = urllib.request.urlopen(channel.channel_url, timeout=10)
    channel.channel_html = request.read()
    channel.forwarded_url = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', channel.channel_url != channel.forwarded_url)


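# The channel description page is parsed as XML: the release name comes from
# <title> and <h1>, the pinned commit from the <tt> element labelled
# "Git commit ", and the file table (name, size, sha-256 digest) from the
# <tr> rows.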
def parse_channel(v: Verification, channel: Channel) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(channel.channel_html)
    v.ok()

    v.status('Extracting release name:')
    title_name = d.getElementsByTagName(
        'title')[0].firstChild.nodeValue.split()[2]
    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
    v.status(title_name)
    v.result(title_name == h1_name)
    channel.release_name = title_name

    v.status('Extracting git commit:')
    git_commit_node = d.getElementsByTagName('tt')[0]
    channel.git_revision = git_commit_node.firstChild.nodeValue
    v.status(channel.git_revision)
    v.ok()
    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    channel.table = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        channel.table[name] = ChannelTableEntry(
            url=url, digest=digest, size=size)
    v.ok()


def digest_string(s: bytes) -> Digest16:
    return Digest16(hashlib.sha256(s).hexdigest())


def digest_file(filename: str) -> Digest16:
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # pylint: disable=cell-var-from-loop
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())


def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    v.status('Converting digest to base16')
    process = subprocess.run(
        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())


def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    v.status('Converting digest to base32')
    process = subprocess.run(
        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())


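# Fetches a URL into the Nix store with nix-prefetch-url and double-checks
# the result: once against nix-prefetch-url's reported hash and once by
# re-hashing the store path ourselves.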
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    assert empty == ''
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path


def fetch_resources(v: Verification, channel: Channel) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = channel.table[resource]
        fields.absolute_url = urllib.parse.urljoin(
            channel.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(
            v, fields.absolute_url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    v.result(
        open(
            channel.table['git-revision'].file).read(999) == channel.git_revision)


def git_cachedir(git_repo: str) -> str:
    return os.path.join(
        xdg.XDG_CACHE_HOME,
        'pinch/git',
        digest_string(git_repo.encode()))


def tarball_cache_file(channel: Channel) -> str:
    return os.path.join(
        xdg.XDG_CACHE_HOME,
        'pinch/git-tarball',
        '%s-%s-%s' %
        (digest_string(channel.git_repo.encode()),
         channel.git_revision,
         channel.release_name))


def verify_git_ancestry(v: Verification, channel: Channel) -> None:
    cachedir = git_cachedir(channel.git_repo)
    v.status('Verifying rev is an ancestor of ref')
    process = subprocess.run(['git',
                              '-C',
                              cachedir,
                              'merge-base',
                              '--is-ancestor',
                              channel.git_revision,
                              channel.git_ref])
    v.result(process.returncode == 0)

    if hasattr(channel, 'old_git_revision'):
        v.status(
            'Verifying rev is an ancestor of previous rev %s' %
            channel.old_git_revision)
        process = subprocess.run(['git',
                                  '-C',
                                  cachedir,
                                  'merge-base',
                                  '--is-ancestor',
                                  channel.old_git_revision,
                                  channel.git_revision])
        v.result(process.returncode == 0)


def git_fetch(v: Verification, channel: Channel) -> None:
    # It would be nice if we could share the nix git cache, but as of the time
    # of writing it is transitioning from gitv2 (deprecated) to gitv3 (not
    # ready yet), and trying to straddle them both is too far into nix
    # implementation details for my comfort.  So we re-implement here half of
    # nix.fetchGit.  :(

    cachedir = git_cachedir(channel.git_repo)
    if not os.path.exists(cachedir):
        v.status("Initializing git repo")
        process = subprocess.run(
            ['git', 'init', '--bare', cachedir])
        v.result(process.returncode == 0)

    v.status('Fetching ref "%s" from %s' % (channel.git_ref, channel.git_repo))
    # We don't use --force here because we want to abort and freak out if
    # forced updates are happening.
    process = subprocess.run(['git',
                              '-C',
                              cachedir,
                              'fetch',
                              channel.git_repo,
                              '%s:%s' % (channel.git_ref,
                                         channel.git_ref)])
    v.result(process.returncode == 0)

    if hasattr(channel, 'git_revision'):
        v.status('Verifying that fetch retrieved this rev')
        process = subprocess.run(
            ['git', '-C', cachedir, 'cat-file', '-e', channel.git_revision])
        v.result(process.returncode == 0)
    else:
        channel.git_revision = open(
            os.path.join(
                cachedir,
                'refs',
                'heads',
                channel.git_ref)).read(999).strip()

    verify_git_ancestry(v, channel)


def ensure_git_rev_available(v: Verification, channel: Channel) -> None:
    cachedir = git_cachedir(channel.git_repo)
    if os.path.exists(cachedir):
        v.status('Checking if we already have this rev:')
        process = subprocess.run(
            ['git', '-C', cachedir, 'cat-file', '-e', channel.git_revision])
        if process.returncode == 0:
            v.status('yes')
        if process.returncode == 1:
            v.status('no')
        v.result(process.returncode == 0 or process.returncode == 1)
        if process.returncode == 0:
            verify_git_ancestry(v, channel)
            return
    git_fetch(v, channel)


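# Compares the unpacked channel tarball against a git checkout of the same
# revision.  A few files are expected to be incomparable (they exist on only
# one side or are not regular files); anything else that cannot be compared,
# or that differs, fails the check.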
def compare_tarball_and_git(
        v: Verification,
        channel: Channel,
        channel_contents: str,
        git_contents: str) -> None:
    v.status('Comparing channel tarball with git checkout')
    match, mismatch, errors = compare(os.path.join(
        channel_contents, channel.release_name), git_contents)
    v.ok()
    v.check('%d files match' % len(match), len(match) > 0)
    v.check('%d files differ' % len(mismatch), len(mismatch) == 0)
    expected_errors = [
        '.git-revision',
        '.version-suffix',
        'nixpkgs',
        'programs.sqlite',
        'svn-revision']
    benign_errors = []
    for ee in expected_errors:
        if ee in errors:
            errors.remove(ee)
            benign_errors.append(ee)
    v.check(
        '%d unexpected incomparable files' %
        len(errors),
        len(errors) == 0)
    v.check(
        '(%d of %d expected incomparable files)' %
        (len(benign_errors),
         len(expected_errors)),
        len(benign_errors) == len(expected_errors))


def extract_tarball(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Extracting tarball %s' %
             channel.table['nixexprs.tar.xz'].file)
    shutil.unpack_archive(
        channel.table['nixexprs.tar.xz'].file,
        dest)
    v.ok()


def git_checkout(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Checking out corresponding git revision')
    git = subprocess.Popen(['git',
                            '-C',
                            git_cachedir(channel.git_repo),
                            'archive',
                            channel.git_revision],
                           stdout=subprocess.PIPE)
    tar = subprocess.Popen(
        ['tar', 'x', '-C', dest, '-f', '-'], stdin=git.stdout)
    if git.stdout:
        git.stdout.close()
    tar.wait()
    git.wait()
    v.result(git.returncode == 0 and tar.returncode == 0)


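# Builds a channel-style tarball ("<release_name>/...") from the cached git
# repo with "git archive | xz", adds it to the Nix store, and records the
# resulting store path in a cache file so later runs can reuse it.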
def git_get_tarball(v: Verification, channel: Channel) -> str:
    cache_file = tarball_cache_file(channel)
    if os.path.exists(cache_file):
        cached_tarball = open(cache_file).read(9999)
        if os.path.exists(cached_tarball):
            return cached_tarball

    with tempfile.TemporaryDirectory() as output_dir:
        output_filename = os.path.join(
            output_dir, channel.release_name + '.tar.xz')
        with open(output_filename, 'w') as output_file:
            v.status(
                'Generating tarball for git revision %s' %
                channel.git_revision)
            git = subprocess.Popen(['git',
                                    '-C',
                                    git_cachedir(channel.git_repo),
                                    'archive',
                                    '--prefix=%s/' % channel.release_name,
                                    channel.git_revision],
                                   stdout=subprocess.PIPE)
            xz = subprocess.Popen(['xz'], stdin=git.stdout, stdout=output_file)
            xz.wait()
            git.wait()
            v.result(git.returncode == 0 and xz.returncode == 0)

        v.status('Putting tarball in Nix store')
        process = subprocess.run(
            ['nix-store', '--add', output_filename], capture_output=True)
        v.result(process.returncode == 0)
        store_tarball = process.stdout.decode().strip()

        os.makedirs(os.path.dirname(cache_file), exist_ok=True)
        open(cache_file, 'w').write(store_tarball)
        return store_tarball


def check_channel_metadata(
        v: Verification,
        channel: Channel,
        channel_contents: str) -> None:
    v.status('Verifying git commit in channel tarball')
    v.result(
        open(
            os.path.join(
                channel_contents,
                channel.release_name,
                '.git-revision')).read(999) == channel.git_revision)

    v.status(
        'Verifying version-suffix is a suffix of release name %s:' %
        channel.release_name)
    version_suffix = open(
        os.path.join(
            channel_contents,
            channel.release_name,
            '.version-suffix')).read(999)
    v.status(version_suffix)
    v.result(channel.release_name.endswith(version_suffix))


def check_channel_contents(v: Verification, channel: Channel) -> None:
    with tempfile.TemporaryDirectory() as channel_contents, \
            tempfile.TemporaryDirectory() as git_contents:

        extract_tarball(v, channel, channel_contents)
        check_channel_metadata(v, channel, channel_contents)

        git_checkout(v, channel, git_contents)

        compare_tarball_and_git(v, channel, channel_contents, git_contents)

        v.status('Removing temporary directories')
    v.ok()


def pin_channel(v: Verification, channel: Channel) -> None:
    fetch(v, channel)
    parse_channel(v, channel)
    fetch_resources(v, channel)
    ensure_git_rev_available(v, channel)
    check_channel_contents(v, channel)


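# Builds a release name for a git-only channel from the repo basename, the
# commit timestamp, and an abbreviated hash (git log --format=%ct-%h).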
def git_revision_name(v: Verification, channel: Channel) -> str:
    v.status('Getting commit date')
    process = subprocess.run(['git',
                              '-C',
                              git_cachedir(channel.git_repo),
                              'log',
                              '-n1',
                              '--format=%ct-%h',
                              '--abbrev=11',
                              channel.git_revision],
                             capture_output=True)
    v.result(process.returncode == 0 and process.stdout != b'')
    return '%s-%s' % (os.path.basename(channel.git_repo),
                      process.stdout.decode().strip())


def read_config(filename: str) -> configparser.ConfigParser:
    config = configparser.ConfigParser()
    config.read_file(open(filename), filename)
    return config


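# "pin" handler.  Each section of the channels file describes one channel,
# e.g. (hypothetical values; the key names match the code below):
#
#   [nixos]
#   channel_url = https://channels.nixos.org/nixos-20.03
#   git_repo = https://github.com/NixOS/nixpkgs.git
#   git_ref = nixos-20.03
#
# Pinning adds release_name, tarball_url, and tarball_sha256 (for channel_url
# sections) or release_name and git_revision (for plain git sections) and
# writes the file back out.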
def pin(args: argparse.Namespace) -> None:
    v = Verification()
    config = read_config(args.channels_file)
    for section in config.sections():
        if args.channels and section not in args.channels:
            continue

        channel = Channel(**dict(config[section].items()))

        if hasattr(channel, 'alias_of'):
            assert not hasattr(channel, 'git_repo')
            continue

        if hasattr(channel, 'git_revision'):
            channel.old_git_revision = channel.git_revision
            del channel.git_revision

        if 'channel_url' in config[section]:
            pin_channel(v, channel)
            config[section]['release_name'] = channel.release_name
            config[section]['tarball_url'] = channel.table['nixexprs.tar.xz'].absolute_url
            config[section]['tarball_sha256'] = channel.table['nixexprs.tar.xz'].digest
        else:
            git_fetch(v, channel)
            config[section]['release_name'] = git_revision_name(v, channel)
            config[section]['git_revision'] = channel.git_revision

    with open(args.channels_file, 'w') as configfile:
        config.write(configfile)


def fetch_channel(
        v: Verification,
        section: str,
        conf: configparser.SectionProxy) -> str:
    if 'git_repo' not in conf or 'release_name' not in conf:
        raise Exception(
            'Cannot update unpinned channel "%s" (Run "pin" before "update")' %
            section)

    if 'channel_url' in conf:
        return fetch_with_nix_prefetch_url(
            v, conf['tarball_url'], Digest16(
                conf['tarball_sha256']))

    channel = Channel(**dict(conf.items()))
    ensure_git_rev_available(v, channel)
    return git_get_tarball(v, channel)


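# "update" handler: resolves every pinned channel (including alias_of
# sections) to a Nix store tarball and installs them all into the per-user
# channels profile with a single nix-env --install --from-expression call,
# or just prints that command under --dry-run.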
def update(args: argparse.Namespace) -> None:
    v = Verification()
    config = configparser.ConfigParser()
    exprs: Dict[str, str] = {}
    configs = [read_config(filename) for filename in args.channels_file]
    for config in configs:
        for section in config.sections():
            if 'alias_of' in config[section]:
                assert 'git_repo' not in config[section]
                continue
            tarball = fetch_channel(v, section, config[section])
            if section in exprs:
                raise Exception('Duplicate channel "%s"' % section)
            exprs[section] = (
                'f: f { name = "%s"; channelName = "%%s"; src = builtins.storePath "%s"; }' %
                (config[section]['release_name'], tarball))

    for config in configs:
        for section in config.sections():
            if 'alias_of' in config[section]:
                if section in exprs:
                    raise Exception('Duplicate channel "%s"' % section)
                exprs[section] = exprs[str(config[section]['alias_of'])]

    command = [
        'nix-env',
        '--profile',
        '/nix/var/nix/profiles/per-user/%s/channels' %
        getpass.getuser(),
        '--show-trace',
        '--file',
        '<nix/unpack-channel.nix>',
        '--install',
        '--from-expression'] + [exprs[name] % name for name in sorted(exprs.keys())]
    if args.dry_run:
        print(' '.join(map(shlex.quote, command)))
    else:
        v.status('Installing channels with nix-env')
        process = subprocess.run(command)
        v.result(process.returncode == 0)


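# Command-line interface:
#   pinch pin <channels_file> [channel ...]
#   pinch update [--dry-run] <channels_file> [<channels_file> ...]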
def main() -> None:
    parser = argparse.ArgumentParser(prog='pinch')
    subparsers = parser.add_subparsers(dest='mode', required=True)
    parser_pin = subparsers.add_parser('pin')
    parser_pin.add_argument('channels_file', type=str)
    parser_pin.add_argument('channels', type=str, nargs='*')
    parser_pin.set_defaults(func=pin)
    parser_update = subparsers.add_parser('update')
    parser_update.add_argument('--dry-run', action='store_true')
    parser_update.add_argument('channels_file', type=str, nargs='+')
    parser_update.set_defaults(func=update)
    args = parser.parse_args()
    args.func(args)


main()