# pinch.py -- from the "pinch" project (git.scottworley.com/pinch)
# (gitweb page residue removed from the top of this extracted file)
1import argparse
2import configparser
3import filecmp
4import functools
5import getpass
6import hashlib
7import operator
8import os
9import os.path
10import shlex
11import shutil
12import subprocess
13import sys
14import tarfile
15import tempfile
16import types
17import urllib.parse
18import urllib.request
19import xml.dom.minidom
20
21from typing import (
22 Callable,
23 Dict,
24 Iterable,
25 List,
26 Mapping,
27 NamedTuple,
28 NewType,
29 Optional,
30 Set,
31 Tuple,
32 Type,
33 TypeVar,
34 Union,
35)
36
37import git_cache
38
39# Use xdg module when it's less painful to have as a dependency
40
41
# Minimal stand-in for the xdg module (see note above); holds only the
# cache-home directory, which is all this program needs.
class XDG(NamedTuple):
    XDG_CACHE_HOME: str  # e.g. ~/.cache, per the XDG Base Directory spec
44
45
# Resolve the cache directory once at import time: honor $XDG_CACHE_HOME,
# falling back to ~/.cache as the XDG spec prescribes.
xdg = XDG(
    XDG_CACHE_HOME=os.getenv(
        'XDG_CACHE_HOME',
        os.path.expanduser('~/.cache')))
50
51
class VerificationError(Exception):
    """Raised by Verification.result(False) when a check fails."""
    pass
54
55
class Verification:
    """Prints progress messages to stderr with right-aligned OK/FAIL markers."""

    def __init__(self) -> None:
        # Running width of the current status line; used by result() to
        # right-align the OK/FAIL marker.
        self.line_length = 0

    def status(self, s: str) -> None:
        """Emit a status fragment (no newline) and track its printed width."""
        print(s, end=' ', file=sys.stderr, flush=True)
        # NOTE: counts code points, not display cells (original "Unicode??").
        self.line_length += 1 + len(s)

    @staticmethod
    def _color(s: str, c: int) -> str:
        """Wrap *s* in the ANSI escape sequence for color code *c*."""
        return f'\033[{c:2d}m{s}\033[00m'

    def result(self, r: bool) -> None:
        """Finish the current status line with OK/FAIL; raise on failure."""
        if r:
            message, color = 'OK ', 92
        else:
            message, color = 'FAIL', 91
        cols = shutil.get_terminal_size().columns or 80
        # Pad so the marker lands at the right edge of the terminal.
        pad = (cols - (self.line_length + len(message))) % cols
        print(' ' * pad + self._color(message, color), file=sys.stderr)
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        """Convenience: status() followed immediately by result()."""
        self.status(s)
        self.result(r)

    def ok(self) -> None:
        """Shorthand for a passing result()."""
        self.result(True)
85
86
# SHA-256 digests in hex (base16) and Nix base32 encodings, kept distinct
# at type-check time so the two cannot be mixed up.
Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)
89
90
class ChannelTableEntry(types.SimpleNamespace):
    """One row of the channel page's file table; some fields filled in later."""
    absolute_url: str  # url resolved against the forwarded channel URL
    digest: Digest16   # expected sha256 from the table
    file: str          # local path once fetched (set by fetch_resources)
    size: int
    url: str           # href exactly as it appears in the table
97
98
class AliasPin(NamedTuple):
    """Aliases carry no pin state of their own."""
    pass
101
102
class SymlinkPin(NamedTuple):
    """Symlink pins are stateless; every symlink release is named 'link'."""
    @property
    def release_name(self) -> str:
        return 'link'
107
108
class GitPin(NamedTuple):
    """Pin for a git search path: the exact revision and its derived name."""
    git_revision: str
    release_name: str
112
113
class ChannelPin(NamedTuple):
    """Pin for a channel search path: git revision plus the exact tarball."""
    git_revision: str
    release_name: str
    tarball_url: str
    tarball_sha256: str
119
120
121Pin = Union[AliasPin, SymlinkPin, GitPin, ChannelPin]
122
123
def copy_to_nix_store(v: Verification, filename: str) -> str:
    """Add *filename* to the Nix store and return the resulting store path."""
    v.status('Putting tarball in Nix store')
    completed = subprocess.run(
        ['nix-store', '--add', filename], stdout=subprocess.PIPE)
    v.result(completed.returncode == 0)
    store_path = completed.stdout.decode().strip()
    return store_path  # type: ignore # (for old mypy)
130
131
def symlink_archive(v: Verification, path: str) -> str:
    """Build a one-entry tar.gz containing a symlink to *path*; store it in Nix."""
    with tempfile.TemporaryDirectory() as scratch:
        link = os.path.join(scratch, 'link')
        os.symlink(path, link)
        archive = os.path.join(scratch, 'link.tar.gz')
        with tarfile.open(archive, mode='x:gz') as tar:
            tar.add(link, arcname='link')
        return copy_to_nix_store(v, archive)
139
140
class AliasSearchPath(NamedTuple):
    """Search path that reuses another section's channel expression."""
    alias_of: str  # name of the config section this one aliases

    # pylint: disable=no-self-use
    def pin(self, _: Verification, __: Optional[Pin]) -> AliasPin:
        return AliasPin()
147
148
class SymlinkSearchPath(NamedTuple):
    """Search path that resolves to a symlink pointing at a local path."""
    path: str

    # pylint: disable=no-self-use
    def pin(self, _: Verification, __: Optional[Pin]) -> SymlinkPin:
        return SymlinkPin()

    def fetch(self, v: Verification, _: Pin) -> str:
        # The "tarball" here is a generated archive holding just the symlink.
        return symlink_archive(v, self.path)
158
159
class GitSearchPath(NamedTuple):
    """Search path backed directly by a ref in a git repository."""
    git_ref: str
    git_repo: str

    def pin(self, v: Verification, old_pin: Optional[Pin]) -> GitPin:
        """Resolve git_ref to a revision, checking ancestry against old_pin."""
        _, new_revision = git_cache.fetch(self.git_repo, self.git_ref)
        if old_pin is not None:
            assert isinstance(old_pin, GitPin)
            # Guard against the ref moving backwards (e.g. a force-push).
            verify_git_ancestry(v, self, old_pin.git_revision, new_revision)
        return GitPin(release_name=git_revision_name(v, self, new_revision),
                      git_revision=new_revision)

    def fetch(self, v: Verification, pin: Pin) -> str:
        """Produce a nix-store tarball for the pinned revision."""
        assert isinstance(pin, GitPin)
        git_cache.ensure_rev_available(
            self.git_repo, self.git_ref, pin.git_revision)
        return git_get_tarball(v, self, pin)
177
178
class ChannelSearchPath(NamedTuple):
    """Search path backed by a Nix channel page plus its git repository."""
    channel_url: str
    git_ref: str
    git_repo: str

    def pin(self, v: Verification, old_pin: Optional[Pin]) -> ChannelPin:
        """Fetch and verify the channel page, returning an updated ChannelPin.

        Returns old_pin unchanged when the channel still points at the same
        git revision, skipping the (expensive) download and verification.
        """
        if old_pin is not None:
            assert isinstance(old_pin, ChannelPin)

        channel_html, forwarded_url = fetch_channel(v, self)
        table, new_gitpin = parse_channel(v, channel_html)
        if old_pin is not None and old_pin.git_revision == new_gitpin.git_revision:
            return old_pin
        fetch_resources(v, new_gitpin, forwarded_url, table)
        git_cache.ensure_rev_available(
            self.git_repo, self.git_ref, new_gitpin.git_revision)
        if old_pin is not None:
            # Guard against the channel moving backwards in history.
            verify_git_ancestry(
                v, self, old_pin.git_revision, new_gitpin.git_revision)
        check_channel_contents(v, self, table, new_gitpin)
        return ChannelPin(
            release_name=new_gitpin.release_name,
            tarball_url=table['nixexprs.tar.xz'].absolute_url,
            tarball_sha256=table['nixexprs.tar.xz'].digest,
            git_revision=new_gitpin.git_revision)

    # pylint: disable=no-self-use
    def fetch(self, v: Verification, pin: Pin) -> str:
        """Fetch the pinned channel tarball, verifying its recorded digest."""
        assert isinstance(pin, ChannelPin)

        return fetch_with_nix_prefetch_url(
            v, pin.tarball_url, Digest16(pin.tarball_sha256))
211
212
# Every configurable search-path flavor.
SearchPath = Union[AliasSearchPath,
                   SymlinkSearchPath,
                   GitSearchPath,
                   ChannelSearchPath]
# Search paths whose content is delivered as a git-derived tarball.
TarrableSearchPath = Union[GitSearchPath, ChannelSearchPath]
218
219
def compare(a: str, b: str) -> Tuple[List[str], List[str], List[str]]:
    """Deep-compare the directory trees *a* and *b*.

    Returns (match, mismatch, errors) as produced by filecmp.cmpfiles over
    the union of both trees' files, excluding each tree's top-level .git/.
    Symlinks to directories are compared as files.
    """

    def raise_error(error: OSError) -> None:
        # os.walk swallows errors by default; we want them to be fatal.
        raise error

    def join(prefix: str, name: str) -> str:
        return name if prefix == '.' else os.path.join(prefix, name)

    def recursive_files(d: str) -> Iterable[str]:
        """All file paths under *d*, relative to *d*."""
        found: List[str] = []
        for path, dirs, files in os.walk(d, onerror=raise_error):
            rel = os.path.relpath(path, start=d)
            for f in files:
                found.append(join(rel, f))
            # Directory symlinks are not descended into; treat them as files.
            found.extend(
                join(rel, entry) for entry in dirs
                if os.path.islink(join(path, entry)))
        return found

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    files = set(exclude_dot_git(recursive_files(a))) | set(
        exclude_dot_git(recursive_files(b)))
    return filecmp.cmpfiles(a, b, files, shallow=False)
246
247
def fetch_channel(
        v: Verification, channel: ChannelSearchPath) -> Tuple[str, str]:
    """Download the channel page; return (html, forwarded_url).

    The channel URL is expected to redirect to a release-specific URL;
    the check fails if no forwarding happened.
    """
    v.status(f'Fetching channel from {channel.channel_url}')
    with urllib.request.urlopen(channel.channel_url, timeout=10) as request:
        channel_html = request.read().decode()
        forwarded_url = request.geturl()  # final URL after redirects
    v.result(request.status == 200)
    v.check('Got forwarded', channel.channel_url != forwarded_url)
    return channel_html, forwarded_url
257
258
def parse_channel(v: Verification, channel_html: str) \
        -> Tuple[Dict[str, ChannelTableEntry], GitPin]:
    """Extract the file table and git revision from a channel page.

    Relies on the page layout: the release name is the third word of both
    <title> and <h1> (cross-checked), the git commit is the first <tt>
    element (label-checked against "Git commit "), and the file table rows
    are (link, size, digest).
    """
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(channel_html)
    v.ok()

    v.status('Extracting release name:')
    title_name = d.getElementsByTagName(
        'title')[0].firstChild.nodeValue.split()[2]
    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
    v.status(title_name)
    v.result(title_name == h1_name)

    v.status('Extracting git commit:')
    git_commit_node = d.getElementsByTagName('tt')[0]
    git_revision = git_commit_node.firstChild.nodeValue
    v.status(git_revision)
    v.ok()
    v.status('Verifying git commit label')
    # Make sure the <tt> we grabbed is actually the commit, not some other one.
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    table: Dict[str, ChannelTableEntry] = {}
    for row in d.getElementsByTagName('tr')[1:]:  # [1:] skips the header row
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        table[name] = ChannelTableEntry(url=url, digest=digest, size=size)
    v.ok()
    return table, GitPin(release_name=title_name, git_revision=git_revision)
290
291
def digest_string(s: bytes) -> Digest16:
    """Return the hex-encoded SHA-256 digest of *s*."""
    hasher = hashlib.sha256(s)
    return Digest16(hasher.hexdigest())
294
295
def digest_file(filename: str) -> Digest16:
    """Return the hex-encoded SHA-256 digest of a file's contents.

    Reads in 4 KiB chunks so arbitrarily large files never sit in memory.
    """
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        while block := f.read(4096):
            hasher.update(block)
    return Digest16(hasher.hexdigest())
303
304
@functools.lru_cache
def _experimental_flag_needed(v: Verification) -> bool:
    """Whether this `nix` binary needs --experimental-features for subcommands.

    Cached (keyed on the Verification instance) so `nix --help` runs once.
    """
    v.status('Checking Nix version')
    process = subprocess.run(['nix', '--help'], stdout=subprocess.PIPE)
    v.result(process.returncode == 0)
    # Versions that mention --experimental-features in help require the flag.
    return b'--experimental-features' in process.stdout
311
312
def _nix_command(v: Verification) -> List[str]:
    """Base argv for invoking `nix`, enabling experimental features if needed."""
    if _experimental_flag_needed(v):
        return ['nix', '--experimental-features', 'nix-command']
    return ['nix']
316
317
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    """Convert a base32 sha256 digest to base16 via `nix to-base16`."""
    v.status('Converting digest to base16')
    argv = _nix_command(v) + ['to-base16', '--type', 'sha256', digest32]
    process = subprocess.run(argv, stdout=subprocess.PIPE)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())
328
329
def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    """Convert a base16 sha256 digest to base32 via `nix to-base32`."""
    v.status('Converting digest to base32')
    argv = _nix_command(v) + ['to-base32', '--type', 'sha256', digest16]
    process = subprocess.run(argv, stdout=subprocess.PIPE)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())
340
341
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    """Fetch *url* into the Nix store via nix-prefetch-url; return the store path.

    The expected digest is checked twice: against nix-prefetch-url's own
    report, and independently by re-hashing the resulting file.
    """
    v.status(f'Fetching {url}')
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], stdout=subprocess.PIPE)
    v.result(process.returncode == 0)
    # Output is exactly two newline-terminated lines: digest, then store path.
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    assert empty == ''
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status(f"Verifying digest of {path}")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path  # type: ignore # (for old mypy)
358
359
def fetch_resources(
        v: Verification,
        pin: GitPin,
        forwarded_url: str,
        table: Dict[str, ChannelTableEntry]) -> None:
    """Download the channel's git-revision and nixexprs files into the store.

    Mutates each ChannelTableEntry, filling in absolute_url and file.
    """
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = table[resource]
        # Table hrefs are relative to the (post-redirect) channel page.
        fields.absolute_url = urllib.parse.urljoin(forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(
            v, fields.absolute_url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    with open(table['git-revision'].file, encoding='utf-8') as rev_file:
        v.result(rev_file.read(999) == pin.git_revision)
373
374
def tarball_cache_file(channel: TarrableSearchPath, pin: GitPin) -> str:
    """Path (under XDG cache) of the file remembering a generated tarball's store path."""
    repo_digest = digest_string(channel.git_repo.encode())
    basename = f'{repo_digest}-{pin.git_revision}-{pin.release_name}'
    return os.path.join(xdg.XDG_CACHE_HOME, 'pinch/git-tarball', basename)
380
381
def verify_git_ancestry(
        v: Verification,
        channel: TarrableSearchPath,
        old_revision: str,
        new_revision: str) -> None:
    """Check that new_revision descends from old_revision in the cached repo."""
    cachedir = git_cache.git_cachedir(channel.git_repo)
    v.status(f'Verifying rev is an ancestor of previous rev {old_revision}')
    command = [
        'git', '-C', cachedir,
        'merge-base', '--is-ancestor',
        old_revision, new_revision,
    ]
    # `git merge-base --is-ancestor` signals the answer via its exit status.
    v.result(subprocess.run(command).returncode == 0)
397
398
def compare_tarball_and_git(
        v: Verification,
        pin: GitPin,
        channel_contents: str,
        git_contents: str) -> None:
    """Check the extracted channel tarball against a git checkout of the pin.

    A fixed set of channel-generated files is expected to be incomparable;
    any other difference or incomparable file is a failure.
    """
    v.status('Comparing channel tarball with git checkout')
    match, mismatch, errors = compare(os.path.join(
        channel_contents, pin.release_name), git_contents)
    v.ok()
    v.check(f'{len(match)} files match', len(match) > 0)
    v.check(f'{len(mismatch)} files differ', len(mismatch) == 0)
    # Files added during channel construction that have no git counterpart.
    expected_errors = [
        '.git-revision',
        '.version-suffix',
        'nixpkgs',
        'programs.sqlite',
        'svn-revision']
    benign_errors = [e for e in expected_errors if e in errors]
    for benign in benign_errors:
        errors.remove(benign)
    v.check(f'{len(errors)} unexpected incomparable files', len(errors) == 0)
    v.check(
        f'({len(benign_errors)} of {len(expected_errors)} expected incomparable files)',
        len(benign_errors) == len(expected_errors))
425
426
def extract_tarball(
        v: Verification,
        table: Dict[str, ChannelTableEntry],
        dest: str) -> None:
    """Unpack the already-fetched nixexprs tarball into *dest*."""
    v.status(f"Extracting tarball {table['nixexprs.tar.xz'].file}")
    shutil.unpack_archive(table['nixexprs.tar.xz'].file, dest)
    v.ok()
434
435
def git_checkout(
        v: Verification,
        channel: TarrableSearchPath,
        pin: GitPin,
        dest: str) -> None:
    """Extract the pinned revision from the git cache into *dest*.

    Pipes `git archive` directly into `tar x` rather than materializing
    the archive on disk.
    """
    v.status('Checking out corresponding git revision')
    with subprocess.Popen(
            ['git', '-C', git_cache.git_cachedir(channel.git_repo), 'archive', pin.git_revision],
            stdout=subprocess.PIPE) as git:
        with subprocess.Popen(['tar', 'x', '-C', dest, '-f', '-'], stdin=git.stdout) as tar:
            if git.stdout:
                # Close our copy of the pipe so tar sees EOF when git exits.
                git.stdout.close()
            tar.wait()
            git.wait()
            v.result(git.returncode == 0 and tar.returncode == 0)
451
452
def git_get_tarball(
        v: Verification,
        channel: TarrableSearchPath,
        pin: GitPin) -> str:
    """Produce (or reuse) a nix-store tarball for the pinned git revision.

    The resulting store path is remembered in a small cache file under
    XDG_CACHE_HOME so repeated runs can skip regeneration.
    """
    cache_file = tarball_cache_file(channel, pin)
    if os.path.exists(cache_file):
        with open(cache_file, encoding='utf-8') as f:
            cached_tarball = f.read(9999)
        # The store path may have been garbage-collected since it was cached.
        if os.path.exists(cached_tarball):
            return cached_tarball

    with tempfile.TemporaryDirectory() as output_dir:
        output_filename = os.path.join(
            output_dir, pin.release_name + '.tar.xz')
        with open(output_filename, 'w', encoding='utf-8') as output_file:
            v.status(f'Generating tarball for git revision {pin.git_revision}')
            # Pipeline: git archive | xz > output_file
            with subprocess.Popen(
                    ['git', '-C', git_cache.git_cachedir(channel.git_repo),
                     'archive', f'--prefix={pin.release_name}/', pin.git_revision],
                    stdout=subprocess.PIPE) as git:
                with subprocess.Popen(['xz'], stdin=git.stdout, stdout=output_file) as xz:
                    xz.wait()
                    git.wait()
                    v.result(git.returncode == 0 and xz.returncode == 0)

        store_tarball = copy_to_nix_store(v, output_filename)

    os.makedirs(os.path.dirname(cache_file), exist_ok=True)
    with open(cache_file, 'w', encoding='utf-8') as f:
        f.write(store_tarball)
    return store_tarball  # type: ignore # (for old mypy)
484
485
def check_channel_metadata(
        v: Verification,
        pin: GitPin,
        channel_contents: str) -> None:
    """Verify the tarball's embedded .git-revision and .version-suffix files."""
    release_dir = os.path.join(channel_contents, pin.release_name)

    v.status('Verifying git commit in channel tarball')
    with open(os.path.join(release_dir, '.git-revision'),
              encoding='utf-8') as f:
        v.result(f.read(999) == pin.git_revision)

    v.status(
        f'Verifying version-suffix is a suffix of release name {pin.release_name}:')
    with open(os.path.join(release_dir, '.version-suffix'),
              encoding='utf-8') as f:
        version_suffix = f.read(999)
        v.status(version_suffix)
        v.result(pin.release_name.endswith(version_suffix))
502
503
def check_channel_contents(
        v: Verification,
        channel: TarrableSearchPath,
        table: Dict[str, ChannelTableEntry],
        pin: GitPin) -> None:
    """Cross-check the channel tarball against a fresh git checkout."""
    with tempfile.TemporaryDirectory() as channel_contents, \
            tempfile.TemporaryDirectory() as git_contents:

        extract_tarball(v, table, channel_contents)
        check_channel_metadata(v, pin, channel_contents)

        git_checkout(v, channel, pin, git_contents)

        compare_tarball_and_git(v, pin, channel_contents, git_contents)

        v.status('Removing temporary directories')
    # ok() fires after the with-block so cleanup is included in the status.
    v.ok()
521
522
def git_revision_name(
        v: Verification,
        channel: TarrableSearchPath,
        git_revision: str) -> str:
    """Build a release name of the form '<repo-basename>-<commit-time>-<short-hash>'."""
    v.status('Getting commit date')
    command = [
        'git', '-C', git_cache.git_cachedir(channel.git_repo),
        'log', '-n1',
        '--format=%ct-%h',
        '--abbrev=11',
        '--no-show-signature',
        git_revision,
    ]
    process = subprocess.run(command, stdout=subprocess.PIPE)
    # Empty output means git didn't know the revision.
    v.result(process.returncode == 0 and process.stdout != b'')
    return f'{os.path.basename(channel.git_repo)}-{process.stdout.decode().strip()}'
540
541
# Generic key/value type parameters for the dict helpers below.
K = TypeVar('K')
V = TypeVar('V')
544
545
def partition_dict(pred: Callable[[K, V], bool],
                   d: Dict[K, V]) -> Tuple[Dict[K, V], Dict[K, V]]:
    """Split *d* into (entries satisfying pred, all remaining entries)."""
    selected: Dict[K, V] = {}
    remaining: Dict[K, V] = {}
    for key, value in d.items():
        bucket = selected if pred(key, value) else remaining
        bucket[key] = value
    return selected, remaining
556
557
def filter_dict(d: Dict[K, V], fields: Set[K]
                ) -> Tuple[Dict[K, V], Dict[K, V]]:
    """Split *d* into (entries whose key is in *fields*, the rest)."""
    def key_selected(key: K, _: V) -> bool:
        return key in fields
    return partition_dict(key_selected, d)
561
562
def read_config_section(
        conf: configparser.SectionProxy) -> Tuple[SearchPath, Optional[Pin]]:
    """Build a (SearchPath, Pin) pair from one config-file section.

    Fields named after the pin type's fields become the pin; every other
    field (minus 'type') constructs the search path.  Returns pin=None
    when the section has not been pinned yet.
    """
    mapping: Mapping[str, Tuple[Type[SearchPath], Type[Pin]]] = {
        'alias': (AliasSearchPath, AliasPin),
        'channel': (ChannelSearchPath, ChannelPin),
        'git': (GitSearchPath, GitPin),
        'symlink': (SymlinkSearchPath, SymlinkPin),
    }
    SP, P = mapping[conf['type']]
    _, all_fields = filter_dict(dict(conf.items()), set(['type']))
    pin_fields, remaining_fields = filter_dict(all_fields, set(P._fields))
    # A pin type with no fields at all (e.g. AliasPin) is always "present".
    # Error suppression works around https://github.com/python/mypy/issues/9007
    pin_present = pin_fields or P._fields == ()
    pin = P(**pin_fields) if pin_present else None  # type: ignore
    return SP(**remaining_fields), pin
578
579
def read_pinned_config_section(
        section: str, conf: configparser.SectionProxy) -> Tuple[SearchPath, Pin]:
    """Like read_config_section, but require the section to carry a pin.

    Raises:
        Exception: if the section has not been pinned yet.
    """
    search_path, pin = read_config_section(conf)
    if pin is None:
        raise Exception(
            f'Cannot update unpinned channel "{section}" (Run "pin" before "update")')
    return search_path, pin
587
588
def read_config(filename: str) -> configparser.ConfigParser:
    """Parse a single channels config file."""
    parser = configparser.ConfigParser()
    with open(filename, encoding='utf-8') as f:
        # Pass the filename along so parse errors name the right file.
        parser.read_file(f, filename)
    return parser
594
595
def read_config_files(
        filenames: Iterable[str]) -> Dict[str, configparser.SectionProxy]:
    """Merge sections from several config files, rejecting duplicates.

    Raises:
        Exception: if the same section name appears in more than one file.
    """
    merged_config: Dict[str, configparser.SectionProxy] = {}
    for file in filenames:
        config = read_config(file)
        for section in config.sections():
            if section in merged_config:
                # Fix: this message was missing its f-prefix, so it printed
                # the literal text "{section}" instead of the section name.
                raise Exception(f'Duplicate channel "{section}"')
            merged_config[section] = config[section]
    return merged_config
606
607
def pinCommand(args: argparse.Namespace) -> None:
    """`pin` subcommand: refresh pins and rewrite the channels file in place."""
    v = Verification()
    config = read_config(args.channels_file)
    for section in config.sections():
        # An empty args.channels list means "pin every section".
        if args.channels and section not in args.channels:
            continue

        sp, old_pin = read_config_section(config[section])

        config[section].update(sp.pin(v, old_pin)._asdict())

    with open(args.channels_file, 'w', encoding='utf-8') as configfile:
        config.write(configfile)
621
622
def updateCommand(args: argparse.Namespace) -> None:
    """`update` subcommand: install all pinned channels into a nix profile."""
    v = Verification()
    exprs: Dict[str, str] = {}
    profile_manifest = os.path.join(args.profile, "manifest.nix")
    # Expose the current profile (and its manifest) to the nix expressions.
    search_paths: List[str] = [
        "-I", "pinch_profile=" + args.profile,
        "-I", "pinch_profile_manifest=" + os.readlink(profile_manifest)
    ] if os.path.exists(profile_manifest) else []
    config = {
        section: read_pinned_config_section(section, conf) for section,
        conf in read_config_files(
            args.channels_file).items()}
    # Aliases are handled after their targets' expressions are built.
    alias, nonalias = partition_dict(
        lambda k, v: isinstance(v[0], AliasSearchPath), config)

    for section, (sp, pin) in sorted(nonalias.items()):
        assert not isinstance(sp, AliasSearchPath)  # mypy can't see through
        assert not isinstance(pin, AliasPin)  # partition_dict()
        tarball = sp.fetch(v, pin)
        search_paths.extend(
            ["-I", f"pinch_tarball_for_{pin.release_name}={tarball}"])
        # The %s placeholder is filled with the section name (possibly an
        # alias's own name) just before invoking nix-env below.
        exprs[section] = (
            f'f: f {{ name = "{pin.release_name}"; channelName = "%s"; '
            f'src = builtins.storePath "{tarball}"; }}')

    for section, (sp, pin) in alias.items():
        assert isinstance(sp, AliasSearchPath)  # For mypy
        exprs[section] = exprs[sp.alias_of]

    command = [
        'nix-env',
        '--profile',
        args.profile,
        '--show-trace',
        '--file',
        '<nix/unpack-channel.nix>',
        '--install',
        '--remove-all',
    ] + search_paths + ['--from-expression'] + [
        exprs[name] % name for name in sorted(exprs.keys())]
    if args.dry_run:
        print(' '.join(map(shlex.quote, command)))
    else:
        v.status('Installing channels with nix-env')
        process = subprocess.run(command)
        v.result(process.returncode == 0)
669
670
def main() -> None:
    """Parse the command line and dispatch to pinCommand/updateCommand."""
    parser = argparse.ArgumentParser(prog='pinch')
    subparsers = parser.add_subparsers(dest='mode', required=True)
    parser_pin = subparsers.add_parser('pin')
    parser_pin.add_argument('channels_file', type=str)
    # Optional list of section names to pin; empty means "all".
    parser_pin.add_argument('channels', type=str, nargs='*')
    parser_pin.set_defaults(func=pinCommand)
    parser_update = subparsers.add_parser('update')
    parser_update.add_argument('--dry-run', action='store_true')
    parser_update.add_argument('--profile', default=(
        f'/nix/var/nix/profiles/per-user/{getpass.getuser()}/channels'))
    # update accepts multiple channels files; their sections are merged.
    parser_update.add_argument('channels_file', type=str, nargs='+')
    parser_update.set_defaults(func=updateCommand)
    args = parser.parse_args()
    args.func(args)
686
687
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()