1 from abc
import ABC
, abstractmethod
19 import xml
.dom
.minidom
34 # Use xdg module when it's less painful to have as a dependency
37 class XDG(NamedTuple
):
42 XDG_CACHE_HOME
=os
.getenv(
44 os
.path
.expanduser('~/.cache')))
47 class VerificationError(Exception):
53 def __init__(self
) -> None:
def status(self, s: str) -> None:
    """Emit a progress message to stderr (no newline) and track width.

    The running ``line_length`` lets ``result`` right-align its OK/FAIL
    marker on the same terminal line.
    """
    sys.stderr.write(s + ' ')
    sys.stderr.flush()
    # One extra column for the trailing space separator.  len() counts
    # code points, not display cells (the original's "Unicode??" caveat).
    self.line_length += len(s) + 1
61 def _color(s
: str, c
: int) -> str:
62 return '\033[%2dm%s\033[00m' % (c
, s
)
64 def result(self
, r
: bool) -> None:
65 message
, color
= {True: ('OK ', 92), False: ('FAIL', 91)}
[r
]
67 cols
= shutil
.get_terminal_size().columns
or 80
68 pad
= (cols
- (self
.line_length
+ length
)) % cols
69 print(' ' * pad
+ self
._color
(message
, color
), file=sys
.stderr
)
72 raise VerificationError()
74 def check(self
, s
: str, r
: bool) -> None:
82 Digest16
= NewType('Digest16', str)
83 Digest32
= NewType('Digest32', str)
86 class ChannelTableEntry(types
.SimpleNamespace
):
94 class AliasPin(NamedTuple
):
98 class GitPin(NamedTuple
):
103 class ChannelPin(NamedTuple
):
110 Pin
= Union
[AliasPin
, GitPin
, ChannelPin
]
113 class SearchPath(types
.SimpleNamespace
, ABC
):
116 def pin(self
, v
: Verification
) -> Pin
:
120 class AliasSearchPath(SearchPath
):
123 def pin(self
, v
: Verification
) -> AliasPin
:
124 assert not hasattr(self
, 'git_repo')
128 # (This lint-disable is for pylint bug https://github.com/PyCQA/pylint/issues/179
129 # which is fixed in pylint 2.5.)
130 class TarrableSearchPath(SearchPath
, ABC
): # pylint: disable=abstract-method
136 table
: Dict
[str, ChannelTableEntry
]
139 class GitSearchPath(TarrableSearchPath
):
140 def pin(self
, v
: Verification
) -> GitPin
:
142 self
.git_revision
if hasattr(self
, 'git_revision') else None)
143 if hasattr(self
, 'git_revision'):
144 del self
.git_revision
146 new_revision
= git_fetch(v
, self
, None, old_revision
)
147 return GitPin(release_name
=git_revision_name(v
, self
, new_revision
),
148 git_revision
=new_revision
)
150 def fetch(self
, v
: Verification
, section
: str,
151 conf
: configparser
.SectionProxy
) -> str:
152 if 'git_revision' not in conf
or 'release_name' not in conf
:
154 'Cannot update unpinned channel "%s" (Run "pin" before "update")' %
157 release_name
=conf
['release_name'],
158 git_revision
=conf
['git_revision'])
160 ensure_git_rev_available(v
, self
, the_pin
, None)
161 return git_get_tarball(v
, self
, the_pin
)
164 class ChannelSearchPath(TarrableSearchPath
):
165 def pin(self
, v
: Verification
) -> ChannelPin
:
167 self
.git_revision
if hasattr(self
, 'git_revision') else None)
168 if hasattr(self
, 'git_revision'):
169 del self
.git_revision
172 new_gitpin
= parse_channel(v
, self
)
173 fetch_resources(v
, self
, new_gitpin
)
174 ensure_git_rev_available(v
, self
, new_gitpin
, old_revision
)
175 check_channel_contents(v
, self
, new_gitpin
)
177 release_name
=new_gitpin
.release_name
,
178 tarball_url
=self
.table
['nixexprs.tar.xz'].absolute_url
,
179 tarball_sha256
=self
.table
['nixexprs.tar.xz'].digest
,
180 git_revision
=new_gitpin
.git_revision
)
182 # Lint TODO: Put tarball_url and tarball_sha256 in ChannelSearchPath
183 # pylint: disable=no-self-use
184 def fetch(self
, v
: Verification
, section
: str,
185 conf
: configparser
.SectionProxy
) -> str:
186 if 'git_repo' not in conf
or 'release_name' not in conf
:
188 'Cannot update unpinned channel "%s" (Run "pin" before "update")' %
191 return fetch_with_nix_prefetch_url(
192 v
, conf
['tarball_url'], Digest16(
193 conf
['tarball_sha256']))
196 def compare(a
: str, b
: str) -> Tuple
[List
[str], List
[str], List
[str]]:
198 def throw(error
: OSError) -> None:
def join(x: str, y: str) -> str:
    """Join *y* onto directory *x*, collapsing a leading '.' component.

    os.walk reports the top directory as '.', which we do not want as a
    prefix in the relative paths we collect.
    """
    if x == '.':
        return y
    return os.path.join(x, y)
204 def recursive_files(d
: str) -> Iterable
[str]:
205 all_files
: List
[str] = []
206 for path
, dirs
, files
in os
.walk(d
, onerror
=throw
):
207 rel
= os
.path
.relpath(path
, start
=d
)
208 all_files
.extend(join(rel
, f
) for f
in files
)
209 for dir_or_link
in dirs
:
210 if os
.path
.islink(join(path
, dir_or_link
)):
211 all_files
.append(join(rel
, dir_or_link
))
def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
    """Lazily drop paths that live under a top-level '.git/' directory.

    Paths merely starting with '.git' (e.g. '.gitignore') are kept.
    """
    for path in files:
        if not path.startswith('.git/'):
            yield path
217 files
= functools
.reduce(
220 recursive_files(x
))) for x
in [a
, b
]))
221 return filecmp
.cmpfiles(a
, b
, files
, shallow
=False)
def fetch(v: Verification, channel: TarrableSearchPath) -> None:
    """Fetch the channel's HTML page.

    Stores the raw page body in ``channel.channel_html`` and the
    post-redirect URL in ``channel.forwarded_url``.  Verification fails
    unless the server answered 200 and the request was actually
    redirected (the channel URL is expected to forward to a concrete
    release page).
    """
    v.status('Fetching channel')
    # Use a context manager so the HTTP response is always closed;
    # the original leaked the response object on every call.
    with urllib.request.urlopen(channel.channel_url, timeout=10) as request:
        channel.channel_html = request.read()
        channel.forwarded_url = request.geturl()
        ok = request.status == 200  # type: ignore # (for old mypy)
    v.result(ok)
    v.check('Got forwarded', channel.channel_url != channel.forwarded_url)
233 def parse_channel(v
: Verification
, channel
: TarrableSearchPath
) -> GitPin
:
234 v
.status('Parsing channel description as XML')
235 d
= xml
.dom
.minidom
.parseString(channel
.channel_html
)
238 v
.status('Extracting release name:')
239 title_name
= d
.getElementsByTagName(
240 'title')[0].firstChild
.nodeValue
.split()[2]
241 h1_name
= d
.getElementsByTagName('h1')[0].firstChild
.nodeValue
.split()[2]
243 v
.result(title_name
== h1_name
)
245 v
.status('Extracting git commit:')
246 git_commit_node
= d
.getElementsByTagName('tt')[0]
247 git_revision
= git_commit_node
.firstChild
.nodeValue
248 v
.status(git_revision
)
250 v
.status('Verifying git commit label')
251 v
.result(git_commit_node
.previousSibling
.nodeValue
== 'Git commit ')
253 v
.status('Parsing table')
255 for row
in d
.getElementsByTagName('tr')[1:]:
256 name
= row
.childNodes
[0].firstChild
.firstChild
.nodeValue
257 url
= row
.childNodes
[0].firstChild
.getAttribute('href')
258 size
= int(row
.childNodes
[1].firstChild
.nodeValue
)
259 digest
= Digest16(row
.childNodes
[2].firstChild
.firstChild
.nodeValue
)
260 channel
.table
[name
] = ChannelTableEntry(
261 url
=url
, digest
=digest
, size
=size
)
263 return GitPin(release_name
=title_name
, git_revision
=git_revision
)
def digest_string(s: bytes) -> Digest16:
    """Return the SHA-256 of *s* as a lowercase base-16 string."""
    hasher = hashlib.sha256(s)
    return Digest16(hasher.hexdigest())
270 def digest_file(filename
: str) -> Digest16
:
271 hasher
= hashlib
.sha256()
272 with open(filename
, 'rb') as f
:
273 # pylint: disable=cell-var-from-loop
274 for block
in iter(lambda: f
.read(4096), b
''):
276 return Digest16(hasher
.hexdigest())
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    """Re-encode a base-32 sha256 digest as base-16 via `nix to-base16`."""
    v.status('Converting digest to base16')
    cmd = ['nix', 'to-base16', '--type', 'sha256', digest32]
    proc = subprocess.run(cmd, stdout=subprocess.PIPE)
    v.result(proc.returncode == 0)
    return Digest16(proc.stdout.decode().strip())
def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    """Re-encode a base-16 sha256 digest as base-32 via `nix to-base32`."""
    v.status('Converting digest to base32')
    cmd = ['nix', 'to-base32', '--type', 'sha256', digest16]
    proc = subprocess.run(cmd, stdout=subprocess.PIPE)
    v.result(proc.returncode == 0)
    return Digest32(proc.stdout.decode().strip())
295 def fetch_with_nix_prefetch_url(
298 digest
: Digest16
) -> str:
299 v
.status('Fetching %s' % url
)
300 process
= subprocess
.run(
301 ['nix-prefetch-url', '--print-path', url
, digest
], stdout
=subprocess
.PIPE
)
302 v
.result(process
.returncode
== 0)
303 prefetch_digest
, path
, empty
= process
.stdout
.decode().split('\n')
305 v
.check("Verifying nix-prefetch-url's digest",
306 to_Digest16(v
, Digest32(prefetch_digest
)) == digest
)
307 v
.status("Verifying file digest")
308 file_digest
= digest_file(path
)
309 v
.result(file_digest
== digest
)
310 return path
# type: ignore # (for old mypy)
315 channel
: ChannelSearchPath
,
316 pin
: GitPin
) -> None:
317 for resource
in ['git-revision', 'nixexprs.tar.xz']:
318 fields
= channel
.table
[resource
]
319 fields
.absolute_url
= urllib
.parse
.urljoin(
320 channel
.forwarded_url
, fields
.url
)
321 fields
.file = fetch_with_nix_prefetch_url(
322 v
, fields
.absolute_url
, fields
.digest
)
323 v
.status('Verifying git commit on main page matches git commit in table')
326 channel
.table
['git-revision'].file).read(999) == pin
.git_revision
)
329 def git_cachedir(git_repo
: str) -> str:
333 digest_string(git_repo
.encode()))
336 def tarball_cache_file(channel
: TarrableSearchPath
, pin
: GitPin
) -> str:
341 (digest_string(channel
.git_repo
.encode()),
346 def verify_git_ancestry(
348 channel
: TarrableSearchPath
,
350 old_revision
: Optional
[str]) -> None:
351 cachedir
= git_cachedir(channel
.git_repo
)
352 v
.status('Verifying rev is an ancestor of ref')
353 process
= subprocess
.run(['git',
360 v
.result(process
.returncode
== 0)
362 if old_revision
is not None:
364 'Verifying rev is an ancestor of previous rev %s' %
366 process
= subprocess
.run(['git',
373 v
.result(process
.returncode
== 0)
378 channel
: TarrableSearchPath
,
379 desired_revision
: Optional
[str],
380 old_revision
: Optional
[str]) -> str:
381 # It would be nice if we could share the nix git cache, but as of the time
382 # of writing it is transitioning from gitv2 (deprecated) to gitv3 (not ready
383 # yet), and trying to straddle them both is too far into nix implementation
384 # details for my comfort. So we re-implement here half of nix.fetchGit.
387 cachedir
= git_cachedir(channel
.git_repo
)
388 if not os
.path
.exists(cachedir
):
389 v
.status("Initializing git repo")
390 process
= subprocess
.run(
391 ['git', 'init', '--bare', cachedir
])
392 v
.result(process
.returncode
== 0)
394 v
.status('Fetching ref "%s" from %s' % (channel
.git_ref
, channel
.git_repo
))
395 # We don't use --force here because we want to abort and freak out if forced
396 # updates are happening.
397 process
= subprocess
.run(['git',
402 '%s:%s' % (channel
.git_ref
,
404 v
.result(process
.returncode
== 0)
406 if desired_revision
is not None:
407 v
.status('Verifying that fetch retrieved this rev')
408 process
= subprocess
.run(
409 ['git', '-C', cachedir
, 'cat-file', '-e', desired_revision
])
410 v
.result(process
.returncode
== 0)
417 channel
.git_ref
)).read(999).strip()
419 verify_git_ancestry(v
, channel
, new_revision
, old_revision
)
424 def ensure_git_rev_available(
426 channel
: TarrableSearchPath
,
428 old_revision
: Optional
[str]) -> None:
429 cachedir
= git_cachedir(channel
.git_repo
)
430 if os
.path
.exists(cachedir
):
431 v
.status('Checking if we already have this rev:')
432 process
= subprocess
.run(
433 ['git', '-C', cachedir
, 'cat-file', '-e', pin
.git_revision
])
434 if process
.returncode
== 0:
436 if process
.returncode
== 1:
438 v
.result(process
.returncode
== 0 or process
.returncode
== 1)
439 if process
.returncode
== 0:
440 verify_git_ancestry(v
, channel
, pin
.git_revision
, old_revision
)
442 git_fetch(v
, channel
, pin
.git_revision
, old_revision
)
445 def compare_tarball_and_git(
448 channel_contents
: str,
449 git_contents
: str) -> None:
450 v
.status('Comparing channel tarball with git checkout')
451 match
, mismatch
, errors
= compare(os
.path
.join(
452 channel_contents
, pin
.release_name
), git_contents
)
454 v
.check('%d files match' % len(match
), len(match
) > 0)
455 v
.check('%d files differ' % len(mismatch
), len(mismatch
) == 0)
463 for ee
in expected_errors
:
466 benign_errors
.append(ee
)
468 '%d unexpected incomparable files' %
472 '(%d of %d expected incomparable files)' %
474 len(expected_errors
)),
475 len(benign_errors
) == len(expected_errors
))
480 channel
: TarrableSearchPath
,
482 v
.status('Extracting tarball %s' %
483 channel
.table
['nixexprs.tar.xz'].file)
484 shutil
.unpack_archive(
485 channel
.table
['nixexprs.tar.xz'].file,
492 channel
: TarrableSearchPath
,
495 v
.status('Checking out corresponding git revision')
496 git
= subprocess
.Popen(['git',
498 git_cachedir(channel
.git_repo
),
501 stdout
=subprocess
.PIPE
)
502 tar
= subprocess
.Popen(
503 ['tar', 'x', '-C', dest
, '-f', '-'], stdin
=git
.stdout
)
508 v
.result(git
.returncode
== 0 and tar
.returncode
== 0)
513 channel
: TarrableSearchPath
,
515 cache_file
= tarball_cache_file(channel
, pin
)
516 if os
.path
.exists(cache_file
):
517 cached_tarball
= open(cache_file
).read(9999)
518 if os
.path
.exists(cached_tarball
):
519 return cached_tarball
521 with tempfile
.TemporaryDirectory() as output_dir
:
522 output_filename
= os
.path
.join(
523 output_dir
, pin
.release_name
+ '.tar.xz')
524 with open(output_filename
, 'w') as output_file
:
526 'Generating tarball for git revision %s' %
528 git
= subprocess
.Popen(['git',
530 git_cachedir(channel
.git_repo
),
532 '--prefix=%s/' % pin
.release_name
,
534 stdout
=subprocess
.PIPE
)
535 xz
= subprocess
.Popen(['xz'], stdin
=git
.stdout
, stdout
=output_file
)
538 v
.result(git
.returncode
== 0 and xz
.returncode
== 0)
540 v
.status('Putting tarball in Nix store')
541 process
= subprocess
.run(
542 ['nix-store', '--add', output_filename
], stdout
=subprocess
.PIPE
)
543 v
.result(process
.returncode
== 0)
544 store_tarball
= process
.stdout
.decode().strip()
546 os
.makedirs(os
.path
.dirname(cache_file
), exist_ok
=True)
547 open(cache_file
, 'w').write(store_tarball
)
548 return store_tarball
# type: ignore # (for old mypy)
551 def check_channel_metadata(
554 channel_contents
: str) -> None:
555 v
.status('Verifying git commit in channel tarball')
561 '.git-revision')).read(999) == pin
.git_revision
)
564 'Verifying version-suffix is a suffix of release name %s:' %
566 version_suffix
= open(
570 '.version-suffix')).read(999)
571 v
.status(version_suffix
)
572 v
.result(pin
.release_name
.endswith(version_suffix
))
575 def check_channel_contents(
577 channel
: TarrableSearchPath
,
578 pin
: GitPin
) -> None:
579 with tempfile
.TemporaryDirectory() as channel_contents
, \
580 tempfile
.TemporaryDirectory() as git_contents
:
582 extract_tarball(v
, channel
, channel_contents
)
583 check_channel_metadata(v
, pin
, channel_contents
)
585 git_checkout(v
, channel
, pin
, git_contents
)
587 compare_tarball_and_git(v
, pin
, channel_contents
, git_contents
)
589 v
.status('Removing temporary directories')
593 def git_revision_name(
595 channel
: TarrableSearchPath
,
596 git_revision
: str) -> str:
597 v
.status('Getting commit date')
598 process
= subprocess
.run(['git',
600 git_cachedir(channel
.git_repo
),
605 '--no-show-signature',
607 stdout
=subprocess
.PIPE
)
608 v
.result(process
.returncode
== 0 and process
.stdout
!= b
'')
609 return '%s-%s' % (os
.path
.basename(channel
.git_repo
),
610 process
.stdout
.decode().strip())
613 def read_search_path(conf
: configparser
.SectionProxy
) -> SearchPath
:
614 mapping
: Mapping
[str, Type
[SearchPath
]] = {
615 'alias': AliasSearchPath
,
616 'channel': ChannelSearchPath
,
617 'git': GitSearchPath
,
619 return mapping
[conf
['type']](**dict(conf
.items()))
622 def read_config(filename
: str) -> configparser
.ConfigParser
:
623 config
= configparser
.ConfigParser()
624 config
.read_file(open(filename
), filename
)
628 def read_config_files(
629 filenames
: Iterable
[str]) -> Dict
[str, configparser
.SectionProxy
]:
630 merged_config
: Dict
[str, configparser
.SectionProxy
] = {}
631 for file in filenames
:
632 config
= read_config(file)
633 for section
in config
.sections():
634 if section
in merged_config
:
635 raise Exception('Duplicate channel "%s"' % section
)
636 merged_config
[section
] = config
[section
]
640 def pinCommand(args
: argparse
.Namespace
) -> None:
642 config
= read_config(args
.channels_file
)
643 for section
in config
.sections():
644 if args
.channels
and section
not in args
.channels
:
647 sp
= read_search_path(config
[section
])
649 config
[section
].update(sp
.pin(v
)._asdict
())
651 with open(args
.channels_file
, 'w') as configfile
:
652 config
.write(configfile
)
655 def updateCommand(args
: argparse
.Namespace
) -> None:
657 exprs
: Dict
[str, str] = {}
658 config
= read_config_files(args
.channels_file
)
659 for section
in config
:
660 sp
= read_search_path(config
[section
])
661 if isinstance(sp
, AliasSearchPath
):
662 assert 'git_repo' not in config
[section
]
664 tarball
= sp
.fetch(v
, section
, config
[section
])
666 'f: f { name = "%s"; channelName = "%%s"; src = builtins.storePath "%s"; }' %
667 (config
[section
]['release_name'], tarball
))
669 for section
in config
:
670 if 'alias_of' in config
[section
]:
671 exprs
[section
] = exprs
[str(config
[section
]['alias_of'])]
676 '/nix/var/nix/profiles/per-user/%s/channels' %
680 '<nix/unpack-channel.nix>',
682 '--from-expression'] + [exprs
[name
] % name
for name
in sorted(exprs
.keys())]
684 print(' '.join(map(shlex
.quote
, command
)))
686 v
.status('Installing channels with nix-env')
687 process
= subprocess
.run(command
)
688 v
.result(process
.returncode
== 0)
692 parser
= argparse
.ArgumentParser(prog
='pinch')
693 subparsers
= parser
.add_subparsers(dest
='mode', required
=True)
694 parser_pin
= subparsers
.add_parser('pin')
695 parser_pin
.add_argument('channels_file', type=str)
696 parser_pin
.add_argument('channels', type=str, nargs
='*')
697 parser_pin
.set_defaults(func
=pinCommand
)
698 parser_update
= subparsers
.add_parser('update')
699 parser_update
.add_argument('--dry-run', action
='store_true')
700 parser_update
.add_argument('channels_file', type=str, nargs
='+')
701 parser_update
.set_defaults(func
=updateCommand
)
702 args
= parser
.parse_args()
706 if __name__
== '__main__':