import argparse
import configparser
import filecmp
import functools
import getpass
import hashlib
import operator
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
import types
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import Dict, Iterable, List, NewType, Tuple

Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)
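
# Both digest types hold a SHA-256 hash of the same data in different
# encodings: Digest16 is the hex form produced by hashlib, Digest32 is the
# base-32 form printed by the nix tools (see to_Digest16/to_Digest32 below).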


class ChannelTableEntry(types.SimpleNamespace):
    # Fields populated in parse_channel() and fetch_resources() below.
    absolute_url: str
    digest: Digest16
    file: str
    size: int
    url: str


class Channel(types.SimpleNamespace):
    # Fields populated from the config file and during verification.
    channel_html: bytes
    channel_url: str
    forwarded_url: str
    git_ref: str
    git_repo: str
    git_revision: str
    old_git_revision: str
    release_name: str
    table: Dict[str, ChannelTableEntry]


class VerificationError(Exception):
    pass


class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', file=sys.stderr, flush=True)
        self.line_length += 1 + len(s)  # Unicode??

    # Assumed to be a staticmethod: the signature takes no self.
    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color), file=sys.stderr)
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)
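
    # Typical use, as seen in the callers below: v.status('Doing X') prints a
    # label, v.result(ok) right-aligns an OK/FAIL marker and raises
    # VerificationError on failure, and check() combines the two.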


def compare(a: str, b: str) -> Tuple[List[str], List[str], List[str]]:

    def throw(error: OSError) -> None:
        # Re-raise os.walk errors instead of silently ignoring them.
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    files = functools.reduce(
        operator.or_,  # union of the per-tree file sets (reconstructed argument)
        (set(exclude_dot_git(recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)
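
# compare() returns filecmp.cmpfiles' three lists -- (match, mismatch, errors)
# -- computed over the union of files found in either tree, with .git/ ignored.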


def fetch(v: Verification, channel: Channel) -> None:
    v.status('Fetching channel')
    request = urllib.request.urlopen(channel.channel_url, timeout=10)
    channel.channel_html = request.read()
    channel.forwarded_url = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', channel.channel_url != channel.forwarded_url)
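
# The channel URL normally redirects to a release-specific page, so we keep
# both the original and the forwarded URL; relative links in the release table
# are resolved against the forwarded URL in fetch_resources() below.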


def parse_channel(v: Verification, channel: Channel) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(channel.channel_html)

    v.status('Extracting release name:')
    title_name = d.getElementsByTagName(
        'title')[0].firstChild.nodeValue.split()[2]
    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
    v.result(title_name == h1_name)
    channel.release_name = title_name

    v.status('Extracting git commit:')
    git_commit_node = d.getElementsByTagName('tt')[0]
    channel.git_revision = git_commit_node.firstChild.nodeValue
    v.status(channel.git_revision)

    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    channel.table = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        channel.table[name] = ChannelTableEntry(
            url=url, digest=digest, size=size)


def digest_string(s: bytes) -> Digest16:
    return Digest16(hashlib.sha256(s).hexdigest())


def digest_file(filename: str) -> Digest16:
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # pylint: disable=cell-var-from-loop
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())


def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    v.status('Converting digest to base16')
    process = subprocess.run(
        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())


def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    v.status('Converting digest to base32')
    process = subprocess.run(
        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())
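
# Shelling out to `nix to-base16` / `nix to-base32` keeps the base conversion
# consistent with nix's own encoding, so digests computed locally with hashlib
# can be compared against the base-32 digests the nix tools report.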


def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path
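
# `nix-prefetch-url --print-path` writes the digest on one line and the store
# path on the next, which is what the split('\n') unpacking above relies on.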


def fetch_resources(v: Verification, channel: Channel) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = channel.table[resource]
        fields.absolute_url = urllib.parse.urljoin(
            channel.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(
            v, fields.absolute_url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    v.result(open(
        channel.table['git-revision'].file).read(999) == channel.git_revision)


def git_cachedir(git_repo: str) -> str:
    # TODO: Consider using pyxdg to find this path.
    # Assumption: the cache directory is keyed by a hash of the repo URL.
    return os.path.expanduser(
        '~/.cache/pinch/git/%s' %
        digest_string(git_repo.encode()))
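
# All git operations below (fetching, ancestry checks, archiving) run against
# this per-repository bare cache directory.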


def verify_git_ancestry(v: Verification, channel: Channel) -> None:
    cachedir = git_cachedir(channel.git_repo)
    v.status('Verifying rev is an ancestor of ref')
    # Assumed git invocation: `merge-base --is-ancestor` exits 0 iff the first
    # commit is an ancestor of the second.
    process = subprocess.run(['git',
                              '-C', cachedir,
                              'merge-base', '--is-ancestor',
                              channel.git_revision,
                              channel.git_ref])
    v.result(process.returncode == 0)

    if hasattr(channel, 'old_git_revision'):
        v.status(
            'Verifying rev is an ancestor of previous rev %s' %
            channel.old_git_revision)
        process = subprocess.run(['git',
                                  '-C', cachedir,
                                  'merge-base', '--is-ancestor',
                                  channel.old_git_revision,
                                  channel.git_revision])
        v.result(process.returncode == 0)


def git_fetch(v: Verification, channel: Channel) -> None:
    # It would be nice if we could share the nix git cache, but as of the time
    # of writing it is transitioning from gitv2 (deprecated) to gitv3 (not ready
    # yet), and trying to straddle them both is too far into nix implementation
    # details for my comfort.  So we re-implement here half of nix.fetchGit.

    cachedir = git_cachedir(channel.git_repo)
    if not os.path.exists(cachedir):
        v.status("Initializing git repo")
        process = subprocess.run(
            ['git', 'init', '--bare', cachedir])
        v.result(process.returncode == 0)

    v.status('Fetching ref "%s" from %s' % (channel.git_ref, channel.git_repo))
    # We don't use --force here because we want to abort and freak out if forced
    # updates are happening.
    process = subprocess.run(['git',
                              '-C', cachedir,
                              'fetch',
                              channel.git_repo,
                              '%s:%s' % (channel.git_ref,
                                         channel.git_ref)])
    v.result(process.returncode == 0)

    if hasattr(channel, 'git_revision'):
        v.status('Verifying that fetch retrieved this rev')
        process = subprocess.run(
            ['git', '-C', cachedir, 'cat-file', '-e', channel.git_revision])
        v.result(process.returncode == 0)

    # Assumption: read the revision the ref now points at straight out of the
    # bare repository's loose ref file.
    channel.git_revision = open(
        os.path.join(cachedir, 'refs', 'heads',
                     channel.git_ref)).read(999).strip()

    verify_git_ancestry(v, channel)


def ensure_git_rev_available(v: Verification, channel: Channel) -> None:
    cachedir = git_cachedir(channel.git_repo)
    if os.path.exists(cachedir):
        v.status('Checking if we already have this rev:')
        process = subprocess.run(
            ['git', '-C', cachedir, 'cat-file', '-e', channel.git_revision])
        if process.returncode == 0:
            v.status('yes')
        if process.returncode == 1:
            v.status('no')
        v.result(process.returncode == 0 or process.returncode == 1)
        if process.returncode == 0:
            verify_git_ancestry(v, channel)
            return
    git_fetch(v, channel)


def compare_tarball_and_git(
        v: Verification,
        channel: Channel,
        channel_contents: str,
        git_contents: str) -> None:
    v.status('Comparing channel tarball with git checkout')
    match, mismatch, errors = compare(os.path.join(
        channel_contents, channel.release_name), git_contents)
    v.check('%d files match' % len(match), len(match) > 0)
    v.check('%d files differ' % len(mismatch), len(mismatch) == 0)

    # Some files exist only on one side (e.g. the tarball-only metadata files
    # read in check_channel_metadata below); treat those as expected and flag
    # anything else.  The exact list here is a reconstruction.
    expected_errors = ['.git-revision', '.version-suffix']
    benign_errors = []
    for ee in expected_errors:
        if ee in errors:
            errors.remove(ee)
            benign_errors.append(ee)
    v.check(
        '%d unexpected incomparable files' % len(errors),
        len(errors) == 0)
    v.check(
        '(%d of %d expected incomparable files)' %
        (len(benign_errors),
         len(expected_errors)),
        len(benign_errors) == len(expected_errors))


def extract_tarball(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Extracting tarball %s' %
             channel.table['nixexprs.tar.xz'].file)
    shutil.unpack_archive(
        channel.table['nixexprs.tar.xz'].file,
        dest)


def git_checkout(v: Verification, channel: Channel, dest: str) -> None:
    v.status('Checking out corresponding git revision')
    # Assumed pipeline: `git archive` the revision and unpack it with tar.
    git = subprocess.Popen(['git',
                            '-C',
                            git_cachedir(channel.git_repo),
                            'archive',
                            channel.git_revision],
                           stdout=subprocess.PIPE)
    tar = subprocess.Popen(
        ['tar', 'x', '-C', dest, '-f', '-'], stdin=git.stdout)
    if git.stdout:
        git.stdout.close()
    tar.wait()
    git.wait()
    v.result(git.returncode == 0 and tar.returncode == 0)


def git_get_tarball(v: Verification, channel: Channel) -> str:
    with tempfile.TemporaryDirectory() as output_dir:
        output_filename = os.path.join(
            output_dir, channel.release_name + '.tar.xz')
        with open(output_filename, 'w') as output_file:
            v.status(
                'Generating tarball for git revision %s' %
                channel.git_revision)
            # Assumed pipeline: `git archive` the revision and compress with xz.
            git = subprocess.Popen(['git',
                                    '-C',
                                    git_cachedir(channel.git_repo),
                                    'archive',
                                    '--prefix=%s/' % channel.release_name,
                                    channel.git_revision],
                                   stdout=subprocess.PIPE)
            xz = subprocess.Popen(['xz'], stdin=git.stdout, stdout=output_file)
            if git.stdout:
                git.stdout.close()
            xz.wait()
            git.wait()
            v.result(git.returncode == 0 and xz.returncode == 0)

        v.status('Putting tarball in Nix store')
        process = subprocess.run(
            ['nix-store', '--add', output_filename], capture_output=True)
        v.result(process.returncode == 0)
        return process.stdout.decode().strip()
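
# `nix-store --add` prints the store path of the added file, which is what
# update() below records as the channel's tarball source.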


def check_channel_metadata(
        v: Verification,
        channel: Channel,
        channel_contents: str) -> None:
    v.status('Verifying git commit in channel tarball')
    v.result(open(
        os.path.join(
            channel_contents,
            channel.release_name,
            '.git-revision')).read(999) == channel.git_revision)

    v.status(
        'Verifying version-suffix is a suffix of release name %s:' %
        channel.release_name)
    version_suffix = open(
        os.path.join(
            channel_contents,
            channel.release_name,
            '.version-suffix')).read(999)
    v.status(version_suffix)
    v.result(channel.release_name.endswith(version_suffix))


def check_channel_contents(v: Verification, channel: Channel) -> None:
    with tempfile.TemporaryDirectory() as channel_contents, \
            tempfile.TemporaryDirectory() as git_contents:

        extract_tarball(v, channel, channel_contents)
        check_channel_metadata(v, channel, channel_contents)

        git_checkout(v, channel, git_contents)

        compare_tarball_and_git(v, channel, channel_contents, git_contents)

        v.status('Removing temporary directories')
    v.result(True)


def pin_channel(v: Verification, channel: Channel) -> None:
    fetch(v, channel)
    parse_channel(v, channel)
    fetch_resources(v, channel)
    ensure_git_rev_available(v, channel)
    check_channel_contents(v, channel)


def git_revision_name(v: Verification, channel: Channel) -> str:
    v.status('Getting commit date')
    # Assumed `git log` flags: a single line built from the commit timestamp
    # and abbreviated hash of the pinned revision.
    process = subprocess.run(['git',
                              '-C',
                              git_cachedir(channel.git_repo),
                              'log', '-n1', '--format=%ct-%h',
                              channel.git_revision],
                             capture_output=True)
    v.result(process.returncode == 0 and process.stdout != b'')
    return '%s-%s' % (os.path.basename(channel.git_repo),
                      process.stdout.decode().strip())


def pin(args: argparse.Namespace) -> None:
    v = Verification()
    config = configparser.ConfigParser()
    config.read_file(open(args.channels_file), args.channels_file)
    for section in config.sections():
        if args.channels and section not in args.channels:
            continue

        channel = Channel(**dict(config[section].items()))
        if hasattr(channel, 'git_revision'):
            channel.old_git_revision = channel.git_revision
            del channel.git_revision

        if 'channel_url' in config[section]:
            pin_channel(v, channel)
            config[section]['release_name'] = channel.release_name
            config[section]['tarball_url'] = channel.table['nixexprs.tar.xz'].absolute_url
            config[section]['tarball_sha256'] = channel.table['nixexprs.tar.xz'].digest
        else:
            git_fetch(v, channel)
            config[section]['release_name'] = git_revision_name(v, channel)
        config[section]['git_revision'] = channel.git_revision

    with open(args.channels_file, 'w') as configfile:
        config.write(configfile)


def update(args: argparse.Namespace) -> None:
    v = Verification()
    config = configparser.ConfigParser()
    config.read_file(open(args.channels_file), args.channels_file)
    exprs = []
    for section in config.sections():
        if 'channel_url' in config[section]:
            tarball = fetch_with_nix_prefetch_url(
                v, config[section]['tarball_url'], Digest16(
                    config[section]['tarball_sha256']))
        else:
            channel = Channel(**dict(config[section].items()))
            ensure_git_rev_available(v, channel)
            tarball = git_get_tarball(v, channel)
        exprs.append(
            'f: f { name = "%s"; channelName = "%s"; src = builtins.storePath "%s"; }' %
            (config[section]['release_name'], section, tarball))

    # Assumed nix-env invocation: install the pinned channels into the user's
    # channels profile from the expressions built above.
    command = ['nix-env',
               '--profile',
               '/nix/var/nix/profiles/per-user/%s/channels' %
               getpass.getuser(),  # assumption: current user's profile
               '--file',
               '<nix/unpack-channel.nix>',
               '--install',
               '--from-expression'] + exprs
    if args.dry_run:
        print(' '.join(map(shlex.quote, command)))
    else:
        v.status('Installing channels with nix-env')
        process = subprocess.run(command)
        v.result(process.returncode == 0)
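
# Each expression handed to `nix-env --from-expression` is a function that
# nix-env applies to the default expression from --file (here
# <nix/unpack-channel.nix>), presumably yielding the unpacked channel
# derivation named after the pinned release.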


parser = argparse.ArgumentParser(prog='pinch')
subparsers = parser.add_subparsers(dest='mode', required=True)
parser_pin = subparsers.add_parser('pin')
parser_pin.add_argument('channels_file', type=str)
parser_pin.add_argument('channels', type=str, nargs='*')
parser_pin.set_defaults(func=pin)
parser_update = subparsers.add_parser('update')
parser_update.add_argument('--dry-run', action='store_true')
parser_update.add_argument('channels_file', type=str)
parser_update.set_defaults(func=update)
args = parser.parse_args()
args.func(args)
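
# Example invocations (per the argument parsers above):
#   pinch pin channels.ini [CHANNEL ...]
#   pinch update [--dry-run] channels.ini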