+        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
+        channel.table[name] = ChannelTableEntry(
+            url=url, digest=digest, size=size)
+    v.ok()
+    return GitPin(release_name=title_name, git_revision=channel.git_revision)
+
+
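+# A Digest16 is a sha256 hash in hex (base-16) form, as produced by
+# hashlib's hexdigest().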
+def digest_string(s: bytes) -> Digest16:
+    return Digest16(hashlib.sha256(s).hexdigest())
+
+
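+# Hash a file in 4 KiB chunks so we never hold the whole file in memory.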
+def digest_file(filename: str) -> Digest16:
+    hasher = hashlib.sha256()
+    with open(filename, 'rb') as f:
+        # pylint: disable=cell-var-from-loop
+        for block in iter(lambda: f.read(4096), b''):
+            hasher.update(block)
+    return Digest16(hasher.hexdigest())
+
+
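+# A Digest32 is the same sha256 hash in nix's base-32 encoding.  Rather than
+# reimplement nix's encoding, shell out to the nix CLI for the conversion.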
+def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
+    v.status('Converting digest to base16')
+    process = subprocess.run(
+        ['nix', 'to-base16', '--type', 'sha256', digest32], stdout=subprocess.PIPE)
+    v.result(process.returncode == 0)
+    return Digest16(process.stdout.decode().strip())
+
+
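+# Inverse of to_Digest16: convert a hex digest back to nix's base-32 form.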
+def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
+    v.status('Converting digest to base32')
+    process = subprocess.run(
+        ['nix', 'to-base32', '--type', 'sha256', digest16], stdout=subprocess.PIPE)
+    v.result(process.returncode == 0)
+    return Digest32(process.stdout.decode().strip())
+
+
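+# Fetch a URL into the nix store, then verify the download two ways: check
+# the digest nix-prefetch-url reports, and re-hash the resulting file
+# ourselves.  Returns the store path.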
+def fetch_with_nix_prefetch_url(
+        v: Verification,
+        url: str,
+        digest: Digest16) -> str:
+    v.status('Fetching %s' % url)
+    process = subprocess.run(
+        ['nix-prefetch-url', '--print-path', url, digest], stdout=subprocess.PIPE)
+    v.result(process.returncode == 0)
+    prefetch_digest, path, empty = process.stdout.decode().split('\n')
+    assert empty == ''
+    v.check("Verifying nix-prefetch-url's digest",
+            to_Digest16(v, Digest32(prefetch_digest)) == digest)
+    v.status('Verifying file digest')
+    file_digest = digest_file(path)
+    v.result(file_digest == digest)
+    return path  # type: ignore  # (for old mypy)
+
+
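+# Resolve each table entry's URL relative to the channel URL we were
+# actually served (after redirects) and fetch it, then confirm the fetched
+# git-revision file agrees with the revision scraped from the channel page.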
+def fetch_resources(
+        v: Verification,
+        channel: ChannelSearchPath,
+        pin: GitPin) -> None:
+    for resource in ['git-revision', 'nixexprs.tar.xz']:
+        fields = channel.table[resource]
+        fields.absolute_url = urllib.parse.urljoin(
+            channel.forwarded_url, fields.url)
+        fields.file = fetch_with_nix_prefetch_url(
+            v, fields.absolute_url, fields.digest)
+    v.status('Verifying git commit on main page matches git commit in table')
+    with open(channel.table['git-revision'].file) as f:
+        v.result(f.read(999) == pin.git_revision)
+
+
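+# One bare clone per upstream repo, under $XDG_CACHE_HOME/pinch/git, keyed
+# by a digest of the repo URL so different URLs never collide.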
+def git_cachedir(git_repo: str) -> str:
+    return os.path.join(
+        xdg.XDG_CACHE_HOME,
+        'pinch/git',
+        digest_string(git_repo.encode()))
+
+
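+# Cache path for the tarball corresponding to a particular repo, revision,
+# and release name.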
+def tarball_cache_file(channel: TarrableSearchPath, pin: GitPin) -> str:
+    return os.path.join(
+        xdg.XDG_CACHE_HOME,
+        'pinch/git-tarball',
+        '%s-%s-%s' %
+        (digest_string(channel.git_repo.encode()),
+         pin.git_revision,
+         pin.release_name))
+
+
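+# Two safety checks: the revision we're pinning must be reachable from the
+# channel's ref, and, if we had a previous pin, the update must be a
+# fast-forward (the old revision an ancestor of the new one).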
+def verify_git_ancestry(
+        v: Verification,
+        channel: TarrableSearchPath,
+        new_revision: str,
+        old_revision: Optional[str]) -> None:
+    cachedir = git_cachedir(channel.git_repo)
+    v.status('Verifying rev is an ancestor of ref')
+    process = subprocess.run(['git',
+                              '-C',
+                              cachedir,
+                              'merge-base',
+                              '--is-ancestor',
+                              new_revision,
+                              channel.git_ref])
+    v.result(process.returncode == 0)
+
+    if old_revision is not None:
+        v.status(
+            'Verifying rev is an ancestor of previous rev %s' %
+            old_revision)
+        process = subprocess.run(['git',
+                                  '-C',
+                                  cachedir,
+                                  'merge-base',
+                                  '--is-ancestor',
+                                  old_revision,
+                                  new_revision])
+        v.result(process.returncode == 0)
+
+
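+# Bring our cached clone up to date from upstream and return the new tip of
+# the channel's ref.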
+def git_fetch(
+        v: Verification,
+        channel: TarrableSearchPath,
+        desired_revision: Optional[str],
+        old_revision: Optional[str]) -> str:
+    # It would be nice if we could share the nix git cache, but as of the
+    # time of writing it is transitioning from gitv2 (deprecated) to gitv3
+    # (not ready yet), and trying to straddle them both is too far into nix
+    # implementation details for my comfort. So we re-implement here half of
+    # nix.fetchGit. :(
+
+    cachedir = git_cachedir(channel.git_repo)
+    if not os.path.exists(cachedir):
+        v.status('Initializing git repo')
+        process = subprocess.run(
+            ['git', 'init', '--bare', cachedir])
+        v.result(process.returncode == 0)
+
+    v.status('Fetching ref "%s" from %s' % (channel.git_ref, channel.git_repo))
+    # We don't use --force here because we want to abort and freak out if
+    # forced updates are happening.
+    process = subprocess.run(['git',
+                              '-C',
+                              cachedir,
+                              'fetch',
+                              channel.git_repo,
+                              '%s:%s' % (channel.git_ref,
+                                         channel.git_ref)])
+    v.result(process.returncode == 0)
+
+    if desired_revision is not None:
+        v.status('Verifying that fetch retrieved this rev')
+        process = subprocess.run(
+            ['git', '-C', cachedir, 'cat-file', '-e', desired_revision])
+        v.result(process.returncode == 0)
+
+    with open(os.path.join(cachedir, 'refs', 'heads', channel.git_ref)) as f:
+        new_revision = f.read(999).strip()
+
+    verify_git_ancestry(v, channel, new_revision, old_revision)
+
+    return new_revision
+
+
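+# Skip the network entirely if the cached clone already has the pinned
+# revision; `git cat-file -e` exits 0 when the object is present and 1 when
+# it is missing.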
+def ensure_git_rev_available(
+        v: Verification,
+        channel: TarrableSearchPath,
+        pin: GitPin,
+        old_revision: Optional[str]) -> None:
+    cachedir = git_cachedir(channel.git_repo)
+    if os.path.exists(cachedir):
+        v.status('Checking if we already have this rev:')
+        process = subprocess.run(
+            ['git', '-C', cachedir, 'cat-file', '-e', pin.git_revision])
+        if process.returncode == 0:
+            v.status('yes')
+        elif process.returncode == 1:
+            v.status('no')
+        v.result(process.returncode in (0, 1))
+        if process.returncode == 0:
+            verify_git_ancestry(v, channel, pin.git_revision, old_revision)
+            return
+    git_fetch(v, channel, pin.git_revision, old_revision)
+
+
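+# The channel tarball should match the git checkout except for a short list
+# of expected incomparable files (generated metadata like .git-revision);
+# any other difference or incomparable file is a failure.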
+def compare_tarball_and_git(
+        v: Verification,
+        pin: GitPin,
+        channel_contents: str,
+        git_contents: str) -> None:
+    v.status('Comparing channel tarball with git checkout')
+    match, mismatch, errors = compare(os.path.join(
+        channel_contents, pin.release_name), git_contents)
+    v.ok()
+    v.check('%d files match' % len(match), len(match) > 0)
+    v.check('%d files differ' % len(mismatch), len(mismatch) == 0)
+    expected_errors = [
+        '.git-revision',
+        '.version-suffix',
+        'nixpkgs',
+        'programs.sqlite',
+        'svn-revision']
+    benign_errors = []
+    for ee in expected_errors:
+        if ee in errors:
+            errors.remove(ee)
+            benign_errors.append(ee)
+    v.check(
+        '%d unexpected incomparable files' %
+        len(errors),
+        len(errors) == 0)
+    v.check(
+        '(%d of %d expected incomparable files)' %
+        (len(benign_errors),
+         len(expected_errors)),
+        len(benign_errors) == len(expected_errors))
+
+
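+# Unpack the nixexprs tarball we fetched earlier so its contents can be
+# compared against the git checkout.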
+def extract_tarball(
+        v: Verification,
+        channel: TarrableSearchPath,
+        dest: str) -> None:
+    v.status('Extracting tarball %s' %
+             channel.table['nixexprs.tar.xz'].file)
+    shutil.unpack_archive(
+        channel.table['nixexprs.tar.xz'].file,
+        dest)