git.scottworley.com Git - pinch/blobdiff - pinch.py
Rename Info -> Channel
[pinch] / pinch.py
index cdc0ba8c64fbac003e623badbd49f471cf675816..89e053b9bb648244ce8758e8cef4e3373c8708b0 100644 (file)
--- a/pinch.py
+++ b/pinch.py
@@ -5,6 +5,7 @@ import operator
 import os
 import os.path
 import shutil
+import subprocess
 import tempfile
 import types
 import urllib.parse
@@ -15,24 +16,28 @@ from typing import (
     Dict,
     Iterable,
     List,
+    NewType,
     Sequence,
     Tuple,
 )
 
+Digest16 = NewType('Digest16', str)
+Digest32 = NewType('Digest32', str)
 
-class InfoTableEntry(types.SimpleNamespace):
-    content: bytes
-    digest: str
+
+class ChannelTableEntry(types.SimpleNamespace):
+    digest: Digest16
     file: str
     size: int
     url: str
 
 
-class Info(types.SimpleNamespace):
+class Channel(types.SimpleNamespace):
     channel_html: bytes
     forwarded_url: str
     git_revision: str
-    table: Dict[str, InfoTableEntry]
+    release_name: str
+    table: Dict[str, ChannelTableEntry]
     url: str
 
 
@@ -102,75 +107,109 @@ def compare(a: str,
     return filecmp.cmpfiles(a, b, files, shallow=False)
 
 
-def fetch(v: Verification, channel_url: str) -> Info:
-    info = Info()
-    info.url = channel_url
+def fetch(v: Verification, channel_url: str) -> Channel:
+    channel = Channel()
+    channel.url = channel_url
     v.status('Fetching channel')
     request = urllib.request.urlopen(
         'https://channels.nixos.org/nixos-20.03', timeout=10)
-    info.channel_html = request.read()
-    info.forwarded_url = request.geturl()
+    channel.channel_html = request.read()
+    channel.forwarded_url = request.geturl()
     v.result(request.status == 200)
-    v.check('Got forwarded', info.url != info.forwarded_url)
-    return info
+    v.check('Got forwarded', channel.url != channel.forwarded_url)
+    return channel
 
 
-def parse(v: Verification, info: Info) -> None:
+def parse_channel(v: Verification, channel: Channel) -> None:
     v.status('Parsing channel description as XML')
-    d = xml.dom.minidom.parseString(info.channel_html)
+    d = xml.dom.minidom.parseString(channel.channel_html)
     v.ok()
 
-    v.status('Extracting git commit')
+    v.status('Extracting release name:')
+    title_name = d.getElementsByTagName(
+        'title')[0].firstChild.nodeValue.split()[2]
+    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
+    v.status(title_name)
+    v.result(title_name == h1_name)
+    channel.release_name = title_name
+
+    v.status('Extracting git commit:')
     git_commit_node = d.getElementsByTagName('tt')[0]
-    info.git_commit = git_commit_node.firstChild.nodeValue
+    channel.git_commit = git_commit_node.firstChild.nodeValue
+    v.status(channel.git_commit)
     v.ok()
     v.status('Verifying git commit label')
     v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')
 
     v.status('Parsing table')
-    info.table = {}
+    channel.table = {}
     for row in d.getElementsByTagName('tr')[1:]:
         name = row.childNodes[0].firstChild.firstChild.nodeValue
         url = row.childNodes[0].firstChild.getAttribute('href')
         size = int(row.childNodes[1].firstChild.nodeValue)
-        digest = row.childNodes[2].firstChild.firstChild.nodeValue
-        info.table[name] = InfoTableEntry(url=url, digest=digest, size=size)
+        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
+        channel.table[name] = ChannelTableEntry(url=url, digest=digest, size=size)
     v.ok()
 
 
-def fetch_resources(v: Verification, info: Info) -> None:
-
+def digest_file(filename: str) -> Digest16:
+    hasher = hashlib.sha256()
+    with open(filename, 'rb') as f:
+        # pylint: disable=cell-var-from-loop
+        for block in iter(lambda: f.read(4096), b''):
+            hasher.update(block)
+    return Digest16(hasher.hexdigest())
+
+
+def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
+    v.status('Converting digest to base16')
+    process = subprocess.run(
+        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
+    v.result(process.returncode == 0)
+    return Digest16(process.stdout.decode().strip())
+
+
+def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
+    v.status('Converting digest to base32')
+    process = subprocess.run(
+        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
+    v.result(process.returncode == 0)
+    return Digest32(process.stdout.decode().strip())
+
+
+def fetch_with_nix_prefetch_url(
+        v: Verification,
+        url: str,
+        digest: Digest16) -> str:
+    v.status('Fetching %s' % url)
+    process = subprocess.run(
+        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
+    v.result(process.returncode == 0)
+    prefetch_digest, path, empty = process.stdout.decode().split('\n')
+    assert empty == ''
+    v.check("Verifying nix-prefetch-url's digest",
+            to_Digest16(v, Digest32(prefetch_digest)) == digest)
+    v.status("Verifying file digest")
+    file_digest = digest_file(path)
+    v.result(file_digest == digest)
+    return path
+
+
+def fetch_resources(v: Verification, channel: Channel) -> None:
     for resource in ['git-revision', 'nixexprs.tar.xz']:
-        fields = info.table[resource]
-        v.status('Fetching resource "%s"' % resource)
-        url = urllib.parse.urljoin(info.forwarded_url, fields.url)
-        request = urllib.request.urlopen(url, timeout=10)
-        if fields.size < 4096:
-            fields.content = request.read()
-        else:
-            with tempfile.NamedTemporaryFile(suffix='.nixexprs.tar.xz', delete=False) as tmp_file:
-                shutil.copyfileobj(request, tmp_file)
-                fields.file = tmp_file.name
-        v.result(request.status == 200)
-        v.status('Verifying digest for "%s"' % resource)
-        if fields.size < 4096:
-            actual_hash = hashlib.sha256(fields.content).hexdigest()
-        else:
-            hasher = hashlib.sha256()
-            with open(fields.file, 'rb') as f:
-                # pylint: disable=cell-var-from-loop
-                for block in iter(lambda: f.read(4096), b''):
-                    hasher.update(block)
-            actual_hash = hasher.hexdigest()
-        v.result(actual_hash == fields.digest)
-    v.check('Verifying git commit on main page matches git commit in table',
-            info.table['git-revision'].content.decode() == info.git_commit)
-
-
-def extract_channel(v: Verification, info: Info) -> None:
+        fields = channel.table[resource]
+        url = urllib.parse.urljoin(channel.forwarded_url, fields.url)
+        fields.file = fetch_with_nix_prefetch_url(v, url, fields.digest)
+    v.status('Verifying git commit on main page matches git commit in table')
+    v.result(
+        open(
+            channel.table['git-revision'].file).read(999) == channel.git_commit)
+
+
+def check_channel_contents(v: Verification, channel: Channel) -> None:
     with tempfile.TemporaryDirectory() as d:
-        v.status('Extracting nixexprs.tar.xz')
-        shutil.unpack_archive(info.table['nixexprs.tar.xz'].file, d)
+        v.status('Extracting %s' % channel.table['nixexprs.tar.xz'].file)
+        shutil.unpack_archive(channel.table['nixexprs.tar.xz'].file, d)
         v.ok()
         v.status('Removing temporary directory')
     v.ok()
@@ -178,11 +217,11 @@ def extract_channel(v: Verification, info: Info) -> None:
 
 def main() -> None:
     v = Verification()
-    info = fetch(v, 'https://channels.nixos.org/nixos-20.03')
-    parse(v, info)
-    fetch_resources(v, info)
-    extract_channel(v, info)
-    print(info)
+    channel = fetch(v, 'https://channels.nixos.org/nixos-20.03')
+    parse_channel(v, channel)
+    fetch_resources(v, channel)
+    check_channel_contents(v, channel)
+    print(channel)
 
 
 main()