git.scottworley.com Git - pinch/blobdiff - pinch.py
main creates the Channel
[pinch] / pinch.py
index f974d1c0adc3982dbc08e469ed0eed4c4f1b682b..a8b6103c3bb78e157673afadeca0864ff85ce231 100644 (file)
--- a/pinch.py
+++ b/pinch.py
@@ -5,20 +5,41 @@ import operator
 import os
 import os.path
 import shutil
+import subprocess
 import tempfile
+import types
 import urllib.parse
 import urllib.request
 import xml.dom.minidom
 
 from typing import (
-    Any,
     Dict,
     Iterable,
     List,
+    NewType,
     Sequence,
     Tuple,
 )
 
# Distinct string types for the two sha256 digest encodings used here:
# base16 (hex, as produced by hashlib) and base32 (nix's encoding, as
# printed by nix-prefetch-url).  NewType lets the type checker catch
# accidental mixing; at runtime both are plain str.
Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)
+
+
class ChannelTableEntry(types.SimpleNamespace):
    # One row of the channel page's resource table (filled in by
    # parse_channel; `file` is added later by fetch_resources).
    digest: Digest16  # expected sha256 of the resource, base16
    file: str         # local path of the fetched resource
    size: int         # size in bytes, from the table's second column
    url: str          # href from the table, possibly relative to the page URL
+
+
class Channel(types.SimpleNamespace):
    # Accumulates everything learned about a channel.  Only `url` is set at
    # construction (by main); the rest are filled in progressively by
    # fetch(), parse_channel(), and fetch_resources().
    channel_html: bytes                  # raw bytes of the channel page
    forwarded_url: str                   # final URL after HTTP redirect
    git_commit: str                      # git commit shown on the channel page
                                         # (was annotated `git_revision`, but
                                         # the code assigns/reads .git_commit)
    release_name: str                    # release name from the page title
    table: Dict[str, ChannelTableEntry]  # resource name -> table entry
    url: str                             # channel URL requested by the caller
 
class VerificationError(Exception):
    """Raised when a verification step fails."""
@@ -86,74 +107,105 @@ def compare(a: str,
     return filecmp.cmpfiles(a, b, files, shallow=False)
 
 
def fetch(v: Verification, channel: Channel) -> None:
    """Download the channel page into `channel` and record the redirect.

    Sets channel.channel_html and channel.forwarded_url, and checks that
    the request was redirected away from channel.url.
    """
    v.status('Fetching channel')
    # Context manager ensures the HTTP response is closed even if a
    # verification step raises.
    with urllib.request.urlopen(channel.url, timeout=10) as request:
        channel.channel_html = request.read()
        channel.forwarded_url = request.geturl()
        v.result(request.status == 200)
    v.check('Got forwarded', channel.url != channel.forwarded_url)
 
 
def parse_channel(v: Verification, channel: Channel) -> None:
    """Extract release name, git commit, and the resource table from the
    already-fetched channel page (channel.channel_html)."""
    v.status('Parsing channel description as XML')
    dom = xml.dom.minidom.parseString(channel.channel_html)
    v.ok()

    # The release name appears as the third word of both <title> and <h1>;
    # they must agree.
    v.status('Extracting release name:')
    title_name, h1_name = [
        dom.getElementsByTagName(tag)[0].firstChild.nodeValue.split()[2]
        for tag in ('title', 'h1')]
    v.status(title_name)
    v.result(title_name == h1_name)
    channel.release_name = title_name

    v.status('Extracting git commit:')
    commit_node = dom.getElementsByTagName('tt')[0]
    channel.git_commit = commit_node.firstChild.nodeValue
    v.status(channel.git_commit)
    v.ok()
    # The <tt> element must be labeled, or we grabbed the wrong one.
    v.status('Verifying git commit label')
    v.result(commit_node.previousSibling.nodeValue == 'Git commit ')

    # Each table row (skipping the header) is: link cell, size cell,
    # digest cell.
    v.status('Parsing table')
    channel.table = {}
    for row in dom.getElementsByTagName('tr')[1:]:
        cells = row.childNodes
        link = cells[0].firstChild
        channel.table[link.firstChild.nodeValue] = ChannelTableEntry(
            url=link.getAttribute('href'),
            digest=Digest16(cells[2].firstChild.firstChild.nodeValue),
            size=int(cells[1].firstChild.nodeValue))
    v.ok()
 
 
def digest_file(filename: str) -> Digest16:
    """Return the base16 (hex) sha256 digest of the named file's contents."""
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # Hash in fixed-size chunks so large files are never fully loaded
        # into memory.  (Dropped a stale `pylint: disable=cell-var-from-loop`
        # comment carried over from the old inline version; there is no
        # closure-in-loop here.)
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())
+
+
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    """Convert a base32 sha256 digest to base16 using `nix to-base16`."""
    v.status('Converting digest to base16')
    cmd = ['nix', 'to-base16', '--type', 'sha256', digest32]
    converted = subprocess.run(cmd, capture_output=True)
    v.result(converted.returncode == 0)
    return Digest16(converted.stdout.decode().strip())
+
+
def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    """Convert a base16 sha256 digest to base32 using `nix to-base32`."""
    v.status('Converting digest to base32')
    cmd = ['nix', 'to-base32', '--type', 'sha256', digest16]
    converted = subprocess.run(cmd, capture_output=True)
    v.result(converted.returncode == 0)
    return Digest32(converted.stdout.decode().strip())
+
+
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    """Fetch `url` into the nix store and return its store path.

    The digest is verified three ways: nix-prefetch-url checks it itself,
    we re-check the digest it reports, and we re-hash the file on disk.
    """
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    # Expected stdout: "<digest>\n<store-path>\n" -> three fields, last empty.
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    # Was a bare `assert`, which is silently stripped under `python -O`;
    # make the format check an unconditional verification step instead.
    v.check('Verifying nix-prefetch-url output format', empty == '')
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path
+
+
def fetch_resources(v: Verification, channel: Channel) -> None:
    """Fetch the channel's git-revision and nixexprs.tar.xz resources.

    Stores the local path of each resource in its table entry, then checks
    that the fetched git-revision file matches the commit shown on the
    channel page.
    """
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = channel.table[resource]
        # Table hrefs may be relative to the (post-redirect) channel page.
        url = urllib.parse.urljoin(channel.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(v, url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    # Context manager closes the file handle (previously leaked by a bare
    # open().read()).  read(999) bounds the comparison; the file should
    # contain just the 40-char commit hash.
    with open(channel.table['git-revision'].file) as git_revision_file:
        v.result(git_revision_file.read(999) == channel.git_commit)
+
+
def check_channel_contents(v: Verification, channel: Channel) -> None:
    """Unpack nixexprs.tar.xz into a scratch directory, then clean up.

    NOTE(review): shutil.unpack_archive does not guard against path
    traversal in the archive; here the tarball's digest was already
    verified upstream — confirm that is an acceptable trust boundary.
    """
    archive = channel.table['nixexprs.tar.xz'].file
    with tempfile.TemporaryDirectory() as scratch:
        v.status('Extracting %s' % archive)
        shutil.unpack_archive(archive, scratch)
        v.ok()
        v.status('Removing temporary directory')
    v.ok()
@@ -161,11 +213,12 @@ def extract_channel(v: Verification, info: Dict[str, Any]) -> None:
 
def main() -> None:
    """Verify a nixos channel end-to-end and print what was gathered.

    An optional first command-line argument overrides the channel URL;
    with no argument the original hard-coded channel is used.
    """
    import sys  # local import keeps this change self-contained
    v = Verification()
    url = sys.argv[1] if len(sys.argv) > 1 else (
        'https://channels.nixos.org/nixos-20.03')
    channel = Channel(url=url)
    fetch(v, channel)
    parse_channel(v, channel)
    fetch_resources(v, channel)
    check_channel_contents(v, channel)
    print(channel)
 
 
# Guard the entry point so importing this module doesn't trigger network
# and subprocess work.
if __name__ == '__main__':
    main()