import filecmp
import functools
import hashlib
import operator
import os
import os.path
import shutil
import subprocess
import tempfile
import types
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import (
    Dict,
    Iterable,
    List,
    NewType,
    Sequence,
    Tuple,
)

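# sha256 digests in the two encodings Nix uses: Digest16 is hexadecimal,
# Digest32 is Nix's base32 representation.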
Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)


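# One row of the channel page's file table: URL, size, and sha256 digest,
# plus the local path once the file has been fetched.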
class InfoTableEntry(types.SimpleNamespace):
    digest: Digest16
    file: str
    size: int
    url: str


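# Everything learned about a channel: the raw channel page, the URL it
# forwards to, the release name and git revision scraped from the page,
# and the parsed file table.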
class Info(types.SimpleNamespace):
    channel_html: bytes
    forwarded_url: str
    git_revision: str
    release_name: str
    table: Dict[str, InfoTableEntry]
    url: str


class VerificationError(Exception):
    pass


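# Prints progress messages and column-aligned OK / FAIL results, raising
# VerificationError on the first failed check.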
class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', flush=True)
        self.line_length += 1 + len(s)  # Unicode??

    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color))
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)

    def ok(self) -> None:
        self.result(True)


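# Recursively compare two directory trees (ignoring .git/), returning
# filecmp.cmpfiles' (match, mismatch, errors) lists.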
def compare(a: str,
            b: str) -> Tuple[Sequence[str],
                             Sequence[str],
                             Sequence[str]]:

    def throw(error: OSError) -> None:
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    files = functools.reduce(
        operator.or_,
        (set(exclude_dot_git(recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)


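# Download the channel page and remember the URL it forwards to.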
def fetch(v: Verification, channel_url: str) -> Info:
    info = Info()
    info.url = channel_url
    v.status('Fetching channel')
    request = urllib.request.urlopen(channel_url, timeout=10)
    info.channel_html = request.read()
    info.forwarded_url = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', info.url != info.forwarded_url)
    return info


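# Scrape the channel page: the release name from <title> and <h1>, the git
# commit from the <tt> element, and the file table (name, size, digest).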
def parse_table(v: Verification, info: Info) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(info.channel_html)
    v.ok()

    v.status('Extracting release name:')
    title_name = d.getElementsByTagName(
        'title')[0].firstChild.nodeValue.split()[2]
    h1_name = d.getElementsByTagName('h1')[0].firstChild.nodeValue.split()[2]
    v.status(title_name)
    v.result(title_name == h1_name)
    info.release_name = title_name

    v.status('Extracting git commit:')
    git_commit_node = d.getElementsByTagName('tt')[0]
    info.git_revision = git_commit_node.firstChild.nodeValue
    v.status(info.git_revision)
    v.ok()
    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    info.table = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        info.table[name] = InfoTableEntry(url=url, digest=digest, size=size)
    v.ok()


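# sha256 a file, reading it in 4 KiB blocks.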
def digest_file(filename: str) -> Digest16:
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # pylint: disable=cell-var-from-loop
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())


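# Convert a base32 sha256 digest to base16 via `nix to-base16`.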
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    v.status('Converting digest to base16')
    process = subprocess.run(
        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())


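# Convert a base16 sha256 digest to base32 via `nix to-base32`.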
def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    v.status('Converting digest to base32')
    process = subprocess.run(
        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())


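# Download a URL into the Nix store with nix-prefetch-url, checking both
# the digest nix-prefetch-url reports and the digest of the file on disk
# against the expected digest. Returns the store path.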
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    assert empty == ''
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path


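# Fetch the git-revision and nixexprs.tar.xz files named in the table and
# cross-check the revision against the one shown on the channel page.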
def fetch_resources(v: Verification, info: Info) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = info.table[resource]
        url = urllib.parse.urljoin(info.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(v, url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    with open(info.table['git-revision'].file) as f:
        v.result(f.read(999) == info.git_revision)


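# Unpack nixexprs.tar.xz into a temporary directory, which is removed
# again on exit from the context manager.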
def extract_channel(v: Verification, info: Info) -> None:
    with tempfile.TemporaryDirectory() as d:
        v.status('Extracting %s' % info.table['nixexprs.tar.xz'].file)
        shutil.unpack_archive(info.table['nixexprs.tar.xz'].file, d)
        v.ok()
        v.status('Removing temporary directory')
    v.ok()


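# Verify the nixos-20.03 channel end to end: fetch, parse, download, extract.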
def main() -> None:
    v = Verification()
    info = fetch(v, 'https://channels.nixos.org/nixos-20.03')
    parse_table(v, info)
    fetch_resources(v, info)
    extract_channel(v, info)
    print(info)


if __name__ == '__main__':
    main()