import filecmp
import functools
import hashlib
import operator
import os
import shutil
import subprocess
import tempfile
import types
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import Dict, Iterable, List, NewType, Sequence, Tuple

Digest16 = NewType('Digest16', str)
Digest32 = NewType('Digest32', str)
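# The NewType wrappers above distinguish two encodings of the same sha256
# value: Digest16 is the hex spelling, Digest32 is Nix's base-32 spelling.
# Keeping them as distinct types lets a checker catch mix-ups; conversions
# go through the nix CLI helpers further down.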
class InfoTableEntry(types.SimpleNamespace):
    digest: Digest16
    file: str
    size: int
    url: str


class Info(types.SimpleNamespace):
    channel_html: bytes
    forwarded_url: str
    git_commit: str
    table: Dict[str, InfoTableEntry]
    url: str

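# Verification prints each step as "description ... OK/FAIL" with the result
# right-aligned to the terminal width, and aborts the run by raising
# VerificationError on the first failed check.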
class VerificationError(Exception):
    pass


class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', flush=True)
        self.line_length += 1 + len(s)  # len() counts code points, not display columns

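    # ANSI SGR escape sequences: color 92 is bright green (OK),
    # 91 is bright red (FAIL).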
    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color))
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)

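# A hypothetical usage sketch of Verification (illustration only, not part of
# the verifier):
#
#   v = Verification()
#   v.status('Checking arithmetic')
#   v.result(1 + 1 == 2)                # pads and prints a green "OK "
#   v.check('Five is odd', 5 % 2 == 1)  # status() and result() in one call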
def compare(a: str,
            b: str) -> Tuple[Sequence[str],
                             Sequence[str],
                             Sequence[str]]:

    def throw(error: OSError) -> None:
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

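    # Take the union of the relative file lists from both trees (minus
    # anything under .git/) so a file present on only one side is still
    # reported, in cmpfiles's errors list.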
    files = functools.reduce(
        operator.or_, (set(
            exclude_dot_git(recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)

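# filecmp.cmpfiles with shallow=False compares file contents rather than just
# os.stat() signatures, returning (match, mismatch, errors) lists.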
def fetch(v: Verification, channel_url: str) -> Info:
    info = Info()
    info.url = channel_url
    v.status('Fetching channel')
    request = urllib.request.urlopen(info.url, timeout=10)
    info.channel_html = request.read()
    info.forwarded_url = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', info.url != info.forwarded_url)
    return info

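# Expected page structure (inferred from the parsing below, not from any
# published schema): the first <tt> holds the git commit, preceded by the
# literal text "Git commit ", and each table row after the header is
# <a href=URL>name</a> | size | <tt>sha256 hex digest</tt>.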
def parse_table(v: Verification, info: Info) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(info.channel_html)

    v.status('Extracting git commit')
    git_commit_node = d.getElementsByTagName('tt')[0]
    info.git_commit = git_commit_node.firstChild.nodeValue
    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    info.table = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = Digest16(row.childNodes[2].firstChild.firstChild.nodeValue)
        info.table[name] = InfoTableEntry(url=url, digest=digest, size=size)

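# Stream the file through sha256 in 4 KiB blocks so large tarballs need not
# fit in memory.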
def digest_file(filename: str) -> Digest16:
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        # pylint: disable=cell-var-from-loop
        for block in iter(lambda: f.read(4096), b''):
            hasher.update(block)
    return Digest16(hasher.hexdigest())

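# The two converters below shell out to the Nix CLI. Note this assumes a
# pre-2.4 "nix" binary: "nix to-base16"/"nix to-base32" were later moved to
# "nix hash to-base16"/"nix hash to-base32".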
def to_Digest16(v: Verification, digest32: Digest32) -> Digest16:
    v.status('Converting digest to base16')
    process = subprocess.run(
        ['nix', 'to-base16', '--type', 'sha256', digest32], capture_output=True)
    v.result(process.returncode == 0)
    return Digest16(process.stdout.decode().strip())

def to_Digest32(v: Verification, digest16: Digest16) -> Digest32:
    v.status('Converting digest to base32')
    process = subprocess.run(
        ['nix', 'to-base32', '--type', 'sha256', digest16], capture_output=True)
    v.result(process.returncode == 0)
    return Digest32(process.stdout.decode().strip())

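# nix-prefetch-url downloads url into the Nix store, checks it against the
# expected digest, and (with --print-path) prints the base-32 digest and the
# store path on separate lines.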
def fetch_with_nix_prefetch_url(
        v: Verification,
        url: str,
        digest: Digest16) -> str:
    v.status('Fetching %s' % url)
    process = subprocess.run(
        ['nix-prefetch-url', '--print-path', url, digest], capture_output=True)
    v.result(process.returncode == 0)
    prefetch_digest, path, empty = process.stdout.decode().split('\n')
    assert empty == ''
    v.check("Verifying nix-prefetch-url's digest",
            to_Digest16(v, Digest32(prefetch_digest)) == digest)
    v.status("Verifying file digest")
    file_digest = digest_file(path)
    v.result(file_digest == digest)
    return path

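# The table hrefs are relative, so resolve them against the post-redirect
# channel URL before prefetching.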
def fetch_resources(v: Verification, info: Info) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = info.table[resource]
        url = urllib.parse.urljoin(info.forwarded_url, fields.url)
        fields.file = fetch_with_nix_prefetch_url(v, url, fields.digest)
    v.status('Verifying git commit on main page matches git commit in table')
    v.result(
        open(info.table['git-revision'].file).read(999) == info.git_commit)

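# shutil.unpack_archive infers the .tar.xz format from the store path's file
# name; TemporaryDirectory removes the extracted tree when the with-block
# exits, so this step only proves the tarball unpacks cleanly.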
def extract_channel(v: Verification, info: Info) -> None:
    with tempfile.TemporaryDirectory() as d:
        v.status('Extracting nixexprs.tar.xz')
        shutil.unpack_archive(info.table['nixexprs.tar.xz'].file, d)
        v.status('Removing temporary directory')

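# End-to-end flow: fetch the channel page, parse its resource table, prefetch
# and verify each resource, then check that the tarball extracts.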
def main() -> None:
    v = Verification()
    info = fetch(v, 'https://channels.nixos.org/nixos-20.03')
    parse_table(v, info)
    fetch_resources(v, info)
    extract_channel(v, info)


if __name__ == '__main__':
    main()