import filecmp
import functools
import hashlib
import operator
import os
import shutil
import tempfile
import urllib.parse
import urllib.request
import xml.dom.minidom

from typing import Any, Dict, Iterable, List, Sequence, Tuple
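
# This script verifies a NixOS channel release: it fetches the channel page,
# parses out the git commit and the resource table, checks the advertised
# SHA-256 digests, and unpacks nixexprs.tar.xz.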

class VerificationError(Exception):
    pass


class Verification:

    def __init__(self) -> None:
        self.line_length = 0

    def status(self, s: str) -> None:
        print(s, end=' ', flush=True)
        self.line_length += 1 + len(s)  # Unicode??

    @staticmethod
    def _color(s: str, c: int) -> str:
        return '\033[%2dm%s\033[00m' % (c, s)

    def result(self, r: bool) -> None:
        message, color = {True: ('OK ', 92), False: ('FAIL', 91)}[r]
        length = len(message)
        cols = shutil.get_terminal_size().columns
        pad = (cols - (self.line_length + length)) % cols
        print(' ' * pad + self._color(message, color))
        self.line_length = 0
        if not r:
            raise VerificationError()

    def check(self, s: str, r: bool) -> None:
        self.status(s)
        self.result(r)
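
# Directory-tree comparison helper (the name "compare" is an assumed one):
# walks both trees, skips .git/, and feeds the paths they have in common to
# filecmp.cmpfiles.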
def compare(a: str,
            b: str) -> Tuple[Sequence[str],
                             Sequence[str],
                             Sequence[str]]:

    def throw(error: OSError) -> None:
        raise error

    def join(x: str, y: str) -> str:
        return y if x == '.' else os.path.join(x, y)

    def recursive_files(d: str) -> Iterable[str]:
        all_files: List[str] = []
        for path, dirs, files in os.walk(d, onerror=throw):
            rel = os.path.relpath(path, start=d)
            all_files.extend(join(rel, f) for f in files)
            for dir_or_link in dirs:
                if os.path.islink(join(path, dir_or_link)):
                    all_files.append(join(rel, dir_or_link))
        return all_files

    def exclude_dot_git(files: Iterable[str]) -> Iterable[str]:
        return (f for f in files if not f.startswith('.git/'))

    # Set intersection (operator.and_) is assumed here for the elided reduce
    # argument, so only paths present in both trees are compared.
    files = functools.reduce(
        operator.and_, (set(exclude_dot_git(
            recursive_files(x))) for x in [a, b]))
    return filecmp.cmpfiles(a, b, files, shallow=False)
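
# Fetch the channel page, recording the raw HTML and the URL it forwards to.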
def fetch(v: Verification, channel_url: str) -> Dict[str, Any]:
    info: Dict[str, Any] = {'url': channel_url}
    v.status('Fetching channel')
    request = urllib.request.urlopen(channel_url, timeout=10)
    info['channel_html'] = request.read()
    info['forwarded_url'] = request.geturl()
    v.result(request.status == 200)
    v.check('Got forwarded', info['url'] != info['forwarded_url'])
    return info
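
# Parse the channel HTML as XML: pull out the advertised git commit and the
# table of downloadable resources (URL, size, SHA-256 digest).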
def parse(v: Verification, info: Dict[str, Any]) -> None:
    v.status('Parsing channel description as XML')
    d = xml.dom.minidom.parseString(info['channel_html'])
    v.result(True)

    v.status('Extracting git commit')
    git_commit_node = d.getElementsByTagName('tt')[0]
    info['git_commit'] = git_commit_node.firstChild.nodeValue
    v.result(True)
    v.status('Verifying git commit label')
    v.result(git_commit_node.previousSibling.nodeValue == 'Git commit ')

    v.status('Parsing table')
    info['table'] = {}
    for row in d.getElementsByTagName('tr')[1:]:
        name = row.childNodes[0].firstChild.firstChild.nodeValue
        url = row.childNodes[0].firstChild.getAttribute('href')
        size = int(row.childNodes[1].firstChild.nodeValue)
        digest = row.childNodes[2].firstChild.firstChild.nodeValue
        info['table'][name] = {'url': url, 'digest': digest, 'size': size}
    v.result(True)
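
# Download git-revision and nixexprs.tar.xz, keeping small resources in memory
# and spooling the tarball to a temporary file, then verify each SHA-256 digest
# against the table.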
def fetch_resources(v: Verification, info: Dict[str, Any]) -> None:
    for resource in ['git-revision', 'nixexprs.tar.xz']:
        fields = info['table'][resource]
        v.status('Fetching resource "%s"' % resource)
        url = urllib.parse.urljoin(info['forwarded_url'], fields['url'])
        request = urllib.request.urlopen(url, timeout=10)
        if fields['size'] < 4096:
            fields['content'] = request.read()
        else:
            with tempfile.NamedTemporaryFile(
                    suffix='.nixexprs.tar.xz', delete=False) as tmp_file:
                shutil.copyfileobj(request, tmp_file)
                fields['file'] = tmp_file.name
        v.result(request.status == 200)
        v.status('Verifying digest for "%s"' % resource)
        if fields['size'] < 4096:
            actual_hash = hashlib.sha256(fields['content']).hexdigest()
        else:
            hasher = hashlib.sha256()
            with open(fields['file'], 'rb') as f:
                # pylint: disable=cell-var-from-loop
                for block in iter(lambda: f.read(4096), b''):
                    hasher.update(block)
            actual_hash = hasher.hexdigest()
        v.result(actual_hash == fields['digest'])
    v.check('Verifying git commit on main page matches git commit in table',
            info['table']['git-revision']['content'].decode() == info['git_commit'])
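
# Unpack nixexprs.tar.xz into a temporary directory, which is removed when the
# with-block exits.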
def extract_channel(v: Verification, info: Dict[str, Any]) -> None:
    with tempfile.TemporaryDirectory() as d:
        v.status('Extracting nixexprs.tar.xz')
        shutil.unpack_archive(info['table']['nixexprs.tar.xz']['file'], d)
        v.result(True)
        v.status('Removing temporary directory')
    v.result(True)
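
# Top-level driver (the main() wrapper is assumed scaffolding): fetch the
# channel, parse it, verify its resources, and unpack the tarball.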
def main() -> None:
    v = Verification()
    info = fetch(v, 'https://channels.nixos.org/nixos-20.03')
    parse(v, info)
    fetch_resources(v, info)
    extract_channel(v, info)


main()