X-Git-Url: http://git.scottworley.com/paperdoorknob/blobdiff_plain/55958ec0820a10472e5091b105d730fa38172390..e10b5b6f112c057ab33ad46f8a3385d3bcd23e1d:/paperdoorknob.py?ds=inline

diff --git a/paperdoorknob.py b/paperdoorknob.py
index 6cc76a2..26eb7be 100644
--- a/paperdoorknob.py
+++ b/paperdoorknob.py
@@ -5,65 +5,54 @@
 # Free Software Foundation, version 3.
 
 
-from argparse import ArgumentParser
 import itertools
-import os.path
 
 from typing import Iterable
 
 from bs4 import BeautifulSoup
 from bs4.element import Tag
-import requests
-import requests_cache
-from xdg_base_dirs import xdg_cache_home
 
+from args import spec_from_commandline_args
+from spec import Spec
 
-class Post:
-    def __init__(self, html: BeautifulSoup) -> None:
-        self._html = html
 
-    def text(self) -> Tag:
-        body = self._html.body
+def parse(content: bytes) -> BeautifulSoup:
+    return BeautifulSoup(content, 'html.parser')
+
+
+def replies(html: BeautifulSoup) -> Iterable[Tag]:
+    def text() -> Tag:
+        body = html.body
         assert body
         text = body.find_next("div", class_="post-post")
         assert isinstance(text, Tag)
         return text
 
-    def replies(self) -> Iterable[Tag]:
-        replies = self._html.find_all("div", class_="post-reply")
-        assert all(isinstance(r, Tag) for r in replies)
-        return replies
-
-    def entries(self) -> Iterable[Tag]:
-        return itertools.chain([self.text()], self.replies())
-
+    def the_replies() -> Iterable[Tag]:
+        rs = html.find_all("div", class_="post-reply")
+        assert all(isinstance(r, Tag) for r in rs)
+        return rs
 
-def command_line_parser() -> ArgumentParser:
-    parser = ArgumentParser(prog='paperdoorknob', description='Print glowfic')
-    parser.add_argument(
-        '--cache_path',
-        metavar='PATH',
-        help='Where to keep the http cache (instead of %(default)s)',
-        default=os.path.join(xdg_cache_home(), "paperdoorknob"))
-    parser.add_argument(
-        '--timeout',
-        help='How long to wait for HTTP requests, in seconds',
-        default=30)
-    parser.add_argument('url', help='URL to retrieve')
-    return parser
+    return itertools.chain([text()], the_replies())
 
 
-def fetch(url: str, session: requests.Session, timeout: int) -> BeautifulSoup:
-    with session.get(url, timeout=timeout) as r:
-        r.raise_for_status()
-        return BeautifulSoup(r.text, 'html.parser')
+def process(spec: Spec) -> None:
+    spec.texout.write(b'\\documentclass{article}\n')
+    if spec.geometry is not None:
+        spec.texout.write(b'\\usepackage[' +
+                          spec.geometry.encode('UTF-8') +
+                          b']{geometry}\n')
+    spec.texout.write(b'\\begin{document}\n')
+    html = parse(spec.htmlfilter(spec.fetcher.fetch(spec.url)))
+    for r in replies(html):
+        spec.domfilter(r)
+        spec.texout.write(spec.texifier.texify(r))
+    spec.texout.write(b'\\end{document}\n')
 
 
 def main() -> None:
-    args = command_line_parser().parse_args()
-    with requests_cache.CachedSession(args.cache_path, cache_control=True) as session:
-        html = fetch(args.url, session, args.timeout)
-        Post(html)
+    with spec_from_commandline_args() as spec:
+        process(spec)
 
 
 if __name__ == '__main__':