 # Free Software Foundation, version 3.
 
-from argparse import ArgumentParser
 import itertools
-import os.path
 from typing import Iterable
 
 from bs4 import BeautifulSoup
 from bs4.element import Tag
-import requests
-import requests_cache
-from xdg_base_dirs import xdg_cache_home
+from args import spec_from_commandline_args
+from spec import Spec
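
The argparse, requests, and XDG plumbing gives way to two local modules that
this patch does not include. Judging only from how main() and process() use
them below, spec.Spec presumably bundles the run parameters along these lines
(a hypothetical sketch, not the real spec.py):

    # Hypothetical sketch of spec.Spec, inferred from its use in process();
    # the field names and types here are assumptions.
    from dataclasses import dataclass
    from typing import Any, BinaryIO

    @dataclass(frozen=True)
    class Spec:
        url: str          # the thread to fetch
        fetcher: Any      # provides fetch(url: str) -> bytes
        texifier: Any     # provides texify(tag: Tag) -> bytes
        texout: BinaryIO  # open binary stream the LaTeX is written to

spec_from_commandline_args() is used as a context manager in main(), so it
presumably parses the command line, opens texout and any HTTP session, and
closes them again on exit.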
 
 
-class Post:
-    def __init__(self, html: BeautifulSoup) -> None:
-        self._html = html
-
-    def text(self) -> Tag:
-        body = self._html.body
+def parse(content: bytes) -> BeautifulSoup:
+    return BeautifulSoup(content, 'html.parser')
+
+
+def clean(html: BeautifulSoup) -> BeautifulSoup:
+    for eb in html.find_all("div", class_="post-edit-box"):
+        eb.decompose()
+    for footer in html.find_all("div", class_="post-footer"):
+        footer.decompose()
+    return html
+
+
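
clean() strips the per-post edit boxes and footers that the pages carry,
which are noise in print. An illustration with a fabricated page (only the
div classes are taken from the code above):

    # Illustration only: a made-up page using the classes clean() targets.
    page = b"""<html><body>
    <div class="post-post">Once upon a time...
      <div class="post-edit-box">Last edited by...</div>
    </div>
    <div class="post-reply">And then...
      <div class="post-footer">Reply | Quote</div>
    </div>
    </body></html>"""
    html = clean(parse(page))
    assert html.find("div", class_="post-edit-box") is None
    assert html.find("div", class_="post-footer") is None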
+def replies(html: BeautifulSoup) -> Iterable[Tag]:
+    def text() -> Tag:
+        body = html.body
         assert body
         text = body.find_next("div", class_="post-post")
         assert isinstance(text, Tag)
         return text
 
-    def replies(self) -> Iterable[Tag]:
-        replies = self._html.find_all("div", class_="post-reply")
-        assert all(isinstance(r, Tag) for r in replies)
-        return replies
-
-    def entries(self) -> Iterable[Tag]:
-        return itertools.chain([self.text()], self.replies())
-
+    def the_replies() -> Iterable[Tag]:
+        rs = html.find_all("div", class_="post-reply")
+        assert all(isinstance(r, Tag) for r in rs)
+        return rs
-def command_line_parser() -> ArgumentParser:
-    parser = ArgumentParser(prog='paperdoorknob', description='Print glowfic')
-    parser.add_argument(
-        '--cache_path',
-        metavar='PATH',
-        help='Where to keep the http cache (instead of %(default)s)',
-        default=os.path.join(xdg_cache_home(), "paperdoorknob"))
-    parser.add_argument(
-        '--timeout',
-        help='How long to wait for HTTP requests, in seconds',
-        default=30)
-    parser.add_argument('url', help='URL to retrieve')
-    return parser
+    return itertools.chain([text()], the_replies())
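
The module-level replies() takes over from the old Post.entries(): despite
the name, it yields the original post body first and then each reply, in page
order. Continuing the fabricated page from above:

    # The post body comes first, then the replies.
    for tag in replies(html):
        print(tag["class"])   # ['post-post'], then ['post-reply']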
 
 
-def fetch(url: str, session: requests.Session, timeout: int) -> BeautifulSoup:
-    with session.get(url, timeout=timeout) as r:
-        r.raise_for_status()
-        return BeautifulSoup(r.text, 'html.parser')
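
Fetching does not lose its caching and timeout handling; it moves behind the
fetcher object that Spec carries, in code this patch does not show. All that
process() relies on is an interface along these lines (an assumed shape,
inferred from the single call site below):

    # Assumed interface, inferred from spec.fetcher.fetch(spec.url) below.
    from typing import Protocol

    class Fetcher(Protocol):
        def fetch(self, url: str) -> bytes:
            """Return the raw HTML of the page at url."""
            ...

Note that the split also moves parsing out of the fetch step: the old fetch()
returned soup, while the new fetcher returns bytes and parse() is applied
separately in process().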
+def process(spec: Spec) -> None:
+    spec.texout.write(b'\\documentclass{article}\n\\begin{document}\n')
+    html = clean(parse(spec.fetcher.fetch(spec.url)))
+    for r in replies(html):
+        spec.texout.write(spec.texifier.texify(r))
+    spec.texout.write(b'\\end{document}\n')
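
With all I/O behind Spec, process() becomes a pure pipeline from spec to
LaTeX bytes, which makes it easy to exercise without the network. A sketch
with stand-in collaborators (FakeFetcher and RawTexifier are hypothetical,
and the Spec construction assumes the field names sketched earlier):

    import io

    class FakeFetcher:
        def fetch(self, url: str) -> bytes:
            return b'<html><body><div class="post-post">Hi!</div></body></html>'

    class RawTexifier:
        def texify(self, tag: Tag) -> bytes:
            # Crude stand-in: emit the tag's text with no LaTeX escaping.
            return tag.get_text().encode('UTF-8') + b'\n'

    out = io.BytesIO()
    process(Spec(url='https://example.com/thread',
                 fetcher=FakeFetcher(), texifier=RawTexifier(), texout=out))
    # out.getvalue() now holds:
    #   \documentclass{article}
    #   \begin{document}
    #   Hi!
    #   \end{document}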
 
 
 def main() -> None:
-    args = command_line_parser().parse_args()
-    with requests_cache.CachedSession(args.cache_path, cache_control=True) as session:
-        html = fetch(args.url, session, args.timeout)
-        Post(html)
+    with spec_from_commandline_args() as spec:
+        process(spec)
 
 
 if __name__ == '__main__':
     main()