from argparse import ArgumentParser
+import itertools
+import os.path
+
+from typing import Iterable
+
+from bs4 import BeautifulSoup
+from bs4.element import Tag
import requests
+import requests_cache
+from xdg_base_dirs import xdg_cache_home
+
+
+class Post:
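+    """A glowfic page: the top post plus its replies."""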
+    def __init__(self, html: BeautifulSoup) -> None:
+        self._html = html
+
+    def text(self) -> Tag:
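+        """Return the div containing the top post's text."""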
+        body = self._html.body
+        assert body
+        text = body.find_next("div", class_="post-post")
+        assert isinstance(text, Tag)
+        return text
+
+    def replies(self) -> Iterable[Tag]:
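+        """Return the divs containing each reply, in page order."""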
+        replies = self._html.find_all("div", class_="post-reply")
+        assert all(isinstance(r, Tag) for r in replies)
+        return replies
+
+    def entries(self) -> Iterable[Tag]:
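+        """Return the top post's div followed by each reply's div."""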
+        return itertools.chain([self.text()], self.replies())
+
+
def command_line_parser() -> ArgumentParser:
    parser = ArgumentParser(prog='paperdoorknob', description='Print glowfic')
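+    # Default the cache location to the user's XDG cache directory.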
+    parser.add_argument(
+        '--cache_path',
+        metavar='PATH',
+        help='Where to keep the http cache (instead of %(default)s)',
+        default=os.path.join(xdg_cache_home(), "paperdoorknob"))
    parser.add_argument(
        '--timeout',
        help='How long to wait for HTTP requests, in seconds',
    return parser


-def fetch(url: str, session: requests.Session, timeout: int) -> None:
+def fetch(url: str, session: requests.Session, timeout: int) -> BeautifulSoup:
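+    """Fetch url via the session and return its parsed HTML, raising on HTTP errors."""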
    with session.get(url, timeout=timeout) as r:
        r.raise_for_status()
+        return BeautifulSoup(r.text, 'html.parser')


def main() -> None:
    args = command_line_parser().parse_args()
-    with requests.session() as session:
-        fetch(args.url, session, args.timeout)
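+    # Cache responses on disk, honoring the server's Cache-Control headers.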
+    with requests_cache.CachedSession(args.cache_path, cache_control=True) as session:
+        html = fetch(args.url, session, args.timeout)
+        Post(html)


if __name__ == '__main__':
    main()