from argparse import ArgumentParser
+import itertools
import os.path
+
+from typing import Iterable
+
+from bs4 import BeautifulSoup
+from bs4.element import Tag
import requests
import requests_cache
from xdg_base_dirs import xdg_cache_home
    return parser


-def fetch(url: str, session: requests.Session, timeout: int) -> None:
+def fetch(url: str, session: requests.Session, timeout: int) -> BeautifulSoup:
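+    # Download the page and parse the body into a BeautifulSoup tree.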
    with session.get(url, timeout=timeout) as r:
        r.raise_for_status()
+ return BeautifulSoup(r.text, 'html.parser')
+
+
+def clean(html: BeautifulSoup) -> BeautifulSoup:
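+    # Drop edit boxes and footers; decompose() removes them from the tree in place.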
+ for eb in html.find_all("div", class_="post-edit-box"):
+ eb.decompose()
+ for footer in html.find_all("div", class_="post-footer"):
+ footer.decompose()
+ return html
+
+
+def replies(html: BeautifulSoup) -> Iterable[Tag]:
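+    # A post page consists of the original post followed by its replies.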
+ def text() -> Tag:
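+        # The original post body is the first div.post-post after <body>.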
+ body = html.body
+ assert body
+ text = body.find_next("div", class_="post-post")
+ assert isinstance(text, Tag)
+ return text
+
+ def the_replies() -> Iterable[Tag]:
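+        # Every reply is wrapped in its own div.post-reply.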
+ rs = html.find_all("div", class_="post-reply")
+ assert all(isinstance(r, Tag) for r in rs)
+ return rs
+
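+    # Yield the original post first, then the replies in document order.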
+ return itertools.chain([text()], the_replies())
+
+
+def process(
+ url: str,
+ session: requests.Session,
+ timeout: int) -> Iterable[Tag]:
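+    # Fetch the page, strip its chrome, and split it into individual posts.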
+ html = clean(fetch(url, session, timeout))
+    return replies(html)
+
+
def main() -> None:
    args = command_line_parser().parse_args()
    with requests_cache.CachedSession(args.cache_path, cache_control=True) as session:
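+        # The posts returned by process() are not consumed here yet.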
-        fetch(args.url, session, args.timeout)
+        process(args.url, session, args.timeout)


if __name__ == '__main__':
    main()