]> git.scottworley.com Git - paperdoorknob/blame - paperdoorknob.py
Drop Post as a class
[paperdoorknob] / paperdoorknob.py
CommitLineData
92b11a10
SW
1# paperdoorknob: Print glowfic
2#
3# This program is free software: you can redistribute it and/or modify it
4# under the terms of the GNU General Public License as published by the
5# Free Software Foundation, version 3.
6
7
8from argparse import ArgumentParser
55958ec0 9import itertools
ba3b7c52 10import os.path
a0d30541
SW
11
12from typing import Iterable
13
136277e3 14from bs4 import BeautifulSoup
6409066b 15from bs4.element import Tag
b25a2f90 16import requests
b34a368f 17import requests_cache
ba3b7c52 18from xdg_base_dirs import xdg_cache_home
92b11a10
SW
19
20
def command_line_parser() -> ArgumentParser:
    """Build the command-line parser for paperdoorknob.

    Options:
      --cache_path PATH  where to keep the HTTP cache
                         (default: $XDG_CACHE_HOME/paperdoorknob)
      --timeout SECONDS  how long to wait for HTTP requests (default: 30)
      url                the glowfic thread URL to retrieve
    """
    parser = ArgumentParser(prog='paperdoorknob', description='Print glowfic')
    parser.add_argument(
        '--cache_path',
        metavar='PATH',
        help='Where to keep the http cache (instead of %(default)s)',
        default=os.path.join(xdg_cache_home(), "paperdoorknob"))
    parser.add_argument(
        '--timeout',
        # Bug fix: without type=, a user-supplied value stays a string,
        # which later breaks requests' numeric timeout= parameter; only
        # the untouched default (30) was an int.
        type=int,
        help='How long to wait for HTTP requests, in seconds',
        default=30)
    parser.add_argument('url', help='URL to retrieve')
    return parser
34
35
def fetch(url: str, session: requests.Session, timeout: int) -> BeautifulSoup:
    """GET *url* through *session* and parse the body as HTML.

    Raises an HTTP error (via raise_for_status) on a non-success response.
    """
    with session.get(url, timeout=timeout) as response:
        response.raise_for_status()
        return BeautifulSoup(response.text, 'html.parser')
b25a2f90
SW
40
41
def clean(html: BeautifulSoup) -> BeautifulSoup:
    """Strip site chrome (edit boxes, then footers) from the page, in place."""
    for css_class in ("post-edit-box", "post-footer"):
        for node in html.find_all("div", class_=css_class):
            node.decompose()
    return html
48
49
def replies(html: BeautifulSoup) -> Iterable[Tag]:
    """Return the post's main text followed by each reply, in page order."""
    body = html.body
    assert body
    first = body.find_next("div", class_="post-post")
    assert isinstance(first, Tag)
    rest = html.find_all("div", class_="post-reply")
    assert all(isinstance(r, Tag) for r in rest)
    return itertools.chain([first], rest)
64
65
def process(
        url: str,
        session: requests.Session,
        timeout: int) -> Iterable[Tag]:
    """Fetch *url*, strip site chrome, and return the thread's posts."""
    page = fetch(url, session, timeout)
    return replies(clean(page))
72
73
def main() -> None:
    """Entry point: parse arguments and process the requested glowfic thread."""
    args = command_line_parser().parse_args()
    cached = requests_cache.CachedSession(args.cache_path, cache_control=True)
    with cached as session:
        process(args.url, session, args.timeout)


if __name__ == '__main__':
    main()