feedfaetcher
#!/usr/bin/env python3
#
# Simple news feed downloader.
import asyncio
import concurrent.futures
import os
import shutil
import sys
import requests

# All state lives under $XDG_VAR_HOME/lib/feedfaetcher; bail out if the
# environment variable is unset or points at a missing directory.
var_dir = os.environ.get("XDG_VAR_HOME")
if var_dir is None or not os.path.exists(var_dir):
    sys.exit(1)

feedfaetcher_dir = os.path.join(var_dir, "lib", "feedfaetcher")
cache_dir = os.path.join(feedfaetcher_dir, "cache")
url_list = os.path.join(feedfaetcher_dir, "urls.txt")
table = os.path.join(feedfaetcher_dir, "table.txt")
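
# Input format (inferred from the parsing below): urls.txt holds one feed URL
# per line, and lines starting with "#" are skipped as comments, e.g.
# (hypothetical entry):
#
#   # tech news
#   https://example.org/feed.xml
#
# Each successful download is cached as <sanitized-url>.xml and recorded in
# table.txt as "<url> <cache file name>".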

executor = concurrent.futures.ThreadPoolExecutor(10)
loop = asyncio.new_event_loop()
urls = []


def handle(url):
    # Fetch one feed and, if the request succeeded, cache it on disk.
    resp = get(url)
    if resp is not None:
        write(url, resp)


def get(url):
    # Return the response, or None if the request fails or times out.
    try:
        return requests.get(url, stream=True, timeout=5)
    except requests.RequestException:
        return None


def write(url, resp):
    # Build a filesystem-safe cache file name from the URL and record the
    # URL-to-file mapping in the lookup table.
    fname = "{}.xml".format(
        url[:121].replace("_", "-").replace(" ", "_").replace("/", "_")
    )
    with open(os.path.join(cache_dir, fname), "wb") as f:
        f.write(resp.content)
    with open(table, "a+") as f:
        f.write("{} {}\n".format(url, fname))


async def make_requests():
    # Download every feed concurrently on the thread pool.
    futures = [loop.run_in_executor(executor, handle, url) for url in urls]
    await asyncio.gather(*futures)


if __name__ == "__main__":
    # Start from an empty cache and a fresh lookup table on every run.
    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)
    os.makedirs(cache_dir)
    try:
        os.remove(table)
    except OSError:
        pass

    # Read feed URLs, one per line; blank lines and "#" comments are skipped.
    with open(url_list) as f:
        for line in f:
            stripped = line.strip()
            if stripped and not stripped.startswith("#"):
                urls.append(stripped)
    if not urls:
        sys.exit(1)

    loop.run_until_complete(make_requests())
    loop.close()

# vim: set ft=python :