forked from UltimaHoarder/UltimaScraper
-
Notifications
You must be signed in to change notification settings - Fork 0
/
start_ofd.py
executable file
·66 lines (59 loc) · 2.2 KB
/
start_ofd.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/usr/bin/env python3
"""Entry point for the OnlyFans datascraper.

Runs pre-flight checks, loads `.settings/config.json`, then loops:
pick a site (automatically via `auto_site_choice`, or interactively),
scrape it, and either exit, wait for input, or sleep per the settings.
Any uncaught exception is printed and the script pauses so the
traceback stays visible when run from a double-clicked console.
"""
import asyncio
import os
import time
import traceback

import tests.main_test as main_test

try:
    # Pre-flight: verify interpreter version, config file, and profiles
    # before importing the heavier scraper modules.
    main_test.version_check()
    main_test.check_config()
    main_test.check_profiles()
    if __name__ == "__main__":
        import datascraper.main_datascraper as main_datascraper
        import helpers.main_helper as main_helper

        config_path = os.path.join(".settings", "config.json")
        json_config, json_config2 = main_helper.get_config(config_path)
        json_settings = json_config["settings"]
        exit_on_completion = json_settings["exit_on_completion"]
        infinite_loop = json_settings["infinite_loop"]
        loop_timeout = json_settings["loop_timeout"]
        json_sites = json_config["supported"]
        domain = json_settings["auto_site_choice"]
        # `string` is the site-chooser prompt/error text; `site_names` is
        # the list of valid site modules (empty if `domain` is invalid).
        string, site_names = main_helper.module_chooser(domain, json_sites)

        async def main():
            """Choose-scrape-repeat loop; returns when the user quits."""
            while True:
                if domain:
                    if site_names:
                        site_name = domain
                    else:
                        # auto_site_choice is set but invalid: printing and
                        # continuing would busy-loop forever, so report the
                        # error once and stop.
                        print(string)
                        break
                else:
                    print(string)
                    x = input()
                    if x == "x":
                        break
                    try:
                        site_name = site_names[int(x)]
                    except (ValueError, IndexError):
                        # Non-numeric or out-of-range selection: re-prompt
                        # instead of crashing out to the top-level handler.
                        continue
                site_name_lower = site_name.lower()
                api = await main_datascraper.start_datascraper(
                    json_config, site_name_lower
                )
                if api:
                    # Release the scraper's connection pools between runs.
                    api.close_pools()
                if exit_on_completion:
                    print("Now exiting.")
                    exit(0)
                elif not infinite_loop:
                    print("Input anything to continue")
                    input()
                elif loop_timeout:
                    # f-string rather than "+" concatenation: loop_timeout
                    # may be a JSON number, which would raise TypeError.
                    print(f"Pausing scraper for {loop_timeout} seconds.")
                    time.sleep(int(loop_timeout))

        # asyncio.run() replaces the deprecated
        # get_event_loop()/run_until_complete()/close() sequence.
        asyncio.run(main())
except Exception:
    # Top-level boundary: show the traceback and pause so a console
    # window opened by double-click doesn't vanish before it's read.
    print(traceback.format_exc())
    input()