main.py
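"""Batch-capture screenshots for the IDs listed in feed.csv.

Each CSV row is "<13-digit ID>,<key>". IDs whose screenshot already
exists in the output directory are skipped; rows the crawler rejects
are appended to errored_ids.txt so they can be retried later.
"""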
import os

import post_crawler as pc
feed_csv = "feed.csv"
out_dir = "output_without_masking"
count = 1

# Create the output directory if it does not already exist.
os.makedirs(out_dir, exist_ok=True)
crawler = pc.crawler()

# Screenshots saved by a previous run; matching IDs are skipped below.
done_list = [el for el in os.listdir(out_dir) if el.endswith(".png")]
with open(feed_csv) as feed_file:
    for line in feed_file:
        # Each feed row must be "<ID>,<key>"; skip malformed rows.
        line_split = line.strip().split(",")
        if len(line_split) != 2:
            continue
        query = line_split[0].strip()
        key2 = line_split[1].strip()
        if len(key2) < 2:
            continue
        # Only the second character of the key field is used.
        key2 = key2[1]
        key1 = "구"
        # Strip hyphens so "123456-1234567" normalizes to 13 plain digits.
        query = query.replace("-", "")
        if len(query) != 13 or not query.isdigit():
            continue
        # Skip IDs whose screenshot already exists.
        if query + ".png" in done_list:
            continue
        try:
            # The crawler returns False when the site rejects the ID/key pair
            # (method name kept exactly as defined in post_crawler).
            if not crawler.save_screenshot_withhout_masking(query, out_dir, key1, key2):
                print(query + " has wrong information")
                out_log = "line was " + line + "key2 was " + key2 + "\n\n"
                print(out_log)
                # Append the failing row so it can be retried later.
                with open("errored_ids.txt", "a") as error_log:
                    error_log.write(out_log)
                continue
        except Exception:
            print(query + " showed error")
            continue
        # print("JOB Number " + str(count) + " Done.\n>>>>>" + query)
        count += 1

crawler.kill()