-
Notifications
You must be signed in to change notification settings - Fork 0
/
search_site.py
176 lines (136 loc) · 4.85 KB
/
search_site.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
from colorama import Fore, Style
from bs4 import BeautifulSoup
import webbrowser
import argparse
import requests
import datetime
import random
import time
number = "0123456789"
symbols = "!?@#$%^&*=<>()[]/|,.+-_"
bigchar = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
smallchar = "abcdefghijklmnopqrstuvwxyz"
def brut(bool_bigchar, bool_smallchar,
         bool_number, bool_symbols):
    """Build the domain-generation alphabet from the selected classes.

    Each True flag contributes its character class, in the fixed order
    uppercase, lowercase, digits, symbols.  When no flag is set, all
    four classes are used as the fallback alphabet.
    """
    selected = [chars for flag, chars in ((bool_bigchar, bigchar),
                                          (bool_smallchar, smallchar),
                                          (bool_number, number),
                                          (bool_symbols, symbols)) if flag]
    if not selected:
        # Nothing chosen on the command line: fall back to everything.
        selected = [bigchar, smallchar, number, symbols]
    return "".join(selected)
def txt_grn(text):
    """Return *text* wrapped in green ANSI codes, reset at the end."""
    return f"{Fore.GREEN}{text}{Style.RESET_ALL}"
def txt_rd(text):
    """Return *text* wrapped in red ANSI codes, reset at the end."""
    return f"{Fore.RED}{text}{Style.RESET_ALL}"
def txt_yel(text):
    """Return *text* wrapped in yellow ANSI codes, reset at the end."""
    return f"{Fore.YELLOW}{text}{Style.RESET_ALL}"
def open_url(url, bool_url):
    """Open *url* in the default browser, but only when *bool_url* is set."""
    if not bool_url:
        return
    webbrowser.open(url)
def now():
    """Return the current local time as e.g. ``Jan 05 13:45:12``."""
    return datetime.datetime.now().strftime("%b %d %H:%M:%S")
def main():
    """Entry point: generate random domains and probe them over HTTP.

    Command-line options select the generation alphabet (-b/-c/-n/-s),
    the maximum domain length (-m), the per-request timeout (-t), the
    output file (-o) and whether found sites are opened in the browser
    (-u).  Loops until interrupted with Ctrl-C, then prints statistics.
    """
    parser = argparse.ArgumentParser(description="search site")
    parser.add_argument("-m", "--max",
                        type=int,
                        dest="max",
                        default=10,
                        help="maximum length website domain generation")
    parser.add_argument("-b", "--bigchar",
                        action="store_true",
                        dest="bigchar",
                        help="add bigchar in alphabet")
    parser.add_argument("-c", "--smallchar",
                        action="store_true",
                        dest="smallchar",
                        help="add smallchar in alphabet")
    parser.add_argument("-n", "--number",
                        action="store_true",
                        dest="number",
                        help="add number in alphabet")
    parser.add_argument("-s", "--symbols",
                        action="store_true",
                        dest="symbols",
                        help="add symbols in alphabet")
    parser.add_argument("-u", "--url",
                        default=False,
                        action="store_true",
                        dest="url",
                        help="open url")
    parser.add_argument("-o", "--out",
                        default="site.txt",
                        dest="out",
                        help="out file")
    parser.add_argument("-t", "--time",
                        type=int,
                        default=10,
                        dest="time",
                        help="timeout for a request to the site")
    args = parser.parse_args()
    alphabet = brut(args.bigchar, args.smallchar,
                    args.number, args.symbols)
    root = [".com", ".ru", ".org", ".net"]
    used_url = set()  # set gives O(1) duplicate checks (was a list scan)
    found = 0
    not_found = 0
    exist = 0
    all_site = 0
    print("----- search site -----")
    while True:
        try:
            # randint is inclusive on both ends, so -m really is the
            # maximum length.  randrange(1, max) topped out at max - 1
            # and raised ValueError for -m 1.
            length = random.randint(1, args.max)
            domain = "".join(random.choice(alphabet) for _ in range(length))
            # The context manager keeps the file open for all TLD attempts
            # of this domain.  The old per-attempt ``finally: file.close()``
            # closed it after the first TLD, so later writes hit a closed
            # file and were miscounted as "site not exist".
            with open(args.out, "a+", encoding="utf-8") as file:
                for tld in root:
                    url = "http://" + domain + tld
                    if url in used_url:
                        continue
                    used_url.add(url)
                    try:
                        r = requests.get(url, timeout=args.time)
                        if r.status_code in [200, 302, 304]:
                            soup = BeautifulSoup(r.content, "html.parser")
                            # Pages without <title> leave soup.title None;
                            # guard instead of raising AttributeError.
                            title = soup.title.string if soup.title else ""
                            file.write('{} - {}\n'.format(url, title))
                            open_url(url, args.url)
                            found += 1
                            all_site += 1
                            print(f'{now()} [{all_site}]: {txt_grn(f"found {url}!")}')
                        elif r.status_code in [502, 404, 403]:
                            not_found += 1
                            all_site += 1
                            print(f'{now()} [{all_site}]: {txt_yel("not found or not available!")}')
                    except Exception:
                        # Deliberately broad: DNS failures, timeouts and
                        # connection errors are all counted as non-existent.
                        exist += 1
                        all_site += 1
                        print(f"{now()} [{all_site}]: {txt_rd('site not exist!')}")
                    finally:
                        time.sleep(0.5)  # throttle: one request every 0.5 s
        except KeyboardInterrupt:
            print("\n----- search statistics -----")
            print(f"all/found/not/exist = {all_site}/{found}/{not_found}/{exist}")
            break
# Run the scanner only when executed as a script, not when imported.
if __name__ == "__main__":
    main()