forked from cudeso/tools
-
Notifications
You must be signed in to change notification settings - Fork 0
/
sitereview_cudeso.py
104 lines (83 loc) · 2.84 KB
/
sitereview_cudeso.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
'''
Bluecoat SiteReview Checker (CLI)
from https://github.com/PoorBillionaire/sitereview
Slightly changed; reads a site list from a textfile
site list : "sitelist.lst"
Outputs in CSV format on screen
2018 - Koen Van Impe
'''
from __future__ import print_function
from argparse import ArgumentParser
from bs4 import BeautifulSoup
import json
import requests
import sys
import urllib2
import time
from socket import timeout
import socket
class SiteReview(object):
    """Query the Symantec/Bluecoat SiteReview URL-categorization service."""

    def __init__(self):
        # REST endpoint used by the sitereview.bluecoat.com web UI.
        self.baseurl = "https://sitereview.bluecoat.com/resource/lookup"
        self.headers = {"User-Agent": "Mozilla/5.0", "Content-Type": "application/json"}

    def sitereview(self, url):
        """POST *url* to the lookup endpoint and return the decoded JSON reply.

        Stores the raw response on ``self.req``; exits the process on a
        connection failure.
        """
        payload = {"url": url, "captcha": ""}
        try:
            self.req = requests.post(
                self.baseurl,
                headers=self.headers,
                data=json.dumps(payload),
            )
        except requests.ConnectionError:
            sys.exit("[-] ConnectionError: " \
                     "A connection error occurred")
        return json.loads(self.req.content.decode("UTF-8"))

    def check_response(self, response):
        """Extract ``category``, ``date`` and ``url`` from a lookup *response*.

        Exits the process if the last HTTP request did not return 200.
        """
        if self.req.status_code != 200:
            # BUG FIX: original read bare `req.status_code`, which is a
            # NameError in this scope -- must go through `self`.
            sys.exit("[-] HTTP {} returned".format(self.req.status_code))
        else:
            self.category = response["categorization"][0]["name"]
            # Service returns a long rating-date sentence; keep first 35 chars.
            self.date = response["translatedRateDates"][0]["text"][0:35]
            self.url = response["url"]
def printcsv(url, ip, site_live, status_code, date, category):
    """Emit one comma-separated result row to stdout."""
    fields = (url, ip, site_live, status_code, date, category)
    print(",".join(str(field) for field in fields))
def main(url):
    """Resolve *url*, probe it over HTTP, categorize it and print a CSV row.

    On DNS resolution failure a "No DNS" row is printed and, if the host
    does not already start with "www", the lookup is retried once with a
    "www." prefix.
    """
    category = ""
    date = ""
    site_live = False
    status_code = 0
    ip = ""
    try:
        ip = socket.gethostbyname(url)
        try:
            response = urllib2.urlopen("http://" + url, timeout=5)
            status_code = response.getcode()
            s = SiteReview()
            response = s.sitereview(url)
            s.check_response(response)
            date = s.date
            category = s.category
            site_live = True
        # BUG FIX: HTTPError is a subclass of URLError, so it must be
        # caught FIRST -- the original order made this branch unreachable.
        except urllib2.HTTPError as err:
            if err.code == 302:
                category = "HTTP-302"
        except urllib2.URLError as err:
            category = err.reason
        except Exception:
            # Best-effort: report any other failure in the CSV row instead
            # of crashing the whole run. Narrowed from a bare `except:` so
            # SystemExit/KeyboardInterrupt still propagate.
            category = "Unknown error"
        printcsv(url, ip, site_live, status_code, date, category)
    except socket.gaierror:
        printcsv(url, ip, site_live, status_code, date, "No DNS")
        if not url.startswith("www"):
            main("www." + url)
if __name__ == "__main__":
    # Read target hosts one per line; strip any scheme prefix before lookup.
    # `with` guarantees the file is closed even if main() raises (the
    # original leaked the handle on error and shadowed the `file` builtin).
    with open("sitelist.lst", "r") as sitelist:
        for site in sitelist:
            site = site.rstrip()
            if site.startswith("http://"):
                site = site[7:]
            if site.startswith("https://"):
                site = site[8:]
            main(site)
            # Rate-limit requests to the SiteReview service.
            time.sleep(5)