-
Notifications
You must be signed in to change notification settings - Fork 0
/
revproxy.py
95 lines (74 loc) · 2.98 KB
/
revproxy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
#!/usr/bin/env python
import requests
import requests_cache
from http.server import BaseHTTPRequestHandler, HTTPServer
from pymongo import MongoClient
from urllib import parse
import json
# How long (seconds) cached upstream responses stay valid in Redis.
CACHE_TIMEOUT = 300
# Requests slower than this many seconds are recorded in `slow_requests`.
THRESHOLD = 0.5
# Paths to ignore entirely (typical automatic browser requests).
BLACKLIST = ['/service/favicon.ico','/favicon.ico']
# Transparently cache all `requests` calls in Redis; entries expire after
# CACHE_TIMEOUT. The cache is cleared on startup so stats reflect this run.
requests_cache.install_cache('redis_cache', backend='redis', expire_after=CACHE_TIMEOUT)
requests_cache.clear()
# MongoDB connection (host "mongodb", default port) and the two collections
# used for usage statistics: per-path hit counts and slowest observed times.
client = MongoClient('mongodb', 27017)
db = client['statistics']
queries = db['queries']
slow_requests = db['slow_requests']
# HTTPRequestHandler class
# Request handler: reverse proxy for the NextBus public XML feed with
# usage statistics stored in MongoDB and served at /stats.
class HTTPServer_RequestHandler(BaseHTTPRequestHandler):

    def find_stats(self):
        """Build a statistics snapshot from the MongoDB collections.

        Returns a dict with two maps:
          * ``queries``       -- path -> number of times it was requested
          * ``slow_requests`` -- path -> slowest observed duration as "Xs"
        """
        data = {"queries": {}, "slow_requests": {}}
        for item in slow_requests.find():
            data['slow_requests'][item['path']] = str(item['seconds']) + "s"
        for item in queries.find():
            data['queries'][item['path']] = item['count']
        return data

    def do_GET(self):
        """Handle GET: serve /stats locally, proxy everything else upstream."""
        if self.path in BLACKLIST:
            # BUG FIX: the original returned without writing any response,
            # so the client saw an empty reply. Answer 404 explicitly.
            self.send_response(404)
            self.end_headers()
            return
        if self.path == "/stats":
            # Serve the statistics snapshot as JSON.
            self.send_response(200)
            self.send_header('Content-type','application/json')
            self.end_headers()
            self.wfile.write(bytes(json.dumps(self.find_stats()), "utf8"))
            return
        # Forward the request to the upstream API. Responses are served from
        # the requests_cache Redis cache when still fresh (CACHE_TIMEOUT).
        URL = "http://webservices.nextbus.com/service/publicXMLFeed"
        headers = {'Accept-Encoding': 'gzip, deflate'}
        params = parse.parse_qs(parse.urlparse(self.path).query)
        r = requests.get(URL, headers=headers, params=params)
        # Upstream round-trip time (0 for cache hits — TODO confirm this is
        # the intended semantics for the slow-request stats).
        r_seconds = r.elapsed.total_seconds()
        self.send_response(r.status_code)
        # BUG FIX: r.headers['content-type'] raised KeyError when upstream
        # omitted the header; fall back to the feed's native type.
        self.send_header('Content-type',
                         r.headers.get('content-type', 'application/xml'))
        self.end_headers()
        # Per-path hit counter; upsert creates the document on first hit.
        queries.update_one({"path": self.path},{"$inc": {"count":1}},upsert=True)
        if r_seconds > THRESHOLD:
            # Record only the slowest observed duration for each path.
            # (Consistency fix: use the module-level `slow_requests` handle
            # throughout; the original mixed it with `db.slow_requests`.)
            data = slow_requests.find_one({"path": self.path})
            if data:
                if r_seconds > data['seconds']:
                    slow_requests.update_one({"path": self.path},{"$set": {"seconds":r_seconds}})
            else:
                slow_requests.update_one({"path": self.path},{"$set": {"seconds":r_seconds}},upsert=True)
        # Relay the upstream body to the client.
        self.wfile.write(bytes(r.text, "utf8"))
def run():
    """Bind the proxy to all interfaces on port 80 and serve until killed."""
    HTTPServer(('0.0.0.0', 80), HTTPServer_RequestHandler).serve_forever()
run()