This repository has been archived by the owner on Jun 8, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
/
send_email.py
73 lines (65 loc) · 2.81 KB
/
send_email.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
# Script to send tabular email of redash data
# This will send only top 10 data points
import requests
import bs4
from utils import parse_argument, get_config, send_email
# Module-level configuration shared by every function below; the 'redash'
# section supplies the base URLs and the user API key for all requests.
config = get_config()
redash_config = config['redash']
def get_html_table(jsonData, query_id):
    """Render a redash query-result payload as an HTML table (top 10 rows).

    Args:
        jsonData: parsed JSON from the redash results endpoint; rows are read
            from ``jsonData['query_result']['data']['rows']``.
        query_id: redash query id (string), appended to the configured
            ``redash_query_url`` to build the "View more" link.

    Returns:
        An HTML document string containing the table followed by a link back
        to redash, or just the empty table shell when there are no rows.
    """
    rows = jsonData['query_result']['data']['rows'][:10]  # email only the top 10 rows
    redash_url = redash_config['redash_query_url'] + query_id
    template = "<html><body><table cellpadding=10 border=1></table></body></html>"
    soup = bs4.BeautifulSoup(template, 'html.parser')
    if not rows:
        # No data: return the empty shell without header or link.
        return str(soup)
    header_data = rows[0].keys()
    table_header_row = bs4.BeautifulSoup('<thead><tr></tr></thead>', 'html.parser')
    for header_col in header_data:
        table_header_data = bs4.BeautifulSoup('<th bgcolor=#dddddd>' + header_col + '</th>', 'html.parser')
        table_header_row.tr.append(table_header_data)
    soup.body.table.append(table_header_row)
    for row in rows:
        table_row = bs4.BeautifulSoup('<tr></tr>', 'html.parser')
        for header in header_data:
            table_row_data = bs4.BeautifulSoup('<td>' + str(row[header]) + '</td>', 'html.parser')
            table_row.append(table_row_data)
        soup.body.table.append(table_row)
    # BUG FIX: the link was built by string concatenation as <a href=URL>
    # with an unquoted attribute value, which breaks for URLs containing
    # spaces or other special characters. Build the tag through the parser
    # so the href is properly quoted/escaped.
    link = soup.new_tag('a', href=redash_url)
    link.string = 'View more on redash'
    soup.body.append(link)
    return str(soup)
def get_query_details(query_id):
    """Fetch a query's metadata (name, description, ...) from redash as JSON."""
    url = redash_config['query_url'] + query_id
    # Authenticate with the user API key passed as a query parameter.
    response = requests.get(url, params={'api_key': redash_config['user_api_key']})
    return response.json()
def get_query_results(query_id):
    """Fetch a query's latest results from redash as parsed JSON."""
    url = redash_config['query_url'] + query_id + "/results.json"
    # Authenticate with the user API key passed as a query parameter.
    response = requests.get(url, params={'api_key': redash_config['user_api_key']})
    return response.json()
# function to put the refresh query logic
def put_query_refresh():
    """Placeholder for triggering a redash query refresh; not implemented yet."""
    pass
def get_csv_dump(query_id):
    """Download a query's full results as CSV into a local file.

    Args:
        query_id: redash query id (string).

    Returns:
        The name of the CSV file written, ``<query_id>_results.csv``.
    """
    with requests.Session() as s:
        csv_url = redash_config['query_url'] + query_id + "/results.csv"
        download = s.get(csv_url,
                         params={'api_key': redash_config['user_api_key']})
        temp_file_name = query_id + '_results.csv'
        # BUG FIX: response.content is bytes, but the file was opened in text
        # mode 'w' and written with writelines(), which raises TypeError on
        # Python 3. Open in binary mode and write the payload in one call.
        with open(temp_file_name, 'wb') as temp_file:
            temp_file.write(download.content)
        return temp_file_name
def send_email_alert(query_details, query_result, recepient_emails, query_id, file_name):
    """Render the result table as HTML and mail it to the recipients.

    The query's name becomes the subject; file_name (may be None) is the
    optional CSV attachment path.
    """
    html_body = get_html_table(query_result, query_id)
    send_email(recepient_emails, query_details['name'], html_body, file_name)
# --- script entry point: fetch, optionally dump to CSV, then email --------
options = parse_argument()
query_details = get_query_details(options.query_id)
query_result = get_query_results(options.query_id)
# Attach a full CSV dump only when explicitly requested ('Y').
temp_file_name = get_csv_dump(options.query_id) if options.send_dump == 'Y' else None
send_email_alert(query_details, query_result,
                 options.recepient_emails, options.query_id, temp_file_name)