main.py
import contextlib
import datetime

import httpx
from bonbast.server import get_prices_from_api, get_token_from_main_page
from bs4 import BeautifulSoup
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi_cache import FastAPICache
from fastapi_cache.backends.inmemory import InMemoryBackend
from fastapi_cache.decorator import cache

app = FastAPI()
FastAPICache.init(InMemoryBackend())
BONBAST_URL = "https://www.bonbast.com"

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


def merge_and_extract_tables(tables_soup):
    """Flatten a list of table soups into one list of rows, skipping each header row."""
    tables = []
    for table_soup in tables_soup:
        for tr in table_soup.find_all("tr")[1:]:  # [1:] skips the header row
            table = [td.text for td in tr.find_all("td")]
            tables.append(table)
    return tables
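
# For illustration (inferred from how read_archive indexes these rows below):
# each extracted row is a list of cell strings shaped like
# [currency_code, ..., sell_price, buy_price].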


def crawl_soup(url: str, post_data: dict) -> BeautifulSoup:
    """POST the given form data to a bonbast page and parse the returned HTML."""
    response = httpx.post(url, data=post_data)
    if response.status_code != 200:
        # Surface upstream failures as a 502 instead of an unhandled exception.
        raise HTTPException(status_code=502, detail=f"Failed to crawl {url}")
    return BeautifulSoup(response.text, "html.parser")
@app.get("/historical/{currency}")
@cache(expire=60 * 60 * 24)
async def read_historical_currency(currency: str, date: str = datetime.date.today().strftime("%Y-%m")):
try:
date = datetime.datetime.strptime(date, "%Y-%m")
except ValueError as err:
raise HTTPException(
status_code=422, detail="Invalid Date format. Expected YYYY-MM"
) from err
soup = crawl_soup(
f"{BONBAST_URL}/historical",
{"date": date.strftime("%Y-%m-%d"), "currency": currency},
)
table_soup = soup.find("table")
table = [[td.text for td in tr.findAll("td")]
for tr in table_soup.findAll("tr")[1:]]
prices = {}
for row in table:
with contextlib.suppress(ValueError):
exact_date = row[0]
sell, buy = int(row[1]), int(row[2])
if sell > 0 and buy > 0:
prices[exact_date] = {
"sell": sell,
"buy": buy
}
return prices
@app.get("/archive/")
@cache(expire=60 * 60 * 24)
async def read_archive(date: str = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")):
try:
date = datetime.datetime.strptime(date, "%Y-%m-%d")
except ValueError as err:
raise HTTPException(
status_code=422, detail="Invalid Date format. Expected YYYY-MM-DD"
) from err
soup = crawl_soup(
f"{BONBAST_URL}/archive", {"date": date.strftime("%Y-%m-%d")}
)
table_soup = soup.find_all("table")
table = merge_and_extract_tables(table_soup[:-1])
prices = {"date": date.strftime("%Y-%m-%d")}
for row in table:
with contextlib.suppress(ValueError):
currency = row[0].lower()
sell, buy = int(row[2]), int(row[3])
if sell > 0 and buy > 0:
prices[currency] = {
"sell": sell,
"buy": buy
}
return prices
@app.get("/latest")
@cache(expire=60 * 30)
async def read_latest():
token = get_token_from_main_page()
currencies, coins, golds = get_prices_from_api(token)
currencies_data = {c.code.lower(): {"sell": c.sell, "buy": c.buy} for c in currencies}
coins_data = {c.code.lower(): {"sell": c.sell, "buy": c.buy} for c in coins}
golds_data = {c.code.lower(): {"sell": c.price, "buy": c.price} for c in golds}
return {**currencies_data, **coins_data, **golds_data}
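

# A minimal client-side sketch, for illustration only: it assumes the service
# is running locally at http://127.0.0.1:8000 (an assumed address, not part of
# this file).
#
#     import httpx
#
#     latest = httpx.get("http://127.0.0.1:8000/latest").json()
#     usd = latest.get("usd")  # e.g. {"sell": ..., "buy": ...}
#
#     archive = httpx.get(
#         "http://127.0.0.1:8000/archive/", params={"date": "2023-01-01"}
#     ).json()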
@app.get("/archive/range")
@cache(expire=60 * 60 * 24)
async def read_archive_range(
start_date: str,
end_date: str = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")):
try:
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d")
except ValueError as err:
raise HTTPException(
status_code=422, detail="Invalid Date format. Expected YYYY-MM-DD"
) from err
price_range = {}
duration = end_date - start_date
for i in range(duration.days + 1):
day = start_date + datetime.timedelta(days=i)
price = await read_archive(day.strftime("%Y-%m-%d"))
date = price.pop("date")
price_range[date] = price
return price_range
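

# A minimal local-run sketch, assuming uvicorn is available; the upstream
# project may start the app differently (host and port here are assumptions).
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8000)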