-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathespnaWord.py
143 lines (132 loc) · 5.48 KB
/
espnaWord.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
import sqlite3
from sqlite3 import Error
# import pandas as pd
import streamlit as st
import requests
from streamlit_lottie import st_lottie
# from textblob import TextBlob
# from googletrans import Translator
# from google_trans_new import google_translator
# import googletrans
# import translators as ts # streamlit has no such module
def load_lottieurl(url):
r = requests.get(url)
if r.status_code != 200:
return None
return r.json()
def create_connection(db_file):
conn = None
try:
conn = sqlite3.connect(db_file)
except Error as e:
print(e)
return conn
def searchByWord(conn, word):
# sql = "select content from word_content where word='"+word+"'"
sql = "select word, content from word_content where word like'"+word+"%' order by word"
# print(sql)
cur = conn.cursor()
cur.execute(sql)
rows = cur.fetchall()
return rows
# names = [description[0] for description in cur.description]
# df = pd.DataFrame(rows)
# df.columns = names
# return df
def searchByWord_zh(conn, word):
# sql = "select content from word_content where word='"+word+"'"
sql = "select word, content from word_content where content like'%"+word+"%'"
# print(sql)
cur = conn.cursor()
cur.execute(sql)
rows = cur.fetchall()
return rows
def display_content(df):
if len(df) ==0:
st.write('~ Nothing Found ~')
return
# need to separate linebreak by \n
for j in range(len(df)):
col1, col2 = st.columns((1,2))
tmp = df[j][1].split(sep='\n')
with col1:
st.write(f"<p style='color:#FF4500'>{df[j][0]}</p>", unsafe_allow_html=True)
for line in tmp:
with col2:
st.write(line)
# def word_color(url):
# st.markdown(f'<p style="background-color:#0066cc;color:#33ff33;font-size:20px;border-radius:2%;">{url}</p>', unsafe_allow_html=True)
# st.markdown(f'<p style="color:#ffff00;font-size:20px;">{url}</p>', unsafe_allow_html=True)
def main():
# add animation
left_column, right_column = st.columns((3, 1))
with left_column:
st.title("LUISA · 西漢/漢西辭典")
st.subheader("Diccionario Español-Chino / Chino-Español")
st.text('Since 2008')
with right_column:
lottie_coding = load_lottieurl("https://assets7.lottiefiles.com/packages/lf20_Jmpjal.json")
st_lottie(lottie_coding, height=200, key="coding")
# st.text('Since 2008')
# st.title("LUISA · 西漢/漢西辭典")
# st.subheader("Diccionario Español-Chino / Chino-Español")
# database = r"C:/Users/CCWANG/OneDrive/PythonCodes/dashboard/espnaDict_online.sqlite"
database = r"espnaDict_online.sqlite"
conn = create_connection(database)
word = st.text_input('輸入西班牙語單字查詢中文辭義(完整單字或前部分字母皆可)', 'españ')
st.text("HELP: á é í ó ú ü ñ")
st.write('---')
df = searchByWord(conn, word)
display_content(df)
st.write('---')
word_zh = st.text_input('輸入中文字查詢相關西文單字', '番紅花')
st.write('---')
df = searchByWord_zh(conn, word_zh)
display_content(df)
st.write('---')
st.write("[Created by Luisa Chang@ntu >>>](https://luisachangntu.me/)")
# ----- Try to include other translators -------------------
# # print(googletrans.LANGUAGES)
# st.write("From other translators")
# # translator = Translator()
# # translator = google_translator()
# # translate_text = translator.translate('Hola mundo!', lang_src='es', lang_tgt='en')
# # result = translator.translate(word, src = 'es', dest='zh-tw')
# # st.write(result.text)
# # st.write(translate_text)
# # print(translate_text)
# col1, col2 = st.columns((1,2))
# with col1:
# st.write('By Google')
# with col2:
# text = ts.google('españolismo' , to_language = 'zh-TW', if_use_cn_host=True)
# st.write(text)
# col1, col2 = st.columns((1,2))
# with col1:
# st.write('By Microsoft Bing')
# with col2:
# text = ts.bing(word , to_language = 'zh-Hant')
# st.write(text)
# # text = ts.caiyun(word , to_language = 'zh', professional_field=None)
# # text = ts.baidu(word , to_language = 'zh', professional_field='common')
# # st.write("caiyun:\n" + text)
# # st.write(ts.translate_html(word, translator=ts.google, to_language='zh-TW', n_jobs=-1))
# st.write('---')
st.write("""<p style='font-size:15px;'>
網頁設計維護:José Saúl Yang ‧ Eva Chen ‧ Úlises Chuang ‧
Urbano Lee ‧ Morgan Kao ‧ Yuan-Ying Wang ‧ Pin Fang Chen ‧
Kuan Hao Chiao
</p>""", unsafe_allow_html=True)
st.write("""<p style='font-size:15px;'>資料搜集翻譯:Luisa Chang ‧ José Saúl Yang ‧ Pedro Chang ‧
Andrés Wu ‧ Vicente Hung ‧ Benito Wang ‧ Lolita Kuang ‧ Adrián Chou ‧ Danial Hou ‧ Bernardo Lin ‧
Eva Chen ‧ Enrique Lin ‧ Aurora Tsai ‧ Esther Huang ‧ Judy Yang ‧
Yolanda Cheng ‧ Rafael Lin ‧ Henry Yang ‧ Sofía Liu ‧ Aiden Chuang ‧
Ellen Chuang ‧ Carlos Chang ‧ Esperanza Hou ‧ Daniel Yen ‧ Gonzalo Yang ‧
Jessica Fan ‧ Emma Chou ‧ Pilar Hsu ‧ Paola Huang ‧ Felipe Chen ‧
Irene Chien ‧ Alberto Chang ‧ Jaime Kao ‧ Inés Hung ‧ Linda Wang ‧
Diego Chen ‧ Margarita Yao ‧ Ke Ru Lai
</p>""", unsafe_allow_html=True)
st.write('---')
conn.close()
if __name__ == '__main__':
main()