
Commit

Merge pull request #163 from dom-jiang/fix_filter_keywords_in_interface_parameters

Fix filter keywords in interface parameters
dom-jiang authored Aug 15, 2024
2 parents 61323a5 + ce807c2 commit d8cdd42
Showing 4 changed files with 78 additions and 76 deletions.
15 changes: 9 additions & 6 deletions app.py
@@ -29,8 +29,9 @@
 import datetime
 from auth.crypto_utl import decrypt
 import time
+import bleach

-service_version = "20240625.01"
+service_version = "20240812.01"
 Welcome = 'Welcome to ref datacenter API server, version ' + service_version + ', indexer %s' % \
           Cfg.NETWORK[Cfg.NETWORK_ID]["INDEXER_HOST"][-3:]
 # Instantiation, which can be regarded as fixed format
@@ -64,11 +65,13 @@ def before_request():
         logger.error("decrypt error:", e)
         return 'Authentication error'
     data = request.args
+    allowed_tags = []
+    allowed_attributes = {}
     for v in data.values():
-        v = str(v).lower()
-        pattern = r"(<.*?>)"
-        r = re.search(pattern, v)
-        if r:
+        v = str(v)
+        cleaned_value = bleach.clean(v, tags=allowed_tags, attributes=allowed_attributes)
+
+        if v != cleaned_value:
             return 'Please enter the parameters of the specification!'

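The regex check above only caught values containing a complete <...> pair; the new version asks bleach to sanitise each parameter with empty allowlists and rejects the request whenever sanitisation changes the value. A minimal standalone sketch of that check, with the Flask request handling omitted and purely illustrative sample values (contains_markup is a hypothetical helper, not part of the repository):

import bleach

def contains_markup(value):
    # With no allowed tags or attributes, bleach escapes any markup it finds,
    # so the cleaned text only equals the input when the value is plain text.
    value = str(value)
    cleaned = bleach.clean(value, tags=[], attributes={})
    return cleaned != value

print(contains_markup("alice.testnet"))               # False: plain parameter passes
print(contains_markup("<script>alert(1)</script>"))   # True: markup is rejected
print(contains_markup("a & b"))                       # True: bleach also escapes bare &, <, >
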
@@ -513,7 +516,7 @@ def handle_burrow_records():
     account_id = request.args.get("account_id")
     page_number = request.args.get("page_number", type=int, default=1)
     page_size = request.args.get("page_size", type=int, default=10)
-    if account_id is None or account_id == '':
+    if account_id is None or account_id == '' or page_size == 0:
         return ""
     burrow_log_list, count_number = query_burrow_log(Cfg.NETWORK_ID, account_id, page_number, page_size)
     if count_number % page_size == 0:
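
The extra page_size == 0 condition guards the modulo arithmetic further down in handle_burrow_records, which would otherwise raise ZeroDivisionError for a caller sending page_size=0. A rough sketch of the page-count logic it protects, assumed from the visible context line rather than copied from the handler:

def total_pages(count_number, page_size):
    # Hypothetical helper mirroring the pagination arithmetic in the handler.
    if page_size == 0:                      # same early exit the commit adds
        return 0
    if count_number % page_size == 0:
        return count_number // page_size
    return count_number // page_size + 1

print(total_pages(30, 10))   # 3
print(total_pages(31, 10))   # 4
print(total_pages(31, 0))    # 0 instead of a ZeroDivisionError
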
8 changes: 4 additions & 4 deletions data_utils.py
@@ -38,10 +38,10 @@ def add_redis_data(network_id, key, redis_key, values):

 def get_redis_data(network_id, key, redis_key):
     db_conn = get_db_connect(network_id)
-    sql = "select `redis_values` from t_indexer_redis_data where `key` = '%s' and redis_key = '%s'" % (key, redis_key)
+    sql = "select `redis_values` from t_indexer_redis_data where `key` = %s and redis_key = %s"
     cursor = db_conn.cursor(cursor=pymysql.cursors.DictCursor)
     try:
-        cursor.execute(sql)
+        cursor.execute(sql, (key, redis_key))
         row = cursor.fetchone()
         return row["redis_values"]
     except Exception as e:
@@ -53,10 +53,10 @@ def get_redis_data(network_id, key, redis_key):

 def batch_get_redis_data(network_id, key):
     db_conn = get_db_connect(network_id)
-    sql = "select redis_key, redis_values from t_indexer_redis_data where `key` = '%s'" % key
+    sql = "select redis_key, redis_values from t_indexer_redis_data where `key` = %s"
     cursor = db_conn.cursor(cursor=pymysql.cursors.DictCursor)
     try:
-        cursor.execute(sql)
+        cursor.execute(sql, key)
         rows = cursor.fetchall()
         return rows
     except Exception as e:
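
Both data_utils.py changes replace Python string interpolation with driver-side parameter binding: the %s markers are now pymysql placeholders, and the values are passed to cursor.execute separately, so quotes or SQL fragments inside key or redis_key can no longer alter the statement. A small standalone sketch of the pattern, where the connection settings and values are placeholders and not taken from the repository:

import pymysql

conn = pymysql.connect(host="127.0.0.1", user="reader", password="secret", database="example_db")
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)

key, redis_key = "token_price", "usn"   # illustrative values only

# Unsafe: the value is pasted into the SQL text, so a quote in it can break or inject the query.
# sql = "select `redis_values` from t_indexer_redis_data where `key` = '%s'" % key

# Safe: %s is a pymysql placeholder; the driver escapes and binds the values itself.
sql = "select `redis_values` from t_indexer_redis_data where `key` = %s and redis_key = %s"
cursor.execute(sql, (key, redis_key))
row = cursor.fetchone()
print(row)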