gpage.py (forked from jacklevin74/xenminer)

from flask import Flask, request, jsonify, render_template
from passlib.hash import argon2
import sqlite3, base64
from datetime import datetime
import time
import re
from web3 import Web3

app = Flask(__name__)

# Global variables to hold cached difficulty level and the time it was fetched
cached_difficulty = None
last_fetched_time = 0


def create_database():
    conn = sqlite3.connect('blocks.db')
    c = conn.cursor()
    c.execute('CREATE TABLE IF NOT EXISTS blocks (block_id INTEGER PRIMARY KEY AUTOINCREMENT, hash_to_verify TEXT, key TEXT UNIQUE, account TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS account_performance (account TEXT PRIMARY KEY, hashes_per_second REAL)')
    c.execute('CREATE TABLE IF NOT EXISTS super_blocks (account TEXT, super_block_count INTEGER)')
    conn.commit()
    conn.close()
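
# NOTE: create_database() is not invoked anywhere in this module; it is assumed
# to be run once (for example from a Python shell or a setup script) to create
# the blocks.db tables before the app starts serving requests.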

# Per-account difficulty cache and last-fetched timestamps.
# Note: this rebinds last_fetched_time (initialized above as a scalar) to a dict;
# get_difficulty() below relies on the dict form.
difficulty_cache = {}
last_fetched_time = {}

def is_within_five_minutes_of_hour():
    timestamp = datetime.now()
    minutes = timestamp.minute
    print("My minutes ", minutes)
    return 0 <= minutes < 5 or 55 <= minutes < 60
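
# Worked example of the window above: at 14:03 or 14:57 the function returns True
# (within five minutes of the top of the hour); at 14:30 it returns False.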

# Specify the file path where you want to save verification-failure messages
log_file_path = './error_log_filr.log'


def log_verification_failure(message, account):
    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    with open(log_file_path, 'a') as log_file:
        log_file.write(f"{current_time} - Issued 401: {account}. Message: {message}\n")

# Function to get difficulty level
def get_difficulty(account=None):
    global difficulty_cache, last_fetched_time  # Declare as global to modify them

    # Generate a cache key based on the account
    cache_key = account if account else "default"

    # Get the current time
    current_time = time.time()

    # Check if the cache has not expired (assuming 60 seconds as the cache duration)
    if cache_key in last_fetched_time and (current_time - last_fetched_time[cache_key] < 60):
        return difficulty_cache[cache_key]

    # Otherwise, fetch from database
    conn = sqlite3.connect('difficulty.db')  # Replace with your actual database name
    cursor = conn.cursor()

    if account:
        cursor.execute('SELECT difficulty FROM difficulty_table WHERE account = ? LIMIT 1;', (account,))
        difficulty_level = cursor.fetchone()
        if difficulty_level is None:
            cursor.execute('SELECT level FROM difficulty LIMIT 1;')
            difficulty_level = cursor.fetchone()
    else:
        cursor.execute('SELECT level FROM difficulty LIMIT 1;')
        difficulty_level = cursor.fetchone()

    conn.close()

    # Update the cache and the last fetched time
    if difficulty_level:
        difficulty_cache[cache_key] = str(difficulty_level[0])
    else:
        difficulty_cache[cache_key] = '8'  # or some default value
    last_fetched_time[cache_key] = current_time

    return difficulty_cache[cache_key]

# Function to get the difficulty level (older global-cache variant).
# Note: this function is not called anywhere in this module, and last_fetched_time
# is rebound to a dict above, so it would fail if invoked as-is.
def get_difficulty2(account=None):
    global cached_difficulty, last_fetched_time  # Declare as global to modify them

    # Check if it has been more than 10 seconds since the last fetch
    current_time = time.time()
    if current_time - last_fetched_time < 10:
        return cached_difficulty

    # Connect to SQLite database
    conn = sqlite3.connect('blocks.db', timeout=10)  # Replace with your actual database name
    cursor = conn.cursor()

    # Execute SQL query to fetch difficulty level
    cursor.execute('SELECT level FROM difficulty LIMIT 1;')
    difficulty_level = cursor.fetchone()

    # Close connection
    conn.close()

    # Update last fetched time
    last_fetched_time = current_time

    # Update cached difficulty and return
    if difficulty_level:
        cached_difficulty = str(difficulty_level[0])
        return cached_difficulty
    else:
        cached_difficulty = '8'  # Return '8' or some default if no difficulty level is found
        return cached_difficulty

@app.route('/difficulty', methods=['GET'])
@app.route('/difficulty/<account>', methods=['GET'])
def difficulty(account=None):
    difficulty_level = get_difficulty(account)

    # Check if difficulty level exists
    if difficulty_level:
        return jsonify({"difficulty": difficulty_level}), 200
    else:
        return jsonify({"error": "Difficulty level not found."}), 404

@app.route('/get_xuni_counts', methods=['GET'])
def get_account_counts():
    # Initialize database connection
    conn = sqlite3.connect('blocks.db')
    cursor = conn.cursor()

    try:
        # Run the SQL query
        cursor.execute("SELECT account, COUNT(*) as n FROM xuni GROUP BY account ORDER BY n;")
        data = cursor.fetchall()

        # Close database connection
        conn.close()

        # Prepare the result in JSON format
        result = [{"account": account, "count": n} for account, n in data]
        return jsonify(result)
    except sqlite3.Error as e:
        print("Database error:", e)
        # Close database connection in case of an error
        conn.close()
        return jsonify({"error": "Database error"}), 500

@app.route('/blockrate_per_day', methods=['GET'])
def blockrate_per_day():
    try:
        with sqlite3.connect('blocks.db') as conn:
            c = conn.cursor()
            c.execute('''
                SELECT account, num_blocks
                FROM AccountBlockCounts
                ORDER BY num_blocks DESC
                LIMIT 1000
            ''')
            rows = c.fetchall()

        # Convert rows into a list of dictionaries for JSON representation
        users_list = [{"account": row[0], "num_blocks": row[1]} for row in rows]
        return jsonify(users_list), 200
    except Exception as e:
        return jsonify({"error": "An error occurred: " + str(e)}), 500

@app.route('/leaderboard', methods=['GET'])
def leaderboard():
    # Use a local variable here: the module-level name `difficulty` is the view
    # function registered above and should not be clobbered with a string.
    difficulty = get_difficulty()

    # Connect to the cache database
    cache_conn = sqlite3.connect('cache.db', timeout=10)
    cache_c = cache_conn.cursor()

    # Read from the cache table for leaderboard data
    cache_c.execute("SELECT * FROM cache_table ORDER BY total_blocks DESC")
    results = cache_c.fetchall()
    cache_conn.close()

    # Calculate global statistics from the original blocks database
    conn = sqlite3.connect('blocks.db', timeout=10)
    c = conn.cursor()
    c.execute('''SELECT SUM(attempts) as total_attempts,
                 strftime('%s', MAX(timestamp)) - strftime('%s', MIN(timestamp)) as total_time
                 FROM (SELECT * FROM account_attempts ORDER BY timestamp DESC LIMIT 100000)''')
    result = c.fetchone()
    total_attempts, total_time = result
    # total_attempts_per_second = total_attempts / (total_time if total_time != 0 else 1)
    total_attempts_per_second = 1
    conn.close()

    # Get the latest rate and miner count from the difficulty database
    diff_conn = sqlite3.connect('difficulty.db', timeout=10)
    diff_c = diff_conn.cursor()
    diff_c.execute("SELECT rate FROM blockrate ORDER BY id DESC LIMIT 1")
    latest_rate = diff_c.fetchone()
    diff_c.execute("SELECT total_miners FROM miners ORDER BY id DESC LIMIT 1")
    latest_miners = diff_c.fetchone()
    diff_conn.close()

    if latest_miners:
        latest_miners = latest_miners[0]
    else:
        latest_miners = 0  # Default value if no data is found

    if latest_rate:
        latest_rate = latest_rate[0]
    else:
        latest_rate = 0  # Default value if no rate is found

    leaderboard = [(rank + 1, account, total_blocks, round(hashes_per_second, 2), super_blocks)
                   for rank, (account, total_blocks, hashes_per_second, super_blocks) in enumerate(results)]

    return render_template('leaderboard4.html', leaderboard=leaderboard,
                           total_attempts_per_second=int(round(total_attempts_per_second, 2) / 1000),
                           latest_rate=latest_rate, latest_miners=latest_miners, difficulty=difficulty)

@app.route('/get_balance/<account>', methods=['GET'])
def get_balance(account):
    conn = None
    try:
        conn = sqlite3.connect('cache.db', timeout=10)
        cursor = conn.cursor()
        cursor.execute("SELECT total_blocks FROM cache_table WHERE LOWER(account) = LOWER(?)", (account,))
        row = cursor.fetchone()
        if row:
            balance = row[0] * 10
            return jsonify({'account': account, 'balance': balance})
        else:
            return jsonify({'error': 'No record found for the provided account'}), 404
    except Exception as e:
        return jsonify({'error': str(e)}), 500
    finally:
        if conn:
            conn.close()

@app.route('/get_super_blocks/<account>', methods=['GET'])
def get_super_blocks(account):
    conn = None
    try:
        conn = sqlite3.connect('cache.db', timeout=10)
        cursor = conn.cursor()
        cursor.execute("SELECT super_blocks FROM cache_table WHERE LOWER(account) = LOWER(?)", (account,))
        row = cursor.fetchone()
        if row:
            return jsonify({'account': account, 'super_blocks': row[0]})
        else:
            return jsonify({'error': 'No record found for the provided account'}), 404
    except Exception as e:
        return jsonify({'error': str(e)}), 500
    finally:
        if conn:
            conn.close()

@app.route('/total_blocks', methods=['GET'])
def total_blocks():
    conn = sqlite3.connect('blocks.db')
    c = conn.cursor()

    # Query to get the latest block_id from the `blocks` table
    c.execute('SELECT block_id FROM blocks ORDER BY block_id DESC LIMIT 1')
    result = c.fetchone()
    last_block_id = result[0] if result else None
    conn.close()

    return jsonify({'total_blocks_top100': last_block_id})

@app.route('/hash_rate', methods=['GET'])
def hash_rate():
    conn = sqlite3.connect('blocks.db')
    c = conn.cursor()

    # Get the sum of all attempts and the time range for the last 50,000 records
    c.execute('''SELECT SUM(attempts) as total_attempts,
                 strftime('%s', MAX(timestamp)) - strftime('%s', MIN(timestamp)) as total_time
                 FROM (SELECT * FROM account_attempts ORDER BY timestamp DESC LIMIT 50000)''')
    result = c.fetchone()
    conn.close()

    total_attempts, total_time = result
    total_attempts_per_second = total_attempts / (total_time if total_time != 0 else 1)

    return render_template('hash_rate.html', total_attempts_per_second=total_attempts_per_second)

# Temporary storage for batch insertion
account_attempts_batch = []
blocks_batch = []
batch_size = 1


def is_valid_sha256(s):
    """Check if s is a valid SHA-256 hash."""
    return re.match(r'^[a-fA-F0-9]{64}$', s) is not None


def is_hexadecimal(s):
    """Check if s is a hexadecimal string."""
    return re.match(r'^[a-fA-F0-9]*$', s) is not None


def check_fourth_element(string):
    pattern = re.compile(r'(?:[^$]*\$){3}WEVOMTAwODIwMjJYRU4\$')
    match = pattern.search(string)
    return bool(match)


def is_valid_hash(h):
    """Ensure the input is a hexadecimal hash of the expected length."""
    return bool(re.match("^[a-fA-F0-9]{64}$", h))

def restore_eip55_address(lowercase_address: str) -> bool:
    # Validate the address by restoring its EIP-55 checksummed form
    try:
        checksum_address = Web3.to_checksum_address(lowercase_address)
        print("Checksummed address is: ", checksum_address)
        return True
    except ValueError as e:
        # Handle the error in case the address is not a valid Ethereum address
        print(f"An error occurred: {e}")
        return False

def check_salt_format_and_ethereum_address(hash_to_verify: str) -> bool:
    # Regular expressions for the expected patterns
    pattern1 = re.compile(r'WEVOMTAwODIwMjJYRU4')
    pattern2 = re.compile(r'^[A-Za-z0-9+/]{27}$')

    # Extract the salt part from the hash_to_verify
    parts = hash_to_verify.split("$")
    if len(parts) != 6:
        return False
    salt = parts[4]

    # Check if the salt matches the first pattern
    if pattern1.search(salt):
        print("Matched old salt, continue")
        return True
    else:
        print("Old Salt False")

    # Check if the salt matches the second pattern and is base64
    if pattern2.fullmatch(salt):
        print("In Pattern2 match")
        try:
            # A proper base64 string should have a length that is a multiple of 4,
            # so we need to add padding if necessary.
            missing_padding = len(salt) % 4
            if missing_padding:
                salt += '=' * (4 - missing_padding)

            # Decode the base64 string
            decoded_bytes = base64.b64decode(salt)
            decoded_str = decoded_bytes.hex()
            print("Decoded salt: ", decoded_str)

            # Check if the decoded string is a valid hexadecimal of the expected length
            if re.fullmatch(r'[0-9a-fA-F]{40}', decoded_str):  # Ethereum addresses are 40 hex characters long
                # Construct potential Ethereum address
                potential_eth_address = '0x' + decoded_str
                print("Address matched: ", potential_eth_address)

                # Validate Ethereum address checksum
                if restore_eip55_address(potential_eth_address):
                    print("Checksum of address is valid")
                    return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    return False
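
# Illustrative layout of the string this check expects (an argon2-style PHC string;
# the parameter values below are placeholders, not real miner output): splitting
#   "$argon2id$v=19$m=65536,t=1,p=1$<base64-salt>$<base64-hash>"
# on "$" yields six parts with the salt at index 4. The salt is accepted either
# when it contains the legacy marker WEVOMTAwODIwMjJYRU4, or when it is a
# 27-character base64 string that decodes to a 20-byte (40 hex character) value
# forming an Ethereum address with a valid EIP-55 checksum.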

@app.route('/get_block', methods=['GET'])
def get_block():
    key = request.args.get('key')
    if not key:
        return jsonify({"error": "Please provide a key"}), 400
    if not is_valid_hash(key):
        return jsonify({"error": "Invalid key provided"}), 400

    conn = sqlite3.connect('blocks.db')
    cursor = conn.cursor()

    # Use a parameterized query to prevent SQL injection
    cursor.execute("SELECT * FROM blocks WHERE key=?", (key,))
    data = cursor.fetchone()

    if data is None:
        # No record was found in the 'blocks' table, try the 'xuni' table
        cursor.execute("SELECT * FROM xuni WHERE key=?", (key,))
        data = cursor.fetchone()

    if data is None:
        # Record not found in either table
        conn.close()
        return jsonify({"error": "Data not found for provided key"}), 404

    # Column names for both 'blocks' and 'xuni' tables
    columns = ['block_id', 'hash_to_verify', 'key', 'account', 'created_at']

    # Convert the tuple data to a dictionary
    data_dict = dict(zip(columns, data))
    conn.close()

    return jsonify(data_dict), 200

@app.route('/verify', methods=['POST'])
def verify_hash():
    global account_attempts_batch, blocks_batch
    data = request.json

    worker_id = data.get('worker_id')
    if not (isinstance(worker_id, str) and len(worker_id) <= 3):
        worker_id = None  # Set worker_id to None if it's not a valid string of 3 characters or less

    hash_to_verify = data.get('hash_to_verify')
    hash_to_verify = hash_to_verify if (hash_to_verify and len(hash_to_verify) <= 150) else None

    key = data.get('key')
    key = key if (key and len(key) <= 128) else None

    account = data.get('account')
    if account is not None:
        account = str(account).lower().replace("'", "").replace('"', '')
        account = account if len(account) <= 43 else None

    attempts = data.get('attempts')
    difficulty = 0

    # Check for missing data before any further validation
    if not hash_to_verify or not key or not account:
        return jsonify({"error": "Missing hash_to_verify, key, or account"}), 400

    is_xuni_present = re.search('XUNI[0-9]', hash_to_verify[-87:]) is not None

    # Check if key is a hexadecimal string
    if not is_hexadecimal(key):
        return jsonify({"error": "Invalid key format"}), 400

    # if not check_fourth_element(hash_to_verify):
    if not check_salt_format_and_ethereum_address(hash_to_verify):
        return jsonify({"error": "Invalid salt format"}), 400

    # Get difficulty level from the database
    old_difficulty = difficulty
    difficulty = get_difficulty()

    submitted_difficulty = int(re.search(r'm=(\d+)', hash_to_verify).group(1))

    strict_check = False
    if f'm={difficulty}' in hash_to_verify and is_xuni_present:
        strict_check = True

    # if f'm={difficulty}' not in hash_to_verify:
    #     print("Compare diff ", submitted_difficulty, int(difficulty))
    if submitted_difficulty < int(difficulty):
        # if abs(submitted_difficulty - int(difficulty)) > 50:
        print("This Generates 401 for difficulty being too low", submitted_difficulty, int(difficulty))
        error_message = f"Hash does not contain 'm={difficulty}'. Your memory_cost setting in your miner will be autoadjusted."
        log_verification_failure(error_message, account)
        return jsonify({"message": error_message}), 401

    stored_targets = ['XEN11']  # Adjusted list to exclude 'XUNI' since we will search for it differently
    found = False
    for target in stored_targets:
        if target in hash_to_verify[-87:]:
            found = True
            print("Found Target:", target)

    # Search for XUNI followed by a number
    if re.search('XUNI[0-9]', hash_to_verify[-87:]) is not None:
        found = True
        print("Found Target: XUNI[0-9]")

    if not found:
        print(hash_to_verify)
        error_message = f"Hash does not contain any of the valid targets {stored_targets} in the last 87 characters. Adjust target_substr in your miner."
        log_verification_failure(error_message, account)
        print(error_message, hash_to_verify[-87:])
        return jsonify({"message": error_message}), 401

    if len(hash_to_verify) > 150:
        error_message = "Length of hash_to_verify should not be greater than 150 characters."
        print(error_message)
        log_verification_failure(error_message, account)
        return jsonify({"message": error_message}), 401

    try:
        is_verified = argon2.verify(key, hash_to_verify)
    except Exception as e:
        print(f"An error occurred: {e}")
        is_verified = False

    # if argon2.verify(key, hash_to_verify):
    if is_verified:
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        is_xen11_present = 'XEN11' in hash_to_verify[-87:]
        is_xuni_present = re.search('XUNI[0-9]', hash_to_verify[-87:]) is not None
        # disable XUNI
        # is_xuni_present = False

        conn = sqlite3.connect('blocks.db')
        conn.execute('PRAGMA journal_mode = wal')
        c = conn.cursor()
        try:
            # If XUNI is present and time is within 5 minutes of the hour, then insert to DB
            if is_xuni_present and is_within_five_minutes_of_hour():
                print("XUNI submitted and added to batch")
                insert_query = '''INSERT INTO xuni (hash_to_verify, key, account) VALUES (?, ?, ?)'''
                data_tuple = (hash_to_verify, key, account)
                success = insert_with_retry(c, insert_query, data_tuple)
                if not success:
                    print("Could not insert into the database after multiple retries.")
            elif is_xen11_present:  # no time restrictions for XEN11
                print("XEN11 hash added to batch")
                insert_query = '''INSERT INTO blocks (hash_to_verify, key, account) VALUES (?, ?, ?)'''
                data_tuple = (hash_to_verify, key, account)
                success = insert_with_retry(c, insert_query, data_tuple)
                if not success:
                    print("Could not insert into the database after multiple retries.")
            else:
                return jsonify({"message": "XUNI found outside of time window"}), 401

            # c.execute('''INSERT OR IGNORE INTO account_attempts (account, timestamp, attempts)
            #              VALUES (?, ?, ?)''', (account, timestamp, attempts))
            print("This Generates 200 for difficulty being good", submitted_difficulty, int(difficulty))
            print("Inserting hash into db: ", hash_to_verify)
            conn.commit()
        except sqlite3.IntegrityError as e:
            error_message = e.args[0] if e.args else "Unknown IntegrityError"
            print(f"Error: {error_message} ", hash_to_verify, key, account)
            return jsonify({"message": "Block already exists, continue"}), 400
        finally:
            conn.close()

        return jsonify({"message": "Hash verified successfully and block saved."}), 200
    else:
        print("Hash verification failed")
        return jsonify({"message": "Hash verification failed."}), 401

def insert_with_retry(cursor, query, data, max_retries=3, sleep_interval=1):
    attempt = 0
    while attempt < max_retries:
        try:
            cursor.execute(query, data)
            return True  # Success
        except sqlite3.OperationalError as e:
            if str(e) == 'database is locked':
                print(f"Database is locked, retrying {attempt + 1}/{max_retries}")
                time.sleep(sleep_interval)  # Wait for the database to be unlocked
                attempt += 1
            else:
                raise  # Other operational errors are not handled here
    return False  # Max retries reached, could not insert
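
# Design note: retrying on "database is locked" complements the WAL journal mode
# set in verify_hash. An alternative (not used here) would be to rely on the
# timeout= argument of sqlite3.connect(), which makes SQLite itself wait for a
# lock to clear before raising OperationalError.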

@app.route('/validate', methods=['POST'])
def store_consensus():
    data = request.json
    total_count = data.get('total_count')
    my_ethereum_address = data.get('my_ethereum_address')
    last_block_id = data.get('last_block_id')
    last_block_hash = data.get('last_block_hash')

    try:
        conn = sqlite3.connect('blocks.db')
        c = conn.cursor()
        c.execute('''INSERT INTO consensus (total_count, my_ethereum_address, last_block_id, last_block_hash)
                     VALUES (?, ?, ?, ?)''',
                  (total_count, my_ethereum_address, last_block_id, last_block_hash))
        conn.commit()
        conn.close()
        return jsonify({"status": "success"})
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)})

@app.route('/top_daily_block_miners', methods=['GET'])
def get_top_blocks():
    conn = sqlite3.connect('blocks.db')  # Assuming your database file is named blocks.db
    cursor = conn.cursor()

    query = '''
        SELECT * FROM AccountBlockCounts
        ORDER BY num_blocks DESC
        LIMIT 500
    '''
    cursor.execute(query)
    rows = cursor.fetchall()

    # Close the connection
    conn.close()

    # Convert the rows to a JSON response
    result = [{"account": row[0], "num_blocks": row[1]} for row in rows]
    return jsonify(result)

@app.route('/latest_blockrate', methods=['GET'])
def get_latest_blockrate():
    try:
        # Connect to the difficulty database
        conn = sqlite3.connect('difficulty.db')
        cursor = conn.cursor()

        # Query the latest blockrate using "ORDER BY id DESC LIMIT 1" for better performance
        cursor.execute("SELECT id, date, rate FROM blockrate ORDER BY id DESC LIMIT 1")
        record = cursor.fetchone()

        if record is None:
            conn.close()
            return jsonify({"error": "No blockrate data found"}), 404

        # Close the database connection
        conn.close()

        # Prepare the result in JSON format
        result = {
            "id": record[0],
            "date": record[1],
            "rate": record[2]
        }
        return jsonify(result), 200
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/total_blocks2', methods=['GET'])
def total_blocks2():
    account = request.args.get('account')
    if not account:
        return jsonify({"error": "Missing account"}), 400

    conn = sqlite3.connect('blocks.db')
    c = conn.cursor()
    c.execute('SELECT COUNT(block_id) FROM blocks WHERE account = ?', (account,))
    result = c.fetchone()
    conn.close()

    return jsonify({"total_blocks": result[0]}), 200