
Commit e54d2d1
Performance fix (#30)
* fix unneeded cast

* added 413 for content over 200 kB and 411 for POST requests without length information

* new restriction: maximum 1000 tx ids for search

* limit light and full search to 10

* unneeded cast operator

* limit TXs to 50
lAmeR1 authored Aug 8, 2023
1 parent f06c382 commit e54d2d1
Showing 3 changed files with 31 additions and 3 deletions.
12 changes: 9 additions & 3 deletions endpoints/get_transactions.py
@@ -5,7 +5,7 @@
 
 from fastapi import Path, HTTPException, Query
 from pydantic import BaseModel, parse_obj_as
-from sqlalchemy import Integer, cast
+from sqlalchemy import Integer
 from sqlalchemy.future import select
 
 from dbsession import async_session
@@ -111,7 +111,7 @@ async def get_transaction(transactionId: str = Path(regex="[a-f0-9]{64}"),
     tx_inputs = await s.execute(select(TransactionInput, TransactionOutput)
                                 .outerjoin(TransactionOutput,
                                            (TransactionOutput.transaction_id == TransactionInput.previous_outpoint_hash) &
-                                           (TransactionOutput.index == cast(TransactionInput.previous_outpoint_index, Integer)))
+                                           (TransactionOutput.index == TransactionInput.previous_outpoint_index))
                                 .filter(TransactionInput.transaction_id == transactionId))
 
     tx_inputs = tx_inputs.all()
@@ -170,6 +170,12 @@ async def search_for_transactions(txSearch: TxSearch,
     """
     Search for transactions by transaction ids
     """
+    if len(txSearch.transactionIds) > 1000:
+        raise HTTPException(422, "Too many transaction ids")
+
+    if resolve_previous_outpoints in ["light", "full"] and len(txSearch.transactionIds) > 50:
+        raise HTTPException(422, "Temporary issue: Transaction ids count is limited to 50 for light and full searches.")
+
     fields = fields.split(",") if fields else []
 
     async with async_session() as s:
@@ -186,7 +192,7 @@
         tx_inputs = await s.execute(select(TransactionInput, TransactionOutput)
                                     .outerjoin(TransactionOutput,
                                                (TransactionOutput.transaction_id == TransactionInput.previous_outpoint_hash) &
-                                               (TransactionOutput.index == cast(TransactionInput.previous_outpoint_index, Integer)))
+                                               (TransactionOutput.index == TransactionInput.previous_outpoint_index))
                                     .filter(TransactionInput.transaction_id.in_(txSearch.transactionIds)))
 
         # without joining previous_tx_outputs
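With these caps in place, a caller holding a long list of transaction ids has to batch its requests. Below is a minimal client-side sketch in Python using the requests package; the base URL, the /transactions/search path, and passing resolve_previous_outpoints as a query parameter are assumptions inferred from this diff, not confirmed endpoints:

import requests  # assumed available in the client environment

BASE_URL = "http://localhost:8000"  # assumed server address

def search_transactions(tx_ids, resolve_previous_outpoints="no"):
    # This commit caps one request at 1000 ids, and at 50 when
    # resolve_previous_outpoints is "light" or "full".
    chunk_size = 50 if resolve_previous_outpoints in ("light", "full") else 1000
    results = []
    for i in range(0, len(tx_ids), chunk_size):
        resp = requests.post(
            f"{BASE_URL}/transactions/search",  # path assumed, not shown in this diff
            params={"resolve_previous_outpoints": resolve_previous_outpoints},
            json={"transactionIds": tx_ids[i:i + chunk_size]},
        )
        resp.raise_for_status()
        results.extend(resp.json())
    return results

Batching at the same thresholds the server enforces keeps each request just under the 422 limits while still returning the full result set.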
20 changes: 20 additions & 0 deletions helper/LimitUploadSize.py
@@ -0,0 +1,20 @@
+from starlette import status
+from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
+from starlette.requests import Request
+from starlette.responses import Response
+
+# from https://github.com/tiangolo/fastapi/issues/362
+
+class LimitUploadSize(BaseHTTPMiddleware):
+    def __init__(self, app, max_upload_size: int) -> None:
+        super().__init__(app)
+        self.max_upload_size = max_upload_size
+
+    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
+        if request.method == 'POST':
+            if 'content-length' not in request.headers:
+                return Response(status_code=status.HTTP_411_LENGTH_REQUIRED)
+            content_length = int(request.headers['content-length'])
+            if content_length > self.max_upload_size:
+                return Response(status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)
+        return await call_next(request)
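To see the middleware's behavior end to end, here is a minimal sketch using Starlette's TestClient (httpx-based in recent Starlette versions); the toy echo app and the 10-byte limit are illustrative, not the project's configuration:

from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route
from starlette.testclient import TestClient

from helper.LimitUploadSize import LimitUploadSize

async def echo(request):
    # Echo the request body back; the route only runs if the
    # middleware let the request through.
    return PlainTextResponse(await request.body())

app = Starlette(routes=[Route("/echo", echo, methods=["POST"])])
app.add_middleware(LimitUploadSize, max_upload_size=10)

client = TestClient(app)
print(client.post("/echo", content="x" * 11).status_code)  # 413, over the limit
print(client.post("/echo", content="tiny").status_code)    # 200, within the limit

Note that the check trusts the declared Content-Length header rather than counting the bytes actually streamed, which is cheap but relies on well-behaved clients.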
2 changes: 2 additions & 0 deletions server.py
@@ -11,6 +11,7 @@
 from starlette.requests import Request
 from starlette.responses import JSONResponse
 
+from helper.LimitUploadSize import LimitUploadSize
 from kaspad.KaspadMultiClient import KaspadMultiClient
 
 fastapi.logger.logger.setLevel(logging.WARNING)
@@ -28,6 +29,7 @@
 )
 
 app.add_middleware(GZipMiddleware, minimum_size=500)
+app.add_middleware(LimitUploadSize, max_upload_size=200_000)  # ~200 kB
 
 origins = ["*"]

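One ordering detail worth noting: Starlette's add_middleware prepends to the middleware stack, so LimitUploadSize, registered after GZipMiddleware, should sit outermost and reject oversized or length-less POSTs before any other processing runs.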
