Skip to content

Commit

Permalink
fix(main.py): Introduce request time limit on fast requests
Browse files Browse the repository at this point in the history
If a user abuses the db query, it should not block the HTTP server for a long
time.

Signed-off-by: Denys Fedoryshchenko <[email protected]>
  • Loading branch information
nuclearcat committed Dec 4, 2024
1 parent a62d03f commit 56aa638
Showing 1 changed file with 13 additions and 5 deletions.
18 changes: 13 additions & 5 deletions api/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

import os
import re
import asyncio
from typing import List, Union, Optional
from datetime import datetime
from contextlib import asynccontextmanager
Expand Down Expand Up @@ -598,6 +599,13 @@ async def get_nodes(request: Request):
add_pagination(app)


async def db_find_node_nonpaginated(query_params):
    """Return every node matching *query_params*, with no pagination applied.

    The raw request parameters are first translated to the database field
    names via the base ``Node`` model before the lookup is performed.
    """
    translated = Node.translate_fields(query_params)
    return await db.find_by_attributes_nonpaginated(Node, translated)


@app.get('/nodes/fast', response_model=List[Node])
async def get_nodes_fast(request: Request):
"""Get all the nodes if no request parameters have passed.
Expand All @@ -611,11 +619,11 @@ async def get_nodes_fast(request: Request):

try:
# Query using the base Node model, regardless of the specific
# node type
model = Node
translated_params = model.translate_fields(query_params)
resp = await db.find_by_attributes_nonpaginated(model,
translated_params)
# node type; use asyncio.wait_for with a 15-second timeout
resp = await asyncio.wait_for(
db_find_node_nonpaginated(query_params),
timeout=15
)
return resp
except KeyError as error:
raise HTTPException(
Expand Down

0 comments on commit 56aa638

Please sign in to comment.