Skip to content

Commit

Permalink
Feature: User favourites (#2)
Browse files Browse the repository at this point in the history
* Feature: initial routers / methods for favourites

- add env documentation / adminer image for easier inspection of db
- create / delete favourite entry in database
- fetch list of favourites by user

* Chore: update formatting, tests

* update test methods / logging

-

* updates

* expand tests / logging
  • Loading branch information
happy-devs authored Jan 14, 2025
1 parent c71ae3a commit cd47b8d
Show file tree
Hide file tree
Showing 26 changed files with 1,075 additions and 18 deletions.
19 changes: 19 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Authentication
TENANT_ID="<microsoft-entra-tenant-id>"
CLIENT_ID="<app-id>"
API_KEY="<strong-password>" # for accessing "public" endpoints

# Database and Storage
DB_CONNECTION="postgresql://<user>:<password>@<host>:5432/<staging|production>"
SAS_URL="https://<account-name>.blob.core.windows.net/<container-name>?<sas-token>"

# Azure OpenAI, only required for `/signals/generation`
AZURE_OPENAI_ENDPOINT="https://<subdomain>.openai.azure.com/"
AZURE_OPENAI_API_KEY="<api-key>"

# Testing, only required to run tests, must be a valid token of a regular user
API_JWT="<json-token>"

# News API
# https://newsapi.org/account
NEWS_API_KEY="<api-key>"
13 changes: 13 additions & 0 deletions .env.local
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
TENANT_ID=
CLIENT_ID=
API_KEY=

DB_CONNECTION=
SAS_URL=

AZURE_OPENAI_ENDPOINT=
AZURE_OPENAI_API_KEY=

API_JWT=

NEWS_API_KEY=
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -140,3 +140,5 @@ cython_debug/
# Manually added for this project
.idea/
**/.DS_Store
sql/create_test_user.sql
Taskfile.yml
22 changes: 19 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,10 +1,26 @@
FROM python:3.11.7-slim

# Install system dependencies
RUN apt-get update -y \
&& apt-get install libpq-dev -y \
&& apt-get install -y \
libpq-dev \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Install Python dependencies including development dependencies
COPY requirements.txt requirements_dev.txt ./
RUN pip install --no-cache-dir --upgrade -r requirements.txt -r requirements_dev.txt

# Copy application code
COPY . .

EXPOSE 8000

# Add healthcheck
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD curl --fail http://localhost:8000/signals/search || exit 1

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@ format:
lint:
pylint main.py src/
test:
python -m pytest tests/
python -m pytest tests/
32 changes: 29 additions & 3 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,42 @@ services:
env_file: .env
environment:
- DB_CONNECTION=postgresql://postgres:password@db:5432/postgres
- ENV_MODE=local
ports:
- "8000:8000"
volumes:
- .:/app
depends_on:
- db
db:
condition: service_healthy
command: >
sh -c "sleep 5 && uvicorn main:app --host 0.0.0.0 --port 8000 --reload"
db:
image: postgres:16.4-alpine
environment:
POSTGRES_PASSWORD: password
POSTGRES_DB: postgres
ports:
- "5432:5432"
- 5432:5432
volumes:
- ./sql:/docker-entrypoint-initdb.d
- postgres_data:/var/lib/postgresql/data
- ./sql/create_tables.sql:/docker-entrypoint-initdb.d/1-create_tables.sql
- ./sql/import_data.sql:/docker-entrypoint-initdb.d/2-import_data.sql
- ./sql/init_test_data.sql:/docker-entrypoint-initdb.d/3-init_test_data.sql
- ./data:/docker-entrypoint-initdb.d/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 5s
timeout: 5s
retries: 5
adminer:
image: adminer
restart: always
ports:
- 4040:8080
depends_on:
db:
condition: service_healthy

volumes:
postgres_data:
13 changes: 13 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,14 @@

from dotenv import load_dotenv
from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware

from src import routers
from src.authentication import authenticate_user
from src.config.logging_config import setup_logging

load_dotenv()
setup_logging()

app = FastAPI(
debug=False,
Expand Down Expand Up @@ -42,11 +45,21 @@
{"name": "trends", "description": "CRUD operations on trends."},
{"name": "users", "description": "CRUD operations on users."},
{"name": "choices", "description": "List valid options for forms fields."},
{"name": "favourites", "description": "Manage user's favorite signals."},
],
docs_url="/",
redoc_url=None,
)

# allow cors
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)


for router in routers.ALL:
app.include_router(router=router, dependencies=[Depends(authenticate_user)])
3 changes: 3 additions & 0 deletions requirements_dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,6 @@ isort ~= 5.13.2
pylint ~= 3.3.1
pytest ~= 8.3.3
notebook ~= 7.2.2
pytest-asyncio==0.21.1
pytest-cov==4.1.0
pytest-watch==4.2.0
13 changes: 13 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
"""Package configuration for the ftss-api service."""
from setuptools import find_packages, setup

# Runtime requirements only; development tooling lives in requirements_dev.txt.
RUNTIME_DEPS = [
    "fastapi",
    "uvicorn",
    "psycopg",
    "pydantic",
]

setup(
    name="ftss-api",
    version="0.1",
    packages=find_packages(),
    install_requires=RUNTIME_DEPS,
)
19 changes: 17 additions & 2 deletions sql/create_tables.sql
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,13 @@ CREATE TABLE users (
role VARCHAR(255) NOT NULL,
name VARCHAR(255),
unit VARCHAR(255),
acclab BOOLEAN
acclab BOOLEAN,
api_key VARCHAR(255) UNIQUE
);

CREATE INDEX ON users (email);
CREATE INDEX ON users (role);
CREATE INDEX ON users (api_key);

-- signals table and indices
CREATE TABLE signals (
Expand Down Expand Up @@ -118,6 +120,19 @@ CREATE TABLE connections (
CONSTRAINT connection_pk PRIMARY KEY (signal_id, trend_id)
);

-- favourites table to track user's favourite signals
CREATE TABLE favourites (
    user_id INT REFERENCES users(id) ON DELETE CASCADE,
    signal_id INT REFERENCES signals(id) ON DELETE CASCADE,
    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
    -- The composite PK also serves as the (user_id, signal_id) lookup index,
    -- so no separate index on that pair is needed.
    CONSTRAINT favourites_pk PRIMARY KEY (user_id, signal_id)
);

-- List a user's favourites in recency order without an extra sort.
CREATE INDEX favourites_user_created_at_idx ON favourites (user_id, created_at DESC);

-- Global recency scans (e.g. "latest favourites" style queries).
CREATE INDEX favourites_created_at_idx ON favourites (created_at DESC);

-- locations table and indices
CREATE TABLE locations (
id SERIAL PRIMARY KEY,
Expand All @@ -134,4 +149,4 @@ CREATE TABLE units (
name TEXT NOT NULL,
region VARCHAR(255)
);
CREATE INDEX ON units (name, region);
CREATE INDEX ON units (name, region);
29 changes: 29 additions & 0 deletions sql/init_test_data.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
Local-development seed data.

Executed automatically by docker compose after create_tables.sql and
import_data.sql (entrypoint scripts run in filename order).
*/

-- Seed a single admin account; the test suite hard-codes user id 1.
INSERT INTO users
    (id, created_at, email, role, name, unit, acclab, api_key)
VALUES
    (1, NOW(), '[email protected]', 'ADMIN', 'Test User',
     'Data Futures Exchange (DFx)', false, 'test-key');

-- Advance the serial sequence past the explicitly supplied ids so the next
-- INSERT that omits id does not collide with the seeded row.
SELECT setval('users_id_seq', (SELECT MAX(id) FROM users));
43 changes: 43 additions & 0 deletions sql/insert_test_data.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
-- Loads test signals from a JSON fixture shipped into the Postgres init
-- directory and inserts them into the signals table. Intended for local
-- development only: pg_read_file requires superuser (or pg_read_server_files)
-- and an absolute path readable by the server process.
-- NOTE(review): assumes /docker-entrypoint-initdb.d/test_data.json exists and
-- has a top-level "signals" array — confirm against the compose volume mounts.
CREATE OR REPLACE FUNCTION insert_test_signals()
RETURNS void AS $$
DECLARE
-- Full "signals" array from the fixture file.
signals jsonb;
-- One element of the array per loop iteration.
signal_record jsonb;
BEGIN
-- Read the JSON file
signals := (pg_read_file('/docker-entrypoint-initdb.d/test_data.json')::jsonb)->'signals';

-- Loop through each signal and insert
FOR signal_record IN SELECT * FROM jsonb_array_elements(signals) LOOP
-- The CTE converts the jsonb array fields into native text[] values,
-- since the corresponding signals columns are array-typed; scalar fields
-- are extracted as text with ->> directly in the SELECT below.
WITH arrays AS (
SELECT
array(SELECT * FROM jsonb_array_elements_text(signal_record->'keywords')) as keywords,
array(SELECT * FROM jsonb_array_elements_text(signal_record->'steep_secondary')) as steep_secondary,
array(SELECT * FROM jsonb_array_elements_text(signal_record->'signature_secondary')) as signature_secondary,
array(SELECT * FROM jsonb_array_elements_text(signal_record->'sdgs')) as sdgs
)
INSERT INTO signals (
status, created_by, modified_by, headline, description, url,
relevance, keywords, location, steep_primary, steep_secondary,
signature_primary, signature_secondary, sdgs, created_unit
)
SELECT
signal_record->>'status',
signal_record->>'created_by',
signal_record->>'modified_by',
signal_record->>'headline',
signal_record->>'description',
signal_record->>'url',
signal_record->>'relevance',
keywords,
signal_record->>'location',
signal_record->>'steep_primary',
steep_secondary,
signal_record->>'signature_primary',
signature_secondary,
sdgs,
signal_record->>'created_unit'
FROM arrays;
END LOOP;
END;
$$ LANGUAGE plpgsql;
Loading

0 comments on commit cd47b8d

Please sign in to comment.