fix(detections): allow azimuth = 0 #399

Closed · wants to merge 6 commits
2 changes: 1 addition & 1 deletion .github/workflows/scripts.yml
@@ -36,5 +36,5 @@ jobs:
  POSTGRES_PASSWORD: dummy_pg_pwd
  POSTGRES_DB: dummy_pg_db
  run: |
- docker compose -f docker-compose.dev.yml up -d --build --wait
+ docker compose -f docker-compose.yml up -d --build --wait
  python scripts/test_e2e.py
8 changes: 4 additions & 4 deletions .github/workflows/tests.yml
@@ -27,9 +27,9 @@ jobs:
  POSTGRES_PASSWORD: dummy_pg_pwd
  POSTGRES_DB: dummy_pg_db
  run: |
- docker compose -f docker-compose.dev.yml up -d --build --wait
- docker compose -f docker-compose.dev.yml exec -T backend pytest --cov=app --cov-report xml tests/
- docker compose -f docker-compose.dev.yml cp backend:/app/coverage.xml ./coverage-src.xml
+ docker compose -f docker-compose.yml up -d --build --wait
+ docker compose -f docker-compose.yml exec -T backend pytest --cov=app --cov-report xml tests/
+ docker compose -f docker-compose.yml cp backend:/app/coverage.xml ./coverage-src.xml
  - name: Upload coverage to Codecov
  uses: codecov/codecov-action@v5
  with:
@@ -66,7 +66,7 @@ jobs:
  POSTGRES_PASSWORD: dummy_pg_pwd
  POSTGRES_DB: dummy_pg_db
  run: |
- docker compose -f docker-compose.dev.yml up -d --build --wait
+ docker compose -f docker-compose.yml up -d --build --wait
  cd client && pytest --cov=pyroclient --cov-report xml tests/
  - name: Upload coverage to Codecov
  uses: codecov/codecov-action@v5
14 changes: 7 additions & 7 deletions Makefile
@@ -31,9 +31,9 @@ stop:
  # the "-" are used to launch the next command even if a command fail
  test:
  poetry export -f requirements.txt --without-hashes --with test --output requirements.txt
- docker compose -f docker-compose.dev.yml up -d --build --wait
- - docker compose -f docker-compose.dev.yml exec -T backend pytest --cov=app
- docker compose -f docker-compose.dev.yml down
+ docker compose -f docker-compose.yml up -d --build --wait
+ - docker compose -f docker-compose.yml exec -T backend pytest --cov=app
+ docker compose -f docker-compose.yml down

  build-client:
  pip install -e client/.
@@ -42,9 +42,9 @@ build-client:
  # the "-" are used to launch the next command even if a command fail
  test-client: build-client
  poetry export -f requirements.txt --without-hashes --output requirements.txt
- docker compose -f docker-compose.dev.yml up -d --build --wait
+ docker compose -f docker-compose.yml up -d --build --wait
  - cd client && pytest --cov=pyroclient tests/ && cd ..
- docker compose -f docker-compose.dev.yml down
+ docker compose -f docker-compose.yml down

  # Check that docs can build for client
  docs-client:
@@ -53,6 +53,6 @@ docs-client:

  e2e:
  poetry export -f requirements.txt --without-hashes --output requirements.txt
- docker compose -f docker-compose.dev.yml up -d --build --wait
+ docker compose -f docker-compose.yml up -d --build --wait
  - python scripts/test_e2e.py
- docker compose -f docker-compose.dev.yml down
+ docker compose -f docker-compose.yml down
66 changes: 0 additions & 66 deletions docker-compose.dev.yml

This file was deleted.

41 changes: 14 additions & 27 deletions docker-compose.yml
@@ -5,13 +5,11 @@ services:
  expose:
  - 5432
  environment:
- - POSTGRES_USER=${POSTGRES_USER}
- - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- - POSTGRES_DB=${POSTGRES_DB}
- volumes:
- - postgres_data:/var/lib/postgresql/data/
+ - POSTGRES_USER=dummy_pg_user
+ - POSTGRES_PASSWORD=dummy_pg_pwd
+ - POSTGRES_DB=dummy_pg_db
  healthcheck:
- test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}'"]
+ test: ["CMD-SHELL", "sh -c 'pg_isready -U dummy_pg_user -d dummy_pg_db'"]
  interval: 10s
  timeout: 3s
  retries: 3
@@ -25,19 +25,15 @@ services:
  environment:
  - EDGE_PORT=4566
  - SERVICES=s3
- - DATA_DIR=/tmp/localstack/data
- - AWS_DEFAULT_REGION=${S3_REGION:-us-east-1}
  volumes:
  - ./scripts/localstack:/etc/localstack/init/ready.d
- - localstack_data:/tmp/localstack
  healthcheck:
  test: ["CMD-SHELL", "awslocal --endpoint-url=http://localhost:4566 s3 ls s3://admin"]
  interval: 10s
  timeout: 5s
  retries: 10

  backend:
- image: pyronear/alert-api:latest
  build:
  context: .
  dockerfile: ./src/Dockerfile
@@ -49,31 +43,24 @@
  ports:
  - "5050:5050"
  environment:
- - POSTGRES_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/${POSTGRES_DB}
- - SUPERADMIN_LOGIN=${SUPERADMIN_LOGIN}
- - SUPERADMIN_PWD=${SUPERADMIN_PWD}
- - SUPERADMIN_ORG=${SUPERADMIN_ORG}
+ - POSTGRES_URL=postgresql+asyncpg://dummy_pg_user:dummy_pg_pwd@db/dummy_pg_db
+ - SUPERADMIN_LOGIN=superadmin_login
+ - SUPERADMIN_PWD=superadmin_pwd
+ - SUPERADMIN_ORG=admin
  - JWT_SECRET=${JWT_SECRET}
- - SUPPORT_EMAIL=${SUPPORT_EMAIL}
  - DEBUG=true
  - PROMETHEUS_ENABLED=true
  - SQLALCHEMY_SILENCE_UBER_WARNING=1
- - S3_ENDPOINT_URL=${S3_ENDPOINT_URL:-http://localstack:4566}
- - S3_ACCESS_KEY=${S3_ACCESS_KEY:-na}
- - S3_SECRET_KEY=${S3_SECRET_KEY:-na}
- - S3_REGION=${S3_REGION:-us-east-1}
- - S3_PROXY_URL=${S3_PROXY_URL}
- - SERVER_NAME=${SERVER_NAME}
+ - S3_ENDPOINT_URL=http://localstack:4566
+ - S3_ACCESS_KEY=fake
+ - S3_SECRET_KEY=fake
+ - S3_REGION=us-east-1
  volumes:
  - ./src/:/app/
- command: "sh -c 'alembic upgrade head && python app/db.py && uvicorn app.main:app --reload --host 0.0.0.0 --port 5050 --proxy-headers'"
+ command: "sh -c 'python app/db.py && uvicorn app.main:app --reload --host 0.0.0.0 --port 5050 --proxy-headers'"
  restart: always
  healthcheck:
  test: ["CMD-SHELL", "curl http://localhost:5050/status"]
  interval: 10s
  timeout: 3s
- retries: 3
-
- volumes:
- postgres_data:
- localstack_data:
+ retries: 6
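
With docker-compose.dev.yml deleted, this single docker-compose.yml is what CI and the Makefile now start, and its backend healthcheck (`curl http://localhost:5050/status`) is what `--wait` relies on. The sketch below is a minimal readiness probe against that same endpoint; running it from the host and using the `requests` package are assumptions, not part of this PR.

```python
# Minimal readiness probe for the merged compose stack; the URL comes from the
# backend healthcheck above (curl http://localhost:5050/status). Assumes the
# `requests` package is available on the host, which is not part of this PR.
import time

import requests


def wait_for_backend(url: str = "http://localhost:5050/status", timeout: float = 60.0) -> None:
    """Poll the status endpoint until it answers or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        try:
            requests.get(url, timeout=3).raise_for_status()
            print(f"Backend is up at {url}")
            return
        except requests.RequestException:
            if time.monotonic() > deadline:
                raise TimeoutError(f"Backend did not answer at {url} within {timeout:.0f}s")
            time.sleep(2)


if __name__ == "__main__":
    wait_for_backend()
```

Since `--wait` already blocks until the compose healthchecks pass, a probe like this is mainly useful when starting the stack without that flag.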
2 changes: 1 addition & 1 deletion src/app/api/api_v1/endpoints/detections.py
@@ -60,7 +60,7 @@ async def create_detection(
  min_length=2,
  max_length=settings.MAX_BBOX_STR_LENGTH,
  ),
- azimuth: float = Form(..., gt=0, lt=360, description="angle between north and direction in degrees"),
+ azimuth: float = Form(..., ge=0, lt=360, description="angle between north and direction in degrees"),
  file: UploadFile = File(..., alias="file"),
  detections: DetectionCRUD = Depends(get_detection_crud),
  webhooks: WebhookCRUD = Depends(get_webhook_crud),
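
This is the functional heart of the PR: `gt=0` rejected a detection whose camera azimuth is exactly 0° (due north), while `ge=0` accepts it and keeps the exclusive upper bound at 360°. The toy endpoint below mirrors that `Form(..., ge=0, lt=360)` declaration to show the boundary behaviour; it is a self-contained sketch, not the project's real router (no authentication, bboxes or file upload).

```python
# Self-contained sketch of the relaxed lower bound; NOT the project's real router,
# just a toy endpoint mirroring the Form(..., ge=0, lt=360) declaration above.
# Form parsing requires the python-multipart package.
from fastapi import FastAPI, Form
from fastapi.testclient import TestClient

app = FastAPI()


@app.post("/detections")
def create_detection(
    azimuth: float = Form(..., ge=0, lt=360, description="angle between north and direction in degrees"),
):
    return {"azimuth": azimuth}


client = TestClient(app)

# azimuth=0 (camera pointing due north) is now accepted thanks to ge=0 ...
assert client.post("/detections", data={"azimuth": 0}).status_code == 200
# ... while the upper bound is unchanged: 360 still fails validation with a 422.
assert client.post("/detections", data={"azimuth": 360}).status_code == 422
```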
2 changes: 1 addition & 1 deletion src/app/models.py
@@ -57,7 +57,7 @@ class Detection(SQLModel, table=True):
  __tablename__ = "detections"
  id: int = Field(None, primary_key=True)
  camera_id: int = Field(..., foreign_key="cameras.id", nullable=False)
- azimuth: float = Field(..., gt=0, lt=360)
+ azimuth: float = Field(..., ge=0, lt=360)
  bucket_key: str
  bboxes: str = Field(..., min_length=2, max_length=settings.MAX_BBOX_STR_LENGTH, nullable=False)
  is_wildfire: Union[bool, None] = None
2 changes: 1 addition & 1 deletion src/app/schemas/detections.py
@@ -20,7 +20,7 @@ class DetectionLabel(BaseModel):
  class Azimuth(BaseModel):
  azimuth: float = Field(
  ...,
- gt=0,
+ ge=0,
  lt=360,
  description="angle between north and direction in degrees",
  json_schema_extra={"examples": [110]},
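
The same relaxation is applied to the `Azimuth` schema. A standalone copy of the field, assuming Pydantic v2 (suggested by the `json_schema_extra` keyword above), behaves as follows:

```python
# Standalone copy of the Azimuth schema from src/app/schemas/detections.py,
# used here only to show the effect of ge=0 (Pydantic v2 assumed).
from pydantic import BaseModel, Field, ValidationError


class Azimuth(BaseModel):
    azimuth: float = Field(..., ge=0, lt=360, description="angle between north and direction in degrees")


print(Azimuth(azimuth=0))      # now valid: azimuth=0.0
print(Azimuth(azimuth=359.9))  # still valid

for bad_value in (-0.1, 360):
    try:
        Azimuth(azimuth=bad_value)
    except ValidationError as err:
        # -0.1 trips ge=0, 360 trips lt=360
        print(bad_value, "->", err.errors()[0]["type"])
```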
18 changes: 9 additions & 9 deletions src/migrations/versions/2024_05_30_1200-f84a0ed81bdc_init.py
@@ -22,7 +22,7 @@
  def upgrade() -> None:
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_table(
- "camera",
+ "cameras",
  sa.Column("id", sa.Integer(), nullable=False),
  sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
  sa.Column("angle_of_view", sa.Float(), nullable=False),
@@ -36,17 +36,17 @@ def upgrade() -> None:
  sa.UniqueConstraint("name"),
  )
  op.create_table(
- "user",
+ "users",
  sa.Column("id", sa.Integer(), nullable=False),
  sa.Column("role", sa.Enum("ADMIN", "AGENT", "USER", name="userrole"), nullable=False),
  sa.Column("login", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
  sa.Column("hashed_password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
  sa.Column("created_at", sa.DateTime(), nullable=False),
  sa.PrimaryKeyConstraint("id"),
  )
- op.create_index(op.f("ix_user_login"), "user", ["login"], unique=True)
+ op.create_index(op.f("ix_user_login"), "users", ["login"], unique=True)
  op.create_table(
- "detection",
+ "detections",
  sa.Column("id", sa.Integer(), nullable=False),
  sa.Column("camera_id", sa.Integer(), nullable=False),
  sa.Column("azimuth", sa.Float(), nullable=False),
@@ -56,7 +56,7 @@ def upgrade() -> None:
  sa.Column("updated_at", sa.DateTime(), nullable=False),
  sa.ForeignKeyConstraint(
  ["camera_id"],
- ["camera.id"],
+ ["cameras.id"],
  ),
  sa.PrimaryKeyConstraint("id"),
  )
@@ -65,8 +65,8 @@ def upgrade() -> None:

  def downgrade() -> None:
  # ### commands auto generated by Alembic - please adjust! ###
- op.drop_table("detection")
- op.drop_index(op.f("ix_user_login"), table_name="user")
- op.drop_table("user")
- op.drop_table("camera")
+ op.drop_table("detections")
+ op.drop_index(op.f("ix_user_login"), table_name="users")
+ op.drop_table("users")
+ op.drop_table("cameras")
  # ### end Alembic commands ###
@@ -21,24 +21,24 @@

  def upgrade() -> None:
  op.create_table(
- "organization",
+ "organizations",
  sa.Column("id", sa.Integer(), nullable=False),
  sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
  sa.PrimaryKeyConstraint("id"),
  sa.UniqueConstraint("name"),
  )

  # Add the 'organization_id' column to the 'camera' and 'user' tables and create foreign key constraints
- op.add_column("camera", sa.Column("organization_id", sa.Integer(), nullable=True))
- op.create_foreign_key("fk_camera_orga", "camera", "organization", ["organization_id"], ["id"])
- op.add_column("user", sa.Column("organization_id", sa.Integer(), nullable=True))
- op.create_foreign_key("fk_user_orga", "camera", "organization", ["organization_id"], ["id"])
+ op.add_column("cameras", sa.Column("organization_id", sa.Integer(), nullable=True))
+ op.create_foreign_key("fk_camera_orga", "cameras", "organizations", ["organization_id"], ["id"])
+ op.add_column("users", sa.Column("organization_id", sa.Integer(), nullable=True))
+ op.create_foreign_key("fk_user_orga", "cameras", "organizations", ["organization_id"], ["id"])


  def downgrade() -> None:
  # Remove the foreign key constraint and the 'organization_id' column from the 'camera' table
- op.drop_constraint("fk_camera_orga", "camera", type_="foreignkey")
- op.drop_constraint("fk_user_orga", "user", type_="foreignkey")
- op.drop_column("camera", "organization_id")
- op.drop_column("user", "organization_idation_id")
- op.drop_table("organization")
+ op.drop_constraint("fk_camera_orga", "cameras", type_="foreignkey")
+ op.drop_constraint("fk_user_orga", "users", type_="foreignkey")
+ op.drop_column("cameras", "organization_id")
+ op.drop_column("users", "organization_idation_id")
+ op.drop_table("organizations")