Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(stopTimes): Fixed stop_times update_or_create_chunk #43

Open
wants to merge 2 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 11 additions & 13 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,25 +1,23 @@
# Windows
ifeq ($(OS),Window_NT)
TEST = docker compose -p gtfs-editor-test -f docker\docker-compose.yml --profile test
COMPOSE_DEV = docker compose -p gtfseditor-backend-dev -f docker\docker-compose.yml -f docker\docker-compose.dev.yml --profile dev
COMPOSE_PROD = docker compose -p emov-backend-prod -f docker\docker-compose.yml --profile prod
MANAGE=python backend\manage.py
DOCKER_COMPOSE = docker compose -p gtfseditor-backend-dev
# Linux
else
TEST = docker compose -p gtfs-editor-test -f docker/docker-compose.yml --profile test
COMPOSE_DEV = docker compose -p gtfseditor-backend-dev -f docker/docker-compose.yml -f docker/docker-compose.dev.yml --profile dev
COMPOSE_PROD = docker compose -p gtfseditor-backend-prod -f docker/docker-compose.yml --profile prod
COMPOSE_CERT = docker compose -p gtfseditor-backend-prod -f docker/docker-compose.yml -f docker/docker-compose.certbot.yml --profile certbot
MANAGE=python backend/manage.py
DOCKER_COMPOSE = docker compose -p gtfseditor-backend-dev
endif
PIP=pip install -r requirements-prod.txt

COMPOSE_PROD = $(DOCKER_COMPOSE) -f docker\docker-compose.yml --profile prod
COMPOSE_DEV = $(DOCKER_COMPOSE) -f docker\docker-compose.yml -f docker\docker-compose.dev.yml --profile dev
COMPOSE_TEST = $(DOCKER_COMPOSE) -f docker\docker-compose.yml -f docker\docker-compose.dev.yml --profile test
MANAGE = python manage.py
PIP = pip install -r requirements-prod.txt


test:
$(TEST) build
$(TEST) up --abort-on-container-exit
$(COMPOSE_TEST) build
$(COMPOSE_TEST) up --abort-on-container-exit
test_down:
$(TEST) down
$(COMPOSE_TEST) down
install_local:
$(PIP) -r requirements-dev.txt
config_env:
Expand Down
29 changes: 27 additions & 2 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -88,12 +88,20 @@ services:
networks:
- nginx_network
cache:
<<: *common
profiles:
- default
- prod
- dev
- test
image: redis:latest
networks:
- cache_network
db:
<<: *common
profiles:
- default
- prod
- dev
- test
image: postgres:16.1-alpine
volumes:
- postgres_gtfseditor_data_v16:/var/lib/postgresql/data/
Expand All @@ -102,6 +110,23 @@ services:
networks:
- cache_network
- database_network
test:
<<: *django-settings
command: test
profiles:
- test
volumes:
- static_volume:/app/static
- media_volume:/app/media
env_file:
- ./docker_env
depends_on:
- db
- cache
networks:
- database_network
- cache_network

networks:
nginx_network:
driver: bridge
Expand Down
6 changes: 3 additions & 3 deletions docker/nginx/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ RUN touch '.env.production' && \
echo 'VUE_APP_MAPBOX_TOKEN=pk.eyJ1Ijoiam9yb21lcm8iLCJhIjoiY2toa2t2NnBjMDJkYTJzcXQyZThhZTNyNSJ9.Wx6qT7xWJ-hhKHyLMNbnAQ' >> '.env.production'
RUN npm run build

FROM nginx:1.17 as prod
FROM nginx:1.17 AS prod

COPY --from=build-stage /app/dist /usr/share/nginx/html
COPY ./docker/nginx/config/local.conf /etc/nginx/conf.d/local.conf
Expand All @@ -28,7 +28,7 @@ RUN rm /etc/nginx/conf.d/default.conf

ENTRYPOINT ["/bin/bash", "entrypoint.sh"]

FROM nginx:1.17 as dev
FROM nginx:1.17 AS dev

COPY --from=build-stage /app/dist /usr/share/nginx/html
COPY ./docker/nginx/config/dev.conf /etc/nginx/conf.d/local.conf
Expand All @@ -39,7 +39,7 @@ RUN rm /etc/nginx/conf.d/default.conf

ENTRYPOINT ["/bin/bash", "entrypoint.sh"]

FROM nginx:1.17 as certbot
FROM nginx:1.17 AS certbot

COPY ./docker/nginx/config/certbot.conf /etc/nginx/conf.d/local.conf
COPY ./docker/nginx/nginx_entrypoint.sh ./entrypoint.sh
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Generated by Django 5.1.1 on 2025-01-02 14:50
#
# Widens Project.loading_gtfs_error_message from max_length=100 to 256 so
# longer GTFS-upload error details (e.g. missing foreign-key reports) fit
# without truncation. Matches the field definition in rest_api/models.py.

from django.db import migrations, models


class Migration(migrations.Migration):

    # Must apply after the previous rest_api migration in the chain.
    dependencies = [
        ('rest_api', '0005_alter_route_route_desc'),
    ]

    operations = [
        # Schema-only change: ALTER COLUMN to varchar(256); existing rows
        # are unaffected (NULL default preserved).
        migrations.AlterField(
            model_name='project',
            name='loading_gtfs_error_message',
            field=models.CharField(default=None, max_length=256, null=True),
        ),
    ]
2 changes: 1 addition & 1 deletion rest_api/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class Project(models.Model):
creation_status = models.CharField(max_length=20, default=CREATION_STATUS_EMPTY, choices=creation_status_choices,
null=False)
loading_gtfs_job_id = models.UUIDField(null=True)
loading_gtfs_error_message = models.CharField(max_length=100, default=None, null=True)
loading_gtfs_error_message = models.CharField(max_length=256, default=None, null=True)
last_modification = models.DateTimeField(default=timezone.now, null=False)
gtfs_file = models.FileField(upload_to=gtfs_update_to, null=True)
gtfs_file_updated_at = models.DateTimeField(null=True)
Expand Down
46 changes: 36 additions & 10 deletions rest_api/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,8 +264,7 @@ def _perform_upload(self, file, project_pk):

# This class bundles up the CSVUploadMixin and CSVDownloadMixin,
# adding a few methods that are common to many models
class CSVHandlerMixin(CSVUploadMixin,
CSVDownloadMixin):
class CSVHandlerMixin(CSVUploadMixin, CSVDownloadMixin):

def get_queryset(self):
return self.get_qs(self.kwargs)
Expand Down Expand Up @@ -927,8 +926,7 @@ def get_qs(kwargs):
.prefetch_related(Prefetch('stop_times', queryset=StopTime.objects.order_by('stop_sequence')))


class StopTimeViewSet(CSVHandlerMixin,
MyModelViewSet):
class StopTimeViewSet(CSVHandlerMixin, MyModelViewSet):
permission_classes = [IsAuthenticatedStopTimesAndFrequency]
serializer_class = StopTimeSerializer
CHUNK_SIZE = 100000
Expand Down Expand Up @@ -976,16 +974,37 @@ def update_or_create_chunk(self, chunk, project_pk, id_set, meta=None):
trip_ids = set(map(lambda entry: entry['trip_id'], chunk))
stop_ids = set(map(lambda entry: entry['stop_id'], chunk))
trip_id_map = dict()
stop_id_map = dict()

for row in Trip.objects.filter_by_project(project_pk).filter(trip_id__in=trip_ids).values_list('trip_id', 'id'):
trip_id_map[row[0]] = row[1]
stop_id_map = dict()
for row in Stop.objects.filter_by_project(project_pk).filter(stop_id__in=stop_ids).values_list('stop_id', 'id'):
stop_id_map[row[0]] = row[1]

sts = list()
trip_id_set = set()
stop_id_set = set()
for row in chunk:
row['trip_id'] = trip_id_map[row['trip_id']]
row['stop_id'] = stop_id_map[row['stop_id']]
trip_id = row['trip_id']
stop_id = row['stop_id']
try:
row['trip_id'] = trip_id_map[trip_id]
except KeyError:
trip_id_set.add(trip_id)
try:
row['stop_id'] = stop_id_map[stop_id]
except KeyError:
stop_id_set.add(stop_id)
continue
sts.append(StopTime(**row))
if not trip_id_set or not stop_id_set:
detail = dict(
detail="Foreign keys trip_id or stop_id missing",
trip_keys=list(trip_id_set),
stop_keys=list(stop_id_set),
)
raise Exception(detail)

t1 = time.time()
StopTime.objects.bulk_create(sts, batch_size=1000)
t2 = time.time()
Expand Down Expand Up @@ -1013,9 +1032,16 @@ def _perform_upload(self, file, project_pk):
chunk = list()

for entry in reader:
for k in entry:
if entry[k] == '':
entry[k] = None
missing_keys = set()
for k in self.Meta.csv_header:
try:
if entry[k] == '':
entry[k] = None
except KeyError:
missing_keys.add(k)
if missing_keys:
raise Exception(dict(detail="stop_times.txt is missing some header columns", cols=list(missing_keys)))

chunk.append(entry)
if len(chunk) >= self.CHUNK_SIZE:
log("Chunk Number", chunk_num)
Expand Down