From ac25e7b52f052b3e084633342b81333939e5ddbf Mon Sep 17 00:00:00 2001 From: Matthew Jadud Date: Fri, 20 Sep 2024 13:04:26 -0400 Subject: [PATCH 1/4] Fix submission race condition (#4292) * Working on a race hazard fix This does two things: 1. Makes the dissemination fully atomic (both the data dissemination and state machine update) 2. Removes the "SUBMITTED" status, which was never user visible, and seems to play no role (other than to be confusing). * Adding a compose file that supports two FAC instances * Includes better comments for the race hazard Makes it easier to communicate the exploration. I believe this code represents how we should structure the submission to protect against a race from multiple button presses/multiple FAC instances at time of submission. * Ready for testing, linting. This cleans up the comments and code, along with a README to support the race hazard debugging. We may yet discover more issues in this part of the code later, so the documentation is thorough for the sake of future interpretation of the thinking involved. * Linting. * Forgot to add the README. This supports the race condition debugging. --- backend/audit/models/models.py | 10 +- ...README-fac-views-race-hazard-postmortem.md | 151 +++++++++++++++ backend/audit/views/views.py | 57 ++++-- backend/docker-compose-dual-fac.yml | 182 ++++++++++++++++++ 4 files changed, 375 insertions(+), 25 deletions(-) create mode 100644 backend/audit/views/README-fac-views-race-hazard-postmortem.md create mode 100644 backend/docker-compose-dual-fac.yml diff --git a/backend/audit/models/models.py b/backend/audit/models/models.py index e74829893..0fa0b6d0b 100644 --- a/backend/audit/models/models.py +++ b/backend/audit/models/models.py @@ -573,7 +573,6 @@ def transition_to_submitted(self): The permission checks verifying that the user attempting to do this has the appropriate privileges will be done at the view level. 
""" - self.transition_name.append(SingleAuditChecklist.STATUS.SUBMITTED) self.transition_date.append(datetime.now(timezone.utc)) @@ -583,7 +582,6 @@ def transition_to_submitted(self): target=STATUS.DISSEMINATED, ) def transition_to_disseminated(self): - logger.info("Transitioning to DISSEMINATED") self.transition_name.append(SingleAuditChecklist.STATUS.DISSEMINATED) self.transition_date.append(datetime.now(timezone.utc)) @@ -595,7 +593,7 @@ def transition_to_disseminated(self): STATUS.AUDITEE_CERTIFIED, STATUS.CERTIFIED, ], - target=STATUS.SUBMITTED, + target=STATUS.AUDITEE_CERTIFIED, ) def transition_to_in_progress(self): """ @@ -611,7 +609,7 @@ def transition_to_in_progress(self): the model level, and will again leave it up to the views to track that changes have been made at that point. """ - self.transition_name.append(SingleAuditChecklist.STATUS.SUBMITTED) + self.transition_name.append(SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED) self.transition_date.append(datetime.now(timezone.utc)) @property @@ -619,7 +617,6 @@ def is_auditee_certified(self): return self.submission_status in [ SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, SingleAuditChecklist.STATUS.CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, ] @property @@ -628,12 +625,11 @@ def is_auditor_certified(self): SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, SingleAuditChecklist.STATUS.CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, ] @property def is_submitted(self): - return self.submission_status in [SingleAuditChecklist.STATUS.SUBMITTED] + return self.submission_status in [SingleAuditChecklist.STATUS.DISSEMINATED] def get_transition_date(self, status): index = self.transition_name.index(status) diff --git a/backend/audit/views/README-fac-views-race-hazard-postmortem.md b/backend/audit/views/README-fac-views-race-hazard-postmortem.md new file mode 100644 index 000000000..046f51342 --- /dev/null +++ 
b/backend/audit/views/README-fac-views-race-hazard-postmortem.md @@ -0,0 +1,151 @@ +# **Submission race hazard, round 2** + +There are multiple ways for a “race” to happen on the web. By this, I mean a “race condition,” which is where two processes (computers, etc.) try and do the same thing at the same time… but only one can “win” the race. + +The FAC is susceptible to race hazards in at least two different ways. + +1. Every time we update our state machine, we are writing to the DB. It is possible for multiple instances of the FAC to update the FSM out-of-order, and as a result, break the state machine. *This document will be updated when/if this concern is addressed. It appears to be very low probability given the current FSM/FAC architecture*. +2. When a user submits, they might hit the "submit" button more than once, launching the submission process multiple times on a single FAC instance, or on more than one instance simultaneously. *This document describes this condition, and the fix we implemented in Sept 2024 for this problem*. + +(There is much more to be said about [race conditions](https://en.wikipedia.org/wiki/Race_condition), or race hazards… for purposes of this document, we’ll just say that they’re not fun. However, if you like programming puzzles, Alan Downey’s *The Little Book of Semaphores* is a marvelous open text, and explores the concept of race hazards in depth through a series of programming exercises and puzzles. [https://greenteapress.com/wp/semaphores/](https://greenteapress.com/wp/semaphores/)) + +### Revision history +* 20240913 initial version + +## Scenario 1: A single FAC application + +The first scenario to consider is when a user (Alice) initiates a submission action more than once. 
+ +```mermaid +sequenceDiagram + Alice->>+FAC: Press "submit" button + FAC->>+DB: A long running process + Alice->>+FAC: Press "submit" button again + FAC->>+DB: A long running process +``` + + +In this situation, Alice presses a button, and a socket is opened (because we are expecting a response). The server then launches a long-running process. When Alice presses the button again, the socket is broken. Because our application has no way of knowing that the connection was broken, it has no way of cleaning up or canceling the long-running process. Therefore, we end up in a situation where the long-running process is now running *twice* and *at the same time*. + +If that long-running process is the validation of a submission and the dissemination of the submission, that means we now have to answer many questions: which process will finish first? Will they both complete... partially? In full? This is why it is called a *race hazard*, because the hazard is that our application or data may end up in an indeterminate state because one process (or the other) in the race completes more or less completely, in an unpredictable order of operations. + +This is one way the FAC is susceptible to races. There is another. + +## Scenario 2: Multiple FAC applications + +```mermaid +sequenceDiagram + Alice->>+FAC1: Press "submit" button + FAC1->>+DB: A long running process + Alice->>+FAC2: Press "submit" button again + FAC2->>+DB: A long running process +``` + +On cloud.gov, we run multiple instances of the FAC. + + When Alice pushes the submit button, she can launch a process running on FAC1. When she pushes the same button a second time, it is possible that the request will be sent to FAC2—a completely different instance of the FAC application. + +In the first scenario, it is (in theory) possible to make sure that a single user does not launch the same process twice. + +In this second scenario, it is *very difficult* to prevent this behavior. 
It would require coordination between FAC1 and FAC2 to make sure that Alice is not asking to “do the same thing twice.” + +The end result is the same, however. If Alice has asked to complete her submission, there are now two copies of the FAC that are racing to disseminate data. And, again, it is possible for the FAC data to end up in an indeterminate state as a result. + +## The challenge: an in-memory state machine + +We use a state machine to determine where we are in the submission process. At the end of a submission process, we go from `AUDITOR_CERTIFIED` to `AUDITEE_CERTIFIED` to `SUBMITTED` to `DISSEMINATED`. The issue at hand lies in the last two states, and the fact that there are two FACs. + +When Alice pushes the `Submit` button, the following things happen in this order: + +1. 🐢 We get the report id, and load the SAC object (a complete submission) corresponding to it from the database. +2. 🐢 We run a full validation. +3. If there are errors, we return an error. +4. We then move the audit to the `SUBMITTED` state. + 1. 🐢 We do this by updating the SAC object and saving it to the DB. +5. We then set up an atomic block. + 1. 🐢 We disseminate the data (meaning, copy it from the internal table to external/public tables). + 2. We check for errors on the dissemination + 1. If no errors arose, we transition to the `DISSEMINATED` state + 1. 🐢 We do this by updating the SAC object and saving it to the DB. + 2. If there were errors, we do nothing. + +There are multiple problems with this. We will assume that there are two FACs (FAC1, FAC2), and consider each as a point where the results of the race could change. 
For example… + +| Timestep | FAC1 | FAC2 | +| :---- | :---- | :---- | +| 1 | *Alice pushes the button* | | +| 2 | 🐢 Get SAC | *Alice presses button second time…* | +| 3 | 🐢 *gets held up on validation…* | 🐢 Get SAC | +| 4 | | Validates quickly | +| 5 | | Save `SUBMITTED` | +| 6 | | Disseminate data | +| 7 | | Save `DISSEMINATED` | +| 8 | | *FAC2 exits cleanly* | +| 9 | Validates quickly | | +| 10 | Save `SUBMITTED` | | +| 11 | 🧨 **fails to disseminate** 🧨 | | +| 12 | *FAC1 crashes out without Alice knowing, because she is now experiencing FAC2* | | + +The end result would be data that was fully disseminated, but in a `SUBMITTED` state. + +*However*, **it will be worse than it appears**. Not only will we end up with a disseminated audit in this case… but we will see no record of it in the SAC object. Why? When FAC1 comes through and updates to the submitted state, it writes its entire *in-memory* state machine out to the SAC object. FAC1 has no idea that FAC2 has proceeded through and updated the SAC object (and the states)... so, it overwrites, in the internal table, any evidence that a submission (and dissemination) took place. We therefore have data disseminated, but the SAC object claims to have only made it to the `SUBMITTED` state, with no dissemination timestamp. + +*This is only one possible outcome of a race*. There are others that leave the database in *other different* misconfigurations. + +## A solution: `atomic` commits + +Because we live with two FACs, we cannot *casually* synchronize the FAC instances. This is because they are executing (for all intents and purposes) on separate computers. Further, they do not know about each other; we would have to develop a whole set of machinery for them to 1\) know about all of the running FAC instances, and 2\) communicate and collaborate to make sure only one of them takes any given action. 
It is entirely likely we would then have to implement code to solve the [dining philosopher's problem](https://en.wikipedia.org/wiki/Dining_philosophers_problem), which is extremely error prone/tedious. + +However, all of the FAC instances use one database. This is to our benefit. (If we ever start writing to *multiple, distinct* databases, this will break again. Badly.) + +We could attempt to use the database to synchronize the state machine between multiple FAC instances, but this is harder to do than it might at first appear. (Or, it might lead to additional places where we have races on the state machine synchronization points.) This does want to be investigated, but it is not the most direct solution to the race at hand. + +Instead, we will make sure that the *entirety* of the submission process happens, or it does not happen at all. Making sure a database operation (or set of operations) all happen (or fail to happen) is known as *atomicity*. + +The critical code is in `backend/audit/views/views.py`, lines [755-815](https://github.com/GSA-TTS/FAC/blob/1cc9170b48b005315f5caebc64b57d0aa770f839/backend/audit/views/views.py#L755) in the `jadudm/fix-submission-race` branch. A simplified version of the code follows. + +```py + with transaction.atomic(): + sac.transition_to_submitted() + 🍓 sac.save(EventType.SUBMITTED) + 🍎 disseminated = sac.disseminate() + if disseminated is None: + sac.transition_to_disseminated() + 🍐sac.save(EventType.DISSEMINATED) + remove_workbook_artifacts(sac) + if disseminated is not None: + # Log/ present error to user + return redirect(reverse("audit:MySubmissions")) + +``` + +The first step (line 755\) is to declare that we want our Django interactions with the database to be atomic. This means that either *all database operations* *succeed* or *all database operations fail* within the block. There are three distinct database writes within the block. + +1. 
The `sac.save()` on the transition to the `SUBMITTED` state ([line 765](https://github.com/GSA-TTS/FAC/blob/1cc9170b48b005315f5caebc64b57d0aa770f839/backend/audit/views/views.py#L765), 🍓 above) +2. The dissemination of the data ([line 790](https://github.com/GSA-TTS/FAC/blob/1cc9170b48b005315f5caebc64b57d0aa770f839/backend/audit/views/views.py#L790), 🍎) +3. The save on the transition to `DISSEMINATED` ([line 801](https://github.com/GSA-TTS/FAC/blob/1cc9170b48b005315f5caebc64b57d0aa770f839/backend/audit/views/views.py#L801), 🍐) + +Now, if we look at the above race… + +| Timestep | FAC1 | FAC2 | +| :---- | :---- | :---- | +| 1 | Get SAC | *Alice presses button second time…* | +| 2 | 🐢*gets held up on validation…* | Get SAC | +| 3 | | Validates quickly | +| 4 | | Save `SUBMITTED` | +| 5 | | Disseminate data | +| 6 | | Save `DISSEMINATED` | +| 7 | | *FAC2 exits cleanly* | +| 8 | Queue save `SUBMITTED` | | +| 9 | Queue dissemination of data | | +| 10 | Queue save `DISSEMINATED` | | +| 11 | *FAC1 database update fails because of a primary key conflict on `dissemination_general`. Therefore, no database operations happen. Alice has no idea.* | | + +While we may want to have FAC1 log an error and exit gracefully, the important thing is that as far as Alice is concerned, her audit was submitted, it made it through for her to check in basic search, and we avoided ending up in an inconsistent state. + +## Conclusion + +What is described is one part of the fix for this problem. The other is to disable the submit button once it is pressed. This README and the PR with the race fix is one of two PRs that address this submission bug. + +There are other ways we could solve this. However, by putting the final submission sequence under one atomic block, we guarantee that a single user cannot 1\) button-mash their way to an error, or 2\) run the FAC in two separate windows and create a race hazard the hard way. 
+ diff --git a/backend/audit/views/views.py b/backend/audit/views/views.py index 7820c8b52..ddb4933da 100644 --- a/backend/audit/views/views.py +++ b/backend/audit/views/views.py @@ -698,7 +698,6 @@ def post(self, request, *args, **kwargs): class SubmissionView(CertifyingAuditeeRequiredMixin, generic.View): def get(self, request, *args, **kwargs): report_id = kwargs["report_id"] - try: sac = SingleAuditChecklist.objects.get(report_id=report_id) @@ -712,8 +711,11 @@ def get(self, request, *args, **kwargs): raise PermissionDenied("You do not have access to this audit.") def post(self, request, *args, **kwargs): + # RACE HAZARD WARNING + # It is possible for a user to enter the submission multiple times, + # from multiple FAC instances. This race hazard is documented in + # backend/audit/views/README-fac-views-race-hazard-postmortem.md report_id = kwargs["report_id"] - try: sac = SingleAuditChecklist.objects.get(report_id=report_id) @@ -727,36 +729,51 @@ def post(self, request, *args, **kwargs): context, ) - sac.transition_to_submitted() - sac.save( - event_user=request.user, event_type=SubmissionEvent.EventType.SUBMITTED - ) + # Only change this value if things work... + disseminated = "DID NOT DISSEMINATE" + # BEGIN ATOMIC BLOCK with transaction.atomic(): - disseminated = sac.disseminate() - - # disseminated is None if there were no errors. - if disseminated is None: - sac.transition_to_disseminated() + sac.transition_to_submitted() sac.save( event_user=request.user, - event_type=SubmissionEvent.EventType.DISSEMINATED, + event_type=SubmissionEvent.EventType.SUBMITTED, ) + disseminated = sac.disseminate() + # `disseminated` is None if there were no errors. + if disseminated is None: + sac.transition_to_disseminated() + sac.save( + event_user=request.user, + event_type=SubmissionEvent.EventType.DISSEMINATED, + ) + # END ATOMIC BLOCK + + # IF THE DISSEMINATION SUCCEEDED + # `disseminated` is None if there were no errors. 
+ if disseminated is None: # Remove workbook artifacts after the report has been disseminated. + # We do this outside of the atomic block. No race between + # two instances of the FAC should be able to get to this point. + # If we do, something will fail. remove_workbook_artifacts(sac) - else: - pass - # FIXME: We should now provide a reasonable error to the user. - logger.info( - "Dissemination errors: %s, report_id: %s", disseminated, report_id - ) + # IF THE DISSEMINATION FAILED + # If disseminated has a value, it is an error + # object returned from `sac.disseminate()` + if disseminated is not None: + logger.info( + "{} is a `not None` value report_id[{}] for `disseminated`".format( + report_id, disseminated + ) + ) return redirect(reverse("audit:MySubmissions")) except SingleAuditChecklist.DoesNotExist: raise PermissionDenied("You do not have access to this audit.") except TransactionManagementError: + # ORIGINAL COMMENT # This is most likely the result of a race condition, where the user hits # the submit button multiple times and the requests get round-robined to # different instances, and the second attempt tries to insert an existing @@ -765,6 +782,10 @@ def post(self, request, *args, **kwargs): # wouldn't be an entry with that report_id to cause the error), and that we # should log this but not report it to the user. # See https://github.com/GSA-TTS/FAC/issues/3347 + # UPDATED 2024-09-13 + # We have not been able to trigger this error in the most recent race + # debugging. However, that does not mean it is impossible. + # Therefore, leaving this exception handler in place. 
logger.info("IntegrityError on disseminating report_id: %s", report_id) if General.objects.get(report_id=sac.report_id): return redirect(reverse("audit:MySubmissions")) diff --git a/backend/docker-compose-dual-fac.yml b/backend/docker-compose-dual-fac.yml new file mode 100644 index 000000000..1bf6e9fdc --- /dev/null +++ b/backend/docker-compose-dual-fac.yml @@ -0,0 +1,182 @@ +version: "3.7" + +services: + #--------------------------------------------- + # Postgres DB + #--------------------------------------------- + db: + image: "postgres:15" + environment: + POSTGRES_HOST_AUTH_METHOD: trust + volumes: + - postgres-data:/var/lib/postgresql/data/ + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -d postgres -U postgres"] + interval: 10s + timeout: 5s + retries: 10 + db2: + image: "postgres:15" + environment: + POSTGRES_HOST_AUTH_METHOD: "trust" + volumes: + - postgres-data2:/var/lib/postgresql/data/ + ports: + - "5431:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -d postgres -U postgres"] + interval: 10s + timeout: 5s + retries: 10 + + #--------------------------------------------- + # Historic data + #--------------------------------------------- + historic-data: + image: ghcr.io/gsa-tts/fac-historic-public-csvs/load-historic-public-data:20230912 + depends_on: + db: + condition: service_healthy + environment: + DATABASE_URL: postgres://postgres@db/postgres + + #--------------------------------------------- + # Django app + #--------------------------------------------- + web: + build: + context: . 
+ dockerfile: "./${DOCKERFILE:-Dockerfile}" + command: /src/run.sh + depends_on: + db: + condition: service_healthy + db2: + condition: service_healthy + minio: + condition: service_started + clamav-rest: + condition: service_started + api: + condition: service_started + environment: + DATABASE_URL: postgres://postgres@db/postgres + POSTGREST_URL: http://api:3000 + DJANGO_DEBUG: true + SAM_API_KEY: ${SAM_API_KEY} + DJANGO_BASE_URL: http://localhost:8000 + DJANGO_SECRET_LOGIN_KEY: ${DJANGO_SECRET_LOGIN_KEY} + LOGIN_CLIENT_ID: ${LOGIN_CLIENT_ID} + ENV: ${ENV} + SECRET_KEY: ${SECRET_KEY} + ALLOWED_HOSTS: 0.0.0.0 127.0.0.1 localhost + AV_SCAN_URL: http://clamav-rest:${CLAMAV_PORT:-9000}/scan + DISABLE_AUTH: ${DISABLE_AUTH:-False} + PGRST_JWT_SECRET: ${PGRST_JWT_SECRET:-32_chars_fallback_secret_testing} + ENABLE_DEBUG_TOOLBAR: false + FAC_INSTANCE_NAME: web1 + env_file: + - ".env" + ports: + - "8000:8000" + volumes: + - .:/src + - /src/node_modules + - /src/staticfiles + #--------------------------------------------- + # Django app 2 + #--------------------------------------------- + web2: + build: + context: . 
+ dockerfile: "./${DOCKERFILE:-Dockerfile}" + command: /src/run.sh + depends_on: + db: + condition: service_healthy + db2: + condition: service_healthy + minio: + condition: service_started + clamav-rest: + condition: service_started + api: + condition: service_started + environment: + DATABASE_URL: postgres://postgres@db/postgres + POSTGREST_URL: http://api:3000 + DJANGO_DEBUG: true + SAM_API_KEY: ${SAM_API_KEY} + DJANGO_BASE_URL: http://localhost:8000 + DJANGO_SECRET_LOGIN_KEY: ${DJANGO_SECRET_LOGIN_KEY} + LOGIN_CLIENT_ID: ${LOGIN_CLIENT_ID} + ENV: ${ENV} + SECRET_KEY: ${SECRET_KEY} + ALLOWED_HOSTS: 0.0.0.0 127.0.0.1 localhost + AV_SCAN_URL: http://clamav-rest:${CLAMAV_PORT:-9000}/scan + DISABLE_AUTH: ${DISABLE_AUTH:-False} + PGRST_JWT_SECRET: ${PGRST_JWT_SECRET:-32_chars_fallback_secret_testing} + ENABLE_DEBUG_TOOLBAR: false + FAC_INSTANCE_NAME: web2 + env_file: + - ".env" + ports: + - "8001:8000" + volumes: + - .:/src + - /src/node_modules + - /src/staticfiles + + #--------------------------------------------- + # ClamAV virus scanner + #--------------------------------------------- + clamav-rest: + image: ghcr.io/gsa-tts/fac/clamav:latest + environment: + MAX_FILE_SIZE: 30M + SIGNATURE_CHECKS: 1 + PORT: ${CLAMAV_PORT:-9000} + ports: + - ${CLAMAV_PORT:-9000}:${CLAMAV_PORT:-9000} + + #--------------------------------------------- + # Minio (S3 clone) + #--------------------------------------------- + minio: + container_name: "minio" + image: minio/minio + command: server /tmp/minio --console-address ":9002" + ports: + - "9001:9000" + - "9002:9002" + volumes: + - "minio-vol:/tmp/minio" + + #--------------------------------------------- + # PostgREST API provider + #--------------------------------------------- + api: + image: ghcr.io/gsa-tts/fac/postgrest:latest + ports: + - "3000:3000" + expose: + - "3000" + environment: + PGRST_DB_URI: postgres://postgres@db:5432/postgres + PGRST2_DB_URI: postgres://postgres@db:5431/postgres + PGRST_OPENAPI_SERVER_PROXY_URI: 
http://127.0.0.1:3000 + PGRST_DB_ANON_ROLE: anon + # See https://postgrest.org/en/stable/references/api/schemas.html#multiple-schemas for multiple schemas + PGRST_DB_SCHEMAS: "api_v1_0_3, api_v1_1_0, admin_api_v1_1_0" + PGRST_JWT_SECRET: ${PGRST_JWT_SECRET:-32_chars_fallback_secret_testing} # Fallback value for testing environments + # Enable this to inspect the DB plans for queries via EXPLAIN + PGRST_DB_PLAN_ENABLED: ${PGRST_DB_PLAN_ENABLED:-true} + depends_on: + db: + condition: service_healthy +volumes: + postgres-data: + postgres-data2: + minio-vol: From f0df5e87a97a7ce523bacd38d5ddc7bd0dbdc80f Mon Sep 17 00:00:00 2001 From: Alex Steel <130377221+asteel-gsa@users.noreply.github.com> Date: Fri, 20 Sep 2024 13:31:27 -0400 Subject: [PATCH 2/4] Remove sandbox from configuration (#4303) It is causing problems with the local files, need to deep dive on the bootstrap-env --- terraform/meta/config.tf | 1 - 1 file changed, 1 deletion(-) diff --git a/terraform/meta/config.tf b/terraform/meta/config.tf index dfc212ee7..ee13b639a 100644 --- a/terraform/meta/config.tf +++ b/terraform/meta/config.tf @@ -1,7 +1,6 @@ locals { org_name = "gsa-tts-oros-fac" spaces = { - "sandbox" = {}, "dev" = {}, "preview" = {}, "staging" = { From 076d208fe879cae8a5acef5d9fd82e3f59b2478c Mon Sep 17 00:00:00 2001 From: Alex Steel <130377221+asteel-gsa@users.noreply.github.com> Date: Fri, 20 Sep 2024 14:23:00 -0400 Subject: [PATCH 3/4] Recompile dev-requirements.txt (#4297) --- backend/dev-requirements.txt | 52 ++++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/backend/dev-requirements.txt b/backend/dev-requirements.txt index 8f596b3af..efd6fc337 100644 --- a/backend/dev-requirements.txt +++ b/backend/dev-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes 
--output-file=dev-requirements.txt ./requirements/dev-requirements.in @@ -8,7 +8,7 @@ asgiref==3.8.1 \ --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \ --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # django bandit==1.7.6 \ --hash=sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff \ @@ -52,12 +52,7 @@ click==8.1.7 \ colorama==0.4.6 \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via - # bandit - # build - # click - # djlint - # tqdm + # via djlint coverage==7.4.0 \ --hash=sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca \ --hash=sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471 \ @@ -119,7 +114,7 @@ django==5.1 \ --hash=sha256:848a5980e8efb76eea70872fb0e4bc5e371619c70fffbe48e3e1b50b2c09455d \ --hash=sha256:d3b811bf5371a26def053d7ee42a9df1267ef7622323fe70a601936725aa4557 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # django-debug-toolbar # model-bakery django-debug-toolbar==4.2.0 \ @@ -140,7 +135,7 @@ faker==24.9.0 \ --hash=sha256:73b1e7967b0ceeac42fc99a8c973bb49e4499cc4044d20d17ab661d5cb7eda1d \ --hash=sha256:97c7874665e8eb7b517f97bf3b59f03bf3f07513fe2c159e98b6b9ea6b9f2b3d # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # -r ./requirements/dev-requirements.in flake8==7.0.0 \ --hash=sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132 \ @@ -265,13 +260,13 @@ numpy==1.26.4 \ --hash=sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3 \ --hash=sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # pandas 
packaging==24.0 \ --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # black # build pandas==2.2.2 \ @@ -305,7 +300,7 @@ pandas==2.2.2 \ --hash=sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce \ --hash=sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # -r ./requirements/dev-requirements.in pathspec==0.12.1 \ --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ @@ -347,14 +342,14 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # faker # pandas pytz==2024.1 \ --hash=sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812 \ --hash=sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # pandas pyyaml==6.0.1 \ --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ @@ -409,7 +404,7 @@ pyyaml==6.0.1 \ --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \ --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # bandit # djlint regex==2023.12.25 \ @@ -515,7 +510,7 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # -c ./requirements\../requirements.txt + # -c 
./requirements/../requirements.txt # cssbeautifier # jsbeautifier # python-dateutil @@ -527,7 +522,7 @@ sqlparse==0.5.0 \ --hash=sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93 \ --hash=sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # django # django-debug-toolbar stevedore==5.1.0 \ @@ -542,6 +537,16 @@ toml==0.10.2 \ --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f # via -r ./requirements/dev-requirements.in +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via + # black + # build + # djlint + # mypy + # pip-tools + # pyproject-hooks tqdm==4.66.1 \ --hash=sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386 \ --hash=sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7 @@ -562,20 +567,21 @@ typing-extensions==4.11.0 \ --hash=sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0 \ --hash=sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt + # asgiref + # black # mypy tzdata==2024.1 \ --hash=sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd \ --hash=sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252 # via - # -c ./requirements\../requirements.txt - # django + # -c ./requirements/../requirements.txt # pandas urllib3==2.2.2 \ --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # types-requests 
wheel==0.42.0 \ --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ @@ -598,5 +604,5 @@ setuptools==71.0.2 \ --hash=sha256:ca359bea0cd5c8ce267d7463239107e87f312f2e2a11b6ca6357565d82b6c0d7 \ --hash=sha256:f6640114f96be808024fbd1f721161215543796d3a68da4524349de700604ce8 # via - # -c ./requirements\../requirements.txt + # -c ./requirements/../requirements.txt # pip-tools From fd289edd323f91ba004a796d06e5ca76def687c2 Mon Sep 17 00:00:00 2001 From: Alex Steel <130377221+asteel-gsa@users.noreply.github.com> Date: Fri, 20 Sep 2024 19:06:14 -0400 Subject: [PATCH 4/4] Update Dev List (#4305) --- terraform/meta/config.tf | 1 - 1 file changed, 1 deletion(-) diff --git a/terraform/meta/config.tf b/terraform/meta/config.tf index ee13b639a..ec4b28211 100644 --- a/terraform/meta/config.tf +++ b/terraform/meta/config.tf @@ -27,7 +27,6 @@ locals { "alexander.steel@gsa.gov", "sudha.kumar@gsa.gov", "philip.dominguez@gsa.gov", - "ranye.mclendon@gsa.gov", "robert.novak@gsa.gov", ]