From 7854487115f813feed3ad28fb77b801b62773950 Mon Sep 17 00:00:00 2001
From: Phil Dominguez <142051477+phildominguez-gsa@users.noreply.github.com>
Date: Thu, 7 Dec 2023 14:56:49 -0500
Subject: [PATCH 1/3] 2675: Improve census migration exception handling (#2944)

* Improved exception handling
* Using exception chaining
* Lint
* Removing KeyboardInterrupt handling
---
 .../historic_data_loader.py        |  7 +-
 .../workbooklib/end_to_end_core.py | 68 ++++++++++++-------
 2 files changed, 47 insertions(+), 28 deletions(-)

diff --git a/backend/census_historical_migration/historic_data_loader.py b/backend/census_historical_migration/historic_data_loader.py
index 7e69406b27..3a9497463d 100644
--- a/backend/census_historical_migration/historic_data_loader.py
+++ b/backend/census_historical_migration/historic_data_loader.py
@@ -30,11 +30,8 @@ def load_historic_data_for_year(audit_year, page_size, pages):
             dbkey = submission.DBKEY
             result = {"success": [], "errors": []}
 
-            try:
-                # Migrate a single submission
-                run_end_to_end(user, dbkey, audit_year, result)
-            except Exception as exc:
-                result["errors"].append(f"{exc}")
+            # Migrate a single submission
+            run_end_to_end(user, dbkey, audit_year, result)
 
             result_log[(audit_year, dbkey)] = result
             total_count += 1
diff --git a/backend/census_historical_migration/workbooklib/end_to_end_core.py b/backend/census_historical_migration/workbooklib/end_to_end_core.py
index 1c88ee5488..e8a8ba8f3d 100644
--- a/backend/census_historical_migration/workbooklib/end_to_end_core.py
+++ b/backend/census_historical_migration/workbooklib/end_to_end_core.py
@@ -1,25 +1,14 @@
-from ..exception_utils import DataMigrationError
-import argparse
-import logging
-import sys
-import math
 from config import settings
-import os
-import jwt
-import requests
-from datetime import datetime
-import traceback
-
+from ..exception_utils import DataMigrationError
 from ..workbooklib.workbook_builder_loader import (
     workbook_builder_loader,
 )
-from ..sac_general_lib.sac_creator import setup_sac
 from ..workbooklib.workbook_section_handlers import (
     sections_to_handlers,
 )
 from ..workbooklib.post_upload_utils import _post_upload_pdf
+from ..sac_general_lib.sac_creator import setup_sac
 from audit.intake_to_dissemination import IntakeToDissemination
-
 from dissemination.models import (
     AdditionalEin,
     AdditionalUei,
@@ -33,6 +22,19 @@
     SecondaryAuditor,
 )
 
+from django.core.exceptions import ValidationError
+
+import argparse
+import logging
+import sys
+import math
+import os
+import jwt
+import requests
+from datetime import datetime
+import traceback
+
+
 logger = logging.getLogger(__name__)
 logging.basicConfig()
 logging.getLogger().setLevel(logging.INFO)
@@ -139,13 +141,13 @@ def _compare_multiline_strings(str1, str2):
 
     # Compare line counts
     if len(lines1) != len(lines2):
-        print("Line count differs.")
+        logger.info("Line count differs.")
         return False
 
     # Compare each line
     for index, (line1, line2) in enumerate(zip(lines1, lines2)):
         if line1 != line2:
-            print(
+            logger.info(
                 f"Difference found on line {index + 1}:\n- {repr(line1)}\n- {repr(line2)}"
             )
             return False
@@ -158,10 +160,10 @@ def get_api_values(endpoint, rid, field):
     res = call_api(api_url, endpoint, rid, field)
 
     if res.status_code == 200:
-        # print(f'{res.status_code} {res.url} {res.json()}')
+        # logger.info(f'{res.status_code} {res.url} {res.json()}')
        return list(map(lambda d: d[field], res.json()))
     else:
-        print(f"{res.status_code} {res.url}")
+        logger.error(f"{res.status_code} {res.url}")
         return []
 
 
@@ -181,25 +183,32 @@ def combine_counts(combined, d):
 
 def api_check(json_test_tables):
     combined_summary = {"endpoints": 0, "correct_rows": 0, "incorrect_rows": 0}
+
     for endo in json_test_tables:
         count(combined_summary, "endpoints")
         endpoint = endo["endpoint"]
         report_id = endo["report_id"]
-        print(f"-------------------- {endpoint} --------------------")
         summary = {}
         equality_results = []
+
+        logger.info(f"-------------------- {endpoint} --------------------")
+
         for row_ndx, row in enumerate(endo["rows"]):
             count(summary, "total_rows")
+
             if False in equality_results:
                 count(combined_summary, "incorrect_rows")
             else:
                 count(combined_summary, "correct_rows")
+
             equality_results = []
+
             for field_ndx, f in enumerate(row["fields"]):
                 # logger.info(f"Checking /{endpoint} {report_id} {f}")
                 # logger.info(f"{get_api_values(endpoint, report_id, f)}")
                 api_values = get_api_values(endpoint, report_id, f)
                 this_api_value = api_values[row_ndx]
+
                 # Check if field_ndx exists in row["values"]
                 if field_ndx < len(row["values"]):
                     this_field_value = row["values"][field_ndx]
@@ -217,13 +226,15 @@ def api_check(json_test_tables):
                     logger.info(
                         f"Field '{f}' with value '{this_api_value}' at index '{field_ndx}' is missing from test tables 'values'."
                     )
+
                 if all(equality_results):
                     count(summary, "correct_fields")
                 else:
                     count(summary, "incorrect_fields")
-                    sys.exit(-1)
+
         logger.info(summary)
         combined_summary = combine_counts(combined_summary, summary)
+
     return combined_summary
 
 
@@ -235,7 +246,7 @@ def run_end_to_end(user, dbkey, year, result):
         sac = setup_sac(user, entity_id, dbkey)
 
         if sac.general_information["audit_type"] == "alternative-compliance-engagement":
-            print(f"Skipping ACE audit: {dbkey}")
+            logger.info(f"Skipping ACE audit: {dbkey}")
             raise DataMigrationError("Skipping ACE audit")
         else:
             builder_loader = workbook_builder_loader(user, sac, dbkey, year)
@@ -260,8 +271,19 @@ def run_end_to_end(user, dbkey, year, result):
             result["success"].append(f"{sac.report_id} created")
 
     except Exception as exc:
-        tb = traceback.extract_tb(sys.exc_info()[2])
-        for frame in tb:
-            print(f"{frame.filename}:{frame.lineno} {frame.name}: {frame.line}")
+        error_type = type(exc)
+
+        if error_type == ValidationError:
+            logger.error(f"ValidationError: {exc}")
+        elif error_type == DataMigrationError:
+            logger.error(f"DataMigrationError: {exc.message}")
+        else:
+            logger.error(f"Unexpected error type {error_type}: {exc}")
+
+        tb = traceback.extract_tb(sys.exc_info()[2])
+        for frame in tb:
+            logger.error(
+                f"{frame.filename}:{frame.lineno} {frame.name}: {frame.line}"
+            )
 
         result["errors"].append(f"{exc}")

From a10d74f55810db71675c35c88b1f710bdce81b3f Mon Sep 17 00:00:00 2001
From: "Hassan D. M.
Sambo" Date: Thu, 7 Dec 2023 15:31:52 -0500 Subject: [PATCH 2/3] 2879 enforce audit year in all queries (#2933) * Code cleaning * Enforced normalized audit year * Code cleaning * #2879 Updated additional EINs generator to use both audityear and dbkey parameters * Linting * #2879 Updated additional UEIs generator to use both audityear and dbkey parameters * #2879 Updated CAP generator to use both audityear and dbkey parameters * #2879 Updated federal awards generator to use both audityear and dbkey parameters * #2879 Updated findings text generator to use both audityear and dbkey parameters * #2879 Updated findings generator to use both audityear and dbkey parameters * #2879 Updated Notes generator to use both audityear and dbkey parameters * #2879 Updated Secondary auditors generator to use both audityear and dbkey parameters * #2879 Updated queries to use both audityear and dbkey parameters * #2879 Refactored code, removed unused logic, consolidated parameters, cleaned code, updated readme * #2879 Regenerated workbooks to ensure logic works as intended * #2879 Enforced DBKEY and AUDITYEAR parameters * Fixed commit signing issue --- backend/census_historical_migration/README.md | 2 +- .../additional-eins-workbook-177310.xlsx | Bin 225121 -> 225121 bytes .../additional-ueis-workbook-177310.xlsx | Bin 225117 -> 225118 bytes .../audit-findings-text-workbook-177310.xlsx | Bin 267321 -> 267321 bytes ...orrective-action-plan-workbook-177310.xlsx | Bin 264336 -> 264338 bytes ...awards-audit-findings-workbook-177310.xlsx | Bin 1125519 -> 1125520 bytes .../federal-awards-workbook-177310.xlsx | Bin 1550705 -> 1550706 bytes .../notes-to-sefa-workbook-177310.xlsx | Bin 264834 -> 264834 bytes .../secondary-auditors-workbook-177310.xlsx | Bin 295038 -> 295038 bytes .../additional-eins-workbook-180818.xlsx | Bin 225123 -> 225123 bytes .../additional-ueis-workbook-180818.xlsx | Bin 225118 -> 225119 bytes .../audit-findings-text-workbook-180818.xlsx | Bin 263674 -> 263675 bytes ...orrective-action-plan-workbook-180818.xlsx | Bin 263687 -> 263686 bytes ...awards-audit-findings-workbook-180818.xlsx | Bin 1125823 -> 1125822 bytes .../federal-awards-workbook-180818.xlsx | Bin 1551167 -> 1551168 bytes .../notes-to-sefa-workbook-180818.xlsx | Bin 264386 -> 264387 bytes .../secondary-auditors-workbook-180818.xlsx | Bin 295038 -> 295039 bytes .../additional-eins-workbook-217653.xlsx | Bin 225786 -> 225786 bytes .../additional-ueis-workbook-217653.xlsx | Bin 226646 -> 226646 bytes .../audit-findings-text-workbook-217653.xlsx | Bin 263674 -> 263674 bytes ...orrective-action-plan-workbook-217653.xlsx | Bin 263687 -> 263688 bytes ...awards-audit-findings-workbook-217653.xlsx | Bin 1125824 -> 1125825 bytes .../federal-awards-workbook-217653.xlsx | Bin 1550643 -> 1550645 bytes .../notes-to-sefa-workbook-217653.xlsx | Bin 264926 -> 264926 bytes .../secondary-auditors-workbook-217653.xlsx | Bin 295041 -> 295041 bytes .../additional-eins-workbook-251020.xlsx | Bin 225120 -> 225120 bytes .../additional-ueis-workbook-251020.xlsx | Bin 225116 -> 225116 bytes .../audit-findings-text-workbook-251020.xlsx | Bin 266691 -> 266692 bytes ...orrective-action-plan-workbook-251020.xlsx | Bin 264878 -> 264878 bytes ...awards-audit-findings-workbook-251020.xlsx | Bin 1125689 -> 1125688 bytes .../federal-awards-workbook-251020.xlsx | Bin 1550373 -> 1550374 bytes .../notes-to-sefa-workbook-251020.xlsx | Bin 264623 -> 264624 bytes .../secondary-auditors-workbook-251020.xlsx | Bin 295036 -> 295036 bytes 
.../additional-eins-workbook-69688.xlsx | Bin 225121 -> 225121 bytes .../additional-ueis-workbook-69688.xlsx | Bin 225117 -> 225117 bytes .../audit-findings-text-workbook-69688.xlsx | Bin 264511 -> 264510 bytes ...corrective-action-plan-workbook-69688.xlsx | Bin 264167 -> 264168 bytes ...-awards-audit-findings-workbook-69688.xlsx | Bin 1125733 -> 1125733 bytes .../federal-awards-workbook-69688.xlsx | Bin 1550559 -> 1550558 bytes .../notes-to-sefa-workbook-69688.xlsx | Bin 264685 -> 264684 bytes .../secondary-auditors-workbook-69688.xlsx | Bin 295201 -> 295202 bytes .../historic_data_loader.py | 10 +-- .../commands/historic_data_migrator.py | 83 +++++++++--------- .../commands/historic_workbook_generator.py | 13 +-- .../commands/run_migration_for_year.py | 28 ------ .../commands/run_paginated_migration.py | 6 +- .../sac_general_lib/audit_information.py | 12 +-- .../sac_general_lib/auditee_certification.py | 6 +- .../sac_general_lib/auditor_certification.py | 6 +- .../sac_general_lib/general_information.py | 4 +- .../sac_general_lib/report_id_generator.py | 6 +- .../sac_general_lib/sac_creator.py | 28 ++---- .../sac_general_lib/utils.py | 56 ++++++------ .../transforms/xform_string_to_date.py | 15 ---- .../workbooklib/additional_eins.py | 22 +++-- .../workbooklib/additional_ueis.py | 21 ++--- .../workbooklib/corrective_action_plan.py | 22 ++--- .../workbooklib/end_to_end_core.py | 25 ++---- .../workbooklib/excel_creation_utils.py | 22 ++--- .../workbooklib/federal_awards.py | 39 ++++---- .../workbooklib/findings.py | 29 +++--- .../workbooklib/findings_text.py | 23 +++-- .../workbooklib/notes_to_sefa.py | 42 ++++----- .../workbooklib/secondary_auditors.py | 23 +++-- .../workbooklib/workbook_builder.py | 10 +-- .../workbooklib/workbook_builder_loader.py | 4 +- 66 files changed, 235 insertions(+), 322 deletions(-) delete mode 100644 backend/census_historical_migration/management/commands/run_migration_for_year.py delete mode 100644 backend/census_historical_migration/transforms/xform_string_to_date.py diff --git a/backend/census_historical_migration/README.md b/backend/census_historical_migration/README.md index c4810cb640..4eb9a4427b 100644 --- a/backend/census_historical_migration/README.md +++ b/backend/census_historical_migration/README.md @@ -68,7 +68,7 @@ To migrate dbkeys for a given year with pagination: docker compose run --rm web python manage.py run_paginated_migration --year 2022 \ --page_size 1000 - --pages 1, 3, 4 + --pages 1,3,4 ``` - `batchSize` and `pages` are optional. The script will use default values for these if they aren't provided. 
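The pagination options documented above drive the same per-submission, error-collecting loop that `historic_data_loader.py` and `run_end_to_end` implement in these patches: select a year's submissions, walk only the requested pages, and record each submission's outcome instead of letting one failure abort the run. Below is a minimal, self-contained sketch of that flow; `fetch_submissions_for_year`, `paginate`, and `migrate_submission` are hypothetical stand-ins for the project's Django ORM query, `Paginator`, and `run_end_to_end`, not the actual implementations.

```python
import logging
from typing import Iterable

logger = logging.getLogger(__name__)


class DataMigrationError(Exception):
    """Stand-in for the project's migration-specific exception."""


def fetch_submissions_for_year(audit_year: str) -> list[dict]:
    # Hypothetical stand-in for the AuditHeader.objects.filter(AUDITYEAR=...) query.
    return [
        {"DBKEY": "177310", "audit_type": "single-audit"},
        {"DBKEY": "251020", "audit_type": "alternative-compliance-engagement"},
    ]


def paginate(items: list, page_size: int, pages: Iterable[int]) -> Iterable[list]:
    # Yield only the requested 1-based pages, mirroring --page_size / --pages.
    for page_number in pages:
        start = (page_number - 1) * page_size
        yield items[start : start + page_size]


def migrate_submission(submission: dict, result: dict) -> None:
    # Record failures in result["errors"] instead of raising, so one bad
    # record cannot stop the rest of the page (the pattern run_end_to_end uses).
    try:
        if submission["audit_type"] == "alternative-compliance-engagement":
            raise DataMigrationError(f"Skipping ACE audit: {submission['DBKEY']}")
        result["success"].append(f"{submission['DBKEY']} migrated")
    except DataMigrationError as exc:
        logger.error("DataMigrationError: %s", exc)
        result["errors"].append(str(exc))
    except Exception as exc:  # unexpected failures are still captured
        logger.error("Unexpected %s: %s", type(exc).__name__, exc)
        result["errors"].append(str(exc))


def load_historic_data(audit_year: str, page_size: int, pages: list[int]) -> dict:
    # Collect a per-submission result log the way the historic data loader does.
    result_log = {}
    error_count = 0
    submissions = fetch_submissions_for_year(audit_year)
    for page in paginate(submissions, page_size, pages):
        for submission in page:
            result = {"success": [], "errors": []}
            migrate_submission(submission, result)
            result_log[(audit_year, submission["DBKEY"])] = result
            if result["errors"]:
                error_count += 1
    logger.info("%d submissions processed, %d with errors", len(result_log), error_count)
    return result_log


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    load_historic_data("2022", page_size=1000, pages=[1])
```

The design point mirrored from the patches is that `migrate_submission` never lets an exception escape: expected `DataMigrationError`s and unexpected failures alike are logged and appended to `result["errors"]`, so a single bad record cannot stop the rest of the page or the year.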
[Binary GIT patch deltas for the regenerated workbook .xlsx fixtures listed in the diffstat above omitted — not human-readable.]
diff --git a/backend/census_historical_migration/historic_data_loader.py b/backend/census_historical_migration/historic_data_loader.py
index 3a9497463d..283c2693e6 100644
--- a/backend/census_historical_migration/historic_data_loader.py
+++ b/backend/census_historical_migration/historic_data_loader.py
@@ -1,4 +1,4 @@
-from .models import ELECAUDITHEADER as Gen
+from .models import ELECAUDITHEADER as AuditHeader
 from .workbooklib.end_to_end_core import run_end_to_end
 
 from django.contrib.auth import get_user_model
@@ -13,7 +13,7 @@ def load_historic_data_for_year(audit_year, page_size, pages):
     result_log = {}
     total_count = error_count = 0
     user = create_or_get_user()
-    submissions_for_year = Gen.objects.filter(AUDITYEAR=audit_year).order_by(
+    submissions_for_year = AuditHeader.objects.filter(AUDITYEAR=audit_year).order_by(
         "ELECAUDITHEADERID"
     )
     paginator = Paginator(submissions_for_year, page_size)
@@ -27,13 +27,11 @@ def load_historic_data_for_year(audit_year, page_size, pages):
         )
 
         for submission in page.object_list:
-            dbkey = submission.DBKEY
             result = {"success": [], "errors": []}
 
-            # Migrate a single submission
-            run_end_to_end(user, dbkey, audit_year, result)
+            run_end_to_end(user, submission, result)
 
-            result_log[(audit_year, dbkey)] = result
+            result_log[(audit_year, submission.DBKEY)] = result
             total_count += 1
 
             if len(result["errors"]) > 0:
diff --git a/backend/census_historical_migration/management/commands/historic_data_migrator.py
b/backend/census_historical_migration/management/commands/historic_data_migrator.py index 74d0b1ca6f..0f300654b4 100644 --- a/backend/census_historical_migration/management/commands/historic_data_migrator.py +++ b/backend/census_historical_migration/management/commands/historic_data_migrator.py @@ -1,62 +1,65 @@ +from django.core.management.base import BaseCommand import logging import sys +from census_historical_migration.sac_general_lib.utils import ( + normalize_year_string, +) +from census_historical_migration.workbooklib.excel_creation_utils import ( + get_audit_header, +) from census_historical_migration.historic_data_loader import create_or_get_user - -from config.settings import ENVIRONMENT -from django.core.management.base import BaseCommand from census_historical_migration.workbooklib.end_to_end_core import run_end_to_end +from django.conf import settings logger = logging.getLogger(__name__) class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("--dbkeys", type=str, required=False, default="") - parser.add_argument("--years", type=str, required=False, default="") + parser.add_argument( + "--dbkeys", type=str, required=False, default="177310,251020" + ) + parser.add_argument("--years", type=str, required=False, default="22,22") - def handle(self, *args, **options): - dbkeys_str = options["dbkeys"] - years_str = options["years"] + def initiate_migration(self, dbkeys_str, years_str): dbkeys = dbkeys_str.split(",") - years = years_str.split(",") - if len(dbkeys) != len(years): - logger.error( - "Received {} dbkeys and {} years. Must be equal. Exiting.".format( - len(dbkeys), len(years) + if years_str: + years = [normalize_year_string(year) for year in years_str.split(",")] + if len(dbkeys) != len(years): + logger.error( + "Received {} dbkeys and {} years. Must be equal. Exiting.".format( + len(dbkeys), len(years) + ) ) - ) - sys.exit(-1) - - lengths = [len(s) == 2 for s in years] - if dbkeys_str and years_str and (not all(lengths)): - logger.error("Years must be two digits. Exiting.") - sys.exit(-2) + sys.exit(-1) user = create_or_get_user() - defaults = [ - (177310, 22), - (251020, 22), - ] + if dbkeys_str and years_str: + logger.info( + f"Generating test reports for DBKEYS: {dbkeys_str} and YEARS: {years_str}" + ) + for dbkey, year in zip(dbkeys, years): + logger.info("Running {}-{} end-to-end".format(dbkey, year)) + result = {"success": [], "errors": []} + try: + audit_header = get_audit_header(dbkey, year) + except Exception as e: + logger.error(e) + continue + + run_end_to_end(user, audit_header, result) + logger.info(result) - if ENVIRONMENT in ["LOCAL", "DEVELOPMENT", "PREVIEW", "STAGING"]: - if dbkeys_str and years_str: - logger.info( - f"Generating test reports for DBKEYS: {dbkeys_str} and YEARS: {years_str}" - ) - for dbkey, year in zip(dbkeys, years): - result = {"success": [], "errors": []} - run_end_to_end(user, dbkey, year, result) - logger.info(result) - else: - for pair in defaults: - logger.info("Running {}-{} end-to-end".format(pair[0], pair[1])) - result = {"success": [], "errors": []} - run_end_to_end(user, str(pair[0]), str(pair[1]), result) - logger.info(result) + def handle(self, *args, **options): + dbkeys_str = options["dbkeys"] + years_str = options["years"] + + if settings.ENVIRONMENT in ["LOCAL", "DEVELOPMENT", "PREVIEW", "STAGING"]: + self.initiate_migration(dbkeys_str, years_str) else: logger.error( - "Cannot run end-to-end workbook generation in production. Exiting." 
+ "Cannot run end-to-end historic data migrator in production. Exiting." ) sys.exit(-3) diff --git a/backend/census_historical_migration/management/commands/historic_workbook_generator.py b/backend/census_historical_migration/management/commands/historic_workbook_generator.py index 2f41bb1b55..603cc8ec5c 100644 --- a/backend/census_historical_migration/management/commands/historic_workbook_generator.py +++ b/backend/census_historical_migration/management/commands/historic_workbook_generator.py @@ -1,3 +1,7 @@ +from census_historical_migration.workbooklib.excel_creation_utils import ( + get_audit_header, +) +from census_historical_migration.sac_general_lib.utils import normalize_year_string from census_historical_migration.workbooklib.workbook_builder import ( generate_workbook, ) @@ -44,8 +48,8 @@ def handle(self, *args, **options): # noqa: C901 logger.info(e) logger.info(f"Could not create directory {out_basedir}") sys.exit() - - outdir = os.path.join(out_basedir, f'{options["dbkey"]}-{options["year"]}') + year = normalize_year_string(options["year"]) + outdir = os.path.join(out_basedir, f'{options["dbkey"]}-{year[-2:]}') if not os.path.exists(outdir): try: @@ -56,11 +60,10 @@ def handle(self, *args, **options): # noqa: C901 logger.info("could not create output directory. exiting.") sys.exit() + audit_header = get_audit_header(options["dbkey"], year) json_test_tables = [] for section, fun in sections_to_handlers.items(): - (wb, api_json, _, filename) = generate_workbook( - fun, options["dbkey"], options["year"], section - ) + (wb, api_json, _, filename) = generate_workbook(fun, audit_header, section) if wb: wb_path = os.path.join(outdir, filename) wb.save(wb_path) diff --git a/backend/census_historical_migration/management/commands/run_migration_for_year.py b/backend/census_historical_migration/management/commands/run_migration_for_year.py deleted file mode 100644 index 63f799c061..0000000000 --- a/backend/census_historical_migration/management/commands/run_migration_for_year.py +++ /dev/null @@ -1,28 +0,0 @@ -from ...historic_data_loader import load_historic_data_for_year - -from django.core.management.base import BaseCommand - -import logging - - -logger = logging.getLogger(__name__) -logger.setLevel(logging.WARNING) - - -class Command(BaseCommand): - help = """ - Migrate from Census tables to GSAFAC tables for a given year - Usage: - manage.py run_migration --year - """ - - def add_arguments(self, parser): - parser.add_argument("--year", help="4-digit Audit Year") - - def handle(self, *args, **options): - year = options.get("year") - if not year: - print("Please specify an audit year") - return - - load_historic_data_for_year(audit_year=year) diff --git a/backend/census_historical_migration/management/commands/run_paginated_migration.py b/backend/census_historical_migration/management/commands/run_paginated_migration.py index eedab80484..24c069ff78 100644 --- a/backend/census_historical_migration/management/commands/run_paginated_migration.py +++ b/backend/census_historical_migration/management/commands/run_paginated_migration.py @@ -1,3 +1,4 @@ +from census_historical_migration.sac_general_lib.utils import normalize_year_string from ...historic_data_loader import load_historic_data_for_year from django.core.management.base import BaseCommand @@ -26,10 +27,7 @@ def add_arguments(self, parser): parser.add_argument("--pages", type=str, required=False, default="1") def handle(self, *args, **options): - year = options.get("year") - if not year: - print("Please specify an audit year") - 
return + year = normalize_year_string(options.get("year")) try: pages_str = options["pages"] diff --git a/backend/census_historical_migration/sac_general_lib/audit_information.py b/backend/census_historical_migration/sac_general_lib/audit_information.py index bb355fdb45..96a0c418c6 100644 --- a/backend/census_historical_migration/sac_general_lib/audit_information.py +++ b/backend/census_historical_migration/sac_general_lib/audit_information.py @@ -7,7 +7,7 @@ from ..base_field_maps import FormFieldMap, FormFieldInDissem from ..sac_general_lib.utils import ( - _create_json_from_db_object, + create_json_from_db_object, ) import audit.validators from django.conf import settings @@ -57,10 +57,10 @@ ] -def _get_agency_prefixes(dbkey): - """Returns the agency prefixes for the given dbkey.""" +def _get_agency_prefixes(dbkey, year): + """Returns the agency prefixes for a given dbkey and audit year.""" agencies = set() - audits = get_audits(dbkey) + audits = get_audits(dbkey, year) for audit_detail in audits: agencies.add(string_to_string(audit_detail.CFDA_PREFIX)) @@ -154,8 +154,8 @@ def audit_information(audit_header): """Generates audit information JSON.""" results = _get_sp_framework_gaap_results(audit_header) - agencies_prefixes = _get_agency_prefixes(audit_header.DBKEY) - audit_info = _create_json_from_db_object(audit_header, mappings) + agencies_prefixes = _get_agency_prefixes(audit_header.DBKEY, audit_header.AUDITYEAR) + audit_info = create_json_from_db_object(audit_header, mappings) audit_info = { key: results.get(key, audit_info.get(key)) for key in set(audit_info) | set(results) diff --git a/backend/census_historical_migration/sac_general_lib/auditee_certification.py b/backend/census_historical_migration/sac_general_lib/auditee_certification.py index cc10ecf0a2..17d3080aa3 100644 --- a/backend/census_historical_migration/sac_general_lib/auditee_certification.py +++ b/backend/census_historical_migration/sac_general_lib/auditee_certification.py @@ -2,7 +2,7 @@ from datetime import date from ..base_field_maps import FormFieldMap, FormFieldInDissem from ..sac_general_lib.utils import ( - _create_json_from_db_object, + create_json_from_db_object, ) # The following fields represent checkboxes on the auditee certification form. @@ -41,10 +41,10 @@ def _xform_set_certification_date(auditee_certification): def auditee_certification(audit_header): """Generates auditee certification JSON.""" certification = {} - certification["auditee_certification"] = _create_json_from_db_object( + certification["auditee_certification"] = create_json_from_db_object( audit_header, auditee_certification_mappings ) - certification["auditee_signature"] = _create_json_from_db_object( + certification["auditee_signature"] = create_json_from_db_object( audit_header, auditee_signature_mappings ) certification = _xform_set_certification_date(certification) diff --git a/backend/census_historical_migration/sac_general_lib/auditor_certification.py b/backend/census_historical_migration/sac_general_lib/auditor_certification.py index 89b726a2d5..ba73bb7f3e 100644 --- a/backend/census_historical_migration/sac_general_lib/auditor_certification.py +++ b/backend/census_historical_migration/sac_general_lib/auditor_certification.py @@ -2,7 +2,7 @@ from datetime import date from ..base_field_maps import FormFieldMap, FormFieldInDissem from ..sac_general_lib.utils import ( - _create_json_from_db_object, + create_json_from_db_object, ) # The following fields represent checkboxes on the auditor certification form. 
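The management commands touched above (historic_data_migrator, historic_workbook_generator, run_paginated_migration) now funnel user-supplied years through normalize_year_string, which this patch adds to sac_general_lib/utils.py further down. As a rough sketch of the contract that helper enforces (two-digit years 16 through 22 and four-digit years 2016 through 2022 are accepted; anything else logs an error and exits), assuming the Django project is importable:

    # Illustration only; normalize_year_string is the helper added in
    # census_historical_migration/sac_general_lib/utils.py later in this patch.
    from census_historical_migration.sac_general_lib.utils import normalize_year_string

    assert normalize_year_string("22") == "2022"    # two-digit years are expanded
    assert normalize_year_string("2016") == "2016"  # four-digit years pass through unchanged
    # normalize_year_string("2024") would log an error and call sys.exit(-1)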
@@ -38,10 +38,10 @@ def _xform_set_certification_date(auditor_certification): def auditor_certification(audit_header): """Generates auditor certification JSON.""" certification = {} - certification["auditor_certification"] = _create_json_from_db_object( + certification["auditor_certification"] = create_json_from_db_object( audit_header, auditor_certification_mappings ) - certification["auditor_signature"] = _create_json_from_db_object( + certification["auditor_signature"] = create_json_from_db_object( audit_header, auditor_signature_mappings ) certification = _xform_set_certification_date(certification) diff --git a/backend/census_historical_migration/sac_general_lib/general_information.py b/backend/census_historical_migration/sac_general_lib/general_information.py index 8a5f7acbdb..74d9ea9837 100644 --- a/backend/census_historical_migration/sac_general_lib/general_information.py +++ b/backend/census_historical_migration/sac_general_lib/general_information.py @@ -8,7 +8,7 @@ ) from ..base_field_maps import FormFieldMap, FormFieldInDissem from ..sac_general_lib.utils import ( - _create_json_from_db_object, + create_json_from_db_object, ) import re @@ -169,7 +169,7 @@ def _xform_audit_type(general_information): def general_information(audit_header): """Generates general information JSON.""" - general_information = _create_json_from_db_object(audit_header, mappings) + general_information = create_json_from_db_object(audit_header, mappings) # List of transformation functions transformations = [ diff --git a/backend/census_historical_migration/sac_general_lib/report_id_generator.py b/backend/census_historical_migration/sac_general_lib/report_id_generator.py index eb3d3c9637..4165f00d9c 100644 --- a/backend/census_historical_migration/sac_general_lib/report_id_generator.py +++ b/backend/census_historical_migration/sac_general_lib/report_id_generator.py @@ -3,11 +3,13 @@ ) -def xform_dbkey_to_report_id(audit_header, dbkey): +def xform_dbkey_to_report_id(audit_header): # month = audit_header.fyenddate.split('-')[1] # 2022JUN0001000003 # We start new audits at 1 million. 
# So, we want 10 digits, and zero-pad for # historic DBKEY report_ids dt = xform_census_date_to_datetime(audit_header.FYENDDATE) - return f"{audit_header.AUDITYEAR}-{dt.month:02}-CENSUS-{dbkey.zfill(10)}" + return ( + f"{audit_header.AUDITYEAR}-{dt.month:02}-CENSUS-{audit_header.DBKEY.zfill(10)}" + ) diff --git a/backend/census_historical_migration/sac_general_lib/sac_creator.py b/backend/census_historical_migration/sac_general_lib/sac_creator.py index 4581829c9d..e70cd98589 100644 --- a/backend/census_historical_migration/sac_general_lib/sac_creator.py +++ b/backend/census_historical_migration/sac_general_lib/sac_creator.py @@ -4,7 +4,6 @@ from django.conf import settings from ..exception_utils import DataMigrationError -from ..workbooklib.excel_creation_utils import get_audit_header from ..sac_general_lib.general_information import ( general_information, ) @@ -24,11 +23,14 @@ logger = logging.getLogger(__name__) -def _create_sac(user, dbkey): +def setup_sac(user, audit_header): """Create a SAC object for the historic data migration.""" + if user is None: + raise DataMigrationError("No user provided to setup sac object") + logger.info(f"Creating a SAC object for {user}") + SingleAuditChecklist = apps.get_model("audit.SingleAuditChecklist") - audit_header = get_audit_header(dbkey) - generated_report_id = xform_dbkey_to_report_id(audit_header, dbkey) + generated_report_id = xform_dbkey_to_report_id(audit_header) try: exists = SingleAuditChecklist.objects.get(report_id=generated_report_id) @@ -70,23 +72,5 @@ def _create_sac(user, dbkey): sac.auditor_certification = auditor_certification(audit_header) sac.data_source = settings.CENSUS_DATA_SOURCE sac.save() - logger.info("Created single audit checklist %s", sac) return sac - - -def setup_sac(user, auditee_name, dbkey): - """Create a SAC object for the historic data migration.""" - if user is None: - raise DataMigrationError("No user provided to setup sac object") - logger.info(f"Creating a SAC object for {user}, {auditee_name}") - SingleAuditChecklist = apps.get_model("audit.SingleAuditChecklist") - - sac = SingleAuditChecklist.objects.filter( - submitted_by=user, general_information__auditee_name=auditee_name - ).first() - - logger.info(sac) - if sac is None: - sac = _create_sac(user, dbkey) - return sac diff --git a/backend/census_historical_migration/sac_general_lib/utils.py b/backend/census_historical_migration/sac_general_lib/utils.py index bb83bfc2ca..3b781bcaa0 100644 --- a/backend/census_historical_migration/sac_general_lib/utils.py +++ b/backend/census_historical_migration/sac_general_lib/utils.py @@ -1,13 +1,18 @@ -from datetime import date, datetime -from ..transforms.xform_string_to_date import string_to_date +import logging +from datetime import datetime +import sys + from ..transforms.xform_string_to_string import ( string_to_string, ) from ..transforms.xform_string_to_int import string_to_int from ..transforms.xform_string_to_bool import string_to_bool +logger = logging.getLogger(__name__) + -def _create_json_from_db_object(gobj, mappings): +def create_json_from_db_object(gobj, mappings): + """Constructs a JSON object from a database object using a list of mappings.""" json_obj = {} for mapping in mappings: if mapping.in_db is not None: @@ -25,8 +30,6 @@ def _create_json_from_db_object(gobj, mappings): value = string_to_bool(value) elif mapping.type is int: value = string_to_int(value) - elif mapping.type is date: - value = string_to_date(value) else: value = mapping.type(value) @@ -34,33 +37,28 @@ def 
_create_json_from_db_object(gobj, mappings): return json_obj -def _census_date_to_datetime(cd): - lookup = { - "JAN": 1, - "FEB": 2, - "MAR": 3, - "APR": 4, - "MAY": 5, - "JUN": 6, - "JUL": 7, - "AUG": 8, - "SEP": 9, - "OCT": 10, - "NOV": 11, - "DEC": 12, - } - parts = cd.split("-") - if len(parts) != 3 or parts[1] not in lookup: - raise ValueError("Invalid date format or month abbreviation in census date") - day, month_abbr, year = parts - month = lookup[month_abbr] - - return date(int(year) + 2000, month, int(day)) - - def xform_census_date_to_datetime(date_string): """Convert a census date string from '%m/%d/%Y %H:%M:%S' format to 'YYYY-MM-DD' format.""" # Parse the string into a datetime object dt = datetime.strptime(date_string, "%m/%d/%Y %H:%M:%S") # Extract and return the date part return dt.date() + + +def normalize_year_string(year_string): + """ + Normalizes a year string to a four-digit year format. + """ + try: + year = int(year_string) + except ValueError: + logger.error("Invalid year string.") + sys.exit(-1) + + if 16 <= year < 23: + return str(year + 2000) + elif 2016 <= year < 2023: + return year_string + else: + logger.error("Invalid year string. Audit year must be between 2016 and 2022") + sys.exit(-1) diff --git a/backend/census_historical_migration/transforms/xform_string_to_date.py b/backend/census_historical_migration/transforms/xform_string_to_date.py deleted file mode 100644 index 1c708e425d..0000000000 --- a/backend/census_historical_migration/transforms/xform_string_to_date.py +++ /dev/null @@ -1,15 +0,0 @@ -from datetime import datetime - - -def string_to_date(value): - """Converts a string to a date.""" - if not isinstance(value, str): - raise ValueError(f"Expected string, got {type(value).__name__}") - - value = value.strip() - - # Check if the string can be converted to a date - try: - return datetime.strptime(value, "%Y-%m-%d").date() - except ValueError: - raise ValueError(f"Cannot convert string to date: '{value}'") diff --git a/backend/census_historical_migration/workbooklib/additional_eins.py b/backend/census_historical_migration/workbooklib/additional_eins.py index 63ac709da4..53554c6416 100644 --- a/backend/census_historical_migration/workbooklib/additional_eins.py +++ b/backend/census_historical_migration/workbooklib/additional_eins.py @@ -2,7 +2,6 @@ string_to_string, ) from ..workbooklib.excel_creation_utils import ( - get_audit_header, map_simple_columns, generate_dissemination_test_table, set_workbook_uei, @@ -43,27 +42,26 @@ def xform_remove_trailing_decimal_zero(value): ] -def _get_eins(dbkey): - return Eins.objects.filter(DBKEY=dbkey) +def _get_eins(dbkey, year): + return Eins.objects.filter(DBKEY=dbkey, AUDITYEAR=year) -def generate_additional_eins(dbkey, year, outfile): +def generate_additional_eins(audit_header, outfile): """ - Generates additional eins workbook for a given dbkey. + Generates additional eins workbook for a given audit header. 
""" - logger.info(f"--- generate additional eins {dbkey} {year} ---") + logger.info( + f"--- generate additional eins {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook(sections_to_template_paths[FORM_SECTIONS.ADDITIONAL_EINS]) - audit_header = get_audit_header(dbkey) - set_workbook_uei(wb, audit_header.UEI) - - addl_eins = _get_eins(dbkey) + addl_eins = _get_eins(audit_header.DBKEY, audit_header.AUDITYEAR) map_simple_columns(wb, mappings, addl_eins) wb.save(outfile) - # FIXME - MSHD: The logic below will most likely be removed, see comment in federal_awards.py + table = generate_dissemination_test_table( - audit_header, "additional_eins", dbkey, mappings, addl_eins + audit_header, "additional_eins", mappings, addl_eins ) table["singletons"]["auditee_uei"] = audit_header.UEI return (wb, table) diff --git a/backend/census_historical_migration/workbooklib/additional_ueis.py b/backend/census_historical_migration/workbooklib/additional_ueis.py index eb1f4c346a..f0180c96f1 100644 --- a/backend/census_historical_migration/workbooklib/additional_ueis.py +++ b/backend/census_historical_migration/workbooklib/additional_ueis.py @@ -1,5 +1,4 @@ from ..workbooklib.excel_creation_utils import ( - get_audit_header, map_simple_columns, generate_dissemination_test_table, set_workbook_uei, @@ -24,28 +23,26 @@ ] -def _get_ueis(dbkey): - return Ueis.objects.filter(DBKEY=dbkey) +def _get_ueis(dbkey, year): + return Ueis.objects.filter(DBKEY=dbkey, AUDITYEAR=year) -def generate_additional_ueis(dbkey, year, outfile): +def generate_additional_ueis(audit_header, outfile): """ - Generates additional ueis workbook for a given dbkey. + Generates additional ueis workbook for a given audit header. """ - logger.info(f"--- generate additional ueis {dbkey} {year} ---") + logger.info( + f"--- generate additional ueis {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook(sections_to_template_paths[FORM_SECTIONS.ADDITIONAL_UEIS]) - audit_header = get_audit_header(dbkey) set_workbook_uei(wb, audit_header.UEI) - - additional_ueis = _get_ueis(dbkey) + additional_ueis = _get_ueis(audit_header.DBKEY, audit_header.AUDITYEAR) map_simple_columns(wb, mappings, additional_ueis) wb.save(outfile) - # FIXME - MSHD: The logic below will most likely be removed, see comment in federal_awards.py table = generate_dissemination_test_table( - audit_header, "additional_ueis", dbkey, mappings, additional_ueis + audit_header, "additional_ueis", mappings, additional_ueis ) - table["singletons"]["auditee_uei"] = audit_header.UEI return (wb, table) diff --git a/backend/census_historical_migration/workbooklib/corrective_action_plan.py b/backend/census_historical_migration/workbooklib/corrective_action_plan.py index 263f2cdb44..77b874e1fb 100644 --- a/backend/census_historical_migration/workbooklib/corrective_action_plan.py +++ b/backend/census_historical_migration/workbooklib/corrective_action_plan.py @@ -1,5 +1,4 @@ from ..workbooklib.excel_creation_utils import ( - get_audit_header, map_simple_columns, generate_dissemination_test_table, set_workbook_uei, @@ -30,31 +29,28 @@ ] -def _get_cap_text(dbkey): - return CapText.objects.filter(DBKEY=dbkey).order_by("SEQ_NUMBER") +def _get_cap_text(dbkey, year): + return CapText.objects.filter(DBKEY=dbkey, AUDITYEAR=year).order_by("SEQ_NUMBER") -def generate_corrective_action_plan(dbkey, year, outfile): +def generate_corrective_action_plan(audit_header, outfile): """ - Generates a corrective action plan workbook for a given dbkey. 
+ Generates a corrective action plan workbook for a given audit header. """ - logger.info(f"--- generate corrective action plan {dbkey} {year} ---") + logger.info( + f"--- generate corrective action plan {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook( sections_to_template_paths[FORM_SECTIONS.CORRECTIVE_ACTION_PLAN] ) - audit_header = get_audit_header(dbkey) - set_workbook_uei(wb, audit_header.UEI) - - captexts = _get_cap_text(dbkey) - + captexts = _get_cap_text(audit_header.DBKEY, audit_header.AUDITYEAR) map_simple_columns(wb, mappings, captexts) wb.save(outfile) table = generate_dissemination_test_table( - audit_header, "corrective_action_plans", dbkey, mappings, captexts + audit_header, "corrective_action_plans", mappings, captexts ) table["singletons"]["auditee_uei"] = audit_header.UEI - return (wb, table) diff --git a/backend/census_historical_migration/workbooklib/end_to_end_core.py b/backend/census_historical_migration/workbooklib/end_to_end_core.py index e8a8ba8f3d..f9fab2ce5e 100644 --- a/backend/census_historical_migration/workbooklib/end_to_end_core.py +++ b/backend/census_historical_migration/workbooklib/end_to_end_core.py @@ -1,4 +1,4 @@ -from config import settings +from django.conf import settings from ..exception_utils import DataMigrationError from ..workbooklib.workbook_builder_loader import ( workbook_builder_loader, @@ -40,11 +40,6 @@ logging.getLogger().setLevel(logging.INFO) parser = argparse.ArgumentParser() -# Peewee runs a really noisy DEBUG log. -pw = logging.getLogger("peewee") -pw.addHandler(logging.StreamHandler()) -pw.setLevel(logging.INFO) - def step_through_certifications(sac): sac.transition_to_ready_for_certification() @@ -55,7 +50,7 @@ def step_through_certifications(sac): sac.save() -def disseminate(sac, year): +def disseminate(sac): logger.info("Invoking movement of data from Intake to Dissemination") for model in [ AdditionalEin, @@ -238,18 +233,17 @@ def api_check(json_test_tables): return combined_summary -def run_end_to_end(user, dbkey, year, result): +def run_end_to_end(user, audit_header, result): try: - entity_id = "DBKEY {dbkey} {year} {date:%Y_%m_%d_%H_%M_%S}".format( - dbkey=dbkey, year=year, date=datetime.now() - ) - sac = setup_sac(user, entity_id, dbkey) + sac = setup_sac(user, audit_header) if sac.general_information["audit_type"] == "alternative-compliance-engagement": - logger.info(f"Skipping ACE audit: {dbkey}") + logger.info( + f"Skipping ACE audit: {audit_header.DBKEY} {audit_header.AUDITYEAR}" + ) raise DataMigrationError("Skipping ACE audit") else: - builder_loader = workbook_builder_loader(user, sac, dbkey, year) + builder_loader = workbook_builder_loader(user, sac, audit_header) json_test_tables = [] for section, fun in sections_to_handlers.items(): @@ -265,10 +259,9 @@ def run_end_to_end(user, dbkey, year, result): result["errors"].append(f"{errors.get('errors')}") return - disseminate(sac, year) + disseminate(sac) combined_summary = api_check(json_test_tables) logger.info(combined_summary) - result["success"].append(f"{sac.report_id} created") except Exception as exc: error_type = type(exc) diff --git a/backend/census_historical_migration/workbooklib/excel_creation_utils.py b/backend/census_historical_migration/workbooklib/excel_creation_utils.py index 866df8dc62..70bca2de32 100644 --- a/backend/census_historical_migration/workbooklib/excel_creation_utils.py +++ b/backend/census_historical_migration/workbooklib/excel_creation_utils.py @@ -131,12 +131,14 @@ def set_workbook_uei(workbook, uei): 
set_range(workbook, "auditee_uei", [uei]) -def get_audit_header(dbkey): - """Returns the AuditHeader instance for the given dbkey.""" +def get_audit_header(dbkey, year): + """Returns the AuditHeader record for the given dbkey and audit year.""" try: - audit_header = AuditHeader.objects.get(DBKEY=dbkey) + audit_header = AuditHeader.objects.get(DBKEY=dbkey, AUDITYEAR=year) except AuditHeader.DoesNotExist: - raise DataMigrationError(f"No audit header record found for dbkey: {dbkey}") + raise DataMigrationError( + f"No audit header record found for dbkey: {dbkey} and audit year: {year}" + ) return audit_header @@ -176,13 +178,11 @@ def get_template_name_for_section(section): raise ValueError(f"Unknown section {section}") -def generate_dissemination_test_table( - audit_header, api_endpoint, dbkey, mappings, objects -): +def generate_dissemination_test_table(audit_header, api_endpoint, mappings, objects): """Generates a test table for verifying the API queries results.""" table = {"rows": list(), "singletons": dict()} table["endpoint"] = api_endpoint - table["report_id"] = xform_dbkey_to_report_id(audit_header, dbkey) + table["report_id"] = xform_dbkey_to_report_id(audit_header) for o in objects: test_obj = {} @@ -208,6 +208,6 @@ def generate_dissemination_test_table( return table -def get_audits(dbkey): - """Returns the Audits instances for the given dbkey.""" - return Audits.objects.filter(DBKEY=dbkey).order_by("ID") +def get_audits(dbkey, year): + """Returns Audits records for the given dbkey and audit year.""" + return Audits.objects.filter(DBKEY=dbkey, AUDITYEAR=year).order_by("ID") diff --git a/backend/census_historical_migration/workbooklib/federal_awards.py b/backend/census_historical_migration/workbooklib/federal_awards.py index 28c3c7e16e..710d829007 100644 --- a/backend/census_historical_migration/workbooklib/federal_awards.py +++ b/backend/census_historical_migration/workbooklib/federal_awards.py @@ -2,7 +2,6 @@ string_to_string, ) from ..workbooklib.excel_creation_utils import ( - get_audit_header, get_audits, get_range_values, get_ranges, @@ -17,7 +16,7 @@ ) from ..workbooklib.templates import sections_to_template_paths from audit.fixtures.excel import FORM_SECTIONS -from config import settings +from django.conf import settings from ..models import ( ELECAUDITS as Audits, ELECPASSTHROUGH as Passthrough, @@ -163,7 +162,9 @@ def _get_passthroughs(audits): for index, audit in enumerate(audits): passthroughs = Passthrough.objects.filter( - DBKEY=audit.DBKEY, ELECAUDITSID=audit.ELECAUDITSID + DBKEY=audit.DBKEY, + AUDITYEAR=audit.AUDITYEAR, + ELECAUDITSID=audit.ELECAUDITSID, ).order_by("ID") # This may look like data transformation but it is not exactly the case. # In the audit worksheet, users can enter multiple names (or IDs) separated by a pipe '|' in a single cell. @@ -230,7 +231,7 @@ def _xform_populate_default_loan_balance(loans_at_end, audits): # FIXME - MSHD: _xform_populate_default_award_identification_values is currently unused # as unrequired data transformation will not be part of the first iteration # of the data migration process. -def _xform_populate_default_award_identification_values(audits, dbkey): +def _xform_populate_default_award_identification_values(audits, audit_header): """ Automatically fills in default values for empty additional award identifications. Iterates over a list of audits and their corresponding additional award identifications. 
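For context on the pipe-delimited convention noted in _get_passthroughs above: Census stores multiple passthrough names (or IDs) for one award in a single cell, so one cell may expand into several dissemination rows. A minimal sketch with made-up values (the real splitting stays inside _get_passthroughs):

    # Hypothetical cell contents; these strings are not taken from the fixtures.
    raw_names = "STATE DEPT OF EDUCATION|FEDERAL PASSTHROUGH"
    raw_ids = "SDE-001|"

    names = raw_names.split("|")  # ["STATE DEPT OF EDUCATION", "FEDERAL PASSTHROUGH"]
    ids = raw_ids.split("|")      # ["SDE-001", ""], an empty trailing ID is possible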
@@ -239,13 +240,14 @@ def _xform_populate_default_award_identification_values(audits, dbkey): """ addl_award_identifications = [""] * len(audits) filtered_audits = Audits.objects.filter( - Q(DBKEY=dbkey) & (Q(CFDA__icontains="U") | Q(CFDA__icontains="rd")) + Q(DBKEY=audit_header.DBKEY, AUDITYEAR=audit_header.AUDITYEAR) + & (Q(CFDA__icontains="U") | Q(CFDA__icontains="rd")) ).order_by("ID") for audit in filtered_audits: if audit.AWARDIDENTIFICATION is None or len(audit.AWARDIDENTIFICATION) < 1: addl_award_identifications[ get_list_index(audits, audit.ID) - ] = f"ADDITIONAL AWARD INFO - DBKEY {dbkey}" + ] = f"ADDITIONAL AWARD INFO - DBKEY {audit_header.DBKEY} AUDITYEAR {audit_header.AUDITYEAR}" else: addl_award_identifications[ get_list_index(audits, audit.ID) @@ -253,25 +255,19 @@ def _xform_populate_default_award_identification_values(audits, dbkey): return addl_award_identifications -def generate_federal_awards(dbkey, year, outfile): +def generate_federal_awards(audit_header, outfile): """ - Generates a federal awards workbook for all awards associated with a given dbkey. - - Note: This function assumes that all the audit information in the database - is for the same year. + Generates a federal awards workbook for all awards associated with a given audit header. """ - logger.info(f"--- generate federal awards {dbkey} {year} ---") + logger.info( + f"--- generate federal awards {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook( sections_to_template_paths[FORM_SECTIONS.FEDERAL_AWARDS_EXPENDED] ) - - audit_header = get_audit_header(dbkey) - set_workbook_uei(wb, audit_header.UEI) - - audits = get_audits(dbkey) - + audits = get_audits(audit_header.DBKEY, audit_header.AUDITYEAR) map_simple_columns(wb, mappings, audits) (cluster_names, other_cluster_names, state_cluster_names) = _generate_cluster_names( @@ -315,15 +311,10 @@ def generate_federal_awards(dbkey, year, outfile): for audit in audits: total += int(audit.AMOUNT) set_range(wb, "total_amount_expended", [str(total)]) - wb.save(outfile) - # FIXME - MSHD: The test table and the logic around it do not seem necessary to me. - # If there is any chance that the dissemination process allows bogus data to be disseminated, - # we should fix the dissemination process instead by reinforcing the validation logic (intake validation and cross-validation). - # I will create a ticket for the removal of this logic unless someone comes up with a strong reason to keep it. table = generate_dissemination_test_table( - audit_header, "federal_awards", dbkey, mappings, audits + audit_header, "federal_awards", mappings, audits ) award_counter = 1 filtered_mappings = [ diff --git a/backend/census_historical_migration/workbooklib/findings.py b/backend/census_historical_migration/workbooklib/findings.py index 948c0e767a..2fd4bc59f1 100644 --- a/backend/census_historical_migration/workbooklib/findings.py +++ b/backend/census_historical_migration/workbooklib/findings.py @@ -2,7 +2,6 @@ string_to_string, ) from ..workbooklib.excel_creation_utils import ( - get_audit_header, get_audits, map_simple_columns, generate_dissemination_test_table, @@ -114,37 +113,34 @@ def _get_findings_grid(findings_list): ] -def _get_findings(dbkey): +def _get_findings(dbkey, year): # CFDAs aka ELECAUDITS (or Audits) have elecauditid (FK). Findings have elecauditfindingsid, which is unique. # The linkage here is that a given finding will have an elecauditid. # Multiple findings will have a given elecauditid. That's how to link them. 
- return Findings.objects.filter(DBKEY=dbkey).order_by("ELECAUDITFINDINGSID") + return Findings.objects.filter(DBKEY=dbkey, AUDITYEAR=year).order_by( + "ELECAUDITFINDINGSID" + ) -def generate_findings(dbkey, year, outfile): +def generate_findings(audit_header, outfile): """ - Generates a federal awards audit findings workbook for all findings associated with a given dbkey. - - Note: This function assumes that all the audit information in the database - is for the same year. + Generates a federal awards audit findings workbook for all findings associated with a given audit header. """ - logger.info(f"--- generate findings {dbkey} {year} ---") - - audit_header = get_audit_header(dbkey) + logger.info( + f"--- generate findings {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook( sections_to_template_paths[FORM_SECTIONS.FINDINGS_UNIFORM_GUIDANCE] ) - set_workbook_uei(wb, audit_header.UEI) - - audits = get_audits(dbkey) + audits = get_audits(audit_header.DBKEY, audit_header.AUDITYEAR) # For each of them, I need to generate an elec -> award mapping. e2a = {} for index, audit in enumerate(audits): e2a[audit.ELECAUDITSID] = f"AWARD-{index+1:04d}" - findings = _get_findings(dbkey) + findings = _get_findings(audit_header.DBKEY, audit_header.AUDITYEAR) award_references = [] for find in findings: @@ -158,9 +154,8 @@ def generate_findings(dbkey, year, outfile): set_range(wb, "is_valid", grid, conversion_fun=str) wb.save(outfile) - # FIXME - MSHD: The logic below will be removed, see comment in federal_award.py. table = generate_dissemination_test_table( - audit_header, "findings", dbkey, mappings, findings + audit_header, "findings", mappings, findings ) for obj, ar in zip(table["rows"], award_references): obj["fields"].append("award_reference") diff --git a/backend/census_historical_migration/workbooklib/findings_text.py b/backend/census_historical_migration/workbooklib/findings_text.py index ed0c9e811d..78a409c1c0 100644 --- a/backend/census_historical_migration/workbooklib/findings_text.py +++ b/backend/census_historical_migration/workbooklib/findings_text.py @@ -1,5 +1,4 @@ from ..workbooklib.excel_creation_utils import ( - get_audit_header, map_simple_columns, generate_dissemination_test_table, set_workbook_uei, @@ -29,30 +28,30 @@ ] -def _get_findings_texts(dbkey): - return FindingsText.objects.filter(DBKEY=dbkey).order_by("SEQ_NUMBER") +def _get_findings_texts(dbkey, year): + return FindingsText.objects.filter(DBKEY=dbkey, AUDITYEAR=year).order_by( + "SEQ_NUMBER" + ) -def generate_findings_text(dbkey, year, outfile): +def generate_findings_text(audit_header, outfile): """ - Generates a findings text workbook for a given dbkey. - - Note: This function assumes that all the findings text - information in the database is related to the same year. + Generates a findings text workbook for a given audit header. 
""" - logger.info(f"--- generate findings text {dbkey} {year} ---") + logger.info( + f"--- generate findings text {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook(sections_to_template_paths[FORM_SECTIONS.FINDINGS_TEXT]) - audit_header = get_audit_header(dbkey) set_workbook_uei(wb, audit_header.UEI) - findings_texts = _get_findings_texts(dbkey) + findings_texts = _get_findings_texts(audit_header.DBKEY, audit_header.AUDITYEAR) map_simple_columns(wb, mappings, findings_texts) wb.save(outfile) table = generate_dissemination_test_table( - audit_header, "findings_text", dbkey, mappings, findings_texts + audit_header, "findings_text", mappings, findings_texts ) table["singletons"]["auditee_uei"] = audit_header.UEI diff --git a/backend/census_historical_migration/workbooklib/notes_to_sefa.py b/backend/census_historical_migration/workbooklib/notes_to_sefa.py index 11aba8aa8d..2553ab00ca 100644 --- a/backend/census_historical_migration/workbooklib/notes_to_sefa.py +++ b/backend/census_historical_migration/workbooklib/notes_to_sefa.py @@ -2,7 +2,6 @@ from ..transforms.xform_string_to_string import string_to_string from ..models import ELECNOTES as Notes from ..workbooklib.excel_creation_utils import ( - get_audit_header, set_range, map_simple_columns, generate_dissemination_test_table, @@ -76,13 +75,13 @@ def xform_is_minimis_rate_used(rate_content): raise DataMigrationError("Unable to determine if the de minimis rate was used.") -def _get_accounting_policies(dbkey): +def _get_accounting_policies(dbkey, year): # https://facdissem.census.gov/Documents/DataDownloadKey.xlsx # The TYPEID column determines which field in the form a given row corresponds to. # TYPEID=1 is the description of significant accounting policies. - """Get the accounting policies for a given dbkey.""" + """Get the accounting policies for a given dbkey and audit year.""" try: - note = Notes.objects.get(DBKEY=dbkey, TYPE_ID="1") + note = Notes.objects.get(DBKEY=dbkey, AUDITYEAR=year, TYPE_ID="1") content = string_to_string(note.CONTENT) except Notes.DoesNotExist: logger.info(f"No accounting policies found for dbkey: {dbkey}") @@ -90,13 +89,13 @@ def _get_accounting_policies(dbkey): return content -def _get_minimis_cost_rate(dbkey): - """Get the De Minimis cost rate for a given dbkey.""" +def _get_minimis_cost_rate(dbkey, year): + """Get the De Minimis cost rate for a given dbkey and audit year.""" # https://facdissem.census.gov/Documents/DataDownloadKey.xlsx # The TYPEID column determines which field in the form a given row corresponds to. # TYPEID=2 is the De Minimis cost rate. try: - note = Notes.objects.get(DBKEY=dbkey, TYPE_ID="2") + note = Notes.objects.get(DBKEY=dbkey, AUDITYEAR=year, TYPE_ID="2") rate = string_to_string(note.CONTENT) except Notes.DoesNotExist: logger.info(f"De Minimis cost rate not found for dbkey: {dbkey}") @@ -104,28 +103,32 @@ def _get_minimis_cost_rate(dbkey): return rate -def _get_notes(dbkey): - """Get the notes for a given dbkey.""" +def _get_notes(dbkey, year): + """Get the notes for a given dbkey and audit year.""" # https://facdissem.census.gov/Documents/DataDownloadKey.xlsx # The TYPEID column determines which field in the form a given row corresponds to. # TYPEID=3 is for notes, which have sequence numbers... that must align somewhere. 
- return Notes.objects.filter(DBKEY=dbkey, TYPE_ID="3").order_by("SEQ_NUMBER") + return Notes.objects.filter(DBKEY=dbkey, AUDITYEAR=year, TYPE_ID="3").order_by( + "SEQ_NUMBER" + ) -def generate_notes_to_sefa(dbkey, year, outfile): +def generate_notes_to_sefa(audit_header, outfile): """ - Generates notes to SEFA workbook for a given dbkey. + Generates notes to SEFA workbook for a given audit header. """ - logger.info(f"--- generate notes to sefa {dbkey} {year}---") + logger.info( + f"--- generate notes to sefa {audit_header.DBKEY} {audit_header.AUDITYEAR}---" + ) wb = pyxl.load_workbook(sections_to_template_paths[FORM_SECTIONS.NOTES_TO_SEFA]) - audit_header = get_audit_header(dbkey) set_workbook_uei(wb, audit_header.UEI) - - notes = _get_notes(dbkey) - rate_content = _get_minimis_cost_rate(dbkey) - policies_content = _get_accounting_policies(dbkey) + notes = _get_notes(audit_header.DBKEY, audit_header.AUDITYEAR) + rate_content = _get_minimis_cost_rate(audit_header.DBKEY, audit_header.AUDITYEAR) + policies_content = _get_accounting_policies( + audit_header.DBKEY, audit_header.AUDITYEAR + ) is_minimis_rate_used = xform_is_minimis_rate_used(rate_content) set_range(wb, "accounting_policies", [policies_content]) @@ -144,9 +147,8 @@ def generate_notes_to_sefa(dbkey, year, outfile): wb.save(outfile) table = generate_dissemination_test_table( - audit_header, "notes_to_sefa", dbkey, mappings, notes + audit_header, "notes_to_sefa", mappings, notes ) - table["singletons"]["accounting_policies"] = policies_content table["singletons"]["is_minimis_rate_used"] = is_minimis_rate_used table["singletons"]["rate_explained"] = rate_content diff --git a/backend/census_historical_migration/workbooklib/secondary_auditors.py b/backend/census_historical_migration/workbooklib/secondary_auditors.py index 760a34f255..a98a03f49d 100644 --- a/backend/census_historical_migration/workbooklib/secondary_auditors.py +++ b/backend/census_historical_migration/workbooklib/secondary_auditors.py @@ -1,6 +1,5 @@ from ..transforms.xform_string_to_string import string_to_string from ..workbooklib.excel_creation_utils import ( - get_audit_header, map_simple_columns, generate_dissemination_test_table, set_workbook_uei, @@ -75,30 +74,30 @@ def xform_add_hyphen_to_zip(zip): ] -def _get_secondary_auditors(dbkey): - return Caps.objects.filter(DBKEY=dbkey) +def _get_secondary_auditors(dbkey, year): + return Caps.objects.filter(DBKEY=dbkey, AUDITYEAR=year) -def generate_secondary_auditors(dbkey, year, outfile): +def generate_secondary_auditors(audit_header, outfile): """ - Generates secondary auditor workbook for a given dbkey. + Generates secondary auditor workbook for a given audit header. 
""" - logger.info(f"--- generate secondary auditors {dbkey} {year} ---") + logger.info( + f"--- generate secondary auditors {audit_header.DBKEY} {audit_header.AUDITYEAR} ---" + ) wb = pyxl.load_workbook( sections_to_template_paths[FORM_SECTIONS.SECONDARY_AUDITORS] ) - audit_header = get_audit_header(dbkey) set_workbook_uei(wb, audit_header.UEI) - - secondary_auditors = _get_secondary_auditors(dbkey) + secondary_auditors = _get_secondary_auditors( + audit_header.DBKEY, audit_header.AUDITYEAR + ) map_simple_columns(wb, mappings, secondary_auditors) - wb.save(outfile) - # FIXME - MSHD: The logic below will most likely be removed, see comment in federal_awards.py table = generate_dissemination_test_table( - audit_header, "secondary_auditors", dbkey, mappings, secondary_auditors + audit_header, "secondary_auditors", mappings, secondary_auditors ) table["singletons"]["auditee_uei"] = audit_header.UEI diff --git a/backend/census_historical_migration/workbooklib/workbook_builder.py b/backend/census_historical_migration/workbooklib/workbook_builder.py index 9d2bcc9b45..51572dbfef 100644 --- a/backend/census_historical_migration/workbooklib/workbook_builder.py +++ b/backend/census_historical_migration/workbooklib/workbook_builder.py @@ -17,10 +17,10 @@ def _make_excel_file(filename, f_obj): return file -def generate_workbook(workbook_generator, dbkey, year, section): +def generate_workbook(workbook_generator, audit_header, section): """ - Generates a workbook in memory using the workbook_generator for a specific - 'dbkey', 'year', and 'section'. Returns the workbook object, its JSON data representation, + Generates a workbook in memory using the workbook_generator for a given audit_header + and section template name. Returns the workbook object, its JSON data representation, the Excel file as a SimpleUploadedFile object, and the filename. 
""" with MemoryFS() as mem_fs: @@ -28,11 +28,11 @@ def generate_workbook(workbook_generator, dbkey, year, section): filename = ( get_template_name_for_section(section) .replace(".xlsx", "-{}.xlsx") - .format(dbkey) + .format(audit_header.DBKEY) ) with mem_fs.openbin(filename, mode="w") as outfile: # Generate the workbook object along with the API JSON representation - wb, json_data = workbook_generator(dbkey, year, outfile) + wb, json_data = workbook_generator(audit_header, outfile) # Re-open the file in read mode to create an Excel file object with mem_fs.openbin(filename, mode="r") as outfile: diff --git a/backend/census_historical_migration/workbooklib/workbook_builder_loader.py b/backend/census_historical_migration/workbooklib/workbook_builder_loader.py index 07c2bd3a94..8ab5b021cc 100644 --- a/backend/census_historical_migration/workbooklib/workbook_builder_loader.py +++ b/backend/census_historical_migration/workbooklib/workbook_builder_loader.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -def workbook_builder_loader(user, sac, dbkey, year): +def workbook_builder_loader(user, sac, audit_header): """ Returns a nested function '_loader' that, when called with a workbook generator and a section, generates a workbook for the section, uploads it to SAC, @@ -17,7 +17,7 @@ def workbook_builder_loader(user, sac, dbkey, year): def _loader(workbook_generator, section): wb, json_data, excel_file, filename = generate_workbook( - workbook_generator, dbkey, year, section + workbook_generator, audit_header, section ) if user: From 85458131284b98c132eea66ed42f3e172a42c6f0 Mon Sep 17 00:00:00 2001 From: Tadhg O'Higgins <2626258+tadhg-ohiggins@users.noreply.github.com> Date: Thu, 7 Dec 2023 12:49:23 -0800 Subject: [PATCH 3/3] Refactoring run.sh/.profile (#2951) This was ultimately a variety of problems, but part of it was our confusing .profile/run.sh situation. Everything is now: 1. A function 2. Has error checking 3. Is reused between the two scripts --- backend/.profile | 101 +++++++++++------------ backend/run.sh | 90 +++++++++++--------- backend/tools/api_standup.sh | 29 +++++++ backend/tools/api_teardown.sh | 20 +++++ backend/tools/migrate_app_tables.sh | 9 ++ backend/tools/migrate_historic_tables.sh | 9 ++ backend/tools/run_collectstatic.sh | 9 ++ backend/tools/seed_cog_baseline.sh | 9 ++ backend/tools/setup_cgov_env.sh | 39 +++++++++ backend/tools/setup_env.sh | 21 +++++ backend/tools/setup_local_env.sh | 18 ++++ backend/tools/util_startup.sh | 16 ++++ 12 files changed, 281 insertions(+), 89 deletions(-) create mode 100644 backend/tools/api_standup.sh create mode 100644 backend/tools/api_teardown.sh create mode 100644 backend/tools/migrate_app_tables.sh create mode 100644 backend/tools/migrate_historic_tables.sh create mode 100644 backend/tools/run_collectstatic.sh create mode 100644 backend/tools/seed_cog_baseline.sh create mode 100644 backend/tools/setup_cgov_env.sh create mode 100644 backend/tools/setup_env.sh create mode 100644 backend/tools/setup_local_env.sh create mode 100644 backend/tools/util_startup.sh diff --git a/backend/.profile b/backend/.profile index 2e1e975e5f..8ff630a90d 100644 --- a/backend/.profile +++ b/backend/.profile @@ -1,65 +1,64 @@ #!/bin/bash -set -e -export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt -export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt +# Source everything; everything is now a function. +# Remember: bash has no idea if a function exists, +# so a typo in a function name will fail silently. 
Similarly, +# bash has horrible scoping, so use of `local` in functions is +# critical for cleanliness in the startup script. +source tools/util_startup.sh +# This will choose the correct environment +# for local envs (LOCAL or TESTING) and cloud.gov +source tools/setup_env.sh +source tools/migrate_historic_tables.sh +source tools/api_teardown.sh +source tools/migrate_app_tables.sh +source tools/api_standup.sh +source tools/run_collectstatic.sh +source tools/seed_cog_baseline.sh -export https_proxy="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "https-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.uri")" -export smtp_proxy_domain="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "smtp-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.domain")" -export smtp_proxy_port="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "smtp-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.port")" -S3_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-public-s3" ".[][] | select(.name == \$service_name) | .credentials.endpoint")" -S3_FIPS_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-public-s3" ".[][] | select(.name == \$service_name) | .credentials.fips_endpoint")" -S3_PRIVATE_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-private-s3" ".[][] | select(.name == \$service_name) | .credentials.endpoint")" -S3_PRIVATE_FIPS_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-private-s3" ".[][] | select(.name == \$service_name) | .credentials.fips_endpoint")" -export no_proxy="${S3_ENDPOINT_FOR_NO_PROXY},${S3_FIPS_ENDPOINT_FOR_NO_PROXY},${S3_PRIVATE_ENDPOINT_FOR_NO_PROXY},${S3_PRIVATE_FIPS_ENDPOINT_FOR_NO_PROXY},apps.internal" +if [[ "$CF_INSTANCE_INDEX" == 0 ]]; then -# Grab the New Relic license key from the newrelic-creds user-provided service instance -export NEW_RELIC_LICENSE_KEY="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "newrelic-creds" ".[][] | select(.name == \$service_name) | .credentials.NEW_RELIC_LICENSE_KEY")" + ##### + # SETUP THE CGOV ENVIRONMENT + setup_env + gonogo "setup_env" -# Set the application name for New Relic telemetry. -export NEW_RELIC_APP_NAME="$(echo "$VCAP_APPLICATION" | jq -r .application_name)-$(echo "$VCAP_APPLICATION" | jq -r .space_name)" + ##### + # MIGRATE HISTORICAL TABLES + # Migrate the historic tables first. + migrate_historic_tables + gonogo "migrate_historic_tables" -# Set the environment name for New Relic telemetry. -export NEW_RELIC_ENVIRONMENT="$(echo "$VCAP_APPLICATION" | jq -r .space_name)" + ##### + # API TEARDOWN + # API has to be deprecated/removed before migration, because + # of tight coupling between schema/views and the dissemination tables + api_teardown + gonogo "api_teardown" -# Set Agent logging to stdout to be captured by CF Logs -export NEW_RELIC_LOG=stdout + ##### + # MIGRATE APP TABLES + migrate_app_tables + gonogo "migrate_app_tables" -# Logging level, (critical, error, warning, info and debug). 
Default to info -export NEW_RELIC_LOG_LEVEL=info + ##### + # API STANDUP + # Standup the API, which may depend on migration changes + api_standup + gonogo "api_standup" -# https://docs.newrelic.com/docs/security/security-privacy/compliance/fedramp-compliant-endpoints/ -export NEW_RELIC_HOST="gov-collector.newrelic.com" -# https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/#proxy -export NEW_RELIC_PROXY_HOST="$https_proxy" + ##### + # COLLECT STATIC + # Do Django things with static files. + run_collectstatic + gonogo "run_collectstatic" -# We only want to run migrate and collecstatic for the first app instance, not -# for additional app instances, so we gate all of this behind CF_INSTANCE_INDEX -# being 0. -if [[ "$CF_INSTANCE_INDEX" == 0 ]]; then - echo 'Starting API schema deprecation' && - python manage.py drop_deprecated_api_schema_and_views && - echo 'Finished API schema deprecation' && - echo 'Dropping API schema' && - python manage.py drop_api_schema && - echo 'Finished dropping API schema' && - echo 'Starting API schema creation' && - python manage.py create_api_schema && - echo 'Finished API schema creation' && - echo 'Starting migrate' && - python manage.py migrate && - python manage.py migrate --database census-to-gsafac-db && - echo 'Finished migrate' && - echo 'Starting API view creation' && - python manage.py create_api_views && - echo 'Finished view creation' && - echo 'Starting collectstatic' && - python manage.py collectstatic --noinput && - echo 'Finished collectstatic' && - echo 'Starting seed_cog_baseline' && - python manage.py seed_cog_baseline && - echo 'Finished seed_cog_baseline' + ##### + # SEED COG/OVER TABLES + # Setup tables for cog/over assignments + seed_cog_baseline + gonogo "seed_cog_baseline" fi # Make psql usable by scripts, for debugging, etc. diff --git a/backend/run.sh b/backend/run.sh index 1d428ae0f7..9d52d5ab6d 100755 --- a/backend/run.sh +++ b/backend/run.sh @@ -1,42 +1,56 @@ #!/bin/bash -if [[ -n "${ENV}" ]]; then - echo "Environment set as: ${ENV}" -else - echo "No environment variable ${ENV} is set!" -fi; - -sleep 10 - -if [[ "${ENV}" == "LOCAL" || "${ENV}" == "TESTING" ]]; then - export AWS_PRIVATE_ACCESS_KEY_ID=longtest - export AWS_PRIVATE_SECRET_ACCESS_KEY=longtest - export AWS_S3_PRIVATE_ENDPOINT="http://minio:9000" - mc alias set myminio "${AWS_S3_PRIVATE_ENDPOINT}" minioadmin minioadmin - mc mb myminio/gsa-fac-private-s3 - mc mb myminio/fac-census-to-gsafac-s3 - mc admin user svcacct add --access-key="${AWS_PRIVATE_ACCESS_KEY_ID}" --secret-key="${AWS_PRIVATE_SECRET_ACCESS_KEY}" myminio minioadmin -fi; - -# Migrate first -python manage.py migrate -python manage.py migrate --database census-to-gsafac-db - - -echo 'Starting API schema deprecation' && -python manage.py drop_deprecated_api_schema_and_views && -echo 'Finished API schema deprecation' && -echo 'Dropping API schema' && -python manage.py drop_api_schema && -echo 'Finished dropping API schema' && -echo 'Starting API schema creation' && -python manage.py create_api_schema && -echo 'Finished API schema creation' && -echo 'Starting API view creation' && -python manage.py create_api_views && -echo 'Finished view creation' && -echo 'Starting seed_cog_baseline' && -python manage.py seed_cog_baseline && -echo 'Finished seed_cog_baseline' +# Source everything; everything is now a function. +# Remember: bash has no idea if a function exists, +# so a typo in a function name will fail silently. 
Similarly, +# bash has horrible scoping, so use of `local` in functions is +# critical for cleanliness in the startup script. +source tools/util_startup.sh +# This will choose the correct environment +# for local envs (LOCAL or TESTING) and cloud.gov +source tools/setup_env.sh +source tools/migrate_historic_tables.sh +source tools/api_teardown.sh +source tools/migrate_app_tables.sh +source tools/api_standup.sh +source tools/seed_cog_baseline.sh +##### +# SETUP THE LOCAL ENVIRONMENT +setup_env +gonogo "setup_env" + +##### +# MIGRATE HISTORICAL TABLES +# Migrate the historic tables first. +migrate_historic_tables +gonogo "migrate_historic_tables" + +##### +# API TEARDOWN +# API has to be deprecated/removed before migration, because +# of tight coupling between schema/views and the dissemination tables +api_teardown +gonogo "api_teardown" + +##### +# MIGRATE APP TABLES +migrate_app_tables +gonogo "migrate_app_tables" + +##### +# API STANDUP +# Standup the API, which may depend on migration changes +api_standup +gonogo "api_standup" + +##### +# SEED COG/OVER TABLES +# Setup tables for cog/over assignments +seed_cog_baseline +gonogo "seed_cog_baseline" + +##### +# LAUNCH THE APP +# We will have died long ago if things didn't work. npm run dev & python manage.py runserver 0.0.0.0:8000 diff --git a/backend/tools/api_standup.sh b/backend/tools/api_standup.sh new file mode 100644 index 0000000000..57d6079520 --- /dev/null +++ b/backend/tools/api_standup.sh @@ -0,0 +1,29 @@ +source tools/util_startup.sh + +function api_standup { + startup_log "API_STANDUP" "BEGIN" + + # First create non-managed tables + startup_log "CREATE_API_ACCESS_TABLES" "BEGIN" + python manage.py create_api_access_tables + local d1=$? + startup_log "CREATE_API_ACCESS_TABLES" "END" + + # Bring the API back, possibly installing a new API + startup_log "CREATE_API_SCHEMA" "BEGIN" + python manage.py create_api_schema + local d2=$? + startup_log "CREATE_API_SCHEMA" "END" + + startup_log "CREATE_API_VIEWS" "BEGIN" + python manage.py create_api_views && + local d3=$? + startup_log "CREATE_API_VIEWS" "END" + + startup_log "API_STANDUP" "END" + + result=$(($d1 + $d2 + $d3)) + # If these are all zero, we're all good. + return $result +} + diff --git a/backend/tools/api_teardown.sh b/backend/tools/api_teardown.sh new file mode 100644 index 0000000000..80292d3181 --- /dev/null +++ b/backend/tools/api_teardown.sh @@ -0,0 +1,20 @@ +source tools/util_startup.sh + +function api_teardown { + startup_log "API_TEARDOWN" "BEGIN" + + startup_log "DROP_DEPRECATED_API_SCHEMA_AND_VIEWS" "BEGIN" + python manage.py drop_deprecated_api_schema_and_views + local d1=$? + startup_log "DROP_DEPRECATED_API_SCHEMA_AND_VIEWS" "END" + startup_log "DROP_API_SCHEMA" "BEGIN" + python manage.py drop_api_schema + local d2=$? + startup_log "DROP_API_SCHEMA" "END" + + startup_log "API_TEARDOWN" "END" + + result=$(($d1 + $d2)) + # If these are both zero, we're all good. + return $result +} diff --git a/backend/tools/migrate_app_tables.sh b/backend/tools/migrate_app_tables.sh new file mode 100644 index 0000000000..aef200d092 --- /dev/null +++ b/backend/tools/migrate_app_tables.sh @@ -0,0 +1,9 @@ +source tools/util_startup.sh + +function migrate_app_tables { + startup_log "MIGRATE_APP_TABLES" "BEGIN" + python manage.py migrate + local result=$? 
+    startup_log "MIGRATE_APP_TABLES" "END"
+    return $result
+}
diff --git a/backend/tools/migrate_historic_tables.sh b/backend/tools/migrate_historic_tables.sh
new file mode 100644
index 0000000000..6454642bad
--- /dev/null
+++ b/backend/tools/migrate_historic_tables.sh
@@ -0,0 +1,9 @@
+source tools/util_startup.sh
+
+function migrate_historic_tables {
+    startup_log "HISTORIC_TABLE_MIGRATION" "BEGIN"
+    python manage.py migrate --database census-to-gsafac-db
+    local result=$?
+    startup_log "HISTORIC_TABLE_MIGRATION" "END"
+    return $result
+}
diff --git a/backend/tools/run_collectstatic.sh b/backend/tools/run_collectstatic.sh
new file mode 100644
index 0000000000..18f09549a4
--- /dev/null
+++ b/backend/tools/run_collectstatic.sh
@@ -0,0 +1,9 @@
+source tools/util_startup.sh
+
+function run_collectstatic {
+    startup_log "RUN_COLLECTSTATIC" "BEGIN"
+    python manage.py collectstatic --noinput
+    local result=$?
+    startup_log "RUN_COLLECTSTATIC" "END"
+    return $result
+}
diff --git a/backend/tools/seed_cog_baseline.sh b/backend/tools/seed_cog_baseline.sh
new file mode 100644
index 0000000000..3dc5eb4f66
--- /dev/null
+++ b/backend/tools/seed_cog_baseline.sh
@@ -0,0 +1,9 @@
+source tools/util_startup.sh
+
+function seed_cog_baseline {
+    startup_log "SEED_COG_BASELINE" "BEGIN"
+    python manage.py seed_cog_baseline
+    local result=$?
+    startup_log "SEED_COG_BASELINE" "END"
+    return $result
+}
diff --git a/backend/tools/setup_cgov_env.sh b/backend/tools/setup_cgov_env.sh
new file mode 100644
index 0000000000..73f6c5c572
--- /dev/null
+++ b/backend/tools/setup_cgov_env.sh
@@ -0,0 +1,39 @@
+source tools/util_startup.sh
+
+function setup_cgov_env {
+    set -e
+
+    export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
+    export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
+
+    export https_proxy="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "https-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.uri")"
+    export smtp_proxy_domain="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "smtp-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.domain")"
+    export smtp_proxy_port="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "smtp-proxy-creds" ".[][] | select(.name == \$service_name) | .credentials.port")"
+
+    S3_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-public-s3" ".[][] | select(.name == \$service_name) | .credentials.endpoint")"
+    S3_FIPS_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-public-s3" ".[][] | select(.name == \$service_name) | .credentials.fips_endpoint")"
+    S3_PRIVATE_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-private-s3" ".[][] | select(.name == \$service_name) | .credentials.endpoint")"
+    S3_PRIVATE_FIPS_ENDPOINT_FOR_NO_PROXY="$(echo $VCAP_SERVICES | jq --raw-output --arg service_name "fac-private-s3" ".[][] | select(.name == \$service_name) | .credentials.fips_endpoint")"
+    export no_proxy="${S3_ENDPOINT_FOR_NO_PROXY},${S3_FIPS_ENDPOINT_FOR_NO_PROXY},${S3_PRIVATE_ENDPOINT_FOR_NO_PROXY},${S3_PRIVATE_FIPS_ENDPOINT_FOR_NO_PROXY},apps.internal"
+
+    # Grab the New Relic license key from the newrelic-creds user-provided service instance
+    export NEW_RELIC_LICENSE_KEY="$(echo "$VCAP_SERVICES" | jq --raw-output --arg service_name "newrelic-creds" ".[][] | select(.name == \$service_name) | .credentials.NEW_RELIC_LICENSE_KEY")"
+
+    # Set the application name for New Relic telemetry.
+    export NEW_RELIC_APP_NAME="$(echo "$VCAP_APPLICATION" | jq -r .application_name)-$(echo "$VCAP_APPLICATION" | jq -r .space_name)"
+
+    # Set the environment name for New Relic telemetry.
+    export NEW_RELIC_ENVIRONMENT="$(echo "$VCAP_APPLICATION" | jq -r .space_name)"
+
+    # Set Agent logging to stdout to be captured by CF Logs
+    export NEW_RELIC_LOG=stdout
+
+    # Logging level (critical, error, warning, info, debug). Default to info
+    export NEW_RELIC_LOG_LEVEL=info
+
+    # https://docs.newrelic.com/docs/security/security-privacy/compliance/fedramp-compliant-endpoints/
+    export NEW_RELIC_HOST="gov-collector.newrelic.com"
+    # https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/#proxy
+    export NEW_RELIC_PROXY_HOST="$https_proxy"
+    return 0
+}
diff --git a/backend/tools/setup_env.sh b/backend/tools/setup_env.sh
new file mode 100644
index 0000000000..5839f92de0
--- /dev/null
+++ b/backend/tools/setup_env.sh
@@ -0,0 +1,21 @@
+source tools/setup_local_env.sh
+source tools/setup_cgov_env.sh
+
+function setup_env {
+    if [[ -n "${ENV}" ]]; then
+        startup_log "LOCAL_ENV" "Environment set as: ${ENV}"
+    else
+        startup_log "LOCAL_ENV" "The ENV environment variable is not set!"
+        return 1
+    fi;
+
+    local result=0
+    if [[ "${ENV}" == "LOCAL" || "${ENV}" == "TESTING" ]]; then
+        setup_local_env
+        result=$?
+    else
+        setup_cgov_env
+        result=$?
+    fi;
+    return $result
+}
diff --git a/backend/tools/setup_local_env.sh b/backend/tools/setup_local_env.sh
new file mode 100644
index 0000000000..62b01affb0
--- /dev/null
+++ b/backend/tools/setup_local_env.sh
@@ -0,0 +1,18 @@
+source tools/util_startup.sh
+
+function setup_local_env {
+
+    if [[ "${ENV}" == "LOCAL" || "${ENV}" == "TESTING" ]]; then
+        startup_log "LOCAL_ENV" "We are in a local environment."
+        export AWS_PRIVATE_ACCESS_KEY_ID=longtest
+        export AWS_PRIVATE_SECRET_ACCESS_KEY=longtest
+        export AWS_S3_PRIVATE_ENDPOINT="http://minio:9000"
+        mc alias set myminio "${AWS_S3_PRIVATE_ENDPOINT}" minioadmin minioadmin
+        # Do nothing if the bucket already exists.
+        # https://min.io/docs/minio/linux/reference/minio-mc/mc-mb.html
+        mc mb --ignore-existing myminio/gsa-fac-private-s3
+        mc mb --ignore-existing myminio/fac-census-to-gsafac-s3
+        mc admin user svcacct add --access-key="${AWS_PRIVATE_ACCESS_KEY_ID}" --secret-key="${AWS_PRIVATE_SECRET_ACCESS_KEY}" myminio minioadmin
+        return 0
+    fi;
+}
diff --git a/backend/tools/util_startup.sh b/backend/tools/util_startup.sh
new file mode 100644
index 0000000000..ca54205ab9
--- /dev/null
+++ b/backend/tools/util_startup.sh
@@ -0,0 +1,16 @@
+function startup_log {
+    local tag="$1"
+    local msg="$2"
+    echo "STARTUP ${tag} ${msg}"
+}
+
+# gonogo: check the exit status of the previous command.
+# Log PASS or FAIL, and halt the startup script on failure.
+function gonogo {
+    if [ $? -eq 0 ]; then
+        startup_log "STARTUP_CHECK" "$1 PASS"
+    else
+        startup_log "STARTUP_CHECK" "$1 FAIL"
+        exit 1
+    fi
+}
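For reviewers following along, the sketch below shows how an additional startup step would plug into these conventions. It is illustrative only and not part of this change: tools/example_step.sh and the example_step management command are hypothetical placeholders, not files or commands in this repository.

# tools/example_step.sh (hypothetical): one startup task, wrapped in one function.
source tools/util_startup.sh

function example_step {
    startup_log "EXAMPLE_STEP" "BEGIN"
    # Placeholder for the real work; an actual step would call an existing management command here.
    python manage.py example_step
    local result=$?
    startup_log "EXAMPLE_STEP" "END"
    # A non-zero status propagates to the caller, where gonogo decides whether to halt.
    return $result
}

# Wiring in run.sh (and the cloud.gov startup script):
#   source tools/example_step.sh
#   example_step
#   gonogo "example_step"

Because gonogo reads $? from whatever ran immediately before it, each step function must be followed directly by its gonogo check, with no other commands in between.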