# finishing_fresh.py
import arcpy as ap
from arcpy import AddMessage as write
from arcpy import AddFieldDelimiters as fieldDelim
import os
import math
import uuid
import time
from datetime import datetime as dt
import pandas as pd
import numpy as np
import sys
# Place Constants Here
def grand_entrance(tds_db, boolean_dict):
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Title Formatting and Workspace Setup #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Sanitizing GDB name
tds_split = tds_db.split("\\")
tds_split.pop()
rresults = tds_split
gdb_file = tds_split.pop()
name_list = gdb_file.split(".")
name_list.pop()
gdb_name = name_list[0]
#rresults.pop()
rresults = "\\".join(rresults)
# Tool title with GDB name formatting
write("")
slines = u"______________________________________"
sspaces = u" "
exl = ""
exs = ""
exgl = "" # odd left dominant
exgr = ""
range_len = 38 - len(gdb_name)
if range_len > 0:
if (range_len % 2) == 0:
rn0 = range_len/2
for i in range(int(rn0)):
exgl += " "
exgr += " "
else:
rn1 = int(float(range_len)/2)
for i in range(rn1):
exgl += " "
rn2 = rn1 + 1
for i in range(int(rn2)):
exgr += " "
if len(gdb_name) > 38:
extra = len(gdb_name) - 38
for i in range(extra):
exl += "_"
exs += " "
# Report of requested tasks
write(u" _____{0}{3}__\n / \\ {1}{4} \\\n| | {1}{4} |\n \\_ | {1}{4} |\n | {5}{2}{6} |\n | {1}{4} |".format(slines, sspaces, gdb_name, exl, exs, exgl, exgr))
if boolean_dict['secret']:
write(u" | By order of the Liberator {0}|".format(exs))
write(u" | The leader of the free people {0}|".format(exs))
write(u" | _______ _ {0}|".format(exs))
write(u" | / ___/ / ___ _(_)_____ _ ___ ____ {0}|".format(exs))
write(u" | / /__/ _ \/ _ `/ / __/ ' \/ _ `/ _ \ {0}|".format(exs))
write(u" | \___/_//_/\_,_/_/_/ /_/_/_/\_,_/_//_/ {0}|".format(exs))
write(u" | ___ __ {0}|".format(exs))
write(u" | / _ )___ ____/ /__ {0}|".format(exs))
write(u" | / _ / _ \/ __/ '_/ {0}|".format(exs))
write(u" | /____/\___/\__/_/\_\ {0}|".format(exs))
write(u" | {0} {1}|".format(sspaces, exs))
write(u" | The following Finishing tasks {0}|".format(exs))
write(u" | shall be executed {0}|".format(exs))
write(u" | {0} {1}|".format(sspaces, exs))
write(u" | {0} {1}|".format(sspaces, exs))
write(u" | ====== Processes Initialized ====== {0}|".format(exs))
write(u" | {0} {1}|".format(sspaces, exs))
if boolean_dict['repair']:
write(u" | - Repair All NULL Geometries {0}|".format(exs))
if boolean_dict['fcode']:
write(u" | - Populate F_Codes {0}|".format(exs))
if boolean_dict['defaults']:
write(u" | - Calculate Default Values {0}|".format(exs))
if boolean_dict['metrics']:
write(u" | - Calculate Metrics {0}|".format(exs))
if boolean_dict['ufi']:
write(u" | - Update UFI Values {0}|".format(exs))
if boolean_dict['hydro'] or boolean_dict['trans'] or boolean_dict['util']:
write(u" | - Integrate and Repair: {0}|".format(exs))
if boolean_dict['large']:
write(u" | ~ Large Dataset ~ {0}|".format(exs))
if boolean_dict['hydro']:
write(u" | Hydro {0}|".format(exs))
if boolean_dict['trans']:
write(u" | Trans {0}|".format(exs))
if boolean_dict['util']:
write(u" | Utilities {0}|".format(exs))
if boolean_dict['dups']:
write(u" | - Delete Identical Features {0}|".format(exs))
if boolean_dict['explode']:
write(u" | - Hypernova Burst Multipart Features {0}|".format(exs))
if boolean_dict['bridge']:
write(u" | - Default Bridge WID Updater {0}|".format(exs))
if boolean_dict['pylong']:
write(u" | - Default Pylon HGT Updater {0}|".format(exs))
if boolean_dict['building']:
write(u" | - Building in BUA Descaler {0}|".format(exs))
if boolean_dict['swap']:
write(u" | - CACI Swap Scale and CTUU {0}|".format(exs))
if boolean_dict['fcount']:
write(u" | - Generate Feature Report {0}|".format(exs))
if boolean_dict['vsource']:
write(u" | - Generate Source Report {0}|".format(exs))
write(u" | {0} _ |\n | {0} __(.)< |\n | {0}~~~\\___)~~~ |".format(exs))
write(u" | {0}{2}___|___\n | /{1}{3} /\n \\_/_{0}{2}_____/".format(slines, sspaces, exl, exs))
write("\n")
def check_out_defense(bool_dict):
if bool_dict['defaults'] or bool_dict['metrics'] or bool_dict['explode']:
no_defense = False
class LicenseError(Exception):
pass
if ap.CheckExtension("defense") == "Available":
write("\n*Checking out Defense Mapping Extension*\n")
ap.CheckOutExtension("defense")
else:
write("Defense Mapping license is unavailable")
no_defense = True
raise LicenseError
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Data Maintenance Tools Category #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
''''''''' Repair All NULL Geometry '''''''''
# Repairs all NULL geometries in each feature class
#### rewrite with intersect geometry method to remove duplicate vertices and kickbacks
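# A minimal sketch of the duplicate-vertex half of the note above, assuming
# vertex-level cleanup of line features is the goal; kickback detection would
# need additional bearing checks. The helper name _drop_duplicate_vertices and
# the zero tolerance are illustrative assumptions, not part of the original tool.
def _drop_duplicate_vertices(fc, tolerance=0.0):
    # Rebuild each polyline, skipping consecutive vertices closer than tolerance
    with ap.da.UpdateCursor(fc, ['SHAPE@']) as cursor:
        for row in cursor:
            shape = row[0]
            if shape is None:
                continue  # NULL geometry; left for Repair Geometry to handle
            rebuilt = ap.Array()
            for part in shape:
                cleaned = ap.Array()
                prev = None
                for pnt in part:
                    if pnt is None:
                        continue  # interior ring separators only occur in polygons
                    if prev is None or abs(pnt.X - prev.X) > tolerance or abs(pnt.Y - prev.Y) > tolerance:
                        cleaned.add(pnt)
                    prev = pnt
                rebuilt.add(cleaned)
            row[0] = ap.Polyline(rebuilt, shape.spatialReference)
            cursor.updateRow(row)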
def process_repair():
tool_name = 'Repair All NULL Geometry'
write("\n--- {0} ---\n".format(tool_name))
for fc in featureclass:
try:
write("Repairing NULL geometries in {0}".format(fc))
ap.RepairGeometry_management(fc, "DELETE_NULL")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
''''''''' Populate F_Codes '''''''''
# John Jackson's F_Code tool, refactored from a standalone script with the dictionaries included rather than imported
def process_fcode():
tool_name = 'Populate F_Codes'
write("\n--- {0} ---\n".format(tool_name))
for fc in featureclass:
try:
try:
fields = ['f_code', 'fcsubtype']
write("Updating {0} Feature F_Codes".format(fc))
with ap.da.UpdateCursor(fc, fields) as fcursor:
for row in fcursor: # Checks if F_Code matches the FCSubtype value. Updates F_Code if they don't match assuming proper subtype
if row[0] != str(sub2fcode_dict[row[1]]):
row[0] = str(sub2fcode_dict[row[1]])
fcursor.updateRow(row)
except:
write("{0} does not contain F_codes.".format(fc))
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
''''''''' Calculate Default Values '''''''''
# Calculate default values for NULL attributes
# All or nothing. Functions on datasets not individual feature classes
#### rewrite using domains and coded values thru cursors
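# A minimal sketch of the cursor-based rewrite flagged above, assuming each
# field's geodatabase default value is what should replace NULLs. The helper
# name _apply_field_defaults is an illustrative assumption, not the Defense
# Mapping API.
def _apply_field_defaults(fc):
    fields = [f for f in ap.ListFields(fc)
              if f.defaultValue is not None and f.type not in ('OID', 'Geometry', 'GlobalID')]
    if not fields:
        return
    names = [f.name for f in fields]
    defaults = [f.defaultValue for f in fields]
    with ap.da.UpdateCursor(fc, names) as cursor:
        for row in cursor:
            changed = False
            for i, value in enumerate(row):
                if value is None:  # only fill NULLs, never overwrite real values
                    row[i] = defaults[i]
                    changed = True
            if changed:
                cursor.updateRow(row)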
def process_defaults():
tool_name = 'Calculate Default Values'
write("\n--- {0} ---\n".format(tool_name))
write("Locating NULL fields")
try:
write("Assigning domain defaults from coded values...")
ap.CalculateDefaultValues_defense(ap.env.workspace)
write("Complete")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
''''''''' Calculate Metrics '''''''''
# Calculates the metric values of the specified fields
#### Defense mapping version takes too long and crashes. just rewrite with manual calculations
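# A minimal sketch of the manual fallback flagged above, assuming geodesic
# measures in meters are wanted and the LZN/ARA field names match the call
# below. Only length and area are shown; AOO and WID would need extra geometry
# work. The helper name is an illustrative assumption.
def _manual_metrics(fc):
    shape_type = ap.Describe(fc).shapeType
    if shape_type == 'Polyline':
        with ap.da.UpdateCursor(fc, ['SHAPE@', 'LZN']) as cursor:
            for row in cursor:
                if row[0] is not None:
                    row[1] = row[0].getLength('GEODESIC', 'METERS')
                    cursor.updateRow(row)
    elif shape_type == 'Polygon':
        with ap.da.UpdateCursor(fc, ['SHAPE@', 'ARA']) as cursor:
            for row in cursor:
                if row[0] is not None:
                    row[1] = row[0].getArea('GEODESIC', 'SQUAREMETERS')
                    cursor.updateRow(row)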
def process_metrics():
tool_name = 'Calculate Metrics'
write("\n--- {0} ---\n".format(tool_name))
metric_type = 'LENGTH;WIDTH;AREA;ANGLE_OF_ORIENTATION'
for fc in featureclass:
try:
write("Calculating AOO, ARA, LZN, and WID for {0}".format(fc))
ap.CalculateMetrics_defense(fc, metric_type, "LZN", "WID", "ARA", "#", "#", "#")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
''''''''' Update UFI Values ''''''''' ##### add functionality to only update blank fields
# Iterate through all features and update the ufi field with uuid4 random values
def process_ufi():
tool_name = 'Update UFI Values'
write("\n--- {0} ---\n".format(tool_name))
ufi_count = 0
# Explicit is better than implicit
# Lambda function works better than "if not fieldname:", which can falsely catch 0.
populated = lambda x: x is not None and str(x).strip() != '' # Function that returns boolean of if input field is populated or empty
for fc in featureclass:
try:
with ap.da.SearchCursor(fc, 'ufi') as scursor:
values = [row[0] for row in scursor]
with ap.da.UpdateCursor(fc, 'ufi') as ucursor:
for row in ucursor:
if not populated(row[0]):
row[0] = str(uuid.uuid4())
ufi_count += 1
elif values.count(row[0]) > 1:
row[0] = str(uuid.uuid4())
ufi_count += 1
ucursor.updateRow(row)
write("Updated UFIs in {0}".format(fc))
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Feature Specific Tools Category #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
''''''''' Integrate and Repair '''''''''
# User choice to Integrate and Repair Hydrography curves, TransportationGround curves, or Utility points and surfaces to curves
def process_hydro():
tool_name = 'Hydrography Curves'
fc1 = 'HydrographyCrv'
fc2 = 'HydrographySrf'
if not ap.Exists(fc1):
write("**HydrographyCrv feature class not found\n To run Integrate, copy an empty Hydro curve feature class from a blank schema into this dataset and run the tool again.")
return 0
if not ap.Exists(fc2):
write("**HydrographySrf feature class not found\n To run Integrate, copy an empty Hydro surface feature class from a blank schema into this dataset and run the tool again.")
return 0
write("- - - - - - - - - - - - - - - - - - - - - - ")
write(" ~ {0} ~ ".format(tool_name))
write("Making {0} and {1} feature layers".format(fc1, fc2))
ap.MakeFeatureLayer_management(fc1, "hc")
ap.MakeFeatureLayer_management(fc2, "hs")
ap.SelectLayerByAttribute_management("hc", "NEW_SELECTION", "zi026_ctuu >= 50000")
ap.SelectLayerByAttribute_management("hs", "NEW_SELECTION", "zi026_ctuu >= 50000")
ap.MakeFeatureLayer_management("hc", "hc_scale")
srf_count = int(ap.GetCount_management("hs").getOutput(0))
if srf_count > 0:
ap.MakeFeatureLayer_management("hs", "hs_scale")
write("Repairing {0} lines before Integration".format(fc1))
ap.RepairGeometry_management("hc_scale", "DELETE_NULL")
hfeat_count = 0
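# Integrate ranks ("hc_scale 1;hs_scale 2"): the lower-ranked layer is treated
# as more accurate, so curve vertices move less and surfaces are pulled toward
# them. Running at 0.06 m then 0.03 m tightens the snapping in two passes.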
if not large:
try:
feat_count = int(ap.GetCount_management("hc_scale").getOutput(0))
write("Integrating {0} {1} features and \n {2} {3} features...".format(feat_count, fc1, srf_count, fc2))
if srf_count > 0:
ap.Integrate_management("hc_scale 1;hs_scale 2", "0.06 Meters")
ap.Integrate_management("hc_scale 1;hs_scale 2", "0.03 Meters")
else:
ap.Integrate_management('hc_scale', "0.06 Meters")
ap.Integrate_management('hc_scale', "0.03 Meters")
hfeat_count = feat_count + srf_count
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but make sure the 'Process Large Feature Class' option is checked under {0}.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
if large:
try:
#Create Fishnet
write("Processing large feature class. Partitioning data in chunks to process.")
mem_fc = "in_memory\\{0}_grid".format(fc1)
rectangle = "in_memory\\rectangle"
write("Defining partition envelope")
ap.MinimumBoundingGeometry_management(fc1, rectangle, "RECTANGLE_BY_AREA", "ALL", "", "")
with ap.da.SearchCursor(rectangle, ['SHAPE@']) as scursor:
for row in scursor:
shape = row[0]
origin_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMin)
y_axis_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMax)
corner_coord = '{0} {1}'.format(shape.extent.XMax, shape.extent.YMax)
write("Constructing fishnet")
ap.CreateFishnet_management(mem_fc, origin_coord, y_axis_coord, "", "", "2", "2", corner_coord, "NO_LABELS", fc1, "POLYGON")
#ap.CreateFishnet_management(out_feature_class="in_memory/hydro_grid", origin_coord="30 19.9999999997", y_axis_coord="30 29.9999999997", cell_width="", cell_height="", number_rows="2", number_columns="2", corner_coord="36.00000000003 24", labels="NO_LABELS", template="C:/Projects/njcagle/finishing/=========Leidos_247=========/J05B/TDSv7_1_J05B_JANUS_DO247_sub1_pre.gdb/TDS/HydrographyCrv", geometry_type="POLYGON")
ap.MakeFeatureLayer_management(mem_fc, "hgrid")
with ap.da.SearchCursor("hgrid", ['OID@']) as scursor:
for row in scursor:
select = "OID = {}".format(row[0])
ap.SelectLayerByAttribute_management("hgrid", "NEW_SELECTION", select)
if srf_count > 0:
ap.SelectLayerByLocation_management("hs_scale", "INTERSECT", "hgrid","","NEW_SELECTION")
ssrf_count = int(ap.GetCount_management("hs_scale").getOutput(0))
else:
ssrf_count = 0
ap.SelectLayerByLocation_management("hc_scale", "INTERSECT", "hgrid","","NEW_SELECTION")
feat_count = int(ap.GetCount_management("hc_scale").getOutput(0))
write("Integrating {0} {1} features and\n {2} {3} features in partition {4}...".format(feat_count, fc1, ssrf_count, fc2, row[0]))
hfeat_count = hfeat_count + feat_count + ssrf_count
if ssrf_count > 0:
ap.Integrate_management("hc_scale 1;hs_scale 2", "0.06 Meters")
ap.Integrate_management("hc_scale 1;hs_scale 2", "0.03 Meters")
elif feat_count > 0:
ap.Integrate_management('hc_scale', "0.06 Meters")
ap.Integrate_management('hc_scale', "0.03 Meters")
else:
continue
write("Freeing partition memory")
ap.Delete_management("in_memory")
ap.Delete_management("hgrid")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***".format(tool_name))
write(ap.GetMessages())
write("\nData too dense to be run in partitions. Integrating {0} in this database exceeds our current equipment limitations.".format(fc1))
write("To continue running tool, uncheck {0} before running again.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
write("Repairing {0} and {1} features after Integration".format(fc1, fc2))
ap.RepairGeometry_management("hc_scale", "DELETE_NULL")
ap.RepairGeometry_management("hs_scale", "DELETE_NULL")
write("Clearing process cache")
ap.Delete_management("hc")
ap.Delete_management("hc_scale")
ap.Delete_management("hs")
ap.Delete_management("hs_scale")
if trans or util:
write("- - - - - - - - - - - - - - - - - - - - - -\n")
else:
write("- - - - - - - - - - - - - - - - - - - - - -")
def process_trans():
tool_name = 'Transportation Points and Curves'
fc1 = 'TransportationGroundPnt'
fc2 = 'TransportationGroundCrv'
if not ap.Exists(fc1):
fc1 = fc2
if not ap.Exists(fc2):
write("**TransportationGroundCrv feature class not found\n To run Integrate, copy an empty Trans curve feature class from a blank schema into this dataset and run the tool again.")
return 0
write("- - - - - - - - - - - - - - - - - - - - - - ")
write(" ~ {0} ~ ".format(tool_name))
write("Making {0} and {1} feature layers".format(fc1, fc2))
ap.MakeFeatureLayer_management(fc1, "tgp")
ap.MakeFeatureLayer_management(fc2, "tgc")
ap.SelectLayerByAttribute_management("tgp", "NEW_SELECTION", "f_code = 'AQ065' AND zi026_ctuu >= 50000")
cul_count = int(ap.GetCount_management("tgp").getOutput(0))
ap.SelectLayerByAttribute_management("tgc", "NEW_SELECTION", "zi026_ctuu >= 50000")
if cul_count > 0:
ap.MakeFeatureLayer_management("tgp", "tgp_scale")
ap.MakeFeatureLayer_management("tgc", "tgc_scale")
write("Repairing {0} lines before Integration".format(fc2))
ap.RepairGeometry_management("tgc_scale", "DELETE_NULL")
tfeat_count = 0
if not large:
try:
feat_count = int(ap.GetCount_management("tgc_scale").getOutput(0))
write("Integrating {0} {1} features and\n {2} Culvert points...".format(feat_count, fc2, cul_count))
if cul_count > 0:
ap.Integrate_management("tgp_scale 2;tgc_scale 1", "0.06 Meters")
ap.Integrate_management("tgp_scale 2;tgc_scale 1", "0.03 Meters")
else:
ap.Integrate_management("tgc_scale", "0.06 Meters")
ap.Integrate_management("tgc_scale", "0.03 Meters")
tfeat_count = feat_count + cul_count
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but make sure the 'Process Large Feature Class' option is checked under {0}.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
if large:
try:
#Create Fishnet
write("Processing large feature class. Partitioning data in chunks to process.")
mem_fc = "in_memory\\{0}_grid".format(fc2)
rectangle = "in_memory\\rectangle"
write("Defining partition envelope")
ap.MinimumBoundingGeometry_management(fc2, rectangle, "RECTANGLE_BY_AREA", "ALL", "", "")
with ap.da.SearchCursor(rectangle, ['SHAPE@']) as scursor:
for row in scursor:
shape = row[0]
origin_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMin)
y_axis_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMax)
corner_coord = '{0} {1}'.format(shape.extent.XMax, shape.extent.YMax)
write("Constructing fishnet")
ap.CreateFishnet_management(mem_fc, origin_coord, y_axis_coord, "", "", "2", "2", corner_coord, "NO_LABELS", fc2, "POLYGON")
#ap.CreateFishnet_management(out_feature_class="in_memory/hydro_grid", origin_coord="30 19.9999999997", y_axis_coord="30 29.9999999997", cell_width="", cell_height="", number_rows="2", number_columns="2", corner_coord="36.00000000003 24", labels="NO_LABELS", template="C:/Projects/njcagle/finishing/=========Leidos_247=========/J05B/TDSv7_1_J05B_JANUS_DO247_sub1_pre.gdb/TDS/HydrographyCrv", geometry_type="POLYGON")
ap.MakeFeatureLayer_management(mem_fc, "tgrid")
with ap.da.SearchCursor("tgrid", ['OID@']) as scursor:
for row in scursor:
select = "OID = {}".format(row[0])
ap.SelectLayerByAttribute_management("tgrid", "NEW_SELECTION", select)
if cul_count > 0:
ap.SelectLayerByLocation_management("tgp_scale", "INTERSECT", "tgrid","","NEW_SELECTION")
pcul_count = int(ap.GetCount_management("tgp_scale").getOutput(0))
else:
pcul_count = 0
ap.SelectLayerByLocation_management("tgc_scale", "INTERSECT", "tgrid","","NEW_SELECTION")
feat_count = int(ap.GetCount_management("tgc_scale").getOutput(0))
write("Integrating {0} {1} features and\n {2} Culvert points in partition {3}...".format(feat_count, fc2, pcul_count, row[0]))
tfeat_count = tfeat_count + feat_count + pcul_count
if pcul_count > 0:
ap.Integrate_management("tgp_scale 2;tgc_scale 1", "0.06 Meters")
ap.Integrate_management("tgp_scale 2;tgc_scale 1", "0.03 Meters")
elif feat_count > 0:
ap.Integrate_management("tgc_scale", "0.06 Meters")
ap.Integrate_management("tgc_scale", "0.03 Meters")
else:
continue
write("Freeing partition memory")
ap.Delete_management("in_memory")
ap.Delete_management("tgrid")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***".format(tool_name))
write(ap.GetMessages())
write("\nData too dense to be run in partitions. Integrating {0} in this database exceeds our current equipment limitations.".format(fc2))
write("To continue running tool, uncheck {0} before running again.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
write("Repairing {0} lines after Integration".format(fc2))
ap.RepairGeometry_management("tgc_scale", "DELETE_NULL")
write("Clearing process cache")
ap.Delete_management("tgp")
ap.Delete_management("tgc")
ap.Delete_management("tgp_scale")
ap.Delete_management("tgc_scale")
if util:
write("- - - - - - - - - - - - - - - - - - - - - -\n")
else:
write("- - - - - - - - - - - - - - - - - - - - - -")
def process_util():
tool_name = 'Utility Points, Lines, and Surfaces'
fc1 = 'UtilityInfrastructurePnt'
fc2 = 'UtilityInfrastructureCrv'
fc3 = 'UtilityInfrastructureSrf'
if not ap.Exists(fc1):
write("**UtilityInfrastructurePnt feature class not found\n To run Integrate, copy an empty Utility point feature class from a blank schema into this dataset and run the tool again.")
return 0
if not ap.Exists(fc2):
write("**UtilityInfrastructureCrv feature class not found\n To run Integrate, copy an empty Utility curve feature class from a blank schema into this dataset and run the tool again.")
return 0
if not ap.Exists(fc3):
write("**UtilityInfrastructureSrf feature class not found\n To run Integrate, copy an empty Utility surface feature class from a blank schema into this dataset and run the tool again.")
return 0
write("- - - - - - - - - - - - - - - - - - - - - - ")
write(" ~ {0} ~ ".format(tool_name))
write("Making {0}, {1}, and {2} feature layers".format(fc1, fc2, fc3))
ap.MakeFeatureLayer_management(fc1, "up")
ap.MakeFeatureLayer_management(fc2, "uc")
ap.MakeFeatureLayer_management(fc3, "us")
ap.SelectLayerByAttribute_management("up", "NEW_SELECTION", "zi026_ctuu >= 50000")
ap.SelectLayerByAttribute_management("uc", "NEW_SELECTION", "zi026_ctuu >= 50000")
ap.SelectLayerByAttribute_management("us", "NEW_SELECTION", "zi026_ctuu >= 50000")
ap.MakeFeatureLayer_management("up", "up_scale")
ap.MakeFeatureLayer_management("uc", "uc_scale")
ap.MakeFeatureLayer_management("us", "us_scale")
write("Repairing {0} lines and {1} polygons before Integration".format(fc2, fc3))
ap.RepairGeometry_management("uc_scale", "DELETE_NULL")
ap.RepairGeometry_management("us_scale", "DELETE_NULL")
ufeat_count = 0
if not large:
try:
feat_count1 = int(ap.GetCount_management("up_scale").getOutput(0))
feat_count2 = int(ap.GetCount_management("uc_scale").getOutput(0))
feat_count3 = int(ap.GetCount_management("us_scale").getOutput(0))
write("Integrating {0} {1} features,\n {2} {3} features, and\n {4} {5} features...".format(feat_count1, fc1, feat_count2, fc2, feat_count3, fc3))
ap.Integrate_management("up_scale 2;uc_scale 1;us_scale 3", "0.06 Meters")
ap.Integrate_management("up_scale 2;uc_scale 1;us_scale 3", "0.03 Meters")
ufeat_count = feat_count1 + feat_count2 + feat_count3
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but make sure the 'Process Large Feature Class' option is checked under {0}.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
if large:
try:
#Create Fishnet
write("Processing large feature class. Partitioning data in chunks to process.")
mem_fc = "in_memory\\{0}_grid".format(fc2)
rectangle = "in_memory\\rectangle"
write("Defining partition envelope")
ap.MinimumBoundingGeometry_management(fc2, rectangle, "RECTANGLE_BY_AREA", "ALL", "", "")
with ap.da.SearchCursor(rectangle, ['SHAPE@']) as scursor:
for row in scursor:
shape = row[0]
origin_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMin)
y_axis_coord = '{0} {1}'.format(shape.extent.XMin, shape.extent.YMax)
corner_coord = '{0} {1}'.format(shape.extent.XMax, shape.extent.YMax)
write("Constructing fishnet")
ap.CreateFishnet_management(mem_fc, origin_coord, y_axis_coord, "", "", "2", "2", corner_coord, "NO_LABELS", fc2, "POLYGON")
#ap.CreateFishnet_management(out_feature_class="in_memory/hydro_grid", origin_coord="30 19.9999999997", y_axis_coord="30 29.9999999997", cell_width="", cell_height="", number_rows="2", number_columns="2", corner_coord="36.00000000003 24", labels="NO_LABELS", template="C:/Projects/njcagle/finishing/=========Leidos_247=========/J05B/TDSv7_1_J05B_JANUS_DO247_sub1_pre.gdb/TDS/HydrographyCrv", geometry_type="POLYGON")
ap.MakeFeatureLayer_management(mem_fc, "ugrid")
with ap.da.SearchCursor("ugrid", ['OID@']) as scursor:
##### Add check for any 0 count features selected in each loop that might default to all features instead of 0 in current partition
for row in scursor:
select = "OID = {}".format(row[0])
ap.SelectLayerByAttribute_management("ugrid", "NEW_SELECTION", select)
ap.SelectLayerByLocation_management("up_scale", "INTERSECT", "ugrid", "", "NEW_SELECTION")
ap.SelectLayerByLocation_management("uc_scale", "INTERSECT", "ugrid", "", "NEW_SELECTION")
ap.SelectLayerByLocation_management("us_scale", "INTERSECT", "ugrid", "", "NEW_SELECTION")
feat_count1 = int(ap.GetCount_management("up_scale").getOutput(0))
feat_count2 = int(ap.GetCount_management("uc_scale").getOutput(0))
feat_count3 = int(ap.GetCount_management("us_scale").getOutput(0))
ufeat_count = ufeat_count + feat_count1 + feat_count2 + feat_count3
write("Integrating {0} {1} features,\n {2} {3} features, and\n {4} {5} features in partition {6}...".format(feat_count1, fc1, feat_count2, fc2, feat_count3, fc3, row[0]))
ap.Integrate_management("up_scale 2;uc_scale 1;us_scale 3", "0.06 Meters")
ap.Integrate_management("up_scale 2;uc_scale 1;us_scale 3", "0.03 Meters")
write("Freeing partition memory")
ap.Delete_management("in_memory")
ap.Delete_management("ugrid")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***".format(tool_name))
write(ap.GetMessages())
write("\nData too dense to be run in partitions. Integrating Utilities in this database exceeds our current equipment limitations.")
write("To continue running tool, uncheck {0} before running again.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
write("Repairing {0} lines and {1} polygons after Integration".format(fc2, fc3))
ap.RepairGeometry_management("uc_scale", "DELETE_NULL")
ap.RepairGeometry_management("us_scale", "DELETE_NULL")
write("Clearing process cache")
ap.Delete_management("up")
ap.Delete_management("uc")
ap.Delete_management("us")
ap.Delete_management("up_scale")
ap.Delete_management("uc_scale")
ap.Delete_management("us_scale")
write("- - - - - - - - - - - - - - - - - - - - - -")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Geometry Correction Tools Category #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
''''''''' Delete Identical Features '''''''''
# Checks for features with identical geometry and PSG attribution and removes them
#### Test rewritten find identical code and replace existing
def process_dups():
tool_name = 'Delete Identical Features'
write("\n--- {0} ---\n".format(tool_name))
# Set the output directory for the FindIdentical tool
out_table = os.path.dirname(ap.env.workspace)
# Precreate the path for the output dBASE table
path = out_table.split(".")
path.pop()
table_loc = path[0] + str(".dbf")
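# e.g. a workspace of r"C:\proj\Data.gdb\TDS" makes out_table r"C:\proj\Data.gdb"
# and table_loc r"C:\proj\Data.dbf" beside the geodatabase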
write("Creating temporary output file: {0}".format(table_loc))
dup_count = 0
# ##### check Shape vs shape@ and add xy-tolerance to find and delete identical
# #search cursor with shape@ and oid@ check each shape against the others. if they match, store the oid in list.
# #new cursor. check matching shapes. if the other fields match, delete the one with the higher oid value
# for fc in featureclass:
# try:
# prev_check = []
# dup_oids = []
# lap_fields = ['SHAPE@XY', 'OID@']
#
# with ap.da.SearchCursor(fc, lap_fields) as scursor:
# with ap.da.SearchCursor(fc, lap_fields) as tcursor:
# for row in scursor:
# icursor.insertRow(row)
# atuple = ptGeometry.angleAndDistanceTo(ptGeometry2, "GEODESIC")
# atuple == (angle in degrees, distance in meters)
# Loop feature classes and FindIdentical to get a count, then delete any found
# ARA and other metric fields included
for fc in featureclass:
try:
id_fields = fc_fields_og[fc]
ap.FindIdentical_management(fc, out_table, id_fields, "", "", output_record_option="ONLY_DUPLICATES")
rows = int(ap.management.GetCount(table_loc).getOutput(0))
write("Found " + str(rows) + " duplicate " + str(fc) + " features.")
if rows > 0:
ap.DeleteIdentical_management(fc, fc_fields_og[fc])
write("Deleted " + str(rows) + " duplicate " + str(fc) + " features.")
dup_count += rows
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
os.remove(table_loc)
os.remove(table_loc + str(".xml"))
os.remove(path[0] + str(".cpg"))
os.remove(path[0] + str(".IN_FID.atx"))
ap.RefreshCatalog(out_table)
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
# Clean up before next process
os.remove(table_loc)
os.remove(table_loc + str(".xml"))
os.remove(path[0] + str(".cpg"))
os.remove(path[0] + str(".IN_FID.atx"))
ap.RefreshCatalog(out_table)
''''''''' Hypernova Burst Multipart Features '''''''''
# Explodes multipart features for an entire dataset
def process_explode():
tool_name = 'Hypernova Burst Multipart Features'
write("\n--- {0} ---\n".format(tool_name))
##### Multipart Search #####
fc_multi = {} # Create empty dictionary to house lists of multipart features and their feature classes
fc_multi_list = []
total_multi = 0
total_complex = 0
for fc in featureclass:
try:
write("Searching for multiparts in {0}".format(fc))
multipart = False # Assume the feature class doesn't have multiparts
with ap.da.SearchCursor(fc, ['OID@', 'SHAPE@']) as scursor:
complex = 0 # Counts complex single part features
for row in scursor: # For each feature in the fc
shape = row[1] # Get SHAPE@ token to extract properties
if shape is None: # Checks for NULL geometries
write(" *** Found a feature with NULL geometry. Be sure Repair Geometry has been run. *** ")
continue
elif shape.isMultipart is True: # Does the feature have the isMultipart flag
shape_type = str(shape.type) # Gets the geometry type of the feature
if shape_type == 'polygon': # If the feature is a polygon, it may be a complex single part feature with interior rings
if shape.partCount > 1: # If the number of geometric parts is more than one, then it is a true multipart feature
if multipart is False: # And if that multipart feature is the first in the fc
fc_multi[fc] = [row[0]] # Create a dictionary key of the feature class with a value of the first multipart oid in a list
multipart = True # Mark the current fc as having multipart features and that the initial feature dictionary has been created
elif multipart is True: # If a multipart feature has already been found and the initial dictionary key is set up
fc_multi[fc].append(row[0]) # Append the new multipart feature oid to the value list for the current feature class key in the dictionary
continue # Moves on to the next feature row in the search loop
else: # If the part count is not greater than 1, then it is a complex single part feature with interior rings
complex += 1
continue # Moves on to the next feature row in the search loop
else: # Non-polygon geometries have no interior rings, so any isMultipart flag means a true multipart. Simply proceed as normal
if multipart is False: # And if that multipart feature is the first in the fc
fc_multi[fc] = [row[0]] # Create a dictionary key of the feature class with a value of the first multipart oid in a list
multipart = True # Mark the current fc as having multipart features and that the initial feature dictionary has been created
elif multipart is True: # If a multipart feature has already been found and the initial dictionary key is set up
fc_multi[fc].append(row[0]) # Append the new multipart feature oid to the value list for the current feature class key in the dictionary
if complex > 0:
total_complex += complex
write("{0} complex polygons found in {1}".format(complex, fc))
if multipart is True:
count = len(fc_multi[fc])
write("*** " + str(count) + " true multipart features found in " + str(fc) + " ***")
else:
write("No multiparts found")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
if multipart is True:
fc_multi_list.append(fc) # Creates iterable list of feature classes that have multipart features
write(" ")
if total_complex > 0:
write("The {0} complex polygons found are single part polygons with complex interior holes that are more likely to become multipart features.".format(total_complex))
write(" ")
if fc_multi_list: # Only runs if fc_multi_list is not empty
for fc in fc_multi_list:
count = len(fc_multi[fc])
total_multi += count
write("{0} multipart features found in {1}".format(count, fc))
write(" OIDs - {0}".format(fc_multi[fc]))
write(" ")
##### Isolate, Explode, Replace #####
in_class = "multi"
out_class = "single"
for fc in fc_multi_list:
try:
# Sanitize the feature class name, since SDE feature classes are prefixed with database and owner qualifiers
fc_parts = fc.split(".")
fcr = fc_parts[-1]
if fcr not in fc_fields:
write("Error: Unknown Feature Class name found. If running on SDE, the aliasing may have changed. Contact SDE Admin.")
continue
# Variables
oid_list = fc_multi[fc]
og_oid = "oidid"
fc_geom = ap.Describe(fc).shapeType
oid_field = ap.Describe(fc).OIDFieldName # Get the OID field name. Not necessary for every loop, but simple enough to just put here.
# Adds a field to the current fc that stores the original OID for identification after exploding.
ap.AddField_management(fc, og_oid, "double")
with ap.da.UpdateCursor(fc, [oid_field, og_oid]) as ucursor:
for row in ucursor:
if row[0] in oid_list:
row[1] = row[0]
ucursor.updateRow(row)
#ap.CalculateField_management(fc, og_oid, "!" + oid_field + "!", "PYTHON")
fieldnames = list(fc_fields[fcr]) # Copy the field list so the shared fc_fields entry isn't mutated by the inserts below
fieldnames.insert(0, og_oid)
fieldnames.insert(0, oid_field)
oid_list_str = str(fc_multi[fc]) # Convert the list to a string and remove the []
oid_list_str = oid_list_str[1:-1]
query = "{0} in ({1})".format(oid_field, oid_list_str) # Formats the query from the above variables as: OBJECTID in (1, 2, 3)
# Create a new feature class to put the multipart features in to decrease processing time. fields based on original fc template
ap.CreateFeatureclass_management(ap.env.workspace, in_class, fc_geom, fc, "", "", ap.env.workspace)
# Add multipart features to new feature class based on OID
with ap.da.SearchCursor(fc, fieldnames, query) as scursor: # Search current fc using fc_fields with OID@ and "oidid" prepended as [0,1] respectively. Queries for only OIDs in the multipart oid_list.
with ap.da.InsertCursor(in_class, fieldnames) as icursor: # Insert cursor for the newly created feature class with the same fields as scursor
for row in scursor: # For each feature in the current fc
if row[0] in oid_list: # If the OID is in the oid_list of multipart features. Redundant since the scursor is already queried for multipart OIDs, but kept as a safeguard
icursor.insertRow(row) # Insert that feature row into the temp feature class, in_class "multi"
write("{0} multipart progenitor cores collapsing.".format(fcr))
before_process = dt.now().time()
ap.MultipartToSinglepart_management(in_class, out_class) # New feature class output of just the converted single parts
after_process = dt.now().time()
date = dt.now().date()
datetime1 = dt.combine(date, after_process)
datetime2 = dt.combine(date, before_process)
time_delta = datetime1 - datetime2
time_elapsed = str(time_delta.total_seconds())
write("Hypernova burst detected after {0} seconds.".format(time_elapsed))
write("Removing original multipart features.")
# Deletes features in fc that have OIDs flagged as multiparts
with ap.da.UpdateCursor(fc, oid_field) as ucursor:
for row in ucursor:
if row[0] in oid_list:
ucursor.deleteRow()
write("Replacing with singlepart features.")
# Create search and insert cursor to insert new rows in fc from MultipartToSinglepart output out_class
with ap.da.SearchCursor(out_class, fieldnames) as scursor:
with ap.da.InsertCursor(fc, fieldnames) as icursor:
for row in scursor:
icursor.insertRow(row)
write("Populating NULL fields with defaults and updating UFIs for the new single part features.")
query2 = "{0} IS NOT NULL".format(og_oid)
ap.MakeFeatureLayer_management(fc, "curr_fc", query2)
ap.CalculateDefaultValues_defense("curr_fc")
write("NULL fields populated with default values")
with ap.da.UpdateCursor(fc, 'ufi', query2) as ucursor:
for row in ucursor:
row[0] = str(uuid.uuid4())
ucursor.updateRow(row)
ap.DeleteField_management(fc, og_oid)
write("UFI values updated")
write(" ")
except ap.ExecuteError:
# if the code failed for the current fc, check the error
error_count += 1
write("\n***Failed to run {0}.***\n".format(tool_name))
write("Error Report:")
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write(ap.GetMessages())
write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
write("\nPlease rerun the tool, but uncheck the {0} tool option. Either the feature class is too big or something else has gone wrong. Large data handling for tools other than Integration will be coming in a future update.".format(tool_name))
write("Exiting tool.\n")
sys.exit(0)
if fc_multi_list:
write("All multipart feature have acheived supernova!")
try:
ap.Delete_management(str(ap.env.workspace) + str("\\" + str(in_class)))
ap.Delete_management(str(ap.env.workspace) + str("\\" + str(out_class)))
ap.Delete_management("curr_fc")
except:
write("No in_class or out_class created. Or processing layers have already been cleaned up. Continuing...")
pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# Preprocessing Tools Category #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
''''''''' Default Bridge WID Updater '''''''''
# Checks for bridges with default WID (-999999) and updates them to match the underlying road or rail WID
while bridge:
bridge_err = False
no_def_bridge = False
bridge_count = 0
total_rem_b = 0
tool_name = 'Default Bridge WID Updater'
write("\n--- {0} ---\n".format(tool_name))
if not ap.Exists('TransportationGroundCrv'):
write("TransportationGroundCrv feature class missing./nCannot run Default Bridge WID Updater.")
bridge_err = True
break
break
while bridge: # Needs updating from management geoprocessing to cursors
if bridge_err:
break
# Pull width and geometry fields for bridges
fieldsB = ['WID', 'SHAPE@']
# Pull width and geometry fields for roads
fieldsR = ['ZI016_WD1', 'SHAPE@']
# Pull width and geometry fields for rails and sidetracks
fieldsRR = ['ZI017_GAW', 'SHAPE@']
# Convert the feature classes from the TDS into usable layers
write("Making feature layers...")
ap.MakeFeatureLayer_management("TransportationGroundCrv", "bridge_crv_lyr")
ap.MakeFeatureLayer_management("TransportationGroundCrv", "road_crv_lyr")
ap.MakeFeatureLayer_management("TransportationGroundCrv", "rail_crv_lyr")
write("Successfully made the feature layers!")
# Select road bridges with default (-999999) width
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "NEW_SELECTION", "F_CODE IN ('AQ040', 'AQ130')")
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "SUBSET_SELECTION", "WID = -999999 AND TRS = 13")
# Make road bridges with default (-999999) width into layer
ap.MakeFeatureLayer_management("bridge_crv_lyr", "fc_bridgeR")
# Select rail bridges with default (-999999) width
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "NEW_SELECTION", "F_CODE IN ('AQ040', 'AQ130')")
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "SUBSET_SELECTION", "WID = -999999 AND TRS = 12")
# Make rail bridges with default (-999999) width into layer
ap.MakeFeatureLayer_management("bridge_crv_lyr", "fc_bridgeRR")
# Select roads that share curve with the default width bridges above
ap.SelectLayerByAttribute_management("road_crv_lyr", "NEW_SELECTION", "F_CODE = 'AP030'")
ap.SelectLayerByLocation_management("road_crv_lyr", "SHARE_A_LINE_SEGMENT_WITH", "fc_bridgeR", "", "SUBSET_SELECTION")
# Make roads that share curve with default width bridges into layer
ap.MakeFeatureLayer_management("road_crv_lyr", "fc_road")
# Select rails that share curve with the default width bridges above
ap.SelectLayerByAttribute_management("rail_crv_lyr", "NEW_SELECTION", "F_CODE IN ('AN010', 'AN050')")
ap.SelectLayerByLocation_management("rail_crv_lyr", "SHARE_A_LINE_SEGMENT_WITH", "fc_bridgeRR", "", "SUBSET_SELECTION")
# Make rails that share curve with default width bridges into layer
ap.MakeFeatureLayer_management("rail_crv_lyr", "fc_rail")
# Gets a count of selected bridges, roads, and rails
fc_bridgeR_total = int(ap.management.GetCount("fc_bridgeR").getOutput(0))
fc_bridgeRR_total = int(ap.management.GetCount("fc_bridgeRR").getOutput(0))
total_bridges = fc_bridgeR_total + fc_bridgeRR_total
total_roads = int(ap.management.GetCount("fc_road").getOutput(0))
total_rails = int(ap.management.GetCount("fc_rail").getOutput(0))
# Error handling. If 0 bridges selected the script hangs.
if total_bridges == 0:
write("No default bridges found.")
no_def_bridge = True
break
# Error handling. If no roads or rails to select against, likely something will break.
if total_roads == 0 and total_rails == 0:
write("{0} default WID bridges found.".format(total_bridges))
write("No underlying roads or rails for default bridges. \n The default bridges are either not snapped or missing their underlying road or rail. \n Make sure the bridges have the correct TRS.")
bridge_err = True
break
# Announces the total default bridges found.
write("{0} default WID bridges found.".format(total_bridges))
# Start an edit session. Must provide the workspace.
edit = ap.da.Editor(ap.env.workspace)
# Edit session is started without an undo/redo stack for versioned data
edit.startEditing(False, True) # For second argument, use False for unversioned data
countR = 0
if fc_bridgeR_total > 0:
edit.startOperation() # Start an edit operation for road bridges
# Loop to update bridge width to its corresponding road width
with ap.da.UpdateCursor("fc_bridgeR", fieldsB) as bridgeR: # UpdateCursor for bridges with width and geometry
for i in bridgeR:
with ap.da.SearchCursor("fc_road", fieldsR) as road: # SearchCursor for roads with width and geometry
for j in road:
if i[1].within(j[1]): # Check if bridge shares curve with road (if not working, test contains/within)
if i[0] < j[0]:
i[0] = int(j[0]*1.5) # Sets current bridge width to road width * [factor]
bridgeR.updateRow(i)
countR += 1
edit.stopOperation() # Stop the edit operation
write("{0} bridges on roads updated.".format(countR))
countRR = 0
if fc_bridgeRR_total > 0:
edit.startOperation() # Start an edit operation for rail bridges
# Loop to update bridge width to its corresponding rail width
with ap.da.UpdateCursor("fc_bridgeRR", fieldsB) as bridgeRR: # UpdateCursor for bridges with width and geometry
for i in bridgeRR:
with ap.da.SearchCursor("fc_rail", fieldsRR) as rail: # SearchCursor for rails with width and geometry
for j in rail:
if i[1].within(j[1]): # Check if bridge shares curve with rail (if not working, test contains/within)
if i[0] < j[0]:
i[0] = int(j[0])+1 # Sets current bridge width to integer rounded rail gauge width + [value]
bridgeRR.updateRow(i)
countRR += 1
edit.stopOperation() # Stop the edit operation
write("{0} bridges on railroads updated.".format(countRR))
# Stop the edit session and save the changes
try:
edit.stopEditing(True)
except:
write("First attempt to save failed. Checking for updated SDE version. Trying again in 5 seconds. Please hold...")
time.sleep(5)
edit.stopEditing(True)
# Select any remaining bridges with default (-999999) width
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "NEW_SELECTION", "F_CODE = 'AQ040'")
ap.SelectLayerByAttribute_management("bridge_crv_lyr", "SUBSET_SELECTION", "WID = -999999")
# Make these selections into a new layer and get a count