#-------------------------------------------------------------------------------
# Name: NASISpedons_Create_Pedon_Metadata_Tables
#
# Author: Adolfo.Diaz
# Created: 3/02/2022
# This script will recreate the NASIS Pedon FGDB schema after a new NASIS database
# model has been made, using the following NASIS URL
# Report: Web-NREPO-Style-Metadata-Pedon-Main
#
# 1. Four Tables will be recreated within the Metadata_Tables.gdb:
# A. Pedon Metadata Domain
# B. PedonMetadataRelationships
# C. PedonMetadataTableColDesc
# D. PedonMetadataTableUniqueConstraints
#
# 2. Do NOT delete the tables above. The script will automatically delete ALL
# rows after data from the Web-NREPO-Style-Metadata-Pedon-Main report has been
# organized into a dictionary.
#
# 3. The report will return Table-Field-Records for the 4 tables above. However,
# there is no metadata to recreate these 4 individual tables in the Metadata_Tables.gdb,
# so if you delete them you will have to ask Jason to generate a report
# for each individual table in HTML format in order to copy it into Excel and export
# it to a FGDB to get the default datatypes.
#
# 4. The report has 3 parameters:
# A. nasisDBmodel: 'NASIS 7.4.1'
# B. metadataName = r'NASIS 7.4.1'
# C. domainName = r'Current NASIS/SSURGO Domains'
#
# 5. Before you rerun this script, make sure to run the report in a browser using
# the updated parameters, such as the new database model version.
#
# 6. After the 4 pedon tables have been updated, a new FGDB will be created in the script's
# location named with the new nasisDBmodel.
#
# 7. Individual tables will be created using the table and column information from the
# PedonMetadataTableColDesc table. Tables that start with dom* or nasis* will be ignored
# i.e. domaindetail, domaingroup, domainhist, domainmaster, nasisgroup, nasisgroupmember, nasissite
#
# Workarounds:
# a. Fields that have a domain associated with them and have a field type of 'Choice'
# are automatically converted to TEXT and the 'coldisplaysz' field is used as the
# Text length. Currently, the metadata describes these fields as 'Integers'
# instead of Text.
#
# b. Fields that have an 'aggregation' field value of 1 actually represent 2 individual
# fields (_low, _high). The same field parameters are used on both fields.
# sitebedrock table -> bedrock_dip field
#
# c. Fields that have an 'aggregation' field value of 2 actually represent 3 individual
# fields (_low, _RV, _high). The same field parameters are used on all 3 fields.
#
# d. ['labdatadescflag','pedonhydricrating','stratextsflag'] fields from the ['pedon','ncsslayerlabdata'] tables
# need to be a TEXT of length 5 to capture yes or no.
# The metadata has these fields described as integers.
#
# e. Added 3 fields to the siteaoverlap table to mimic the NASIS client:
# 'areatypename','areasymbol','areaname'
#
# 8. Relationships are created using the PedonMetadataRelationships table
#
# 9. Field Indices are created using the PedonMetadataTableUniqueConstraints table
#
# Example of Web-NREPO-Style-Metadata-Pedon-Main Report of NASIS 7.4.1 schema
# https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=Web-NREPO-Style-Metadata-Pedon-Main&system_name=NASIS%207.4.1&metadata_name=NASIS%207.4.1&domain_name=Current%20NASIS/SSURGO%20Domains
# - No data will be provided fo
#-------------------------------------------------------------------------------
# Import modules
import sys, string, os, traceback, urllib, re, arcpy
from arcpy import env
from urllib.error import HTTPError, URLError
from urllib.request import Request
arcpy.env.parallelProcessingFactor = "100%"
# --------------------------------------------------------------- SCRIPT PARAMETERS
# NASIS Web-NREPO-Style-Metadata-Pedon-Main URL Report parameters
nasisDBmodel = r'NASIS 7.4.1'
metadataName = r'NASIS 7.4.1'
domainName = r'Current NASIS/SSURGO Domains'
pedonMetadataURL = f"https://nasis.sc.egov.usda.gov/NasisReportsWebSite/limsreport.aspx?report_name=Web-NREPO-Style-Metadata-Pedon-Main&system_name={urllib.parse.quote(nasisDBmodel)}&metadata_name={urllib.parse.quote(metadataName)}&domain_name={urllib.parse.quote(domainName)}"
newSchemaFGDBroot = f"{os.path.dirname(sys.argv[0])}"
metadataTblFGDB = f"{newSchemaFGDBroot}\\Metadata_Tables.gdb" # Metadata_Tables FGDB
newSchemaFGDBname = f"NASISPedonsFGDBTemplate_{nasisDBmodel.replace(' ','_').replace('.','_')}.gdb"
nasisTblsWithSubReports = f"{newSchemaFGDBroot}\\Tables_in_WEB_AnalysisPC_MAIN_URL_EXPORT_Report.txt"
tblColFldsDesc = f"{metadataTblFGDB}\\PedonMetadataTableColDesc" # official NASIS table for table/column metadata
tblColRels = f"{metadataTblFGDB}\\PedonMetadataRelationships"
tblConstraints = f"{metadataTblFGDB}\\PedonMetadataTableUniqueConstraints"
# --------------------------------------------------------------- ORGANIZE PEDON METADATA TABLE INFORMATION
print(f"Opening NASIS Pedon URL Report: Web-NREPO-Style-Metadata-Pedon-Main Report")
print(f"\tNASIS Database Model: {nasisDBmodel}")
print(f"\tNASIS Metadata Name: {metadataName}")
print(f"\tDomain Name: {domainName}")
bHeader = False
theTable = ""
tableColumnsDict = dict()
numOfFields = "" # The number of fields a specific table should contain
partialValue = "" # variable containing part of a value that is not complete
originalValue = "" # variable containing the original incomplete value
bPartialValue = False # flag indicating if value is incomplete; append next record
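# Illustrative sketch (hypothetical, abbreviated excerpt) of the report stream the
# parsing loop below expects: each table is wrapped in @begin/@end markers, the first
# line after @begin is a pipe-delimited header, and every record is a pipe-delimited
# row that may wrap across multiple physical lines (handled via bPartialValue).
#
#   @begin PedonMetadataDomain
#   <fieldname1>|<fieldname2>|...|<fieldnameN>
#   <value1>|<value2>|...|<valueN>
#   @end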
theReport = urllib.request.urlopen(pedonMetadataURL).readlines()
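# A guarded request could be used here instead (HTTPError and URLError are already
# imported above); a minimal sketch, not part of the original script:
#
#   try:
#       theReport = urllib.request.urlopen(pedonMetadataURL).readlines()
#   except (HTTPError, URLError) as e:
#       print(f"Could not open the NASIS report URL: {e}")
#       sys.exit(1)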
for theValue in theReport:
# convert from bytes to string and remove white spaces
theValue = theValue.decode('utf-8').strip()
# skip blank lines
if theValue == '' or theValue == None:
continue
# represents the start of valid table; Typically Line #19
if theValue.find('@begin') > -1:
theTable = theValue[theValue.find('@') + 7:] ## Isolate table name i.e. PedonMetadataDomain
print(f"\n\tCollecting Metadata Information for {theTable} table")
bHeader = True ## Next line will be the header
# end of the previous table has been reached; reset currentTable
elif theValue.find('@end') > -1:
currentTable = ""
bHeader = False
# represents header line
elif bHeader:
fieldNames = theValue.split('|') ## ['tablognm','tabphynm','tablab','tabhelp','logicaldatatype','physicaldatatype']
bHeader = False ## Reset to look for another header
numOfFields = len(fieldNames)
print(f"\t\tTotal # of fields: {len(fieldNames)}")
#print(f"\t\tField Names: {fieldNames}")
# represents individual legitimate records
elif not bHeader and theTable:
numOfValues = len(theValue.split('|'))
if numOfValues == 1 and bPartialValue == False and numOfFields > 1:
lastRecord = tableColumnsDict[theTable][-1]
lastItem = f"{lastRecord[-1]} {theValue}"
lastRecord[-1] = lastItem
del tableColumnsDict[theTable][-1]
tableColumnsDict[theTable].append(lastRecord)
continue
# Add the record to its designated list within the dictionary
# Do not remove the double quotes b/c doing so converts the object
# to a list which increases its object size. Remove quotes before
# inserting into table
# this should represent the 2nd half of a valid value
if bPartialValue:
partialValue += theValue # append this record to the previous record
# This value completed the previous value
if len(partialValue.split('|')) == numOfFields:
if not theTable in tableColumnsDict:
tableColumnsDict[theTable] = [partialValue.split('|')]
else:
tableColumnsDict[theTable].append(partialValue.split('|'))
bPartialValue = False
partialValue = ""
            # appending this value still falls short of the number of possible fields
# add another record; this would be the 3rd record appended and may
# exceed number of values.
elif len(partialValue.split('|')) < numOfFields:
continue
# appending this value exceeded the number of possible fields
else:
print("\t\tIncorrectly formatted Record Found in " + theTable + " table:")
print("\t\t\tRecord should have " + str(numOfFields) + " values but has " + str(len(partialValue.split('|'))))
print("\t\t\tRecord: " + partialValue)
bPartialValue = False
partialValue = ""
        # number of values does not equal the number of fields in the corresponding table
elif numOfValues != numOfFields:
            # number of values exceeds the number of fields; Big Error
if numOfValues > numOfFields:
print("\n\t\tIncorrectly formatted Record Found in " + theTable + " table:",2)
print("\t\t\tRecord should have " + str(numOfFields) + " values but has " + str(numOfValues),2)
print("\t\t\tRecord: " + theValue,2)
# number of values falls short of the number of correct fields
else:
partialValue,originalValue = theValue,theValue
bPartialValue = True
else:
if not theTable in tableColumnsDict:
tableColumnsDict[theTable] = [theValue.split('|')]
else:
tableColumnsDict[theTable].append(theValue.split('|'))
bPartialValue = False
partialValue = ""
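# At this point tableColumnsDict is keyed by pedon metadata table name, with one list
# of field values per record; an abbreviated, hypothetical example:
#   {'PedonMetadataDomain': [['...', '...'], ...],
#    'PedonMetadataTableColDesc': [['...', '...'], ...], ...}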
# ------------------------------------------------------------------ POPULATE PEDON METADATA TABLES
arcpy.env.workspace = metadataTblFGDB
pedonMetadataTables = arcpy.ListTables('PedonMetadata*')
# Delete all of the rows from the pedon metadata tables
# i.e. PedonMetadataTableColDesc, PedonMetadataRelationships,
# PedonMetadataTableUniqueConstraints, PedonMetadataDomain
print("\nDeleting old records for the following tables:")
for tbl in pedonMetadataTables:
tblPath = f"{metadataTblFGDB}\\{tbl}"
if arcpy.Exists(tblPath):
print(f"\t{tbl}")
arcpy.DeleteRows_management(tblPath)
# Insert new metadata values into appropriate tables. Should be 4 tables.
print("\nInserting new records for the following tables:")
for tbl,recs in tableColumnsDict.items():
print(f"\t{tbl}")
tblPath = f"{metadataTblFGDB}\\{tbl}"
if not arcpy.Exists(tblPath):
print(f"{tblPath} Does NOT exist!")
continue
tblFlds = [f.name for f in arcpy.ListFields(tblPath)][1:]
cursor = arcpy.da.InsertCursor(tblPath, tblFlds)
for rec in recs:
updatedRec = [None if val == '' else val for val in rec]
cursor.insertRow(updatedRec)
del updatedRec
del cursor
# --------------------------------------------------------------- CREATE FGDB PEDON TABLES USING NEW METADATA SCHEMA
# The following represents the position of fields from the 'PedonMetadataTableColDesc' table
tablab = 1 # Table Alias
tabphynm = 2 # Table Physical Name
tabhelptext = 4 # Table Description
coldefseq = 5 # Field Sequence
attphynm = 7 # Field Physical Name
collab = 8 # Field Alias
cholabtxt = 9 # Field Type
colnotnulbool = 11 # Field Allow NULLs
coldisplaysz = 12 # Field Length if associated with Domain choice list
attfldsiz = 15 # Field Length
attprec = 16 # Field Precision
uomsym = 21 # Unit of measurement symbol
domnm = 22 # Field Domain
aggregation = 23 # Determines whether a field needs to be disaggregated into 3 individual fields _l, _r, _h
tblFlds = [f.name for f in arcpy.ListFields(tblColFldsDesc)]
# List of all unique table physical names
tableList = list(set([row[0] for row in arcpy.da.SearchCursor(tblColFldsDesc, tblFlds[2])]))
tableList.sort()
# Create Table Alias dict that will be referenced throughout
tblAliasDict = dict()
for tblPhyName in tableList:
# tabphynm = 'pediagfeatures'
expression = f"{arcpy.AddFieldDelimiters(tblColFldsDesc, tblFlds[tabphynm])} = \'{tblPhyName}\'"
tblAlias = [row[0] for row in arcpy.da.SearchCursor(tblColFldsDesc,[tblFlds[1]],where_clause=expression)][0]
tblAliasDict[tblPhyName] = tblAlias
# Create new Pedon FGDB
print(f"\nCreating New Pedon FGDB Template: {newSchemaFGDBname}")
arcpy.CreateFileGDB_management(newSchemaFGDBroot, newSchemaFGDBname)
newSchemaFGDBpath = f"{newSchemaFGDBroot}\\{newSchemaFGDBname}"
# Data type conversions between SQL and ESRI
sqlDataTypeConverstion = {'Boolean':'SHORT',
'Bit':'SHORT',
'Binary':'BLOB',
'Char':'TEXT',
'Choice':'SHORT',
'Date/Time':'DATE',
'Datetime':'DATE',
'File Reference':'TEXT',
'Float':'FLOAT',
'Hyperlink':'TEXT',
'Integer':'LONG',
'Int':'LONG',
'Smallint':'SHORT',
'String':'TEXT',
'Real':'FLOAT',
'Narrative Text':'TEXT',
'Varchar':'TEXT',
'Varchar(max)':'TEXT'}
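# Example lookups (for reference only): sqlDataTypeConverstion['Varchar'] returns 'TEXT'
# and sqlDataTypeConverstion['Integer'] returns 'LONG'. 'Choice' nominally maps to 'SHORT',
# but Choice fields are overridden to TEXT further down (see workaround a. above).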
# Open the text file that contains all tables from the WEB_AnalysisPC_MAIN_URL_EXPORT
# that have subReports associated with them. Tables not in the list will be ignored.
functionalTables = list()
with open(nasisTblsWithSubReports) as f:
for line in f:
functionalTables.append(line.strip('\n'))
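# The text file is assumed to contain one NASIS table physical name per line, e.g.
# (hypothetical excerpt):
#   pedon
#   sitebedrock
#   siteaoverlap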
for tbl in tableList:
# Skip Domain and NASIS tables
if tbl.startswith('dom') or tbl.startswith('nasis'):continue
# Skip table if it doesn't have a subreport associated with it in NASIS
if not tbl in functionalTables:continue
tblAlias = tblAliasDict[tbl]
# Create Feature class from pedon table
if tbl == 'pedon':
print(f"\n\tCreating Feature Class: {tbl} - ({tblAlias})")
# Create the GCS WGS84 spatial reference and datum name using the factory code
spatialRef = arcpy.SpatialReference(4326)
arcpy.CreateFeatureclass_management(newSchemaFGDBpath, "pedon", "POINT", "#", "DISABLED", "DISABLED", spatialRef, out_alias=tblAlias)
# Create Empty table
else:
print(f"\n\tTable Name: {tbl} ({tblAlias})")
arcpy.CreateTable_management(newSchemaFGDBpath,tbl,out_alias=tblAlias)
newTable = f"{newSchemaFGDBpath}\\{tbl}"
# Order by the field sequence field
expression = f"{arcpy.AddFieldDelimiters(tblColFldsDesc, tblFlds[tabphynm])} = \'{tbl}\'"
sqlCluase = (None,f"ORDER BY {tblFlds[coldefseq]} ASC")
print(f"\n\t\t{'Field Name' : <35}{'Alias' : <45}{'Type' : <10}")
print(f"\t\t{85*'='}")
with arcpy.da.SearchCursor(tblColFldsDesc,tblFlds,where_clause=expression,sql_clause=sqlCluase) as cursor:
for row in cursor:
fieldName = row[attphynm]
fieldType = sqlDataTypeConverstion[row[cholabtxt]]
# Add units of measurement to field alias
if row[uomsym] in ('','None',None):
units = ''
fieldAlias = row[collab]
else:
units = row[uomsym]
fieldAlias = f"{row[collab]} ({units})"
if fieldType == 'TEXT':
fieldLength = row[attfldsiz]
else:
fieldLength = ''
if row[domnm] == '':
fieldDomain = None
else:
fieldDomain = row[domnm]
if row[colnotnulbool].lower() == 'yes':
fieldAllowNulls = 'NON_NULLABLE'
else:
fieldAllowNulls = 'NULLABLE'
# This is strictly for fields that have a domain associated with them
# and the metadata describes them as integers instead of text
# i.e Pedon Table --> labdatadescflag field
if row[cholabtxt] == 'Choice':
fieldType = 'TEXT'
fieldLength = row[coldisplaysz] + 10
# These fields are boolean datatype but are translated in the nasis pedon
# subreport to yes or no.
if tbl in ['pedon','ncsslayerlabdata'] and fieldName in ['labdatadescflag','pedonhydricrating','stratextsflag']:
fieldType = 'TEXT'
fieldLength = 10
# Create 2 fields representing _l, _h if aggregation code is 1
if row[aggregation] == 1:
aggDict = {'_l':'Low','_h':'High'}
for k,v in aggDict.items():
# insert low or high behind units of measure if they exist
if fieldAlias.find("(") > 0:
fieldAlias = f"{row[collab]} {v} ({units})"
else:
fieldAlias = fieldAlias + v
arcpy.AddField_management(newTable,fieldName + k,fieldType,'',field_alias=fieldAlias,field_is_nullable=fieldAllowNulls)
print(f"\t\t{fieldName + k: <35}{fieldAlias: <45}{fieldType : <10}")
# Create 3 fields representing _l, _r, _h if aggregation code is 2
elif row[aggregation] == 2:
aggDict = {'_l':'Low','_r':'RV','_h':'High'}
for k,v in aggDict.items():
                    # insert low, RV, or high behind units of measure if they exist
if fieldAlias.find("(") > 0:
fieldAlias = f"{row[collab]} {v} ({units})"
else:
fieldAlias = fieldAlias + v
arcpy.AddField_management(newTable,fieldName + k,fieldType,'',field_alias=fieldAlias,field_is_nullable=fieldAllowNulls)
print(f"\t\t{fieldName + k: <35}{fieldAlias: <45}{fieldType : <10}")
            # Add 'areaiidref' plus 3 additional fields ('areatypename','areasymbol','areaname') to the siteaoverlap table
            # to mimic the NASIS client
elif tbl == 'siteaoverlap' and fieldName == 'areaiidref':
addtlFlds = [['areaiidref','Rec ID','LONG','','NON_NULLABLE'],
['areatypename','Area Type Name','TEXT','50','NULLABLE'],
['areasymbol','Areasymbol','TEXT','10','NULLABLE'],
['areaname','Area Name','TEXT','50','NULLABLE']]
for fld in addtlFlds:
fieldName = fld[0]
fieldAlias = fld[1]
fieldType = fld[2]
fieldLength = fld[3]
fieldAllowNulls = fld[4]
arcpy.AddField_management(newTable,fieldName,fieldType,field_length=fieldLength,field_alias=fieldAlias,field_is_nullable=fieldAllowNulls)
else:
arcpy.AddField_management(newTable,fieldName,fieldType,field_length=fieldLength,field_alias=fieldAlias,field_is_nullable=fieldAllowNulls)
print(f"\t\t{fieldName : <35}{fieldAlias : <45}{fieldType : <10}")
# --------------------------------------------------------------- CREATE FGDB PEDON TABLE RELATIONSHIPS
arcpy.env.workspace = newSchemaFGDBpath
newSchemaTables = arcpy.ListTables('*')
newSchemaTables.append('pedon')
newSchemaTables.sort()
tblColRelsFlds = [f.name for f in arcpy.ListFields(tblColRels)]
originTbl = 1 # tablephysicalname
originPKey = 2 # indexcolumnnames
relationship = 3 # relationshiporienation
destinTbl = 4 # tablephysicalname2
originFKey = 5 # indexcolumnames2
# Select only Parent:Child relationships and sort by origin table name
parentChldExp = f"{arcpy.AddFieldDelimiters(tblColRels, tblColRelsFlds[relationship])} = \'Parent:Child\'"
sqlCluase = (None,f"ORDER BY {tblColRelsFlds[originTbl]} ASC")
relList = [(row[1],row[2],row[4],row[5]) for row in arcpy.da.SearchCursor(tblColRels,'*',where_clause=parentChldExp,sql_clause=sqlCluase)]
print(f"\nCreating Relationship Classes")
print(f"\n\t{'Origin Table' : <25}{'Destination Table' : <30}{'Relationship Type' : <20}{'Relationship Name' : <60}")
print(f"\t{130*'='}")
# ('area', 'areaiid', 'areatext', 'areaiidref')
for rel in relList:
    # Skip relationships that have more than 1 primary key column
if len(rel[1].split(',')) > 1:
continue
origin_table = rel[0]
destination_table = rel[2]
outRelClass = f"z{origin_table.capitalize()}_{destination_table.capitalize()}"
reltype = "SIMPLE"
# forward label: > Area Table
#expression = f"{arcpy.AddFieldDelimiters(tblColFldsDesc, tblFlds[tabphynm])} = \'{destination_table}\'"
#tblAlias = [row[0] for row in arcpy.da.SearchCursor(tblColFldsDesc,[tblFlds[tablab]],where_clause=expression)][0]
tblAlias = tblAliasDict[destination_table]
forward_label = f"> {tblAlias} Table"
# backward label: < Area Table
#expression = f"{arcpy.AddFieldDelimiters(tblColFldsDesc, tblFlds[tabphynm])} = \'{origin_table}\'"
#tblAlias = [row[0] for row in arcpy.da.SearchCursor(tblColFldsDesc,[tblFlds[tablab]],where_clause=expression)][0]
tblAlias = tblAliasDict[origin_table]
backward_label = f"< {tblAlias} Table"
message_direction = "NONE"
cardinality = "ONE_TO_MANY"
attributed = 'NONE'
origin_primaryKey = rel[1]
origin_foreignKey = rel[3]
origin_tablePath = f"{newSchemaFGDBpath}\\{origin_table}"
destination_tablePath = f"{newSchemaFGDBpath}\\{destination_table}"
if arcpy.Exists(origin_tablePath) and arcpy.Exists(destination_tablePath):
arcpy.CreateRelationshipClass_management(origin_table,
destination_table,
outRelClass,
reltype,
forward_label,
backward_label,
message_direction,
cardinality,
attributed,
origin_primaryKey,
origin_foreignKey)
print(f"\t{origin_table : <25}{'--> ' + destination_table: <30}{'--> ' + cardinality : <20}{'--> ' + outRelClass : <60}")
## else:
## print(f"{origin_table} or {destination_table} DOES NOT EXIST")
# --------------------------------------------------------------- CREATE FGDB PEDON FIELD INDEXES
# Positions of fields from the PedonMetadataTableUniqueConstraints table; the field list includes OBJECTID
tabphynm = 1
indxName = 3
indxFld = 4
indexTblFlds = [f.name for f in arcpy.ListFields(tblConstraints)]
# Select primary key (PK_) indexes and order by table physical name
pkIndexExp = f"{arcpy.AddFieldDelimiters(tblConstraints, indexTblFlds[indxName])} LIKE \'PK_%\'"
IndexSqlCluase = (None,f"ORDER BY {indexTblFlds[tabphynm]} ASC")
print(f"\nCreating Attribute Indexes")
print(f"\n\t{'Table' : <30}{'Index Field' : <30}{'Index Name' : <30}")
print(f"\t{90*'='}")
with arcpy.da.SearchCursor(tblConstraints,indexTblFlds,where_clause=pkIndexExp,sql_clause=IndexSqlCluase) as cursor:
for row in cursor:
table = row[tabphynm]
tablePath = f"{newSchemaFGDBpath}\\{table}"
if arcpy.Exists(tablePath):
indexField = row[indxFld]
if indexField in [f.name for f in arcpy.ListFields(tablePath,indexField)]:
indexName = row[indxName]
arcpy.AddIndex_management(tablePath,indexField,indexName)
print(f"\t{table: <30}{indexField: <30}{indexName : <30}")