Merge branch 'main' into support/3.2
dsuch committed Oct 6, 2023
2 parents 27b30e8 + 3532854 commit 73280f0
Showing 1 changed file with 96 additions and 38 deletions.
134 changes: 96 additions & 38 deletions code/zato-cli/src/zato/cli/enmasse.py
@@ -95,6 +95,7 @@ class Include_Type:
'channel_plain_http': ModuleCtx.Include_Type.REST,
'outconn_plain_http': ModuleCtx.Include_Type.REST,
'zato_generic_rest_wrapper': ModuleCtx.Include_Type.REST,
'zato_generic_connection': ModuleCtx.Include_Type.LDAP,

# Security definitions
'def_sec': ModuleCtx.Include_Type.Security,
@@ -107,6 +108,9 @@ class Include_Type:

ModuleCtx.Enmasse_Attr_List_Include = {

# Security definitions
'def_sec': ['type', 'name', 'username', 'realm'],

# REST connections - Channels
'channel_plain_http': [
'name',
@@ -131,8 +135,14 @@ class Include_Type:
'transport',
],

# Security definitions
'def_sec': ['type', 'name', 'username', 'realm']
# LDAP outgoing connections
'zato_generic_connection_outconn-ldap': [
'type_',
'name',
'username',
'auth_type',
'server_list',
]
}

# ################################################################################################################################
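The new 'zato_generic_connection_outconn-ldap' key above is a composite name: the generic connection item type joined by an underscore with the wrapper's embedded 'type_' attribute. A minimal sketch of how such a key is derived, mirroring the attr_key logic this commit adds to write_output further down (the sample item is invented for illustration):

item_type = 'zato_generic_connection'
item = {'type_': 'outconn-ldap', 'name': 'Example LDAP'}  # hypothetical sample item

if item_type == 'zato_generic_connection':
    attr_key = f"{item_type}_{item['type_']}"  # -> 'zato_generic_connection_outconn-ldap'
else:
    attr_key = item_type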
@@ -160,6 +170,7 @@ class Include_Type:
'def_sec': 'security',
'channel_plain_http': 'channel_rest',
'outconn_plain_http': 'outgoing_rest',
'zato_generic_connection_outconn-ldap': 'outgoing_ldap',
}

# ################################################################################################################################
@@ -1778,8 +1789,9 @@ class Enmasse(ManageCommand):
"""
opts = [
{'name':'--server-url', 'help':'URL of the server that enmasse should talk to, provided in host[:port] format. Defaults to server.conf\'s \'gunicorn_bind\''}, # noqa: E501
{'name':'--export-local', 'help':'Export local file definitions into one file (can be used with --export-odb)', 'action':'store_true'},
{'name':'--export-odb', 'help':'Export ODB definitions into one file (can be used with --export-local)', 'action':'store_true'},
{'name':'--export-local', 'help':'Export local file definitions into one file (can be used with --export)', 'action':'store_true'},
{'name':'--export', 'help':'Export server objects to a file (can be used with --export-local)', 'action':'store_true'},
{'name':'--export-odb', 'help':'Same as --export', 'action':'store_true'},
{'name':'--output', 'help':'Path to a file to export data to', 'action':'store'},
{'name':'--include-type', 'help':'A list of definition types to include in an export', 'action':'store', 'default':'all'},
{'name':'--include-name', 'help':'Only objects containing any of the names provided will be exported', 'action':'store', 'default':'all'},
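With the reworked flags above, --export becomes the primary switch and --export-odb is kept as a backward-compatible alias. A hypothetical invocation combining the options defined above (paths invented for illustration):

zato enmasse /path/to/server --export --output /tmp/export.yaml --include-type all --include-name all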
@@ -1933,6 +1945,7 @@ def _on_server(self, args:'any_') -> 'None':

# .. support both arguments ..
self.replace_objects:'bool' = getattr(args, 'replace', False) or getattr(args, 'replace_odb_objects', False)
self.export_odb:'bool' = getattr(args, 'export', False) or getattr(args, 'export_odb', False)

# .. make sure the input file path is correct ..
if args.export_local or has_import:
@@ -1979,7 +1992,7 @@ def _on_server(self, args:'any_') -> 'None':
self.client.invoke('zato.ping')
populate_services_from_apispec(self.client, self.logger)

if True not in (args.export_local, args.export_odb, args.clean_odb, has_import):
if True not in (args.export_local, self.export_odb, args.clean_odb, has_import):
self.logger.error('At least one of --clean, --export-local, --export-odb or --import is required, stopping now')
sys.exit(self.SYS_ERROR.NO_OPTIONS)

@@ -1988,7 +2001,7 @@ def _on_server(self, args:'any_') -> 'None':
count = self.object_mgr.delete_all()
self.logger.info('Deleted {} items'.format(count))

if args.export_odb or has_import:
if self.export_odb or has_import:
# Checks if connections to ODB/Redis are configured properly
cc = CheckConfig(self.args)
cc.show_output = False
@@ -1998,7 +2011,7 @@ def _on_server(self, args:'any_') -> 'None':
os.chdir(self.curdir)

# Import and export are mutually exclusive
if has_import and (args.export_local or args.export_odb):
if has_import and (args.export_local or self.export_odb):
self.logger.error('Cannot specify import and export options at the same time, stopping now')
sys.exit(self.SYS_ERROR.CONFLICTING_OPTIONS)

@@ -2013,7 +2026,7 @@ def _on_server(self, args:'any_') -> 'None':
include_name = self._extract_include(include_name)

# 3)
if args.export_local and args.export_odb:
if args.export_local and self.export_odb:
_ = self.report_warnings_errors(self.export_local_odb())
self.write_output(output_path, include_type, include_name)

@@ -2023,8 +2036,8 @@ def _on_server(self, args:'any_') -> 'None':
self.write_output(output_path, include_type, include_name)

# 2)
elif args.export_odb:
if self.report_warnings_errors(self.export_odb()):
elif self.export_odb:
if self.report_warnings_errors(self.run_odb_export()):
self.write_output(output_path, include_type, include_name)

# 4) a/b
@@ -2078,15 +2091,16 @@ def _should_write_name_to_output(

def _filter_item_attrs(
self,
item_type, # type: str
item, # type: strdict
attr_key, # type: str
item_type, # type: str
item, # type: strdict
) -> 'strdict':

# Check if there is an explicit list of include attributes to return for the type ..
attr_list_include = ModuleCtx.Enmasse_Attr_List_Include.get(item_type) or []
attr_list_include = ModuleCtx.Enmasse_Attr_List_Include.get(attr_key) or []

# .. as above, for attributes that are explicitly configured to be excluded ..
attr_list_exclude = ModuleCtx.Enmasse_Attr_List_Exclude.get(item_type) or []
attr_list_exclude = ModuleCtx.Enmasse_Attr_List_Exclude.get(attr_key) or []

# .. to make sure the dictionary does not change during iteration ..
item_copy = deepcopy(item)
@@ -2127,16 +2141,16 @@ def _filter_item_attrs(

def _sort_item_attrs(
self,
item_type, # type: str
item, # type: strdict
attr_key, # type: str
item, # type: strdict
) -> 'strdict':

# Turn the item into an object whose attributes can be sorted ..
item = OrderedDict(item)

# .. go through each of the attributes in the order of preference, assuming that we have any matching ones ..
# .. it needs to be reversed because we are pushing each such attribute to the front, as in a stack ..
for attr in reversed(ModuleCtx.Enmasse_Attr_List_Sort_Order.get(item_type) or []):
for attr in reversed(ModuleCtx.Enmasse_Attr_List_Sort_Order.get(attr_key) or []):

if attr in item:
item.move_to_end(attr, last=False)
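The reversed iteration is what makes the configured order win: each matching attribute is pushed to the front of the OrderedDict, so the attribute pushed last ends up first. A small self-contained illustration of this stack-like behaviour (sample data and preference list invented):

from collections import OrderedDict

item = OrderedDict([('username', 'u1'), ('name', 'n1'), ('type', 't1')])
sort_order = ['type', 'name', 'username']  # hypothetical preference list

for attr in reversed(sort_order):
    if attr in item:
        item.move_to_end(attr, last=False)

print(list(item))  # -> ['type', 'name', 'username']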
@@ -2170,7 +2184,7 @@ def _should_write_to_output(
has_all_types = ModuleCtx.Include_Type.All in include_type
has_all_names = ModuleCtx.Include_Type.All in include_name

# Handle security definitions ..
# Handle security definitions, some of which should never be exported ..
if item_type == 'def_sec':

# .. do not write RBAC definitions ..
@@ -2185,15 +2199,16 @@ else:
else:
out = True

# We enter this branch if we are to export only specific types ..
if not has_all_types:
out = self._should_write_type_to_output(item_type, item, include_type)
else:
# We enter this branch if we are to export only specific types ..
if not has_all_types:
out = self._should_write_type_to_output(item_type, item, include_type)

# We enter this branch if we are to export only objects of specific names ..
if not has_all_names:
item_name = item.get('name') or ''
item_name = item_name.lower()
out = self._should_write_name_to_output(item_type, item_name, include_name)
# We enter this branch if we are to export only objects of specific names ..
if not has_all_names:
item_name = item.get('name') or ''
item_name = item_name.lower()
out = self._should_write_name_to_output(item_type, item_name, include_name)

# .. we are ready to return our output
return out
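Note that the restructuring makes the RBAC short-circuit authoritative: the type and name filters now run only in the non-security branch. Per the --include-name help text ('objects containing any of the names provided'), the name check is presumably a substring match; a plausible sketch of that helper, whose body is not shown in this diff:

def _should_write_name_to_output(item_type, item_name, include_name):
    # Assumed semantics: keep the object if its lowercased name contains
    # any of the requested fragments. Hypothetical code, written only to
    # illustrate the help text quoted above.
    return any(fragment in item_name for fragment in include_name)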
@@ -2264,33 +2279,75 @@ def write_output(
if not self._should_write_to_output(item_type, item, include_type, include_name):
continue

# .. this is required because generic connections are differentiated ..
# .. by their embedded 'type_' attribute, rather than by item_type itself ..
if item_type == 'zato_generic_connection':
wrapper_type = item['type_']
attr_key = f'{item_type}_{wrapper_type}'
else:
attr_key = item_type

# .. this will remove any attributes from this item that we do not need ..
item = self._filter_item_attrs(item_type, item)
item = self._filter_item_attrs(attr_key, item_type, item)

# .. sort the attributes in the order we want them to appear in the output file ..
item = self._sort_item_attrs(item_type, item)
item = self._sort_item_attrs(attr_key, item)

# .. if we are here, it means that we want to include this item on output ..
to_write_items.append(item)

# .. sort item lists to be written ..
to_write_items.sort(key=lambda item: item.get('name', '').lower())

# .. now, item_type this new list to what is to be written ..
# .. now, append this new list to what is to be written ..
# .. but only if there is anything to be written for that type ..
if to_write_items:
to_write[item_type] = to_write_items

# .. replace item type names ..
# .. replace non-generic connection item type names ..
for old_name, new_name in ModuleCtx.Enmasse_Item_Type_Name_Map.items():
value = to_write.pop(old_name, None)
if value:
to_write[new_name] = value

# .. make sure security definitions come first, assuming that we have any ..
if 'security' in to_write:
to_write = OrderedDict(to_write)
to_write.move_to_end('security', last=False)
# .. now, replace generic connection types which are more involved ..
new_names = {
'outgoing_ldap': []
}
for old_name, value_list in to_write.items():
value_list = cast_('anylist', value_list)
if old_name == 'zato_generic_connection':
for idx, value in enumerate(value_list):
if wrapper_type := value.get('type_'):
attr_key = f'{old_name}_{wrapper_type}'
if new_name := ModuleCtx.Enmasse_Item_Type_Name_Map.get(attr_key):
_ = value.pop('type_')
new_names[new_name].append(value)
value_list.pop(idx)

# .. append the new names extracted from generic connections to what we need to write ..
for new_name, value_list in new_names.items():
if value_list:
to_write[new_name] = value_list

# .. if there are no generic connections left at this point, this key can be deleted ..
if not to_write.get('zato_generic_connection'):
_ = to_write.pop('zato_generic_connection')

# .. this lets us move individual keys around ..
to_write = OrderedDict(to_write)

# .. certain keys should be stored in a specific order at the head of the output ..
key_order = reversed([
'security',
'channel_rest',
'outgoing_rest',
])

# .. do move the keys now, in the order specified above ..
for key in key_order:
if key in to_write:
to_write.move_to_end(key, last=False)

# .. if we have the name of a file to use, do use it ..
if output_path:
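In effect, each generic connection whose composite key has an entry in the name map is pulled out of 'zato_generic_connection' and re-filed under its friendlier section name, with the now-redundant 'type_' attribute dropped. A condensed sketch of the same transformation under invented sample data; it rebuilds the remaining list instead of popping during enumeration, an equivalent but mutation-safe formulation:

to_write = {
    'zato_generic_connection': [
        {'type_': 'outconn-ldap', 'name': 'Example LDAP'},      # will be re-filed
        {'type_': 'some-other-type', 'name': 'Example Other'},  # stays in place
    ],
}
name_map = {'zato_generic_connection_outconn-ldap': 'outgoing_ldap'}

remaining = []
for value in to_write['zato_generic_connection']:
    attr_key = f"zato_generic_connection_{value['type_']}"
    if new_name := name_map.get(attr_key):
        _ = value.pop('type_')
        to_write.setdefault(new_name, []).append(value)
    else:
        remaining.append(value)

if remaining:
    to_write['zato_generic_connection'] = remaining
else:
    del to_write['zato_generic_connection']

# to_write is now:
# {'zato_generic_connection': [{'type_': 'some-other-type', 'name': 'Example Other'}],
#  'outgoing_ldap': [{'name': 'Example LDAP'}]}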
@@ -2457,7 +2514,7 @@ def export_local_odb(self, needs_local=True):

# ################################################################################################################################

def export_odb(self):
def run_odb_export(self):
return self.export_local_odb(False)

# ################################################################################################################################
@@ -2507,11 +2564,12 @@ def run_import(self) -> 'anylist':
args.ignore_missing_defs = False
args.output = None
args.rbac_sleep = 1
args['replace'] = True
args['import'] = True

# args['replace'] = True
args['import'] = False

args.path = sys.argv[1]
args.input = sys.argv[2]
# args.input = sys.argv[2]

enmasse = Enmasse(args)
enmasse.run(args)
