
Commit

wip
Signed-off-by: Austin Liu <[email protected]>
austin362667 committed Mar 14, 2024
1 parent 64b8468 commit 9c1dc41
Showing 1 changed file with 23 additions and 0 deletions.
flytekit/types/structured/structured_dataset.py: 23 additions & 0 deletions
@@ -819,14 +819,37 @@ def _get_dataset_column_literal_type(self, t: Type) -> type_models.LiteralType:
        if hasattr(t, "__origin__") and t.__origin__ == dict:
            return type_models.LiteralType(map_value_type=self._get_dataset_column_literal_type(t.__args__[1]))
        raise AssertionError(f"type {t} is currently not supported by StructuredDataset")

    def flatten_dict(self, nested_dict: dict) -> dict:
        result = {}

        def _flatten(sub_dict: dict, parent_key: str = "") -> dict:
            # Build dotted column names, recursing into nested dicts and dataclasses
            # without returning early so every sibling key is visited.
            for key, value in sub_dict.items():
                current_key = f"{parent_key}.{key}" if parent_key else key
                if isinstance(value, dict):
                    _flatten(value, current_key)
                elif hasattr(value, "__dataclass_fields__"):
                    fields = getattr(value, "__dataclass_fields__")
                    _flatten({k: f.type for k, f in fields.items()}, current_key)
                else:
                    result[current_key] = value
            return result

        return _flatten(sub_dict=nested_dict)

    def _convert_ordered_dict_of_columns_to_list(
        self, column_map: typing.Optional[typing.OrderedDict[str, Type]]
    ) -> typing.List[StructuredDatasetType.DatasetColumn]:
        converted_cols: typing.List[StructuredDatasetType.DatasetColumn] = []
        if column_map is None or len(column_map) == 0:
            return converted_cols
        # Flatten nested dict/dataclass column types into dotted column names
        # before converting each entry to a DatasetColumn.
        flat_column_map: typing.Dict[str, Type] = {}
        for k, v in column_map.items():
            flat_column_map.update(self.flatten_dict({k: v}))
        for k, v in flat_column_map.items():
            lt = self._get_dataset_column_literal_type(v)
            converted_cols.append(StructuredDatasetType.DatasetColumn(name=k, literal_type=lt))
        return converted_cols
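
For context, here is a minimal standalone sketch of what the new flattening produces for a nested dataclass column map. The Address dataclass and the flatten_columns helper are made-up illustration names, not part of flytekit; they just restate the flatten_dict logic above under those assumptions.

from dataclasses import dataclass
from typing import Optional


@dataclass
class Address:
    # Hypothetical nested dataclass, used only to illustrate the flattening.
    zip_code: str
    city: str


def flatten_columns(nested: dict, parent: str = "", out: Optional[dict] = None) -> dict:
    # Standalone restatement of the flatten_dict logic above, for illustration only.
    out = {} if out is None else out
    for key, value in nested.items():
        dotted = f"{parent}.{key}" if parent else key
        if isinstance(value, dict):
            flatten_columns(value, dotted, out)
        elif hasattr(value, "__dataclass_fields__"):
            flatten_columns({k: f.type for k, f in value.__dataclass_fields__.items()}, dotted, out)
        else:
            out[dotted] = value
    return out


print(flatten_columns({"name": str, "address": Address}))
# {'name': <class 'str'>, 'address.zip_code': <class 'str'>, 'address.city': <class 'str'>}

Each dotted key in the flattened map then becomes its own StructuredDatasetType.DatasetColumn in _convert_ordered_dict_of_columns_to_list.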
