This repository has been archived by the owner on Oct 23, 2023. It is now read-only.
Refactored literal type casting from flytepropeller #387
Open
hamersaw wants to merge 3 commits into master from feature/map-task-with-multiple-inputs
@@ -0,0 +1,367 @@
package coreutils

import (
    "strings"

    "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/core"
    structpb "github.com/golang/protobuf/ptypes/struct"
)

type typeChecker interface {
    CastsFrom(*core.LiteralType) bool
}
|
||
type trivialChecker struct { | ||
literalType *core.LiteralType | ||
} | ||
|
||
// CastsFrom is a trivial type checker merely checks if types match exactly. | ||
func (t trivialChecker) CastsFrom(upstreamType *core.LiteralType) bool { | ||
// If upstream is an enum, it can be consumed as a string downstream | ||
if upstreamType.GetEnumType() != nil { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. nit: use && with the conditions |
||
if t.literalType.GetSimple() == core.SimpleType_STRING { | ||
return true | ||
} | ||
} | ||
// If t is an enum, it can be created from a string as Enums as just constrained String aliases | ||
if t.literalType.GetEnumType() != nil { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. nit: use |
||
if upstreamType.GetSimple() == core.SimpleType_STRING { | ||
return true | ||
} | ||
} | ||
|
||
if GetTagForType(upstreamType) != "" && GetTagForType(t.literalType) != GetTagForType(upstreamType) { | ||
return false | ||
} | ||
|
||
// Ignore metadata when comparing types. | ||
upstreamTypeCopy := *upstreamType | ||
downstreamTypeCopy := *t.literalType | ||
upstreamTypeCopy.Structure = &core.TypeStructure{} | ||
downstreamTypeCopy.Structure = &core.TypeStructure{} | ||
upstreamTypeCopy.Metadata = &structpb.Struct{} | ||
downstreamTypeCopy.Metadata = &structpb.Struct{} | ||
upstreamTypeCopy.Annotation = &core.TypeAnnotation{} | ||
downstreamTypeCopy.Annotation = &core.TypeAnnotation{} | ||
return upstreamTypeCopy.String() == downstreamTypeCopy.String() | ||
} | ||
|
||
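// Illustrative sketch, not part of this PR: a hypothetical helper showing that the trivial
// checker lets an enum literal type flow into a string input and a string flow into an
// enum input, since enums are constrained string aliases.
func exampleEnumStringCast() bool {
    stringType := &core.LiteralType{
        Type: &core.LiteralType_Simple{Simple: core.SimpleType_STRING},
    }
    enumType := &core.LiteralType{
        Type: &core.LiteralType_EnumType{EnumType: &core.EnumType{Values: []string{"RED", "GREEN"}}},
    }
    // Both directions are accepted: enum -> string and string -> enum.
    return AreTypesCastable(enumType, stringType) && AreTypesCastable(stringType, enumType)
}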
type noneTypeChecker struct{}

// CastsFrom matches only the none (void) type.
func (t noneTypeChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    return isNoneType(upstreamType)
}

type mapTypeChecker struct {
    literalType *core.LiteralType
}

// CastsFrom checks that the upstream map type can be cast to the current map type. We need to ensure both the key
// types and value types match.
func (t mapTypeChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    // Empty maps should match any map.
    mapLiteralType := upstreamType.GetMapValueType()
    if isNoneType(mapLiteralType) {
        return true
    } else if mapLiteralType != nil {
        return getTypeChecker(t.literalType.GetMapValueType()).CastsFrom(mapLiteralType)
    }

    return false
}

type collectionTypeChecker struct {
    literalType *core.LiteralType
}

// CastsFrom checks whether two collection types match. We need to ensure that the nesting is correct and the final
// subtypes match.
func (t collectionTypeChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    // Empty collections should match any collection.
    collectionType := upstreamType.GetCollectionType()
    if isNoneType(upstreamType.GetCollectionType()) {
        return true
    } else if collectionType != nil {
        return getTypeChecker(t.literalType.GetCollectionType()).CastsFrom(collectionType)
    }

    return false
}
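// Illustrative sketch, not part of this PR: a hypothetical helper showing that an empty
// (none-typed) collection upstream is accepted by any downstream collection, while the
// element types must cast in the general case.
func exampleCollectionCast() bool {
    intCollection := &core.LiteralType{
        Type: &core.LiteralType_CollectionType{
            CollectionType: &core.LiteralType{
                Type: &core.LiteralType_Simple{Simple: core.SimpleType_INTEGER},
            },
        },
    }
    noneCollection := &core.LiteralType{
        Type: &core.LiteralType_CollectionType{
            CollectionType: &core.LiteralType{
                Type: &core.LiteralType_Simple{Simple: core.SimpleType_NONE},
            },
        },
    }
    // An empty collection casts to a collection of integers; the reverse does not hold.
    return AreTypesCastable(noneCollection, intCollection) && !AreTypesCastable(intCollection, noneCollection)
}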
type schemaTypeChecker struct {
    literalType *core.LiteralType
}

// CastsFrom handles type casting to the underlying schema type.
// Schemas are more complex types in the Flyte ecosystem. A schema is considered castable in the following cases.
//
// 1. The downstream schema has no column types specified. In such a case, it accepts all schema input since it is
//    generic.
//
// 2. The downstream schema has a subset of the upstream columns and they match perfectly.
//
// 3. The upstream type can be a schema type or a structured dataset type.
func (t schemaTypeChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    schemaType := upstreamType.GetSchema()
    structuredDatasetType := upstreamType.GetStructuredDatasetType()
    if structuredDatasetType == nil && schemaType == nil {
        return false
    }

    if schemaType != nil {
        return schemaCastFromSchema(schemaType, t.literalType.GetSchema())
    }

    // A Flyte schema can only be serialized to parquet.
    if len(structuredDatasetType.Format) != 0 && !strings.EqualFold(structuredDatasetType.Format, "parquet") {
        return false
    }

    return schemaCastFromStructuredDataset(structuredDatasetType, t.literalType.GetSchema())
}

type structuredDatasetChecker struct {
    literalType *core.LiteralType
}

// CastsFrom handles structured datasets, which are more complex types in the Flyte ecosystem. A structured dataset
// is considered castable in the following cases:
//
// 1. The downstream structured dataset has no column types specified. In such a case, it accepts all structured
//    dataset input since it is generic.
//
// 2. The downstream structured dataset has a subset of the upstream structured dataset columns and they match
//    perfectly.
//
// 3. The upstream type can be a schema type or a structured dataset type.
func (t structuredDatasetChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    // Structured datasets are nullable.
    if isNoneType(upstreamType) {
        return true
    }
    structuredDatasetType := upstreamType.GetStructuredDatasetType()
    schemaType := upstreamType.GetSchema()
    if structuredDatasetType == nil && schemaType == nil {
        return false
    }
    if schemaType != nil {
        // A Flyte schema can only be serialized to parquet.
        format := t.literalType.GetStructuredDatasetType().Format
        if len(format) != 0 && !strings.EqualFold(format, "parquet") {
            return false
        }
        return structuredDatasetCastFromSchema(schemaType, t.literalType.GetStructuredDatasetType())
    }
    return structuredDatasetCastFromStructuredDataset(structuredDatasetType, t.literalType.GetStructuredDatasetType())
}

// Upstream (schema) -> downstream (schema).
func schemaCastFromSchema(upstream *core.SchemaType, downstream *core.SchemaType) bool {
    if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 {
        return true
    }

    nameToTypeMap := make(map[string]core.SchemaType_SchemaColumn_SchemaColumnType)
    for _, column := range upstream.Columns {
        nameToTypeMap[column.Name] = column.Type
    }

    // Check that the downstream schema is a strict sub-set of the upstream schema.
    for _, column := range downstream.Columns {
        upstreamType, ok := nameToTypeMap[column.Name]
        if !ok {
            return false
        }
        if upstreamType != column.Type {
            return false
        }
    }
    return true
}
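// Illustrative sketch, not part of this PR: a hypothetical helper showing the subset rule
// for schema-to-schema casting, where the downstream columns must all exist upstream with
// matching types.
func exampleSchemaSubsetCast() bool {
    upstream := &core.SchemaType{Columns: []*core.SchemaType_SchemaColumn{
        {Name: "id", Type: core.SchemaType_SchemaColumn_INTEGER},
        {Name: "score", Type: core.SchemaType_SchemaColumn_FLOAT},
    }}
    downstream := &core.SchemaType{Columns: []*core.SchemaType_SchemaColumn{
        {Name: "id", Type: core.SchemaType_SchemaColumn_INTEGER},
    }}
    // The downstream columns are a strict subset of the upstream columns, so this casts.
    return schemaCastFromSchema(upstream, downstream)
}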
type unionTypeChecker struct {
    literalType *core.LiteralType
}

func (t unionTypeChecker) CastsFrom(upstreamType *core.LiteralType) bool {
    unionType := t.literalType.GetUnionType()

    upstreamUnionType := upstreamType.GetUnionType()
    if upstreamUnionType != nil {
        // For each upstream variant we must find a compatible downstream variant.
        for _, u := range upstreamUnionType.GetVariants() {
            found := false
            for _, d := range unionType.GetVariants() {
                if AreTypesCastable(u, d) {
                    found = true
                    break
                }
            }
            if !found {
                return false
            }
        }

        return true
    }

    // Matches iff we can unambiguously select a variant.
    foundOne := false
    for _, x := range unionType.GetVariants() {
        if AreTypesCastable(upstreamType, x) {
            if foundOne {
                return false
            }
            foundOne = true
        }
    }

    return foundOne
}
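// Illustrative sketch, not part of this PR: a hypothetical helper showing the ambiguity
// rule for unions. A non-union upstream type matches a union only when exactly one
// variant accepts it.
func exampleUnionAmbiguity() bool {
    intType := &core.LiteralType{Type: &core.LiteralType_Simple{Simple: core.SimpleType_INTEGER}}
    floatType := &core.LiteralType{Type: &core.LiteralType_Simple{Simple: core.SimpleType_FLOAT}}
    unambiguous := &core.LiteralType{Type: &core.LiteralType_UnionType{
        UnionType: &core.UnionType{Variants: []*core.LiteralType{intType, floatType}},
    }}
    ambiguous := &core.LiteralType{Type: &core.LiteralType_UnionType{
        UnionType: &core.UnionType{Variants: []*core.LiteralType{intType, intType}},
    }}
    // An integer selects exactly one variant of the first union, but matches two
    // variants of the second, so the second cast is rejected.
    return AreTypesCastable(intType, unambiguous) && !AreTypesCastable(intType, ambiguous)
}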
// Upstream (structuredDatasetType) -> downstream (structuredDatasetType).
func structuredDatasetCastFromStructuredDataset(upstream *core.StructuredDatasetType, downstream *core.StructuredDatasetType) bool {
    // Skip the format check here when the format is empty. https://github.com/flyteorg/flyte/issues/2864
    if len(upstream.Format) != 0 && len(downstream.Format) != 0 && !strings.EqualFold(upstream.Format, downstream.Format) {
        return false
    }

    if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 {
        return true
    }

    nameToTypeMap := make(map[string]*core.LiteralType)
    for _, column := range upstream.Columns {
        nameToTypeMap[column.Name] = column.LiteralType
    }

    // Check that the downstream structured dataset is a strict sub-set of the upstream structured dataset.
    for _, column := range downstream.Columns {
        upstreamType, ok := nameToTypeMap[column.Name]
        if !ok {
            return false
        }
        if !getTypeChecker(column.LiteralType).CastsFrom(upstreamType) {
            return false
        }
    }
    return true
}

// Upstream (schemaType) -> downstream (structuredDatasetType).
func structuredDatasetCastFromSchema(upstream *core.SchemaType, downstream *core.StructuredDatasetType) bool {
    if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 {
        return true
    }
    nameToTypeMap := make(map[string]core.SchemaType_SchemaColumn_SchemaColumnType)
    for _, column := range upstream.Columns {
        nameToTypeMap[column.Name] = column.GetType()
    }

    // Check that the downstream structured dataset is a strict sub-set of the upstream schema.
    for _, column := range downstream.Columns {
        upstreamType, ok := nameToTypeMap[column.Name]
        if !ok {
            return false
        }
        if !schemaTypeIsMatchStructuredDatasetType(upstreamType, column.LiteralType.GetSimple()) {
            return false
        }
    }
    return true
}

// Upstream (structuredDatasetType) -> downstream (schemaType).
func schemaCastFromStructuredDataset(upstream *core.StructuredDatasetType, downstream *core.SchemaType) bool {
    if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 {
        return true
    }
    nameToTypeMap := make(map[string]core.SimpleType)
    for _, column := range upstream.Columns {
        nameToTypeMap[column.Name] = column.LiteralType.GetSimple()
    }

    // Check that the downstream schema is a strict sub-set of the upstream structured dataset.
    for _, column := range downstream.Columns {
        upstreamType, ok := nameToTypeMap[column.Name]
        if !ok {
            return false
        }
        if !schemaTypeIsMatchStructuredDatasetType(column.GetType(), upstreamType) {
            return false
        }
    }
    return true
}

func schemaTypeIsMatchStructuredDatasetType(schemaType core.SchemaType_SchemaColumn_SchemaColumnType, structuredDatasetType core.SimpleType) bool {
    switch schemaType {
    case core.SchemaType_SchemaColumn_INTEGER:
        return structuredDatasetType == core.SimpleType_INTEGER
    case core.SchemaType_SchemaColumn_FLOAT:
        return structuredDatasetType == core.SimpleType_FLOAT
    case core.SchemaType_SchemaColumn_STRING:
        return structuredDatasetType == core.SimpleType_STRING
    case core.SchemaType_SchemaColumn_BOOLEAN:
        return structuredDatasetType == core.SimpleType_BOOLEAN
    case core.SchemaType_SchemaColumn_DATETIME:
        return structuredDatasetType == core.SimpleType_DATETIME
    case core.SchemaType_SchemaColumn_DURATION:
        return structuredDatasetType == core.SimpleType_DURATION
    }
    return false
}

func isNoneType(t *core.LiteralType) bool {
    switch t.GetType().(type) {
    case *core.LiteralType_Simple:
        return t.GetSimple() == core.SimpleType_NONE
    default:
        return false
    }
}

func getTypeChecker(t *core.LiteralType) typeChecker {
    switch t.GetType().(type) {
    case *core.LiteralType_CollectionType:
        return collectionTypeChecker{
            literalType: t,
        }
    case *core.LiteralType_MapValueType:
        return mapTypeChecker{
            literalType: t,
        }
    case *core.LiteralType_Schema:
        return schemaTypeChecker{
            literalType: t,
        }
    case *core.LiteralType_UnionType:
        return unionTypeChecker{
            literalType: t,
        }
    case *core.LiteralType_StructuredDatasetType:
        return structuredDatasetChecker{
            literalType: t,
        }
    default:
        if isNoneType(t) {
            return noneTypeChecker{}
        }

        return trivialChecker{
            literalType: t,
        }
    }
}

func AreTypesCastable(upstreamType, downstreamType *core.LiteralType) bool {
    return getTypeChecker(downstreamType).CastsFrom(upstreamType)
}

func GetTagForType(x *core.LiteralType) string {
    if x.GetStructure() == nil {
        return ""
    }
    return x.GetStructure().GetTag()
}
Review comment: looks like this module just provides type checking for upstream and downstream nodes. I think the naming of this file is a bit weird. Or can you add more comments to explain what exactly this module does?